Compare commits
130 Commits
e0c956859b
...
claude/com
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dfd1bccb89 | ||
| 629d112850 | |||
| 32506d6a2e | |||
| 9a60970837 | |||
| 1ade446187 | |||
|
|
d430b96b51 | ||
|
|
140cd7acb7 | ||
| 2f6297ae68 | |||
| 121b8138e3 | |||
| 2b331bb3ef | |||
| ae856bfb2f | |||
| 31f8274b8d | |||
| 06bc0181f8 | |||
| 66e2f87a96 | |||
| db8b520009 | |||
| e2d72223c8 | |||
| a996208cb8 | |||
| 5845334454 | |||
| 170461c359 | |||
| aacec1cce3 | |||
| 6b12fcd7d7 | |||
| 8e58d04568 | |||
| c2e52bee64 | |||
| 9cd641923d | |||
| 8abab40778 | |||
| 19fb2c037c | |||
| e62f3e7b07 | |||
| af68304692 | |||
| 20438b7ef5 | |||
| e3022f20c5 | |||
| e9738420ea | |||
| dfcd172ef3 | |||
| 9cd20a1867 | |||
| 420227d70c | |||
| e893319cfe | |||
| 93eeacfe8f | |||
| 17bdebfb52 | |||
| 267d92933e | |||
| 159c59734e | |||
| 7ba5c414b1 | |||
| a98a7192bb | |||
| 1d1073cba1 | |||
| cf061c1505 | |||
| 5ebfc4f3aa | |||
| f20f54b128 | |||
| f2b0b57535 | |||
| e6fe2314de | |||
| c8d77aaa48 | |||
| b13fbfe8c7 | |||
| 280a5996f6 | |||
| 9a082d2950 | |||
| 82433955bd | |||
| 8e2456dcae | |||
| 1acd8c3bff | |||
| 2de0cde94c | |||
| 94c7c90b91 | |||
| f47fbfcf93 | |||
| 04771f370c | |||
| 208c1dd7bc | |||
| 61a4f27af4 | |||
| a047144922 | |||
| 508a86d16c | |||
| 16e1ada261 | |||
| 6bd080f8c4 | |||
| be3a5191c5 | |||
| b0282b7f8b | |||
| ac72905ecb | |||
| 7d4df25d16 | |||
| 538828b91a | |||
| 14160854b9 | |||
| 36d486d78c | |||
| 9d137a40d3 | |||
| 3bf6b8c6c9 | |||
| 4759374883 | |||
| cb6e34d5ce | |||
| 2b72951e66 | |||
| 69dad7cc74 | |||
| efa5aca35f | |||
| 2b83defbc3 | |||
| a59dac7fe1 | |||
| 1e31595d7f | |||
| c429dcc033 | |||
| 9146118df1 | |||
| 07d15001ae | |||
| a0b366e94a | |||
| 3790a3bd9e | |||
| 0a07c61ca3 | |||
| 337b6061b2 | |||
| 467fdd2a6c | |||
| c12ad94b7f | |||
| 05e241c792 | |||
| 5ee4c71fc1 | |||
| 81908e48ea | |||
| 6230558b91 | |||
| 2c215353d4 | |||
| d526025926 | |||
| 411239bea4 | |||
| 7e6c4c16ce | |||
| ea0e3d6f29 | |||
| 8db89373e0 | |||
| e719f593de | |||
| 16adfd6f26 | |||
| 704f29362a | |||
| 42767e3119 | |||
| a550a8d0be | |||
| 063741adc7 | |||
| ad2f16d93b | |||
| b0b36df4e4 | |||
| aa7f2dab32 | |||
| d2d553eed6 | |||
| 2ca277b6e6 | |||
| bfcbe086f2 | |||
| c92eb1b57b | |||
| 07347a644f | |||
| f1e66966f3 | |||
| d1c40c633f | |||
| 0e82e238c1 | |||
| 2fed5d6ce1 | |||
| d9bb9363dd | |||
| e156cf7c87 | |||
| 76ab63a200 | |||
| a32d4cc179 | |||
| 0bd30a0eb8 | |||
| 0626b8d496 | |||
| 25663fc79e | |||
| fe4989bbcc | |||
| 36271585d9 | |||
| 18c7989983 | |||
| c28d7aeffc | |||
| f7e9c98bd9 |
36
.env.example
36
.env.example
@@ -5,3 +5,39 @@ DATABASE_URL=postgresql://hoafinance:change_me@postgres:5432/hoafinance
|
||||
REDIS_URL=redis://redis:6379
|
||||
JWT_SECRET=change_me_to_random_string
|
||||
NODE_ENV=development
|
||||
|
||||
# AI Investment Advisor (OpenAI-compatible API)
|
||||
AI_API_URL=https://integrate.api.nvidia.com/v1
|
||||
AI_API_KEY=your_nvidia_api_key_here
|
||||
AI_MODEL=qwen/qwen3.5-397b-a17b
|
||||
# Set to 'true' to enable detailed AI prompt/response logging
|
||||
AI_DEBUG=false
|
||||
|
||||
# Stripe Billing
|
||||
STRIPE_SECRET_KEY=sk_test_your_stripe_secret_key
|
||||
STRIPE_WEBHOOK_SECRET=whsec_your_webhook_secret
|
||||
|
||||
# Stripe Price IDs (Monthly)
|
||||
STRIPE_STARTER_MONTHLY_PRICE_ID=price_starter_monthly
|
||||
STRIPE_PROFESSIONAL_MONTHLY_PRICE_ID=price_professional_monthly
|
||||
STRIPE_ENTERPRISE_MONTHLY_PRICE_ID=price_enterprise_monthly
|
||||
|
||||
# Stripe Price IDs (Annual — 25% discount)
|
||||
STRIPE_STARTER_ANNUAL_PRICE_ID=price_starter_annual
|
||||
STRIPE_PROFESSIONAL_ANNUAL_PRICE_ID=price_professional_annual
|
||||
STRIPE_ENTERPRISE_ANNUAL_PRICE_ID=price_enterprise_annual
|
||||
|
||||
# Trial configuration
|
||||
REQUIRE_PAYMENT_METHOD_FOR_TRIAL=false
|
||||
|
||||
# Email (Resend)
|
||||
RESEND_API_KEY=re_your_resend_api_key
|
||||
|
||||
# Application
|
||||
APP_URL=http://localhost
|
||||
INVITE_TOKEN_SECRET=dev-invite-secret
|
||||
|
||||
# New Relic APM — set ENABLED=true and provide your license key to activate
|
||||
NEW_RELIC_ENABLED=false
|
||||
NEW_RELIC_LICENSE_KEY=your_new_relic_license_key_here
|
||||
NEW_RELIC_APP_NAME=HOALedgerIQ_App
|
||||
|
||||
28
.env.test.example
Normal file
28
.env.test.example
Normal file
@@ -0,0 +1,28 @@
|
||||
# ─── Playwright E2E Test Environment ────────────────────────────────
|
||||
# Copy to .env.test and fill in values for your local or CI setup.
|
||||
|
||||
# Base URL of the running application (nginx proxy)
|
||||
# Local dev: http://localhost (Docker Compose nginx on port 80)
|
||||
# Production: https://your-production-domain.com
|
||||
BASE_URL=http://localhost
|
||||
|
||||
# ─── Test Database ──────────────────────────────────────────────────
|
||||
# Direct Postgres connection for test data seeding/cleanup.
|
||||
# Use the SAME database as Docker Compose postgres service.
|
||||
# WARNING: Tests will create/delete data — never point at production.
|
||||
TEST_DB_URL=postgresql://hoafinance:change_me@localhost:5432/hoafinance
|
||||
|
||||
# ─── Test User Credentials ──────────────────────────────────────────
|
||||
# Pre-seeded user for authenticated test flows.
|
||||
# The seed script (tests/fixtures/db.fixture.ts) creates this user.
|
||||
TEST_USER_EMAIL=e2e-treasurer@test.hoaledgeriq.com
|
||||
TEST_USER_PASSWORD=TestPass123!
|
||||
TEST_USER_ROLE=treasurer
|
||||
|
||||
# ─── API Base URL ───────────────────────────────────────────────────
|
||||
# Backend API base (through nginx). Usually same as BASE_URL + /api
|
||||
API_BASE_URL=http://localhost/api
|
||||
|
||||
# ─── CI Settings ────────────────────────────────────────────────────
|
||||
# CI=true is typically set by CI providers automatically.
|
||||
# CI=true
|
||||
15
.gitignore
vendored
15
.gitignore
vendored
@@ -24,6 +24,11 @@ postgres_data/
|
||||
redis_data/
|
||||
pgdata/
|
||||
|
||||
# Database backups
|
||||
backups/
|
||||
*.dump
|
||||
*.dump.gz
|
||||
|
||||
# SSL
|
||||
letsencrypt/
|
||||
|
||||
@@ -39,3 +44,13 @@ coverage/
|
||||
|
||||
# TypeScript
|
||||
*.tsbuildinfo
|
||||
|
||||
# Playwright
|
||||
/test-results/
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
tests/.auth/
|
||||
*-snapshots/
|
||||
|
||||
# Test environment
|
||||
.env.test
|
||||
|
||||
229
CLAUDE.md
Normal file
229
CLAUDE.md
Normal file
@@ -0,0 +1,229 @@
|
||||
# CLAUDE.md – HOA Financial Platform (HOALedgerIQ)
|
||||
|
||||
## Project Overview
|
||||
|
||||
Multi-tenant SaaS platform for HOA (Homeowners Association) financial management. Handles chart of accounts, journal entries, budgets, invoices, payments, reserve planning, and board scenario planning.
|
||||
|
||||
---
|
||||
|
||||
## Stack & Framework
|
||||
|
||||
| Layer | Technology |
|
||||
| --------- | --------------------------------------------------- |
|
||||
| Backend | **NestJS 10** (TypeScript), runs on port 3000 |
|
||||
| Frontend | **React 18** + Vite 5 + Mantine UI + Zustand |
|
||||
| Database | **PostgreSQL** via **TypeORM 0.3** |
|
||||
| Cache | **Redis** (BullMQ for queues) |
|
||||
| Auth | **Passport.js** – JWT access + httpOnly refresh |
|
||||
| Payments | **Stripe** (checkout, subscriptions, webhooks) |
|
||||
| Email | **Resend** |
|
||||
| AI | NVIDIA API (Qwen model) for investment advisor |
|
||||
| Monitoring | **New Relic** APM (app name: `HOALedgerIQ_App`) |
|
||||
| Infra | Docker Compose (dev + prod), Nginx reverse proxy |
|
||||
|
||||
---
|
||||
|
||||
## Auth Pattern
|
||||
|
||||
- **Access token**: JWT, 1-hour TTL, payload `{ sub, email, orgId, role, isSuperadmin }`
|
||||
- **Refresh token**: 64-byte random, SHA256-hashed in DB, 30-day TTL, sent as httpOnly cookie `ledgeriq_rt`
|
||||
- **MFA**: TOTP via `otplib`, challenge token (5-min TTL), recovery codes
|
||||
- **Passkeys**: WebAuthn via `@simplewebauthn/server`
|
||||
- **SSO**: Google OAuth 2.0, Azure AD
|
||||
- **Password hashing**: bcryptjs, cost 12
|
||||
- **Rate limiting**: 100 req/min global (Throttler), custom per endpoint
|
||||
|
||||
### Guards & Middleware
|
||||
|
||||
- `TenantMiddleware` – extracts `orgId` from JWT, sets tenant schema (60s cache)
|
||||
- `JwtAuthGuard` – Passport JWT guard on all protected routes
|
||||
- `WriteAccessGuard` – blocks write ops for `viewer` role and `past_due` orgs
|
||||
- `@AllowViewer()` decorator – exempts read endpoints from WriteAccessGuard
|
||||
|
||||
### Roles
|
||||
|
||||
`president`, `treasurer`, `secretary`, `member_at_large`, `manager`, `homeowner`, `admin`, `viewer`
|
||||
|
||||
---
|
||||
|
||||
## Multi-Tenant Architecture
|
||||
|
||||
- **Shared schema** (`shared`): users, organizations, user_organizations, refresh_tokens, invite_tokens, login_history, cd_rates
|
||||
- **Tenant schemas** (dynamic, per org): accounts, journal_entries, budgets, invoices, payments, units, vendors, etc.
|
||||
- Schema name stored in `shared.organizations.schema_name`
|
||||
|
||||
---
|
||||
|
||||
## Route Map (180+ endpoints)
|
||||
|
||||
### Auth (`/api/auth`)
|
||||
| Method | Path | Purpose |
|
||||
| ------ | ----------------------- | -------------------------------- |
|
||||
| POST | /login | Email/password login |
|
||||
| POST | /refresh | Refresh access token (cookie) |
|
||||
| POST | /logout | Revoke refresh token |
|
||||
| POST | /logout-everywhere | Revoke all sessions |
|
||||
| GET | /profile | Current user profile |
|
||||
| POST | /register | Register (disabled by default) |
|
||||
| POST | /activate | Activate invited user |
|
||||
| POST | /forgot-password | Request password reset |
|
||||
| POST | /reset-password | Reset with token |
|
||||
| PATCH | /change-password | Change password (authed) |
|
||||
| POST | /switch-org | Switch active organization |
|
||||
|
||||
### Auth MFA (`/api/auth/mfa`)
|
||||
| POST /setup | POST /enable | POST /verify | POST /disable | GET /status |
|
||||
|
||||
### Auth Passkeys (`/api/auth/passkeys`)
|
||||
| POST /register-options | POST /register | POST /login-options | POST /login | GET / | DELETE /:id |
|
||||
|
||||
### Admin (`/api/admin`) – superadmin only
|
||||
| GET /metrics | GET /users | GET /organizations | PUT /organizations/:id/subscription | POST /impersonate/:userId | POST /tenants |
|
||||
|
||||
### Organizations (`/api/organizations`)
|
||||
| POST / | GET / | PATCH /settings | GET /members | POST /members | PUT /members/:id/role | DELETE /members/:id |
|
||||
|
||||
### Accounts (`/api/accounts`)
|
||||
| GET / | GET /trial-balance | POST / | PUT /:id | PUT /:id/set-primary | POST /bulk-opening-balances | POST /:id/opening-balance | POST /:id/adjust-balance |
|
||||
|
||||
### Journal Entries (`/api/journal-entries`)
|
||||
| GET / | GET /:id | POST / | POST /:id/post | POST /:id/void |
|
||||
|
||||
### Budgets (`/api/budgets`)
|
||||
| GET /:year | PUT /:year | GET /:year/vs-actual | POST /:year/import | GET /:year/template |
|
||||
|
||||
### Invoices (`/api/invoices`)
|
||||
| GET / | GET /:id | POST /generate-preview | POST /generate-bulk | POST /apply-late-fees |
|
||||
|
||||
### Payments (`/api/payments`)
|
||||
| GET / | GET /:id | POST / | PUT /:id | DELETE /:id |
|
||||
|
||||
### Units (`/api/units`)
|
||||
| GET / | GET /:id | POST / | PUT /:id | DELETE /:id | GET /export | POST /import |
|
||||
|
||||
### Vendors (`/api/vendors`)
|
||||
| GET / | GET /:id | POST / | PUT /:id | GET /export | POST /import | GET /1099-data |
|
||||
|
||||
### Reports (`/api/reports`)
|
||||
| GET /dashboard | GET /balance-sheet | GET /income-statement | GET /cash-flow | GET /cash-flow-sankey | GET /aging | GET /year-end | GET /cash-flow-forecast | GET /quarterly |
|
||||
|
||||
### Board Planning (`/api/board-planning`)
|
||||
Scenarios CRUD, scenario investments, scenario assessments, projections, budget plans – 28 endpoints total.
|
||||
|
||||
### Other Modules
|
||||
- `/api/fiscal-periods` – list, close, lock
|
||||
- `/api/reserve-components` – CRUD
|
||||
- `/api/capital-projects` – CRUD
|
||||
- `/api/projects` – CRUD + planning + import/export
|
||||
- `/api/assessment-groups` – CRUD + summary + default
|
||||
- `/api/monthly-actuals` – GET/POST /:year/:month
|
||||
- `/api/health-scores` – latest + calculate
|
||||
- `/api/investment-planning` – snapshot, market-rates, recommendations
|
||||
- `/api/investment-accounts` – CRUD
|
||||
- `/api/attachments` – upload, list, download, delete (10MB limit)
|
||||
- `/api/onboarding` – progress get/patch
|
||||
- `/api/billing` – trial, checkout, webhook, subscription, portal
|
||||
|
||||
---
|
||||
|
||||
## Database
|
||||
|
||||
- **Connection pool**: min 5, max 30, 30s idle, 5s connect timeout
|
||||
- **Migrations**: SQL files in `db/migrations/` (manual execution, no ORM runner)
|
||||
- **Init script**: `db/init/00-init.sql` (shared schema DDL)
|
||||
|
||||
---
|
||||
|
||||
## Key File Paths
|
||||
|
||||
| Purpose | Path |
|
||||
| ---------------------- | ------------------------------------------------- |
|
||||
| NestJS bootstrap | `backend/src/main.ts` |
|
||||
| Root module | `backend/src/app.module.ts` |
|
||||
| Auth controller | `backend/src/modules/auth/auth.controller.ts` |
|
||||
| Auth service | `backend/src/modules/auth/auth.service.ts` |
|
||||
| Refresh token svc | `backend/src/modules/auth/refresh-token.service.ts` |
|
||||
| JWT strategy | `backend/src/modules/auth/strategies/jwt.strategy.ts` |
|
||||
| Tenant middleware | `backend/src/database/tenant.middleware.ts` |
|
||||
| Write-access guard | `backend/src/common/guards/write-access.guard.ts` |
|
||||
| DB schema init | `db/init/00-init.sql` |
|
||||
| Env example | `.env.example` |
|
||||
| Docker compose (dev) | `docker-compose.yml` |
|
||||
| Frontend entry | `frontend/src/main.tsx` |
|
||||
| Frontend pages | `frontend/src/pages/` |
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables (critical)
|
||||
|
||||
```
|
||||
DATABASE_URL – PostgreSQL connection string
|
||||
REDIS_URL – Redis connection
|
||||
JWT_SECRET – JWT signing key
|
||||
INVITE_TOKEN_SECRET – Invite token signing
|
||||
STRIPE_SECRET_KEY – Stripe API key
|
||||
STRIPE_WEBHOOK_SECRET – Stripe webhook verification
|
||||
RESEND_API_KEY – Email service
|
||||
NEW_RELIC_APP_NAME – "HOALedgerIQ_App"
|
||||
NEW_RELIC_LICENSE_KEY – New Relic license
|
||||
APP_URL – Base URL for email links
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## New Relic
|
||||
|
||||
- **App name**: `HOALedgerIQ_App` (env: `NEW_RELIC_APP_NAME`)
|
||||
- Enabled via `NEW_RELIC_ENABLED=true`
|
||||
- NRQL query library: `load-tests/analysis/nrql-queries.sql`
|
||||
|
||||
---
|
||||
|
||||
## Load Testing
|
||||
|
||||
### Run k6 scenarios
|
||||
|
||||
```bash
|
||||
# Auth + Dashboard flow (staging)
|
||||
k6 run --env TARGET_ENV=staging load-tests/scenarios/auth-dashboard-flow.js
|
||||
|
||||
# CRUD flow (staging)
|
||||
k6 run --env TARGET_ENV=staging load-tests/scenarios/crud-flow.js
|
||||
|
||||
# Local dev
|
||||
k6 run --env TARGET_ENV=local load-tests/scenarios/auth-dashboard-flow.js
|
||||
```
|
||||
|
||||
### Conventions
|
||||
|
||||
- Scenarios live in `load-tests/scenarios/`
|
||||
- Config in `load-tests/config/environments.json` (staging/production/local thresholds)
|
||||
- Test users parameterized from `load-tests/config/user-pool.csv`
|
||||
- Baseline results stored in `load-tests/analysis/baseline.json`
|
||||
- NRQL queries for New Relic in `load-tests/analysis/nrql-queries.sql`
|
||||
- All k6 scripts use `SharedArray` for user pool, `http.batch()` for parallel requests
|
||||
- Custom metrics: `*_duration` trends + `*_error_rate` rates per journey
|
||||
- Thresholds: p95 latency + error rate per environment
|
||||
|
||||
### User Pool CSV Format
|
||||
|
||||
```
|
||||
email,password,orgId,role
|
||||
```
|
||||
|
||||
Roles match the app: `treasurer`, `admin`, `president`, `manager`, `member_at_large`, `viewer`, `homeowner`
|
||||
|
||||
---
|
||||
|
||||
## Fix Conventions
|
||||
|
||||
- Backend tests: `npm run test` (Jest, `*.spec.ts` co-located with source)
|
||||
- E2E tests: `npm run test:e2e`
|
||||
- Backend build: `npm run build` (NestJS CLI)
|
||||
- Frontend dev: `npm run dev` (Vite, port 5173)
|
||||
- Frontend build: `npm run build`
|
||||
- Always run `npm run build` in `backend/` after changes to verify compilation
|
||||
- TypeORM entities use decorators (`@Entity`, `@Column`, etc.)
|
||||
- Multi-tenant: any new module touching tenant data must use `TenantService` to get the correct schema connection
|
||||
- New endpoints need `@UseGuards(JwtAuthGuard)` and should respect `WriteAccessGuard`
|
||||
- Use `@AllowViewer()` on read-only endpoints
|
||||
587
ONBOARDING-AND-AUTH.md
Normal file
587
ONBOARDING-AND-AUTH.md
Normal file
@@ -0,0 +1,587 @@
|
||||
# HOA LedgerIQ -- Payment, Onboarding & Authentication Guide
|
||||
|
||||
> **Version:** 2026.03.18
|
||||
> **Last updated:** March 18, 2026
|
||||
> **Migrations:** `db/migrations/015-saas-onboarding-auth.sql`, `db/migrations/017-billing-enhancements.sql`
|
||||
|
||||
---
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [High-Level Flow](#1-high-level-flow)
|
||||
2. [Stripe Billing & Checkout](#2-stripe-billing--checkout)
|
||||
3. [14-Day Free Trial](#3-14-day-free-trial)
|
||||
4. [Monthly / Annual Billing](#4-monthly--annual-billing)
|
||||
5. [Provisioning Pipeline](#5-provisioning-pipeline)
|
||||
6. [Account Activation (Magic Link)](#6-account-activation-magic-link)
|
||||
7. [Guided Onboarding Checklist](#7-guided-onboarding-checklist)
|
||||
8. [Subscription Management & Upgrade/Downgrade](#8-subscription-management--upgradedowngrade)
|
||||
9. [ACH / Invoice Billing](#9-ach--invoice-billing)
|
||||
10. [Access Control & Grace Periods](#10-access-control--grace-periods)
|
||||
11. [Authentication & Sessions](#11-authentication--sessions)
|
||||
12. [Multi-Factor Authentication (TOTP)](#12-multi-factor-authentication-totp)
|
||||
13. [Single Sign-On (SSO)](#13-single-sign-on-sso)
|
||||
14. [Passkeys (WebAuthn)](#14-passkeys-webauthn)
|
||||
15. [Environment Variables Reference](#15-environment-variables-reference)
|
||||
16. [Manual Intervention & Ops Tasks](#16-manual-intervention--ops-tasks)
|
||||
17. [What's Stubbed vs. Production-Ready](#17-whats-stubbed-vs-production-ready)
|
||||
18. [API Endpoint Reference](#18-api-endpoint-reference)
|
||||
|
||||
---
|
||||
|
||||
## 1. High-Level Flow
|
||||
|
||||
```
|
||||
Visitor hits /pricing
|
||||
|
|
||||
v
|
||||
Selects plan (Starter / Professional / Enterprise)
|
||||
Chooses billing frequency (Monthly / Annual — 25% discount)
|
||||
Enters email + business name
|
||||
|
|
||||
v
|
||||
POST /api/billing/start-trial (no card required)
|
||||
|
|
||||
v
|
||||
Backend creates Stripe customer + subscription with trial_period_days=14
|
||||
Backend provisions: org -> schema -> user -> invite token -> email
|
||||
|
|
||||
v
|
||||
Frontend navigates to /onboarding/pending?session_id=xxx
|
||||
(polls GET /api/billing/status every 3s)
|
||||
|
|
||||
v
|
||||
Status returns "active" -> user is redirected to /login
|
||||
|
|
||||
v
|
||||
User clicks activation link from email
|
||||
|
|
||||
v
|
||||
GET /activate?token=xxx -> validates token
|
||||
POST /activate -> sets password + name, issues session
|
||||
|
|
||||
v
|
||||
Redirect to /onboarding (4-step guided wizard)
|
||||
|
|
||||
v
|
||||
Dashboard (14-day trial active)
|
||||
|
|
||||
v
|
||||
Day 11: Stripe fires customer.subscription.trial_will_end webhook
|
||||
Backend sends trial-ending reminder email
|
||||
|
|
||||
v
|
||||
User adds payment method via Stripe Portal (Settings > Manage Billing)
|
||||
|
|
||||
v
|
||||
Trial ends -> Stripe charges card -> subscription becomes 'active'
|
||||
OR: No card -> subscription cancelled -> org archived
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Stripe Billing & Checkout
|
||||
|
||||
### Plans & Pricing
|
||||
|
||||
| Plan | Monthly | Annual (25% off) | Unit Limit |
|
||||
|------|---------|-------------------|------------|
|
||||
| Starter | $29/mo | $261/yr ($21.75/mo) | 50 units |
|
||||
| Professional | $79/mo | $711/yr ($59.25/mo) | 200 units |
|
||||
| Enterprise | Custom | Custom | Unlimited |
|
||||
|
||||
### Stripe Products & Prices
|
||||
|
||||
Each plan has **two Stripe Prices** (monthly and annual):
|
||||
|
||||
| Env Variable | Description |
|
||||
|-------------|-------------|
|
||||
| `STRIPE_STARTER_MONTHLY_PRICE_ID` | Starter monthly recurring price |
|
||||
| `STRIPE_STARTER_ANNUAL_PRICE_ID` | Starter annual recurring price |
|
||||
| `STRIPE_PROFESSIONAL_MONTHLY_PRICE_ID` | Professional monthly recurring price |
|
||||
| `STRIPE_PROFESSIONAL_ANNUAL_PRICE_ID` | Professional annual recurring price |
|
||||
| `STRIPE_ENTERPRISE_MONTHLY_PRICE_ID` | Enterprise monthly recurring price |
|
||||
| `STRIPE_ENTERPRISE_ANNUAL_PRICE_ID` | Enterprise annual recurring price |
|
||||
|
||||
Backward compatibility: `STRIPE_STARTER_PRICE_ID` (old single var) maps to monthly if the new `_MONTHLY_` var is not set.
|
||||
|
||||
### Two Billing Paths
|
||||
|
||||
| Path | Audience | Payment | Trial |
|
||||
|------|----------|---------|-------|
|
||||
| **Path A: Self-serve (Card)** | Starter & Professional | Automatic card charge | 14-day no-card trial |
|
||||
| **Path B: Invoice / ACH** | Enterprise (admin-set) | Invoice with Net-30 terms | Admin configures |
|
||||
|
||||
### Webhook Events Handled
|
||||
|
||||
| Event | Action |
|
||||
|-------|--------|
|
||||
| `checkout.session.completed` | Triggers full provisioning pipeline (card-required flow) |
|
||||
| `invoice.payment_succeeded` | Sets org status to `active` (reactivation after trial/past_due) |
|
||||
| `invoice.payment_failed` | Sets org to `past_due`, sends payment-failed email |
|
||||
| `customer.subscription.deleted` | Sets org status to `archived` |
|
||||
| `customer.subscription.trial_will_end` | Sends trial-ending reminder email (3 days before) |
|
||||
| `customer.subscription.updated` | Syncs plan, interval, status, and collection_method to DB |
|
||||
|
||||
All webhook events are deduplicated via the `shared.stripe_events` table (idempotency by Stripe event ID).
|
||||
|
||||
---
|
||||
|
||||
## 3. 14-Day Free Trial
|
||||
|
||||
### How It Works
|
||||
|
||||
1. User visits `/pricing`, selects a plan and billing frequency
|
||||
2. User enters email + business name (required)
|
||||
3. Clicks "Start Free Trial"
|
||||
4. Backend creates Stripe customer (no payment method)
|
||||
5. Backend creates subscription with `trial_period_days: 14`
|
||||
6. Backend provisions org with `status = 'trial'` immediately
|
||||
7. User receives activation email, sets password, starts using the app
|
||||
|
||||
### Trial Configuration
|
||||
|
||||
| Setting | Description |
|
||||
|---------|-------------|
|
||||
| `REQUIRE_PAYMENT_METHOD_FOR_TRIAL` | `false` (default): no-card trial. `true`: uses Stripe Checkout (card required upfront). |
|
||||
|
||||
### Trial Lifecycle
|
||||
|
||||
| Day | Event |
|
||||
|-----|-------|
|
||||
| 0 | Trial starts, full access granted |
|
||||
| 11 | `customer.subscription.trial_will_end` webhook fires |
|
||||
| 11 | Trial-ending email sent ("Your trial ends in 3 days") |
|
||||
| 14 | Trial ends |
|
||||
| 14 | If card on file: Stripe charges, subscription becomes `active` |
|
||||
| 14 | If no card: subscription cancelled, org set to `archived` |
|
||||
|
||||
### Trial Behavior by Plan Frequency
|
||||
|
||||
- **Monthly trial**: Trial ends, charge monthly price
|
||||
- **Annual trial**: Trial ends, charge full annual amount
|
||||
|
||||
### Trial End Behavior
|
||||
|
||||
Configured in Stripe subscription: `trial_settings.end_behavior.missing_payment_method: 'cancel'`
|
||||
|
||||
When trial ends without a payment method, the subscription is cancelled and the org is archived. Users can resubscribe at any time.
|
||||
|
||||
---
|
||||
|
||||
## 4. Monthly / Annual Billing
|
||||
|
||||
### Pricing Page Toggle
|
||||
|
||||
The pricing page (`PricingPage.tsx`) features a segmented control toggle:
|
||||
- **Monthly**: Shows monthly prices ($29/mo, $79/mo)
|
||||
- **Annual (Save 25%)**: Shows effective monthly rate + annual total ($21.75/mo billed annually at $261/yr)
|
||||
|
||||
The selected billing frequency is passed to the backend when starting a trial or creating a checkout session.
|
||||
|
||||
### Annual Discount
|
||||
|
||||
Annual pricing = Monthly price x 12 x 0.75 (25% discount):
|
||||
- Starter: $29 x 12 x 0.75 = **$261/yr**
|
||||
- Professional: $79 x 12 x 0.75 = **$711/yr**
|
||||
|
||||
---
|
||||
|
||||
## 5. Provisioning Pipeline
|
||||
|
||||
When a trial starts or `checkout.session.completed` fires, the backend runs **inline provisioning**:
|
||||
|
||||
1. **Create organization** in `shared.organizations` with:
|
||||
- `name` = business name from signup
|
||||
- `schema_name` = `tenant_{random_12_chars}`
|
||||
- `status` = `trial` (for trial) or `active` (for card checkout)
|
||||
- `plan_level` = selected plan
|
||||
- `billing_interval` = `month` or `year`
|
||||
- `stripe_customer_id` + `stripe_subscription_id`
|
||||
- `trial_ends_at` (if trial)
|
||||
- Uses `ON CONFLICT (stripe_customer_id)` for idempotency
|
||||
|
||||
2. **Create tenant schema** via `TenantSchemaService.createTenantSchema()`
|
||||
3. **Create or find user** in `shared.users` by email
|
||||
4. **Create membership** in `shared.user_organizations` (role: `president`)
|
||||
5. **Generate invite token** (JWT, 72-hour expiry)
|
||||
6. **Send activation email** with link to set password
|
||||
7. **Initialize onboarding** progress row
|
||||
|
||||
### Provisioning Status Polling
|
||||
|
||||
`GET /api/billing/status?session_id=xxx` (no auth required)
|
||||
|
||||
Accepts both Stripe checkout session IDs and subscription IDs. Returns: `{ status }` where status is:
|
||||
- `not_configured` -- Stripe not set up
|
||||
- `pending` -- no customer ID yet
|
||||
- `provisioning` -- org exists but not ready
|
||||
- `active` -- ready (includes `trial` status)
|
||||
|
||||
---
|
||||
|
||||
## 6. Account Activation (Magic Link)
|
||||
|
||||
### Validate Token
|
||||
|
||||
`GET /api/auth/activate?token=xxx` -- returns `{ valid, email, orgName, orgId, userId }`
|
||||
|
||||
### Activate Account
|
||||
|
||||
`POST /api/auth/activate` -- body `{ token, password, fullName }` -- sets password, issues session
|
||||
|
||||
---
|
||||
|
||||
## 7. Guided Onboarding Checklist
|
||||
|
||||
| Step Key | UI Label | Description |
|
||||
|----------|----------|-------------|
|
||||
| `profile` | Profile | Set up user profile |
|
||||
| `workspace` | Workspace | Configure organization settings |
|
||||
| `invite_member` | Invite Member | Invite at least one team member |
|
||||
| `first_workflow` | First Account | Create the first chart-of-accounts entry |
|
||||
|
||||
---
|
||||
|
||||
## 8. Subscription Management & Upgrade/Downgrade
|
||||
|
||||
### Stripe Customer Portal
|
||||
|
||||
Users manage their subscription through the **Stripe Customer Portal**, accessed via:
|
||||
- Settings page > Billing card > "Manage Billing" button
|
||||
- Calls `POST /api/billing/portal` which creates a portal session and returns the URL
|
||||
|
||||
### What Users Can Do in the Portal
|
||||
|
||||
- **Switch plans**: Change between Starter and Professional
|
||||
- **Switch billing frequency**: Monthly to Annual (and vice versa)
|
||||
- **Update payment method**: Add/change credit card
|
||||
- **Cancel subscription**: Cancels at end of current period
|
||||
- **View invoices**: See billing history
|
||||
|
||||
### Upgrade/Downgrade Behavior
|
||||
|
||||
| Change | Behavior |
|
||||
|--------|----------|
|
||||
| Monthly to Annual | Immediate. Prorate remaining monthly time, start annual cycle now. |
|
||||
| Annual to Monthly | Scheduled at end of current annual period. |
|
||||
| Starter to Professional | Immediate. Prorate price difference. |
|
||||
| Professional to Starter | Scheduled at end of current period. |
|
||||
|
||||
Stripe handles proration automatically when configured with `proration_behavior: 'create_prorations'`.
|
||||
|
||||
### Subscription Info Endpoint
|
||||
|
||||
`GET /api/billing/subscription` (auth required) returns:
|
||||
```json
|
||||
{
|
||||
"plan": "professional",
|
||||
"planName": "Professional",
|
||||
"billingInterval": "month",
|
||||
"status": "active",
|
||||
"collectionMethod": "charge_automatically",
|
||||
"trialEndsAt": null,
|
||||
"currentPeriodEnd": "2026-04-18T00:00:00.000Z",
|
||||
"cancelAtPeriodEnd": false
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 9. ACH / Invoice Billing
|
||||
|
||||
### Overview
|
||||
|
||||
For enterprise customers who need to pay via ACH bank transfer or wire, an admin can switch the subscription's collection method from automatic card charge to invoice billing.
|
||||
|
||||
### How It Works
|
||||
|
||||
1. **Admin** calls `PUT /api/admin/organizations/:id/billing` with:
|
||||
```json
|
||||
{ "collectionMethod": "send_invoice", "daysUntilDue": 30 }
|
||||
```
|
||||
2. Stripe subscription is updated: `collection_method = 'send_invoice'`, `days_until_due = 30`
|
||||
3. At each billing cycle, Stripe generates an invoice and emails it to the customer
|
||||
4. Customer pays via ACH / wire / bank transfer
|
||||
5. When payment is received, Stripe marks invoice paid and org remains active
|
||||
|
||||
### Access Rules for Invoice Customers
|
||||
|
||||
| Stage | Access |
|
||||
|-------|--------|
|
||||
| Trial | Full |
|
||||
| Invoice issued | Full |
|
||||
| Due date passed | Read-only (past_due) |
|
||||
| 15+ days overdue | Admin may archive |
|
||||
|
||||
### Switching Back
|
||||
|
||||
To switch back to automatic card billing:
|
||||
```json
|
||||
{ "collectionMethod": "charge_automatically" }
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 10. Access Control & Grace Periods
|
||||
|
||||
### Organization Status Access Rules
|
||||
|
||||
| Status | Access | Description |
|
||||
|--------|--------|-------------|
|
||||
| `trial` | **Full** | 14-day trial, all features available |
|
||||
| `active` | **Full** | Paid subscription, all features available |
|
||||
| `past_due` | **Read-only** | Payment failed or invoice overdue. Users can view data but cannot create/edit/delete. |
|
||||
| `suspended` | **Blocked** | Admin suspended. 403 on all org-scoped endpoints. |
|
||||
| `archived` | **Blocked** | Subscription cancelled. 403 on all org-scoped endpoints. Data preserved. |
|
||||
|
||||
### Implementation
|
||||
|
||||
- **Tenant Middleware** (`tenant.middleware.ts`): Blocks `suspended` and `archived` with 403. Sets `req.orgPastDue = true` for `past_due`.
|
||||
- **WriteAccessGuard** (`write-access.guard.ts`): Blocks POST/PUT/PATCH/DELETE for `past_due` orgs with message: "Your subscription is past due. Please update your payment method."
|
||||
|
||||
---
|
||||
|
||||
## 11. Authentication & Sessions
|
||||
|
||||
### Token Architecture
|
||||
|
||||
| Token | Type | Lifetime | Storage |
|
||||
|-------|------|----------|---------|
|
||||
| Access token | JWT | 1 hour | Frontend Zustand store |
|
||||
| Refresh token | Opaque (64 bytes) | 30 days | httpOnly cookie (`ledgeriq_rt`) |
|
||||
| MFA challenge | JWT | 5 minutes | Frontend state |
|
||||
| Invite/activation | JWT | 72 hours | URL query parameter |
|
||||
|
||||
### Session Endpoints
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `POST` | `/api/auth/login` | No | Email + password login |
|
||||
| `POST` | `/api/auth/register` | No | Create account |
|
||||
| `POST` | `/api/auth/refresh` | Cookie | Refresh access token |
|
||||
| `POST` | `/api/auth/logout` | Cookie | Revoke current session |
|
||||
| `POST` | `/api/auth/logout-everywhere` | JWT | Revoke all sessions |
|
||||
| `POST` | `/api/auth/switch-org` | JWT | Switch organization |
|
||||
|
||||
---
|
||||
|
||||
## 12. Multi-Factor Authentication (TOTP)
|
||||
|
||||
### MFA Endpoints
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `POST` | `/api/auth/mfa/setup` | JWT | Generate QR code + secret |
|
||||
| `POST` | `/api/auth/mfa/enable` | JWT | Enable MFA with TOTP code |
|
||||
| `POST` | `/api/auth/mfa/verify` | mfaToken | Verify during login |
|
||||
| `POST` | `/api/auth/mfa/disable` | JWT | Disable (requires password) |
|
||||
| `GET` | `/api/auth/mfa/status` | JWT | Check MFA status |
|
||||
|
||||
---
|
||||
|
||||
## 13. Single Sign-On (SSO)
|
||||
|
||||
| Provider | Env Vars Required |
|
||||
|----------|-------------------|
|
||||
| Google | `GOOGLE_CLIENT_ID`, `GOOGLE_CLIENT_SECRET`, `GOOGLE_CALLBACK_URL` |
|
||||
| Microsoft/Azure AD | `AZURE_CLIENT_ID`, `AZURE_CLIENT_SECRET`, `AZURE_TENANT_ID`, `AZURE_CALLBACK_URL` |
|
||||
|
||||
SSO providers are conditionally loaded based on env vars.
|
||||
|
||||
---
|
||||
|
||||
## 14. Passkeys (WebAuthn)
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `POST` | `/api/auth/passkeys/register-options` | JWT | Get registration options |
|
||||
| `POST` | `/api/auth/passkeys/register` | JWT | Complete registration |
|
||||
| `POST` | `/api/auth/passkeys/login-options` | No | Get authentication options |
|
||||
| `POST` | `/api/auth/passkeys/login` | No | Authenticate with passkey |
|
||||
| `GET` | `/api/auth/passkeys` | JWT | List user's passkeys |
|
||||
| `DELETE` | `/api/auth/passkeys/:id` | JWT | Remove a passkey |
|
||||
|
||||
---
|
||||
|
||||
## 15. Environment Variables Reference
|
||||
|
||||
### Stripe (Required for billing)
|
||||
|
||||
| Variable | Description |
|
||||
|----------|-------------|
|
||||
| `STRIPE_SECRET_KEY` | Stripe secret key. Must NOT contain "placeholder" to activate. |
|
||||
| `STRIPE_WEBHOOK_SECRET` | Webhook endpoint signing secret |
|
||||
| `STRIPE_STARTER_MONTHLY_PRICE_ID` | Stripe Price ID for Starter monthly |
|
||||
| `STRIPE_STARTER_ANNUAL_PRICE_ID` | Stripe Price ID for Starter annual |
|
||||
| `STRIPE_PROFESSIONAL_MONTHLY_PRICE_ID` | Stripe Price ID for Professional monthly |
|
||||
| `STRIPE_PROFESSIONAL_ANNUAL_PRICE_ID` | Stripe Price ID for Professional annual |
|
||||
| `STRIPE_ENTERPRISE_MONTHLY_PRICE_ID` | Stripe Price ID for Enterprise monthly |
|
||||
| `STRIPE_ENTERPRISE_ANNUAL_PRICE_ID` | Stripe Price ID for Enterprise annual |
|
||||
|
||||
Legacy single-price vars (`STRIPE_STARTER_PRICE_ID`, etc.) are still supported as fallback for monthly prices.
|
||||
|
||||
### Trial Configuration
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `REQUIRE_PAYMENT_METHOD_FOR_TRIAL` | `false` | Set to `true` to require card upfront via Stripe Checkout |
|
||||
|
||||
### SSO (Optional)
|
||||
|
||||
| Variable | Description |
|
||||
|----------|-------------|
|
||||
| `GOOGLE_CLIENT_ID` | Google OAuth client ID |
|
||||
| `GOOGLE_CLIENT_SECRET` | Google OAuth client secret |
|
||||
| `GOOGLE_CALLBACK_URL` | OAuth redirect URI |
|
||||
| `AZURE_CLIENT_ID` | Azure AD application (client) ID |
|
||||
| `AZURE_CLIENT_SECRET` | Azure AD client secret |
|
||||
| `AZURE_TENANT_ID` | Azure AD tenant ID |
|
||||
| `AZURE_CALLBACK_URL` | OAuth redirect URI |
|
||||
|
||||
### WebAuthn / Passkeys
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `WEBAUTHN_RP_ID` | `localhost` | Relying party identifier |
|
||||
| `WEBAUTHN_RP_ORIGIN` | `http://localhost` | Expected browser origin |
|
||||
|
||||
### Other
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `INVITE_TOKEN_SECRET` | `dev-invite-secret` | Secret for invite/activation JWTs |
|
||||
| `APP_URL` | `http://localhost` | Base URL for generated links |
|
||||
| `RESEND_API_KEY` | -- | Resend email provider API key |
|
||||
|
||||
---
|
||||
|
||||
## 16. Manual Intervention & Ops Tasks
|
||||
|
||||
### Stripe Dashboard Setup
|
||||
|
||||
1. **Create Products and Prices** for each plan:
|
||||
- Starter: monthly ($29/mo recurring) + annual ($261/yr recurring)
|
||||
- Professional: monthly ($79/mo recurring) + annual ($711/yr recurring)
|
||||
- Enterprise: monthly + annual (custom pricing)
|
||||
- Copy all Price IDs to env vars
|
||||
|
||||
2. **Configure Stripe Webhook** endpoint:
|
||||
- URL: `https://yourdomain.com/api/webhooks/stripe`
|
||||
- Events: `checkout.session.completed`, `invoice.payment_succeeded`, `invoice.payment_failed`, `customer.subscription.deleted`, `customer.subscription.trial_will_end`, `customer.subscription.updated`
|
||||
|
||||
3. **Configure Stripe Customer Portal**:
|
||||
- Enable plan switching (allow switching between monthly and annual prices)
|
||||
- Enable payment method updates
|
||||
- Enable cancellation
|
||||
- Enable invoice history
|
||||
|
||||
4. **Set production secrets**: `INVITE_TOKEN_SECRET`, `JWT_SECRET`, `WEBAUTHN_RP_ID`, `WEBAUTHN_RP_ORIGIN`
|
||||
|
||||
5. **Configure SSO providers** (optional)
|
||||
|
||||
### Ongoing Ops
|
||||
|
||||
- **Refresh token cleanup**: Schedule `RefreshTokenService.cleanupExpired()` periodically
|
||||
- **Monitor `shared.email_log`**: Check for failed email deliveries
|
||||
- **ACH/Invoice customers**: Admin sets up via `PUT /api/admin/organizations/:id/billing`
|
||||
|
||||
### Finding activation URLs (dev/testing)
|
||||
|
||||
```sql
|
||||
SELECT to_email, metadata->>'activationUrl' AS url, sent_at
|
||||
FROM shared.email_log
|
||||
WHERE template = 'activation'
|
||||
ORDER BY sent_at DESC
|
||||
LIMIT 10;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 17. What's Stubbed vs. Production-Ready
|
||||
|
||||
| Component | Status | Notes |
|
||||
|-----------|--------|-------|
|
||||
| Stripe Checkout (card-required flow) | **Ready** (test mode) | Switch to live keys for production |
|
||||
| Stripe Trial (no-card flow) | **Ready** (test mode) | Creates customer + subscription server-side |
|
||||
| Stripe Webhooks | **Ready** | All 6 events handled with idempotency |
|
||||
| Stripe Customer Portal | **Ready** | Full org-context customer ID lookup implemented |
|
||||
| Monthly/Annual Pricing | **Ready** | Toggle on pricing page, 6 Stripe Price IDs |
|
||||
| ACH/Invoice Billing | **Ready** | Admin endpoint switches collection method |
|
||||
| Provisioning | **Ready** | Inline, supports both trial and active status |
|
||||
| Email service | **Ready** (with Resend) | Falls back to stub logging if not configured |
|
||||
| Trial emails | **Ready** | Trial-ending and trial-expired templates |
|
||||
| Access control (past_due) | **Ready** | Read-only grace period for failed payments |
|
||||
| Activation (magic link) | **Ready** | Full end-to-end flow |
|
||||
| Onboarding checklist | **Ready** | Server-side progress tracking |
|
||||
| Refresh tokens | **Ready** | Needs scheduled cleanup |
|
||||
| TOTP MFA | **Ready** | Full setup, enable, verify, recovery |
|
||||
| SSO (Google/Azure) | **Ready** (needs keys) | Conditional loading |
|
||||
| Passkeys (WebAuthn) | **Ready** | Registration, authentication, removal |
|
||||
|
||||
---
|
||||
|
||||
## 18. API Endpoint Reference
|
||||
|
||||
### Billing
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `POST` | `/api/billing/start-trial` | No | Start 14-day no-card trial |
|
||||
| `POST` | `/api/billing/create-checkout-session` | No | Create Stripe Checkout (card-required flow) |
|
||||
| `POST` | `/api/webhooks/stripe` | Stripe sig | Webhook receiver |
|
||||
| `GET` | `/api/billing/status?session_id=` | No | Poll provisioning status |
|
||||
| `GET` | `/api/billing/subscription` | JWT | Get current subscription info |
|
||||
| `POST` | `/api/billing/portal` | JWT | Create Stripe Customer Portal session |
|
||||
| `PUT` | `/api/admin/organizations/:id/billing` | JWT (superadmin) | Switch billing method (card/invoice) |
|
||||
|
||||
### Auth
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `POST` | `/api/auth/register` | No | Register new user |
|
||||
| `POST` | `/api/auth/login` | No | Login (may return MFA challenge) |
|
||||
| `POST` | `/api/auth/refresh` | Cookie | Refresh access token |
|
||||
| `POST` | `/api/auth/logout` | Cookie | Logout current session |
|
||||
| `POST` | `/api/auth/logout-everywhere` | JWT | Revoke all sessions |
|
||||
| `GET` | `/api/auth/activate?token=` | No | Validate activation token |
|
||||
| `POST` | `/api/auth/activate` | No | Set password + activate |
|
||||
| `POST` | `/api/auth/resend-activation` | No | Resend activation email |
|
||||
| `GET` | `/api/auth/profile` | JWT | Get user profile |
|
||||
| `POST` | `/api/auth/switch-org` | JWT | Switch organization |
|
||||
|
||||
### Onboarding
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `GET` | `/api/onboarding/progress` | JWT | Get onboarding progress |
|
||||
| `PATCH` | `/api/onboarding/progress` | JWT | Mark step complete |
|
||||
|
||||
---
|
||||
|
||||
## Database Tables & Columns
|
||||
|
||||
### Tables Added (Migration 015)
|
||||
|
||||
| Table | Purpose |
|
||||
|-------|---------|
|
||||
| `shared.refresh_tokens` | Hashed refresh tokens with expiry/revocation |
|
||||
| `shared.stripe_events` | Idempotency ledger for Stripe webhooks |
|
||||
| `shared.invite_tokens` | Activation/invite magic links |
|
||||
| `shared.onboarding_progress` | Per-org onboarding step completion |
|
||||
| `shared.user_passkeys` | WebAuthn credentials |
|
||||
| `shared.email_log` | Email audit trail |
|
||||
|
||||
### Columns Added to `shared.organizations`
|
||||
|
||||
| Column | Type | Migration | Description |
|
||||
|--------|------|-----------|-------------|
|
||||
| `stripe_customer_id` | VARCHAR(255) UNIQUE | 015 | Stripe customer ID |
|
||||
| `stripe_subscription_id` | VARCHAR(255) UNIQUE | 015 | Stripe subscription ID |
|
||||
| `trial_ends_at` | TIMESTAMPTZ | 015 | Trial expiration date |
|
||||
| `billing_interval` | VARCHAR(20) | 017 | `month` or `year` |
|
||||
| `collection_method` | VARCHAR(20) | 017 | `charge_automatically` or `send_invoice` |
|
||||
|
||||
### Organization Status Values
|
||||
|
||||
`active`, `trial`, `past_due`, `suspended`, `archived`
|
||||
22
PARKING-LOT.md
Normal file
22
PARKING-LOT.md
Normal file
@@ -0,0 +1,22 @@
|
||||
# Parking Lot — Features Hidden or Deferred
|
||||
|
||||
This document tracks features that have been built but are currently hidden or deferred for future use.
|
||||
|
||||
---
|
||||
|
||||
## Invoices & Payments (Hidden as of 2026.03.19)
|
||||
|
||||
**Status:** Built but hidden from navigation
|
||||
|
||||
**What exists:**
|
||||
- Full Invoices page at `/invoices` with CRUD, generation, and management
|
||||
- Full Payments page at `/payments` with payment tracking and reconciliation
|
||||
- Backend API endpoints for both modules are fully functional
|
||||
- Routes remain registered in `App.tsx` (accessible via direct URL if needed)
|
||||
|
||||
**Where hidden:**
|
||||
- `frontend/src/components/layout/Sidebar.tsx` — Navigation links commented out in the Transactions section
|
||||
|
||||
**To re-enable:**
|
||||
1. Uncomment the Invoices and Payments entries in `Sidebar.tsx` (search for "PARKING-LOT.md")
|
||||
2. No other changes needed — routes and backend are intact
|
||||
136
PLAN.md
Normal file
136
PLAN.md
Normal file
@@ -0,0 +1,136 @@
|
||||
# Phase 2 Bug Fix & Tweaks - Implementation Plan
|
||||
|
||||
## 1. Admin Panel: Tenant Creation, Contract/Plan Fields, Disable/Archive
|
||||
|
||||
### Database Changes
|
||||
- Add `contract_number VARCHAR(100)` and `plan_level VARCHAR(50) DEFAULT 'standard'` to `shared.organizations` (live DB ALTER + init SQL)
|
||||
- Add `archived` to the status CHECK constraint: `('active', 'suspended', 'trial', 'archived')`
|
||||
- Add to Organization entity: `contractNumber`, `planLevel` columns
|
||||
|
||||
### Backend Changes
|
||||
- **admin.controller.ts**: Add two new endpoints:
|
||||
- `POST /admin/tenants` — Creates org + first user + tenant schema in one call. Accepts: org name, email, address, contractNumber, planLevel, plus first user's email/password/firstName/lastName. Calls OrganizationsService.create() then sets up the user.
|
||||
- `PUT /admin/organizations/:id/status` — Sets status to 'active', 'suspended', or 'archived'
|
||||
- **auth.module.ts**: Import OrganizationsModule so AdminController can inject OrganizationsService
|
||||
- **auth.service.ts**: In `login()`, after loading user with orgs, check if the default org's status is 'suspended' or 'archived' → throw UnauthorizedException("Your organization has been suspended/archived")
|
||||
- **users.service.ts**: Update `findAllOrganizations()` query to include `contract_number, plan_level` in the SELECT
|
||||
|
||||
### Frontend Changes
|
||||
- **AdminPage.tsx**:
|
||||
- Add "Create Tenant" button → opens a modal with: org name, address, email, phone, contract number, plan level (select: standard/premium/enterprise), first admin email, first admin password, first/last name
|
||||
- Orgs table: add Contract #, Plan Level columns
|
||||
- Orgs table: add Status dropdown/buttons (Active/Suspended/Archived) per row with confirmation
|
||||
- Show status colors: active=green, trial=yellow, suspended=orange, archived=red
|
||||
|
||||
## 2. Units/Homeowners: Delete + Assessment Group Binding
|
||||
|
||||
### Backend Changes
|
||||
- **units.controller.ts**: Add `@Delete(':id')` route
|
||||
- **units.service.ts**:
|
||||
- Add `delete(id)` method — checks for outstanding invoices first, then deletes
|
||||
- Add `assessment_group_id` to `create()` INSERT and `update()` UPDATE queries
|
||||
- Update `findAll()` to JOIN assessment_groups and return `assessment_group_name`
|
||||
|
||||
### Frontend Changes
|
||||
- **UnitsPage.tsx**:
|
||||
- Add delete button (trash icon) per row with confirmation dialog
|
||||
- Add Assessment Group dropdown (Select) in create/edit modal, populated from `/assessment-groups` query
|
||||
- Show assessment group name in table
|
||||
- When an assessment group is selected and no manual monthly_assessment is set, auto-fill from the group's regular_assessment
|
||||
|
||||
## 3. Assessment Groups: Frequency Field
|
||||
|
||||
### Database Changes
|
||||
- Add `frequency VARCHAR(20) DEFAULT 'monthly'` to `assessment_groups` table (live DB ALTER + tenant-schema DDL)
|
||||
- CHECK constraint: `('monthly', 'quarterly', 'annual')`
|
||||
|
||||
### Backend Changes
|
||||
- **assessment-groups.service.ts**:
|
||||
- Add `frequency` to `create()` INSERT
|
||||
- Add `frequency` to `update()` dynamic sets
|
||||
- Update `findAll()` and `getSummary()` income calculations to adjust by frequency:
|
||||
- monthly → multiply by 1 (×12/year)
|
||||
- quarterly → amounts are per quarter, so monthly = amount/3
|
||||
- annual → amounts are per year, so monthly = amount/12
|
||||
- Summary labels should change to reflect "Monthly Equivalent" for mixed frequencies
|
||||
|
||||
### Frontend Changes
|
||||
- **AssessmentGroupsPage.tsx**:
|
||||
- Add frequency Select in create/edit modal: Monthly, Quarterly, Annual
|
||||
- Show frequency badge in table
|
||||
- Update summary cards: labels → "Monthly Equivalent Operating" etc.
|
||||
- Assessment amount label changes based on frequency ("Per Month" / "Per Quarter" / "Per Year")
|
||||
|
||||
## 4. UI Streamlining: Sidebar Grouping, Rename, Logo
|
||||
|
||||
### Sidebar Restructure
|
||||
Group nav items into labeled sections:
|
||||
```
|
||||
Dashboard
|
||||
─── FINANCIALS ───
|
||||
Accounts (renamed from "Chart of Accounts")
|
||||
Budgets
|
||||
Investments
|
||||
─── ASSESSMENTS ───
|
||||
Units / Homeowners
|
||||
Assessment Groups
|
||||
─── TRANSACTIONS ───
|
||||
Transactions
|
||||
Invoices
|
||||
Payments
|
||||
─── PLANNING ───
|
||||
Capital Projects
|
||||
Reserves
|
||||
Vendors
|
||||
─── REPORTS ───
|
||||
(collapsible with sub-items)
|
||||
─── ADMIN ───
|
||||
Year-End
|
||||
Settings
|
||||
─── PLATFORM ADMIN ─── (superadmin only)
|
||||
Admin Panel
|
||||
```
|
||||
|
||||
### Logo
|
||||
- Copy SVG to `frontend/src/assets/logo.svg`
|
||||
- In AppLayout.tsx: Replace `<Title order={3} c="blue">HOA LedgerIQ</Title>` with an `<img>` tag loading the SVG, sized to fit the 60px header (height ~40px with padding)
|
||||
- SVG will be served directly (Vite handles SVG imports natively), no PNG conversion needed since browsers render SVG natively and it's cleaner
|
||||
|
||||
## 5. Capital Projects: PDF Table Export, Kanban Default, Future Category
|
||||
|
||||
### Frontend Changes
|
||||
- **CapitalProjectsPage.tsx**:
|
||||
- Change default viewMode from `'table'` to `'kanban'`
|
||||
- PDF export: temporarily switch to table view for print, then restore. Use `@media print` CSS to always show table layout regardless of current view
|
||||
- Add "Future" column in kanban: projects with `target_year = 9999` (sentinel value) display as "Future"
|
||||
- Update the form: Target Year select should include a "Future (Beyond 5-Year)" option that maps to year 9999
|
||||
- Kanban year list: always include current year through +5, plus "Future" if any projects exist there
|
||||
- Table view: group "Future" projects under a "Future" header
|
||||
- Title: "Capital Projects" (remove "(5-Year Plan)" since we now have Future)
|
||||
|
||||
### Backend
|
||||
- No backend changes needed — target_year=9999 works with existing schema (integer column, no constraint)
|
||||
|
||||
## File Change Summary
|
||||
|
||||
| File | Action |
|
||||
|------|--------|
|
||||
| `db/init/00-init.sql` | Add contract_number, plan_level, update status CHECK |
|
||||
| `backend/src/modules/organizations/entities/organization.entity.ts` | Add contractNumber, planLevel columns |
|
||||
| `backend/src/modules/organizations/dto/create-organization.dto.ts` | Add contractNumber, planLevel fields |
|
||||
| `backend/src/modules/auth/admin.controller.ts` | Add POST /admin/tenants, PUT /admin/organizations/:id/status |
|
||||
| `backend/src/modules/auth/auth.module.ts` | Import OrganizationsModule |
|
||||
| `backend/src/modules/auth/auth.service.ts` | Add org status check on login |
|
||||
| `backend/src/modules/users/users.service.ts` | Update findAllOrganizations query |
|
||||
| `backend/src/modules/units/units.controller.ts` | Add DELETE route |
|
||||
| `backend/src/modules/units/units.service.ts` | Add delete(), assessment_group_id support |
|
||||
| `backend/src/modules/assessment-groups/assessment-groups.service.ts` | Add frequency support + adjust income calcs |
|
||||
| `backend/src/database/tenant-schema.service.ts` | Add frequency to assessment_groups DDL |
|
||||
| `frontend/src/assets/logo.svg` | New — copy from /Users/claw/Downloads/logo_house.svg |
|
||||
| `frontend/src/components/layout/AppLayout.tsx` | Replace text with logo |
|
||||
| `frontend/src/components/layout/Sidebar.tsx` | Restructure with grouped sections |
|
||||
| `frontend/src/pages/admin/AdminPage.tsx` | Create tenant modal, status management, new columns |
|
||||
| `frontend/src/pages/units/UnitsPage.tsx` | Delete, assessment group dropdown |
|
||||
| `frontend/src/pages/assessment-groups/AssessmentGroupsPage.tsx` | Frequency field |
|
||||
| `frontend/src/pages/capital-projects/CapitalProjectsPage.tsx` | Kanban default, table PDF, Future category |
|
||||
| Live DB | ALTER TABLE commands for contract_number, plan_level, frequency, status CHECK |
|
||||
349
TESTING_CONVENTIONS.md
Normal file
349
TESTING_CONVENTIONS.md
Normal file
@@ -0,0 +1,349 @@
|
||||
# Testing Conventions — HOA LedgerIQ E2E & API Tests
|
||||
|
||||
This document is the single source of truth for writing, organizing, and running Playwright-based E2E and API regression tests in this project.
|
||||
|
||||
---
|
||||
|
||||
## Architecture
|
||||
|
||||
| Component | Technology | Port |
|
||||
|-----------|-----------|------|
|
||||
| Reverse proxy | nginx | :80 |
|
||||
| Backend API | NestJS 10 | :3000 (internal) |
|
||||
| Frontend | React 18 + Vite | :5173 (internal) |
|
||||
| Database | PostgreSQL 15 | :5432 |
|
||||
| Cache | Redis 7 | :6379 |
|
||||
| Test runner | Playwright | host |
|
||||
|
||||
Tests run on the **host machine** against the app running in **Docker Compose**. The `BASE_URL` defaults to `http://localhost` (nginx).
|
||||
|
||||
---
|
||||
|
||||
## Folder Structure
|
||||
|
||||
```
|
||||
tests/
|
||||
├── .auth/ # Stored auth state (gitignored)
|
||||
│ └── user.json # Browser state from auth.setup.ts
|
||||
├── fixtures/
|
||||
│ ├── auth.fixture.ts # API login helpers, token management
|
||||
│ ├── base.fixture.ts # Extended test object with typed fixtures
|
||||
│ ├── db.fixture.ts # Postgres seed/cleanup via pg driver
|
||||
│ └── test-data.ts # Shared constants (users, sample data)
|
||||
├── page-objects/
|
||||
│ ├── index.ts # Re-exports all page objects
|
||||
│ ├── BasePage.ts # Abstract base with shared helpers
|
||||
│ ├── LoginPage.ts # /login page
|
||||
│ ├── DashboardPage.ts # /dashboard page
|
||||
│ └── AccountsPage.ts # /accounts page
|
||||
├── e2e/ # Browser-based end-to-end tests
|
||||
│ ├── auth.spec.ts # Login/logout UI flows
|
||||
│ ├── dashboard.spec.ts # Dashboard load + navigation
|
||||
│ └── visual.spec.ts # Screenshot regression tests
|
||||
├── api/ # API-only tests (no browser)
|
||||
│ ├── auth.api.spec.ts # /api/auth/* endpoints
|
||||
│ └── accounts.api.spec.ts # /api/accounts/* CRUD
|
||||
└── auth.setup.ts # One-time auth setup project
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Naming Conventions
|
||||
|
||||
| What | Convention | Example |
|
||||
|------|-----------|---------|
|
||||
| E2E test files | `tests/e2e/<feature>.spec.ts` | `auth.spec.ts` |
|
||||
| API test files | `tests/api/<resource>.api.spec.ts` | `accounts.api.spec.ts` |
|
||||
| Page objects | `tests/page-objects/<PageName>.ts` | `LoginPage.ts` |
|
||||
| Fixtures | `tests/fixtures/<purpose>.fixture.ts` | `db.fixture.ts` |
|
||||
| Test data | `tests/fixtures/test-data.ts` | single file |
|
||||
| Snapshot baselines | auto-generated in `*-snapshots/` dirs | `login-page.png` |
|
||||
|
||||
### Test descriptions
|
||||
|
||||
Use `test.describe('Feature or Endpoint')` and `test('should <behavior>')`:
|
||||
|
||||
```ts
|
||||
test.describe('POST /api/auth/login', () => {
|
||||
test('should return access token for valid credentials', async ({ request }) => {
|
||||
// ...
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## How to Write New Tests
|
||||
|
||||
### 1. E2E (browser) test
|
||||
|
||||
```ts
|
||||
// tests/e2e/invoices.spec.ts
|
||||
import { test, expect } from '../fixtures/base.fixture';
|
||||
import { InvoicesPage } from '../page-objects';
|
||||
|
||||
test.describe('Invoices', () => {
|
||||
let invoicesPage: InvoicesPage;
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
invoicesPage = new InvoicesPage(page);
|
||||
await invoicesPage.goto();
|
||||
});
|
||||
|
||||
test('should display invoice list', async () => {
|
||||
await invoicesPage.assertOnPage();
|
||||
// ... assertions
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### 2. API test
|
||||
|
||||
```ts
|
||||
// tests/api/payments.api.spec.ts
|
||||
import { test, expect } from '@playwright/test';
|
||||
import { apiLogin, apiSwitchOrg, authHeaders } from '../fixtures/auth.fixture';
|
||||
import { TEST_USERS } from '../fixtures/test-data';
|
||||
|
||||
const API_BASE = process.env.API_BASE_URL || 'http://localhost/api';
|
||||
let accessToken: string;
|
||||
|
||||
test.beforeAll(async ({ request }) => {
|
||||
const tokens = await apiLogin(request, TEST_USERS.treasurer);
|
||||
if (tokens.organizations?.length > 0) {
|
||||
const switched = await apiSwitchOrg(request, tokens.accessToken, (tokens.organizations[0] as any).id);
|
||||
accessToken = switched.accessToken;
|
||||
} else {
|
||||
accessToken = tokens.accessToken;
|
||||
}
|
||||
});
|
||||
|
||||
test.describe('GET /api/payments', () => {
|
||||
test('should return payments list', async ({ request }) => {
|
||||
const response = await request.get(`${API_BASE}/payments`, {
|
||||
headers: authHeaders(accessToken),
|
||||
});
|
||||
expect(response.status()).toBe(200);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### 3. Visual regression test
|
||||
|
||||
```ts
|
||||
test('invoices page should match baseline', async ({ page }) => {
|
||||
await page.goto('/invoices');
|
||||
await page.waitForLoadState('networkidle');
|
||||
await page.waitForTimeout(500); // Let animations settle
|
||||
|
||||
await expect(page).toHaveScreenshot('invoices-page.png', {
|
||||
fullPage: true,
|
||||
mask: [page.locator('time')], // Mask dynamic dates
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
Update baselines: `npx playwright test --update-snapshots`
|
||||
|
||||
---
|
||||
|
||||
## How to Add New Page Objects
|
||||
|
||||
1. Create `tests/page-objects/MyPage.ts`:
|
||||
|
||||
```ts
|
||||
import { type Page, expect } from '@playwright/test';
|
||||
import { BasePage } from './BasePage';
|
||||
|
||||
export class MyPage extends BasePage {
|
||||
readonly path = '/my-path';
|
||||
|
||||
// Locators — prefer role/label selectors over CSS
|
||||
get heading() {
|
||||
return this.page.getByRole('heading', { name: /my page/i });
|
||||
}
|
||||
|
||||
get createButton() {
|
||||
return this.page.getByRole('button', { name: /create/i });
|
||||
}
|
||||
|
||||
// Actions
|
||||
override async waitForReady(): Promise<void> {
|
||||
await this.page.waitForLoadState('networkidle');
|
||||
await expect(this.heading).toBeVisible();
|
||||
}
|
||||
|
||||
async createItem(name: string): Promise<void> {
|
||||
await this.createButton.click();
|
||||
await this.page.getByLabel(/name/i).fill(name);
|
||||
await this.page.getByRole('button', { name: /save/i }).click();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
2. Export from `tests/page-objects/index.ts`:
|
||||
|
||||
```ts
|
||||
export { MyPage } from './MyPage';
|
||||
```
|
||||
|
||||
### Page object rules
|
||||
|
||||
- Extend `BasePage` and set `readonly path`
|
||||
- Override `waitForReady()` for page-specific loading
|
||||
- Use **role/label locators** (not CSS selectors): `getByRole()`, `getByLabel()`, `getByText()`
|
||||
- Expose **locators as getters** and **actions as methods**
|
||||
- Keep assertions in test files, not page objects (except `assertOnPage()`)
|
||||
|
||||
---
|
||||
|
||||
## Authentication in Tests
|
||||
|
||||
### Pre-authenticated tests (default)
|
||||
|
||||
Most tests use stored auth state from `auth.setup.ts`. This runs once via the `auth-setup` Playwright project and saves browser state to `tests/.auth/user.json`.
|
||||
|
||||
Tests automatically get this state via `storageState` in `playwright.config.ts`.
|
||||
|
||||
### Unauthenticated tests
|
||||
|
||||
For testing the login flow itself, opt out:
|
||||
|
||||
```ts
|
||||
test.use({ storageState: { cookies: [], origins: [] } });
|
||||
```
|
||||
|
||||
### API tests
|
||||
|
||||
Use the `apiLogin()` and `authHeaders()` helpers:
|
||||
|
||||
```ts
|
||||
import { apiLogin, authHeaders } from '../fixtures/auth.fixture';
|
||||
|
||||
const tokens = await apiLogin(request, TEST_USERS.treasurer);
|
||||
const response = await request.get(url, {
|
||||
headers: authHeaders(tokens.accessToken),
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Database Seeding & Cleanup
|
||||
|
||||
### When to use direct DB access
|
||||
|
||||
- Verifying backend wrote correct data
|
||||
- Seeding complex state that's hard to create via API
|
||||
- Cleanup after tests
|
||||
|
||||
### How
|
||||
|
||||
```ts
|
||||
import { test } from '../fixtures/base.fixture';
|
||||
|
||||
test('should verify data', async ({ db }) => {
|
||||
const result = await db.query('SELECT * FROM schema.table WHERE ...');
|
||||
expect(result.rows.length).toBeGreaterThan(0);
|
||||
});
|
||||
```
|
||||
|
||||
### Cleanup convention
|
||||
|
||||
- Prefix all test-created data with `E2E_` (use `TEST_PREFIX` from test-data.ts)
|
||||
- The `db.cleanup()` method deletes rows matching this prefix
|
||||
- Call `db.cleanup()` in `test.afterAll` for write-path tests
|
||||
|
||||
---
|
||||
|
||||
## Running Tests
|
||||
|
||||
### Prerequisites
|
||||
|
||||
1. Docker Compose services running: `docker-compose up -d`
|
||||
2. Test user seeded in the database (use the backend seed script or create manually)
|
||||
3. Environment configured: `cp .env.test.example .env.test` and fill in values
|
||||
|
||||
### Commands
|
||||
|
||||
```bash
|
||||
# Install Playwright (first time)
|
||||
npx playwright install --with-deps
|
||||
|
||||
# Run all tests
|
||||
npx playwright test
|
||||
|
||||
# Run only E2E tests
|
||||
npx playwright test tests/e2e/
|
||||
|
||||
# Run only API tests
|
||||
npx playwright test --project=api
|
||||
|
||||
# Run in specific browser
|
||||
npx playwright test --project=chromium
|
||||
|
||||
# Run in headed mode (see the browser)
|
||||
npx playwright test --headed
|
||||
|
||||
# Run a single test file
|
||||
npx playwright test tests/e2e/auth.spec.ts
|
||||
|
||||
# Debug mode (step through tests)
|
||||
npx playwright test --debug
|
||||
|
||||
# Update visual regression baselines
|
||||
npx playwright test tests/e2e/visual.spec.ts --update-snapshots
|
||||
|
||||
# View HTML report
|
||||
npx playwright show-report
|
||||
|
||||
# Run against production
|
||||
BASE_URL=https://your-prod-domain.com npx playwright test --project=api
|
||||
```
|
||||
|
||||
### npm scripts (from project root)
|
||||
|
||||
```bash
|
||||
npm run test:e2e # All Playwright tests
|
||||
npm run test:e2e:chromium # Chromium only
|
||||
npm run test:e2e:api # API tests only
|
||||
npm run test:e2e:headed # Headed mode
|
||||
npm run test:e2e:debug # Debug mode
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Default | Purpose |
|
||||
|----------|---------|---------|
|
||||
| `BASE_URL` | `http://localhost` | App URL (nginx) |
|
||||
| `API_BASE_URL` | `http://localhost/api` | Backend API base |
|
||||
| `TEST_DB_URL` | `postgresql://hoafinance:change_me@localhost:5432/hoafinance` | Direct Postgres for seeding |
|
||||
| `TEST_USER_EMAIL` | `e2e-treasurer@test.hoaledgeriq.com` | Test user email |
|
||||
| `TEST_USER_PASSWORD` | `TestPass123!` | Test user password |
|
||||
| `CI` | — | Set by CI providers; enables retries, single worker |
|
||||
|
||||
---
|
||||
|
||||
## Style Rules
|
||||
|
||||
1. **Import `test` from `../fixtures/base.fixture`** for tests needing DB or auth fixtures. Import from `@playwright/test` for basic tests.
|
||||
2. **One `test.describe` per feature or endpoint** per file.
|
||||
3. **No `page.waitForTimeout()` except in visual tests** — use `waitForLoadState`, `waitForURL`, or `waitForResponse` instead.
|
||||
4. **No hardcoded URLs** — use `BASE_URL`, `API_BASE`, or page object paths.
|
||||
5. **No test interdependencies** — each test should work in isolation (use `test.beforeEach` for setup).
|
||||
6. **Clean up after write tests** — use `TEST_PREFIX` and `db.cleanup()`.
|
||||
7. **API tests go in `tests/api/`, E2E tests in `tests/e2e/`** — don't mix.
|
||||
8. **Locators**: prefer `getByRole` > `getByLabel` > `getByText` > `getByTestId` > CSS selectors.
|
||||
|
||||
---
|
||||
|
||||
## Adding Tests for a New Feature (Quick Checklist)
|
||||
|
||||
- [ ] Create page object in `tests/page-objects/` if it's a new page
|
||||
- [ ] Export it from `tests/page-objects/index.ts`
|
||||
- [ ] Create `tests/e2e/<feature>.spec.ts` for UI flows
|
||||
- [ ] Create `tests/api/<resource>.api.spec.ts` for API endpoints
|
||||
- [ ] Add sample data constants to `tests/fixtures/test-data.ts` if needed
|
||||
- [ ] Run `npx playwright test tests/e2e/<feature>.spec.ts` to verify
|
||||
- [ ] Update visual baselines if the feature changes existing pages
|
||||
32
backend/Dockerfile
Normal file
32
backend/Dockerfile
Normal file
@@ -0,0 +1,32 @@
|
||||
# ---- Production Dockerfile for NestJS backend ----
|
||||
# Multi-stage build: compile TypeScript, then run with minimal image
|
||||
|
||||
# Stage 1: Build
|
||||
FROM node:20-alpine AS builder
|
||||
WORKDIR /app
|
||||
COPY package*.json ./
|
||||
RUN npm ci
|
||||
COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# Stage 2: Production
|
||||
FROM node:20-alpine
|
||||
WORKDIR /app
|
||||
|
||||
# Only install production dependencies
|
||||
COPY package*.json ./
|
||||
RUN npm ci --omit=dev && npm cache clean --force
|
||||
|
||||
# Copy compiled output and New Relic preload from builder
|
||||
COPY --from=builder /app/dist ./dist
|
||||
COPY --from=builder /app/newrelic-preload.js ./newrelic-preload.js
|
||||
|
||||
# New Relic agent — configured entirely via environment variables
|
||||
ENV NEW_RELIC_NO_CONFIG_FILE=true
|
||||
ENV NEW_RELIC_DISTRIBUTED_TRACING_ENABLED=true
|
||||
ENV NEW_RELIC_LOG=stdout
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
# Preload the New Relic agent (activates only when NEW_RELIC_ENABLED=true)
|
||||
CMD ["node", "-r", "./newrelic-preload.js", "dist/main"]
|
||||
@@ -7,6 +7,11 @@ RUN npm install
|
||||
|
||||
COPY . .
|
||||
|
||||
# New Relic agent — configured entirely via environment variables
|
||||
ENV NEW_RELIC_NO_CONFIG_FILE=true
|
||||
ENV NEW_RELIC_DISTRIBUTED_TRACING_ENABLED=true
|
||||
ENV NEW_RELIC_LOG=stdout
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
CMD ["npm", "run", "start:dev"]
|
||||
|
||||
7
backend/newrelic-preload.js
Normal file
7
backend/newrelic-preload.js
Normal file
@@ -0,0 +1,7 @@
|
||||
// Conditionally load the New Relic agent before any other modules.
|
||||
// Controlled by the NEW_RELIC_ENABLED environment variable (.env).
|
||||
'use strict';
|
||||
|
||||
if (process.env.NEW_RELIC_ENABLED === 'true') {
|
||||
require('newrelic');
|
||||
}
|
||||
2639
backend/package-lock.json
generated
2639
backend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "hoa-ledgeriq-backend",
|
||||
"version": "0.2.0",
|
||||
"version": "2026.3.24",
|
||||
"description": "HOA LedgerIQ - Backend API",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
@@ -8,7 +8,7 @@
|
||||
"start": "nest start",
|
||||
"start:dev": "nest start --watch",
|
||||
"start:debug": "nest start --debug --watch",
|
||||
"start:prod": "node dist/main",
|
||||
"start:prod": "node -r ./newrelic-preload.js dist/main",
|
||||
"lint": "eslint \"{src,apps,libs,test}/**/*.ts\"",
|
||||
"test": "jest",
|
||||
"test:watch": "jest --watch",
|
||||
@@ -23,18 +23,31 @@
|
||||
"@nestjs/jwt": "^10.2.0",
|
||||
"@nestjs/passport": "^10.0.3",
|
||||
"@nestjs/platform-express": "^10.4.15",
|
||||
"@nestjs/schedule": "^6.1.1",
|
||||
"@nestjs/swagger": "^7.4.2",
|
||||
"@nestjs/throttler": "^6.5.0",
|
||||
"@nestjs/typeorm": "^10.0.2",
|
||||
"@simplewebauthn/server": "^13.3.0",
|
||||
"bcryptjs": "^3.0.3",
|
||||
"bullmq": "^5.71.0",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.1",
|
||||
"cookie-parser": "^1.4.7",
|
||||
"helmet": "^8.1.0",
|
||||
"ioredis": "^5.4.2",
|
||||
"newrelic": "latest",
|
||||
"otplib": "^13.3.0",
|
||||
"passport": "^0.7.0",
|
||||
"passport-azure-ad": "^4.3.5",
|
||||
"passport-google-oauth20": "^2.0.0",
|
||||
"passport-jwt": "^4.0.1",
|
||||
"passport-local": "^1.0.0",
|
||||
"pg": "^8.13.1",
|
||||
"qrcode": "^1.5.4",
|
||||
"reflect-metadata": "^0.2.2",
|
||||
"resend": "^6.9.4",
|
||||
"rxjs": "^7.8.1",
|
||||
"stripe": "^20.4.1",
|
||||
"typeorm": "^0.3.20",
|
||||
"uuid": "^9.0.1"
|
||||
},
|
||||
@@ -43,12 +56,15 @@
|
||||
"@nestjs/schematics": "^10.2.3",
|
||||
"@nestjs/testing": "^10.4.15",
|
||||
"@types/bcryptjs": "^2.4.6",
|
||||
"@types/cookie-parser": "^1.4.10",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/jest": "^29.5.14",
|
||||
"@types/multer": "^2.0.0",
|
||||
"@types/node": "^20.17.12",
|
||||
"@types/passport-google-oauth20": "^2.0.17",
|
||||
"@types/passport-jwt": "^4.0.1",
|
||||
"@types/passport-local": "^1.0.38",
|
||||
"@types/qrcode": "^1.5.6",
|
||||
"@types/uuid": "^9.0.8",
|
||||
"jest": "^29.7.0",
|
||||
"ts-jest": "^29.2.5",
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
import { Module, MiddlewareConsumer, NestModule } from '@nestjs/common';
|
||||
import { APP_GUARD, APP_INTERCEPTOR } from '@nestjs/core';
|
||||
import { ConfigModule, ConfigService } from '@nestjs/config';
|
||||
import { TypeOrmModule } from '@nestjs/typeorm';
|
||||
import { ThrottlerModule } from '@nestjs/throttler';
|
||||
import { AppController } from './app.controller';
|
||||
import { DatabaseModule } from './database/database.module';
|
||||
import { TenantMiddleware } from './database/tenant.middleware';
|
||||
import { WriteAccessGuard } from './common/guards/write-access.guard';
|
||||
import { NoCacheInterceptor } from './common/interceptors/no-cache.interceptor';
|
||||
import { AuthModule } from './modules/auth/auth.module';
|
||||
import { OrganizationsModule } from './modules/organizations/organizations.module';
|
||||
import { UsersModule } from './modules/users/users.module';
|
||||
@@ -23,6 +27,14 @@ import { AssessmentGroupsModule } from './modules/assessment-groups/assessment-g
|
||||
import { ProjectsModule } from './modules/projects/projects.module';
|
||||
import { MonthlyActualsModule } from './modules/monthly-actuals/monthly-actuals.module';
|
||||
import { AttachmentsModule } from './modules/attachments/attachments.module';
|
||||
import { InvestmentPlanningModule } from './modules/investment-planning/investment-planning.module';
|
||||
import { HealthScoresModule } from './modules/health-scores/health-scores.module';
|
||||
import { BoardPlanningModule } from './modules/board-planning/board-planning.module';
|
||||
import { BillingModule } from './modules/billing/billing.module';
|
||||
import { EmailModule } from './modules/email/email.module';
|
||||
import { OnboardingModule } from './modules/onboarding/onboarding.module';
|
||||
import { IdeasModule } from './modules/ideas/ideas.module';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
@@ -38,8 +50,19 @@ import { AttachmentsModule } from './modules/attachments/attachments.module';
|
||||
autoLoadEntities: true,
|
||||
synchronize: false,
|
||||
logging: false,
|
||||
// Connection pool — reuse connections instead of creating new ones per query
|
||||
extra: {
|
||||
max: 30, // max pool size (across all concurrent requests)
|
||||
min: 5, // keep at least 5 idle connections warm
|
||||
idleTimeoutMillis: 30000, // close idle connections after 30s
|
||||
connectionTimeoutMillis: 5000, // fail fast if pool is exhausted
|
||||
},
|
||||
}),
|
||||
}),
|
||||
ThrottlerModule.forRoot([{
|
||||
ttl: 60000, // 1-minute window
|
||||
limit: 100, // 100 requests per minute (global default)
|
||||
}]),
|
||||
DatabaseModule,
|
||||
AuthModule,
|
||||
OrganizationsModule,
|
||||
@@ -60,8 +83,26 @@ import { AttachmentsModule } from './modules/attachments/attachments.module';
|
||||
ProjectsModule,
|
||||
MonthlyActualsModule,
|
||||
AttachmentsModule,
|
||||
InvestmentPlanningModule,
|
||||
HealthScoresModule,
|
||||
BoardPlanningModule,
|
||||
BillingModule,
|
||||
EmailModule,
|
||||
OnboardingModule,
|
||||
IdeasModule,
|
||||
ScheduleModule.forRoot(),
|
||||
],
|
||||
controllers: [AppController],
|
||||
providers: [
|
||||
{
|
||||
provide: APP_GUARD,
|
||||
useClass: WriteAccessGuard,
|
||||
},
|
||||
{
|
||||
provide: APP_INTERCEPTOR,
|
||||
useClass: NoCacheInterceptor,
|
||||
},
|
||||
],
|
||||
})
|
||||
export class AppModule implements NestModule {
|
||||
configure(consumer: MiddlewareConsumer) {
|
||||
|
||||
4
backend/src/common/decorators/allow-viewer.decorator.ts
Normal file
4
backend/src/common/decorators/allow-viewer.decorator.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
import { SetMetadata } from '@nestjs/common';
|
||||
|
||||
export const ALLOW_VIEWER_KEY = 'allowViewer';
|
||||
export const AllowViewer = () => SetMetadata(ALLOW_VIEWER_KEY, true);
|
||||
42
backend/src/common/guards/write-access.guard.ts
Normal file
42
backend/src/common/guards/write-access.guard.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { Injectable, CanActivate, ExecutionContext, ForbiddenException } from '@nestjs/common';
|
||||
import { Reflector } from '@nestjs/core';
|
||||
import { ALLOW_VIEWER_KEY } from '../decorators/allow-viewer.decorator';
|
||||
|
||||
@Injectable()
|
||||
export class WriteAccessGuard implements CanActivate {
|
||||
constructor(private reflector: Reflector) {}
|
||||
|
||||
canActivate(context: ExecutionContext): boolean {
|
||||
const request = context.switchToHttp().getRequest();
|
||||
const method = request.method;
|
||||
|
||||
// Allow all read methods
|
||||
if (['GET', 'HEAD', 'OPTIONS'].includes(method)) return true;
|
||||
|
||||
// Determine role from either req.userRole (set by TenantMiddleware which runs
|
||||
// before guards) or req.user.role (set by JwtAuthGuard Passport strategy).
|
||||
const role = request.userRole || request.user?.role;
|
||||
if (!role) return true; // unauthenticated endpoints like login/register
|
||||
|
||||
// Check for @AllowViewer() exemption on handler or class
|
||||
const allowViewer = this.reflector.getAllAndOverride<boolean>(ALLOW_VIEWER_KEY, [
|
||||
context.getHandler(),
|
||||
context.getClass(),
|
||||
]);
|
||||
if (allowViewer) return true;
|
||||
|
||||
// Block viewer role from write operations
|
||||
if (role === 'viewer') {
|
||||
throw new ForbiddenException('Read-only users cannot modify data');
|
||||
}
|
||||
|
||||
// Block writes for past_due organizations (grace period: read-only access)
|
||||
if (request.orgPastDue) {
|
||||
throw new ForbiddenException(
|
||||
'Your subscription is past due. Please update your payment method to continue making changes.',
|
||||
);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
16
backend/src/common/interceptors/no-cache.interceptor.ts
Normal file
16
backend/src/common/interceptors/no-cache.interceptor.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { Injectable, NestInterceptor, ExecutionContext, CallHandler } from '@nestjs/common';
|
||||
import { Observable } from 'rxjs';
|
||||
|
||||
/**
|
||||
* Prevents browsers and proxies from caching authenticated API responses
|
||||
* containing sensitive financial data (account balances, transactions, PII).
|
||||
*/
|
||||
@Injectable()
|
||||
export class NoCacheInterceptor implements NestInterceptor {
|
||||
intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
|
||||
const res = context.switchToHttp().getResponse();
|
||||
res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate, private');
|
||||
res.setHeader('Pragma', 'no-cache');
|
||||
return next.handle();
|
||||
}
|
||||
}
|
||||
@@ -112,6 +112,8 @@ export class TenantSchemaService {
|
||||
special_assessment DECIMAL(10,2) DEFAULT 0.00,
|
||||
unit_count INTEGER DEFAULT 0,
|
||||
frequency VARCHAR(20) DEFAULT 'monthly' CHECK (frequency IN ('monthly', 'quarterly', 'annual')),
|
||||
due_months INTEGER[] DEFAULT '{1,2,3,4,5,6,7,8,9,10,11,12}',
|
||||
due_day INTEGER DEFAULT 1,
|
||||
is_default BOOLEAN DEFAULT FALSE,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
@@ -155,8 +157,11 @@ export class TenantSchemaService {
|
||||
amount DECIMAL(10,2) NOT NULL,
|
||||
amount_paid DECIMAL(10,2) DEFAULT 0.00,
|
||||
status VARCHAR(20) DEFAULT 'draft' CHECK (status IN (
|
||||
'draft', 'sent', 'paid', 'partial', 'overdue', 'void', 'written_off'
|
||||
'draft', 'pending', 'sent', 'paid', 'partial', 'overdue', 'void', 'written_off'
|
||||
)),
|
||||
period_start DATE,
|
||||
period_end DATE,
|
||||
assessment_group_id UUID REFERENCES "${s}".assessment_groups(id),
|
||||
journal_entry_id UUID REFERENCES "${s}".journal_entries(id),
|
||||
sent_at TIMESTAMPTZ,
|
||||
paid_at TIMESTAMPTZ,
|
||||
@@ -202,6 +207,7 @@ export class TenantSchemaService {
|
||||
default_account_id UUID REFERENCES "${s}".accounts(id),
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
ytd_payments DECIMAL(15,2) DEFAULT 0.00,
|
||||
last_negotiated DATE,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)`,
|
||||
@@ -316,6 +322,38 @@ export class TenantSchemaService {
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)`,
|
||||
|
||||
// AI Investment Recommendations (saved per tenant)
|
||||
`CREATE TABLE "${s}".ai_recommendations (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
recommendations_json JSONB NOT NULL,
|
||||
overall_assessment TEXT,
|
||||
risk_notes JSONB,
|
||||
requested_by UUID,
|
||||
response_time_ms INTEGER,
|
||||
status VARCHAR(20) DEFAULT 'complete',
|
||||
error_message TEXT,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)`,
|
||||
|
||||
// Health Scores (AI-derived operating / reserve fund health)
|
||||
`CREATE TABLE "${s}".health_scores (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
score_type VARCHAR(20) NOT NULL CHECK (score_type IN ('operating', 'reserve')),
|
||||
score INTEGER NOT NULL CHECK (score >= 0 AND score <= 100),
|
||||
previous_score INTEGER,
|
||||
trajectory VARCHAR(20) CHECK (trajectory IN ('improving', 'stable', 'declining')),
|
||||
label VARCHAR(30),
|
||||
summary TEXT,
|
||||
factors JSONB,
|
||||
recommendations JSONB,
|
||||
missing_data JSONB,
|
||||
status VARCHAR(20) NOT NULL DEFAULT 'complete' CHECK (status IN ('complete', 'pending', 'error')),
|
||||
response_time_ms INTEGER,
|
||||
calculated_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)`,
|
||||
`CREATE INDEX "idx_${s}_hs_type_calc" ON "${s}".health_scores(score_type, calculated_at DESC)`,
|
||||
|
||||
// Attachments (file storage for receipts/invoices)
|
||||
`CREATE TABLE "${s}".attachments (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
@@ -328,6 +366,99 @@ export class TenantSchemaService {
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)`,
|
||||
|
||||
// Board Planning - Scenarios
|
||||
`CREATE TABLE "${s}".board_scenarios (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
scenario_type VARCHAR(30) NOT NULL CHECK (scenario_type IN ('investment', 'assessment')),
|
||||
status VARCHAR(20) DEFAULT 'draft' CHECK (status IN ('draft', 'active', 'approved', 'archived')),
|
||||
projection_months INTEGER DEFAULT 36,
|
||||
projection_cache JSONB,
|
||||
projection_cached_at TIMESTAMPTZ,
|
||||
created_by UUID NOT NULL,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)`,
|
||||
|
||||
// Board Planning - Scenario Investments
|
||||
`CREATE TABLE "${s}".scenario_investments (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
scenario_id UUID NOT NULL REFERENCES "${s}".board_scenarios(id) ON DELETE CASCADE,
|
||||
source_recommendation_id UUID,
|
||||
label VARCHAR(255) NOT NULL,
|
||||
investment_type VARCHAR(50) CHECK (investment_type IN ('cd', 'money_market', 'treasury', 'savings', 'other')),
|
||||
fund_type VARCHAR(20) NOT NULL CHECK (fund_type IN ('operating', 'reserve')),
|
||||
principal DECIMAL(15,2) NOT NULL,
|
||||
interest_rate DECIMAL(6,4),
|
||||
term_months INTEGER,
|
||||
institution VARCHAR(255),
|
||||
purchase_date DATE,
|
||||
maturity_date DATE,
|
||||
auto_renew BOOLEAN DEFAULT FALSE,
|
||||
executed_investment_id UUID,
|
||||
notes TEXT,
|
||||
sort_order INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)`,
|
||||
|
||||
// Board Planning - Scenario Assessments
|
||||
`CREATE TABLE "${s}".scenario_assessments (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
scenario_id UUID NOT NULL REFERENCES "${s}".board_scenarios(id) ON DELETE CASCADE,
|
||||
change_type VARCHAR(30) NOT NULL CHECK (change_type IN ('dues_increase', 'special_assessment', 'dues_decrease')),
|
||||
label VARCHAR(255) NOT NULL,
|
||||
target_fund VARCHAR(20) CHECK (target_fund IN ('operating', 'reserve', 'both')),
|
||||
percentage_change DECIMAL(6,3),
|
||||
flat_amount_change DECIMAL(10,2),
|
||||
special_total DECIMAL(15,2),
|
||||
special_per_unit DECIMAL(10,2),
|
||||
special_installments INTEGER DEFAULT 1,
|
||||
effective_date DATE NOT NULL,
|
||||
end_date DATE,
|
||||
applies_to_group_id UUID,
|
||||
notes TEXT,
|
||||
sort_order INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)`,
|
||||
|
||||
// Budget Plans
|
||||
`CREATE TABLE "${s}".budget_plans (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
fiscal_year INTEGER NOT NULL,
|
||||
status VARCHAR(20) NOT NULL DEFAULT 'planning' CHECK (status IN ('planning', 'approved', 'ratified')),
|
||||
base_year INTEGER NOT NULL,
|
||||
inflation_rate DECIMAL(5,2) NOT NULL DEFAULT 2.50,
|
||||
notes TEXT,
|
||||
created_by UUID,
|
||||
approved_by UUID,
|
||||
approved_at TIMESTAMPTZ,
|
||||
ratified_by UUID,
|
||||
ratified_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
UNIQUE(fiscal_year)
|
||||
)`,
|
||||
|
||||
// Budget Plan Lines
|
||||
`CREATE TABLE "${s}".budget_plan_lines (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
budget_plan_id UUID NOT NULL REFERENCES "${s}".budget_plans(id) ON DELETE CASCADE,
|
||||
account_id UUID NOT NULL REFERENCES "${s}".accounts(id),
|
||||
fund_type VARCHAR(20) NOT NULL CHECK (fund_type IN ('operating', 'reserve')),
|
||||
jan DECIMAL(12,2) DEFAULT 0, feb DECIMAL(12,2) DEFAULT 0,
|
||||
mar DECIMAL(12,2) DEFAULT 0, apr DECIMAL(12,2) DEFAULT 0,
|
||||
may DECIMAL(12,2) DEFAULT 0, jun DECIMAL(12,2) DEFAULT 0,
|
||||
jul DECIMAL(12,2) DEFAULT 0, aug DECIMAL(12,2) DEFAULT 0,
|
||||
sep DECIMAL(12,2) DEFAULT 0, oct DECIMAL(12,2) DEFAULT 0,
|
||||
nov DECIMAL(12,2) DEFAULT 0, dec_amt DECIMAL(12,2) DEFAULT 0,
|
||||
is_manually_adjusted BOOLEAN DEFAULT FALSE,
|
||||
notes TEXT,
|
||||
UNIQUE(budget_plan_id, account_id, fund_type)
|
||||
)`,
|
||||
|
||||
// Indexes
|
||||
`CREATE INDEX "idx_${s}_att_je" ON "${s}".attachments(journal_entry_id)`,
|
||||
`CREATE INDEX "idx_${s}_je_date" ON "${s}".journal_entries(entry_date)`,
|
||||
@@ -340,6 +471,12 @@ export class TenantSchemaService {
|
||||
`CREATE INDEX "idx_${s}_pay_unit" ON "${s}".payments(unit_id)`,
|
||||
`CREATE INDEX "idx_${s}_pay_inv" ON "${s}".payments(invoice_id)`,
|
||||
`CREATE INDEX "idx_${s}_bud_year" ON "${s}".budgets(fiscal_year)`,
|
||||
`CREATE INDEX "idx_${s}_bs_type_status" ON "${s}".board_scenarios(scenario_type, status)`,
|
||||
`CREATE INDEX "idx_${s}_si_scenario" ON "${s}".scenario_investments(scenario_id)`,
|
||||
`CREATE INDEX "idx_${s}_sa_scenario" ON "${s}".scenario_assessments(scenario_id)`,
|
||||
`CREATE INDEX "idx_${s}_bp_year" ON "${s}".budget_plans(fiscal_year)`,
|
||||
`CREATE INDEX "idx_${s}_bp_status" ON "${s}".budget_plans(status)`,
|
||||
`CREATE INDEX "idx_${s}_bpl_plan" ON "${s}".budget_plan_lines(budget_plan_id)`,
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { Injectable, NestMiddleware } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { DataSource } from 'typeorm';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import * as jwt from 'jsonwebtoken';
|
||||
|
||||
@@ -8,13 +9,21 @@ export interface TenantRequest extends Request {
|
||||
orgId?: string;
|
||||
userId?: string;
|
||||
userRole?: string;
|
||||
orgPastDue?: boolean;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class TenantMiddleware implements NestMiddleware {
|
||||
constructor(private configService: ConfigService) {}
|
||||
// In-memory cache for org info to avoid DB hit per request
|
||||
private orgCache = new Map<string, { status: string; schemaName: string; cachedAt: number }>();
|
||||
private static readonly CACHE_TTL = 60_000; // 60 seconds
|
||||
|
||||
use(req: TenantRequest, _res: Response, next: NextFunction) {
|
||||
constructor(
|
||||
private configService: ConfigService,
|
||||
private dataSource: DataSource,
|
||||
) {}
|
||||
|
||||
async use(req: TenantRequest, res: Response, next: NextFunction) {
|
||||
// Try to extract tenant info from Authorization header JWT
|
||||
const authHeader = req.headers.authorization;
|
||||
if (authHeader && authHeader.startsWith('Bearer ')) {
|
||||
@@ -22,11 +31,29 @@ export class TenantMiddleware implements NestMiddleware {
|
||||
const token = authHeader.substring(7);
|
||||
const secret = this.configService.get<string>('JWT_SECRET');
|
||||
const decoded = jwt.verify(token, secret!) as any;
|
||||
if (decoded?.orgSchema) {
|
||||
req.tenantSchema = decoded.orgSchema;
|
||||
if (decoded?.orgId) {
|
||||
// Look up org info (status + schema) from orgId with caching
|
||||
const orgInfo = await this.getOrgInfo(decoded.orgId);
|
||||
if (orgInfo) {
|
||||
if (['suspended', 'archived'].includes(orgInfo.status)) {
|
||||
res.status(403).json({
|
||||
statusCode: 403,
|
||||
message: `This organization has been ${orgInfo.status}. Please contact your administrator.`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
// past_due: allow through with read-only flag (WriteAccessGuard enforces)
|
||||
if (orgInfo.status === 'past_due') {
|
||||
req.orgPastDue = true;
|
||||
}
|
||||
req.tenantSchema = orgInfo.schemaName;
|
||||
}
|
||||
req.orgId = decoded.orgId;
|
||||
req.userId = decoded.sub;
|
||||
req.userRole = decoded.role;
|
||||
} else if (decoded?.sub) {
|
||||
// Superadmin or user without org — still set userId
|
||||
req.userId = decoded.sub;
|
||||
}
|
||||
} catch {
|
||||
// Token invalid or expired - let Passport handle the auth error
|
||||
@@ -34,4 +61,28 @@ export class TenantMiddleware implements NestMiddleware {
|
||||
}
|
||||
next();
|
||||
}
|
||||
|
||||
private async getOrgInfo(orgId: string): Promise<{ status: string; schemaName: string } | null> {
|
||||
const cached = this.orgCache.get(orgId);
|
||||
if (cached && Date.now() - cached.cachedAt < TenantMiddleware.CACHE_TTL) {
|
||||
return { status: cached.status, schemaName: cached.schemaName };
|
||||
}
|
||||
try {
|
||||
const result = await this.dataSource.query(
|
||||
`SELECT status, schema_name as "schemaName" FROM shared.organizations WHERE id = $1`,
|
||||
[orgId],
|
||||
);
|
||||
if (result.length > 0) {
|
||||
this.orgCache.set(orgId, {
|
||||
status: result[0].status,
|
||||
schemaName: result[0].schemaName,
|
||||
cachedAt: Date.now(),
|
||||
});
|
||||
return { status: result[0].status, schemaName: result[0].schemaName };
|
||||
}
|
||||
} catch {
|
||||
// Non-critical — don't block requests on cache miss errors
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,18 +1,78 @@
|
||||
import * as _cluster from 'node:cluster';
|
||||
import * as os from 'node:os';
|
||||
import { NestFactory } from '@nestjs/core';
|
||||
import { ValidationPipe } from '@nestjs/common';
|
||||
import { SwaggerModule, DocumentBuilder } from '@nestjs/swagger';
|
||||
import helmet from 'helmet';
|
||||
import * as cookieParser from 'cookie-parser';
|
||||
import { AppModule } from './app.module';
|
||||
|
||||
const cluster = _cluster as any; // Cast to 'any' bypasses the missing property errors
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Clustering — fork one worker per CPU core in production
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const WORKERS = isProduction
|
||||
? Math.min(os.cpus().length, 4) // cap at 4 workers to stay within DB pool
|
||||
: 1; // single process in dev
|
||||
|
||||
if (WORKERS > 1 && cluster.isPrimary) {
|
||||
console.log(`Primary ${process.pid} forking ${WORKERS} workers ...`);
|
||||
for (let i = 0; i < WORKERS; i++) {
|
||||
cluster.fork();
|
||||
}
|
||||
cluster.on('exit', (worker: any, code: number) => {
|
||||
console.warn(`Worker ${worker.process.pid} exited (code ${code}), restarting ...`);
|
||||
cluster.fork();
|
||||
});
|
||||
} else {
|
||||
bootstrap();
|
||||
}
|
||||
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// NestJS bootstrap
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
async function bootstrap() {
|
||||
const app = await NestFactory.create(AppModule);
|
||||
const app = await NestFactory.create(AppModule, {
|
||||
logger: isProduction ? ['error', 'warn', 'log'] : ['error', 'warn', 'log', 'debug', 'verbose'],
|
||||
// Enable raw body for Stripe webhook signature verification
|
||||
rawBody: true,
|
||||
});
|
||||
|
||||
app.setGlobalPrefix('api');
|
||||
|
||||
// Request logging
|
||||
app.use((req: any, _res: any, next: any) => {
|
||||
console.log(`[REQ] ${req.method} ${req.url} auth=${req.headers.authorization ? 'yes' : 'no'}`);
|
||||
next();
|
||||
});
|
||||
// Cookie parser — needed for refresh token httpOnly cookies
|
||||
app.use(cookieParser());
|
||||
|
||||
// Security headers — Helmet sets CSP, X-Frame-Options, X-Content-Type-Options,
|
||||
// Referrer-Policy, Permissions-Policy, and removes X-Powered-By
|
||||
app.use(
|
||||
helmet({
|
||||
contentSecurityPolicy: {
|
||||
directives: {
|
||||
defaultSrc: ["'self'"],
|
||||
scriptSrc: ["'self'", "'unsafe-inline'", 'https://chat.hoaledgeriq.com'],
|
||||
connectSrc: ["'self'", 'https://chat.hoaledgeriq.com', 'wss://chat.hoaledgeriq.com'],
|
||||
imgSrc: ["'self'", 'data:', 'https://chat.hoaledgeriq.com'],
|
||||
styleSrc: ["'self'", "'unsafe-inline'"],
|
||||
frameSrc: ["'self'", 'https://chat.hoaledgeriq.com'],
|
||||
fontSrc: ["'self'", 'data:'],
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
// Request logging — only in development (too noisy / slow for prod)
|
||||
if (!isProduction) {
|
||||
app.use((req: any, _res: any, next: any) => {
|
||||
console.log(`[REQ] ${req.method} ${req.url} auth=${req.headers.authorization ? 'yes' : 'no'}`);
|
||||
next();
|
||||
});
|
||||
}
|
||||
|
||||
app.useGlobalPipes(
|
||||
new ValidationPipe({
|
||||
@@ -22,21 +82,24 @@ async function bootstrap() {
|
||||
}),
|
||||
);
|
||||
|
||||
// CORS — in production nginx handles this; accept all origins behind the proxy
|
||||
app.enableCors({
|
||||
origin: ['http://localhost', 'http://localhost:5173'],
|
||||
origin: isProduction ? true : ['http://localhost', 'http://localhost:5173'],
|
||||
credentials: true,
|
||||
});
|
||||
|
||||
const config = new DocumentBuilder()
|
||||
.setTitle('HOA LedgerIQ API')
|
||||
.setDescription('API for the HOA LedgerIQ')
|
||||
.setVersion('0.1.0')
|
||||
.addBearerAuth()
|
||||
.build();
|
||||
const document = SwaggerModule.createDocument(app, config);
|
||||
SwaggerModule.setup('api/docs', app, document);
|
||||
// Swagger docs — disabled in production to avoid exposing API surface
|
||||
if (!isProduction) {
|
||||
const config = new DocumentBuilder()
|
||||
.setTitle('HOA LedgerIQ API')
|
||||
.setDescription('API for the HOA LedgerIQ')
|
||||
.setVersion('2026.3.11')
|
||||
.addBearerAuth()
|
||||
.build();
|
||||
const document = SwaggerModule.createDocument(app, config);
|
||||
SwaggerModule.setup('api/docs', app, document);
|
||||
}
|
||||
|
||||
await app.listen(3000);
|
||||
console.log('Backend running on port 3000');
|
||||
console.log(`Backend worker ${process.pid} listening on port 3000`);
|
||||
}
|
||||
bootstrap();
|
||||
|
||||
@@ -58,6 +58,14 @@ export class AccountsController {
|
||||
return this.accountsService.adjustBalance(id, dto);
|
||||
}
|
||||
|
||||
@Post('transfer')
|
||||
@ApiOperation({ summary: 'Transfer funds between asset accounts' })
|
||||
transferFunds(
|
||||
@Body() dto: { fromAccountId: string; toAccountId: string; amount: number; transferDate: string; memo?: string },
|
||||
) {
|
||||
return this.accountsService.transferFunds(dto);
|
||||
}
|
||||
|
||||
@Get(':id')
|
||||
@ApiOperation({ summary: 'Get account by ID' })
|
||||
findOne(@Param('id') id: string) {
|
||||
|
||||
@@ -74,9 +74,9 @@ export class AccountsService {
|
||||
|
||||
// Create opening balance journal entry if initialBalance is provided and non-zero
|
||||
if (dto.initialBalance && dto.initialBalance !== 0) {
|
||||
const now = new Date();
|
||||
const year = now.getFullYear();
|
||||
const month = now.getMonth() + 1;
|
||||
const balanceDate = dto.initialBalanceDate ? new Date(dto.initialBalanceDate) : new Date();
|
||||
const year = balanceDate.getFullYear();
|
||||
const month = balanceDate.getMonth() + 1;
|
||||
|
||||
// Find the current fiscal period
|
||||
const periods = await this.tenant.query(
|
||||
@@ -111,12 +111,14 @@ export class AccountsService {
|
||||
);
|
||||
}
|
||||
|
||||
// Create the journal entry
|
||||
// Create the journal entry (use provided balance date or today)
|
||||
const entryDate = dto.initialBalanceDate || new Date().toISOString().split('T')[0];
|
||||
const jeInsert = await this.tenant.query(
|
||||
`INSERT INTO journal_entries (entry_date, description, entry_type, fiscal_period_id, is_posted, posted_at, created_by)
|
||||
VALUES (CURRENT_DATE, $1, 'opening_balance', $2, true, NOW(), $3)
|
||||
VALUES ($1::date, $2, 'opening_balance', $3, true, NOW(), $4)
|
||||
RETURNING id`,
|
||||
[
|
||||
entryDate,
|
||||
`Opening balance for ${dto.name}`,
|
||||
fiscalPeriodId,
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
@@ -142,7 +144,21 @@ export class AccountsService {
|
||||
}
|
||||
}
|
||||
|
||||
return account;
|
||||
// Auto-set as primary if this is the first asset account for this fund_type
|
||||
if (dto.accountType === 'asset') {
|
||||
const existingPrimary = await this.tenant.query(
|
||||
'SELECT id FROM accounts WHERE fund_type = $1 AND is_primary = true AND id != $2',
|
||||
[dto.fundType, accountId],
|
||||
);
|
||||
if (!existingPrimary.length) {
|
||||
await this.tenant.query(
|
||||
'UPDATE accounts SET is_primary = true WHERE id = $1',
|
||||
[accountId],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return this.findOne(accountId);
|
||||
}
|
||||
|
||||
async update(id: string, dto: UpdateAccountDto) {
|
||||
@@ -344,6 +360,62 @@ export class AccountsService {
|
||||
return journalEntry;
|
||||
}
|
||||
|
||||
async transferFunds(dto: {
|
||||
fromAccountId: string;
|
||||
toAccountId: string;
|
||||
amount: number;
|
||||
transferDate: string;
|
||||
memo?: string;
|
||||
}) {
|
||||
if (dto.amount <= 0) throw new BadRequestException('Transfer amount must be positive');
|
||||
if (dto.fromAccountId === dto.toAccountId) throw new BadRequestException('Cannot transfer to the same account');
|
||||
|
||||
const fromAccount = await this.findOne(dto.fromAccountId);
|
||||
const toAccount = await this.findOne(dto.toAccountId);
|
||||
|
||||
if (fromAccount.account_type !== 'asset') throw new BadRequestException('Source account must be an asset account');
|
||||
if (toAccount.account_type !== 'asset') throw new BadRequestException('Destination account must be an asset account');
|
||||
|
||||
// Find fiscal period
|
||||
const asOf = new Date(dto.transferDate);
|
||||
const year = asOf.getFullYear();
|
||||
const month = asOf.getMonth() + 1;
|
||||
const periods = await this.tenant.query(
|
||||
'SELECT id FROM fiscal_periods WHERE year = $1 AND month = $2',
|
||||
[year, month],
|
||||
);
|
||||
if (!periods.length) {
|
||||
throw new BadRequestException(`No fiscal period found for ${year}-${String(month).padStart(2, '0')}`);
|
||||
}
|
||||
|
||||
const memo = dto.memo || `Transfer from ${fromAccount.name} to ${toAccount.name}`;
|
||||
|
||||
// Create journal entry: debit destination (increase), credit source (decrease)
|
||||
const jeRows = await this.tenant.query(
|
||||
`INSERT INTO journal_entries (entry_date, description, entry_type, fiscal_period_id, is_posted, posted_at, created_by)
|
||||
VALUES ($1, $2, 'transfer', $3, true, NOW(), $4)
|
||||
RETURNING *`,
|
||||
[dto.transferDate, memo, periods[0].id, '00000000-0000-0000-0000-000000000000'],
|
||||
);
|
||||
const je = jeRows[0];
|
||||
|
||||
// Credit source account (reduces asset balance)
|
||||
await this.tenant.query(
|
||||
`INSERT INTO journal_entry_lines (journal_entry_id, account_id, debit, credit, memo)
|
||||
VALUES ($1, $2, 0, $3, $4)`,
|
||||
[je.id, dto.fromAccountId, dto.amount, memo],
|
||||
);
|
||||
|
||||
// Debit destination account (increases asset balance)
|
||||
await this.tenant.query(
|
||||
`INSERT INTO journal_entry_lines (journal_entry_id, account_id, debit, credit, memo)
|
||||
VALUES ($1, $2, $3, 0, $4)`,
|
||||
[je.id, dto.toAccountId, dto.amount, memo],
|
||||
);
|
||||
|
||||
return je;
|
||||
}
|
||||
|
||||
async getTrialBalance(asOfDate?: string) {
|
||||
const dateFilter = asOfDate
|
||||
? `AND je.entry_date <= $1`
|
||||
|
||||
@@ -37,6 +37,11 @@ export class CreateAccountDto {
|
||||
@IsOptional()
|
||||
initialBalance?: number;
|
||||
|
||||
@ApiProperty({ required: false, description: 'ISO date string (YYYY-MM-DD) for when the initial balance was accurate' })
|
||||
@IsString()
|
||||
@IsOptional()
|
||||
initialBalanceDate?: string;
|
||||
|
||||
@ApiProperty({ required: false, description: 'Annual interest rate as a percentage' })
|
||||
@IsOptional()
|
||||
interestRate?: number;
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
import { Injectable, NotFoundException } from '@nestjs/common';
|
||||
import { Injectable, NotFoundException, BadRequestException } from '@nestjs/common';
|
||||
import { TenantService } from '../../database/tenant.service';
|
||||
|
||||
const DEFAULT_DUE_MONTHS: Record<string, number[]> = {
|
||||
monthly: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
|
||||
quarterly: [1, 4, 7, 10],
|
||||
annual: [1],
|
||||
};
|
||||
|
||||
@Injectable()
|
||||
export class AssessmentGroupsService {
|
||||
constructor(private tenant: TenantService) {}
|
||||
@@ -42,6 +48,33 @@ export class AssessmentGroupsService {
|
||||
return rows.length ? rows[0] : null;
|
||||
}
|
||||
|
||||
private validateDueMonths(frequency: string, dueMonths: number[]) {
|
||||
if (!dueMonths || !dueMonths.length) {
|
||||
throw new BadRequestException('Due months are required');
|
||||
}
|
||||
// Validate all values are 1-12
|
||||
if (dueMonths.some((m) => m < 1 || m > 12 || !Number.isInteger(m))) {
|
||||
throw new BadRequestException('Due months must be integers between 1 and 12');
|
||||
}
|
||||
switch (frequency) {
|
||||
case 'monthly':
|
||||
if (dueMonths.length !== 12) {
|
||||
throw new BadRequestException('Monthly frequency must include all 12 months');
|
||||
}
|
||||
break;
|
||||
case 'quarterly':
|
||||
if (dueMonths.length !== 4) {
|
||||
throw new BadRequestException('Quarterly frequency must have exactly 4 due months');
|
||||
}
|
||||
break;
|
||||
case 'annual':
|
||||
if (dueMonths.length !== 1) {
|
||||
throw new BadRequestException('Annual frequency must have exactly 1 due month');
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
async create(dto: any) {
|
||||
const existingGroups = await this.tenant.query('SELECT COUNT(*) as cnt FROM assessment_groups');
|
||||
const isFirstGroup = parseInt(existingGroups[0].cnt) === 0;
|
||||
@@ -51,17 +84,23 @@ export class AssessmentGroupsService {
|
||||
await this.tenant.query('UPDATE assessment_groups SET is_default = false WHERE is_default = true');
|
||||
}
|
||||
|
||||
const frequency = dto.frequency || 'monthly';
|
||||
const dueMonths = dto.dueMonths || DEFAULT_DUE_MONTHS[frequency] || DEFAULT_DUE_MONTHS.monthly;
|
||||
const dueDay = Math.min(Math.max(dto.dueDay || 1, 1), 28);
|
||||
|
||||
this.validateDueMonths(frequency, dueMonths);
|
||||
|
||||
const rows = await this.tenant.query(
|
||||
`INSERT INTO assessment_groups (name, description, regular_assessment, special_assessment, unit_count, frequency, is_default)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *`,
|
||||
`INSERT INTO assessment_groups (name, description, regular_assessment, special_assessment, unit_count, frequency, due_months, due_day, is_default)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *`,
|
||||
[dto.name, dto.description || null, dto.regularAssessment || 0, dto.specialAssessment || 0,
|
||||
dto.unitCount || 0, dto.frequency || 'monthly', shouldBeDefault],
|
||||
dto.unitCount || 0, frequency, dueMonths, dueDay, shouldBeDefault],
|
||||
);
|
||||
return rows[0];
|
||||
}
|
||||
|
||||
async update(id: string, dto: any) {
|
||||
await this.findOne(id);
|
||||
const existing = await this.findOne(id);
|
||||
|
||||
if (dto.isDefault === true) {
|
||||
await this.tenant.query('UPDATE assessment_groups SET is_default = false WHERE is_default = true');
|
||||
@@ -80,6 +119,24 @@ export class AssessmentGroupsService {
|
||||
if (dto.frequency !== undefined) { sets.push(`frequency = $${idx++}`); params.push(dto.frequency); }
|
||||
if (dto.isDefault !== undefined) { sets.push(`is_default = $${idx++}`); params.push(dto.isDefault); }
|
||||
|
||||
// Handle due_months: if frequency changed and no explicit dueMonths, auto-populate defaults
|
||||
const effectiveFrequency = dto.frequency || existing.frequency;
|
||||
if (dto.dueMonths !== undefined) {
|
||||
this.validateDueMonths(effectiveFrequency, dto.dueMonths);
|
||||
sets.push(`due_months = $${idx++}`);
|
||||
params.push(dto.dueMonths);
|
||||
} else if (dto.frequency !== undefined && dto.frequency !== existing.frequency) {
|
||||
// Frequency changed, auto-populate due_months
|
||||
const newDueMonths = DEFAULT_DUE_MONTHS[dto.frequency] || DEFAULT_DUE_MONTHS.monthly;
|
||||
sets.push(`due_months = $${idx++}`);
|
||||
params.push(newDueMonths);
|
||||
}
|
||||
|
||||
if (dto.dueDay !== undefined) {
|
||||
sets.push(`due_day = $${idx++}`);
|
||||
params.push(Math.min(Math.max(dto.dueDay, 1), 28));
|
||||
}
|
||||
|
||||
if (!sets.length) return this.findOne(id);
|
||||
|
||||
sets.push('updated_at = NOW()');
|
||||
|
||||
325
backend/src/modules/auth/admin-analytics.service.ts
Normal file
325
backend/src/modules/auth/admin-analytics.service.ts
Normal file
@@ -0,0 +1,325 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { DataSource } from 'typeorm';
|
||||
|
||||
@Injectable()
|
||||
export class AdminAnalyticsService {
|
||||
private readonly logger = new Logger(AdminAnalyticsService.name);
|
||||
|
||||
constructor(private dataSource: DataSource) {}
|
||||
|
||||
/**
|
||||
* Platform-wide metrics for the admin dashboard.
|
||||
*/
|
||||
async getPlatformMetrics() {
|
||||
const [
|
||||
userStats,
|
||||
orgStats,
|
||||
planBreakdown,
|
||||
statusBreakdown,
|
||||
newTenantsPerMonth,
|
||||
newUsersPerMonth,
|
||||
aiStats,
|
||||
activeUsers30d,
|
||||
] = await Promise.all([
|
||||
this.dataSource.query(`
|
||||
SELECT
|
||||
COUNT(*) as total_users,
|
||||
COUNT(*) FILTER (WHERE is_superadmin = true) as superadmin_count,
|
||||
COUNT(*) FILTER (WHERE is_platform_owner = true) as platform_owner_count
|
||||
FROM shared.users
|
||||
`),
|
||||
this.dataSource.query(`
|
||||
SELECT
|
||||
COUNT(*) as total_organizations,
|
||||
COUNT(*) FILTER (WHERE status = 'active') as active_count,
|
||||
COUNT(*) FILTER (WHERE status = 'archived') as archived_count,
|
||||
COUNT(*) FILTER (WHERE status = 'suspended') as suspended_count,
|
||||
COUNT(*) FILTER (WHERE status = 'trial') as trial_count
|
||||
FROM shared.organizations
|
||||
`),
|
||||
this.dataSource.query(`
|
||||
SELECT plan_level, COUNT(*) as count
|
||||
FROM shared.organizations
|
||||
WHERE status != 'archived'
|
||||
GROUP BY plan_level
|
||||
ORDER BY count DESC
|
||||
`),
|
||||
this.dataSource.query(`
|
||||
SELECT status, COUNT(*) as count
|
||||
FROM shared.organizations
|
||||
GROUP BY status
|
||||
ORDER BY count DESC
|
||||
`),
|
||||
this.dataSource.query(`
|
||||
SELECT
|
||||
DATE_TRUNC('month', created_at) as month,
|
||||
COUNT(*) as count
|
||||
FROM shared.organizations
|
||||
WHERE created_at > NOW() - INTERVAL '6 months'
|
||||
GROUP BY DATE_TRUNC('month', created_at)
|
||||
ORDER BY month DESC
|
||||
`),
|
||||
this.dataSource.query(`
|
||||
SELECT
|
||||
DATE_TRUNC('month', created_at) as month,
|
||||
COUNT(*) as count
|
||||
FROM shared.users
|
||||
WHERE created_at > NOW() - INTERVAL '6 months'
|
||||
GROUP BY DATE_TRUNC('month', created_at)
|
||||
ORDER BY month DESC
|
||||
`),
|
||||
this.dataSource.query(`
|
||||
SELECT
|
||||
COUNT(*) as total_requests,
|
||||
COUNT(*) FILTER (WHERE status = 'success') as successful,
|
||||
ROUND(AVG(response_time_ms)) as avg_response_ms
|
||||
FROM shared.ai_recommendation_log
|
||||
WHERE requested_at > NOW() - INTERVAL '30 days'
|
||||
`),
|
||||
this.dataSource.query(`
|
||||
SELECT COUNT(DISTINCT user_id) as count
|
||||
FROM shared.login_history
|
||||
WHERE logged_in_at > NOW() - INTERVAL '30 days'
|
||||
`),
|
||||
]);
|
||||
|
||||
return {
|
||||
totalUsers: parseInt(userStats[0]?.total_users || '0'),
|
||||
superadminCount: parseInt(userStats[0]?.superadmin_count || '0'),
|
||||
platformOwnerCount: parseInt(userStats[0]?.platform_owner_count || '0'),
|
||||
activeUsers30d: parseInt(activeUsers30d[0]?.count || '0'),
|
||||
totalOrganizations: parseInt(orgStats[0]?.total_organizations || '0'),
|
||||
activeOrganizations: parseInt(orgStats[0]?.active_count || '0'),
|
||||
archivedOrganizations: parseInt(orgStats[0]?.archived_count || '0'),
|
||||
suspendedOrganizations: parseInt(orgStats[0]?.suspended_count || '0'),
|
||||
trialOrganizations: parseInt(orgStats[0]?.trial_count || '0'),
|
||||
planBreakdown: planBreakdown.map((r: any) => ({
|
||||
plan: r.plan_level,
|
||||
count: parseInt(r.count),
|
||||
})),
|
||||
statusBreakdown: statusBreakdown.map((r: any) => ({
|
||||
status: r.status,
|
||||
count: parseInt(r.count),
|
||||
})),
|
||||
newTenantsPerMonth: newTenantsPerMonth.map((r: any) => ({
|
||||
month: r.month,
|
||||
count: parseInt(r.count),
|
||||
})),
|
||||
newUsersPerMonth: newUsersPerMonth.map((r: any) => ({
|
||||
month: r.month,
|
||||
count: parseInt(r.count),
|
||||
})),
|
||||
aiRequestsLast30d: parseInt(aiStats[0]?.total_requests || '0'),
|
||||
aiSuccessfulLast30d: parseInt(aiStats[0]?.successful || '0'),
|
||||
aiAvgResponseMs: parseInt(aiStats[0]?.avg_response_ms || '0'),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Detailed analytics for a specific tenant/organization.
|
||||
*/
|
||||
async getTenantDetail(orgId: string) {
|
||||
const [orgInfo, loginStats, weeklyLogins, monthlyLogins, aiCount, memberCount] = await Promise.all([
|
||||
this.dataSource.query(
|
||||
`SELECT o.*, (SELECT MAX(logged_in_at) FROM shared.login_history WHERE organization_id = o.id) as last_login
|
||||
FROM shared.organizations o WHERE o.id = $1`,
|
||||
[orgId],
|
||||
),
|
||||
this.dataSource.query(
|
||||
`SELECT
|
||||
COUNT(*) FILTER (WHERE logged_in_at > NOW() - INTERVAL '7 days') as logins_this_week,
|
||||
COUNT(*) FILTER (WHERE logged_in_at > NOW() - INTERVAL '30 days') as logins_this_month,
|
||||
COUNT(DISTINCT user_id) FILTER (WHERE logged_in_at > NOW() - INTERVAL '30 days') as active_users_30d
|
||||
FROM shared.login_history WHERE organization_id = $1`,
|
||||
[orgId],
|
||||
),
|
||||
this.dataSource.query(
|
||||
`SELECT
|
||||
DATE_TRUNC('week', logged_in_at) as week,
|
||||
COUNT(*) as count
|
||||
FROM shared.login_history
|
||||
WHERE organization_id = $1 AND logged_in_at > NOW() - INTERVAL '4 weeks'
|
||||
GROUP BY DATE_TRUNC('week', logged_in_at)
|
||||
ORDER BY week DESC`,
|
||||
[orgId],
|
||||
),
|
||||
this.dataSource.query(
|
||||
`SELECT
|
||||
DATE_TRUNC('month', logged_in_at) as month,
|
||||
COUNT(*) as count
|
||||
FROM shared.login_history
|
||||
WHERE organization_id = $1 AND logged_in_at > NOW() - INTERVAL '6 months'
|
||||
GROUP BY DATE_TRUNC('month', logged_in_at)
|
||||
ORDER BY month DESC`,
|
||||
[orgId],
|
||||
),
|
||||
this.dataSource.query(
|
||||
`SELECT COUNT(*) as count
|
||||
FROM shared.ai_recommendation_log
|
||||
WHERE organization_id = $1 AND requested_at > NOW() - INTERVAL '30 days'`,
|
||||
[orgId],
|
||||
),
|
||||
this.dataSource.query(
|
||||
`SELECT COUNT(*) as count FROM shared.user_organizations WHERE organization_id = $1 AND is_active = true`,
|
||||
[orgId],
|
||||
),
|
||||
]);
|
||||
|
||||
const org = orgInfo[0];
|
||||
if (!org) return null;
|
||||
|
||||
// Cross-schema queries for tenant financial data
|
||||
let cashOnHand = 0;
|
||||
let hasBudget = false;
|
||||
let recentTransactions = 0;
|
||||
|
||||
try {
|
||||
const cashResult = await this.dataSource.query(`
|
||||
SELECT COALESCE(SUM(sub.bal), 0) as total FROM (
|
||||
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
|
||||
FROM "${org.schema_name}".accounts a
|
||||
JOIN "${org.schema_name}".journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN "${org.schema_name}".journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
WHERE a.account_type = 'asset' AND a.is_active = true
|
||||
GROUP BY a.id
|
||||
) sub
|
||||
`);
|
||||
cashOnHand = parseFloat(cashResult[0]?.total || '0');
|
||||
|
||||
const budgetResult = await this.dataSource.query(
|
||||
`SELECT COUNT(*) as count FROM "${org.schema_name}".budgets WHERE fiscal_year = $1`,
|
||||
[new Date().getFullYear()],
|
||||
);
|
||||
hasBudget = parseInt(budgetResult[0]?.count || '0') > 0;
|
||||
|
||||
const txnResult = await this.dataSource.query(`
|
||||
SELECT COUNT(*) as count
|
||||
FROM "${org.schema_name}".journal_entries
|
||||
WHERE is_posted = true AND entry_date > NOW() - INTERVAL '30 days'
|
||||
`);
|
||||
recentTransactions = parseInt(txnResult[0]?.count || '0');
|
||||
} catch (err) {
|
||||
this.logger.warn(`Failed to query tenant schema ${org.schema_name}: ${err.message}`);
|
||||
}
|
||||
|
||||
return {
|
||||
organization: org,
|
||||
lastLogin: org.last_login,
|
||||
loginsThisWeek: parseInt(loginStats[0]?.logins_this_week || '0'),
|
||||
loginsThisMonth: parseInt(loginStats[0]?.logins_this_month || '0'),
|
||||
activeUsers30d: parseInt(loginStats[0]?.active_users_30d || '0'),
|
||||
weeklyLogins: weeklyLogins.map((r: any) => ({
|
||||
week: r.week,
|
||||
count: parseInt(r.count),
|
||||
})),
|
||||
monthlyLogins: monthlyLogins.map((r: any) => ({
|
||||
month: r.month,
|
||||
count: parseInt(r.count),
|
||||
})),
|
||||
aiRecommendations30d: parseInt(aiCount[0]?.count || '0'),
|
||||
memberCount: parseInt(memberCount[0]?.count || '0'),
|
||||
cashOnHand,
|
||||
hasBudget,
|
||||
recentTransactions,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* All tenants with health scores for the Health & Adoption tab.
|
||||
*
|
||||
* Health Score (0-100):
|
||||
* Active users 30d ≥ 1 → 25pts
|
||||
* Has current-year budget → 25pts
|
||||
* Journal entries 30d ≥ 1 → 25pts
|
||||
* 2+ active members → 15pts
|
||||
* AI usage 30d ≥ 1 → 10pts
|
||||
*/
|
||||
async getAllTenantsHealth() {
|
||||
const orgs = await this.dataSource.query(`
|
||||
SELECT
|
||||
o.id, o.name, o.schema_name, o.status, o.plan_level, o.created_at,
|
||||
o.payment_date, o.renewal_date,
|
||||
(SELECT COUNT(*) FROM shared.user_organizations WHERE organization_id = o.id AND is_active = true) as member_count,
|
||||
(SELECT MAX(lh.logged_in_at) FROM shared.login_history lh WHERE lh.organization_id = o.id) as last_login,
|
||||
(SELECT COUNT(DISTINCT lh.user_id) FROM shared.login_history lh WHERE lh.organization_id = o.id AND lh.logged_in_at > NOW() - INTERVAL '30 days') as active_users_30d,
|
||||
(SELECT COUNT(*) FROM shared.ai_recommendation_log ar WHERE ar.organization_id = o.id AND ar.requested_at > NOW() - INTERVAL '30 days') as ai_usage_30d
|
||||
FROM shared.organizations o
|
||||
WHERE o.status != 'archived'
|
||||
ORDER BY o.name
|
||||
`);
|
||||
|
||||
const currentYear = new Date().getFullYear();
|
||||
const results = [];
|
||||
|
||||
for (const org of orgs) {
|
||||
let cashOnHand = 0;
|
||||
let hasBudget = false;
|
||||
let journalEntries30d = 0;
|
||||
|
||||
try {
|
||||
const cashResult = await this.dataSource.query(`
|
||||
SELECT COALESCE(SUM(sub.bal), 0) as total FROM (
|
||||
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
|
||||
FROM "${org.schema_name}".accounts a
|
||||
JOIN "${org.schema_name}".journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN "${org.schema_name}".journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
WHERE a.account_type = 'asset' AND a.is_active = true
|
||||
GROUP BY a.id
|
||||
) sub
|
||||
`);
|
||||
cashOnHand = parseFloat(cashResult[0]?.total || '0');
|
||||
|
||||
const budgetResult = await this.dataSource.query(
|
||||
`SELECT COUNT(*) as count FROM "${org.schema_name}".budgets WHERE fiscal_year = $1`,
|
||||
[currentYear],
|
||||
);
|
||||
hasBudget = parseInt(budgetResult[0]?.count || '0') > 0;
|
||||
|
||||
const jeResult = await this.dataSource.query(`
|
||||
SELECT COUNT(*) as count
|
||||
FROM "${org.schema_name}".journal_entries
|
||||
WHERE is_posted = true AND entry_date > NOW() - INTERVAL '30 days'
|
||||
`);
|
||||
journalEntries30d = parseInt(jeResult[0]?.count || '0');
|
||||
} catch (err) {
|
||||
// Schema may not exist yet (new tenant)
|
||||
this.logger.warn(`Health check skip for ${org.schema_name}: ${err.message}`);
|
||||
}
|
||||
|
||||
// Calculate health score
|
||||
const activeUsers = parseInt(org.active_users_30d) || 0;
|
||||
const memberCount = parseInt(org.member_count) || 0;
|
||||
const aiUsage = parseInt(org.ai_usage_30d) || 0;
|
||||
|
||||
let healthScore = 0;
|
||||
if (activeUsers >= 1) healthScore += 25;
|
||||
if (hasBudget) healthScore += 25;
|
||||
if (journalEntries30d >= 1) healthScore += 25;
|
||||
if (memberCount >= 2) healthScore += 15;
|
||||
if (aiUsage >= 1) healthScore += 10;
|
||||
|
||||
results.push({
|
||||
id: org.id,
|
||||
name: org.name,
|
||||
schemaName: org.schema_name,
|
||||
status: org.status,
|
||||
planLevel: org.plan_level,
|
||||
createdAt: org.created_at,
|
||||
paymentDate: org.payment_date,
|
||||
renewalDate: org.renewal_date,
|
||||
memberCount,
|
||||
lastLogin: org.last_login,
|
||||
activeUsers30d: activeUsers,
|
||||
aiUsage30d: aiUsage,
|
||||
cashOnHand,
|
||||
hasBudget,
|
||||
journalEntries30d,
|
||||
healthScore,
|
||||
});
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,11 @@
|
||||
import { Controller, Get, Post, Put, Body, Param, UseGuards, Req, ForbiddenException, BadRequestException } from '@nestjs/common';
|
||||
import { ApiTags, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { JwtAuthGuard } from './guards/jwt-auth.guard';
|
||||
import { AuthService } from './auth.service';
|
||||
import { UsersService } from '../users/users.service';
|
||||
import { OrganizationsService } from '../organizations/organizations.service';
|
||||
import { AdminAnalyticsService } from './admin-analytics.service';
|
||||
import { IdeasService } from '../ideas/ideas.service';
|
||||
import * as bcrypt from 'bcryptjs';
|
||||
|
||||
@ApiTags('admin')
|
||||
@@ -11,8 +14,11 @@ import * as bcrypt from 'bcryptjs';
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class AdminController {
|
||||
constructor(
|
||||
private authService: AuthService,
|
||||
private usersService: UsersService,
|
||||
private orgService: OrganizationsService,
|
||||
private analyticsService: AdminAnalyticsService,
|
||||
private ideasService: IdeasService,
|
||||
) {}
|
||||
|
||||
private async requireSuperadmin(req: any) {
|
||||
@@ -22,25 +28,93 @@ export class AdminController {
|
||||
}
|
||||
}
|
||||
|
||||
// ── Platform Metrics ──
|
||||
|
||||
@Get('metrics')
|
||||
async getPlatformMetrics(@Req() req: any) {
|
||||
await this.requireSuperadmin(req);
|
||||
return this.analyticsService.getPlatformMetrics();
|
||||
}
|
||||
|
||||
// ── Users ──
|
||||
|
||||
@Get('users')
|
||||
async listUsers(@Req() req: any) {
|
||||
await this.requireSuperadmin(req);
|
||||
const users = await this.usersService.findAllUsers();
|
||||
return users.map(u => ({
|
||||
id: u.id, email: u.email, firstName: u.firstName, lastName: u.lastName,
|
||||
isSuperadmin: u.isSuperadmin, lastLoginAt: u.lastLoginAt, createdAt: u.createdAt,
|
||||
isSuperadmin: u.isSuperadmin, isPlatformOwner: u.isPlatformOwner || false,
|
||||
lastLoginAt: u.lastLoginAt, createdAt: u.createdAt,
|
||||
organizations: u.userOrganizations?.map(uo => ({
|
||||
id: uo.organizationId, name: uo.organization?.name, role: uo.role,
|
||||
})) || [],
|
||||
}));
|
||||
}
|
||||
|
||||
// ── Organizations ──
|
||||
|
||||
@Get('organizations')
|
||||
async listOrganizations(@Req() req: any) {
|
||||
await this.requireSuperadmin(req);
|
||||
return this.usersService.findAllOrganizations();
|
||||
}
|
||||
|
||||
@Get('organizations/:id/detail')
|
||||
async getTenantDetail(@Req() req: any, @Param('id') id: string) {
|
||||
await this.requireSuperadmin(req);
|
||||
const detail = await this.analyticsService.getTenantDetail(id);
|
||||
if (!detail) {
|
||||
throw new BadRequestException('Organization not found');
|
||||
}
|
||||
return detail;
|
||||
}
|
||||
|
||||
@Put('organizations/:id/subscription')
|
||||
async updateSubscription(
|
||||
@Req() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() body: { paymentDate?: string; confirmationNumber?: string; renewalDate?: string },
|
||||
) {
|
||||
await this.requireSuperadmin(req);
|
||||
const org = await this.orgService.updateSubscription(id, body);
|
||||
return { success: true, organization: org };
|
||||
}
|
||||
|
||||
@Put('organizations/:id/status')
|
||||
async updateOrgStatus(
|
||||
@Req() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() body: { status: string },
|
||||
) {
|
||||
await this.requireSuperadmin(req);
|
||||
const validStatuses = ['active', 'suspended', 'trial', 'archived'];
|
||||
if (!validStatuses.includes(body.status)) {
|
||||
throw new BadRequestException(`Invalid status. Must be one of: ${validStatuses.join(', ')}`);
|
||||
}
|
||||
const org = await this.orgService.updateStatus(id, body.status);
|
||||
return { success: true, organization: org };
|
||||
}
|
||||
|
||||
// ── Plan Level ──
|
||||
|
||||
@Put('organizations/:id/plan')
|
||||
async updateOrgPlan(
|
||||
@Req() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() body: { planLevel: string },
|
||||
) {
|
||||
await this.requireSuperadmin(req);
|
||||
const validPlans = ['standard', 'premium', 'enterprise'];
|
||||
if (!validPlans.includes(body.planLevel)) {
|
||||
throw new BadRequestException(`Invalid plan. Must be one of: ${validPlans.join(', ')}`);
|
||||
}
|
||||
const org = await this.orgService.updatePlanLevel(id, body.planLevel);
|
||||
return { success: true, organization: org };
|
||||
}
|
||||
|
||||
// ── Superadmin Toggle ──
|
||||
|
||||
@Post('users/:id/superadmin')
|
||||
async toggleSuperadmin(@Req() req: any, @Param('id') id: string, @Body() body: { isSuperadmin: boolean }) {
|
||||
await this.requireSuperadmin(req);
|
||||
@@ -48,6 +122,25 @@ export class AdminController {
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
// ── User Impersonation ──
|
||||
|
||||
@Post('impersonate/:userId')
|
||||
async impersonateUser(@Req() req: any, @Param('userId') userId: string) {
|
||||
await this.requireSuperadmin(req);
|
||||
const adminUserId = req.user.userId || req.user.sub;
|
||||
return this.authService.impersonateUser(adminUserId, userId);
|
||||
}
|
||||
|
||||
// ── Tenant Health ──
|
||||
|
||||
@Get('tenants-health')
|
||||
async getTenantsHealth(@Req() req: any) {
|
||||
await this.requireSuperadmin(req);
|
||||
return this.analyticsService.getAllTenantsHealth();
|
||||
}
|
||||
|
||||
// ── Create Tenant ──
|
||||
|
||||
@Post('tenants')
|
||||
async createTenant(@Req() req: any, @Body() body: {
|
||||
orgName: string;
|
||||
@@ -106,18 +199,44 @@ export class AdminController {
|
||||
return { success: true, organization: org };
|
||||
}
|
||||
|
||||
@Put('organizations/:id/status')
|
||||
async updateOrgStatus(
|
||||
// ── Ideation ──
|
||||
|
||||
@Get('ideas')
|
||||
async listAllIdeas(@Req() req: any) {
|
||||
await this.requireSuperadmin(req);
|
||||
return this.ideasService.findAll();
|
||||
}
|
||||
|
||||
@Put('ideas/:id/status')
|
||||
async updateIdeaStatus(
|
||||
@Req() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() body: { status: string },
|
||||
) {
|
||||
await this.requireSuperadmin(req);
|
||||
const validStatuses = ['active', 'suspended', 'trial', 'archived'];
|
||||
if (!validStatuses.includes(body.status)) {
|
||||
throw new BadRequestException(`Invalid status. Must be one of: ${validStatuses.join(', ')}`);
|
||||
}
|
||||
const org = await this.orgService.updateStatus(id, body.status);
|
||||
const idea = await this.ideasService.updateStatus(id, body.status);
|
||||
return { success: true, idea };
|
||||
}
|
||||
|
||||
@Put('ideas/:id/note')
|
||||
async updateIdeaNote(
|
||||
@Req() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() body: { adminNote: string },
|
||||
) {
|
||||
await this.requireSuperadmin(req);
|
||||
const idea = await this.ideasService.updateNote(id, body.adminNote);
|
||||
return { success: true, idea };
|
||||
}
|
||||
|
||||
@Put('organizations/:id/settings')
|
||||
async updateOrgSettings(
|
||||
@Req() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() body: Record<string, any>,
|
||||
) {
|
||||
await this.requireSuperadmin(req);
|
||||
const org = await this.orgService.updateSettings(id, body);
|
||||
return { success: true, organization: org };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,18 +1,50 @@
|
||||
import {
|
||||
Controller,
|
||||
Post,
|
||||
Patch,
|
||||
Body,
|
||||
UseGuards,
|
||||
Request,
|
||||
Get,
|
||||
Res,
|
||||
Query,
|
||||
HttpCode,
|
||||
ForbiddenException,
|
||||
BadRequestException,
|
||||
} from '@nestjs/common';
|
||||
import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { AuthGuard } from '@nestjs/passport';
|
||||
import { Throttle } from '@nestjs/throttler';
|
||||
import { Response } from 'express';
|
||||
import { AuthService } from './auth.service';
|
||||
import { RegisterDto } from './dto/register.dto';
|
||||
import { LoginDto } from './dto/login.dto';
|
||||
import { SwitchOrgDto } from './dto/switch-org.dto';
|
||||
import { JwtAuthGuard } from './guards/jwt-auth.guard';
|
||||
import { AllowViewer } from '../../common/decorators/allow-viewer.decorator';
|
||||
|
||||
const COOKIE_NAME = 'ledgeriq_rt';
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
const isOpenRegistration = process.env.ALLOW_OPEN_REGISTRATION === 'true';
|
||||
|
||||
function setRefreshCookie(res: Response, token: string) {
|
||||
res.cookie(COOKIE_NAME, token, {
|
||||
httpOnly: true,
|
||||
secure: isProduction,
|
||||
sameSite: 'strict',
|
||||
path: '/api/auth',
|
||||
maxAge: 30 * 24 * 60 * 60 * 1000, // 30 days
|
||||
});
|
||||
}
|
||||
|
||||
function clearRefreshCookie(res: Response) {
|
||||
res.clearCookie(COOKIE_NAME, {
|
||||
httpOnly: true,
|
||||
secure: isProduction,
|
||||
sameSite: 'strict',
|
||||
path: '/api/auth',
|
||||
});
|
||||
}
|
||||
|
||||
@ApiTags('auth')
|
||||
@Controller('auth')
|
||||
@@ -20,16 +52,74 @@ export class AuthController {
|
||||
constructor(private authService: AuthService) {}
|
||||
|
||||
@Post('register')
|
||||
@ApiOperation({ summary: 'Register a new user' })
|
||||
async register(@Body() dto: RegisterDto) {
|
||||
return this.authService.register(dto);
|
||||
@ApiOperation({ summary: 'Register a new user (disabled unless ALLOW_OPEN_REGISTRATION=true)' })
|
||||
@Throttle({ default: { limit: 5, ttl: 60000 } })
|
||||
async register(@Body() dto: RegisterDto, @Res({ passthrough: true }) res: Response) {
|
||||
if (!isOpenRegistration) {
|
||||
throw new ForbiddenException(
|
||||
'Open registration is disabled. Please use an invitation link to create your account.',
|
||||
);
|
||||
}
|
||||
const result = await this.authService.register(dto);
|
||||
if (result.refreshToken) {
|
||||
setRefreshCookie(res, result.refreshToken);
|
||||
}
|
||||
const { refreshToken, ...response } = result;
|
||||
return response;
|
||||
}
|
||||
|
||||
@Post('login')
|
||||
@ApiOperation({ summary: 'Login with email and password' })
|
||||
@Throttle({ default: { limit: 5, ttl: 60000 } })
|
||||
@UseGuards(AuthGuard('local'))
|
||||
async login(@Request() req: any, @Body() _dto: LoginDto) {
|
||||
return this.authService.login(req.user);
|
||||
async login(@Request() req: any, @Body() _dto: LoginDto, @Res({ passthrough: true }) res: Response) {
|
||||
const ip = req.headers['x-forwarded-for'] || req.ip;
|
||||
const ua = req.headers['user-agent'];
|
||||
const result = await this.authService.login(req.user, ip, ua);
|
||||
|
||||
// MFA challenge — no cookie, just return the challenge token
|
||||
if ('mfaRequired' in result) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if ('refreshToken' in result && result.refreshToken) {
|
||||
setRefreshCookie(res, result.refreshToken);
|
||||
}
|
||||
const { refreshToken: _rt, ...response } = result as any;
|
||||
return response;
|
||||
}
|
||||
|
||||
@Post('refresh')
|
||||
@ApiOperation({ summary: 'Refresh access token using httpOnly cookie' })
|
||||
async refresh(@Request() req: any, @Res({ passthrough: true }) res: Response) {
|
||||
const rawToken = req.cookies?.[COOKIE_NAME];
|
||||
if (!rawToken) {
|
||||
throw new BadRequestException('No refresh token');
|
||||
}
|
||||
return this.authService.refreshAccessToken(rawToken);
|
||||
}
|
||||
|
||||
@Post('logout')
|
||||
@ApiOperation({ summary: 'Logout and revoke refresh token' })
|
||||
@HttpCode(200)
|
||||
async logout(@Request() req: any, @Res({ passthrough: true }) res: Response) {
|
||||
const rawToken = req.cookies?.[COOKIE_NAME];
|
||||
if (rawToken) {
|
||||
await this.authService.logout(rawToken);
|
||||
}
|
||||
clearRefreshCookie(res);
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Post('logout-everywhere')
|
||||
@ApiOperation({ summary: 'Revoke all sessions' })
|
||||
@HttpCode(200)
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async logoutEverywhere(@Request() req: any, @Res({ passthrough: true }) res: Response) {
|
||||
await this.authService.logoutEverywhere(req.user.sub);
|
||||
clearRefreshCookie(res);
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Get('profile')
|
||||
@@ -40,11 +130,114 @@ export class AuthController {
|
||||
return this.authService.getProfile(req.user.sub);
|
||||
}
|
||||
|
||||
@Patch('intro-seen')
|
||||
@ApiOperation({ summary: 'Mark the how-to intro as seen for the current user' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@AllowViewer()
|
||||
async markIntroSeen(@Request() req: any) {
|
||||
await this.authService.markIntroSeen(req.user.sub);
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Post('switch-org')
|
||||
@ApiOperation({ summary: 'Switch active organization' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async switchOrg(@Request() req: any, @Body() dto: SwitchOrgDto) {
|
||||
return this.authService.switchOrganization(req.user.sub, dto.organizationId);
|
||||
@AllowViewer()
|
||||
async switchOrg(@Request() req: any, @Body() dto: SwitchOrgDto, @Res({ passthrough: true }) res: Response) {
|
||||
const ip = req.headers['x-forwarded-for'] || req.ip;
|
||||
const ua = req.headers['user-agent'];
|
||||
const result = await this.authService.switchOrganization(req.user.sub, dto.organizationId, ip, ua);
|
||||
if (result.refreshToken) {
|
||||
setRefreshCookie(res, result.refreshToken);
|
||||
}
|
||||
const { refreshToken, ...response } = result;
|
||||
return response;
|
||||
}
|
||||
|
||||
// ─── Activation Endpoints ─────────────────────────────────────────
|
||||
|
||||
@Get('activate')
|
||||
@ApiOperation({ summary: 'Validate an activation token' })
|
||||
async validateActivation(@Query('token') token: string) {
|
||||
if (!token) throw new BadRequestException('Token required');
|
||||
return this.authService.validateInviteToken(token);
|
||||
}
|
||||
|
||||
@Post('activate')
|
||||
@ApiOperation({ summary: 'Activate user account with password' })
|
||||
@Throttle({ default: { limit: 5, ttl: 60000 } })
|
||||
async activate(
|
||||
@Body() body: { token: string; password: string; fullName: string },
|
||||
@Res({ passthrough: true }) res: Response,
|
||||
) {
|
||||
if (!body.token || !body.password || !body.fullName) {
|
||||
throw new BadRequestException('Token, password, and fullName are required');
|
||||
}
|
||||
if (body.password.length < 8) {
|
||||
throw new BadRequestException('Password must be at least 8 characters');
|
||||
}
|
||||
const result = await this.authService.activateUser(body.token, body.password, body.fullName);
|
||||
if (result.refreshToken) {
|
||||
setRefreshCookie(res, result.refreshToken);
|
||||
}
|
||||
const { refreshToken, ...response } = result;
|
||||
return response;
|
||||
}
|
||||
|
||||
@Post('resend-activation')
|
||||
@ApiOperation({ summary: 'Resend activation email' })
|
||||
@Throttle({ default: { limit: 2, ttl: 60000 } })
|
||||
async resendActivation(@Body() body: { email: string }) {
|
||||
// Stubbed — will be implemented when email service is ready
|
||||
return { success: true, message: 'If an account exists, a new activation link has been sent.' };
|
||||
}
|
||||
|
||||
// ─── Password Reset Flow ──────────────────────────────────────────
|
||||
|
||||
@Post('forgot-password')
|
||||
@ApiOperation({ summary: 'Request a password reset email' })
|
||||
@HttpCode(200)
|
||||
@Throttle({ default: { limit: 3, ttl: 60000 } })
|
||||
async forgotPassword(@Body() body: { email: string }) {
|
||||
if (!body.email) throw new BadRequestException('Email is required');
|
||||
await this.authService.requestPasswordReset(body.email);
|
||||
// Always return same message to prevent account enumeration
|
||||
return { message: 'If that email exists, a password reset link has been sent.' };
|
||||
}
|
||||
|
||||
@Post('reset-password')
|
||||
@ApiOperation({ summary: 'Reset password using a reset token' })
|
||||
@HttpCode(200)
|
||||
@Throttle({ default: { limit: 5, ttl: 60000 } })
|
||||
async resetPassword(@Body() body: { token: string; newPassword: string }) {
|
||||
if (!body.token || !body.newPassword) {
|
||||
throw new BadRequestException('Token and newPassword are required');
|
||||
}
|
||||
if (body.newPassword.length < 8) {
|
||||
throw new BadRequestException('Password must be at least 8 characters');
|
||||
}
|
||||
await this.authService.resetPassword(body.token, body.newPassword);
|
||||
return { message: 'Password updated successfully.' };
|
||||
}
|
||||
|
||||
@Patch('change-password')
|
||||
@ApiOperation({ summary: 'Change password (authenticated)' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@AllowViewer()
|
||||
async changePassword(
|
||||
@Request() req: any,
|
||||
@Body() body: { currentPassword: string; newPassword: string },
|
||||
) {
|
||||
if (!body.currentPassword || !body.newPassword) {
|
||||
throw new BadRequestException('currentPassword and newPassword are required');
|
||||
}
|
||||
if (body.newPassword.length < 8) {
|
||||
throw new BadRequestException('Password must be at least 8 characters');
|
||||
}
|
||||
await this.authService.changePassword(req.user.sub, body.currentPassword, body.newPassword);
|
||||
return { message: 'Password changed successfully.' };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,28 +4,53 @@ import { PassportModule } from '@nestjs/passport';
|
||||
import { ConfigModule, ConfigService } from '@nestjs/config';
|
||||
import { AuthController } from './auth.controller';
|
||||
import { AdminController } from './admin.controller';
|
||||
import { MfaController } from './mfa.controller';
|
||||
import { SsoController } from './sso.controller';
|
||||
import { PasskeyController } from './passkey.controller';
|
||||
import { AuthService } from './auth.service';
|
||||
import { AdminAnalyticsService } from './admin-analytics.service';
|
||||
import { RefreshTokenService } from './refresh-token.service';
|
||||
import { MfaService } from './mfa.service';
|
||||
import { SsoService } from './sso.service';
|
||||
import { PasskeyService } from './passkey.service';
|
||||
import { JwtStrategy } from './strategies/jwt.strategy';
|
||||
import { LocalStrategy } from './strategies/local.strategy';
|
||||
import { UsersModule } from '../users/users.module';
|
||||
import { OrganizationsModule } from '../organizations/organizations.module';
|
||||
import { IdeasModule } from '../ideas/ideas.module';
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
UsersModule,
|
||||
OrganizationsModule,
|
||||
IdeasModule,
|
||||
PassportModule,
|
||||
JwtModule.registerAsync({
|
||||
imports: [ConfigModule],
|
||||
inject: [ConfigService],
|
||||
useFactory: (configService: ConfigService) => ({
|
||||
secret: configService.get<string>('JWT_SECRET'),
|
||||
signOptions: { expiresIn: '24h' },
|
||||
signOptions: { expiresIn: '1h' },
|
||||
}),
|
||||
}),
|
||||
],
|
||||
controllers: [AuthController, AdminController],
|
||||
providers: [AuthService, JwtStrategy, LocalStrategy],
|
||||
exports: [AuthService],
|
||||
controllers: [
|
||||
AuthController,
|
||||
AdminController,
|
||||
MfaController,
|
||||
SsoController,
|
||||
PasskeyController,
|
||||
],
|
||||
providers: [
|
||||
AuthService,
|
||||
AdminAnalyticsService,
|
||||
RefreshTokenService,
|
||||
MfaService,
|
||||
SsoService,
|
||||
PasskeyService,
|
||||
JwtStrategy,
|
||||
LocalStrategy,
|
||||
],
|
||||
exports: [AuthService, RefreshTokenService, JwtModule],
|
||||
})
|
||||
export class AuthModule {}
|
||||
|
||||
@@ -2,19 +2,39 @@ import {
|
||||
Injectable,
|
||||
UnauthorizedException,
|
||||
ConflictException,
|
||||
ForbiddenException,
|
||||
NotFoundException,
|
||||
BadRequestException,
|
||||
Logger,
|
||||
} from '@nestjs/common';
|
||||
import { JwtService } from '@nestjs/jwt';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { DataSource } from 'typeorm';
|
||||
import * as bcrypt from 'bcryptjs';
|
||||
import { randomBytes, createHash } from 'crypto';
|
||||
import { UsersService } from '../users/users.service';
|
||||
import { EmailService } from '../email/email.service';
|
||||
import { RegisterDto } from './dto/register.dto';
|
||||
import { User } from '../users/entities/user.entity';
|
||||
import { RefreshTokenService } from './refresh-token.service';
|
||||
|
||||
@Injectable()
|
||||
export class AuthService {
|
||||
private readonly logger = new Logger(AuthService.name);
|
||||
private readonly inviteSecret: string;
|
||||
private readonly appUrl: string;
|
||||
|
||||
constructor(
|
||||
private usersService: UsersService,
|
||||
private jwtService: JwtService,
|
||||
) {}
|
||||
private configService: ConfigService,
|
||||
private dataSource: DataSource,
|
||||
private refreshTokenService: RefreshTokenService,
|
||||
private emailService: EmailService,
|
||||
) {
|
||||
this.inviteSecret = this.configService.get<string>('INVITE_TOKEN_SECRET') || 'dev-invite-secret';
|
||||
this.appUrl = this.configService.get<string>('APP_URL') || 'http://localhost:5173';
|
||||
}
|
||||
|
||||
async register(dto: RegisterDto) {
|
||||
const existing = await this.usersService.findByEmail(dto.email);
|
||||
@@ -47,7 +67,7 @@ export class AuthService {
|
||||
return user;
|
||||
}
|
||||
|
||||
async login(user: User) {
|
||||
async login(user: User, ipAddress?: string, userAgent?: string) {
|
||||
await this.usersService.updateLastLogin(user.id);
|
||||
const fullUser = await this.usersService.findByIdWithOrgs(user.id);
|
||||
const u = fullUser || user;
|
||||
@@ -65,9 +85,30 @@ export class AuthService {
|
||||
}
|
||||
}
|
||||
|
||||
// Record login in history (org_id is null at initial login)
|
||||
this.recordLoginHistory(user.id, null, ipAddress, userAgent).catch(() => {});
|
||||
|
||||
// If MFA is enabled, return a challenge token instead of full session
|
||||
if (u.mfaEnabled && u.mfaSecret) {
|
||||
const mfaToken = this.jwtService.sign(
|
||||
{ sub: u.id, type: 'mfa_challenge' },
|
||||
{ expiresIn: '5m' },
|
||||
);
|
||||
return { mfaRequired: true, mfaToken };
|
||||
}
|
||||
|
||||
return this.generateTokenResponse(u);
|
||||
}
|
||||
|
||||
/**
|
||||
* Complete login after MFA verification — generate full session tokens.
|
||||
*/
|
||||
async completeMfaLogin(userId: string): Promise<any> {
|
||||
const user = await this.usersService.findByIdWithOrgs(userId);
|
||||
if (!user) throw new UnauthorizedException('User not found');
|
||||
return this.generateTokenResponse(user);
|
||||
}
|
||||
|
||||
async getProfile(userId: string) {
|
||||
const user = await this.usersService.findByIdWithOrgs(userId);
|
||||
if (!user) {
|
||||
@@ -78,6 +119,7 @@ export class AuthService {
|
||||
email: user.email,
|
||||
firstName: user.firstName,
|
||||
lastName: user.lastName,
|
||||
mfaEnabled: user.mfaEnabled || false,
|
||||
organizations: user.userOrganizations?.map((uo) => ({
|
||||
id: uo.organization.id,
|
||||
name: uo.organization.name,
|
||||
@@ -86,7 +128,7 @@ export class AuthService {
|
||||
};
|
||||
}
|
||||
|
||||
async switchOrganization(userId: string, organizationId: string) {
|
||||
async switchOrganization(userId: string, organizationId: string, ipAddress?: string, userAgent?: string) {
|
||||
const user = await this.usersService.findByIdWithOrgs(userId);
|
||||
if (!user) {
|
||||
throw new UnauthorizedException('User not found');
|
||||
@@ -99,26 +141,57 @@ export class AuthService {
|
||||
throw new UnauthorizedException('Not a member of this organization');
|
||||
}
|
||||
|
||||
// Block access to suspended/archived organizations
|
||||
const orgStatus = membership.organization?.status;
|
||||
if (orgStatus && ['suspended', 'archived'].includes(orgStatus)) {
|
||||
throw new ForbiddenException(
|
||||
`This organization has been ${orgStatus}. Please contact your administrator.`,
|
||||
);
|
||||
}
|
||||
|
||||
const payload = {
|
||||
sub: user.id,
|
||||
email: user.email,
|
||||
orgId: membership.organizationId,
|
||||
orgSchema: membership.organization.schemaName,
|
||||
role: membership.role,
|
||||
};
|
||||
|
||||
// Record org switch in login history
|
||||
this.recordLoginHistory(userId, organizationId, ipAddress, userAgent).catch(() => {});
|
||||
|
||||
// Generate new refresh token for org switch
|
||||
const refreshToken = await this.refreshTokenService.createRefreshToken(user.id);
|
||||
|
||||
return {
|
||||
accessToken: this.jwtService.sign(payload),
|
||||
refreshToken,
|
||||
organization: {
|
||||
id: membership.organization.id,
|
||||
name: membership.organization.name,
|
||||
role: membership.role,
|
||||
settings: membership.organization.settings || {},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
private generateTokenResponse(user: User) {
|
||||
const orgs = user.userOrganizations || [];
|
||||
/**
|
||||
* Refresh an access token using a valid refresh token.
|
||||
*/
|
||||
async refreshAccessToken(rawRefreshToken: string) {
|
||||
const userId = await this.refreshTokenService.validateRefreshToken(rawRefreshToken);
|
||||
if (!userId) {
|
||||
throw new UnauthorizedException('Invalid or expired refresh token');
|
||||
}
|
||||
|
||||
const user = await this.usersService.findByIdWithOrgs(userId);
|
||||
if (!user) {
|
||||
throw new UnauthorizedException('User not found');
|
||||
}
|
||||
|
||||
// Generate a new access token (keep same org context if available)
|
||||
const orgs = (user.userOrganizations || []).filter(
|
||||
(uo) => !uo.organization?.status || !['suspended', 'archived'].includes(uo.organization.status),
|
||||
);
|
||||
const defaultOrg = orgs[0];
|
||||
|
||||
const payload: Record<string, any> = {
|
||||
@@ -129,25 +202,289 @@ export class AuthService {
|
||||
|
||||
if (defaultOrg) {
|
||||
payload.orgId = defaultOrg.organizationId;
|
||||
payload.orgSchema = defaultOrg.organization?.schemaName;
|
||||
payload.role = defaultOrg.role;
|
||||
}
|
||||
|
||||
return {
|
||||
accessToken: this.jwtService.sign(payload),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Logout: revoke the refresh token.
|
||||
*/
|
||||
async logout(rawRefreshToken: string): Promise<void> {
|
||||
if (rawRefreshToken) {
|
||||
await this.refreshTokenService.revokeToken(rawRefreshToken);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Logout everywhere: revoke all refresh tokens for a user.
|
||||
*/
|
||||
async logoutEverywhere(userId: string): Promise<void> {
|
||||
await this.refreshTokenService.revokeAllUserTokens(userId);
|
||||
}
|
||||
|
||||
async markIntroSeen(userId: string): Promise<void> {
|
||||
await this.usersService.markIntroSeen(userId);
|
||||
}
|
||||
|
||||
// ─── Invite Token (Activation) Methods ──────────────────────────────
|
||||
|
||||
/**
|
||||
* Validate an invite/activation token.
|
||||
*/
|
||||
async validateInviteToken(token: string) {
|
||||
try {
|
||||
const payload = this.jwtService.verify(token, { secret: this.inviteSecret });
|
||||
if (payload.type !== 'invite') throw new Error('Not an invite token');
|
||||
|
||||
const tokenHash = createHash('sha256').update(token).digest('hex');
|
||||
const rows = await this.dataSource.query(
|
||||
`SELECT it.*, o.name as org_name FROM shared.invite_tokens it
|
||||
JOIN shared.organizations o ON o.id = it.organization_id
|
||||
WHERE it.token_hash = $1`,
|
||||
[tokenHash],
|
||||
);
|
||||
|
||||
if (rows.length === 0) throw new Error('Token not found');
|
||||
const row = rows[0];
|
||||
if (row.used_at) throw new BadRequestException('This activation link has already been used');
|
||||
if (new Date(row.expires_at) < new Date()) throw new BadRequestException('This activation link has expired');
|
||||
|
||||
return { valid: true, email: payload.email, orgName: row.org_name, orgId: payload.orgId, userId: payload.userId };
|
||||
} catch (err) {
|
||||
if (err instanceof BadRequestException) throw err;
|
||||
throw new BadRequestException('Invalid or expired activation link');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Activate a user from an invite token (set password, activate, issue session).
|
||||
*/
|
||||
async activateUser(token: string, password: string, fullName: string) {
|
||||
const info = await this.validateInviteToken(token);
|
||||
|
||||
const passwordHash = await bcrypt.hash(password, 12);
|
||||
const [firstName, ...rest] = fullName.trim().split(' ');
|
||||
const lastName = rest.join(' ') || '';
|
||||
|
||||
// Update user record
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.users SET password_hash = $1, first_name = $2, last_name = $3,
|
||||
is_email_verified = true, updated_at = NOW()
|
||||
WHERE id = $4`,
|
||||
[passwordHash, firstName, lastName, info.userId],
|
||||
);
|
||||
|
||||
// Mark invite token as used
|
||||
const tokenHash = createHash('sha256').update(token).digest('hex');
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.invite_tokens SET used_at = NOW() WHERE token_hash = $1`,
|
||||
[tokenHash],
|
||||
);
|
||||
|
||||
// Issue session
|
||||
const user = await this.usersService.findByIdWithOrgs(info.userId);
|
||||
if (!user) throw new NotFoundException('User not found after activation');
|
||||
|
||||
return this.generateTokenResponse(user);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a signed invite token for a user/org pair.
|
||||
*/
|
||||
async generateInviteToken(userId: string, orgId: string, email: string): Promise<string> {
|
||||
const token = this.jwtService.sign(
|
||||
{ type: 'invite', userId, orgId, email },
|
||||
{ secret: this.inviteSecret, expiresIn: '72h' },
|
||||
);
|
||||
|
||||
const tokenHash = createHash('sha256').update(token).digest('hex');
|
||||
const expiresAt = new Date(Date.now() + 72 * 60 * 60 * 1000);
|
||||
|
||||
await this.dataSource.query(
|
||||
`INSERT INTO shared.invite_tokens (organization_id, user_id, token_hash, expires_at)
|
||||
VALUES ($1, $2, $3, $4)`,
|
||||
[orgId, userId, tokenHash, expiresAt],
|
||||
);
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
// ─── Password Reset Flow ──────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Request a password reset. Generates a token, stores its hash, and sends an email.
|
||||
* Silently succeeds even if the email doesn't exist (prevents enumeration).
|
||||
*/
|
||||
async requestPasswordReset(email: string): Promise<void> {
|
||||
const user = await this.usersService.findByEmail(email);
|
||||
if (!user) {
|
||||
// Silently return — don't reveal whether the account exists
|
||||
return;
|
||||
}
|
||||
|
||||
// Invalidate any existing reset tokens for this user
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.password_reset_tokens SET used_at = NOW()
|
||||
WHERE user_id = $1 AND used_at IS NULL`,
|
||||
[user.id],
|
||||
);
|
||||
|
||||
// Generate a 64-byte random token
|
||||
const rawToken = randomBytes(64).toString('base64url');
|
||||
const tokenHash = createHash('sha256').update(rawToken).digest('hex');
|
||||
const expiresAt = new Date(Date.now() + 15 * 60 * 1000); // 15 minutes
|
||||
|
||||
await this.dataSource.query(
|
||||
`INSERT INTO shared.password_reset_tokens (user_id, token_hash, expires_at)
|
||||
VALUES ($1, $2, $3)`,
|
||||
[user.id, tokenHash, expiresAt],
|
||||
);
|
||||
|
||||
const resetUrl = `${this.appUrl}/reset-password?token=${rawToken}`;
|
||||
await this.emailService.sendPasswordResetEmail(user.email, resetUrl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset password using a valid reset token.
|
||||
*/
|
||||
async resetPassword(rawToken: string, newPassword: string): Promise<void> {
|
||||
const tokenHash = createHash('sha256').update(rawToken).digest('hex');
|
||||
|
||||
const rows = await this.dataSource.query(
|
||||
`SELECT id, user_id, expires_at, used_at
|
||||
FROM shared.password_reset_tokens
|
||||
WHERE token_hash = $1`,
|
||||
[tokenHash],
|
||||
);
|
||||
|
||||
if (rows.length === 0) {
|
||||
throw new BadRequestException('Invalid or expired reset token');
|
||||
}
|
||||
|
||||
const record = rows[0];
|
||||
|
||||
if (record.used_at) {
|
||||
throw new BadRequestException('This reset link has already been used');
|
||||
}
|
||||
|
||||
if (new Date(record.expires_at) < new Date()) {
|
||||
throw new BadRequestException('This reset link has expired');
|
||||
}
|
||||
|
||||
// Update password
|
||||
const passwordHash = await bcrypt.hash(newPassword, 12);
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.users SET password_hash = $1, updated_at = NOW() WHERE id = $2`,
|
||||
[passwordHash, record.user_id],
|
||||
);
|
||||
|
||||
// Mark token as used
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.password_reset_tokens SET used_at = NOW() WHERE id = $1`,
|
||||
[record.id],
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Change password for an authenticated user (requires current password).
|
||||
*/
|
||||
async changePassword(userId: string, currentPassword: string, newPassword: string): Promise<void> {
|
||||
const user = await this.usersService.findById(userId);
|
||||
if (!user || !user.passwordHash) {
|
||||
throw new UnauthorizedException('User not found');
|
||||
}
|
||||
|
||||
const isValid = await bcrypt.compare(currentPassword, user.passwordHash);
|
||||
if (!isValid) {
|
||||
throw new UnauthorizedException('Current password is incorrect');
|
||||
}
|
||||
|
||||
const passwordHash = await bcrypt.hash(newPassword, 12);
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.users SET password_hash = $1, updated_at = NOW() WHERE id = $2`,
|
||||
[passwordHash, userId],
|
||||
);
|
||||
}
|
||||
|
||||
// ─── Private Helpers ──────────────────────────────────────────────
|
||||
|
||||
private async recordLoginHistory(
|
||||
userId: string,
|
||||
organizationId: string | null,
|
||||
ipAddress?: string,
|
||||
userAgent?: string,
|
||||
) {
|
||||
try {
|
||||
await this.dataSource.query(
|
||||
`INSERT INTO shared.login_history (user_id, organization_id, ip_address, user_agent)
|
||||
VALUES ($1, $2, $3, $4)`,
|
||||
[userId, organizationId, ipAddress || null, userAgent || null],
|
||||
);
|
||||
} catch (err) {
|
||||
// Non-critical — don't let login history failure block auth
|
||||
}
|
||||
}
|
||||
|
||||
async generateTokenResponse(user: User, impersonatedBy?: string) {
|
||||
const allOrgs = user.userOrganizations || [];
|
||||
// Filter out suspended/archived organizations
|
||||
const orgs = allOrgs.filter(
|
||||
(uo) => !uo.organization?.status || !['suspended', 'archived'].includes(uo.organization.status),
|
||||
);
|
||||
const defaultOrg = orgs[0];
|
||||
|
||||
const payload: Record<string, any> = {
|
||||
sub: user.id,
|
||||
email: user.email,
|
||||
isSuperadmin: user.isSuperadmin || false,
|
||||
};
|
||||
|
||||
if (impersonatedBy) {
|
||||
payload.impersonatedBy = impersonatedBy;
|
||||
}
|
||||
|
||||
if (defaultOrg) {
|
||||
payload.orgId = defaultOrg.organizationId;
|
||||
payload.role = defaultOrg.role;
|
||||
}
|
||||
|
||||
// Create refresh token
|
||||
const refreshToken = await this.refreshTokenService.createRefreshToken(user.id);
|
||||
|
||||
return {
|
||||
accessToken: this.jwtService.sign(payload),
|
||||
refreshToken,
|
||||
user: {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
firstName: user.firstName,
|
||||
lastName: user.lastName,
|
||||
isSuperadmin: user.isSuperadmin || false,
|
||||
isPlatformOwner: user.isPlatformOwner || false,
|
||||
hasSeenIntro: user.hasSeenIntro || false,
|
||||
mfaEnabled: user.mfaEnabled || false,
|
||||
},
|
||||
organizations: orgs.map((uo) => ({
|
||||
id: uo.organizationId,
|
||||
name: uo.organization?.name,
|
||||
schemaName: uo.organization?.schemaName,
|
||||
status: uo.organization?.status,
|
||||
role: uo.role,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
async impersonateUser(adminUserId: string, targetUserId: string) {
|
||||
const targetUser = await this.usersService.findByIdWithOrgs(targetUserId);
|
||||
if (!targetUser) {
|
||||
throw new NotFoundException('User not found');
|
||||
}
|
||||
if (targetUser.isSuperadmin) {
|
||||
throw new ForbiddenException('Cannot impersonate another superadmin');
|
||||
}
|
||||
return this.generateTokenResponse(targetUser, adminUserId);
|
||||
}
|
||||
}
|
||||
|
||||
121
backend/src/modules/auth/mfa.controller.ts
Normal file
121
backend/src/modules/auth/mfa.controller.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import {
|
||||
Controller,
|
||||
Post,
|
||||
Get,
|
||||
Body,
|
||||
UseGuards,
|
||||
Request,
|
||||
Res,
|
||||
BadRequestException,
|
||||
UnauthorizedException,
|
||||
} from '@nestjs/common';
|
||||
import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { Throttle } from '@nestjs/throttler';
|
||||
import { JwtService } from '@nestjs/jwt';
|
||||
import { Response } from 'express';
|
||||
import { MfaService } from './mfa.service';
|
||||
import { AuthService } from './auth.service';
|
||||
import { JwtAuthGuard } from './guards/jwt-auth.guard';
|
||||
import { AllowViewer } from '../../common/decorators/allow-viewer.decorator';
|
||||
|
||||
const COOKIE_NAME = 'ledgeriq_rt';
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
|
||||
@ApiTags('auth')
|
||||
@Controller('auth/mfa')
|
||||
export class MfaController {
|
||||
constructor(
|
||||
private mfaService: MfaService,
|
||||
private authService: AuthService,
|
||||
private jwtService: JwtService,
|
||||
) {}
|
||||
|
||||
@Post('setup')
|
||||
@ApiOperation({ summary: 'Generate MFA setup (QR code + secret)' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async setup(@Request() req: any) {
|
||||
return this.mfaService.generateSetup(req.user.sub);
|
||||
}
|
||||
|
||||
@Post('enable')
|
||||
@ApiOperation({ summary: 'Enable MFA after verifying TOTP code' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async enable(@Request() req: any, @Body() body: { token: string }) {
|
||||
if (!body.token) throw new BadRequestException('TOTP code required');
|
||||
return this.mfaService.enableMfa(req.user.sub, body.token);
|
||||
}
|
||||
|
||||
@Post('verify')
|
||||
@ApiOperation({ summary: 'Verify MFA during login flow' })
|
||||
@Throttle({ default: { limit: 5, ttl: 60000 } })
|
||||
async verify(
|
||||
@Body() body: { mfaToken: string; token: string; useRecovery?: boolean },
|
||||
@Res({ passthrough: true }) res: Response,
|
||||
) {
|
||||
if (!body.mfaToken || !body.token) {
|
||||
throw new BadRequestException('mfaToken and verification code required');
|
||||
}
|
||||
|
||||
// Decode the MFA challenge token
|
||||
let payload: any;
|
||||
try {
|
||||
payload = this.jwtService.verify(body.mfaToken);
|
||||
if (payload.type !== 'mfa_challenge') throw new Error('Wrong token type');
|
||||
} catch {
|
||||
throw new UnauthorizedException('Invalid or expired MFA challenge');
|
||||
}
|
||||
|
||||
const userId = payload.sub;
|
||||
let verified = false;
|
||||
|
||||
if (body.useRecovery) {
|
||||
verified = await this.mfaService.verifyRecoveryCode(userId, body.token);
|
||||
} else {
|
||||
verified = await this.mfaService.verifyMfa(userId, body.token);
|
||||
}
|
||||
|
||||
if (!verified) {
|
||||
throw new UnauthorizedException('Invalid verification code');
|
||||
}
|
||||
|
||||
// MFA passed — issue full session
|
||||
const result = await this.authService.completeMfaLogin(userId);
|
||||
if (result.refreshToken) {
|
||||
res.cookie(COOKIE_NAME, result.refreshToken, {
|
||||
httpOnly: true,
|
||||
secure: isProduction,
|
||||
sameSite: 'strict',
|
||||
path: '/api/auth',
|
||||
maxAge: 30 * 24 * 60 * 60 * 1000,
|
||||
});
|
||||
}
|
||||
const { refreshToken: _rt, ...response } = result;
|
||||
return response;
|
||||
}
|
||||
|
||||
@Post('disable')
|
||||
@ApiOperation({ summary: 'Disable MFA (requires password)' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async disable(@Request() req: any, @Body() body: { password: string }) {
|
||||
if (!body.password) throw new BadRequestException('Password required to disable MFA');
|
||||
|
||||
// Verify password first
|
||||
const user = await this.authService.validateUser(req.user.email, body.password);
|
||||
if (!user) throw new UnauthorizedException('Invalid password');
|
||||
|
||||
await this.mfaService.disableMfa(req.user.sub);
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Get('status')
|
||||
@ApiOperation({ summary: 'Get MFA status' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@AllowViewer()
|
||||
async status(@Request() req: any) {
|
||||
return this.mfaService.getStatus(req.user.sub);
|
||||
}
|
||||
}
|
||||
154
backend/src/modules/auth/mfa.service.ts
Normal file
154
backend/src/modules/auth/mfa.service.ts
Normal file
@@ -0,0 +1,154 @@
|
||||
import { Injectable, Logger, BadRequestException, UnauthorizedException } from '@nestjs/common';
|
||||
import { DataSource } from 'typeorm';
|
||||
import * as bcrypt from 'bcryptjs';
|
||||
import { generateSecret, generateURI, verifySync } from 'otplib';
|
||||
import * as QRCode from 'qrcode';
|
||||
import { randomBytes } from 'crypto';
|
||||
|
||||
@Injectable()
|
||||
export class MfaService {
|
||||
private readonly logger = new Logger(MfaService.name);
|
||||
|
||||
constructor(private dataSource: DataSource) {}
|
||||
|
||||
/**
|
||||
* Generate MFA setup data (secret + QR code) for a user.
|
||||
*/
|
||||
async generateSetup(userId: string): Promise<{ secret: string; qrDataUrl: string; otpauthUrl: string }> {
|
||||
const userRows = await this.dataSource.query(
|
||||
`SELECT email, mfa_enabled FROM shared.users WHERE id = $1`,
|
||||
[userId],
|
||||
);
|
||||
if (userRows.length === 0) throw new BadRequestException('User not found');
|
||||
|
||||
const secret = generateSecret();
|
||||
const otpauthUrl = generateURI({ secret, issuer: 'HOA LedgerIQ', label: userRows[0].email });
|
||||
const qrDataUrl = await QRCode.toDataURL(otpauthUrl);
|
||||
|
||||
// Store the secret temporarily (not verified yet)
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.users SET mfa_secret = $1, updated_at = NOW() WHERE id = $2`,
|
||||
[secret, userId],
|
||||
);
|
||||
|
||||
return { secret, qrDataUrl, otpauthUrl };
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable MFA after verifying the initial TOTP code.
|
||||
* Returns recovery codes.
|
||||
*/
|
||||
async enableMfa(userId: string, token: string): Promise<{ recoveryCodes: string[] }> {
|
||||
const userRows = await this.dataSource.query(
|
||||
`SELECT mfa_secret, mfa_enabled FROM shared.users WHERE id = $1`,
|
||||
[userId],
|
||||
);
|
||||
if (userRows.length === 0) throw new BadRequestException('User not found');
|
||||
if (!userRows[0].mfa_secret) throw new BadRequestException('MFA setup not initiated');
|
||||
if (userRows[0].mfa_enabled) throw new BadRequestException('MFA is already enabled');
|
||||
|
||||
// Verify the token
|
||||
const result = verifySync({ token, secret: userRows[0].mfa_secret });
|
||||
if (!result.valid) throw new BadRequestException('Invalid verification code');
|
||||
|
||||
// Generate recovery codes
|
||||
const recoveryCodes = Array.from({ length: 10 }, () =>
|
||||
randomBytes(4).toString('hex').toUpperCase(),
|
||||
);
|
||||
|
||||
// Hash recovery codes for storage
|
||||
const hashedCodes = await Promise.all(
|
||||
recoveryCodes.map((code) => bcrypt.hash(code, 10)),
|
||||
);
|
||||
|
||||
// Enable MFA
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.users SET
|
||||
mfa_enabled = true,
|
||||
totp_verified_at = NOW(),
|
||||
recovery_codes = $1,
|
||||
updated_at = NOW()
|
||||
WHERE id = $2`,
|
||||
[JSON.stringify(hashedCodes), userId],
|
||||
);
|
||||
|
||||
this.logger.log(`MFA enabled for user ${userId}`);
|
||||
return { recoveryCodes };
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify a TOTP code during login.
|
||||
*/
|
||||
async verifyMfa(userId: string, token: string): Promise<boolean> {
|
||||
const userRows = await this.dataSource.query(
|
||||
`SELECT mfa_secret, mfa_enabled FROM shared.users WHERE id = $1`,
|
||||
[userId],
|
||||
);
|
||||
if (userRows.length === 0 || !userRows[0].mfa_enabled) return false;
|
||||
|
||||
const result = verifySync({ token, secret: userRows[0].mfa_secret });
|
||||
return result.valid;
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify a recovery code (consumes it on success).
|
||||
*/
|
||||
async verifyRecoveryCode(userId: string, code: string): Promise<boolean> {
|
||||
const userRows = await this.dataSource.query(
|
||||
`SELECT recovery_codes FROM shared.users WHERE id = $1`,
|
||||
[userId],
|
||||
);
|
||||
if (userRows.length === 0 || !userRows[0].recovery_codes) return false;
|
||||
|
||||
const hashedCodes: string[] = JSON.parse(userRows[0].recovery_codes);
|
||||
|
||||
for (let i = 0; i < hashedCodes.length; i++) {
|
||||
const match = await bcrypt.compare(code.toUpperCase(), hashedCodes[i]);
|
||||
if (match) {
|
||||
// Remove the used code
|
||||
hashedCodes.splice(i, 1);
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.users SET recovery_codes = $1, updated_at = NOW() WHERE id = $2`,
|
||||
[JSON.stringify(hashedCodes), userId],
|
||||
);
|
||||
this.logger.log(`Recovery code used for user ${userId}`);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
 * Disable MFA for a user.
 *
 * Clears the TOTP secret, verification timestamp, and any remaining recovery
 * codes in a single statement, so no stale second-factor material survives.
 *
 * NOTE: this method performs no authorization or password check itself —
 * the caller is responsible for verifying the user's password first.
 */
async disableMfa(userId: string): Promise<void> {
  await this.dataSource.query(
    `UPDATE shared.users SET
    mfa_enabled = false,
    mfa_secret = NULL,
    totp_verified_at = NULL,
    recovery_codes = NULL,
    updated_at = NOW()
    WHERE id = $1`,
    [userId],
  );
  this.logger.log(`MFA disabled for user ${userId}`);
}
|
||||
|
||||
/**
|
||||
* Get MFA status for a user.
|
||||
*/
|
||||
async getStatus(userId: string): Promise<{ enabled: boolean; hasRecoveryCodes: boolean }> {
|
||||
const rows = await this.dataSource.query(
|
||||
`SELECT mfa_enabled, recovery_codes FROM shared.users WHERE id = $1`,
|
||||
[userId],
|
||||
);
|
||||
if (rows.length === 0) return { enabled: false, hasRecoveryCodes: false };
|
||||
|
||||
return {
|
||||
enabled: rows[0].mfa_enabled || false,
|
||||
hasRecoveryCodes: !!rows[0].recovery_codes && JSON.parse(rows[0].recovery_codes || '[]').length > 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
112
backend/src/modules/auth/passkey.controller.ts
Normal file
112
backend/src/modules/auth/passkey.controller.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import {
|
||||
Controller,
|
||||
Post,
|
||||
Get,
|
||||
Delete,
|
||||
Param,
|
||||
Body,
|
||||
UseGuards,
|
||||
Request,
|
||||
Res,
|
||||
BadRequestException,
|
||||
} from '@nestjs/common';
|
||||
import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { Throttle } from '@nestjs/throttler';
|
||||
import { Response } from 'express';
|
||||
import { PasskeyService } from './passkey.service';
|
||||
import { AuthService } from './auth.service';
|
||||
import { UsersService } from '../users/users.service';
|
||||
import { JwtAuthGuard } from './guards/jwt-auth.guard';
|
||||
import { AllowViewer } from '../../common/decorators/allow-viewer.decorator';
|
||||
|
||||
// Name of the httpOnly refresh-token cookie shared with the other auth controllers.
const COOKIE_NAME = 'ledgeriq_rt';
const isProduction = process.env.NODE_ENV === 'production';

/**
 * REST endpoints for WebAuthn passkey management and passkey login.
 *
 * Registration/list/remove require an authenticated session (JwtAuthGuard);
 * login-options and login are public but rate-limited via @Throttle.
 */
@ApiTags('auth')
@Controller('auth/passkeys')
export class PasskeyController {
  constructor(
    private passkeyService: PasskeyService,
    private authService: AuthService,
    private usersService: UsersService,
  ) {}

  // Challenge generation for navigator.credentials.create() on the client.
  @Post('register-options')
  @ApiOperation({ summary: 'Get passkey registration options' })
  @ApiBearerAuth()
  @UseGuards(JwtAuthGuard)
  async getRegistrationOptions(@Request() req: any) {
    return this.passkeyService.generateRegistrationOptions(req.user.sub);
  }

  // Verifies the attestation produced by the browser and stores the credential.
  @Post('register')
  @ApiOperation({ summary: 'Register a new passkey' })
  @ApiBearerAuth()
  @UseGuards(JwtAuthGuard)
  async register(
    @Request() req: any,
    @Body() body: { response: any; deviceName?: string },
  ) {
    if (!body.response) throw new BadRequestException('Attestation response required');
    return this.passkeyService.verifyRegistration(req.user.sub, body.response, body.deviceName);
  }

  // Public: challenge generation for navigator.credentials.get().
  @Post('login-options')
  @ApiOperation({ summary: 'Get passkey login options' })
  @Throttle({ default: { limit: 10, ttl: 60000 } })
  async getLoginOptions(@Body() body: { email?: string }) {
    return this.passkeyService.generateAuthenticationOptions(body.email);
  }

  /**
   * Public passkey login. On success, issues the normal token response and
   * sets the refresh-token cookie; the refresh token is stripped from the
   * JSON body so it only travels via the httpOnly cookie.
   *
   * NOTE(review): the expected challenge is supplied by the client rather
   * than looked up server-side (see PasskeyService.generateAuthenticationOptions),
   * which weakens replay protection — confirm and consider server-side storage.
   */
  @Post('login')
  @ApiOperation({ summary: 'Authenticate with passkey' })
  @Throttle({ default: { limit: 5, ttl: 60000 } })
  async login(
    @Body() body: { response: any; challenge: string },
    @Res({ passthrough: true }) res: Response,
  ) {
    if (!body.response || !body.challenge) {
      throw new BadRequestException('Assertion response and challenge required');
    }

    const { userId } = await this.passkeyService.verifyAuthentication(body.response, body.challenge);

    // Get user with orgs and generate session
    const user = await this.usersService.findByIdWithOrgs(userId);
    if (!user) throw new BadRequestException('User not found');

    await this.usersService.updateLastLogin(userId);
    const result = await this.authService.generateTokenResponse(user);

    if (result.refreshToken) {
      res.cookie(COOKIE_NAME, result.refreshToken, {
        httpOnly: true,
        secure: isProduction,
        sameSite: 'strict',
        path: '/api/auth',
        maxAge: 30 * 24 * 60 * 60 * 1000,
      });
    }

    // Never return the refresh token in the response body.
    const { refreshToken: _rt, ...response } = result;
    return response;
  }

  // Read-only listing; viewers are allowed.
  @Get()
  @ApiOperation({ summary: 'List registered passkeys' })
  @ApiBearerAuth()
  @UseGuards(JwtAuthGuard)
  @AllowViewer()
  async list(@Request() req: any) {
    return this.passkeyService.listPasskeys(req.user.sub);
  }

  // Deletion is scoped to the authenticated user inside the service.
  @Delete(':id')
  @ApiOperation({ summary: 'Remove a passkey' })
  @ApiBearerAuth()
  @UseGuards(JwtAuthGuard)
  async remove(@Request() req: any, @Param('id') passkeyId: string) {
    await this.passkeyService.removePasskey(req.user.sub, passkeyId);
    return { success: true };
  }
}
|
||||
246
backend/src/modules/auth/passkey.service.ts
Normal file
246
backend/src/modules/auth/passkey.service.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
import { Injectable, Logger, BadRequestException, UnauthorizedException } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { DataSource } from 'typeorm';
|
||||
import {
|
||||
generateRegistrationOptions,
|
||||
verifyRegistrationResponse,
|
||||
generateAuthenticationOptions,
|
||||
verifyAuthenticationResponse,
|
||||
} from '@simplewebauthn/server';
|
||||
|
||||
// Use inline type aliases to avoid ESM-only @simplewebauthn/types import issue
|
||||
type RegistrationResponseJSON = any;
|
||||
type AuthenticationResponseJSON = any;
|
||||
type AuthenticatorTransportFuture = any;
|
||||
|
||||
@Injectable()
|
||||
export class PasskeyService {
|
||||
private readonly logger = new Logger(PasskeyService.name);
|
||||
private rpID: string;
|
||||
private rpName: string;
|
||||
private origin: string;
|
||||
|
||||
constructor(
|
||||
private configService: ConfigService,
|
||||
private dataSource: DataSource,
|
||||
) {
|
||||
this.rpID = this.configService.get<string>('WEBAUTHN_RP_ID') || 'localhost';
|
||||
this.rpName = 'HOA LedgerIQ';
|
||||
this.origin = this.configService.get<string>('WEBAUTHN_RP_ORIGIN') || 'http://localhost';
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate registration options for navigator.credentials.create().
|
||||
*/
|
||||
async generateRegistrationOptions(userId: string) {
|
||||
const userRows = await this.dataSource.query(
|
||||
`SELECT id, email, first_name, last_name FROM shared.users WHERE id = $1`,
|
||||
[userId],
|
||||
);
|
||||
if (userRows.length === 0) throw new BadRequestException('User not found');
|
||||
const user = userRows[0];
|
||||
|
||||
// Get existing passkeys for exclusion
|
||||
const existingKeys = await this.dataSource.query(
|
||||
`SELECT credential_id, transports FROM shared.user_passkeys WHERE user_id = $1`,
|
||||
[userId],
|
||||
);
|
||||
|
||||
const options = await generateRegistrationOptions({
|
||||
rpName: this.rpName,
|
||||
rpID: this.rpID,
|
||||
userID: new TextEncoder().encode(userId),
|
||||
userName: user.email,
|
||||
userDisplayName: `${user.first_name || ''} ${user.last_name || ''}`.trim() || user.email,
|
||||
attestationType: 'none',
|
||||
excludeCredentials: existingKeys.map((k: any) => ({
|
||||
id: k.credential_id,
|
||||
type: 'public-key' as const,
|
||||
transports: k.transports || [],
|
||||
})),
|
||||
authenticatorSelection: {
|
||||
residentKey: 'preferred',
|
||||
userVerification: 'preferred',
|
||||
},
|
||||
});
|
||||
|
||||
// Store challenge temporarily
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.users SET webauthn_challenge = $1, updated_at = NOW() WHERE id = $2`,
|
||||
[options.challenge, userId],
|
||||
);
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify and store a passkey registration.
|
||||
*/
|
||||
async verifyRegistration(userId: string, response: RegistrationResponseJSON, deviceName?: string) {
|
||||
const userRows = await this.dataSource.query(
|
||||
`SELECT webauthn_challenge FROM shared.users WHERE id = $1`,
|
||||
[userId],
|
||||
);
|
||||
if (userRows.length === 0) throw new BadRequestException('User not found');
|
||||
const expectedChallenge = userRows[0].webauthn_challenge;
|
||||
if (!expectedChallenge) throw new BadRequestException('No registration challenge found');
|
||||
|
||||
const verification = await verifyRegistrationResponse({
|
||||
response,
|
||||
expectedChallenge,
|
||||
expectedOrigin: this.origin,
|
||||
expectedRPID: this.rpID,
|
||||
});
|
||||
|
||||
if (!verification.verified || !verification.registrationInfo) {
|
||||
throw new BadRequestException('Passkey registration verification failed');
|
||||
}
|
||||
|
||||
const { credential } = verification.registrationInfo;
|
||||
|
||||
// Store the passkey
|
||||
await this.dataSource.query(
|
||||
`INSERT INTO shared.user_passkeys (user_id, credential_id, public_key, counter, device_name, transports)
|
||||
VALUES ($1, $2, $3, $4, $5, $6)`,
|
||||
[
|
||||
userId,
|
||||
Buffer.from(credential.id).toString('base64url'),
|
||||
Buffer.from(credential.publicKey).toString('base64url'),
|
||||
credential.counter,
|
||||
deviceName || 'Passkey',
|
||||
credential.transports || [],
|
||||
],
|
||||
);
|
||||
|
||||
// Clear challenge
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.users SET webauthn_challenge = NULL WHERE id = $1`,
|
||||
[userId],
|
||||
);
|
||||
|
||||
this.logger.log(`Passkey registered for user ${userId}`);
|
||||
return { verified: true };
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate authentication options for navigator.credentials.get().
|
||||
*/
|
||||
async generateAuthenticationOptions(email?: string) {
|
||||
let allowCredentials: any[] | undefined;
|
||||
|
||||
if (email) {
|
||||
const userRows = await this.dataSource.query(
|
||||
`SELECT u.id FROM shared.users u WHERE u.email = $1`,
|
||||
[email],
|
||||
);
|
||||
if (userRows.length > 0) {
|
||||
const passkeys = await this.dataSource.query(
|
||||
`SELECT credential_id, transports FROM shared.user_passkeys WHERE user_id = $1`,
|
||||
[userRows[0].id],
|
||||
);
|
||||
allowCredentials = passkeys.map((k: any) => ({
|
||||
id: k.credential_id,
|
||||
type: 'public-key' as const,
|
||||
transports: k.transports || [],
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
const options = await generateAuthenticationOptions({
|
||||
rpID: this.rpID,
|
||||
allowCredentials,
|
||||
userVerification: 'preferred',
|
||||
});
|
||||
|
||||
// Store challenge — for passkey login we need a temporary storage
|
||||
// Since we don't know the user yet, store in a shared way
|
||||
// In production, use Redis/session. For now, we'll pass it back and verify client-side.
|
||||
return { ...options, challenge: options.challenge };
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify authentication and return the user.
|
||||
*/
|
||||
async verifyAuthentication(response: AuthenticationResponseJSON, expectedChallenge: string) {
|
||||
// Find the credential
|
||||
const credId = response.id;
|
||||
const passkeys = await this.dataSource.query(
|
||||
`SELECT p.*, u.id as user_id, u.email
|
||||
FROM shared.user_passkeys p
|
||||
JOIN shared.users u ON u.id = p.user_id
|
||||
WHERE p.credential_id = $1`,
|
||||
[credId],
|
||||
);
|
||||
|
||||
if (passkeys.length === 0) {
|
||||
throw new UnauthorizedException('Passkey not recognized');
|
||||
}
|
||||
|
||||
const passkey = passkeys[0];
|
||||
|
||||
const verification = await verifyAuthenticationResponse({
|
||||
response,
|
||||
expectedChallenge,
|
||||
expectedOrigin: this.origin,
|
||||
expectedRPID: this.rpID,
|
||||
credential: {
|
||||
id: passkey.credential_id,
|
||||
publicKey: Buffer.from(passkey.public_key, 'base64url'),
|
||||
counter: Number(passkey.counter),
|
||||
transports: (passkey.transports || []) as AuthenticatorTransportFuture[],
|
||||
},
|
||||
});
|
||||
|
||||
if (!verification.verified) {
|
||||
throw new UnauthorizedException('Passkey authentication failed');
|
||||
}
|
||||
|
||||
// Update counter and last_used_at
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.user_passkeys SET counter = $1, last_used_at = NOW() WHERE id = $2`,
|
||||
[verification.authenticationInfo.newCounter, passkey.id],
|
||||
);
|
||||
|
||||
return { userId: passkey.user_id };
|
||||
}
|
||||
|
||||
/**
|
||||
* List user's registered passkeys.
|
||||
*/
|
||||
async listPasskeys(userId: string) {
|
||||
const rows = await this.dataSource.query(
|
||||
`SELECT id, device_name, created_at, last_used_at
|
||||
FROM shared.user_passkeys
|
||||
WHERE user_id = $1
|
||||
ORDER BY created_at DESC`,
|
||||
[userId],
|
||||
);
|
||||
return rows;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a passkey.
|
||||
*/
|
||||
async removePasskey(userId: string, passkeyId: string): Promise<void> {
|
||||
// Check that user has password or other passkeys
|
||||
const [userRows, passkeyCount] = await Promise.all([
|
||||
this.dataSource.query(`SELECT password_hash FROM shared.users WHERE id = $1`, [userId]),
|
||||
this.dataSource.query(
|
||||
`SELECT COUNT(*) as cnt FROM shared.user_passkeys WHERE user_id = $1`,
|
||||
[userId],
|
||||
),
|
||||
]);
|
||||
|
||||
const hasPassword = !!userRows[0]?.password_hash;
|
||||
const count = parseInt(passkeyCount[0]?.cnt || '0', 10);
|
||||
|
||||
if (!hasPassword && count <= 1) {
|
||||
throw new BadRequestException('Cannot remove your only passkey without a password set');
|
||||
}
|
||||
|
||||
await this.dataSource.query(
|
||||
`DELETE FROM shared.user_passkeys WHERE id = $1 AND user_id = $2`,
|
||||
[passkeyId, userId],
|
||||
);
|
||||
}
|
||||
}
|
||||
98
backend/src/modules/auth/refresh-token.service.ts
Normal file
98
backend/src/modules/auth/refresh-token.service.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { DataSource } from 'typeorm';
|
||||
import { randomBytes, createHash } from 'crypto';
|
||||
|
||||
@Injectable()
|
||||
export class RefreshTokenService {
|
||||
private readonly logger = new Logger(RefreshTokenService.name);
|
||||
|
||||
constructor(private dataSource: DataSource) {}
|
||||
|
||||
/**
|
||||
* Create a new refresh token for a user.
|
||||
* Returns the raw (unhashed) token to be sent as an httpOnly cookie.
|
||||
*/
|
||||
async createRefreshToken(userId: string): Promise<string> {
|
||||
const rawToken = randomBytes(64).toString('base64url');
|
||||
const tokenHash = this.hashToken(rawToken);
|
||||
const expiresAt = new Date(Date.now() + 30 * 24 * 60 * 60 * 1000); // 30 days
|
||||
|
||||
await this.dataSource.query(
|
||||
`INSERT INTO shared.refresh_tokens (user_id, token_hash, expires_at)
|
||||
VALUES ($1, $2, $3)`,
|
||||
[userId, tokenHash, expiresAt],
|
||||
);
|
||||
|
||||
return rawToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a refresh token. Returns the user_id if valid, null otherwise.
|
||||
*/
|
||||
async validateRefreshToken(rawToken: string): Promise<string | null> {
|
||||
const tokenHash = this.hashToken(rawToken);
|
||||
|
||||
const rows = await this.dataSource.query(
|
||||
`SELECT user_id, expires_at, revoked_at
|
||||
FROM shared.refresh_tokens
|
||||
WHERE token_hash = $1`,
|
||||
[tokenHash],
|
||||
);
|
||||
|
||||
if (rows.length === 0) return null;
|
||||
|
||||
const { user_id, expires_at, revoked_at } = rows[0];
|
||||
|
||||
// Check if revoked
|
||||
if (revoked_at) return null;
|
||||
|
||||
// Check if expired
|
||||
if (new Date(expires_at) < new Date()) return null;
|
||||
|
||||
return user_id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke a single refresh token.
|
||||
*/
|
||||
async revokeToken(rawToken: string): Promise<void> {
|
||||
const tokenHash = this.hashToken(rawToken);
|
||||
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.refresh_tokens SET revoked_at = NOW() WHERE token_hash = $1`,
|
||||
[tokenHash],
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke all refresh tokens for a user ("log out everywhere").
|
||||
*/
|
||||
async revokeAllUserTokens(userId: string): Promise<void> {
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.refresh_tokens SET revoked_at = NOW()
|
||||
WHERE user_id = $1 AND revoked_at IS NULL`,
|
||||
[userId],
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove expired / revoked tokens older than 7 days.
|
||||
* Called periodically to keep the table clean.
|
||||
*/
|
||||
async cleanupExpired(): Promise<number> {
|
||||
const result = await this.dataSource.query(
|
||||
`DELETE FROM shared.refresh_tokens
|
||||
WHERE (expires_at < NOW() - INTERVAL '7 days')
|
||||
OR (revoked_at IS NOT NULL AND revoked_at < NOW() - INTERVAL '7 days')`,
|
||||
);
|
||||
const deleted = result?.[1] ?? 0;
|
||||
if (deleted > 0) {
|
||||
this.logger.log(`Cleaned up ${deleted} expired/revoked refresh tokens`);
|
||||
}
|
||||
return deleted;
|
||||
}
|
||||
|
||||
private hashToken(rawToken: string): string {
|
||||
return createHash('sha256').update(rawToken).digest('hex');
|
||||
}
|
||||
}
|
||||
105
backend/src/modules/auth/sso.controller.ts
Normal file
105
backend/src/modules/auth/sso.controller.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Delete,
|
||||
Param,
|
||||
UseGuards,
|
||||
Request,
|
||||
Res,
|
||||
BadRequestException,
|
||||
} from '@nestjs/common';
|
||||
import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { Response } from 'express';
|
||||
import { SsoService } from './sso.service';
|
||||
import { AuthService } from './auth.service';
|
||||
import { JwtAuthGuard } from './guards/jwt-auth.guard';
|
||||
|
||||
const COOKIE_NAME = 'ledgeriq_rt';
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
|
||||
@ApiTags('auth')
|
||||
@Controller('auth')
|
||||
export class SsoController {
|
||||
constructor(
|
||||
private ssoService: SsoService,
|
||||
private authService: AuthService,
|
||||
) {}
|
||||
|
||||
@Get('sso/providers')
|
||||
@ApiOperation({ summary: 'Get available SSO providers' })
|
||||
getProviders() {
|
||||
return this.ssoService.getAvailableProviders();
|
||||
}
|
||||
|
||||
// Google OAuth routes would be:
|
||||
// GET /auth/google → passport.authenticate('google')
|
||||
// GET /auth/google/callback → passport callback
|
||||
// These are registered conditionally in auth.module.ts if env vars are set.
|
||||
// For now, we'll add the callback handler:
|
||||
|
||||
@Get('google/callback')
|
||||
@ApiOperation({ summary: 'Google OAuth callback' })
|
||||
async googleCallback(@Request() req: any, @Res() res: Response) {
|
||||
if (!req.user) {
|
||||
return res.redirect('/login?error=sso_failed');
|
||||
}
|
||||
|
||||
const result = await this.authService.generateTokenResponse(req.user);
|
||||
|
||||
// Set refresh token cookie
|
||||
if (result.refreshToken) {
|
||||
res.cookie(COOKIE_NAME, result.refreshToken, {
|
||||
httpOnly: true,
|
||||
secure: isProduction,
|
||||
sameSite: 'strict',
|
||||
path: '/api/auth',
|
||||
maxAge: 30 * 24 * 60 * 60 * 1000,
|
||||
});
|
||||
}
|
||||
|
||||
// Redirect to app with access token in URL fragment (for SPA to pick up)
|
||||
return res.redirect(`/sso-callback?token=${result.accessToken}`);
|
||||
}
|
||||
|
||||
@Get('azure/callback')
|
||||
@ApiOperation({ summary: 'Azure AD OAuth callback' })
|
||||
async azureCallback(@Request() req: any, @Res() res: Response) {
|
||||
if (!req.user) {
|
||||
return res.redirect('/login?error=sso_failed');
|
||||
}
|
||||
|
||||
const result = await this.authService.generateTokenResponse(req.user);
|
||||
|
||||
if (result.refreshToken) {
|
||||
res.cookie(COOKIE_NAME, result.refreshToken, {
|
||||
httpOnly: true,
|
||||
secure: isProduction,
|
||||
sameSite: 'strict',
|
||||
path: '/api/auth',
|
||||
maxAge: 30 * 24 * 60 * 60 * 1000,
|
||||
});
|
||||
}
|
||||
|
||||
return res.redirect(`/sso-callback?token=${result.accessToken}`);
|
||||
}
|
||||
|
||||
@Post('sso/link')
|
||||
@ApiOperation({ summary: 'Link SSO provider to current user' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async linkAccount(@Request() req: any) {
|
||||
// This would typically be done via the OAuth redirect flow
|
||||
// For now, it's a placeholder
|
||||
throw new BadRequestException('Use the OAuth redirect flow to link accounts');
|
||||
}
|
||||
|
||||
@Delete('sso/unlink/:provider')
|
||||
@ApiOperation({ summary: 'Unlink SSO provider from current user' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async unlinkAccount(@Request() req: any, @Param('provider') provider: string) {
|
||||
await this.ssoService.unlinkSsoAccount(req.user.sub, provider);
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
97
backend/src/modules/auth/sso.service.ts
Normal file
97
backend/src/modules/auth/sso.service.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
import { Injectable, Logger, BadRequestException } from '@nestjs/common';
|
||||
import { DataSource } from 'typeorm';
|
||||
import { UsersService } from '../users/users.service';
|
||||
|
||||
// Normalized profile extracted from an OAuth provider response.
interface SsoProfile {
  provider: string;      // e.g. 'google', 'azure'
  providerId: string;    // provider-scoped stable user identifier
  email: string;
  firstName?: string;
  lastName?: string;
}

/**
 * SSO account resolution: maps OAuth profiles onto shared.users rows and
 * manages linking/unlinking a single OAuth provider per user (the schema has
 * one oauth_provider/oauth_provider_id column pair, so linking a second
 * provider overwrites the first).
 */
@Injectable()
export class SsoService {
  private readonly logger = new Logger(SsoService.name);

  constructor(
    private dataSource: DataSource,
    private usersService: UsersService,
  ) {}

  /**
   * Find existing user by SSO provider+id, or by email match, or create new.
   * Always returns the user hydrated with their organizations.
   *
   * SECURITY NOTE(review): step 2 auto-links any account whose email matches
   * the provider profile. This assumes the provider has verified the email —
   * if a configured provider allows unverified emails, this enables account
   * takeover. Confirm provider email-verification guarantees.
   */
  async findOrCreateSsoUser(profile: SsoProfile) {
    // 1. Try to find by provider + provider ID
    const byProvider = await this.dataSource.query(
      `SELECT * FROM shared.users WHERE oauth_provider = $1 AND oauth_provider_id = $2`,
      [profile.provider, profile.providerId],
    );
    if (byProvider.length > 0) {
      return this.usersService.findByIdWithOrgs(byProvider[0].id);
    }

    // 2. Try to find by email match (link accounts)
    const byEmail = await this.usersService.findByEmail(profile.email);
    if (byEmail) {
      // Link the SSO provider to existing account
      await this.linkSsoAccount(byEmail.id, profile.provider, profile.providerId);
      return this.usersService.findByIdWithOrgs(byEmail.id);
    }

    // 3. Create new user (email marked verified — trusted from the provider)
    const newUser = await this.dataSource.query(
      `INSERT INTO shared.users (email, first_name, last_name, oauth_provider, oauth_provider_id, is_email_verified)
      VALUES ($1, $2, $3, $4, $5, true)
      RETURNING id`,
      [profile.email, profile.firstName || '', profile.lastName || '', profile.provider, profile.providerId],
    );

    return this.usersService.findByIdWithOrgs(newUser[0].id);
  }

  /**
   * Link an SSO provider to an existing user.
   * NOTE: overwrites any previously linked provider (single-provider schema).
   */
  async linkSsoAccount(userId: string, provider: string, providerId: string): Promise<void> {
    await this.dataSource.query(
      `UPDATE shared.users SET oauth_provider = $1, oauth_provider_id = $2, updated_at = NOW() WHERE id = $3`,
      [provider, providerId, userId],
    );
    this.logger.log(`Linked ${provider} SSO to user ${userId}`);
  }

  /**
   * Unlink SSO from a user (only if they have a password set, so the account
   * is never left with no way to sign in).
   *
   * @throws BadRequestException when the user is missing, has no password,
   *         or the named provider is not the one currently linked.
   */
  async unlinkSsoAccount(userId: string, provider: string): Promise<void> {
    const rows = await this.dataSource.query(
      `SELECT password_hash, oauth_provider FROM shared.users WHERE id = $1`,
      [userId],
    );
    if (rows.length === 0) throw new BadRequestException('User not found');
    if (!rows[0].password_hash) {
      throw new BadRequestException('Cannot unlink SSO — you must set a password first');
    }
    if (rows[0].oauth_provider !== provider) {
      throw new BadRequestException('SSO provider mismatch');
    }

    await this.dataSource.query(
      `UPDATE shared.users SET oauth_provider = NULL, oauth_provider_id = NULL, updated_at = NOW() WHERE id = $1`,
      [userId],
    );
    this.logger.log(`Unlinked ${provider} SSO from user ${userId}`);
  }

  /**
   * Get which SSO providers are configured (both client ID and secret set).
   */
  getAvailableProviders(): { google: boolean; azure: boolean } {
    return {
      google: !!(process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET),
      azure: !!(process.env.AZURE_CLIENT_ID && process.env.AZURE_CLIENT_SECRET),
    };
  }
}
|
||||
@@ -18,9 +18,9 @@ export class JwtStrategy extends PassportStrategy(Strategy) {
|
||||
sub: payload.sub,
|
||||
email: payload.email,
|
||||
orgId: payload.orgId,
|
||||
orgSchema: payload.orgSchema,
|
||||
role: payload.role,
|
||||
isSuperadmin: payload.isSuperadmin || false,
|
||||
impersonatedBy: payload.impersonatedBy || null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
133
backend/src/modules/billing/billing.controller.ts
Normal file
133
backend/src/modules/billing/billing.controller.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
import {
|
||||
Controller,
|
||||
Post,
|
||||
Put,
|
||||
Get,
|
||||
Body,
|
||||
Param,
|
||||
Query,
|
||||
Req,
|
||||
UseGuards,
|
||||
RawBodyRequest,
|
||||
BadRequestException,
|
||||
ForbiddenException,
|
||||
Request,
|
||||
} from '@nestjs/common';
|
||||
import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { Throttle } from '@nestjs/throttler';
|
||||
import { Request as ExpressRequest } from 'express';
|
||||
import { DataSource } from 'typeorm';
|
||||
import { BillingService } from './billing.service';
|
||||
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
|
||||
|
||||
@ApiTags('billing')
|
||||
@Controller()
|
||||
export class BillingController {
|
||||
constructor(
|
||||
private billingService: BillingService,
|
||||
private dataSource: DataSource,
|
||||
) {}
|
||||
|
||||
@Post('billing/start-trial')
|
||||
@ApiOperation({ summary: 'Start a free trial (no card required)' })
|
||||
@Throttle({ default: { limit: 10, ttl: 60000 } })
|
||||
async startTrial(
|
||||
@Body() body: { planId: string; billingInterval?: 'month' | 'year'; email: string; businessName: string },
|
||||
) {
|
||||
if (!body.planId) throw new BadRequestException('planId is required');
|
||||
if (!body.email) throw new BadRequestException('email is required');
|
||||
if (!body.businessName) throw new BadRequestException('businessName is required');
|
||||
return this.billingService.startTrial(
|
||||
body.planId,
|
||||
body.billingInterval || 'month',
|
||||
body.email,
|
||||
body.businessName,
|
||||
);
|
||||
}
|
||||
|
||||
@Post('billing/create-checkout-session')
|
||||
@ApiOperation({ summary: 'Create a Stripe Checkout Session' })
|
||||
@Throttle({ default: { limit: 10, ttl: 60000 } })
|
||||
async createCheckout(
|
||||
@Body() body: { planId: string; billingInterval?: 'month' | 'year'; email?: string; businessName?: string },
|
||||
) {
|
||||
if (!body.planId) throw new BadRequestException('planId is required');
|
||||
return this.billingService.createCheckoutSession(
|
||||
body.planId,
|
||||
body.billingInterval || 'month',
|
||||
body.email,
|
||||
body.businessName,
|
||||
);
|
||||
}
|
||||
|
||||
@Post('webhooks/stripe')
|
||||
@ApiOperation({ summary: 'Stripe webhook endpoint' })
|
||||
async handleWebhook(@Req() req: RawBodyRequest<ExpressRequest>) {
|
||||
const signature = req.headers['stripe-signature'] as string;
|
||||
if (!signature) throw new BadRequestException('Missing Stripe signature');
|
||||
if (!req.rawBody) throw new BadRequestException('Missing raw body');
|
||||
await this.billingService.handleWebhook(req.rawBody, signature);
|
||||
return { received: true };
|
||||
}
|
||||
|
||||
@Get('billing/status')
|
||||
@ApiOperation({ summary: 'Check provisioning status for a checkout session or subscription' })
|
||||
async getStatus(@Query('session_id') sessionId: string) {
|
||||
if (!sessionId) throw new BadRequestException('session_id required');
|
||||
return this.billingService.getProvisioningStatus(sessionId);
|
||||
}
|
||||
|
||||
@Get('billing/subscription')
|
||||
@ApiOperation({ summary: 'Get current subscription info' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async getSubscription(@Request() req: any) {
|
||||
const orgId = req.user.orgId;
|
||||
if (!orgId) throw new BadRequestException('No organization context');
|
||||
return this.billingService.getSubscriptionInfo(orgId);
|
||||
}
|
||||
|
||||
@Post('billing/portal')
|
||||
@ApiOperation({ summary: 'Create Stripe Customer Portal session' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async createPortal(@Request() req: any) {
|
||||
const orgId = req.user.orgId;
|
||||
if (!orgId) throw new BadRequestException('No organization context');
|
||||
return this.billingService.createPortalSession(orgId);
|
||||
}
|
||||
|
||||
// ─── Admin: Switch Billing Method (ACH / Invoice) ──────────
|
||||
|
||||
@Put('admin/organizations/:id/billing')
|
||||
@ApiOperation({ summary: 'Switch organization billing method (superadmin only)' })
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
async updateBillingMethod(
|
||||
@Request() req: any,
|
||||
@Param('id') id: string,
|
||||
@Body() body: { collectionMethod: 'charge_automatically' | 'send_invoice'; daysUntilDue?: number },
|
||||
) {
|
||||
// Require superadmin
|
||||
const userId = req.user.userId || req.user.sub;
|
||||
const userRows = await this.dataSource.query(
|
||||
`SELECT is_superadmin FROM shared.users WHERE id = $1`,
|
||||
[userId],
|
||||
);
|
||||
if (!userRows.length || !userRows[0].is_superadmin) {
|
||||
throw new ForbiddenException('Superadmin access required');
|
||||
}
|
||||
|
||||
if (!['charge_automatically', 'send_invoice'].includes(body.collectionMethod)) {
|
||||
throw new BadRequestException('collectionMethod must be "charge_automatically" or "send_invoice"');
|
||||
}
|
||||
|
||||
await this.billingService.switchToInvoiceBilling(
|
||||
id,
|
||||
body.collectionMethod,
|
||||
body.daysUntilDue || 30,
|
||||
);
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
}
|
||||
13
backend/src/modules/billing/billing.module.ts
Normal file
13
backend/src/modules/billing/billing.module.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { BillingService } from './billing.service';
|
||||
import { BillingController } from './billing.controller';
|
||||
import { AuthModule } from '../auth/auth.module';
|
||||
import { DatabaseModule } from '../../database/database.module';
|
||||
|
||||
/**
 * Wires up Stripe billing: public checkout/trial endpoints, the Stripe
 * webhook receiver, and subscription management.
 *
 * Imports AuthModule (JWT guard + AuthService used by BillingController/
 * BillingService) and DatabaseModule; re-exports BillingService for other
 * modules.
 */
@Module({
  imports: [AuthModule, DatabaseModule],
  controllers: [BillingController],
  providers: [BillingService],
  exports: [BillingService],
})
export class BillingModule {}
|
||||
678
backend/src/modules/billing/billing.service.ts
Normal file
678
backend/src/modules/billing/billing.service.ts
Normal file
@@ -0,0 +1,678 @@
|
||||
import { Injectable, Logger, BadRequestException, RawBodyRequest } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { DataSource } from 'typeorm';
|
||||
import Stripe from 'stripe';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import * as bcrypt from 'bcryptjs';
|
||||
import { TenantSchemaService } from '../../database/tenant-schema.service';
|
||||
import { AuthService } from '../auth/auth.service';
|
||||
import { EmailService } from '../email/email.service';
|
||||
|
||||
// Plan catalog: display name and unit limit per plan tier.
// Keys must match the plan IDs used in the Stripe price map below.
const PLAN_FEATURES: Record<string, { name: string; unitLimit: number }> = {
  starter: { name: 'Starter', unitLimit: 50 },
  professional: { name: 'Professional', unitLimit: 200 },
  enterprise: { name: 'Enterprise', unitLimit: 999999 }, // effectively unlimited
};

// Stripe-style billing interval: 'month' (monthly) or 'year' (annual).
type BillingInterval = 'month' | 'year';
|
||||
|
||||
@Injectable()
|
||||
export class BillingService {
|
||||
private readonly logger = new Logger(BillingService.name);
|
||||
private stripe: Stripe | null = null;
|
||||
private webhookSecret: string;
|
||||
private priceMap: Record<string, { monthly: string; annual: string }>;
|
||||
private requirePaymentForTrial: boolean;
|
||||
|
||||
/**
 * Reads Stripe configuration from the environment.
 *
 * - STRIPE_SECRET_KEY: when absent or containing 'placeholder', Stripe
 *   stays null and billing endpoints degrade to stubs instead of failing.
 * - STRIPE_WEBHOOK_SECRET: used to verify webhook signatures.
 * - Price IDs: per-interval vars (STRIPE_*_MONTHLY/_ANNUAL_PRICE_ID) with
 *   monthly falling back to the legacy single STRIPE_*_PRICE_ID vars.
 */
constructor(
  private configService: ConfigService,
  private dataSource: DataSource,
  private tenantSchemaService: TenantSchemaService,
  private authService: AuthService,
  private emailService: EmailService,
) {
  const secretKey = this.configService.get<string>('STRIPE_SECRET_KEY');
  if (secretKey && !secretKey.includes('placeholder')) {
    // NOTE(review): apiVersion is force-cast; confirm it matches the installed Stripe SDK.
    this.stripe = new Stripe(secretKey, { apiVersion: '2025-02-24.acacia' as any });
    this.logger.log('Stripe initialized');
  } else {
    this.logger.warn('Stripe not configured — billing endpoints will return stubs');
  }

  this.webhookSecret = this.configService.get<string>('STRIPE_WEBHOOK_SECRET') || '';
  this.requirePaymentForTrial =
    this.configService.get<string>('REQUIRE_PAYMENT_METHOD_FOR_TRIAL') === 'true';

  // Build price map with backward-compat: new monthly vars fall back to old single vars
  this.priceMap = {
    starter: {
      monthly: this.configService.get<string>('STRIPE_STARTER_MONTHLY_PRICE_ID')
        || this.configService.get<string>('STRIPE_STARTER_PRICE_ID') || '',
      annual: this.configService.get<string>('STRIPE_STARTER_ANNUAL_PRICE_ID') || '',
    },
    professional: {
      monthly: this.configService.get<string>('STRIPE_PROFESSIONAL_MONTHLY_PRICE_ID')
        || this.configService.get<string>('STRIPE_PROFESSIONAL_PRICE_ID') || '',
      annual: this.configService.get<string>('STRIPE_PROFESSIONAL_ANNUAL_PRICE_ID') || '',
    },
    enterprise: {
      monthly: this.configService.get<string>('STRIPE_ENTERPRISE_MONTHLY_PRICE_ID')
        || this.configService.get<string>('STRIPE_ENTERPRISE_PRICE_ID') || '',
      annual: this.configService.get<string>('STRIPE_ENTERPRISE_ANNUAL_PRICE_ID') || '',
    },
  };
}
|
||||
|
||||
// ─── Price Resolution ────────────────────────────────────────
|
||||
|
||||
/**
 * Resolve the configured Stripe price ID for a plan/interval pair.
 * Throws BadRequestException when the plan is unknown or the price
 * is missing / still a placeholder (i.e. not configured in the env).
 */
private getPriceId(planId: string, interval: BillingInterval): string {
  const prices = this.priceMap[planId];
  if (!prices) {
    throw new BadRequestException(`Invalid plan: ${planId}`);
  }

  const resolved = interval === 'year' ? prices.annual : prices.monthly;
  const unconfigured = !resolved || resolved.includes('placeholder');
  if (unconfigured) {
    throw new BadRequestException(`Price not configured for ${planId} (${interval})`);
  }
  return resolved;
}
|
||||
|
||||
// ─── Trial Signup (No Card Required) ────────────────────────
|
||||
|
||||
/**
 * Start a free trial without collecting payment.
 *
 * Creates a Stripe customer + subscription with trial_period_days,
 * then provisions the organization immediately with 'trial' status.
 *
 * @param planId          Plan key into the price map (e.g. 'starter').
 * @param billingInterval 'month' or 'year'; selects monthly vs annual price.
 * @param email           Account owner email; becomes the org's billing email.
 * @param businessName    Display name for the new organization.
 * @returns success flag plus the created Stripe subscription ID.
 * @throws BadRequestException when Stripe is unconfigured, inputs are
 *         missing, or the plan's price is not configured.
 */
async startTrial(
  planId: string,
  billingInterval: BillingInterval,
  email: string,
  businessName: string,
): Promise<{ success: boolean; subscriptionId: string }> {
  if (!this.stripe) throw new BadRequestException('Stripe not configured');
  if (!email) throw new BadRequestException('Email is required');
  if (!businessName) throw new BadRequestException('Business name is required');

  // Resolve (and validate) the configured price before touching Stripe.
  const priceId = this.getPriceId(planId, billingInterval);

  // 1. Create Stripe customer
  const customer = await this.stripe.customers.create({
    email,
    metadata: { plan_id: planId, business_name: businessName, billing_interval: billingInterval },
  });

  // 2. Create subscription with 14-day trial (no payment method).
  //    missing_payment_method: 'cancel' means Stripe cancels the
  //    subscription automatically if no card is added by trial end.
  const subscription = await this.stripe.subscriptions.create({
    customer: customer.id,
    items: [{ price: priceId }],
    trial_period_days: 14,
    payment_settings: {
      save_default_payment_method: 'on_subscription',
    },
    trial_settings: {
      end_behavior: { missing_payment_method: 'cancel' },
    },
    metadata: { plan_id: planId, business_name: businessName, billing_interval: billingInterval },
  });

  // Fall back to "now + 14 days" if Stripe didn't report a trial_end.
  const trialEnd = subscription.trial_end
    ? new Date(subscription.trial_end * 1000)
    : new Date(Date.now() + 14 * 24 * 60 * 60 * 1000);

  // 3. Provision organization immediately with trial status
  await this.provisionOrganization(
    customer.id,
    subscription.id,
    email,
    planId,
    businessName,
    'trial',
    billingInterval,
    trialEnd,
  );

  this.logger.log(`Trial started for ${email}, plan=${planId}, interval=${billingInterval}`);
  return { success: true, subscriptionId: subscription.id };
}
|
||||
|
||||
// ─── Checkout Session (Card-required flow / post-trial) ─────
|
||||
|
||||
/**
 * Create a Stripe Checkout Session for a new subscription.
 *
 * Used when REQUIRE_PAYMENT_METHOD_FOR_TRIAL=true, or for post-trial
 * conversion where the user adds a payment method.
 *
 * @param planId          Plan key into the price map.
 * @param billingInterval 'month' (default) or 'year'.
 * @param email           Optional; pre-fills the checkout email field.
 * @param businessName    Optional; carried in metadata for provisioning.
 * @returns The hosted checkout URL to redirect the user to.
 * @throws BadRequestException when Stripe or the price is unconfigured.
 */
async createCheckoutSession(
  planId: string,
  billingInterval: BillingInterval = 'month',
  email?: string,
  businessName?: string,
): Promise<{ url: string }> {
  if (!this.stripe) throw new BadRequestException('Stripe not configured');

  const priceId = this.getPriceId(planId, billingInterval);

  // Metadata is read back by the checkout.session.completed webhook
  // (handleCheckoutCompleted) to provision the organization.
  const sessionConfig: Stripe.Checkout.SessionCreateParams = {
    mode: 'subscription',
    payment_method_types: ['card'],
    line_items: [{ price: priceId, quantity: 1 }],
    success_url: `${this.getAppUrl()}/onboarding/pending?session_id={CHECKOUT_SESSION_ID}`,
    cancel_url: `${this.getAppUrl()}/pricing`,
    customer_email: email || undefined,
    metadata: {
      plan_id: planId,
      business_name: businessName || '',
      billing_interval: billingInterval,
    },
  };

  // If trial is card-required, add trial period to checkout
  if (this.requirePaymentForTrial) {
    sessionConfig.subscription_data = {
      trial_period_days: 14,
      metadata: {
        plan_id: planId,
        business_name: businessName || '',
        billing_interval: billingInterval,
      },
    };
  }

  const session = await this.stripe.checkout.sessions.create(sessionConfig);
  return { url: session.url! };
}
|
||||
|
||||
// ─── Webhook Handling ───────────────────────────────────────
|
||||
|
||||
/**
 * Handle a Stripe webhook event.
 *
 * Verifies the signature against the raw request body, de-duplicates by
 * event ID via shared.stripe_events, records the event, then dispatches
 * to the per-event-type handler.
 *
 * @param rawBody   The unparsed request body (required for signature check).
 * @param signature Value of the 'stripe-signature' header.
 * @throws BadRequestException on bad signature or unconfigured Stripe.
 */
async handleWebhook(rawBody: Buffer, signature: string): Promise<void> {
  if (!this.stripe) throw new BadRequestException('Stripe not configured');

  let event: Stripe.Event;
  try {
    event = this.stripe.webhooks.constructEvent(rawBody, signature, this.webhookSecret);
  } catch (err: any) {
    this.logger.error(`Webhook signature verification failed: ${err.message}`);
    throw new BadRequestException('Invalid webhook signature');
  }

  // Idempotency check — Stripe retries deliveries, so the same event
  // may arrive more than once.
  const existing = await this.dataSource.query(
    `SELECT id FROM shared.stripe_events WHERE id = $1`,
    [event.id],
  );
  if (existing.length > 0) {
    this.logger.log(`Duplicate Stripe event ${event.id}, skipping`);
    return;
  }

  // Record event BEFORE processing; NOTE(review): if a handler below
  // throws, the event is already marked seen and won't be retried.
  await this.dataSource.query(
    `INSERT INTO shared.stripe_events (id, type, payload) VALUES ($1, $2, $3)`,
    [event.id, event.type, JSON.stringify(event.data)],
  );

  // Dispatch
  switch (event.type) {
    case 'checkout.session.completed':
      await this.handleCheckoutCompleted(event.data.object as Stripe.Checkout.Session);
      break;
    case 'invoice.payment_succeeded':
      await this.handlePaymentSucceeded(event.data.object as Stripe.Invoice);
      break;
    case 'invoice.payment_failed':
      await this.handlePaymentFailed(event.data.object as Stripe.Invoice);
      break;
    case 'customer.subscription.deleted':
      await this.handleSubscriptionDeleted(event.data.object as Stripe.Subscription);
      break;
    case 'customer.subscription.trial_will_end':
      await this.handleTrialWillEnd(event.data.object as Stripe.Subscription);
      break;
    case 'customer.subscription.updated':
      await this.handleSubscriptionUpdated(event.data.object as Stripe.Subscription);
      break;
    default:
      this.logger.log(`Unhandled Stripe event: ${event.type}`);
  }
}
|
||||
|
||||
// ─── Provisioning Status ────────────────────────────────────
|
||||
|
||||
/**
 * Get provisioning status for a checkout session OR subscription ID.
 *
 * Probes Stripe to resolve the customer ID (first as a checkout session,
 * then as a subscription), then checks whether an organization row exists
 * for that customer and whether it has reached 'active'/'trial'.
 *
 * @param sessionId Stripe checkout session ID or subscription ID.
 * @returns status: 'not_configured' | 'pending' | 'provisioning' | 'active'.
 */
async getProvisioningStatus(sessionId: string): Promise<{ status: string; activationUrl?: string }> {
  if (!this.stripe) return { status: 'not_configured' };

  // Try as checkout session first
  let customerId: string | null = null;
  try {
    const session = await this.stripe.checkout.sessions.retrieve(sessionId);
    customerId = session.customer as string;
  } catch {
    // Not a checkout session — try looking up by subscription ID
    try {
      const subscription = await this.stripe.subscriptions.retrieve(sessionId);
      customerId = subscription.customer as string;
    } catch {
      // Neither lookup worked; report pending rather than erroring so
      // the polling client keeps waiting.
      return { status: 'pending' };
    }
  }

  if (!customerId) return { status: 'pending' };

  const rows = await this.dataSource.query(
    `SELECT id, status FROM shared.organizations WHERE stripe_customer_id = $1`,
    [customerId],
  );

  // Org row not yet created, or created but not yet active/trial.
  if (rows.length === 0) return { status: 'provisioning' };
  if (['active', 'trial'].includes(rows[0].status)) return { status: 'active' };
  return { status: 'provisioning' };
}
|
||||
|
||||
// ─── Stripe Customer Portal ─────────────────────────────────
|
||||
|
||||
/**
 * Create a Stripe Customer Portal session for managing subscription.
 *
 * If the org row is missing its stripe_customer_id but has a subscription
 * ID, the customer is recovered from the subscription and backfilled.
 *
 * @param orgId Organization UUID in shared.organizations.
 * @returns The portal URL to redirect the user to.
 * @throws BadRequestException when Stripe is unconfigured, the org is
 *         missing, or no customer can be resolved (trial orgs get a
 *         friendlier message).
 */
async createPortalSession(orgId: string): Promise<{ url: string }> {
  if (!this.stripe) throw new BadRequestException('Stripe is not configured');

  const rows = await this.dataSource.query(
    `SELECT stripe_customer_id, stripe_subscription_id, status
     FROM shared.organizations WHERE id = $1`,
    [orgId],
  );
  if (rows.length === 0) {
    throw new BadRequestException('Organization not found');
  }

  let customerId = rows[0].stripe_customer_id;

  // Fallback: if customer ID is missing but subscription exists, retrieve customer from subscription
  if (!customerId && rows[0].stripe_subscription_id) {
    try {
      const sub = await this.stripe.subscriptions.retrieve(rows[0].stripe_subscription_id) as Stripe.Subscription;
      customerId = typeof sub.customer === 'string' ? sub.customer : sub.customer?.id;
      if (customerId) {
        // Backfill the customer ID for future calls
        await this.dataSource.query(
          `UPDATE shared.organizations SET stripe_customer_id = $1 WHERE id = $2`,
          [customerId, orgId],
        );
        this.logger.log(`Backfilled stripe_customer_id=${customerId} for org=${orgId}`);
      }
    } catch (err) {
      // Best-effort recovery only; fall through to the error messages below.
      this.logger.warn(`Failed to retrieve customer from subscription: ${(err as Error).message}`);
    }
  }

  if (!customerId) {
    const status = rows[0].status;
    if (status === 'trial') {
      throw new BadRequestException(
        'Billing portal is not available during your free trial. Add a payment method when your trial ends to manage your subscription.',
      );
    }
    throw new BadRequestException('No Stripe customer found for this organization. Please contact support.');
  }

  const session = await this.stripe.billingPortal.sessions.create({
    customer: customerId,
    return_url: `${this.getAppUrl()}/settings`,
  });

  return { url: session.url };
}
|
||||
|
||||
// ─── Subscription Info ──────────────────────────────────────
|
||||
|
||||
/**
 * Get current subscription details for the Settings billing tab.
 *
 * DB fields are the source of truth; when a Stripe subscription exists,
 * current_period_end and cancel_at_period_end are fetched live from
 * Stripe (failures there are non-fatal and fall back to DB-only data).
 *
 * @param orgId Organization UUID in shared.organizations.
 * @throws BadRequestException when the organization is not found.
 */
async getSubscriptionInfo(orgId: string): Promise<{
  plan: string;
  planName: string;
  billingInterval: string;
  status: string;
  collectionMethod: string;
  trialEndsAt: string | null;
  currentPeriodEnd: string | null;
  cancelAtPeriodEnd: boolean;
  hasStripeCustomer: boolean;
}> {
  const rows = await this.dataSource.query(
    `SELECT plan_level, billing_interval, status, collection_method,
            trial_ends_at, stripe_subscription_id, stripe_customer_id
     FROM shared.organizations WHERE id = $1`,
    [orgId],
  );

  if (rows.length === 0) throw new BadRequestException('Organization not found');

  const org = rows[0];
  let currentPeriodEnd: string | null = null;
  let cancelAtPeriodEnd = false;

  // Fetch live data from Stripe if available
  if (this.stripe && org.stripe_subscription_id) {
    try {
      const sub = await this.stripe.subscriptions.retrieve(org.stripe_subscription_id, {
        expand: ['items.data'],
      }) as Stripe.Subscription;
      // current_period_end is on the subscription item in newer Stripe API versions
      const firstItem = sub.items?.data?.[0];
      if (firstItem?.current_period_end) {
        currentPeriodEnd = new Date(firstItem.current_period_end * 1000).toISOString();
      }
      cancelAtPeriodEnd = sub.cancel_at_period_end;
    } catch {
      // Non-critical — use DB data only
    }
  }

  return {
    plan: org.plan_level || 'starter',
    planName: PLAN_FEATURES[org.plan_level]?.name || org.plan_level || 'Starter',
    billingInterval: org.billing_interval || 'month',
    status: org.status || 'active',
    collectionMethod: org.collection_method || 'charge_automatically',
    trialEndsAt: org.trial_ends_at ? new Date(org.trial_ends_at).toISOString() : null,
    currentPeriodEnd,
    cancelAtPeriodEnd,
    hasStripeCustomer: !!org.stripe_customer_id,
  };
}
|
||||
|
||||
// ─── Invoice / ACH Billing (Admin) ──────────────────────────
|
||||
|
||||
/**
|
||||
* Switch a customer's subscription to invoice collection (ACH/wire).
|
||||
* Admin-only operation for enterprise customers.
|
||||
*/
|
||||
async switchToInvoiceBilling(
|
||||
orgId: string,
|
||||
collectionMethod: 'charge_automatically' | 'send_invoice',
|
||||
daysUntilDue: number = 30,
|
||||
): Promise<void> {
|
||||
if (!this.stripe) throw new BadRequestException('Stripe not configured');
|
||||
|
||||
const rows = await this.dataSource.query(
|
||||
`SELECT stripe_subscription_id, stripe_customer_id FROM shared.organizations WHERE id = $1`,
|
||||
[orgId],
|
||||
);
|
||||
if (rows.length === 0 || !rows[0].stripe_subscription_id) {
|
||||
throw new BadRequestException('No Stripe subscription found for this organization');
|
||||
}
|
||||
|
||||
const updateParams: Stripe.SubscriptionUpdateParams = {
|
||||
collection_method: collectionMethod,
|
||||
};
|
||||
if (collectionMethod === 'send_invoice') {
|
||||
updateParams.days_until_due = daysUntilDue;
|
||||
}
|
||||
|
||||
await this.stripe.subscriptions.update(rows[0].stripe_subscription_id, updateParams);
|
||||
|
||||
// Update DB
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.organizations SET collection_method = $1, updated_at = NOW() WHERE id = $2`,
|
||||
[collectionMethod, orgId],
|
||||
);
|
||||
|
||||
this.logger.log(`Billing method updated for org ${orgId}: ${collectionMethod}`);
|
||||
}
|
||||
|
||||
// ─── Webhook Handlers ──────────────────────────────────────
|
||||
|
||||
/**
 * checkout.session.completed: provision an organization for the new
 * subscriber. Plan/business/interval come from session metadata written
 * by createCheckoutSession; trial state is read live from the subscription.
 * Provisioning errors are logged, not rethrown (webhook already recorded).
 */
private async handleCheckoutCompleted(session: Stripe.Checkout.Session): Promise<void> {
  const customerId = session.customer as string;
  const subscriptionId = session.subscription as string;
  const email = session.customer_email || session.customer_details?.email || '';
  const planId = session.metadata?.plan_id || 'starter';
  const businessName = session.metadata?.business_name || 'My HOA';
  const billingInterval = (session.metadata?.billing_interval || 'month') as BillingInterval;

  this.logger.log(`Provisioning org for ${email}, plan=${planId}, customer=${customerId}`);

  try {
    // Determine if this is a trial checkout (card required for trial)
    let status: 'active' | 'trial' = 'active';
    let trialEnd: Date | undefined;

    if (this.stripe && subscriptionId) {
      const sub = await this.stripe.subscriptions.retrieve(subscriptionId);
      if (sub.status === 'trialing' && sub.trial_end) {
        status = 'trial';
        trialEnd = new Date(sub.trial_end * 1000);
      }
    }

    await this.provisionOrganization(
      customerId, subscriptionId, email, planId, businessName,
      status, billingInterval, trialEnd,
    );
  } catch (err: any) {
    this.logger.error(`Provisioning failed: ${err.message}`, err.stack);
  }
}
|
||||
|
||||
private async handlePaymentSucceeded(invoice: Stripe.Invoice): Promise<void> {
|
||||
const customerId = invoice.customer as string;
|
||||
// Activate tenant if it was pending/trial
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.organizations SET status = 'active', updated_at = NOW()
|
||||
WHERE stripe_customer_id = $1 AND status IN ('trial', 'past_due')`,
|
||||
[customerId],
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * invoice.payment_failed: move an active org to 'past_due' (grace period)
 * and notify the org's billing email. Orgs not currently 'active' are
 * left alone so a trial/archived org isn't flipped by a stray invoice.
 */
private async handlePaymentFailed(invoice: Stripe.Invoice): Promise<void> {
  const customerId = invoice.customer as string;
  const rows = await this.dataSource.query(
    `SELECT email, name FROM shared.organizations WHERE stripe_customer_id = $1`,
    [customerId],
  );

  // Set org to past_due for grace period (read-only access)
  await this.dataSource.query(
    `UPDATE shared.organizations SET status = 'past_due', updated_at = NOW()
     WHERE stripe_customer_id = $1 AND status = 'active'`,
    [customerId],
  );

  if (rows.length > 0 && rows[0].email) {
    await this.emailService.sendPaymentFailedEmail(rows[0].email, rows[0].name || 'Your organization');
  }
  this.logger.warn(`Payment failed for customer ${customerId}`);
}
|
||||
|
||||
private async handleSubscriptionDeleted(subscription: Stripe.Subscription): Promise<void> {
|
||||
const customerId = subscription.customer as string;
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.organizations SET status = 'archived', updated_at = NOW()
|
||||
WHERE stripe_customer_id = $1`,
|
||||
[customerId],
|
||||
);
|
||||
this.logger.log(`Subscription cancelled for customer ${customerId}`);
|
||||
}
|
||||
|
||||
/**
 * customer.subscription.trial_will_end: email the org a heads-up that
 * their trial is ending, with a link to the settings page. No-op when
 * no org matches the customer or the org has no email on file.
 */
private async handleTrialWillEnd(subscription: Stripe.Subscription): Promise<void> {
  const customerId = subscription.customer as string;
  const rows = await this.dataSource.query(
    `SELECT id, email, name FROM shared.organizations WHERE stripe_customer_id = $1`,
    [customerId],
  );

  if (rows.length === 0) return;

  const org = rows[0];
  const daysRemaining = 3; // This webhook fires 3 days before trial end
  const settingsUrl = `${this.getAppUrl()}/settings`;

  if (org.email) {
    await this.emailService.sendTrialEndingEmail(
      org.email,
      org.name || 'Your organization',
      daysRemaining,
      settingsUrl,
    );
  }

  this.logger.log(`Trial ending soon for customer ${customerId}, org ${org.id}`);
}
|
||||
|
||||
private async handleSubscriptionUpdated(subscription: Stripe.Subscription): Promise<void> {
|
||||
const customerId = subscription.customer as string;
|
||||
|
||||
// Determine new status
|
||||
let newStatus: string;
|
||||
switch (subscription.status) {
|
||||
case 'trialing':
|
||||
newStatus = 'trial';
|
||||
break;
|
||||
case 'active':
|
||||
newStatus = 'active';
|
||||
break;
|
||||
case 'past_due':
|
||||
newStatus = 'past_due';
|
||||
break;
|
||||
case 'canceled':
|
||||
case 'unpaid':
|
||||
newStatus = 'archived';
|
||||
break;
|
||||
default:
|
||||
return; // Don't update for other statuses
|
||||
}
|
||||
|
||||
// Determine billing interval from the subscription items
|
||||
let billingInterval: BillingInterval = 'month';
|
||||
if (subscription.items?.data?.[0]?.price?.recurring?.interval === 'year') {
|
||||
billingInterval = 'year';
|
||||
}
|
||||
|
||||
// Determine plan from price metadata or existing mapping
|
||||
let planId: string | null = null;
|
||||
const activePriceId = subscription.items?.data?.[0]?.price?.id;
|
||||
if (activePriceId) {
|
||||
for (const [plan, prices] of Object.entries(this.priceMap)) {
|
||||
if (prices.monthly === activePriceId || prices.annual === activePriceId) {
|
||||
planId = plan;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Build update query dynamically
|
||||
const updates: string[] = [`status = '${newStatus}'`, `billing_interval = '${billingInterval}'`, `updated_at = NOW()`];
|
||||
if (planId) {
|
||||
updates.push(`plan_level = '${planId}'`);
|
||||
}
|
||||
if (subscription.collection_method) {
|
||||
updates.push(`collection_method = '${subscription.collection_method}'`);
|
||||
}
|
||||
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.organizations SET ${updates.join(', ')} WHERE stripe_customer_id = $1`,
|
||||
[customerId],
|
||||
);
|
||||
|
||||
this.logger.log(`Subscription updated for customer ${customerId}: status=${newStatus}, interval=${billingInterval}`);
|
||||
}
|
||||
|
||||
// ─── Provisioning ──────────────────────────────────────────
|
||||
|
||||
/**
 * Full provisioning flow: create org, schema, user, invite token, email.
 *
 * Idempotent on stripe_customer_id (upsert), so webhook retries re-run
 * safely. Steps, in order:
 *   1. Upsert shared.organizations (fresh schema name only used on insert).
 *   2. Create the tenant schema (tolerates "already exists").
 *   3. Find-or-create the shared.users row for the email.
 *   4. Grant the user the 'president' role on the org (idempotent).
 *   5. Generate an invite token and send the activation email.
 *   6. Seed the onboarding_progress row (idempotent).
 *
 * @param customerId      Stripe customer ID (conflict key for the upsert).
 * @param subscriptionId  Stripe subscription ID.
 * @param email           Org owner email.
 * @param planId          Internal plan key (starter/professional/enterprise).
 * @param businessName    Organization display name.
 * @param status          'active' or 'trial' (default 'active').
 * @param billingInterval 'month' (default) or 'year'.
 * @param trialEndsAt     Trial end timestamp; stored as NULL when absent.
 */
async provisionOrganization(
  customerId: string,
  subscriptionId: string,
  email: string,
  planId: string,
  businessName: string,
  status: 'active' | 'trial' = 'active',
  billingInterval: BillingInterval = 'month',
  trialEndsAt?: Date,
): Promise<void> {
  // 1. Create or upsert organization.
  // A new random schema name is generated each call, but on conflict the
  // existing row keeps its schema_name (RETURNING gives the actual one).
  const schemaName = `tenant_${uuid().replace(/-/g, '').substring(0, 12)}`;

  const orgRows = await this.dataSource.query(
    `INSERT INTO shared.organizations
       (name, schema_name, status, plan_level, stripe_customer_id, stripe_subscription_id, email, billing_interval, trial_ends_at)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
     ON CONFLICT (stripe_customer_id) DO UPDATE SET
       stripe_subscription_id = EXCLUDED.stripe_subscription_id,
       plan_level = EXCLUDED.plan_level,
       status = EXCLUDED.status,
       billing_interval = EXCLUDED.billing_interval,
       trial_ends_at = EXCLUDED.trial_ends_at,
       updated_at = NOW()
     RETURNING id, schema_name`,
    [businessName, schemaName, status, planId, customerId, subscriptionId, email, billingInterval, trialEndsAt || null],
  );

  const orgId = orgRows[0].id;
  const actualSchema = orgRows[0].schema_name;

  // 2. Create tenant schema
  try {
    await this.tenantSchemaService.createTenantSchema(actualSchema);
    this.logger.log(`Created tenant schema: ${actualSchema}`);
  } catch (err: any) {
    // Re-runs hit this path; anything other than "already exists" is fatal.
    if (err.message?.includes('already exists')) {
      this.logger.log(`Schema ${actualSchema} already exists, skipping creation`);
    } else {
      throw err;
    }
  }

  // 3. Create or find user
  let userRows = await this.dataSource.query(
    `SELECT id FROM shared.users WHERE email = $1`,
    [email],
  );

  let userId: string;
  if (userRows.length === 0) {
    const newUser = await this.dataSource.query(
      `INSERT INTO shared.users (email, is_email_verified)
       VALUES ($1, false)
       RETURNING id`,
      [email],
    );
    userId = newUser[0].id;
  } else {
    userId = userRows[0].id;
  }

  // 4. Create membership (president role)
  await this.dataSource.query(
    `INSERT INTO shared.user_organizations (user_id, organization_id, role)
     VALUES ($1, $2, 'president')
     ON CONFLICT (user_id, organization_id) DO NOTHING`,
    [userId, orgId],
  );

  // 5. Generate invite token and "send" activation email
  const inviteToken = await this.authService.generateInviteToken(userId, orgId, email);
  const activationUrl = `${this.getAppUrl()}/activate?token=${inviteToken}`;
  await this.emailService.sendActivationEmail(email, businessName, activationUrl);

  // 6. Initialize onboarding progress
  await this.dataSource.query(
    `INSERT INTO shared.onboarding_progress (organization_id) VALUES ($1) ON CONFLICT DO NOTHING`,
    [orgId],
  );

  this.logger.log(`Provisioning complete for org=${orgId}, user=${userId}, status=${status}`);
}
|
||||
|
||||
/** Base public URL used when building links (emails, Stripe redirects). */
private getAppUrl(): string {
  const configured = this.configService.get<string>('APP_URL');
  // Empty string also falls back, matching the original `||` semantics.
  return configured ? configured : 'http://localhost';
}
|
||||
}
|
||||
@@ -0,0 +1,594 @@
|
||||
import { Injectable, NotFoundException } from '@nestjs/common';
|
||||
import { TenantService } from '../../database/tenant.service';
|
||||
|
||||
// Display labels for chart datapoints, indexed by month (0 = January).
const monthLabels = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'];
// Column names for per-month amounts ('dec' is reserved-ish, hence 'dec_amt').
const monthNames = ['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec_amt'];

// Round a currency value to 2 decimal places.
const round2 = (v: number) => Math.round(v * 100) / 100;
|
||||
|
||||
@Injectable()
|
||||
export class BoardPlanningProjectionService {
|
||||
constructor(private tenant: TenantService) {}
|
||||
|
||||
/** Return cached projection if fresh, otherwise compute. */
|
||||
async getProjection(scenarioId: string) {
|
||||
const rows = await this.tenant.query('SELECT * FROM board_scenarios WHERE id = $1', [scenarioId]);
|
||||
if (!rows.length) throw new NotFoundException('Scenario not found');
|
||||
const scenario = rows[0];
|
||||
|
||||
// Return cache if it exists and is less than 1 hour old
|
||||
if (scenario.projection_cache && scenario.projection_cached_at) {
|
||||
const age = Date.now() - new Date(scenario.projection_cached_at).getTime();
|
||||
if (age < 3600000) return scenario.projection_cache;
|
||||
}
|
||||
|
||||
return this.computeProjection(scenarioId);
|
||||
}
|
||||
|
||||
/**
 * Compute the full month-by-month projection for a scenario and cache it.
 *
 * Also auto-creates renewal records for auto_renew investments before
 * computing. Months before "now" use historical actuals; months from the
 * current month onward use budget/assessment forecasts. Scenario-specific
 * investment and assessment deltas are layered on top in both cases.
 *
 * @param scenarioId board_scenarios row ID.
 * @returns { datapoints, summary } — also written to projection_cache.
 * @throws NotFoundException when the scenario does not exist.
 */
async computeProjection(scenarioId: string) {
  const scenarioRows = await this.tenant.query('SELECT * FROM board_scenarios WHERE id = $1', [scenarioId]);
  if (!scenarioRows.length) throw new NotFoundException('Scenario not found');
  const scenario = scenarioRows[0];

  // Auto-create renewal investment records for auto_renew investments that have maturity dates
  await this.ensureRenewalRecords(scenarioId);

  const investments = await this.tenant.query(
    'SELECT * FROM scenario_investments WHERE scenario_id = $1 ORDER BY purchase_date', [scenarioId],
  );
  const assessments = await this.tenant.query(
    'SELECT * FROM scenario_assessments WHERE scenario_id = $1 ORDER BY effective_date', [scenarioId],
  );

  const months = scenario.projection_months || 36;
  const now = new Date();
  const startYear = now.getFullYear();
  const currentMonth = now.getMonth() + 1; // 1-based month

  // ── 1. Baseline state (mirrors reports.service.ts getCashFlowForecast) ──
  const baseline = await this.getBaselineState(startYear, months);

  // ── 2. Build month-by-month projection ──
  // Running balances across the four buckets, seeded from opening balances.
  let { opCash, resCash, opInv, resInv } = baseline.openingBalances;
  const datapoints: any[] = [];
  let totalInterestEarned = 0;
  const interestByInvestment: Record<string, number> = {};

  // The loop starts at January of startYear, so months earlier than the
  // current month are replayed from historical actuals.
  for (let i = 0; i < months; i++) {
    const year = startYear + Math.floor(i / 12);
    const month = (i % 12) + 1;
    const key = `${year}-${month}`;
    const label = `${monthLabels[month - 1]} ${year}`;
    const isHistorical = year < startYear || (year === startYear && month < currentMonth);

    // Baseline income/expenses from budget
    const budget = baseline.budgetsByYearMonth[key] || { opIncome: 0, opExpense: 0, resIncome: 0, resExpense: 0 };
    const baseAssessment = this.getAssessmentIncome(baseline.assessmentGroups, month);
    const existingMaturity = baseline.maturityIndex[key] || { operating: 0, reserve: 0 };
    const project = baseline.projectIndex[key] || { operating: 0, reserve: 0 };

    // Scenario investment deltas for this month
    const invDelta = this.computeInvestmentDelta(investments, year, month);
    totalInterestEarned += invDelta.interestEarned;
    for (const [invId, amt] of Object.entries(invDelta.interestByInvestment)) {
      interestByInvestment[invId] = (interestByInvestment[invId] || 0) + amt;
    }

    // Scenario assessment deltas for this month
    const asmtDelta = this.computeAssessmentDelta(assessments, baseline.assessmentGroups, year, month);

    if (isHistorical) {
      // Historical months: use actual changes + scenario deltas
      const opChange = baseline.histIndex[`${year}-${month}-operating`] || 0;
      const resChange = baseline.histIndex[`${year}-${month}-reserve`] || 0;
      opCash += opChange + invDelta.opCashFlow + asmtDelta.operating;
      resCash += resChange + invDelta.resCashFlow + asmtDelta.reserve;
    } else {
      // Forecast months: budget + assessments + scenario deltas.
      // Budgeted income wins when positive; otherwise assessment income.
      const opIncomeMonth = (budget.opIncome > 0 ? budget.opIncome : baseAssessment.operating) + asmtDelta.operating;
      const resIncomeMonth = (budget.resIncome > 0 ? budget.resIncome : baseAssessment.reserve) + asmtDelta.reserve;

      opCash += opIncomeMonth - budget.opExpense - project.operating + existingMaturity.operating + invDelta.opCashFlow;
      resCash += resIncomeMonth - budget.resExpense - project.reserve + existingMaturity.reserve + invDelta.resCashFlow;

      // Existing maturities reduce investment balances.
      // NOTE(review): 0.96 approximates the principal portion of a
      // maturity payout — confirm this factor against the reports service.
      if (existingMaturity.operating > 0) {
        opInv -= existingMaturity.operating * 0.96; // approximate principal
        if (opInv < 0) opInv = 0;
      }
      if (existingMaturity.reserve > 0) {
        resInv -= existingMaturity.reserve * 0.96;
        if (resInv < 0) resInv = 0;
      }
    }

    // Scenario investment balance changes (clamped at zero)
    opInv += invDelta.opInvChange;
    resInv += invDelta.resInvChange;
    if (opInv < 0) opInv = 0;
    if (resInv < 0) resInv = 0;

    datapoints.push({
      month: label,
      year,
      monthNum: month,
      is_forecast: !isHistorical,
      operating_cash: round2(opCash),
      operating_investments: round2(opInv),
      reserve_cash: round2(resCash),
      reserve_investments: round2(resInv),
    });
  }

  // ── 3. Summary metrics ──
  const summary = this.computeSummary(datapoints, baseline, assessments, investments, totalInterestEarned, interestByInvestment);

  const result = { datapoints, summary };

  // ── 4. Cache ──
  await this.tenant.query(
    `UPDATE board_scenarios SET projection_cache = $1, projection_cached_at = NOW() WHERE id = $2`,
    [JSON.stringify(result), scenarioId],
  );

  return result;
}
|
||||
|
||||
/** Compare multiple scenarios side-by-side. */
|
||||
async compareScenarios(scenarioIds: string[]) {
|
||||
if (!scenarioIds.length || scenarioIds.length > 4) {
|
||||
throw new NotFoundException('Provide 1 to 4 scenario IDs');
|
||||
}
|
||||
|
||||
const scenarios = await Promise.all(
|
||||
scenarioIds.map(async (id) => {
|
||||
const rows = await this.tenant.query('SELECT id, name, scenario_type, status FROM board_scenarios WHERE id = $1', [id]);
|
||||
if (!rows.length) throw new NotFoundException(`Scenario ${id} not found`);
|
||||
const projection = await this.getProjection(id);
|
||||
return { ...rows[0], projection };
|
||||
}),
|
||||
);
|
||||
|
||||
return { scenarios };
|
||||
}
|
||||
|
||||
// ── Private Helpers ──
|
||||
|
||||
/**
 * For each auto_renew investment with a maturity_date, ensure a corresponding
 * renewal investment record exists (starting at maturity_date, same term).
 * The renewal record has auto_renew=false so it won't create infinite chains.
 *
 * Idempotent: an existing row with the same scenario, renewal label, and
 * purchase_date is treated as the renewal already having been created.
 */
private async ensureRenewalRecords(scenarioId: string) {
  // Only hypothetical investments qualify — ones already executed
  // (executed_investment_id set) are tracked by the real ledger.
  const autoRenewInvestments = await this.tenant.query(
    `SELECT * FROM scenario_investments
     WHERE scenario_id = $1 AND auto_renew = true AND maturity_date IS NOT NULL AND executed_investment_id IS NULL`,
    [scenarioId],
  );

  for (const inv of autoRenewInvestments) {
    // Check if a renewal record already exists (linked by notes convention or same label pattern)
    const renewalLabel = `${inv.label} (Renewal)`;
    const existing = await this.tenant.query(
      `SELECT id FROM scenario_investments WHERE scenario_id = $1 AND label = $2 AND purchase_date = $3`,
      [scenarioId, renewalLabel, inv.maturity_date],
    );

    if (existing.length > 0) continue; // Already created

    // Compute new maturity date from original term
    // NOTE(review): new Date('YYYY-MM-DD') parses as UTC midnight and
    // toISOString() reads back in UTC, so date-only inputs round-trip cleanly;
    // confirm maturity_date is always a date-only string, not a timestamp.
    let newMaturityDate: string | null = null;
    const termMonths = parseInt(inv.term_months) || 0;
    if (termMonths > 0 && inv.maturity_date) {
      const d = new Date(inv.maturity_date);
      d.setMonth(d.getMonth() + termMonths);
      newMaturityDate = d.toISOString().split('T')[0];
    }

    // Renewal row: purchase_date = original maturity_date, auto_renew is
    // hard-coded false so renewals never spawn further renewals.
    await this.tenant.query(
      `INSERT INTO scenario_investments
        (scenario_id, label, investment_type, fund_type, principal, interest_rate,
         term_months, institution, purchase_date, maturity_date, auto_renew, notes, sort_order)
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, false, $11, $12)`,
      [
        scenarioId, renewalLabel, inv.investment_type, inv.fund_type,
        inv.principal, inv.interest_rate, inv.term_months || null,
        inv.institution, inv.maturity_date, newMaturityDate,
        `Auto-created renewal of "${inv.label}". Modify as needed.`,
        (parseInt(inv.sort_order) || 0) + 1,
      ],
    );
  }
}
|
||||
|
||||
/**
 * Build the baseline financial state shared by every scenario projection:
 * opening balances at the start of the window, current investment balances,
 * active assessment groups, monthly budgets, historical cash changes,
 * upcoming investment maturities, and planned project spending.
 *
 * (Two previously-dead queries for current operating/reserve cash were
 * removed: their results were never referenced.)
 *
 * @param startYear first calendar year of the projection window
 * @param months    projection length in months (bounds how many budget years load)
 */
private async getBaselineState(startYear: number, months: number) {
  // Current investment balances by fund.
  const opInvRows = await this.tenant.query(`
    SELECT COALESCE(SUM(current_value), 0) as total FROM investment_accounts WHERE fund_type = 'operating' AND is_active = true
  `);
  const resInvRows = await this.tenant.query(`
    SELECT COALESCE(SUM(current_value), 0) as total FROM investment_accounts WHERE fund_type = 'reserve' AND is_active = true
  `);

  // Opening cash balances at start of startYear: per-account (debit - credit)
  // over posted, non-void journal entries dated before Jan 1 of startYear.
  const openingOp = await this.tenant.query(`
    SELECT COALESCE(SUM(sub.bal), 0) as total FROM (
      SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
      FROM accounts a
      JOIN journal_entry_lines jel ON jel.account_id = a.id
      JOIN journal_entries je ON je.id = jel.journal_entry_id AND je.is_posted = true AND je.is_void = false AND je.entry_date < $1::date
      WHERE a.account_type = 'asset' AND a.fund_type = 'operating' AND a.is_active = true
      GROUP BY a.id
    ) sub
  `, [`${startYear}-01-01`]);
  const openingRes = await this.tenant.query(`
    SELECT COALESCE(SUM(sub.bal), 0) as total FROM (
      SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
      FROM accounts a
      JOIN journal_entry_lines jel ON jel.account_id = a.id
      JOIN journal_entries je ON je.id = jel.journal_entry_id AND je.is_posted = true AND je.is_void = false AND je.entry_date < $1::date
      WHERE a.account_type = 'asset' AND a.fund_type = 'reserve' AND a.is_active = true
      GROUP BY a.id
    ) sub
  `, [`${startYear}-01-01`]);

  // Assessment groups
  const assessmentGroups = await this.tenant.query(
    `SELECT frequency, regular_assessment, special_assessment, unit_count FROM assessment_groups WHERE is_active = true`,
  );

  // Budgets (official + planned budget fallback), indexed by "year-month".
  const budgetsByYearMonth: Record<string, any> = {};
  const endYear = startYear + Math.ceil(months / 12) + 1; // pad one year past the window
  for (let yr = startYear; yr <= endYear; yr++) {
    let budgetRows: any[];
    try {
      // Official budgets (priority 1) unioned with draft budget plans (priority 2).
      budgetRows = await this.tenant.query(
        `SELECT fund_type, account_type, jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec_amt FROM (
          SELECT b.account_id, b.fund_type, a.account_type,
            b.jan, b.feb, b.mar, b.apr, b.may, b.jun, b.jul, b.aug, b.sep, b.oct, b.nov, b.dec_amt,
            1 as source_priority
          FROM budgets b JOIN accounts a ON a.id = b.account_id WHERE b.fiscal_year = $1
          UNION ALL
          SELECT bpl.account_id, bpl.fund_type, a.account_type,
            bpl.jan, bpl.feb, bpl.mar, bpl.apr, bpl.may, bpl.jun, bpl.jul, bpl.aug, bpl.sep, bpl.oct, bpl.nov, bpl.dec_amt,
            2 as source_priority
          FROM budget_plan_lines bpl
          JOIN budget_plans bp ON bp.id = bpl.budget_plan_id
          JOIN accounts a ON a.id = bpl.account_id
          WHERE bp.fiscal_year = $1
        ) combined
        ORDER BY account_id, fund_type, source_priority`, [yr],
      );
    } catch {
      // budget_plan_lines may not exist yet - fall back to official only
      budgetRows = await this.tenant.query(
        `SELECT b.fund_type, a.account_type, b.jan, b.feb, b.mar, b.apr, b.may, b.jun, b.jul, b.aug, b.sep, b.oct, b.nov, b.dec_amt
         FROM budgets b JOIN accounts a ON a.id = b.account_id WHERE b.fiscal_year = $1`, [yr],
      );
    }
    // NOTE(review): when BOTH an official budget and a plan line exist for the
    // same account, both rows are summed below (the UNION is not de-duplicated
    // by source_priority) — confirm this double-count is intended.
    for (let m = 0; m < 12; m++) {
      const key = `${yr}-${m + 1}`;
      if (!budgetsByYearMonth[key]) budgetsByYearMonth[key] = { opIncome: 0, opExpense: 0, resIncome: 0, resExpense: 0 };
      for (const row of budgetRows) {
        const amt = parseFloat(row[monthNames[m]]) || 0;
        if (amt === 0) continue;
        const isOp = row.fund_type === 'operating';
        if (row.account_type === 'income') {
          if (isOp) budgetsByYearMonth[key].opIncome += amt;
          else budgetsByYearMonth[key].resIncome += amt;
        } else if (row.account_type === 'expense') {
          if (isOp) budgetsByYearMonth[key].opExpense += amt;
          else budgetsByYearMonth[key].resExpense += amt;
        }
      }
    }
  }

  // Historical cash changes: net asset movement per (year, month, fund_type).
  const historicalCash = await this.tenant.query(`
    SELECT EXTRACT(YEAR FROM je.entry_date)::int as yr, EXTRACT(MONTH FROM je.entry_date)::int as mo,
      a.fund_type, COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as net_change
    FROM journal_entry_lines jel
    JOIN journal_entries je ON je.id = jel.journal_entry_id AND je.is_posted = true AND je.is_void = false
    JOIN accounts a ON a.id = jel.account_id AND a.account_type = 'asset' AND a.is_active = true
    WHERE je.entry_date >= $1::date
    GROUP BY yr, mo, a.fund_type ORDER BY yr, mo
  `, [`${startYear}-01-01`]);

  const histIndex: Record<string, number> = {};
  for (const row of historicalCash) {
    histIndex[`${row.yr}-${row.mo}-${row.fund_type}`] = parseFloat(row.net_change) || 0;
  }

  // Upcoming investment maturities, indexed by "year-month" of maturity.
  const maturities = await this.tenant.query(`
    SELECT fund_type, current_value, maturity_date, interest_rate, purchase_date
    FROM investment_accounts WHERE is_active = true AND maturity_date IS NOT NULL AND maturity_date > CURRENT_DATE
  `);
  const maturityIndex: Record<string, { operating: number; reserve: number }> = {};
  for (const inv of maturities) {
    const d = new Date(inv.maturity_date);
    const key = `${d.getFullYear()}-${d.getMonth() + 1}`;
    if (!maturityIndex[key]) maturityIndex[key] = { operating: 0, reserve: 0 };
    const val = parseFloat(inv.current_value) || 0;
    const rate = parseFloat(inv.interest_rate) || 0;
    // Simple (non-compounding) interest over the actual holding period.
    const purchaseDate = inv.purchase_date ? new Date(inv.purchase_date) : new Date();
    const matDate = new Date(inv.maturity_date);
    const daysHeld = Math.max((matDate.getTime() - purchaseDate.getTime()) / 86400000, 1);
    const interestEarned = val * (rate / 100) * (daysHeld / 365);
    const maturityTotal = val + interestEarned;
    if (inv.fund_type === 'operating') maturityIndex[key].operating += maturityTotal;
    else maturityIndex[key].reserve += maturityTotal;
  }

  // Capital project expenses (from unified projects table)
  const projectExpenses = await this.tenant.query(`
    SELECT estimated_cost, target_year, target_month, fund_source
    FROM projects WHERE is_active = true AND status IN ('planned', 'in_progress') AND target_year IS NOT NULL AND estimated_cost > 0
  `);
  const projectIndex: Record<string, { operating: number; reserve: number }> = {};
  for (const p of projectExpenses) {
    const yr = parseInt(p.target_year);
    const mo = parseInt(p.target_month) || 6; // default to mid-year when month unknown
    const key = `${yr}-${mo}`;
    if (!projectIndex[key]) projectIndex[key] = { operating: 0, reserve: 0 };
    const cost = parseFloat(p.estimated_cost) || 0;
    if (p.fund_source === 'operating') projectIndex[key].operating += cost;
    else projectIndex[key].reserve += cost;
  }

  // Also include capital_projects table (Capital Planning page)
  try {
    const capitalProjectExpenses = await this.tenant.query(`
      SELECT estimated_cost, target_year, target_month, fund_source
      FROM capital_projects WHERE status IN ('planned', 'approved', 'in_progress') AND target_year IS NOT NULL AND estimated_cost > 0
    `);
    for (const p of capitalProjectExpenses) {
      const yr = parseInt(p.target_year);
      const mo = parseInt(p.target_month) || 6;
      const key = `${yr}-${mo}`;
      if (!projectIndex[key]) projectIndex[key] = { operating: 0, reserve: 0 };
      const cost = parseFloat(p.estimated_cost) || 0;
      if (p.fund_source === 'operating') projectIndex[key].operating += cost;
      else projectIndex[key].reserve += cost;
    }
  } catch {
    // capital_projects table may not exist in all tenants
  }

  return {
    openingBalances: {
      opCash: parseFloat(openingOp[0]?.total || '0'),
      resCash: parseFloat(openingRes[0]?.total || '0'),
      opInv: parseFloat(opInvRows[0]?.total || '0'),
      resInv: parseFloat(resInvRows[0]?.total || '0'),
    },
    assessmentGroups,
    budgetsByYearMonth,
    histIndex,
    maturityIndex,
    projectIndex,
  };
}
|
||||
|
||||
/**
 * Baseline assessment income for one calendar month (1-12), summed across
 * assessment groups. Regular dues feed operating; special assessments feed
 * reserve. Quarterly groups bill in Jan/Apr/Jul/Oct; annual groups in Jan.
 */
private getAssessmentIncome(assessmentGroups: any[], month: number) {
  const totals = { operating: 0, reserve: 0 };

  for (const group of assessmentGroups) {
    let billedThisMonth: boolean;
    switch (group.frequency || 'monthly') {
      case 'monthly':
        billedThisMonth = true;
        break;
      case 'quarterly':
        billedThisMonth = month === 1 || month === 4 || month === 7 || month === 10;
        break;
      case 'annual':
        billedThisMonth = month === 1;
        break;
      default:
        // Unknown frequency contributes nothing.
        billedThisMonth = false;
    }
    if (!billedThisMonth) continue;

    const unitCount = parseInt(group.unit_count) || 0;
    totals.operating += (parseFloat(group.regular_assessment) || 0) * unitCount;
    totals.reserve += (parseFloat(group.special_assessment) || 0) * unitCount;
  }

  return totals;
}
|
||||
|
||||
/** Compute investment cash flow and balance deltas for a given month from scenario investments. */
|
||||
private computeInvestmentDelta(investments: any[], year: number, month: number) {
|
||||
let opCashFlow = 0;
|
||||
let resCashFlow = 0;
|
||||
let opInvChange = 0;
|
||||
let resInvChange = 0;
|
||||
let interestEarned = 0;
|
||||
const interestByInvestment: Record<string, number> = {};
|
||||
|
||||
for (const inv of investments) {
|
||||
if (inv.executed_investment_id) continue; // skip already-executed investments
|
||||
|
||||
const principal = parseFloat(inv.principal) || 0;
|
||||
const rate = parseFloat(inv.interest_rate) || 0;
|
||||
const isOp = inv.fund_type === 'operating';
|
||||
|
||||
// Purchase: cash leaves, investment balance increases
|
||||
if (inv.purchase_date) {
|
||||
const pd = new Date(inv.purchase_date);
|
||||
if (pd.getFullYear() === year && pd.getMonth() + 1 === month) {
|
||||
if (isOp) { opCashFlow -= principal; opInvChange += principal; }
|
||||
else { resCashFlow -= principal; resInvChange += principal; }
|
||||
}
|
||||
}
|
||||
|
||||
// Maturity: investment returns to cash with interest
|
||||
if (inv.maturity_date) {
|
||||
const md = new Date(inv.maturity_date);
|
||||
if (md.getFullYear() === year && md.getMonth() + 1 === month) {
|
||||
const purchaseDate = inv.purchase_date ? new Date(inv.purchase_date) : new Date();
|
||||
const daysHeld = Math.max((md.getTime() - purchaseDate.getTime()) / 86400000, 1);
|
||||
const invInterest = principal * (rate / 100) * (daysHeld / 365);
|
||||
const maturityTotal = principal + invInterest;
|
||||
|
||||
interestEarned += invInterest;
|
||||
interestByInvestment[inv.id] = (interestByInvestment[inv.id] || 0) + invInterest;
|
||||
|
||||
if (isOp) { opCashFlow += maturityTotal; opInvChange -= principal; }
|
||||
else { resCashFlow += maturityTotal; resInvChange -= principal; }
|
||||
|
||||
// Note: auto_renew investments now create separate renewal records
|
||||
// (via ensureRenewalRecords), so the renewal purchase is handled by
|
||||
// that record's purchase_date logic above — no inline reinvest needed.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { opCashFlow, resCashFlow, opInvChange, resInvChange, interestEarned, interestByInvestment };
|
||||
}
|
||||
|
||||
/** Compute assessment income delta for a given month from scenario assessment changes. */
private computeAssessmentDelta(scenarioAssessments: any[], assessmentGroups: any[], year: number, month: number) {
  let operating = 0;
  let reserve = 0;

  // First day of the projected month, used for the active-window test below.
  // NOTE(review): this Date is constructed in local time while effective_date
  // below parses (for 'YYYY-MM-DD' strings) as UTC — confirm there is no
  // off-by-one at month boundaries.
  const monthDate = new Date(year, month - 1, 1);

  // Get total units across all assessment groups
  let totalUnits = 0;
  for (const g of assessmentGroups) {
    totalUnits += parseInt(g.unit_count) || 0;
  }

  for (const a of scenarioAssessments) {
    const effectiveDate = new Date(a.effective_date);
    const endDate = a.end_date ? new Date(a.end_date) : null;

    // Only apply if within the active window
    if (monthDate < effectiveDate) continue;
    if (endDate && monthDate > endDate) continue;

    if (a.change_type === 'dues_increase' || a.change_type === 'dues_decrease') {
      const baseIncome = this.getAssessmentIncome(assessmentGroups, month);
      const pctChange = parseFloat(a.percentage_change) || 0;
      const flatChange = parseFloat(a.flat_amount_change) || 0;
      // Direction comes from change_type; stored magnitudes are expected positive.
      const sign = a.change_type === 'dues_decrease' ? -1 : 1;

      let delta = 0;
      if (pctChange > 0) {
        // Percentage change of base assessment income
        // NOTE(review): percentage takes precedence — a row with both pct and
        // flat set applies only the percentage; zero/negative values are ignored.
        const target = a.target_fund || 'operating';
        if (target === 'operating' || target === 'both') {
          delta = baseIncome.operating * (pctChange / 100) * sign;
          operating += delta;
        }
        if (target === 'reserve' || target === 'both') {
          delta = baseIncome.reserve * (pctChange / 100) * sign;
          reserve += delta;
        }
      } else if (flatChange > 0) {
        // Flat per-unit change times total units
        const target = a.target_fund || 'operating';
        if (target === 'operating' || target === 'both') {
          operating += flatChange * totalUnits * sign;
        }
        if (target === 'reserve' || target === 'both') {
          reserve += flatChange * totalUnits * sign;
        }
      }
    } else if (a.change_type === 'special_assessment') {
      // Special assessment distributed across installments
      const perUnit = parseFloat(a.special_per_unit) || 0;
      const installments = parseInt(a.special_installments) || 1;
      // Whole months since the effective month (0 = the effective month itself).
      const monthsFromStart = (year - effectiveDate.getFullYear()) * 12 + (month - (effectiveDate.getMonth() + 1));

      if (monthsFromStart >= 0 && monthsFromStart < installments) {
        const monthlyIncome = (perUnit * totalUnits) / installments;
        // Special assessments default to reserve (dues changes default to operating).
        const target = a.target_fund || 'reserve';
        if (target === 'operating' || target === 'both') operating += monthlyIncome;
        if (target === 'reserve' || target === 'both') reserve += monthlyIncome;
      }
    }
  }

  return { operating, reserve };
}
|
||||
|
||||
/**
 * Summary metrics for a computed projection: ending/minimum liquidity,
 * reserve coverage in months, and per-investment principal/interest detail.
 *
 * @param datapoints           monthly projection rows, chronological order
 * @param baseline             baseline state from getBaselineState (only projectIndex is read)
 * @param scenarioAssessments  scenario assessment rows (accepted but not read here)
 * @param investments          scenario investment rows for principal/interest detail
 * @param totalInterestEarned  interest accumulated across the projection
 * @param interestByInvestment interest earned keyed by investment id
 */
private computeSummary(
  datapoints: any[], baseline: any, scenarioAssessments: any[],
  investments?: any[], totalInterestEarned = 0, interestByInvestment: Record<string, number> = {},
) {
  if (!datapoints.length) return {};

  const last = datapoints[datapoints.length - 1];
  // NOTE(review): 'first' is never read — period_change uses allLiquidity[0].
  const first = datapoints[0];

  // Total liquidity per month = cash + investments across both funds.
  const allLiquidity = datapoints.map(
    (d) => d.operating_cash + d.operating_investments + d.reserve_cash + d.reserve_investments,
  );
  const minLiquidity = Math.min(...allLiquidity);
  const endLiquidity = allLiquidity[allLiquidity.length - 1];

  // Reserve coverage: reserve balance / avg monthly reserve expenditure from planned capital projects
  let totalReserveProjectCost = 0;
  const projectionYears = Math.max(1, Math.ceil(datapoints.length / 12));
  for (const key of Object.keys(baseline.projectIndex)) {
    totalReserveProjectCost += baseline.projectIndex[key].reserve || 0;
  }
  const avgMonthlyReserveExpenditure = totalReserveProjectCost > 0
    ? totalReserveProjectCost / (projectionYears * 12)
    : 0;
  const reserveCoverageMonths = avgMonthlyReserveExpenditure > 0
    ? (last.reserve_cash + last.reserve_investments) / avgMonthlyReserveExpenditure
    : 0; // No planned projects = show 0 (N/A)

  // Calculate total principal from scenario investments
  let totalPrincipal = 0;
  const investmentInterestDetails: Array<{ id: string; label: string; principal: number; interest: number }> = [];
  if (investments) {
    for (const inv of investments) {
      // Executed investments are excluded, matching computeInvestmentDelta.
      if (inv.executed_investment_id) continue;
      const principal = parseFloat(inv.principal) || 0;
      totalPrincipal += principal;
      const interest = interestByInvestment[inv.id] || 0;
      investmentInterestDetails.push({
        id: inv.id,
        label: inv.label,
        principal: round2(principal),
        interest: round2(interest),
      });
    }
  }

  return {
    end_liquidity: round2(endLiquidity),
    min_liquidity: round2(minLiquidity),
    reserve_coverage_months: round2(reserveCoverageMonths),
    end_operating_cash: last.operating_cash,
    end_reserve_cash: last.reserve_cash,
    end_operating_investments: last.operating_investments,
    end_reserve_investments: last.reserve_investments,
    period_change: round2(endLiquidity - allLiquidity[0]),
    total_interest_earned: round2(totalInterestEarned),
    total_principal_invested: round2(totalPrincipal),
    roi_percentage: totalPrincipal > 0 ? round2((totalInterestEarned / totalPrincipal) * 100) : 0,
    investment_interest_details: investmentInterestDetails,
  };
}
|
||||
}
|
||||
200
backend/src/modules/board-planning/board-planning.controller.ts
Normal file
200
backend/src/modules/board-planning/board-planning.controller.ts
Normal file
@@ -0,0 +1,200 @@
|
||||
import { Controller, Get, Post, Put, Delete, Body, Param, Query, Req, Res, UseGuards } from '@nestjs/common';
|
||||
import { Response } from 'express';
|
||||
import { ApiTags, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
|
||||
import { AllowViewer } from '../../common/decorators/allow-viewer.decorator';
|
||||
import { BoardPlanningService } from './board-planning.service';
|
||||
import { BoardPlanningProjectionService } from './board-planning-projection.service';
|
||||
import { BudgetPlanningService } from './budget-planning.service';
|
||||
|
||||
@ApiTags('board-planning')
|
||||
@Controller('board-planning')
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class BoardPlanningController {
|
||||
constructor(
|
||||
private service: BoardPlanningService,
|
||||
private projection: BoardPlanningProjectionService,
|
||||
private budgetPlanning: BudgetPlanningService,
|
||||
) {}
|
||||
|
||||
// ── Scenarios ──
|
||||
|
||||
@Get('scenarios')
|
||||
@AllowViewer()
|
||||
listScenarios(@Query('type') type?: string) {
|
||||
return this.service.listScenarios(type);
|
||||
}
|
||||
|
||||
@Get('scenarios/:id')
|
||||
@AllowViewer()
|
||||
getScenario(@Param('id') id: string) {
|
||||
return this.service.getScenario(id);
|
||||
}
|
||||
|
||||
@Post('scenarios')
|
||||
createScenario(@Body() dto: any, @Req() req: any) {
|
||||
return this.service.createScenario(dto, req.user.sub);
|
||||
}
|
||||
|
||||
@Put('scenarios/:id')
|
||||
updateScenario(@Param('id') id: string, @Body() dto: any) {
|
||||
return this.service.updateScenario(id, dto);
|
||||
}
|
||||
|
||||
@Delete('scenarios/:id')
|
||||
deleteScenario(@Param('id') id: string) {
|
||||
return this.service.deleteScenario(id);
|
||||
}
|
||||
|
||||
// ── Scenario Investments ──
|
||||
|
||||
@Get('scenarios/:scenarioId/investments')
|
||||
@AllowViewer()
|
||||
listInvestments(@Param('scenarioId') scenarioId: string) {
|
||||
return this.service.listInvestments(scenarioId);
|
||||
}
|
||||
|
||||
@Post('scenarios/:scenarioId/investments')
|
||||
addInvestment(@Param('scenarioId') scenarioId: string, @Body() dto: any) {
|
||||
return this.service.addInvestment(scenarioId, dto);
|
||||
}
|
||||
|
||||
@Post('scenarios/:scenarioId/investments/from-recommendation')
|
||||
addFromRecommendation(@Param('scenarioId') scenarioId: string, @Body() dto: any) {
|
||||
return this.service.addInvestmentFromRecommendation(scenarioId, dto);
|
||||
}
|
||||
|
||||
@Put('investments/:id')
|
||||
updateInvestment(@Param('id') id: string, @Body() dto: any) {
|
||||
return this.service.updateInvestment(id, dto);
|
||||
}
|
||||
|
||||
@Delete('investments/:id')
|
||||
removeInvestment(@Param('id') id: string) {
|
||||
return this.service.removeInvestment(id);
|
||||
}
|
||||
|
||||
// ── Scenario Assessments ──
|
||||
|
||||
@Get('scenarios/:scenarioId/assessments')
|
||||
@AllowViewer()
|
||||
listAssessments(@Param('scenarioId') scenarioId: string) {
|
||||
return this.service.listAssessments(scenarioId);
|
||||
}
|
||||
|
||||
@Post('scenarios/:scenarioId/assessments')
|
||||
addAssessment(@Param('scenarioId') scenarioId: string, @Body() dto: any) {
|
||||
return this.service.addAssessment(scenarioId, dto);
|
||||
}
|
||||
|
||||
@Put('assessments/:id')
|
||||
updateAssessment(@Param('id') id: string, @Body() dto: any) {
|
||||
return this.service.updateAssessment(id, dto);
|
||||
}
|
||||
|
||||
@Delete('assessments/:id')
|
||||
removeAssessment(@Param('id') id: string) {
|
||||
return this.service.removeAssessment(id);
|
||||
}
|
||||
|
||||
// ── Projections ──
|
||||
|
||||
@Get('scenarios/:id/projection')
|
||||
@AllowViewer()
|
||||
getProjection(@Param('id') id: string) {
|
||||
return this.projection.getProjection(id);
|
||||
}
|
||||
|
||||
@Post('scenarios/:id/projection/refresh')
|
||||
refreshProjection(@Param('id') id: string) {
|
||||
return this.projection.computeProjection(id);
|
||||
}
|
||||
|
||||
// ── Comparison ──
|
||||
|
||||
@Get('compare')
|
||||
@AllowViewer()
|
||||
compareScenarios(@Query('ids') ids: string) {
|
||||
const scenarioIds = ids.split(',').map((s) => s.trim()).filter(Boolean);
|
||||
return this.projection.compareScenarios(scenarioIds);
|
||||
}
|
||||
|
||||
// ── Execute Investment ──
|
||||
|
||||
@Post('investments/:id/execute')
|
||||
executeInvestment(
|
||||
@Param('id') id: string,
|
||||
@Body() dto: { executionDate: string },
|
||||
@Req() req: any,
|
||||
) {
|
||||
return this.service.executeInvestment(id, dto.executionDate, req.user.sub);
|
||||
}
|
||||
|
||||
// ── Budget Planning ──
|
||||
|
||||
@Get('budget-plans')
|
||||
@AllowViewer()
|
||||
listBudgetPlans() {
|
||||
return this.budgetPlanning.listPlans();
|
||||
}
|
||||
|
||||
@Get('budget-plans/available-years')
|
||||
@AllowViewer()
|
||||
getAvailableYears() {
|
||||
return this.budgetPlanning.getAvailableYears();
|
||||
}
|
||||
|
||||
@Get('budget-plans/:year')
|
||||
@AllowViewer()
|
||||
getBudgetPlan(@Param('year') year: string) {
|
||||
return this.budgetPlanning.getPlan(parseInt(year, 10));
|
||||
}
|
||||
|
||||
@Post('budget-plans')
|
||||
createBudgetPlan(@Body() dto: { fiscalYear: number; baseYear: number; inflationRate?: number }, @Req() req: any) {
|
||||
return this.budgetPlanning.createPlan(dto.fiscalYear, dto.baseYear, dto.inflationRate ?? 2.5, req.user.sub);
|
||||
}
|
||||
|
||||
@Put('budget-plans/:year/lines')
|
||||
updateBudgetPlanLines(@Param('year') year: string, @Body() dto: { planId: string; lines: any[] }) {
|
||||
return this.budgetPlanning.updateLines(dto.planId, dto.lines);
|
||||
}
|
||||
|
||||
@Put('budget-plans/:year/inflation')
|
||||
updateBudgetPlanInflation(@Param('year') year: string, @Body() dto: { inflationRate: number }) {
|
||||
return this.budgetPlanning.updateInflation(parseInt(year, 10), dto.inflationRate);
|
||||
}
|
||||
|
||||
@Put('budget-plans/:year/status')
|
||||
advanceBudgetPlanStatus(@Param('year') year: string, @Body() dto: { status: string }, @Req() req: any) {
|
||||
return this.budgetPlanning.advanceStatus(parseInt(year, 10), dto.status, req.user.sub);
|
||||
}
|
||||
|
||||
@Post('budget-plans/:year/import')
|
||||
importBudgetPlanLines(
|
||||
@Param('year') year: string,
|
||||
@Body() lines: any[],
|
||||
@Req() req: any,
|
||||
) {
|
||||
return this.budgetPlanning.importLines(parseInt(year, 10), lines, req.user.sub);
|
||||
}
|
||||
|
||||
@Get('budget-plans/:year/template')
|
||||
async getBudgetPlanTemplate(
|
||||
@Param('year') year: string,
|
||||
@Res() res: Response,
|
||||
) {
|
||||
const csv = await this.budgetPlanning.getTemplate(parseInt(year, 10));
|
||||
res.set({
|
||||
'Content-Type': 'text/csv',
|
||||
'Content-Disposition': `attachment; filename="budget_template_${year}.csv"`,
|
||||
});
|
||||
res.send(csv);
|
||||
}
|
||||
|
||||
@Delete('budget-plans/:year')
|
||||
deleteBudgetPlan(@Param('year') year: string) {
|
||||
return this.budgetPlanning.deletePlan(parseInt(year, 10));
|
||||
}
|
||||
}
|
||||
12
backend/src/modules/board-planning/board-planning.module.ts
Normal file
12
backend/src/modules/board-planning/board-planning.module.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { BoardPlanningController } from './board-planning.controller';
|
||||
import { BoardPlanningService } from './board-planning.service';
|
||||
import { BoardPlanningProjectionService } from './board-planning-projection.service';
|
||||
import { BudgetPlanningService } from './budget-planning.service';
|
||||
|
||||
/**
 * Board planning feature module: long-range scenario modelling (investments,
 * assessment changes, cash projections) plus multi-year budget planning.
 */
@Module({
  controllers: [BoardPlanningController],
  providers: [BoardPlanningService, BoardPlanningProjectionService, BudgetPlanningService],
  // The projection service is internal to this module; only the CRUD and
  // budget-planning services are exported for use elsewhere.
  exports: [BoardPlanningService, BudgetPlanningService],
})
export class BoardPlanningModule {}
|
||||
383
backend/src/modules/board-planning/board-planning.service.ts
Normal file
383
backend/src/modules/board-planning/board-planning.service.ts
Normal file
@@ -0,0 +1,383 @@
|
||||
import { Injectable, NotFoundException, BadRequestException } from '@nestjs/common';
|
||||
import { TenantService } from '../../database/tenant.service';
|
||||
|
||||
@Injectable()
|
||||
export class BoardPlanningService {
|
||||
constructor(private tenant: TenantService) {}
|
||||
|
||||
// ── Scenarios ──
|
||||
|
||||
/** List non-archived scenarios with per-scenario counts, newest-updated first. */
async listScenarios(type?: string) {
  const params: any[] = [];
  let typeFilter = '';
  if (type) {
    params.push(type);
    typeFilter = ` AND bs.scenario_type = $${params.length}`;
  }

  const sql = `
    SELECT bs.*,
      (SELECT COUNT(*) FROM scenario_investments si WHERE si.scenario_id = bs.id) as investment_count,
      (SELECT COALESCE(SUM(si.principal), 0) FROM scenario_investments si WHERE si.scenario_id = bs.id) as total_principal,
      (SELECT COUNT(*) FROM scenario_assessments sa WHERE sa.scenario_id = bs.id) as assessment_count
    FROM board_scenarios bs
    WHERE bs.status != 'archived'
  ` + typeFilter + ' ORDER BY bs.updated_at DESC';

  return this.tenant.query(sql, params);
}
|
||||
|
||||
/** Fetch one scenario plus its investments and assessments; 404 if missing. */
async getScenario(id: string) {
  const found = await this.tenant.query('SELECT * FROM board_scenarios WHERE id = $1', [id]);
  if (!found.length) throw new NotFoundException('Scenario not found');

  // Child collections, each in a stable display order.
  const investments = await this.tenant.query(
    'SELECT * FROM scenario_investments WHERE scenario_id = $1 ORDER BY sort_order, purchase_date',
    [id],
  );
  const assessments = await this.tenant.query(
    'SELECT * FROM scenario_assessments WHERE scenario_id = $1 ORDER BY sort_order, effective_date',
    [id],
  );

  return { ...found[0], investments, assessments };
}
|
||||
|
||||
/** Create a scenario owned by userId; projection_months defaults to 36. */
async createScenario(dto: any, userId: string) {
  const values = [
    dto.name,
    dto.description || null,
    dto.scenarioType,
    dto.projectionMonths || 36,
    userId,
  ];
  const [created] = await this.tenant.query(
    `INSERT INTO board_scenarios (name, description, scenario_type, projection_months, created_by)
     VALUES ($1, $2, $3, $4, $5) RETURNING *`,
    values,
  );
  return created;
}
|
||||
|
||||
/**
 * Partially update a scenario. COALESCE keeps the existing column value for
 * any field the caller omits (passed as null/undefined).
 *
 * NOTE(review): because of COALESCE, a field cannot be explicitly cleared to
 * NULL through this method — confirm that is intended.
 *
 * @throws NotFoundException (via getScenarioRow) when the scenario is missing
 */
async updateScenario(id: string, dto: any) {
  await this.getScenarioRow(id);
  const rows = await this.tenant.query(
    `UPDATE board_scenarios SET
      name = COALESCE($2, name),
      description = COALESCE($3, description),
      status = COALESCE($4, status),
      projection_months = COALESCE($5, projection_months),
      updated_at = NOW()
     WHERE id = $1 RETURNING *`,
    [id, dto.name, dto.description, dto.status, dto.projectionMonths],
  );
  return rows[0];
}
|
||||
|
||||
/**
 * Soft-delete a scenario by marking it 'archived' (rows are never removed;
 * listScenarios filters archived rows out).
 *
 * @throws NotFoundException when the scenario does not exist
 */
async deleteScenario(id: string) {
  await this.getScenarioRow(id);
  await this.tenant.query(
    `UPDATE board_scenarios SET status = 'archived', updated_at = NOW() WHERE id = $1`,
    [id],
  );
}
|
||||
|
||||
// ── Scenario Investments ──
|
||||
|
||||
/**
 * List all investments attached to a scenario, ordered for display
 * (sort_order first, then purchase_date).
 */
async listInvestments(scenarioId: string) {
  return this.tenant.query(
    'SELECT * FROM scenario_investments WHERE scenario_id = $1 ORDER BY sort_order, purchase_date',
    [scenarioId],
  );
}
|
||||
|
||||
/**
 * Add a hypothetical investment to a scenario.
 *
 * Invalidates the scenario's cached projection since the investment mix changed.
 *
 * @param dto expects label, fundType, principal; all other fields optional
 * @returns the inserted row
 * @throws NotFoundException when the scenario does not exist
 */
async addInvestment(scenarioId: string, dto: any) {
  await this.getScenarioRow(scenarioId);
  const rows = await this.tenant.query(
    `INSERT INTO scenario_investments
      (scenario_id, source_recommendation_id, label, investment_type, fund_type,
       principal, interest_rate, term_months, institution, purchase_date, maturity_date,
       auto_renew, notes, sort_order)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
     RETURNING *`,
    [
      scenarioId, dto.sourceRecommendationId || null, dto.label,
      dto.investmentType || null, dto.fundType,
      dto.principal, dto.interestRate || null, dto.termMonths || null,
      dto.institution || null, dto.purchaseDate || null, dto.maturityDate || null,
      dto.autoRenew || false, dto.notes || null, dto.sortOrder || 0,
    ],
  );
  // Projection depends on the investment set — drop the cache.
  await this.invalidateProjectionCache(scenarioId);
  return rows[0];
}
|
||||
|
||||
/**
 * Materialize an AI recommendation into one or more scenario investments.
 *
 * If the recommendation carries `components` (e.g. a CD ladder), one row is
 * inserted per component (returning an array); otherwise a single row is
 * inserted (returning that row). Either way the scenario's projection cache
 * is invalidated.
 *
 * @param dto recommendation payload; `startDate` is an ISO date string
 *            ("yyyy-mm-dd") used as purchase_date for every inserted row
 * @throws NotFoundException when the scenario does not exist
 */
async addInvestmentFromRecommendation(scenarioId: string, dto: any) {
  await this.getScenarioRow(scenarioId);

  // Helper: compute maturity date (ISO yyyy-mm-dd) = purchaseDate + termMonths.
  //
  // Fixed: the previous implementation used `new Date(purchaseDate)` +
  // `setMonth`, which mixes UTC parsing with local-time mutation (off-by-one
  // day in negative-offset timezones) and rolls over month ends
  // (e.g. "2026-01-31" + 1 month -> "2026-03-03"). We now do pure UTC
  // component arithmetic and clamp the day to the target month's last day
  // ("2026-01-31" + 1 month -> "2026-02-28"). Malformed dates yield null
  // instead of throwing from toISOString on an Invalid Date.
  const computeMaturityDate = (purchaseDate: string | null, termMonths: number | null): string | null => {
    if (!purchaseDate || !termMonths) return null;
    const [y, m, d] = purchaseDate.split('-').map(Number);
    if (!y || !m || !d) return null;
    const totalMonths = m - 1 + termMonths; // 0-based month index plus the term
    const targetYear = y + Math.floor(totalMonths / 12);
    const targetMonth = ((totalMonths % 12) + 12) % 12;
    // Day 0 of the following month is the last day of targetMonth.
    const lastDay = new Date(Date.UTC(targetYear, targetMonth + 1, 0)).getUTCDate();
    const target = new Date(Date.UTC(targetYear, targetMonth, Math.min(d, lastDay)));
    return target.toISOString().split('T')[0];
  };

  const startDate = dto.startDate || null; // ISO date string e.g. "2026-03-16"

  // If the recommendation has components (e.g. CD ladder with multiple CDs), create one row per component
  const components = dto.components as any[] | undefined;
  if (components && Array.isArray(components) && components.length > 0) {
    const results: any[] = [];
    for (let i = 0; i < components.length; i++) {
      const comp = components[i];
      const termMonths = comp.term_months || null;
      const maturityDate = computeMaturityDate(startDate, termMonths);
      const rows = await this.tenant.query(
        `INSERT INTO scenario_investments
          (scenario_id, source_recommendation_id, label, investment_type, fund_type,
           principal, interest_rate, term_months, institution, purchase_date, maturity_date,
           notes, sort_order)
         VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
         RETURNING *`,
        [
          scenarioId, dto.sourceRecommendationId || null,
          comp.label || `${dto.title || 'AI Recommendation'} - Part ${i + 1}`,
          comp.investment_type || dto.investmentType || null,
          dto.fundType || 'reserve',
          comp.amount || 0, comp.rate || null,
          termMonths, comp.bank_name || dto.bankName || null,
          startDate, maturityDate,
          dto.rationale || dto.notes || null,
          i, // component index doubles as sort_order to preserve ladder order
        ],
      );
      results.push(rows[0]);
    }
    await this.invalidateProjectionCache(scenarioId);
    return results;
  }

  // Single investment (no components)
  const termMonths = dto.termMonths || null;
  const maturityDate = computeMaturityDate(startDate, termMonths);
  const rows = await this.tenant.query(
    `INSERT INTO scenario_investments
      (scenario_id, source_recommendation_id, label, investment_type, fund_type,
       principal, interest_rate, term_months, institution, purchase_date, maturity_date, notes)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
     RETURNING *`,
    [
      scenarioId, dto.sourceRecommendationId || null,
      dto.title || dto.label || 'AI Recommendation',
      dto.investmentType || null, dto.fundType || 'reserve',
      dto.suggestedAmount || 0, dto.suggestedRate || null,
      termMonths, dto.bankName || null,
      startDate, maturityDate,
      dto.rationale || dto.notes || null,
    ],
  );
  await this.invalidateProjectionCache(scenarioId);
  return rows[0];
}
|
||||
|
||||
/**
 * Partially update a scenario investment; omitted fields are left untouched
 * via COALESCE (so fields cannot be cleared to NULL through this method).
 *
 * Invalidates the owning scenario's projection cache afterwards.
 *
 * @throws NotFoundException when the investment does not exist
 */
async updateInvestment(id: string, dto: any) {
  const inv = await this.getInvestmentRow(id);
  const rows = await this.tenant.query(
    `UPDATE scenario_investments SET
      label = COALESCE($2, label),
      investment_type = COALESCE($3, investment_type),
      fund_type = COALESCE($4, fund_type),
      principal = COALESCE($5, principal),
      interest_rate = COALESCE($6, interest_rate),
      term_months = COALESCE($7, term_months),
      institution = COALESCE($8, institution),
      purchase_date = COALESCE($9, purchase_date),
      maturity_date = COALESCE($10, maturity_date),
      auto_renew = COALESCE($11, auto_renew),
      notes = COALESCE($12, notes),
      sort_order = COALESCE($13, sort_order),
      updated_at = NOW()
     WHERE id = $1 RETURNING *`,
    [
      id, dto.label, dto.investmentType, dto.fundType,
      dto.principal, dto.interestRate, dto.termMonths,
      dto.institution, dto.purchaseDate, dto.maturityDate,
      dto.autoRenew, dto.notes, dto.sortOrder,
    ],
  );
  await this.invalidateProjectionCache(inv.scenario_id);
  return rows[0];
}
|
||||
|
||||
/**
 * Hard-delete a scenario investment and invalidate the owning scenario's
 * projection cache.
 *
 * @throws NotFoundException when the investment does not exist
 */
async removeInvestment(id: string) {
  const inv = await this.getInvestmentRow(id);
  await this.tenant.query('DELETE FROM scenario_investments WHERE id = $1', [id]);
  await this.invalidateProjectionCache(inv.scenario_id);
}
|
||||
|
||||
// ── Scenario Assessments ──
|
||||
|
||||
/**
 * List all assessment changes attached to a scenario, ordered for display
 * (sort_order first, then effective_date).
 */
async listAssessments(scenarioId: string) {
  return this.tenant.query(
    'SELECT * FROM scenario_assessments WHERE scenario_id = $1 ORDER BY sort_order, effective_date',
    [scenarioId],
  );
}
|
||||
|
||||
/**
 * Add an assessment change (percentage/flat/special) to a scenario.
 *
 * Defaults: target_fund 'operating', special_installments 1, sort_order 0.
 * Invalidates the scenario's projection cache afterwards.
 *
 * @param dto expects changeType, label, effectiveDate; remaining fields optional
 * @returns the inserted row
 * @throws NotFoundException when the scenario does not exist
 */
async addAssessment(scenarioId: string, dto: any) {
  await this.getScenarioRow(scenarioId);
  const rows = await this.tenant.query(
    `INSERT INTO scenario_assessments
      (scenario_id, change_type, label, target_fund, percentage_change,
       flat_amount_change, special_total, special_per_unit, special_installments,
       effective_date, end_date, applies_to_group_id, notes, sort_order)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
     RETURNING *`,
    [
      scenarioId, dto.changeType, dto.label, dto.targetFund || 'operating',
      dto.percentageChange || null, dto.flatAmountChange || null,
      dto.specialTotal || null, dto.specialPerUnit || null,
      dto.specialInstallments || 1, dto.effectiveDate,
      dto.endDate || null, dto.appliesToGroupId || null,
      dto.notes || null, dto.sortOrder || 0,
    ],
  );
  await this.invalidateProjectionCache(scenarioId);
  return rows[0];
}
|
||||
|
||||
/**
 * Partially update a scenario assessment; omitted fields are kept via
 * COALESCE (so fields cannot be cleared to NULL through this method).
 *
 * Invalidates the owning scenario's projection cache afterwards.
 *
 * @throws NotFoundException when the assessment does not exist
 */
async updateAssessment(id: string, dto: any) {
  const asmt = await this.getAssessmentRow(id);
  const rows = await this.tenant.query(
    `UPDATE scenario_assessments SET
      change_type = COALESCE($2, change_type),
      label = COALESCE($3, label),
      target_fund = COALESCE($4, target_fund),
      percentage_change = COALESCE($5, percentage_change),
      flat_amount_change = COALESCE($6, flat_amount_change),
      special_total = COALESCE($7, special_total),
      special_per_unit = COALESCE($8, special_per_unit),
      special_installments = COALESCE($9, special_installments),
      effective_date = COALESCE($10, effective_date),
      end_date = COALESCE($11, end_date),
      applies_to_group_id = COALESCE($12, applies_to_group_id),
      notes = COALESCE($13, notes),
      sort_order = COALESCE($14, sort_order),
      updated_at = NOW()
     WHERE id = $1 RETURNING *`,
    [
      id, dto.changeType, dto.label, dto.targetFund,
      dto.percentageChange, dto.flatAmountChange,
      dto.specialTotal, dto.specialPerUnit, dto.specialInstallments,
      dto.effectiveDate, dto.endDate, dto.appliesToGroupId,
      dto.notes, dto.sortOrder,
    ],
  );
  await this.invalidateProjectionCache(asmt.scenario_id);
  return rows[0];
}
|
||||
|
||||
/**
 * Hard-delete a scenario assessment and invalidate the owning scenario's
 * projection cache.
 *
 * @throws NotFoundException when the assessment does not exist
 */
async removeAssessment(id: string) {
  const asmt = await this.getAssessmentRow(id);
  await this.tenant.query('DELETE FROM scenario_assessments WHERE id = $1', [id]);
  await this.invalidateProjectionCache(asmt.scenario_id);
}
|
||||
|
||||
// ── Execute Investment (Story 1D) ──
|
||||
|
||||
/**
 * Execute a planned scenario investment as a real one (Story 1D):
 *  1. creates an investment_accounts row seeded from the scenario investment,
 *  2. best-effort posts a balanced 'transfer' journal entry at executionDate
 *     (credit the fund's primary asset account, debit the equity offset),
 *  3. links the scenario investment to the real one via executed_investment_id.
 *
 * NOTE(review): steps run outside a single transaction and the journal entry
 * is silently skipped when the fiscal period or either account is missing —
 * confirm this best-effort behavior is intended.
 *
 * @param executionDate ISO date string used as purchase_date and entry_date
 * @throws NotFoundException when the scenario investment does not exist
 * @throws BadRequestException when it was already executed
 */
async executeInvestment(investmentId: string, executionDate: string, userId: string) {
  const inv = await this.getInvestmentRow(investmentId);
  if (inv.executed_investment_id) {
    throw new BadRequestException('This investment has already been executed');
  }

  // 1. Create real investment_accounts record
  const invRows = await this.tenant.query(
    `INSERT INTO investment_accounts
      (name, institution, investment_type, fund_type, principal, interest_rate,
       maturity_date, purchase_date, current_value, notes, is_active)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, true)
     RETURNING *`,
    [
      inv.label, inv.institution, inv.investment_type || 'cd',
      inv.fund_type, inv.principal, inv.interest_rate || 0,
      // current_value starts equal to principal at purchase time
      inv.maturity_date, executionDate, inv.principal,
      `Executed from scenario investment. ${inv.notes || ''}`.trim(),
    ],
  );
  const realInvestment = invRows[0];

  // 2. Create journal entry at the execution date
  const entryDate = new Date(executionDate);
  const year = entryDate.getFullYear();
  const month = entryDate.getMonth() + 1;

  const periods = await this.tenant.query(
    'SELECT id FROM fiscal_periods WHERE year = $1 AND month = $2',
    [year, month],
  );
  if (periods.length) {
    // Primary cash/asset account for the investment's fund.
    const primaryRows = await this.tenant.query(
      `SELECT id, name FROM accounts WHERE is_primary = true AND fund_type = $1 AND is_active = true LIMIT 1`,
      [inv.fund_type],
    );
    // Equity offset: 3100 for reserve fund, 3000 otherwise (account numbers
    // assumed fixed by the chart-of-accounts seed — TODO confirm).
    const equityAccountNumber = inv.fund_type === 'reserve' ? '3100' : '3000';
    const equityRows = await this.tenant.query(
      'SELECT id FROM accounts WHERE account_number = $1',
      [equityAccountNumber],
    );

    if (primaryRows.length && equityRows.length) {
      const memo = `Transfer to investment: ${inv.label}`;
      const jeRows = await this.tenant.query(
        `INSERT INTO journal_entries (entry_date, description, entry_type, fiscal_period_id, is_posted, posted_at, created_by)
         VALUES ($1, $2, 'transfer', $3, true, NOW(), $4)
         RETURNING *`,
        [executionDate, memo, periods[0].id, userId],
      );
      const je = jeRows[0];
      // Credit primary asset account (reduces cash)
      await this.tenant.query(
        `INSERT INTO journal_entry_lines (journal_entry_id, account_id, debit, credit, memo)
         VALUES ($1, $2, 0, $3, $4)`,
        [je.id, primaryRows[0].id, inv.principal, memo],
      );
      // Debit equity offset account
      await this.tenant.query(
        `INSERT INTO journal_entry_lines (journal_entry_id, account_id, debit, credit, memo)
         VALUES ($1, $2, $3, 0, $4)`,
        [je.id, equityRows[0].id, inv.principal, memo],
      );
    }
  }

  // 3. Link back to scenario investment
  await this.tenant.query(
    `UPDATE scenario_investments SET executed_investment_id = $1, updated_at = NOW() WHERE id = $2`,
    [realInvestment.id, investmentId],
  );

  await this.invalidateProjectionCache(inv.scenario_id);
  return realInvestment;
}
|
||||
|
||||
// ── Helpers ──
|
||||
|
||||
/** Fetch a scenario row or throw NotFoundException. */
private async getScenarioRow(id: string) {
  const rows = await this.tenant.query('SELECT * FROM board_scenarios WHERE id = $1', [id]);
  if (!rows.length) throw new NotFoundException('Scenario not found');
  return rows[0];
}

/** Fetch a scenario-investment row or throw NotFoundException. */
private async getInvestmentRow(id: string) {
  const rows = await this.tenant.query('SELECT * FROM scenario_investments WHERE id = $1', [id]);
  if (!rows.length) throw new NotFoundException('Scenario investment not found');
  return rows[0];
}

/** Fetch a scenario-assessment row or throw NotFoundException. */
private async getAssessmentRow(id: string) {
  const rows = await this.tenant.query('SELECT * FROM scenario_assessments WHERE id = $1', [id]);
  if (!rows.length) throw new NotFoundException('Scenario assessment not found');
  return rows[0];
}

/**
 * Drop a scenario's cached projection so it is recomputed on next access.
 * Called after any mutation of investments or assessments.
 */
async invalidateProjectionCache(scenarioId: string) {
  await this.tenant.query(
    `UPDATE board_scenarios SET projection_cache = NULL, projection_cached_at = NULL, updated_at = NOW() WHERE id = $1`,
    [scenarioId],
  );
}
|
||||
}
|
||||
407
backend/src/modules/board-planning/budget-planning.service.ts
Normal file
407
backend/src/modules/board-planning/budget-planning.service.ts
Normal file
@@ -0,0 +1,407 @@
|
||||
import { Injectable, NotFoundException, BadRequestException } from '@nestjs/common';
|
||||
import { TenantService } from '../../database/tenant.service';
|
||||
|
||||
const monthCols = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec_amt'];
|
||||
|
||||
@Injectable()
|
||||
export class BudgetPlanningService {
|
||||
constructor(private tenant: TenantService) {}
|
||||
|
||||
// ── Plans CRUD ──
|
||||
|
||||
/**
 * List all budget plans ordered by fiscal year, each annotated with its
 * line_count (number of budget_plan_lines rows).
 */
async listPlans() {
  return this.tenant.query(
    `SELECT bp.*,
      (SELECT COUNT(*) FROM budget_plan_lines bpl WHERE bpl.budget_plan_id = bp.id) as line_count
     FROM budget_plans bp ORDER BY bp.fiscal_year`,
  );
}
|
||||
|
||||
/**
 * Fetch the budget plan for a fiscal year, with its lines joined to account
 * metadata (number, name, type, fund type), ordered by account number.
 *
 * @returns the plan with a `lines` array, or null when no plan exists
 *          (callers that must fail loudly do their own existence check)
 */
async getPlan(fiscalYear: number) {
  const plans = await this.tenant.query(
    'SELECT * FROM budget_plans WHERE fiscal_year = $1', [fiscalYear],
  );
  if (!plans.length) return null;

  const plan = plans[0];
  const lines = await this.tenant.query(
    `SELECT bpl.*, a.account_number, a.name as account_name, a.account_type, a.fund_type as account_fund_type
     FROM budget_plan_lines bpl
     JOIN accounts a ON a.id = bpl.account_id
     WHERE bpl.budget_plan_id = $1
     ORDER BY a.account_number`,
    [plan.id],
  );
  return { ...plan, lines };
}
|
||||
|
||||
/**
 * Compute which fiscal years are available for planning.
 *
 * Offers a 5-year window: starting the year AFTER the latest official budget
 * when one exists, otherwise starting at the current calendar year. Each year
 * is flagged with whether a plan already exists and its status.
 *
 * @returns { latestBudgetYear, years, existingPlans }
 */
async getAvailableYears() {
  // Find the latest year that has official budgets
  const result = await this.tenant.query(
    'SELECT MAX(fiscal_year) as max_year FROM budgets',
  );
  const rawMaxYear = result[0]?.max_year;
  const latestBudgetYear = rawMaxYear || null; // null means no budgets exist at all
  const baseYear = rawMaxYear || new Date().getFullYear();

  // Also find years that already have plans
  const existingPlans = await this.tenant.query(
    'SELECT fiscal_year, status FROM budget_plans ORDER BY fiscal_year',
  );
  const planYears = existingPlans.map((p: any) => ({
    year: p.fiscal_year,
    status: p.status,
  }));

  // Return next 5 years (or current year + 4 if no budgets exist)
  const years = [];
  const startOffset = rawMaxYear ? 1 : 0; // include current year if no budgets exist
  for (let i = startOffset; i <= startOffset + 4; i++) {
    const yr = baseYear + i;
    const existing = planYears.find((p: any) => p.year === yr);
    years.push({
      year: yr,
      hasPlan: !!existing,
      status: existing?.status || null,
    });
  }
  return { latestBudgetYear, years, existingPlans: planYears };
}
|
||||
|
||||
/**
 * Create a budget plan for a fiscal year and seed its lines by inflating the
 * base year's figures (see generateLines).
 *
 * @param inflationRate annual percentage, compounded over the year gap
 * @returns the full plan (with lines) via getPlan
 * @throws BadRequestException when a plan already exists for fiscalYear
 */
async createPlan(fiscalYear: number, baseYear: number, inflationRate: number, userId: string) {
  // Check no existing plan for this year
  const existing = await this.tenant.query(
    'SELECT id FROM budget_plans WHERE fiscal_year = $1', [fiscalYear],
  );
  if (existing.length) {
    throw new BadRequestException(`A budget plan already exists for ${fiscalYear}`);
  }

  // Create the plan
  const rows = await this.tenant.query(
    `INSERT INTO budget_plans (fiscal_year, base_year, inflation_rate, created_by)
     VALUES ($1, $2, $3, $4) RETURNING *`,
    [fiscalYear, baseYear, inflationRate, userId],
  );
  const plan = rows[0];

  // Generate inflated lines from base year
  await this.generateLines(plan.id, baseYear, inflationRate, fiscalYear);

  return this.getPlan(fiscalYear);
}
|
||||
|
||||
/**
 * (Re)generate a plan's lines by applying compound inflation to base-year
 * figures, while preserving manually-adjusted lines.
 *
 * Source preference: official `budgets` for baseYear; falls back to another
 * plan's `budget_plan_lines` (so plans can chain off unratified plans).
 * Lines the user edited by hand (is_manually_adjusted = true) are neither
 * deleted nor overwritten.
 *
 * @param inflationRate annual percentage; multiplier = (1 + rate/100)^gap
 */
async generateLines(planId: string, baseYear: number, inflationRate: number, fiscalYear: number) {
  // Delete existing non-manually-adjusted lines (or all if fresh)
  await this.tenant.query(
    'DELETE FROM budget_plan_lines WHERE budget_plan_id = $1 AND is_manually_adjusted = false',
    [planId],
  );

  // Try official budgets first, then fall back to budget_plan_lines for base year
  let baseLines = await this.tenant.query(
    `SELECT b.account_id, b.fund_type, ${monthCols.join(', ')}
     FROM budgets b WHERE b.fiscal_year = $1`,
    [baseYear],
  );

  if (!baseLines.length) {
    // Fall back to budget_plan_lines for base year (for chained plans)
    baseLines = await this.tenant.query(
      `SELECT bpl.account_id, bpl.fund_type, ${monthCols.join(', ')}
       FROM budget_plan_lines bpl
       JOIN budget_plans bp ON bp.id = bpl.budget_plan_id
       WHERE bp.fiscal_year = $1`,
      [baseYear],
    );
  }

  if (!baseLines.length) return;

  // Compound inflation: (1 + rate/100)^yearsGap
  // yearsGap floors at 1 so a same-year base still gets one year of inflation.
  const yearsGap = Math.max(1, fiscalYear - baseYear);
  const multiplier = Math.pow(1 + inflationRate / 100, yearsGap);

  // Get existing manually-adjusted lines to avoid duplicates
  const manualLines = await this.tenant.query(
    `SELECT account_id, fund_type FROM budget_plan_lines
     WHERE budget_plan_id = $1 AND is_manually_adjusted = true`,
    [planId],
  );
  const manualKeys = new Set(manualLines.map((l: any) => `${l.account_id}-${l.fund_type}`));

  for (const line of baseLines) {
    const key = `${line.account_id}-${line.fund_type}`;
    if (manualKeys.has(key)) continue; // Don't overwrite manual edits

    // Inflate each month and round to cents.
    const inflated = monthCols.map((m) => {
      const val = parseFloat(line[m]) || 0;
      return Math.round(val * multiplier * 100) / 100;
    });

    await this.tenant.query(
      `INSERT INTO budget_plan_lines (budget_plan_id, account_id, fund_type,
        jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec_amt)
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)
       ON CONFLICT (budget_plan_id, account_id, fund_type)
       DO UPDATE SET jan=$4, feb=$5, mar=$6, apr=$7, may=$8, jun=$9,
         jul=$10, aug=$11, sep=$12, oct=$13, nov=$14, dec_amt=$15,
         is_manually_adjusted=false`,
      [planId, line.account_id, line.fund_type, ...inflated],
    );
  }
}
|
||||
|
||||
/**
 * Upsert user-edited plan lines, marking each is_manually_adjusted so later
 * regeneration (generateLines) leaves them alone.
 *
 * @param lines each needs accountId, fundType, and month values keyed either
 *              by month name ('dec' maps onto the dec_amt column) or by the
 *              raw column name; missing months default to 0
 * @returns count of lines written
 */
async updateLines(planId: string, lines: any[]) {
  for (const line of lines) {
    const monthValues = monthCols.map((m) => {
      // The DB column is dec_amt ("dec" is awkward as an identifier), but the
      // API payload uses plain 'dec'; accept either.
      const key = m === 'dec_amt' ? 'dec' : m;
      return line[key] ?? line[m] ?? 0;
    });

    await this.tenant.query(
      `INSERT INTO budget_plan_lines (budget_plan_id, account_id, fund_type,
        jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec_amt, is_manually_adjusted)
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, true)
       ON CONFLICT (budget_plan_id, account_id, fund_type)
       DO UPDATE SET jan=$4, feb=$5, mar=$6, apr=$7, may=$8, jun=$9,
         jul=$10, aug=$11, sep=$12, oct=$13, nov=$14, dec_amt=$15,
         is_manually_adjusted=true`,
      [planId, line.accountId, line.fundType, ...monthValues],
    );
  }
  return { updated: lines.length };
}
|
||||
|
||||
/**
 * Change a plan's inflation rate and regenerate its auto-generated lines
 * (manually-adjusted lines are preserved by generateLines).
 *
 * @throws NotFoundException when the plan is missing
 * @throws BadRequestException when the plan is already ratified
 */
async updateInflation(fiscalYear: number, inflationRate: number) {
  const plans = await this.tenant.query(
    'SELECT * FROM budget_plans WHERE fiscal_year = $1', [fiscalYear],
  );
  if (!plans.length) throw new NotFoundException('Budget plan not found');

  const plan = plans[0];
  if (plan.status === 'ratified') {
    throw new BadRequestException('Cannot modify inflation on a ratified budget');
  }

  await this.tenant.query(
    'UPDATE budget_plans SET inflation_rate = $1, updated_at = NOW() WHERE fiscal_year = $2',
    [inflationRate, fiscalYear],
  );

  // Re-generate only non-manually-adjusted lines
  await this.generateLines(plan.id, plan.base_year, inflationRate, fiscalYear);

  return this.getPlan(fiscalYear);
}
|
||||
|
||||
/**
 * Move a plan through its status state machine:
 *   planning -> approved -> ratified (each step reversible one level back).
 *
 * Side effects:
 *  - approved: stamps approved_by/approved_at
 *  - ratified: stamps ratified_by/ratified_at and copies the plan's lines
 *    into official `budgets` (ratifyToOfficial)
 *  - ratified -> approved: deletes that year's official budgets
 *
 * @throws NotFoundException when the plan is missing
 * @throws BadRequestException for an invalid transition
 */
async advanceStatus(fiscalYear: number, newStatus: string, userId: string) {
  const plans = await this.tenant.query(
    'SELECT * FROM budget_plans WHERE fiscal_year = $1', [fiscalYear],
  );
  if (!plans.length) throw new NotFoundException('Budget plan not found');

  const plan = plans[0];
  // Allowed transitions, keyed by current status.
  const validTransitions: Record<string, string[]> = {
    planning: ['approved'],
    approved: ['planning', 'ratified'],
    ratified: ['approved'],
  };

  if (!validTransitions[plan.status]?.includes(newStatus)) {
    throw new BadRequestException(`Cannot transition from ${plan.status} to ${newStatus}`);
  }

  // If reverting from ratified, remove official budget
  if (plan.status === 'ratified' && newStatus === 'approved') {
    await this.tenant.query('DELETE FROM budgets WHERE fiscal_year = $1', [fiscalYear]);
  }

  // Build the UPDATE dynamically: only SET clauses come from a fixed
  // whitelist; user-supplied values go through parameters.
  const updates: string[] = ['status = $1', 'updated_at = NOW()'];
  const params: any[] = [newStatus];

  if (newStatus === 'approved') {
    updates.push(`approved_by = $${params.length + 1}`, `approved_at = NOW()`);
    params.push(userId);
  } else if (newStatus === 'ratified') {
    updates.push(`ratified_by = $${params.length + 1}`, `ratified_at = NOW()`);
    params.push(userId);
  }

  params.push(fiscalYear);
  await this.tenant.query(
    `UPDATE budget_plans SET ${updates.join(', ')} WHERE fiscal_year = $${params.length}`,
    params,
  );

  // If ratifying, copy to official budgets
  if (newStatus === 'ratified') {
    await this.ratifyToOfficial(plan.id, fiscalYear);
  }

  return this.getPlan(fiscalYear);
}
|
||||
|
||||
/**
 * Replace the official `budgets` rows for a fiscal year with a copy of the
 * plan's lines (delete-then-insert, done in SQL via INSERT ... SELECT).
 */
private async ratifyToOfficial(planId: string, fiscalYear: number) {
  // Clear existing official budgets for this year
  await this.tenant.query('DELETE FROM budgets WHERE fiscal_year = $1', [fiscalYear]);

  // Copy plan lines to official budgets
  await this.tenant.query(
    `INSERT INTO budgets (fiscal_year, account_id, fund_type,
      jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec_amt, notes)
     SELECT $1, bpl.account_id, bpl.fund_type,
       bpl.jan, bpl.feb, bpl.mar, bpl.apr, bpl.may, bpl.jun,
       bpl.jul, bpl.aug, bpl.sep, bpl.oct, bpl.nov, bpl.dec_amt, bpl.notes
     FROM budget_plan_lines bpl WHERE bpl.budget_plan_id = $2`,
    [fiscalYear, planId],
  );
}
|
||||
|
||||
/**
 * Bulk-import plan lines (e.g. from a CSV upload) for a fiscal year.
 *
 * Behavior:
 *  - creates the plan (base_year = fiscalYear, 0% inflation) if absent
 *  - resolves each row's account by account_number; when not found and a name
 *    is supplied, auto-creates the account with inferred type/fund
 *  - upserts each line as manually-adjusted so regeneration won't clobber it
 *  - collects per-row errors instead of aborting the whole import
 *
 * @param lines rows with accountNumber/account_number, optional name, and
 *              month values ('dec' maps to the dec_amt column)
 * @returns { imported, errors, created, plan }
 */
async importLines(fiscalYear: number, lines: any[], userId: string) {
  // Ensure plan exists (create if needed)
  let plans = await this.tenant.query(
    'SELECT * FROM budget_plans WHERE fiscal_year = $1', [fiscalYear],
  );
  if (!plans.length) {
    await this.tenant.query(
      `INSERT INTO budget_plans (fiscal_year, base_year, inflation_rate, created_by)
       VALUES ($1, $1, 0, $2) RETURNING *`,
      [fiscalYear, userId],
    );
    plans = await this.tenant.query(
      'SELECT * FROM budget_plans WHERE fiscal_year = $1', [fiscalYear],
    );
  }
  const plan = plans[0];
  const errors: string[] = [];
  const created: string[] = [];
  let imported = 0;

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    // Accept both camelCase (API) and snake_case (raw CSV header) keys.
    const accountNumber = String(line.accountNumber || line.account_number || '').trim();
    const accountName = String(line.accountName || line.account_name || '').trim();
    if (!accountNumber) {
      errors.push(`Row ${i + 1}: missing account_number`);
      continue;
    }

    let accounts = await this.tenant.query(
      `SELECT id, fund_type, account_type FROM accounts WHERE account_number = $1 AND is_active = true`,
      [accountNumber],
    );

    // Auto-create account if not found
    if ((!accounts || accounts.length === 0) && accountName) {
      const accountType = this.inferAccountType(accountNumber, accountName);
      const fundType = this.inferFundType(accountNumber, accountName);
      await this.tenant.query(
        `INSERT INTO accounts (account_number, name, account_type, fund_type, is_system)
         VALUES ($1, $2, $3, $4, false)`,
        [accountNumber, accountName, accountType, fundType],
      );
      // Re-select to pick up the generated id.
      accounts = await this.tenant.query(
        `SELECT id, fund_type, account_type FROM accounts WHERE account_number = $1 AND is_active = true`,
        [accountNumber],
      );
      created.push(`${accountNumber} - ${accountName} (${accountType}/${fundType})`);
    }

    if (!accounts || accounts.length === 0) {
      errors.push(`Row ${i + 1}: account "${accountNumber}" not found`);
      continue;
    }

    const account = accounts[0];
    const fundType = line.fund_type || account.fund_type || 'operating';
    const monthValues = monthCols.map((m) => {
      const key = m === 'dec_amt' ? 'dec' : m;
      return this.parseCurrency(line[key] ?? line[m] ?? 0);
    });

    await this.tenant.query(
      `INSERT INTO budget_plan_lines (budget_plan_id, account_id, fund_type,
        jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec_amt, is_manually_adjusted)
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, true)
       ON CONFLICT (budget_plan_id, account_id, fund_type)
       DO UPDATE SET jan=$4, feb=$5, mar=$6, apr=$7, may=$8, jun=$9,
         jul=$10, aug=$11, sep=$12, oct=$13, nov=$14, dec_amt=$15,
         is_manually_adjusted=true`,
      [plan.id, account.id, fundType, ...monthValues],
    );
    imported++;
  }

  return { imported, errors, created, plan: await this.getPlan(fiscalYear) };
}
|
||||
|
||||
/**
 * Build a CSV import template for a fiscal year: one row per active
 * income/expense account, pre-filled with that year's official budget values
 * (0 where no budget row exists).
 *
 * Account names containing commas are double-quoted; other CSV escaping
 * (embedded quotes/newlines) is not handled — assumes clean account names.
 *
 * @returns the CSV text (header + one line per account)
 */
async getTemplate(fiscalYear: number): Promise<string> {
  const rows = await this.tenant.query(
    `SELECT a.account_number, a.name as account_name,
      COALESCE(b.jan, 0) as jan, COALESCE(b.feb, 0) as feb,
      COALESCE(b.mar, 0) as mar, COALESCE(b.apr, 0) as apr,
      COALESCE(b.may, 0) as may, COALESCE(b.jun, 0) as jun,
      COALESCE(b.jul, 0) as jul, COALESCE(b.aug, 0) as aug,
      COALESCE(b.sep, 0) as sep, COALESCE(b.oct, 0) as oct,
      COALESCE(b.nov, 0) as nov, COALESCE(b.dec_amt, 0) as dec
     FROM accounts a
     LEFT JOIN budgets b ON b.account_id = a.id AND b.fiscal_year = $1
     WHERE a.is_active = true
       AND a.account_type IN ('income', 'expense')
     ORDER BY a.account_number`,
    [fiscalYear],
  );

  const header = 'account_number,account_name,jan,feb,mar,apr,may,jun,jul,aug,sep,oct,nov,dec';
  const csvLines = rows.map((r: any) => {
    // Quote names containing commas so the CSV stays parseable.
    const name = String(r.account_name).includes(',') ? `"${r.account_name}"` : r.account_name;
    return [r.account_number, name, r.jan, r.feb, r.mar, r.apr, r.may, r.jun, r.jul, r.aug, r.sep, r.oct, r.nov, r.dec].join(',');
  });
  return [header, ...csvLines].join('\n');
}
|
||||
|
||||
/**
 * Parse a spreadsheet-style currency cell into a number.
 *
 * Accepts raw numbers, strings like "$1,234.50", accounting-style negatives
 * "(123)", and treats blank / "-" / "$-" / "$ -" as zero. Anything
 * unparseable also yields 0 rather than NaN.
 */
private parseCurrency(val: string | number | undefined | null): number {
  if (val === undefined || val === null) return 0;
  if (typeof val === 'number') return val;

  const raw = String(val).trim();
  if (!raw || raw === '-' || raw === '$-' || raw === '$ -') return 0;

  // Parentheses around the value denote a negative amount (accounting style).
  const negative = raw.includes('(') && raw.includes(')');

  // Strip currency symbols, thousands separators, whitespace, and parens.
  const cleaned = raw.replace(/[$,\s()]/g, '');
  if (!cleaned || cleaned === '-') return 0;

  const parsed = parseFloat(cleaned);
  if (isNaN(parsed)) return 0;
  return negative ? -parsed : parsed;
}
|
||||
|
||||
/**
 * Guess an account's type from its number prefix and name.
 * 3000-3999 => income; names containing INCOME/REVENUE/ASSESSMENT => income;
 * everything else (including unparseable numbers) => expense.
 */
private inferAccountType(accountNumber: string, accountName: string): string {
  const prefix = parseInt(accountNumber.split('-')[0].trim(), 10);
  if (isNaN(prefix)) return 'expense';
  const nameUpper = (accountName || '').toUpperCase();
  if (prefix >= 3000 && prefix < 4000) return 'income';
  if (nameUpper.includes('INCOME') || nameUpper.includes('REVENUE') || nameUpper.includes('ASSESSMENT')) return 'income';
  return 'expense';
}

/**
 * Guess an account's fund from its number prefix and name.
 * Names containing RESERVE or prefixes 7000-7999 => reserve; else operating.
 */
private inferFundType(accountNumber: string, accountName: string): string {
  const prefix = parseInt(accountNumber.split('-')[0].trim(), 10);
  const nameUpper = (accountName || '').toUpperCase();
  if (nameUpper.includes('RESERVE')) return 'reserve';
  if (prefix >= 7000 && prefix < 8000) return 'reserve';
  return 'operating';
}
|
||||
|
||||
/**
 * Hard-delete a budget plan — only allowed while it is still in 'planning'
 * status (approved/ratified plans must be reverted first via advanceStatus).
 *
 * @throws NotFoundException when the plan is missing
 * @throws BadRequestException when the plan has left 'planning'
 */
async deletePlan(fiscalYear: number) {
  const plans = await this.tenant.query(
    'SELECT * FROM budget_plans WHERE fiscal_year = $1', [fiscalYear],
  );
  if (!plans.length) throw new NotFoundException('Budget plan not found');

  if (plans[0].status !== 'planning') {
    throw new BadRequestException('Can only delete plans in planning status');
  }

  await this.tenant.query('DELETE FROM budget_plans WHERE fiscal_year = $1', [fiscalYear]);
  return { deleted: true };
}
|
||||
}
|
||||
9
backend/src/modules/email/email.module.ts
Normal file
9
backend/src/modules/email/email.module.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { Module, Global } from '@nestjs/common';
|
||||
import { EmailService } from './email.service';
|
||||
|
||||
/**
 * Global Nest module exposing EmailService application-wide, so feature
 * modules can inject it without importing EmailModule themselves.
 */
@Global()
@Module({
  providers: [EmailService],
  exports: [EmailService],
})
export class EmailModule {}
|
||||
348
backend/src/modules/email/email.service.ts
Normal file
348
backend/src/modules/email/email.service.ts
Normal file
@@ -0,0 +1,348 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { DataSource } from 'typeorm';
|
||||
import { Resend } from 'resend';
|
||||
|
||||
@Injectable()
|
||||
export class EmailService {
|
||||
private readonly logger = new Logger(EmailService.name);
|
||||
private resend: Resend | null = null;
|
||||
private fromAddress: string;
|
||||
private replyToAddress: string;
|
||||
|
||||
/**
 * Wire up the Resend client (if configured) and resolve sender addresses.
 *
 * When RESEND_API_KEY is absent — or still a template placeholder — the
 * service runs in stub mode: emails are logged to the console and to
 * shared.email_log, but nothing is actually sent.
 */
constructor(
  private configService: ConfigService,
  private dataSource: DataSource,
) {
  const apiKey = this.configService.get<string>('RESEND_API_KEY');
  // Treat placeholder keys (e.g. copied from a template .env) as unconfigured.
  if (apiKey && !apiKey.includes('placeholder')) {
    this.resend = new Resend(apiKey);
    this.logger.log('Resend email service initialized');
  } else {
    this.logger.warn('Resend not configured — emails will be logged only (stub mode)');
  }
  this.fromAddress = this.configService.get<string>('RESEND_FROM_ADDRESS') || 'noreply@hoaledgeriq.com';
  this.replyToAddress = this.configService.get<string>('RESEND_REPLY_TO') || '';
}
|
||||
|
||||
// ─── Public API ──────────────────────────────────────────────
|
||||
|
||||
/**
 * Send the account-activation email containing a tokenized activation link
 * (link expiry — 72 hours — is enforced elsewhere; stated here in the copy).
 */
async sendActivationEmail(email: string, businessName: string, activationUrl: string): Promise<void> {
  const subject = `Activate your ${businessName} account on HOA LedgerIQ`;
  const html = this.buildTemplate({
    preheader: 'Your HOA LedgerIQ account is ready to activate.',
    heading: 'Welcome to HOA LedgerIQ!',
    body: `
<p>Your organization <strong>${this.esc(businessName)}</strong> has been created and is ready to go.</p>
<p>Click the button below to set your password and activate your account:</p>
`,
    ctaText: 'Activate My Account',
    ctaUrl: activationUrl,
    footer: 'This activation link expires in 72 hours. If you did not sign up for HOA LedgerIQ, please ignore this email.',
  });

  await this.send(email, subject, html, 'activation', { businessName, activationUrl });
}
|
||||
|
||||
/** Send the post-activation welcome email with a link to the dashboard. */
async sendWelcomeEmail(email: string, businessName: string): Promise<void> {
  const appUrl = this.configService.get<string>('APP_URL') || 'https://app.hoaledgeriq.com';
  const subject = `Welcome to HOA LedgerIQ — ${businessName}`;
  const html = this.buildTemplate({
    preheader: `${businessName} is all set up on HOA LedgerIQ.`,
    heading: `You're all set!`,
    body: `
<p>Your account for <strong>${this.esc(businessName)}</strong> is now active.</p>
<p>Log in to start managing your HOA's finances, assessments, and investments — all in one place.</p>
`,
    ctaText: 'Go to Dashboard',
    ctaUrl: `${appUrl}/dashboard`,
    footer: 'If you have any questions, just reply to this email and we\'ll help you get started.',
  });

  await this.send(email, subject, html, 'welcome', { businessName });
}
|
||||
|
||||
/** Notify the billing contact that a payment failed, linking to settings. */
async sendPaymentFailedEmail(email: string, businessName: string): Promise<void> {
  const subject = `Action required: Payment failed for ${businessName}`;
  const html = this.buildTemplate({
    preheader: 'We were unable to process your payment.',
    heading: 'Payment Failed',
    body: `
<p>We were unable to process the latest payment for <strong>${this.esc(businessName)}</strong>.</p>
<p>Please update your payment method to avoid any interruption to your service.</p>
`,
    ctaText: 'Update Payment Method',
    ctaUrl: `${this.configService.get<string>('APP_URL') || 'https://app.hoaledgeriq.com'}/settings`,
    footer: 'If you believe this is an error, please reply to this email and we\'ll look into it.',
  });

  await this.send(email, subject, html, 'payment_failed', { businessName });
}
|
||||
|
||||
/** Send an organization-membership invitation with a tokenized accept link. */
async sendInviteMemberEmail(email: string, orgName: string, inviteUrl: string): Promise<void> {
  const subject = `You've been invited to ${orgName} on HOA LedgerIQ`;
  const html = this.buildTemplate({
    preheader: `Join ${orgName} on HOA LedgerIQ.`,
    heading: 'You\'re Invited!',
    body: `
<p>You've been invited to join <strong>${this.esc(orgName)}</strong> on HOA LedgerIQ.</p>
<p>Click below to accept the invitation and set up your account:</p>
`,
    ctaText: 'Accept Invitation',
    ctaUrl: inviteUrl,
    footer: 'This invitation link expires in 7 days. If you were not expecting this, please ignore this email.',
  });

  await this.send(email, subject, html, 'invite_member', { orgName, inviteUrl });
}
|
||||
|
||||
/**
 * Warn that the free trial ends in `daysRemaining` days; `settingsUrl` is
 * supplied by the caller (unlike the other methods, which derive it from APP_URL).
 */
async sendTrialEndingEmail(email: string, businessName: string, daysRemaining: number, settingsUrl: string): Promise<void> {
  const subject = `Your free trial ends in ${daysRemaining} days — ${businessName}`;
  const html = this.buildTemplate({
    preheader: `Your HOA LedgerIQ trial for ${businessName} is ending soon.`,
    heading: `Your Trial Ends in ${daysRemaining} Days`,
    body: `
<p>Your free trial for <strong>${this.esc(businessName)}</strong> on HOA LedgerIQ ends in <strong>${daysRemaining} days</strong>.</p>
<p>To continue using all features without interruption, add a payment method before your trial expires.</p>
<p>If you don't add a payment method, your account will become read-only and you won't be able to make changes to your data.</p>
`,
    ctaText: 'Add Payment Method',
    ctaUrl: settingsUrl,
    footer: 'If you have any questions about plans or pricing, just reply to this email.',
  });

  await this.send(email, subject, html, 'trial_ending', { businessName, daysRemaining, settingsUrl });
}
|
||||
|
||||
/** Tell the user their trial has ended, linking to the pricing page. */
async sendTrialExpiredEmail(email: string, businessName: string): Promise<void> {
  const appUrl = this.configService.get<string>('APP_URL') || 'https://app.hoaledgeriq.com';
  const subject = `Your free trial has ended — ${businessName}`;
  const html = this.buildTemplate({
    preheader: `Your HOA LedgerIQ trial for ${businessName} has ended.`,
    heading: 'Your Trial Has Ended',
    body: `
<p>The free trial for <strong>${this.esc(businessName)}</strong> on HOA LedgerIQ has ended.</p>
<p>Your data is safe and your account is preserved. Subscribe to a plan to regain full access to your HOA financial management tools.</p>
`,
    ctaText: 'Choose a Plan',
    ctaUrl: `${appUrl}/pricing`,
    footer: 'Your data will be preserved. You can reactivate your account at any time by subscribing to a plan.',
  });

  await this.send(email, subject, html, 'trial_expired', { businessName });
}
|
||||
|
||||
/**
 * Welcome a newly added member. The copy references a temporary password
 * issued by the administrator; that credential is created elsewhere — this
 * method only sends the notification.
 */
async sendNewMemberWelcomeEmail(
  email: string,
  firstName: string,
  orgName: string,
): Promise<void> {
  const appUrl = this.configService.get<string>('APP_URL') || 'https://app.hoaledgeriq.com';
  const subject = `Welcome to ${orgName} on HOA LedgerIQ`;
  const html = this.buildTemplate({
    preheader: `Your account for ${orgName} on HOA LedgerIQ is ready.`,
    heading: `Welcome, ${this.esc(firstName)}!`,
    body: `
<p>You've been added as a member of <strong>${this.esc(orgName)}</strong> on HOA LedgerIQ.</p>
<p>Your account is ready to use. Log in with your email address and the temporary password provided by your administrator. You'll be able to change your password after logging in.</p>
<p>HOA LedgerIQ gives you access to your community's financial dashboard, budgets, reports, and more.</p>
`,
    ctaText: 'Log In Now',
    ctaUrl: `${appUrl}/login`,
    footer: 'If you were not expecting this email, please contact your HOA administrator.',
  });

  await this.send(email, subject, html, 'new_member_welcome', { orgName, firstName });
}
|
||||
|
||||
/** Send a password-reset email (1-hour link expiry per the copy; enforced elsewhere). */
async sendPasswordResetEmail(email: string, resetUrl: string): Promise<void> {
  const subject = 'Reset your HOA LedgerIQ password';
  const html = this.buildTemplate({
    preheader: 'Password reset requested for your HOA LedgerIQ account.',
    heading: 'Password Reset',
    body: `
<p>We received a request to reset your password. Click the button below to choose a new one:</p>
`,
    ctaText: 'Reset Password',
    ctaUrl: resetUrl,
    footer: 'This link expires in 1 hour. If you did not request a password reset, please ignore this email — your password will remain unchanged.',
  });

  await this.send(email, subject, html, 'password_reset', { resetUrl });
}
|
||||
|
||||
// ─── Core send logic ────────────────────────────────────────
|
||||
|
||||
/**
 * Send an email via Resend, with a database audit trail.
 *
 * Flow: always INSERT an audit row into shared.email_log first; then either
 * log-and-return (stub mode, no Resend client) or call the Resend API and
 * record the outcome onto that row via updateLogStatus ('sent' with the
 * provider message id, or 'failed' with the error message).
 *
 * Never throws — all send failures are logged, not propagated to callers.
 */
private async send(
  toEmail: string,
  subject: string,
  html: string,
  template: string,
  metadata: Record<string, any>,
): Promise<void> {
  // Always log to the database
  await this.log(toEmail, subject, html, template, metadata);

  if (!this.resend) {
    this.logger.log(`📧 EMAIL STUB → ${toEmail}`);
    this.logger.log(`   Subject: ${subject}`);
    return;
  }

  try {
    const result = await this.resend.emails.send({
      from: this.fromAddress,
      to: [toEmail],
      // Resend rejects empty strings; undefined omits the header entirely.
      replyTo: this.replyToAddress || undefined,
      subject,
      html,
    });

    // Resend reports API-level failures on result.error rather than throwing.
    if (result.error) {
      this.logger.error(`Resend error for ${toEmail}: ${JSON.stringify(result.error)}`);
      await this.updateLogStatus(toEmail, template, 'failed', result.error.message);
    } else {
      this.logger.log(`✅ Email sent to ${toEmail} (id: ${result.data?.id})`);
      await this.updateLogStatus(toEmail, template, 'sent', result.data?.id);
    }
  } catch (err: any) {
    this.logger.error(`Failed to send email to ${toEmail}: ${err.message}`);
    await this.updateLogStatus(toEmail, template, 'failed', err.message);
  }
}
|
||||
|
||||
// ─── Database logging ───────────────────────────────────────
|
||||
|
||||
/**
 * Insert an audit row into shared.email_log for a send attempt.
 * Best-effort: a logging failure is warned about but never propagated,
 * so a broken audit table cannot block outgoing email.
 */
private async log(
  toEmail: string,
  subject: string,
  body: string,
  template: string,
  metadata: Record<string, any>,
): Promise<void> {
  try {
    await this.dataSource.query(
      `INSERT INTO shared.email_log (to_email, subject, body, template, metadata)
       VALUES ($1, $2, $3, $4, $5)`,
      [toEmail, subject, body, template, JSON.stringify(metadata)],
    );
  } catch (err) {
    this.logger.warn(`Failed to log email: ${err}`);
  }
}
|
||||
|
||||
/**
 * Attach a send outcome to the most recent log row for (to_email, template).
 *
 * Merges {send_status, send_detail} into the row's JSONB metadata. Targets
 * the newest row by created_at; NOTE(review): with concurrent sends to the
 * same recipient/template this could tag the wrong row — likely acceptable
 * for an audit log, but confirm.
 *
 * Best-effort: any failure is swallowed so it never blocks the send flow.
 */
private async updateLogStatus(toEmail: string, template: string, status: string, detail?: string): Promise<void> {
  try {
    await this.dataSource.query(
      `UPDATE shared.email_log
       SET metadata = metadata || $1::jsonb
       WHERE to_email = $2 AND template = $3
       AND created_at = (
         SELECT MAX(created_at) FROM shared.email_log
         WHERE to_email = $2 AND template = $3
       )`,
      [JSON.stringify({ send_status: status, send_detail: detail || '' }), toEmail, template],
    );
  } catch {
    // Best effort — don't block the flow
  }
}
|
||||
|
||||
// ─── HTML email template ────────────────────────────────────
|
||||
|
||||
/**
 * Escape HTML special characters for safe interpolation into the email
 * template's text content.
 *
 * BUG FIX: the previous version replaced each character with itself
 * (`&` → `&`, `<` → `<`, `>` → `>`), i.e. it escaped nothing — the entity
 * text had been lost. Ampersand must be replaced first so the entities
 * produced by the later replacements are not double-escaped. Quote and
 * apostrophe are also escaped so the output is safe even if a caller ever
 * interpolates it into an attribute value; in text content they render
 * identically.
 */
private esc(text: string): string {
  return text
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}
|
||||
|
||||
/**
 * Render the shared branded HTML email layout (table-based, email-client safe).
 *
 * `preheader`, `heading`, `ctaText`, and `footer` are HTML-escaped here via
 * esc(). `body` is interpolated as RAW HTML — callers must escape any
 * user-provided text inside it themselves. `ctaUrl` is also interpolated
 * unescaped into href attributes; NOTE(review): callers pass app-generated
 * URLs, but confirm none can carry user-controlled characters.
 */
private buildTemplate(opts: {
  preheader: string;
  heading: string;
  body: string;
  ctaText: string;
  ctaUrl: string;
  footer: string;
}): string {
  return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>${this.esc(opts.heading)}</title>
<!--[if mso]><noscript><xml><o:OfficeDocumentSettings><o:PixelsPerInch>96</o:PixelsPerInch></o:OfficeDocumentSettings></xml></noscript><![endif]-->
</head>
<body style="margin:0;padding:0;background-color:#f4f5f7;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,sans-serif;">
<!-- Preheader (hidden preview text) -->
<div style="display:none;max-height:0;overflow:hidden;">${this.esc(opts.preheader)}</div>

<table role="presentation" width="100%" cellpadding="0" cellspacing="0" style="background-color:#f4f5f7;padding:24px 0;">
<tr>
<td align="center">
<table role="presentation" width="600" cellpadding="0" cellspacing="0" style="max-width:600px;width:100%;">

<!-- Logo bar -->
<tr>
<td align="center" style="padding:24px 0 16px;">
<span style="font-size:22px;font-weight:700;color:#1a73e8;letter-spacing:-0.5px;">
HOA LedgerIQ
</span>
</td>
</tr>

<!-- Main card -->
<tr>
<td>
<table role="presentation" width="100%" cellpadding="0" cellspacing="0"
style="background-color:#ffffff;border-radius:8px;overflow:hidden;box-shadow:0 1px 3px rgba(0,0,0,0.08);">
<tr>
<td style="padding:40px 32px;">
<h1 style="margin:0 0 16px;font-size:24px;font-weight:700;color:#1a1a2e;">
${this.esc(opts.heading)}
</h1>
<div style="font-size:15px;line-height:1.6;color:#4a4a68;">
${opts.body}
</div>

<!-- CTA Button -->
<table role="presentation" cellpadding="0" cellspacing="0" style="margin:28px 0 8px;">
<tr>
<td align="center" style="background-color:#1a73e8;border-radius:6px;">
<a href="${opts.ctaUrl}"
target="_blank"
style="display:inline-block;padding:14px 32px;color:#ffffff;font-size:15px;font-weight:600;text-decoration:none;border-radius:6px;">
${this.esc(opts.ctaText)}
</a>
</td>
</tr>
</table>

<!-- Fallback URL -->
<p style="font-size:12px;color:#999;word-break:break-all;margin-top:16px;">
If the button doesn't work, copy and paste this link into your browser:<br>
<a href="${opts.ctaUrl}" style="color:#1a73e8;">${opts.ctaUrl}</a>
</p>
</td>
</tr>
</table>
</td>
</tr>

<!-- Footer -->
<tr>
<td style="padding:24px 32px;text-align:center;">
<p style="font-size:12px;color:#999;line-height:1.5;margin:0;">
${this.esc(opts.footer)}
</p>
<p style="font-size:12px;color:#bbb;margin:12px 0 0;">
© ${new Date().getFullYear()} HOA LedgerIQ — Smart Financial Management for HOAs
</p>
</td>
</tr>

</table>
</td>
</tr>
</table>
</body>
</html>`;
}
|
||||
}
|
||||
@@ -0,0 +1,76 @@
|
||||
import { Controller, Get, Post, UseGuards, Req, Logger } from '@nestjs/common';
|
||||
import { ApiTags, ApiBearerAuth, ApiOperation } from '@nestjs/swagger';
|
||||
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
|
||||
import { AllowViewer } from '../../common/decorators/allow-viewer.decorator';
|
||||
import { HealthScoresService } from './health-scores.service';
|
||||
|
||||
/**
 * Tenant-scoped health score endpoints.
 *
 * GET returns the latest persisted scores; the POST endpoints kick off
 * recalculation in the background and return immediately, so clients are
 * expected to poll GET /latest for results.
 */
@ApiTags('health-scores')
@Controller('health-scores')
@ApiBearerAuth()
@UseGuards(JwtAuthGuard)
export class HealthScoresController {
  private readonly logger = new Logger(HealthScoresController.name);

  constructor(private service: HealthScoresService) {}

  /** Latest stored operating + reserve scores for the caller's tenant schema. */
  @Get('latest')
  @ApiOperation({ summary: 'Get latest operating and reserve health scores' })
  getLatest(@Req() req: any) {
    const schema = req.tenantSchema;
    return this.service.getLatestScores(schema);
  }

  // NOTE(review): @AllowViewer on the POST endpoints lets read-only users
  // trigger recalculation (which writes stored scores) — confirm intentional.
  @Post('calculate')
  @ApiOperation({ summary: 'Trigger both health score recalculations (async — returns immediately)' })
  @AllowViewer()
  async calculate(@Req() req: any) {
    const schema = req.tenantSchema;

    // Fire-and-forget — background processing saves results to DB
    Promise.all([
      this.service.calculateScore(schema, 'operating'),
      this.service.calculateScore(schema, 'reserve'),
    ]).catch((err) => {
      this.logger.error(`Background health score calculation failed: ${err.message}`);
    });

    return {
      status: 'processing',
      message: 'Health score calculations started. Results will appear when ready.',
    };
  }

  /** Recalculate only the operating-fund score (async, fire-and-forget). */
  @Post('calculate/operating')
  @ApiOperation({ summary: 'Trigger operating fund health score recalculation (async)' })
  @AllowViewer()
  async calculateOperating(@Req() req: any) {
    const schema = req.tenantSchema;

    // Fire-and-forget
    this.service.calculateScore(schema, 'operating').catch((err) => {
      this.logger.error(`Background operating score failed: ${err.message}`);
    });

    return {
      status: 'processing',
      message: 'Operating fund health score calculation started.',
    };
  }

  /** Recalculate only the reserve-fund score (async, fire-and-forget). */
  @Post('calculate/reserve')
  @ApiOperation({ summary: 'Trigger reserve fund health score recalculation (async)' })
  @AllowViewer()
  async calculateReserve(@Req() req: any) {
    const schema = req.tenantSchema;

    // Fire-and-forget
    this.service.calculateScore(schema, 'reserve').catch((err) => {
      this.logger.error(`Background reserve score failed: ${err.message}`);
    });

    return {
      status: 'processing',
      message: 'Reserve fund health score calculation started.',
    };
  }
}
|
||||
10
backend/src/modules/health-scores/health-scores.module.ts
Normal file
10
backend/src/modules/health-scores/health-scores.module.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { HealthScoresController } from './health-scores.controller';
|
||||
import { HealthScoresService } from './health-scores.service';
|
||||
import { HealthScoresScheduler } from './health-scores.scheduler';
|
||||
|
||||
/** Wires the health-scores HTTP API together with its nightly scheduler. */
@Module({
  controllers: [HealthScoresController],
  providers: [HealthScoresService, HealthScoresScheduler],
})
export class HealthScoresModule {}
|
||||
54
backend/src/modules/health-scores/health-scores.scheduler.ts
Normal file
54
backend/src/modules/health-scores/health-scores.scheduler.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Cron, CronExpression } from '@nestjs/schedule';
|
||||
import { DataSource } from 'typeorm';
|
||||
import { HealthScoresService } from './health-scores.service';
|
||||
|
||||
/**
 * Nightly batch job that recomputes health scores for every active tenant.
 */
@Injectable()
export class HealthScoresScheduler {
  private readonly logger = new Logger(HealthScoresScheduler.name);

  constructor(
    private dataSource: DataSource,
    private healthScoresService: HealthScoresService,
  ) {}

  /**
   * Run daily at 2:00 AM — calculate health scores for all active tenants.
   * Uses DataSource directly to list tenants (no HTTP request context needed).
   *
   * Tenants are processed sequentially; a single tenant's failure is counted
   * and logged but does not stop the rest of the batch.
   */
  @Cron('0 2 * * *')
  async calculateAllTenantScores() {
    this.logger.log('Starting daily health score calculation for all tenants...');
    const startTime = Date.now();

    try {
      const orgs = await this.dataSource.query(
        `SELECT id, name, schema_name FROM shared.organizations WHERE status = 'active'`,
      );

      this.logger.log(`Found ${orgs.length} active tenants`);

      let successCount = 0;
      let errorCount = 0;

      for (const org of orgs) {
        try {
          await this.healthScoresService.calculateScore(org.schema_name, 'operating');
          await this.healthScoresService.calculateScore(org.schema_name, 'reserve');
          successCount++;
          this.logger.log(`Health scores calculated for ${org.name} (${org.schema_name})`);
        } catch (err: any) {
          errorCount++;
          this.logger.error(`Failed to calculate health scores for ${org.name}: ${err.message}`);
        }
      }

      const elapsed = Date.now() - startTime;
      this.logger.log(
        `Daily health scores complete: ${successCount} success, ${errorCount} errors (${elapsed}ms)`,
      );
    } catch (err: any) {
      this.logger.error(`Health score scheduler failed: ${err.message}`);
    }
  }
}
|
||||
1215
backend/src/modules/health-scores/health-scores.service.ts
Normal file
1215
backend/src/modules/health-scores/health-scores.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
12
backend/src/modules/ideas/dto/create-idea.dto.ts
Normal file
12
backend/src/modules/ideas/dto/create-idea.dto.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { IsString, IsNotEmpty, IsOptional, MaxLength } from 'class-validator';
|
||||
|
||||
/** Payload for submitting a new product idea. */
export class CreateIdeaDto {
  // Short headline for the idea; max 255 chars to fit the DB column.
  @IsString()
  @IsNotEmpty()
  @MaxLength(255)
  title: string;

  // Optional free-form details.
  @IsString()
  @IsOptional()
  description?: string;
}
|
||||
49
backend/src/modules/ideas/entities/idea.entity.ts
Normal file
49
backend/src/modules/ideas/entities/idea.entity.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import {
|
||||
Entity,
|
||||
PrimaryGeneratedColumn,
|
||||
Column,
|
||||
CreateDateColumn,
|
||||
UpdateDateColumn,
|
||||
ManyToOne,
|
||||
JoinColumn,
|
||||
} from 'typeorm';
|
||||
import { Organization } from '../../organizations/entities/organization.entity';
|
||||
import { User } from '../../users/entities/user.entity';
|
||||
|
||||
/**
 * Product idea submitted by a tenant user.
 * Stored in the shared schema so platform admins can review ideas across
 * all organizations.
 */
@Entity({ schema: 'shared', name: 'ideas' })
export class Idea {
  @PrimaryGeneratedColumn('uuid')
  id: string;

  // Submitting organization (FK via the relation below).
  @Column({ name: 'org_id' })
  orgId: string;

  // Submitting user (FK via the relation below).
  @Column({ name: 'user_id' })
  userId: string;

  @Column({ length: 255 })
  title: string;

  @Column({ type: 'text', nullable: true })
  description: string;

  // Review workflow state; IdeasService accepts: new, reviewed, accepted, rejected.
  @Column({ length: 20, default: 'new' })
  status: string;

  // Internal note left by a platform admin during review.
  @Column({ name: 'admin_note', type: 'text', nullable: true })
  adminNote: string;

  @CreateDateColumn({ name: 'created_at', type: 'timestamptz' })
  createdAt: Date;

  @UpdateDateColumn({ name: 'updated_at', type: 'timestamptz' })
  updatedAt: Date;

  @ManyToOne(() => Organization)
  @JoinColumn({ name: 'org_id' })
  organization: Organization;

  @ManyToOne(() => User)
  @JoinColumn({ name: 'user_id' })
  user: User;
}
|
||||
27
backend/src/modules/ideas/ideas.controller.ts
Normal file
27
backend/src/modules/ideas/ideas.controller.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { Controller, Get, Post, Body, Req, UseGuards } from '@nestjs/common';
|
||||
import { ApiTags, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
|
||||
import { IdeasService } from './ideas.service';
|
||||
import { CreateIdeaDto } from './dto/create-idea.dto';
|
||||
|
||||
/**
 * Tenant-facing idea endpoints: submit an idea and list the caller's org's
 * ideas. Admin review operations exist on IdeasService but are not routed here.
 */
@ApiTags('ideas')
@Controller('ideas')
@ApiBearerAuth()
@UseGuards(JwtAuthGuard)
export class IdeasController {
  constructor(private ideasService: IdeasService) {}

  /** Submit a new idea on behalf of the authenticated user's organization. */
  @Post()
  async create(@Req() req: any, @Body() dto: CreateIdeaDto) {
    const orgId = req.user.orgId;
    // NOTE(review): falls back to the JWT `sub` claim when userId is absent —
    // confirm which field the auth strategy actually populates.
    const userId = req.user.userId || req.user.sub;
    const idea = await this.ideasService.create(orgId, userId, dto);
    return { success: true, idea };
  }

  /** List the caller's organization's ideas, newest first. */
  @Get()
  async findByOrg(@Req() req: any) {
    const orgId = req.user.orgId;
    return this.ideasService.findByOrg(orgId);
  }
}
|
||||
14
backend/src/modules/ideas/ideas.module.ts
Normal file
14
backend/src/modules/ideas/ideas.module.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { TypeOrmModule } from '@nestjs/typeorm';
|
||||
import { Idea } from './entities/idea.entity';
|
||||
import { Organization } from '../organizations/entities/organization.entity';
|
||||
import { IdeasController } from './ideas.controller';
|
||||
import { IdeasService } from './ideas.service';
|
||||
|
||||
/**
 * Ideas feature module. Registers the Organization repository as well,
 * because IdeasService checks the org's ideation feature flag on create.
 */
@Module({
  imports: [TypeOrmModule.forFeature([Idea, Organization])],
  controllers: [IdeasController],
  providers: [IdeasService],
  exports: [IdeasService],
})
export class IdeasModule {}
|
||||
89
backend/src/modules/ideas/ideas.service.ts
Normal file
89
backend/src/modules/ideas/ideas.service.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { Injectable, ForbiddenException, NotFoundException, BadRequestException } from '@nestjs/common';
|
||||
import { InjectRepository } from '@nestjs/typeorm';
|
||||
import { Repository } from 'typeorm';
|
||||
import { Idea } from './entities/idea.entity';
|
||||
import { Organization } from '../organizations/entities/organization.entity';
|
||||
import { CreateIdeaDto } from './dto/create-idea.dto';
|
||||
|
||||
@Injectable()
|
||||
export class IdeasService {
|
||||
constructor(
|
||||
@InjectRepository(Idea)
|
||||
private ideasRepository: Repository<Idea>,
|
||||
@InjectRepository(Organization)
|
||||
private orgRepository: Repository<Organization>,
|
||||
) {}
|
||||
|
||||
async create(orgId: string, userId: string, dto: CreateIdeaDto): Promise<Idea> {
|
||||
const org = await this.orgRepository.findOne({ where: { id: orgId } });
|
||||
if (!org) {
|
||||
throw new NotFoundException('Organization not found');
|
||||
}
|
||||
if (org.settings?.ideationEnabled !== true) {
|
||||
throw new ForbiddenException('Ideation is not enabled for this organization');
|
||||
}
|
||||
|
||||
const idea = this.ideasRepository.create({
|
||||
orgId,
|
||||
userId,
|
||||
title: dto.title,
|
||||
description: dto.description,
|
||||
});
|
||||
return this.ideasRepository.save(idea);
|
||||
}
|
||||
|
||||
async findByOrg(orgId: string): Promise<Idea[]> {
|
||||
return this.ideasRepository.find({
|
||||
where: { orgId },
|
||||
order: { createdAt: 'DESC' },
|
||||
});
|
||||
}
|
||||
|
||||
async findAll(): Promise<any[]> {
|
||||
return this.ideasRepository
|
||||
.createQueryBuilder('idea')
|
||||
.leftJoin('idea.organization', 'org')
|
||||
.leftJoin('idea.user', 'user')
|
||||
.select([
|
||||
'idea.id AS id',
|
||||
'idea.title AS title',
|
||||
'idea.description AS description',
|
||||
'idea.status AS status',
|
||||
'idea.createdAt AS "createdAt"',
|
||||
'idea.adminNote AS "adminNote"',
|
||||
'org.id AS "orgId"',
|
||||
'org.name AS "orgName"',
|
||||
'user.id AS "userId"',
|
||||
'user.email AS "userEmail"',
|
||||
'user.firstName AS "userFirstName"',
|
||||
'user.lastName AS "userLastName"',
|
||||
])
|
||||
.orderBy('idea.createdAt', 'DESC')
|
||||
.getRawMany();
|
||||
}
|
||||
|
||||
async updateStatus(id: string, status: string): Promise<Idea> {
|
||||
const validStatuses = ['new', 'reviewed', 'accepted', 'rejected'];
|
||||
if (!validStatuses.includes(status)) {
|
||||
throw new BadRequestException(`Invalid status. Must be one of: ${validStatuses.join(', ')}`);
|
||||
}
|
||||
|
||||
const idea = await this.ideasRepository.findOne({ where: { id } });
|
||||
if (!idea) {
|
||||
throw new NotFoundException('Idea not found');
|
||||
}
|
||||
|
||||
idea.status = status;
|
||||
return this.ideasRepository.save(idea);
|
||||
}
|
||||
|
||||
async updateNote(id: string, adminNote: string): Promise<Idea> {
|
||||
const idea = await this.ideasRepository.findOne({ where: { id } });
|
||||
if (!idea) {
|
||||
throw new NotFoundException('Idea not found');
|
||||
}
|
||||
|
||||
idea.adminNote = adminNote;
|
||||
return this.ideasRepository.save(idea);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,44 @@
|
||||
import { Controller, Get, Post, UseGuards, Req } from '@nestjs/common';
|
||||
import { ApiTags, ApiBearerAuth, ApiOperation } from '@nestjs/swagger';
|
||||
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
|
||||
import { AllowViewer } from '../../common/decorators/allow-viewer.decorator';
|
||||
import { InvestmentPlanningService } from './investment-planning.service';
|
||||
|
||||
/**
 * Investment-planning endpoints for the current tenant: financial snapshot,
 * market-rate data, and retrieval/triggering of AI recommendations.
 */
@ApiTags('investment-planning')
@Controller('investment-planning')
@ApiBearerAuth()
@UseGuards(JwtAuthGuard)
export class InvestmentPlanningController {
  constructor(private service: InvestmentPlanningService) {}

  @Get('snapshot')
  @ApiOperation({ summary: 'Get financial snapshot for investment planning' })
  getSnapshot() {
    return this.service.getFinancialSnapshot();
  }

  // Kept for older clients; market-rates below supersedes this endpoint.
  @Get('cd-rates')
  @ApiOperation({ summary: 'Get latest CD rates from market data (backward compat)' })
  getCdRates() {
    return this.service.getCdRates();
  }

  @Get('market-rates')
  @ApiOperation({ summary: 'Get all market rates grouped by type (CD, Money Market, High Yield Savings)' })
  getMarketRates() {
    return this.service.getMarketRates();
  }

  @Get('saved-recommendation')
  @ApiOperation({ summary: 'Get the latest saved AI recommendation for this tenant' })
  getSavedRecommendation() {
    return this.service.getSavedRecommendation();
  }

  // NOTE(review): @AllowViewer lets read-only users start an AI recommendation
  // run — confirm this is intended.
  @Post('recommendations')
  @ApiOperation({ summary: 'Trigger AI-powered investment recommendations (async — returns immediately)' })
  @AllowViewer()
  triggerRecommendations(@Req() req: any) {
    return this.service.triggerAIRecommendations(req.user?.sub, req.user?.orgId);
  }
}
|
||||
@@ -0,0 +1,9 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { InvestmentPlanningController } from './investment-planning.controller';
|
||||
import { InvestmentPlanningService } from './investment-planning.service';
|
||||
|
||||
/** Investment planning feature module. */
@Module({
  controllers: [InvestmentPlanningController],
  providers: [InvestmentPlanningService],
})
export class InvestmentPlanningModule {}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -16,6 +16,11 @@ export class InvoicesController {
|
||||
@Get(':id')
|
||||
findOne(@Param('id') id: string) { return this.invoicesService.findOne(id); }
|
||||
|
||||
@Post('generate-preview')
|
||||
generatePreview(@Body() dto: { month: number; year: number }) {
|
||||
return this.invoicesService.generatePreview(dto);
|
||||
}
|
||||
|
||||
@Post('generate-bulk')
|
||||
generateBulk(@Body() dto: { month: number; year: number }, @Request() req: any) {
|
||||
return this.invoicesService.generateBulk(dto, req.user.sub);
|
||||
|
||||
@@ -1,33 +1,135 @@
|
||||
import { Injectable, NotFoundException, BadRequestException } from '@nestjs/common';
|
||||
import { TenantService } from '../../database/tenant.service';
|
||||
|
||||
const MONTH_NAMES = [
|
||||
'', 'January', 'February', 'March', 'April', 'May', 'June',
|
||||
'July', 'August', 'September', 'October', 'November', 'December',
|
||||
];
|
||||
|
||||
const MONTH_ABBREV = [
|
||||
'', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
|
||||
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec',
|
||||
];
|
||||
|
||||
@Injectable()
|
||||
export class InvoicesService {
|
||||
constructor(private tenant: TenantService) {}
|
||||
|
||||
async findAll() {
|
||||
return this.tenant.query(`
|
||||
SELECT i.*, u.unit_number,
|
||||
SELECT i.*, u.unit_number, u.owner_name, ag.name as assessment_group_name, ag.frequency,
|
||||
(i.amount - i.amount_paid) as balance_due
|
||||
FROM invoices i
|
||||
JOIN units u ON u.id = i.unit_id
|
||||
LEFT JOIN assessment_groups ag ON ag.id = i.assessment_group_id
|
||||
ORDER BY i.invoice_date DESC, i.invoice_number DESC
|
||||
`);
|
||||
}
|
||||
|
||||
async findOne(id: string) {
|
||||
const rows = await this.tenant.query(`
|
||||
SELECT i.*, u.unit_number FROM invoices i
|
||||
SELECT i.*, u.unit_number, u.owner_name FROM invoices i
|
||||
JOIN units u ON u.id = i.unit_id WHERE i.id = $1`, [id]);
|
||||
if (!rows.length) throw new NotFoundException('Invoice not found');
|
||||
return rows[0];
|
||||
}
|
||||
|
||||
async generateBulk(dto: { month: number; year: number }, userId: string) {
|
||||
const units = await this.tenant.query(
|
||||
`SELECT * FROM units WHERE status = 'active' AND monthly_assessment > 0`,
|
||||
/**
|
||||
* Calculate billing period based on frequency and the billing month.
|
||||
*/
|
||||
private calculatePeriod(frequency: string, month: number, year: number): { start: string; end: string; description: string } {
|
||||
switch (frequency) {
|
||||
case 'quarterly': {
|
||||
// Period covers 3 months starting from the billing month
|
||||
const startDate = new Date(year, month - 1, 1);
|
||||
const endDate = new Date(year, month + 2, 0); // last day of month+2
|
||||
const endMonth = month + 2 > 12 ? month + 2 - 12 : month + 2;
|
||||
const quarter = Math.ceil(month / 3);
|
||||
return {
|
||||
start: startDate.toISOString().split('T')[0],
|
||||
end: endDate.toISOString().split('T')[0],
|
||||
description: `Q${quarter} ${year} Assessment (${MONTH_ABBREV[month]}-${MONTH_ABBREV[endMonth]})`,
|
||||
};
|
||||
}
|
||||
case 'annual': {
|
||||
const startDate = new Date(year, 0, 1);
|
||||
const endDate = new Date(year, 11, 31);
|
||||
return {
|
||||
start: startDate.toISOString().split('T')[0],
|
||||
end: endDate.toISOString().split('T')[0],
|
||||
description: `Annual Assessment ${year}`,
|
||||
};
|
||||
}
|
||||
default: { // monthly
|
||||
const startDate = new Date(year, month - 1, 1);
|
||||
const endDate = new Date(year, month, 0); // last day of month
|
||||
return {
|
||||
start: startDate.toISOString().split('T')[0],
|
||||
end: endDate.toISOString().split('T')[0],
|
||||
description: `Monthly Assessment - ${MONTH_NAMES[month]} ${year}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Preview which groups/units will be billed for a given month/year.
|
||||
*/
|
||||
async generatePreview(dto: { month: number; year: number }) {
|
||||
const allGroups = await this.tenant.query(
|
||||
`SELECT ag.*, (SELECT COUNT(*) FROM units u WHERE u.assessment_group_id = ag.id AND u.status = 'active') as active_units
|
||||
FROM assessment_groups ag WHERE ag.is_active = true ORDER BY ag.name`,
|
||||
);
|
||||
if (!units.length) throw new BadRequestException('No active units with assessments found');
|
||||
|
||||
const groups = allGroups.map((g: any) => {
|
||||
const dueMonths: number[] = g.due_months || [1,2,3,4,5,6,7,8,9,10,11,12];
|
||||
const isBillingMonth = dueMonths.includes(dto.month);
|
||||
const activeUnits = parseInt(g.active_units || '0');
|
||||
const totalAmount = isBillingMonth
|
||||
? (parseFloat(g.regular_assessment) + parseFloat(g.special_assessment || '0')) * activeUnits
|
||||
: 0;
|
||||
const period = this.calculatePeriod(g.frequency || 'monthly', dto.month, dto.year);
|
||||
|
||||
return {
|
||||
id: g.id,
|
||||
name: g.name,
|
||||
frequency: g.frequency || 'monthly',
|
||||
due_months: dueMonths,
|
||||
active_units: activeUnits,
|
||||
regular_assessment: g.regular_assessment,
|
||||
special_assessment: g.special_assessment,
|
||||
is_billing_month: isBillingMonth,
|
||||
total_amount: totalAmount,
|
||||
period_description: period.description,
|
||||
};
|
||||
});
|
||||
|
||||
const billableGroups = groups.filter((g: any) => g.is_billing_month && g.active_units > 0);
|
||||
const totalInvoices = billableGroups.reduce((sum: number, g: any) => sum + g.active_units, 0);
|
||||
const totalAmount = billableGroups.reduce((sum: number, g: any) => sum + g.total_amount, 0);
|
||||
|
||||
return {
|
||||
month: dto.month,
|
||||
year: dto.year,
|
||||
month_name: MONTH_NAMES[dto.month],
|
||||
groups,
|
||||
summary: { total_groups_billing: billableGroups.length, total_invoices: totalInvoices, total_amount: totalAmount },
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate invoices for all assessment groups where the given month is a billing month.
|
||||
*/
|
||||
async generateBulk(dto: { month: number; year: number }, userId: string) {
|
||||
// Get assessment groups where this month is a billing month
|
||||
const groups = await this.tenant.query(
|
||||
`SELECT * FROM assessment_groups WHERE is_active = true AND $1 = ANY(due_months)`,
|
||||
[dto.month],
|
||||
);
|
||||
|
||||
if (!groups.length) {
|
||||
throw new BadRequestException(`No assessment groups have billing scheduled for ${MONTH_NAMES[dto.month]}`);
|
||||
}
|
||||
|
||||
// Get or create fiscal period
|
||||
let fp = await this.tenant.query(
|
||||
@@ -41,50 +143,87 @@ export class InvoicesService {
|
||||
}
|
||||
const fiscalPeriodId = fp[0].id;
|
||||
|
||||
const invoiceDate = new Date(dto.year, dto.month - 1, 1);
|
||||
const dueDate = new Date(dto.year, dto.month - 1, 15);
|
||||
// Look up GL accounts once
|
||||
const arAccount = await this.tenant.query(`SELECT id FROM accounts WHERE account_number = '1200'`);
|
||||
const incomeAccount = await this.tenant.query(`SELECT id FROM accounts WHERE account_number = '4000'`);
|
||||
|
||||
let created = 0;
|
||||
const groupResults: any[] = [];
|
||||
|
||||
for (const unit of units) {
|
||||
const invNum = `INV-${dto.year}${String(dto.month).padStart(2, '0')}-${unit.unit_number}`;
|
||||
|
||||
// Check if already generated
|
||||
const existing = await this.tenant.query(
|
||||
'SELECT id FROM invoices WHERE invoice_number = $1', [invNum],
|
||||
);
|
||||
if (existing.length) continue;
|
||||
|
||||
// Create the invoice
|
||||
const inv = await this.tenant.query(
|
||||
`INSERT INTO invoices (invoice_number, unit_id, invoice_date, due_date, invoice_type, description, amount, status)
|
||||
VALUES ($1, $2, $3, $4, 'regular_assessment', $5, $6, 'sent') RETURNING id`,
|
||||
[invNum, unit.id, invoiceDate.toISOString().split('T')[0], dueDate.toISOString().split('T')[0],
|
||||
`Monthly assessment - ${new Date(dto.year, dto.month - 1).toLocaleString('default', { month: 'long', year: 'numeric' })}`,
|
||||
unit.monthly_assessment],
|
||||
for (const group of groups) {
|
||||
// Get active units in this assessment group
|
||||
const units = await this.tenant.query(
|
||||
`SELECT * FROM units WHERE status = 'active' AND assessment_group_id = $1`,
|
||||
[group.id],
|
||||
);
|
||||
|
||||
// Create journal entry: DR Accounts Receivable, CR Assessment Income
|
||||
const arAccount = await this.tenant.query(`SELECT id FROM accounts WHERE account_number = '1200'`);
|
||||
const incomeAccount = await this.tenant.query(`SELECT id FROM accounts WHERE account_number = '4000'`);
|
||||
if (!units.length) continue;
|
||||
|
||||
if (arAccount.length && incomeAccount.length) {
|
||||
const je = await this.tenant.query(
|
||||
`INSERT INTO journal_entries (entry_date, description, entry_type, fiscal_period_id, source_type, source_id, is_posted, posted_at, created_by)
|
||||
VALUES ($1, $2, 'assessment', $3, 'invoice', $4, true, NOW(), $5) RETURNING id`,
|
||||
[invoiceDate.toISOString().split('T')[0], `Assessment - Unit ${unit.unit_number}`, fiscalPeriodId, inv[0].id, userId],
|
||||
const frequency = group.frequency || 'monthly';
|
||||
const period = this.calculatePeriod(frequency, dto.month, dto.year);
|
||||
const dueDay = Math.min(group.due_day || 1, 28);
|
||||
const invoiceDate = new Date(dto.year, dto.month - 1, 1);
|
||||
const dueDate = new Date(dto.year, dto.month - 1, dueDay);
|
||||
|
||||
// Use the group's assessment amount (full period amount, not monthly equivalent)
|
||||
const assessmentAmount = parseFloat(group.regular_assessment) + parseFloat(group.special_assessment || '0');
|
||||
|
||||
let groupCreated = 0;
|
||||
|
||||
for (const unit of units) {
|
||||
const invNum = `INV-${dto.year}${String(dto.month).padStart(2, '0')}-${unit.unit_number}`;
|
||||
|
||||
// Check if already generated
|
||||
const existing = await this.tenant.query(
|
||||
'SELECT id FROM invoices WHERE invoice_number = $1', [invNum],
|
||||
);
|
||||
await this.tenant.query(
|
||||
`INSERT INTO journal_entry_lines (journal_entry_id, account_id, debit, credit) VALUES ($1, $2, $3, 0), ($1, $4, 0, $3)`,
|
||||
[je[0].id, arAccount[0].id, unit.monthly_assessment, incomeAccount[0].id],
|
||||
);
|
||||
await this.tenant.query(
|
||||
`UPDATE invoices SET journal_entry_id = $1 WHERE id = $2`, [je[0].id, inv[0].id],
|
||||
if (existing.length) continue;
|
||||
|
||||
// Use unit-level override if set, otherwise use group amount
|
||||
const unitAmount = unit.monthly_assessment && parseFloat(unit.monthly_assessment) > 0
|
||||
? (frequency === 'monthly'
|
||||
? parseFloat(unit.monthly_assessment)
|
||||
: frequency === 'quarterly'
|
||||
? parseFloat(unit.monthly_assessment) * 3
|
||||
: parseFloat(unit.monthly_assessment) * 12)
|
||||
: assessmentAmount;
|
||||
|
||||
// Create the invoice with status 'pending' (no email sending capability)
|
||||
const inv = await this.tenant.query(
|
||||
`INSERT INTO invoices (invoice_number, unit_id, invoice_date, due_date, invoice_type, description, amount, status, period_start, period_end, assessment_group_id)
|
||||
VALUES ($1, $2, $3, $4, 'regular_assessment', $5, $6, 'pending', $7, $8, $9) RETURNING id`,
|
||||
[invNum, unit.id, invoiceDate.toISOString().split('T')[0], dueDate.toISOString().split('T')[0],
|
||||
period.description, unitAmount, period.start, period.end, group.id],
|
||||
);
|
||||
|
||||
// Create journal entry: DR Accounts Receivable, CR Assessment Income
|
||||
if (arAccount.length && incomeAccount.length) {
|
||||
const je = await this.tenant.query(
|
||||
`INSERT INTO journal_entries (entry_date, description, entry_type, fiscal_period_id, source_type, source_id, is_posted, posted_at, created_by)
|
||||
VALUES ($1, $2, 'assessment', $3, 'invoice', $4, true, NOW(), $5) RETURNING id`,
|
||||
[invoiceDate.toISOString().split('T')[0], `Assessment - Unit ${unit.unit_number}`, fiscalPeriodId, inv[0].id, userId],
|
||||
);
|
||||
await this.tenant.query(
|
||||
`INSERT INTO journal_entry_lines (journal_entry_id, account_id, debit, credit) VALUES ($1, $2, $3, 0), ($1, $4, 0, $3)`,
|
||||
[je[0].id, arAccount[0].id, unitAmount, incomeAccount[0].id],
|
||||
);
|
||||
await this.tenant.query(
|
||||
`UPDATE invoices SET journal_entry_id = $1 WHERE id = $2`, [je[0].id, inv[0].id],
|
||||
);
|
||||
}
|
||||
created++;
|
||||
groupCreated++;
|
||||
}
|
||||
created++;
|
||||
|
||||
groupResults.push({
|
||||
group_name: group.name,
|
||||
frequency,
|
||||
period: period.description,
|
||||
invoices_created: groupCreated,
|
||||
});
|
||||
}
|
||||
|
||||
return { created, month: dto.month, year: dto.year };
|
||||
return { created, month: dto.month, year: dto.year, groups: groupResults };
|
||||
}
|
||||
|
||||
async applyLateFees(dto: { grace_period_days: number; late_fee_amount: number }, userId: string) {
|
||||
@@ -95,7 +234,7 @@ export class InvoicesService {
|
||||
const overdue = await this.tenant.query(`
|
||||
SELECT i.*, u.unit_number FROM invoices i
|
||||
JOIN units u ON u.id = i.unit_id
|
||||
WHERE i.status IN ('sent', 'partial') AND i.due_date < $1
|
||||
WHERE i.status IN ('pending', 'partial') AND i.due_date < $1
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM invoices lf WHERE lf.unit_id = i.unit_id
|
||||
AND lf.invoice_type = 'late_fee' AND lf.description LIKE '%' || i.invoice_number || '%'
|
||||
@@ -109,7 +248,7 @@ export class InvoicesService {
|
||||
const lfNum = `LF-${inv.invoice_number}`;
|
||||
await this.tenant.query(
|
||||
`INSERT INTO invoices (invoice_number, unit_id, invoice_date, due_date, invoice_type, description, amount, status)
|
||||
VALUES ($1, $2, CURRENT_DATE, CURRENT_DATE + INTERVAL '15 days', 'late_fee', $3, $4, 'sent')`,
|
||||
VALUES ($1, $2, CURRENT_DATE, CURRENT_DATE + INTERVAL '15 days', 'late_fee', $3, $4, 'pending')`,
|
||||
[lfNum, inv.unit_id, `Late fee for invoice ${inv.invoice_number}`, dto.late_fee_amount],
|
||||
);
|
||||
applied++;
|
||||
|
||||
@@ -13,6 +13,16 @@ export class JournalEntriesService {
|
||||
async findAll(filters: { from?: string; to?: string; accountId?: string; type?: string }) {
|
||||
let sql = `
|
||||
SELECT je.*,
|
||||
CASE
|
||||
WHEN SUM(CASE WHEN a.account_type IN ('income','expense') THEN 1 ELSE 0 END) > 0
|
||||
THEN COALESCE(SUM(CASE WHEN a.account_type IN ('income','expense') THEN jel.debit ELSE 0 END), 0)
|
||||
ELSE COALESCE(SUM(jel.debit), 0)
|
||||
END as total_debit,
|
||||
CASE
|
||||
WHEN SUM(CASE WHEN a.account_type IN ('income','expense') THEN 1 ELSE 0 END) > 0
|
||||
THEN COALESCE(SUM(CASE WHEN a.account_type IN ('income','expense') THEN jel.credit ELSE 0 END), 0)
|
||||
ELSE COALESCE(SUM(jel.credit), 0)
|
||||
END as total_credit,
|
||||
json_agg(json_build_object(
|
||||
'id', jel.id, 'account_id', jel.account_id,
|
||||
'debit', jel.debit, 'credit', jel.credit, 'memo', jel.memo,
|
||||
|
||||
31
backend/src/modules/onboarding/onboarding.controller.ts
Normal file
31
backend/src/modules/onboarding/onboarding.controller.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { Controller, Get, Patch, Body, UseGuards, Request, BadRequestException } from '@nestjs/common';
|
||||
import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
|
||||
import { AllowViewer } from '../../common/decorators/allow-viewer.decorator';
|
||||
import { OnboardingService } from './onboarding.service';
|
||||
|
||||
@ApiTags('onboarding')
|
||||
@Controller('onboarding')
|
||||
@ApiBearerAuth()
|
||||
@UseGuards(JwtAuthGuard)
|
||||
export class OnboardingController {
|
||||
constructor(private onboardingService: OnboardingService) {}
|
||||
|
||||
@Get('progress')
|
||||
@ApiOperation({ summary: 'Get onboarding progress for current org' })
|
||||
@AllowViewer()
|
||||
async getProgress(@Request() req: any) {
|
||||
const orgId = req.user.orgId;
|
||||
if (!orgId) throw new BadRequestException('No organization context');
|
||||
return this.onboardingService.getProgress(orgId);
|
||||
}
|
||||
|
||||
@Patch('progress')
|
||||
@ApiOperation({ summary: 'Mark an onboarding step as complete' })
|
||||
async markStep(@Request() req: any, @Body() body: { step: string }) {
|
||||
const orgId = req.user.orgId;
|
||||
if (!orgId) throw new BadRequestException('No organization context');
|
||||
if (!body.step) throw new BadRequestException('step is required');
|
||||
return this.onboardingService.markStepComplete(orgId, body.step);
|
||||
}
|
||||
}
|
||||
10
backend/src/modules/onboarding/onboarding.module.ts
Normal file
10
backend/src/modules/onboarding/onboarding.module.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { OnboardingService } from './onboarding.service';
|
||||
import { OnboardingController } from './onboarding.controller';
|
||||
|
||||
@Module({
|
||||
controllers: [OnboardingController],
|
||||
providers: [OnboardingService],
|
||||
exports: [OnboardingService],
|
||||
})
|
||||
export class OnboardingModule {}
|
||||
79
backend/src/modules/onboarding/onboarding.service.ts
Normal file
79
backend/src/modules/onboarding/onboarding.service.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { DataSource } from 'typeorm';
|
||||
|
||||
const REQUIRED_STEPS = ['profile', 'workspace', 'invite_member', 'first_workflow'];
|
||||
|
||||
@Injectable()
|
||||
export class OnboardingService {
|
||||
private readonly logger = new Logger(OnboardingService.name);
|
||||
|
||||
constructor(private dataSource: DataSource) {}
|
||||
|
||||
async getProgress(orgId: string) {
|
||||
const rows = await this.dataSource.query(
|
||||
`SELECT completed_steps, completed_at, updated_at
|
||||
FROM shared.onboarding_progress
|
||||
WHERE organization_id = $1`,
|
||||
[orgId],
|
||||
);
|
||||
|
||||
if (rows.length === 0) {
|
||||
// Create a fresh record
|
||||
await this.dataSource.query(
|
||||
`INSERT INTO shared.onboarding_progress (organization_id)
|
||||
VALUES ($1) ON CONFLICT DO NOTHING`,
|
||||
[orgId],
|
||||
);
|
||||
return { completedSteps: [], completedAt: null, requiredSteps: REQUIRED_STEPS };
|
||||
}
|
||||
|
||||
return {
|
||||
completedSteps: rows[0].completed_steps || [],
|
||||
completedAt: rows[0].completed_at,
|
||||
requiredSteps: REQUIRED_STEPS,
|
||||
};
|
||||
}
|
||||
|
||||
async markStepComplete(orgId: string, step: string) {
|
||||
// Add step to array (using array_append with dedup)
|
||||
await this.dataSource.query(
|
||||
`INSERT INTO shared.onboarding_progress (organization_id, completed_steps, updated_at)
|
||||
VALUES ($1, ARRAY[$2::text], NOW())
|
||||
ON CONFLICT (organization_id)
|
||||
DO UPDATE SET
|
||||
completed_steps = CASE
|
||||
WHEN $2 = ANY(onboarding_progress.completed_steps) THEN onboarding_progress.completed_steps
|
||||
ELSE array_append(onboarding_progress.completed_steps, $2::text)
|
||||
END,
|
||||
updated_at = NOW()`,
|
||||
[orgId, step],
|
||||
);
|
||||
|
||||
// Check if all required steps are done
|
||||
const rows = await this.dataSource.query(
|
||||
`SELECT completed_steps FROM shared.onboarding_progress WHERE organization_id = $1`,
|
||||
[orgId],
|
||||
);
|
||||
|
||||
const completedSteps = rows[0]?.completed_steps || [];
|
||||
const allDone = REQUIRED_STEPS.every((s) => completedSteps.includes(s));
|
||||
|
||||
if (allDone) {
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.onboarding_progress SET completed_at = NOW() WHERE organization_id = $1 AND completed_at IS NULL`,
|
||||
[orgId],
|
||||
);
|
||||
}
|
||||
|
||||
return this.getProgress(orgId);
|
||||
}
|
||||
|
||||
async resetProgress(orgId: string) {
|
||||
await this.dataSource.query(
|
||||
`UPDATE shared.onboarding_progress SET completed_steps = '{}', completed_at = NULL, updated_at = NOW()
|
||||
WHERE organization_id = $1`,
|
||||
[orgId],
|
||||
);
|
||||
return this.getProgress(orgId);
|
||||
}
|
||||
}
|
||||
@@ -61,6 +61,15 @@ export class Organization {
|
||||
@Column({ name: 'plan_level', default: 'standard' })
|
||||
planLevel: string;
|
||||
|
||||
@Column({ name: 'payment_date', type: 'date', nullable: true })
|
||||
paymentDate: Date;
|
||||
|
||||
@Column({ name: 'confirmation_number', type: 'varchar', nullable: true })
|
||||
confirmationNumber: string;
|
||||
|
||||
@Column({ name: 'renewal_date', type: 'date', nullable: true })
|
||||
renewalDate: Date;
|
||||
|
||||
@CreateDateColumn({ name: 'created_at', type: 'timestamptz' })
|
||||
createdAt: Date;
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Controller, Post, Get, Put, Delete, Body, Param, UseGuards, Request, ForbiddenException } from '@nestjs/common';
|
||||
import { Controller, Post, Get, Put, Patch, Delete, Body, Param, UseGuards, Request, ForbiddenException } from '@nestjs/common';
|
||||
import { ApiTags, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { OrganizationsService } from './organizations.service';
|
||||
import { CreateOrganizationDto } from './dto/create-organization.dto';
|
||||
@@ -23,6 +23,13 @@ export class OrganizationsController {
|
||||
return this.orgService.findByUser(req.user.sub);
|
||||
}
|
||||
|
||||
@Patch('settings')
|
||||
@ApiOperation({ summary: 'Update settings for the current organization' })
|
||||
async updateSettings(@Request() req: any, @Body() body: Record<string, any>) {
|
||||
this.requireTenantAdmin(req);
|
||||
return this.orgService.updateSettings(req.user.orgId, body);
|
||||
}
|
||||
|
||||
// ── Org Member Management ──
|
||||
|
||||
private requireTenantAdmin(req: any) {
|
||||
|
||||
@@ -1,20 +1,24 @@
|
||||
import { Injectable, ConflictException, BadRequestException, NotFoundException } from '@nestjs/common';
|
||||
import { Injectable, ConflictException, BadRequestException, NotFoundException, Logger } from '@nestjs/common';
|
||||
import { InjectRepository } from '@nestjs/typeorm';
|
||||
import { Repository } from 'typeorm';
|
||||
import { Organization } from './entities/organization.entity';
|
||||
import { UserOrganization } from './entities/user-organization.entity';
|
||||
import { TenantSchemaService } from '../../database/tenant-schema.service';
|
||||
import { CreateOrganizationDto } from './dto/create-organization.dto';
|
||||
import { EmailService } from '../email/email.service';
|
||||
import * as bcrypt from 'bcryptjs';
|
||||
|
||||
@Injectable()
|
||||
export class OrganizationsService {
|
||||
private readonly logger = new Logger(OrganizationsService.name);
|
||||
|
||||
constructor(
|
||||
@InjectRepository(Organization)
|
||||
private orgRepository: Repository<Organization>,
|
||||
@InjectRepository(UserOrganization)
|
||||
private userOrgRepository: Repository<UserOrganization>,
|
||||
private tenantSchemaService: TenantSchemaService,
|
||||
private emailService: EmailService,
|
||||
) {}
|
||||
|
||||
async create(dto: CreateOrganizationDto, userId: string) {
|
||||
@@ -62,6 +66,29 @@ export class OrganizationsService {
|
||||
return this.orgRepository.save(org);
|
||||
}
|
||||
|
||||
async updatePlanLevel(id: string, planLevel: string) {
|
||||
const org = await this.orgRepository.findOne({ where: { id } });
|
||||
if (!org) throw new NotFoundException('Organization not found');
|
||||
org.planLevel = planLevel;
|
||||
return this.orgRepository.save(org);
|
||||
}
|
||||
|
||||
async updateSubscription(id: string, data: { paymentDate?: string; confirmationNumber?: string; renewalDate?: string }) {
|
||||
const org = await this.orgRepository.findOne({ where: { id } });
|
||||
if (!org) throw new NotFoundException('Organization not found');
|
||||
if (data.paymentDate !== undefined) (org as any).paymentDate = data.paymentDate ? new Date(data.paymentDate) : null;
|
||||
if (data.confirmationNumber !== undefined) (org as any).confirmationNumber = data.confirmationNumber || null;
|
||||
if (data.renewalDate !== undefined) (org as any).renewalDate = data.renewalDate ? new Date(data.renewalDate) : null;
|
||||
return this.orgRepository.save(org);
|
||||
}
|
||||
|
||||
async updateSettings(id: string, settings: Record<string, any>) {
|
||||
const org = await this.orgRepository.findOne({ where: { id } });
|
||||
if (!org) throw new NotFoundException('Organization not found');
|
||||
org.settings = { ...(org.settings || {}), ...settings };
|
||||
return this.orgRepository.save(org);
|
||||
}
|
||||
|
||||
async findByUser(userId: string) {
|
||||
const memberships = await this.userOrgRepository.find({
|
||||
where: { userId, isActive: true },
|
||||
@@ -101,12 +128,29 @@ export class OrganizationsService {
|
||||
return rows;
|
||||
}
|
||||
|
||||
private static readonly MEMBER_LIMIT_PLANS = ['starter', 'standard', 'professional'];
|
||||
private static readonly MAX_MEMBERS = 5;
|
||||
|
||||
async addMember(
|
||||
orgId: string,
|
||||
data: { email: string; firstName: string; lastName: string; password: string; role: string },
|
||||
) {
|
||||
const dataSource = this.orgRepository.manager.connection;
|
||||
|
||||
// Enforce member limit for starter and professional plans
|
||||
const org = await this.orgRepository.findOne({ where: { id: orgId } });
|
||||
const planLevel = org?.planLevel || 'starter';
|
||||
if (OrganizationsService.MEMBER_LIMIT_PLANS.includes(planLevel)) {
|
||||
const activeMemberCount = await this.userOrgRepository.count({
|
||||
where: { organizationId: orgId, isActive: true },
|
||||
});
|
||||
if (activeMemberCount >= OrganizationsService.MAX_MEMBERS) {
|
||||
throw new BadRequestException(
|
||||
`Your ${planLevel === 'starter' ? 'Starter' : 'Professional'} plan is limited to ${OrganizationsService.MAX_MEMBERS} user accounts. Please upgrade to Enterprise for unlimited members.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check if user already exists
|
||||
let userRows = await dataSource.query(
|
||||
`SELECT id FROM shared.users WHERE email = $1`,
|
||||
@@ -130,6 +174,14 @@ export class OrganizationsService {
|
||||
existing.role = data.role;
|
||||
return this.userOrgRepository.save(existing);
|
||||
}
|
||||
// Update password for existing user being added to a new org
|
||||
if (data.password) {
|
||||
const passwordHash = await bcrypt.hash(data.password, 12);
|
||||
await dataSource.query(
|
||||
`UPDATE shared.users SET password_hash = $1 WHERE id = $2`,
|
||||
[passwordHash, userId],
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// Create new user
|
||||
const passwordHash = await bcrypt.hash(data.password, 12);
|
||||
@@ -148,7 +200,23 @@ export class OrganizationsService {
|
||||
organizationId: orgId,
|
||||
role: data.role,
|
||||
});
|
||||
return this.userOrgRepository.save(membership);
|
||||
const saved = await this.userOrgRepository.save(membership);
|
||||
|
||||
// Send welcome email to the new member
|
||||
try {
|
||||
const org = await this.orgRepository.findOne({ where: { id: orgId } });
|
||||
const orgName = org?.name || 'your organization';
|
||||
await this.emailService.sendNewMemberWelcomeEmail(
|
||||
data.email,
|
||||
data.firstName,
|
||||
orgName,
|
||||
);
|
||||
} catch (err) {
|
||||
this.logger.warn(`Failed to send welcome email to ${data.email}: ${err}`);
|
||||
// Don't fail the member addition if the email fails
|
||||
}
|
||||
|
||||
return saved;
|
||||
}
|
||||
|
||||
async updateMemberRole(orgId: string, membershipId: string, role: string) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Controller, Get, Post, Body, Param, UseGuards, Request } from '@nestjs/common';
|
||||
import { Controller, Get, Post, Put, Delete, Body, Param, UseGuards, Request } from '@nestjs/common';
|
||||
import { ApiTags, ApiBearerAuth } from '@nestjs/swagger';
|
||||
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
|
||||
import { PaymentsService } from './payments.service';
|
||||
@@ -18,4 +18,12 @@ export class PaymentsController {
|
||||
|
||||
@Post()
|
||||
create(@Body() dto: any, @Request() req: any) { return this.paymentsService.create(dto, req.user.sub); }
|
||||
|
||||
@Put(':id')
|
||||
update(@Param('id') id: string, @Body() dto: any, @Request() req: any) {
|
||||
return this.paymentsService.update(id, dto, req.user.sub);
|
||||
}
|
||||
|
||||
@Delete(':id')
|
||||
delete(@Param('id') id: string) { return this.paymentsService.delete(id); }
|
||||
}
|
||||
|
||||
@@ -74,17 +74,95 @@ export class PaymentsService {
|
||||
await this.tenant.query(`UPDATE payments SET journal_entry_id = $1 WHERE id = $2`, [je[0].id, payment[0].id]);
|
||||
}
|
||||
|
||||
// Update invoice if linked
|
||||
// Update invoice if linked — use explicit cast to avoid PostgreSQL type inference error
|
||||
if (invoice) {
|
||||
const newPaid = parseFloat(invoice.amount_paid) + parseFloat(dto.amount);
|
||||
const invoiceAmt = parseFloat(invoice.amount);
|
||||
const newStatus = newPaid >= invoiceAmt ? 'paid' : 'partial';
|
||||
await this.tenant.query(
|
||||
`UPDATE invoices SET amount_paid = $1, status = $2, paid_at = CASE WHEN $2 = 'paid' THEN NOW() ELSE paid_at END, updated_at = NOW() WHERE id = $3`,
|
||||
[newPaid, newStatus, invoice.id],
|
||||
`UPDATE invoices SET amount_paid = $1, status = $2::VARCHAR, paid_at = CASE WHEN $3::VARCHAR = 'paid' THEN NOW() ELSE paid_at END, updated_at = NOW() WHERE id = $4`,
|
||||
[newPaid, newStatus, newStatus, invoice.id],
|
||||
);
|
||||
}
|
||||
|
||||
return payment[0];
|
||||
}
|
||||
|
||||
async update(id: string, dto: any, userId: string) {
|
||||
const existing = await this.findOne(id);
|
||||
|
||||
const sets: string[] = [];
|
||||
const params: any[] = [];
|
||||
let idx = 1;
|
||||
|
||||
if (dto.payment_date !== undefined) { sets.push(`payment_date = $${idx++}`); params.push(dto.payment_date); }
|
||||
if (dto.amount !== undefined) { sets.push(`amount = $${idx++}`); params.push(dto.amount); }
|
||||
if (dto.payment_method !== undefined) { sets.push(`payment_method = $${idx++}`); params.push(dto.payment_method); }
|
||||
if (dto.reference_number !== undefined) { sets.push(`reference_number = $${idx++}`); params.push(dto.reference_number); }
|
||||
if (dto.notes !== undefined) { sets.push(`notes = $${idx++}`); params.push(dto.notes); }
|
||||
|
||||
if (!sets.length) return this.findOne(id);
|
||||
|
||||
params.push(id);
|
||||
await this.tenant.query(
|
||||
`UPDATE payments SET ${sets.join(', ')} WHERE id = $${idx} RETURNING *`,
|
||||
params,
|
||||
);
|
||||
|
||||
// If amount changed and payment is linked to an invoice, recalculate invoice totals
|
||||
if (dto.amount !== undefined && existing.invoice_id) {
|
||||
await this.recalculateInvoice(existing.invoice_id);
|
||||
}
|
||||
|
||||
return this.findOne(id);
|
||||
}
|
||||
|
||||
async delete(id: string) {
|
||||
const payment = await this.findOne(id);
|
||||
const invoiceId = payment.invoice_id;
|
||||
|
||||
// Delete associated journal entry lines and journal entry
|
||||
if (payment.journal_entry_id) {
|
||||
await this.tenant.query('DELETE FROM journal_entry_lines WHERE journal_entry_id = $1', [payment.journal_entry_id]);
|
||||
await this.tenant.query('DELETE FROM journal_entries WHERE id = $1', [payment.journal_entry_id]);
|
||||
}
|
||||
|
||||
// Delete the payment
|
||||
await this.tenant.query('DELETE FROM payments WHERE id = $1', [id]);
|
||||
|
||||
// Recalculate invoice totals if payment was linked
|
||||
if (invoiceId) {
|
||||
await this.recalculateInvoice(invoiceId);
|
||||
}
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
private async recalculateInvoice(invoiceId: string) {
|
||||
// Sum all remaining payments for this invoice
|
||||
const result = await this.tenant.query(
|
||||
'SELECT COALESCE(SUM(amount), 0) as total_paid FROM payments WHERE invoice_id = $1',
|
||||
[invoiceId],
|
||||
);
|
||||
const totalPaid = parseFloat(result[0].total_paid);
|
||||
|
||||
// Get the invoice amount
|
||||
const inv = await this.tenant.query('SELECT amount FROM invoices WHERE id = $1', [invoiceId]);
|
||||
if (!inv.length) return;
|
||||
|
||||
const invoiceAmt = parseFloat(inv[0].amount);
|
||||
let newStatus: string;
|
||||
if (totalPaid >= invoiceAmt) {
|
||||
newStatus = 'paid';
|
||||
} else if (totalPaid > 0) {
|
||||
newStatus = 'partial';
|
||||
} else {
|
||||
newStatus = 'pending';
|
||||
}
|
||||
|
||||
await this.tenant.query(
|
||||
`UPDATE invoices SET amount_paid = $1, status = $2::VARCHAR, paid_at = CASE WHEN $3::VARCHAR = 'paid' THEN NOW() ELSE NULL END, updated_at = NOW() WHERE id = $4`,
|
||||
[totalPaid, newStatus, newStatus, invoiceId],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ export class ProjectsService {
|
||||
|
||||
async findAll() {
|
||||
const projects = await this.tenant.query(
|
||||
'SELECT * FROM projects WHERE is_active = true ORDER BY name',
|
||||
'SELECT * FROM projects WHERE is_active = true ORDER BY planned_date NULLS LAST, target_year NULLS LAST, target_month NULLS LAST, name',
|
||||
);
|
||||
return this.computeFunding(projects);
|
||||
}
|
||||
@@ -20,7 +20,7 @@ export class ProjectsService {
|
||||
|
||||
async findForPlanning() {
|
||||
const projects = await this.tenant.query(
|
||||
'SELECT * FROM projects WHERE is_active = true AND target_year IS NOT NULL ORDER BY target_year, target_month NULLS LAST, priority',
|
||||
'SELECT * FROM projects WHERE is_active = true ORDER BY target_year NULLS LAST, target_month NULLS LAST, priority',
|
||||
);
|
||||
return this.computeFunding(projects);
|
||||
}
|
||||
@@ -157,6 +157,9 @@ export class ProjectsService {
|
||||
const params: any[] = [];
|
||||
let idx = 1;
|
||||
|
||||
// Date columns must be null (not empty string) for PostgreSQL DATE type
|
||||
const dateFields = new Set(['last_replacement_date', 'next_replacement_date', 'planned_date']);
|
||||
|
||||
// Build dynamic SET clause
|
||||
const fields: [string, string][] = [
|
||||
['name', 'name'], ['description', 'description'], ['category', 'category'],
|
||||
@@ -175,7 +178,8 @@ export class ProjectsService {
|
||||
for (const [dtoKey, dbCol] of fields) {
|
||||
if (dto[dtoKey] !== undefined) {
|
||||
sets.push(`${dbCol} = $${idx++}`);
|
||||
params.push(dto[dtoKey]);
|
||||
const val = dateFields.has(dtoKey) && dto[dtoKey] === '' ? null : dto[dtoKey];
|
||||
params.push(val);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -276,7 +280,7 @@ export class ProjectsService {
|
||||
await this.findOne(id);
|
||||
const rows = await this.tenant.query(
|
||||
'UPDATE projects SET planned_date = $2, updated_at = NOW() WHERE id = $1 RETURNING *',
|
||||
[id, planned_date],
|
||||
[id, planned_date || null],
|
||||
);
|
||||
return rows[0];
|
||||
}
|
||||
|
||||
@@ -24,8 +24,16 @@ export class ReportsController {
|
||||
}
|
||||
|
||||
@Get('cash-flow-sankey')
|
||||
getCashFlowSankey(@Query('year') year?: string) {
|
||||
return this.reportsService.getCashFlowSankey(parseInt(year || '') || new Date().getFullYear());
|
||||
getCashFlowSankey(
|
||||
@Query('year') year?: string,
|
||||
@Query('source') source?: string,
|
||||
@Query('fundType') fundType?: string,
|
||||
) {
|
||||
return this.reportsService.getCashFlowSankey(
|
||||
parseInt(year || '') || new Date().getFullYear(),
|
||||
source || 'actuals',
|
||||
fundType || 'all',
|
||||
);
|
||||
}
|
||||
|
||||
@Get('cash-flow')
|
||||
@@ -57,6 +65,11 @@ export class ReportsController {
|
||||
return this.reportsService.getDashboardKPIs();
|
||||
}
|
||||
|
||||
@Get('upcoming-investment-activities')
|
||||
getUpcomingInvestmentActivities() {
|
||||
return this.reportsService.getUpcomingInvestmentActivities();
|
||||
}
|
||||
|
||||
@Get('cash-flow-forecast')
|
||||
getCashFlowForecast(
|
||||
@Query('startYear') startYear?: string,
|
||||
@@ -66,4 +79,27 @@ export class ReportsController {
|
||||
const mo = Math.min(parseInt(months || '') || 24, 48);
|
||||
return this.reportsService.getCashFlowForecast(yr, mo);
|
||||
}
|
||||
|
||||
@Get('capital-planning')
|
||||
getCapitalPlanningReport(@Query('startYear') startYear?: string) {
|
||||
return this.reportsService.getCapitalPlanningReport(
|
||||
parseInt(startYear || '') || undefined,
|
||||
);
|
||||
}
|
||||
|
||||
@Get('quarterly')
|
||||
getQuarterlyFinancial(
|
||||
@Query('year') year?: string,
|
||||
@Query('quarter') quarter?: string,
|
||||
) {
|
||||
const now = new Date();
|
||||
const defaultYear = now.getFullYear();
|
||||
// Default to last complete quarter
|
||||
const currentQuarter = Math.ceil((now.getMonth() + 1) / 3);
|
||||
const defaultQuarter = currentQuarter > 1 ? currentQuarter - 1 : 4;
|
||||
const defaultQYear = currentQuarter > 1 ? defaultYear : defaultYear - 1;
|
||||
const yr = parseInt(year || '') || defaultQYear;
|
||||
const q = Math.min(Math.max(parseInt(quarter || '') || defaultQuarter, 1), 4);
|
||||
return this.reportsService.getQuarterlyFinancial(yr, q);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,10 +14,12 @@ export class ReportsService {
|
||||
ELSE COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0)
|
||||
END as balance
|
||||
FROM accounts a
|
||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date <= $1
|
||||
LEFT JOIN (
|
||||
journal_entry_lines jel
|
||||
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date <= $1
|
||||
) ON jel.account_id = a.id
|
||||
WHERE a.is_active = true AND a.account_type IN ('asset', 'liability', 'equity')
|
||||
GROUP BY a.id, a.account_number, a.name, a.account_type, a.fund_type
|
||||
HAVING CASE
|
||||
@@ -32,6 +34,71 @@ export class ReportsService {
|
||||
const liabilities = rows.filter((r: any) => r.account_type === 'liability');
|
||||
const equity = rows.filter((r: any) => r.account_type === 'equity');
|
||||
|
||||
// Compute current year net income (income - expenses) for the fiscal year through as_of date
|
||||
// This balances the accounting equation: Assets = Liabilities + Equity + Net Income
|
||||
const fiscalYearStart = `${asOf.substring(0, 4)}-01-01`;
|
||||
const netIncomeSql = `
|
||||
SELECT
|
||||
COALESCE(SUM(CASE WHEN a.account_type = 'income'
|
||||
THEN jel.credit - jel.debit ELSE 0 END), 0) -
|
||||
COALESCE(SUM(CASE WHEN a.account_type = 'expense'
|
||||
THEN jel.debit - jel.credit ELSE 0 END), 0) as net_income
|
||||
FROM journal_entry_lines jel
|
||||
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date BETWEEN $1 AND $2
|
||||
INNER JOIN accounts a ON a.id = jel.account_id
|
||||
AND a.account_type IN ('income', 'expense') AND a.is_active = true
|
||||
`;
|
||||
const netIncomeResult = await this.tenant.query(netIncomeSql, [fiscalYearStart, asOf]);
|
||||
const netIncome = parseFloat(netIncomeResult[0]?.net_income || '0');
|
||||
|
||||
// Add current year net income as a synthetic equity line
|
||||
if (netIncome !== 0) {
|
||||
equity.push({
|
||||
id: null,
|
||||
account_number: '',
|
||||
name: 'Current Year Net Income',
|
||||
account_type: 'equity',
|
||||
fund_type: 'operating',
|
||||
balance: netIncome.toFixed(2),
|
||||
});
|
||||
}
|
||||
|
||||
// Add investment account balances to assets and corresponding equity
|
||||
const investmentsSql = `
|
||||
SELECT id, name, institution, current_value as balance, fund_type
|
||||
FROM investment_accounts
|
||||
WHERE is_active = true AND current_value > 0
|
||||
`;
|
||||
const investments = await this.tenant.query(investmentsSql);
|
||||
const investmentsByFund: Record<string, number> = {};
|
||||
for (const inv of investments) {
|
||||
assets.push({
|
||||
id: inv.id,
|
||||
account_number: '',
|
||||
name: `${inv.name} (${inv.institution})`,
|
||||
account_type: 'asset',
|
||||
fund_type: inv.fund_type,
|
||||
balance: parseFloat(inv.balance).toFixed(2),
|
||||
});
|
||||
investmentsByFund[inv.fund_type] = (investmentsByFund[inv.fund_type] || 0) + parseFloat(inv.balance);
|
||||
}
|
||||
// Add investment balances as synthetic equity lines to maintain A = L + E
|
||||
for (const [fundType, total] of Object.entries(investmentsByFund)) {
|
||||
if (total > 0) {
|
||||
const label = fundType === 'reserve' ? 'Reserve' : 'Operating';
|
||||
equity.push({
|
||||
id: null,
|
||||
account_number: '',
|
||||
name: `${label} Investment Holdings`,
|
||||
account_type: 'equity',
|
||||
fund_type: fundType,
|
||||
balance: total.toFixed(2),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const totalAssets = assets.reduce((s: number, r: any) => s + parseFloat(r.balance), 0);
|
||||
const totalLiabilities = liabilities.reduce((s: number, r: any) => s + parseFloat(r.balance), 0);
|
||||
const totalEquity = equity.reduce((s: number, r: any) => s + parseFloat(r.balance), 0);
|
||||
@@ -54,10 +121,12 @@ export class ReportsService {
|
||||
ELSE COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0)
|
||||
END as amount
|
||||
FROM accounts a
|
||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date BETWEEN $1 AND $2
|
||||
LEFT JOIN (
|
||||
journal_entry_lines jel
|
||||
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date BETWEEN $1 AND $2
|
||||
) ON jel.account_id = a.id
|
||||
WHERE a.is_active = true AND a.account_type IN ('income', 'expense')
|
||||
GROUP BY a.id, a.account_number, a.name, a.account_type, a.fund_type
|
||||
HAVING CASE
|
||||
@@ -83,33 +152,151 @@ export class ReportsService {
|
||||
};
|
||||
}
|
||||
|
||||
async getCashFlowSankey(year: number) {
|
||||
// Get income accounts with amounts
|
||||
const income = await this.tenant.query(`
|
||||
SELECT a.name, COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0) as amount
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND EXTRACT(YEAR FROM je.entry_date) = $1
|
||||
WHERE a.account_type = 'income' AND a.is_active = true
|
||||
GROUP BY a.id, a.name
|
||||
HAVING COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0) > 0
|
||||
ORDER BY amount DESC
|
||||
`, [year]);
|
||||
async getCashFlowSankey(year: number, source = 'actuals', fundType = 'all') {
|
||||
let income: any[];
|
||||
let expenses: any[];
|
||||
|
||||
const expenses = await this.tenant.query(`
|
||||
SELECT a.name, a.fund_type, COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as amount
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND EXTRACT(YEAR FROM je.entry_date) = $1
|
||||
WHERE a.account_type = 'expense' AND a.is_active = true
|
||||
GROUP BY a.id, a.name, a.fund_type
|
||||
HAVING COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) > 0
|
||||
ORDER BY amount DESC
|
||||
`, [year]);
|
||||
const fundCondition = fundType !== 'all' ? ` AND a.fund_type = $2` : '';
|
||||
const fundParams = fundType !== 'all' ? [year, fundType] : [year];
|
||||
|
||||
const monthSum = `COALESCE(b.jan,0)+COALESCE(b.feb,0)+COALESCE(b.mar,0)+COALESCE(b.apr,0)+COALESCE(b.may,0)+COALESCE(b.jun,0)+COALESCE(b.jul,0)+COALESCE(b.aug,0)+COALESCE(b.sep,0)+COALESCE(b.oct,0)+COALESCE(b.nov,0)+COALESCE(b.dec_amt,0)`;
|
||||
|
||||
if (source === 'budget') {
|
||||
income = await this.tenant.query(`
|
||||
SELECT a.name, SUM(${monthSum}) as amount
|
||||
FROM budgets b
|
||||
JOIN accounts a ON a.id = b.account_id
|
||||
WHERE b.fiscal_year = $1 AND a.account_type = 'income' AND a.is_active = true${fundCondition}
|
||||
GROUP BY a.id, a.name
|
||||
HAVING SUM(${monthSum}) > 0
|
||||
ORDER BY SUM(${monthSum}) DESC
|
||||
`, fundParams);
|
||||
|
||||
expenses = await this.tenant.query(`
|
||||
SELECT a.name, a.fund_type, SUM(${monthSum}) as amount
|
||||
FROM budgets b
|
||||
JOIN accounts a ON a.id = b.account_id
|
||||
WHERE b.fiscal_year = $1 AND a.account_type = 'expense' AND a.is_active = true${fundCondition}
|
||||
GROUP BY a.id, a.name, a.fund_type
|
||||
HAVING SUM(${monthSum}) > 0
|
||||
ORDER BY SUM(${monthSum}) DESC
|
||||
`, fundParams);
|
||||
|
||||
} else if (source === 'forecast') {
|
||||
// Combine actuals (Jan to current date) + budget (remaining months)
|
||||
const now = new Date();
|
||||
const currentMonth = now.getMonth(); // 0-indexed
|
||||
const monthNames = ['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec_amt'];
|
||||
const remainingMonths = monthNames.slice(currentMonth + 1);
|
||||
|
||||
const actualsFundCond = fundType !== 'all' ? ' AND a.fund_type = $2' : '';
|
||||
const actualsParams: any[] = fundType !== 'all' ? [`${year}-01-01`, fundType] : [`${year}-01-01`];
|
||||
|
||||
const actualsIncome = await this.tenant.query(`
|
||||
SELECT a.name, COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0) as amount
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date >= $1 AND je.entry_date <= CURRENT_DATE
|
||||
WHERE a.account_type = 'income' AND a.is_active = true${actualsFundCond}
|
||||
GROUP BY a.id, a.name
|
||||
`, actualsParams);
|
||||
|
||||
const actualsExpenses = await this.tenant.query(`
|
||||
SELECT a.name, a.fund_type, COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as amount
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date >= $1 AND je.entry_date <= CURRENT_DATE
|
||||
WHERE a.account_type = 'expense' AND a.is_active = true${actualsFundCond}
|
||||
GROUP BY a.id, a.name, a.fund_type
|
||||
`, actualsParams);
|
||||
|
||||
// Budget for remaining months
|
||||
let budgetIncome: any[] = [];
|
||||
let budgetExpenses: any[] = [];
|
||||
if (remainingMonths.length > 0) {
|
||||
const budgetMonthSum = remainingMonths.map(m => `COALESCE(b.${m},0)`).join('+');
|
||||
budgetIncome = await this.tenant.query(`
|
||||
SELECT a.name, SUM(${budgetMonthSum}) as amount
|
||||
FROM budgets b
|
||||
JOIN accounts a ON a.id = b.account_id
|
||||
WHERE b.fiscal_year = $1 AND a.account_type = 'income' AND a.is_active = true${fundCondition}
|
||||
GROUP BY a.id, a.name
|
||||
`, fundParams);
|
||||
|
||||
budgetExpenses = await this.tenant.query(`
|
||||
SELECT a.name, a.fund_type, SUM(${budgetMonthSum}) as amount
|
||||
FROM budgets b
|
||||
JOIN accounts a ON a.id = b.account_id
|
||||
WHERE b.fiscal_year = $1 AND a.account_type = 'expense' AND a.is_active = true${fundCondition}
|
||||
GROUP BY a.id, a.name, a.fund_type
|
||||
`, fundParams);
|
||||
}
|
||||
|
||||
// Merge actuals + budget by account name
|
||||
const incomeMap = new Map<string, number>();
|
||||
for (const a of actualsIncome) {
|
||||
const amt = parseFloat(a.amount) || 0;
|
||||
if (amt > 0) incomeMap.set(a.name, (incomeMap.get(a.name) || 0) + amt);
|
||||
}
|
||||
for (const b of budgetIncome) {
|
||||
const amt = parseFloat(b.amount) || 0;
|
||||
if (amt > 0) incomeMap.set(b.name, (incomeMap.get(b.name) || 0) + amt);
|
||||
}
|
||||
income = Array.from(incomeMap.entries())
|
||||
.map(([name, amount]) => ({ name, amount: String(amount) }))
|
||||
.sort((a, b) => parseFloat(b.amount) - parseFloat(a.amount));
|
||||
|
||||
const expenseMap = new Map<string, { amount: number; fund_type: string }>();
|
||||
for (const a of actualsExpenses) {
|
||||
const amt = parseFloat(a.amount) || 0;
|
||||
if (amt > 0) {
|
||||
const existing = expenseMap.get(a.name);
|
||||
expenseMap.set(a.name, { amount: (existing?.amount || 0) + amt, fund_type: a.fund_type });
|
||||
}
|
||||
}
|
||||
for (const b of budgetExpenses) {
|
||||
const amt = parseFloat(b.amount) || 0;
|
||||
if (amt > 0) {
|
||||
const existing = expenseMap.get(b.name);
|
||||
expenseMap.set(b.name, { amount: (existing?.amount || 0) + amt, fund_type: b.fund_type });
|
||||
}
|
||||
}
|
||||
expenses = Array.from(expenseMap.entries())
|
||||
.map(([name, { amount, fund_type }]) => ({ name, amount: String(amount), fund_type }))
|
||||
.sort((a, b) => parseFloat(b.amount) - parseFloat(a.amount));
|
||||
|
||||
} else {
|
||||
// Actuals: query journal entries for the year
|
||||
income = await this.tenant.query(`
|
||||
SELECT a.name, COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0) as amount
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND EXTRACT(YEAR FROM je.entry_date) = $1
|
||||
WHERE a.account_type = 'income' AND a.is_active = true${fundCondition}
|
||||
GROUP BY a.id, a.name
|
||||
HAVING COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0) > 0
|
||||
ORDER BY amount DESC
|
||||
`, fundParams);
|
||||
|
||||
expenses = await this.tenant.query(`
|
||||
SELECT a.name, a.fund_type, COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as amount
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND EXTRACT(YEAR FROM je.entry_date) = $1
|
||||
WHERE a.account_type = 'expense' AND a.is_active = true${fundCondition}
|
||||
GROUP BY a.id, a.name, a.fund_type
|
||||
HAVING COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) > 0
|
||||
ORDER BY amount DESC
|
||||
`, fundParams);
|
||||
}
|
||||
|
||||
if (!income.length && !expenses.length) {
|
||||
return { nodes: [], links: [], total_income: 0, total_expenses: 0, net_cash_flow: 0 };
|
||||
@@ -222,20 +409,20 @@ export class ReportsService {
|
||||
ORDER BY a.name
|
||||
`, [from, to]);
|
||||
|
||||
// Asset filter: cash-only vs cash + investment accounts
|
||||
const assetFilter = includeInvestments
|
||||
? `a.account_type = 'asset'`
|
||||
: `a.account_type = 'asset' AND a.name LIKE '%Cash%'`;
|
||||
// Asset filter: all asset accounts (bank/checking/savings are the cash accounts)
|
||||
const assetFilter = `a.account_type = 'asset'`;
|
||||
|
||||
// Cash beginning and ending balances
|
||||
const beginCash = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(sub.bal), 0) as balance FROM (
|
||||
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
|
||||
FROM accounts a
|
||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date < $1
|
||||
LEFT JOIN (
|
||||
journal_entry_lines jel
|
||||
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date < $1
|
||||
) ON jel.account_id = a.id
|
||||
WHERE ${assetFilter} AND a.is_active = true
|
||||
GROUP BY a.id
|
||||
) sub
|
||||
@@ -245,10 +432,12 @@ export class ReportsService {
|
||||
SELECT COALESCE(SUM(sub.bal), 0) as balance FROM (
|
||||
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
|
||||
FROM accounts a
|
||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date <= $1
|
||||
LEFT JOIN (
|
||||
journal_entry_lines jel
|
||||
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date <= $1
|
||||
) ON jel.account_id = a.id
|
||||
WHERE ${assetFilter} AND a.is_active = true
|
||||
GROUP BY a.id
|
||||
) sub
|
||||
@@ -273,7 +462,8 @@ export class ReportsService {
|
||||
const totalOperating = operatingItems.reduce((s: number, r: any) => s + r.amount, 0);
|
||||
const totalReserve = reserveItems.reduce((s: number, r: any) => s + r.amount, 0);
|
||||
const beginningBalance = parseFloat(beginCash[0]?.balance || '0') + (includeInvestments ? investmentBalance : 0);
|
||||
const endingBalance = parseFloat(endCash[0]?.balance || '0') + investmentBalance;
|
||||
// Only include investment balances in ending balance when includeInvestments is toggled on
|
||||
const endingBalance = parseFloat(endCash[0]?.balance || '0') + (includeInvestments ? investmentBalance : 0);
|
||||
|
||||
return {
|
||||
from, to,
|
||||
@@ -360,19 +550,22 @@ export class ReportsService {
|
||||
const incomeStmt = await this.getIncomeStatement(from, to);
|
||||
const balanceSheet = await this.getBalanceSheet(to);
|
||||
|
||||
// 1099 vendor data
|
||||
// 1099 vendor data — uses journal entries via vendor's default_account_id
|
||||
const vendors1099 = await this.tenant.query(`
|
||||
SELECT v.id, v.name, v.tax_id, v.address_line1, v.city, v.state, v.zip_code,
|
||||
COALESCE(SUM(p.amount), 0) as total_paid
|
||||
COALESCE(SUM(p_amounts.amount), 0) as total_paid
|
||||
FROM vendors v
|
||||
JOIN (
|
||||
SELECT vendor_id, amount FROM invoices
|
||||
WHERE EXTRACT(YEAR FROM invoice_date) = $1
|
||||
AND status IN ('paid', 'partial')
|
||||
) p ON p.vendor_id = v.id
|
||||
LEFT JOIN (
|
||||
SELECT jel.account_id, jel.debit as amount
|
||||
FROM journal_entry_lines jel
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
WHERE je.is_posted = true AND je.is_void = false
|
||||
AND EXTRACT(YEAR FROM je.entry_date) = $1
|
||||
AND jel.debit > 0
|
||||
) p_amounts ON p_amounts.account_id = v.default_account_id
|
||||
WHERE v.is_1099_eligible = true
|
||||
GROUP BY v.id, v.name, v.tax_id, v.address_line1, v.city, v.state, v.zip_code
|
||||
HAVING COALESCE(SUM(p.amount), 0) >= 600
|
||||
HAVING COALESCE(SUM(p_amounts.amount), 0) >= 600
|
||||
ORDER BY v.name
|
||||
`, [year]);
|
||||
|
||||
@@ -444,24 +637,43 @@ export class ReportsService {
|
||||
}
|
||||
|
||||
async getDashboardKPIs() {
|
||||
// Total cash: ALL asset accounts (not just those named "Cash")
|
||||
// Uses proper double-entry balance: debit - credit for assets
|
||||
const cash = await this.tenant.query(`
|
||||
// Operating cash (asset accounts, fund_type=operating)
|
||||
const opCash = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(sub.balance), 0) as total FROM (
|
||||
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as balance
|
||||
FROM accounts a
|
||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id AND je.is_posted = true AND je.is_void = false
|
||||
WHERE a.account_type = 'asset' AND a.is_active = true
|
||||
WHERE a.account_type = 'asset' AND a.fund_type = 'operating' AND a.is_active = true
|
||||
GROUP BY a.id
|
||||
) sub
|
||||
`);
|
||||
// Also include investment account current_value in total cash
|
||||
const investmentCash = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(current_value), 0) as total
|
||||
FROM investment_accounts WHERE is_active = true
|
||||
// Reserve cash (asset accounts, fund_type=reserve)
|
||||
const resCash = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(sub.balance), 0) as total FROM (
|
||||
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as balance
|
||||
FROM accounts a
|
||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id AND je.is_posted = true AND je.is_void = false
|
||||
WHERE a.account_type = 'asset' AND a.fund_type = 'reserve' AND a.is_active = true
|
||||
GROUP BY a.id
|
||||
) sub
|
||||
`);
|
||||
const totalCash = parseFloat(cash[0]?.total || '0') + parseFloat(investmentCash[0]?.total || '0');
|
||||
// Investment accounts split by fund type
|
||||
const opInv = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(current_value), 0) as total
|
||||
FROM investment_accounts WHERE fund_type = 'operating' AND is_active = true
|
||||
`);
|
||||
const resInv = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(current_value), 0) as total
|
||||
FROM investment_accounts WHERE fund_type = 'reserve' AND is_active = true
|
||||
`);
|
||||
|
||||
const operatingCash = parseFloat(opCash[0]?.total || '0');
|
||||
const reserveCash = parseFloat(resCash[0]?.total || '0');
|
||||
const operatingInvestments = parseFloat(opInv[0]?.total || '0');
|
||||
const reserveInvestments = parseFloat(resInv[0]?.total || '0');
|
||||
const totalCash = operatingCash + reserveCash + operatingInvestments + reserveInvestments;
|
||||
|
||||
// Receivables: sum of unpaid invoices
|
||||
const ar = await this.tenant.query(`
|
||||
@@ -469,9 +681,7 @@ export class ReportsService {
|
||||
FROM invoices WHERE status NOT IN ('paid', 'void', 'written_off')
|
||||
`);
|
||||
|
||||
// Reserve fund balance: use the reserve equity accounts (fund balance accounts like 3100)
|
||||
// The equity accounts track the total reserve fund position via double-entry bookkeeping
|
||||
// This is the standard HOA approach — every reserve contribution/expenditure flows through equity
|
||||
// Reserve fund balance via equity accounts + reserve investments
|
||||
const reserves = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(sub.balance), 0) as total FROM (
|
||||
SELECT COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0) as balance
|
||||
@@ -482,17 +692,67 @@ export class ReportsService {
|
||||
GROUP BY a.id
|
||||
) sub
|
||||
`);
|
||||
// Add reserve investment account values to the reserve fund total
|
||||
const reserveInvestments = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(current_value), 0) as total
|
||||
FROM investment_accounts WHERE fund_type = 'reserve' AND is_active = true
|
||||
`);
|
||||
|
||||
// Delinquent count (overdue invoices)
|
||||
const delinquent = await this.tenant.query(`
|
||||
SELECT COUNT(DISTINCT unit_id) as count FROM invoices WHERE status = 'overdue'
|
||||
`);
|
||||
|
||||
// Monthly interest estimate from accounts + investments with rates
|
||||
const acctInterest = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(sub.monthly_interest), 0) as total FROM (
|
||||
SELECT (COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0)) * (a.interest_rate / 100) / 12 as monthly_interest
|
||||
FROM accounts a
|
||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id AND je.is_posted = true AND je.is_void = false
|
||||
WHERE a.account_type = 'asset' AND a.is_active = true AND a.interest_rate > 0
|
||||
GROUP BY a.id, a.interest_rate
|
||||
) sub
|
||||
`);
|
||||
const acctInterestTotal = parseFloat(acctInterest[0]?.total || '0');
|
||||
const invInterest = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(current_value * interest_rate / 100 / 12), 0) as total
|
||||
FROM investment_accounts WHERE is_active = true AND interest_rate > 0
|
||||
`);
|
||||
const estMonthlyInterest = acctInterestTotal + parseFloat(invInterest[0]?.total || '0');
|
||||
|
||||
// Interest earned YTD: actual interest income from journal entries for current year
|
||||
const currentYear = new Date().getFullYear();
|
||||
const interestEarned = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(jel.credit - jel.debit), 0) as total
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND EXTRACT(YEAR FROM je.entry_date) = $1
|
||||
WHERE a.account_type = 'income' AND a.is_active = true
|
||||
AND LOWER(a.name) LIKE '%interest%'
|
||||
`, [currentYear]);
|
||||
|
||||
// Interest earned last year (for YoY comparison)
|
||||
const interestLastYear = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(jel.credit - jel.debit), 0) as total
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND EXTRACT(YEAR FROM je.entry_date) = $1
|
||||
WHERE a.account_type = 'income' AND a.is_active = true
|
||||
AND LOWER(a.name) LIKE '%interest%'
|
||||
`, [currentYear - 1]);
|
||||
|
||||
// Projected interest for current year: YTD actual + remaining months using
|
||||
// the rate-based est_monthly_interest (same source as the dashboard KPI)
|
||||
const currentMonth = new Date().getMonth() + 1;
|
||||
const ytdInterest = parseFloat(interestEarned[0]?.total || '0');
|
||||
const projectedInterest = ytdInterest + (estMonthlyInterest * (12 - currentMonth));
|
||||
|
||||
// Planned capital spend for current year
|
||||
const capitalSpend = await this.tenant.query(`
|
||||
SELECT COALESCE(SUM(estimated_cost), 0) as total
|
||||
FROM projects WHERE target_year = $1 AND status IN ('planned', 'in_progress') AND is_active = true
|
||||
`, [currentYear]);
|
||||
|
||||
// Recent transactions
|
||||
const recentTx = await this.tenant.query(`
|
||||
SELECT je.id, je.entry_date, je.description, je.entry_type,
|
||||
@@ -504,9 +764,91 @@ export class ReportsService {
|
||||
return {
|
||||
total_cash: totalCash.toFixed(2),
|
||||
total_receivables: ar[0]?.total || '0.00',
|
||||
reserve_fund_balance: (parseFloat(reserves[0]?.total || '0') + parseFloat(reserveInvestments[0]?.total || '0')).toFixed(2),
|
||||
reserve_fund_balance: (parseFloat(reserves[0]?.total || '0') + reserveInvestments).toFixed(2),
|
||||
delinquent_units: parseInt(delinquent[0]?.count || '0'),
|
||||
recent_transactions: recentTx,
|
||||
// Enhanced split data
|
||||
operating_cash: operatingCash.toFixed(2),
|
||||
reserve_cash: reserveCash.toFixed(2),
|
||||
operating_investments: operatingInvestments.toFixed(2),
|
||||
reserve_investments: reserveInvestments.toFixed(2),
|
||||
est_monthly_interest: estMonthlyInterest.toFixed(2),
|
||||
interest_earned_ytd: ytdInterest.toFixed(2),
|
||||
interest_last_year: parseFloat(interestLastYear[0]?.total || '0').toFixed(2),
|
||||
interest_projected: projectedInterest.toFixed(2),
|
||||
planned_capital_spend: capitalSpend[0]?.total || '0.00',
|
||||
};
|
||||
}
|
||||
|
||||
async getUpcomingInvestmentActivities() {
|
||||
const now = new Date();
|
||||
const in45Days = new Date(now);
|
||||
in45Days.setDate(in45Days.getDate() + 45);
|
||||
const in60Days = new Date(now);
|
||||
in60Days.setDate(in60Days.getDate() + 60);
|
||||
|
||||
// 1. Investments maturing within 45 days
|
||||
const maturingInvestments = await this.tenant.query(`
|
||||
SELECT id, name, institution, investment_type, fund_type, current_value, principal,
|
||||
interest_rate, maturity_date, purchase_date
|
||||
FROM investment_accounts
|
||||
WHERE is_active = true
|
||||
AND maturity_date IS NOT NULL
|
||||
AND maturity_date BETWEEN CURRENT_DATE AND $1::date
|
||||
ORDER BY maturity_date ASC
|
||||
`, [in45Days.toISOString().split('T')[0]]);
|
||||
|
||||
// Compute interest earned and days remaining for each
|
||||
const maturing = maturingInvestments.map((inv: any) => {
|
||||
const principal = parseFloat(inv.principal) || parseFloat(inv.current_value) || 0;
|
||||
const rate = parseFloat(inv.interest_rate) || 0;
|
||||
const purchaseDate = inv.purchase_date ? new Date(inv.purchase_date) : now;
|
||||
const maturityDate = new Date(inv.maturity_date);
|
||||
const daysHeld = Math.max((maturityDate.getTime() - purchaseDate.getTime()) / 86400000, 1);
|
||||
const interestEarned = principal * (rate / 100) * (daysHeld / 365);
|
||||
const daysRemaining = Math.max(Math.ceil((maturityDate.getTime() - now.getTime()) / 86400000), 0);
|
||||
return {
|
||||
...inv,
|
||||
interest_earned: interestEarned.toFixed(2),
|
||||
maturity_value: (principal + interestEarned).toFixed(2),
|
||||
days_remaining: daysRemaining,
|
||||
activity_type: 'maturity',
|
||||
};
|
||||
});
|
||||
|
||||
// 2. Approved scenario investments due to execute within 60 days
|
||||
let scenarioItems: any[] = [];
|
||||
try {
|
||||
scenarioItems = await this.tenant.query(`
|
||||
SELECT si.id, si.label, si.investment_type, si.fund_type, si.principal,
|
||||
si.interest_rate, si.purchase_date, si.maturity_date, si.institution,
|
||||
bs.name as scenario_name, bs.status as scenario_status
|
||||
FROM scenario_investments si
|
||||
JOIN board_scenarios bs ON bs.id = si.scenario_id
|
||||
WHERE bs.status = 'approved'
|
||||
AND si.executed_investment_id IS NULL
|
||||
AND si.purchase_date IS NOT NULL
|
||||
AND si.purchase_date BETWEEN CURRENT_DATE AND $1::date
|
||||
ORDER BY si.purchase_date ASC
|
||||
`, [in60Days.toISOString().split('T')[0]]);
|
||||
} catch {
|
||||
// scenario tables may not exist
|
||||
}
|
||||
|
||||
const upcoming = scenarioItems.map((si: any) => {
|
||||
const purchaseDate = new Date(si.purchase_date);
|
||||
const daysUntil = Math.max(Math.ceil((purchaseDate.getTime() - now.getTime()) / 86400000), 0);
|
||||
return {
|
||||
...si,
|
||||
days_until: daysUntil,
|
||||
activity_type: 'planned_purchase',
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
maturing_investments: maturing,
|
||||
upcoming_scenario_investments: upcoming,
|
||||
total_activities: maturing.length + upcoming.length,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -594,15 +936,37 @@ export class ReportsService {
|
||||
// We need budgets for startYear and startYear+1 to cover 24 months
|
||||
const budgetsByYearMonth: Record<string, { opIncome: number; opExpense: number; resIncome: number; resExpense: number }> = {};
|
||||
|
||||
for (const yr of [startYear, startYear + 1, startYear + 2]) {
|
||||
const budgetRows = await this.tenant.query(
|
||||
`SELECT b.fund_type, a.account_type,
|
||||
b.jan, b.feb, b.mar, b.apr, b.may, b.jun,
|
||||
b.jul, b.aug, b.sep, b.oct, b.nov, b.dec_amt
|
||||
FROM budgets b
|
||||
JOIN accounts a ON a.id = b.account_id
|
||||
WHERE b.fiscal_year = $1`, [yr],
|
||||
);
|
||||
const endYear = startYear + Math.ceil(months / 12) + 1;
|
||||
for (let yr = startYear; yr <= endYear; yr++) {
|
||||
let budgetRows: any[];
|
||||
try {
|
||||
budgetRows = await this.tenant.query(
|
||||
`SELECT fund_type, account_type, jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec_amt FROM (
|
||||
SELECT b.account_id, b.fund_type, a.account_type,
|
||||
b.jan, b.feb, b.mar, b.apr, b.may, b.jun, b.jul, b.aug, b.sep, b.oct, b.nov, b.dec_amt,
|
||||
1 as source_priority
|
||||
FROM budgets b JOIN accounts a ON a.id = b.account_id WHERE b.fiscal_year = $1
|
||||
UNION ALL
|
||||
SELECT bpl.account_id, bpl.fund_type, a.account_type,
|
||||
bpl.jan, bpl.feb, bpl.mar, bpl.apr, bpl.may, bpl.jun, bpl.jul, bpl.aug, bpl.sep, bpl.oct, bpl.nov, bpl.dec_amt,
|
||||
2 as source_priority
|
||||
FROM budget_plan_lines bpl
|
||||
JOIN budget_plans bp ON bp.id = bpl.budget_plan_id
|
||||
JOIN accounts a ON a.id = bpl.account_id
|
||||
WHERE bp.fiscal_year = $1
|
||||
) combined
|
||||
ORDER BY account_id, fund_type, source_priority`, [yr],
|
||||
);
|
||||
} catch {
|
||||
budgetRows = await this.tenant.query(
|
||||
`SELECT b.fund_type, a.account_type,
|
||||
b.jan, b.feb, b.mar, b.apr, b.may, b.jun,
|
||||
b.jul, b.aug, b.sep, b.oct, b.nov, b.dec_amt
|
||||
FROM budgets b
|
||||
JOIN accounts a ON a.id = b.account_id
|
||||
WHERE b.fiscal_year = $1`, [yr],
|
||||
);
|
||||
}
|
||||
for (let m = 0; m < 12; m++) {
|
||||
const key = `${yr}-${m + 1}`;
|
||||
if (!budgetsByYearMonth[key]) budgetsByYearMonth[key] = { opIncome: 0, opExpense: 0, resIncome: 0, resExpense: 0 };
|
||||
@@ -729,11 +1093,24 @@ export class ReportsService {
|
||||
let runOpInv = opInv;
|
||||
let runResInv = resInv;
|
||||
|
||||
// Determine which months have actual journal entries
|
||||
// A month is "actual" only if it's not in the future AND has real journal entry data
|
||||
const monthsWithActuals = new Set<string>();
|
||||
for (const key of Object.keys(histIndex)) {
|
||||
// histIndex keys are "year-month-fund_type", extract year-month
|
||||
const parts = key.split('-');
|
||||
const ym = `${parts[0]}-${parts[1]}`;
|
||||
monthsWithActuals.add(ym);
|
||||
}
|
||||
|
||||
for (let i = 0; i < months; i++) {
|
||||
const year = startYear + Math.floor(i / 12);
|
||||
const month = (i % 12) + 1;
|
||||
const key = `${year}-${month}`;
|
||||
const isHistorical = year < currentYear || (year === currentYear && month <= currentMonth);
|
||||
// A month is historical (actual) only if it's in the past AND has journal entries
|
||||
const isPastMonth = year < currentYear || (year === currentYear && month < currentMonth);
|
||||
const hasActuals = monthsWithActuals.has(key);
|
||||
const isHistorical = isPastMonth && hasActuals;
|
||||
const label = `${monthLabels[month - 1]} ${year}`;
|
||||
|
||||
if (isHistorical) {
|
||||
@@ -795,4 +1172,284 @@ export class ReportsService {
|
||||
datapoints,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Quarterly Financial Report: quarter income statement, YTD income statement,
|
||||
* budget vs actuals for the quarter and YTD, and over-budget items.
|
||||
*/
|
||||
async getQuarterlyFinancial(year: number, quarter: number) {
|
||||
// Quarter date ranges
|
||||
const qStartMonths = [1, 4, 7, 10];
|
||||
const qEndMonths = [3, 6, 9, 12];
|
||||
const qStart = `${year}-${String(qStartMonths[quarter - 1]).padStart(2, '0')}-01`;
|
||||
const qEndMonth = qEndMonths[quarter - 1];
|
||||
const qEndDay = [31, 30, 30, 31][quarter - 1]; // Mar=31, Jun=30, Sep=30, Dec=31
|
||||
const qEnd = `${year}-${String(qEndMonth).padStart(2, '0')}-${qEndDay}`;
|
||||
const ytdStart = `${year}-01-01`;
|
||||
|
||||
// Quarter and YTD income statements (reuse existing method)
|
||||
const quarterIS = await this.getIncomeStatement(qStart, qEnd);
|
||||
const ytdIS = await this.getIncomeStatement(ytdStart, qEnd);
|
||||
|
||||
// Budget data for the quarter months
|
||||
const budgetMonthCols = {
|
||||
1: ['jan', 'feb', 'mar'],
|
||||
2: ['apr', 'may', 'jun'],
|
||||
3: ['jul', 'aug', 'sep'],
|
||||
4: ['oct', 'nov', 'dec_amt'],
|
||||
} as Record<number, string[]>;
|
||||
const ytdMonthCols = {
|
||||
1: ['jan', 'feb', 'mar'],
|
||||
2: ['jan', 'feb', 'mar', 'apr', 'may', 'jun'],
|
||||
3: ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep'],
|
||||
4: ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec_amt'],
|
||||
} as Record<number, string[]>;
|
||||
|
||||
const qCols = budgetMonthCols[quarter];
|
||||
const ytdCols = ytdMonthCols[quarter];
|
||||
|
||||
const budgetRows = await this.tenant.query(
|
||||
`SELECT b.account_id, a.account_number, a.name, a.account_type, a.fund_type,
|
||||
b.jan, b.feb, b.mar, b.apr, b.may, b.jun,
|
||||
b.jul, b.aug, b.sep, b.oct, b.nov, b.dec_amt
|
||||
FROM budgets b
|
||||
JOIN accounts a ON a.id = b.account_id
|
||||
WHERE b.fiscal_year = $1`, [year],
|
||||
);
|
||||
|
||||
// Actual amounts per account for the quarter and YTD
|
||||
const quarterActuals = await this.tenant.query(`
|
||||
SELECT a.id as account_id, a.account_number, a.name, a.account_type, a.fund_type,
|
||||
CASE
|
||||
WHEN a.account_type = 'income'
|
||||
THEN COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0)
|
||||
ELSE COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0)
|
||||
END as amount
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date BETWEEN $1 AND $2
|
||||
WHERE a.account_type IN ('income', 'expense') AND a.is_active = true
|
||||
GROUP BY a.id, a.account_number, a.name, a.account_type, a.fund_type
|
||||
`, [qStart, qEnd]);
|
||||
|
||||
const ytdActuals = await this.tenant.query(`
|
||||
SELECT a.id as account_id, a.account_number, a.name, a.account_type, a.fund_type,
|
||||
CASE
|
||||
WHEN a.account_type = 'income'
|
||||
THEN COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0)
|
||||
ELSE COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0)
|
||||
END as amount
|
||||
FROM accounts a
|
||||
JOIN journal_entry_lines jel ON jel.account_id = a.id
|
||||
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||
AND je.is_posted = true AND je.is_void = false
|
||||
AND je.entry_date BETWEEN $1 AND $2
|
||||
WHERE a.account_type IN ('income', 'expense') AND a.is_active = true
|
||||
GROUP BY a.id, a.account_number, a.name, a.account_type, a.fund_type
|
||||
`, [ytdStart, qEnd]);
|
||||
|
||||
// Build budget vs actual comparison
|
||||
const actualsByIdQ = new Map<string, number>();
|
||||
for (const a of quarterActuals) {
|
||||
actualsByIdQ.set(a.account_id, parseFloat(a.amount) || 0);
|
||||
}
|
||||
const actualsByIdYTD = new Map<string, number>();
|
||||
for (const a of ytdActuals) {
|
||||
actualsByIdYTD.set(a.account_id, parseFloat(a.amount) || 0);
|
||||
}
|
||||
|
||||
const budgetVsActual: any[] = [];
|
||||
const overBudgetItems: any[] = [];
|
||||
|
||||
for (const b of budgetRows) {
|
||||
const qBudget = qCols.reduce((sum: number, col: string) => sum + (parseFloat(b[col]) || 0), 0);
|
||||
const ytdBudget = ytdCols.reduce((sum: number, col: string) => sum + (parseFloat(b[col]) || 0), 0);
|
||||
const qActual = actualsByIdQ.get(b.account_id) || 0;
|
||||
const ytdActual = actualsByIdYTD.get(b.account_id) || 0;
|
||||
|
||||
if (qBudget === 0 && ytdBudget === 0 && qActual === 0 && ytdActual === 0) continue;
|
||||
|
||||
const qVariance = qActual - qBudget;
|
||||
const ytdVariance = ytdActual - ytdBudget;
|
||||
const isExpense = b.account_type === 'expense';
|
||||
|
||||
const item = {
|
||||
account_id: b.account_id,
|
||||
account_number: b.account_number,
|
||||
name: b.name,
|
||||
account_type: b.account_type,
|
||||
fund_type: b.fund_type,
|
||||
quarter_budget: qBudget,
|
||||
quarter_actual: qActual,
|
||||
quarter_variance: qVariance,
|
||||
ytd_budget: ytdBudget,
|
||||
ytd_actual: ytdActual,
|
||||
ytd_variance: ytdVariance,
|
||||
};
|
||||
budgetVsActual.push(item);
|
||||
|
||||
// Flag expenses over budget by more than 10%
|
||||
if (isExpense && qBudget > 0 && qActual > qBudget * 1.1) {
|
||||
overBudgetItems.push({
|
||||
...item,
|
||||
variance_pct: ((qActual / qBudget - 1) * 100).toFixed(1),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Also include accounts with actuals but no budget
|
||||
for (const a of quarterActuals) {
|
||||
if (!budgetRows.find((b: any) => b.account_id === a.account_id)) {
|
||||
const ytdActual = actualsByIdYTD.get(a.account_id) || 0;
|
||||
budgetVsActual.push({
|
||||
account_id: a.account_id,
|
||||
account_number: a.account_number,
|
||||
name: a.name,
|
||||
account_type: a.account_type,
|
||||
fund_type: a.fund_type,
|
||||
quarter_budget: 0,
|
||||
quarter_actual: parseFloat(a.amount) || 0,
|
||||
quarter_variance: parseFloat(a.amount) || 0,
|
||||
ytd_budget: 0,
|
||||
ytd_actual: ytdActual,
|
||||
ytd_variance: ytdActual,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Sort: income first, then expenses, both by account number
|
||||
budgetVsActual.sort((a: any, b: any) => {
|
||||
if (a.account_type !== b.account_type) return a.account_type === 'income' ? -1 : 1;
|
||||
return (a.account_number || '').localeCompare(b.account_number || '');
|
||||
});
|
||||
|
||||
return {
|
||||
year,
|
||||
quarter,
|
||||
quarter_label: `Q${quarter} ${year}`,
|
||||
date_range: { from: qStart, to: qEnd },
|
||||
quarter_income_statement: quarterIS,
|
||||
ytd_income_statement: ytdIS,
|
||||
budget_vs_actual: budgetVsActual,
|
||||
over_budget_items: overBudgetItems,
|
||||
};
|
||||
}
|
||||
|
||||
async getCapitalPlanningReport(startYear?: number) {
|
||||
const baseYear = startYear || new Date().getFullYear();
|
||||
const years = [baseYear, baseYear + 1, baseYear + 2, baseYear + 3, baseYear + 4];
|
||||
|
||||
// Get all active projects
|
||||
const projects = await this.tenant.query(
|
||||
`SELECT id, name, description, category, estimated_cost, target_year, target_month,
|
||||
useful_life_years, last_replacement_date, next_replacement_date, fund_source,
|
||||
status, priority, condition_rating
|
||||
FROM projects
|
||||
WHERE is_active = true
|
||||
ORDER BY category NULLS LAST, priority, name`,
|
||||
);
|
||||
|
||||
// Also try capital_projects table
|
||||
let capitalProjects: any[] = [];
|
||||
try {
|
||||
capitalProjects = await this.tenant.query(
|
||||
`SELECT id, name, description, estimated_cost, target_year, target_month,
|
||||
fund_source, status, priority, notes
|
||||
FROM capital_projects
|
||||
WHERE status NOT IN ('cancelled')
|
||||
ORDER BY priority, name`,
|
||||
);
|
||||
} catch {
|
||||
// Table may not exist
|
||||
}
|
||||
|
||||
// Merge and group by category
|
||||
const allProjects = [
|
||||
...projects.map((p: any) => ({
|
||||
id: p.id,
|
||||
name: p.name,
|
||||
description: p.description,
|
||||
category: p.category || 'Uncategorized',
|
||||
estimated_cost: parseFloat(p.estimated_cost) || 0,
|
||||
target_year: parseInt(p.target_year) || null,
|
||||
useful_life_years: parseInt(p.useful_life_years) || null,
|
||||
last_replacement_date: p.last_replacement_date,
|
||||
fund_source: p.fund_source || 'reserve',
|
||||
status: p.status,
|
||||
priority: parseInt(p.priority) || 3,
|
||||
condition_rating: parseInt(p.condition_rating) || null,
|
||||
})),
|
||||
...capitalProjects
|
||||
.filter((cp: any) => !projects.some((p: any) => p.name === cp.name && p.target_year === cp.target_year))
|
||||
.map((cp: any) => ({
|
||||
id: cp.id,
|
||||
name: cp.name,
|
||||
description: cp.description,
|
||||
category: 'Capital Projects',
|
||||
estimated_cost: parseFloat(cp.estimated_cost) || 0,
|
||||
target_year: parseInt(cp.target_year) || null,
|
||||
useful_life_years: null,
|
||||
last_replacement_date: null,
|
||||
fund_source: cp.fund_source || 'reserve',
|
||||
status: cp.status,
|
||||
priority: parseInt(cp.priority) || 3,
|
||||
condition_rating: null,
|
||||
})),
|
||||
];
|
||||
|
||||
// Group by category
|
||||
const categories: Record<string, any[]> = {};
|
||||
for (const project of allProjects) {
|
||||
const cat = project.category;
|
||||
if (!categories[cat]) categories[cat] = [];
|
||||
categories[cat].push(project);
|
||||
}
|
||||
|
||||
// Build year columns for each project
|
||||
const categoryData = Object.entries(categories).map(([category, items]) => ({
|
||||
category,
|
||||
projects: items.map((p) => {
|
||||
const yearAmounts: Record<number, number> = {};
|
||||
let beyond = 0;
|
||||
if (p.target_year) {
|
||||
if (p.target_year >= years[0] && p.target_year <= years[4]) {
|
||||
yearAmounts[p.target_year] = p.estimated_cost;
|
||||
} else if (p.target_year > years[4]) {
|
||||
beyond = p.estimated_cost;
|
||||
}
|
||||
}
|
||||
return {
|
||||
...p,
|
||||
year_amounts: yearAmounts,
|
||||
beyond,
|
||||
};
|
||||
}),
|
||||
}));
|
||||
|
||||
// Compute totals per year
|
||||
const yearTotals: Record<number, number> = {};
|
||||
let beyondTotal = 0;
|
||||
for (const y of years) yearTotals[y] = 0;
|
||||
for (const cat of categoryData) {
|
||||
for (const p of cat.projects) {
|
||||
for (const y of years) {
|
||||
yearTotals[y] += p.year_amounts[y] || 0;
|
||||
}
|
||||
beyondTotal += p.beyond;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
title: `${years[4] - years[0] + 1}-YEAR CAPITAL PROJECT FORECAST`,
|
||||
start_year: years[0],
|
||||
years,
|
||||
categories: categoryData,
|
||||
year_totals: yearTotals,
|
||||
beyond_total: beyondTotal,
|
||||
grand_total: Object.values(yearTotals).reduce((a, b) => a + b, 0) + beyondTotal,
|
||||
generated_at: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,6 +46,12 @@ export class User {
|
||||
@Column({ name: 'is_superadmin', default: false })
|
||||
isSuperadmin: boolean;
|
||||
|
||||
@Column({ name: 'is_platform_owner', default: false })
|
||||
isPlatformOwner: boolean;
|
||||
|
||||
@Column({ name: 'has_seen_intro', default: false })
|
||||
hasSeenIntro: boolean;
|
||||
|
||||
@Column({ name: 'last_login_at', type: 'timestamptz', nullable: true })
|
||||
lastLoginAt: Date;
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Injectable, ForbiddenException } from '@nestjs/common';
|
||||
import { InjectRepository } from '@nestjs/typeorm';
|
||||
import { Repository } from 'typeorm';
|
||||
import { User } from './entities/user.entity';
|
||||
@@ -50,13 +50,23 @@ export class UsersService {
|
||||
const dataSource = this.usersRepository.manager.connection;
|
||||
return dataSource.query(`
|
||||
SELECT o.*,
|
||||
(SELECT COUNT(*) FROM shared.user_organizations WHERE organization_id = o.id) as member_count
|
||||
(SELECT COUNT(*) FROM shared.user_organizations WHERE organization_id = o.id) as member_count,
|
||||
(SELECT MAX(lh.logged_in_at) FROM shared.login_history lh WHERE lh.organization_id = o.id) as last_activity
|
||||
FROM shared.organizations o
|
||||
ORDER BY o.created_at DESC
|
||||
`);
|
||||
}
|
||||
|
||||
async markIntroSeen(id: string): Promise<void> {
|
||||
await this.usersRepository.update(id, { hasSeenIntro: true });
|
||||
}
|
||||
|
||||
async setSuperadmin(userId: string, isSuperadmin: boolean): Promise<void> {
|
||||
// Protect platform owner from having superadmin removed
|
||||
const user = await this.usersRepository.findOne({ where: { id: userId } });
|
||||
if (user?.isPlatformOwner) {
|
||||
throw new ForbiddenException('Cannot modify platform owner superadmin status');
|
||||
}
|
||||
await this.usersRepository.update(userId, { isSuperadmin });
|
||||
}
|
||||
}
|
||||
|
||||
27
backend/src/modules/vendors/vendors.service.ts
vendored
27
backend/src/modules/vendors/vendors.service.ts
vendored
@@ -17,10 +17,10 @@ export class VendorsService {
|
||||
|
||||
async create(dto: any) {
|
||||
const rows = await this.tenant.query(
|
||||
`INSERT INTO vendors (name, contact_name, email, phone, address_line1, city, state, zip_code, tax_id, is_1099_eligible, default_account_id)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) RETURNING *`,
|
||||
`INSERT INTO vendors (name, contact_name, email, phone, address_line1, city, state, zip_code, tax_id, is_1099_eligible, default_account_id, last_negotiated)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) RETURNING *`,
|
||||
[dto.name, dto.contact_name, dto.email, dto.phone, dto.address_line1, dto.city, dto.state, dto.zip_code,
|
||||
dto.tax_id, dto.is_1099_eligible || false, dto.default_account_id || null],
|
||||
dto.tax_id, dto.is_1099_eligible || false, dto.default_account_id || null, dto.last_negotiated || null],
|
||||
);
|
||||
return rows[0];
|
||||
}
|
||||
@@ -32,24 +32,25 @@ export class VendorsService {
|
||||
email = COALESCE($4, email), phone = COALESCE($5, phone), address_line1 = COALESCE($6, address_line1),
|
||||
city = COALESCE($7, city), state = COALESCE($8, state), zip_code = COALESCE($9, zip_code),
|
||||
tax_id = COALESCE($10, tax_id), is_1099_eligible = COALESCE($11, is_1099_eligible),
|
||||
default_account_id = COALESCE($12, default_account_id), updated_at = NOW()
|
||||
default_account_id = COALESCE($12, default_account_id), last_negotiated = $13, updated_at = NOW()
|
||||
WHERE id = $1 RETURNING *`,
|
||||
[id, dto.name, dto.contact_name, dto.email, dto.phone, dto.address_line1, dto.city, dto.state,
|
||||
dto.zip_code, dto.tax_id, dto.is_1099_eligible, dto.default_account_id],
|
||||
dto.zip_code, dto.tax_id, dto.is_1099_eligible, dto.default_account_id, dto.last_negotiated || null],
|
||||
);
|
||||
return rows[0];
|
||||
}
|
||||
|
||||
async exportCSV(): Promise<string> {
|
||||
const rows = await this.tenant.query(
|
||||
`SELECT name, contact_name, email, phone, address_line1, city, state, zip_code, tax_id, is_1099_eligible
|
||||
`SELECT name, contact_name, email, phone, address_line1, city, state, zip_code, tax_id, is_1099_eligible, last_negotiated
|
||||
FROM vendors WHERE is_active = true ORDER BY name`,
|
||||
);
|
||||
const headers = ['name', 'contact_name', 'email', 'phone', 'address_line1', 'city', 'state', 'zip_code', 'tax_id', 'is_1099_eligible'];
|
||||
const headers = ['name', 'contact_name', 'email', 'phone', 'address_line1', 'city', 'state', 'zip_code', 'tax_id', 'is_1099_eligible', 'last_negotiated'];
|
||||
const lines = [headers.join(',')];
|
||||
for (const r of rows) {
|
||||
lines.push(headers.map((h) => {
|
||||
const v = r[h] ?? '';
|
||||
let v = r[h] ?? '';
|
||||
if (v instanceof Date) v = v.toISOString().split('T')[0];
|
||||
const s = String(v);
|
||||
return s.includes(',') || s.includes('"') ? `"${s.replace(/"/g, '""')}"` : s;
|
||||
}).join(','));
|
||||
@@ -80,20 +81,22 @@ export class VendorsService {
|
||||
zip_code = COALESCE(NULLIF($8, ''), zip_code),
|
||||
tax_id = COALESCE(NULLIF($9, ''), tax_id),
|
||||
is_1099_eligible = COALESCE(NULLIF($10, '')::boolean, is_1099_eligible),
|
||||
last_negotiated = COALESCE(NULLIF($11, '')::date, last_negotiated),
|
||||
updated_at = NOW()
|
||||
WHERE id = $1`,
|
||||
[existing[0].id, row.contact_name, row.email, row.phone, row.address_line1,
|
||||
row.city, row.state, row.zip_code, row.tax_id, row.is_1099_eligible],
|
||||
row.city, row.state, row.zip_code, row.tax_id, row.is_1099_eligible, row.last_negotiated],
|
||||
);
|
||||
updated++;
|
||||
} else {
|
||||
await this.tenant.query(
|
||||
`INSERT INTO vendors (name, contact_name, email, phone, address_line1, city, state, zip_code, tax_id, is_1099_eligible)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
|
||||
`INSERT INTO vendors (name, contact_name, email, phone, address_line1, city, state, zip_code, tax_id, is_1099_eligible, last_negotiated)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)`,
|
||||
[name, row.contact_name || null, row.email || null, row.phone || null,
|
||||
row.address_line1 || null, row.city || null, row.state || null,
|
||||
row.zip_code || null, row.tax_id || null,
|
||||
row.is_1099_eligible === 'true' || row.is_1099_eligible === true || false],
|
||||
row.is_1099_eligible === 'true' || row.is_1099_eligible === true || false,
|
||||
row.last_negotiated || null],
|
||||
);
|
||||
created++;
|
||||
}
|
||||
|
||||
@@ -26,6 +26,9 @@ CREATE TABLE shared.organizations (
|
||||
email VARCHAR(255),
|
||||
tax_id VARCHAR(20),
|
||||
fiscal_year_start_month INTEGER DEFAULT 1 CHECK (fiscal_year_start_month BETWEEN 1 AND 12),
|
||||
payment_date DATE,
|
||||
confirmation_number VARCHAR(100),
|
||||
renewal_date DATE,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
@@ -45,6 +48,7 @@ CREATE TABLE shared.users (
|
||||
oauth_provider_id VARCHAR(255),
|
||||
last_login_at TIMESTAMPTZ,
|
||||
is_superadmin BOOLEAN DEFAULT FALSE,
|
||||
is_platform_owner BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
@@ -73,6 +77,43 @@ CREATE TABLE shared.invitations (
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Market Rates (cross-tenant market data for investment recommendations)
|
||||
-- Supports CD, Money Market, and High Yield Savings rate types
|
||||
CREATE TABLE shared.cd_rates (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
bank_name VARCHAR(255) NOT NULL,
|
||||
apy DECIMAL(6,4) NOT NULL,
|
||||
min_deposit DECIMAL(15,2),
|
||||
term VARCHAR(100) NOT NULL,
|
||||
term_months INTEGER,
|
||||
rate_type VARCHAR(50) NOT NULL DEFAULT 'cd',
|
||||
fetched_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
source_url VARCHAR(500),
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Login history (track logins/org-switches for platform analytics)
|
||||
CREATE TABLE shared.login_history (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
user_id UUID NOT NULL REFERENCES shared.users(id) ON DELETE CASCADE,
|
||||
organization_id UUID REFERENCES shared.organizations(id) ON DELETE SET NULL,
|
||||
logged_in_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
ip_address VARCHAR(45),
|
||||
user_agent TEXT
|
||||
);
|
||||
|
||||
-- AI recommendation log (track AI usage per tenant)
|
||||
CREATE TABLE shared.ai_recommendation_log (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
tenant_schema VARCHAR(63),
|
||||
organization_id UUID REFERENCES shared.organizations(id) ON DELETE SET NULL,
|
||||
user_id UUID REFERENCES shared.users(id) ON DELETE SET NULL,
|
||||
recommendation_count INTEGER,
|
||||
response_time_ms INTEGER,
|
||||
status VARCHAR(20),
|
||||
requested_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Indexes
|
||||
CREATE INDEX idx_user_orgs_user ON shared.user_organizations(user_id);
|
||||
CREATE INDEX idx_user_orgs_org ON shared.user_organizations(organization_id);
|
||||
@@ -80,3 +121,12 @@ CREATE INDEX idx_users_email ON shared.users(email);
|
||||
CREATE INDEX idx_orgs_schema ON shared.organizations(schema_name);
|
||||
CREATE INDEX idx_invitations_token ON shared.invitations(token);
|
||||
CREATE INDEX idx_invitations_email ON shared.invitations(email);
|
||||
CREATE INDEX idx_cd_rates_fetched ON shared.cd_rates(fetched_at DESC);
|
||||
CREATE INDEX idx_cd_rates_apy ON shared.cd_rates(apy DESC);
|
||||
CREATE INDEX idx_cd_rates_type ON shared.cd_rates(rate_type);
|
||||
CREATE INDEX idx_cd_rates_type_fetched ON shared.cd_rates(rate_type, fetched_at DESC);
|
||||
CREATE INDEX idx_login_history_org_time ON shared.login_history(organization_id, logged_in_at DESC);
|
||||
CREATE INDEX idx_login_history_user ON shared.login_history(user_id);
|
||||
CREATE INDEX idx_login_history_time ON shared.login_history(logged_in_at DESC);
|
||||
CREATE INDEX idx_ai_rec_log_org ON shared.ai_recommendation_log(organization_id);
|
||||
CREATE INDEX idx_ai_rec_log_time ON shared.ai_recommendation_log(requested_at DESC);
|
||||
|
||||
17
db/migrations/005-cd-rates.sql
Normal file
17
db/migrations/005-cd-rates.sql
Normal file
@@ -0,0 +1,17 @@
|
||||
-- Migration: Add CD rates table to shared schema
|
||||
-- For existing deployments that already have the shared schema initialized
|
||||
|
||||
CREATE TABLE IF NOT EXISTS shared.cd_rates (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
bank_name VARCHAR(255) NOT NULL,
|
||||
apy DECIMAL(6,4) NOT NULL,
|
||||
min_deposit DECIMAL(15,2),
|
||||
term VARCHAR(100) NOT NULL,
|
||||
term_months INTEGER,
|
||||
fetched_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
source_url VARCHAR(500),
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_cd_rates_fetched ON shared.cd_rates(fetched_at DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_cd_rates_apy ON shared.cd_rates(apy DESC);
|
||||
52
db/migrations/006-admin-platform.sql
Normal file
52
db/migrations/006-admin-platform.sql
Normal file
@@ -0,0 +1,52 @@
|
||||
-- ============================================================
|
||||
-- Migration 006: Platform Administration Features
|
||||
-- Adds: is_platform_owner, subscription fields, login_history, ai_recommendation_log
|
||||
-- ============================================================
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- 1. Add is_platform_owner to users
|
||||
ALTER TABLE shared.users
|
||||
ADD COLUMN IF NOT EXISTS is_platform_owner BOOLEAN DEFAULT FALSE;
|
||||
|
||||
-- 2. Add subscription fields to organizations
|
||||
ALTER TABLE shared.organizations
|
||||
ADD COLUMN IF NOT EXISTS payment_date DATE,
|
||||
ADD COLUMN IF NOT EXISTS confirmation_number VARCHAR(100),
|
||||
ADD COLUMN IF NOT EXISTS renewal_date DATE;
|
||||
|
||||
-- 3. Create login_history table
|
||||
CREATE TABLE IF NOT EXISTS shared.login_history (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
user_id UUID NOT NULL REFERENCES shared.users(id) ON DELETE CASCADE,
|
||||
organization_id UUID REFERENCES shared.organizations(id) ON DELETE SET NULL,
|
||||
logged_in_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
ip_address VARCHAR(45),
|
||||
user_agent TEXT
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_login_history_org_time
|
||||
ON shared.login_history(organization_id, logged_in_at DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_login_history_user
|
||||
ON shared.login_history(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_login_history_time
|
||||
ON shared.login_history(logged_in_at DESC);
|
||||
|
||||
-- 4. Create ai_recommendation_log table
|
||||
CREATE TABLE IF NOT EXISTS shared.ai_recommendation_log (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
tenant_schema VARCHAR(63),
|
||||
organization_id UUID REFERENCES shared.organizations(id) ON DELETE SET NULL,
|
||||
user_id UUID REFERENCES shared.users(id) ON DELETE SET NULL,
|
||||
recommendation_count INTEGER,
|
||||
response_time_ms INTEGER,
|
||||
status VARCHAR(20),
|
||||
requested_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_ai_rec_log_org
|
||||
ON shared.ai_recommendation_log(organization_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_ai_rec_log_time
|
||||
ON shared.ai_recommendation_log(requested_at DESC);
|
||||
|
||||
COMMIT;
|
||||
36
db/migrations/007-market-rates.sql
Normal file
36
db/migrations/007-market-rates.sql
Normal file
@@ -0,0 +1,36 @@
|
||||
-- Migration: Expand cd_rates for multiple market rate types + tenant AI recommendation storage
|
||||
-- Phase 6: AI Features Part 2
|
||||
|
||||
-- 1) Add rate_type column to shared.cd_rates to support CD, Money Market, and High Yield Savings
|
||||
ALTER TABLE shared.cd_rates
|
||||
ADD COLUMN IF NOT EXISTS rate_type VARCHAR(50) DEFAULT 'cd' NOT NULL;
|
||||
|
||||
-- Index for filtering by rate type
|
||||
CREATE INDEX IF NOT EXISTS idx_cd_rates_type ON shared.cd_rates(rate_type);
|
||||
|
||||
-- Composite index for getting latest rates by type efficiently
|
||||
CREATE INDEX IF NOT EXISTS idx_cd_rates_type_fetched ON shared.cd_rates(rate_type, fetched_at DESC);
|
||||
|
||||
-- 2) Create ai_recommendations table in each existing tenant schema
|
||||
-- This stores saved AI investment recommendations per tenant
|
||||
-- For new tenants, this is handled by tenant-schema.service.ts
|
||||
DO $$
|
||||
DECLARE
|
||||
tenant_schema TEXT;
|
||||
BEGIN
|
||||
FOR tenant_schema IN
|
||||
SELECT schema_name FROM shared.organizations WHERE schema_name IS NOT NULL
|
||||
LOOP
|
||||
EXECUTE format(
|
||||
'CREATE TABLE IF NOT EXISTS %I.ai_recommendations (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
recommendations_json JSONB NOT NULL,
|
||||
overall_assessment TEXT,
|
||||
risk_notes JSONB,
|
||||
requested_by UUID,
|
||||
response_time_ms INTEGER,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)', tenant_schema
|
||||
);
|
||||
END LOOP;
|
||||
END $$;
|
||||
16
db/migrations/008-vendor-last-negotiated.sql
Normal file
16
db/migrations/008-vendor-last-negotiated.sql
Normal file
@@ -0,0 +1,16 @@
|
||||
-- Migration: Add last_negotiated date to vendors table
|
||||
-- Bug & Tweak Sprint
|
||||
|
||||
DO $$
|
||||
DECLARE
|
||||
tenant_schema TEXT;
|
||||
BEGIN
|
||||
FOR tenant_schema IN
|
||||
SELECT schema_name FROM shared.organizations WHERE schema_name IS NOT NULL
|
||||
LOOP
|
||||
EXECUTE format(
|
||||
'ALTER TABLE %I.vendors ADD COLUMN IF NOT EXISTS last_negotiated DATE',
|
||||
tenant_schema
|
||||
);
|
||||
END LOOP;
|
||||
END $$;
|
||||
9
db/migrations/009-onboarding-flags.sql
Normal file
9
db/migrations/009-onboarding-flags.sql
Normal file
@@ -0,0 +1,9 @@
|
||||
-- Migration: Add onboarding tracking flag to users table
|
||||
-- Phase 7: Onboarding Features
|
||||
|
||||
BEGIN;
|
||||
|
||||
ALTER TABLE shared.users
|
||||
ADD COLUMN IF NOT EXISTS has_seen_intro BOOLEAN DEFAULT FALSE;
|
||||
|
||||
COMMIT;
|
||||
34
db/migrations/010-health-scores.sql
Normal file
34
db/migrations/010-health-scores.sql
Normal file
@@ -0,0 +1,34 @@
|
||||
-- Migration: Add health_scores table to all tenant schemas
|
||||
-- This table stores AI-derived operating and reserve fund health scores
|
||||
|
||||
DO $$
|
||||
DECLARE
|
||||
tenant RECORD;
|
||||
BEGIN
|
||||
FOR tenant IN
|
||||
SELECT schema_name FROM shared.organizations WHERE status = 'active'
|
||||
LOOP
|
||||
EXECUTE format(
|
||||
'CREATE TABLE IF NOT EXISTS %I.health_scores (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
score_type VARCHAR(20) NOT NULL CHECK (score_type IN (''operating'', ''reserve'')),
|
||||
score INTEGER NOT NULL CHECK (score >= 0 AND score <= 100),
|
||||
previous_score INTEGER,
|
||||
trajectory VARCHAR(20) CHECK (trajectory IN (''improving'', ''stable'', ''declining'')),
|
||||
label VARCHAR(30),
|
||||
summary TEXT,
|
||||
factors JSONB,
|
||||
recommendations JSONB,
|
||||
missing_data JSONB,
|
||||
status VARCHAR(20) NOT NULL DEFAULT ''complete'' CHECK (status IN (''complete'', ''pending'', ''error'')),
|
||||
response_time_ms INTEGER,
|
||||
calculated_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)', tenant.schema_name
|
||||
);
|
||||
EXECUTE format(
|
||||
'CREATE INDEX IF NOT EXISTS idx_%s_hs_type_calc ON %I.health_scores(score_type, calculated_at DESC)',
|
||||
replace(tenant.schema_name, '.', '_'), tenant.schema_name
|
||||
);
|
||||
END LOOP;
|
||||
END $$;
|
||||
57
db/migrations/011-invoice-billing-frequency.sql
Normal file
57
db/migrations/011-invoice-billing-frequency.sql
Normal file
@@ -0,0 +1,57 @@
|
||||
-- Migration 011: Add billing frequency support to invoices
|
||||
-- Adds due_months and due_day to assessment_groups
|
||||
-- Adds period_start, period_end, assessment_group_id to invoices
|
||||
|
||||
DO $$
|
||||
DECLARE
|
||||
v_schema TEXT;
|
||||
BEGIN
|
||||
FOR v_schema IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'tenant_%'
|
||||
LOOP
|
||||
-- Add due_months and due_day to assessment_groups
|
||||
EXECUTE format('
|
||||
ALTER TABLE %I.assessment_groups
|
||||
ADD COLUMN IF NOT EXISTS due_months INTEGER[] DEFAULT ''{1,2,3,4,5,6,7,8,9,10,11,12}'',
|
||||
ADD COLUMN IF NOT EXISTS due_day INTEGER DEFAULT 1
|
||||
', v_schema);
|
||||
|
||||
-- Add period tracking and assessment group link to invoices
|
||||
EXECUTE format('
|
||||
ALTER TABLE %I.invoices
|
||||
ADD COLUMN IF NOT EXISTS period_start DATE,
|
||||
ADD COLUMN IF NOT EXISTS period_end DATE,
|
||||
ADD COLUMN IF NOT EXISTS assessment_group_id UUID
|
||||
', v_schema);
|
||||
|
||||
-- Backfill due_months based on existing frequency values
|
||||
EXECUTE format('
|
||||
UPDATE %I.assessment_groups
|
||||
SET due_months = CASE frequency
|
||||
WHEN ''quarterly'' THEN ''{1,4,7,10}''::INTEGER[]
|
||||
WHEN ''annual'' THEN ''{1}''::INTEGER[]
|
||||
ELSE ''{1,2,3,4,5,6,7,8,9,10,11,12}''::INTEGER[]
|
||||
END
|
||||
WHERE due_months IS NULL OR due_months = ''{1,2,3,4,5,6,7,8,9,10,11,12}''
|
||||
AND frequency != ''monthly''
|
||||
', v_schema);
|
||||
|
||||
-- Backfill period_start/period_end for existing invoices (all monthly)
|
||||
EXECUTE format('
|
||||
UPDATE %I.invoices
|
||||
SET period_start = invoice_date,
|
||||
period_end = (invoice_date + INTERVAL ''1 month'' - INTERVAL ''1 day'')::DATE
|
||||
WHERE period_start IS NULL AND invoice_type = ''regular_assessment''
|
||||
', v_schema);
|
||||
|
||||
-- Backfill assessment_group_id on existing invoices from units
|
||||
EXECUTE format('
|
||||
UPDATE %I.invoices i
|
||||
SET assessment_group_id = u.assessment_group_id
|
||||
FROM %I.units u
|
||||
WHERE i.unit_id = u.id AND i.assessment_group_id IS NULL
|
||||
', v_schema, v_schema);
|
||||
|
||||
END LOOP;
|
||||
END $$;
|
||||
33
db/migrations/012-invoice-status-pending.sql
Normal file
33
db/migrations/012-invoice-status-pending.sql
Normal file
@@ -0,0 +1,33 @@
|
||||
-- Migration 012: Replace 'sent' status with 'pending' for invoices
|
||||
-- 'sent' implied email delivery which doesn't exist; 'pending' is more accurate
|
||||
|
||||
DO $$
|
||||
DECLARE
|
||||
v_schema TEXT;
|
||||
v_constraint TEXT;
|
||||
BEGIN
|
||||
FOR v_schema IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'tenant_%'
|
||||
LOOP
|
||||
-- Find and drop the existing status check constraint
|
||||
SELECT constraint_name INTO v_constraint
|
||||
FROM information_schema.table_constraints
|
||||
WHERE table_schema = v_schema
|
||||
AND table_name = 'invoices'
|
||||
AND constraint_type = 'CHECK'
|
||||
AND constraint_name LIKE '%status%';
|
||||
|
||||
IF v_constraint IS NOT NULL THEN
|
||||
EXECUTE format('ALTER TABLE %I.invoices DROP CONSTRAINT %I', v_schema, v_constraint);
|
||||
END IF;
|
||||
|
||||
-- Add new constraint that includes 'pending'
|
||||
EXECUTE format('ALTER TABLE %I.invoices ADD CONSTRAINT invoices_status_check CHECK (status IN (
|
||||
''draft'', ''pending'', ''sent'', ''paid'', ''partial'', ''overdue'', ''void'', ''written_off''
|
||||
))', v_schema);
|
||||
|
||||
-- Convert existing 'sent' invoices to 'pending'
|
||||
EXECUTE format('UPDATE %I.invoices SET status = ''pending'' WHERE status = ''sent''', v_schema);
|
||||
END LOOP;
|
||||
END $$;
|
||||
83
db/migrations/013-board-planning.sql
Normal file
83
db/migrations/013-board-planning.sql
Normal file
@@ -0,0 +1,83 @@
|
||||
-- Migration 013: Board Planning tables (scenarios, investments, assessments)
|
||||
-- Applies to all existing tenant schemas
|
||||
|
||||
DO $$
|
||||
DECLARE
|
||||
tenant_schema TEXT;
|
||||
BEGIN
|
||||
FOR tenant_schema IN
|
||||
SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name LIKE 'tenant_%'
|
||||
LOOP
|
||||
-- Board Scenarios
|
||||
EXECUTE format('
|
||||
CREATE TABLE IF NOT EXISTS %I.board_scenarios (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
name VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
scenario_type VARCHAR(30) NOT NULL CHECK (scenario_type IN (''investment'', ''assessment'')),
|
||||
status VARCHAR(20) DEFAULT ''draft'' CHECK (status IN (''draft'', ''active'', ''approved'', ''archived'')),
|
||||
projection_months INTEGER DEFAULT 36,
|
||||
projection_cache JSONB,
|
||||
projection_cached_at TIMESTAMPTZ,
|
||||
created_by UUID NOT NULL,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)', tenant_schema);
|
||||
|
||||
-- Scenario Investments
|
||||
EXECUTE format('
|
||||
CREATE TABLE IF NOT EXISTS %I.scenario_investments (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
scenario_id UUID NOT NULL REFERENCES %I.board_scenarios(id) ON DELETE CASCADE,
|
||||
source_recommendation_id UUID,
|
||||
label VARCHAR(255) NOT NULL,
|
||||
investment_type VARCHAR(50) CHECK (investment_type IN (''cd'', ''money_market'', ''treasury'', ''savings'', ''other'')),
|
||||
fund_type VARCHAR(20) NOT NULL CHECK (fund_type IN (''operating'', ''reserve'')),
|
||||
principal DECIMAL(15,2) NOT NULL,
|
||||
interest_rate DECIMAL(6,4),
|
||||
term_months INTEGER,
|
||||
institution VARCHAR(255),
|
||||
purchase_date DATE,
|
||||
maturity_date DATE,
|
||||
auto_renew BOOLEAN DEFAULT FALSE,
|
||||
executed_investment_id UUID,
|
||||
notes TEXT,
|
||||
sort_order INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)', tenant_schema, tenant_schema);
|
||||
|
||||
-- Scenario Assessments
|
||||
EXECUTE format('
|
||||
CREATE TABLE IF NOT EXISTS %I.scenario_assessments (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
scenario_id UUID NOT NULL REFERENCES %I.board_scenarios(id) ON DELETE CASCADE,
|
||||
change_type VARCHAR(30) NOT NULL CHECK (change_type IN (''dues_increase'', ''special_assessment'', ''dues_decrease'')),
|
||||
label VARCHAR(255) NOT NULL,
|
||||
target_fund VARCHAR(20) CHECK (target_fund IN (''operating'', ''reserve'', ''both'')),
|
||||
percentage_change DECIMAL(6,3),
|
||||
flat_amount_change DECIMAL(10,2),
|
||||
special_total DECIMAL(15,2),
|
||||
special_per_unit DECIMAL(10,2),
|
||||
special_installments INTEGER DEFAULT 1,
|
||||
effective_date DATE NOT NULL,
|
||||
end_date DATE,
|
||||
applies_to_group_id UUID,
|
||||
notes TEXT,
|
||||
sort_order INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)', tenant_schema, tenant_schema);
|
||||
|
||||
-- Indexes
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS idx_%s_bs_type_status ON %I.board_scenarios(scenario_type, status)',
|
||||
replace(tenant_schema, '.', '_'), tenant_schema);
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS idx_%s_si_scenario ON %I.scenario_investments(scenario_id)',
|
||||
replace(tenant_schema, '.', '_'), tenant_schema);
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS idx_%s_sa_scenario ON %I.scenario_assessments(scenario_id)',
|
||||
replace(tenant_schema, '.', '_'), tenant_schema);
|
||||
|
||||
RAISE NOTICE 'Board planning tables created for schema: %', tenant_schema;
|
||||
END LOOP;
|
||||
END $$;
|
||||
54
db/migrations/014-budget-planning.sql
Normal file
54
db/migrations/014-budget-planning.sql
Normal file
@@ -0,0 +1,54 @@
|
||||
-- Migration: Add budget_plans and budget_plan_lines tables to all tenant schemas
|
||||
DO $migration$
|
||||
DECLARE
|
||||
s TEXT;
|
||||
BEGIN
|
||||
FOR s IN
|
||||
SELECT schema_name FROM information_schema.schemata WHERE schema_name LIKE 'tenant_%'
|
||||
LOOP
|
||||
-- budget_plans
|
||||
EXECUTE format('
|
||||
CREATE TABLE IF NOT EXISTS %I.budget_plans (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
fiscal_year INTEGER NOT NULL,
|
||||
status VARCHAR(20) NOT NULL DEFAULT ''planning'' CHECK (status IN (''planning'', ''approved'', ''ratified'')),
|
||||
base_year INTEGER NOT NULL,
|
||||
inflation_rate DECIMAL(5,2) NOT NULL DEFAULT 2.50,
|
||||
notes TEXT,
|
||||
created_by UUID,
|
||||
approved_by UUID,
|
||||
approved_at TIMESTAMPTZ,
|
||||
ratified_by UUID,
|
||||
ratified_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
UNIQUE(fiscal_year)
|
||||
)', s);
|
||||
|
||||
-- budget_plan_lines
|
||||
EXECUTE format('
|
||||
CREATE TABLE IF NOT EXISTS %I.budget_plan_lines (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
budget_plan_id UUID NOT NULL REFERENCES %I.budget_plans(id) ON DELETE CASCADE,
|
||||
account_id UUID NOT NULL REFERENCES %I.accounts(id),
|
||||
fund_type VARCHAR(20) NOT NULL CHECK (fund_type IN (''operating'', ''reserve'')),
|
||||
jan DECIMAL(12,2) DEFAULT 0, feb DECIMAL(12,2) DEFAULT 0,
|
||||
mar DECIMAL(12,2) DEFAULT 0, apr DECIMAL(12,2) DEFAULT 0,
|
||||
may DECIMAL(12,2) DEFAULT 0, jun DECIMAL(12,2) DEFAULT 0,
|
||||
jul DECIMAL(12,2) DEFAULT 0, aug DECIMAL(12,2) DEFAULT 0,
|
||||
sep DECIMAL(12,2) DEFAULT 0, oct DECIMAL(12,2) DEFAULT 0,
|
||||
nov DECIMAL(12,2) DEFAULT 0, dec_amt DECIMAL(12,2) DEFAULT 0,
|
||||
is_manually_adjusted BOOLEAN DEFAULT FALSE,
|
||||
notes TEXT,
|
||||
UNIQUE(budget_plan_id, account_id, fund_type)
|
||||
)', s, s, s);
|
||||
|
||||
-- Indexes
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS idx_%s_bp_year ON %I.budget_plans(fiscal_year)', replace(s, 'tenant_', ''), s);
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS idx_%s_bp_status ON %I.budget_plans(status)', replace(s, 'tenant_', ''), s);
|
||||
EXECUTE format('CREATE INDEX IF NOT EXISTS idx_%s_bpl_plan ON %I.budget_plan_lines(budget_plan_id)', replace(s, 'tenant_', ''), s);
|
||||
|
||||
RAISE NOTICE 'Migrated schema: %', s;
|
||||
END LOOP;
|
||||
END;
|
||||
$migration$;
|
||||
107
db/migrations/015-saas-onboarding-auth.sql
Normal file
107
db/migrations/015-saas-onboarding-auth.sql
Normal file
@@ -0,0 +1,107 @@
|
||||
-- Migration 015: SaaS Onboarding + Auth (Stripe, Refresh Tokens, MFA, SSO, Passkeys)
|
||||
-- Adds tables for refresh tokens, stripe event tracking, invite tokens,
|
||||
-- onboarding progress, and WebAuthn passkeys.
|
||||
|
||||
-- ============================================================================
|
||||
-- 1. Modify shared.organizations — add Stripe billing columns
|
||||
-- ============================================================================
|
||||
ALTER TABLE shared.organizations ADD COLUMN IF NOT EXISTS stripe_customer_id VARCHAR(255) UNIQUE;
|
||||
ALTER TABLE shared.organizations ADD COLUMN IF NOT EXISTS stripe_subscription_id VARCHAR(255) UNIQUE;
|
||||
ALTER TABLE shared.organizations ADD COLUMN IF NOT EXISTS trial_ends_at TIMESTAMPTZ;
|
||||
|
||||
-- Update plan_level CHECK constraint to include new SaaS plan tiers
|
||||
-- (Drop and re-add since ALTER CHECK is not supported in PG)
|
||||
ALTER TABLE shared.organizations DROP CONSTRAINT IF EXISTS organizations_plan_level_check;
|
||||
ALTER TABLE shared.organizations ADD CONSTRAINT organizations_plan_level_check
|
||||
CHECK (plan_level IN ('standard', 'premium', 'enterprise', 'starter', 'professional'));
|
||||
|
||||
-- ============================================================================
|
||||
-- 2. New table: shared.refresh_tokens
|
||||
-- ============================================================================
|
||||
CREATE TABLE IF NOT EXISTS shared.refresh_tokens (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
user_id UUID NOT NULL REFERENCES shared.users(id) ON DELETE CASCADE,
|
||||
token_hash VARCHAR(255) UNIQUE NOT NULL,
|
||||
expires_at TIMESTAMPTZ NOT NULL,
|
||||
revoked_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_refresh_tokens_user ON shared.refresh_tokens(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_refresh_tokens_hash ON shared.refresh_tokens(token_hash);
|
||||
CREATE INDEX IF NOT EXISTS idx_refresh_tokens_expires ON shared.refresh_tokens(expires_at);
|
||||
|
||||
-- ============================================================================
|
||||
-- 3. New table: shared.stripe_events (idempotency for webhook processing)
|
||||
-- ============================================================================
|
||||
CREATE TABLE IF NOT EXISTS shared.stripe_events (
|
||||
id VARCHAR(255) PRIMARY KEY,
|
||||
type VARCHAR(100) NOT NULL,
|
||||
processed_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
payload JSONB
|
||||
);
|
||||
|
||||
-- ============================================================================
|
||||
-- 4. New table: shared.invite_tokens (magic link activation)
|
||||
-- ============================================================================
|
||||
CREATE TABLE IF NOT EXISTS shared.invite_tokens (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
organization_id UUID NOT NULL REFERENCES shared.organizations(id) ON DELETE CASCADE,
|
||||
user_id UUID NOT NULL REFERENCES shared.users(id) ON DELETE CASCADE,
|
||||
token_hash VARCHAR(255) UNIQUE NOT NULL,
|
||||
expires_at TIMESTAMPTZ NOT NULL,
|
||||
used_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_invite_tokens_hash ON shared.invite_tokens(token_hash);
|
||||
CREATE INDEX IF NOT EXISTS idx_invite_tokens_user ON shared.invite_tokens(user_id);
|
||||
|
||||
-- ============================================================================
|
||||
-- 5. New table: shared.onboarding_progress
|
||||
-- ============================================================================
|
||||
CREATE TABLE IF NOT EXISTS shared.onboarding_progress (
|
||||
organization_id UUID PRIMARY KEY REFERENCES shared.organizations(id) ON DELETE CASCADE,
|
||||
completed_steps TEXT[] DEFAULT '{}',
|
||||
completed_at TIMESTAMPTZ,
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- ============================================================================
|
||||
-- 6. New table: shared.user_passkeys (WebAuthn)
|
||||
-- ============================================================================
|
||||
CREATE TABLE IF NOT EXISTS shared.user_passkeys (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
user_id UUID NOT NULL REFERENCES shared.users(id) ON DELETE CASCADE,
|
||||
credential_id TEXT UNIQUE NOT NULL,
|
||||
public_key TEXT NOT NULL,
|
||||
counter BIGINT DEFAULT 0,
|
||||
device_name VARCHAR(255),
|
||||
transports TEXT[],
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
last_used_at TIMESTAMPTZ
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_user_passkeys_user ON shared.user_passkeys(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_user_passkeys_cred ON shared.user_passkeys(credential_id);
|
||||
|
||||
-- ============================================================================
|
||||
-- 7. Modify shared.users — add MFA/WebAuthn columns
|
||||
-- ============================================================================
|
||||
ALTER TABLE shared.users ADD COLUMN IF NOT EXISTS totp_verified_at TIMESTAMPTZ;
|
||||
ALTER TABLE shared.users ADD COLUMN IF NOT EXISTS recovery_codes TEXT;
|
||||
ALTER TABLE shared.users ADD COLUMN IF NOT EXISTS webauthn_challenge TEXT;
|
||||
ALTER TABLE shared.users ADD COLUMN IF NOT EXISTS has_seen_intro BOOLEAN DEFAULT FALSE;
|
||||
|
||||
-- ============================================================================
|
||||
-- 8. Stubbed email log table (for development — replaces real email sends)
|
||||
-- ============================================================================
|
||||
CREATE TABLE IF NOT EXISTS shared.email_log (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
to_email VARCHAR(255) NOT NULL,
|
||||
subject VARCHAR(500) NOT NULL,
|
||||
body TEXT,
|
||||
template VARCHAR(100),
|
||||
metadata JSONB,
|
||||
sent_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
25
db/migrations/016-password-reset-tokens.sql
Normal file
25
db/migrations/016-password-reset-tokens.sql
Normal file
@@ -0,0 +1,25 @@
|
||||
-- Migration 016: Password Reset Tokens
|
||||
-- Adds table for password reset token storage (hashed, single-use, short-lived).
|
||||
|
||||
CREATE TABLE IF NOT EXISTS shared.password_reset_tokens (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
user_id UUID NOT NULL REFERENCES shared.users(id) ON DELETE CASCADE,
|
||||
token_hash VARCHAR(255) UNIQUE NOT NULL,
|
||||
expires_at TIMESTAMPTZ NOT NULL,
|
||||
used_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_password_reset_tokens_hash ON shared.password_reset_tokens(token_hash);
|
||||
CREATE INDEX IF NOT EXISTS idx_password_reset_tokens_user ON shared.password_reset_tokens(user_id);
|
||||
|
||||
-- Also ensure email_log table exists (may not exist if migration 015 hasn't been applied)
|
||||
CREATE TABLE IF NOT EXISTS shared.email_log (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
to_email VARCHAR(255) NOT NULL,
|
||||
subject VARCHAR(500) NOT NULL,
|
||||
body TEXT,
|
||||
template VARCHAR(100),
|
||||
metadata JSONB,
|
||||
sent_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
27
db/migrations/017-billing-enhancements.sql
Normal file
27
db/migrations/017-billing-enhancements.sql
Normal file
@@ -0,0 +1,27 @@
|
||||
-- Migration 017: Billing Enhancements
|
||||
-- Adds support for annual billing, free trials, ACH/invoice billing,
|
||||
-- and past_due grace period status.
|
||||
|
||||
-- ============================================================================
|
||||
-- 1. Add billing_interval column (month or year)
|
||||
-- ============================================================================
|
||||
ALTER TABLE shared.organizations ADD COLUMN IF NOT EXISTS billing_interval VARCHAR(20) DEFAULT 'month';
|
||||
|
||||
-- ============================================================================
|
||||
-- 2. Add collection_method column (charge_automatically or send_invoice)
|
||||
-- ============================================================================
|
||||
ALTER TABLE shared.organizations ADD COLUMN IF NOT EXISTS collection_method VARCHAR(20) DEFAULT 'charge_automatically';
|
||||
|
||||
-- ============================================================================
|
||||
-- 3. Update status CHECK to include 'past_due'
|
||||
-- ============================================================================
|
||||
ALTER TABLE shared.organizations DROP CONSTRAINT IF EXISTS organizations_status_check;
|
||||
ALTER TABLE shared.organizations ADD CONSTRAINT organizations_status_check
|
||||
CHECK (status IN ('active', 'suspended', 'trial', 'archived', 'past_due'));
|
||||
|
||||
-- ============================================================================
|
||||
-- 4. Ensure plan_level CHECK includes SaaS tiers (idempotent with 015)
|
||||
-- ============================================================================
|
||||
ALTER TABLE shared.organizations DROP CONSTRAINT IF EXISTS organizations_plan_level_check;
|
||||
ALTER TABLE shared.organizations ADD CONSTRAINT organizations_plan_level_check
|
||||
CHECK (plan_level IN ('standard', 'premium', 'enterprise', 'starter', 'professional'));
|
||||
15
db/migrations/018-ideas.sql
Normal file
15
db/migrations/018-ideas.sql
Normal file
@@ -0,0 +1,15 @@
|
||||
-- Ideation feature: shared ideas table for cross-tenant idea submissions
|
||||
CREATE TABLE IF NOT EXISTS shared.ideas (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
org_id UUID NOT NULL REFERENCES shared.organizations(id) ON DELETE CASCADE,
|
||||
user_id UUID NOT NULL REFERENCES shared.users(id) ON DELETE CASCADE,
|
||||
title VARCHAR(255) NOT NULL,
|
||||
description TEXT,
|
||||
status VARCHAR(20) NOT NULL DEFAULT 'new',
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_ideas_org_id ON shared.ideas(org_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_ideas_status ON shared.ideas(status);
|
||||
CREATE INDEX IF NOT EXISTS idx_ideas_created_at ON shared.ideas(created_at DESC);
|
||||
2
db/migrations/019-ideas-admin-note.sql
Normal file
2
db/migrations/019-ideas-admin-note.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
-- Add private admin note column to ideas table
|
||||
ALTER TABLE shared.ideas ADD COLUMN IF NOT EXISTS admin_note TEXT;
|
||||
@@ -16,6 +16,31 @@
|
||||
-- Enable UUID generation
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
-- ============================================================
|
||||
-- 0. Create platform owner account (admin@hoaledgeriq.com)
|
||||
-- ============================================================
|
||||
DO $$
|
||||
DECLARE
|
||||
v_platform_owner_id UUID;
|
||||
BEGIN
|
||||
SELECT id INTO v_platform_owner_id FROM shared.users WHERE email = 'admin@hoaledgeriq.com';
|
||||
IF v_platform_owner_id IS NULL THEN
|
||||
INSERT INTO shared.users (id, email, password_hash, first_name, last_name, is_superadmin, is_platform_owner)
|
||||
VALUES (
|
||||
uuid_generate_v4(),
|
||||
'admin@hoaledgeriq.com',
|
||||
-- bcrypt hash of platform owner password (cost 12)
|
||||
'$2b$12$QRJEJYsjy.24Va.57h13Te7UX7nMTN9hWhW19bwuCAkr1Dm0FWqrm',
|
||||
'Platform',
|
||||
'Admin',
|
||||
true,
|
||||
true
|
||||
) RETURNING id INTO v_platform_owner_id;
|
||||
END IF;
|
||||
-- Platform owner has NO org memberships — admin-only account
|
||||
RAISE NOTICE 'Platform Owner: admin@hoaledgeriq.com (id: %)', v_platform_owner_id;
|
||||
END $$;
|
||||
|
||||
-- ============================================================
|
||||
-- 1. Create test user and organization
|
||||
-- ============================================================
|
||||
@@ -179,7 +204,10 @@ CREATE TABLE IF NOT EXISTS %I.assessment_groups (
|
||||
special_assessment DECIMAL(10,2) DEFAULT 0.00,
|
||||
unit_count INTEGER DEFAULT 0,
|
||||
frequency VARCHAR(20) DEFAULT ''monthly'',
|
||||
due_months INTEGER[] DEFAULT ''{1,2,3,4,5,6,7,8,9,10,11,12}'',
|
||||
due_day INTEGER DEFAULT 1,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
is_default BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)', v_schema);
|
||||
@@ -219,6 +247,9 @@ CREATE TABLE IF NOT EXISTS %I.invoices (
|
||||
amount DECIMAL(10,2) NOT NULL,
|
||||
amount_paid DECIMAL(10,2) DEFAULT 0.00,
|
||||
status VARCHAR(20) DEFAULT ''draft'',
|
||||
period_start DATE,
|
||||
period_end DATE,
|
||||
assessment_group_id UUID,
|
||||
journal_entry_id UUID,
|
||||
sent_at TIMESTAMPTZ,
|
||||
paid_at TIMESTAMPTZ,
|
||||
@@ -418,10 +449,10 @@ END LOOP;
|
||||
-- ============================================================
|
||||
-- 4b. Seed Assessment Groups
|
||||
-- ============================================================
|
||||
EXECUTE format('INSERT INTO %I.assessment_groups (name, description, regular_assessment, special_assessment, unit_count) VALUES
|
||||
(''Single Family Homes'', ''Standard single family detached homes (Units 1-20)'', 350.00, 0.00, 20),
|
||||
(''Patio Homes'', ''Medium-sized patio homes (Units 21-35)'', 425.00, 0.00, 15),
|
||||
(''Estate Lots'', ''Large estate lots (Units 36-50)'', 500.00, 75.00, 15)
|
||||
EXECUTE format('INSERT INTO %I.assessment_groups (name, description, regular_assessment, special_assessment, unit_count, frequency, due_months, due_day) VALUES
|
||||
(''Single Family Homes'', ''Standard single family detached homes (Units 1-20)'', 350.00, 0.00, 20, ''monthly'', ''{1,2,3,4,5,6,7,8,9,10,11,12}'', 15),
|
||||
(''Patio Homes'', ''Medium-sized patio homes (Units 21-35)'', 1275.00, 0.00, 15, ''quarterly'', ''{1,4,7,10}'', 1),
|
||||
(''Estate Lots'', ''Large estate lots (Units 36-50)'', 6000.00, 900.00, 15, ''annual'', ''{3}'', 1)
|
||||
', v_schema);
|
||||
|
||||
-- ============================================================
|
||||
@@ -836,7 +867,42 @@ EXECUTE format('INSERT INTO %I.capital_projects (name, description, estimated_co
|
||||
(''Perimeter Fence Repair'', ''Replace damaged fence sections and repaint'', 8000, $1 + 4, 8, ''planned'', ''reserve'', 4)
|
||||
', v_schema) USING v_year;
|
||||
|
||||
-- Add subscription data to the organization
|
||||
UPDATE shared.organizations
|
||||
SET payment_date = (CURRENT_DATE - INTERVAL '15 days')::DATE,
|
||||
confirmation_number = 'PAY-2026-SVH-001',
|
||||
renewal_date = (CURRENT_DATE + INTERVAL '350 days')::DATE
|
||||
WHERE schema_name = v_schema;
|
||||
|
||||
-- ============================================================
|
||||
-- 13. Seed login_history for demo analytics
|
||||
-- ============================================================
|
||||
-- Admin user: regular logins over the past 30 days
|
||||
FOR v_month IN 0..29 LOOP
|
||||
INSERT INTO shared.login_history (user_id, organization_id, logged_in_at, ip_address, user_agent)
|
||||
VALUES (
|
||||
v_user_id,
|
||||
v_org_id,
|
||||
NOW() - (v_month || ' days')::INTERVAL - (random() * 8 || ' hours')::INTERVAL,
|
||||
'192.168.1.' || (10 + (random() * 50)::INT),
|
||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)'
|
||||
);
|
||||
END LOOP;
|
||||
|
||||
-- Viewer user: occasional logins (every 3-5 days)
|
||||
FOR v_month IN 0..9 LOOP
|
||||
INSERT INTO shared.login_history (user_id, organization_id, logged_in_at, ip_address, user_agent)
|
||||
VALUES (
|
||||
(SELECT id FROM shared.users WHERE email = 'viewer@sunrisevalley.org'),
|
||||
v_org_id,
|
||||
NOW() - ((v_month * 3) || ' days')::INTERVAL - (random() * 12 || ' hours')::INTERVAL,
|
||||
'10.0.0.' || (100 + (random() * 50)::INT),
|
||||
'Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X)'
|
||||
);
|
||||
END LOOP;
|
||||
|
||||
RAISE NOTICE 'Seed data created successfully for Sunrise Valley HOA!';
|
||||
RAISE NOTICE 'Platform Owner: admin@hoaledgeriq.com (SuperAdmin + Platform Owner)';
|
||||
RAISE NOTICE 'Admin Login: admin@sunrisevalley.org / password123 (SuperAdmin + President)';
|
||||
RAISE NOTICE 'Viewer Login: viewer@sunrisevalley.org / password123 (Homeowner)';
|
||||
|
||||
|
||||
121
docker-compose.prod.yml
Normal file
121
docker-compose.prod.yml
Normal file
@@ -0,0 +1,121 @@
|
||||
# Production override — use with:
|
||||
# docker compose -f docker-compose.yml -f docker-compose.prod.yml up -d --build
|
||||
#
|
||||
# What this changes from the base (dev) config:
|
||||
# - Disables the Docker nginx container (host nginx handles routing + SSL)
|
||||
# - Backend: production Dockerfile (compiled JS, no watch, no devDeps)
|
||||
# - Frontend: production Dockerfile (static build served by nginx on port 3001)
|
||||
# - Backend + Frontend bound to 127.0.0.1 only (host nginx proxies to them)
|
||||
# - No source-code volume mounts (uses baked-in built code)
|
||||
# - Memory limits and health checks on backend
|
||||
# - Tuned PostgreSQL for production workloads
|
||||
# - Restart policies for reliability
|
||||
#
|
||||
# SSL/TLS and request routing are handled by the host-level nginx.
|
||||
# See nginx/host-production.conf for a ready-to-use reference config.
|
||||
|
||||
services:
|
||||
nginx:
|
||||
# Disabled in production — host nginx handles routing + SSL directly.
|
||||
# The dev-only Docker nginx is still used by the base docker-compose.yml.
|
||||
deploy:
|
||||
replicas: 0
|
||||
|
||||
backend:
|
||||
build:
|
||||
context: ./backend
|
||||
dockerfile: Dockerfile # production Dockerfile (compiled JS)
|
||||
ports:
|
||||
- "127.0.0.1:3000:3000" # loopback only — host nginx proxies here
|
||||
volumes: [] # override: no source mounts in prod
|
||||
environment:
|
||||
- DATABASE_URL=${DATABASE_URL}
|
||||
- REDIS_URL=${REDIS_URL}
|
||||
- JWT_SECRET=${JWT_SECRET}
|
||||
- NODE_ENV=production
|
||||
- AI_API_URL=${AI_API_URL}
|
||||
- AI_API_KEY=${AI_API_KEY}
|
||||
- AI_MODEL=${AI_MODEL}
|
||||
- AI_DEBUG=${AI_DEBUG:-false}
|
||||
- NEW_RELIC_ENABLED=${NEW_RELIC_ENABLED:-false}
|
||||
- NEW_RELIC_LICENSE_KEY=${NEW_RELIC_LICENSE_KEY:-}
|
||||
- NEW_RELIC_APP_NAME=${NEW_RELIC_APP_NAME:-HOALedgerIQ_App}
|
||||
- STRIPE_SECRET_KEY=${STRIPE_SECRET_KEY:-}
|
||||
- STRIPE_WEBHOOK_SECRET=${STRIPE_WEBHOOK_SECRET:-}
|
||||
- STRIPE_STARTER_PRICE_ID=${STRIPE_STARTER_PRICE_ID:-}
|
||||
- STRIPE_PROFESSIONAL_PRICE_ID=${STRIPE_PROFESSIONAL_PRICE_ID:-}
|
||||
- STRIPE_ENTERPRISE_PRICE_ID=${STRIPE_ENTERPRISE_PRICE_ID:-}
|
||||
- STRIPE_STARTER_MONTHLY_PRICE_ID=${STRIPE_STARTER_MONTHLY_PRICE_ID:-}
|
||||
- STRIPE_PROFESSIONAL_MONTHLY_PRICE_ID=${STRIPE_PROFESSIONAL_MONTHLY_PRICE_ID:-}
|
||||
- STRIPE_ENTERPRISE_MONTHLY_PRICE_ID=${STRIPE_ENTERPRISE_MONTHLY_PRICE_ID:-}
|
||||
- STRIPE_STARTER_ANNUAL_PRICE_ID=${STRIPE_STARTER_ANNUAL_PRICE_ID:-}
|
||||
- STRIPE_PROFESSIONAL_ANNUAL_PRICE_ID=${STRIPE_PROFESSIONAL_ANNUAL_PRICE_ID:-}
|
||||
- STRIPE_ENTERPRISE_ANNUAL_PRICE_ID=${STRIPE_ENTERPRISE_ANNUAL_PRICE_ID:-}
|
||||
- REQUIRE_PAYMENT_METHOD_FOR_TRIAL=${REQUIRE_PAYMENT_METHOD_FOR_TRIAL:-false}
|
||||
- GOOGLE_CLIENT_ID=${GOOGLE_CLIENT_ID:-}
|
||||
- GOOGLE_CLIENT_SECRET=${GOOGLE_CLIENT_SECRET:-}
|
||||
- GOOGLE_CALLBACK_URL=${GOOGLE_CALLBACK_URL:-https://app.hoaledgeriq.com/api/auth/google/callback}
|
||||
- AZURE_CLIENT_ID=${AZURE_CLIENT_ID:-}
|
||||
- AZURE_CLIENT_SECRET=${AZURE_CLIENT_SECRET:-}
|
||||
- AZURE_TENANT_ID=${AZURE_TENANT_ID:-}
|
||||
- AZURE_CALLBACK_URL=${AZURE_CALLBACK_URL:-https://app.hoaledgeriq.com/api/auth/azure/callback}
|
||||
- WEBAUTHN_RP_ID=${WEBAUTHN_RP_ID:-app.hoaledgeriq.com}
|
||||
- WEBAUTHN_RP_ORIGIN=${WEBAUTHN_RP_ORIGIN:-https://app.hoaledgeriq.com}
|
||||
- INVITE_TOKEN_SECRET=${INVITE_TOKEN_SECRET:-}
|
||||
- APP_URL=${APP_URL:-https://app.hoaledgeriq.com}
|
||||
- RESEND_API_KEY=${RESEND_API_KEY:-}
|
||||
- RESEND_FROM_ADDRESS=${RESEND_FROM_ADDRESS:-noreply@hoaledgeriq.com}
|
||||
- RESEND_REPLY_TO=${RESEND_REPLY_TO:-sales@hoaledgeriq.com}
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 1024M
|
||||
reservations:
|
||||
memory: 256M
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "wget -qO- http://localhost:3000/api || exit 1"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
start_period: 30s
|
||||
restart: unless-stopped
|
||||
|
||||
frontend:
|
||||
build:
|
||||
context: ./frontend
|
||||
dockerfile: Dockerfile # production Dockerfile (static nginx)
|
||||
ports:
|
||||
- "127.0.0.1:3001:3001" # loopback only — host nginx proxies here
|
||||
volumes: [] # override: no source mounts in prod
|
||||
environment:
|
||||
- NODE_ENV=production
|
||||
restart: unless-stopped
|
||||
|
||||
postgres:
|
||||
# Tune PostgreSQL for production workloads
|
||||
command: >
|
||||
postgres
|
||||
-c max_connections=200
|
||||
-c shared_buffers=256MB
|
||||
-c effective_cache_size=512MB
|
||||
-c work_mem=4MB
|
||||
-c maintenance_work_mem=64MB
|
||||
-c checkpoint_completion_target=0.9
|
||||
-c wal_buffers=16MB
|
||||
-c random_page_cost=1.1
|
||||
# No host port mapping — backend reaches postgres via the Docker network.
|
||||
# Removes 2 docker-proxy processes and closes 0.0.0.0:5432 to the internet.
|
||||
ports: []
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 1024M
|
||||
reservations:
|
||||
memory: 512M
|
||||
restart: unless-stopped
|
||||
|
||||
redis:
|
||||
# No host port mapping — backend reaches redis via the Docker network.
|
||||
# Removes 2 docker-proxy processes and closes 0.0.0.0:6379 to the internet.
|
||||
ports: []
|
||||
restart: unless-stopped
|
||||
28
docker-compose.ssl.yml
Normal file
28
docker-compose.ssl.yml
Normal file
@@ -0,0 +1,28 @@
|
||||
# SSL override — use with: docker compose -f docker-compose.yml -f docker-compose.ssl.yml up -d
|
||||
#
|
||||
# This adds port 443, certbot volumes, and a certbot renewal service
|
||||
# to the base docker-compose.yml configuration.
|
||||
|
||||
services:
|
||||
nginx:
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
volumes:
|
||||
- ./nginx/ssl.conf:/etc/nginx/conf.d/default.conf:ro
|
||||
- certbot_www:/var/www/certbot:ro
|
||||
- certbot_conf:/etc/letsencrypt:ro
|
||||
|
||||
certbot:
|
||||
image: certbot/certbot:latest
|
||||
volumes:
|
||||
- certbot_www:/var/www/certbot
|
||||
- certbot_conf:/etc/letsencrypt
|
||||
networks:
|
||||
- hoanet
|
||||
# Auto-renew: check twice daily, only renews if < 30 days remain
|
||||
entrypoint: "/bin/sh -c 'trap exit TERM; while :; do certbot renew --quiet; sleep 12h & wait $${!}; done'"
|
||||
|
||||
volumes:
|
||||
certbot_www:
|
||||
certbot_conf:
|
||||
@@ -15,13 +15,46 @@ services:
|
||||
build:
|
||||
context: ./backend
|
||||
dockerfile: Dockerfile.dev
|
||||
ports:
|
||||
- "3000:3000"
|
||||
# No host port mapping — dev traffic goes through the Docker nginx container.
|
||||
# Production overlay maps 127.0.0.1:3000 for the host reverse proxy.
|
||||
environment:
|
||||
- DATABASE_URL=${DATABASE_URL}
|
||||
- REDIS_URL=${REDIS_URL}
|
||||
- JWT_SECRET=${JWT_SECRET}
|
||||
- NODE_ENV=${NODE_ENV}
|
||||
- AI_API_URL=${AI_API_URL}
|
||||
- AI_API_KEY=${AI_API_KEY}
|
||||
- AI_MODEL=${AI_MODEL}
|
||||
- AI_DEBUG=${AI_DEBUG:-false}
|
||||
- NEW_RELIC_ENABLED=${NEW_RELIC_ENABLED:-false}
|
||||
- NEW_RELIC_LICENSE_KEY=${NEW_RELIC_LICENSE_KEY:-}
|
||||
- NEW_RELIC_APP_NAME=${NEW_RELIC_APP_NAME:-HOALedgerIQ_App}
|
||||
- STRIPE_SECRET_KEY=${STRIPE_SECRET_KEY:-}
|
||||
- STRIPE_WEBHOOK_SECRET=${STRIPE_WEBHOOK_SECRET:-}
|
||||
- STRIPE_STARTER_PRICE_ID=${STRIPE_STARTER_PRICE_ID:-}
|
||||
- STRIPE_PROFESSIONAL_PRICE_ID=${STRIPE_PROFESSIONAL_PRICE_ID:-}
|
||||
- STRIPE_ENTERPRISE_PRICE_ID=${STRIPE_ENTERPRISE_PRICE_ID:-}
|
||||
- STRIPE_STARTER_MONTHLY_PRICE_ID=${STRIPE_STARTER_MONTHLY_PRICE_ID:-}
|
||||
- STRIPE_PROFESSIONAL_MONTHLY_PRICE_ID=${STRIPE_PROFESSIONAL_MONTHLY_PRICE_ID:-}
|
||||
- STRIPE_ENTERPRISE_MONTHLY_PRICE_ID=${STRIPE_ENTERPRISE_MONTHLY_PRICE_ID:-}
|
||||
- STRIPE_STARTER_ANNUAL_PRICE_ID=${STRIPE_STARTER_ANNUAL_PRICE_ID:-}
|
||||
- STRIPE_PROFESSIONAL_ANNUAL_PRICE_ID=${STRIPE_PROFESSIONAL_ANNUAL_PRICE_ID:-}
|
||||
- STRIPE_ENTERPRISE_ANNUAL_PRICE_ID=${STRIPE_ENTERPRISE_ANNUAL_PRICE_ID:-}
|
||||
- REQUIRE_PAYMENT_METHOD_FOR_TRIAL=${REQUIRE_PAYMENT_METHOD_FOR_TRIAL:-false}
|
||||
- GOOGLE_CLIENT_ID=${GOOGLE_CLIENT_ID:-}
|
||||
- GOOGLE_CLIENT_SECRET=${GOOGLE_CLIENT_SECRET:-}
|
||||
- GOOGLE_CALLBACK_URL=${GOOGLE_CALLBACK_URL:-http://localhost/api/auth/google/callback}
|
||||
- AZURE_CLIENT_ID=${AZURE_CLIENT_ID:-}
|
||||
- AZURE_CLIENT_SECRET=${AZURE_CLIENT_SECRET:-}
|
||||
- AZURE_TENANT_ID=${AZURE_TENANT_ID:-}
|
||||
- AZURE_CALLBACK_URL=${AZURE_CALLBACK_URL:-http://localhost/api/auth/azure/callback}
|
||||
- WEBAUTHN_RP_ID=${WEBAUTHN_RP_ID:-localhost}
|
||||
- WEBAUTHN_RP_ORIGIN=${WEBAUTHN_RP_ORIGIN:-http://localhost}
|
||||
- INVITE_TOKEN_SECRET=${INVITE_TOKEN_SECRET:-dev-invite-secret}
|
||||
- APP_URL=${APP_URL:-http://localhost}
|
||||
- RESEND_API_KEY=${RESEND_API_KEY:-}
|
||||
- RESEND_FROM_ADDRESS=${RESEND_FROM_ADDRESS:-noreply@hoaledgeriq.com}
|
||||
- RESEND_REPLY_TO=${RESEND_REPLY_TO:-}
|
||||
volumes:
|
||||
- ./backend/src:/app/src
|
||||
- ./backend/nest-cli.json:/app/nest-cli.json
|
||||
@@ -39,8 +72,8 @@ services:
|
||||
build:
|
||||
context: ./frontend
|
||||
dockerfile: Dockerfile.dev
|
||||
ports:
|
||||
- "5173:5173"
|
||||
# No host port mapping — dev traffic goes through the Docker nginx container.
|
||||
# Production overlay maps 127.0.0.1:3001 for the host reverse proxy.
|
||||
environment:
|
||||
- NODE_ENV=${NODE_ENV}
|
||||
volumes:
|
||||
|
||||
545
docs/AI_FEATURE_AUDIT.md
Normal file
545
docs/AI_FEATURE_AUDIT.md
Normal file
@@ -0,0 +1,545 @@
|
||||
# AI Feature Audit Report
|
||||
|
||||
**Audit Date:** 2026-03-05
|
||||
**Tenant Under Test:** Pine Creek HOA (`tenant_pine_creek_hoa_q33i`)
|
||||
**AI Model:** Qwen 3.5-397B-A17B via NVIDIA NIM (Temperature: 0.3)
|
||||
**Auditor:** Claude Opus 4.6 (automated)
|
||||
**Data Snapshot Date:** 2026-03-04
|
||||
|
||||
---
|
||||
|
||||
## Executive Summary
|
||||
|
||||
Three AI-powered features were audited against ground-truth database records: **Operating Fund Health**, **Reserve Fund Health**, and **Investment Recommendations**. Overall, the AI demonstrates strong financial reasoning and produces actionable, fiduciary-appropriate recommendations. However, score consistency across runs is a concern (16-point spread on operating, 23-point spread on reserve), and several specific data interpretation issues were identified.
|
||||
|
||||
| Feature | Latest Score/Grade | Concurrence | Verdict |
|
||||
|---|---|---|---|
|
||||
| Operating Fund Health | 88 / Good | **72%** | Score ~10-15 pts high; cash runway below its own "Good" threshold |
|
||||
| Reserve Fund Health | 45 / Needs Attention | **85%** | Well-calibrated; minor data misquote on annual contributions |
|
||||
| Investment Recommendations | 6 recommendations | **88%** | Excellent specificity; all market rates verified accurate |
|
||||
|
||||
---
|
||||
|
||||
## Data Foundation (Ground Truth)
|
||||
|
||||
### Financial Position
|
||||
|
||||
| Metric | Value | Source |
|
||||
|---|---|---|
|
||||
| Operating Cash (Checking) | $27,418.81 | GL balance |
|
||||
| Reserve Cash (Savings) | $10,688.45 | GL balance |
|
||||
| Reserve CD #1a (FCB) | $10,000 @ 3.67%, matures 6/19/26 | `investment_accounts` |
|
||||
| Reserve CD #2a (FCB) | $8,000 @ 3.60%, matures 4/14/26 | `investment_accounts` |
|
||||
| Reserve CD #3a (FCB) | $10,000 @ 3.67%, matures 8/18/26 | `investment_accounts` |
|
||||
| Total Reserve Fund | $38,688.45 | Cash + Investments |
|
||||
| Total Assets | $66,107.26 | Operating + Reserve |
|
||||
|
||||
### Budget (FY2026)
|
||||
|
||||
| Category | Annual Total |
|
||||
|---|---|
|
||||
| Operating Income | $184,207.40 |
|
||||
| Operating Expense | $139,979.95 |
|
||||
| **Net Operating Surplus** | **$44,227.45** |
|
||||
| Monthly Expense Run Rate | $11,665.00 |
|
||||
| Reserve Interest Income | $1,449.96 |
|
||||
| Reserve Disbursements | $22,000.00 (Mar $13K, Apr $9K) |
|
||||
|
||||
### Assessment Structure
|
||||
|
||||
- **67 units** at $2,328.14/year regular + $300.00/year special (annual frequency)
|
||||
- Total annual regular assessments: ~$155,985
|
||||
- Total annual special assessments: ~$20,100
|
||||
- Budget timing: assessments front-loaded in Mar-Jun
|
||||
|
||||
### Actuals (YTD through March 4, 2026)
|
||||
|
||||
| Metric | Value |
|
||||
|---|---|
|
||||
| YTD Income | $88.16 (ARC fees $100 - $50 adj + $38.16 interest) |
|
||||
| YTD Expenses | $1,850.42 (January only) |
|
||||
| Delinquent Invoices | 0 ($0.00) |
|
||||
| Journal Entries Posted | 4 (Jan actuals + Feb adjusting + Feb opening balances) |
|
||||
|
||||
### Capital Projects (from `projects` table, 26 total)
|
||||
|
||||
| Project | Cost | Target | Funded % |
|
||||
|---|---|---|---|
|
||||
| Pond Spillway | $7,000 | Mar 2026 | 0% |
|
||||
| Tuscany Drain Box | $5,500 | May 2026 | 0% |
|
||||
| Front Entrance Power Washing | $1,500 | Mar 2027 | 0% |
|
||||
| Irrigation Pump Replacement | $1,500 | Jun 2027 | 0% |
|
||||
| **Road Sealing - All Roads** | **$80,000** | **Jun 2029** | **0%** |
|
||||
| Asphalt Repair - Creek Stone Dr | $43,000 | TBD | 0% |
|
||||
| Pavilion & Vineyard Structures | $7,000 | Jun 2035 | 0% |
|
||||
| 16 placeholder items | $1.00 each | TBD | 0% |
|
||||
| **Total Planned** | **$152,016** | | **0%** |
|
||||
|
||||
### Reserve Components
|
||||
|
||||
- **0 components tracked** (empty `reserve_components` table)
|
||||
|
||||
### Market Rates (fetched 2026-03-04)
|
||||
|
||||
| Type | Top Rate | Bank | Term |
|
||||
|---|---|---|---|
|
||||
| CD | 4.10% | E*TRADE / Synchrony | 12-14 mo |
|
||||
| High-Yield Savings | 4.09% | Openbank | Liquid |
|
||||
| Money Market | 4.03% | Vio Bank | Liquid |
|
||||
|
||||
---
|
||||
|
||||
## 1. Operating Fund Health Score
|
||||
|
||||
**Latest Score:** 88 (Good) — Generated 2026-03-04T19:24:36Z
|
||||
**Score History:** 48 → 78 → 72 → 78 → 72 → **88** (6 runs, March 2-4)
|
||||
**Overall Concurrence: 72%**
|
||||
|
||||
### Factor-by-Factor Analysis
|
||||
|
||||
#### Factor 1: "Projected Cash Flow" — Impact: Positive
|
||||
> "12-month forecast shows consistent positive liquidity, with cash balances never dipping below the starting $27,419 and peaking at $142,788 in June."
|
||||
|
||||
| Check | Result |
|
||||
|---|---|
|
||||
| Budget surplus ($184K income vs $140K expense) | **Verified** ✅ |
|
||||
| Assessments front-loaded Mar-Jun | **Verified** ✅ (budget shows $48K Mar, $64K Apr, $32K May, $16K Jun) |
|
||||
| Peak of ~$142K in June | **Plausible** ✅ ($27K + cumulative income through June) |
|
||||
| Cash never below starting $27K | **Plausible** ✅ (expenses < income by month) |
|
||||
|
||||
**Concurrence: 95%** — Forecast logic is sound. The only risk is the assumption that assessments are collected on the exact budget schedule.
|
||||
|
||||
---
|
||||
|
||||
#### Factor 2: "Delinquency Rate" — Impact: Positive
|
||||
> "$0.00 in overdue invoices and a 0.0% delinquency rate."
|
||||
|
||||
**Concurrence: 100%** ✅ — Database confirms zero delinquent invoices.
|
||||
|
||||
---
|
||||
|
||||
#### Factor 3: "Budget Performance (Timing)" — Impact: Neutral
|
||||
> "YTD income is 99.8% below budget ($55k variance) primarily due to the timing of the large Special Assessment ($20,700) and regular assessments appearing in future projected months."
|
||||
|
||||
| Check | Result |
|
||||
|---|---|
|
||||
| YTD income $88.16 | **Verified** ✅ |
|
||||
| Budget includes March ($55K) in YTD calc | **Accurate** — AI uses month 3 of 12, includes full March budget |
|
||||
| Timing explanation | **Reasonable** — we're only 4 days into March |
|
||||
| Rating as "neutral" vs "negative" | **Appropriate** ✅ — correctly avoids penalizing for calendar timing |
|
||||
|
||||
**Concurrence: 80%** — The variance is accurately computed but presenting a $55K "variance" when we're 4 days into March could alarm a board member. The YTD window through month 3 includes all of March's budget despite only 4 days having elapsed. Consider computing YTD budget pro-rata or through the prior complete month.
|
||||
|
||||
**🔧 Tuning Suggestion:** Add a note to the prompt about pro-rating the current month's budget, or instruct the AI to note "X days into the current month" when the variance is driven by incomplete-month timing.
|
||||
|
||||
---
|
||||
|
||||
#### Factor 4: "Cash Reserves" — Impact: Positive
|
||||
> "Current operating cash of $27,419 provides 2.4 months of runway based on the annual expense run rate."
|
||||
|
||||
| Check | Result |
|
||||
|---|---|
|
||||
| $27,419 / ($139,980 / 12) = 2.35 months | **Math verified** ✅ |
|
||||
| Rated as "positive" | **Questionable** ⚠️ |
|
||||
|
||||
**Concurrence: 60%** — The math is correct, but rating 2.4 months as "positive" contradicts the scoring guidelines which state 2-3 months = "Fair" (60-74) and 3-6 months = "Good" (75-89). This factor should be "neutral" at best, and the overall score should reflect that the HOA is *below* the "Good" threshold for cash reserves.
|
||||
|
||||
**🔧 Tuning Suggestion:** Add explicit guidance in the prompt: "If cash runway is below 3 months, this factor MUST be neutral or negative, regardless of projected future inflows."
|
||||
|
||||
---
|
||||
|
||||
#### Factor 5: "Expense Management" — Impact: Positive
|
||||
> "YTD expenses are $36,313 under budget (4.8% of annual budget spent vs 25% of year elapsed)."
|
||||
|
||||
| Check | Result |
|
||||
|---|---|
|
||||
| YTD expenses $1,850.42 | **Verified** ✅ |
|
||||
| Budget YTD (3 months): ~$38,164 | **Correct** ✅ |
|
||||
| $1,850 / $38,164 = 4.85% | **Math verified** ✅ |
|
||||
| "25% of year elapsed" | **Correct** (month 3 of 12) |
|
||||
| Phrasing "of annual budget" | **Misleading** ⚠️ — it's actually 4.8% of YTD budget, not annual |
|
||||
|
||||
**Concurrence: 70%** — The percentage is correctly calculated against YTD budget, but the phrasing "of annual budget" is incorrect. Also, the low spend is not necessarily positive — only January actuals exist; February hasn't been posted yet, which the AI partially acknowledges with "or delayed billing cycles."
|
||||
|
||||
---
|
||||
|
||||
### Recommendation Assessment
|
||||
|
||||
| # | Recommendation | Priority | Concurrence |
|
||||
|---|---|---|---|
|
||||
| 1 | "Verify the posting schedule for the $20,700 Special Assessment" | Low | **90%** ✅ Valid; assessments are annual, collection timing matters |
|
||||
| 2 | "Investigate the low YTD expense recognition ($1,850 vs $38,164)" | Medium | **95%** ✅ Excellent catch; Feb expenses not posted yet |
|
||||
| 3 | "Consider moving excess cash over $100K in Q2 to interest-bearing account" | Low | **85%** ✅ Sound advice; aligns with HY Savings at 4.09% |
|
||||
|
||||
**Recommendation Concurrence: 90%** — All three recommendations are actionable and data-backed.
|
||||
|
||||
---
|
||||
|
||||
### Score Assessment
|
||||
|
||||
**Is 88 (Good) the right score?**
|
||||
|
||||
| Scoring Criterion | Guidelines Say | Actual | Alignment |
|
||||
|---|---|---|---|
|
||||
| Cash reserves | 3-6 months for "Good" | 2.4 months | ❌ Below threshold |
|
||||
| Income vs expenses | "Roughly matching" for Good | $184K vs $140K (surplus) | ✅ Exceeds |
|
||||
| Delinquency | "Manageable" for Good | 0% | ✅ Excellent |
|
||||
| Budget performance | No major overruns for Good | Under budget (timing) | ✅ Positive |
|
||||
| Projected cash flow | Not explicitly in guidelines | Strong positive trajectory | ✅ Positive |
|
||||
|
||||
The cash runway of 2.4 months is below the stated "Good" (75-89) threshold of 3-6 months and technically falls in the "Fair" (60-74) range of 2-3 months. Earlier AI runs scored this 72-78, which better aligns with the guidelines. The 88 appears to overweight the projected future cash flow (which is speculative) vs the current actual position.
|
||||
|
||||
**Suggested correct score: 74-80** (high end of Fair to low end of Good)
|
||||
|
||||
---
|
||||
|
||||
### Score Consistency Concern
|
||||
|
||||
| Run Date | Score | Label |
|
||||
|---|---|---|
|
||||
| Mar 2 15:07 | 48 | Needs Attention |
|
||||
| Mar 2 15:12 | 78 | Good |
|
||||
| Mar 2 15:36 | 72 | Fair |
|
||||
| Mar 2 17:09 | 78 | Good |
|
||||
| Mar 3 02:03 | 72 | Fair |
|
||||
| Mar 4 19:24 | 88 | Good |
|
||||
|
||||
A **40-point spread** (48-88) across 6 runs with essentially the same data is concerning. Even excluding the outlier first run (which noted a data config issue with "1 units"), the remaining 5 runs span 72-88 (16 points). At temperature 0.3, this suggests the model is not deterministic enough for financial scoring.
|
||||
|
||||
**🔧 Tuning Suggestion:** Consider lowering temperature to 0.1 for health score calculations to improve consistency. Alternatively, implement a moving average of the last 3 scores to smooth volatility.
|
||||
|
||||
---
|
||||
|
||||
## 2. Reserve Fund Health Score
|
||||
|
||||
**Latest Score:** 45 (Needs Attention) — Generated 2026-03-04T19:24:50Z
|
||||
**Score History:** 25 → 25 → 48 → 42 → 45 → 35 → **45** (7 runs, March 2-4)
|
||||
**Overall Concurrence: 85%**
|
||||
|
||||
### Factor-by-Factor Analysis
|
||||
|
||||
#### Factor 1: "Funded Ratio" — Impact: Negative
|
||||
> "Calculated at 0% because no reserve components have been inventoried or assigned replacement costs, making it impossible to measure true funding health against the $152,016 in planned projects."
|
||||
|
||||
| Check | Result |
|
||||
|---|---|
|
||||
| 0 reserve components in DB | **Verified** ✅ |
|
||||
| $152,016 in planned projects | **Verified** ✅ (sum of all `projects` rows) |
|
||||
| 0% funded ratio | **Technically accurate** ✅ (no denominator from components) |
|
||||
| Distinction between components and projects | **Well articulated** ✅ |
|
||||
|
||||
**Concurrence: 95%** — The AI correctly identifies that the 0% is an artifact of missing reserve study data, not a literal lack of funds. It appropriately flags this as a governance failure.
|
||||
|
||||
---
|
||||
|
||||
#### Factor 2: "Projected Cash Flow" — Impact: Positive
|
||||
> "Strong immediate liquidity; cash balance is projected to rise from $10,688 to over $49,000 by May 2026 due to special assessment income covering the $12,500 in urgent 2026 project costs."
|
||||
|
||||
| Check | Result |
|
||||
|---|---|
|
||||
| Starting reserve cash $10,688 | **Verified** ✅ |
|
||||
| 2026 project costs: $7K (Mar) + $5.5K (May) = $12,500 | **Verified** ✅ |
|
||||
| Special assessment: $300 × 67 = $20,100/year | **Verified** ✅ |
|
||||
| CD maturities: $8K (Apr), $10K (Jun), $10K (Aug) | **Verified** ✅ |
|
||||
| Projected rise to $49K by May | **Plausible** ✅ (income + maturities - project costs) |
|
||||
|
||||
**Concurrence: 85%** — Math is directionally correct. However, the assessment is annual frequency so the full $20,100 may arrive in a single payment, not spread monthly. The timing assumption is critical.
|
||||
|
||||
---
|
||||
|
||||
#### Factor 3: "Component Tracking" — Impact: Negative
|
||||
> "Critical failure in governance: 'No reserve components tracked' means the association is flying blind on the condition and remaining useful life of major assets like roads and irrigation."
|
||||
|
||||
**Concurrence: 100%** ✅ — Database confirms 0 rows in `reserve_components`. This is objectively a critical gap.
|
||||
|
||||
---
|
||||
|
||||
#### Factor 4: "Annual Contributions" — Impact: Negative
|
||||
> "Recurring annual reserve income is only $300 (plus minimal interest), which is grossly insufficient to fund the $80,000 road sealing project due in 2029."
|
||||
|
||||
| Check | Result |
|
||||
|---|---|
|
||||
| Reserve budget income: $1,449.96/yr (interest only) | **Verified** ✅ |
|
||||
| Special assessment: $300/unit × 67 = $20,100/yr | **Verified** ✅ |
|
||||
| "$300" cited as annual reserve income | **Incorrect** ⚠️ |
|
||||
| Road Sealing $80K in June 2029 | **Verified** ✅ |
|
||||
|
||||
**Concurrence: 65%** — The concern about insufficient contributions is valid, but the "$300" figure appears to confuse the per-unit special assessment amount ($300/unit) with the total annual reserve income. Actual annual reserve income = $1,450 (interest) + $20,100 (special assessments) = **$21,550/yr**. Even at $21,550/yr, the 3 years until Road Sealing would accumulate ~$64,650, still short of $80K. So the directional concern is correct, but the magnitude is significantly misstated.
|
||||
|
||||
**🔧 Tuning Suggestion:** The prompt should explicitly label the special assessment income total (not per-unit) in the data context. Currently the data says "$300.00/unit × 67 units (annual)" — the AI should compute $20,100 but sometimes fixates on the $300 per-unit figure. Consider pre-computing and passing the total.
|
||||
|
||||
---
|
||||
|
||||
### Recommendation Assessment
|
||||
|
||||
| # | Recommendation | Priority | Concurrence |
|
||||
|---|---|---|---|
|
||||
| 1 | "Commission a professional Reserve Study to inventory assets and establish funded ratio" | High | **100%** ✅ Critical and universally correct |
|
||||
| 2 | "Develop a long-term funding plan for the $80,000 Road Sealing project (2029)" | High | **90%** ✅ Verified project exists; $80K with 0% funded |
|
||||
| 3 | "Formalize collection of special assessments into the reserve fund vs operating" | Medium | **95%** ✅ Budget shows special assessments in operating income section |
|
||||
|
||||
**Recommendation Concurrence: 95%** — All recommendations are actionable, appropriately prioritized, and backed by database evidence.
|
||||
|
||||
---
|
||||
|
||||
### Score Assessment
|
||||
|
||||
**Is 45 (Needs Attention) the right score?**
|
||||
|
||||
| Scoring Criterion | Guidelines Say | Actual | Alignment |
|
||||
|---|---|---|---|
|
||||
| Percent funded | 20-30% for "Needs Attention" | 0% (no components) | ⬇️ Worse than threshold |
|
||||
| Contributions | "Inadequate" for Needs Attention | $21,550/yr for $152K in projects | ⚠️ Borderline |
|
||||
| Component tracking | "Multiple urgent unfunded" | 0 tracked, 2 due in 2026 | ❌ Critical gap |
|
||||
| Investments | Not scored negatively | 3 CDs earning 3.6-3.67% | ✅ Positive |
|
||||
| Capital readiness | | $12.5K due soon, only $10.7K cash | ⚠️ Tight |
|
||||
|
||||
A score of 45 is reasonable. The 0% funded ratio technically suggests "At Risk" (20-39), but the presence of real assets ($38.7K), active investments, and manageable near-term liquidity justifies bumping it into the "Needs Attention" band. The AI's balancing of the artificial 0% metric against actual fund health shows good judgment.
|
||||
|
||||
**Suggested correct score: 40-50** — the AI's 45 is well-calibrated.
|
||||
|
||||
---
|
||||
|
||||
### Score Consistency Concern
|
||||
|
||||
| Run Date | Score | Label |
|
||||
|---|---|---|
|
||||
| Mar 2 15:06 | 25 | At Risk |
|
||||
| Mar 2 15:13 | 25 | At Risk |
|
||||
| Mar 2 15:37 | 48 | Needs Attention |
|
||||
| Mar 2 17:10 | 42 | Needs Attention |
|
||||
| Mar 3 02:04 | 45 | Needs Attention |
|
||||
| Mar 4 18:49 | 35 | At Risk |
|
||||
| Mar 4 19:24 | 45 | Needs Attention |
|
||||
|
||||
A **23-point spread** (25-48) across 7 runs. The scores oscillate between "At Risk" and "Needs Attention" — the model cannot consistently decide which band this falls into. The most recent 3 runs (35, 45, 45) are more stable.
|
||||
|
||||
**🔧 Tuning Suggestion:** Add boundary guidance to the prompt: "When the score falls within ±5 points of a threshold (40, 60, 75, 90), explicitly justify which side of the boundary the HOA falls on."
|
||||
|
||||
---
|
||||
|
||||
## 3. AI Investment Recommendations
|
||||
|
||||
**Latest Run:** 2026-03-04T19:28:22Z (3 runs saved)
|
||||
**Overall Concurrence: 88%**
|
||||
|
||||
### Overall Assessment
|
||||
> "The HOA has a healthy long-term cash flow outlook with significant surpluses projected by mid-2026, but faces an immediate liquidity pinch in the Reserve Fund for March/April capital projects. The current investment strategy relies on older, lower-yielding CDs (3.60-3.67%) that are maturing soon."
|
||||
|
||||
**Concurrence: 92%** ✅ — Every claim verified:
|
||||
- CDs are at 3.60-3.67% vs market 4.10% (verified)
|
||||
- March project ($7K) vs reserve cash ($10.7K) is tight (verified)
|
||||
- Long-term surplus projected from assessment income (verified from budget)
|
||||
|
||||
---
|
||||
|
||||
### Recommendation-by-Recommendation Analysis
|
||||
|
||||
#### Rec 1: "Critical Reserve Shortfall for March Project" — HIGH / Liquidity Warning
|
||||
|
||||
| Claim | Database Value | Match |
|
||||
|---|---|---|
|
||||
| Reserve cash = $10,688 | $10,688.45 | ✅ Exact |
|
||||
| $7,000 Pond Spillway project due March | Projects table: $7,000, Mar 2026 | ✅ Exact |
|
||||
| Shortfall risk | $10,688 - $7,000 = $3,688 remaining — tight but feasible | ✅ |
|
||||
| Suggested action: expedite special assessment or transfer from operating | Sound advice | ✅ |
|
||||
|
||||
**Concurrence: 90%** — The liquidity concern is real. After paying the $7K project, only $3.7K would remain in reserve cash before the $5.5K May project. The AI correctly flags the timing risk even though the fund is technically solvent.
|
||||
|
||||
---
|
||||
|
||||
#### Rec 2: "Reinvest Maturing CD #2a at Higher Rate" — HIGH / Maturity Action
|
||||
|
||||
| Claim | Database Value | Match |
|
||||
|---|---|---|
|
||||
| CD #2a = $8,000 | $8,000.00 | ✅ Exact |
|
||||
| Current rate = 3.60% | 3.60% | ✅ Exact |
|
||||
| Maturity = April 14, 2026 | 2026-04-14 | ✅ Exact |
|
||||
| Market rate = 4.10% (E*TRADE) | CD rates: E*TRADE 4.10%, 1 year, $0 min | ✅ Exact |
|
||||
| Additional yield: ~$40/year per $8K | $8K × 0.50% = $40 | ✅ Math correct |
|
||||
|
||||
**Concurrence: 95%** ✅ — Textbook-correct recommendation. Every data point verified. The 50 bps improvement is risk-free income.
|
||||
|
||||
---
|
||||
|
||||
#### Rec 3: "Establish 12-Month CD Ladder for Reserves" — MEDIUM / CD Ladder
|
||||
|
||||
| Claim | Database Value | Match |
|
||||
|---|---|---|
|
||||
| ~$38K total reserve portfolio | $38,688.45 | ✅ Exact |
|
||||
| Suggest 4-rung ladder (3/6/9/12 mo) | Standard strategy | ✅ |
|
||||
| Rates up to 4.10% | Market data confirmed | ✅ |
|
||||
| $9K matures every quarter | $38K / 4 = $9.5K per rung | ✅ Approximate |
|
||||
|
||||
**Concurrence: 75%** — Strategy is sound in principle, but the recommendation overlooks two constraints:
|
||||
1. **Immediate project costs ($12.5K in 2026)** must be reserved first, leaving ~$26K for laddering
|
||||
2. **Investing the entire $38K** is aggressive — some cash buffer should remain liquid
|
||||
|
||||
**🔧 Tuning Suggestion:** Add a constraint to the prompt: "When recommending CD ladders, always subtract upcoming project costs (next 12 months) and a minimum emergency reserve (1 month of budgeted reserve expenses) before calculating the investable amount."
|
||||
|
||||
---
|
||||
|
||||
#### Rec 4: "Deploy Excess Operating Cash to High-Yield Savings" — MEDIUM / New Investment
|
||||
|
||||
| Claim | Database Value | Match |
|
||||
|---|---|---|
|
||||
| Operating cash = $27,418 | $27,418.81 | ✅ Exact |
|
||||
| 3-month buffer = ~$35,000 | $11,665 × 3 = $34,995 | ✅ Math correct |
|
||||
| Current cash below buffer | $27.4K < $35K | ✅ Correctly identified |
|
||||
| Openbank 4.09% APY | Market data: Openbank 4.09%, $0.01 min | ✅ Exact |
|
||||
| Trigger: "As soon as balance exceeds $35K" | Sound deferred recommendation | ✅ |
|
||||
|
||||
**Concurrence: 90%** ✅ — The AI correctly identifies the current shortfall and provides a forward-looking trigger. Well-structured advice that respects the liquidity constraint.
|
||||
|
||||
---
|
||||
|
||||
#### Rec 5: "Optimize Reserve Cash Yield Post-Project" — LOW / Reallocation
|
||||
|
||||
| Claim | Database Value | Match |
|
||||
|---|---|---|
|
||||
| Vio Bank Money Market at 4.03% | Market data: Vio Bank 4.03%, $0 min | ✅ Exact |
|
||||
| Post-project reserve cash deployment | Appropriate timing | ✅ |
|
||||
| T+1 liquidity for emergencies | Correct MM account characteristic | ✅ |
|
||||
|
||||
**Concurrence: 85%** ✅ — Reasonable low-priority optimization. Correctly uses market data.
|
||||
|
||||
---
|
||||
|
||||
#### Rec 6: "Formalize Special Assessment Collection for Reserves" — LOW / General
|
||||
|
||||
| Claim | Database Value | Match |
|
||||
|---|---|---|
|
||||
| $300/unit special assessment | Assessment groups: $300.00 special | ✅ Exact |
|
||||
| Risk of commingling with operating | Budget shows special assessments in operating income | ✅ Identified |
|
||||
|
||||
**Concurrence: 90%** ✅ — Important governance recommendation. The budget structure does show special assessments as operating income, which could lead to improper fund commingling.
|
||||
|
||||
---
|
||||
|
||||
### Risk Notes Assessment
|
||||
|
||||
| Risk Note | Verified | Concurrence |
|
||||
|---|---|---|
|
||||
| "Reserve cash ($10.6K) barely sufficient for $7K + $5.5K projects" | ✅ $10,688 vs $12,500 in projects | **95%** |
|
||||
| "Concentration risk: CDs maturing in 4-month window (Apr-Aug)" | ✅ All 3 CDs mature Apr-Aug 2026 | **100%** |
|
||||
| "Operating cash ballooning to $140K+ without investment plan" | ✅ Budget shows large Q2 surplus | **85%** |
|
||||
| "Road Sealing $80K in 2029 needs dedicated savings plan" | ✅ Project exists, 0% funded | **95%** |
|
||||
|
||||
**Risk Notes Concurrence: 94%** — All risk items are data-backed and appropriately flagged.
|
||||
|
||||
---
|
||||
|
||||
### Cross-Run Consistency (Investment Recommendations)
|
||||
|
||||
Three runs were compared. Key observations:
|
||||
- **Core recommendations are highly consistent** across runs: CD reinvestment, HY savings for operating, CD ladder for reserves
|
||||
- **Dollar amounts match exactly** across all runs (same data inputs)
|
||||
- **Bank name recommendations vary slightly** (E*TRADE vs "Top CD Rate") — cosmetic, not substantive
|
||||
- **Priority levels are stable** (HIGH for liquidity warnings, MEDIUM for optimization)
|
||||
|
||||
**Consistency Grade: A-** — Investment recommendations show much better consistency than health scores, likely because the structured data (specific CDs, specific rates) constrains the output more than the subjective health scoring.
|
||||
|
||||
---
|
||||
|
||||
## Cross-Cutting Issues
|
||||
|
||||
### Issue 1: Score Volatility (MEDIUM Priority)
|
||||
|
||||
Health scores vary significantly across runs despite identical input data:
|
||||
- Operating: 40-point spread (48-88)
|
||||
- Reserve: 23-point spread (25-48)
|
||||
|
||||
**Root Cause:** Temperature 0.3 allows too much variance for numerical scoring. The model interprets guidelines subjectively.
|
||||
|
||||
**Recommended Fix:**
|
||||
1. Reduce temperature to **0.1** for health score calculations
|
||||
2. Implement a **3-run moving average** to smooth individual run variance
|
||||
3. Add explicit **boundary justification** requirements to prompts
|
||||
|
||||
### Issue 2: YTD Budget Calculation Includes Incomplete Month (LOW Priority)
|
||||
|
||||
The operating health score computes YTD budget through the current month (March), but actual data may only cover a few days. This creates alarming income variances (e.g., "$55K variance") that are pure timing artifacts.
|
||||
|
||||
**Recommended Fix:**
|
||||
- Compute YTD budget through the **prior completed month** (February)
|
||||
- OR pro-rate the current month's budget by days elapsed
|
||||
- Add a note to the prompt: "If the variance is driven by the current incomplete month, flag it as 'timing' and weight it minimally."
|
||||
|
||||
### Issue 3: Per-Unit vs Total Confusion on Special Assessments (LOW Priority)
|
||||
|
||||
The AI sometimes quotes "$300" as the annual reserve income instead of $300 × 67 = $20,100. The data passed says "$300.00/unit × 67 units (annual)" but the model occasionally fixates on the per-unit figure.
|
||||
|
||||
**Recommended Fix:**
|
||||
- Pre-compute and include the total in the data: "Total Annual Special Assessment Income: $20,100.00"
|
||||
- Keep the per-unit breakdown for context but lead with the total
|
||||
|
||||
### Issue 4: Cash Runway Classification Inconsistency (MEDIUM Priority)
|
||||
|
||||
The operating health score rates 2.4 months of cash runway as "positive" despite the scoring guidelines defining 2-3 months as "Fair" territory. This inflates the overall score.
|
||||
|
||||
**Recommended Fix:**
|
||||
- Add explicit prompt guidance: "Cash runway categorization: <2 months = negative, 2-3 months = neutral, 3-6 months = positive, 6+ months = strongly positive. Do NOT rate below-threshold runway as positive based on projected future inflows."
|
||||
|
||||
### Issue 5: Dual Project Tables (INFORMATIONAL)
|
||||
|
||||
The schema contains both `capital_projects` (empty) and `projects` (26 rows). The health score service correctly queries `projects`, but auditors initially checked `capital_projects` and found no data. This dual-table pattern could confuse future developers.
|
||||
|
||||
**Recommended Fix:**
|
||||
- Consolidate into a single table, OR
|
||||
- Add a comment/documentation clarifying the canonical source
|
||||
|
||||
---
|
||||
|
||||
## Concurrence Summary by Recommendation
|
||||
|
||||
### Operating Fund Health — Recommendations
|
||||
| Recommendation | Concurrence |
|
||||
|---|---|
|
||||
| Verify posting schedule for $20,700 Special Assessment | 90% |
|
||||
| Investigate low YTD expense recognition | 95% |
|
||||
| Move excess cash to interest-bearing account | 85% |
|
||||
| **Average** | **90%** |
|
||||
|
||||
### Reserve Fund Health — Recommendations
|
||||
| Recommendation | Concurrence |
|
||||
|---|---|
|
||||
| Commission professional Reserve Study | 100% |
|
||||
| Develop funding plan for $80K Road Sealing | 90% |
|
||||
| Formalize special assessment collection for reserves | 95% |
|
||||
| **Average** | **95%** |
|
||||
|
||||
### Investment Planning — Recommendations
|
||||
| Recommendation | Concurrence |
|
||||
|---|---|
|
||||
| Critical Reserve Shortfall for March Project | 90% |
|
||||
| Reinvest Maturing CD #2a at Higher Rate | 95% |
|
||||
| Establish 12-Month CD Ladder | 75% |
|
||||
| Deploy Operating Cash to HY Savings | 90% |
|
||||
| Optimize Reserve Cash Post-Project | 85% |
|
||||
| Formalize Special Assessment Collection | 90% |
|
||||
| **Average** | **88%** |
|
||||
|
||||
---
|
||||
|
||||
## Final Grades
|
||||
|
||||
| Feature | Score Accuracy | Recommendation Quality | Data Fidelity | Consistency | **Overall** |
|
||||
|---|---|---|---|---|---|
|
||||
| Operating Fund Health | C+ (score ~15 pts high) | A (90%) | B+ (minor math phrasing) | C (16-pt spread) | **72% — B-** |
|
||||
| Reserve Fund Health | A- (well-calibrated) | A (95%) | B (per-unit confusion) | B- (23-pt spread) | **85% — B+** |
|
||||
| Investment Recommendations | N/A (no single score) | A (88%) | A (exact data matches) | A- (stable across runs) | **88% — A-** |
|
||||
|
||||
---
|
||||
|
||||
## Priority Action Items for Tuning
|
||||
|
||||
1. **[HIGH]** Reduce AI temperature from 0.3 → 0.1 for health score calculations to reduce score volatility
|
||||
2. **[MEDIUM]** Add explicit cash-runway-to-impact mapping in operating prompt to prevent misclassification
|
||||
3. **[MEDIUM]** Pre-compute total special assessment income in data context (not just per-unit)
|
||||
4. **[LOW]** Adjust YTD budget calculation to use prior completed month or pro-rate current month
|
||||
5. **[LOW]** Add boundary justification requirement to scoring prompts
|
||||
6. **[LOW]** Consider implementing 3-run moving average for displayed health scores
|
||||
|
||||
---
|
||||
|
||||
*Generated by Claude Opus 4.6 — Automated AI Feature Audit*
|
||||
22
frontend/Dockerfile
Normal file
22
frontend/Dockerfile
Normal file
@@ -0,0 +1,22 @@
|
||||
# ---- Production Dockerfile for React frontend ----
|
||||
# Multi-stage build: compile to static assets, serve with nginx
|
||||
|
||||
# Stage 1: Build
|
||||
FROM node:20-alpine AS builder
|
||||
WORKDIR /app
|
||||
COPY package*.json ./
|
||||
RUN npm ci
|
||||
COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# Stage 2: Serve with nginx
|
||||
FROM nginx:alpine
|
||||
|
||||
# Copy the built static files
|
||||
COPY --from=builder /app/dist /usr/share/nginx/html
|
||||
|
||||
# Copy a small nginx config for SPA routing
|
||||
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
||||
|
||||
EXPOSE 3001
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
||||
@@ -9,5 +9,34 @@
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
<script>
|
||||
(function(d,t) {
|
||||
var BASE_URL="https://chat.hoaledgeriq.com";
|
||||
var g=d.createElement(t),s=d.getElementsByTagName(t)[0];
|
||||
g.src=BASE_URL+"/packs/js/sdk.js";
|
||||
g.async=true;
|
||||
s.parentNode.insertBefore(g,s);
|
||||
g.onload=function(){
|
||||
window.chatwootSDK.run({
|
||||
websiteToken:'K6VXvTtKXvaCMvre4yK85SPb',
|
||||
baseUrl:BASE_URL
|
||||
})
|
||||
}
|
||||
})(document,"script");
|
||||
window.addEventListener('chatwoot:ready', function() {
|
||||
try {
|
||||
var raw = localStorage.getItem('ledgeriq-auth');
|
||||
if (!raw) return;
|
||||
var auth = JSON.parse(raw);
|
||||
var user = auth && auth.state && auth.state.user;
|
||||
if (user && window.$chatwoot) {
|
||||
window.$chatwoot.setUser(user.id, {
|
||||
name: (user.firstName || '') + ' ' + (user.lastName || ''),
|
||||
email: user.email
|
||||
});
|
||||
}
|
||||
} catch (e) {}
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
20
frontend/nginx.conf
Normal file
20
frontend/nginx.conf
Normal file
@@ -0,0 +1,20 @@
|
||||
# Minimal nginx config for serving the React SPA inside the frontend container.
|
||||
# The outer nginx reverse proxy forwards non-API requests here.
|
||||
|
||||
server {
|
||||
listen 3001;
|
||||
server_name _;
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
|
||||
# Serve static assets with long cache (Vite hashes filenames)
|
||||
location /assets/ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
|
||||
# SPA fallback — any non-file route returns index.html
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user