Compare commits
22 Commits
a550a8d0be
...
ai-improve
| Author | SHA1 | Date | |
|---|---|---|---|
| 9146118df1 | |||
| 07d15001ae | |||
| a0b366e94a | |||
| 3790a3bd9e | |||
| 0a07c61ca3 | |||
| 337b6061b2 | |||
| 467fdd2a6c | |||
| c12ad94b7f | |||
| 05e241c792 | |||
| 5ee4c71fc1 | |||
| 81908e48ea | |||
| 6230558b91 | |||
| 2c215353d4 | |||
| d526025926 | |||
| 411239bea4 | |||
| 7e6c4c16ce | |||
| ea0e3d6f29 | |||
| 8db89373e0 | |||
| e719f593de | |||
| 16adfd6f26 | |||
| 704f29362a | |||
| 42767e3119 |
@@ -12,3 +12,8 @@ AI_API_KEY=your_nvidia_api_key_here
|
|||||||
AI_MODEL=qwen/qwen3.5-397b-a17b
|
AI_MODEL=qwen/qwen3.5-397b-a17b
|
||||||
# Set to 'true' to enable detailed AI prompt/response logging
|
# Set to 'true' to enable detailed AI prompt/response logging
|
||||||
AI_DEBUG=false
|
AI_DEBUG=false
|
||||||
|
|
||||||
|
# New Relic APM — set ENABLED=true and provide your license key to activate
|
||||||
|
NEW_RELIC_ENABLED=false
|
||||||
|
NEW_RELIC_LICENSE_KEY=your_new_relic_license_key_here
|
||||||
|
NEW_RELIC_APP_NAME=HOALedgerIQ_App
|
||||||
|
|||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -24,6 +24,11 @@ postgres_data/
|
|||||||
redis_data/
|
redis_data/
|
||||||
pgdata/
|
pgdata/
|
||||||
|
|
||||||
|
# Database backups
|
||||||
|
backups/
|
||||||
|
*.dump
|
||||||
|
*.dump.gz
|
||||||
|
|
||||||
# SSL
|
# SSL
|
||||||
letsencrypt/
|
letsencrypt/
|
||||||
|
|
||||||
|
|||||||
136
PLAN.md
Normal file
136
PLAN.md
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
# Phase 2 Bug Fix & Tweaks - Implementation Plan
|
||||||
|
|
||||||
|
## 1. Admin Panel: Tenant Creation, Contract/Plan Fields, Disable/Archive
|
||||||
|
|
||||||
|
### Database Changes
|
||||||
|
- Add `contract_number VARCHAR(100)` and `plan_level VARCHAR(50) DEFAULT 'standard'` to `shared.organizations` (live DB ALTER + init SQL)
|
||||||
|
- Add `archived` to the status CHECK constraint: `('active', 'suspended', 'trial', 'archived')`
|
||||||
|
- Add to Organization entity: `contractNumber`, `planLevel` columns
|
||||||
|
|
||||||
|
### Backend Changes
|
||||||
|
- **admin.controller.ts**: Add two new endpoints:
|
||||||
|
- `POST /admin/tenants` — Creates org + first user + tenant schema in one call. Accepts: org name, email, address, contractNumber, planLevel, plus first user's email/password/firstName/lastName. Calls OrganizationsService.create() then sets up the user.
|
||||||
|
- `PUT /admin/organizations/:id/status` — Sets status to 'active', 'suspended', or 'archived'
|
||||||
|
- **auth.module.ts**: Import OrganizationsModule so AdminController can inject OrganizationsService
|
||||||
|
- **auth.service.ts**: In `login()`, after loading user with orgs, check if the default org's status is 'suspended' or 'archived' → throw UnauthorizedException("Your organization has been suspended/archived")
|
||||||
|
- **users.service.ts**: Update `findAllOrganizations()` query to include `contract_number, plan_level` in the SELECT
|
||||||
|
|
||||||
|
### Frontend Changes
|
||||||
|
- **AdminPage.tsx**:
|
||||||
|
- Add "Create Tenant" button → opens a modal with: org name, address, email, phone, contract number, plan level (select: standard/premium/enterprise), first admin email, first admin password, first/last name
|
||||||
|
- Orgs table: add Contract #, Plan Level columns
|
||||||
|
- Orgs table: add Status dropdown/buttons (Active/Suspended/Archived) per row with confirmation
|
||||||
|
- Show status colors: active=green, trial=yellow, suspended=orange, archived=red
|
||||||
|
|
||||||
|
## 2. Units/Homeowners: Delete + Assessment Group Binding
|
||||||
|
|
||||||
|
### Backend Changes
|
||||||
|
- **units.controller.ts**: Add `@Delete(':id')` route
|
||||||
|
- **units.service.ts**:
|
||||||
|
- Add `delete(id)` method — checks for outstanding invoices first, then deletes
|
||||||
|
- Add `assessment_group_id` to `create()` INSERT and `update()` UPDATE queries
|
||||||
|
- Update `findAll()` to JOIN assessment_groups and return `assessment_group_name`
|
||||||
|
|
||||||
|
### Frontend Changes
|
||||||
|
- **UnitsPage.tsx**:
|
||||||
|
- Add delete button (trash icon) per row with confirmation dialog
|
||||||
|
- Add Assessment Group dropdown (Select) in create/edit modal, populated from `/assessment-groups` query
|
||||||
|
- Show assessment group name in table
|
||||||
|
- When an assessment group is selected and no manual monthly_assessment is set, auto-fill from the group's regular_assessment
|
||||||
|
|
||||||
|
## 3. Assessment Groups: Frequency Field
|
||||||
|
|
||||||
|
### Database Changes
|
||||||
|
- Add `frequency VARCHAR(20) DEFAULT 'monthly'` to `assessment_groups` table (live DB ALTER + tenant-schema DDL)
|
||||||
|
- CHECK constraint: `('monthly', 'quarterly', 'annual')`
|
||||||
|
|
||||||
|
### Backend Changes
|
||||||
|
- **assessment-groups.service.ts**:
|
||||||
|
- Add `frequency` to `create()` INSERT
|
||||||
|
- Add `frequency` to `update()` dynamic sets
|
||||||
|
- Update `findAll()` and `getSummary()` income calculations to adjust by frequency:
|
||||||
|
- monthly → multiply by 1 (×12/year)
|
||||||
|
- quarterly → amounts are per quarter, so monthly = amount/3
|
||||||
|
- annual → amounts are per year, so monthly = amount/12
|
||||||
|
- Summary labels should change to reflect "Monthly Equivalent" for mixed frequencies
|
||||||
|
|
||||||
|
### Frontend Changes
|
||||||
|
- **AssessmentGroupsPage.tsx**:
|
||||||
|
- Add frequency Select in create/edit modal: Monthly, Quarterly, Annual
|
||||||
|
- Show frequency badge in table
|
||||||
|
- Update summary cards: labels → "Monthly Equivalent Operating" etc.
|
||||||
|
- Assessment amount label changes based on frequency ("Per Month" / "Per Quarter" / "Per Year")
|
||||||
|
|
||||||
|
## 4. UI Streamlining: Sidebar Grouping, Rename, Logo
|
||||||
|
|
||||||
|
### Sidebar Restructure
|
||||||
|
Group nav items into labeled sections:
|
||||||
|
```
|
||||||
|
Dashboard
|
||||||
|
─── FINANCIALS ───
|
||||||
|
Accounts (renamed from "Chart of Accounts")
|
||||||
|
Budgets
|
||||||
|
Investments
|
||||||
|
─── ASSESSMENTS ───
|
||||||
|
Units / Homeowners
|
||||||
|
Assessment Groups
|
||||||
|
─── TRANSACTIONS ───
|
||||||
|
Transactions
|
||||||
|
Invoices
|
||||||
|
Payments
|
||||||
|
─── PLANNING ───
|
||||||
|
Capital Projects
|
||||||
|
Reserves
|
||||||
|
Vendors
|
||||||
|
─── REPORTS ───
|
||||||
|
(collapsible with sub-items)
|
||||||
|
─── ADMIN ───
|
||||||
|
Year-End
|
||||||
|
Settings
|
||||||
|
─── PLATFORM ADMIN ─── (superadmin only)
|
||||||
|
Admin Panel
|
||||||
|
```
|
||||||
|
|
||||||
|
### Logo
|
||||||
|
- Copy SVG to `frontend/src/assets/logo.svg`
|
||||||
|
- In AppLayout.tsx: Replace `<Title order={3} c="blue">HOA LedgerIQ</Title>` with an `<img>` tag loading the SVG, sized to fit the 60px header (height ~40px with padding)
|
||||||
|
- SVG will be served directly (Vite handles SVG imports natively), no PNG conversion needed since browsers render SVG natively and it's cleaner
|
||||||
|
|
||||||
|
## 5. Capital Projects: PDF Table Export, Kanban Default, Future Category
|
||||||
|
|
||||||
|
### Frontend Changes
|
||||||
|
- **CapitalProjectsPage.tsx**:
|
||||||
|
- Change default viewMode from `'table'` to `'kanban'`
|
||||||
|
- PDF export: temporarily switch to table view for print, then restore. Use `@media print` CSS to always show table layout regardless of current view
|
||||||
|
- Add "Future" column in kanban: projects with `target_year = 9999` (sentinel value) display as "Future"
|
||||||
|
- Update the form: Target Year select should include a "Future (Beyond 5-Year)" option that maps to year 9999
|
||||||
|
- Kanban year list: always include current year through +5, plus "Future" if any projects exist there
|
||||||
|
- Table view: group "Future" projects under a "Future" header
|
||||||
|
- Title: "Capital Projects" (remove "(5-Year Plan)" since we now have Future)
|
||||||
|
|
||||||
|
### Backend
|
||||||
|
- No backend changes needed — target_year=9999 works with existing schema (integer column, no constraint)
|
||||||
|
|
||||||
|
## File Change Summary
|
||||||
|
|
||||||
|
| File | Action |
|
||||||
|
|------|--------|
|
||||||
|
| `db/init/00-init.sql` | Add contract_number, plan_level, update status CHECK |
|
||||||
|
| `backend/src/modules/organizations/entities/organization.entity.ts` | Add contractNumber, planLevel columns |
|
||||||
|
| `backend/src/modules/organizations/dto/create-organization.dto.ts` | Add contractNumber, planLevel fields |
|
||||||
|
| `backend/src/modules/auth/admin.controller.ts` | Add POST /admin/tenants, PUT /admin/organizations/:id/status |
|
||||||
|
| `backend/src/modules/auth/auth.module.ts` | Import OrganizationsModule |
|
||||||
|
| `backend/src/modules/auth/auth.service.ts` | Add org status check on login |
|
||||||
|
| `backend/src/modules/users/users.service.ts` | Update findAllOrganizations query |
|
||||||
|
| `backend/src/modules/units/units.controller.ts` | Add DELETE route |
|
||||||
|
| `backend/src/modules/units/units.service.ts` | Add delete(), assessment_group_id support |
|
||||||
|
| `backend/src/modules/assessment-groups/assessment-groups.service.ts` | Add frequency support + adjust income calcs |
|
||||||
|
| `backend/src/database/tenant-schema.service.ts` | Add frequency to assessment_groups DDL |
|
||||||
|
| `frontend/src/assets/logo.svg` | New — copy from /Users/claw/Downloads/logo_house.svg |
|
||||||
|
| `frontend/src/components/layout/AppLayout.tsx` | Replace text with logo |
|
||||||
|
| `frontend/src/components/layout/Sidebar.tsx` | Restructure with grouped sections |
|
||||||
|
| `frontend/src/pages/admin/AdminPage.tsx` | Create tenant modal, status management, new columns |
|
||||||
|
| `frontend/src/pages/units/UnitsPage.tsx` | Delete, assessment group dropdown |
|
||||||
|
| `frontend/src/pages/assessment-groups/AssessmentGroupsPage.tsx` | Frequency field |
|
||||||
|
| `frontend/src/pages/capital-projects/CapitalProjectsPage.tsx` | Kanban default, table PDF, Future category |
|
||||||
|
| Live DB | ALTER TABLE commands for contract_number, plan_level, frequency, status CHECK |
|
||||||
32
backend/Dockerfile
Normal file
32
backend/Dockerfile
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
# ---- Production Dockerfile for NestJS backend ----
|
||||||
|
# Multi-stage build: compile TypeScript, then run with minimal image
|
||||||
|
|
||||||
|
# Stage 1: Build
|
||||||
|
FROM node:20-alpine AS builder
|
||||||
|
WORKDIR /app
|
||||||
|
COPY package*.json ./
|
||||||
|
RUN npm ci
|
||||||
|
COPY . .
|
||||||
|
RUN npm run build
|
||||||
|
|
||||||
|
# Stage 2: Production
|
||||||
|
FROM node:20-alpine
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Only install production dependencies
|
||||||
|
COPY package*.json ./
|
||||||
|
RUN npm ci --omit=dev && npm cache clean --force
|
||||||
|
|
||||||
|
# Copy compiled output and New Relic preload from builder
|
||||||
|
COPY --from=builder /app/dist ./dist
|
||||||
|
COPY --from=builder /app/newrelic-preload.js ./newrelic-preload.js
|
||||||
|
|
||||||
|
# New Relic agent — configured entirely via environment variables
|
||||||
|
ENV NEW_RELIC_NO_CONFIG_FILE=true
|
||||||
|
ENV NEW_RELIC_DISTRIBUTED_TRACING_ENABLED=true
|
||||||
|
ENV NEW_RELIC_LOG=stdout
|
||||||
|
|
||||||
|
EXPOSE 3000
|
||||||
|
|
||||||
|
# Preload the New Relic agent (activates only when NEW_RELIC_ENABLED=true)
|
||||||
|
CMD ["node", "-r", "./newrelic-preload.js", "dist/main"]
|
||||||
@@ -7,6 +7,11 @@ RUN npm install
|
|||||||
|
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
|
# New Relic agent — configured entirely via environment variables
|
||||||
|
ENV NEW_RELIC_NO_CONFIG_FILE=true
|
||||||
|
ENV NEW_RELIC_DISTRIBUTED_TRACING_ENABLED=true
|
||||||
|
ENV NEW_RELIC_LOG=stdout
|
||||||
|
|
||||||
EXPOSE 3000
|
EXPOSE 3000
|
||||||
|
|
||||||
CMD ["npm", "run", "start:dev"]
|
CMD ["npm", "run", "start:dev"]
|
||||||
|
|||||||
7
backend/newrelic-preload.js
Normal file
7
backend/newrelic-preload.js
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
// Conditionally load the New Relic agent before any other modules.
|
||||||
|
// Controlled by the NEW_RELIC_ENABLED environment variable (.env).
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
if (process.env.NEW_RELIC_ENABLED === 'true') {
|
||||||
|
require('newrelic');
|
||||||
|
}
|
||||||
1385
backend/package-lock.json
generated
1385
backend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -8,7 +8,7 @@
|
|||||||
"start": "nest start",
|
"start": "nest start",
|
||||||
"start:dev": "nest start --watch",
|
"start:dev": "nest start --watch",
|
||||||
"start:debug": "nest start --debug --watch",
|
"start:debug": "nest start --debug --watch",
|
||||||
"start:prod": "node dist/main",
|
"start:prod": "node -r ./newrelic-preload.js dist/main",
|
||||||
"lint": "eslint \"{src,apps,libs,test}/**/*.ts\"",
|
"lint": "eslint \"{src,apps,libs,test}/**/*.ts\"",
|
||||||
"test": "jest",
|
"test": "jest",
|
||||||
"test:watch": "jest --watch",
|
"test:watch": "jest --watch",
|
||||||
@@ -37,6 +37,7 @@
|
|||||||
"reflect-metadata": "^0.2.2",
|
"reflect-metadata": "^0.2.2",
|
||||||
"rxjs": "^7.8.1",
|
"rxjs": "^7.8.1",
|
||||||
"typeorm": "^0.3.20",
|
"typeorm": "^0.3.20",
|
||||||
|
"newrelic": "latest",
|
||||||
"uuid": "^9.0.1"
|
"uuid": "^9.0.1"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
|||||||
@@ -43,6 +43,13 @@ import { ScheduleModule } from '@nestjs/schedule';
|
|||||||
autoLoadEntities: true,
|
autoLoadEntities: true,
|
||||||
synchronize: false,
|
synchronize: false,
|
||||||
logging: false,
|
logging: false,
|
||||||
|
// Connection pool — reuse connections instead of creating new ones per query
|
||||||
|
extra: {
|
||||||
|
max: 30, // max pool size (across all concurrent requests)
|
||||||
|
min: 5, // keep at least 5 idle connections warm
|
||||||
|
idleTimeoutMillis: 30000, // close idle connections after 30s
|
||||||
|
connectionTimeoutMillis: 5000, // fail fast if pool is exhausted
|
||||||
|
},
|
||||||
}),
|
}),
|
||||||
}),
|
}),
|
||||||
DatabaseModule,
|
DatabaseModule,
|
||||||
|
|||||||
@@ -325,6 +325,8 @@ export class TenantSchemaService {
|
|||||||
risk_notes JSONB,
|
risk_notes JSONB,
|
||||||
requested_by UUID,
|
requested_by UUID,
|
||||||
response_time_ms INTEGER,
|
response_time_ms INTEGER,
|
||||||
|
status VARCHAR(20) DEFAULT 'complete',
|
||||||
|
error_message TEXT,
|
||||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||||
)`,
|
)`,
|
||||||
|
|
||||||
|
|||||||
@@ -1,18 +1,53 @@
|
|||||||
|
import * as _cluster from 'node:cluster';
|
||||||
|
import * as os from 'node:os';
|
||||||
import { NestFactory } from '@nestjs/core';
|
import { NestFactory } from '@nestjs/core';
|
||||||
import { ValidationPipe } from '@nestjs/common';
|
import { ValidationPipe } from '@nestjs/common';
|
||||||
import { SwaggerModule, DocumentBuilder } from '@nestjs/swagger';
|
import { SwaggerModule, DocumentBuilder } from '@nestjs/swagger';
|
||||||
import { AppModule } from './app.module';
|
import { AppModule } from './app.module';
|
||||||
|
|
||||||
|
const cluster = _cluster as any; // Cast to 'any' bypasses the missing property errors
|
||||||
|
const isProduction = process.env.NODE_ENV === 'production';
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Clustering — fork one worker per CPU core in production
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
const WORKERS = isProduction
|
||||||
|
? Math.min(os.cpus().length, 4) // cap at 4 workers to stay within DB pool
|
||||||
|
: 1; // single process in dev
|
||||||
|
|
||||||
|
if (WORKERS > 1 && cluster.isPrimary) {
|
||||||
|
console.log(`Primary ${process.pid} forking ${WORKERS} workers ...`);
|
||||||
|
for (let i = 0; i < WORKERS; i++) {
|
||||||
|
cluster.fork();
|
||||||
|
}
|
||||||
|
cluster.on('exit', (worker: any, code: number) => {
|
||||||
|
console.warn(`Worker ${worker.process.pid} exited (code ${code}), restarting ...`);
|
||||||
|
cluster.fork();
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
bootstrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// NestJS bootstrap
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
async function bootstrap() {
|
async function bootstrap() {
|
||||||
const app = await NestFactory.create(AppModule);
|
const app = await NestFactory.create(AppModule, {
|
||||||
|
logger: isProduction ? ['error', 'warn', 'log'] : ['error', 'warn', 'log', 'debug', 'verbose'],
|
||||||
|
});
|
||||||
|
|
||||||
app.setGlobalPrefix('api');
|
app.setGlobalPrefix('api');
|
||||||
|
|
||||||
// Request logging
|
// Request logging — only in development (too noisy / slow for prod)
|
||||||
app.use((req: any, _res: any, next: any) => {
|
if (!isProduction) {
|
||||||
console.log(`[REQ] ${req.method} ${req.url} auth=${req.headers.authorization ? 'yes' : 'no'}`);
|
app.use((req: any, _res: any, next: any) => {
|
||||||
next();
|
console.log(`[REQ] ${req.method} ${req.url} auth=${req.headers.authorization ? 'yes' : 'no'}`);
|
||||||
});
|
next();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
app.useGlobalPipes(
|
app.useGlobalPipes(
|
||||||
new ValidationPipe({
|
new ValidationPipe({
|
||||||
@@ -22,21 +57,22 @@ async function bootstrap() {
|
|||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// CORS — in production nginx handles this; accept all origins behind the proxy
|
||||||
app.enableCors({
|
app.enableCors({
|
||||||
origin: ['http://localhost', 'http://localhost:5173'],
|
origin: isProduction ? true : ['http://localhost', 'http://localhost:5173'],
|
||||||
credentials: true,
|
credentials: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Swagger docs — available in all environments
|
||||||
const config = new DocumentBuilder()
|
const config = new DocumentBuilder()
|
||||||
.setTitle('HOA LedgerIQ API')
|
.setTitle('HOA LedgerIQ API')
|
||||||
.setDescription('API for the HOA LedgerIQ')
|
.setDescription('API for the HOA LedgerIQ')
|
||||||
.setVersion('0.1.0')
|
.setVersion('2026.3.2')
|
||||||
.addBearerAuth()
|
.addBearerAuth()
|
||||||
.build();
|
.build();
|
||||||
const document = SwaggerModule.createDocument(app, config);
|
const document = SwaggerModule.createDocument(app, config);
|
||||||
SwaggerModule.setup('api/docs', app, document);
|
SwaggerModule.setup('api/docs', app, document);
|
||||||
|
|
||||||
await app.listen(3000);
|
await app.listen(3000);
|
||||||
console.log('Backend running on port 3000');
|
console.log(`Backend worker ${process.pid} listening on port 3000`);
|
||||||
}
|
}
|
||||||
bootstrap();
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { Controller, Get, Post, UseGuards, Req } from '@nestjs/common';
|
import { Controller, Get, Post, UseGuards, Req, Logger } from '@nestjs/common';
|
||||||
import { ApiTags, ApiBearerAuth, ApiOperation } from '@nestjs/swagger';
|
import { ApiTags, ApiBearerAuth, ApiOperation } from '@nestjs/swagger';
|
||||||
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
|
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
|
||||||
import { AllowViewer } from '../../common/decorators/allow-viewer.decorator';
|
import { AllowViewer } from '../../common/decorators/allow-viewer.decorator';
|
||||||
@@ -9,6 +9,8 @@ import { HealthScoresService } from './health-scores.service';
|
|||||||
@ApiBearerAuth()
|
@ApiBearerAuth()
|
||||||
@UseGuards(JwtAuthGuard)
|
@UseGuards(JwtAuthGuard)
|
||||||
export class HealthScoresController {
|
export class HealthScoresController {
|
||||||
|
private readonly logger = new Logger(HealthScoresController.name);
|
||||||
|
|
||||||
constructor(private service: HealthScoresService) {}
|
constructor(private service: HealthScoresService) {}
|
||||||
|
|
||||||
@Get('latest')
|
@Get('latest')
|
||||||
@@ -19,14 +21,56 @@ export class HealthScoresController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Post('calculate')
|
@Post('calculate')
|
||||||
@ApiOperation({ summary: 'Trigger health score recalculation for current tenant' })
|
@ApiOperation({ summary: 'Trigger both health score recalculations (async — returns immediately)' })
|
||||||
@AllowViewer()
|
@AllowViewer()
|
||||||
async calculate(@Req() req: any) {
|
async calculate(@Req() req: any) {
|
||||||
const schema = req.user?.orgSchema;
|
const schema = req.user?.orgSchema;
|
||||||
const [operating, reserve] = await Promise.all([
|
|
||||||
|
// Fire-and-forget — background processing saves results to DB
|
||||||
|
Promise.all([
|
||||||
this.service.calculateScore(schema, 'operating'),
|
this.service.calculateScore(schema, 'operating'),
|
||||||
this.service.calculateScore(schema, 'reserve'),
|
this.service.calculateScore(schema, 'reserve'),
|
||||||
]);
|
]).catch((err) => {
|
||||||
return { operating, reserve };
|
this.logger.error(`Background health score calculation failed: ${err.message}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 'processing',
|
||||||
|
message: 'Health score calculations started. Results will appear when ready.',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post('calculate/operating')
|
||||||
|
@ApiOperation({ summary: 'Trigger operating fund health score recalculation (async)' })
|
||||||
|
@AllowViewer()
|
||||||
|
async calculateOperating(@Req() req: any) {
|
||||||
|
const schema = req.user?.orgSchema;
|
||||||
|
|
||||||
|
// Fire-and-forget
|
||||||
|
this.service.calculateScore(schema, 'operating').catch((err) => {
|
||||||
|
this.logger.error(`Background operating score failed: ${err.message}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 'processing',
|
||||||
|
message: 'Operating fund health score calculation started.',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post('calculate/reserve')
|
||||||
|
@ApiOperation({ summary: 'Trigger reserve fund health score recalculation (async)' })
|
||||||
|
@AllowViewer()
|
||||||
|
async calculateReserve(@Req() req: any) {
|
||||||
|
const schema = req.user?.orgSchema;
|
||||||
|
|
||||||
|
// Fire-and-forget
|
||||||
|
this.service.calculateScore(schema, 'reserve').catch((err) => {
|
||||||
|
this.logger.error(`Background reserve score failed: ${err.message}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 'processing',
|
||||||
|
message: 'Reserve fund health score calculation started.',
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -47,23 +47,49 @@ export class HealthScoresService {
|
|||||||
|
|
||||||
// ── Public API ──
|
// ── Public API ──
|
||||||
|
|
||||||
async getLatestScores(schema: string): Promise<{ operating: HealthScore | null; reserve: HealthScore | null }> {
|
async getLatestScores(schema: string): Promise<{
|
||||||
|
operating: HealthScore | null;
|
||||||
|
reserve: HealthScore | null;
|
||||||
|
operating_last_failed: boolean;
|
||||||
|
reserve_last_failed: boolean;
|
||||||
|
}> {
|
||||||
const qr = this.dataSource.createQueryRunner();
|
const qr = this.dataSource.createQueryRunner();
|
||||||
try {
|
try {
|
||||||
await qr.connect();
|
await qr.connect();
|
||||||
await qr.query(`SET search_path TO "${schema}"`);
|
await qr.query(`SET search_path TO "${schema}"`);
|
||||||
|
|
||||||
const operating = await qr.query(
|
// For each score type, return the latest *successful* score for display,
|
||||||
`SELECT * FROM health_scores WHERE score_type = 'operating' ORDER BY calculated_at DESC LIMIT 1`,
|
// and flag whether the most recent attempt (any status) was an error.
|
||||||
);
|
const result = { operating: null as HealthScore | null, reserve: null as HealthScore | null, operating_last_failed: false, reserve_last_failed: false };
|
||||||
const reserve = await qr.query(
|
|
||||||
`SELECT * FROM health_scores WHERE score_type = 'reserve' ORDER BY calculated_at DESC LIMIT 1`,
|
|
||||||
);
|
|
||||||
|
|
||||||
return {
|
for (const scoreType of ['operating', 'reserve'] as const) {
|
||||||
operating: operating[0] || null,
|
// Most recent row (any status)
|
||||||
reserve: reserve[0] || null,
|
const latest = await qr.query(
|
||||||
};
|
`SELECT * FROM health_scores WHERE score_type = $1 ORDER BY calculated_at DESC LIMIT 1`,
|
||||||
|
[scoreType],
|
||||||
|
);
|
||||||
|
const latestRow = latest[0] || null;
|
||||||
|
|
||||||
|
if (!latestRow) {
|
||||||
|
// No scores at all
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (latestRow.status === 'error') {
|
||||||
|
// Most recent attempt failed — return the latest *complete* score instead
|
||||||
|
const lastGood = await qr.query(
|
||||||
|
`SELECT * FROM health_scores WHERE score_type = $1 AND status = 'complete' ORDER BY calculated_at DESC LIMIT 1`,
|
||||||
|
[scoreType],
|
||||||
|
);
|
||||||
|
result[scoreType] = lastGood[0] || latestRow; // fall back to error row if no good score exists
|
||||||
|
result[`${scoreType}_last_failed`] = true;
|
||||||
|
} else {
|
||||||
|
result[scoreType] = latestRow;
|
||||||
|
result[`${scoreType}_last_failed`] = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
} finally {
|
} finally {
|
||||||
await qr.release();
|
await qr.release();
|
||||||
}
|
}
|
||||||
@@ -226,7 +252,7 @@ export class HealthScoresService {
|
|||||||
private async gatherOperatingData(qr: any) {
|
private async gatherOperatingData(qr: any) {
|
||||||
const year = new Date().getFullYear();
|
const year = new Date().getFullYear();
|
||||||
|
|
||||||
const [accounts, budgets, assessments, cashFlow, recentTransactions] = await Promise.all([
|
const [accounts, budgets, assessments, cashFlow, recentTransactions, actualsMonths] = await Promise.all([
|
||||||
// Operating accounts with balances
|
// Operating accounts with balances
|
||||||
qr.query(`
|
qr.query(`
|
||||||
SELECT a.name, a.account_number, a.account_type, a.fund_type,
|
SELECT a.name, a.account_number, a.account_type, a.fund_type,
|
||||||
@@ -285,21 +311,54 @@ export class HealthScoresService {
|
|||||||
FROM invoices
|
FROM invoices
|
||||||
WHERE status IN ('sent', 'overdue') AND due_date < CURRENT_DATE
|
WHERE status IN ('sent', 'overdue') AND due_date < CURRENT_DATE
|
||||||
`),
|
`),
|
||||||
|
// Detect which months have posted actuals (expense or income JEs)
|
||||||
|
qr.query(`
|
||||||
|
SELECT DISTINCT EXTRACT(MONTH FROM je.entry_date)::int as month_num
|
||||||
|
FROM journal_entries je
|
||||||
|
JOIN journal_entry_lines jel ON jel.journal_entry_id = je.id
|
||||||
|
JOIN accounts a ON a.id = jel.account_id
|
||||||
|
WHERE je.entry_date >= $1
|
||||||
|
AND je.entry_date < $2
|
||||||
|
AND je.is_posted = true AND je.is_void = false
|
||||||
|
AND a.fund_type = 'operating'
|
||||||
|
AND a.account_type IN ('income', 'expense')
|
||||||
|
ORDER BY month_num
|
||||||
|
`, [`${year}-01-01`, `${year + 1}-01-01`]),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
// Calculate month-by-month budget actuals progress
|
// Calculate month-by-month budget actuals progress
|
||||||
const currentMonth = new Date().getMonth(); // 0-indexed
|
const currentMonth = new Date().getMonth(); // 0-indexed
|
||||||
|
const dayOfMonth = new Date().getDate();
|
||||||
const monthNames = ['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec_amt'];
|
const monthNames = ['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec_amt'];
|
||||||
|
const monthLabelsForBudget = ['January','February','March','April','May','June','July','August','September','October','November','December'];
|
||||||
|
|
||||||
|
// Determine which months have posted actuals
|
||||||
|
const monthsWithActuals: number[] = actualsMonths.map((r: any) => parseInt(r.month_num)); // 1-indexed
|
||||||
|
const lastActualsMonth0 = monthsWithActuals.length > 0
|
||||||
|
? Math.max(...monthsWithActuals) - 1 // convert to 0-indexed
|
||||||
|
: -1; // no actuals posted at all
|
||||||
|
|
||||||
|
// YTD budget = sum through last month with actuals only (NOT current incomplete month)
|
||||||
let budgetedIncomeYTD = 0;
|
let budgetedIncomeYTD = 0;
|
||||||
let budgetedExpenseYTD = 0;
|
let budgetedExpenseYTD = 0;
|
||||||
for (const b of budgets) {
|
for (const b of budgets) {
|
||||||
for (let m = 0; m <= currentMonth; m++) {
|
for (let m = 0; m <= lastActualsMonth0; m++) {
|
||||||
const amt = parseFloat(b[monthNames[m]]) || 0;
|
const amt = parseFloat(b[monthNames[m]]) || 0;
|
||||||
if (b.account_type === 'income') budgetedIncomeYTD += amt;
|
if (b.account_type === 'income') budgetedIncomeYTD += amt;
|
||||||
else if (b.account_type === 'expense') budgetedExpenseYTD += amt;
|
else if (b.account_type === 'expense') budgetedExpenseYTD += amt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Current month budget (shown separately, not included in YTD comparison)
|
||||||
|
let currentMonthBudgetIncome = 0;
|
||||||
|
let currentMonthBudgetExpense = 0;
|
||||||
|
for (const b of budgets) {
|
||||||
|
const amt = parseFloat(b[monthNames[currentMonth]]) || 0;
|
||||||
|
if (b.account_type === 'income') currentMonthBudgetIncome += amt;
|
||||||
|
else if (b.account_type === 'expense') currentMonthBudgetExpense += amt;
|
||||||
|
}
|
||||||
|
const currentMonthHasActuals = monthsWithActuals.includes(currentMonth + 1);
|
||||||
|
|
||||||
const operatingCash = accounts
|
const operatingCash = accounts
|
||||||
.filter((a: any) => a.account_type === 'asset')
|
.filter((a: any) => a.account_type === 'asset')
|
||||||
.reduce((s: number, a: any) => s + parseFloat(a.balance || '0'), 0);
|
.reduce((s: number, a: any) => s + parseFloat(a.balance || '0'), 0);
|
||||||
@@ -433,11 +492,27 @@ export class HealthScoresService {
|
|||||||
ytdIncome,
|
ytdIncome,
|
||||||
ytdExpense,
|
ytdExpense,
|
||||||
monthlyAssessmentIncome,
|
monthlyAssessmentIncome,
|
||||||
|
totalAnnualAssessmentIncome: assessments.reduce((sum: number, ag: any) => {
|
||||||
|
const regular = parseFloat(ag.regular_assessment) || 0;
|
||||||
|
const units = parseInt(ag.unit_count) || 0;
|
||||||
|
const total = regular * units;
|
||||||
|
const freq = ag.frequency || 'monthly';
|
||||||
|
if (freq === 'monthly') return sum + total * 12;
|
||||||
|
if (freq === 'quarterly') return sum + total * 4;
|
||||||
|
return sum + total; // annual
|
||||||
|
}, 0),
|
||||||
delinquentCount: parseInt(recentTransactions[0]?.count || '0'),
|
delinquentCount: parseInt(recentTransactions[0]?.count || '0'),
|
||||||
delinquentAmount: parseFloat(recentTransactions[0]?.total_overdue || '0'),
|
delinquentAmount: parseFloat(recentTransactions[0]?.total_overdue || '0'),
|
||||||
monthsOfExpenses: budgetedExpenseAnnual > 0 ? (operatingCash / (budgetedExpenseAnnual / 12)) : 0,
|
monthsOfExpenses: budgetedExpenseAnnual > 0 ? (operatingCash / (budgetedExpenseAnnual / 12)) : 0,
|
||||||
year,
|
year,
|
||||||
currentMonth: currentMonth + 1,
|
currentMonth: currentMonth + 1,
|
||||||
|
dayOfMonth,
|
||||||
|
monthsWithActuals,
|
||||||
|
lastActualsMonthLabel: lastActualsMonth0 >= 0 ? monthLabelsForBudget[lastActualsMonth0] : null,
|
||||||
|
currentMonthLabel: monthLabelsForBudget[currentMonth],
|
||||||
|
currentMonthBudgetIncome,
|
||||||
|
currentMonthBudgetExpense,
|
||||||
|
currentMonthHasActuals,
|
||||||
forecast,
|
forecast,
|
||||||
lowestCash: Math.round(lowestCash * 100) / 100,
|
lowestCash: Math.round(lowestCash * 100) / 100,
|
||||||
lowestCashMonth,
|
lowestCashMonth,
|
||||||
@@ -715,6 +790,14 @@ KEY FACTORS TO EVALUATE:
|
|||||||
4. Income-to-expense ratio
|
4. Income-to-expense ratio
|
||||||
5. Emergency buffer adequacy
|
5. Emergency buffer adequacy
|
||||||
6. CRITICAL — Projected cash flow: Use the 12-MONTH CASH FLOW FORECAST to assess future liquidity. The forecast shows month-by-month projected income (from assessments and budgeted sources), expenses (from budget), and project costs. Check whether cash will go negative or dangerously low in any future month. If projected income arrives before projected expenses, the position may be adequate even if current cash seems low. Conversely, if a large expense precedes income in a given month, flag the timing risk.
|
6. CRITICAL — Projected cash flow: Use the 12-MONTH CASH FLOW FORECAST to assess future liquidity. The forecast shows month-by-month projected income (from assessments and budgeted sources), expenses (from budget), and project costs. Check whether cash will go negative or dangerously low in any future month. If projected income arrives before projected expenses, the position may be adequate even if current cash seems low. Conversely, if a large expense precedes income in a given month, flag the timing risk.
|
||||||
|
7. BUDGET TIMING: YTD budget comparisons only include months where actual accounting entries have been posted. Do NOT penalize the HOA for a budget variance in the current month if actuals have not yet been submitted — this is normal operational procedure. Actuals are posted at month-end. The current month's budget is shown separately for context only, not for variance analysis.
|
||||||
|
|
||||||
|
CASH RUNWAY CLASSIFICATION (strict — use these rules for the Cash Reserves factor):
|
||||||
|
- <2 months of expenses: impact = "negative"
|
||||||
|
- 2-3 months of expenses: impact = "neutral"
|
||||||
|
- 3-6 months of expenses: impact = "positive"
|
||||||
|
- 6+ months of expenses: impact = "strongly positive" (contributes to Excellent score)
|
||||||
|
Do NOT rate cash runway as positive based on projected future inflows — evaluate the CURRENT cash-on-hand position for this factor. Future inflows should be evaluated separately under the Projected Cash Flow factor.
|
||||||
|
|
||||||
RESPONSE FORMAT:
|
RESPONSE FORMAT:
|
||||||
Respond with ONLY valid JSON (no markdown, no code fences):
|
Respond with ONLY valid JSON (no markdown, no code fences):
|
||||||
@@ -742,14 +825,30 @@ Provide 3-5 factors and 1-3 actionable recommendations. Be specific with dollar
|
|||||||
.join('\n') || 'No budget line items.';
|
.join('\n') || 'No budget line items.';
|
||||||
|
|
||||||
const assessmentLines = data.assessments
|
const assessmentLines = data.assessments
|
||||||
.map((a: any) => `- ${a.name}: $${parseFloat(a.regular_assessment || '0').toFixed(2)}/unit × ${a.unit_count} units (${a.frequency})`)
|
.map((a: any) => {
|
||||||
|
const regular = parseFloat(a.regular_assessment || '0');
|
||||||
|
const units = parseInt(a.unit_count || '0');
|
||||||
|
const total = regular * units;
|
||||||
|
return `- ${a.name}: $${regular.toFixed(2)}/unit × ${units} units (${a.frequency}) = $${total.toFixed(2)} total/period`;
|
||||||
|
})
|
||||||
.join('\n') || 'No assessment groups.';
|
.join('\n') || 'No assessment groups.';
|
||||||
|
|
||||||
|
const totalAnnualAssessmentIncome = data.assessments.reduce((sum: number, a: any) => {
|
||||||
|
const regular = parseFloat(a.regular_assessment || '0');
|
||||||
|
const units = parseInt(a.unit_count || '0');
|
||||||
|
const total = regular * units;
|
||||||
|
const freq = a.frequency || 'monthly';
|
||||||
|
if (freq === 'monthly') return sum + total * 12;
|
||||||
|
if (freq === 'quarterly') return sum + total * 4;
|
||||||
|
return sum + total; // annual
|
||||||
|
}, 0);
|
||||||
|
|
||||||
const userPrompt = `Evaluate this HOA's operating fund health.
|
const userPrompt = `Evaluate this HOA's operating fund health.
|
||||||
|
|
||||||
TODAY: ${today}
|
TODAY: ${today}
|
||||||
FISCAL YEAR: ${data.year}
|
FISCAL YEAR: ${data.year}
|
||||||
CURRENT MONTH: ${data.currentMonth} of 12
|
CURRENT MONTH: ${data.currentMonthLabel} (day ${data.dayOfMonth}), month ${data.currentMonth} of 12
|
||||||
|
Months with posted actuals: ${data.monthsWithActuals.length > 0 ? data.monthsWithActuals.map((m: number) => ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'][m - 1]).join(', ') : 'None yet'}
|
||||||
|
|
||||||
=== OPERATING FUND ACCOUNTS ===
|
=== OPERATING FUND ACCOUNTS ===
|
||||||
${accountLines}
|
${accountLines}
|
||||||
@@ -763,20 +862,28 @@ Budgeted Annual Income: $${data.budgetedIncomeAnnual.toFixed(2)}
|
|||||||
Budgeted Annual Expenses: $${data.budgetedExpenseAnnual.toFixed(2)}
|
Budgeted Annual Expenses: $${data.budgetedExpenseAnnual.toFixed(2)}
|
||||||
Monthly Expense Run Rate: $${(data.budgetedExpenseAnnual / 12).toFixed(2)}
|
Monthly Expense Run Rate: $${(data.budgetedExpenseAnnual / 12).toFixed(2)}
|
||||||
|
|
||||||
=== BUDGET VS ACTUAL (YTD through month ${data.currentMonth}) ===
|
=== BUDGET VS ACTUAL (YTD through ${data.lastActualsMonthLabel || 'N/A — no actuals posted yet'}) ===
|
||||||
|
Note: This comparison only covers months with posted accounting entries. ${data.lastActualsMonthLabel ? `Actuals have been posted through ${data.lastActualsMonthLabel}.` : 'No monthly actuals have been posted yet for this fiscal year.'} Budget figures are used for forecasting until actuals are submitted at month-end.
|
||||||
|
|
||||||
Budgeted Income YTD: $${data.budgetedIncomeYTD.toFixed(2)}
|
Budgeted Income YTD: $${data.budgetedIncomeYTD.toFixed(2)}
|
||||||
Actual Income YTD: $${data.ytdIncome.toFixed(2)}
|
Actual Income YTD: $${data.ytdIncome.toFixed(2)}
|
||||||
Income Variance: $${(data.ytdIncome - data.budgetedIncomeYTD).toFixed(2)} (${data.budgetedIncomeYTD > 0 ? ((data.ytdIncome / data.budgetedIncomeYTD) * 100).toFixed(1) : 0}% of budget)
|
Income Variance: $${(data.ytdIncome - data.budgetedIncomeYTD).toFixed(2)}${data.budgetedIncomeYTD > 0 ? ` (${((data.ytdIncome / data.budgetedIncomeYTD) * 100).toFixed(1)}% of budget)` : ''}
|
||||||
|
|
||||||
Budgeted Expenses YTD: $${data.budgetedExpenseYTD.toFixed(2)}
|
Budgeted Expenses YTD: $${data.budgetedExpenseYTD.toFixed(2)}
|
||||||
Actual Expenses YTD: $${data.ytdExpense.toFixed(2)}
|
Actual Expenses YTD: $${data.ytdExpense.toFixed(2)}
|
||||||
Expense Variance: $${(data.ytdExpense - data.budgetedExpenseYTD).toFixed(2)} (${data.budgetedExpenseYTD > 0 ? ((data.ytdExpense / data.budgetedExpenseYTD) * 100).toFixed(1) : 0}% of budget)
|
Expense Variance: $${(data.ytdExpense - data.budgetedExpenseYTD).toFixed(2)}${data.budgetedExpenseYTD > 0 ? ` (${((data.ytdExpense / data.budgetedExpenseYTD) * 100).toFixed(1)}% of budget)` : ''}
|
||||||
|
|
||||||
|
=== CURRENT MONTH (${data.currentMonthLabel}, ${data.dayOfMonth} days elapsed) ===
|
||||||
|
Budgeted Income this month: $${data.currentMonthBudgetIncome.toFixed(2)}
|
||||||
|
Budgeted Expenses this month: $${data.currentMonthBudgetExpense.toFixed(2)}
|
||||||
|
Actuals posted this month: ${data.currentMonthHasActuals ? 'Yes' : 'No — actuals are typically posted at month-end'}
|
||||||
|
|
||||||
=== CASH RUNWAY ===
|
=== CASH RUNWAY ===
|
||||||
Months of Operating Expenses Covered: ${data.monthsOfExpenses.toFixed(1)} months
|
Months of Operating Expenses Covered: ${data.monthsOfExpenses.toFixed(1)} months
|
||||||
|
|
||||||
=== ASSESSMENT INCOME ===
|
=== ASSESSMENT INCOME ===
|
||||||
${assessmentLines}
|
${assessmentLines}
|
||||||
|
Total Annual Assessment Income: $${data.totalAnnualAssessmentIncome.toFixed(2)}
|
||||||
Monthly Assessment Income: $${data.monthlyAssessmentIncome.toFixed(2)}
|
Monthly Assessment Income: $${data.monthlyAssessmentIncome.toFixed(2)}
|
||||||
|
|
||||||
=== DELINQUENCY ===
|
=== DELINQUENCY ===
|
||||||
@@ -918,11 +1025,26 @@ ${budgetLines}
|
|||||||
|
|
||||||
=== SPECIAL ASSESSMENT INCOME (Reserve Fund) ===
|
=== SPECIAL ASSESSMENT INCOME (Reserve Fund) ===
|
||||||
${data.assessments.length === 0 ? 'No special assessments configured.' :
|
${data.assessments.length === 0 ? 'No special assessments configured.' :
|
||||||
data.assessments.map((a: any) => {
|
(() => {
|
||||||
const special = parseFloat(a.special_assessment || '0');
|
const lines = data.assessments.map((a: any) => {
|
||||||
if (special === 0) return null;
|
const special = parseFloat(a.special_assessment || '0');
|
||||||
return `- ${a.name}: $${special.toFixed(2)}/unit × ${a.unit_count} units (${a.frequency}) = $${(special * parseInt(a.unit_count || '0')).toFixed(2)}/period → Reserve Fund`;
|
if (special === 0) return null;
|
||||||
}).filter(Boolean).join('\n') || 'No special assessments currently being collected.'}
|
const units = parseInt(a.unit_count || '0');
|
||||||
|
const totalPerPeriod = special * units;
|
||||||
|
return `- ${a.name}: $${special.toFixed(2)}/unit × ${units} units (${a.frequency}) = $${totalPerPeriod.toFixed(2)}/period → Reserve Fund`;
|
||||||
|
}).filter(Boolean);
|
||||||
|
if (lines.length === 0) return 'No special assessments currently being collected.';
|
||||||
|
const totalAnnual = data.assessments.reduce((sum: number, a: any) => {
|
||||||
|
const special = parseFloat(a.special_assessment || '0');
|
||||||
|
const units = parseInt(a.unit_count || '0');
|
||||||
|
const total = special * units;
|
||||||
|
const freq = a.frequency || 'monthly';
|
||||||
|
if (freq === 'monthly') return sum + total * 12;
|
||||||
|
if (freq === 'quarterly') return sum + total * 4;
|
||||||
|
return sum + total;
|
||||||
|
}, 0);
|
||||||
|
return lines.join('\n') + '\nTotal Annual Special Assessment Income to Reserves: $' + totalAnnual.toFixed(2);
|
||||||
|
})()}
|
||||||
|
|
||||||
=== 12-MONTH PROJECTED CASH FLOW (Reserve Fund) ===
|
=== 12-MONTH PROJECTED CASH FLOW (Reserve Fund) ===
|
||||||
Starting Reserve Cash: $${data.reserveCash.toFixed(2)}
|
Starting Reserve Cash: $${data.reserveCash.toFixed(2)}
|
||||||
@@ -967,7 +1089,7 @@ Projected Year-End Total (Cash + Investments): $${data.projectedYearEndTotal.toF
|
|||||||
const requestBody = {
|
const requestBody = {
|
||||||
model,
|
model,
|
||||||
messages,
|
messages,
|
||||||
temperature: 0.3,
|
temperature: 0.1,
|
||||||
max_tokens: 2048,
|
max_tokens: 2048,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -993,7 +1115,7 @@ Projected Year-End Total (Cash + Investments): $${data.projectedYearEndTotal.toF
|
|||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
'Content-Length': Buffer.byteLength(bodyString, 'utf-8'),
|
'Content-Length': Buffer.byteLength(bodyString, 'utf-8'),
|
||||||
},
|
},
|
||||||
timeout: 120000,
|
timeout: 600000, // 10 minute timeout
|
||||||
};
|
};
|
||||||
|
|
||||||
const req = https.request(options, (res) => {
|
const req = https.request(options, (res) => {
|
||||||
@@ -1007,7 +1129,7 @@ Projected Year-End Total (Cash + Investments): $${data.projectedYearEndTotal.toF
|
|||||||
req.on('error', (err) => reject(err));
|
req.on('error', (err) => reject(err));
|
||||||
req.on('timeout', () => {
|
req.on('timeout', () => {
|
||||||
req.destroy();
|
req.destroy();
|
||||||
reject(new Error('Request timed out after 120s'));
|
reject(new Error('Request timed out after 600s'));
|
||||||
});
|
});
|
||||||
|
|
||||||
req.write(bodyString);
|
req.write(bodyString);
|
||||||
|
|||||||
@@ -36,9 +36,9 @@ export class InvestmentPlanningController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Post('recommendations')
|
@Post('recommendations')
|
||||||
@ApiOperation({ summary: 'Get AI-powered investment recommendations' })
|
@ApiOperation({ summary: 'Trigger AI-powered investment recommendations (async — returns immediately)' })
|
||||||
@AllowViewer()
|
@AllowViewer()
|
||||||
getRecommendations(@Req() req: any) {
|
triggerRecommendations(@Req() req: any) {
|
||||||
return this.service.getAIRecommendations(req.user?.sub, req.user?.orgId);
|
return this.service.triggerAIRecommendations(req.user?.sub, req.user?.orgId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -65,6 +65,9 @@ export interface SavedRecommendation {
|
|||||||
risk_notes: string[];
|
risk_notes: string[];
|
||||||
response_time_ms: number;
|
response_time_ms: number;
|
||||||
created_at: string;
|
created_at: string;
|
||||||
|
status: 'processing' | 'complete' | 'error';
|
||||||
|
last_failed: boolean;
|
||||||
|
error_message?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
@@ -196,14 +199,33 @@ export class InvestmentPlanningService {
|
|||||||
return rates.cd;
|
return rates.cd;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ensure the status/error_message columns exist (for tenants created before this migration).
|
||||||
|
*/
|
||||||
|
private async ensureStatusColumn(): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.tenant.query(
|
||||||
|
`ALTER TABLE ai_recommendations ADD COLUMN IF NOT EXISTS status VARCHAR(20) DEFAULT 'complete'`,
|
||||||
|
);
|
||||||
|
await this.tenant.query(
|
||||||
|
`ALTER TABLE ai_recommendations ADD COLUMN IF NOT EXISTS error_message TEXT`,
|
||||||
|
);
|
||||||
|
} catch {
|
||||||
|
// Ignore — column may already exist or table may not exist
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the latest saved AI recommendation for this tenant.
|
* Get the latest saved AI recommendation for this tenant.
|
||||||
|
* Returns status and last_failed flag for UI state management.
|
||||||
*/
|
*/
|
||||||
async getSavedRecommendation(): Promise<SavedRecommendation | null> {
|
async getSavedRecommendation(): Promise<SavedRecommendation | null> {
|
||||||
try {
|
try {
|
||||||
|
await this.ensureStatusColumn();
|
||||||
|
|
||||||
const rows = await this.tenant.query(
|
const rows = await this.tenant.query(
|
||||||
`SELECT id, recommendations_json, overall_assessment, risk_notes,
|
`SELECT id, recommendations_json, overall_assessment, risk_notes,
|
||||||
response_time_ms, created_at
|
response_time_ms, status, error_message, created_at
|
||||||
FROM ai_recommendations
|
FROM ai_recommendations
|
||||||
ORDER BY created_at DESC
|
ORDER BY created_at DESC
|
||||||
LIMIT 1`,
|
LIMIT 1`,
|
||||||
@@ -212,6 +234,64 @@ export class InvestmentPlanningService {
|
|||||||
if (!rows || rows.length === 0) return null;
|
if (!rows || rows.length === 0) return null;
|
||||||
|
|
||||||
const row = rows[0];
|
const row = rows[0];
|
||||||
|
const status = row.status || 'complete';
|
||||||
|
|
||||||
|
// If still processing, return processing status
|
||||||
|
if (status === 'processing') {
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
recommendations: [],
|
||||||
|
overall_assessment: '',
|
||||||
|
risk_notes: [],
|
||||||
|
response_time_ms: 0,
|
||||||
|
created_at: row.created_at,
|
||||||
|
status: 'processing',
|
||||||
|
last_failed: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// If latest attempt failed, return the last successful result with last_failed flag
|
||||||
|
if (status === 'error') {
|
||||||
|
const lastGood = await this.tenant.query(
|
||||||
|
`SELECT id, recommendations_json, overall_assessment, risk_notes,
|
||||||
|
response_time_ms, created_at
|
||||||
|
FROM ai_recommendations
|
||||||
|
WHERE status = 'complete'
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT 1`,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (lastGood?.length) {
|
||||||
|
const goodRow = lastGood[0];
|
||||||
|
const recData = goodRow.recommendations_json || {};
|
||||||
|
return {
|
||||||
|
id: goodRow.id,
|
||||||
|
recommendations: recData.recommendations || [],
|
||||||
|
overall_assessment: goodRow.overall_assessment || recData.overall_assessment || '',
|
||||||
|
risk_notes: goodRow.risk_notes || recData.risk_notes || [],
|
||||||
|
response_time_ms: goodRow.response_time_ms || 0,
|
||||||
|
created_at: goodRow.created_at,
|
||||||
|
status: 'complete',
|
||||||
|
last_failed: true,
|
||||||
|
error_message: row.error_message,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// No previous good result — return error state
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
recommendations: [],
|
||||||
|
overall_assessment: row.error_message || 'AI analysis failed. Please try again.',
|
||||||
|
risk_notes: [],
|
||||||
|
response_time_ms: 0,
|
||||||
|
created_at: row.created_at,
|
||||||
|
status: 'error',
|
||||||
|
last_failed: true,
|
||||||
|
error_message: row.error_message,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Complete — return the data normally
|
||||||
const recData = row.recommendations_json || {};
|
const recData = row.recommendations_json || {};
|
||||||
return {
|
return {
|
||||||
id: row.id,
|
id: row.id,
|
||||||
@@ -220,6 +300,8 @@ export class InvestmentPlanningService {
|
|||||||
risk_notes: row.risk_notes || recData.risk_notes || [],
|
risk_notes: row.risk_notes || recData.risk_notes || [],
|
||||||
response_time_ms: row.response_time_ms || 0,
|
response_time_ms: row.response_time_ms || 0,
|
||||||
created_at: row.created_at,
|
created_at: row.created_at,
|
||||||
|
status: 'complete',
|
||||||
|
last_failed: false,
|
||||||
};
|
};
|
||||||
} catch (err: any) {
|
} catch (err: any) {
|
||||||
// Table might not exist yet (pre-migration tenants)
|
// Table might not exist yet (pre-migration tenants)
|
||||||
@@ -228,15 +310,153 @@ export class InvestmentPlanningService {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save a 'processing' placeholder record and return its ID.
|
||||||
|
*/
|
||||||
|
private async saveProcessingRecord(userId?: string): Promise<string> {
|
||||||
|
await this.ensureStatusColumn();
|
||||||
|
const rows = await this.tenant.query(
|
||||||
|
`INSERT INTO ai_recommendations
|
||||||
|
(recommendations_json, overall_assessment, risk_notes, requested_by, status)
|
||||||
|
VALUES ('{}', '', '[]', $1, 'processing')
|
||||||
|
RETURNING id`,
|
||||||
|
[userId || null],
|
||||||
|
);
|
||||||
|
return rows[0].id;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update a processing record with completed results.
|
||||||
|
*/
|
||||||
|
private async updateRecommendationComplete(
|
||||||
|
jobId: string,
|
||||||
|
aiResponse: AIResponse,
|
||||||
|
userId: string | undefined,
|
||||||
|
elapsed: number,
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.tenant.query(
|
||||||
|
`UPDATE ai_recommendations
|
||||||
|
SET recommendations_json = $1,
|
||||||
|
overall_assessment = $2,
|
||||||
|
risk_notes = $3,
|
||||||
|
response_time_ms = $4,
|
||||||
|
status = 'complete'
|
||||||
|
WHERE id = $5`,
|
||||||
|
[
|
||||||
|
JSON.stringify(aiResponse),
|
||||||
|
aiResponse.overall_assessment || '',
|
||||||
|
JSON.stringify(aiResponse.risk_notes || []),
|
||||||
|
elapsed,
|
||||||
|
jobId,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
} catch (err: any) {
|
||||||
|
this.logger.warn(`Could not update recommendation ${jobId}: ${err.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update a processing record with error status.
|
||||||
|
*/
|
||||||
|
private async updateRecommendationError(jobId: string, errorMessage: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.tenant.query(
|
||||||
|
`UPDATE ai_recommendations
|
||||||
|
SET status = 'error',
|
||||||
|
error_message = $1
|
||||||
|
WHERE id = $2`,
|
||||||
|
[errorMessage, jobId],
|
||||||
|
);
|
||||||
|
} catch (err: any) {
|
||||||
|
this.logger.warn(`Could not update recommendation error ${jobId}: ${err.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Trigger AI recommendations asynchronously.
|
||||||
|
* Saves a 'processing' record, starts the AI work in the background, and returns immediately.
|
||||||
|
* The TenantService instance remains alive via closure reference for the duration of the background work.
|
||||||
|
*/
|
||||||
|
async triggerAIRecommendations(userId?: string, orgId?: string): Promise<{ status: string; message: string }> {
|
||||||
|
const jobId = await this.saveProcessingRecord(userId);
|
||||||
|
this.logger.log(`AI recommendation triggered (job ${jobId}), starting background processing...`);
|
||||||
|
|
||||||
|
// Fire-and-forget — the Promise keeps this service instance (and TenantService) alive
|
||||||
|
this.runBackgroundRecommendations(jobId, userId, orgId).catch((err) => {
|
||||||
|
this.logger.error(`Background AI recommendation failed (job ${jobId}): ${err.message}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 'processing',
|
||||||
|
message: 'AI analysis has been started. You can navigate away safely — results will appear when ready.',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run the full AI recommendation pipeline in the background.
|
||||||
|
*/
|
||||||
|
private async runBackgroundRecommendations(jobId: string, userId?: string, orgId?: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
const [snapshot, allRates, monthlyForecast] = await Promise.all([
|
||||||
|
this.getFinancialSnapshot(),
|
||||||
|
this.getMarketRates(),
|
||||||
|
this.getMonthlyForecast(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
this.debug('background_snapshot_summary', {
|
||||||
|
job_id: jobId,
|
||||||
|
operating_cash: snapshot.summary.operating_cash,
|
||||||
|
reserve_cash: snapshot.summary.reserve_cash,
|
||||||
|
total_all: snapshot.summary.total_all,
|
||||||
|
investment_accounts: snapshot.investment_accounts.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
const messages = this.buildPromptMessages(snapshot, allRates, monthlyForecast);
|
||||||
|
const aiResponse = await this.callAI(messages);
|
||||||
|
const elapsed = Date.now() - startTime;
|
||||||
|
|
||||||
|
this.debug('background_final_response', {
|
||||||
|
job_id: jobId,
|
||||||
|
recommendation_count: aiResponse.recommendations.length,
|
||||||
|
has_assessment: !!aiResponse.overall_assessment,
|
||||||
|
elapsed_ms: elapsed,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check if the AI returned a graceful error (empty recommendations with error message)
|
||||||
|
const isGracefulError = aiResponse.recommendations.length === 0 &&
|
||||||
|
(aiResponse.overall_assessment?.includes('Unable to generate') ||
|
||||||
|
aiResponse.overall_assessment?.includes('invalid response'));
|
||||||
|
|
||||||
|
if (isGracefulError) {
|
||||||
|
await this.updateRecommendationError(jobId, aiResponse.overall_assessment);
|
||||||
|
} else {
|
||||||
|
await this.updateRecommendationComplete(jobId, aiResponse, userId, elapsed);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log AI usage (fire-and-forget)
|
||||||
|
this.logAIUsage(userId, orgId, aiResponse, elapsed).catch(() => {});
|
||||||
|
|
||||||
|
this.logger.log(`Background AI recommendation completed (job ${jobId}) in ${elapsed}ms`);
|
||||||
|
} catch (err: any) {
|
||||||
|
this.logger.error(`Background AI recommendation error (job ${jobId}): ${err.message}`);
|
||||||
|
await this.updateRecommendationError(jobId, err.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Save AI recommendation result to tenant schema.
|
* Save AI recommendation result to tenant schema.
|
||||||
|
* @deprecated Use triggerAIRecommendations() for async flow instead
|
||||||
*/
|
*/
|
||||||
private async saveRecommendation(aiResponse: AIResponse, userId: string | undefined, elapsed: number): Promise<void> {
|
private async saveRecommendation(aiResponse: AIResponse, userId: string | undefined, elapsed: number): Promise<void> {
|
||||||
try {
|
try {
|
||||||
|
await this.ensureStatusColumn();
|
||||||
await this.tenant.query(
|
await this.tenant.query(
|
||||||
`INSERT INTO ai_recommendations
|
`INSERT INTO ai_recommendations
|
||||||
(recommendations_json, overall_assessment, risk_notes, requested_by, response_time_ms)
|
(recommendations_json, overall_assessment, risk_notes, requested_by, response_time_ms, status)
|
||||||
VALUES ($1, $2, $3, $4, $5)`,
|
VALUES ($1, $2, $3, $4, $5, 'complete')`,
|
||||||
[
|
[
|
||||||
JSON.stringify(aiResponse),
|
JSON.stringify(aiResponse),
|
||||||
aiResponse.overall_assessment || '',
|
aiResponse.overall_assessment || '',
|
||||||
@@ -873,7 +1093,7 @@ Based on this complete financial picture INCLUDING the 12-month cash flow foreca
|
|||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
'Content-Length': Buffer.byteLength(bodyString, 'utf-8'),
|
'Content-Length': Buffer.byteLength(bodyString, 'utf-8'),
|
||||||
},
|
},
|
||||||
timeout: 180000, // 3 minute timeout
|
timeout: 600000, // 10 minute timeout
|
||||||
};
|
};
|
||||||
|
|
||||||
const req = https.request(options, (res) => {
|
const req = https.request(options, (res) => {
|
||||||
@@ -887,7 +1107,7 @@ Based on this complete financial picture INCLUDING the 12-month cash flow foreca
|
|||||||
req.on('error', (err) => reject(err));
|
req.on('error', (err) => reject(err));
|
||||||
req.on('timeout', () => {
|
req.on('timeout', () => {
|
||||||
req.destroy();
|
req.destroy();
|
||||||
reject(new Error(`Request timed out after 180s`));
|
reject(new Error(`Request timed out after 600s`));
|
||||||
});
|
});
|
||||||
|
|
||||||
req.write(bodyString);
|
req.write(bodyString);
|
||||||
|
|||||||
@@ -157,6 +157,9 @@ export class ProjectsService {
|
|||||||
const params: any[] = [];
|
const params: any[] = [];
|
||||||
let idx = 1;
|
let idx = 1;
|
||||||
|
|
||||||
|
// Date columns must be null (not empty string) for PostgreSQL DATE type
|
||||||
|
const dateFields = new Set(['last_replacement_date', 'next_replacement_date', 'planned_date']);
|
||||||
|
|
||||||
// Build dynamic SET clause
|
// Build dynamic SET clause
|
||||||
const fields: [string, string][] = [
|
const fields: [string, string][] = [
|
||||||
['name', 'name'], ['description', 'description'], ['category', 'category'],
|
['name', 'name'], ['description', 'description'], ['category', 'category'],
|
||||||
@@ -175,7 +178,8 @@ export class ProjectsService {
|
|||||||
for (const [dtoKey, dbCol] of fields) {
|
for (const [dtoKey, dbCol] of fields) {
|
||||||
if (dto[dtoKey] !== undefined) {
|
if (dto[dtoKey] !== undefined) {
|
||||||
sets.push(`${dbCol} = $${idx++}`);
|
sets.push(`${dbCol} = $${idx++}`);
|
||||||
params.push(dto[dtoKey]);
|
const val = dateFields.has(dtoKey) && dto[dtoKey] === '' ? null : dto[dtoKey];
|
||||||
|
params.push(val);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -276,7 +280,7 @@ export class ProjectsService {
|
|||||||
await this.findOne(id);
|
await this.findOne(id);
|
||||||
const rows = await this.tenant.query(
|
const rows = await this.tenant.query(
|
||||||
'UPDATE projects SET planned_date = $2, updated_at = NOW() WHERE id = $1 RETURNING *',
|
'UPDATE projects SET planned_date = $2, updated_at = NOW() WHERE id = $1 RETURNING *',
|
||||||
[id, planned_date],
|
[id, planned_date || null],
|
||||||
);
|
);
|
||||||
return rows[0];
|
return rows[0];
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,10 +14,12 @@ export class ReportsService {
|
|||||||
ELSE COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0)
|
ELSE COALESCE(SUM(jel.credit), 0) - COALESCE(SUM(jel.debit), 0)
|
||||||
END as balance
|
END as balance
|
||||||
FROM accounts a
|
FROM accounts a
|
||||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
LEFT JOIN (
|
||||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id
|
journal_entry_lines jel
|
||||||
AND je.is_posted = true AND je.is_void = false
|
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||||
AND je.entry_date <= $1
|
AND je.is_posted = true AND je.is_void = false
|
||||||
|
AND je.entry_date <= $1
|
||||||
|
) ON jel.account_id = a.id
|
||||||
WHERE a.is_active = true AND a.account_type IN ('asset', 'liability', 'equity')
|
WHERE a.is_active = true AND a.account_type IN ('asset', 'liability', 'equity')
|
||||||
GROUP BY a.id, a.account_number, a.name, a.account_type, a.fund_type
|
GROUP BY a.id, a.account_number, a.name, a.account_type, a.fund_type
|
||||||
HAVING CASE
|
HAVING CASE
|
||||||
@@ -32,6 +34,71 @@ export class ReportsService {
|
|||||||
const liabilities = rows.filter((r: any) => r.account_type === 'liability');
|
const liabilities = rows.filter((r: any) => r.account_type === 'liability');
|
||||||
const equity = rows.filter((r: any) => r.account_type === 'equity');
|
const equity = rows.filter((r: any) => r.account_type === 'equity');
|
||||||
|
|
||||||
|
// Compute current year net income (income - expenses) for the fiscal year through as_of date
|
||||||
|
// This balances the accounting equation: Assets = Liabilities + Equity + Net Income
|
||||||
|
const fiscalYearStart = `${asOf.substring(0, 4)}-01-01`;
|
||||||
|
const netIncomeSql = `
|
||||||
|
SELECT
|
||||||
|
COALESCE(SUM(CASE WHEN a.account_type = 'income'
|
||||||
|
THEN jel.credit - jel.debit ELSE 0 END), 0) -
|
||||||
|
COALESCE(SUM(CASE WHEN a.account_type = 'expense'
|
||||||
|
THEN jel.debit - jel.credit ELSE 0 END), 0) as net_income
|
||||||
|
FROM journal_entry_lines jel
|
||||||
|
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||||
|
AND je.is_posted = true AND je.is_void = false
|
||||||
|
AND je.entry_date BETWEEN $1 AND $2
|
||||||
|
INNER JOIN accounts a ON a.id = jel.account_id
|
||||||
|
AND a.account_type IN ('income', 'expense') AND a.is_active = true
|
||||||
|
`;
|
||||||
|
const netIncomeResult = await this.tenant.query(netIncomeSql, [fiscalYearStart, asOf]);
|
||||||
|
const netIncome = parseFloat(netIncomeResult[0]?.net_income || '0');
|
||||||
|
|
||||||
|
// Add current year net income as a synthetic equity line
|
||||||
|
if (netIncome !== 0) {
|
||||||
|
equity.push({
|
||||||
|
id: null,
|
||||||
|
account_number: '',
|
||||||
|
name: 'Current Year Net Income',
|
||||||
|
account_type: 'equity',
|
||||||
|
fund_type: 'operating',
|
||||||
|
balance: netIncome.toFixed(2),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add investment account balances to assets and corresponding equity
|
||||||
|
const investmentsSql = `
|
||||||
|
SELECT id, name, institution, current_value as balance, fund_type
|
||||||
|
FROM investment_accounts
|
||||||
|
WHERE is_active = true AND current_value > 0
|
||||||
|
`;
|
||||||
|
const investments = await this.tenant.query(investmentsSql);
|
||||||
|
const investmentsByFund: Record<string, number> = {};
|
||||||
|
for (const inv of investments) {
|
||||||
|
assets.push({
|
||||||
|
id: inv.id,
|
||||||
|
account_number: '',
|
||||||
|
name: `${inv.name} (${inv.institution})`,
|
||||||
|
account_type: 'asset',
|
||||||
|
fund_type: inv.fund_type,
|
||||||
|
balance: parseFloat(inv.balance).toFixed(2),
|
||||||
|
});
|
||||||
|
investmentsByFund[inv.fund_type] = (investmentsByFund[inv.fund_type] || 0) + parseFloat(inv.balance);
|
||||||
|
}
|
||||||
|
// Add investment balances as synthetic equity lines to maintain A = L + E
|
||||||
|
for (const [fundType, total] of Object.entries(investmentsByFund)) {
|
||||||
|
if (total > 0) {
|
||||||
|
const label = fundType === 'reserve' ? 'Reserve' : 'Operating';
|
||||||
|
equity.push({
|
||||||
|
id: null,
|
||||||
|
account_number: '',
|
||||||
|
name: `${label} Investment Holdings`,
|
||||||
|
account_type: 'equity',
|
||||||
|
fund_type: fundType,
|
||||||
|
balance: total.toFixed(2),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const totalAssets = assets.reduce((s: number, r: any) => s + parseFloat(r.balance), 0);
|
const totalAssets = assets.reduce((s: number, r: any) => s + parseFloat(r.balance), 0);
|
||||||
const totalLiabilities = liabilities.reduce((s: number, r: any) => s + parseFloat(r.balance), 0);
|
const totalLiabilities = liabilities.reduce((s: number, r: any) => s + parseFloat(r.balance), 0);
|
||||||
const totalEquity = equity.reduce((s: number, r: any) => s + parseFloat(r.balance), 0);
|
const totalEquity = equity.reduce((s: number, r: any) => s + parseFloat(r.balance), 0);
|
||||||
@@ -54,10 +121,12 @@ export class ReportsService {
|
|||||||
ELSE COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0)
|
ELSE COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0)
|
||||||
END as amount
|
END as amount
|
||||||
FROM accounts a
|
FROM accounts a
|
||||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
LEFT JOIN (
|
||||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id
|
journal_entry_lines jel
|
||||||
AND je.is_posted = true AND je.is_void = false
|
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||||
AND je.entry_date BETWEEN $1 AND $2
|
AND je.is_posted = true AND je.is_void = false
|
||||||
|
AND je.entry_date BETWEEN $1 AND $2
|
||||||
|
) ON jel.account_id = a.id
|
||||||
WHERE a.is_active = true AND a.account_type IN ('income', 'expense')
|
WHERE a.is_active = true AND a.account_type IN ('income', 'expense')
|
||||||
GROUP BY a.id, a.account_number, a.name, a.account_type, a.fund_type
|
GROUP BY a.id, a.account_number, a.name, a.account_type, a.fund_type
|
||||||
HAVING CASE
|
HAVING CASE
|
||||||
@@ -340,20 +409,20 @@ export class ReportsService {
|
|||||||
ORDER BY a.name
|
ORDER BY a.name
|
||||||
`, [from, to]);
|
`, [from, to]);
|
||||||
|
|
||||||
// Asset filter: cash-only vs cash + investment accounts
|
// Asset filter: all asset accounts (bank/checking/savings are the cash accounts)
|
||||||
const assetFilter = includeInvestments
|
const assetFilter = `a.account_type = 'asset'`;
|
||||||
? `a.account_type = 'asset'`
|
|
||||||
: `a.account_type = 'asset' AND a.name LIKE '%Cash%'`;
|
|
||||||
|
|
||||||
// Cash beginning and ending balances
|
// Cash beginning and ending balances
|
||||||
const beginCash = await this.tenant.query(`
|
const beginCash = await this.tenant.query(`
|
||||||
SELECT COALESCE(SUM(sub.bal), 0) as balance FROM (
|
SELECT COALESCE(SUM(sub.bal), 0) as balance FROM (
|
||||||
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
|
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
|
||||||
FROM accounts a
|
FROM accounts a
|
||||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
LEFT JOIN (
|
||||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id
|
journal_entry_lines jel
|
||||||
AND je.is_posted = true AND je.is_void = false
|
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||||
AND je.entry_date < $1
|
AND je.is_posted = true AND je.is_void = false
|
||||||
|
AND je.entry_date < $1
|
||||||
|
) ON jel.account_id = a.id
|
||||||
WHERE ${assetFilter} AND a.is_active = true
|
WHERE ${assetFilter} AND a.is_active = true
|
||||||
GROUP BY a.id
|
GROUP BY a.id
|
||||||
) sub
|
) sub
|
||||||
@@ -363,10 +432,12 @@ export class ReportsService {
|
|||||||
SELECT COALESCE(SUM(sub.bal), 0) as balance FROM (
|
SELECT COALESCE(SUM(sub.bal), 0) as balance FROM (
|
||||||
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
|
SELECT COALESCE(SUM(jel.debit), 0) - COALESCE(SUM(jel.credit), 0) as bal
|
||||||
FROM accounts a
|
FROM accounts a
|
||||||
LEFT JOIN journal_entry_lines jel ON jel.account_id = a.id
|
LEFT JOIN (
|
||||||
LEFT JOIN journal_entries je ON je.id = jel.journal_entry_id
|
journal_entry_lines jel
|
||||||
AND je.is_posted = true AND je.is_void = false
|
INNER JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||||
AND je.entry_date <= $1
|
AND je.is_posted = true AND je.is_void = false
|
||||||
|
AND je.entry_date <= $1
|
||||||
|
) ON jel.account_id = a.id
|
||||||
WHERE ${assetFilter} AND a.is_active = true
|
WHERE ${assetFilter} AND a.is_active = true
|
||||||
GROUP BY a.id
|
GROUP BY a.id
|
||||||
) sub
|
) sub
|
||||||
@@ -479,19 +550,22 @@ export class ReportsService {
|
|||||||
const incomeStmt = await this.getIncomeStatement(from, to);
|
const incomeStmt = await this.getIncomeStatement(from, to);
|
||||||
const balanceSheet = await this.getBalanceSheet(to);
|
const balanceSheet = await this.getBalanceSheet(to);
|
||||||
|
|
||||||
// 1099 vendor data
|
// 1099 vendor data — uses journal entries via vendor's default_account_id
|
||||||
const vendors1099 = await this.tenant.query(`
|
const vendors1099 = await this.tenant.query(`
|
||||||
SELECT v.id, v.name, v.tax_id, v.address_line1, v.city, v.state, v.zip_code,
|
SELECT v.id, v.name, v.tax_id, v.address_line1, v.city, v.state, v.zip_code,
|
||||||
COALESCE(SUM(p.amount), 0) as total_paid
|
COALESCE(SUM(p_amounts.amount), 0) as total_paid
|
||||||
FROM vendors v
|
FROM vendors v
|
||||||
JOIN (
|
LEFT JOIN (
|
||||||
SELECT vendor_id, amount FROM invoices
|
SELECT jel.account_id, jel.debit as amount
|
||||||
WHERE EXTRACT(YEAR FROM invoice_date) = $1
|
FROM journal_entry_lines jel
|
||||||
AND status IN ('paid', 'partial')
|
JOIN journal_entries je ON je.id = jel.journal_entry_id
|
||||||
) p ON p.vendor_id = v.id
|
WHERE je.is_posted = true AND je.is_void = false
|
||||||
|
AND EXTRACT(YEAR FROM je.entry_date) = $1
|
||||||
|
AND jel.debit > 0
|
||||||
|
) p_amounts ON p_amounts.account_id = v.default_account_id
|
||||||
WHERE v.is_1099_eligible = true
|
WHERE v.is_1099_eligible = true
|
||||||
GROUP BY v.id, v.name, v.tax_id, v.address_line1, v.city, v.state, v.zip_code
|
GROUP BY v.id, v.name, v.tax_id, v.address_line1, v.city, v.state, v.zip_code
|
||||||
HAVING COALESCE(SUM(p.amount), 0) >= 600
|
HAVING COALESCE(SUM(p_amounts.amount), 0) >= 600
|
||||||
ORDER BY v.name
|
ORDER BY v.name
|
||||||
`, [year]);
|
`, [year]);
|
||||||
|
|
||||||
|
|||||||
95
docker-compose.prod.yml
Normal file
95
docker-compose.prod.yml
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
# Production override — use with:
|
||||||
|
# docker compose -f docker-compose.yml -f docker-compose.prod.yml up -d --build
|
||||||
|
#
|
||||||
|
# What this changes from the base (dev) config:
|
||||||
|
# - Disables the Docker nginx container (host nginx handles routing + SSL)
|
||||||
|
# - Backend: production Dockerfile (compiled JS, no watch, no devDeps)
|
||||||
|
# - Frontend: production Dockerfile (static build served by nginx on port 3001)
|
||||||
|
# - Backend + Frontend bound to 127.0.0.1 only (host nginx proxies to them)
|
||||||
|
# - No source-code volume mounts (uses baked-in built code)
|
||||||
|
# - Memory limits and health checks on backend
|
||||||
|
# - Tuned PostgreSQL for production workloads
|
||||||
|
# - Restart policies for reliability
|
||||||
|
#
|
||||||
|
# SSL/TLS and request routing are handled by the host-level nginx.
|
||||||
|
# See nginx/host-production.conf for a ready-to-use reference config.
|
||||||
|
|
||||||
|
services:
|
||||||
|
nginx:
|
||||||
|
# Disabled in production — host nginx handles routing + SSL directly.
|
||||||
|
# The dev-only Docker nginx is still used by the base docker-compose.yml.
|
||||||
|
deploy:
|
||||||
|
replicas: 0
|
||||||
|
|
||||||
|
backend:
|
||||||
|
build:
|
||||||
|
context: ./backend
|
||||||
|
dockerfile: Dockerfile # production Dockerfile (compiled JS)
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:3000:3000" # loopback only — host nginx proxies here
|
||||||
|
volumes: [] # override: no source mounts in prod
|
||||||
|
environment:
|
||||||
|
- DATABASE_URL=${DATABASE_URL}
|
||||||
|
- REDIS_URL=${REDIS_URL}
|
||||||
|
- JWT_SECRET=${JWT_SECRET}
|
||||||
|
- NODE_ENV=production
|
||||||
|
- AI_API_URL=${AI_API_URL}
|
||||||
|
- AI_API_KEY=${AI_API_KEY}
|
||||||
|
- AI_MODEL=${AI_MODEL}
|
||||||
|
- AI_DEBUG=${AI_DEBUG:-false}
|
||||||
|
- NEW_RELIC_ENABLED=${NEW_RELIC_ENABLED:-false}
|
||||||
|
- NEW_RELIC_LICENSE_KEY=${NEW_RELIC_LICENSE_KEY:-}
|
||||||
|
- NEW_RELIC_APP_NAME=${NEW_RELIC_APP_NAME:-HOALedgerIQ_App}
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
memory: 1024M
|
||||||
|
reservations:
|
||||||
|
memory: 256M
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "wget -qO- http://localhost:3000/api || exit 1"]
|
||||||
|
interval: 15s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 3
|
||||||
|
start_period: 30s
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
frontend:
|
||||||
|
build:
|
||||||
|
context: ./frontend
|
||||||
|
dockerfile: Dockerfile # production Dockerfile (static nginx)
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:3001:3001" # loopback only — host nginx proxies here
|
||||||
|
volumes: [] # override: no source mounts in prod
|
||||||
|
environment:
|
||||||
|
- NODE_ENV=production
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
# Tune PostgreSQL for production workloads
|
||||||
|
command: >
|
||||||
|
postgres
|
||||||
|
-c max_connections=200
|
||||||
|
-c shared_buffers=256MB
|
||||||
|
-c effective_cache_size=512MB
|
||||||
|
-c work_mem=4MB
|
||||||
|
-c maintenance_work_mem=64MB
|
||||||
|
-c checkpoint_completion_target=0.9
|
||||||
|
-c wal_buffers=16MB
|
||||||
|
-c random_page_cost=1.1
|
||||||
|
# No host port mapping — backend reaches postgres via the Docker network.
|
||||||
|
# Removes 2 docker-proxy processes and closes 0.0.0.0:5432 to the internet.
|
||||||
|
ports: []
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
memory: 1024M
|
||||||
|
reservations:
|
||||||
|
memory: 512M
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
redis:
|
||||||
|
# No host port mapping — backend reaches redis via the Docker network.
|
||||||
|
# Removes 2 docker-proxy processes and closes 0.0.0.0:6379 to the internet.
|
||||||
|
ports: []
|
||||||
|
restart: unless-stopped
|
||||||
28
docker-compose.ssl.yml
Normal file
28
docker-compose.ssl.yml
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# SSL override — use with: docker compose -f docker-compose.yml -f docker-compose.ssl.yml up -d
|
||||||
|
#
|
||||||
|
# This adds port 443, certbot volumes, and a certbot renewal service
|
||||||
|
# to the base docker-compose.yml configuration.
|
||||||
|
|
||||||
|
services:
|
||||||
|
nginx:
|
||||||
|
ports:
|
||||||
|
- "80:80"
|
||||||
|
- "443:443"
|
||||||
|
volumes:
|
||||||
|
- ./nginx/ssl.conf:/etc/nginx/conf.d/default.conf:ro
|
||||||
|
- certbot_www:/var/www/certbot:ro
|
||||||
|
- certbot_conf:/etc/letsencrypt:ro
|
||||||
|
|
||||||
|
certbot:
|
||||||
|
image: certbot/certbot:latest
|
||||||
|
volumes:
|
||||||
|
- certbot_www:/var/www/certbot
|
||||||
|
- certbot_conf:/etc/letsencrypt
|
||||||
|
networks:
|
||||||
|
- hoanet
|
||||||
|
# Auto-renew: check twice daily, only renews if < 30 days remain
|
||||||
|
entrypoint: "/bin/sh -c 'trap exit TERM; while :; do certbot renew --quiet; sleep 12h & wait $${!}; done'"
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
certbot_www:
|
||||||
|
certbot_conf:
|
||||||
@@ -15,8 +15,8 @@ services:
|
|||||||
build:
|
build:
|
||||||
context: ./backend
|
context: ./backend
|
||||||
dockerfile: Dockerfile.dev
|
dockerfile: Dockerfile.dev
|
||||||
ports:
|
# No host port mapping — dev traffic goes through the Docker nginx container.
|
||||||
- "3000:3000"
|
# Production overlay maps 127.0.0.1:3000 for the host reverse proxy.
|
||||||
environment:
|
environment:
|
||||||
- DATABASE_URL=${DATABASE_URL}
|
- DATABASE_URL=${DATABASE_URL}
|
||||||
- REDIS_URL=${REDIS_URL}
|
- REDIS_URL=${REDIS_URL}
|
||||||
@@ -26,6 +26,9 @@ services:
|
|||||||
- AI_API_KEY=${AI_API_KEY}
|
- AI_API_KEY=${AI_API_KEY}
|
||||||
- AI_MODEL=${AI_MODEL}
|
- AI_MODEL=${AI_MODEL}
|
||||||
- AI_DEBUG=${AI_DEBUG:-false}
|
- AI_DEBUG=${AI_DEBUG:-false}
|
||||||
|
- NEW_RELIC_ENABLED=${NEW_RELIC_ENABLED:-false}
|
||||||
|
- NEW_RELIC_LICENSE_KEY=${NEW_RELIC_LICENSE_KEY:-}
|
||||||
|
- NEW_RELIC_APP_NAME=${NEW_RELIC_APP_NAME:-HOALedgerIQ_App}
|
||||||
volumes:
|
volumes:
|
||||||
- ./backend/src:/app/src
|
- ./backend/src:/app/src
|
||||||
- ./backend/nest-cli.json:/app/nest-cli.json
|
- ./backend/nest-cli.json:/app/nest-cli.json
|
||||||
@@ -43,8 +46,8 @@ services:
|
|||||||
build:
|
build:
|
||||||
context: ./frontend
|
context: ./frontend
|
||||||
dockerfile: Dockerfile.dev
|
dockerfile: Dockerfile.dev
|
||||||
ports:
|
# No host port mapping — dev traffic goes through the Docker nginx container.
|
||||||
- "5173:5173"
|
# Production overlay maps 127.0.0.1:3001 for the host reverse proxy.
|
||||||
environment:
|
environment:
|
||||||
- NODE_ENV=${NODE_ENV}
|
- NODE_ENV=${NODE_ENV}
|
||||||
volumes:
|
volumes:
|
||||||
|
|||||||
545
docs/AI_FEATURE_AUDIT.md
Normal file
545
docs/AI_FEATURE_AUDIT.md
Normal file
@@ -0,0 +1,545 @@
|
|||||||
|
# AI Feature Audit Report
|
||||||
|
|
||||||
|
**Audit Date:** 2026-03-05
|
||||||
|
**Tenant Under Test:** Pine Creek HOA (`tenant_pine_creek_hoa_q33i`)
|
||||||
|
**AI Model:** Qwen 3.5-397B-A17B via NVIDIA NIM (Temperature: 0.3)
|
||||||
|
**Auditor:** Claude Opus 4.6 (automated)
|
||||||
|
**Data Snapshot Date:** 2026-03-04
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Executive Summary
|
||||||
|
|
||||||
|
Three AI-powered features were audited against ground-truth database records: **Operating Fund Health**, **Reserve Fund Health**, and **Investment Recommendations**. Overall, the AI demonstrates strong financial reasoning and produces actionable, fiduciary-appropriate recommendations. However, score consistency across runs is a concern (16-point spread on operating, 20-point spread on reserve), and several specific data interpretation issues were identified.
|
||||||
|
|
||||||
|
| Feature | Latest Score/Grade | Concurrence | Verdict |
|
||||||
|
|---|---|---|---|
|
||||||
|
| Operating Fund Health | 88 / Good | **72%** | Score ~10-15 pts high; cash runway below its own "Good" threshold |
|
||||||
|
| Reserve Fund Health | 45 / Needs Attention | **85%** | Well-calibrated; minor data misquote on annual contributions |
|
||||||
|
| Investment Recommendations | 6 recommendations | **88%** | Excellent specificity; all market rates verified accurate |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Data Foundation (Ground Truth)
|
||||||
|
|
||||||
|
### Financial Position
|
||||||
|
|
||||||
|
| Metric | Value | Source |
|
||||||
|
|---|---|---|
|
||||||
|
| Operating Cash (Checking) | $27,418.81 | GL balance |
|
||||||
|
| Reserve Cash (Savings) | $10,688.45 | GL balance |
|
||||||
|
| Reserve CD #1a (FCB) | $10,000 @ 3.67%, matures 6/19/26 | `investment_accounts` |
|
||||||
|
| Reserve CD #2a (FCB) | $8,000 @ 3.60%, matures 4/14/26 | `investment_accounts` |
|
||||||
|
| Reserve CD #3a (FCB) | $10,000 @ 3.67%, matures 8/18/26 | `investment_accounts` |
|
||||||
|
| Total Reserve Fund | $38,688.45 | Cash + Investments |
|
||||||
|
| Total Assets | $66,107.26 | Operating + Reserve |
|
||||||
|
|
||||||
|
### Budget (FY2026)
|
||||||
|
|
||||||
|
| Category | Annual Total |
|
||||||
|
|---|---|
|
||||||
|
| Operating Income | $184,207.40 |
|
||||||
|
| Operating Expense | $139,979.95 |
|
||||||
|
| **Net Operating Surplus** | **$44,227.45** |
|
||||||
|
| Monthly Expense Run Rate | $11,665.00 |
|
||||||
|
| Reserve Interest Income | $1,449.96 |
|
||||||
|
| Reserve Disbursements | $22,000.00 (Mar $13K, Apr $9K) |
|
||||||
|
|
||||||
|
### Assessment Structure
|
||||||
|
|
||||||
|
- **67 units** at $2,328.14/year regular + $300.00/year special (annual frequency)
|
||||||
|
- Total annual regular assessments: ~$155,985
|
||||||
|
- Total annual special assessments: ~$20,100
|
||||||
|
- Budget timing: assessments front-loaded in Mar-Jun
|
||||||
|
|
||||||
|
### Actuals (YTD through March 4, 2026)
|
||||||
|
|
||||||
|
| Metric | Value |
|
||||||
|
|---|---|
|
||||||
|
| YTD Income | $88.16 (ARC fees $100 - $50 adj + $38.16 interest) |
|
||||||
|
| YTD Expenses | $1,850.42 (January only) |
|
||||||
|
| Delinquent Invoices | 0 ($0.00) |
|
||||||
|
| Journal Entries Posted | 4 (Jan actuals + Feb adjusting + Feb opening balances) |
|
||||||
|
|
||||||
|
### Capital Projects (from `projects` table, 26 total)
|
||||||
|
|
||||||
|
| Project | Cost | Target | Funded % |
|
||||||
|
|---|---|---|---|
|
||||||
|
| Pond Spillway | $7,000 | Mar 2026 | 0% |
|
||||||
|
| Tuscany Drain Box | $5,500 | May 2026 | 0% |
|
||||||
|
| Front Entrance Power Washing | $1,500 | Mar 2027 | 0% |
|
||||||
|
| Irrigation Pump Replacement | $1,500 | Jun 2027 | 0% |
|
||||||
|
| **Road Sealing - All Roads** | **$80,000** | **Jun 2029** | **0%** |
|
||||||
|
| Asphalt Repair - Creek Stone Dr | $43,000 | TBD | 0% |
|
||||||
|
| Pavilion & Vineyard Structures | $7,000 | Jun 2035 | 0% |
|
||||||
|
| 16 placeholder items | $1.00 each | TBD | 0% |
|
||||||
|
| **Total Planned** | **$152,016** | | **0%** |
|
||||||
|
|
||||||
|
### Reserve Components
|
||||||
|
|
||||||
|
- **0 components tracked** (empty `reserve_components` table)
|
||||||
|
|
||||||
|
### Market Rates (fetched 2026-03-04)
|
||||||
|
|
||||||
|
| Type | Top Rate | Bank | Term |
|
||||||
|
|---|---|---|---|
|
||||||
|
| CD | 4.10% | E*TRADE / Synchrony | 12-14 mo |
|
||||||
|
| High-Yield Savings | 4.09% | Openbank | Liquid |
|
||||||
|
| Money Market | 4.03% | Vio Bank | Liquid |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Operating Fund Health Score
|
||||||
|
|
||||||
|
**Latest Score:** 88 (Good) — Generated 2026-03-04T19:24:36Z
|
||||||
|
**Score History:** 48 → 72 → 78 → 72 → 78 → **88** (6 runs, March 2-4)
|
||||||
|
**Overall Concurrence: 72%**
|
||||||
|
|
||||||
|
### Factor-by-Factor Analysis
|
||||||
|
|
||||||
|
#### Factor 1: "Projected Cash Flow" — Impact: Positive
|
||||||
|
> "12-month forecast shows consistent positive liquidity, with cash balances never dipping below the starting $27,419 and peaking at $142,788 in June."
|
||||||
|
|
||||||
|
| Check | Result |
|
||||||
|
|---|---|
|
||||||
|
| Budget surplus ($184K income vs $140K expense) | **Verified** ✅ |
|
||||||
|
| Assessments front-loaded Mar-Jun | **Verified** ✅ (budget shows $48K Mar, $64K Apr, $32K May, $16K Jun) |
|
||||||
|
| Peak of ~$142K in June | **Plausible** ✅ ($27K + cumulative income through June) |
|
||||||
|
| Cash never below starting $27K | **Plausible** ✅ (expenses < income by month) |
|
||||||
|
|
||||||
|
**Concurrence: 95%** — Forecast logic is sound. The only risk is the assumption that assessments are collected on the exact budget schedule.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Factor 2: "Delinquency Rate" — Impact: Positive
|
||||||
|
> "$0.00 in overdue invoices and a 0.0% delinquency rate."
|
||||||
|
|
||||||
|
**Concurrence: 100%** ✅ — Database confirms zero delinquent invoices.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Factor 3: "Budget Performance (Timing)" — Impact: Neutral
|
||||||
|
> "YTD income is 99.8% below budget ($55k variance) primarily due to the timing of the large Special Assessment ($20,700) and regular assessments appearing in future projected months."
|
||||||
|
|
||||||
|
| Check | Result |
|
||||||
|
|---|---|
|
||||||
|
| YTD income $88.16 | **Verified** ✅ |
|
||||||
|
| Budget includes March ($55K) in YTD calc | **Accurate** — AI uses month 3 of 12, includes full March budget |
|
||||||
|
| Timing explanation | **Reasonable** — we're only 4 days into March |
|
||||||
|
| Rating as "neutral" vs "negative" | **Appropriate** ✅ — correctly avoids penalizing for calendar timing |
|
||||||
|
|
||||||
|
**Concurrence: 80%** — The variance is accurately computed but presenting a $55K "variance" when we're 4 days into March could alarm a board member. The YTD window through month 3 includes all of March's budget despite only 4 days having elapsed. Consider computing YTD budget pro-rata or through the prior complete month.
|
||||||
|
|
||||||
|
**🔧 Tuning Suggestion:** Add a note to the prompt about pro-rating the current month's budget, or instruct the AI to note "X days into the current month" when the variance is driven by incomplete-month timing.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Factor 4: "Cash Reserves" — Impact: Positive
|
||||||
|
> "Current operating cash of $27,419 provides 2.4 months of runway based on the annual expense run rate."
|
||||||
|
|
||||||
|
| Check | Result |
|
||||||
|
|---|---|
|
||||||
|
| $27,419 / ($139,980 / 12) = 2.35 months | **Math verified** ✅ |
|
||||||
|
| Rated as "positive" | **Questionable** ⚠️ |
|
||||||
|
|
||||||
|
**Concurrence: 60%** — The math is correct, but rating 2.4 months as "positive" contradicts the scoring guidelines which state 2-3 months = "Fair" (60-74) and 3-6 months = "Good" (75-89). This factor should be "neutral" at best, and the overall score should reflect that the HOA is *below* the "Good" threshold for cash reserves.
|
||||||
|
|
||||||
|
**🔧 Tuning Suggestion:** Add explicit guidance in the prompt: "If cash runway is below 3 months, this factor MUST be neutral or negative, regardless of projected future inflows."
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Factor 5: "Expense Management" — Impact: Positive
|
||||||
|
> "YTD expenses are $36,313 under budget (4.8% of annual budget spent vs 25% of year elapsed)."
|
||||||
|
|
||||||
|
| Check | Result |
|
||||||
|
|---|---|
|
||||||
|
| YTD expenses $1,850.42 | **Verified** ✅ |
|
||||||
|
| Budget YTD (3 months): ~$38,164 | **Correct** ✅ |
|
||||||
|
| $1,850 / $38,164 = 4.85% | **Math verified** ✅ |
|
||||||
|
| "25% of year elapsed" | **Correct** (month 3 of 12) |
|
||||||
|
| Phrasing "of annual budget" | **Misleading** ⚠️ — it's actually 4.8% of YTD budget, not annual |
|
||||||
|
|
||||||
|
**Concurrence: 70%** — The percentage is correctly calculated against YTD budget, but the phrasing "of annual budget" is incorrect. Also, the low spend is not necessarily positive — only January actuals exist; February hasn't been posted yet, which the AI partially acknowledges with "or delayed billing cycles."
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Recommendation Assessment
|
||||||
|
|
||||||
|
| # | Recommendation | Priority | Concurrence |
|
||||||
|
|---|---|---|---|
|
||||||
|
| 1 | "Verify the posting schedule for the $20,700 Special Assessment" | Low | **90%** ✅ Valid; assessments are annual, collection timing matters |
|
||||||
|
| 2 | "Investigate the low YTD expense recognition ($1,850 vs $38,164)" | Medium | **95%** ✅ Excellent catch; Feb expenses not posted yet |
|
||||||
|
| 3 | "Consider moving excess cash over $100K in Q2 to interest-bearing account" | Low | **85%** ✅ Sound advice; aligns with HY Savings at 4.09% |
|
||||||
|
|
||||||
|
**Recommendation Concurrence: 90%** — All three recommendations are actionable and data-backed.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Score Assessment
|
||||||
|
|
||||||
|
**Is 88 (Good) the right score?**
|
||||||
|
|
||||||
|
| Scoring Criterion | Guidelines Say | Actual | Alignment |
|
||||||
|
|---|---|---|---|
|
||||||
|
| Cash reserves | 3-6 months for "Good" | 2.4 months | ❌ Below threshold |
|
||||||
|
| Income vs expenses | "Roughly matching" for Good | $184K vs $140K (surplus) | ✅ Exceeds |
|
||||||
|
| Delinquency | "Manageable" for Good | 0% | ✅ Excellent |
|
||||||
|
| Budget performance | No major overruns for Good | Under budget (timing) | ✅ Positive |
|
||||||
|
| Projected cash flow | Not explicitly in guidelines | Strong positive trajectory | ✅ Positive |
|
||||||
|
|
||||||
|
The cash runway of 2.4 months is below the stated "Good" (75-89) threshold of 3-6 months and technically falls in the "Fair" (60-74) range of 2-3 months. Earlier AI runs scored this 72-78, which better aligns with the guidelines. The 88 appears to overweight the projected future cash flow (which is speculative) vs the current actual position.
|
||||||
|
|
||||||
|
**Suggested correct score: 74-80** (high end of Fair to low end of Good)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Score Consistency Concern
|
||||||
|
|
||||||
|
| Run Date | Score | Label |
|
||||||
|
|---|---|---|
|
||||||
|
| Mar 2 15:07 | 48 | Needs Attention |
|
||||||
|
| Mar 2 15:12 | 78 | Good |
|
||||||
|
| Mar 2 15:36 | 72 | Fair |
|
||||||
|
| Mar 2 17:09 | 78 | Good |
|
||||||
|
| Mar 3 02:03 | 72 | Fair |
|
||||||
|
| Mar 4 19:24 | 88 | Good |
|
||||||
|
|
||||||
|
A **40-point spread** (48-88) across 6 runs with essentially the same data is concerning. Even excluding the outlier first run (which noted a data config issue with "1 units"), the remaining 5 runs span 72-88 (16 points). At temperature 0.3, this suggests the model is not deterministic enough for financial scoring.
|
||||||
|
|
||||||
|
**🔧 Tuning Suggestion:** Consider lowering temperature to 0.1 for health score calculations to improve consistency. Alternatively, implement a moving average of the last 3 scores to smooth volatility.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Reserve Fund Health Score
|
||||||
|
|
||||||
|
**Latest Score:** 45 (Needs Attention) — Generated 2026-03-04T19:24:50Z
|
||||||
|
**Score History:** 25 → 48 → 42 → 25 → 45 → 35 → **45** (7 runs, March 2-4)
|
||||||
|
**Overall Concurrence: 85%**
|
||||||
|
|
||||||
|
### Factor-by-Factor Analysis
|
||||||
|
|
||||||
|
#### Factor 1: "Funded Ratio" — Impact: Negative
|
||||||
|
> "Calculated at 0% because no reserve components have been inventoried or assigned replacement costs, making it impossible to measure true funding health against the $152,016 in planned projects."
|
||||||
|
|
||||||
|
| Check | Result |
|
||||||
|
|---|---|
|
||||||
|
| 0 reserve components in DB | **Verified** ✅ |
|
||||||
|
| $152,016 in planned projects | **Verified** ✅ (sum of all `projects` rows) |
|
||||||
|
| 0% funded ratio | **Technically accurate** ✅ (no denominator from components) |
|
||||||
|
| Distinction between components and projects | **Well articulated** ✅ |
|
||||||
|
|
||||||
|
**Concurrence: 95%** — The AI correctly identifies that the 0% is an artifact of missing reserve study data, not a literal lack of funds. It appropriately flags this as a governance failure.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Factor 2: "Projected Cash Flow" — Impact: Positive
|
||||||
|
> "Strong immediate liquidity; cash balance is projected to rise from $10,688 to over $49,000 by May 2026 due to special assessment income covering the $12,500 in urgent 2026 project costs."
|
||||||
|
|
||||||
|
| Check | Result |
|
||||||
|
|---|---|
|
||||||
|
| Starting reserve cash $10,688 | **Verified** ✅ |
|
||||||
|
| 2026 project costs: $7K (Mar) + $5.5K (May) = $12,500 | **Verified** ✅ |
|
||||||
|
| Special assessment: $300 × 67 = $20,100/year | **Verified** ✅ |
|
||||||
|
| CD maturities: $8K (Apr), $10K (Jun), $10K (Aug) | **Verified** ✅ |
|
||||||
|
| Projected rise to $49K by May | **Plausible** ✅ (income + maturities - project costs) |
|
||||||
|
|
||||||
|
**Concurrence: 85%** — Math is directionally correct. However, the assessment is annual frequency so the full $20,100 may arrive in a single payment, not spread monthly. The timing assumption is critical.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Factor 3: "Component Tracking" — Impact: Negative
|
||||||
|
> "Critical failure in governance: 'No reserve components tracked' means the association is flying blind on the condition and remaining useful life of major assets like roads and irrigation."
|
||||||
|
|
||||||
|
**Concurrence: 100%** ✅ — Database confirms 0 rows in `reserve_components`. This is objectively a critical gap.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Factor 4: "Annual Contributions" — Impact: Negative
|
||||||
|
> "Recurring annual reserve income is only $300 (plus minimal interest), which is grossly insufficient to fund the $80,000 road sealing project due in 2029."
|
||||||
|
|
||||||
|
| Check | Result |
|
||||||
|
|---|---|
|
||||||
|
| Reserve budget income: $1,449.96/yr (interest only) | **Verified** ✅ |
|
||||||
|
| Special assessment: $300/unit × 67 = $20,100/yr | **Verified** ✅ |
|
||||||
|
| "$300" cited as annual reserve income | **Incorrect** ⚠️ |
|
||||||
|
| Road Sealing $80K in June 2029 | **Verified** ✅ |
|
||||||
|
|
||||||
|
**Concurrence: 65%** — The concern about insufficient contributions is valid, but the "$300" figure appears to confuse the per-unit special assessment amount ($300/unit) with the total annual reserve income. Actual annual reserve income = $1,450 (interest) + $20,100 (special assessments) = **$21,550/yr**. Even at $21,550/yr, the 3 years until Road Sealing would accumulate ~$64,650, still short of $80K. So the directional concern is correct, but the magnitude is significantly misstated.
|
||||||
|
|
||||||
|
**🔧 Tuning Suggestion:** The prompt should explicitly label the special assessment income total (not per-unit) in the data context. Currently the data says "$300.00/unit × 67 units (annual)" — the AI should compute $20,100 but sometimes fixates on the $300 per-unit figure. Consider pre-computing and passing the total.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Recommendation Assessment
|
||||||
|
|
||||||
|
| # | Recommendation | Priority | Concurrence |
|
||||||
|
|---|---|---|---|
|
||||||
|
| 1 | "Commission a professional Reserve Study to inventory assets and establish funded ratio" | High | **100%** ✅ Critical and universally correct |
|
||||||
|
| 2 | "Develop a long-term funding plan for the $80,000 Road Sealing project (2029)" | High | **90%** ✅ Verified project exists; $80K with 0% funded |
|
||||||
|
| 3 | "Formalize collection of special assessments into the reserve fund vs operating" | Medium | **95%** ✅ Budget shows special assessments in operating income section |
|
||||||
|
|
||||||
|
**Recommendation Concurrence: 95%** — All recommendations are actionable, appropriately prioritized, and backed by database evidence.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Score Assessment
|
||||||
|
|
||||||
|
**Is 45 (Needs Attention) the right score?**
|
||||||
|
|
||||||
|
| Scoring Criterion | Guidelines Say | Actual | Alignment |
|
||||||
|
|---|---|---|---|
|
||||||
|
| Percent funded | 20-30% for "Needs Attention" | 0% (no components) | ⬇️ Worse than threshold |
|
||||||
|
| Contributions | "Inadequate" for Needs Attention | $21,550/yr for $152K in projects | ⚠️ Borderline |
|
||||||
|
| Component tracking | "Multiple urgent unfunded" | 0 tracked, 2 due in 2026 | ❌ Critical gap |
|
||||||
|
| Investments | Not scored negatively | 3 CDs earning 3.6-3.67% | ✅ Positive |
|
||||||
|
| Capital readiness | | $12.5K due soon, only $10.7K cash | ⚠️ Tight |
|
||||||
|
|
||||||
|
A score of 45 is reasonable. The 0% funded ratio technically suggests "At Risk" (20-39), but the presence of real assets ($38.7K), active investments, and manageable near-term liquidity justifies bumping it into the "Needs Attention" band. The AI's balancing of the artificial 0% metric against actual fund health shows good judgment.
|
||||||
|
|
||||||
|
**Suggested correct score: 40-50** — the AI's 45 is well-calibrated.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Score Consistency Concern
|
||||||
|
|
||||||
|
| Run Date | Score | Label |
|
||||||
|
|---|---|---|
|
||||||
|
| Mar 2 15:06 | 25 | At Risk |
|
||||||
|
| Mar 2 15:13 | 25 | At Risk |
|
||||||
|
| Mar 2 15:37 | 48 | Needs Attention |
|
||||||
|
| Mar 2 17:10 | 42 | Needs Attention |
|
||||||
|
| Mar 3 02:04 | 45 | Needs Attention |
|
||||||
|
| Mar 4 18:49 | 35 | At Risk |
|
||||||
|
| Mar 4 19:24 | 45 | Needs Attention |
|
||||||
|
|
||||||
|
A **23-point spread** (25-48) across 7 runs. The scores oscillate between "At Risk" and "Needs Attention" — the model cannot consistently decide which band this falls into. The most recent 3 runs (35, 45, 45) are more stable.
|
||||||
|
|
||||||
|
**🔧 Tuning Suggestion:** Add boundary guidance to the prompt: "When the score falls within ±5 points of a threshold (40, 60, 75, 90), explicitly justify which side of the boundary the HOA falls on."
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. AI Investment Recommendations
|
||||||
|
|
||||||
|
**Latest Run:** 2026-03-04T19:28:22Z (3 runs saved)
|
||||||
|
**Overall Concurrence: 88%**
|
||||||
|
|
||||||
|
### Overall Assessment
|
||||||
|
> "The HOA has a healthy long-term cash flow outlook with significant surpluses projected by mid-2026, but faces an immediate liquidity pinch in the Reserve Fund for March/April capital projects. The current investment strategy relies on older, lower-yielding CDs (3.60-3.67%) that are maturing soon."
|
||||||
|
|
||||||
|
**Concurrence: 92%** ✅ — Every claim verified:
|
||||||
|
- CDs are at 3.60-3.67% vs market 4.10% (verified)
|
||||||
|
- March project ($7K) vs reserve cash ($10.7K) is tight (verified)
|
||||||
|
- Long-term surplus projected from assessment income (verified from budget)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Recommendation-by-Recommendation Analysis
|
||||||
|
|
||||||
|
#### Rec 1: "Critical Reserve Shortfall for March Project" — HIGH / Liquidity Warning
|
||||||
|
|
||||||
|
| Claim | Database Value | Match |
|
||||||
|
|---|---|---|
|
||||||
|
| Reserve cash = $10,688 | $10,688.45 | ✅ Exact |
|
||||||
|
| $7,000 Pond Spillway project due March | Projects table: $7,000, Mar 2026 | ✅ Exact |
|
||||||
|
| Shortfall risk | $10,688 - $7,000 = $3,688 remaining — tight but feasible | ✅ |
|
||||||
|
| Suggested action: expedite special assessment or transfer from operating | Sound advice | ✅ |
|
||||||
|
|
||||||
|
**Concurrence: 90%** — The liquidity concern is real. After paying the $7K project, only $3.7K would remain in reserve cash before the $5.5K May project. The AI correctly flags the timing risk even though the fund is technically solvent.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Rec 2: "Reinvest Maturing CD #2a at Higher Rate" — HIGH / Maturity Action
|
||||||
|
|
||||||
|
| Claim | Database Value | Match |
|
||||||
|
|---|---|---|
|
||||||
|
| CD #2a = $8,000 | $8,000.00 | ✅ Exact |
|
||||||
|
| Current rate = 3.60% | 3.60% | ✅ Exact |
|
||||||
|
| Maturity = April 14, 2026 | 2026-04-14 | ✅ Exact |
|
||||||
|
| Market rate = 4.10% (E*TRADE) | CD rates: E*TRADE 4.10%, 1 year, $0 min | ✅ Exact |
|
||||||
|
| Additional yield: ~$40/year per $8K | $8K × 0.50% = $40 | ✅ Math correct |
|
||||||
|
|
||||||
|
**Concurrence: 95%** ✅ — Textbook-correct recommendation. Every data point verified. The 50 bps improvement is risk-free income.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Rec 3: "Establish 12-Month CD Ladder for Reserves" — MEDIUM / CD Ladder
|
||||||
|
|
||||||
|
| Claim | Database Value | Match |
|
||||||
|
|---|---|---|
|
||||||
|
| ~$38K total reserve portfolio | $38,688.45 | ✅ Exact |
|
||||||
|
| Suggest 4-rung ladder (3/6/9/12 mo) | Standard strategy | ✅ |
|
||||||
|
| Rates up to 4.10% | Market data confirmed | ✅ |
|
||||||
|
| $9K matures every quarter | $38K / 4 = $9.5K per rung | ✅ Approximate |
|
||||||
|
|
||||||
|
**Concurrence: 75%** — Strategy is sound in principle, but the recommendation overlooks two constraints:
|
||||||
|
1. **Immediate project costs ($12.5K in 2026)** must be reserved first, leaving ~$26K for laddering
|
||||||
|
2. **Investing the entire $38K** is aggressive — some cash buffer should remain liquid
|
||||||
|
|
||||||
|
**🔧 Tuning Suggestion:** Add a constraint to the prompt: "When recommending CD ladders, always subtract upcoming project costs (next 12 months) and a minimum emergency reserve (1 month of budgeted reserve expenses) before calculating the investable amount."
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Rec 4: "Deploy Excess Operating Cash to High-Yield Savings" — MEDIUM / New Investment
|
||||||
|
|
||||||
|
| Claim | Database Value | Match |
|
||||||
|
|---|---|---|
|
||||||
|
| Operating cash = $27,418 | $27,418.81 | ✅ Exact |
|
||||||
|
| 3-month buffer = ~$35,000 | $11,665 × 3 = $34,995 | ✅ Math correct |
|
||||||
|
| Current cash below buffer | $27.4K < $35K | ✅ Correctly identified |
|
||||||
|
| Openbank 4.09% APY | Market data: Openbank 4.09%, $0.01 min | ✅ Exact |
|
||||||
|
| Trigger: "As soon as balance exceeds $35K" | Sound deferred recommendation | ✅ |
|
||||||
|
|
||||||
|
**Concurrence: 90%** ✅ — The AI correctly identifies the current shortfall and provides a forward-looking trigger. Well-structured advice that respects the liquidity constraint.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Rec 5: "Optimize Reserve Cash Yield Post-Project" — LOW / Reallocation
|
||||||
|
|
||||||
|
| Claim | Database Value | Match |
|
||||||
|
|---|---|---|
|
||||||
|
| Vio Bank Money Market at 4.03% | Market data: Vio Bank 4.03%, $0 min | ✅ Exact |
|
||||||
|
| Post-project reserve cash deployment | Appropriate timing | ✅ |
|
||||||
|
| T+1 liquidity for emergencies | Correct MM account characteristic | ✅ |
|
||||||
|
|
||||||
|
**Concurrence: 85%** ✅ — Reasonable low-priority optimization. Correctly uses market data.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Rec 6: "Formalize Special Assessment Collection for Reserves" — LOW / General
|
||||||
|
|
||||||
|
| Claim | Database Value | Match |
|
||||||
|
|---|---|---|
|
||||||
|
| $300/unit special assessment | Assessment groups: $300.00 special | ✅ Exact |
|
||||||
|
| Risk of commingling with operating | Budget shows special assessments in operating income | ✅ Identified |
|
||||||
|
|
||||||
|
**Concurrence: 90%** ✅ — Important governance recommendation. The budget structure does show special assessments as operating income, which could lead to improper fund commingling.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Risk Notes Assessment
|
||||||
|
|
||||||
|
| Risk Note | Verified | Concurrence |
|
||||||
|
|---|---|---|
|
||||||
|
| "Reserve cash ($10.6K) barely sufficient for $7K + $5.5K projects" | ✅ $10,688 vs $12,500 in projects | **95%** |
|
||||||
|
| "Concentration risk: CDs maturing in 4-month window (Apr-Aug)" | ✅ All 3 CDs mature Apr-Aug 2026 | **100%** |
|
||||||
|
| "Operating cash ballooning to $140K+ without investment plan" | ✅ Budget shows large Q2 surplus | **85%** |
|
||||||
|
| "Road Sealing $80K in 2029 needs dedicated savings plan" | ✅ Project exists, 0% funded | **95%** |
|
||||||
|
|
||||||
|
**Risk Notes Concurrence: 94%** — All risk items are data-backed and appropriately flagged.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Cross-Run Consistency (Investment Recommendations)
|
||||||
|
|
||||||
|
Three runs were compared. Key observations:
|
||||||
|
- **Core recommendations are highly consistent** across runs: CD reinvestment, HY savings for operating, CD ladder for reserves
|
||||||
|
- **Dollar amounts match exactly** across all runs (same data inputs)
|
||||||
|
- **Bank name recommendations vary slightly** (E*TRADE vs "Top CD Rate") — cosmetic, not substantive
|
||||||
|
- **Priority levels are stable** (HIGH for liquidity warnings, MEDIUM for optimization)
|
||||||
|
|
||||||
|
**Consistency Grade: A-** — Investment recommendations show much better consistency than health scores, likely because the structured data (specific CDs, specific rates) constrains the output more than the subjective health scoring.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Cross-Cutting Issues
|
||||||
|
|
||||||
|
### Issue 1: Score Volatility (MEDIUM Priority)
|
||||||
|
|
||||||
|
Health scores vary significantly across runs despite identical input data:
|
||||||
|
- Operating: 40-point spread (48-88)
|
||||||
|
- Reserve: 23-point spread (25-48)
|
||||||
|
|
||||||
|
**Root Cause:** Temperature 0.3 allows too much variance for numerical scoring. The model interprets guidelines subjectively.
|
||||||
|
|
||||||
|
**Recommended Fix:**
|
||||||
|
1. Reduce temperature to **0.1** for health score calculations
|
||||||
|
2. Implement a **3-run moving average** to smooth individual run variance
|
||||||
|
3. Add explicit **boundary justification** requirements to prompts
|
||||||
|
|
||||||
|
### Issue 2: YTD Budget Calculation Includes Incomplete Month (LOW Priority)
|
||||||
|
|
||||||
|
The operating health score computes YTD budget through the current month (March), but actual data may only cover a few days. This creates alarming income variances (e.g., "$55K variance") that are pure timing artifacts.
|
||||||
|
|
||||||
|
**Recommended Fix:**
|
||||||
|
- Compute YTD budget through the **prior completed month** (February)
|
||||||
|
- OR pro-rate the current month's budget by days elapsed
|
||||||
|
- Add a note to the prompt: "If the variance is driven by the current incomplete month, flag it as 'timing' and weight it minimally."
|
||||||
|
|
||||||
|
### Issue 3: Per-Unit vs Total Confusion on Special Assessments (LOW Priority)
|
||||||
|
|
||||||
|
The AI sometimes quotes "$300" as the annual reserve income instead of $300 × 67 = $20,100. The data passed says "$300.00/unit × 67 units (annual)" but the model occasionally fixates on the per-unit figure.
|
||||||
|
|
||||||
|
**Recommended Fix:**
|
||||||
|
- Pre-compute and include the total in the data: "Total Annual Special Assessment Income: $20,100.00"
|
||||||
|
- Keep the per-unit breakdown for context but lead with the total
|
||||||
|
|
||||||
|
### Issue 4: Cash Runway Classification Inconsistency (MEDIUM Priority)
|
||||||
|
|
||||||
|
The operating health score rates 2.4 months of cash runway as "positive" despite the scoring guidelines defining 2-3 months as "Fair" territory. This inflates the overall score.
|
||||||
|
|
||||||
|
**Recommended Fix:**
|
||||||
|
- Add explicit prompt guidance: "Cash runway categorization: <2 months = negative, 2-3 months = neutral, 3-6 months = positive, 6+ months = strongly positive. Do NOT rate below-threshold runway as positive based on projected future inflows."
|
||||||
|
|
||||||
|
### Issue 5: Dual Project Tables (INFORMATIONAL)
|
||||||
|
|
||||||
|
The schema contains both `capital_projects` (empty) and `projects` (26 rows). The health score service correctly queries `projects`, but auditors initially checked `capital_projects` and found no data. This dual-table pattern could confuse future developers.
|
||||||
|
|
||||||
|
**Recommended Fix:**
|
||||||
|
- Consolidate into a single table, OR
|
||||||
|
- Add a comment/documentation clarifying the canonical source
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Concurrence Summary by Recommendation
|
||||||
|
|
||||||
|
### Operating Fund Health — Recommendations
|
||||||
|
| Recommendation | Concurrence |
|
||||||
|
|---|---|
|
||||||
|
| Verify posting schedule for $20,100 Special Assessment | 90% |
|
||||||
|
| Investigate low YTD expense recognition | 95% |
|
||||||
|
| Move excess cash to interest-bearing account | 85% |
|
||||||
|
| **Average** | **90%** |
|
||||||
|
|
||||||
|
### Reserve Fund Health — Recommendations
|
||||||
|
| Recommendation | Concurrence |
|
||||||
|
|---|---|
|
||||||
|
| Commission professional Reserve Study | 100% |
|
||||||
|
| Develop funding plan for $80K Road Sealing | 90% |
|
||||||
|
| Formalize special assessment collection for reserves | 95% |
|
||||||
|
| **Average** | **95%** |
|
||||||
|
|
||||||
|
### Investment Planning — Recommendations
|
||||||
|
| Recommendation | Concurrence |
|
||||||
|
|---|---|
|
||||||
|
| Critical Reserve Shortfall for March Project | 90% |
|
||||||
|
| Reinvest Maturing CD #2a at Higher Rate | 95% |
|
||||||
|
| Establish 12-Month CD Ladder | 75% |
|
||||||
|
| Deploy Operating Cash to HY Savings | 90% |
|
||||||
|
| Optimize Reserve Cash Post-Project | 85% |
|
||||||
|
| Formalize Special Assessment Collection | 90% |
|
||||||
|
| **Average** | **88%** |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Final Grades
|
||||||
|
|
||||||
|
| Feature | Score Accuracy | Recommendation Quality | Data Fidelity | Consistency | **Overall** |
|
||||||
|
|---|---|---|---|---|---|
|
||||||
|
| Operating Fund Health | C+ (score ~15 pts high) | A (90%) | B+ (minor math phrasing) | C (16-pt spread) | **72% — B-** |
|
||||||
|
| Reserve Fund Health | A- (well-calibrated) | A (95%) | B (per-unit confusion) | B- (23-pt spread) | **85% — B+** |
|
||||||
|
| Investment Recommendations | N/A (no single score) | A (88%) | A (exact data matches) | A- (stable across runs) | **88% — A-** |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Priority Action Items for Tuning
|
||||||
|
|
||||||
|
1. **[HIGH]** Reduce AI temperature from 0.3 → 0.1 for health score calculations to reduce score volatility
|
||||||
|
2. **[MEDIUM]** Add explicit cash-runway-to-impact mapping in operating prompt to prevent misclassification
|
||||||
|
3. **[MEDIUM]** Pre-compute total special assessment income in data context (not just per-unit)
|
||||||
|
4. **[LOW]** Adjust YTD budget calculation to use prior completed month or pro-rate current month
|
||||||
|
5. **[LOW]** Add boundary justification requirement to scoring prompts
|
||||||
|
6. **[LOW]** Consider implementing 3-run moving average for displayed health scores
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Generated by Claude Opus 4.6 — Automated AI Feature Audit*
|
||||||
@@ -1,375 +0,0 @@
|
|||||||
# HOA LedgerIQ — Deployment Guide
|
|
||||||
|
|
||||||
**Version:** 2026.3.2 (beta)
|
|
||||||
**Last updated:** 2026-03-02
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Table of Contents
|
|
||||||
|
|
||||||
1. [Prerequisites](#prerequisites)
|
|
||||||
2. [Deploy to a Fresh Docker Server](#deploy-to-a-fresh-docker-server)
|
|
||||||
3. [Backup the Local Test Database](#backup-the-local-test-database)
|
|
||||||
4. [Restore a Backup into the Staged Environment](#restore-a-backup-into-the-staged-environment)
|
|
||||||
5. [Running Migrations on the Staged Environment](#running-migrations-on-the-staged-environment)
|
|
||||||
6. [Verifying the Deployment](#verifying-the-deployment)
|
|
||||||
7. [Environment Variable Reference](#environment-variable-reference)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
On the **target server**, ensure the following are installed:
|
|
||||||
|
|
||||||
| Tool | Minimum Version |
|
|
||||||
|-----------------|-----------------|
|
|
||||||
| Docker Engine | 24+ |
|
|
||||||
| Docker Compose | v2+ |
|
|
||||||
| Git | 2.x |
|
|
||||||
| `psql` (client) | 15+ *(optional, for manual DB work)* |
|
|
||||||
|
|
||||||
The app runs five containers — nginx, backend (NestJS), frontend (Vite/React),
|
|
||||||
PostgreSQL 15, and Redis 7. Total memory footprint is roughly **1–2 GB** idle.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Deploy to a Fresh Docker Server
|
|
||||||
|
|
||||||
### 1. Clone the repository
|
|
||||||
|
|
||||||
```bash
|
|
||||||
ssh your-staging-server
|
|
||||||
|
|
||||||
git clone <repo-url> /opt/hoa-ledgeriq
|
|
||||||
cd /opt/hoa-ledgeriq
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Create the environment file
|
|
||||||
|
|
||||||
Copy the example and fill in real values:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cp .env.example .env
|
|
||||||
nano .env # or vi, your choice
|
|
||||||
```
|
|
||||||
|
|
||||||
**Required changes from defaults:**
|
|
||||||
|
|
||||||
```dotenv
|
|
||||||
# --- CHANGE THESE ---
|
|
||||||
POSTGRES_PASSWORD=<strong-random-password>
|
|
||||||
JWT_SECRET=<random-64-char-string>
|
|
||||||
|
|
||||||
# Database URL must match the password above
|
|
||||||
DATABASE_URL=postgresql://hoafinance:<same-password>@postgres:5432/hoafinance
|
|
||||||
|
|
||||||
# AI features (get a key from build.nvidia.com)
|
|
||||||
AI_API_KEY=nvapi-xxxxxxxxxxxx
|
|
||||||
|
|
||||||
# --- Usually fine as-is ---
|
|
||||||
POSTGRES_USER=hoafinance
|
|
||||||
POSTGRES_DB=hoafinance
|
|
||||||
REDIS_URL=redis://redis:6379
|
|
||||||
NODE_ENV=development # keep as development for staging
|
|
||||||
AI_API_URL=https://integrate.api.nvidia.com/v1
|
|
||||||
AI_MODEL=qwen/qwen3.5-397b-a17b
|
|
||||||
AI_DEBUG=false
|
|
||||||
```
|
|
||||||
|
|
||||||
> **Tip:** Generate secrets quickly:
|
|
||||||
> ```bash
|
|
||||||
> openssl rand -hex 32 # good for JWT_SECRET
|
|
||||||
> openssl rand -base64 24 # good for POSTGRES_PASSWORD
|
|
||||||
> ```
|
|
||||||
|
|
||||||
### 3. Build and start the stack
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose up -d --build
|
|
||||||
```
|
|
||||||
|
|
||||||
This will:
|
|
||||||
- Build the backend and frontend images
|
|
||||||
- Pull `postgres:15-alpine`, `redis:7-alpine`, and `nginx:alpine`
|
|
||||||
- Initialize the PostgreSQL database with the shared schema (`db/init/00-init.sql`)
|
|
||||||
- Start all five services on the `hoanet` bridge network
|
|
||||||
|
|
||||||
### 4. Wait for healthy services
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose ps
|
|
||||||
```
|
|
||||||
|
|
||||||
All five containers should show `Up` (postgres and redis should also show
|
|
||||||
`(healthy)`). If the backend is restarting, check logs:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose logs backend --tail=50
|
|
||||||
```
|
|
||||||
|
|
||||||
### 5. (Optional) Seed with demo data
|
|
||||||
|
|
||||||
If deploying a fresh environment for testing and you want the Sunrise Valley
|
|
||||||
HOA demo tenant:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose exec -T postgres psql -U hoafinance -d hoafinance < db/seed/seed.sql
|
|
||||||
```
|
|
||||||
|
|
||||||
This creates:
|
|
||||||
- Platform admin: `admin@hoaledgeriq.com` / `password123`
|
|
||||||
- Tenant admin: `admin@sunrisevalley.org` / `password123`
|
|
||||||
- Tenant viewer: `viewer@sunrisevalley.org` / `password123`
|
|
||||||
|
|
||||||
### 6. Access the application
|
|
||||||
|
|
||||||
| Service | URL |
|
|
||||||
|-----------|--------------------------------|
|
|
||||||
| App (UI) | `http://<server-ip>` |
|
|
||||||
| API | `http://<server-ip>/api` |
|
|
||||||
| Postgres | `<server-ip>:5432` (direct) |
|
|
||||||
|
|
||||||
> **Note:** For production, add an SSL-terminating proxy (Caddy, Traefik, or
|
|
||||||
> an nginx TLS config) in front of port 80.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Backup the Local Test Database
|
|
||||||
|
|
||||||
### Full database dump (recommended)
|
|
||||||
|
|
||||||
From your **local development machine** where the app is currently running:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /path/to/HOA_Financial_Platform
|
|
||||||
|
|
||||||
# Dump the entire database (all schemas, roles, data)
|
|
||||||
docker compose exec -T postgres pg_dump \
|
|
||||||
-U hoafinance \
|
|
||||||
-d hoafinance \
|
|
||||||
--no-owner \
|
|
||||||
--no-privileges \
|
|
||||||
--format=custom \
|
|
||||||
-f /tmp/hoafinance_backup.dump
|
|
||||||
|
|
||||||
# Copy the dump file out of the container
|
|
||||||
docker compose cp postgres:/tmp/hoafinance_backup.dump ./hoafinance_backup.dump
|
|
||||||
```
|
|
||||||
|
|
||||||
The `--format=custom` flag produces a compressed binary format that supports
|
|
||||||
selective restore. The file is typically 50–80% smaller than plain SQL.
|
|
||||||
|
|
||||||
### Alternative: Plain SQL dump
|
|
||||||
|
|
||||||
If you prefer a human-readable SQL file:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose exec -T postgres pg_dump \
|
|
||||||
-U hoafinance \
|
|
||||||
-d hoafinance \
|
|
||||||
--no-owner \
|
|
||||||
--no-privileges \
|
|
||||||
> hoafinance_backup.sql
|
|
||||||
```
|
|
||||||
|
|
||||||
### Backup a single tenant schema
|
|
||||||
|
|
||||||
To export just one tenant (e.g., Pine Creek HOA):
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose exec -T postgres pg_dump \
|
|
||||||
-U hoafinance \
|
|
||||||
-d hoafinance \
|
|
||||||
--no-owner \
|
|
||||||
--no-privileges \
|
|
||||||
--schema=tenant_pine_creek_hoa_q33i \
|
|
||||||
> pine_creek_backup.sql
|
|
||||||
```
|
|
||||||
|
|
||||||
> **Finding a tenant's schema name:**
|
|
||||||
> ```bash
|
|
||||||
> docker compose exec -T postgres psql -U hoafinance -d hoafinance \
|
|
||||||
> -c "SELECT name, schema_name FROM shared.organizations WHERE status = 'active';"
|
|
||||||
> ```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Restore a Backup into the Staged Environment
|
|
||||||
|
|
||||||
### 1. Transfer the backup to the staging server
|
|
||||||
|
|
||||||
```bash
|
|
||||||
scp hoafinance_backup.dump user@staging-server:/opt/hoa-ledgeriq/
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Ensure the stack is running
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/hoa-ledgeriq
|
|
||||||
docker compose up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Drop and recreate the database (clean slate)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Connect to postgres and reset the database
|
|
||||||
docker compose exec -T postgres psql -U hoafinance -d postgres -c "
|
|
||||||
SELECT pg_terminate_backend(pid)
|
|
||||||
FROM pg_stat_activity
|
|
||||||
WHERE datname = 'hoafinance' AND pid <> pg_backend_pid();
|
|
||||||
"
|
|
||||||
docker compose exec -T postgres dropdb -U hoafinance hoafinance
|
|
||||||
docker compose exec -T postgres createdb -U hoafinance hoafinance
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4a. Restore from custom-format dump
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Copy the dump into the container
|
|
||||||
docker compose cp hoafinance_backup.dump postgres:/tmp/hoafinance_backup.dump
|
|
||||||
|
|
||||||
# Restore
|
|
||||||
docker compose exec -T postgres pg_restore \
|
|
||||||
-U hoafinance \
|
|
||||||
-d hoafinance \
|
|
||||||
--no-owner \
|
|
||||||
--no-privileges \
|
|
||||||
/tmp/hoafinance_backup.dump
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4b. Restore from plain SQL dump
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose exec -T postgres psql \
|
|
||||||
-U hoafinance \
|
|
||||||
-d hoafinance \
|
|
||||||
< hoafinance_backup.sql
|
|
||||||
```
|
|
||||||
|
|
||||||
### 5. Restart the backend
|
|
||||||
|
|
||||||
After restoring, restart the backend so NestJS re-establishes its connection
|
|
||||||
pool and picks up the restored schemas:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose restart backend
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Running Migrations on the Staged Environment
|
|
||||||
|
|
||||||
Migrations live in `db/migrations/` and are numbered sequentially. After
|
|
||||||
restoring an older backup, you may need to apply newer migrations.
|
|
||||||
|
|
||||||
Check which migrations exist:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
ls -la db/migrations/
|
|
||||||
```
|
|
||||||
|
|
||||||
Apply them in order:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Run all migrations sequentially
|
|
||||||
for f in db/migrations/*.sql; do
|
|
||||||
echo "Applying $f ..."
|
|
||||||
docker compose exec -T postgres psql \
|
|
||||||
-U hoafinance \
|
|
||||||
-d hoafinance \
|
|
||||||
< "$f"
|
|
||||||
done
|
|
||||||
```
|
|
||||||
|
|
||||||
Or apply a specific migration:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose exec -T postgres psql \
|
|
||||||
-U hoafinance \
|
|
||||||
-d hoafinance \
|
|
||||||
< db/migrations/010-health-scores.sql
|
|
||||||
```
|
|
||||||
|
|
||||||
> **Note:** Migrations are idempotent where possible (`IF NOT EXISTS`,
|
|
||||||
> `DO $$ ... $$` blocks), so re-running one that has already been applied
|
|
||||||
> is generally safe.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Verifying the Deployment
|
|
||||||
|
|
||||||
### Quick health checks
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Backend is responding
|
|
||||||
curl -s http://localhost/api/auth/login | head -c 100
|
|
||||||
|
|
||||||
# Database is accessible
|
|
||||||
docker compose exec -T postgres psql -U hoafinance -d hoafinance \
|
|
||||||
-c "SELECT count(*) AS tenants FROM shared.organizations WHERE status = 'active';"
|
|
||||||
|
|
||||||
# Redis is working
|
|
||||||
docker compose exec -T redis redis-cli ping
|
|
||||||
```
|
|
||||||
|
|
||||||
### Full smoke test
|
|
||||||
|
|
||||||
1. Open `http://<server-ip>` in a browser
|
|
||||||
2. Log in with a known account
|
|
||||||
3. Navigate to Dashboard — verify health scores load
|
|
||||||
4. Navigate to Capital Planning — verify Kanban columns render
|
|
||||||
5. Navigate to Projects — verify project list loads
|
|
||||||
6. Check the Settings page — version should read **2026.3.2 (beta)**
|
|
||||||
|
|
||||||
### View logs
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose logs -f # all services
|
|
||||||
docker compose logs -f backend # backend only
|
|
||||||
docker compose logs -f postgres # database only
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Environment Variable Reference
|
|
||||||
|
|
||||||
| Variable | Required | Description |
|
|
||||||
|-------------------|----------|----------------------------------------------------|
|
|
||||||
| `POSTGRES_USER` | Yes | PostgreSQL username |
|
|
||||||
| `POSTGRES_PASSWORD`| Yes | PostgreSQL password (**change from default**) |
|
|
||||||
| `POSTGRES_DB` | Yes | Database name |
|
|
||||||
| `DATABASE_URL` | Yes | Full connection string for the backend |
|
|
||||||
| `REDIS_URL` | Yes | Redis connection string |
|
|
||||||
| `JWT_SECRET` | Yes | Secret for signing JWT tokens (**change from default**) |
|
|
||||||
| `NODE_ENV` | Yes | `development` or `production` |
|
|
||||||
| `AI_API_URL` | Yes | OpenAI-compatible inference endpoint |
|
|
||||||
| `AI_API_KEY` | Yes | API key for AI provider (Nvidia) |
|
|
||||||
| `AI_MODEL` | Yes | Model identifier for AI calls |
|
|
||||||
| `AI_DEBUG` | No | Set `true` to log raw AI prompts/responses |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Architecture Overview
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────┐
|
|
||||||
Browser ────────► │ nginx :80 │
|
|
||||||
└──────┬──────┘
|
|
||||||
┌────────┴────────┐
|
|
||||||
▼ ▼
|
|
||||||
┌──────────────┐ ┌──────────────┐
|
|
||||||
│ backend :3000│ │frontend :5173│
|
|
||||||
│ (NestJS) │ │ (Vite/React) │
|
|
||||||
└──────┬───────┘ └──────────────┘
|
|
||||||
┌────┴────┐
|
|
||||||
▼ ▼
|
|
||||||
┌────────────┐ ┌───────────┐
|
|
||||||
│postgres:5432│ │redis :6379│
|
|
||||||
│ (PG 15) │ │ (Redis 7) │
|
|
||||||
└────────────┘ └───────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
**Multi-tenant isolation:** Each HOA organization gets its own PostgreSQL
|
|
||||||
schema (e.g., `tenant_pine_creek_hoa_q33i`). The `shared` schema holds
|
|
||||||
cross-tenant tables (users, organizations, market rates). Tenant context
|
|
||||||
is resolved from the JWT token on every API request.
|
|
||||||
22
frontend/Dockerfile
Normal file
22
frontend/Dockerfile
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
# ---- Production Dockerfile for React frontend ----
|
||||||
|
# Multi-stage build: compile to static assets, serve with nginx
|
||||||
|
|
||||||
|
# Stage 1: Build
|
||||||
|
FROM node:20-alpine AS builder
|
||||||
|
WORKDIR /app
|
||||||
|
COPY package*.json ./
|
||||||
|
RUN npm ci
|
||||||
|
COPY . .
|
||||||
|
RUN npm run build
|
||||||
|
|
||||||
|
# Stage 2: Serve with nginx
|
||||||
|
FROM nginx:alpine
|
||||||
|
|
||||||
|
# Copy the built static files
|
||||||
|
COPY --from=builder /app/dist /usr/share/nginx/html
|
||||||
|
|
||||||
|
# Copy a small nginx config for SPA routing
|
||||||
|
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
||||||
|
|
||||||
|
EXPOSE 3001
|
||||||
|
CMD ["nginx", "-g", "daemon off;"]
|
||||||
20
frontend/nginx.conf
Normal file
20
frontend/nginx.conf
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Minimal nginx config for serving the React SPA inside the frontend container.
|
||||||
|
# The outer nginx reverse proxy forwards non-API requests here.
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 3001;
|
||||||
|
server_name _;
|
||||||
|
root /usr/share/nginx/html;
|
||||||
|
index index.html;
|
||||||
|
|
||||||
|
# Serve static assets with long cache (Vite hashes filenames)
|
||||||
|
location /assets/ {
|
||||||
|
expires 1y;
|
||||||
|
add_header Cache-Control "public, immutable";
|
||||||
|
}
|
||||||
|
|
||||||
|
# SPA fallback — any non-file route returns index.html
|
||||||
|
location / {
|
||||||
|
try_files $uri $uri/ /index.html;
|
||||||
|
}
|
||||||
|
}
|
||||||
140
frontend/package-lock.json
generated
140
frontend/package-lock.json
generated
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "hoa-ledgeriq-frontend",
|
"name": "hoa-ledgeriq-frontend",
|
||||||
"version": "0.1.0",
|
"version": "2026.3.2-beta",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "hoa-ledgeriq-frontend",
|
"name": "hoa-ledgeriq-frontend",
|
||||||
"version": "0.1.0",
|
"version": "2026.3.2-beta",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@mantine/core": "^7.15.3",
|
"@mantine/core": "^7.15.3",
|
||||||
"@mantine/dates": "^7.15.3",
|
"@mantine/dates": "^7.15.3",
|
||||||
@@ -21,6 +21,7 @@
|
|||||||
"dayjs": "^1.11.13",
|
"dayjs": "^1.11.13",
|
||||||
"react": "^18.3.1",
|
"react": "^18.3.1",
|
||||||
"react-dom": "^18.3.1",
|
"react-dom": "^18.3.1",
|
||||||
|
"react-joyride": "^2.9.3",
|
||||||
"react-router-dom": "^6.28.2",
|
"react-router-dom": "^6.28.2",
|
||||||
"recharts": "^2.15.0",
|
"recharts": "^2.15.0",
|
||||||
"zustand": "^4.5.5"
|
"zustand": "^4.5.5"
|
||||||
@@ -772,6 +773,12 @@
|
|||||||
"integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==",
|
"integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/@gilbarbara/deep-equal": {
|
||||||
|
"version": "0.3.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/@gilbarbara/deep-equal/-/deep-equal-0.3.1.tgz",
|
||||||
|
"integrity": "sha512-I7xWjLs2YSVMc5gGx1Z3ZG1lgFpITPndpi8Ku55GeEIKpACCPQNS/OTqQbxgTCfq0Ncvcc+CrFov96itVh6Qvw==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@jridgewell/gen-mapping": {
|
"node_modules/@jridgewell/gen-mapping": {
|
||||||
"version": "0.3.13",
|
"version": "0.3.13",
|
||||||
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
|
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
|
||||||
@@ -1464,14 +1471,12 @@
|
|||||||
"version": "15.7.15",
|
"version": "15.7.15",
|
||||||
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz",
|
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz",
|
||||||
"integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==",
|
"integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==",
|
||||||
"devOptional": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/@types/react": {
|
"node_modules/@types/react": {
|
||||||
"version": "18.3.28",
|
"version": "18.3.28",
|
||||||
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.28.tgz",
|
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.28.tgz",
|
||||||
"integrity": "sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==",
|
"integrity": "sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==",
|
||||||
"devOptional": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@types/prop-types": "*",
|
"@types/prop-types": "*",
|
||||||
@@ -1813,6 +1818,22 @@
|
|||||||
"integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==",
|
"integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/deep-diff": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/deep-diff/-/deep-diff-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-aWS3UIVH+NPGCD1kki+DCU9Dua032iSsO43LqQpcs4R3+dVv7tX0qBGjiVHJHjplsoUM2XRO/KB92glqc68awg==",
|
||||||
|
"deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/deepmerge": {
|
||||||
|
"version": "4.3.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz",
|
||||||
|
"integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/delayed-stream": {
|
"node_modules/delayed-stream": {
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||||
@@ -2165,6 +2186,12 @@
|
|||||||
"integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==",
|
"integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==",
|
||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
|
"node_modules/is-lite": {
|
||||||
|
"version": "1.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-lite/-/is-lite-1.2.1.tgz",
|
||||||
|
"integrity": "sha512-pgF+L5bxC+10hLBgf6R2P4ZZUBOQIIacbdo8YvuCP8/JvsWxG7aZ9p10DYuLtifFci4l3VITphhMlMV4Y+urPw==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/js-tokens": {
|
"node_modules/js-tokens": {
|
||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
|
||||||
@@ -2326,6 +2353,17 @@
|
|||||||
"url": "https://github.com/sponsors/jonschlinkert"
|
"url": "https://github.com/sponsors/jonschlinkert"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/popper.js": {
|
||||||
|
"version": "1.16.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/popper.js/-/popper.js-1.16.1.tgz",
|
||||||
|
"integrity": "sha512-Wb4p1J4zyFTbM+u6WuO4XstYx4Ky9Cewe4DWrel7B0w6VVICvPwdOpotjzcf6eD8TsckVnIMNONQyPIUFOUbCQ==",
|
||||||
|
"deprecated": "You can find the new Popper v2 at @popperjs/core, this package is dedicated to the legacy v1",
|
||||||
|
"license": "MIT",
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/popperjs"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/postcss": {
|
"node_modules/postcss": {
|
||||||
"version": "8.5.6",
|
"version": "8.5.6",
|
||||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
|
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
|
||||||
@@ -2523,12 +2561,84 @@
|
|||||||
"react": "^18.3.1"
|
"react": "^18.3.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/react-floater": {
|
||||||
|
"version": "0.7.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/react-floater/-/react-floater-0.7.9.tgz",
|
||||||
|
"integrity": "sha512-NXqyp9o8FAXOATOEo0ZpyaQ2KPb4cmPMXGWkx377QtJkIXHlHRAGer7ai0r0C1kG5gf+KJ6Gy+gdNIiosvSicg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"deepmerge": "^4.3.1",
|
||||||
|
"is-lite": "^0.8.2",
|
||||||
|
"popper.js": "^1.16.0",
|
||||||
|
"prop-types": "^15.8.1",
|
||||||
|
"tree-changes": "^0.9.1"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": "15 - 18",
|
||||||
|
"react-dom": "15 - 18"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/react-floater/node_modules/@gilbarbara/deep-equal": {
|
||||||
|
"version": "0.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@gilbarbara/deep-equal/-/deep-equal-0.1.2.tgz",
|
||||||
|
"integrity": "sha512-jk+qzItoEb0D0xSSmrKDDzf9sheQj/BAPxlgNxgmOaA3mxpUa6ndJLYGZKsJnIVEQSD8zcTbyILz7I0HcnBCRA==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/react-floater/node_modules/is-lite": {
|
||||||
|
"version": "0.8.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-lite/-/is-lite-0.8.2.tgz",
|
||||||
|
"integrity": "sha512-JZfH47qTsslwaAsqbMI3Q6HNNjUuq6Cmzzww50TdP5Esb6e1y2sK2UAaZZuzfAzpoI2AkxoPQapZdlDuP6Vlsw==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/react-floater/node_modules/tree-changes": {
|
||||||
|
"version": "0.9.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/tree-changes/-/tree-changes-0.9.3.tgz",
|
||||||
|
"integrity": "sha512-vvvS+O6kEeGRzMglTKbc19ltLWNtmNt1cpBoSYLj/iEcPVvpJasemKOlxBrmZaCtDJoF+4bwv3m01UKYi8mukQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@gilbarbara/deep-equal": "^0.1.1",
|
||||||
|
"is-lite": "^0.8.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/react-innertext": {
|
||||||
|
"version": "1.1.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/react-innertext/-/react-innertext-1.1.5.tgz",
|
||||||
|
"integrity": "sha512-PWAqdqhxhHIv80dT9znP2KvS+hfkbRovFp4zFYHFFlOoQLRiawIic81gKb3U1wEyJZgMwgs3JoLtwryASRWP3Q==",
|
||||||
|
"license": "MIT",
|
||||||
|
"peerDependencies": {
|
||||||
|
"@types/react": ">=0.0.0 <=99",
|
||||||
|
"react": ">=0.0.0 <=99"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/react-is": {
|
"node_modules/react-is": {
|
||||||
"version": "16.13.1",
|
"version": "16.13.1",
|
||||||
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
|
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
|
||||||
"integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==",
|
"integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/react-joyride": {
|
||||||
|
"version": "2.9.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/react-joyride/-/react-joyride-2.9.3.tgz",
|
||||||
|
"integrity": "sha512-1+Mg34XK5zaqJ63eeBhqdbk7dlGCFp36FXwsEvgpjqrtyywX2C6h9vr3jgxP0bGHCw8Ilsp/nRDzNVq6HJ3rNw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@gilbarbara/deep-equal": "^0.3.1",
|
||||||
|
"deep-diff": "^1.0.2",
|
||||||
|
"deepmerge": "^4.3.1",
|
||||||
|
"is-lite": "^1.2.1",
|
||||||
|
"react-floater": "^0.7.9",
|
||||||
|
"react-innertext": "^1.1.5",
|
||||||
|
"react-is": "^16.13.1",
|
||||||
|
"scroll": "^3.0.1",
|
||||||
|
"scrollparent": "^2.1.0",
|
||||||
|
"tree-changes": "^0.11.2",
|
||||||
|
"type-fest": "^4.27.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": "15 - 18",
|
||||||
|
"react-dom": "15 - 18"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/react-number-format": {
|
"node_modules/react-number-format": {
|
||||||
"version": "5.4.4",
|
"version": "5.4.4",
|
||||||
"resolved": "https://registry.npmjs.org/react-number-format/-/react-number-format-5.4.4.tgz",
|
"resolved": "https://registry.npmjs.org/react-number-format/-/react-number-format-5.4.4.tgz",
|
||||||
@@ -2790,6 +2900,18 @@
|
|||||||
"loose-envify": "^1.1.0"
|
"loose-envify": "^1.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/scroll": {
|
||||||
|
"version": "3.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/scroll/-/scroll-3.0.1.tgz",
|
||||||
|
"integrity": "sha512-pz7y517OVls1maEzlirKO5nPYle9AXsFzTMNJrRGmT951mzpIBy7sNHOg5o/0MQd/NqliCiWnAi0kZneMPFLcg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/scrollparent": {
|
||||||
|
"version": "2.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/scrollparent/-/scrollparent-2.1.0.tgz",
|
||||||
|
"integrity": "sha512-bnnvJL28/Rtz/kz2+4wpBjHzWoEzXhVg/TE8BeVGJHUqE8THNIRnDxDWMktwM+qahvlRdvlLdsQfYe+cuqfZeA==",
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
"node_modules/semver": {
|
"node_modules/semver": {
|
||||||
"version": "6.3.1",
|
"version": "6.3.1",
|
||||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
|
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
|
||||||
@@ -2862,6 +2984,16 @@
|
|||||||
"url": "https://github.com/sponsors/SuperchupuDev"
|
"url": "https://github.com/sponsors/SuperchupuDev"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/tree-changes": {
|
||||||
|
"version": "0.11.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/tree-changes/-/tree-changes-0.11.3.tgz",
|
||||||
|
"integrity": "sha512-r14mvDZ6tqz8PRQmlFKjhUVngu4VZ9d92ON3tp0EGpFBE6PAHOq8Bx8m8ahbNoGE3uI/npjYcJiqVydyOiYXag==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@gilbarbara/deep-equal": "^0.3.1",
|
||||||
|
"is-lite": "^1.2.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/tslib": {
|
"node_modules/tslib": {
|
||||||
"version": "2.8.1",
|
"version": "2.8.1",
|
||||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||||
|
|||||||
@@ -11,31 +11,32 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@mantine/core": "^7.15.3",
|
"@mantine/core": "^7.15.3",
|
||||||
"@mantine/hooks": "^7.15.3",
|
|
||||||
"@mantine/form": "^7.15.3",
|
|
||||||
"@mantine/dates": "^7.15.3",
|
"@mantine/dates": "^7.15.3",
|
||||||
"@mantine/notifications": "^7.15.3",
|
"@mantine/form": "^7.15.3",
|
||||||
|
"@mantine/hooks": "^7.15.3",
|
||||||
"@mantine/modals": "^7.15.3",
|
"@mantine/modals": "^7.15.3",
|
||||||
|
"@mantine/notifications": "^7.15.3",
|
||||||
"@tabler/icons-react": "^3.28.1",
|
"@tabler/icons-react": "^3.28.1",
|
||||||
|
"@tanstack/react-query": "^5.64.2",
|
||||||
|
"axios": "^1.7.9",
|
||||||
|
"d3-sankey": "^0.12.3",
|
||||||
|
"dayjs": "^1.11.13",
|
||||||
"react": "^18.3.1",
|
"react": "^18.3.1",
|
||||||
"react-dom": "^18.3.1",
|
"react-dom": "^18.3.1",
|
||||||
|
"react-joyride": "^2.9.3",
|
||||||
"react-router-dom": "^6.28.2",
|
"react-router-dom": "^6.28.2",
|
||||||
"recharts": "^2.15.0",
|
"recharts": "^2.15.0",
|
||||||
"d3-sankey": "^0.12.3",
|
"zustand": "^4.5.5"
|
||||||
"zustand": "^4.5.5",
|
|
||||||
"axios": "^1.7.9",
|
|
||||||
"@tanstack/react-query": "^5.64.2",
|
|
||||||
"dayjs": "^1.11.13"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@types/d3-sankey": "^0.12.4",
|
||||||
"@types/react": "^18.3.18",
|
"@types/react": "^18.3.18",
|
||||||
"@types/react-dom": "^18.3.5",
|
"@types/react-dom": "^18.3.5",
|
||||||
"@types/d3-sankey": "^0.12.4",
|
|
||||||
"@vitejs/plugin-react": "^4.3.4",
|
"@vitejs/plugin-react": "^4.3.4",
|
||||||
"typescript": "^5.7.3",
|
|
||||||
"vite": "^5.4.14",
|
|
||||||
"postcss": "^8.4.49",
|
"postcss": "^8.4.49",
|
||||||
"postcss-preset-mantine": "^1.17.0",
|
"postcss-preset-mantine": "^1.17.0",
|
||||||
"postcss-simple-vars": "^7.0.1"
|
"postcss-simple-vars": "^7.0.1",
|
||||||
|
"typescript": "^5.7.3",
|
||||||
|
"vite": "^5.4.14"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -78,7 +78,7 @@ function KanbanCard({ project, onEdit, onDragStart }: KanbanCardProps) {
|
|||||||
const plannedLabel = formatPlannedDate(project.planned_date);
|
const plannedLabel = formatPlannedDate(project.planned_date);
|
||||||
// For projects in the Future bucket with a specific year, show the year
|
// For projects in the Future bucket with a specific year, show the year
|
||||||
const currentYear = new Date().getFullYear();
|
const currentYear = new Date().getFullYear();
|
||||||
const isBeyondWindow = project.target_year > currentYear + 4 && project.target_year !== FUTURE_YEAR;
|
const isBeyondWindow = project.target_year !== null && project.target_year > currentYear + 4 && project.target_year !== FUTURE_YEAR;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Card
|
<Card
|
||||||
@@ -430,13 +430,13 @@ export function CapitalProjectsPage() {
|
|||||||
// Merge base years with any extra years from projects (excluding FUTURE_YEAR for now)
|
// Merge base years with any extra years from projects (excluding FUTURE_YEAR for now)
|
||||||
const regularYears = [...new Set([...baseYears, ...projectYears.filter((y) => y !== FUTURE_YEAR)])].sort();
|
const regularYears = [...new Set([...baseYears, ...projectYears.filter((y) => y !== FUTURE_YEAR)])].sort();
|
||||||
const years = [
|
const years = [
|
||||||
...(hasUnscheduledProjects ? [UNSCHEDULED] : []),
|
|
||||||
...regularYears,
|
...regularYears,
|
||||||
...(hasFutureProjects ? [FUTURE_YEAR] : []),
|
...(hasFutureProjects ? [FUTURE_YEAR] : []),
|
||||||
|
...(hasUnscheduledProjects ? [UNSCHEDULED] : []),
|
||||||
];
|
];
|
||||||
|
|
||||||
// Kanban columns: Unscheduled + current..current+4 + Future
|
// Kanban columns: current..current+4 + Future + Unscheduled (rightmost)
|
||||||
const kanbanYears = [UNSCHEDULED, ...baseYears, FUTURE_YEAR];
|
const kanbanYears = [...baseYears, FUTURE_YEAR, UNSCHEDULED];
|
||||||
|
|
||||||
// ---- Loading state ----
|
// ---- Loading state ----
|
||||||
|
|
||||||
|
|||||||
@@ -16,7 +16,8 @@ import {
|
|||||||
IconRefresh,
|
IconRefresh,
|
||||||
IconInfoCircle,
|
IconInfoCircle,
|
||||||
} from '@tabler/icons-react';
|
} from '@tabler/icons-react';
|
||||||
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
|
import { useState, useCallback } from 'react';
|
||||||
|
import { useQuery, useQueryClient } from '@tanstack/react-query';
|
||||||
import { useAuthStore } from '../../stores/authStore';
|
import { useAuthStore } from '../../stores/authStore';
|
||||||
import api from '../../services/api';
|
import api from '../../services/api';
|
||||||
|
|
||||||
@@ -39,6 +40,8 @@ interface HealthScore {
|
|||||||
interface HealthScoresData {
|
interface HealthScoresData {
|
||||||
operating: HealthScore | null;
|
operating: HealthScore | null;
|
||||||
reserve: HealthScore | null;
|
reserve: HealthScore | null;
|
||||||
|
operating_last_failed?: boolean;
|
||||||
|
reserve_last_failed?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
function getScoreColor(score: number): string {
|
function getScoreColor(score: number): string {
|
||||||
@@ -55,13 +58,36 @@ function TrajectoryIcon({ trajectory }: { trajectory: string | null }) {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
function HealthScoreCard({ score, title, icon }: { score: HealthScore | null; title: string; icon: React.ReactNode }) {
|
function HealthScoreCard({
|
||||||
|
score,
|
||||||
|
title,
|
||||||
|
icon,
|
||||||
|
isRefreshing,
|
||||||
|
onRefresh,
|
||||||
|
lastFailed,
|
||||||
|
}: {
|
||||||
|
score: HealthScore | null;
|
||||||
|
title: string;
|
||||||
|
icon: React.ReactNode;
|
||||||
|
isRefreshing?: boolean;
|
||||||
|
onRefresh?: () => void;
|
||||||
|
lastFailed?: boolean;
|
||||||
|
}) {
|
||||||
|
// No score at all yet
|
||||||
if (!score) {
|
if (!score) {
|
||||||
return (
|
return (
|
||||||
<Card withBorder padding="lg" radius="md">
|
<Card withBorder padding="lg" radius="md">
|
||||||
<Group justify="space-between" mb="xs">
|
<Group justify="space-between" mb="xs">
|
||||||
<Text size="xs" c="dimmed" tt="uppercase" fw={700}>{title} Health</Text>
|
<Text size="xs" c="dimmed" tt="uppercase" fw={700}>{title} Health</Text>
|
||||||
{icon}
|
<Group gap={6}>
|
||||||
|
{onRefresh && (
|
||||||
|
<Tooltip label={`Recalculate ${title.toLowerCase()} score`}>
|
||||||
|
<Button variant="subtle" size="compact-xs" leftSection={<IconRefresh size={14} />}
|
||||||
|
loading={isRefreshing} onClick={onRefresh}>Refresh</Button>
|
||||||
|
</Tooltip>
|
||||||
|
)}
|
||||||
|
{icon}
|
||||||
|
</Group>
|
||||||
</Group>
|
</Group>
|
||||||
<Center h={100}>
|
<Center h={100}>
|
||||||
<Text c="dimmed" size="sm">No health score yet</Text>
|
<Text c="dimmed" size="sm">No health score yet</Text>
|
||||||
@@ -70,6 +96,7 @@ function HealthScoreCard({ score, title, icon }: { score: HealthScore | null; ti
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Pending — missing data, can't calculate
|
||||||
if (score.status === 'pending') {
|
if (score.status === 'pending') {
|
||||||
const missingItems = Array.isArray(score.missing_data) ? score.missing_data :
|
const missingItems = Array.isArray(score.missing_data) ? score.missing_data :
|
||||||
(typeof score.missing_data === 'string' ? JSON.parse(score.missing_data) : []);
|
(typeof score.missing_data === 'string' ? JSON.parse(score.missing_data) : []);
|
||||||
@@ -77,7 +104,15 @@ function HealthScoreCard({ score, title, icon }: { score: HealthScore | null; ti
|
|||||||
<Card withBorder padding="lg" radius="md">
|
<Card withBorder padding="lg" radius="md">
|
||||||
<Group justify="space-between" mb="xs">
|
<Group justify="space-between" mb="xs">
|
||||||
<Text size="xs" c="dimmed" tt="uppercase" fw={700}>{title} Health</Text>
|
<Text size="xs" c="dimmed" tt="uppercase" fw={700}>{title} Health</Text>
|
||||||
{icon}
|
<Group gap={6}>
|
||||||
|
{onRefresh && (
|
||||||
|
<Tooltip label={`Recalculate ${title.toLowerCase()} score`}>
|
||||||
|
<Button variant="subtle" size="compact-xs" leftSection={<IconRefresh size={14} />}
|
||||||
|
loading={isRefreshing} onClick={onRefresh}>Refresh</Button>
|
||||||
|
</Tooltip>
|
||||||
|
)}
|
||||||
|
{icon}
|
||||||
|
</Group>
|
||||||
</Group>
|
</Group>
|
||||||
<Center>
|
<Center>
|
||||||
<Stack align="center" gap="xs">
|
<Stack align="center" gap="xs">
|
||||||
@@ -92,20 +127,38 @@ function HealthScoreCard({ score, title, icon }: { score: HealthScore | null; ti
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (score.status === 'error') {
|
// For error status, we still render the score data (cached from the previous
|
||||||
|
// successful run) rather than blanking the card with "Error calculating score".
|
||||||
|
// A small watermark under the timestamp tells the user it's stale.
|
||||||
|
const showAsError = score.status === 'error' && score.score === 0 && !score.summary;
|
||||||
|
|
||||||
|
// Pure error with no cached data to fall back on
|
||||||
|
if (showAsError) {
|
||||||
return (
|
return (
|
||||||
<Card withBorder padding="lg" radius="md">
|
<Card withBorder padding="lg" radius="md">
|
||||||
<Group justify="space-between" mb="xs">
|
<Group justify="space-between" mb="xs">
|
||||||
<Text size="xs" c="dimmed" tt="uppercase" fw={700}>{title} Health</Text>
|
<Text size="xs" c="dimmed" tt="uppercase" fw={700}>{title} Health</Text>
|
||||||
{icon}
|
<Group gap={6}>
|
||||||
|
{onRefresh && (
|
||||||
|
<Tooltip label={`Retry ${title.toLowerCase()} score`}>
|
||||||
|
<Button variant="subtle" size="compact-xs" leftSection={<IconRefresh size={14} />}
|
||||||
|
loading={isRefreshing} onClick={onRefresh}>Retry</Button>
|
||||||
|
</Tooltip>
|
||||||
|
)}
|
||||||
|
{icon}
|
||||||
|
</Group>
|
||||||
</Group>
|
</Group>
|
||||||
<Center h={100}>
|
<Center h={100}>
|
||||||
<Badge color="red" variant="light">Error calculating score</Badge>
|
<Stack align="center" gap={4}>
|
||||||
|
<Badge color="red" variant="light">Error calculating score</Badge>
|
||||||
|
<Text size="xs" c="dimmed">Click Retry to try again</Text>
|
||||||
|
</Stack>
|
||||||
</Center>
|
</Center>
|
||||||
</Card>
|
</Card>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Normal display — works for both 'complete' and 'error' (with cached data)
|
||||||
const color = getScoreColor(score.score);
|
const color = getScoreColor(score.score);
|
||||||
const factors = Array.isArray(score.factors) ? score.factors :
|
const factors = Array.isArray(score.factors) ? score.factors :
|
||||||
(typeof score.factors === 'string' ? JSON.parse(score.factors) : []);
|
(typeof score.factors === 'string' ? JSON.parse(score.factors) : []);
|
||||||
@@ -116,7 +169,15 @@ function HealthScoreCard({ score, title, icon }: { score: HealthScore | null; ti
|
|||||||
<Card withBorder padding="lg" radius="md">
|
<Card withBorder padding="lg" radius="md">
|
||||||
<Group justify="space-between" mb="xs">
|
<Group justify="space-between" mb="xs">
|
||||||
<Text size="xs" c="dimmed" tt="uppercase" fw={700}>{title} Health</Text>
|
<Text size="xs" c="dimmed" tt="uppercase" fw={700}>{title} Health</Text>
|
||||||
{icon}
|
<Group gap={6}>
|
||||||
|
{onRefresh && (
|
||||||
|
<Tooltip label={`Recalculate ${title.toLowerCase()} score`}>
|
||||||
|
<Button variant="subtle" size="compact-xs" leftSection={<IconRefresh size={14} />}
|
||||||
|
loading={isRefreshing} onClick={onRefresh}>Refresh</Button>
|
||||||
|
</Tooltip>
|
||||||
|
)}
|
||||||
|
{icon}
|
||||||
|
</Group>
|
||||||
</Group>
|
</Group>
|
||||||
<Group align="flex-start" gap="lg">
|
<Group align="flex-start" gap="lg">
|
||||||
<RingProgress
|
<RingProgress
|
||||||
@@ -215,9 +276,16 @@ function HealthScoreCard({ score, title, icon }: { score: HealthScore | null; ti
|
|||||||
</Stack>
|
</Stack>
|
||||||
</Group>
|
</Group>
|
||||||
{score.calculated_at && (
|
{score.calculated_at && (
|
||||||
<Text size="10px" c="dimmed" ta="right" mt={6} style={{ opacity: 0.7 }}>
|
<Stack gap={0} mt={6} align="flex-end">
|
||||||
Last updated {new Date(score.calculated_at).toLocaleDateString()} at {new Date(score.calculated_at).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })}
|
<Text size="10px" c="dimmed" style={{ opacity: 0.7 }}>
|
||||||
</Text>
|
Last updated {new Date(score.calculated_at).toLocaleDateString()} at {new Date(score.calculated_at).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })}
|
||||||
|
</Text>
|
||||||
|
{lastFailed && (
|
||||||
|
<Text size="10px" c="orange" fw={500} style={{ opacity: 0.85 }}>
|
||||||
|
last analysis failed — showing cached data
|
||||||
|
</Text>
|
||||||
|
)}
|
||||||
|
</Stack>
|
||||||
)}
|
)}
|
||||||
</Card>
|
</Card>
|
||||||
);
|
);
|
||||||
@@ -245,6 +313,10 @@ export function DashboardPage() {
|
|||||||
const currentOrg = useAuthStore((s) => s.currentOrg);
|
const currentOrg = useAuthStore((s) => s.currentOrg);
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
|
|
||||||
|
// Track whether a refresh is in progress (per score type) for async polling
|
||||||
|
const [operatingRefreshing, setOperatingRefreshing] = useState(false);
|
||||||
|
const [reserveRefreshing, setReserveRefreshing] = useState(false);
|
||||||
|
|
||||||
const { data, isLoading } = useQuery<DashboardData>({
|
const { data, isLoading } = useQuery<DashboardData>({
|
||||||
queryKey: ['dashboard'],
|
queryKey: ['dashboard'],
|
||||||
queryFn: async () => { const { data } = await api.get('/reports/dashboard'); return data; },
|
queryFn: async () => { const { data } = await api.get('/reports/dashboard'); return data; },
|
||||||
@@ -255,14 +327,66 @@ export function DashboardPage() {
|
|||||||
queryKey: ['health-scores'],
|
queryKey: ['health-scores'],
|
||||||
queryFn: async () => { const { data } = await api.get('/health-scores/latest'); return data; },
|
queryFn: async () => { const { data } = await api.get('/health-scores/latest'); return data; },
|
||||||
enabled: !!currentOrg,
|
enabled: !!currentOrg,
|
||||||
|
// Poll every 3 seconds while a refresh is in progress
|
||||||
|
refetchInterval: (operatingRefreshing || reserveRefreshing) ? 3000 : false,
|
||||||
});
|
});
|
||||||
|
|
||||||
const recalcMutation = useMutation({
|
// Async refresh handlers — trigger the backend and poll for results
|
||||||
mutationFn: () => api.post('/health-scores/calculate'),
|
const handleRefreshOperating = useCallback(async () => {
|
||||||
onSuccess: () => {
|
const prevId = healthScores?.operating?.id;
|
||||||
queryClient.invalidateQueries({ queryKey: ['health-scores'] });
|
setOperatingRefreshing(true);
|
||||||
},
|
try {
|
||||||
});
|
await api.post('/health-scores/calculate/operating');
|
||||||
|
} catch {
|
||||||
|
// Trigger failed at network level — polling will pick up any backend-saved error
|
||||||
|
}
|
||||||
|
// Start polling — watch for the health score to change (new id or updated timestamp)
|
||||||
|
const pollUntilDone = () => {
|
||||||
|
const checkInterval = setInterval(async () => {
|
||||||
|
try {
|
||||||
|
const { data: latest } = await api.get('/health-scores/latest');
|
||||||
|
const newScore = latest?.operating;
|
||||||
|
if (newScore && newScore.id !== prevId) {
|
||||||
|
setOperatingRefreshing(false);
|
||||||
|
queryClient.setQueryData(['health-scores'], latest);
|
||||||
|
clearInterval(checkInterval);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Keep polling
|
||||||
|
}
|
||||||
|
}, 3000);
|
||||||
|
// Safety timeout — stop polling after 11 minutes
|
||||||
|
setTimeout(() => { clearInterval(checkInterval); setOperatingRefreshing(false); }, 660000);
|
||||||
|
};
|
||||||
|
pollUntilDone();
|
||||||
|
}, [healthScores?.operating?.id, queryClient]);
|
||||||
|
|
||||||
|
const handleRefreshReserve = useCallback(async () => {
|
||||||
|
const prevId = healthScores?.reserve?.id;
|
||||||
|
setReserveRefreshing(true);
|
||||||
|
try {
|
||||||
|
await api.post('/health-scores/calculate/reserve');
|
||||||
|
} catch {
|
||||||
|
// Trigger failed at network level
|
||||||
|
}
|
||||||
|
const pollUntilDone = () => {
|
||||||
|
const checkInterval = setInterval(async () => {
|
||||||
|
try {
|
||||||
|
const { data: latest } = await api.get('/health-scores/latest');
|
||||||
|
const newScore = latest?.reserve;
|
||||||
|
if (newScore && newScore.id !== prevId) {
|
||||||
|
setReserveRefreshing(false);
|
||||||
|
queryClient.setQueryData(['health-scores'], latest);
|
||||||
|
clearInterval(checkInterval);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Keep polling
|
||||||
|
}
|
||||||
|
}, 3000);
|
||||||
|
setTimeout(() => { clearInterval(checkInterval); setReserveRefreshing(false); }, 660000);
|
||||||
|
};
|
||||||
|
pollUntilDone();
|
||||||
|
}, [healthScores?.reserve?.id, queryClient]);
|
||||||
|
|
||||||
const fmt = (v: string | number) =>
|
const fmt = (v: string | number) =>
|
||||||
parseFloat(String(v || '0')).toLocaleString('en-US', { style: 'currency', currency: 'USD' });
|
parseFloat(String(v || '0')).toLocaleString('en-US', { style: 'currency', currency: 'USD' });
|
||||||
@@ -290,20 +414,7 @@ export function DashboardPage() {
|
|||||||
<Center h={200}><Loader /></Center>
|
<Center h={200}><Loader /></Center>
|
||||||
) : (
|
) : (
|
||||||
<>
|
<>
|
||||||
<Group justify="space-between" align="center">
|
<Text size="sm" fw={600} c="dimmed">AI Health Scores</Text>
|
||||||
<Text size="sm" fw={600} c="dimmed">AI Health Scores</Text>
|
|
||||||
<Tooltip label="Recalculate health scores now">
|
|
||||||
<Button
|
|
||||||
variant="subtle"
|
|
||||||
size="compact-xs"
|
|
||||||
leftSection={<IconRefresh size={14} />}
|
|
||||||
loading={recalcMutation.isPending}
|
|
||||||
onClick={() => recalcMutation.mutate()}
|
|
||||||
>
|
|
||||||
Refresh
|
|
||||||
</Button>
|
|
||||||
</Tooltip>
|
|
||||||
</Group>
|
|
||||||
<SimpleGrid cols={{ base: 1, md: 2 }}>
|
<SimpleGrid cols={{ base: 1, md: 2 }}>
|
||||||
<HealthScoreCard
|
<HealthScoreCard
|
||||||
score={healthScores?.operating || null}
|
score={healthScores?.operating || null}
|
||||||
@@ -313,6 +424,9 @@ export function DashboardPage() {
|
|||||||
<IconHeartbeat size={20} />
|
<IconHeartbeat size={20} />
|
||||||
</ThemeIcon>
|
</ThemeIcon>
|
||||||
}
|
}
|
||||||
|
isRefreshing={operatingRefreshing}
|
||||||
|
onRefresh={handleRefreshOperating}
|
||||||
|
lastFailed={!!healthScores?.operating_last_failed}
|
||||||
/>
|
/>
|
||||||
<HealthScoreCard
|
<HealthScoreCard
|
||||||
score={healthScores?.reserve || null}
|
score={healthScores?.reserve || null}
|
||||||
@@ -322,6 +436,9 @@ export function DashboardPage() {
|
|||||||
<IconHeartbeat size={20} />
|
<IconHeartbeat size={20} />
|
||||||
</ThemeIcon>
|
</ThemeIcon>
|
||||||
}
|
}
|
||||||
|
isRefreshing={reserveRefreshing}
|
||||||
|
onRefresh={handleRefreshReserve}
|
||||||
|
lastFailed={!!healthScores?.reserve_last_failed}
|
||||||
/>
|
/>
|
||||||
</SimpleGrid>
|
</SimpleGrid>
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { useState, useEffect } from 'react';
|
import { useState, useEffect, useCallback } from 'react';
|
||||||
import {
|
import {
|
||||||
Title,
|
Title,
|
||||||
Text,
|
Text,
|
||||||
@@ -33,7 +33,7 @@ import {
|
|||||||
IconChevronDown,
|
IconChevronDown,
|
||||||
IconChevronUp,
|
IconChevronUp,
|
||||||
} from '@tabler/icons-react';
|
} from '@tabler/icons-react';
|
||||||
import { useQuery, useMutation } from '@tanstack/react-query';
|
import { useQuery } from '@tanstack/react-query';
|
||||||
import { notifications } from '@mantine/notifications';
|
import { notifications } from '@mantine/notifications';
|
||||||
import api from '../../services/api';
|
import api from '../../services/api';
|
||||||
|
|
||||||
@@ -107,6 +107,9 @@ interface SavedRecommendation {
|
|||||||
risk_notes: string[];
|
risk_notes: string[];
|
||||||
response_time_ms: number;
|
response_time_ms: number;
|
||||||
created_at: string;
|
created_at: string;
|
||||||
|
status: 'processing' | 'complete' | 'error';
|
||||||
|
last_failed: boolean;
|
||||||
|
error_message?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Helpers ──
|
// ── Helpers ──
|
||||||
@@ -181,14 +184,29 @@ function RateTable({ rates, showTerm }: { rates: MarketRate[]; showTerm: boolean
|
|||||||
|
|
||||||
// ── Recommendations Display Component ──
|
// ── Recommendations Display Component ──
|
||||||
|
|
||||||
function RecommendationsDisplay({ aiResult, lastUpdated }: { aiResult: AIResponse; lastUpdated?: string }) {
|
function RecommendationsDisplay({
|
||||||
|
aiResult,
|
||||||
|
lastUpdated,
|
||||||
|
lastFailed,
|
||||||
|
}: {
|
||||||
|
aiResult: AIResponse;
|
||||||
|
lastUpdated?: string;
|
||||||
|
lastFailed?: boolean;
|
||||||
|
}) {
|
||||||
return (
|
return (
|
||||||
<Stack>
|
<Stack>
|
||||||
{/* Last Updated timestamp */}
|
{/* Last Updated timestamp + failure message */}
|
||||||
{lastUpdated && (
|
{lastUpdated && (
|
||||||
<Text size="xs" c="dimmed" ta="right">
|
<Stack gap={0} align="flex-end">
|
||||||
Last updated: {new Date(lastUpdated).toLocaleString()}
|
<Text size="xs" c="dimmed" ta="right">
|
||||||
</Text>
|
Last updated: {new Date(lastUpdated).toLocaleString()}
|
||||||
|
</Text>
|
||||||
|
{lastFailed && (
|
||||||
|
<Text size="10px" c="orange" fw={500} style={{ opacity: 0.85 }}>
|
||||||
|
last analysis failed — showing cached data
|
||||||
|
</Text>
|
||||||
|
)}
|
||||||
|
</Stack>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Overall Assessment */}
|
{/* Overall Assessment */}
|
||||||
@@ -327,9 +345,8 @@ function RecommendationsDisplay({ aiResult, lastUpdated }: { aiResult: AIRespons
|
|||||||
// ── Main Component ──
|
// ── Main Component ──
|
||||||
|
|
||||||
export function InvestmentPlanningPage() {
|
export function InvestmentPlanningPage() {
|
||||||
const [aiResult, setAiResult] = useState<AIResponse | null>(null);
|
|
||||||
const [lastUpdated, setLastUpdated] = useState<string | null>(null);
|
|
||||||
const [ratesExpanded, setRatesExpanded] = useState(true);
|
const [ratesExpanded, setRatesExpanded] = useState(true);
|
||||||
|
const [isTriggering, setIsTriggering] = useState(false);
|
||||||
|
|
||||||
// Load financial snapshot on mount
|
// Load financial snapshot on mount
|
||||||
const { data: snapshot, isLoading: snapshotLoading } = useQuery<FinancialSnapshot>({
|
const { data: snapshot, isLoading: snapshotLoading } = useQuery<FinancialSnapshot>({
|
||||||
@@ -349,50 +366,86 @@ export function InvestmentPlanningPage() {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
// Load saved recommendation on mount
|
// Load saved recommendation — polls every 3s when processing
|
||||||
const { data: savedRec } = useQuery<SavedRecommendation | null>({
|
const { data: savedRec } = useQuery<SavedRecommendation | null>({
|
||||||
queryKey: ['investment-planning-saved-recommendation'],
|
queryKey: ['investment-planning-saved-recommendation'],
|
||||||
queryFn: async () => {
|
queryFn: async () => {
|
||||||
const { data } = await api.get('/investment-planning/saved-recommendation');
|
const { data } = await api.get('/investment-planning/saved-recommendation');
|
||||||
return data;
|
return data;
|
||||||
},
|
},
|
||||||
|
refetchInterval: (query) => {
|
||||||
|
const rec = query.state.data;
|
||||||
|
// Poll every 3 seconds while processing
|
||||||
|
if (rec?.status === 'processing') return 3000;
|
||||||
|
// Also poll if we just triggered (status may not be 'processing' yet)
|
||||||
|
if (isTriggering) return 3000;
|
||||||
|
return false;
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
// Populate AI results from saved recommendation on load
|
// Derive display state from saved recommendation
|
||||||
useEffect(() => {
|
const isProcessing = savedRec?.status === 'processing' || isTriggering;
|
||||||
if (savedRec && !aiResult) {
|
const lastFailed = savedRec?.last_failed || false;
|
||||||
setAiResult({
|
const hasResults = savedRec && savedRec.status === 'complete' && savedRec.recommendations.length > 0;
|
||||||
recommendations: savedRec.recommendations,
|
const hasError = savedRec?.status === 'error' && !savedRec?.recommendations?.length;
|
||||||
overall_assessment: savedRec.overall_assessment,
|
|
||||||
risk_notes: savedRec.risk_notes,
|
|
||||||
});
|
|
||||||
setLastUpdated(savedRec.created_at);
|
|
||||||
}
|
|
||||||
}, [savedRec]); // eslint-disable-line react-hooks/exhaustive-deps
|
|
||||||
|
|
||||||
// AI recommendation (on-demand)
|
// Clear triggering flag once backend confirms processing or completes
|
||||||
const aiMutation = useMutation({
|
useEffect(() => {
|
||||||
mutationFn: async () => {
|
if (isTriggering && savedRec?.status === 'processing') {
|
||||||
const { data } = await api.post('/investment-planning/recommendations');
|
setIsTriggering(false);
|
||||||
return data as AIResponse;
|
}
|
||||||
},
|
if (isTriggering && savedRec?.status === 'complete') {
|
||||||
onSuccess: (data) => {
|
setIsTriggering(false);
|
||||||
setAiResult(data);
|
}
|
||||||
setLastUpdated(new Date().toISOString());
|
}, [savedRec?.status, isTriggering]);
|
||||||
if (data.recommendations.length > 0) {
|
|
||||||
notifications.show({
|
// Show notification when processing completes (transition from processing)
|
||||||
message: `Generated ${data.recommendations.length} investment recommendations`,
|
const prevStatusRef = useState<string | null>(null);
|
||||||
color: 'green',
|
useEffect(() => {
|
||||||
});
|
const [prevStatus, setPrevStatus] = prevStatusRef;
|
||||||
}
|
if (prevStatus === 'processing' && savedRec?.status === 'complete') {
|
||||||
},
|
|
||||||
onError: (err: any) => {
|
|
||||||
notifications.show({
|
notifications.show({
|
||||||
message: err.response?.data?.message || 'Failed to get AI recommendations',
|
message: `Generated ${savedRec.recommendations.length} investment recommendations`,
|
||||||
|
color: 'green',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (prevStatus === 'processing' && savedRec?.status === 'error') {
|
||||||
|
notifications.show({
|
||||||
|
message: savedRec.error_message || 'AI recommendation analysis failed',
|
||||||
color: 'red',
|
color: 'red',
|
||||||
});
|
});
|
||||||
},
|
}
|
||||||
});
|
setPrevStatus(savedRec?.status || null);
|
||||||
|
}, [savedRec?.status]); // eslint-disable-line react-hooks/exhaustive-deps
|
||||||
|
|
||||||
|
// Trigger AI recommendations (async — returns immediately)
|
||||||
|
const handleTriggerAI = useCallback(async () => {
|
||||||
|
setIsTriggering(true);
|
||||||
|
try {
|
||||||
|
await api.post('/investment-planning/recommendations');
|
||||||
|
} catch (err: any) {
|
||||||
|
setIsTriggering(false);
|
||||||
|
notifications.show({
|
||||||
|
message: err.response?.data?.message || 'Failed to start AI analysis',
|
||||||
|
color: 'red',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
// Build AI result from saved recommendation for display
|
||||||
|
const aiResult: AIResponse | null = hasResults
|
||||||
|
? {
|
||||||
|
recommendations: savedRec!.recommendations,
|
||||||
|
overall_assessment: savedRec!.overall_assessment,
|
||||||
|
risk_notes: savedRec!.risk_notes,
|
||||||
|
}
|
||||||
|
: (lastFailed && savedRec?.recommendations?.length)
|
||||||
|
? {
|
||||||
|
recommendations: savedRec!.recommendations,
|
||||||
|
overall_assessment: savedRec!.overall_assessment,
|
||||||
|
risk_notes: savedRec!.risk_notes,
|
||||||
|
}
|
||||||
|
: null;
|
||||||
|
|
||||||
if (snapshotLoading) {
|
if (snapshotLoading) {
|
||||||
return (
|
return (
|
||||||
@@ -645,8 +698,8 @@ export function InvestmentPlanningPage() {
|
|||||||
</Group>
|
</Group>
|
||||||
<Button
|
<Button
|
||||||
leftSection={<IconSparkles size={16} />}
|
leftSection={<IconSparkles size={16} />}
|
||||||
onClick={() => aiMutation.mutate()}
|
onClick={handleTriggerAI}
|
||||||
loading={aiMutation.isPending}
|
loading={isProcessing}
|
||||||
variant="gradient"
|
variant="gradient"
|
||||||
gradient={{ from: 'grape', to: 'violet' }}
|
gradient={{ from: 'grape', to: 'violet' }}
|
||||||
>
|
>
|
||||||
@@ -654,8 +707,8 @@ export function InvestmentPlanningPage() {
|
|||||||
</Button>
|
</Button>
|
||||||
</Group>
|
</Group>
|
||||||
|
|
||||||
{/* Loading State */}
|
{/* Processing State */}
|
||||||
{aiMutation.isPending && (
|
{isProcessing && (
|
||||||
<Center py="xl">
|
<Center py="xl">
|
||||||
<Stack align="center" gap="sm">
|
<Stack align="center" gap="sm">
|
||||||
<Loader size="lg" type="dots" />
|
<Loader size="lg" type="dots" />
|
||||||
@@ -663,19 +716,32 @@ export function InvestmentPlanningPage() {
|
|||||||
Analyzing your financial data and market rates...
|
Analyzing your financial data and market rates...
|
||||||
</Text>
|
</Text>
|
||||||
<Text c="dimmed" size="xs">
|
<Text c="dimmed" size="xs">
|
||||||
This may take up to 30 seconds
|
You can navigate away — results will appear when ready
|
||||||
</Text>
|
</Text>
|
||||||
</Stack>
|
</Stack>
|
||||||
</Center>
|
</Center>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Results */}
|
{/* Error State (no cached data) */}
|
||||||
{aiResult && !aiMutation.isPending && (
|
{hasError && !isProcessing && (
|
||||||
<RecommendationsDisplay aiResult={aiResult} lastUpdated={lastUpdated || undefined} />
|
<Alert color="red" variant="light" title="Analysis Failed" mb="md">
|
||||||
|
<Text size="sm">
|
||||||
|
{savedRec?.error_message || 'The last AI analysis failed. Please try again.'}
|
||||||
|
</Text>
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Results (with optional failure watermark) */}
|
||||||
|
{aiResult && !isProcessing && (
|
||||||
|
<RecommendationsDisplay
|
||||||
|
aiResult={aiResult}
|
||||||
|
lastUpdated={savedRec?.created_at || undefined}
|
||||||
|
lastFailed={lastFailed}
|
||||||
|
/>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Empty State */}
|
{/* Empty State */}
|
||||||
{!aiResult && !aiMutation.isPending && (
|
{!aiResult && !isProcessing && !hasError && (
|
||||||
<Paper p="xl" radius="sm" style={{ textAlign: 'center' }}>
|
<Paper p="xl" radius="sm" style={{ textAlign: 'center' }}>
|
||||||
<ThemeIcon variant="light" color="grape" size={48} mx="auto" mb="md">
|
<ThemeIcon variant="light" color="grape" size={48} mx="auto" mb="md">
|
||||||
<IconSparkles size={28} />
|
<IconSparkles size={28} />
|
||||||
|
|||||||
@@ -46,8 +46,8 @@ interface QuarterlyData {
|
|||||||
export function QuarterlyReportPage() {
|
export function QuarterlyReportPage() {
|
||||||
const now = new Date();
|
const now = new Date();
|
||||||
const currentQuarter = Math.ceil((now.getMonth() + 1) / 3);
|
const currentQuarter = Math.ceil((now.getMonth() + 1) / 3);
|
||||||
const defaultQuarter = currentQuarter > 1 ? currentQuarter - 1 : 4;
|
const defaultQuarter = currentQuarter;
|
||||||
const defaultYear = currentQuarter > 1 ? now.getFullYear() : now.getFullYear() - 1;
|
const defaultYear = now.getFullYear();
|
||||||
|
|
||||||
const [year, setYear] = useState(String(defaultYear));
|
const [year, setYear] = useState(String(defaultYear));
|
||||||
const [quarter, setQuarter] = useState(String(defaultQuarter));
|
const [quarter, setQuarter] = useState(String(defaultQuarter));
|
||||||
@@ -102,7 +102,7 @@ export function QuarterlyReportPage() {
|
|||||||
|
|
||||||
{data && (
|
{data && (
|
||||||
<Text size="sm" c="dimmed">
|
<Text size="sm" c="dimmed">
|
||||||
{data.quarter_label} · {new Date(data.date_range.from).toLocaleDateString()} – {new Date(data.date_range.to).toLocaleDateString()}
|
{data.quarter_label} · {new Date(data.date_range.from + 'T00:00:00').toLocaleDateString()} – {new Date(data.date_range.to + 'T00:00:00').toLocaleDateString()}
|
||||||
</Text>
|
</Text>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ export default defineConfig({
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
server: {
|
server: {
|
||||||
|
allowedHosts: ['app.hoaledgeriq.com'],
|
||||||
host: '0.0.0.0',
|
host: '0.0.0.0',
|
||||||
port: 5173,
|
port: 5173,
|
||||||
proxy: {
|
proxy: {
|
||||||
|
|||||||
18
nginx/certbot-init.conf
Normal file
18
nginx/certbot-init.conf
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Temporary nginx config — used ONLY during the initial certbot certificate
|
||||||
|
# request. Once the cert is obtained, switch to ssl.conf and restart nginx.
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name _;
|
||||||
|
|
||||||
|
# Certbot ACME challenge
|
||||||
|
location /.well-known/acme-challenge/ {
|
||||||
|
root /var/www/certbot;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Return 503 for everything else so it's obvious this is not the real app
|
||||||
|
location / {
|
||||||
|
return 503 "SSL certificate is being provisioned. Try again in a minute.\n";
|
||||||
|
add_header Content-Type text/plain;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -23,21 +23,8 @@ server {
|
|||||||
proxy_cache_bypass $http_upgrade;
|
proxy_cache_bypass $http_upgrade;
|
||||||
}
|
}
|
||||||
|
|
||||||
# AI recommendation endpoint needs a longer timeout (up to 3 minutes)
|
# AI endpoints now return immediately (async processing in background)
|
||||||
location /api/investment-planning/recommendations {
|
# No special timeout needed — kept for documentation purposes
|
||||||
proxy_pass http://backend;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection 'upgrade';
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
proxy_cache_bypass $http_upgrade;
|
|
||||||
proxy_read_timeout 180s;
|
|
||||||
proxy_connect_timeout 10s;
|
|
||||||
proxy_send_timeout 30s;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Everything else -> Vite dev server (frontend)
|
# Everything else -> Vite dev server (frontend)
|
||||||
location / {
|
location / {
|
||||||
|
|||||||
87
nginx/host-production.conf
Normal file
87
nginx/host-production.conf
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
# HOA LedgerIQ — Host-level nginx config (production)
|
||||||
|
#
|
||||||
|
# Copy this file to /etc/nginx/sites-available/app.yourdomain.com
|
||||||
|
# and symlink to /etc/nginx/sites-enabled/:
|
||||||
|
#
|
||||||
|
# sudo cp nginx/host-production.conf /etc/nginx/sites-available/app.yourdomain.com
|
||||||
|
# sudo ln -s /etc/nginx/sites-available/app.yourdomain.com /etc/nginx/sites-enabled/
|
||||||
|
# sudo nginx -t && sudo systemctl reload nginx
|
||||||
|
#
|
||||||
|
# Then obtain an SSL certificate:
|
||||||
|
# sudo certbot --nginx -d app.yourdomain.com
|
||||||
|
#
|
||||||
|
# Replace "app.yourdomain.com" with your actual hostname throughout this file.
|
||||||
|
|
||||||
|
# --- Rate limiting ---
|
||||||
|
# 10 requests/sec per IP for API routes (shared memory zone: 10 MB ≈ 160k IPs)
|
||||||
|
limit_req_zone $binary_remote_addr zone=api_limit:10m rate=10r/s;
|
||||||
|
|
||||||
|
# --- HTTP → HTTPS redirect ---
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name app.yourdomain.com;
|
||||||
|
|
||||||
|
# Let certbot answer ACME challenges
|
||||||
|
location /.well-known/acme-challenge/ {
|
||||||
|
root /var/www/certbot;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Everything else → HTTPS
|
||||||
|
location / {
|
||||||
|
return 301 https://$host$request_uri;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# --- Main HTTPS server ---
|
||||||
|
server {
|
||||||
|
listen 443 ssl;
|
||||||
|
server_name app.yourdomain.com;
|
||||||
|
|
||||||
|
# SSL certificates (managed by certbot)
|
||||||
|
ssl_certificate /etc/letsencrypt/live/app.yourdomain.com/fullchain.pem;
|
||||||
|
ssl_certificate_key /etc/letsencrypt/live/app.yourdomain.com/privkey.pem;
|
||||||
|
|
||||||
|
# Modern TLS settings
|
||||||
|
ssl_protocols TLSv1.2 TLSv1.3;
|
||||||
|
ssl_prefer_server_ciphers on;
|
||||||
|
ssl_ciphers 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384';
|
||||||
|
ssl_session_cache shared:SSL:10m;
|
||||||
|
ssl_session_timeout 10m;
|
||||||
|
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
|
||||||
|
|
||||||
|
# --- Proxy defaults ---
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection "upgrade";
|
||||||
|
|
||||||
|
# Buffer settings — prevent 502s when backend is slow to respond
|
||||||
|
proxy_buffering on;
|
||||||
|
proxy_buffer_size 16k;
|
||||||
|
proxy_buffers 8 16k;
|
||||||
|
proxy_busy_buffers_size 32k;
|
||||||
|
|
||||||
|
# --- API routes → NestJS backend (port 3000) ---
|
||||||
|
location /api/ {
|
||||||
|
limit_req zone=api_limit burst=30 nodelay;
|
||||||
|
|
||||||
|
proxy_pass http://127.0.0.1:3000;
|
||||||
|
proxy_read_timeout 30s;
|
||||||
|
proxy_connect_timeout 5s;
|
||||||
|
proxy_send_timeout 15s;
|
||||||
|
}
|
||||||
|
|
||||||
|
# AI endpoints now return immediately (async processing in background)
|
||||||
|
# No special timeout overrides needed
|
||||||
|
|
||||||
|
# --- Frontend → React SPA served by nginx (port 3001) ---
|
||||||
|
location / {
|
||||||
|
proxy_pass http://127.0.0.1:3001;
|
||||||
|
proxy_read_timeout 10s;
|
||||||
|
proxy_connect_timeout 5s;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
}
|
||||||
|
}
|
||||||
53
nginx/production.conf
Normal file
53
nginx/production.conf
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
upstream backend {
|
||||||
|
server backend:3000;
|
||||||
|
keepalive 32; # reuse connections to backend
|
||||||
|
}
|
||||||
|
|
||||||
|
upstream frontend {
|
||||||
|
server frontend:3001;
|
||||||
|
keepalive 16;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Shared proxy settings
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Connection ""; # enable keepalive to upstreams
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
|
||||||
|
# Buffer settings — prevent 502s when backend is slow to respond
|
||||||
|
proxy_buffering on;
|
||||||
|
proxy_buffer_size 16k;
|
||||||
|
proxy_buffers 8 16k;
|
||||||
|
proxy_busy_buffers_size 32k;
|
||||||
|
|
||||||
|
# Rate limit zone (10 req/s per IP for API)
|
||||||
|
limit_req_zone $binary_remote_addr zone=api_limit:10m rate=10r/s;
|
||||||
|
|
||||||
|
# HTTP server — SSL termination is handled by the host reverse proxy
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name _;
|
||||||
|
|
||||||
|
# --- API routes → backend ---
|
||||||
|
location /api/ {
|
||||||
|
limit_req zone=api_limit burst=30 nodelay;
|
||||||
|
|
||||||
|
proxy_pass http://backend;
|
||||||
|
proxy_read_timeout 30s;
|
||||||
|
proxy_connect_timeout 5s;
|
||||||
|
proxy_send_timeout 15s;
|
||||||
|
}
|
||||||
|
|
||||||
|
# AI endpoints now return immediately (async processing in background)
|
||||||
|
# No special timeout overrides needed
|
||||||
|
|
||||||
|
# --- Static frontend → built React assets ---
|
||||||
|
location / {
|
||||||
|
proxy_pass http://frontend;
|
||||||
|
proxy_read_timeout 10s;
|
||||||
|
proxy_connect_timeout 5s;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
}
|
||||||
|
}
|
||||||
77
nginx/ssl.conf
Normal file
77
nginx/ssl.conf
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
upstream backend {
|
||||||
|
server backend:3000;
|
||||||
|
}
|
||||||
|
|
||||||
|
upstream frontend {
|
||||||
|
server frontend:3001;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Redirect all HTTP to HTTPS
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name _;
|
||||||
|
|
||||||
|
# Let certbot answer ACME challenges over HTTP
|
||||||
|
location /.well-known/acme-challenge/ {
|
||||||
|
root /var/www/certbot;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Everything else -> HTTPS
|
||||||
|
location / {
|
||||||
|
return 301 https://$host$request_uri;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# HTTPS server
|
||||||
|
server {
|
||||||
|
listen 443 ssl;
|
||||||
|
# Replace with your actual hostname:
|
||||||
|
server_name staging.example.com;
|
||||||
|
|
||||||
|
# --- TLS certificates (managed by certbot) ---
|
||||||
|
ssl_certificate /etc/letsencrypt/live/staging.example.com/fullchain.pem;
|
||||||
|
ssl_certificate_key /etc/letsencrypt/live/staging.example.com/privkey.pem;
|
||||||
|
|
||||||
|
# --- Modern TLS settings ---
|
||||||
|
ssl_protocols TLSv1.2 TLSv1.3;
|
||||||
|
ssl_ciphers 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384';
|
||||||
|
ssl_prefer_server_ciphers on;
|
||||||
|
ssl_session_cache shared:SSL:10m;
|
||||||
|
ssl_session_timeout 1d;
|
||||||
|
ssl_session_tickets off;
|
||||||
|
|
||||||
|
# --- Security headers ---
|
||||||
|
add_header Strict-Transport-Security "max-age=63072000; includeSubDomains" always;
|
||||||
|
add_header X-Content-Type-Options nosniff always;
|
||||||
|
add_header X-Frame-Options SAMEORIGIN always;
|
||||||
|
|
||||||
|
# --- Proxy routes (same as default.conf) ---
|
||||||
|
|
||||||
|
# API requests -> NestJS backend
|
||||||
|
location /api/ {
|
||||||
|
proxy_pass http://backend;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection 'upgrade';
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
}
|
||||||
|
|
||||||
|
# AI endpoints now return immediately (async processing in background)
|
||||||
|
# No special timeout overrides needed
|
||||||
|
|
||||||
|
# Everything else -> Vite dev server (frontend)
|
||||||
|
location / {
|
||||||
|
proxy_pass http://frontend;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection 'upgrade';
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
}
|
||||||
|
}
|
||||||
296
scripts/db-backup.sh
Executable file
296
scripts/db-backup.sh
Executable file
@@ -0,0 +1,296 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# db-backup.sh — Backup & restore the HOA LedgerIQ PostgreSQL database
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./scripts/db-backup.sh backup [--dir /path/to/backups] [--keep N]
|
||||||
|
# ./scripts/db-backup.sh restore <file.sql.gz | file.dump.gz>
|
||||||
|
# ./scripts/db-backup.sh list [--dir /path/to/backups]
|
||||||
|
#
|
||||||
|
# Backup produces a gzipped custom-format dump with a timestamped filename:
|
||||||
|
# backups/hoafinance_2026-03-02_140530.dump.gz
|
||||||
|
#
|
||||||
|
# Cron example (daily at 2 AM, keep 30 days):
|
||||||
|
# 0 2 * * * cd /opt/hoa-ledgeriq && ./scripts/db-backup.sh backup --keep 30
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# ---- Defaults ----
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||||
|
PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||||
|
BACKUP_DIR="$PROJECT_DIR/backups"
|
||||||
|
KEEP_DAYS=0 # 0 = keep forever
|
||||||
|
DB_USER="${POSTGRES_USER:-hoafinance}"
|
||||||
|
DB_NAME="${POSTGRES_DB:-hoafinance}"
|
||||||
|
COMPOSE_CMD="docker compose"
|
||||||
|
|
||||||
|
# If running with the SSL override, detect it
|
||||||
|
if [ -f "$PROJECT_DIR/docker-compose.ssl.yml" ] && \
|
||||||
|
docker compose -f "$PROJECT_DIR/docker-compose.yml" \
|
||||||
|
-f "$PROJECT_DIR/docker-compose.ssl.yml" ps --quiet 2>/dev/null | head -1 | grep -q .; then
|
||||||
|
COMPOSE_CMD="docker compose -f $PROJECT_DIR/docker-compose.yml -f $PROJECT_DIR/docker-compose.ssl.yml"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ---- Colors ----
|
||||||
|
RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; CYAN='\033[0;36m'; NC='\033[0m'
|
||||||
|
|
||||||
|
info() { echo -e "${CYAN}[INFO]${NC} $*"; }
|
||||||
|
ok() { echo -e "${GREEN}[OK]${NC} $*"; }
|
||||||
|
warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
|
||||||
|
err() { echo -e "${RED}[ERROR]${NC} $*" >&2; }
|
||||||
|
die() { err "$@"; exit 1; }
|
||||||
|
|
||||||
|
# ---- Helpers ----
|
||||||
|
|
||||||
|
ensure_postgres_running() {
|
||||||
|
if ! $COMPOSE_CMD ps postgres 2>/dev/null | grep -q "running\|Up"; then
|
||||||
|
die "PostgreSQL container is not running. Start it with: docker compose up -d postgres"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
format_size() {
|
||||||
|
local bytes=$1
|
||||||
|
if (( bytes >= 1073741824 )); then printf "%.1f GB" "$(echo "$bytes / 1073741824" | bc -l)"
|
||||||
|
elif (( bytes >= 1048576 )); then printf "%.1f MB" "$(echo "$bytes / 1048576" | bc -l)"
|
||||||
|
elif (( bytes >= 1024 )); then printf "%.1f KB" "$(echo "$bytes / 1024" | bc -l)"
|
||||||
|
else printf "%d B" "$bytes"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# ---- BACKUP ----
|
||||||
|
|
||||||
|
do_backup() {
|
||||||
|
ensure_postgres_running
|
||||||
|
mkdir -p "$BACKUP_DIR"
|
||||||
|
|
||||||
|
local timestamp
|
||||||
|
timestamp="$(date +%Y-%m-%d_%H%M%S)"
|
||||||
|
local filename="${DB_NAME}_${timestamp}.dump.gz"
|
||||||
|
local filepath="$BACKUP_DIR/$filename"
|
||||||
|
|
||||||
|
info "Starting backup of database '${DB_NAME}' ..."
|
||||||
|
|
||||||
|
# pg_dump inside the container, stream through gzip on the host
|
||||||
|
$COMPOSE_CMD exec -T postgres pg_dump \
|
||||||
|
-U "$DB_USER" \
|
||||||
|
-d "$DB_NAME" \
|
||||||
|
--no-owner \
|
||||||
|
--no-privileges \
|
||||||
|
--format=custom \
|
||||||
|
| gzip -9 > "$filepath"
|
||||||
|
|
||||||
|
local size
|
||||||
|
size=$(wc -c < "$filepath" | tr -d ' ')
|
||||||
|
|
||||||
|
if [ "$size" -lt 100 ]; then
|
||||||
|
rm -f "$filepath"
|
||||||
|
die "Backup file is suspiciously small — something went wrong. Check docker compose logs postgres."
|
||||||
|
fi
|
||||||
|
|
||||||
|
ok "Backup complete: ${filepath} ($(format_size "$size"))"
|
||||||
|
|
||||||
|
# ---- Prune old backups ----
|
||||||
|
if [ "$KEEP_DAYS" -gt 0 ]; then
|
||||||
|
local pruned=0
|
||||||
|
while IFS= read -r old_file; do
|
||||||
|
rm -f "$old_file"
|
||||||
|
((pruned++))
|
||||||
|
done < <(find "$BACKUP_DIR" -name "${DB_NAME}_*.dump.gz" -mtime +"$KEEP_DAYS" -type f 2>/dev/null)
|
||||||
|
if [ "$pruned" -gt 0 ]; then
|
||||||
|
info "Pruned $pruned backup(s) older than $KEEP_DAYS days"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# ---- RESTORE ----
|
||||||
|
|
||||||
|
do_restore() {
|
||||||
|
local file="$1"
|
||||||
|
|
||||||
|
# Resolve relative path
|
||||||
|
if [[ "$file" != /* ]]; then
|
||||||
|
file="$(pwd)/$file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
[ -f "$file" ] || die "File not found: $file"
|
||||||
|
|
||||||
|
ensure_postgres_running
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
warn "This will DESTROY the current '${DB_NAME}' database and replace it"
|
||||||
|
warn "with the contents of: $(basename "$file")"
|
||||||
|
echo ""
|
||||||
|
read -rp "Type 'yes' to continue: " confirm
|
||||||
|
[ "$confirm" = "yes" ] || { info "Aborted."; exit 0; }
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
info "Step 1/4 — Terminating active connections ..."
|
||||||
|
$COMPOSE_CMD exec -T postgres psql -U "$DB_USER" -d postgres -c "
|
||||||
|
SELECT pg_terminate_backend(pid)
|
||||||
|
FROM pg_stat_activity
|
||||||
|
WHERE datname = '${DB_NAME}' AND pid <> pg_backend_pid();
|
||||||
|
" > /dev/null 2>&1 || true
|
||||||
|
|
||||||
|
info "Step 2/4 — Dropping and recreating database ..."
|
||||||
|
$COMPOSE_CMD exec -T postgres dropdb -U "$DB_USER" --if-exists "$DB_NAME"
|
||||||
|
$COMPOSE_CMD exec -T postgres createdb -U "$DB_USER" "$DB_NAME"
|
||||||
|
|
||||||
|
info "Step 3/4 — Restoring from $(basename "$file") ..."
|
||||||
|
|
||||||
|
if [[ "$file" == *.dump.gz ]]; then
|
||||||
|
# Custom-format dump, gzipped → decompress and pipe to pg_restore
|
||||||
|
gunzip -c "$file" | $COMPOSE_CMD exec -T postgres pg_restore \
|
||||||
|
-U "$DB_USER" \
|
||||||
|
-d "$DB_NAME" \
|
||||||
|
--no-owner \
|
||||||
|
--no-privileges \
|
||||||
|
--exit-on-error 2>&1 | tail -5 || true
|
||||||
|
|
||||||
|
elif [[ "$file" == *.sql.gz ]]; then
|
||||||
|
# Plain SQL dump, gzipped → decompress and pipe to psql
|
||||||
|
gunzip -c "$file" | $COMPOSE_CMD exec -T postgres psql \
|
||||||
|
-U "$DB_USER" \
|
||||||
|
-d "$DB_NAME" \
|
||||||
|
--quiet 2>&1 | tail -5 || true
|
||||||
|
|
||||||
|
elif [[ "$file" == *.dump ]]; then
|
||||||
|
# Custom-format dump, not compressed
|
||||||
|
$COMPOSE_CMD exec -T postgres pg_restore \
|
||||||
|
-U "$DB_USER" \
|
||||||
|
-d "$DB_NAME" \
|
||||||
|
--no-owner \
|
||||||
|
--no-privileges \
|
||||||
|
--exit-on-error < "$file" 2>&1 | tail -5 || true
|
||||||
|
|
||||||
|
elif [[ "$file" == *.sql ]]; then
|
||||||
|
# Plain SQL dump, not compressed
|
||||||
|
$COMPOSE_CMD exec -T postgres psql \
|
||||||
|
-U "$DB_USER" \
|
||||||
|
-d "$DB_NAME" \
|
||||||
|
--quiet < "$file" 2>&1 | tail -5 || true
|
||||||
|
|
||||||
|
else
|
||||||
|
die "Unsupported file format. Expected .dump.gz, .sql.gz, .dump, or .sql"
|
||||||
|
fi
|
||||||
|
|
||||||
|
info "Step 4/4 — Restarting backend ..."
|
||||||
|
$COMPOSE_CMD restart backend > /dev/null 2>&1
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
ok "Restore complete. Backend restarted."
|
||||||
|
|
||||||
|
# Quick sanity check
|
||||||
|
local tenant_count
|
||||||
|
tenant_count=$($COMPOSE_CMD exec -T postgres psql -U "$DB_USER" -d "$DB_NAME" \
|
||||||
|
-t -c "SELECT count(*) FROM shared.organizations WHERE status = 'active';" 2>/dev/null | tr -d ' ')
|
||||||
|
info "Active tenants found: ${tenant_count:-0}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ---- LIST ----
|
||||||
|
|
||||||
|
do_list() {
|
||||||
|
mkdir -p "$BACKUP_DIR"
|
||||||
|
|
||||||
|
local count=0
|
||||||
|
echo ""
|
||||||
|
printf " %-42s %10s %s\n" "FILENAME" "SIZE" "DATE"
|
||||||
|
printf " %-42s %10s %s\n" "--------" "----" "----"
|
||||||
|
|
||||||
|
while IFS= read -r f; do
|
||||||
|
[ -z "$f" ] && continue
|
||||||
|
local size
|
||||||
|
size=$(wc -c < "$f" | tr -d ' ')
|
||||||
|
local mod_date
|
||||||
|
mod_date=$(date -r "$f" "+%Y-%m-%d %H:%M" 2>/dev/null || stat -c '%y' "$f" 2>/dev/null | cut -d. -f1)
|
||||||
|
printf " %-42s %10s %s\n" "$(basename "$f")" "$(format_size "$size")" "$mod_date"
|
||||||
|
((count++))
|
||||||
|
done < <(find "$BACKUP_DIR" -name "${DB_NAME}_*" -type f 2>/dev/null | sort)
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
if [ "$count" -eq 0 ]; then
|
||||||
|
info "No backups found in $BACKUP_DIR"
|
||||||
|
else
|
||||||
|
info "$count backup(s) in $BACKUP_DIR"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# ---- CLI ----
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat <<EOF
|
||||||
|
HOA LedgerIQ Database Backup & Restore
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
$(basename "$0") backup [--dir DIR] [--keep DAYS] Create a timestamped gzipped backup
|
||||||
|
$(basename "$0") restore FILE Restore from a backup file
|
||||||
|
$(basename "$0") list [--dir DIR] List available backups
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--dir DIR Backup directory (default: ./backups)
|
||||||
|
--keep DAYS Auto-delete backups older than DAYS (default: keep all)
|
||||||
|
|
||||||
|
Supported restore formats:
|
||||||
|
.dump.gz Custom-format pg_dump, gzipped (default backup format)
|
||||||
|
.sql.gz Plain SQL dump, gzipped
|
||||||
|
.dump Custom-format pg_dump, uncompressed
|
||||||
|
.sql Plain SQL dump, uncompressed
|
||||||
|
|
||||||
|
Cron example (daily at 2 AM, retain 30 days):
|
||||||
|
0 2 * * * cd /opt/hoa-ledgeriq && ./scripts/db-backup.sh backup --keep 30
|
||||||
|
|
||||||
|
EOF
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
|
||||||
|
# Parse command
|
||||||
|
COMMAND="${1:-}"
|
||||||
|
shift 2>/dev/null || true
|
||||||
|
|
||||||
|
[ -z "$COMMAND" ] && usage
|
||||||
|
|
||||||
|
# Parse flags
|
||||||
|
RESTORE_FILE=""
|
||||||
|
while [ $# -gt 0 ]; do
|
||||||
|
case "$1" in
|
||||||
|
--dir) BACKUP_DIR="$2"; shift 2 ;;
|
||||||
|
--keep) KEEP_DAYS="$2"; shift 2 ;;
|
||||||
|
--help) usage ;;
|
||||||
|
*)
|
||||||
|
if [ "$COMMAND" = "restore" ] && [ -z "$RESTORE_FILE" ]; then
|
||||||
|
RESTORE_FILE="$1"; shift
|
||||||
|
else
|
||||||
|
die "Unknown argument: $1"
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Load .env if present (for POSTGRES_USER / POSTGRES_DB)
|
||||||
|
if [ -f "$PROJECT_DIR/.env" ]; then
|
||||||
|
set -a
|
||||||
|
# shellcheck disable=SC1091
|
||||||
|
source "$PROJECT_DIR/.env"
|
||||||
|
set +a
|
||||||
|
DB_USER="${POSTGRES_USER:-hoafinance}"
|
||||||
|
DB_NAME="${POSTGRES_DB:-hoafinance}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
case "$COMMAND" in
|
||||||
|
backup)
|
||||||
|
do_backup
|
||||||
|
;;
|
||||||
|
restore)
|
||||||
|
[ -z "$RESTORE_FILE" ] && die "Usage: $(basename "$0") restore <file>"
|
||||||
|
do_restore "$RESTORE_FILE"
|
||||||
|
;;
|
||||||
|
list)
|
||||||
|
do_list
|
||||||
|
;;
|
||||||
|
-h|--help|help)
|
||||||
|
usage
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
die "Unknown command: $COMMAND (try: backup, restore, list)"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
@@ -61,12 +61,10 @@ interface MarketRate {
|
|||||||
*/
|
*/
|
||||||
function parseTermMonths(term: string): number | null {
|
function parseTermMonths(term: string): number | null {
|
||||||
const lower = term.toLowerCase().trim();
|
const lower = term.toLowerCase().trim();
|
||||||
const monthMatch = lower.match(/(\d+)\s*month/);
|
const monthMatch = lower.match(/(\d+)\s*mo(?:nth)?/);
|
||||||
if (monthMatch) return parseInt(monthMatch[1], 10);
|
if (monthMatch) return parseInt(monthMatch[1], 10);
|
||||||
const yearMatch = lower.match(/(\d+)\s*year/);
|
// Handle fractional years like "1.5 years" or "1.5 yr"
|
||||||
if (yearMatch) return parseInt(yearMatch[1], 10) * 12;
|
const fracYearMatch = lower.match(/([\d.]+)\s*y(?:ear|r)/);
|
||||||
// Handle fractional years like "1.5 years"
|
|
||||||
const fracYearMatch = lower.match(/([\d.]+)\s*year/);
|
|
||||||
if (fracYearMatch) return Math.round(parseFloat(fracYearMatch[1]) * 12);
|
if (fracYearMatch) return Math.round(parseFloat(fracYearMatch[1]) * 12);
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@@ -84,10 +82,14 @@ function parseMinDeposit(raw: string): number | null {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Parse an APY string like "4.50%", "4.50% APY" into a number.
|
* Parse an APY string like "4.50%", "4.50% APY" into a number.
|
||||||
|
* Handles edge cases like ".4.50%" (leading period from adjacent text).
|
||||||
*/
|
*/
|
||||||
function parseApy(raw: string): number {
|
function parseApy(raw: string): number {
|
||||||
const cleaned = raw.replace(/[^0-9.]/g, '');
|
// Extract the first valid decimal number (digit-leading) from the string
|
||||||
return parseFloat(cleaned) || 0;
|
const match = raw.match(/(\d+\.?\d*)/);
|
||||||
|
if (!match) return 0;
|
||||||
|
const val = parseFloat(match[1]);
|
||||||
|
return isNaN(val) ? 0 : val;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -98,8 +100,20 @@ function sleep(ms: number): Promise<void> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Navigate to a Bankrate URL and scrape rate data.
|
* Navigate to a Bankrate URL and scrape rate data from individual bank offer cards.
|
||||||
* Reuses an existing browser instance.
|
*
|
||||||
|
* Bankrate uses a card-based layout with two sections:
|
||||||
|
* - .wrt-RateSections-sponsoredoffers (sponsored bank offers)
|
||||||
|
* - .wrt-RateSections-additionaloffers (additional bank offers)
|
||||||
|
*
|
||||||
|
* Each card (.rounded-md) contains:
|
||||||
|
* - Bank name in img[alt] (the logo)
|
||||||
|
* - APY after "APY as of" text
|
||||||
|
* - Min. deposit (CDs) or Min. balance for APY (savings/MM)
|
||||||
|
* - Term (CDs only): e.g. "1yr", "14mo"
|
||||||
|
*
|
||||||
|
* The page also has a summary table (.wealth-product-rate-list) with "best rates"
|
||||||
|
* per term but NO bank names — we explicitly skip this table.
|
||||||
*/
|
*/
|
||||||
async function fetchRatesFromPage(
|
async function fetchRatesFromPage(
|
||||||
browser: Browser,
|
browser: Browser,
|
||||||
@@ -109,7 +123,7 @@ async function fetchRatesFromPage(
|
|||||||
): Promise<MarketRate[]> {
|
): Promise<MarketRate[]> {
|
||||||
const page: Page = await browser.newPage();
|
const page: Page = await browser.newPage();
|
||||||
await page.setUserAgent(
|
await page.setUserAgent(
|
||||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
|
||||||
);
|
);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -120,13 +134,13 @@ async function fetchRatesFromPage(
|
|||||||
timeout: 60000,
|
timeout: 60000,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Wait for rate content to render
|
// Wait for rate card sections to render
|
||||||
console.log('Waiting for rate data to render...');
|
console.log('Waiting for rate cards to render...');
|
||||||
await page.waitForSelector(
|
await page.waitForSelector(
|
||||||
'table, [data-testid*="rate"], .brc-table, [class*="ComparisonTable"], [class*="rate-table"]',
|
'.wrt-RateSections-sponsoredoffers .rounded-md, .wrt-RateSections-additionaloffers .rounded-md',
|
||||||
{ timeout: 30000 },
|
{ timeout: 30000 },
|
||||||
).catch(() => {
|
).catch(() => {
|
||||||
console.log('Primary selectors not found, proceeding with page scan...');
|
console.log('Bankrate card selectors not found, will try fallback...');
|
||||||
});
|
});
|
||||||
|
|
||||||
// Extra wait for dynamic content
|
// Extra wait for dynamic content
|
||||||
@@ -143,7 +157,7 @@ async function fetchRatesFromPage(
|
|||||||
});
|
});
|
||||||
await sleep(2000);
|
await sleep(2000);
|
||||||
|
|
||||||
// Extract rate data from the page
|
// Extract rate data from individual bank offer cards
|
||||||
const rawRates = await page.evaluate((maxRates: number) => {
|
const rawRates = await page.evaluate((maxRates: number) => {
|
||||||
const results: Array<{
|
const results: Array<{
|
||||||
bank_name: string;
|
bank_name: string;
|
||||||
@@ -152,120 +166,114 @@ async function fetchRatesFromPage(
|
|||||||
term_raw: string;
|
term_raw: string;
|
||||||
}> = [];
|
}> = [];
|
||||||
|
|
||||||
// Strategy 1: Look for detailed bank comparison tables
|
// Primary strategy: extract from Bankrate offer cards
|
||||||
const tables = document.querySelectorAll('table');
|
// Both sponsored and additional offer sections use the same card structure
|
||||||
for (const table of tables) {
|
const cards = [
|
||||||
const rows = table.querySelectorAll('tbody tr');
|
...document.querySelectorAll('.wrt-RateSections-sponsoredoffers > .rounded-md'),
|
||||||
if (rows.length < 3) continue;
|
...document.querySelectorAll('.wrt-RateSections-additionaloffers > .rounded-md'),
|
||||||
|
];
|
||||||
|
|
||||||
for (const row of rows) {
|
for (const card of cards) {
|
||||||
const cells = row.querySelectorAll('td, th');
|
const text = card.textContent || '';
|
||||||
if (cells.length < 3) continue;
|
|
||||||
|
|
||||||
const texts = Array.from(cells).map((c) => c.textContent?.trim() || '');
|
// Bank name: from the logo img alt attribute (most reliable)
|
||||||
const apyCell = texts.find((t) => /\d+\.\d+\s*%/.test(t));
|
const img = card.querySelector('img[alt]');
|
||||||
if (!apyCell) continue;
|
let bankName = img ? (img as HTMLImageElement).alt.trim() : '';
|
||||||
|
|
||||||
const bankCell = texts.find(
|
// Fallback: extract from text before "Add to compare"
|
||||||
(t) =>
|
if (!bankName) {
|
||||||
t.length > 3 &&
|
const addIdx = text.indexOf('Add to compare');
|
||||||
!/^\d/.test(t) &&
|
if (addIdx > 0) {
|
||||||
!t.includes('%') &&
|
bankName = text.substring(0, addIdx)
|
||||||
!t.startsWith('$') &&
|
.replace(/Editor's pick/gi, '')
|
||||||
!/^\d+\s*(month|year)/i.test(t),
|
.trim();
|
||||||
);
|
|
||||||
|
|
||||||
const linkEl = row.querySelector('a[href*="review"], a[href*="bank"], img[alt]');
|
|
||||||
const linkName = linkEl?.textContent?.trim() || (linkEl as HTMLImageElement)?.alt || '';
|
|
||||||
|
|
||||||
const name = linkName.length > 3 ? linkName : bankCell || '';
|
|
||||||
if (!name) continue;
|
|
||||||
|
|
||||||
results.push({
|
|
||||||
bank_name: name,
|
|
||||||
apy_raw: apyCell,
|
|
||||||
min_deposit_raw:
|
|
||||||
texts.find((t) => t.includes('$') || /no min/i.test(t)) || '',
|
|
||||||
term_raw: texts.find((t) => /\d+\s*(month|year)/i.test(t)) || '',
|
|
||||||
});
|
|
||||||
|
|
||||||
if (results.length >= maxRates) break;
|
|
||||||
}
|
|
||||||
if (results.length >= 5) break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy 2: Look for card/list layouts
|
|
||||||
if (results.length < 5) {
|
|
||||||
const cardSelectors = [
|
|
||||||
'[class*="product"]',
|
|
||||||
'[class*="offer-card"]',
|
|
||||||
'[class*="rate-card"]',
|
|
||||||
'[class*="ComparisonRow"]',
|
|
||||||
'[class*="comparison-row"]',
|
|
||||||
'[data-testid*="product"]',
|
|
||||||
'[class*="partner"]',
|
|
||||||
];
|
|
||||||
|
|
||||||
for (const selector of cardSelectors) {
|
|
||||||
const cards = document.querySelectorAll(selector);
|
|
||||||
if (cards.length < 3) continue;
|
|
||||||
|
|
||||||
for (const card of cards) {
|
|
||||||
const text = card.textContent || '';
|
|
||||||
if (text.length < 20 || text.length > 2000) continue;
|
|
||||||
|
|
||||||
const apyMatch = text.match(/([\d.]+)\s*%/);
|
|
||||||
if (!apyMatch) continue;
|
|
||||||
|
|
||||||
const nameEl =
|
|
||||||
card.querySelector(
|
|
||||||
'h2, h3, h4, h5, strong, [class*="name"], [class*="bank"], [class*="title"], a[href*="review"], img[alt]',
|
|
||||||
);
|
|
||||||
let bankName = nameEl?.textContent?.trim() || (nameEl as HTMLImageElement)?.alt || '';
|
|
||||||
|
|
||||||
if (!bankName || bankName.length < 3 || /^\d/.test(bankName) || bankName.includes('%')) continue;
|
|
||||||
|
|
||||||
const depositMatch = text.match(/\$[\d,]+/);
|
|
||||||
const termMatch = text.match(/\d+\s*(?:month|year)s?/i);
|
|
||||||
|
|
||||||
results.push({
|
|
||||||
bank_name: bankName,
|
|
||||||
apy_raw: apyMatch[0],
|
|
||||||
min_deposit_raw: depositMatch?.[0] || '',
|
|
||||||
term_raw: termMatch?.[0] || '',
|
|
||||||
});
|
|
||||||
|
|
||||||
if (results.length >= maxRates) break;
|
|
||||||
}
|
}
|
||||||
if (results.length >= 5) break;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Fallback: extract from product name pattern (e.g. "NexBank CD")
|
||||||
|
if (!bankName) {
|
||||||
|
const productMatch = text.match(/^(?:Editor's pick)?\s*([A-Z][\w\s®*.'&-]+?(?:CD|Account|Savings|Money Market))/);
|
||||||
|
if (productMatch) bankName = productMatch[1].trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!bankName || bankName.length < 2) continue;
|
||||||
|
|
||||||
|
// APY: find the percentage that appears after "APY as of" context.
|
||||||
|
// Avoid picking up the Bankrate score (e.g. "4.5 Bankrate CD score").
|
||||||
|
// Use \b or (?<!\d) to avoid capturing leading periods from adjacent text.
|
||||||
|
let apyRaw = '';
|
||||||
|
const apySection = text.match(/APY as of[\s\S]*?(\d+\.?\d*)\s*%/);
|
||||||
|
if (apySection) {
|
||||||
|
apyRaw = apySection[1] + '%';
|
||||||
|
} else {
|
||||||
|
// Broader fallback: find "X.XX% APY" or just "X.XX%"
|
||||||
|
const apyMatch = text.match(/(\d+\.?\d*)\s*%\s*(?:APY)?/);
|
||||||
|
if (apyMatch) apyRaw = apyMatch[1] + '%';
|
||||||
|
}
|
||||||
|
if (!apyRaw) continue;
|
||||||
|
|
||||||
|
// Min. deposit: CDs use "Min. deposit $X", savings/MM use "Min. balance for APY$X"
|
||||||
|
let minDepositRaw = '';
|
||||||
|
const minDepMatch = text.match(/Min\.\s*deposit\s*\$\s*([\d,]+)/i);
|
||||||
|
const minBalMatch = text.match(/Min\.\s*balance\s*for\s*APY\s*\$\s*([\d,.]+)/i);
|
||||||
|
const noMin = /No minimum/i.test(text);
|
||||||
|
if (noMin) {
|
||||||
|
minDepositRaw = '$0';
|
||||||
|
} else if (minDepMatch) {
|
||||||
|
minDepositRaw = '$' + minDepMatch[1];
|
||||||
|
} else if (minBalMatch) {
|
||||||
|
minDepositRaw = '$' + minBalMatch[1];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Term: CDs have terms like "1yr", "14mo", "1.5yr"
|
||||||
|
let termRaw = '';
|
||||||
|
const termMatch = text.match(/Term\s*([\d.]+)\s*(yr|mo|year|month)s?/i);
|
||||||
|
if (termMatch) {
|
||||||
|
const num = termMatch[1];
|
||||||
|
const unit = termMatch[2].toLowerCase();
|
||||||
|
if (unit === 'yr' || unit === 'year') {
|
||||||
|
termRaw = `${num} year${num === '1' ? '' : 's'}`;
|
||||||
|
} else {
|
||||||
|
termRaw = `${num} month${num === '1' ? '' : 's'}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
results.push({
|
||||||
|
bank_name: bankName,
|
||||||
|
apy_raw: apyRaw,
|
||||||
|
min_deposit_raw: minDepositRaw,
|
||||||
|
term_raw: termRaw,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (results.length >= maxRates) break;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Strategy 3: Broad scan for rate-bearing elements
|
// Fallback strategy: if card-based extraction found nothing,
|
||||||
if (results.length < 5) {
|
// scan for any elements with bank-like names and APY percentages.
|
||||||
const allElements = document.querySelectorAll(
|
// This guards against future Bankrate layout changes.
|
||||||
'div, section, article, li',
|
if (results.length === 0) {
|
||||||
|
const fallbackCards = document.querySelectorAll(
|
||||||
|
'[class*="product"], [class*="offer"], [class*="rate-card"], [class*="ComparisonRow"]',
|
||||||
);
|
);
|
||||||
for (const el of allElements) {
|
for (const card of fallbackCards) {
|
||||||
if (el.children.length > 20) continue;
|
const text = card.textContent || '';
|
||||||
const text = el.textContent || '';
|
if (text.length < 20 || text.length > 2000) continue;
|
||||||
if (text.length < 20 || text.length > 500) continue;
|
|
||||||
|
|
||||||
const apyMatch = text.match(/([\d.]+)\s*%\s*(?:APY)?/i);
|
const apyMatch = text.match(/(\d+\.?\d*)\s*%\s*(?:APY)?/);
|
||||||
if (!apyMatch) continue;
|
if (!apyMatch) continue;
|
||||||
|
|
||||||
const bankEl = el.querySelector(
|
const nameEl = card.querySelector('img[alt], h2, h3, h4, h5, [class*="name"], [class*="bank"]');
|
||||||
'h2, h3, h4, h5, strong, b, a[href*="review"]',
|
const bankName = (nameEl as HTMLImageElement)?.alt
|
||||||
);
|
|| nameEl?.textContent?.trim()
|
||||||
let bankName = bankEl?.textContent?.trim() || '';
|
|| '';
|
||||||
if (!bankName || bankName.length < 3 || /^\d/.test(bankName)) continue;
|
if (!bankName || bankName.length < 2 || /^\d/.test(bankName) || bankName.includes('%')) continue;
|
||||||
|
|
||||||
const depositMatch = text.match(/\$[\d,]+/);
|
const depositMatch = text.match(/\$[\d,]+/);
|
||||||
const termMatch = text.match(/\d+\s*(?:month|year)s?/i);
|
const termMatch = text.match(/(\d+)\s*(?:month|year)s?/i);
|
||||||
|
|
||||||
results.push({
|
results.push({
|
||||||
bank_name: bankName,
|
bank_name: bankName,
|
||||||
apy_raw: apyMatch[0],
|
apy_raw: apyMatch[1] + '%',
|
||||||
min_deposit_raw: depositMatch?.[0] || '',
|
min_deposit_raw: depositMatch?.[0] || '',
|
||||||
term_raw: termMatch?.[0] || '',
|
term_raw: termMatch?.[0] || '',
|
||||||
});
|
});
|
||||||
@@ -284,20 +292,26 @@ async function fetchRatesFromPage(
|
|||||||
|
|
||||||
const parsed: MarketRate[] = rawRates
|
const parsed: MarketRate[] = rawRates
|
||||||
.map((r) => {
|
.map((r) => {
|
||||||
let bankName = r.bank_name.replace(/\s+/g, ' ').trim();
|
let bankName = r.bank_name
|
||||||
|
.replace(/\s+/g, ' ')
|
||||||
|
.replace(/Editor's pick/gi, '')
|
||||||
|
.trim();
|
||||||
|
|
||||||
|
// Strip trailing product suffixes to normalize bank name
|
||||||
|
// e.g. "Marcus by Goldman Sachs CD" → "Marcus by Goldman Sachs"
|
||||||
|
bankName = bankName
|
||||||
|
.replace(/\s+(CD|Certificate of Deposit|Money Market|Savings|High[- ]Yield Savings)\s*$/i, '')
|
||||||
|
.trim();
|
||||||
|
|
||||||
const term = isTermProduct ? (r.term_raw || 'N/A') : 'N/A';
|
const term = isTermProduct ? (r.term_raw || 'N/A') : 'N/A';
|
||||||
|
|
||||||
// For CDs: if bank name looks like a term, label it descriptively
|
// Skip entries where bank_name still looks like a term or number (not a real bank)
|
||||||
if (isTermProduct) {
|
if (
|
||||||
const termText = r.term_raw || bankName;
|
/^\d+\s*(month|year)/i.test(bankName) ||
|
||||||
if (
|
/^\$/.test(bankName) ||
|
||||||
/^\d+\s*(month|year)/i.test(bankName) ||
|
bankName.length < 2
|
||||||
/no\s*min/i.test(bankName) ||
|
) {
|
||||||
/^\$/.test(bankName) ||
|
return null;
|
||||||
bankName.length < 4
|
|
||||||
) {
|
|
||||||
bankName = `Top CD Rate - ${termText.replace(/^\d+/, (m: string) => m + ' ')}`.replace(/\s+/g, ' ').trim();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -305,11 +319,11 @@ async function fetchRatesFromPage(
|
|||||||
apy: parseApy(r.apy_raw),
|
apy: parseApy(r.apy_raw),
|
||||||
min_deposit: parseMinDeposit(r.min_deposit_raw),
|
min_deposit: parseMinDeposit(r.min_deposit_raw),
|
||||||
term,
|
term,
|
||||||
term_months: isTermProduct ? parseTermMonths(r.term_raw || bankName) : null,
|
term_months: isTermProduct ? parseTermMonths(r.term_raw) : null,
|
||||||
rate_type: rateType,
|
rate_type: rateType,
|
||||||
};
|
};
|
||||||
})
|
})
|
||||||
.filter((r) => r.bank_name && r.apy > 0);
|
.filter((r): r is MarketRate => r !== null && r.bank_name.length > 0 && r.apy > 0 && r.apy <= 20);
|
||||||
|
|
||||||
// Deduplicate by bank name + term (keep highest APY)
|
// Deduplicate by bank name + term (keep highest APY)
|
||||||
const seen = new Map<string, MarketRate>();
|
const seen = new Map<string, MarketRate>();
|
||||||
@@ -388,9 +402,20 @@ async function main() {
|
|||||||
let browser: Browser | null = null;
|
let browser: Browser | null = null;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
// Use system Chromium if PUPPETEER_EXECUTABLE_PATH is set,
|
||||||
|
// or auto-detect common locations on Linux servers.
|
||||||
|
const executablePath =
|
||||||
|
process.env.PUPPETEER_EXECUTABLE_PATH ||
|
||||||
|
['/usr/bin/chromium-browser', '/usr/bin/chromium', '/usr/bin/google-chrome'].find(
|
||||||
|
(p) => { try { require('fs').accessSync(p); return true; } catch { return false; } },
|
||||||
|
) ||
|
||||||
|
undefined;
|
||||||
|
|
||||||
console.log('\nLaunching headless browser...');
|
console.log('\nLaunching headless browser...');
|
||||||
|
if (executablePath) console.log(`Using browser: ${executablePath}`);
|
||||||
browser = await puppeteer.launch({
|
browser = await puppeteer.launch({
|
||||||
headless: true,
|
headless: true,
|
||||||
|
executablePath,
|
||||||
args: [
|
args: [
|
||||||
'--no-sandbox',
|
'--no-sandbox',
|
||||||
'--disable-setuid-sandbox',
|
'--disable-setuid-sandbox',
|
||||||
|
|||||||
Reference in New Issue
Block a user