Initial release v1.1.0
- Complete MVP for tracking Fidelity brokerage account performance
- Transaction import from CSV with deduplication
- Automatic FIFO position tracking with options support
- Real-time P&L calculations with market data caching
- Dashboard with timeframe filtering (30/90/180 days, 1 year, YTD, all time)
- Docker-based deployment with PostgreSQL backend
- React/TypeScript frontend with TailwindCSS
- FastAPI backend with SQLAlchemy ORM

Features:
- Multi-account support
- Import via CSV upload or filesystem
- Open and closed position tracking
- Balance history charting
- Performance analytics and metrics
- Top trades analysis
- Responsive UI design

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
72
backend/alembic/env.py
Normal file
72
backend/alembic/env.py
Normal file
@@ -0,0 +1,72 @@
|
||||
"""Alembic environment configuration for database migrations."""
|
||||
from logging.config import fileConfig
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
from alembic import context
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Add parent directory to path to import app modules
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
|
||||
|
||||
from app.config import settings
|
||||
from app.database import Base
|
||||
from app.models import Account, Transaction, Position, PositionTransaction
|
||||
|
||||
# Alembic Config object: provides access to the values in alembic.ini.
config = context.config

# Override sqlalchemy.url with the application's own configured database
# URL so credentials are never duplicated inside alembic.ini.
config.set_main_option("sqlalchemy.url", settings.database_url)

# Interpret the config file for Python logging.
# config_file_name is None when Alembic is driven programmatically
# without an .ini file, so guard before calling fileConfig.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Target metadata for autogenerate support.  Importing the model classes
# above (Account, Transaction, ...) registers their tables on Base so
# 'alembic revision --autogenerate' can diff against them.
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """
    Run migrations in 'offline' mode.

    Configures the migration context with just the database URL rather
    than a live Engine, so the SQL can be emitted without any DBAPI
    driver being installed.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """
    Run migrations in 'online' mode.

    Builds an Engine from the [alembic] section of the config and runs
    the migrations over a live database connection.
    """
    # NullPool: this is a one-shot administrative connection, so no
    # connection pooling is wanted.
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Entry point: Alembic selects offline mode when invoked as
# 'alembic upgrade --sql' (SQL script generation); otherwise run the
# migrations against a live database connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
25
backend/alembic/script.py.mako
Normal file
25
backend/alembic/script.py.mako
Normal file
@@ -0,0 +1,25 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
83
backend/alembic/versions/001_initial_schema.py
Normal file
83
backend/alembic/versions/001_initial_schema.py
Normal file
@@ -0,0 +1,83 @@
|
||||
"""Initial schema
|
||||
|
||||
Revision ID: 001_initial_schema
|
||||
Revises:
|
||||
Create Date: 2026-01-20 10:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '001_initial_schema'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the accounts and transactions tables with their indexes."""
    # Create accounts table
    op.create_table(
        'accounts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('account_number', sa.String(length=50), nullable=False),
        sa.Column('account_name', sa.String(length=200), nullable=False),
        # sa.Enum creates a PostgreSQL ENUM type named 'accounttype';
        # downgrade() must drop it explicitly.
        sa.Column('account_type', sa.Enum('CASH', 'MARGIN', name='accounttype'), nullable=False),
        # Database-side timestamps so rows inserted via raw SQL are stamped too.
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_accounts_id'), 'accounts', ['id'], unique=False)
    # Unique: one row per brokerage account number.
    op.create_index(op.f('ix_accounts_account_number'), 'accounts', ['account_number'], unique=True)

    # Create transactions table
    op.create_table(
        'transactions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=False),
        sa.Column('run_date', sa.Date(), nullable=False),
        sa.Column('action', sa.String(length=500), nullable=False),
        sa.Column('symbol', sa.String(length=50), nullable=True),
        sa.Column('description', sa.String(length=500), nullable=True),
        sa.Column('transaction_type', sa.String(length=20), nullable=True),
        # Numeric(20, 8) for quantities/prices; Numeric(20, 2) for money amounts.
        sa.Column('exchange_quantity', sa.Numeric(precision=20, scale=8), nullable=True),
        sa.Column('exchange_currency', sa.String(length=10), nullable=True),
        sa.Column('currency', sa.String(length=10), nullable=True),
        sa.Column('price', sa.Numeric(precision=20, scale=8), nullable=True),
        sa.Column('quantity', sa.Numeric(precision=20, scale=8), nullable=True),
        sa.Column('exchange_rate', sa.Numeric(precision=20, scale=8), nullable=True),
        sa.Column('commission', sa.Numeric(precision=20, scale=2), nullable=True),
        sa.Column('fees', sa.Numeric(precision=20, scale=2), nullable=True),
        sa.Column('accrued_interest', sa.Numeric(precision=20, scale=2), nullable=True),
        sa.Column('amount', sa.Numeric(precision=20, scale=2), nullable=True),
        sa.Column('cash_balance', sa.Numeric(precision=20, scale=2), nullable=True),
        sa.Column('settlement_date', sa.Date(), nullable=True),
        # 64-char hash; the unique index below is what enforces import
        # deduplication at the database level.
        sa.Column('unique_hash', sa.String(length=64), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        # CASCADE: deleting an account removes all of its transactions.
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_transactions_id'), 'transactions', ['id'], unique=False)
    op.create_index(op.f('ix_transactions_account_id'), 'transactions', ['account_id'], unique=False)
    op.create_index(op.f('ix_transactions_run_date'), 'transactions', ['run_date'], unique=False)
    op.create_index(op.f('ix_transactions_symbol'), 'transactions', ['symbol'], unique=False)
    op.create_index(op.f('ix_transactions_unique_hash'), 'transactions', ['unique_hash'], unique=True)
    # Composite indexes for the common per-account query patterns.
    op.create_index('idx_account_date', 'transactions', ['account_id', 'run_date'], unique=False)
    op.create_index('idx_account_symbol', 'transactions', ['account_id', 'symbol'], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop everything created by upgrade(), in reverse dependency order."""
    # transactions references accounts, so it must go first.
    op.drop_index('idx_account_symbol', table_name='transactions')
    op.drop_index('idx_account_date', table_name='transactions')
    op.drop_index(op.f('ix_transactions_unique_hash'), table_name='transactions')
    op.drop_index(op.f('ix_transactions_symbol'), table_name='transactions')
    op.drop_index(op.f('ix_transactions_run_date'), table_name='transactions')
    op.drop_index(op.f('ix_transactions_account_id'), table_name='transactions')
    op.drop_index(op.f('ix_transactions_id'), table_name='transactions')
    op.drop_table('transactions')
    op.drop_index(op.f('ix_accounts_account_number'), table_name='accounts')
    op.drop_index(op.f('ix_accounts_id'), table_name='accounts')
    op.drop_table('accounts')
    # drop_table does not remove the PostgreSQL ENUM type that sa.Enum
    # created, so drop it explicitly.
    op.execute('DROP TYPE accounttype')
|
||||
70
backend/alembic/versions/002_add_positions.py
Normal file
70
backend/alembic/versions/002_add_positions.py
Normal file
@@ -0,0 +1,70 @@
|
||||
"""Add positions tables
|
||||
|
||||
Revision ID: 002_add_positions
|
||||
Revises: 001_initial_schema
|
||||
Create Date: 2026-01-20 15:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '002_add_positions'
|
||||
down_revision = '001_initial_schema'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the positions table and its junction table to transactions."""
    # Create positions table
    op.create_table(
        'positions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=False),
        sa.Column('symbol', sa.String(length=50), nullable=False),
        # Only populated for option positions; NULL for plain stock.
        sa.Column('option_symbol', sa.String(length=100), nullable=True),
        # sa.Enum creates PostgreSQL ENUM types 'positiontype' and
        # 'positionstatus'; downgrade() must drop both explicitly.
        sa.Column('position_type', sa.Enum('STOCK', 'CALL', 'PUT', name='positiontype'), nullable=False),
        sa.Column('status', sa.Enum('OPEN', 'CLOSED', name='positionstatus'), nullable=False),
        sa.Column('open_date', sa.Date(), nullable=False),
        # close_date stays NULL while the position is open.
        sa.Column('close_date', sa.Date(), nullable=True),
        sa.Column('total_quantity', sa.Numeric(precision=20, scale=8), nullable=False),
        sa.Column('avg_entry_price', sa.Numeric(precision=20, scale=8), nullable=True),
        sa.Column('avg_exit_price', sa.Numeric(precision=20, scale=8), nullable=True),
        sa.Column('realized_pnl', sa.Numeric(precision=20, scale=2), nullable=True),
        sa.Column('unrealized_pnl', sa.Numeric(precision=20, scale=2), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        # CASCADE: deleting an account removes its positions.
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_positions_id'), 'positions', ['id'], unique=False)
    op.create_index(op.f('ix_positions_account_id'), 'positions', ['account_id'], unique=False)
    op.create_index(op.f('ix_positions_symbol'), 'positions', ['symbol'], unique=False)
    op.create_index(op.f('ix_positions_option_symbol'), 'positions', ['option_symbol'], unique=False)
    op.create_index(op.f('ix_positions_status'), 'positions', ['status'], unique=False)
    # Composite indexes for the dashboard's per-account open/closed queries.
    op.create_index('idx_account_status', 'positions', ['account_id', 'status'], unique=False)
    op.create_index('idx_account_symbol_status', 'positions', ['account_id', 'symbol', 'status'], unique=False)

    # Create position_transactions junction table (many-to-many link
    # between positions and the transactions that built them).
    op.create_table(
        'position_transactions',
        sa.Column('position_id', sa.Integer(), nullable=False),
        sa.Column('transaction_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['position_id'], ['positions.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['transaction_id'], ['transactions.id'], ondelete='CASCADE'),
        # Composite primary key also prevents duplicate links.
        sa.PrimaryKeyConstraint('position_id', 'transaction_id')
    )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop everything created by upgrade(), in reverse dependency order."""
    # Junction table references positions, so it must go first.
    op.drop_table('position_transactions')
    op.drop_index('idx_account_symbol_status', table_name='positions')
    op.drop_index('idx_account_status', table_name='positions')
    op.drop_index(op.f('ix_positions_status'), table_name='positions')
    op.drop_index(op.f('ix_positions_option_symbol'), table_name='positions')
    op.drop_index(op.f('ix_positions_symbol'), table_name='positions')
    op.drop_index(op.f('ix_positions_account_id'), table_name='positions')
    op.drop_index(op.f('ix_positions_id'), table_name='positions')
    op.drop_table('positions')
    # drop_table does not remove the PostgreSQL ENUM types that sa.Enum
    # created, so drop them explicitly.
    op.execute('DROP TYPE positionstatus')
    op.execute('DROP TYPE positiontype')
|
||||
40
backend/alembic/versions/add_market_prices_table.py
Normal file
40
backend/alembic/versions/add_market_prices_table.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""Add market_prices table for price caching
|
||||
|
||||
Revision ID: 003_market_prices
|
||||
Revises: 002_add_positions
|
||||
Create Date: 2026-01-20 16:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '003_market_prices'
|
||||
down_revision = '002_add_positions'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the market_prices cache table and its indexes.

    One row per symbol (enforced by the unique index below) holding the
    most recently fetched quote and when/where it was fetched.
    """
    # Create market_prices table
    op.create_table(
        'market_prices',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('symbol', sa.String(length=20), nullable=False),
        sa.Column('price', sa.Numeric(precision=20, scale=6), nullable=False),
        # FIX: the original used default=datetime.utcnow / default='yahoo_finance'.
        # A Python-side `default=` emits no DDL from op.create_table — it only
        # applies to inserts routed through this Table object — which left the
        # NOT NULL fetched_at column with no database-level default.  Use
        # server_default so the schema itself supplies the values.
        sa.Column('fetched_at', sa.DateTime(), nullable=False,
                  server_default=sa.text('now()')),
        sa.Column('source', sa.String(length=50), server_default='yahoo_finance'),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes: unique on symbol (one cached row per ticker) plus a
    # composite index supporting freshness lookups by (symbol, fetched_at).
    op.create_index('idx_market_prices_symbol', 'market_prices', ['symbol'], unique=True)
    op.create_index('idx_symbol_fetched', 'market_prices', ['symbol', 'fetched_at'])
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the market_prices table and its indexes (indexes first)."""
    op.drop_index('idx_symbol_fetched', table_name='market_prices')
    op.drop_index('idx_market_prices_symbol', table_name='market_prices')
    op.drop_table('market_prices')
|
||||
Reference in New Issue
Block a user