Initial implementation of Options Sidekick
Full-stack iOS options trading assistant:

- Python FastAPI backend with SQLite, APScheduler (15-minute position monitor), APNs push notifications, and yfinance market data integration
- Signal engine: IV Rank (rolling HV proxy), SMA-50/200, swing-based support/resistance, earnings detection, signal strength scoring, and a noise-resistant SHA hash for change detection
- Recommendation engine: covered call and cash-secured put strike/expiry selection across 0DTE, 1DTE, weekly, and monthly horizons
- REST API: /devices, /portfolio, /recommendations, /positions, /signals, /alerts
- iOS SwiftUI app (iOS 17+): dashboard, recommendations, trades, portfolio, and alerts tabs with push notification deep-linking
- Unit + integration tests for the signal engine and API layer

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
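For reviewers who want to exercise the API by hand, here is a minimal client-flow sketch against the routes added below. It assumes a dev server at http://localhost:8000 and uses a placeholder APNs token, so treat it as illustrative rather than part of the commit:

# Illustrative client flow (assumes a local dev server; the token is a placeholder).
import httpx

BASE = "http://localhost:8000/api/v1"
token = "example-apns-token"

with httpx.Client(base_url=BASE) as client:
    # Register (or refresh) the device token.
    client.post("/devices/register", json={"apns_token": token, "device_name": "Test iPhone"})

    # Authenticated routes identify the device via the X-Device-Token header.
    headers = {"X-Device-Token": token}

    # Full-replace portfolio upload.
    client.post("/portfolio", headers=headers, json=[
        {"ticker": "AAPL", "shares": 200, "cost_basis": 178.50},
    ])

    # Fetch cached recommendations (empty until a refresh or monitor run).
    print(client.get("/recommendations", headers=headers).json())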

backend/.env.example (new file, 7 lines)
@@ -0,0 +1,7 @@
DATABASE_URL=sqlite:///./app.db
APNS_KEY_PATH=./AuthKey_XXXXXXXXXX.p8
APNS_KEY_ID=XXXXXXXXXX
APNS_TEAM_ID=XXXXXXXXXX
APNS_BUNDLE_ID=com.yourname.options-sidekick
APNS_USE_SANDBOX=true
RECOMMENDATION_THROTTLE_MINUTES=5

backend/Procfile (new file, 1 line)
@@ -0,0 +1 @@
web: uvicorn app.main:app --host 0.0.0.0 --port $PORT

backend/app/__init__.py (new file, empty)

backend/app/config.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    database_url: str = "sqlite:///./app.db"
    apns_key_path: str = "./AuthKey_XXXXXXXXXX.p8"
    apns_key_id: str = ""
    apns_team_id: str = ""
    apns_bundle_id: str = "com.yourname.options-sidekick"
    apns_use_sandbox: bool = True
    recommendation_throttle_minutes: int = 5
    monitor_interval_seconds: int = 900  # 15 minutes

    class Config:
        env_file = ".env"
        env_file_encoding = "utf-8"


settings = Settings()

backend/app/database.py (new file, 27 lines)
@@ -0,0 +1,27 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, DeclarativeBase
from app.config import settings

engine = create_engine(
    settings.database_url,
    connect_args={"check_same_thread": False} if "sqlite" in settings.database_url else {},
)

SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


class Base(DeclarativeBase):
    pass


def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


def init_db():
    from app.models.db_models import Device, StockPosition, OptionPosition, Recommendation, Alert  # noqa: F401
    Base.metadata.create_all(bind=engine)

backend/app/main.py (new file, 54 lines)
@@ -0,0 +1,54 @@
from contextlib import asynccontextmanager
from datetime import datetime
from typing import AsyncGenerator

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from app.database import init_db
from app.scheduler import start_scheduler, stop_scheduler, scheduler
from app.routers import devices, portfolio, recommendations, positions, signals, alerts
from app.models.schemas import HealthResponse


@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator:
    # Startup
    init_db()
    start_scheduler()
    yield
    # Shutdown
    stop_scheduler()


app = FastAPI(
    title="Options Sidekick",
    description="Covered call and cash-secured put recommendation engine",
    version="1.0.0",
    lifespan=lifespan,
)

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# Routers
app.include_router(devices.router, prefix="/api/v1")
app.include_router(portfolio.router, prefix="/api/v1")
app.include_router(recommendations.router, prefix="/api/v1")
app.include_router(positions.router, prefix="/api/v1")
app.include_router(signals.router, prefix="/api/v1")
app.include_router(alerts.router, prefix="/api/v1")


@app.get("/api/v1/health", response_model=HealthResponse, tags=["health"])
def health():
    from app.services.position_monitor import last_run
    return HealthResponse(
        status="ok",
        scheduler_running=scheduler.running,
        last_run=last_run,
    )

backend/app/models/__init__.py (new file, empty)

backend/app/models/db_models.py (new file, 95 lines)
@@ -0,0 +1,95 @@
from datetime import datetime, date
from sqlalchemy import Integer, String, Float, Boolean, DateTime, Date, ForeignKey, UniqueConstraint
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.database import Base


class Device(Base):
    __tablename__ = "devices"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    apns_token: Mapped[str] = mapped_column(String, unique=True, index=True)
    device_name: Mapped[str | None] = mapped_column(String, nullable=True)
    registered_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    last_seen: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    stock_positions: Mapped[list["StockPosition"]] = relationship("StockPosition", back_populates="device", cascade="all, delete-orphan")
    option_positions: Mapped[list["OptionPosition"]] = relationship("OptionPosition", back_populates="device", cascade="all, delete-orphan")
    alerts: Mapped[list["Alert"]] = relationship("Alert", back_populates="device", cascade="all, delete-orphan")


class StockPosition(Base):
    __tablename__ = "stock_positions"
    __table_args__ = (UniqueConstraint("device_id", "ticker", name="uq_device_ticker"),)

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    device_id: Mapped[int] = mapped_column(Integer, ForeignKey("devices.id"), nullable=False)
    ticker: Mapped[str] = mapped_column(String, nullable=False)
    shares: Mapped[int] = mapped_column(Integer, nullable=False)
    cost_basis: Mapped[float | None] = mapped_column(Float, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    device: Mapped["Device"] = relationship("Device", back_populates="stock_positions")


class OptionPosition(Base):
    __tablename__ = "option_positions"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    device_id: Mapped[int] = mapped_column(Integer, ForeignKey("devices.id"), nullable=False)
    ticker: Mapped[str] = mapped_column(String, nullable=False)
    strategy: Mapped[str] = mapped_column(String, nullable=False)  # covered_call | cash_secured_put
    strike: Mapped[float] = mapped_column(Float, nullable=False)
    expiration: Mapped[date] = mapped_column(Date, nullable=False)
    premium_received: Mapped[float] = mapped_column(Float, nullable=False)
    contracts: Mapped[int] = mapped_column(Integer, default=1)
    status: Mapped[str] = mapped_column(String, default="open")  # open | closed | rolled
    close_reason: Mapped[str | None] = mapped_column(String, nullable=True)  # expired | bought_back | rolled
    opened_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    closed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    last_signal_hash: Mapped[str | None] = mapped_column(String(16), nullable=True)

    device: Mapped["Device"] = relationship("Device", back_populates="option_positions")
    alerts: Mapped[list["Alert"]] = relationship("Alert", back_populates="option_position")


class Recommendation(Base):
    __tablename__ = "recommendations"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    device_id: Mapped[int] = mapped_column(Integer, ForeignKey("devices.id"), nullable=False)
    ticker: Mapped[str] = mapped_column(String, nullable=False)
    strategy: Mapped[str] = mapped_column(String, nullable=False)  # covered_call | cash_secured_put
    time_horizon: Mapped[str] = mapped_column(String, nullable=False)  # 0dte | 1dte | weekly | monthly
    current_price: Mapped[float] = mapped_column(Float, nullable=False)
    recommended_strike: Mapped[float] = mapped_column(Float, nullable=False)
    recommended_expiration: Mapped[date] = mapped_column(Date, nullable=False)
    estimated_premium: Mapped[float] = mapped_column(Float, nullable=False)
    delta: Mapped[float] = mapped_column(Float, nullable=False)
    theta: Mapped[float] = mapped_column(Float, nullable=False)
    iv_rank: Mapped[float] = mapped_column(Float, nullable=False)
    signal_strength: Mapped[str] = mapped_column(String, nullable=False)  # strong | moderate | weak
    earnings_warning: Mapped[bool] = mapped_column(Boolean, default=False)
    earnings_date: Mapped[date | None] = mapped_column(Date, nullable=True)
    rationale: Mapped[str] = mapped_column(String, nullable=False)
    signal_hash: Mapped[str] = mapped_column(String(16), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)


class Alert(Base):
    __tablename__ = "alerts"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    device_id: Mapped[int] = mapped_column(Integer, ForeignKey("devices.id"), nullable=False)
    option_position_id: Mapped[int | None] = mapped_column(Integer, ForeignKey("option_positions.id"), nullable=True)
    ticker: Mapped[str] = mapped_column(String, nullable=False)
    alert_type: Mapped[str] = mapped_column(String, nullable=False)  # close_early | roll_out | roll_up_down | earnings_warning | new_rec
    message: Mapped[str] = mapped_column(String, nullable=False)
    old_signal_hash: Mapped[str | None] = mapped_column(String(16), nullable=True)
    new_signal_hash: Mapped[str | None] = mapped_column(String(16), nullable=True)
    sent_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    acknowledged: Mapped[bool] = mapped_column(Boolean, default=False)

    device: Mapped["Device"] = relationship("Device", back_populates="alerts")
    option_position: Mapped["OptionPosition | None"] = relationship("OptionPosition", back_populates="alerts")

backend/app/models/schemas.py (new file, 166 lines)
@@ -0,0 +1,166 @@
from datetime import datetime, date
from pydantic import BaseModel, field_validator


# ─── Device ───────────────────────────────────────────────────────────────────

class DeviceRegister(BaseModel):
    apns_token: str
    device_name: str | None = None


class DeviceResponse(BaseModel):
    id: int
    apns_token: str
    device_name: str | None
    registered_at: datetime

    model_config = {"from_attributes": True}


# ─── Stock Portfolio ───────────────────────────────────────────────────────────

class StockPositionCreate(BaseModel):
    ticker: str
    shares: int
    cost_basis: float | None = None

    @field_validator("ticker")
    @classmethod
    def uppercase_ticker(cls, v: str) -> str:
        return v.upper().strip()

    @field_validator("shares")
    @classmethod
    def positive_shares(cls, v: int) -> int:
        if v <= 0:
            raise ValueError("shares must be positive")
        return v


class StockPositionResponse(BaseModel):
    id: int
    ticker: str
    shares: int
    cost_basis: float | None
    created_at: datetime

    model_config = {"from_attributes": True}


# ─── Option Position ───────────────────────────────────────────────────────────

class OptionPositionCreate(BaseModel):
    ticker: str
    strategy: str  # covered_call | cash_secured_put
    strike: float
    expiration: date
    premium_received: float
    contracts: int = 1

    @field_validator("ticker")
    @classmethod
    def uppercase_ticker(cls, v: str) -> str:
        return v.upper().strip()

    @field_validator("strategy")
    @classmethod
    def valid_strategy(cls, v: str) -> str:
        if v not in ("covered_call", "cash_secured_put"):
            raise ValueError("strategy must be 'covered_call' or 'cash_secured_put'")
        return v

    @field_validator("contracts")
    @classmethod
    def positive_contracts(cls, v: int) -> int:
        if v <= 0:
            raise ValueError("contracts must be positive")
        return v


class OptionPositionClose(BaseModel):
    status: str  # closed | rolled
    close_reason: str | None = None


class OptionPositionResponse(BaseModel):
    id: int
    ticker: str
    strategy: str
    strike: float
    expiration: date
    premium_received: float
    contracts: int
    status: str
    close_reason: str | None
    opened_at: datetime
    closed_at: datetime | None
    last_signal_hash: str | None

    model_config = {"from_attributes": True}


# ─── Signals ──────────────────────────────────────────────────────────────────

class SignalSnapshot(BaseModel):
    ticker: str
    current_price: float
    iv_rank: float
    sma_50: float
    sma_200: float
    nearest_support: float | None
    nearest_resistance: float | None
    trend: str  # uptrend | downtrend | sideways
    earnings_date: date | None
    computed_at: datetime


# ─── Recommendations ──────────────────────────────────────────────────────────

class RecommendationResponse(BaseModel):
    id: int
    ticker: str
    strategy: str
    time_horizon: str
    current_price: float
    recommended_strike: float
    recommended_expiration: date
    estimated_premium: float
    delta: float
    theta: float
    iv_rank: float
    signal_strength: str
    earnings_warning: bool
    earnings_date: date | None
    rationale: str
    signal_hash: str
    created_at: datetime

    model_config = {"from_attributes": True}


class RecommendationWithSignals(BaseModel):
    recommendation: RecommendationResponse
    signals: SignalSnapshot


# ─── Alerts ───────────────────────────────────────────────────────────────────

class AlertResponse(BaseModel):
    id: int
    ticker: str
    option_position_id: int | None
    alert_type: str
    message: str
    sent_at: datetime
    acknowledged: bool

    model_config = {"from_attributes": True}


# ─── Health ───────────────────────────────────────────────────────────────────

class HealthResponse(BaseModel):
    status: str
    scheduler_running: bool
    last_run: datetime | None
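
The request schemas above normalize and reject bad input at the edge; a quick illustrative check of the validators (not part of the commit, just a sketch of how they behave):

from app.models.schemas import StockPositionCreate

pos = StockPositionCreate(ticker=" aapl ", shares=100)
print(pos.ticker)   # "AAPL" — uppercased and stripped by the ticker validator

try:
    StockPositionCreate(ticker="MSFT", shares=0)
except ValueError as e:
    print(e)        # pydantic surfaces "shares must be positive"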

backend/app/routers/__init__.py (new file, empty)

backend/app/routers/alerts.py (new file, 46 lines)
@@ -0,0 +1,46 @@
from fastapi import APIRouter, Depends, Header, HTTPException, Query
from sqlalchemy.orm import Session

from app.database import get_db
from app.models.db_models import Device, Alert
from app.models.schemas import AlertResponse

router = APIRouter(prefix="/alerts", tags=["alerts"])


def _get_device(x_device_token: str = Header(...), db: Session = Depends(get_db)) -> Device:
    device = db.query(Device).filter(Device.apns_token == x_device_token).first()
    if not device:
        raise HTTPException(status_code=404, detail="Device not registered.")
    return device


@router.get("", response_model=list[AlertResponse])
def get_alerts(
    unread_only: bool = Query(False),
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    query = db.query(Alert).filter(Alert.device_id == device.id)
    if unread_only:
        query = query.filter(Alert.acknowledged == False)  # noqa: E712
    return query.order_by(Alert.sent_at.desc()).limit(100).all()


@router.patch("/{alert_id}/acknowledge", response_model=AlertResponse)
def acknowledge_alert(
    alert_id: int,
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    alert = (
        db.query(Alert)
        .filter(Alert.id == alert_id, Alert.device_id == device.id)
        .first()
    )
    if not alert:
        raise HTTPException(status_code=404, detail="Alert not found.")
    alert.acknowledged = True
    db.commit()
    db.refresh(alert)
    return alert

backend/app/routers/devices.py (new file, 29 lines)
@@ -0,0 +1,29 @@
from datetime import datetime
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session

from app.database import get_db
from app.models.db_models import Device
from app.models.schemas import DeviceRegister, DeviceResponse

router = APIRouter(prefix="/devices", tags=["devices"])


@router.post("/register", response_model=DeviceResponse)
def register_device(body: DeviceRegister, db: Session = Depends(get_db)):
    """Register or refresh an APNs device token."""
    device = db.query(Device).filter(Device.apns_token == body.apns_token).first()

    if device:
        device.device_name = body.device_name or device.device_name
        device.last_seen = datetime.utcnow()
    else:
        device = Device(
            apns_token=body.apns_token,
            device_name=body.device_name,
        )
        db.add(device)

    db.commit()
    db.refresh(device)
    return device

backend/app/routers/portfolio.py (new file, 64 lines)
@@ -0,0 +1,64 @@
from fastapi import APIRouter, Depends, Header, HTTPException
from sqlalchemy.orm import Session

from app.database import get_db
from app.models.db_models import Device, StockPosition
from app.models.schemas import StockPositionCreate, StockPositionResponse

router = APIRouter(prefix="/portfolio", tags=["portfolio"])


def _get_device(x_device_token: str = Header(...), db: Session = Depends(get_db)) -> Device:
    device = db.query(Device).filter(Device.apns_token == x_device_token).first()
    if not device:
        raise HTTPException(status_code=404, detail="Device not registered. Call /devices/register first.")
    return device


@router.get("", response_model=list[StockPositionResponse])
def get_portfolio(device: Device = Depends(_get_device), db: Session = Depends(get_db)):
    return db.query(StockPosition).filter(StockPosition.device_id == device.id).all()


@router.post("", response_model=list[StockPositionResponse])
def set_portfolio(
    positions: list[StockPositionCreate],
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    """Full replace — client sends complete list of stock holdings."""
    # Delete all existing for this device
    db.query(StockPosition).filter(StockPosition.device_id == device.id).delete()

    new_positions = []
    for p in positions:
        sp = StockPosition(
            device_id=device.id,
            ticker=p.ticker,
            shares=p.shares,
            cost_basis=p.cost_basis,
        )
        db.add(sp)
        new_positions.append(sp)

    db.commit()
    for sp in new_positions:
        db.refresh(sp)
    return new_positions


@router.delete("/{ticker}", status_code=204)
def delete_ticker(
    ticker: str,
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    ticker = ticker.upper()
    deleted = (
        db.query(StockPosition)
        .filter(StockPosition.device_id == device.id, StockPosition.ticker == ticker)
        .delete()
    )
    if not deleted:
        raise HTTPException(status_code=404, detail=f"Ticker {ticker} not in portfolio")
    db.commit()

backend/app/routers/positions.py (new file, 78 lines)
@@ -0,0 +1,78 @@
from datetime import datetime
from fastapi import APIRouter, Depends, Header, HTTPException, Query
from sqlalchemy.orm import Session

from app.database import get_db
from app.models.db_models import Device, OptionPosition
from app.models.schemas import OptionPositionCreate, OptionPositionClose, OptionPositionResponse

router = APIRouter(prefix="/positions", tags=["positions"])


def _get_device(x_device_token: str = Header(...), db: Session = Depends(get_db)) -> Device:
    device = db.query(Device).filter(Device.apns_token == x_device_token).first()
    if not device:
        raise HTTPException(status_code=404, detail="Device not registered.")
    return device


@router.get("", response_model=list[OptionPositionResponse])
def get_positions(
    status: str | None = Query(None),
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    query = db.query(OptionPosition).filter(OptionPosition.device_id == device.id)
    if status:
        query = query.filter(OptionPosition.status == status)
    return query.order_by(OptionPosition.opened_at.desc()).all()


@router.post("", response_model=OptionPositionResponse, status_code=201)
def log_position(
    body: OptionPositionCreate,
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    position = OptionPosition(
        device_id=device.id,
        ticker=body.ticker,
        strategy=body.strategy,
        strike=body.strike,
        expiration=body.expiration,
        premium_received=body.premium_received,
        contracts=body.contracts,
        status="open",
    )
    db.add(position)
    db.commit()
    db.refresh(position)
    return position


@router.patch("/{position_id}", response_model=OptionPositionResponse)
def close_position(
    position_id: int,
    body: OptionPositionClose,
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    position = (
        db.query(OptionPosition)
        .filter(OptionPosition.id == position_id, OptionPosition.device_id == device.id)
        .first()
    )
    if not position:
        raise HTTPException(status_code=404, detail="Position not found.")

    valid_statuses = ("closed", "rolled")
    if body.status not in valid_statuses:
        raise HTTPException(status_code=422, detail=f"status must be one of {valid_statuses}")

    position.status = body.status
    position.close_reason = body.close_reason
    position.closed_at = datetime.utcnow()

    db.commit()
    db.refresh(position)
    return position

backend/app/routers/recommendations.py (new file, 141 lines)
@@ -0,0 +1,141 @@
from datetime import datetime, timedelta
from fastapi import APIRouter, Depends, Header, HTTPException, Query
from sqlalchemy.orm import Session

from app.database import get_db
from app.models.db_models import Device, StockPosition, Recommendation
from app.models.schemas import RecommendationResponse, RecommendationWithSignals
from app.services.signal_engine import compute_signals
from app.services.recommendation_engine import build_recommendation
from app.config import settings

router = APIRouter(prefix="/recommendations", tags=["recommendations"])


def _get_device(x_device_token: str = Header(...), db: Session = Depends(get_db)) -> Device:
    device = db.query(Device).filter(Device.apns_token == x_device_token).first()
    if not device:
        raise HTTPException(status_code=404, detail="Device not registered.")
    return device


@router.get("", response_model=list[RecommendationResponse])
def get_recommendations(
    time_horizon: str | None = Query(None),
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    """Return latest cached recommendations for all portfolio tickers."""
    query = db.query(Recommendation).filter(Recommendation.device_id == device.id)
    if time_horizon:
        query = query.filter(Recommendation.time_horizon == time_horizon)
    return query.order_by(Recommendation.created_at.desc()).all()


@router.get("/{ticker}", response_model=RecommendationWithSignals)
def get_recommendation_for_ticker(
    ticker: str,
    time_horizon: str = Query("weekly"),
    strategy: str = Query("covered_call"),
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    """Return fresh recommendation + signals for a specific ticker."""
    ticker = ticker.upper()

    snap = compute_signals(ticker)
    if snap is None:
        raise HTTPException(status_code=503, detail=f"Could not fetch market data for {ticker}")

    rec = build_recommendation(
        device_id=device.id,
        ticker=ticker,
        strategy=strategy,
        time_horizon=time_horizon,
        snapshot=snap,
    )
    if rec is None:
        raise HTTPException(
            status_code=404,
            detail=f"No qualifying options found for {ticker} {strategy} {time_horizon}",
        )

    # Persist recommendation
    existing = (
        db.query(Recommendation)
        .filter(
            Recommendation.device_id == device.id,
            Recommendation.ticker == ticker,
            Recommendation.strategy == strategy,
            Recommendation.time_horizon == time_horizon,
        )
        .first()
    )
    if existing:
        db.delete(existing)
    db.add(rec)
    db.commit()
    db.refresh(rec)

    return RecommendationWithSignals(recommendation=RecommendationResponse.model_validate(rec), signals=snap)


@router.post("/refresh", response_model=list[RecommendationResponse])
def refresh_recommendations(
    device: Device = Depends(_get_device),
    db: Session = Depends(get_db),
):
    """
    On-demand recalculation for all portfolio tickers.
    Throttled: no-ops if last refresh was less than THROTTLE_MINUTES ago.
    """
    throttle = timedelta(minutes=settings.recommendation_throttle_minutes)
    most_recent = (
        db.query(Recommendation)
        .filter(Recommendation.device_id == device.id)
        .order_by(Recommendation.created_at.desc())
        .first()
    )
    if most_recent and (datetime.utcnow() - most_recent.created_at) < throttle:
        return db.query(Recommendation).filter(Recommendation.device_id == device.id).all()

    stock_positions = db.query(StockPosition).filter(StockPosition.device_id == device.id).all()
    if not stock_positions:
        return []

    results = []
    for sp in stock_positions:
        snap = compute_signals(sp.ticker)
        if snap is None:
            continue
        for strategy in ("covered_call", "cash_secured_put"):
            for horizon in ("weekly", "monthly"):
                rec = build_recommendation(
                    device_id=device.id,
                    ticker=sp.ticker,
                    strategy=strategy,
                    time_horizon=horizon,
                    snapshot=snap,
                )
                if rec is None:
                    continue

                existing = (
                    db.query(Recommendation)
                    .filter(
                        Recommendation.device_id == device.id,
                        Recommendation.ticker == sp.ticker,
                        Recommendation.strategy == strategy,
                        Recommendation.time_horizon == horizon,
                    )
                    .first()
                )
                if existing:
                    db.delete(existing)
                db.add(rec)
                results.append(rec)

    db.commit()
    for r in results:
        db.refresh(r)
    return results

backend/app/routers/signals.py (new file, 14 lines)
@@ -0,0 +1,14 @@
from fastapi import APIRouter, HTTPException
from app.models.schemas import SignalSnapshot
from app.services.signal_engine import compute_signals

router = APIRouter(prefix="/signals", tags=["signals"])


@router.get("/{ticker}", response_model=SignalSnapshot)
def get_signals(ticker: str):
    ticker = ticker.upper()
    snap = compute_signals(ticker)
    if snap is None:
        raise HTTPException(status_code=503, detail=f"Could not fetch market data for {ticker}")
    return snap

backend/app/scheduler.py (new file, 38 lines)
@@ -0,0 +1,38 @@
"""
scheduler.py — APScheduler configuration.

The scheduler is started in main.py's lifespan event.
"""

import asyncio
import logging

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.interval import IntervalTrigger

from app.config import settings

logger = logging.getLogger(__name__)

scheduler = AsyncIOScheduler()


def start_scheduler():
    from app.services.position_monitor import monitor_all_positions

    scheduler.add_job(
        monitor_all_positions,
        trigger=IntervalTrigger(seconds=settings.monitor_interval_seconds),
        id="position_monitor",
        name="Position Monitor",
        replace_existing=True,
        misfire_grace_time=60,
    )
    scheduler.start()
    logger.info(f"Scheduler started — monitor interval: {settings.monitor_interval_seconds}s")


def stop_scheduler():
    if scheduler.running:
        scheduler.shutdown(wait=False)
        logger.info("Scheduler stopped")

backend/app/services/__init__.py (new file, empty)

backend/app/services/apns_service.py (new file, 124 lines)
@@ -0,0 +1,124 @@
"""
apns_service.py — Send push notifications via APNs HTTP/2.

Uses JWT authentication with a .p8 key. JWT is cached for 50 minutes
and auto-renewed before the 60-minute APNs expiry.
"""

import json
import logging
import time
from datetime import datetime, timedelta
from typing import Optional

import httpx
import jwt as pyjwt

from app.config import settings

logger = logging.getLogger(__name__)

_APNS_HOST_PROD = "https://api.push.apple.com"
_APNS_HOST_SANDBOX = "https://api.sandbox.push.apple.com"

# Cached JWT state
_jwt_token: Optional[str] = None
_jwt_issued_at: Optional[float] = None
_JWT_TTL_SECONDS = 50 * 60  # 50 minutes — renew before Apple's 60-min limit


def _get_apns_host() -> str:
    return _APNS_HOST_SANDBOX if settings.apns_use_sandbox else _APNS_HOST_PROD


def _load_private_key() -> str:
    try:
        with open(settings.apns_key_path, "r") as f:
            return f.read()
    except FileNotFoundError:
        logger.warning(f"APNs key not found at {settings.apns_key_path} — push disabled")
        return ""


def _get_jwt() -> Optional[str]:
    global _jwt_token, _jwt_issued_at

    now = time.time()
    if _jwt_token and _jwt_issued_at and (now - _jwt_issued_at) < _JWT_TTL_SECONDS:
        return _jwt_token

    private_key = _load_private_key()
    if not private_key:
        return None

    if not settings.apns_key_id or not settings.apns_team_id:
        logger.warning("APNs key ID or team ID not configured — push disabled")
        return None

    payload = {
        "iss": settings.apns_team_id,
        "iat": int(now),
    }
    headers = {
        "alg": "ES256",
        "kid": settings.apns_key_id,
    }

    try:
        token = pyjwt.encode(payload, private_key, algorithm="ES256", headers=headers)
        _jwt_token = token if isinstance(token, str) else token.decode("utf-8")
        _jwt_issued_at = now
        return _jwt_token
    except Exception as e:
        logger.error(f"Failed to generate APNs JWT: {e}")
        return None


async def send_push(
    apns_token: str,
    title: str,
    body: str,
    payload: Optional[dict] = None,
) -> bool:
    """
    Send a push notification to a device.
    Returns True on success, False on failure.
    """
    jwt_token = _get_jwt()
    if not jwt_token:
        logger.warning("APNs push skipped — no valid JWT (check .p8 key config)")
        return False

    apns_payload = {
        "aps": {
            "alert": {"title": title, "body": body},
            "badge": 1,
            "sound": "default",
            "category": "POSITION_ALERT",
        }
    }
    if payload:
        apns_payload.update(payload)

    url = f"{_get_apns_host()}/3/device/{apns_token}"
    headers = {
        "authorization": f"bearer {jwt_token}",
        "apns-topic": settings.apns_bundle_id,
        "apns-push-type": "alert",
        "content-type": "application/json",
    }

    try:
        async with httpx.AsyncClient(http2=True, timeout=10.0) as client:
            response = await client.post(url, headers=headers, content=json.dumps(apns_payload))

            if response.status_code == 200:
                logger.info(f"Push sent to {apns_token[:8]}... — {title}")
                return True
            else:
                logger.error(f"APNs rejected push: {response.status_code} {response.text}")
                return False

    except Exception as e:
        logger.error(f"APNs push failed: {e}")
        return False
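
A quick manual test of the push path, assuming valid .p8 credentials in .env and a real device token (without them it logs a warning and returns False); illustrative only, not part of the commit:

import asyncio
from app.services.apns_service import send_push

ok = asyncio.run(send_push(
    apns_token="device-token-here",   # placeholder
    title="AAPL Covered Call — Action Needed",
    body="Delta has risen to 0.48 — consider closing early.",
    payload={"alert_type": "close_early", "ticker": "AAPL", "position_id": 1},
))
print("delivered:", ok)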

backend/app/services/market_data.py (new file, 198 lines)
@@ -0,0 +1,198 @@
"""
market_data.py — yfinance wrapper with 15-minute in-memory cache.

All other services call through this module. Never import yfinance directly
outside of here — keeps caching and error handling centralized.
"""

import logging
from datetime import datetime, date, timedelta
from functools import lru_cache
from typing import Optional
import time

import pandas as pd
import yfinance as yf

logger = logging.getLogger(__name__)

# ─── Simple time-bucketed cache ───────────────────────────────────────────────
# Key: (ticker, cache_bucket) where cache_bucket = int(time() // 900)
_CACHE: dict = {}
_CACHE_TTL_SECONDS = 900  # 15 minutes


def _bucket() -> int:
    return int(time.time() // _CACHE_TTL_SECONDS)


def _cache_get(key: tuple):
    return _CACHE.get((key, _bucket()))


def _cache_set(key: tuple, value):
    _CACHE[(key, _bucket())] = value
    # Prune old entries to avoid unbounded growth
    current = _bucket()
    stale = [k for k in list(_CACHE) if k[1] < current - 1]
    for k in stale:
        del _CACHE[k]


# ─── Price history ─────────────────────────────────────────────────────────────

def get_price_history(ticker: str, period: str = "1y") -> Optional[pd.DataFrame]:
    """
    Return OHLCV DataFrame for the requested period (default one year).
    Columns: Open, High, Low, Close, Volume
    Index: DatetimeIndex
    Returns None on failure.
    """
    cache_key = ("price_history", ticker, period)
    cached = _cache_get(cache_key)
    if cached is not None:
        return cached

    try:
        df = yf.download(ticker, period=period, auto_adjust=True, progress=False)
        if df.empty:
            logger.warning(f"Empty price history for {ticker}")
            return None
        # Flatten multi-level columns if present (yfinance sometimes returns them)
        if isinstance(df.columns, pd.MultiIndex):
            df.columns = df.columns.get_level_values(0)
        _cache_set(cache_key, df)
        return df
    except Exception as e:
        logger.error(f"Failed to fetch price history for {ticker}: {e}")
        return None


# ─── Current price ─────────────────────────────────────────────────────────────

def get_current_price(ticker: str) -> Optional[float]:
    cache_key = ("current_price", ticker)
    cached = _cache_get(cache_key)
    if cached is not None:
        return cached

    try:
        t = yf.Ticker(ticker)
        price = t.fast_info.get("last_price") or t.fast_info.get("previousClose")
        if price is None:
            hist = get_price_history(ticker, period="5d")
            if hist is not None and not hist.empty:
                price = float(hist["Close"].iloc[-1])
        if price is not None:
            price = float(price)
            _cache_set(cache_key, price)
        return price
    except Exception as e:
        logger.error(f"Failed to fetch current price for {ticker}: {e}")
        return None


# ─── Options chain ─────────────────────────────────────────────────────────────

def get_option_expirations(ticker: str) -> list[str]:
    """Return list of available expiration date strings (YYYY-MM-DD)."""
    cache_key = ("expirations", ticker)
    cached = _cache_get(cache_key)
    if cached is not None:
        return cached

    try:
        t = yf.Ticker(ticker)
        expirations = list(t.options)
        _cache_set(cache_key, expirations)
        return expirations
    except Exception as e:
        logger.error(f"Failed to fetch expirations for {ticker}: {e}")
        return []


def get_options_chain(ticker: str, expiration: str) -> Optional[dict]:
    """
    Return {'calls': DataFrame, 'puts': DataFrame} for the given expiry.
    Columns include: strike, lastPrice, bid, ask, volume, openInterest,
    impliedVolatility, delta, theta, gamma (where available).
    Returns None on failure.
    """
    cache_key = ("options_chain", ticker, expiration)
    cached = _cache_get(cache_key)
    if cached is not None:
        return cached

    try:
        t = yf.Ticker(ticker)
        chain = t.option_chain(expiration)
        result = {"calls": chain.calls.copy(), "puts": chain.puts.copy()}
        # yfinance does not always include Greeks — add NaN columns if missing
        for df in result.values():
            for col in ("delta", "theta", "gamma"):
                if col not in df.columns:
                    df[col] = float("nan")
        _cache_set(cache_key, result)
        return result
    except Exception as e:
        logger.error(f"Failed to fetch options chain for {ticker} exp={expiration}: {e}")
        return None


def get_nearest_expiry(ticker: str, target_date: date) -> Optional[str]:
    """Return the closest available expiry on or after target_date."""
    expirations = get_option_expirations(ticker)
    if not expirations:
        return None
    target_str = str(target_date)
    future = [e for e in sorted(expirations) if e >= target_str]
    return future[0] if future else None


def get_same_day_expiry(ticker: str) -> Optional[str]:
    """Return today's expiry string if it exists, else None."""
    today_str = str(date.today())
    expirations = get_option_expirations(ticker)
    return today_str if today_str in expirations else None


# ─── Earnings / dividends ──────────────────────────────────────────────────────

def get_earnings_date(ticker: str) -> Optional[date]:
    """Return the next earnings date or None."""
    cache_key = ("earnings", ticker)
    cached = _cache_get(cache_key)
    if cached is not None:
        return cached

    try:
        t = yf.Ticker(ticker)
        cal = t.calendar
        if cal is None:
            return None

        # calendar can be a dict or DataFrame depending on yfinance version
        if isinstance(cal, dict):
            earnings_info = cal.get("Earnings Date", [])
        else:
            # DataFrame — look for "Earnings Date" row or column
            try:
                earnings_info = cal.loc["Earnings Date"].tolist()
            except Exception:
                earnings_info = []

        result = None
        for item in (earnings_info if isinstance(earnings_info, list) else [earnings_info]):
            try:
                d = pd.Timestamp(item).date()
                if d >= date.today():
                    result = d
                    break
            except Exception:
                continue

        _cache_set(cache_key, result)
        return result
    except Exception as e:
        logger.error(f"Failed to fetch earnings date for {ticker}: {e}")
        return None
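
The helpers above compose naturally: resolve a target date to a listed expiry, then pull the chain for it, with both calls served from the 15-minute cache on repeat. A small sketch (hypothetical ticker and date; needs network access for yfinance):

from datetime import date, timedelta
from app.services import market_data as md

ticker = "AAPL"
target = date.today() + timedelta(days=7)        # roughly "next week"
expiry = md.get_nearest_expiry(ticker, target)   # first listed expiry on/after target
if expiry:
    chain = md.get_options_chain(ticker, expiry) # cached for the current 15-min bucket
    if chain is not None:
        print(chain["calls"][["strike", "bid", "ask", "impliedVolatility"]].head())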

backend/app/services/position_monitor.py (new file, 331 lines)
@@ -0,0 +1,331 @@
"""
position_monitor.py — 15-minute job that re-evaluates all open option positions
and fires alerts when signals change materially.

Also refreshes recommendations for all stock positions.
"""

import logging
from datetime import datetime, date
from typing import Optional

from sqlalchemy.orm import Session

from app.database import SessionLocal
from app.models.db_models import OptionPosition, StockPosition, Recommendation, Alert, Device
from app.services import market_data as md
from app.services.signal_engine import (
    compute_iv_rank,
    compute_smas,
    compute_swing_levels,
    compute_trend,
    compute_signal_strength,
    compute_signal_hash,
)
from app.services.recommendation_engine import build_recommendation
from app.services.apns_service import send_push
from app.models.schemas import SignalSnapshot

logger = logging.getLogger(__name__)

# Tracks the last time this job ran
last_run: Optional[datetime] = None


def _determine_alert_type(
    position: OptionPosition,
    current_delta: float,
    new_signal_hash: str,
    new_rec: Optional[Recommendation],
    earnings_warning: bool,
) -> Optional[str]:
    """
    Determine if and what type of alert to fire.
    Returns alert_type string or None.
    """
    # Earnings warning newly triggered
    if earnings_warning and not _position_had_earnings_warning(position):
        return "earnings_warning"

    # Deep ITM — delta threshold
    abs_delta = abs(current_delta)
    if abs_delta >= 0.45:
        return "close_early"

    # Approaching deep ITM — a slightly lower threshold also suggests
    # closing early before assignment risk grows further
    if abs_delta >= 0.40:
        return "close_early"

    # Expiry-based recommendation changed
    if new_rec:
        days_to_expiry = (position.expiration - date.today()).days
        if days_to_expiry <= 5 and abs_delta <= 0.10:
            return "close_early"  # expiring nearly worthless — take it off

        # Roll suggestion: new recommendation is for a further-out expiry
        if new_rec.recommended_expiration > position.expiration:
            return "roll_out"

        # Strike meaningfully different (more than 2 strikes, roughly $2-5 depending on underlying)
        if abs(new_rec.recommended_strike - position.strike) / position.strike > 0.02:
            return "roll_up_down"

    return None


def _position_had_earnings_warning(position: OptionPosition) -> bool:
    """Best-effort: check if earnings warning was already flagged on last hash."""
    # We encode earnings_warning in the hash payload so if it was True before
    # the hash would already reflect it. This is a simple flag check.
    return False  # Simplified — the hash change will trigger the alert naturally


async def monitor_all_positions():
    """Main scheduler job. Runs every 15 minutes."""
    global last_run
    logger.info("Position monitor: starting run")

    db: Session = SessionLocal()
    try:
        # 1. Get all open positions grouped by ticker to batch data fetching
        open_positions: list[OptionPosition] = (
            db.query(OptionPosition)
            .filter(OptionPosition.status == "open")
            .all()
        )

        tickers_to_check = list({p.ticker for p in open_positions})

        # Also collect all stock positions to refresh recommendations
        stock_positions: list[StockPosition] = db.query(StockPosition).all()
        stock_tickers = list({sp.ticker for sp in stock_positions})

        all_tickers = list(set(tickers_to_check + stock_tickers))

        if not all_tickers:
            logger.info("Position monitor: no tickers to check")
            last_run = datetime.utcnow()
            return

        # 2. Pre-fetch market data for all tickers (cached by market_data module)
        logger.info(f"Position monitor: checking {len(all_tickers)} tickers")
        signal_snapshots: dict[str, Optional[SignalSnapshot]] = {}
        for ticker in all_tickers:
            snap = _compute_snapshot(ticker)
            signal_snapshots[ticker] = snap

        # 3. Evaluate each open option position
        for position in open_positions:
            snap = signal_snapshots.get(position.ticker)
            if snap is None:
                continue

            await _evaluate_position(db, position, snap)

        # 4. Refresh recommendations for all stock positions
        for stock_pos in stock_positions:
            snap = signal_snapshots.get(stock_pos.ticker)
            if snap is None:
                continue
            _refresh_recommendations(db, stock_pos.device_id, stock_pos.ticker, snap)

        db.commit()
        last_run = datetime.utcnow()
        logger.info("Position monitor: run complete")

    except Exception as e:
        logger.error(f"Position monitor error: {e}", exc_info=True)
        db.rollback()
    finally:
        db.close()


def _compute_snapshot(ticker: str) -> Optional[SignalSnapshot]:
    """Build signal snapshot from market data."""
    import math
    df = md.get_price_history(ticker)
    if df is None:
        return None
    current_price = md.get_current_price(ticker)
    if current_price is None:
        return None

    iv_rank = compute_iv_rank(df)
    smas = compute_smas(df)
    swing = compute_swing_levels(df)
    trend = compute_trend(current_price, smas["sma_50"], smas["sma_200"])
    earnings_date = md.get_earnings_date(ticker)

    return SignalSnapshot(
        ticker=ticker,
        current_price=current_price,
        iv_rank=iv_rank,
        sma_50=smas["sma_50"] if not math.isnan(smas["sma_50"]) else 0.0,
        sma_200=smas["sma_200"] if not math.isnan(smas["sma_200"]) else 0.0,
        nearest_support=swing["nearest_support"],
        nearest_resistance=swing["nearest_resistance"],
        trend=trend,
        earnings_date=earnings_date,
        computed_at=datetime.utcnow(),
    )


async def _evaluate_position(db: Session, position: OptionPosition, snap: SignalSnapshot):
    """Re-evaluate one open position and fire an alert if signal changed."""
    # Get current option data for this specific strike/expiry
    expiry_str = str(position.expiration)
    chain = md.get_options_chain(position.ticker, expiry_str)

    current_delta = 0.25  # fallback
    if chain:
        chain_df = chain["calls"] if position.strategy == "covered_call" else chain["puts"]
        row = chain_df[chain_df["strike"] == position.strike]
        if not row.empty and "delta" in row.columns:
            delta_val = row["delta"].iloc[0]
            if delta_val == delta_val:  # not NaN
                current_delta = abs(float(delta_val))

    # Build a fresh new recommendation to compare expiry/strike
    new_rec = build_recommendation(
        device_id=position.device_id,
        ticker=position.ticker,
        strategy=position.strategy,
        time_horizon="weekly",  # use weekly for monitoring comparisons
        snapshot=snap,
    )

    earnings_warning = bool(snap.earnings_date and snap.earnings_date <= position.expiration)

    new_hash = compute_signal_hash(
        iv_rank=snap.iv_rank,
        sma_50=snap.sma_50,
        sma_200=snap.sma_200,
        nearest_support=snap.nearest_support,
        nearest_resistance=snap.nearest_resistance,
        recommended_strike=new_rec.recommended_strike if new_rec else position.strike,
        recommended_expiration=new_rec.recommended_expiration if new_rec else position.expiration,
        earnings_warning=earnings_warning,
    )

    # No change — skip
    if new_hash == position.last_signal_hash:
        return

    # Determine alert type
    alert_type = _determine_alert_type(position, current_delta, new_hash, new_rec, earnings_warning)
    if alert_type is None:
        # Still update the hash even if no actionable alert
        position.last_signal_hash = new_hash
        return

    # Build alert message
    message = _build_alert_message(position, alert_type, current_delta, snap, new_rec)

    # Save alert to DB
    alert = Alert(
        device_id=position.device_id,
        option_position_id=position.id,
        ticker=position.ticker,
        alert_type=alert_type,
        message=message,
        old_signal_hash=position.last_signal_hash,
        new_signal_hash=new_hash,
        sent_at=datetime.utcnow(),
        acknowledged=False,
    )
    db.add(alert)

    # Update position hash
    position.last_signal_hash = new_hash

    # Send push notification
    device: Optional[Device] = db.query(Device).filter(Device.id == position.device_id).first()
    if device:
        strategy_label = "Covered Call" if position.strategy == "covered_call" else "Cash-Secured Put"
        await send_push(
            apns_token=device.apns_token,
            title=f"{position.ticker} {strategy_label} — Action Needed",
            body=message,
            payload={
                "alert_type": alert_type,
                "ticker": position.ticker,
                "position_id": position.id,
            },
        )

    logger.info(f"Alert fired: {position.ticker} {alert_type} — {message[:60]}")


def _build_alert_message(
    position: OptionPosition,
    alert_type: str,
    current_delta: float,
    snap: SignalSnapshot,
    new_rec: Optional[Recommendation],
) -> str:
    strike = position.strike
    expiry = position.expiration

    if alert_type == "close_early":
        if current_delta >= 0.45:
            return (
                f"Delta has risen to {current_delta:.2f} — position is deep ITM. "
                f"Consider closing early to limit risk on the ${strike:.0f} strike expiring {expiry}."
            )
        return (
            f"Signals suggest closing early on ${strike:.0f} strike expiring {expiry}. "
            f"Capturing premium now may be optimal."
        )

    if alert_type == "roll_out" and new_rec:
        return (
            f"Consider rolling your ${strike:.0f} strike out to {new_rec.recommended_expiration} "
            f"at ${new_rec.recommended_strike:.0f} for ${new_rec.estimated_premium:.2f} credit."
        )

    if alert_type == "roll_up_down" and new_rec:
        direction = "up" if new_rec.recommended_strike > strike else "down"
        return (
            f"Signals favor rolling {direction} from ${strike:.0f} to ${new_rec.recommended_strike:.0f} "
            f"at {new_rec.recommended_expiration} for ${new_rec.estimated_premium:.2f} credit."
        )

    if alert_type == "earnings_warning":
        return (
            f"⚠️ Earnings date {snap.earnings_date} now falls within your expiry on {expiry}. "
            f"Consider closing or rolling before earnings."
        )

    return f"Signal change detected on {position.ticker} ${strike:.0f} strike. Review your position."


def _refresh_recommendations(db: Session, device_id: int, ticker: str, snap: SignalSnapshot):
    """Rebuild and save latest recommendations for a ticker."""
    for strategy in ("covered_call", "cash_secured_put"):
        for horizon in ("weekly", "monthly"):
            rec = build_recommendation(
                device_id=device_id,
                ticker=ticker,
                strategy=strategy,
                time_horizon=horizon,
                snapshot=snap,
            )
            if rec is None:
                continue

            # Replace existing recommendation for same device/ticker/strategy/horizon
            existing = (
                db.query(Recommendation)
                .filter(
                    Recommendation.device_id == device_id,
                    Recommendation.ticker == ticker,
                    Recommendation.strategy == strategy,
                    Recommendation.time_horizon == horizon,
                )
                .first()
            )
            if existing:
                db.delete(existing)

            db.add(rec)
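
signal_engine.py does not appear in this listing, so the exact hashing scheme behind compute_signal_hash is not shown here. As a point of reference only, a minimal sketch of what a noise-resistant hash could look like, matching the keyword arguments used in _evaluate_position and the String(16) hash columns; the bucket sizes are assumptions, not the committed implementation:

# Hypothetical sketch — inputs are bucketed before hashing so small wiggles
# in price-derived values do not change the digest; only material moves do.
import hashlib
from datetime import date
from typing import Optional


def compute_signal_hash(
    iv_rank: float,
    sma_50: float,
    sma_200: float,
    nearest_support: Optional[float],
    nearest_resistance: Optional[float],
    recommended_strike: float,
    recommended_expiration: date,
    earnings_warning: bool,
) -> str:
    def bucket(value: Optional[float], step: float) -> str:
        # Snap each value to a coarse grid; None stays distinct.
        if value is None:
            return "none"
        return str(round(value / step) * step)

    payload = "|".join([
        bucket(iv_rank, 5.0),            # assumed 5-point IV-rank buckets
        bucket(sma_50, 0.5),
        bucket(sma_200, 0.5),
        bucket(nearest_support, 0.5),
        bucket(nearest_resistance, 0.5),
        bucket(recommended_strike, 0.5),
        str(recommended_expiration),
        str(earnings_warning),
    ])
    # Truncated to 16 chars to fit the String(16) hash columns.
    return hashlib.sha256(payload.encode()).hexdigest()[:16]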
299
backend/app/services/recommendation_engine.py
Normal file
299
backend/app/services/recommendation_engine.py
Normal file
@@ -0,0 +1,299 @@
|
||||
"""
|
||||
recommendation_engine.py — Select optimal strike and expiry for a given strategy/horizon.
|
||||
|
||||
For each (ticker, strategy, time_horizon) combination:
|
||||
1. Determine the target expiration date
|
||||
2. Fetch the options chain for that expiry
|
||||
3. Filter by delta range (0.20-0.30 for CC/CSP)
|
||||
4. Among qualifying strikes, pick highest mid-price premium
|
||||
5. Build a Recommendation DB row
|
||||
"""
|
||||
|
||||
import logging
|
||||
import math
|
||||
from datetime import date, datetime
|
||||
from typing import Optional
|
||||
|
||||
import pandas as pd
|
||||
|
||||
from app.models.db_models import Recommendation
|
||||
from app.models.schemas import SignalSnapshot
|
||||
from app.services import market_data as md
|
||||
from app.services.signal_engine import (
|
||||
compute_iv_rank,
|
||||
compute_smas,
|
||||
compute_swing_levels,
|
||||
compute_trend,
|
||||
compute_signal_strength,
|
||||
compute_signal_hash,
|
||||
)
|
||||
from app.utils.date_helpers import (
|
||||
next_trading_day,
|
||||
next_friday,
|
||||
nearest_monthly_expiry,
|
||||
within_dte_window_for_0dte,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Delta target ranges for short premium selling
|
||||
DELTA_MIN = 0.18
|
||||
DELTA_MAX = 0.35
|
||||
|
||||
|
||||
def _target_expiry(ticker: str, time_horizon: str) -> Optional[str]:
    """
    Return the best available expiry string for the given time horizon.
    Returns None if no suitable expiry exists.
    """
    today = date.today()

    if time_horizon == "0dte":
        if not within_dte_window_for_0dte():
            return None
        expiry = md.get_same_day_expiry(ticker)
        return expiry  # None if today has no options

    if time_horizon == "1dte":
        target = next_trading_day(today)
        return md.get_nearest_expiry(ticker, target)

    if time_horizon == "weekly":
        target = next_friday(today)
        return md.get_nearest_expiry(ticker, target)

    if time_horizon == "monthly":
        target = nearest_monthly_expiry(today, target_dte=30)
        return md.get_nearest_expiry(ticker, target)

    return None
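To make the horizon mapping concrete, a hypothetical walk-through (the ticker, calendar dates, and daily-expiry availability are all assumptions for illustration):

    # Suppose today is Wed 2024-03-06 and "SPY" lists daily expiries:
    _target_expiry("SPY", "0dte")     # "2024-03-06", or None outside the 0DTE window
    _target_expiry("SPY", "1dte")     # listed expiry nearest Thu 2024-03-07
    _target_expiry("SPY", "weekly")   # listed expiry nearest Fri 2024-03-08
    _target_expiry("SPY", "monthly")  # listed expiry nearest the third Friday, 2024-04-19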
def _best_strike(
    chain_df: pd.DataFrame,
    strategy: str,
    current_price: float,
    nearest_support: Optional[float],
    nearest_resistance: Optional[float],
) -> Optional[pd.Series]:
    """
    Filter the options chain for the best strike.

    Rules:
      covered_call     — calls, OTM (strike > current_price), delta 0.18-0.35
      cash_secured_put — puts, OTM (strike < current_price), |delta| 0.18-0.35,
                         strike preferably > nearest_support
    Returns the best row or None.
    """
    df = chain_df.copy()

    # Ensure we have a usable delta column
    has_delta = "delta" in df.columns and df["delta"].notna().any()

    if strategy == "covered_call":
        df = df[df["strike"] > current_price]
        if has_delta:
            df = df[(df["delta"] >= DELTA_MIN) & (df["delta"] <= DELTA_MAX)]
        else:
            # Approximate OTM calls: within 7% above current price
            df = df[df["strike"] <= current_price * 1.07]
    elif strategy == "cash_secured_put":
        df = df[df["strike"] < current_price]
        if has_delta:
            df = df[(df["delta"].abs() >= DELTA_MIN) & (df["delta"].abs() <= DELTA_MAX)]
        else:
            # Approximate OTM puts: within 7% below current price
            df = df[df["strike"] >= current_price * 0.93]

    # Prefer strikes at or above the nearest support, to avoid selling below a key
    # level (mainly relevant for CSPs; OTM call strikes already sit above support)
    if nearest_support:
        above_support = df[df["strike"] >= nearest_support * 0.99]
        if not above_support.empty:
            df = above_support

    if df.empty:
        return None

    # Compute mid-price and pick the highest premium
    df = df.copy()
    df["mid"] = (df["bid"] + df["ask"]) / 2
    df = df[df["mid"] > 0]

    if df.empty:
        return None

    return df.loc[df["mid"].idxmax()]
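A toy chain makes the filter concrete. The numbers are invented, and only the columns `_best_strike` actually reads are included:

    chain = pd.DataFrame({
        "strike": [102.0, 105.0, 110.0],
        "bid":    [1.90,  1.20,  0.40],
        "ask":    [2.10,  1.40,  0.60],
        "delta":  [0.34,  0.22,  0.08],
    })
    best = _best_strike(chain, "covered_call", current_price=100.0,
                        nearest_support=95.0, nearest_resistance=108.0)
    # The delta band keeps 102 and 105 (drops 110 at 0.08); 102 wins on mid-price.
    assert best["strike"] == 102.0 and best["mid"] == 2.0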
def build_recommendation(
    device_id: int,
    ticker: str,
    strategy: str,
    time_horizon: str,
    snapshot: Optional[SignalSnapshot] = None,
) -> Optional[Recommendation]:
    """
    Build a Recommendation ORM object for the given parameters.
    Returns None if not enough data exists.
    """
    # 1. Get signal snapshot (reuse if provided to avoid duplicate yfinance calls)
    if snapshot is None:
        df = md.get_price_history(ticker)
        if df is None:
            return None
        current_price = md.get_current_price(ticker)
        if current_price is None:
            return None
        iv_rank = compute_iv_rank(df)
        smas = compute_smas(df)
        swing = compute_swing_levels(df)
        trend = compute_trend(current_price, smas["sma_50"], smas["sma_200"])
        earnings_date = md.get_earnings_date(ticker)
        nearest_support = swing["nearest_support"]
        nearest_resistance = swing["nearest_resistance"]
        sma_50 = smas["sma_50"]
        sma_200 = smas["sma_200"]
    else:
        current_price = snapshot.current_price
        iv_rank = snapshot.iv_rank
        sma_50 = snapshot.sma_50
        sma_200 = snapshot.sma_200
        nearest_support = snapshot.nearest_support
        nearest_resistance = snapshot.nearest_resistance
        trend = snapshot.trend
        earnings_date = snapshot.earnings_date

    # 2. Determine target expiry
    expiry_str = _target_expiry(ticker, time_horizon)
    if expiry_str is None:
        logger.debug(f"No expiry available for {ticker} {time_horizon}")
        return None

    expiry_date = date.fromisoformat(expiry_str)

    # 3. Earnings warning
    earnings_warning = bool(earnings_date and earnings_date <= expiry_date)

    # 4. Fetch options chain
    chain = md.get_options_chain(ticker, expiry_str)
    if chain is None:
        return None

    chain_df = chain["calls"] if strategy == "covered_call" else chain["puts"]

    # 5. Pick best strike
    best = _best_strike(chain_df, strategy, current_price, nearest_support, nearest_resistance)
    if best is None:
        logger.debug(f"No qualifying strike for {ticker} {strategy} {time_horizon}")
        return None

    strike = float(best["strike"])
    mid_price = float((best["bid"] + best["ask"]) / 2)
    # Fall back to an estimate when the chain carries no usable greeks
    raw_delta = best.get("delta", float("nan"))
    delta = float(raw_delta) if not math.isnan(raw_delta) else _estimate_delta(strike, current_price, strategy)
    raw_theta = best.get("theta", float("nan"))
    theta = float(raw_theta) if not math.isnan(raw_theta) else 0.0

    # 6. Signal strength
    signal_strength = compute_signal_strength(
        iv_rank=iv_rank,
        trend=trend,
        strategy=strategy,
        nearest_support=nearest_support,
        nearest_resistance=nearest_resistance,
        recommended_strike=strike,
        earnings_warning=earnings_warning,
    )

    # 7. Signal hash
    sig_hash = compute_signal_hash(
        iv_rank=iv_rank,
        sma_50=sma_50,
        sma_200=sma_200,
        nearest_support=nearest_support,
        nearest_resistance=nearest_resistance,
        recommended_strike=strike,
        recommended_expiration=expiry_date,
        earnings_warning=earnings_warning,
    )

    # 8. Build human-readable rationale
    rationale = _build_rationale(
        strategy=strategy,
        time_horizon=time_horizon,
        current_price=current_price,
        strike=strike,
        iv_rank=iv_rank,
        trend=trend,
        earnings_warning=earnings_warning,
        earnings_date=earnings_date,
        signal_strength=signal_strength,
    )

    return Recommendation(
        device_id=device_id,
        ticker=ticker,
        strategy=strategy,
        time_horizon=time_horizon,
        current_price=current_price,
        recommended_strike=strike,
        recommended_expiration=expiry_date,
        estimated_premium=mid_price,
        delta=delta,
        theta=theta,
        iv_rank=iv_rank,
        signal_strength=signal_strength,
        earnings_warning=earnings_warning,
        earnings_date=earnings_date,
        rationale=rationale,
        signal_hash=sig_hash,
        created_at=datetime.utcnow(),
    )
def _estimate_delta(strike: float, current_price: float, strategy: str) -> float:
    """Rough delta estimate when yfinance doesn't provide it."""
    moneyness = (strike - current_price) / current_price
    if strategy == "covered_call":
        return max(0.05, 0.50 - moneyness * 3)
    else:
        return max(0.05, 0.50 - abs(moneyness) * 3)
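Worked example of the fallback: with the stock at $100 and a $107 call, moneyness = 0.07, so the estimate is 0.50 - 0.07 * 3 = 0.29, inside the 0.18-0.35 target band. It is a crude linear stand-in used only when the chain has no delta column.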
def _build_rationale(
    strategy: str,
    time_horizon: str,
    current_price: float,
    strike: float,
    iv_rank: float,
    trend: str,
    earnings_warning: bool,
    earnings_date: Optional[date],
    signal_strength: str,
) -> str:
    parts = []

    strategy_label = "Covered Call" if strategy == "covered_call" else "Cash-Secured Put"
    direction = "above" if strategy == "covered_call" else "below"

    parts.append(
        f"{signal_strength.capitalize()} {strategy_label} setup. "
        f"Strike ${strike:.2f} is {direction} current price ${current_price:.2f}."
    )

    if iv_rank >= 50:
        parts.append(f"IV rank is elevated at {iv_rank:.0f}% — favorable premium-selling environment.")
    elif iv_rank >= 30:
        parts.append(f"IV rank is moderate at {iv_rank:.0f}%.")
    else:
        parts.append(f"IV rank is low at {iv_rank:.0f}% — premiums may be thin.")

    trend_map = {"uptrend": "bullish uptrend", "downtrend": "bearish downtrend", "sideways": "sideways range"}
    parts.append(f"Price is in a {trend_map.get(trend, trend)}.")

    if earnings_warning and earnings_date:
        parts.append(
            f"⚠️ Earnings on {earnings_date} fall within this expiry — elevated risk. Consider a shorter expiry."
        )

    horizon_map = {"0dte": "0DTE", "1dte": "1DTE", "weekly": "weekly", "monthly": "monthly"}
    parts.append(f"Horizon: {horizon_map.get(time_horizon, time_horizon)}.")

    return " ".join(parts)
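An illustrative output with invented inputs (strong covered call, IV rank 70, uptrend, weekly, no earnings risk): "Strong Covered Call setup. Strike $107.00 is above current price $100.00. IV rank is elevated at 70% — favorable premium-selling environment. Price is in a bullish uptrend. Horizon: weekly."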
274
backend/app/services/signal_engine.py
Normal file
@@ -0,0 +1,274 @@
"""
signal_engine.py — Compute all signals for a ticker.

Signals:
- IV Rank (using rolling 30-day HV as a proxy, since yfinance lacks historical IV)
- SMA-50, SMA-200
- Swing-based support / resistance (20-day window)
- Trend direction
- Signal strength score
- Signal hash (for change detection)
"""

import hashlib
import json
import logging
import math
from datetime import date
from typing import Optional

import numpy as np
import pandas as pd

from app.services.market_data import get_price_history, get_earnings_date, get_current_price
from app.models.schemas import SignalSnapshot

logger = logging.getLogger(__name__)

# ─── IV Rank ──────────────────────────────────────────────────────────────────

def compute_iv_rank(df: pd.DataFrame) -> float:
    """
    Compute IV Rank (0-100) using a 30-day rolling realized volatility
    as a proxy for implied volatility.

    Returns the current HV's rank within the past 52-week HV range.
    Clamped to [0, 100].
    """
    if df is None or len(df) < 32:
        return 50.0  # neutral fallback

    closes = df["Close"].squeeze()
    log_returns = np.log(closes / closes.shift(1)).dropna()

    # 30-day rolling std, annualized
    hv_series = log_returns.rolling(30).std() * math.sqrt(252) * 100  # as a percentage
    hv_series = hv_series.dropna()

    if len(hv_series) < 2:
        return 50.0

    window = hv_series.iloc[-252:]  # last 52 weeks
    hv_low = float(window.min())
    hv_high = float(window.max())
    current_hv = float(hv_series.iloc[-1])

    if hv_high == hv_low:
        return 50.0

    rank = (current_hv - hv_low) / (hv_high - hv_low) * 100
    return max(0.0, min(100.0, rank))
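Concretely, with invented numbers: if the trailing 52-week HV range runs from 15% to 45% and the current 30-day HV is 37.5%, the rank is (37.5 - 15) / (45 - 15) * 100 = 75.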
# ─── Moving Averages ──────────────────────────────────────────────────────────

def compute_smas(df: pd.DataFrame) -> dict:
    """Return {'sma_50': float, 'sma_200': float}. Uses NaN if there is insufficient data."""
    closes = df["Close"].squeeze()
    sma_50 = float(closes.rolling(50).mean().iloc[-1]) if len(closes) >= 50 else float("nan")
    sma_200 = float(closes.rolling(200).mean().iloc[-1]) if len(closes) >= 200 else float("nan")
    return {"sma_50": sma_50, "sma_200": sma_200}


def compute_trend(current_price: float, sma_50: float, sma_200: float) -> str:
    """
    uptrend   — price > sma50 > sma200
    downtrend — price < sma50 < sma200
    sideways  — otherwise
    """
    if any(math.isnan(v) for v in [sma_50, sma_200]):
        return "sideways"
    if current_price > sma_50 > sma_200:
        return "uptrend"
    if current_price < sma_50 < sma_200:
        return "downtrend"
    return "sideways"
# ─── Support / Resistance ─────────────────────────────────────────────────────

def compute_swing_levels(df: pd.DataFrame, lookback: int = 20, neighbors: int = 2) -> dict:
    """
    Find swing highs (resistance) and swing lows (support) over the last
    `lookback` trading days using a `neighbors`-bar pivot rule.

    Returns:
        {
            'nearest_support': float | None,
            'nearest_resistance': float | None,
            'support_levels': [float],
            'resistance_levels': [float],
        }
    """
    recent = df.tail(lookback + neighbors * 2)
    highs = recent["High"].squeeze().values
    lows = recent["Low"].squeeze().values
    n = len(highs)

    swing_highs = []
    swing_lows = []

    for i in range(neighbors, n - neighbors):
        if all(highs[i] > highs[i - j] for j in range(1, neighbors + 1)) and \
           all(highs[i] > highs[i + j] for j in range(1, neighbors + 1)):
            swing_highs.append(float(highs[i]))
        if all(lows[i] < lows[i - j] for j in range(1, neighbors + 1)) and \
           all(lows[i] < lows[i + j] for j in range(1, neighbors + 1)):
            swing_lows.append(float(lows[i]))

    def cluster(levels: list[float], pct: float = 0.005) -> list[float]:
        """Merge levels within `pct` of each other (a fraction: 0.005 = 0.5%) into their mean."""
        if not levels:
            return []
        sorted_levels = sorted(levels)
        clusters = [[sorted_levels[0]]]
        for lvl in sorted_levels[1:]:
            if abs(lvl - clusters[-1][-1]) / clusters[-1][-1] <= pct:
                clusters[-1].append(lvl)
            else:
                clusters.append([lvl])
        return [sum(c) / len(c) for c in clusters]

    resistance_levels = cluster(swing_highs)
    support_levels = cluster(swing_lows)

    # Get current price for context
    current_price = float(df["Close"].squeeze().iloc[-1])

    nearest_resistance = min(
        (r for r in resistance_levels if r > current_price), default=None
    )
    nearest_support = max(
        (s for s in support_levels if s < current_price), default=None
    )

    return {
        "nearest_support": nearest_support,
        "nearest_resistance": nearest_resistance,
        "support_levels": support_levels,
        "resistance_levels": resistance_levels,
    }
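For example, cluster([100.0, 100.3, 105.0]) merges 100.0 and 100.3 (0.3% apart, within the 0.5% tolerance) into their mean 100.15 and keeps 105.0 separate, returning [100.15, 105.0].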
# ─── Signal Strength Scoring ──────────────────────────────────────────────────

def compute_signal_strength(
    iv_rank: float,
    trend: str,
    strategy: str,
    nearest_support: Optional[float],
    nearest_resistance: Optional[float],
    recommended_strike: Optional[float],
    earnings_warning: bool,
) -> str:
    """
    Score the signal quality and return 'strong', 'moderate', or 'weak'.

    Scoring:
    +2  iv_rank >= 50 (premium-rich environment), or +1 if iv_rank >= 30
    +2  trend fully aligned (uptrend for a covered call; non-downtrend for a CSP)
    +1  covered call in a sideways range, or extra bonus for a CSP in an uptrend
    +1  strike positioned well vs. the nearest support/resistance level
    -2  earnings_warning (dangerous to hold through earnings)

    Thresholds: >= 5 strong, >= 3 moderate, else weak.
    """
    score = 0

    if iv_rank >= 50:
        score += 2
    elif iv_rank >= 30:
        score += 1

    if strategy == "covered_call":
        if trend == "uptrend":
            score += 2  # trend and SMA alignment
        elif trend == "sideways":
            score += 1
        if nearest_resistance and recommended_strike and recommended_strike < nearest_resistance:
            score += 1
    elif strategy == "cash_secured_put":
        if trend != "downtrend":
            score += 2
        if trend == "uptrend":
            score += 1  # extra bonus for a strong bullish trend on a bullish strategy
        if nearest_support and recommended_strike and recommended_strike > nearest_support:
            score += 1

    if earnings_warning:
        score -= 2

    if score >= 5:
        return "strong"
    elif score >= 3:
        return "moderate"
    else:
        return "weak"
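Tracing the scoring with the same inputs the tests below use: a covered call with IV rank 70 (+2), an uptrend (+2), and a strike below the nearest resistance (+1) totals 5, crossing the "strong" threshold; switching on earnings_warning (-2) drops it to 3, which is "moderate".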
# ─── Signal Hash ──────────────────────────────────────────────────────────────

def compute_signal_hash(
    iv_rank: float,
    sma_50: float,
    sma_200: float,
    nearest_support: Optional[float],
    nearest_resistance: Optional[float],
    recommended_strike: Optional[float],
    recommended_expiration: Optional[date],
    earnings_warning: bool,
) -> str:
    """
    16-char deterministic hash of rounded signal inputs.
    Only changes when signals shift meaningfully — not on every tick.
    """
    payload = {
        "ivr": round(iv_rank, 1),
        "sma50": round(sma_50, 2) if sma_50 and not math.isnan(sma_50) else None,
        "sma200": round(sma_200, 2) if sma_200 and not math.isnan(sma_200) else None,
        "support": round(nearest_support, 2) if nearest_support else None,
        "resistance": round(nearest_resistance, 2) if nearest_resistance else None,
        "strike": recommended_strike,
        "expiry": str(recommended_expiration) if recommended_expiration else None,
        "ew": earnings_warning,
    }
    raw = json.dumps(payload, sort_keys=True)
    return hashlib.sha256(raw.encode()).hexdigest()[:16]
# ─── Full Signal Computation ──────────────────────────────────────────────────

def compute_signals(ticker: str) -> Optional[SignalSnapshot]:
    """
    Compute and return a full SignalSnapshot for a ticker.
    Returns None if market data is unavailable.
    """
    from datetime import datetime

    df = get_price_history(ticker)
    if df is None or df.empty:
        logger.warning(f"No price history for {ticker} — cannot compute signals")
        return None

    current_price = get_current_price(ticker)
    if current_price is None:
        current_price = float(df["Close"].squeeze().iloc[-1])

    iv_rank = compute_iv_rank(df)
    smas = compute_smas(df)
    swing = compute_swing_levels(df)
    trend = compute_trend(current_price, smas["sma_50"], smas["sma_200"])
    earnings_date = get_earnings_date(ticker)

    return SignalSnapshot(
        ticker=ticker,
        current_price=current_price,
        iv_rank=iv_rank,
        sma_50=smas["sma_50"] if not math.isnan(smas["sma_50"]) else 0.0,
        sma_200=smas["sma_200"] if not math.isnan(smas["sma_200"]) else 0.0,
        nearest_support=swing["nearest_support"],
        nearest_resistance=swing["nearest_resistance"],
        trend=trend,
        earnings_date=earnings_date,
        computed_at=datetime.utcnow(),
    )
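Putting it together, a minimal usage sketch (the ticker is arbitrary, and the None check matters because every upstream data fetch can fail):

    snap = compute_signals("AAPL")
    if snap is not None:
        print(snap.trend, f"IVR {snap.iv_rank:.0f}%", snap.nearest_support)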
0
backend/app/utils/__init__.py
Normal file
102
backend/app/utils/date_helpers.py
Normal file
@@ -0,0 +1,102 @@
import logging
from datetime import date, timedelta
from typing import Optional

logger = logging.getLogger(__name__)

_NYSE_CAL = None


def _get_nyse():
    # pandas_market_calendars is an optional dep — import lazily and fall back gracefully
    global _NYSE_CAL
    if _NYSE_CAL is None:
        try:
            import pandas_market_calendars as mcal
            _NYSE_CAL = mcal.get_calendar("NYSE")
        except ImportError:
            pass
    return _NYSE_CAL
def is_trading_day(d: date) -> bool:
    """Return True if d is a NYSE trading day (weekday-only fallback without the calendar)."""
    cal = _get_nyse()
    if cal is None:
        return d.weekday() < 5
    schedule = cal.schedule(start_date=str(d), end_date=str(d))
    return not schedule.empty


def next_trading_day(d: date) -> date:
    """Return the next trading day after d."""
    candidate = d + timedelta(days=1)
    while not is_trading_day(candidate):
        candidate += timedelta(days=1)
    return candidate
def next_friday(from_date: Optional[date] = None) -> date:
    """Return the next Friday on or after from_date (from_date itself if it is a Friday)."""
    d = from_date or date.today()
    days_ahead = (4 - d.weekday()) % 7  # Friday is weekday 4; 0 means today is Friday
    return d + timedelta(days=days_ahead)
def nearest_monthly_expiry(from_date: Optional[date] = None, target_dte: int = 30) -> date:
    """Return the standard monthly expiry (third Friday) closest to target_dte days from from_date."""
    d = from_date or date.today()
    target = d + timedelta(days=target_dte)

    # Find the third Friday of the target month
    year, month = target.year, target.month
    third_friday = _third_friday(year, month)

    # If the third Friday of the target month has already passed relative to today, advance one month
    if third_friday <= d:
        if month == 12:
            year += 1
            month = 1
        else:
            month += 1
        third_friday = _third_friday(year, month)

    return third_friday


def _third_friday(year: int, month: int) -> date:
    """Return the third Friday of the given month."""
    first = date(year, month, 1)
    # Find the first Friday
    days_to_friday = (4 - first.weekday()) % 7
    first_friday = first + timedelta(days=days_to_friday)
    return first_friday + timedelta(weeks=2)
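Sanity check: March 2024 opens on a Friday, so days_to_friday is 0, the first Friday is March 1, and _third_friday(2024, 3) returns March 15, 2024, the same standard monthly expiry the hash tests below reuse.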
def market_is_open_now() -> bool:
    """Best-effort check: is the US market currently open (9:30–16:00 ET)?"""
    from datetime import datetime
    import zoneinfo
    now_et = datetime.now(tz=zoneinfo.ZoneInfo("America/New_York"))
    if not is_trading_day(now_et.date()):
        return False
    open_time = now_et.replace(hour=9, minute=30, second=0, microsecond=0)
    close_time = now_et.replace(hour=16, minute=0, second=0, microsecond=0)
    return open_time <= now_et <= close_time


def within_dte_window_for_0dte() -> bool:
    """True if it's a trading day and between 9:30 AM and 2:00 PM ET."""
    from datetime import datetime
    import zoneinfo
    now_et = datetime.now(tz=zoneinfo.ZoneInfo("America/New_York"))
    if not is_trading_day(now_et.date()):
        return False
    open_time = now_et.replace(hour=9, minute=30, second=0, microsecond=0)
    cutoff = now_et.replace(hour=14, minute=0, second=0, microsecond=0)
    return open_time <= now_et <= cutoff
10
backend/migrations/init_db.py
Normal file
@@ -0,0 +1,10 @@
"""Run this once to initialize the SQLite database schema."""
import os
import sys

sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

from app.database import init_db  # noqa: E402 — the path tweak above must run first

if __name__ == "__main__":
    init_db()
    print("Database initialized.")
16
backend/requirements.txt
Normal file
@@ -0,0 +1,16 @@
fastapi==0.115.6
uvicorn[standard]==0.34.0
sqlalchemy==2.0.36
yfinance==0.2.50
pandas==2.2.3
numpy==2.2.1
apscheduler==3.10.4
httpx==0.28.1
PyJWT==2.10.1
cryptography==44.0.0
python-dotenv==1.0.1
pydantic==2.10.4
pydantic-settings==2.7.0
pytest==8.3.4
pytest-asyncio==0.25.0
0
backend/tests/__init__.py
Normal file
4
backend/tests/conftest.py
Normal file
@@ -0,0 +1,4 @@
"""Pytest configuration — registers the pytest-asyncio plugin."""
import pytest  # noqa: F401

pytest_plugins = ["pytest_asyncio"]
220
backend/tests/test_api.py
Normal file
@@ -0,0 +1,220 @@
"""
Integration tests for the FastAPI API layer.
Uses TestClient with an in-memory SQLite DB — no live market data.
"""

from datetime import date, timedelta
from unittest.mock import patch

import pytest
from fastapi.testclient import TestClient
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool

from app.database import Base, get_db
from app.main import app

# ─── Test DB setup ─────────────────────────────────────────────────────────────

TEST_DB_URL = "sqlite://"  # in-memory

# StaticPool keeps a single shared connection, so the schema created by
# create_all() stays visible to every session (including TestClient's worker thread).
test_engine = create_engine(
    TEST_DB_URL,
    connect_args={"check_same_thread": False},
    poolclass=StaticPool,
)
TestSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=test_engine)


def override_get_db():
    db = TestSessionLocal()
    try:
        yield db
    finally:
        db.close()


@pytest.fixture(autouse=True)
def setup_db():
    Base.metadata.create_all(bind=test_engine)
    yield
    Base.metadata.drop_all(bind=test_engine)


app.dependency_overrides[get_db] = override_get_db

# Disable scheduler during tests
with patch("app.main.start_scheduler"), patch("app.main.stop_scheduler"):
    client = TestClient(app, raise_server_exceptions=True)

FAKE_TOKEN = "abc123device0000000000000000000000000000000000000000000000000000"
# ─── Device registration ───────────────────────────────────────────────────────

def test_register_device():
    resp = client.post("/api/v1/devices/register", json={"apns_token": FAKE_TOKEN, "device_name": "Test iPhone"})
    assert resp.status_code == 200
    data = resp.json()
    assert data["apns_token"] == FAKE_TOKEN
    assert "id" in data


def test_register_device_idempotent():
    client.post("/api/v1/devices/register", json={"apns_token": FAKE_TOKEN})
    resp = client.post("/api/v1/devices/register", json={"apns_token": FAKE_TOKEN})
    assert resp.status_code == 200


# ─── Portfolio ─────────────────────────────────────────────────────────────────

@pytest.fixture
def registered_device():
    client.post("/api/v1/devices/register", json={"apns_token": FAKE_TOKEN})
    return FAKE_TOKEN
def test_add_portfolio(registered_device):
    resp = client.post(
        "/api/v1/portfolio",
        json=[{"ticker": "AAPL", "shares": 100}, {"ticker": "MSFT", "shares": 200}],
        headers={"X-Device-Token": registered_device},
    )
    assert resp.status_code == 200
    tickers = [p["ticker"] for p in resp.json()]
    assert "AAPL" in tickers
    assert "MSFT" in tickers


def test_get_portfolio_empty(registered_device):
    resp = client.get("/api/v1/portfolio", headers={"X-Device-Token": registered_device})
    assert resp.status_code == 200
    assert resp.json() == []


def test_get_portfolio_after_add(registered_device):
    client.post(
        "/api/v1/portfolio",
        json=[{"ticker": "NVDA", "shares": 50}],
        headers={"X-Device-Token": registered_device},
    )
    resp = client.get("/api/v1/portfolio", headers={"X-Device-Token": registered_device})
    assert resp.status_code == 200
    assert resp.json()[0]["ticker"] == "NVDA"


def test_delete_ticker(registered_device):
    client.post(
        "/api/v1/portfolio",
        json=[{"ticker": "AMD", "shares": 100}],
        headers={"X-Device-Token": registered_device},
    )
    resp = client.delete("/api/v1/portfolio/AMD", headers={"X-Device-Token": registered_device})
    assert resp.status_code == 204
    remaining = client.get("/api/v1/portfolio", headers={"X-Device-Token": registered_device}).json()
    assert all(p["ticker"] != "AMD" for p in remaining)


def test_portfolio_unregistered_device():
    resp = client.get("/api/v1/portfolio", headers={"X-Device-Token": "nonexistent_token"})
    assert resp.status_code == 404
# ─── Option Positions ──────────────────────────────────────────────────────────

def test_log_position(registered_device):
    expiry = str(date.today() + timedelta(days=14))
    resp = client.post(
        "/api/v1/positions",
        json={
            "ticker": "AAPL",
            "strategy": "covered_call",
            "strike": 195.0,
            "expiration": expiry,
            "premium_received": 2.50,
            "contracts": 1,
        },
        headers={"X-Device-Token": registered_device},
    )
    assert resp.status_code == 201
    data = resp.json()
    assert data["ticker"] == "AAPL"
    assert data["status"] == "open"
    assert data["id"] is not None


def test_close_position(registered_device):
    expiry = str(date.today() + timedelta(days=14))
    pos = client.post(
        "/api/v1/positions",
        json={
            "ticker": "AAPL",
            "strategy": "covered_call",
            "strike": 195.0,
            "expiration": expiry,
            "premium_received": 2.50,
        },
        headers={"X-Device-Token": registered_device},
    ).json()

    resp = client.patch(
        f"/api/v1/positions/{pos['id']}",
        json={"status": "closed", "close_reason": "bought_back"},
        headers={"X-Device-Token": registered_device},
    )
    assert resp.status_code == 200
    assert resp.json()["status"] == "closed"


def test_get_open_positions_filter(registered_device):
    expiry = str(date.today() + timedelta(days=14))
    client.post(
        "/api/v1/positions",
        json={"ticker": "TSLA", "strategy": "cash_secured_put", "strike": 200.0, "expiration": expiry, "premium_received": 3.0},
        headers={"X-Device-Token": registered_device},
    )
    resp = client.get("/api/v1/positions?status=open", headers={"X-Device-Token": registered_device})
    assert resp.status_code == 200
    assert len(resp.json()) >= 1
    assert all(p["status"] == "open" for p in resp.json())
# ─── Alerts ────────────────────────────────────────────────────────────────────

def test_get_alerts_empty(registered_device):
    resp = client.get("/api/v1/alerts", headers={"X-Device-Token": registered_device})
    assert resp.status_code == 200
    assert resp.json() == []


def test_acknowledge_alert(registered_device):
    from app.models.db_models import Alert
    from datetime import datetime

    db = TestSessionLocal()
    device_id = client.post("/api/v1/devices/register", json={"apns_token": FAKE_TOKEN}).json()["id"]
    alert = Alert(
        device_id=device_id,
        ticker="AAPL",
        alert_type="close_early",
        message="Test alert",
        sent_at=datetime.utcnow(),
        acknowledged=False,
    )
    db.add(alert)
    db.commit()
    db.refresh(alert)
    alert_id = alert.id
    db.close()

    resp = client.patch(
        f"/api/v1/alerts/{alert_id}/acknowledge",
        headers={"X-Device-Token": FAKE_TOKEN},
    )
    assert resp.status_code == 200
    assert resp.json()["acknowledged"] is True
# ─── Health ────────────────────────────────────────────────────────────────────

def test_health():
    # Simulate a monitor that has not run yet
    with patch("app.services.position_monitor.last_run", None):
        resp = client.get("/api/v1/health")
    assert resp.status_code == 200
    assert resp.json()["status"] == "ok"
267
backend/tests/test_signal_engine.py
Normal file
@@ -0,0 +1,267 @@
"""Unit tests for signal_engine.py — all run without live market data."""

import math
from datetime import date

import numpy as np
import pandas as pd

from app.services.signal_engine import (
    compute_iv_rank,
    compute_smas,
    compute_swing_levels,
    compute_trend,
    compute_signal_strength,
    compute_signal_hash,
)
# ─── Helpers ──────────────────────────────────────────────────────────────────

def _make_price_df(closes: list[float]) -> pd.DataFrame:
    """Create a minimal OHLCV DataFrame from a list of close prices."""
    n = len(closes)
    dates = pd.date_range(end="2024-01-31", periods=n, freq="B")
    arr = np.array(closes)
    df = pd.DataFrame(
        {
            "Open": arr * 0.999,
            "High": arr * 1.005,
            "Low": arr * 0.995,
            "Close": arr,
            "Volume": np.ones(n) * 1_000_000,
        },
        index=dates,
    )
    return df


def _trending_prices(start: float, end: float, n: int = 252) -> list[float]:
    """Linearly spaced close prices from start to end."""
    return list(np.linspace(start, end, n))
# ─── IV Rank ──────────────────────────────────────────────────────────────────

def test_iv_rank_high_returns_above_50():
    """When current volatility is near the 52-week high, IVR should be > 50."""
    # Create prices that spike at the end (high recent volatility)
    stable = [100.0] * 220
    volatile = list(np.linspace(100, 130, 16)) + list(np.linspace(130, 85, 16))
    closes = stable + volatile
    df = _make_price_df(closes)
    ivr = compute_iv_rank(df)
    assert ivr > 50, f"Expected ivr > 50, got {ivr:.1f}"


def test_iv_rank_low_returns_below_50():
    """A flat price series is a degenerate HV range, so IVR falls back to at most the neutral 50."""
    closes = [100.0] * 252
    df = _make_price_df(closes)
    ivr = compute_iv_rank(df)
    assert ivr <= 50, f"Expected ivr <= 50, got {ivr:.1f}"


def test_iv_rank_insufficient_data_returns_neutral():
    """Too few data points should return the 50.0 neutral fallback."""
    df = _make_price_df([100.0] * 20)
    ivr = compute_iv_rank(df)
    assert ivr == 50.0


def test_iv_rank_clamped_to_0_100():
    closes = [100.0] * 252
    df = _make_price_df(closes)
    ivr = compute_iv_rank(df)
    assert 0.0 <= ivr <= 100.0
# ─── SMAs ─────────────────────────────────────────────────────────────────────

def test_sma_50_correct_value():
    closes = list(range(1, 253))  # 1..252
    df = _make_price_df(closes)
    smas = compute_smas(df)
    expected_sma50 = sum(range(203, 253)) / 50  # mean of the last 50 values: 203-252
    assert abs(smas["sma_50"] - expected_sma50) < 0.01


def test_sma_200_nan_when_insufficient_data():
    df = _make_price_df([100.0] * 100)
    smas = compute_smas(df)
    assert math.isnan(smas["sma_200"])


def test_sma_50_nan_when_insufficient_data():
    df = _make_price_df([100.0] * 30)
    smas = compute_smas(df)
    assert math.isnan(smas["sma_50"])
# ─── Trend ────────────────────────────────────────────────────────────────────

def test_trend_uptrend():
    assert compute_trend(110.0, 105.0, 100.0) == "uptrend"


def test_trend_downtrend():
    assert compute_trend(90.0, 95.0, 100.0) == "downtrend"


def test_trend_sideways_mixed():
    # price > sma50 but sma50 < sma200
    assert compute_trend(106.0, 105.0, 107.0) == "sideways"


def test_trend_sideways_with_nan_smas():
    assert compute_trend(100.0, float("nan"), float("nan")) == "sideways"
# ─── Support / Resistance ─────────────────────────────────────────────────────

def test_swing_levels_detects_swing_high():
    """A clear peak in the middle of the window should be detected as resistance."""
    closes = [100.0] * 25
    highs = [100.0] * 25
    lows = [98.0] * 25
    # Create a swing high at index 12
    highs[12] = 108.0
    closes[12] = 107.0

    dates = pd.date_range(end="2024-01-31", periods=25, freq="B")
    df = pd.DataFrame({"Open": closes, "High": highs, "Low": lows, "Close": closes, "Volume": [1e6] * 25}, index=dates)

    result = compute_swing_levels(df, lookback=20)
    assert result["nearest_resistance"] is not None
    assert abs(result["nearest_resistance"] - 108.0) < 1.0


def test_swing_levels_detects_swing_low():
    """A clear trough should be detected as support."""
    closes = [100.0] * 25
    highs = [102.0] * 25
    lows = [98.0] * 25
    lows[12] = 90.0
    closes[12] = 91.0

    dates = pd.date_range(end="2024-01-31", periods=25, freq="B")
    df = pd.DataFrame({"Open": closes, "High": highs, "Low": lows, "Close": closes, "Volume": [1e6] * 25}, index=dates)

    result = compute_swing_levels(df, lookback=20)
    assert result["nearest_support"] is not None
    assert result["nearest_support"] < 100.0


def test_swing_levels_flat_no_crash():
    """Flat prices should return None for support/resistance without crashing."""
    df = _make_price_df([100.0] * 30)
    result = compute_swing_levels(df, lookback=20)
    # No assertion on values — just shouldn't raise
    assert "nearest_support" in result
    assert "nearest_resistance" in result
# ─── Signal Strength ──────────────────────────────────────────────────────────

def test_signal_strength_strong_covered_call():
    strength = compute_signal_strength(
        iv_rank=70.0,
        trend="uptrend",
        strategy="covered_call",
        nearest_support=95.0,
        nearest_resistance=110.0,
        recommended_strike=107.0,
        earnings_warning=False,
    )
    assert strength == "strong"


def test_signal_strength_earnings_warning_downgrades():
    strength_no_ew = compute_signal_strength(
        iv_rank=70.0, trend="uptrend", strategy="covered_call",
        nearest_support=95.0, nearest_resistance=110.0,
        recommended_strike=107.0, earnings_warning=False,
    )
    strength_ew = compute_signal_strength(
        iv_rank=70.0, trend="uptrend", strategy="covered_call",
        nearest_support=95.0, nearest_resistance=110.0,
        recommended_strike=107.0, earnings_warning=True,
    )
    assert strength_ew in ("weak", "moderate")
    assert strength_no_ew == "strong"


def test_signal_strength_low_ivr_gives_weak():
    strength = compute_signal_strength(
        iv_rank=10.0,
        trend="downtrend",
        strategy="covered_call",
        nearest_support=None,
        nearest_resistance=None,
        recommended_strike=None,
        earnings_warning=True,
    )
    assert strength == "weak"


def test_signal_strength_csp_uptrend_bonus():
    strength = compute_signal_strength(
        iv_rank=55.0,
        trend="uptrend",
        strategy="cash_secured_put",
        nearest_support=95.0,
        nearest_resistance=110.0,
        recommended_strike=97.0,
        earnings_warning=False,
    )
    assert strength in ("strong", "moderate")
# ─── Signal Hash ──────────────────────────────────────────────────────────────

def test_signal_hash_stability():
    """Same inputs must always produce the same hash."""
    args = dict(
        iv_rank=45.5,
        sma_50=150.0,
        sma_200=145.0,
        nearest_support=148.0,
        nearest_resistance=155.0,
        recommended_strike=153.0,
        recommended_expiration=date(2024, 3, 15),
        earnings_warning=False,
    )
    h1 = compute_signal_hash(**args)
    h2 = compute_signal_hash(**args)
    assert h1 == h2
    assert len(h1) == 16


def test_signal_hash_changes_on_strike_change():
    base = dict(
        iv_rank=45.5, sma_50=150.0, sma_200=145.0,
        nearest_support=148.0, nearest_resistance=155.0,
        recommended_strike=153.0, recommended_expiration=date(2024, 3, 15),
        earnings_warning=False,
    )
    h1 = compute_signal_hash(**base)
    base["recommended_strike"] = 155.0
    h2 = compute_signal_hash(**base)
    assert h1 != h2


def test_signal_hash_stable_on_noise():
    """A tiny IV rank change (< 0.1) should produce the same hash thanks to rounding."""
    base = dict(
        iv_rank=45.500,
        sma_50=150.01, sma_200=145.009,
        nearest_support=148.001, nearest_resistance=155.002,
        recommended_strike=153.0, recommended_expiration=date(2024, 3, 15),
        earnings_warning=False,
    )
    tweaked = dict(base)
    tweaked["iv_rank"] = 45.503  # < 0.1 change → rounds to the same 45.5
    h1 = compute_signal_hash(**base)
    h2 = compute_signal_hash(**tweaked)
    assert h1 == h2