Initial implementation of Options Sidekick
Full-stack iOS options trading assistant: - Python FastAPI backend with SQLite, APScheduler (15-min position monitor), APNs push notifications, and yfinance market data integration - Signal engine: IV Rank (rolling HV proxy), SMA-50/200, swing-based support/resistance, earnings detection, signal strength scoring and noise-resistant SHA hash for change detection - Recommendation engine: covered call and cash-secured put strike/expiry selection across 0DTE, 1DTE, weekly, and monthly horizons - REST API: /devices, /portfolio, /recommendations, /positions, /signals, /alerts - iOS SwiftUI app (iOS 17+): dashboard, recommendations, trades, portfolio, and alerts tabs with push notification deep-linking - Unit + integration tests for signal engine and API layer Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
0
backend/tests/__init__.py
Normal file
0
backend/tests/__init__.py
Normal file
4
backend/tests/conftest.py
Normal file
4
backend/tests/conftest.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""Pytest configuration — sets asyncio mode."""
|
||||
import pytest
|
||||
|
||||
pytest_plugins = ["pytest_asyncio"]
|
||||
220
backend/tests/test_api.py
Normal file
220
backend/tests/test_api.py
Normal file
@@ -0,0 +1,220 @@
|
||||
"""
|
||||
Integration tests for the FastAPI API layer.
|
||||
Uses TestClient with an in-memory SQLite DB — no live market data.
|
||||
"""
|
||||
|
||||
from datetime import date, timedelta
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from app.database import Base, get_db
|
||||
from app.main import app
|
||||
|
||||
# ─── Test DB setup ─────────────────────────────────────────────────────────────

from sqlalchemy.pool import StaticPool

TEST_DB_URL = "sqlite://"  # in-memory

# StaticPool pins every session to ONE shared connection. Without it, each new
# connection to "sqlite://" opens its OWN empty in-memory database, so tables
# created by the setup fixture could be invisible to sessions opened on other
# connections/threads (TestClient may serve requests from a different thread).
test_engine = create_engine(
    TEST_DB_URL,
    connect_args={"check_same_thread": False},
    poolclass=StaticPool,
)
TestSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=test_engine)
|
||||
|
||||
|
||||
def override_get_db():
    """FastAPI dependency override: yield a session bound to the test engine.

    Mirrors the production ``get_db`` generator, but uses the in-memory test
    database. The session is always closed, even if the request handler raises.
    """
    db = TestSessionLocal()
    try:
        yield db
    finally:
        db.close()
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def setup_db():
    """Create all tables before each test and drop them afterwards.

    ``autouse=True`` gives every test a fresh, empty schema so tests cannot
    leak rows into one another.
    """
    Base.metadata.create_all(bind=test_engine)
    yield
    Base.metadata.drop_all(bind=test_engine)
|
||||
|
||||
|
||||
# Route all request-scoped DB sessions to the in-memory test database.
app.dependency_overrides[get_db] = override_get_db

# Disable scheduler during tests
# NOTE(review): these patches are only active while TestClient is being
# constructed; if the app's startup/lifespan runs later (e.g. on first
# request), the real start_scheduler could still fire — confirm against
# app.main's lifespan wiring.
with patch("app.main.start_scheduler"), patch("app.main.stop_scheduler"):
    client = TestClient(app, raise_server_exceptions=True)

# 64-character stand-in for a real APNs device token.
FAKE_TOKEN = "abc123device0000000000000000000000000000000000000000000000000000"
|
||||
|
||||
|
||||
# ─── Device registration ───────────────────────────────────────────────────────

def test_register_device():
    """Registering a new device echoes the token and assigns an id."""
    payload = {"apns_token": FAKE_TOKEN, "device_name": "Test iPhone"}
    response = client.post("/api/v1/devices/register", json=payload)
    assert response.status_code == 200
    body = response.json()
    assert body["apns_token"] == FAKE_TOKEN
    assert "id" in body


def test_register_device_idempotent():
    """Registering the same token twice succeeds rather than erroring."""
    for _ in range(2):
        resp = client.post("/api/v1/devices/register", json={"apns_token": FAKE_TOKEN})
    assert resp.status_code == 200
|
||||
|
||||
|
||||
# ─── Portfolio ─────────────────────────────────────────────────────────────────

@pytest.fixture
def registered_device():
    """Register the fake device and return its APNs token for auth headers."""
    client.post("/api/v1/devices/register", json={"apns_token": FAKE_TOKEN})
    return FAKE_TOKEN
|
||||
|
||||
|
||||
def test_add_portfolio(registered_device):
    """POSTing multiple holdings returns all of them in the response."""
    headers = {"X-Device-Token": registered_device}
    holdings = [{"ticker": "AAPL", "shares": 100}, {"ticker": "MSFT", "shares": 200}]
    response = client.post("/api/v1/portfolio", json=holdings, headers=headers)
    assert response.status_code == 200
    returned = {entry["ticker"] for entry in response.json()}
    assert {"AAPL", "MSFT"} <= returned


def test_get_portfolio_empty(registered_device):
    """A freshly registered device has an empty portfolio."""
    response = client.get("/api/v1/portfolio", headers={"X-Device-Token": registered_device})
    assert response.status_code == 200
    assert response.json() == []


def test_get_portfolio_after_add(registered_device):
    """Holdings added via POST are visible on a subsequent GET."""
    headers = {"X-Device-Token": registered_device}
    client.post("/api/v1/portfolio", json=[{"ticker": "NVDA", "shares": 50}], headers=headers)
    response = client.get("/api/v1/portfolio", headers=headers)
    assert response.status_code == 200
    assert response.json()[0]["ticker"] == "NVDA"


def test_delete_ticker(registered_device):
    """DELETE removes a holding; it no longer appears in the portfolio."""
    headers = {"X-Device-Token": registered_device}
    client.post("/api/v1/portfolio", json=[{"ticker": "AMD", "shares": 100}], headers=headers)
    response = client.delete("/api/v1/portfolio/AMD", headers=headers)
    assert response.status_code == 204
    leftover = client.get("/api/v1/portfolio", headers=headers).json()
    assert not any(entry["ticker"] == "AMD" for entry in leftover)


def test_portfolio_unregistered_device():
    """Requests carrying an unknown device token are rejected with 404."""
    response = client.get("/api/v1/portfolio", headers={"X-Device-Token": "nonexistent_token"})
    assert response.status_code == 404
|
||||
|
||||
|
||||
# ─── Option Positions ──────────────────────────────────────────────────────────

def test_log_position(registered_device):
    """Logging a new position returns 201 with status 'open' and an id."""
    headers = {"X-Device-Token": registered_device}
    payload = {
        "ticker": "AAPL",
        "strategy": "covered_call",
        "strike": 195.0,
        "expiration": str(date.today() + timedelta(days=14)),
        "premium_received": 2.50,
        "contracts": 1,
    }
    response = client.post("/api/v1/positions", json=payload, headers=headers)
    assert response.status_code == 201
    body = response.json()
    assert body["ticker"] == "AAPL"
    assert body["status"] == "open"
    assert body["id"] is not None


def test_close_position(registered_device):
    """PATCHing an open position to closed updates its status."""
    headers = {"X-Device-Token": registered_device}
    created = client.post(
        "/api/v1/positions",
        json={
            "ticker": "AAPL",
            "strategy": "covered_call",
            "strike": 195.0,
            "expiration": str(date.today() + timedelta(days=14)),
            "premium_received": 2.50,
        },
        headers=headers,
    ).json()

    response = client.patch(
        f"/api/v1/positions/{created['id']}",
        json={"status": "closed", "close_reason": "bought_back"},
        headers=headers,
    )
    assert response.status_code == 200
    assert response.json()["status"] == "closed"


def test_get_open_positions_filter(registered_device):
    """GET /positions?status=open returns only open positions."""
    headers = {"X-Device-Token": registered_device}
    client.post(
        "/api/v1/positions",
        json={
            "ticker": "TSLA",
            "strategy": "cash_secured_put",
            "strike": 200.0,
            "expiration": str(date.today() + timedelta(days=14)),
            "premium_received": 3.0,
        },
        headers=headers,
    )
    response = client.get("/api/v1/positions?status=open", headers=headers)
    assert response.status_code == 200
    positions = response.json()
    assert len(positions) >= 1
    assert all(p["status"] == "open" for p in positions)
|
||||
|
||||
|
||||
# ─── Alerts ────────────────────────────────────────────────────────────────────

def test_get_alerts_empty(registered_device):
    """A device with no alerts gets an empty list back."""
    response = client.get("/api/v1/alerts", headers={"X-Device-Token": registered_device})
    assert response.status_code == 200
    assert response.json() == []
|
||||
|
||||
|
||||
def test_acknowledge_alert(registered_device):
    """Seed an alert directly in the DB, then acknowledge it via the API."""
    from app.models.db_models import Alert
    from datetime import datetime

    # Insert through a raw session: there is no public endpoint for creating
    # alerts — they are normally produced by the background position monitor.
    db = TestSessionLocal()
    # Re-register to obtain the device id (registration is idempotent).
    device_id = client.post("/api/v1/devices/register", json={"apns_token": FAKE_TOKEN}).json()["id"]
    alert = Alert(
        device_id=device_id,
        ticker="AAPL",
        alert_type="close_early",
        message="Test alert",
        # NOTE(review): utcnow() is naive (and deprecated since 3.12) —
        # confirm the column expects naive UTC timestamps.
        sent_at=datetime.utcnow(),
        acknowledged=False,
    )
    db.add(alert)
    db.commit()
    db.refresh(alert)  # populate the autogenerated primary key
    alert_id = alert.id
    db.close()

    resp = client.patch(
        f"/api/v1/alerts/{alert_id}/acknowledge",
        headers={"X-Device-Token": FAKE_TOKEN},
    )
    assert resp.status_code == 200
    assert resp.json()["acknowledged"] is True
|
||||
|
||||
|
||||
# ─── Health ────────────────────────────────────────────────────────────────────

def test_health():
    """/health responds 200 with status 'ok'."""
    # NOTE(review): patch("app.routers") replaces the whole routers module
    # with a MagicMock for the duration of the request — unclear what this
    # protects against; confirm it is still needed. The last_run patch keeps
    # the health payload independent of scheduler state.
    with patch("app.routers"), patch("app.services.position_monitor.last_run", None):
        resp = client.get("/api/v1/health")
    assert resp.status_code == 200
    assert resp.json()["status"] == "ok"
|
||||
267
backend/tests/test_signal_engine.py
Normal file
267
backend/tests/test_signal_engine.py
Normal file
@@ -0,0 +1,267 @@
|
||||
"""Unit tests for signal_engine.py — all run without live market data."""
|
||||
|
||||
import math
|
||||
from datetime import date, timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import pytest
|
||||
|
||||
from app.services.signal_engine import (
|
||||
compute_iv_rank,
|
||||
compute_smas,
|
||||
compute_swing_levels,
|
||||
compute_trend,
|
||||
compute_signal_strength,
|
||||
compute_signal_hash,
|
||||
)
|
||||
|
||||
|
||||
# ─── Helpers ──────────────────────────────────────────────────────────────────
|
||||
|
||||
def _make_price_df(closes: list[float]) -> pd.DataFrame:
|
||||
"""Create a minimal OHLCV DataFrame from a list of close prices."""
|
||||
n = len(closes)
|
||||
dates = pd.date_range(end="2024-01-31", periods=n, freq="B")
|
||||
arr = np.array(closes)
|
||||
df = pd.DataFrame(
|
||||
{
|
||||
"Open": arr * 0.999,
|
||||
"High": arr * 1.005,
|
||||
"Low": arr * 0.995,
|
||||
"Close": arr,
|
||||
"Volume": np.ones(n) * 1_000_000,
|
||||
},
|
||||
index=dates,
|
||||
)
|
||||
return df
|
||||
|
||||
|
||||
def _trending_prices(start: float, end: float, n: int = 252) -> list[float]:
|
||||
return list(np.linspace(start, end, n))
|
||||
|
||||
|
||||
# ─── IV Rank ──────────────────────────────────────────────────────────────────

def test_iv_rank_high_returns_above_50():
    """When current volatility is near the 52-week high, IVR should be > 50."""
    # Long calm stretch, then a sharp spike-and-crash at the end so recent
    # volatility dominates the lookback window.
    calm = [100.0] * 220
    spike = list(np.linspace(100, 130, 16)) + list(np.linspace(130, 85, 16))
    ivr = compute_iv_rank(_make_price_df(calm + spike))
    assert ivr > 50, f"Expected ivr > 50, got {ivr:.1f}"


def test_iv_rank_low_returns_below_50():
    """Flat price series → low historical volatility → IVR near 0."""
    ivr = compute_iv_rank(_make_price_df([100.0] * 252))
    assert ivr <= 50, f"Expected ivr <= 50, got {ivr:.1f}"


def test_iv_rank_insufficient_data_returns_neutral():
    """Too few data points should return the 50.0 neutral fallback."""
    assert compute_iv_rank(_make_price_df([100.0] * 20)) == 50.0


def test_iv_rank_clamped_to_0_100():
    """IVR is always inside the [0, 100] range."""
    ivr = compute_iv_rank(_make_price_df([100.0] * 252))
    assert 0.0 <= ivr <= 100.0
|
||||
|
||||
|
||||
# ─── SMAs ─────────────────────────────────────────────────────────────────────

def test_sma_50_correct_value():
    """SMA-50 over closes 1..252 equals the mean of the last 50 values."""
    smas = compute_smas(_make_price_df(list(range(1, 253))))
    expected_sma50 = sum(range(203, 253)) / 50  # mean of 203..252
    assert abs(smas["sma_50"] - expected_sma50) < 0.01


def test_sma_200_nan_when_insufficient_data():
    """Fewer than 200 rows → SMA-200 is NaN."""
    smas = compute_smas(_make_price_df([100.0] * 100))
    assert math.isnan(smas["sma_200"])


def test_sma_50_nan_when_insufficient_data():
    """Fewer than 50 rows → SMA-50 is NaN."""
    smas = compute_smas(_make_price_df([100.0] * 30))
    assert math.isnan(smas["sma_50"])
|
||||
|
||||
|
||||
# ─── Trend ────────────────────────────────────────────────────────────────────

def test_trend_uptrend():
    """price > sma50 > sma200 → uptrend."""
    assert compute_trend(110.0, 105.0, 100.0) == "uptrend"


def test_trend_downtrend():
    """price < sma50 < sma200 → downtrend."""
    assert compute_trend(90.0, 95.0, 100.0) == "downtrend"


def test_trend_sideways_mixed():
    """Mixed alignment (price > sma50 but sma50 < sma200) → sideways."""
    assert compute_trend(106.0, 105.0, 107.0) == "sideways"


def test_trend_sideways_with_nan_smas():
    """NaN SMAs (insufficient history) fall back to sideways."""
    assert compute_trend(100.0, float("nan"), float("nan")) == "sideways"
|
||||
|
||||
|
||||
# ─── Support / Resistance ─────────────────────────────────────────────────────

def test_swing_levels_detects_swing_high():
    """A clear peak in the middle of the window should be detected as resistance."""
    closes, highs, lows = [100.0] * 25, [100.0] * 25, [98.0] * 25
    # Plant a single swing high at index 12.
    highs[12], closes[12] = 108.0, 107.0

    idx = pd.date_range(end="2024-01-31", periods=25, freq="B")
    frame = pd.DataFrame(
        {"Open": closes, "High": highs, "Low": lows, "Close": closes, "Volume": [1e6] * 25},
        index=idx,
    )

    levels = compute_swing_levels(frame, lookback=20)
    assert levels["nearest_resistance"] is not None
    assert abs(levels["nearest_resistance"] - 108.0) < 1.0


def test_swing_levels_detects_swing_low():
    """A clear trough should be detected as support."""
    closes, highs, lows = [100.0] * 25, [102.0] * 25, [98.0] * 25
    # Plant a single swing low at index 12.
    lows[12], closes[12] = 90.0, 91.0

    idx = pd.date_range(end="2024-01-31", periods=25, freq="B")
    frame = pd.DataFrame(
        {"Open": closes, "High": highs, "Low": lows, "Close": closes, "Volume": [1e6] * 25},
        index=idx,
    )

    levels = compute_swing_levels(frame, lookback=20)
    assert levels["nearest_support"] is not None
    assert levels["nearest_support"] < 100.0


def test_swing_levels_flat_no_crash():
    """Flat price should return None for support/resistance without crashing."""
    levels = compute_swing_levels(_make_price_df([100.0] * 30), lookback=20)
    # Only the result shape is asserted — a flat series may yield no levels.
    assert "nearest_support" in levels
    assert "nearest_resistance" in levels
|
||||
|
||||
|
||||
# ─── Signal Strength ──────────────────────────────────────────────────────────

def test_signal_strength_strong_covered_call():
    """High IVR + uptrend + strike under resistance → strong covered call."""
    result = compute_signal_strength(
        iv_rank=70.0,
        trend="uptrend",
        strategy="covered_call",
        nearest_support=95.0,
        nearest_resistance=110.0,
        recommended_strike=107.0,
        earnings_warning=False,
    )
    assert result == "strong"


def test_signal_strength_earnings_warning_downgrades():
    """An earnings warning must downgrade an otherwise strong signal."""
    common = dict(
        iv_rank=70.0,
        trend="uptrend",
        strategy="covered_call",
        nearest_support=95.0,
        nearest_resistance=110.0,
        recommended_strike=107.0,
    )
    without_warning = compute_signal_strength(earnings_warning=False, **common)
    with_warning = compute_signal_strength(earnings_warning=True, **common)
    assert with_warning in ("weak", "moderate")
    assert without_warning == "strong"


def test_signal_strength_low_ivr_gives_weak():
    """Low IVR, downtrend, no levels, earnings risk → weak."""
    result = compute_signal_strength(
        iv_rank=10.0,
        trend="downtrend",
        strategy="covered_call",
        nearest_support=None,
        nearest_resistance=None,
        recommended_strike=None,
        earnings_warning=True,
    )
    assert result == "weak"


def test_signal_strength_csp_uptrend_bonus():
    """CSP in an uptrend with decent IVR rates at least moderate."""
    result = compute_signal_strength(
        iv_rank=55.0,
        trend="uptrend",
        strategy="cash_secured_put",
        nearest_support=95.0,
        nearest_resistance=110.0,
        recommended_strike=97.0,
        earnings_warning=False,
    )
    assert result in ("strong", "moderate")
|
||||
|
||||
|
||||
# ─── Signal Hash ──────────────────────────────────────────────────────────────

def test_signal_hash_stability():
    """Same inputs must always produce the same hash."""
    kwargs = dict(
        iv_rank=45.5,
        sma_50=150.0,
        sma_200=145.0,
        nearest_support=148.0,
        nearest_resistance=155.0,
        recommended_strike=153.0,
        recommended_expiration=date(2024, 3, 15),
        earnings_warning=False,
    )
    first = compute_signal_hash(**kwargs)
    second = compute_signal_hash(**kwargs)
    assert first == second
    assert len(first) == 16


def test_signal_hash_changes_on_strike_change():
    """Moving the recommended strike must change the hash."""
    kwargs = dict(
        iv_rank=45.5,
        sma_50=150.0,
        sma_200=145.0,
        nearest_support=148.0,
        nearest_resistance=155.0,
        recommended_strike=153.0,
        recommended_expiration=date(2024, 3, 15),
        earnings_warning=False,
    )
    before = compute_signal_hash(**kwargs)
    after = compute_signal_hash(**{**kwargs, "recommended_strike": 155.0})
    assert before != after


def test_signal_hash_stable_on_noise():
    """Tiny IV rank change (< 0.1) should produce same hash due to rounding."""
    kwargs = dict(
        iv_rank=45.500,
        sma_50=150.01,
        sma_200=145.009,
        nearest_support=148.001,
        nearest_resistance=155.002,
        recommended_strike=153.0,
        recommended_expiration=date(2024, 3, 15),
        earnings_warning=False,
    )
    noisy = {**kwargs, "iv_rank": 45.503}  # < 0.1 change → rounds to same 45.5
    assert compute_signal_hash(**kwargs) == compute_signal_hash(**noisy)
|
||||
Reference in New Issue
Block a user