- Created chatwoot-agent-bot/ with Node.js webhook server
- Bot detects intent (greeting, billing, technical, features, account)
- Auto-responds from FAQ knowledge base or escalates to a human
- FAQ-KB.md: living knowledge base that grows with customer questions
- CHATWOOT-SETUP.md: complete deployment and configuration guide
- Supports Telegram notifications on escalation
- Bot runs on port 3001, ready for Chatwoot webhook integration
185 lines
5.1 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
Rank Tracker - Monitor SEO keyword positions
|
|
8 keywords to track for HOA Ledger IQ
|
|
"""
|
|
import json
import re
import urllib.parse
import urllib.request
from datetime import datetime
from pathlib import Path
|
|
|
|
# The script lives one level below the workspace root, so parent.parent
# resolves to the workspace itself.
WORKSPACE = Path(__file__).parent.parent
# JSON file holding current positions, the full history, and the baseline date.
DATA_FILE = WORKSPACE / "state" / "rank-data.json"
# Directory for daily log files (one file per day, named by date).
LOG_DIR = WORKSPACE / "logs"

# Keywords to track
KEYWORDS = [
    "HOA Software",
    "HOA investments",
    "HOA Reserves",
    "HOA Reserve Study",
    "HOA Funding",
    "HOA Special Assessments",
    "HOA Budget",
    "HOA Reserve Planning"
]

# Site whose ranking is looked for in each keyword's organic results.
DOMAIN = "hoaledgeriq.com"
|
def log(msg):
    """Print *msg* with a timestamp and append it to today's log file.

    Ensures LOG_DIR exists before writing so the append cannot fail with
    FileNotFoundError on a fresh workspace (the original crashed when the
    logs/ directory was missing).
    """
    ts = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    line = f"[{ts}] [RANK] {msg}"
    print(line)
    LOG_DIR.mkdir(parents=True, exist_ok=True)  # fix: logs/ may not exist yet
    log_file = LOG_DIR / f"rank-tracker-{datetime.now().strftime('%Y%m%d')}.log"
    with open(log_file, 'a') as f:
        f.write(line + '\n')
|
|
def load_data():
    """Return the persisted rank data, or a fresh empty structure if none exists."""
    if not DATA_FILE.exists():
        return {"positions": {}, "history": [], "baseline_date": None}
    return json.loads(DATA_FILE.read_text())
|
def save_data(data):
    """Persist *data* to DATA_FILE as pretty-printed JSON."""
    serialized = json.dumps(data, indent=2)
    DATA_FILE.write_text(serialized)
|
def check_rank_serpapi(keyword, api_key=None):
    """Check DOMAIN's organic Google position for *keyword* via SerpAPI.

    Returns the 1-based position, or None when no API key is supplied, the
    request fails, or the domain is absent from the organic results.
    """
    if not api_key:
        log(f"⚠️ No SerpAPI key - skipping rank check for '{keyword}'")
        return None

    params = {
        "q": keyword,
        "location": "United States",
        "hl": "en",
        "gl": "us",
        "api_key": api_key
    }

    try:
        # fix: urlencode escapes all reserved characters; the old manual
        # quote() join left '/', '&' and '=' inside values unescaped, which
        # would corrupt the query string for keywords containing them.
        req_url = "https://serpapi.com/search?" + urllib.parse.urlencode(params)
        with urllib.request.urlopen(req_url, timeout=30) as r:
            data = json.loads(r.read().decode())

        # First organic result whose link contains our domain wins;
        # positions are reported 1-based.
        for i, result in enumerate(data.get('organic_results', [])):
            if DOMAIN in result.get('link', ''):
                return i + 1
        return None  # Not in top results
    except Exception as e:
        log(f"❌ SerpAPI error: {e}")
        return None
|
def check_rank_manual(keyword):
    """Manual check placeholder - logs a Google search URL to visit by hand."""
    log(f"🔍 Manual check required for '{keyword}'")
    log(f" -> Visit: https://www.google.com/search?q={urllib.parse.quote(keyword)}")
    return None
|
def track_all_ranks(api_key=None):
    """Check every keyword's position and persist the results.

    Uses SerpAPI when *api_key* is given, otherwise falls back to the
    manual-check placeholder. Appends a dated snapshot to the history,
    updates the current positions, and sets the baseline date on the
    first ever run. Returns the keyword -> position mapping.
    """
    log("=== Daily Rank Check ===")
    data = load_data()
    today = datetime.now().strftime('%Y-%m-%d')

    current_positions = {}
    for keyword in KEYWORDS:
        position = check_rank_serpapi(keyword, api_key) if api_key else check_rank_manual(keyword)
        current_positions[keyword] = position
        if position:
            log(f" • '{keyword}': Position {position}")
        else:
            log(f" • '{keyword}': Not tracked (need manual check)")

    # Append today's snapshot, then make it the current view.
    data['history'].append({
        "date": today,
        "positions": current_positions
    })
    data['positions'] = current_positions

    if not data['baseline_date']:
        data['baseline_date'] = today

    save_data(data)
    return current_positions
|
def get_rank_report():
    """Build a human-readable summary of the most recent keyword positions."""
    positions = load_data().get('positions', {})

    if not positions:
        return "📊 No rank data yet. Run track_all_ranks() to collect."

    lines = ["📈 *Keyword Rankings*\n"]
    lines.append(f"📆 {datetime.now().strftime('%Y-%m-%d')}\n")

    tracked = 0
    for kw, pos in positions.items():
        if not pos:
            lines.append(f"❓ {kw}: Not in top 100")
            continue
        # Medal tiers: top 3, top 10, everything else.
        if pos <= 3:
            emoji = "🥇"
        elif pos <= 10:
            emoji = "🥈"
        else:
            emoji = "📌"
        lines.append(f"{emoji} {kw}: #{pos}")
        tracked += 1

    lines.append(f"\n*Tracking:* {tracked}/{len(KEYWORDS)} keywords")
    return "\n".join(lines)
|
def detect_big_changes(threshold=5):
    """Return alert strings for keywords that moved >= *threshold* positions.

    Compares the two most recent history snapshots. A lower position number
    is better, so previous - current > 0 means the keyword rose.
    """
    history = load_data().get('history', [])
    if len(history) < 2:
        return []

    current = history[-1]['positions']
    previous = history[-2]['positions']

    alerts = []
    for kw, pos in current.items():
        prev_pos = previous.get(kw)
        # Only compare keywords tracked in both snapshots.
        if not (prev_pos and pos):
            continue
        change = prev_pos - pos
        if abs(change) < threshold:
            continue
        direction = "📈 RISE" if change > 0 else "📉 DROP"
        alerts.append(f"{direction}: '{kw}' #{prev_pos} → #{pos}")

    return alerts
|
if __name__ == "__main__":
    import os
    import sys

    # SerpAPI key is optional; without it the tracker logs manual-check URLs.
    api_key = os.environ.get('SERPAPI_KEY')

    command = sys.argv[1] if len(sys.argv) > 1 else None

    if command is None:
        # No arguments: default to a tracking run.
        track_all_ranks(api_key)
    elif command == 'track':
        track_all_ranks(api_key)
    elif command == 'report':
        print(get_rank_report())
    elif command == 'alerts':
        changes = detect_big_changes()
        if not changes:
            log("✅ No major ranking changes")
        else:
            for alert in changes:
                log(alert)
    # Any other argument is silently ignored (matches original behavior).