+ + Market Pulse + +
+ Live global market snapshot · Auto-refreshes every 60s ++ Global Indices
++ Commodities
++ Currency Pairs
+Run analysis first, then AI commentary will appear here.
+commit bac9eff6f63ecb11d05cf8484dc95e6e4c800672
Author: vanshshah10002-prog
+
+ Real-time Financial Analytics & Portfolio Simulation Platform
+
+ 📊 FinanceIQ
+
+
+
+
+
+ Built with ❤️ using Flask & yfinance +
"""
Financial Advisory Algorithm - Phase 7 (Bi-Directional / Market Neutral)
Analyzes Technicals + News to suggest Long, Short, or Hedge positions.
"""
import os
import sys

# The data-source modules are only needed by main(); guard the import so the
# pure decision logic (determine_strategy) stays importable — e.g. for unit
# tests — even when those modules are absent.
try:
    import technical_analyzer
    import news_analyzer
except ImportError:  # pragma: no cover - depends on deployment
    technical_analyzer = None
    news_analyzer = None

# SECURITY NOTE: a key committed to source control is a leaked secret.
# Prefer the environment variable; the embedded fallback should be rotated.
API_KEY = os.environ.get("FMP_API_KEY", "wybWEsp1oB9abHfz3yPpQYwffxaN21B7")


def get_user_input():
    """Prompt for a ticker and monthly budget, with defaults on bad input.

    Returns:
        tuple[str, float]: (upper-cased ticker, budget amount).
    """
    print("\n=== Financial Advisor (Bi-Directional Hedge Edition) ===")
    ticker = input("Enter Stock Ticker (e.g., ^NSEBANK, NVDA): ").strip().upper()
    if not ticker:
        ticker = "^NSEBANK"
    try:
        budget = float(input("Enter Monthly Budget (in USD/GBP): ").strip())
    except ValueError:
        budget = 150.0
    return ticker, budget


def determine_strategy(tech, sent):
    """
    Combines news sentiment and technicals to pick direction.

    Args:
        tech: dict with at least 'price'; optional 'ema_5', 'bb_upper',
            'bb_lower' (missing bands default to 0, i.e. "no volatility data").
        sent: dict with 'average_score' (VADER-style compound in [-1, 1]).

    Returns:
        dict with 'signal', 'reason', 'details' (list of str) and the
        original sentiment payload under 'sentiment_data'.
    """
    price = tech['price']
    ema5 = tech.get('ema_5', price)
    bb_upper = tech.get('bb_upper', 0)
    bb_lower = tech.get('bb_lower', 0)
    # Bollinger-band width relative to price: a simple volatility proxy.
    bb_width = (bb_upper - bb_lower) / price if price > 0 else 0

    sentiment_score = sent['average_score']

    analysis = []

    # DIRECTIONAL BIAS
    technical_bias = "BULLISH" if price > ema5 else "BEARISH"
    sentiment_bias = "BULLISH" if sentiment_score > 0.1 else "BEARISH" if sentiment_score < -0.1 else "NEUTRAL"

    analysis.append(f"Technical Bias (EMA 5): {technical_bias}")
    analysis.append(f"Sentiment Bias (News): {sentiment_bias} ({sentiment_score})")

    # DECISION LOGIC
    if bb_width > 0.08:
        # High volatility = Hedge
        signal = "HEDGE (50% Long / 50% Short)"
        reason = "Market volatility is extremely high (BB Width > 8%). Hedging will neutralize risk from weekend news gaps."
    elif technical_bias == "BULLISH" and sentiment_bias != "BEARISH":
        signal = "AGGRESSIVE LONG"
        reason = "Price is trending above EMA 5 and news sentiment confirms strength."
    elif technical_bias == "BEARISH" and sentiment_bias != "BULLISH":
        signal = "AGGRESSIVE SHORT"
        reason = "Price is breaking down and news sentiment is negative. Profit from the drop."
    else:
        # Conflict between news and tech
        signal = "TACTICAL HEDGE / WAIT"
        reason = "Conflicting signals (Tech vs Sentiment). Protecting capital is a priority."

    return {
        'signal': signal,
        'reason': reason,
        'details': analysis,
        'sentiment_data': sent
    }


def print_trade_plan(ticker, budget, tech, strategy):
    """Pretty-print the recommended plan with entry/target/stop levels.

    NOTE(review): *budget* is currently unused here — allocation sizing looks
    unimplemented; confirm before removing the parameter.
    """
    print(f"\n--- {ticker} BI-DIRECTIONAL PLAN ---")
    for detail in strategy['details']:
        print(f" * {detail}")

    print(f"\n RECOMMENDED ACTION: {strategy['signal']}")
    print(f" RATIONALE: {strategy['reason']}")

    price = tech['price']
    # Note: the HEDGE signals spell "Long"/"Short" in mixed case, so they do
    # not match the upper-case "LONG"/"SHORT" membership tests above.
    if "LONG" in strategy['signal']:
        print(f"\n [BULLISH EXECUTION]")
        print(f" - Entry: {price:.2f}")
        print(f" - Target: {price * 1.05:.2f} (+5%)")
        print(f" - Stop: {price * 0.97:.2f} (-3%)")
    elif "SHORT" in strategy['signal']:
        print(f"\n [BEARISH EXECUTION]")
        print(f" - Entry: {price:.2f} (Sell)")
        print(f" - Target: {price * 0.95:.2f} (Profit from drop)")
        print(f" - Stop: {price * 1.03:.2f} (Exit if price rises)")
    elif "HEDGE" in strategy['signal']:
        print(f"\n [NEUTRAL EXECUTION]")
        print(f" - Long Px: {price:.2f} (50% Allocation)")
        print(f" - Short Px: {price:.2f} (50% Allocation)")
        print(f" - Effect: Portfolio value stays stable regardless of Monday open.")


def main():
    """Interactive entry point: fetch data, decide, and print the plan."""
    if technical_analyzer is None or news_analyzer is None:
        print("Required data modules (technical_analyzer / news_analyzer) are missing.")
        return
    ticker, budget = get_user_input()

    print(f"\n... Analyzing {ticker} (Technicals + News) ...")
    tech_data = technical_analyzer.get_technicals(ticker, API_KEY)

    # Fetch real news
    news_items = news_analyzer.fetch_news(ticker, limit=5)
    sentiment = news_analyzer.analyze_sentiment(news_items)

    if tech_data:
        strategy = determine_strategy(tech_data, sentiment)
        print_trade_plan(ticker, budget, tech_data, strategy)
    else:
        print("Failed to fetch data.")


if __name__ == "__main__":
    main()
"""
Financial Analysis Web App — Backend (Flask) v3
Features: Interactive chart overlays, always-fetch financials, indicator tooltips, competitor analysis
"""
from flask import Flask, render_template, request, jsonify, send_file, Response
from financetoolkit import Toolkit
import requests as http_requests  # for Ollama + external API calls
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
import feedparser
import pandas as pd
import numpy as np
import openpyxl
import os
from io import BytesIO
from datetime import datetime, timedelta
from difflib import SequenceMatcher
import json, traceback, time, math

app = Flask(__name__)


class SafeJSONEncoder(json.JSONEncoder):
    """JSON encoder that handles NaN, Inf, numpy types, pandas types."""

    def default(self, obj):
        # numpy scalars -> native Python; non-finite floats -> null.
        if isinstance(obj, (np.integer,)): return int(obj)
        if isinstance(obj, (np.floating,)):
            f = float(obj)
            if math.isnan(f) or math.isinf(f): return None
            return f
        if isinstance(obj, (np.bool_,)): return bool(obj)
        if isinstance(obj, np.ndarray): return obj.tolist()
        if isinstance(obj, (pd.Timestamp, pd.Period)): return str(obj)
        if hasattr(obj, 'item'): return obj.item()  # any other 0-d numpy-like scalar
        return super().default(obj)


def safe_jsonify(data, status=200):
    """Serialize to JSON safely, replacing NaN/Inf with null."""
    text = json.dumps(data, cls=SafeJSONEncoder, allow_nan=True)
    # Replace JavaScript-invalid NaN/Infinity tokens with null.
    # NOTE(review): this is a plain text substitution — it would also rewrite a
    # *string value* that happens to contain ": NaN"; tolerable for numeric
    # analytics payloads, but worth confirming.
    text = text.replace(': NaN', ': null').replace(':NaN', ':null')
    text = text.replace(': Infinity', ': null').replace(':Infinity', ':null')
    text = text.replace(': -Infinity', ': null').replace(':-Infinity', ':null')
    return Response(text, status=status, mimetype='application/json')


# SECURITY NOTE: keys committed to source control are compromised. The env-var
# lookups keep old behavior as a fallback, but the embedded keys should be
# rotated and the fallbacks removed.
FMP_API_KEY = os.environ.get("FMP_API_KEY", "wybWEsp1oB9abHfz3yPpQYwffxaN21B7")
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY", "AIzaSyBX8v2d_UV_Hktcj-AvV7TyR6TD5grn24w")  # fallback
ALPHA_VANTAGE_KEY = os.environ.get("ALPHA_VANTAGE_KEY", "P0D5N0A8SVC00YUW")
FINNHUB_KEY = os.environ.get("FINNHUB_KEY", "d6ao9dhr01qqjvbr6m1gd6ao9dhr01qqjvbr6m20")
FRED_KEY = os.environ.get("FRED_KEY", "010a35b0ca89efdef4234f33c5089d7a")

# ── AI Backend: Ollama (local) with Gemini fallback ──────
OLLAMA_URL = "http://localhost:11434"
OLLAMA_MODEL = "llama3.1"  # 8B params, runs well on 3050 GPU


def call_ai(prompt):
    """Try Ollama local first, fall back to Gemini API if unavailable.

    Returns the generated text, or None when both backends fail.
    """
    # Try Ollama (local, free, instant)
    try:
        r = http_requests.post(f"{OLLAMA_URL}/api/generate", json={
            "model": OLLAMA_MODEL,
            "prompt": prompt,
            "stream": False,
            "options": {"temperature": 0.7, "num_predict": 2048}
        }, timeout=120)
        if r.status_code == 200:
            return r.json().get("response", "")
    except Exception as e:
        print(f"Ollama unavailable: {e}")

    # Fallback: Gemini API
    try:
        from google import genai
        client = genai.Client(api_key=GEMINI_API_KEY)
        response = client.models.generate_content(model="gemini-2.0-flash", contents=prompt)
        return response.text
    except Exception as e:
        print(f"Gemini fallback also failed: {e}")
        return None
("APD","Air Products"),("APH","Amphenol"),("AVGO","Broadcom"),("AXP","American Express"), + ("BA","Boeing"),("BAC","Bank of America"),("BAX","Baxter"),("BDX","Becton Dickinson"), + ("BK","Bank of NY Mellon"),("BKNG","Booking Holdings"),("BLK","BlackRock"), + ("BMY","Bristol-Myers"),("BRK.B","Berkshire Hathaway"),("BSX","Boston Scientific"), + ("C","Citigroup"),("CAT","Caterpillar"),("CB","Chubb"),("CCI","Crown Castle"), + ("CDNS","Cadence Design"),("CI","Cigna"),("CL","Colgate-Palmolive"),("CMCSA","Comcast"), + ("CME","CME Group"),("CNC","Centene"),("COF","Capital One"),("COP","ConocoPhillips"), + ("COST","Costco"),("CRM","Salesforce"),("CSCO","Cisco"),("CTAS","Cintas"), + ("CVS","CVS Health"),("CVX","Chevron"),("D","Dominion Energy"),("DD","DuPont"), + ("DE","Deere & Co"),("DHR","Danaher"),("DIS","Disney"),("DLTR","Dollar Tree"), + ("DOW","Dow Inc"),("DUK","Duke Energy"),("DVN","Devon Energy"),("DXCM","DexCom"), + ("EA","Electronic Arts"),("EBAY","eBay"),("ECL","Ecolab"),("EL","Estee Lauder"), + ("EMR","Emerson"),("ENPH","Enphase Energy"),("EOG","EOG Resources"),("EQIX","Equinix"), + ("EW","Edwards Lifesciences"),("EXC","Exelon"),("F","Ford"),("FAST","Fastenal"), + ("FCX","Freeport-McMoRan"),("FDX","FedEx"),("FSLR","First Solar"), + ("GD","General Dynamics"),("GE","GE Aerospace"),("GILD","Gilead"),("GIS","General Mills"), + ("GM","General Motors"),("GOOG","Alphabet A"),("GOOGL","Alphabet C"),("GPN","Global Payments"), + ("GS","Goldman Sachs"),("HAL","Halliburton"),("HD","Home Depot"),("HON","Honeywell"), + ("HPQ","HP Inc"),("HUM","Humana"),("IBM","IBM"),("ICE","Intercontinental Exchange"), + ("IDXX","IDEXX Labs"),("ILMN","Illumina"),("INTC","Intel"),("INTU","Intuit"), + ("ISRG","Intuitive Surgical"),("ITW","Illinois Tool Works"),("JCI","Johnson Controls"), + ("JNJ","Johnson & Johnson"),("JPM","JPMorgan Chase"),("KHC","Kraft Heinz"), + ("KLAC","KLA Corp"),("KMB","Kimberly-Clark"),("KO","Coca-Cola"),("LEN","Lennar"), + ("LHX","L3Harris"),("LIN","Linde"),("LLY","Eli 
Lilly"),("LMT","Lockheed Martin"), + ("LOW","Lowe's"),("LRCX","Lam Research"),("LULU","Lululemon"),("MA","Mastercard"), + ("MAR","Marriott"),("MCD","McDonald's"),("MCHP","Microchip Tech"),("MCK","McKesson"), + ("MCO","Moody's"),("MDLZ","Mondelez"),("MDT","Medtronic"),("MET","MetLife"), + ("META","Meta Platforms"),("MMC","Marsh McLennan"),("MMM","3M"),("MO","Altria"), + ("MPC","Marathon Petroleum"),("MRK","Merck"),("MRNA","Moderna"),("MS","Morgan Stanley"), + ("MSFT","Microsoft"),("MSI","Motorola Solutions"),("MU","Micron"),("NFLX","Netflix"), + ("NKE","Nike"),("NOC","Northrop Grumman"),("NOW","ServiceNow"),("NSC","Norfolk Southern"), + ("NTAP","NetApp"),("NVDA","NVIDIA"),("NVO","Novo Nordisk"),("NXPI","NXP Semi"), + ("O","Realty Income"),("ODFL","Old Dominion"),("ON","ON Semi"),("ORCL","Oracle"), + ("ORLY","O'Reilly Auto"),("OXY","Occidental"),("PANW","Palo Alto Networks"), + ("PARA","Paramount"),("PCAR","PACCAR"),("PEP","PepsiCo"),("PFE","Pfizer"), + ("PG","Procter & Gamble"),("PGR","Progressive"),("PLD","Prologis"),("PLTR","Palantir"), + ("PM","Philip Morris"),("PNC","PNC Financial"),("PSA","Public Storage"),("PSX","Phillips 66"), + ("PYPL","PayPal"),("QCOM","Qualcomm"),("REGN","Regeneron"),("RIVN","Rivian"), + ("ROKU","Roku"),("ROP","Roper Tech"),("ROST","Ross Stores"),("RTX","RTX Corp"), + ("SBUX","Starbucks"),("SCHW","Charles Schwab"),("SHW","Sherwin-Williams"), + ("SLB","Schlumberger"),("SMCI","Super Micro"),("SNAP","Snap Inc"),("SNPS","Synopsys"), + ("SO","Southern Co"),("SOFI","SoFi"),("SPG","Simon Property"),("SPGI","S&P Global"), + ("SQ","Block Inc"),("SRE","Sempra"),("STZ","Constellation Brands"),("SYK","Stryker"), + ("SYY","Sysco"),("T","AT&T"),("TDG","TransDigm"),("TGT","Target"),("TJX","TJX Cos"), + ("TMO","Thermo Fisher"),("TMUS","T-Mobile"),("TSLA","Tesla"),("TSM","TSMC"), + ("TSN","Tyson Foods"),("TXN","Texas Instruments"),("UNH","UnitedHealth"), + ("UNP","Union Pacific"),("UPS","UPS"),("URI","United Rentals"),("USB","US Bancorp"), + 
("V","Visa"),("VLO","Valero Energy"),("VRSK","Verisk"),("VRTX","Vertex Pharma"), + ("VZ","Verizon"),("WBA","Walgreens"),("WBD","Warner Bros Discovery"),("WELL","Welltower"), + ("WFC","Wells Fargo"),("WM","Waste Management"),("WMT","Walmart"),("XEL","Xcel Energy"), + ("XOM","ExxonMobil"),("ZM","Zoom"),("ZTS","Zoetis"), + ("RELIANCE.NS","Reliance Industries"),("TCS.NS","TCS"),("INFY.NS","Infosys"), + ("HDFCBANK.NS","HDFC Bank"),("ICICIBANK.NS","ICICI Bank"),("SBIN.NS","SBI"), + ("BHARTIARTL.NS","Bharti Airtel"),("ITC.NS","ITC"),("KOTAKBANK.NS","Kotak Bank"), + ("LT.NS","Larsen & Toubro"),("HINDUNILVR.NS","Hindustan Unilever"), + ("BAJFINANCE.NS","Bajaj Finance"),("MARUTI.NS","Maruti Suzuki"),("WIPRO.NS","Wipro"), + ("TATAMOTORS.NS","Tata Motors"),("TATASTEEL.NS","Tata Steel"),("SUNPHARMA.NS","Sun Pharma"), + ("AXISBANK.NS","Axis Bank"),("ONGC.NS","ONGC"),("NTPC.NS","NTPC"), + ("ADANIENT.NS","Adani Enterprises"),("TITAN.NS","Titan"),("ASIANPAINT.NS","Asian Paints"), + # ── Futures ── + ("ES=F","S&P 500 Futures"),("NQ=F","Nasdaq 100 Futures"),("YM=F","Dow Futures"), + ("RTY=F","Russell 2000 Futures"),("GC=F","Gold Futures"),("SI=F","Silver Futures"), + ("CL=F","Crude Oil WTI Futures"),("BZ=F","Brent Crude Futures"),("NG=F","Natural Gas Futures"), + ("ZC=F","Corn Futures"),("ZW=F","Wheat Futures"),("ZS=F","Soybean Futures"), + ("HG=F","Copper Futures"),("PL=F","Platinum Futures"),("PA=F","Palladium Futures"), + ("ZB=F","US Treasury Bond Futures"),("ZN=F","10-Year Note Futures"), + # ── Currency Pairs ── + ("EURUSD=X","EUR/USD"),("GBPUSD=X","GBP/USD"),("USDJPY=X","USD/JPY"), + ("USDCHF=X","USD/CHF"),("AUDUSD=X","AUD/USD"),("USDCAD=X","USD/CAD"), + ("NZDUSD=X","NZD/USD"),("USDINR=X","USD/INR"),("GBPINR=X","GBP/INR"), + ("EURINR=X","EUR/INR"),("USDHKD=X","USD/HKD"),("USDSGD=X","USD/SGD"), + ("EURGBP=X","EUR/GBP"),("EURJPY=X","EUR/JPY"),("GBPJPY=X","GBP/JPY"), + # ── Indices (ETFs) ── + ("SPY","SPDR S&P 500 ETF"),("QQQ","Invesco Nasdaq 100 ETF"),("DIA","SPDR Dow Jones 
ETF"), + ("IWM","iShares Russell 2000"),("VTI","Vanguard Total Market"),("EEM","iShares EM ETF"), + ("GLD","SPDR Gold Shares"),("SLV","iShares Silver Trust"),("TLT","iShares 20+ Yr Bond"), + ("^NSEI","NIFTY 50"),("^BSESN","BSE SENSEX"), + # ── Global Indices ── + ("^GSPC","S&P 500"),("^IXIC","NASDAQ Composite"),("^DJI","Dow Jones Industrial"), + ("^FTSE","FTSE 100"),("^N225","Nikkei 225"),("^GDAXI","DAX"),("^HSI","Hang Seng"), + ("000001.SS","Shanghai Composite"),("^AXJO","ASX 200"),("^FCHI","CAC 40"), + ("^RUT","Russell 2000"),("^VIX","CBOE Volatility Index"), +] + +# ── SECTOR PEER GROUPS ─────────────────────────────────── +SECTOR_PEERS = { + "tech": ["AAPL","MSFT","GOOGL","META","AMZN","NVDA","TSLA","AMD","INTC","CRM","ADBE","ORCL","IBM","NOW","AVGO"], + "finance": ["JPM","BAC","GS","MS","WFC","C","BLK","SCHW","AXP","COF","PNC","USB","BK","MCO","SPGI"], + "healthcare": ["JNJ","UNH","PFE","MRK","ABBV","LLY","TMO","ABT","BMY","AMGN","GILD","MDT","ISRG","REGN","VRTX"], + "consumer": ["PG","KO","PEP","COST","WMT","HD","MCD","NKE","SBUX","TGT","CL","EL","KHC","MDLZ","GIS"], + "energy": ["XOM","CVX","COP","EOG","OXY","SLB","HAL","DVN","MPC","VLO","PSX"], + "industrial": ["CAT","DE","HON","BA","GE","RTX","LMT","NOC","GD","EMR","ITW","UNP","UPS","FDX"], + "telecom": ["T","VZ","TMUS","CMCSA"], +} + +def get_peers(ticker): + t = ticker.upper() + for sector, tickers in SECTOR_PEERS.items(): + if t in tickers: + return [p for p in tickers if p != t][:5], sector + return [], "unknown" + +def safe_val(val): + if val is None: return None + if isinstance(val, (np.integer,)): return int(val) + if isinstance(val, (np.floating,)): return float(val) + if isinstance(val, (np.bool_,)): return bool(val) + if isinstance(val, pd.Timestamp): return str(val) + if isinstance(val, (pd.Series, pd.DataFrame)): + return safe_val(val.iloc[-1] if len(val) > 0 else 0) + if hasattr(val, 'item'): return val.item() + return val + +def safe_num(val, decimals=2): + v = safe_val(val) + if v is None or 
def safe_num(val, decimals=2):
    """Round *val* to *decimals*; return "N/A" for None/NaN/Inf/unconvertible."""
    v = safe_val(val)
    if v is None or (isinstance(v, float) and (np.isnan(v) or np.isinf(v))):
        return "N/A"
    try:
        return round(float(v), decimals)
    except (TypeError, ValueError):  # was a bare except; narrowed
        return "N/A"


def sanitize_for_json(obj):
    """Recursively clean NaN/Inf/numpy types so jsonify won't crash on Windows."""
    if isinstance(obj, dict):
        return {k: sanitize_for_json(v) for k, v in obj.items()}
    if isinstance(obj, list):
        return [sanitize_for_json(v) for v in obj]
    if isinstance(obj, float):
        if np.isnan(obj) or np.isinf(obj): return None
        return obj
    if isinstance(obj, (np.integer,)): return int(obj)
    if isinstance(obj, (np.floating,)):
        f = float(obj)
        return None if np.isnan(f) or np.isinf(f) else f
    if isinstance(obj, (np.bool_,)): return bool(obj)
    if isinstance(obj, (pd.Timestamp, pd.Period)): return str(obj)
    if hasattr(obj, 'item'): return obj.item()
    return obj


def get_fin_val(df, ticker, row_label):
    """Pull the latest value for *row_label* from a financetoolkit statement.

    Handles both single-ticker frames and MultiIndex columns keyed by ticker
    on either level. Returns "N/A" on any lookup failure.
    """
    try:
        if isinstance(df.columns, pd.MultiIndex):
            if ticker in df.columns.get_level_values(1):
                df = df.xs(ticker, level=1, axis=1)
            elif ticker in df.columns.get_level_values(0):
                df = df.xs(ticker, level=0, axis=1)
        if row_label in df.index:
            return safe_num(df.loc[row_label].iloc[-1])
    except Exception:  # was a bare except; best-effort lookup
        pass
    return "N/A"


# ── Route: Dashboard ─────────────────────────────────────
@app.route("/")
def index():
    """Serve the single-page dashboard."""
    return render_template("index.html")


# ── Route: Ticker Suggestions ────────────────────────────
@app.route("/api/suggest", methods=["GET"])
def suggest():
    """Fuzzy autocomplete over TICKER_DB, optionally filtered by asset type."""
    q = request.args.get("q", "").upper().strip()
    asset_type = request.args.get("asset_type", "").lower().strip()
    if len(q) < 1: return safe_jsonify([])

    def matches_asset_type(ticker):
        # Yahoo-style suffixes: "=F" futures, "=X" FX pairs, "^" indices.
        if not asset_type: return True
        if asset_type == "stocks":
            return not ticker.endswith("=F") and not ticker.endswith("=X")
        elif asset_type == "futures":
            return ticker.endswith("=F")
        elif asset_type == "currencies":
            return ticker.endswith("=X")
        elif asset_type == "options":
            return not ticker.endswith("=F") and not ticker.endswith("=X") and not ticker.startswith("^")
        return True

    results = []
    for ticker, name in TICKER_DB:
        if not matches_asset_type(ticker): continue
        score = 0
        # Prefix match beats substring match beats fuzzy ratio.
        if ticker.upper().startswith(q): score = 100 + (10 - len(ticker))
        elif q in ticker.upper(): score = 80
        elif q in name.upper(): score = 70
        else:
            ratio = SequenceMatcher(None, q, ticker.upper()).ratio()
            name_ratio = SequenceMatcher(None, q, name.upper()).ratio()
            best = max(ratio, name_ratio)
            if best > 0.5: score = int(best * 60)
        if score > 0:
            results.append({"ticker": ticker, "name": name, "score": score})
    results.sort(key=lambda x: -x["score"])
    return safe_jsonify(results[:10])


def _yf_history(ticker, start_date, end_date):
    """yfinance fallback for price history, shaped like financetoolkit output.

    Returns a DataFrame with MultiIndex columns (field, ticker) or None.
    """
    import yfinance as yf
    yf_hist = yf.Ticker(ticker).history(start=start_date, end=end_date)
    if yf_hist.empty:
        return None
    yf_hist.index.name = 'date'
    yf_hist.columns = pd.MultiIndex.from_product([yf_hist.columns, [ticker]])
    return yf_hist


# ── Route: Full Analysis ─────────────────────────────────
@app.route("/api/analyze", methods=["POST"])
def analyze():
    """Compute technicals, financial statements, ratios and FCFF for a ticker.

    Request JSON: ticker, period ("yearly"/"quarterly"), optional start_date /
    end_date, or a timeframe shortcut ("1M".."MAX").
    """
    data = request.json
    ticker = data.get("ticker", "AAPL").upper().strip()
    period = data.get("period", "yearly")
    start_date = data.get("start_date", "")
    end_date = data.get("end_date", "")

    timeframe = data.get("timeframe", "1Y")
    if not start_date:
        offsets = {"1M": 30, "3M": 90, "6M": 180, "1Y": 365, "2Y": 730, "5Y": 1825, "MAX": 7300}
        days = offsets.get(timeframe, 365)
        start_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
    if not end_date:
        end_date = datetime.now().strftime("%Y-%m-%d")

    try:
        # Chart data uses the user's timeframe; fall back to yfinance when
        # financetoolkit fails (it raises OSError on Windows cache issues).
        hist = None
        try:
            chart_toolkit = Toolkit(tickers=[ticker], api_key=FMP_API_KEY, start_date=start_date, end_date=end_date)
            hist = chart_toolkit.get_historical_data(period="daily")
        except OSError as oe:
            print(f"FinanceToolkit OSError (Windows caching): {oe}")
            hist = _yf_history(ticker, start_date, end_date)
        except Exception as e:
            print(f"FinanceToolkit error: {e}")
            hist = _yf_history(ticker, start_date, end_date)

        if hist is None or hist.empty:
            return safe_jsonify({"error": f"No data found for {ticker}"}), 404

        # Collapse the per-ticker column level if present.
        if isinstance(hist.columns, pd.MultiIndex):
            if ticker in hist.columns.get_level_values(1):
                hd = hist.xs(ticker, level=1, axis=1)
            elif ticker in hist.columns.get_level_values(0):
                hd = hist.xs(ticker, level=0, axis=1)
            else: hd = hist
        else: hd = hist

        close = hd['Close']
        price = safe_num(close.iloc[-1])

        # Compute technicals from price data
        delta = close.diff()
        gain = delta.where(delta > 0, 0).rolling(14).mean()
        loss = (-delta.where(delta < 0, 0)).rolling(14).mean()
        rs = gain / loss
        rsi = safe_num(100 - (100 / (1 + rs)).iloc[-1])

        ema5 = safe_num(close.ewm(span=5).mean().iloc[-1])
        ema10 = safe_num(close.ewm(span=10).mean().iloc[-1])
        ema20 = safe_num(close.ewm(span=20).mean().iloc[-1])
        ema50 = safe_num(close.ewm(span=50).mean().iloc[-1])
        sma200 = safe_num(close.rolling(200).mean().iloc[-1])

        ema12 = close.ewm(span=12).mean()
        ema26 = close.ewm(span=26).mean()
        macd_line = ema12 - ema26
        signal_line = macd_line.ewm(span=9).mean()
        macd_val = safe_num(macd_line.iloc[-1])
        macd_signal = safe_num(signal_line.iloc[-1])

        ma20 = close.rolling(20).mean()
        std20 = close.rolling(20).std()
        bb_upper = safe_num((ma20 + 2 * std20).iloc[-1])
        bb_lower = safe_num((ma20 - 2 * std20).iloc[-1])

        # True range / ATR(14)
        tr = pd.concat([hd['High'] - hd['Low'], (hd['High'] - close.shift()).abs(), (hd['Low'] - close.shift()).abs()], axis=1).max(axis=1)
        atr = safe_num(tr.rolling(14).mean().iloc[-1])

        vol = safe_num(hd['Volume'].iloc[-1])
        avg_vol = safe_num(hd['Volume'].rolling(20).mean().iloc[-1])

        technicals = {
            "price": price, "ema_5": ema5, "ema_10": ema10, "ema_20": ema20,
            "ema_50": ema50, "sma_200": sma200,
            "rsi": rsi, "macd": macd_val, "macd_signal": macd_signal,
            "bb_upper": bb_upper, "bb_lower": bb_lower, "atr": atr,
            "volume": vol, "avg_volume_20": avg_vol
        }

        # ── FINANCIALS — always fetch with 5Y lookback ──
        fin_toolkit = Toolkit(tickers=[ticker], api_key=FMP_API_KEY,
                              quarterly=(period == "quarterly"))
        financials = {}
        try:
            income = fin_toolkit.get_income_statement()
            balance = fin_toolkit.get_balance_sheet_statement()
            financials = {
                "revenue": get_fin_val(income, ticker, "Revenue"),
                "cost_of_goods": get_fin_val(income, ticker, "Cost of Goods Sold"),
                "gross_profit": get_fin_val(income, ticker, "Gross Profit"),
                "operating_income": get_fin_val(income, ticker, "Operating Income"),
                "ebitda": get_fin_val(income, ticker, "EBITDA"),
                "net_income": get_fin_val(income, ticker, "Net Income"),
                "eps": get_fin_val(income, ticker, "EPS"),
                "eps_diluted": get_fin_val(income, ticker, "EPS Diluted"),
                "total_assets": get_fin_val(balance, ticker, "Total Assets"),
                "total_debt": get_fin_val(balance, ticker, "Total Debt"),
                "net_debt": get_fin_val(balance, ticker, "Net Debt"),
                "cash": get_fin_val(balance, ticker, "Cash and Cash Equivalents"),
                "total_equity": get_fin_val(balance, ticker, "Total Equity"),
                "retained_earnings": get_fin_val(balance, ticker, "Retained Earnings"),
            }
        except Exception as e:
            financials = {"error": str(e)}

        # ── RATIOS ──
        ratios = {}
        try:
            prof = fin_toolkit.ratios.collect_profitability_ratios()
            val = fin_toolkit.ratios.collect_valuation_ratios()
            ratios = {
                "gross_margin": get_fin_val(prof, ticker, "Gross Margin"),
                "operating_margin": get_fin_val(prof, ticker, "Operating Margin"),
                "net_profit_margin": get_fin_val(prof, ticker, "Net Profit Margin"),
                "roe": get_fin_val(prof, ticker, "Return on Equity"),
                "roa": get_fin_val(prof, ticker, "Return on Assets"),
                "roic": get_fin_val(prof, ticker, "Return on Invested Capital"),
                "pe_ratio": get_fin_val(val, ticker, "Price-to-Earnings"),
                "pb_ratio": get_fin_val(val, ticker, "Price-to-Book"),
                "ev_ebitda": get_fin_val(val, ticker, "EV-to-EBITDA"),
                "ev_sales": get_fin_val(val, ticker, "EV-to-Sales"),
                "dividend_yield": get_fin_val(val, ticker, "Dividend Yield"),
                "market_cap": get_fin_val(val, ticker, "Market Cap"),
            }
        except Exception as e:
            ratios = {"error": str(e)}

        # ── FCFF ──
        fcff = {}
        try:
            cf = fin_toolkit.get_cash_flow_statement()
            fcff = {
                "cash_flow_from_operations": get_fin_val(cf, ticker, "Cash Flow from Operations"),
                "capital_expenditure": get_fin_val(cf, ticker, "Capital Expenditure"),
                "free_cash_flow": get_fin_val(cf, ticker, "Free Cash Flow"),
                "net_change_in_cash": get_fin_val(cf, ticker, "Net Change in Cash"),
                "cash_flow_from_investing": get_fin_val(cf, ticker, "Cash Flow from Investing"),
                "cash_flow_from_financing": get_fin_val(cf, ticker, "Cash Flow from Financing"),
                "dividends_paid": get_fin_val(cf, ticker, "Dividends Paid"),
                "stock_based_compensation": get_fin_val(cf, ticker, "Stock Based Compensation"),
            }
        except Exception as e:
            fcff = {"error": str(e)}

        # ── Full price history for interactive chart (best-effort) ──
        price_history = []
        try:
            for date, row in hd.iterrows():
                price_history.append({
                    "date": str(date)[:10],
                    "open": safe_num(row.get('Open', 0)),
                    "high": safe_num(row.get('High', 0)),
                    "low": safe_num(row.get('Low', 0)),
                    "close": safe_num(row['Close']),
                    "volume": safe_num(row['Volume'])
                })
        except Exception:  # was a bare except; chart stays best-effort
            pass

        result = sanitize_for_json({
            "ticker": ticker, "technicals": technicals,
            "financials": financials, "ratios": ratios, "fcff": fcff,
            "price_history": price_history,
            "date_range": {"start": start_date, "end": end_date}
        })
        return safe_jsonify(result)
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500
# ── Route: Competitor Analysis ───────────────────────────
def get_multi_val(df, ticker, metric):
    """Extract value from multi-ticker financetoolkit DataFrame.

    Multi-ticker DataFrames have a row MultiIndex: (ticker, metric). Columns
    are year periods. Returns "N/A" on any failure.
    """
    try:
        if isinstance(df.index, pd.MultiIndex):
            if (ticker, metric) in df.index:
                row = df.loc[(ticker, metric)]
                return safe_num(row.iloc[-1])
        else:
            if metric in df.index:
                return safe_num(df.loc[metric].iloc[-1])
    except Exception:  # was a bare except; best-effort lookup
        pass
    return "N/A"


@app.route("/api/competitors", methods=["POST"])
def competitors():
    """Compare the ticker's valuation/profitability ratios against sector peers."""
    data = request.json
    ticker = data.get("ticker", "AAPL").upper().strip()
    peers, sector = get_peers(ticker)

    if not peers:
        return safe_jsonify({"sector": "unknown", "peers": [], "message": "No peer data available for this ticker."})

    try:
        all_tickers = [ticker] + peers
        comp_toolkit = Toolkit(tickers=all_tickers, api_key=FMP_API_KEY)
        val = comp_toolkit.ratios.collect_valuation_ratios()
        prof = comp_toolkit.ratios.collect_profitability_ratios()

        result = []
        for t in all_tickers:
            entry = {"ticker": t, "is_target": (t == ticker)}
            entry["pe_ratio"] = get_multi_val(val, t, "Price-to-Earnings")
            entry["pb_ratio"] = get_multi_val(val, t, "Price-to-Book")
            entry["ev_ebitda"] = get_multi_val(val, t, "EV-to-EBITDA")
            entry["ev_sales"] = get_multi_val(val, t, "EV-to-Sales")
            entry["market_cap"] = get_multi_val(val, t, "Market Cap")
            entry["net_margin"] = get_multi_val(prof, t, "Net Profit Margin")
            entry["roe"] = get_multi_val(prof, t, "Return on Equity")
            entry["gross_margin"] = get_multi_val(prof, t, "Gross Margin")
            result.append(entry)

        return safe_jsonify({"sector": sector, "peers": result})
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"sector": sector, "peers": [], "error": str(e)})


# ── Route: News ──────────────────────────────────────────
@app.route("/api/news", methods=["POST"])
def news():
    """Fetch Google News RSS for the ticker and score each headline with VADER."""
    data = request.json
    ticker = data.get("ticker", "AAPL").upper().strip()
    try:
        # Properly percent-encode the query (the old code only replaced "&"
        # with %26, leaving characters like "^" and "=" unescaped).
        from urllib.parse import quote_plus
        encoded = quote_plus(ticker)
        rss_url = f"https://news.google.com/rss/search?q={encoded}+stock&hl=en-US&gl=US&ceid=US:en"
        feed = feedparser.parse(rss_url)
        analyzer = SentimentIntensityAnalyzer()
        news_items = []
        for entry in feed.entries[:5]:
            title = entry.title
            sentiment = analyzer.polarity_scores(f"{title}. {entry.get('description', '')}")
            news_items.append({
                "title": title, "link": entry.link,
                "published": entry.get("published", ""),
                "sentiment_score": round(sentiment['compound'], 4),
                "sentiment_label": "Positive" if sentiment['compound'] > 0.05 else "Negative" if sentiment['compound'] < -0.05 else "Neutral"
            })
        avg = sum(n['sentiment_score'] for n in news_items) / len(news_items) if news_items else 0
        return safe_jsonify({"ticker": ticker, "news": news_items, "average_sentiment": round(avg, 4),
                             "overall_label": "Positive" if avg > 0.05 else "Negative" if avg < -0.05 else "Neutral"})
    except Exception as e:
        return safe_jsonify({"error": str(e)}), 500


# ── Route: AI ────────────────────────────────────────────
@app.route("/api/ai", methods=["POST"])
def ai_overview():
    """Build an analyst prompt from the gathered data and run it through call_ai."""
    data = request.json
    ticker = data.get("ticker", "")
    analysis_data = data.get("analysis", {})
    news_data = data.get("news", {})
    user_question = data.get("question", "")

    prompt = f"""You are a top-tier financial analyst. Here is the latest data for {ticker}:

TECHNICALS:
{json.dumps(analysis_data.get('technicals', {}), indent=2)}

FINANCIALS:
{json.dumps(analysis_data.get('financials', {}), indent=2)}

RATIOS:
{json.dumps(analysis_data.get('ratios', {}), indent=2)}

FREE CASH FLOW:
{json.dumps(analysis_data.get('fcff', {}), indent=2)}

NEWS SENTIMENT:
{json.dumps(news_data, indent=2)}

Based on the data, provide:
1. Key strengths and competitive advantages
2. Risks and red flags
3. Clear recommendation (Buy / Hold / Sell) with reasoning

{f'The user also asks: {user_question}' if user_question else ''}
"""
    result = call_ai(prompt)
    if result:
        return safe_jsonify({"overview": result})
    return safe_jsonify({"error": "AI unavailable. Please ensure Ollama is running: 'ollama serve' in terminal."}), 500


# ── Route: Excel Export ──────────────────────────────────
@app.route("/api/export", methods=["POST"])
def export_excel():
    """Dump the gathered analysis into a multi-sheet .xlsx and stream it back."""
    data = request.json
    ticker = data.get("ticker", "DATA")
    analysis = data.get("analysis", {})
    news_data = data.get("news", {})
    ai_text = data.get("ai_overview", "")

    wb = openpyxl.Workbook()
    ws1 = wb.active; ws1.title = "Technicals"
    ws1.append(["Indicator", "Value"])
    for k, v in analysis.get("technicals", {}).items():
        ws1.append([k.replace("_", " ").title(), str(v)])

    ws2 = wb.create_sheet("Financials")
    ws2.append(["Metric", "Value"])
    for k, v in analysis.get("financials", {}).items():
        ws2.append([k.replace("_", " ").title(), str(v)])

    ws3 = wb.create_sheet("Ratios & Valuation")
    ws3.append(["Ratio", "Value"])
    for k, v in analysis.get("ratios", {}).items():
        ws3.append([k.replace("_", " ").title(), str(v)])

    ws4 = wb.create_sheet("Cash Flow (FCFF)")
    ws4.append(["Metric", "Value"])
    for k, v in analysis.get("fcff", {}).items():
        ws4.append([k.replace("_", " ").title(), str(v)])

    ws5 = wb.create_sheet("News & Sentiment")
    ws5.append(["Title", "Sentiment", "Score"])
    for n in news_data.get("news", []):
        ws5.append([n.get("title", ""), n.get("sentiment_label", ""), n.get("sentiment_score", "")])

    ws6 = wb.create_sheet("AI Overview")
    ws6.append(["AI Analyst Commentary"])
    for line in ai_text.split("\n"):
        ws6.append([line])

    # Auto-fit column widths (capped at 60 chars).
    for ws in wb.worksheets:
        for col in ws.columns:
            max_len = max(len(str(c.value or "")) for c in col)
            ws.column_dimensions[col[0].column_letter].width = min(max_len + 2, 60)

    buffer = BytesIO()
    wb.save(buffer)
    buffer.seek(0)
    return send_file(buffer, as_attachment=True, download_name=f"{ticker}_analysis.xlsx",
                     mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
# ══════════════════════════════════════════════════════════
# STAGE 2 — NEW ENDPOINTS
# ══════════════════════════════════════════════════════════

# ── Route: DCF Intrinsic Valuation ───────────────────────
@app.route("/api/dcf", methods=["POST"])
def dcf_valuation():
    """Five-year discounted-cash-flow valuation with a Gordon terminal value.

    Uses fixed assumptions (beta=1, 8% growth, 2.5% terminal growth) and the
    latest statements from financetoolkit; compares against the live price.
    """
    try:
        data = request.json
        ticker = data.get("ticker", "AAPL").upper().strip()
        tk = Toolkit(tickers=[ticker], api_key=FMP_API_KEY)

        income = tk.get_income_statement()
        balance = tk.get_balance_sheet_statement()
        cf = tk.get_cash_flow_statement()

        fcf = get_fin_val(cf, ticker, "Free Cash Flow")
        revenue = get_fin_val(income, ticker, "Revenue")
        ebit = get_fin_val(income, ticker, "Operating Income")
        total_debt = get_fin_val(balance, ticker, "Total Debt")
        cash = get_fin_val(balance, ticker, "Cash and Cash Equivalents")
        total_equity = get_fin_val(balance, ticker, "Total Equity")
        shares = get_fin_val(income, ticker, "Weighted Average Shares Outstanding")
        total_assets = get_fin_val(balance, ticker, "Total Assets")

        if any(v == "N/A" for v in [fcf, total_debt, cash, shares, total_equity]):
            return safe_jsonify({"error": "Insufficient data for DCF calculation", "ticker": ticker})
        # Guard the later division: a zero/negative share count would raise.
        if float(shares) <= 0:
            return safe_jsonify({"error": "Insufficient data for DCF calculation", "ticker": ticker})

        # WACC estimation (CAPM cost of equity + after-tax cost of debt)
        risk_free = 0.043  # ~10Y Treasury
        equity_premium = 0.055
        beta = 1.0  # Default
        cost_of_equity = risk_free + beta * equity_premium
        cost_of_debt = 0.05
        tax_rate = 0.21
        total_cap = abs(total_equity) + abs(total_debt) if total_debt != 0 else abs(total_equity)
        eq_weight = abs(total_equity) / total_cap if total_cap > 0 else 0.7
        debt_weight = 1 - eq_weight
        wacc = eq_weight * cost_of_equity + debt_weight * cost_of_debt * (1 - tax_rate)
        # Floor keeps wacc safely above terminal_growth (avoids a blow-up below).
        wacc = max(wacc, 0.06)

        # Project FCF
        growth_rate = 0.08  # Conservative growth
        terminal_growth = 0.025
        projection_years = 5
        projected_fcf = []
        current_fcf = float(fcf)
        for y in range(1, projection_years + 1):
            current_fcf *= (1 + growth_rate)
            pv = current_fcf / ((1 + wacc) ** y)
            projected_fcf.append({"year": y, "fcf": round(current_fcf, 0), "pv": round(pv, 0)})

        # Terminal value (Gordon growth on the final projected year)
        terminal_value = (current_fcf * (1 + terminal_growth)) / (wacc - terminal_growth)
        pv_terminal = terminal_value / ((1 + wacc) ** projection_years)

        # Enterprise value & intrinsic value
        sum_pv_fcf = sum(p["pv"] for p in projected_fcf)
        enterprise_value = sum_pv_fcf + pv_terminal
        equity_value = enterprise_value - float(total_debt) + float(cash)
        intrinsic_per_share = equity_value / float(shares)

        # Get current price for comparison. yfinance's .info can raise or
        # return None values, so degrade to 0 (reported as no upside).
        import yfinance as yf
        try:
            info = yf.Ticker(ticker).info
            current_price = info.get("currentPrice", info.get("previousClose", 0)) or 0
        except Exception:
            current_price = 0

        upside = ((intrinsic_per_share - current_price) / current_price * 100) if current_price > 0 else 0
        verdict = "UNDERVALUED" if upside > 15 else "OVERVALUED" if upside < -15 else "FAIRLY VALUED"

        return safe_jsonify(sanitize_for_json({
            "ticker": ticker, "intrinsic_value": round(intrinsic_per_share, 2),
            "current_price": round(current_price, 2), "upside_pct": round(upside, 2),
            "verdict": verdict,
            "wacc": round(wacc * 100, 2), "growth_rate": growth_rate * 100,
            "terminal_growth": terminal_growth * 100,
            "enterprise_value": round(enterprise_value, 0),
            "projected_fcf": projected_fcf,
            "pv_terminal": round(pv_terminal, 0),
            "base_fcf": round(float(fcf), 0)
        }))
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500
# ── Route: Candlestick Pattern Recognition ───────────────
@app.route("/api/patterns", methods=["POST"])
def candlestick_patterns():
    """Scan recent OHLC candles for classic reversal/indecision patterns.

    POST body: {"ticker", "prices": [{open,high,low,close,date},...],
    "lookback_days"}. Returns every pattern found plus an aggregate outlook.
    """
    try:
        data = request.json
        ticker = data.get("ticker", "AAPL").upper().strip()
        prices = data.get("prices", [])
        lookback = int(data.get("lookback_days", 7))
        if len(prices) < 5:
            return safe_jsonify({"patterns": [], "message": "Not enough price data"})

        # Restrict to the requested window, keeping a few extra candles of
        # context so multi-candle patterns at the window edge still resolve.
        window = prices[-max(lookback + 3, 10):]

        predictions = {
            "Doji": "Indecision detected — watch for a breakout in either direction over the next 1-3 days. If followed by a bullish candle, expect upside; if bearish, expect downside.",
            "Hammer": "Buyers are stepping in at lower levels. Expect a potential bullish reversal in the next 2-5 days if volume confirms.",
            "Shooting Star": "Sellers are pushing back at higher levels. Watch for a bearish pullback over the next 2-5 days.",
            "Bullish Engulfing": "Strong buying pressure — expect continuation higher over the next 3-7 days. Consider setting a stop below the engulfing candle low.",
            "Bearish Engulfing": "Strong selling pressure — expect further downside in the next 3-7 days. The engulfing high acts as resistance.",
            "Morning Star": "Classic bottom reversal. Expect a 3-7 day rally from current levels. Pattern reliability: ~70% historically.",
            "Evening Star": "Classic top reversal. Expect a 3-7 day decline. Pattern reliability: ~70% historically.",
            "Bullish Harami": "Selling momentum is fading. Watch for bullish confirmation candle tomorrow — if it appears, expect 2-5 days upside.",
            "Bearish Harami": "Buying momentum is fading. Watch for bearish confirmation candle tomorrow — if it appears, expect 2-5 days downside.",
        }

        found = []
        for i in range(2, len(window)):
            o, h, l, c = window[i]["open"], window[i]["high"], window[i]["low"], window[i]["close"]
            body = abs(c - o)
            total_range = h - l
            if total_range == 0:
                continue  # flat candle — ratios would divide by zero
            body_ratio = body / total_range
            prev_o, prev_c = window[i - 1]["open"], window[i - 1]["close"]
            prev_body = abs(prev_c - prev_o)
            date = window[i]["date"]

            # Wick lengths relative to the real body.
            lower_shadow = min(o, c) - l
            upper_shadow = h - max(o, c)

            hits = []

            # Single-candle patterns.
            if body_ratio < 0.1 and total_range > 0:
                hits.append({"pattern": "Doji", "type": "neutral", "description": "Indecision candle — body is tiny relative to range. Potential reversal signal."})

            if body_ratio < 0.35 and lower_shadow > body * 2 and upper_shadow < body * 0.5 and c > o:
                hits.append({"pattern": "Hammer", "type": "bullish", "description": "Bullish reversal — long lower wick shows buyers stepped in."})

            if body_ratio < 0.35 and upper_shadow > body * 2 and lower_shadow < body * 0.5 and c < o:
                hits.append({"pattern": "Shooting Star", "type": "bearish", "description": "Bearish reversal — long upper wick shows sellers pushed price down."})

            # Two-candle patterns (today vs. yesterday).
            if i >= 2 and prev_c < prev_o and c > o and c > prev_o and o < prev_c and body > prev_body:
                hits.append({"pattern": "Bullish Engulfing", "type": "bullish", "description": "Strong bullish reversal — today's green candle engulfs yesterday's red candle."})

            if i >= 2 and prev_c > prev_o and c < o and c < prev_o and o > prev_c and body > prev_body:
                hits.append({"pattern": "Bearish Engulfing", "type": "bearish", "description": "Strong bearish reversal — today's red candle engulfs yesterday's green candle."})

            # Three-candle star patterns.
            if i >= 3:
                p2_o, p2_c = window[i - 2]["open"], window[i - 2]["close"]
                p1_o, p1_c = window[i - 1]["open"], window[i - 1]["close"]
                p2_body = abs(p2_c - p2_o)
                p1_body = abs(p1_c - p1_o)
                if p2_c < p2_o and p2_body > 0 and p1_body < p2_body * 0.3 and c > o and body > p2_body * 0.5:
                    hits.append({"pattern": "Morning Star", "type": "bullish", "description": "Bullish reversal — big red candle, small body, then big green candle."})
                if p2_c > p2_o and p2_body > 0 and p1_body < p2_body * 0.3 and c < o and body > p2_body * 0.5:
                    hits.append({"pattern": "Evening Star", "type": "bearish", "description": "Bearish reversal — big green candle, small body, then big red candle."})

            # Harami: small candle contained within the previous candle's body.
            if i >= 2 and prev_c < prev_o and c > o and o > prev_c and c < prev_o:
                hits.append({"pattern": "Bullish Harami", "type": "bullish", "description": "Potential bullish reversal — small green candle inside previous red candle."})

            if i >= 2 and prev_c > prev_o and c < o and o < prev_c and c > prev_o:
                hits.append({"pattern": "Bearish Harami", "type": "bearish", "description": "Potential bearish reversal — small red candle inside previous green candle."})

            for p in hits:
                p["date"] = date
                p["prediction"] = predictions.get(p["pattern"], "Monitor closely for confirmation.")
                found.append(p)

        # Aggregate prediction outlook from the vote counts.
        bullish = sum(1 for p in found if p["type"] == "bullish")
        bearish = sum(1 for p in found if p["type"] == "bearish")
        neutral = sum(1 for p in found if p["type"] == "neutral")
        if bullish > bearish + neutral:
            outlook = "BULLISH — Majority of recent patterns signal upside. Consider entries on pullbacks."
        elif bearish > bullish + neutral:
            outlook = "BEARISH — Majority of recent patterns signal downside. Exercise caution."
        else:
            outlook = "MIXED/NEUTRAL — Conflicting signals. Wait for clearer pattern confirmation."

        return safe_jsonify({"patterns": found, "total_found": len(found),
                             "outlook": outlook, "lookback_days": lookback,
                             "bullish_count": bullish, "bearish_count": bearish, "neutral_count": neutral})
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

# ── Route: Analyst Ratings & Price Targets (Finnhub) ─────
@app.route("/api/analyst", methods=["POST"])
def analyst_ratings():
    """Fetch the latest analyst recommendation counts and price targets."""
    try:
        data = request.json
        ticker = data.get("ticker", "AAPL").upper().strip()
        import requests as req

        # Recommendations (Finnhub returns newest-first).
        rec_url = f"https://finnhub.io/api/v1/stock/recommendation?symbol={ticker}&token={FINNHUB_KEY}"
        rec_resp = req.get(rec_url, timeout=10).json()
        latest = rec_resp[0] if isinstance(rec_resp, list) and len(rec_resp) > 0 else None

        # Price target
        pt_url = f"https://finnhub.io/api/v1/stock/price-target?symbol={ticker}&token={FINNHUB_KEY}"
        pt_resp = req.get(pt_url, timeout=10).json()

        # No coverage — return an explicit "unavailable" payload.
        if latest is None:
            return safe_jsonify({
                "ticker": ticker,
                "recommendation": {"buy": "N/A", "hold": "N/A", "sell": "N/A", "strong_buy": "N/A", "strong_sell": "N/A", "period": "N/A"},
                "price_target": {"high": "N/A", "low": "N/A", "mean": "N/A", "median": "N/A"},
                "available": False
            })

        return safe_jsonify(sanitize_for_json({
            "ticker": ticker,
            "available": True,
            "recommendation": {
                "buy": latest.get("buy", 0), "hold": latest.get("hold", 0),
                "sell": latest.get("sell", 0), "strong_buy": latest.get("strongBuy", 0),
                "strong_sell": latest.get("strongSell", 0), "period": latest.get("period", ""),
            },
            "price_target": {
                "high": pt_resp.get("targetHigh") if pt_resp.get("targetHigh") else "N/A",
                "low": pt_resp.get("targetLow") if pt_resp.get("targetLow") else "N/A",
                "mean": pt_resp.get("targetMean") if pt_resp.get("targetMean") else "N/A",
                "median": pt_resp.get("targetMedian") if pt_resp.get("targetMedian") else "N/A",
            }
        }))
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500
# ── Route: Insider Trading (Finnhub) ─────────────────────
@app.route("/api/insider", methods=["POST"])
def insider_trading():
    """Return the 20 most recent insider transactions for a ticker."""
    try:
        data = request.json
        ticker = data.get("ticker", "AAPL").upper().strip()
        import requests as req
        url = f"https://finnhub.io/api/v1/stock/insider-transactions?symbol={ticker}&token={FINNHUB_KEY}"
        resp = req.get(url, timeout=10).json()
        txns = resp.get("data", [])[:20]  # Last 20

        # Project only the fields the frontend renders.
        result = [
            {
                "name": t.get("name", "Unknown"),
                "share": safe_num(t.get("share", 0), 0),
                "change": safe_num(t.get("change", 0), 0),
                "transaction_type": t.get("transactionType", ""),
                "filing_date": t.get("filingDate", ""),
                "transaction_date": t.get("transactionDate", ""),
            }
            for t in txns
        ]

        return safe_jsonify({"ticker": ticker, "transactions": result})
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

# ── Route: Market Sentiment (Fear & Greed + VIX) ─────────
@app.route("/api/sentiment-market", methods=["POST"])
def market_sentiment():
    """Aggregate broad-market sentiment: CNN Fear & Greed index plus the VIX.

    Each sub-fetch degrades gracefully to a neutral/unavailable placeholder
    so one failing data source never breaks the whole response.
    """
    try:
        result = {}
        # Fear & Greed Index
        try:
            import fear_and_greed
            fg = fear_and_greed.get()
            result["fear_greed"] = {
                "value": round(fg.value, 1),
                "description": fg.description,
            }
        except Exception as e:
            result["fear_greed"] = {"value": 50, "description": "Neutral", "error": str(e)}

        # VIX — bucket the latest close into a volatility label.
        try:
            import yfinance as yf
            vix = yf.Ticker("^VIX")
            vix_hist = vix.history(period="5d")
            if not vix_hist.empty:
                vix_close = float(vix_hist["Close"].iloc[-1])
                result["vix"] = {
                    "value": round(vix_close, 2),
                    "label": "Low Volatility" if vix_close < 15 else "Moderate" if vix_close < 25 else "High Volatility" if vix_close < 35 else "Extreme Fear"
                }
            else:
                result["vix"] = {"value": "N/A", "label": "Unavailable"}
        except Exception as e:
            result["vix"] = {"value": "N/A", "label": "Unavailable", "error": str(e)}

        return safe_jsonify(result)
    except Exception as e:
        return safe_jsonify({"error": str(e)}), 500
e: + return safe_jsonify({"error": str(e)}), 500 + +# ── Route: FRED Economic Indicators ────────────────────── +@app.route("/api/macro", methods=["POST"]) +def macro_indicators(): + try: + from fredapi import Fred + fred = Fred(api_key=FRED_KEY) + indicators = {} + series_map = { + "gdp_growth": ("GDPC1", "GDP (Real, Quarterly)"), + "cpi": ("CPIAUCSL", "Consumer Price Index"), + "fed_rate": ("FEDFUNDS", "Federal Funds Rate"), + "unemployment": ("UNRATE", "Unemployment Rate"), + "treasury_2y": ("DGS2", "2-Year Treasury Yield"), + "treasury_10y": ("DGS10", "10-Year Treasury Yield"), + "treasury_30y": ("DGS30", "30-Year Treasury Yield"), + } + + def assess_indicator(key, val): + """Returns (status, explanation) — status is 'good', 'bad', or 'neutral'""" + if val == "N/A": return "neutral", "Data unavailable." + v = float(val) + assessments = { + "gdp_growth": lambda v: ("good", f"GDP at ${v:,.0f}B indicates a strong economy. Higher GDP means more corporate earnings.") if v > 20000 else ("neutral", f"GDP at ${v:,.0f}B — moderate economic output."), + "cpi": lambda v: ("bad", f"CPI at {v:.1f} — inflation is elevated, eroding purchasing power and pressuring the Fed to keep rates high.") if v > 310 else ("good", f"CPI at {v:.1f} — inflation is relatively contained, supportive of lower interest rates."), + "fed_rate": lambda v: ("good", f"Fed rate at {v:.2f}% — low rates stimulate borrowing and boost stock valuations.") if v < 2.5 else (("neutral", f"Fed rate at {v:.2f}% — moderate rates balance growth and inflation.") if v < 4.5 else ("bad", f"Fed rate at {v:.2f}% — high rates increase borrowing costs and can weigh on equity valuations.")), + "unemployment": lambda v: ("good", f"Unemployment at {v:.1f}% — tight labor market indicates strong economic health.") if v < 4.0 else (("neutral", f"Unemployment at {v:.1f}% — within normal range but rising unemployment could signal slowing growth.") if v < 5.5 else ("bad", f"Unemployment at {v:.1f}% — elevated joblessness signals 
economic weakness.")), + "treasury_2y": lambda v: ("neutral", f"2Y yield at {v:.2f}% — reflects market's short-term rate expectations. Higher yield means tighter policy expected.") if v < 4.5 else ("bad", f"2Y yield at {v:.2f}% — elevated short-term yields signal aggressive monetary tightening."), + "treasury_10y": lambda v: ("good", f"10Y yield at {v:.2f}% — low long-term rates are supportive of equity valuations and reduce borrowing costs.") if v < 3.5 else (("neutral", f"10Y yield at {v:.2f}% — moderate levels. Stocks can handle this but it raises cost of capital.") if v < 4.5 else ("bad", f"10Y yield at {v:.2f}% — high yields compete with stocks for investor capital and increase discount rates.")), + "treasury_30y": lambda v: ("neutral", f"30Y yield at {v:.2f}% — reflects long-term growth and inflation expectations.") if v < 4.0 else ("bad", f"30Y yield at {v:.2f}% — elevated long-term yields suggest persistent inflation concerns."), + } + fn = assessments.get(key) + if fn: return fn(v) + return "neutral", "No assessment available." 
+ + for key, (series_id, label) in series_map.items(): + try: + s = fred.get_series(series_id) + latest = s.dropna().iloc[-1] + val = round(float(latest), 2) + status, explanation = assess_indicator(key, val) + indicators[key] = {"value": val, "label": label, "status": status, "explanation": explanation} + except: + indicators[key] = {"value": "N/A", "label": label, "status": "neutral", "explanation": "Data currently unavailable."} + + return safe_jsonify({"indicators": indicators}) + except Exception as e: + traceback.print_exc() + return safe_jsonify({"error": str(e)}), 500 + +# ── Route: Earnings Calendar (Finnhub) ─────────────────── +@app.route("/api/earnings", methods=["POST"]) +def earnings_calendar(): + try: + data = request.json + ticker = data.get("ticker", "AAPL").upper().strip() + import requests as req + url = f"https://finnhub.io/api/v1/stock/earnings?symbol={ticker}&token={FINNHUB_KEY}" + resp = req.get(url, timeout=10).json() + + earnings = [] + for e in resp[:12]: # Last 12 quarters + earnings.append(sanitize_for_json({ + "period": e.get("period", ""), + "actual": e.get("actual", "N/A"), + "estimate": e.get("estimate", "N/A"), + "surprise": e.get("surprise", "N/A"), + "surprise_pct": e.get("surprisePercent", "N/A"), + })) + + return safe_jsonify({"ticker": ticker, "earnings": earnings}) + except Exception as e: + traceback.print_exc() + return safe_jsonify({"error": str(e)}), 500 + +# ── Route: Monte Carlo Simulation ──────────────────────── +@app.route("/api/monte-carlo", methods=["POST"]) +def monte_carlo(): + try: + data = request.json + ticker = data.get("ticker", "AAPL").upper().strip() + days = int(data.get("days", 60)) + simulations = int(data.get("simulations", 1000)) + prices = data.get("prices", []) + + if len(prices) < 30: + return safe_jsonify({"error": "Need at least 30 data points"}) + + closes = np.array([p["close"] for p in prices], dtype=float) + returns = np.diff(np.log(closes)) + mu = np.mean(returns) + sigma = np.std(returns) + 
last_price = closes[-1] + + # Run simulations + np.random.seed(42) + all_paths = np.zeros((simulations, days)) + for i in range(simulations): + daily_returns = np.random.normal(mu, sigma, days) + price_path = last_price * np.exp(np.cumsum(daily_returns)) + all_paths[i] = price_path + + # Calculate percentile bands + percentiles = {} + for p in [10, 25, 50, 75, 90]: + band = np.percentile(all_paths, p, axis=0) + percentiles[f"p{p}"] = [round(float(v), 2) for v in band] + + final_prices = all_paths[:, -1] + return safe_jsonify(sanitize_for_json({ + "ticker": ticker, "days": days, "simulations": simulations, + "start_price": round(float(last_price), 2), + "percentiles": percentiles, + "final_stats": { + "mean": round(float(np.mean(final_prices)), 2), + "median": round(float(np.median(final_prices)), 2), + "std": round(float(np.std(final_prices)), 2), + "p10": round(float(np.percentile(final_prices, 10)), 2), + "p90": round(float(np.percentile(final_prices, 90)), 2), + } + })) + except Exception as e: + traceback.print_exc() + return safe_jsonify({"error": str(e)}), 500 + +# ── Route: Altman Z-Score ──────────────────────────────── +@app.route("/api/zscore", methods=["POST"]) +def altman_zscore(): + try: + data = request.json + ticker = data.get("ticker", "AAPL").upper().strip() + tk = Toolkit(tickers=[ticker], api_key=FMP_API_KEY) + balance = tk.get_balance_sheet_statement() + income = tk.get_income_statement() + + ta = get_fin_val(balance, ticker, "Total Assets") + tl = get_fin_val(balance, ticker, "Total Liabilities") + ca = get_fin_val(balance, ticker, "Total Current Assets") + cl = get_fin_val(balance, ticker, "Total Current Liabilities") + re = get_fin_val(balance, ticker, "Retained Earnings") + ebit = get_fin_val(income, ticker, "Operating Income") + revenue = get_fin_val(income, ticker, "Revenue") + te = get_fin_val(balance, ticker, "Total Equity") + + if any(v == "N/A" or v == 0 for v in [ta]): + return safe_jsonify({"error": "Insufficient data", "ticker": 
# ── Route: Altman Z-Score ────────────────────────────────
@app.route("/api/zscore", methods=["POST"])
def altman_zscore():
    """Compute the classic Altman Z-Score (bankruptcy-risk gauge).

    Z = 1.2A + 1.4B + 3.3C + 0.6D + E over balance-sheet / income ratios;
    zones: > 2.99 Safe, 1.81–2.99 Grey, below Distress.
    """
    try:
        data = request.json
        ticker = data.get("ticker", "AAPL").upper().strip()
        tk = Toolkit(tickers=[ticker], api_key=FMP_API_KEY)
        balance = tk.get_balance_sheet_statement()
        income = tk.get_income_statement()

        ta = get_fin_val(balance, ticker, "Total Assets")
        tl = get_fin_val(balance, ticker, "Total Liabilities")
        ca = get_fin_val(balance, ticker, "Total Current Assets")
        cl = get_fin_val(balance, ticker, "Total Current Liabilities")
        re = get_fin_val(balance, ticker, "Retained Earnings")
        ebit = get_fin_val(income, ticker, "Operating Income")
        revenue = get_fin_val(income, ticker, "Revenue")
        te = get_fin_val(balance, ticker, "Total Equity")

        # Total assets is the denominator of four ratios — it must exist.
        if any(v == "N/A" or v == 0 for v in [ta]):
            return safe_jsonify({"error": "Insufficient data", "ticker": ticker})

        # Missing secondary fields degrade to 0 rather than failing.
        ta, tl = float(ta), float(tl) if tl != "N/A" else 0
        ca = float(ca) if ca != "N/A" else 0
        cl = float(cl) if cl != "N/A" else 0
        re = float(re) if re != "N/A" else 0
        ebit = float(ebit) if ebit != "N/A" else 0
        revenue = float(revenue) if revenue != "N/A" else 0
        te = float(te) if te != "N/A" else 0

        # Market cap feeds ratio D (market equity / total liabilities).
        import yfinance as yf
        stock = yf.Ticker(ticker)
        market_cap = stock.info.get("marketCap", 0) or 0

        wc = ca - cl
        A = wc / ta if ta != 0 else 0
        B = re / ta if ta != 0 else 0
        C = ebit / ta if ta != 0 else 0
        D = market_cap / tl if tl != 0 else 0
        E = revenue / ta if ta != 0 else 0

        z = 1.2 * A + 1.4 * B + 3.3 * C + 0.6 * D + E

        if z > 2.99:
            zone = "Safe Zone"
        elif z > 1.81:
            zone = "Grey Zone"
        else:
            zone = "Distress Zone"

        return safe_jsonify(sanitize_for_json({
            "ticker": ticker, "z_score": round(z, 2), "zone": zone,
            "components": {
                "A_working_capital": round(A, 4), "B_retained_earnings": round(B, 4),
                "C_ebit": round(C, 4), "D_market_cap_debt": round(D, 4),
                "E_revenue": round(E, 4),
            }
        }))
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

# ── Route: Dividend Analysis ─────────────────────────────
@app.route("/api/dividends", methods=["POST"])
def dividend_analysis():
    """Return headline dividend metrics plus the last 20 payout records."""
    try:
        data = request.json
        ticker = data.get("ticker", "AAPL").upper().strip()
        import yfinance as yf
        stock = yf.Ticker(ticker)
        info = stock.info
        dividends = stock.dividends

        div_history = []
        if dividends is not None and len(dividends) > 0:
            for date, amount in dividends.tail(20).items():
                div_history.append({"date": str(date)[:10], "amount": round(float(amount), 4)})

        return safe_jsonify(sanitize_for_json({
            "ticker": ticker,
            "dividend_yield": info.get("dividendYield", "N/A"),
            "dividend_rate": info.get("dividendRate", "N/A"),
            "payout_ratio": info.get("payoutRatio", "N/A"),
            "ex_dividend_date": str(info.get("exDividendDate", "N/A")),
            "five_year_avg_yield": info.get("fiveYearAvgDividendYield", "N/A"),
            "history": div_history,
        }))
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500
# ── Route: Correlation Matrix ────────────────────────────
@app.route("/api/correlation", methods=["POST"])
def correlation_matrix():
    """Correlation of the ticker's daily returns vs. major benchmark ETFs/VIX."""
    try:
        data = request.json
        ticker = data.get("ticker", "AAPL").upper().strip()
        import yfinance as yf

        benchmarks = ["SPY", "QQQ", "DIA", "GLD", "TLT", "VIX"]
        all_tickers = [ticker] + [b for b in benchmarks if b.upper() != ticker]

        closes = {}
        for t in all_tickers:
            try:
                # VIX is an index on Yahoo, hence the caret prefix.
                sym = f"^{t}" if t == "VIX" else t
                hist = yf.Ticker(sym).history(period="1y")
                if not hist.empty:
                    closes[t] = hist["Close"].pct_change().dropna()
            except:
                pass  # best-effort: skip symbols that fail to download

        if len(closes) < 2:
            return safe_jsonify({"error": "Not enough data for correlation"})

        df = pd.DataFrame(closes)
        corr = df.corr()

        matrix = {}
        for col in corr.columns:
            matrix[col] = {}
            for row in corr.index:
                matrix[col][row] = round(float(corr.loc[row, col]), 3)

        return safe_jsonify({"ticker": ticker, "tickers": list(corr.columns), "matrix": matrix})
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

# ── Route: Sector Performance Heatmap (Alpha Vantage) ────
@app.route("/api/heatmap", methods=["POST"])
def sector_heatmap():
    """Sector performance across several timeframes for the heatmap widget."""
    try:
        import requests as req
        url = f"https://www.alphavantage.co/query?function=SECTOR&apikey={ALPHA_VANTAGE_KEY}"
        resp = req.get(url, timeout=15).json()

        sectors = {}
        # Map our short timeframe keys to Alpha Vantage's response sections.
        timeframes = {
            "1D": "Rank A: Real-Time Performance",
            "1W": "Rank C: 5 Day Performance",
            "1M": "Rank D: 1 Month Performance",
            "3M": "Rank E: 3 Month Performance",
            "YTD": "Rank F: Year-to-Date (YTD) Performance",
            "1Y": "Rank G: 1 Year Performance",
        }

        for tf_key, av_key in timeframes.items():
            data = resp.get(av_key, {})
            sectors[tf_key] = {}
            for sector, pct in data.items():
                try:
                    # Values arrive as strings like "1.23%".
                    sectors[tf_key][sector] = float(pct.replace("%", ""))
                except:
                    sectors[tf_key][sector] = 0

        return safe_jsonify({"sectors": sectors, "timeframes": list(timeframes.keys())})
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

# ── Route: Currency Conversion Rates ─────────────────────
def _usd_rate(to_currency, fallback):
    """Fetch the USD -> `to_currency` spot rate from Alpha Vantage.

    Returns `fallback` on any network/parse failure so the endpoint is
    always able to answer.  (Extracted helper: the original route repeated
    this fetch block verbatim for GBP and INR.)
    """
    try:
        import requests as req
        url = f"https://www.alphavantage.co/query?function=CURRENCY_EXCHANGE_RATE&from_currency=USD&to_currency={to_currency}&apikey={ALPHA_VANTAGE_KEY}"
        resp = req.get(url, timeout=10).json()
        return float(resp.get("Realtime Currency Exchange Rate", {}).get("5. Exchange Rate", fallback))
    except:
        return fallback

@app.route("/api/currency", methods=["POST"])
def currency_rates():
    """Return conversion rates from USD to GBP/INR so the frontend can convert all values."""
    usd_gbp = _usd_rate("GBP", 0.79)  # Fallback ~0.79 if API unavailable
    usd_inr = _usd_rate("INR", 83.5)  # Fallback ~83.5 if API unavailable
    return safe_jsonify({"USD": 1.0, "GBP": usd_gbp, "INR": usd_inr})
# ── Route: Options Chain (with Greeks) ───────────────────
@app.route("/api/options/chain", methods=["POST"])
def options_chain():
    """Fetch options chain data with calculated Greeks.

    Picks the requested expiration (falling back to the nearest one) and
    returns up to 50 calls and 50 puts enriched with Greeks.
    """
    try:
        data = request.json
        ticker = data.get("ticker", "AAPL").upper().strip()
        import yfinance as yf
        from options_engine import enrich_chain_with_greeks
        stock = yf.Ticker(ticker)
        expirations = stock.options
        if not expirations:
            return safe_jsonify({"error": f"No options data available for {ticker}", "available": False})

        # Fall back to the nearest expiration if the requested one is invalid.
        exp = data.get("expiration", expirations[0])
        if exp not in expirations:
            exp = expirations[0]

        chain = stock.option_chain(exp)
        calls = chain.calls.fillna(0).to_dict(orient="records")
        puts = chain.puts.fillna(0).to_dict(orient="records")

        # Clean up timestamps (non-JSON-serializable datetime-likes -> str).
        for row in calls + puts:
            for k, v in row.items():
                if hasattr(v, 'isoformat'):
                    row[k] = str(v)

        info = stock.info
        current_price = info.get("regularMarketPrice") or info.get("previousClose", 0)

        # Tag side, then enrich with Greeks.
        for c in calls:
            c["_type"] = "call"
        for p in puts:
            p["_type"] = "put"
        calls = enrich_chain_with_greeks(calls, current_price, exp)
        puts = enrich_chain_with_greeks(puts, current_price, exp)

        return safe_jsonify({
            "ticker": ticker,
            "current_price": current_price,
            "expiration": exp,
            "expirations": list(expirations),
            "calls": calls[:50],
            "puts": puts[:50],
            "call_count": len(calls),
            "put_count": len(puts),
        })
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

# ── Route: Futures Data ──────────────────────────────────
@app.route("/api/futures", methods=["POST"])
def futures_data():
    """Fetch futures contract data via yfinance."""
    try:
        data = request.json
        ticker = data.get("ticker", "ES=F").upper().strip()
        import yfinance as yf
        fut = yf.Ticker(ticker)
        info = fut.info

        # Six months of daily OHLCV for the chart.
        hist = fut.history(period="6mo")
        price_history = []
        for date, row in hist.iterrows():
            price_history.append({
                "date": date.strftime("%Y-%m-%d"),
                "open": round(row["Open"], 2),
                "high": round(row["High"], 2),
                "low": round(row["Low"], 2),
                "close": round(row["Close"], 2),
                "volume": int(row.get("Volume", 0)),
            })

        return safe_jsonify({
            "ticker": ticker,
            "name": info.get("shortName", ticker),
            "price": info.get("regularMarketPrice") or info.get("previousClose", 0),
            "change": info.get("regularMarketChange", 0),
            "change_pct": info.get("regularMarketChangePercent", 0),
            "day_high": info.get("regularMarketDayHigh", 0),
            "day_low": info.get("regularMarketDayLow", 0),
            "open_interest": info.get("openInterest", "N/A"),
            "volume": info.get("regularMarketVolume", 0),
            "prev_close": info.get("regularMarketPreviousClose", 0),
            "price_history": price_history,
        })
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

# ── Route: Forex Pair Data ───────────────────────────────
@app.route("/api/forex", methods=["POST"])
def forex_data():
    """Fetch forex pair data via yfinance."""
    try:
        data = request.json
        pair = data.get("pair", "EURUSD=X").upper().strip()
        import yfinance as yf
        fx = yf.Ticker(pair)
        info = fx.info

        # FX quotes get 6 decimal places (pip precision).
        hist = fx.history(period="6mo")
        price_history = []
        for date, row in hist.iterrows():
            price_history.append({
                "date": date.strftime("%Y-%m-%d"),
                "open": round(row["Open"], 6),
                "high": round(row["High"], 6),
                "low": round(row["Low"], 6),
                "close": round(row["Close"], 6),
                "volume": int(row.get("Volume", 0)),
            })

        return safe_jsonify({
            "pair": pair,
            "name": info.get("shortName", pair),
            "rate": info.get("regularMarketPrice") or info.get("previousClose", 0),
            "change": info.get("regularMarketChange", 0),
            "change_pct": info.get("regularMarketChangePercent", 0),
            "day_high": info.get("regularMarketDayHigh", 0),
            "day_low": info.get("regularMarketDayLow", 0),
            "bid": info.get("bid", 0),
            "ask": info.get("ask", 0),
            "price_history": price_history,
        })
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500
# ── Route: Live Price (for auto-refresh) ─────────────────
@app.route("/api/live-price", methods=["POST"])
def live_price():
    """Quick price fetch for auto-refresh polling."""
    try:
        data = request.json
        ticker = data.get("ticker", "AAPL").upper().strip()
        import yfinance as yf
        stock = yf.Ticker(ticker)
        info = stock.info
        return safe_jsonify({
            "ticker": ticker,
            "price": info.get("regularMarketPrice") or info.get("previousClose", 0),
            "change": info.get("regularMarketChange", 0),
            "change_pct": info.get("regularMarketChangePercent", 0),
            "volume": info.get("regularMarketVolume", 0),
            "timestamp": datetime.now().isoformat(),
        })
    except Exception as e:
        return safe_jsonify({"error": str(e)}), 500

# ── Route: Options Payoff Diagram ────────────────────────
@app.route("/api/options/payoff", methods=["POST"])
def options_payoff():
    """Compute payoff-diagram points for a single long/short call or put."""
    try:
        from options_engine import payoff_diagram
        data = request.json
        K = float(data.get("strike", 100))
        premium = float(data.get("premium", 5))
        opt_type = data.get("option_type", "call")
        is_long = data.get("is_long", True)
        # Default the spot to the strike if the client omits it.
        S = float(data.get("current_price", K))
        points = payoff_diagram(S, K, premium, opt_type, is_long)
        return safe_jsonify({"points": points, "strike": K, "premium": premium, "type": opt_type, "direction": "long" if is_long else "short"})
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

# ── Route: Portfolio ─────────────────────────────────────
import portfolio_engine

def _fetch_live_prices(tickers):
    """Helper: fetch live prices for a list of tickers (best effort)."""
    import yfinance as yf
    prices = {}
    for t in tickers:
        try:
            stock = yf.Ticker(t)
            info = stock.info
            prices[t] = info.get("regularMarketPrice") or info.get("previousClose", 0)
        except:
            pass  # skip tickers that fail; callers handle missing keys
    return prices

@app.route("/api/portfolio", methods=["GET"])
def get_portfolio():
    """Get current portfolio with live prices.

    Also records an equity-curve point and daily snapshot as a side effect,
    and fills any pending limit/stop orders against the fresh prices.
    """
    try:
        positions = portfolio_engine.get_positions()
        current_prices = _fetch_live_prices([p["ticker"] for p in positions])
        summary = portfolio_engine.get_portfolio_summary(current_prices)
        # Record equity point and daily snapshot
        portfolio_engine.record_equity_point(summary["total_value"], summary["cash"], summary["positions_value"])
        portfolio_engine.save_daily_snapshot(summary["total_value"], summary["cash"], summary["positions_value"])
        # Check pending orders
        filled = portfolio_engine.check_and_fill_orders(current_prices)
        summary["filled_orders"] = filled
        summary["pending_orders"] = portfolio_engine.get_pending_orders()
        return safe_jsonify(summary)
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

@app.route("/api/portfolio/buy", methods=["POST"])
def portfolio_buy():
    """Buy shares at the live market price."""
    try:
        data = request.json
        ticker = data.get("ticker", "").upper().strip()
        shares = float(data.get("shares", 0))
        asset_type = data.get("asset_type", "stock")
        import yfinance as yf
        stock = yf.Ticker(ticker)
        info = stock.info
        price = info.get("regularMarketPrice") or info.get("previousClose", 0)
        if price <= 0:
            return safe_jsonify({"error": f"Cannot get price for {ticker}"}), 400
        result = portfolio_engine.buy(ticker, shares, price, asset_type)
        if "error" in result:
            return safe_jsonify(result), 400
        return safe_jsonify(result)
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

@app.route("/api/portfolio/sell", methods=["POST"])
def portfolio_sell():
    """Sell shares at the live market price."""
    try:
        data = request.json
        ticker = data.get("ticker", "").upper().strip()
        shares = float(data.get("shares", 0))
        import yfinance as yf
        stock = yf.Ticker(ticker)
        info = stock.info
        price = info.get("regularMarketPrice") or info.get("previousClose", 0)
        if price <= 0:
            return safe_jsonify({"error": f"Cannot get price for {ticker}"}), 400
        result = portfolio_engine.sell(ticker, shares, price)
        if "error" in result:
            return safe_jsonify(result), 400
        return safe_jsonify(result)
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500
@app.route("/api/portfolio/short", methods=["POST"])
def portfolio_short():
    """Open a short position."""
    try:
        data = request.json
        ticker = data.get("ticker", "").upper().strip()
        shares = float(data.get("shares", 0))
        asset_type = data.get("asset_type", "stock")
        import yfinance as yf
        stock = yf.Ticker(ticker)
        info = stock.info
        price = info.get("regularMarketPrice") or info.get("previousClose", 0)
        if price <= 0:
            return safe_jsonify({"error": f"Cannot get price for {ticker}"}), 400
        result = portfolio_engine.short_sell(ticker, shares, price, asset_type)
        if "error" in result:
            return safe_jsonify(result), 400
        return safe_jsonify(result)
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

@app.route("/api/portfolio/cover", methods=["POST"])
def portfolio_cover():
    """Cover (close) a short position."""
    try:
        data = request.json
        ticker = data.get("ticker", "").upper().strip()
        shares = float(data.get("shares", 0))
        import yfinance as yf
        stock = yf.Ticker(ticker)
        info = stock.info
        price = info.get("regularMarketPrice") or info.get("previousClose", 0)
        if price <= 0:
            return safe_jsonify({"error": f"Cannot get price for {ticker}"}), 400
        result = portfolio_engine.cover_short(ticker, shares, price)
        if "error" in result:
            return safe_jsonify(result), 400
        return safe_jsonify(result)
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

@app.route("/api/portfolio/limit-order", methods=["POST"])
def portfolio_limit_order():
    """Place a limit order."""
    try:
        data = request.json
        result = portfolio_engine.place_limit_order(
            ticker=data.get("ticker", "").upper().strip(),
            side=data.get("side", "BUY").upper(),
            shares=float(data.get("shares", 0)),
            limit_price=float(data.get("price", 0)),
            asset_type=data.get("asset_type", "stock"),
        )
        if "error" in result:
            return safe_jsonify(result), 400
        return safe_jsonify(result)
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

@app.route("/api/portfolio/stop-order", methods=["POST"])
def portfolio_stop_order():
    """Place a stop order (stop-loss / take-profit)."""
    try:
        data = request.json
        result = portfolio_engine.place_stop_order(
            ticker=data.get("ticker", "").upper().strip(),
            side=data.get("side", "SELL").upper(),
            shares=float(data.get("shares", 0)),
            stop_price=float(data.get("price", 0)),
            asset_type=data.get("asset_type", "stock"),
        )
        if "error" in result:
            return safe_jsonify(result), 400
        return safe_jsonify(result)
    except Exception as e:
        traceback.print_exc()
        return safe_jsonify({"error": str(e)}), 500

@app.route("/api/portfolio/orders", methods=["GET"])
def portfolio_orders():
    """Get pending orders."""
    try:
        return safe_jsonify({"orders": portfolio_engine.get_pending_orders()})
    except Exception as e:
        return safe_jsonify({"error": str(e)}), 500

@app.route("/api/portfolio/cancel-order", methods=["POST"])
def portfolio_cancel_order():
    """Cancel a pending order."""
    try:
        data = request.json
        order_id = int(data.get("order_id", 0))
        return safe_jsonify(portfolio_engine.cancel_order(order_id))
    except Exception as e:
        return safe_jsonify({"error": str(e)}), 500

@app.route("/api/portfolio/equity-curve", methods=["GET"])
def portfolio_equity_curve():
    """Get equity curve for charting."""
    try:
        return safe_jsonify({"curve": portfolio_engine.get_equity_curve()})
    except Exception as e:
        return safe_jsonify({"error": str(e)}), 500
+@app.route("/api/portfolio/history", methods=["GET"]) +def portfolio_history(): + try: + return safe_jsonify({"transactions": portfolio_engine.get_transactions()}) + except Exception as e: + return safe_jsonify({"error": str(e)}), 500 + +@app.route("/api/portfolio/analytics", methods=["GET"]) +def portfolio_analytics(): + try: + positions = portfolio_engine.get_positions() + current_prices = _fetch_live_prices([p["ticker"] for p in positions]) + return safe_jsonify(portfolio_engine.get_analytics(current_prices)) + except Exception as e: + return safe_jsonify({"error": str(e)}), 500 + +@app.route("/api/portfolio/reset", methods=["POST"]) +def portfolio_reset(): + try: + return safe_jsonify(portfolio_engine.reset_portfolio()) + except Exception as e: + return safe_jsonify({"error": str(e)}), 500 + +# ── Route: Market Summary (Dashboard) ──────────────── +_market_cache = {"data": None, "ts": 0} + +@app.route("/api/market-summary", methods=["GET"]) +def market_summary(): + """Global market snapshot for the dashboard landing page.""" + import time + now = time.time() + # 5-minute cache + if _market_cache["data"] and (now - _market_cache["ts"]) < 300: + return safe_jsonify(_market_cache["data"]) + + try: + import yfinance as yf + + symbols = { + "indices": [ + ("^NSEI", "NIFTY 50"), ("^BSESN", "SENSEX"), + ("^GSPC", "S&P 500"), ("^IXIC", "NASDAQ"), + ("^FTSE", "FTSE 100"), ("^N225", "Nikkei 225"), + ], + "commodities": [ + ("GC=F", "Gold"), ("SI=F", "Silver"), + ("CL=F", "Crude Oil WTI"), ("NG=F", "Natural Gas"), + ], + "currencies": [ + ("EURUSD=X", "EUR/USD"), ("GBPUSD=X", "GBP/USD"), + ("USDJPY=X", "USD/JPY"), ("USDINR=X", "USD/INR"), + ], + } + + result = {} + for category, items in symbols.items(): + cat_list = [] + tickers_str = " ".join([s[0] for s in items]) + try: + data = yf.download(tickers_str, period="2d", group_by="ticker", progress=False) + for sym, name in items: + try: + if len(items) == 1: + df = data + else: + df = data[sym] if sym in 
data.columns.get_level_values(0) else None + + if df is not None and len(df) >= 1: + latest = df.iloc[-1] + prev = df.iloc[-2] if len(df) >= 2 else df.iloc[-1] + price = float(latest["Close"]) + prev_close = float(prev["Close"]) + change = price - prev_close + change_pct = (change / prev_close * 100) if prev_close else 0 + cat_list.append({ + "symbol": sym, "name": name, + "price": round(price, 2), + "change": round(change, 2), + "change_pct": round(change_pct, 2), + }) + else: + cat_list.append({"symbol": sym, "name": name, "price": 0, "change": 0, "change_pct": 0}) + except Exception: + cat_list.append({"symbol": sym, "name": name, "price": 0, "change": 0, "change_pct": 0}) + except Exception: + for sym, name in items: + cat_list.append({"symbol": sym, "name": name, "price": 0, "change": 0, "change_pct": 0}) + result[category] = cat_list + + _market_cache["data"] = result + _market_cache["ts"] = now + return safe_jsonify(result) + except Exception as e: + traceback.print_exc() + return safe_jsonify({"error": str(e)}), 500 + +if __name__ == "__main__": + import socket + hostname = socket.gethostname() + local_ip = socket.gethostbyname(hostname) + print(f"\n FinanceIQ v5.2") + print(f" Local: http://localhost:5000") + print(f" Network: http://{local_ip}:5000\n") + app.run(debug=True, port=5000, host="0.0.0.0") + diff --git a/backtester.py b/backtester.py new file mode 100644 index 0000000..783fdf7 --- /dev/null +++ b/backtester.py @@ -0,0 +1,177 @@ +""" +Backtesting Engine - Phase 7 (Bi-Directional Market Neutral) +Profits from both rising and falling markets. +Logic: +1. Long: Price > EMA 5 +2. Short: Price < EMA 5 +3. Hedge: If BB Width > 5% (High Volatility), hold both to stay neutral. 
"""
Backtesting Engine - Phase 7 (Bi-Directional Market Neutral)
Profits from both rising and falling markets.
Logic:
1. Long: Price > EMA 5
2. Short: Price < EMA 5
3. Hedge: If BB Width > 5% (High Volatility), hold both to stay neutral.
"""
import pandas as pd
import numpy as np
import sys

# Constants
# NOTE(review): hard-coded API key — move into an environment variable before
# this leaves a private repo.
API_KEY = "wybWEsp1oB9abHfz3yPpQYwffxaN21B7"
TICKERS = ["^NSEBANK", "NVDA", "TSLA"]
MONTHLY_BUDGET = 300.0


def fetch_portfolio_data():
    """Fetch ~1 year of daily history for TICKERS with EMA-5 and BB-width.

    Returns a dict {ticker: DataFrame} with columns Close, EMA_5, BB_Width
    (NaN warm-up rows dropped); an empty dict on any fetch error.
    """
    print(f"--- Fetching 1 Year Data for {TICKERS} ---")
    try:
        # Deferred import: financetoolkit is a heavy optional dependency and
        # only this function needs it, so the module stays importable without it.
        from financetoolkit import Toolkit
        companies = Toolkit(
            tickers=TICKERS,
            api_key=API_KEY,
            start_date="2024-01-01",
        )
        historical = companies.get_historical_data(period="daily")

        portfolio = {}
        for ticker in TICKERS:
            # Column level holding the ticker differs across toolkit versions.
            if ticker in historical.columns.get_level_values(1):
                df = historical.xs(ticker, level=1, axis=1).copy()
            elif ticker in historical.columns.get_level_values(0):
                df = historical.xs(ticker, level=0, axis=1).copy()
            else:
                continue

            # Technicals for signals
            df['EMA_5'] = df['Close'].ewm(span=5, adjust=False).mean()
            # Bollinger Band Width: 4 standard deviations over the 20-day mean
            std_20 = df['Close'].rolling(window=20).std()
            ma_20 = df['Close'].rolling(window=20).mean()
            df['BB_Width'] = (4 * std_20) / ma_20

            df.dropna(inplace=True)
            portfolio[ticker] = df

        return portfolio
    except Exception as e:
        print(f"Error fetching data: {e}")
        return {}


def simulate_market_neutral(data, ticker):
    """Simulate the bi-directional strategy over *data* for one ticker.

    The book can be Long, Short, or Hedged.  *data* needs Close, EMA_5 and
    BB_Width columns.  Returns a dict with final value, invested capital,
    strategy ROI and buy-and-hold benchmark ROI (both in percent).

    Fixed vs. the original:
      * the SHORT branch used to ASSIGN `short_holdings = cash / price`,
        overwriting any existing short — the margin backing the previous short
        silently vanished from the books; shorts are now accumulated with a
        volume-weighted entry price.
      * the HEDGE rebalance valued short margin at the current price; the
        capital actually reserved was shares * entry price, which is what is
        now used.
    """
    cash = 0
    long_holdings = 0
    short_holdings = 0
    short_entry_price = 0

    total_invested = 0
    budget_per_month = MONTHLY_BUDGET / len(TICKERS)

    day_count = 0
    for date, row in data.iterrows():
        day_count += 1
        price = row['Close']

        # Monthly injection (~20 trading days per month)
        if day_count % 20 == 0:
            cash += budget_per_month
            total_invested += budget_per_month

        # Pick today's signal (checked daily; switching closes the old side)
        if row['BB_Width'] > 0.08:   # hedge when volatility is high (>8% width)
            signal = "HEDGE"
        elif price > row['EMA_5']:
            signal = "LONG"
        else:
            signal = "SHORT"

        if signal == "LONG":
            # Close short if any: margin back plus mark-to-market profit
            if short_holdings > 0:
                profit = (short_entry_price - price) * short_holdings
                cash += (short_holdings * short_entry_price) + profit
                short_holdings = 0
            # Open/extend long
            if cash > 10:
                long_holdings += cash / price
                cash = 0

        elif signal == "SHORT":
            # Close long if any
            if long_holdings > 0:
                cash += long_holdings * price
                long_holdings = 0
            # Open/extend short with a volume-weighted entry price
            if cash > 10:
                added = cash / price
                combined = short_holdings + added
                short_entry_price = (
                    (short_holdings * short_entry_price) + (added * price)
                ) / combined
                short_holdings = combined
                cash = 0

        elif signal == "HEDGE":
            # Flatten everything, then split the book 50/50 long/short.
            current_val = cash + (long_holdings * price)
            if short_holdings > 0:
                # Margin reserved at entry plus mark-to-market short P&L.
                current_val += short_holdings * short_entry_price
                current_val += (short_entry_price - price) * short_holdings

            cash = current_val
            long_holdings = (cash * 0.5) / price
            short_holdings = (cash * 0.5) / price
            short_entry_price = price
            cash = 0

    # Final liquidation: longs at the last close; shorts return their margin
    # plus mark-to-market profit.
    final_price = data.iloc[-1]['Close']
    short_profit = (short_entry_price - final_price) * short_holdings if short_holdings > 0 else 0
    final_val = cash + (long_holdings * final_price) + (short_holdings * short_entry_price) + short_profit

    profit = final_val - total_invested
    roi = profit / total_invested if total_invested > 0 else 0

    # Buy-and-hold benchmark over the same window
    start = data.iloc[0]['Close']
    end = data.iloc[-1]['Close']
    bench_roi = (end / start) - 1 if start else 0

    return {
        'ticker': ticker,
        'final_val': final_val,
        'invested': total_invested,
        'roi': roi * 100,
        'bench_roi': bench_roi * 100
    }


def main():
    """Run the backtest for every ticker and print a summary table."""
    portfolio = fetch_portfolio_data()
    if not portfolio:
        return

    print("\n--- Running Phase 7 Backtest (Bi-Directional Long/Short) ---")
    results = []

    for ticker, df in portfolio.items():
        res = simulate_market_neutral(df, ticker)
        results.append(res)

    print("\n" + "=" * 60)
    print(f"{'Ticker':<10} | {'Strategy ROI':<15} | {'Bench ROI':<10}")
    print("-" * 60)

    total_invested = 0
    total_final = 0

    for r in results:
        print(f"{r['ticker']:<10} | {r['roi']:>14.2f}% | {r['bench_roi']:>9.2f}%")
        total_invested += r['invested']
        total_final += r['final_val']

    final_roi = ((total_final - total_invested) / total_invested) * 100 if total_invested > 0 else 0

    print("-" * 60)
    print(f"{'TOTAL':<10} | {final_roi:>14.2f}% | {'---':>9}")
    print("=" * 60)

    print("\nNote: ROI includes profits from both price gains (Long) and price drops (Short).")


if __name__ == "__main__":
    main()

# ── patch separator: diff --git a/news_analyzer.py b/news_analyzer.py ────────
# (new file, index 0000000..05148e2).  The original patch opens news_analyzer.py
# here with its module docstring ("Module for fetching financial news via
# Google News RSS and analyzing sentiment using VADER."), its top-of-file
# imports (feedparser, vaderSentiment, datetime, time) and the head of
# fetch_news(), which continues past this region boundary.
def fetch_news(ticker, limit=5):
    """
    Fetches the latest news for a given ticker from Google News RSS.

    Args:
        ticker (str): Stock ticker symbol (e.g., 'AAPL', 'NVDA', 'RELIANCE.NS').
        limit (int): Maximum number of news items to return.

    Returns:
        list: A list of dictionaries containing 'title', 'link', 'published', and 'summary'.
    """
    # Deferred imports keep the module importable without the optional
    # third-party feed parser installed.
    import feedparser
    from datetime import datetime
    # fixed: only "&" was hand-escaped before; quote_plus percent-encodes every
    # URL-unsafe character in the ticker (e.g. "^", spaces) as well.
    from urllib.parse import quote_plus

    # Use standard Google News RSS search query
    # "when:1d" parameter ensures recent news if supported, but RSS feed order is usually chronological
    encoded_ticker = quote_plus(ticker)
    rss_url = f"https://news.google.com/rss/search?q={encoded_ticker}+stock+when:1d&hl=en-US&gl=US&ceid=US:en"

    try:
        feed = feedparser.parse(rss_url)
        news_items = []

        for entry in feed.entries[:limit]:
            news_items.append({
                'title': entry.title,
                'link': entry.link,
                'published': entry.get('published', datetime.now().strftime("%a, %d %b %Y %H:%M:%S GMT")),
                'summary': entry.get('description', '')
            })

        return news_items
    except Exception as e:
        print(f"Error fetching news for {ticker}: {e}")
        return []


def analyze_sentiment(news_items):
    """
    Analyzes the sentiment of a list of news items.

    Args:
        news_items (list): List of news dictionaries.

    Returns:
        dict: Contains 'average_score', 'sentiment_label', and 'scored_news'.
    """
    # fixed: the empty-input early return now comes BEFORE the analyzer is
    # constructed, so the empty path does no wasted work (and needs no VADER).
    if not news_items:
        return {
            'average_score': 0,
            'sentiment_label': 'Neutral',
            'scored_news': []
        }

    from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
    analyzer = SentimentIntensityAnalyzer()
    total_score = 0
    scored_news = []

    for item in news_items:
        # Analyze title mostly as it contains the key info in RSS
        text_to_analyze = f"{item['title']}. {item['summary']}"
        sentiment = analyzer.polarity_scores(text_to_analyze)
        compound_score = sentiment['compound']

        total_score += compound_score

        item_with_score = item.copy()
        item_with_score['sentiment_score'] = compound_score
        scored_news.append(item_with_score)

    average_score = total_score / len(news_items)

    # Standard VADER compound thresholds (+/-0.05)
    if average_score >= 0.05:
        label = "Positive"
    elif average_score <= -0.05:
        label = "Negative"
    else:
        label = "Neutral"

    return {
        'average_score': round(average_score, 4),
        'sentiment_label': label,
        'scored_news': scored_news
    }


if __name__ == "__main__":
    # Test Block
    ticker = "AAPL"
    print(f"--- Fetching News for {ticker} ---")
    news = fetch_news(ticker)
    for n in news:
        print(f"- {n['title']}")

    print(f"\n--- Analyzing Sentiment ---")
    result = analyze_sentiment(news)
    print(f"Average Score: {result['average_score']}")
    print(f"Label: {result['sentiment_label']}")

# ── patch separator: diff --git a/options_engine.py b/options_engine.py ──────
# (new file, index 0000000..c50b3c5).  The options_engine.py module docstring
# opens here in the original patch ("FinanceIQ v5.1 — Options Analytics Engine
# / Black-Scholes pricing, Greeks, implied volatility, and payoff diagrams.")
# and closes in the next region.
"""
FinanceIQ v5.1 — Options Analytics Engine
Black-Scholes pricing, Greeks, implied volatility, and payoff diagrams.
"""
import math
from scipy.stats import norm


def d1(S, K, T, r, sigma):
    """Calculate d1 for Black-Scholes (0.0 for degenerate T or sigma)."""
    if T <= 0 or sigma <= 0:
        return 0.0
    return (math.log(S / K) + (r + 0.5 * sigma ** 2) * T) / (sigma * math.sqrt(T))


def d2(S, K, T, r, sigma):
    """Calculate d2 for Black-Scholes (0.0 for degenerate T or sigma)."""
    return d1(S, K, T, r, sigma) - sigma * math.sqrt(T) if T > 0 and sigma > 0 else 0.0


def bs_call_price(S, K, T, r, sigma):
    """Black-Scholes call option price (intrinsic value at/after expiry)."""
    if T <= 0:
        return max(S - K, 0)
    _d1 = d1(S, K, T, r, sigma)
    _d2 = d2(S, K, T, r, sigma)
    return S * norm.cdf(_d1) - K * math.exp(-r * T) * norm.cdf(_d2)


def bs_put_price(S, K, T, r, sigma):
    """Black-Scholes put option price (intrinsic value at/after expiry)."""
    if T <= 0:
        return max(K - S, 0)
    _d1 = d1(S, K, T, r, sigma)
    _d2 = d2(S, K, T, r, sigma)
    return K * math.exp(-r * T) * norm.cdf(-_d2) - S * norm.cdf(-_d1)


def calculate_greeks(S, K, T, r, sigma, option_type="call"):
    """
    Calculate all Greeks for an option.
    S: underlying price, K: strike, T: time to expiry (years),
    r: risk-free rate, sigma: implied volatility

    Returns a dict with delta, gamma, theta (per day), vega (per 1% IV move),
    rho (per 1% rate move) and the Black-Scholes price.  Degenerate inputs
    (T <= 0 or sigma <= 0) fall back to intrinsic value with step deltas.
    """
    if T <= 0 or sigma <= 0:
        intrinsic = max(S - K, 0) if option_type == "call" else max(K - S, 0)
        return {
            "delta": 1.0 if (option_type == "call" and S > K) else (-1.0 if option_type == "put" and K > S else 0.0),
            "gamma": 0.0,
            "theta": 0.0,
            "vega": 0.0,
            "rho": 0.0,
            "price": intrinsic
        }

    _d1 = d1(S, K, T, r, sigma)
    _d2 = d2(S, K, T, r, sigma)
    sqrt_T = math.sqrt(T)

    # Delta
    delta = norm.cdf(_d1) if option_type == "call" else norm.cdf(_d1) - 1

    # Gamma (same for calls & puts)
    gamma = norm.pdf(_d1) / (S * sigma * sqrt_T)

    # Theta (per day)
    theta_common = -(S * norm.pdf(_d1) * sigma) / (2 * sqrt_T)
    if option_type == "call":
        theta = (theta_common - r * K * math.exp(-r * T) * norm.cdf(_d2)) / 365
    else:
        theta = (theta_common + r * K * math.exp(-r * T) * norm.cdf(-_d2)) / 365

    # Vega (per 1% move in IV)
    vega = S * sqrt_T * norm.pdf(_d1) / 100

    # Rho (per 1% move in rate)
    if option_type == "call":
        rho = K * T * math.exp(-r * T) * norm.cdf(_d2) / 100
    else:
        rho = -K * T * math.exp(-r * T) * norm.cdf(-_d2) / 100

    # Price
    price = bs_call_price(S, K, T, r, sigma) if option_type == "call" else bs_put_price(S, K, T, r, sigma)

    return {
        "delta": round(delta, 4),
        "gamma": round(gamma, 6),
        "theta": round(theta, 4),
        "vega": round(vega, 4),
        "rho": round(rho, 4),
        "price": round(price, 2)
    }


def implied_volatility(market_price, S, K, T, r, option_type="call", tol=1e-6, max_iter=100):
    """
    Calculate implied volatility using Newton-Raphson method.

    Returns 0.0 for degenerate inputs; otherwise the best sigma found
    (rounded to 6 decimals), floored at 0.001 each step to stay positive.
    """
    if T <= 0 or market_price <= 0:
        return 0.0

    sigma = 0.3  # initial guess
    for _ in range(max_iter):
        if option_type == "call":
            price = bs_call_price(S, K, T, r, sigma)
        else:
            price = bs_put_price(S, K, T, r, sigma)

        diff = price - market_price
        if abs(diff) < tol:
            return round(sigma, 6)

        # Vega for Newton-Raphson step; bail out when it is numerically flat.
        _d1 = d1(S, K, T, r, sigma)
        vega = S * math.sqrt(T) * norm.pdf(_d1)
        if vega < 1e-12:
            break
        sigma -= diff / vega
        sigma = max(sigma, 0.001)  # floor

    return round(sigma, 6)


def payoff_diagram(S, K, premium, option_type="call", is_long=True, num_points=50):
    """
    Generate payoff diagram data points over strikes 0.7K .. 1.3K.
    Returns list of {price, payoff, profit} dicts (num_points + 1 entries).
    """
    low = K * 0.7
    high = K * 1.3
    step = (high - low) / num_points
    points = []
    for i in range(num_points + 1):
        price = low + i * step
        if option_type == "call":
            payoff = max(price - K, 0)
        else:
            payoff = max(K - price, 0)

        # Long pays the premium up front; short collects it.
        if is_long:
            profit = payoff - premium
        else:
            profit = premium - payoff

        points.append({
            "price": round(price, 2),
            "payoff": round(payoff, 2),
            "profit": round(profit, 2)
        })
    return points


def enrich_chain_with_greeks(chain_data, current_price, expiry_date_str, risk_free_rate=0.05):
    """
    Add Greeks to each option in a chain.

    chain_data: list of dicts from yfinance option_chain
    expiry_date_str: 'YYYY-MM-DD'
    Mutates and returns chain_data; options with unusable strike/price/IV get
    zeroed Greeks.
    """
    from datetime import datetime
    try:
        expiry = datetime.strptime(expiry_date_str, "%Y-%m-%d")
        today = datetime.now()
        T = max((expiry - today).days / 365.0, 0.001)
    except Exception:  # fixed: bare `except:` also swallowed KeyboardInterrupt
        T = 0.1  # fallback
    for opt in chain_data:
        strike = opt.get("strike", 0)
        last_price = opt.get("lastPrice", 0)
        iv = opt.get("impliedVolatility", 0.3)
        opt_type = "call" if opt.get("_type", "call") == "call" else "put"

        if strike > 0 and current_price > 0 and iv > 0:
            greeks = calculate_greeks(current_price, strike, T, risk_free_rate, iv, opt_type)
            opt["delta"] = greeks["delta"]
            opt["gamma"] = greeks["gamma"]
            opt["theta"] = greeks["theta"]
            opt["vega"] = greeks["vega"]
            opt["rho"] = greeks["rho"]
            opt["bs_price"] = greeks["price"]
        else:
            opt["delta"] = opt["gamma"] = opt["theta"] = opt["vega"] = opt["rho"] = 0
            opt["bs_price"] = 0

    return chain_data

# ── patch separator: diff --git a/portfolio_engine.py b/portfolio_engine.py ──
# (new file, index 0000000..96e0457).  The portfolio_engine.py module
# docstring opens here in the original patch ("FinanceIQ v5.2 — Enhanced Paper
# Trading Portfolio Engine / SQLite-backed virtual portfolio with $100K
# starting capital.") and continues in the next region.
"""
FinanceIQ v5.2 — Enhanced Paper Trading Portfolio Engine
SQLite-backed virtual portfolio with $100K starting capital.

Features inspired by hftbacktest:
  - Market, Limit, Stop-Loss, Take-Profit orders
  - Slippage & commission modeling
  - Short selling with margin tracking
  - Equity curve tracking
  - Enhanced analytics (Sortino, Calmar, profit factor, avg hold period)
"""
import sqlite3
import os
import math
from datetime import datetime, timedelta

DB_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "portfolio.db")
INITIAL_CASH = 100000.00
DEFAULT_SLIPPAGE_BPS = 5               # 0.05% default slippage
DEFAULT_COMMISSION_PER_SHARE = 0.005   # $0.005 per share (IBKR-style)
MIN_COMMISSION = 1.00                  # $1 minimum per trade


def get_db():
    """Get a database connection, creating the schema on first use.

    The CREATE TABLE IF NOT EXISTS statements run on every call; cheap, and it
    keeps each short-lived connection self-sufficient.
    """
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row
    conn.execute("PRAGMA journal_mode=WAL")

    conn.execute("""
        CREATE TABLE IF NOT EXISTS portfolio_meta (
            id INTEGER PRIMARY KEY CHECK (id = 1),
            cash REAL NOT NULL DEFAULT 100000.00,
            slippage_bps REAL NOT NULL DEFAULT 5,
            commission_per_share REAL NOT NULL DEFAULT 0.005,
            margin_used REAL NOT NULL DEFAULT 0,
            created_at TEXT NOT NULL DEFAULT (datetime('now')),
            updated_at TEXT NOT NULL DEFAULT (datetime('now'))
        )
    """)
    conn.execute("""
        CREATE TABLE IF NOT EXISTS positions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            ticker TEXT NOT NULL,
            asset_type TEXT NOT NULL DEFAULT 'stock',
            shares REAL NOT NULL DEFAULT 0,
            avg_cost REAL NOT NULL DEFAULT 0,
            side TEXT NOT NULL DEFAULT 'LONG',
            opened_at TEXT NOT NULL DEFAULT (datetime('now')),
            updated_at TEXT NOT NULL DEFAULT (datetime('now')),
            UNIQUE(ticker, side)
        )
    """)
    conn.execute("""
        CREATE TABLE IF NOT EXISTS transactions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            ticker TEXT NOT NULL,
            asset_type TEXT NOT NULL DEFAULT 'stock',
            action TEXT NOT NULL,
            side TEXT NOT NULL DEFAULT 'LONG',
            shares REAL NOT NULL,
            price REAL NOT NULL,
            slippage REAL NOT NULL DEFAULT 0,
            commission REAL NOT NULL DEFAULT 0,
            total REAL NOT NULL,
            pnl REAL DEFAULT NULL,
            timestamp TEXT NOT NULL DEFAULT (datetime('now'))
        )
    """)
    conn.execute("""
        CREATE TABLE IF NOT EXISTS pending_orders (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            ticker TEXT NOT NULL,
            asset_type TEXT NOT NULL DEFAULT 'stock',
            order_type TEXT NOT NULL,
            side TEXT NOT NULL DEFAULT 'BUY',
            shares REAL NOT NULL,
            target_price REAL NOT NULL,
            status TEXT NOT NULL DEFAULT 'PENDING',
            created_at TEXT NOT NULL DEFAULT (datetime('now')),
            filled_at TEXT DEFAULT NULL,
            expires_at TEXT DEFAULT NULL
        )
    """)
    conn.execute("""
        CREATE TABLE IF NOT EXISTS equity_curve (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp TEXT NOT NULL,
            total_value REAL NOT NULL,
            cash REAL NOT NULL,
            positions_value REAL NOT NULL,
            daily_return REAL DEFAULT 0
        )
    """)
    conn.execute("""
        CREATE TABLE IF NOT EXISTS portfolio_snapshots (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            date TEXT NOT NULL UNIQUE,
            total_value REAL NOT NULL,
            cash REAL NOT NULL,
            positions_value REAL NOT NULL
        )
    """)

    # Seed the singleton meta row with starting capital on first run.
    cursor = conn.execute("SELECT COUNT(*) FROM portfolio_meta")
    if cursor.fetchone()[0] == 0:
        conn.execute("INSERT INTO portfolio_meta (id, cash) VALUES (1, ?)", (INITIAL_CASH,))
    conn.commit()
    return conn


def _get_meta(conn):
    """Return the singleton portfolio_meta row as a plain dict."""
    row = conn.execute("SELECT * FROM portfolio_meta WHERE id = 1").fetchone()
    return dict(row)


def _calc_slippage(price, shares, slippage_bps, side):
    """Per-share slippage, signed by order direction.

    Buyers pay above, sellers receive below the quoted price.  *shares* is
    unused but kept for interface stability with existing callers.
    """
    slip_pct = slippage_bps / 10000.0
    if side == "BUY":
        return price * slip_pct   # pay more when buying
    else:
        return -price * slip_pct  # receive less when selling


def _calc_commission(shares, comm_per_share):
    """Per-trade commission with a $MIN_COMMISSION floor."""
    return max(abs(shares) * comm_per_share, MIN_COMMISSION)


def get_cash():
    """Current cash balance (INITIAL_CASH when the meta row is missing)."""
    conn = get_db()
    row = conn.execute("SELECT cash FROM portfolio_meta WHERE id = 1").fetchone()
    conn.close()
    return row["cash"] if row else INITIAL_CASH


def get_positions():
    """All open positions (long and short) with non-zero share counts."""
    conn = get_db()
    rows = conn.execute("SELECT * FROM positions WHERE shares > 0").fetchall()
    conn.close()
    return [dict(r) for r in rows]


def buy(ticker, shares, price, asset_type="stock"):
    """Buy shares (long) with slippage and commission.

    Returns an {"error": ...} dict on validation/cash failure, otherwise a
    success dict with the executed price, costs and remaining cash.
    """
    if shares <= 0 or price <= 0:
        return {"error": "Invalid shares or price"}

    conn = get_db()
    meta = _get_meta(conn)
    cash = meta["cash"]
    slippage_bps = meta["slippage_bps"]
    comm_per_share = meta["commission_per_share"]

    # Apply slippage
    slip = _calc_slippage(price, shares, slippage_bps, "BUY")
    exec_price = price + slip

    # Calculate commission
    commission = _calc_commission(shares, comm_per_share)

    total_cost = shares * exec_price + commission

    if total_cost > cash:
        conn.close()
        return {"error": f"Insufficient cash. Need ${total_cost:.2f}, have ${cash:.2f}"}

    new_cash = cash - total_cost
    conn.execute("UPDATE portfolio_meta SET cash = ?, updated_at = datetime('now') WHERE id = 1", (new_cash,))

    # Merge into an existing long position (weighted average cost) or open one.
    existing = conn.execute("SELECT * FROM positions WHERE ticker = ? AND side = 'LONG'", (ticker,)).fetchone()
    if existing:
        old_shares = existing["shares"]
        old_avg = existing["avg_cost"]
        new_shares = old_shares + shares
        new_avg = ((old_shares * old_avg) + (shares * exec_price)) / new_shares
        conn.execute(
            "UPDATE positions SET shares = ?, avg_cost = ?, updated_at = datetime('now') WHERE ticker = ? AND side = 'LONG'",
            (new_shares, new_avg, ticker)
        )
    else:
        conn.execute(
            "INSERT INTO positions (ticker, asset_type, shares, avg_cost, side) VALUES (?, ?, ?, ?, 'LONG')",
            (ticker, asset_type, shares, exec_price)
        )

    conn.execute(
        """INSERT INTO transactions (ticker, asset_type, action, side, shares, price, slippage, commission, total)
           VALUES (?, ?, 'BUY', 'LONG', ?, ?, ?, ?, ?)""",
        (ticker, asset_type, shares, exec_price, slip * shares, commission, total_cost)
    )
    conn.commit()
    conn.close()

    return {
        "success": True, "action": "BUY", "ticker": ticker,
        "shares": shares, "price": round(exec_price, 4),
        "slippage": round(slip, 4), "commission": round(commission, 2),
        "total": round(total_cost, 2), "remaining_cash": round(new_cash, 2)
    }


def sell(ticker, shares, price):
    """Sell long shares with slippage and commission.

    Realized P&L is (exec price - average cost) * shares minus commission.
    """
    if shares <= 0 or price <= 0:
        return {"error": "Invalid shares or price"}

    conn = get_db()
    meta = _get_meta(conn)
    slippage_bps = meta["slippage_bps"]
    comm_per_share = meta["commission_per_share"]

    existing = conn.execute("SELECT * FROM positions WHERE ticker = ? AND side = 'LONG'", (ticker,)).fetchone()
    if not existing or existing["shares"] < shares:
        conn.close()
        available = existing["shares"] if existing else 0
        return {"error": f"Insufficient shares. Have {available}, trying to sell {shares}"}

    slip = _calc_slippage(price, shares, slippage_bps, "SELL")
    exec_price = price + slip  # slip is negative for sells
    commission = _calc_commission(shares, comm_per_share)
    total_proceeds = shares * exec_price - commission

    new_shares = existing["shares"] - shares
    cash = meta["cash"]
    new_cash = cash + total_proceeds

    conn.execute("UPDATE portfolio_meta SET cash = ?, updated_at = datetime('now') WHERE id = 1", (new_cash,))

    if new_shares <= 0:
        conn.execute("DELETE FROM positions WHERE ticker = ? AND side = 'LONG'", (ticker,))
    else:
        conn.execute(
            "UPDATE positions SET shares = ?, updated_at = datetime('now') WHERE ticker = ? AND side = 'LONG'",
            (new_shares, ticker)
        )

    pnl = (exec_price - existing["avg_cost"]) * shares - commission
    conn.execute(
        """INSERT INTO transactions (ticker, asset_type, action, side, shares, price, slippage, commission, total, pnl)
           VALUES (?, ?, 'SELL', 'LONG', ?, ?, ?, ?, ?, ?)""",
        (ticker, existing["asset_type"], shares, exec_price, slip * shares, commission, total_proceeds, pnl)
    )
    conn.commit()
    conn.close()

    return {
        "success": True, "action": "SELL", "ticker": ticker,
        "shares": shares, "price": round(exec_price, 4),
        "slippage": round(slip, 4), "commission": round(commission, 2),
        "total": round(total_proceeds, 2), "pnl": round(pnl, 2),
        "remaining_cash": round(new_cash, 2)
    }


def short_sell(ticker, shares, price, asset_type="stock"):
    """Open a short position. 100% margin is deducted from cash."""
    if shares <= 0 or price <= 0:
        return {"error": "Invalid shares or price"}

    conn = get_db()
    meta = _get_meta(conn)
    cash = meta["cash"]
    slippage_bps = meta["slippage_bps"]
    comm_per_share = meta["commission_per_share"]

    slip = _calc_slippage(price, shares, slippage_bps, "SELL")
    exec_price = price + slip  # worse price when selling (slippage)
    commission = _calc_commission(shares, comm_per_share)

    # Margin requirement: 100% of position value + commission
    margin_req = shares * exec_price + commission
    if margin_req > cash:
        conn.close()
        return {"error": f"Insufficient funds. Need ${margin_req:.2f}, have ${cash:.2f}"}

    # Deduct margin from cash (no proceeds added — paper trading model)
    new_cash = cash - margin_req
    new_margin = meta["margin_used"] + (shares * exec_price)

    conn.execute("UPDATE portfolio_meta SET cash = ?, margin_used = ?, updated_at = datetime('now') WHERE id = 1",
                 (new_cash, new_margin))

    # Merge into an existing short (weighted average entry) or open one.
    existing = conn.execute("SELECT * FROM positions WHERE ticker = ? AND side = 'SHORT'", (ticker,)).fetchone()
    if existing:
        old_shares = existing["shares"]
        old_avg = existing["avg_cost"]
        new_shares = old_shares + shares
        new_avg = ((old_shares * old_avg) + (shares * exec_price)) / new_shares
        conn.execute(
            "UPDATE positions SET shares = ?, avg_cost = ?, updated_at = datetime('now') WHERE ticker = ? AND side = 'SHORT'",
            (new_shares, new_avg, ticker)
        )
    else:
        conn.execute(
            "INSERT INTO positions (ticker, asset_type, shares, avg_cost, side) VALUES (?, ?, ?, ?, 'SHORT')",
            (ticker, asset_type, shares, exec_price)
        )

    conn.execute(
        """INSERT INTO transactions (ticker, asset_type, action, side, shares, price, slippage, commission, total)
           VALUES (?, ?, 'SHORT', 'SHORT', ?, ?, ?, ?, ?)""",
        (ticker, asset_type, shares, exec_price, abs(slip) * shares, commission, margin_req)
    )
    conn.commit()
    conn.close()

    return {
        "success": True, "action": "SHORT", "ticker": ticker,
        "shares": shares, "price": round(exec_price, 4),
        "margin_required": round(margin_req, 2),
        "commission": round(commission, 2)
    }


def cover_short(ticker, shares, price):
    """Cover (buy back) a short position. Releases margin + P&L to cash."""
    if shares <= 0 or price <= 0:
        return {"error": "Invalid shares or price"}

    conn = get_db()
    meta = _get_meta(conn)
    slippage_bps = meta["slippage_bps"]
    comm_per_share = meta["commission_per_share"]

    existing = conn.execute("SELECT * FROM positions WHERE ticker = ? AND side = 'SHORT'", (ticker,)).fetchone()
    if not existing or existing["shares"] < shares:
        conn.close()
        available = existing["shares"] if existing else 0
        return {"error": f"No short position. Have {available} short shares."}

    slip = _calc_slippage(price, shares, slippage_bps, "BUY")
    exec_price = price + slip  # worse price when buying back (slippage)
    commission = _calc_commission(shares, comm_per_share)

    # P&L: profit when we shorted high and cover low
    entry_value = shares * existing["avg_cost"]
    cover_cost = shares * exec_price
    pnl = entry_value - cover_cost - commission  # short profit when price drops

    # Release margin and return to cash with P&L
    cash = meta["cash"]
    margin_release = entry_value  # we held 100% of entry value as margin
    new_cash = cash + margin_release + pnl  # margin back + profit/loss
    new_margin = max(0, meta["margin_used"] - margin_release)

    conn.execute("UPDATE portfolio_meta SET cash = ?, margin_used = ?, updated_at = datetime('now') WHERE id = 1",
                 (new_cash, new_margin))

    new_shares = existing["shares"] - shares
    if new_shares <= 0:
        conn.execute("DELETE FROM positions WHERE ticker = ? AND side = 'SHORT'", (ticker,))
    else:
        conn.execute(
            "UPDATE positions SET shares = ?, updated_at = datetime('now') WHERE ticker = ? AND side = 'SHORT'",
            (new_shares, ticker)
        )

    conn.execute(
        """INSERT INTO transactions (ticker, asset_type, action, side, shares, price, slippage, commission, total, pnl)
           VALUES (?, ?, 'COVER', 'SHORT', ?, ?, ?, ?, ?, ?)""",
        (ticker, existing["asset_type"], shares, exec_price, abs(slip) * shares, commission, cover_cost + commission, pnl)
    )
    conn.commit()
    conn.close()

    return {
        "success": True, "action": "COVER", "ticker": ticker,
        "shares": shares, "price": round(exec_price, 4),
        "pnl": round(pnl, 2), "commission": round(commission, 2),
        "remaining_cash": round(new_cash, 2)
    }


# ── Pending Orders (Limit / Stop) ────────────────────────

def _place_pending_order(order_type, ticker, side, shares, target_price, asset_type, expires_hours):
    """Shared implementation for limit/stop order placement."""
    if shares <= 0 or target_price <= 0:
        return {"error": "Invalid order parameters"}

    expires = (datetime.now() + timedelta(hours=expires_hours)).isoformat()

    conn = get_db()
    cur = conn.execute(
        """INSERT INTO pending_orders (ticker, asset_type, order_type, side, shares, target_price, expires_at)
           VALUES (?, ?, ?, ?, ?, ?, ?)""",
        (ticker, asset_type, order_type, side, shares, target_price, expires)
    )
    conn.commit()
    order_id = cur.lastrowid
    conn.close()

    return {"success": True, "order_id": order_id, "order_type": order_type,
            "side": side, "ticker": ticker, "shares": shares,
            "target_price": target_price, "expires_at": expires}


def place_limit_order(ticker, side, shares, limit_price, asset_type="stock", expires_hours=24):
    """Place a limit order that fills when market price hits the target."""
    return _place_pending_order("LIMIT", ticker, side, shares, limit_price, asset_type, expires_hours)


def place_stop_order(ticker, side, shares, stop_price, asset_type="stock", expires_hours=24):
    """Place a stop order (stop-loss or take-profit)."""
    return _place_pending_order("STOP", ticker, side, shares, stop_price, asset_type, expires_hours)


def get_pending_orders():
    """Get all pending orders, expiring any that are past their TTL."""
    conn = get_db()
    # Expire old orders
    conn.execute("UPDATE pending_orders SET status = 'EXPIRED' WHERE status = 'PENDING' AND expires_at < datetime('now')")
    conn.commit()
    rows = conn.execute("SELECT * FROM pending_orders WHERE status = 'PENDING' ORDER BY created_at DESC").fetchall()
    conn.close()
    return [dict(r) for r in rows]


def cancel_order(order_id):
    """Cancel a pending order; "cancelled" reports whether a row matched."""
    conn = get_db()
    cur = conn.execute("UPDATE pending_orders SET status = 'CANCELLED' WHERE id = ? AND status = 'PENDING'", (order_id,))
    conn.commit()
    conn.close()
    return {"success": True, "order_id": order_id, "status": "CANCELLED", "cancelled": cur.rowcount > 0}


def _order_triggered(order, market_price):
    """Return True when a pending order's price condition is met."""
    target = order["target_price"]
    if order["order_type"] == "LIMIT":
        # Limit: buy at-or-below target, sell at-or-above.
        return (order["side"] == "BUY" and market_price <= target) or \
               (order["side"] == "SELL" and market_price >= target)
    if order["order_type"] == "STOP":
        # Stop: sell when price falls to target (stop-loss),
        # buy when price rises to target (stop-buy).
        return (order["side"] == "SELL" and market_price <= target) or \
               (order["side"] == "BUY" and market_price >= target)
    return False


def check_and_fill_orders(current_prices):
    """Check pending orders against current prices and fill those triggered.
    Called during live polling.

    current_prices: dict {ticker: price}

    Fixed vs. the original: (a) the trade executes BEFORE the order is marked
    FILLED, so a failed execution (e.g. insufficient cash) leaves the order
    PENDING for the next poll instead of silently consuming it; and (b) the
    read connection is closed before buy()/sell() open their own write
    connections, avoiding lock contention on the shared database.
    """
    conn = get_db()
    pending = [dict(r) for r in conn.execute("SELECT * FROM pending_orders WHERE status = 'PENDING'").fetchall()]
    conn.close()

    filled = []
    for order in pending:
        ticker = order["ticker"]
        if ticker not in current_prices:
            continue

        market_price = current_prices[ticker]
        if not _order_triggered(order, market_price):
            continue

        if order["side"] == "BUY":
            result = buy(ticker, order["shares"], market_price, order["asset_type"])
        else:
            result = sell(ticker, order["shares"], market_price)

        if "error" not in result:
            conn = get_db()
            conn.execute("UPDATE pending_orders SET status = 'FILLED', filled_at = datetime('now') WHERE id = ?",
                         (order["id"],))
            conn.commit()
            conn.close()

        filled.append({"order_id": order["id"], "ticker": ticker, "result": result})

    return filled


# ── Equity Curve ──────────────────────────────────────

def record_equity_point(total_value, cash, positions_value):
    """Record a point on the equity curve with the return since the last point."""
    conn = get_db()
    # Get last point for daily return calc
    last = conn.execute("SELECT total_value FROM equity_curve ORDER BY id DESC LIMIT 1").fetchone()
    daily_return = 0
    if last and last["total_value"] > 0:
        daily_return = (total_value - last["total_value"]) / last["total_value"]

    conn.execute(
        "INSERT INTO equity_curve (timestamp, total_value, cash, positions_value, daily_return) VALUES (datetime('now'), ?, ?, ?, ?)",
        (total_value, cash, positions_value, daily_return)
    )
    conn.commit()
    conn.close()

# NOTE(review): the next definition (`def save_daily_snapshot(...)`, per its
# caller in app.py) continues past this chunk boundary and is not visible here.
get_equity_curve(limit=500): + """Get equity curve data for charting.""" + conn = get_db() + rows = conn.execute( + "SELECT timestamp, total_value, cash, positions_value, daily_return FROM equity_curve ORDER BY id DESC LIMIT ?", + (limit,) + ).fetchall() + conn.close() + return [dict(r) for r in reversed(rows)] + + +# ── Portfolio Summary ───────────────────────────────── + +def get_portfolio_summary(current_prices=None): + if current_prices is None: + current_prices = {} + + conn = get_db() + meta = _get_meta(conn) + cash = meta["cash"] + margin_used = meta["margin_used"] + positions = conn.execute("SELECT * FROM positions WHERE shares > 0").fetchall() + conn.close() + + pos_list = [] + total_long_value = 0 + total_short_value = 0 + total_cost_basis = 0 + + for p in positions: + ticker = p["ticker"] + shares = p["shares"] + avg_cost = p["avg_cost"] + side = p["side"] + current_price = current_prices.get(ticker, avg_cost) + + if side == "LONG": + market_value = shares * current_price + cost_basis = shares * avg_cost + pnl = market_value - cost_basis + total_long_value += market_value + else: # SHORT + market_value = shares * avg_cost # short value is entry value + cost_basis = shares * avg_cost + pnl = (avg_cost - current_price) * shares # profit when price drops + total_short_value += shares * current_price + + pnl_pct = (pnl / cost_basis * 100) if cost_basis > 0 else 0 + total_cost_basis += cost_basis + + pos_list.append({ + "ticker": ticker, "asset_type": p["asset_type"], + "shares": shares, "avg_cost": round(avg_cost, 2), + "current_price": round(current_price, 2), + "market_value": round(market_value, 2), + "side": side, + "cost_basis": round(cost_basis, 2), + "pnl": round(pnl, 2), "pnl_pct": round(pnl_pct, 2), + "allocation_pct": 0 + }) + + positions_value = total_long_value + # total_value = available cash + long positions + margin held + unrealized short P&L + # margin_used tracks entry value of shorts (deducted from cash at open) + # short_unrealized_pnl = 
sum of (entry_price - current_price) * shares for shorts + short_unrealized_pnl = 0 + for p in pos_list: + if p["side"] == "SHORT": + short_unrealized_pnl += p["pnl"] + total_value = cash + positions_value + margin_used + short_unrealized_pnl + total_pnl = total_value - INITIAL_CASH + total_pnl_pct = (total_pnl / INITIAL_CASH * 100) + + for p in pos_list: + p["allocation_pct"] = round((abs(p["market_value"]) / total_value * 100) if total_value > 0 else 0, 2) + + return { + "cash": round(cash, 2), + "positions_value": round(positions_value, 2), + "total_value": round(total_value, 2), + "total_pnl": round(total_pnl, 2), + "total_pnl_pct": round(total_pnl_pct, 2), + "initial_capital": INITIAL_CASH, + "margin_used": round(margin_used, 2), + "buying_power": round(cash, 2), + "positions": pos_list, + "cash_allocation_pct": round((cash / total_value * 100) if total_value > 0 else 100, 2), + "slippage_bps": round(meta["slippage_bps"], 1), + "commission_per_share": round(meta["commission_per_share"], 4), + } + + +def get_transactions(limit=50): + conn = get_db() + rows = conn.execute( + "SELECT * FROM transactions ORDER BY timestamp DESC LIMIT ?", (limit,) + ).fetchall() + conn.close() + return [dict(r) for r in rows] + + +def get_analytics(current_prices=None): + """Enhanced analytics with Sortino, Calmar, profit factor.""" + conn = get_db() + txns = conn.execute("SELECT * FROM transactions ORDER BY timestamp ASC").fetchall() + equity = conn.execute("SELECT * FROM equity_curve ORDER BY id ASC").fetchall() + conn.close() + + if not txns: + return _empty_analytics() + + # Daily returns from equity curve + daily_returns = [dict(e)["daily_return"] for e in equity if dict(e)["daily_return"] != 0] + + if len(daily_returns) >= 2: + avg_ret = sum(daily_returns) / len(daily_returns) + std_ret = math.sqrt(sum((r - avg_ret) ** 2 for r in daily_returns) / len(daily_returns)) + neg_returns = [r for r in daily_returns if r < 0] + downside_dev = math.sqrt(sum(r ** 2 for r in neg_returns) 
/ len(neg_returns)) if neg_returns else 0 + + sharpe = (avg_ret / std_ret * math.sqrt(252)) if std_ret > 0 else 0 + sortino = (avg_ret / downside_dev * math.sqrt(252)) if downside_dev > 0 else 0 + + # Max drawdown + values = [dict(e)["total_value"] for e in equity] + peak = values[0] if values else INITIAL_CASH + max_dd = 0 + for v in values: + if v > peak: peak = v + dd = (peak - v) / peak + if dd > max_dd: max_dd = dd + + # Calmar ratio (annualized return / max drawdown) + total_return = (values[-1] / values[0] - 1) if values and values[0] > 0 else 0 + calmar = (total_return / max_dd) if max_dd > 0 else 0 + else: + sharpe = sortino = calmar = 0 + max_dd = 0 + + # Trade analysis + pnls = [] + for t in txns: + t = dict(t) + if t.get("pnl") is not None: + pnls.append(t["pnl"]) + + wins = [p for p in pnls if p > 0] + losses = [p for p in pnls if p <= 0] + total_trades = len(pnls) + win_rate = (len(wins) / total_trades * 100) if total_trades > 0 else 0 + + gross_profit = sum(wins) if wins else 0 + gross_loss = abs(sum(losses)) if losses else 0 + profit_factor = (gross_profit / gross_loss) if gross_loss > 0 else float('inf') if gross_profit > 0 else 0 + + # Total commissions and slippage + total_commission = sum(dict(t).get("commission", 0) for t in txns) + total_slippage = sum(abs(dict(t).get("slippage", 0)) for t in txns) + + summary = get_portfolio_summary(current_prices) + + return { + "sharpe_ratio": round(sharpe, 2), + "sortino_ratio": round(sortino, 2), + "calmar_ratio": round(calmar, 2), + "max_drawdown_pct": round(max_dd * 100, 2), + "profit_factor": round(profit_factor, 2) if profit_factor != float('inf') else "∞", + "win_rate": round(win_rate, 1), + "total_trades": total_trades, + "winning_trades": len(wins), + "losing_trades": len(losses), + "avg_win": round(sum(wins) / len(wins), 2) if wins else 0, + "avg_loss": round(sum(losses) / len(losses), 2) if losses else 0, + "best_trade": round(max(pnls), 2) if pnls else 0, + "worst_trade": round(min(pnls), 2) if 
pnls else 0, + "total_return_pct": round(summary["total_pnl_pct"], 2), + "total_commission": round(total_commission, 2), + "total_slippage": round(total_slippage, 2), + "gross_profit": round(gross_profit, 2), + "gross_loss": round(gross_loss, 2), + } + + +def _empty_analytics(): + return { + "sharpe_ratio": 0, "sortino_ratio": 0, "calmar_ratio": 0, + "max_drawdown_pct": 0, "profit_factor": 0, + "win_rate": 0, "total_trades": 0, + "winning_trades": 0, "losing_trades": 0, + "avg_win": 0, "avg_loss": 0, + "best_trade": 0, "worst_trade": 0, + "total_return_pct": 0, "total_commission": 0, "total_slippage": 0, + "gross_profit": 0, "gross_loss": 0, + } + + +def save_daily_snapshot(total_value, cash, positions_value): + conn = get_db() + today = datetime.now().strftime("%Y-%m-%d") + conn.execute( + "INSERT OR REPLACE INTO portfolio_snapshots (date, total_value, cash, positions_value) VALUES (?, ?, ?, ?)", + (today, total_value, cash, positions_value) + ) + conn.commit() + conn.close() + + +def reset_portfolio(): + """Reset portfolio to initial state — clears everything.""" + conn = get_db() + conn.execute("DELETE FROM portfolio_meta") + conn.execute("INSERT INTO portfolio_meta (id, cash) VALUES (1, ?)", (INITIAL_CASH,)) + conn.execute("DELETE FROM positions") + conn.execute("DELETE FROM transactions") + conn.execute("DELETE FROM pending_orders") + conn.execute("DELETE FROM equity_curve") + conn.execute("DELETE FROM portfolio_snapshots") + conn.commit() + conn.close() + return {"success": True, "message": "Portfolio reset to $100,000"} + + +def update_settings(slippage_bps=None, commission_per_share=None): + """Update portfolio simulation settings.""" + conn = get_db() + if slippage_bps is not None: + conn.execute("UPDATE portfolio_meta SET slippage_bps = ? WHERE id = 1", (slippage_bps,)) + if commission_per_share is not None: + conn.execute("UPDATE portfolio_meta SET commission_per_share = ? 
WHERE id = 1", (commission_per_share,)) + conn.commit() + conn.close() + return {"success": True} diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..685136e --- /dev/null +++ b/requirements.txt @@ -0,0 +1,7 @@ +flask>=3.0 +yfinance>=1.0 +financetoolkit>=2.0 +openpyxl>=3.1 +requests>=2.31 +requests-cache>=1.2 +python-dotenv>=1.0 diff --git a/static/script.js b/static/script.js new file mode 100644 index 0000000..3fcc2de --- /dev/null +++ b/static/script.js @@ -0,0 +1,1649 @@ +/* FinanceIQ v5 — Frontend Logic */ + +// ══════════════ THEME TOGGLE ══════════════ +function getTheme() { return localStorage.getItem('fiq-theme') || 'dark'; } +function setTheme(t) { + document.documentElement.setAttribute('data-theme', t); + localStorage.setItem('fiq-theme', t); + const icon = document.getElementById('themeIcon'); + const label = document.getElementById('themeLabel'); + if (icon) icon.setAttribute('data-lucide', t === 'dark' ? 'moon' : 'sun'); + if (label) label.textContent = t === 'dark' ? 'Dark Mode' : 'Light Mode'; + if (typeof lucide !== 'undefined') lucide.createIcons(); + // Update charts if they exist + if (chartInstance) updateChartTheme(); + if (mcChartInstance) updateMCChartTheme(); +} +function updateChartTheme() { + const t = getTheme(); + const bg = t === 'dark' ? '#141a2a' : '#ffffff'; + const txt = t === 'dark' ? '#8b95a8' : '#5a6577'; + const grid = t === 'dark' ? 'rgba(255,255,255,0.03)' : 'rgba(0,0,0,0.04)'; + const border = t === 'dark' ? 'rgba(255,255,255,0.1)' : 'rgba(0,0,0,0.1)'; + chartInstance.applyOptions({ layout: { background: { color: bg }, textColor: txt }, grid: { vertLines: { color: grid }, horzLines: { color: grid } }, timeScale: { borderColor: border }, rightPriceScale: { borderColor: border } }); +} +function updateMCChartTheme() { + const t = getTheme(); + const bg = t === 'dark' ? '#141a2a' : '#ffffff'; + const txt = t === 'dark' ? '#8b95a8' : '#5a6577'; + const grid = t === 'dark' ? 
'rgba(255,255,255,0.03)' : 'rgba(0,0,0,0.04)'; + const border = t === 'dark' ? 'rgba(255,255,255,0.1)' : 'rgba(0,0,0,0.1)'; + mcChartInstance.applyOptions({ layout: { background: { color: bg }, textColor: txt }, grid: { vertLines: { color: grid }, horzLines: { color: grid } }, timeScale: { borderColor: border }, rightPriceScale: { borderColor: border } }); +} +function getChartColors() { + const t = getTheme(); + return { + bg: t === 'dark' ? '#141a2a' : '#ffffff', + text: t === 'dark' ? '#8b95a8' : '#5a6577', + grid: t === 'dark' ? 'rgba(255,255,255,0.03)' : 'rgba(0,0,0,0.04)', + border: t === 'dark' ? 'rgba(255,255,255,0.1)' : 'rgba(0,0,0,0.1)', + }; +} +// Apply saved theme on load +document.addEventListener('DOMContentLoaded', () => { + setTheme(getTheme()); + const themeBtn = document.getElementById('themeToggle'); + if (themeBtn) themeBtn.addEventListener('click', () => setTheme(getTheme() === 'dark' ? 'light' : 'dark')); + // Sidebar toggle for mobile + const sidebarToggle = document.getElementById('sidebarToggle'); + const sidebar = document.getElementById('sidebar'); + if (sidebarToggle && sidebar) { + sidebarToggle.addEventListener('click', () => sidebar.classList.toggle('open')); + } + // Asset type selector + document.querySelectorAll('.asset-btn').forEach(btn => { + btn.addEventListener('click', () => { + document.querySelectorAll('.asset-btn').forEach(b => b.classList.remove('active')); + btn.classList.add('active'); + currentAssetType = btn.dataset.asset; + const input = document.getElementById('tickerInput'); + const placeholders = { stocks: 'Search ticker (e.g. AAPL, TSLA, NIFTY)', futures: 'Search futures (e.g. ES=F, NQ=F, GC=F)', options: 'Search underlying (e.g. AAPL, SPY)', currencies: 'Search pair (e.g. 
USDINR=X, EURUSD=X)' }; + if (input) input.placeholder = placeholders[currentAssetType] || placeholders.stocks; + // Filter sidebar nav items by asset type + filterSidebarByAsset(currentAssetType); + }); + }); +}); + +let chartInstance = null; +let candleSeries = null; +let volumeSeries = null; +let overlays = {}; +let analysisData = null; +let newsData = null; +let currentTicker = ""; +let currentAssetType = "stocks"; + +// Helper: detect asset type from ticker +function getAssetType(ticker) { + if (!ticker) return currentAssetType; + if (ticker.endsWith('=F')) return 'futures'; + if (ticker.endsWith('=X')) return 'currencies'; + if (ticker.startsWith('^')) return 'stocks'; // indices treated as stocks + return currentAssetType; // use the selector's value +} + +// Filter sidebar nav items by asset type +function filterSidebarByAsset(assetType) { + document.querySelectorAll('.nav-item[data-asset]').forEach(btn => { + const allowed = (btn.dataset.asset || '').split(','); + if (allowed.includes(assetType)) { + btn.style.display = ''; + } else { + btn.style.display = 'none'; + // If this tab was active, switch to overview + if (btn.classList.contains('active')) { + btn.classList.remove('active'); + document.querySelectorAll('.tab-page').forEach(p => p.classList.remove('active')); + const overviewBtn = document.querySelector('.nav-item[data-tab="overview"]'); + if (overviewBtn) overviewBtn.classList.add('active'); + const overviewPage = document.getElementById('page-overview'); + if (overviewPage) overviewPage.classList.add('active'); + } + } + }); +} + +// ══════════════ CURRENCY STATE ══════════════ +let currencyRates = { USD: 1.0, GBP: 0.79, INR: 83.5 }; +let currentCurrency = "GBP"; // default +const currencySymbols = { USD: "$", GBP: "£", INR: "₹" }; + +// Fetch live rates on load +(async function fetchCurrencyRates() { + try { + const res = await fetch("/api/currency", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({}) 
}); + const data = await res.json(); + currencyRates = data; + } catch (e) { console.warn("Using fallback currency rates"); } +})(); + +function convertCurrency(usdValue) { + if (usdValue === null || usdValue === undefined || usdValue === "N/A") return "N/A"; + const n = parseFloat(usdValue); + if (isNaN(n)) return String(usdValue); + return n * currencyRates[currentCurrency]; +} +function fmtCurrency(usdValue) { + const c = convertCurrency(usdValue); + if (c === "N/A") return "N/A"; + const sym = currencySymbols[currentCurrency]; + return sym + Number(c).toLocaleString(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 2 }); +} + +// ══════════════ WELCOME PAGE ══════════════ +document.getElementById("letsBeginBtn").addEventListener("click", () => { + document.getElementById("welcomePage").style.display = "none"; + document.getElementById("mainApp").style.display = "flex"; +}); + +// ══════════════ SIDEBAR NAVIGATION ══════════════ +document.querySelectorAll(".nav-item[data-tab]").forEach(btn => { + btn.addEventListener("click", () => { + document.querySelectorAll(".nav-item[data-tab]").forEach(b => b.classList.remove("active")); + document.querySelectorAll(".tab-page").forEach(p => p.classList.remove("active")); + btn.classList.add("active"); + document.getElementById("page-" + btn.dataset.tab).classList.add("active"); + // Close mobile sidebar + const sidebar = document.getElementById('sidebar'); + if (sidebar && window.innerWidth <= 1024) sidebar.classList.remove('open'); + }); +}); + +// ══════════════ SEARCH / SUGGEST ══════════════ +const tickerInput = document.getElementById("tickerInput"); +const dropdown = document.getElementById("dropdown"); +const timeframeSelect = document.getElementById("timeframeSelect"); +const periodSelect = document.getElementById("periodSelect"); +const currencySelect = document.getElementById("currencySelect"); +const customDates = document.getElementById("customDates"); +let debounceTimer; + 
+tickerInput.addEventListener("input", () => { + clearTimeout(debounceTimer); + debounceTimer = setTimeout(async () => { + const q = tickerInput.value.trim(); + if (q.length < 1) { dropdown.style.display = "none"; return; } + try { + const res = await fetch(`/api/suggest?q=${encodeURIComponent(q)}&asset_type=${encodeURIComponent(currentAssetType)}`); + const data = await res.json(); + if (!data.length) { dropdown.style.display = "none"; return; } + dropdown.innerHTML = data.map(d => + `No significant patterns detected in the last 7 trading days.
'; + return; + } + el.innerHTML = data.patterns.map(p => ` +${p.description}
+No recent insider transactions.
'; return; } + const rows = data.transactions.slice(0, 10).map(t => { + const isBuy = (t.change > 0 || t.transaction_type === "P - Purchase"); + return `| Date | Insider | Type | Shares |
|---|
No recent news.
'; return; } + + const avgCls = newsData.overall_label === "Positive" ? "positive" : newsData.overall_label === "Negative" ? "negative" : "neutral"; + el.innerHTML = `${data.error}
`; return; } + el.innerHTML = Object.entries(data.indicators || {}).map(([k, v]) => { + const statusBadge = v.status ? `${v.status}` : ""; + const explanation = v.explanation ? `No earnings data available.
'; return; } + const rows = data.earnings.map(e => { + const surprise = parseFloat(e.surprise_pct); + const cls = surprise > 0 ? "surprise-positive" : surprise < 0 ? "surprise-negative" : ""; + return `| Period | Estimate | Actual | Surprise | Surprise % |
|---|
${mcRawData.error}
`; return; } + + analysisData._mc = mcRawData; + renderMCChart(mcRawData); + renderMCStats(mcRawData); + setupMCBandToggles(); + } catch (e) { console.error("Monte Carlo:", e); } +} + +function renderMCChart(data) { + const container = document.getElementById("monteCarloChart"); + container.innerHTML = ""; + mcBandSeries = {}; + + const cc = getChartColors(); + mcChartInstance = LightweightCharts.createChart(container, { + layout: { background: { color: cc.bg }, textColor: cc.text }, + grid: { vertLines: { color: cc.grid }, horzLines: { color: cc.grid } }, + timeScale: { borderColor: cc.border }, rightPriceScale: { borderColor: cc.border }, + }); + + const today = new Date(); + function dayStr(offset) { const d = new Date(today); d.setDate(d.getDate() + offset); return d.toISOString().slice(0, 10); } + + const bands = [ + { key: "p90", color: "rgba(34,197,94,0.4)", label: "P90 (Bull)", lineWidth: 1 }, + { key: "p75", color: "rgba(34,197,94,0.6)", label: "P75", lineWidth: 1 }, + { key: "p50", color: "rgba(99,102,241,0.9)", label: "P50 (Median)", lineWidth: 2 }, + { key: "p25", color: "rgba(239,68,68,0.6)", label: "P25", lineWidth: 1 }, + { key: "p10", color: "rgba(239,68,68,0.4)", label: "P10 (Bear)", lineWidth: 1 }, + ]; + + bands.forEach(b => { + if (data.percentiles && data.percentiles[b.key]) { + const series = mcChartInstance.addLineSeries({ color: b.color, lineWidth: b.lineWidth, priceLineVisible: false, lastValueVisible: false }); + series.setData(data.percentiles[b.key].map((v, i) => ({ time: dayStr(i + 1), value: v }))); + mcBandSeries[b.key] = series; + } + }); + mcChartInstance.timeScale().fitContent(); +} + +function renderMCStats(data) { + const s = data.final_stats || {}; + document.getElementById("monteCarloStats").innerHTML = [ + { label: "Start Price", value: fmtCurrency(data.start_price) }, + { label: "Median (P50)", value: fmtCurrency(s.median) }, + { label: "Mean", value: fmtCurrency(s.mean) }, + { label: "P10 (Bearish)", value: 
fmtCurrency(s.p10), cls: "negative" }, + { label: "P90 (Bullish)", value: fmtCurrency(s.p90), cls: "positive" }, + ].map(i => `${data.error}
`; return; } + analysisData._dcf = data; + renderDCFData(data); + } catch (e) { console.error("DCF:", e); } +} +function renderDCFData(data) { + const el = document.getElementById("dcfValuation"); + const cls = data.verdict === "UNDERVALUED" ? "undervalued" : data.verdict === "OVERVALUED" ? "overvalued" : "fair"; + const arrow = data.upside_pct > 0 ? "↑" : "↓"; + el.innerHTML = ` +${data.error}
`; return; } + + const cls = data.zone === "Safe Zone" ? "safe" : data.zone === "Grey Zone" ? "grey" : "distress"; + el.innerHTML = ` +${data.error}
`; return; } + analysisData._dividends = data; + renderDividendData(data); + } catch (e) { console.error("Dividends:", e); } +} +function renderDividendData(data) { + const el = document.getElementById("dividendAnalysis"); + const yld = data.dividend_yield && data.dividend_yield !== "N/A" ? (data.dividend_yield * 100).toFixed(2) + "%" : "N/A"; + const payout = data.payout_ratio && data.payout_ratio !== "N/A" ? (data.payout_ratio * 100).toFixed(1) + "%" : "N/A"; + el.innerHTML = ` +${data.error}
`; return; } + + const tickers = data.tickers || []; + const m = data.matrix || {}; + let html = '| '; + tickers.forEach(t => html += ` | ${t} | `); + html += '
|---|---|
| ${row} | `; + tickers.forEach(col => { + const val = (m[col] && m[col][row] !== undefined) ? m[col][row] : 0; + const bg = corrColor(val); + html += `${val.toFixed(2)} | `; + }); + html += '
${heatmapData.error}
`; return; } + + const controls = document.getElementById("heatmapControls"); + controls.innerHTML = (heatmapData.timeframes || []).map((tf, i) => + `` + ).join(""); + controls.querySelectorAll(".heatmap-btn").forEach(btn => { + btn.addEventListener("click", () => { + controls.querySelectorAll(".heatmap-btn").forEach(b => b.classList.remove("active")); + btn.classList.add("active"); + renderHeatmap(btn.dataset.tf); + }); + }); + if (heatmapData.timeframes && heatmapData.timeframes.length) renderHeatmap(heatmapData.timeframes[0]); + } catch (e) { console.error("Heatmap:", e); } +} +function renderHeatmap(tf) { + const sectors = heatmapData.sectors[tf] || {}; + const entries = Object.entries(sectors); + if (!entries.length) { + document.getElementById("sectorHeatmap").innerHTML = 'Sector data temporarily unavailable (API rate limit).
'; + return; + } + document.getElementById("sectorHeatmap").innerHTML = entries.map(([name, pct]) => { + const bg = pct >= 0 ? `rgba(34,197,94,${Math.min(Math.abs(pct) / 5, 0.7) + 0.1})` : `rgba(239,68,68,${Math.min(Math.abs(pct) / 5, 0.7) + 0.1})`; + const color = Math.abs(pct) > 1 ? "white" : "var(--text-secondary)"; + return `No competitor data available. ${data.sector || "unknown"}
`; + if (data.error) el.innerHTML += `${data.error}
`; + return; + } + + let html = `Sector: ${data.sector}
`; + html += '| Ticker | Market Cap | P/E | EV/EBITDA | ROE | Net Margin | Gross Margin |
|---|---|---|---|---|---|---|
| ${p.ticker} | +${fmtCurrencyLarge(p.market_cap)} | +${fmtVal(p.pe_ratio)} | +${fmtVal(p.ev_ebitda)} | +${fmtPctVal(p.roe)} | +${fmtPctVal(p.net_margin)} | +${fmtPctVal(p.gross_margin)} | +
${data.error || "No response"}
`; + } catch (e) { document.getElementById("aiContent").innerHTML = 'AI unavailable.
'; } +} + +document.getElementById("askAi").addEventListener("click", async () => { + const q = document.getElementById("aiInput").value.trim(); + if (!q || !currentTicker) return; + document.getElementById("aiContent").innerHTML = 'Thinking...
'; + try { + const payload = { ticker: currentTicker, analysis: analysisData || {}, news: newsData || {}, question: q }; + const res = await fetch("/api/ai", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify(payload) }); + const data = await res.json(); + document.getElementById("aiContent").innerHTML = data.overview ? formatAI(data.overview) : `${data.error || "No response"}
`; + } catch (e) { document.getElementById("aiContent").innerHTML = 'Error.
'; } +}); + +function formatAI(text) { + return text.replace(/\*\*(.*?)\*\*/g, '$1') + .replace(/^### (.*?)$/gm, '${optionsChainData.error}
`; + return; + } + // Populate expiry selector + const sel = document.getElementById("optExpiry"); + sel.innerHTML = (optionsChainData.expirations || []).map(e => ``).join(""); + // Set payoff strike default to current price + if (optionsChainData.current_price) { + document.getElementById("payoffStrike").value = Math.round(optionsChainData.current_price); + } + renderOptionsChain(); + } catch (e) { + document.getElementById("optionsChainTable").innerHTML = `Failed to load options chain
`; + } +} + +function renderOptionsChain() { + if (!optionsChainData) return; + const data = showCallsOrPuts === 'calls' ? optionsChainData.calls : optionsChainData.puts; + if (!data || !data.length) { + document.getElementById("optionsChainTable").innerHTML = `No data available
`; + return; + } + const curPrice = optionsChainData.current_price || 0; + let html = `| Strike | Last | Bid | Ask | Volume | OI | IV | +Delta | Gamma | Theta | Vega | +
|---|---|---|---|---|---|---|---|---|---|---|
| ${opt.strike} | +${(opt.lastPrice || 0).toFixed(2)} | +${(opt.bid || 0).toFixed(2)} | +${(opt.ask || 0).toFixed(2)} | +${(opt.volume || 0).toLocaleString()} | +${(opt.openInterest || 0).toLocaleString()} | +${((opt.impliedVolatility || 0) * 100).toFixed(1)}% | +${(opt.delta || 0).toFixed(4)} | +${(opt.gamma || 0).toFixed(6)} | +${(opt.theta || 0).toFixed(4)} | +${(opt.vega || 0).toFixed(4)} | +
No open positions. Use the Trade Terminal to buy your first asset.
'; + return; + } + let html = `| Ticker | Side | Type | Shares | Avg Cost | Current | Value | P&L | P&L % | Alloc % | +
|---|---|---|---|---|---|---|---|---|---|
| ${p.ticker} | +${p.side} | +${p.asset_type} | +${p.shares} | +$${p.avg_cost.toFixed(2)} | +$${p.current_price.toFixed(2)} | +$${p.market_value.toLocaleString(undefined, { minimumFractionDigits: 2 })} | +${p.pnl >= 0 ? '+' : ''}$${p.pnl.toFixed(2)} | +${p.pnl_pct >= 0 ? '+' : ''}${p.pnl_pct.toFixed(2)}% | +${p.allocation_pct.toFixed(1)}% | +
No pending orders.
'; + return; + } + let html = `| ID | Type | Side | Ticker | Shares | Target Price | Created | Action | +
|---|---|---|---|---|---|---|---|
| #${o.id} | +${o.order_type} | +${o.side} | +${o.ticker} | +${o.shares} | +$${o.target_price.toFixed(2)} | +${o.created_at} | ++ |
No transactions yet.
'; + return; + } + let html = `| Date | Action | Side | Ticker | Shares | Price | Slip | Comm | Total | P&L | +
|---|---|---|---|---|---|---|---|---|---|
| ${t.timestamp} | +${t.action} | +${t.side || 'LONG'} | +${t.ticker} | +${t.shares} | +$${t.price.toFixed(4)} | +$${(t.slippage || 0).toFixed(4)} | +$${(t.commission || 0).toFixed(2)} | +$${t.total.toFixed(2)} | +${pnlStr} | +
Multi-Asset Intelligence Platform
+
+ Stocks · Futures · Options · Currencies
+ AI-powered analysis, live charts, DCF valuation, Monte Carlo forecasting, and institutional-grade
+ analytics.
+
Real-time candlestick charts with EMA, SMA, Bollinger, and drawing tools.
+Intrinsic value via discounted cash flows, WACC, and terminal growth.
+LLM-powered research synthesizing technicals, fundamentals, and sentiment.
+1,000-path probabilistic simulation with percentile bands.
+Live Wall Street ratings, price targets, and insider activity.
+FRED indicators, sector heatmaps, correlation and peer analysis.
+