{ "HOW_TO_USE_THIS_FILE": { "purpose": "Complete Ghost Node project archive for seamless continuation on a new Claude account", "step_1": "Start a new conversation on your new Claude account", "step_2": "Upload this JSON file and the 4 source code files (worker.py, models.py, database.py, dashboard.html) separately", "step_3": "Paste this exact prompt to Claude:\n\n---\nYou are continuing the Ghost Node auction sniper project. I have uploaded the complete project archive (ghost_node_ultimate_export.json) and all current source files.\n\nPlease do the following:\n1. Read the 'project_state_for_new_claude' section in the JSON — this is your briefing\n2. Read the 'what_to_build_next' section — this is the immediate priority queue\n3. Confirm you understand the full architecture by summarising the 5 threads, the two navigation modes, and the 3 alert channels\n4. Tell me which feature from 'what_to_build_next' you recommend we tackle first and why\n\nAll source files are already attached. Do not re-read them from the JSON — use the uploaded files directly.\n---", "step_4": "The new Claude will be fully up to speed and ready to continue building immediately", "tip": "If uploading all files at once, always name this file first so Claude processes the briefing before the code" }, "project_state_for_new_claude": { "project_name": "Ghost Node — International Auction Sniper", "tagline": "Multi-threaded automated lot monitoring across global auction sites with real-time multi-channel alerts, human-mimicry anti-bot engine, and self-healing site health system", "language": "Python 3.10+", "platform": "Windows 11 (primary), Linux compatible", "current_version": "v2.1 — as of 2026-03-10", "owner_context": "User runs this on a Windows 11 machine with RTX 3050. Primary auction sites are eBay UK, eBay US, ShopGoodwill, HiBid. Uses Telegram for alerts. 
Goal is international scale across multiple auction platforms simultaneously.", "thread_architecture": { "Thread_A_FastAPI": "Port 8000 — cyberpunk dashboard UI + 28 REST API endpoints", "Thread_B_Scraper": "nuclear_engine() — async Playwright loop, one browser context per site, all keywords per site per cycle", "Thread_C_Telegram_C2": "telegram_c2_loop() — polls getUpdates every 3s, handles /status /pause /resume /listings", "Thread_D_Price_Refresh": "Isolated asyncio loop — revisits each saved lot URL every 5min, updates price + time_left + price_updated_at", "Thread_E_Closing_Alert": "closing_alert_loop() — polls DB every 60s, fires one alert per lot when time_left_mins drops below user threshold" }, "navigation_modes": { "Mode_A_Direct": "url_template contains {keyword} → substitute + navigate directly. Example: ebay.co.uk/sch/i.html?_nkw={keyword}", "Mode_B_Homepage": "No {keyword} in url_template → navigate to homepage, auto-discover search box via ARIA/placeholder/label/ID heuristics, type keyword as human" }, "humanize_levels": { "raw": "Instant fill(), no mouse, no scroll — fastest, zero protection", "low": "Single mouse movement + 1-2 scroll steps", "medium": "Bezier mouse curve + 2-4 scroll steps with read pauses + char-by-char typing at variable WPM", "heavy": "Full: bezier mouse with micro-tremors + 4-7 scroll steps + char-by-char with 12% typo+backspace rate + homepage pre-visit + all stealth patches" }, "stealth_patches": "30+ navigator/window patches: webdriver hidden, 5 real plugin objects, languages, platform, vendor, hardwareConcurrency, deviceMemory, WebGL renderer (no SwiftShader), canvas fingerprint noise, audio fingerprint noise, battery API (realistic charge), network info (4G effectiveType), media devices (audio outputs), iframe contentWindow patching, chrome.runtime, chrome.loadTimes(), chrome.csi(), Permissions API, screen dimensions, performance timing", "database_schema": { "Listing": "id, title, price (Float), currency (String10), 
price_raw (String100), time_left (String60), time_left_mins (Float — null if no time data captured), price_updated_at (DateTime), link (unique), score (Int), keyword (String), site_name (String), timestamp (DateTime), closing_alert_sent (Boolean)", "Keyword": "id, term (unique String200), weight (Int — score multiplier)", "Config": "id, key (unique String100), value (Text)", "TargetSite": "id, name, url_template, search_selector, enabled (0/1), max_pages (Int), last_error (Text), error_count (Int), consecutive_failures (Int), last_success_at (DateTime), cooldown_until (DateTime), requires_login (Boolean), login_url (String), login_check_selector (String), login_enabled (Boolean)" }, "all_config_keys": { "telegram_token": "Bot token from @BotFather", "telegram_chat_id": "Your Telegram chat/group ID", "timer": "Seconds between full scrape cycles (default 120)", "browser_choice": "auto | edge | yandex | chrome | brave | chromium", "incognito_mode": "true | false — ephemeral vs persistent browser profile", "show_browser": "true | false — headless vs visible window", "delay_launch": "Seconds to wait after browser opens, before first navigation", "delay_site_open": "Seconds to wait once per site after homepage loads", "delay_post_search": "Seconds to wait after results page loads, before extracting", "delay_page_hold": "Seconds to hold results page, re-scraping continuously", "humanize_level": "raw | low | medium | heavy", "captcha_solver": "none | 2captcha | capsolver", "captcha_api_key": "API key for chosen CAPTCHA service", "alert_channels": "Comma-separated: telegram,discord,email", "discord_webhook": "Full Discord webhook URL", "email_smtp_host": "SMTP server hostname", "email_smtp_port": "SMTP port (default 587)", "email_smtp_user": "SMTP login username", "email_smtp_pass": "SMTP login password (use app password for Gmail)", "email_from": "From address", "email_to": "Destination address for alerts", "closing_alert_enabled": "true | false", "closing_alert_mins": 
"Integer minutes threshold for closing-soon alerts", "db_url": "Leave empty for SQLite. Set to postgresql://... for Postgres", "site_auto_disable_after": "Consecutive failures before 30min cooldown (0 = never)" }, "all_api_endpoints": { "GET /": "Serves dashboard.html", "GET /api/stats": "Engine status, scanned count, alert count, uptime", "GET /api/listings?limit=N": "All listings, ordered by timestamp desc", "DELETE /api/listings/{id}": "Delete one listing", "DELETE /api/listings": "Clear all listings", "GET /api/listings/countdown-sync": "Lightweight: id + time_left_mins + timestamps only", "GET /api/listings/refresh-status": "Last price-refresh timestamp for Thread D polling", "GET /api/keywords": "All keywords", "POST /api/keywords": "Add keyword {term, weight}", "DELETE /api/keywords/{id}": "Delete keyword", "GET /api/sites": "All target sites with full health data", "POST /api/sites": "Add site {name, url_template, search_selector, max_pages, requires_login, login_url, login_check_selector, login_enabled}", "PUT /api/sites/{id}": "Update any site field", "DELETE /api/sites/{id}": "Delete site", "POST /api/sites/{id}/login": "Open visible browser to login_url for manual session save", "GET /api/config": "All config key-value pairs", "POST /api/config": "Save config dict (upsert)", "POST /api/engine/pause": "Set engine_status = Paused", "POST /api/engine/resume": "Set engine_status = Running", "POST /api/engine/restart": "Kill + relaunch scraper thread", "POST /api/engine/kill": "os._exit(0) — hard shutdown", "POST /api/telegram/test": "Send test message, return full Telegram response", "GET /api/export/csv": "Download all listings as CSV", "GET /api/export/json": "Download all listings as JSON", "GET /api/export/html": "Download cyberpunk HTML report", "GET /api/debug/db": "Return raw DB contents for diagnostics", "GET /api/backup/download": "Download timestamped .db backup file (SQLite only)", "POST /api/backup/restore": "Upload .db file to restore — 
auto-backs up current DB first, then restarts" }, "scoring_system": { "positive_signals": { "GB": 10, "RAM": 10, "Unlocked": 10, "SSD": 10, "RTX": 10, "GPU": 10, "S10": 10, "NVMe": 8, "OLED": 8, "5G": 6, "New": 5, "Sealed": 5 }, "negative_signals": { "Cover": -10, "Case": -10, "Sleeve": -10, "Box Only": -10, "Broken": -10, "For Parts": -10, "Cracked": -8, "Damaged": -8, "Read": -5, "Faulty": -10 }, "rule": "score = sum(positive_signals) + sum(negative_signals) * keyword.weight. Only score >= 0 triggers alerts. Negative score = accessory spam = silently ignored.", "note": "Currently HARDCODED in models.py. N6 (editable scoring) would move these to a DB table." }, "known_bugs_and_quirks": { "SQLite_WAL_locking": "db.flush() called before db.commit() everywhere to push INSERTs/UPDATEs to WAL before scraper thread reads them", "enabled_coercion": "TargetSite.enabled must be stored as Python int (1/0) not bool — SQLite filter TargetSite.enabled==1 breaks on True", "background_browser": "Three layers prevent Chromium throttling when minimised: launch flags (--disable-renderer-backgrounding etc.), JS init script (visibilityState→visible, hasFocus→true), no bring_to_front()", "JS_f_string_apostrophes": "All JS strings in _humanizeDescs and similar must use backtick literals — apostrophes inside regular quotes break the JS parser silently", "HiBid_card_anchored_extraction": "HiBid renders price/time OUTSIDE the card container in sibling DOM nodes. Two-stage extraction: Stage 1 queries inside cards, Stage 2 walks up DOM ancestors to find siblings containing price data. JS_EXTRACT is the page.evaluate() string that does this.", "page_evaluate_locator": "_discover_search_input() returns a Playwright Locator — must call .element_handle() before passing to page.evaluate(). Passing the Locator directly causes 'object could not be serialized' error.", "networkidle_timeout": "Heavy JS sites (HiBid, BidSpotter) often time out on wait_until='networkidle'. 
Fallback to 'domcontentloaded' is always in place.", "closing_alert_stale_lots": "Closing alert only fires on lots captured within last 7 days (timedelta(days=7) guard) to prevent alerting on old archived data" }, "browser_resolution_priority": "auto mode tries: 1) Edge (C:/Program Files (x86)/Microsoft/Edge/Application/msedge.exe), 2) Yandex (C:/Users/*/AppData/Local/Yandex/YandexBrowser/Application/browser.exe), 3) Chrome (C:/Program Files/Google/Chrome/Application/chrome.exe), 4) Brave, 5) Playwright bundled Chromium", "file_roles": { "database.py": "SQLAlchemy engine only. Reads DATABASE_URL env var. SQLite default with WAL pragmas. PostgreSQL with connection pooling.", "models.py": "All ORM models. POSITIVE/NEGATIVE_SIGNALS. calculate_attribute_score(). SEED_KEYWORDS/SEED_SITES/SEED_CONFIG. _migrate_schema() — safe idempotent column additions for both SQLite and PostgreSQL.", "worker.py": "EVERYTHING ELSE. All 5 threads. All 28 API endpoints. scrape_site(). nuclear_engine(). All stealth/humanize code. CAPTCHA solvers. Block detection. Health tracking. Export endpoints. Login trigger.", "dashboard.html": "Single-file 2210-line cyberpunk terminal UI. 5 tabs: Dashboard (stats + live countdown), Listings (sortable table + urgency colours), Keywords, Target Sites (health column + login button + page count badge), Settings (all controls)." 
}, "features_completed": [ "N2 — CAPTCHA solver (2captcha + CapSolver, wired into navigation)", "N3 — Block/429 detection + 30min site cooldown + health tracking", "N5 — Pagination (max_pages per site, universal next-page detection)", "N9 — Thread E closing-soon alerts (user-controlled threshold + toggle)", "N10 — Multi-channel alerts (Telegram + Discord + Email, simultaneous or single)", "N12 — PostgreSQL support via DATABASE_URL env var", "N13 — Site health dashboard column (cooldown, error count, last error preview)", "N14 — Login session support (persistent profile, 🔑 Login button, session check)", "N15 — Export (CSV, JSON, HTML report) + Database Backup & Restore", "Anti-bot humanisation — 4 levels, 30+ stealth patches, bezier mouse, typo simulation", "Live countdown — 1s ticker, 60s sync, Thread D price refresh", "Sortable listings table — price, time left, score, timestamp columns", "Browser selector — Edge / Yandex / Chrome / Brave / Chromium auto-detect", "Telegram C2 — /status /pause /resume /listings commands", "setup.bat — one-click Windows setup script" ] }, "what_to_build_next": { "priority_1_RECOMMENDED": { "feature": "N1 — Residential Proxy Rotation", "why": "Without this, every request comes from your home IP. Any serious auction site (BidSpotter, HiBid, Invaluable) will ban it within hours of real use. This is the single biggest blocker to international scale.", "implementation": { "config_keys_to_add": [ "proxy_enabled (true/false)", "proxy_url (e.g. 
http://user:pass@proxy.oxylabs.io:7777)", "proxy_per_site (true/false — rotate per site or per cycle)", "proxy_provider (oxylabs/brightdata/smartproxy/custom)" ], "code_change": "In nuclear_engine(), after resolving browser, pass proxy to new_context(): await browser.new_context(proxy={'server': proxy_url, 'username': ..., 'password': ...})", "dashboard_change": "New Settings section // PROXY ROTATION with toggle, URL field, and test button", "services": "Oxylabs Residential (best), Bright Data, Smartproxy, Webshare (budget option)", "cost": "~$15-50/month for residential proxies" } }, "priority_2": { "feature": "N4 — Price Normalisation to USD", "why": "Currently GBP/USD/EUR listings can't be compared or sorted cross-site. A £200 item and a $200 item are just strings.", "implementation": { "api": "https://api.frankfurter.app/latest?from=GBP&to=USD — free, no API key", "new_column": "price_usd Float on Listing table", "caching": "Cache exchange rates in Config table (exchange_rates_json, exchange_rates_updated_at) — refresh daily", "dashboard_change": "Add USD column to listings table, sort by it by default" } }, "priority_3": { "feature": "N6 — Editable Scoring Rules (UI)", "why": "Currently POSITIVE/NEGATIVE_SIGNALS are hardcoded in models.py. You can't add 'Lot' or 'Estate' signals for international antique auctions without editing Python code.", "implementation": { "new_model": "class ScoringRule(Base): id, signal (String), delta (Int), category (positive/negative), notes (Text)", "migration": "Seed from current POSITIVE/NEGATIVE_SIGNALS lists", "api_endpoints": "GET/POST/PUT/DELETE /api/scoring-rules", "dashboard_tab": "New tab or section in Keywords tab — table of signals with +/- delta editing", "calculate_attribute_score_change": "Query DB instead of using hardcoded lists" } }, "priority_4": { "feature": "N7 — Price Filters per Keyword", "why": "Searching 'RTX 4090' returns $5 broken listings as score >= 0. 
You want to only alert on items between $500-$1200.", "implementation": { "model_change": "Add min_price (Float, nullable), max_price (Float, nullable) to Keyword", "scrape_site_change": "After extracting price: if keyword.min_price and amount < keyword.min_price: continue. Same for max_price.", "dashboard_change": "Add min/max price columns to Keywords tab table with inline editing" } }, "priority_5": { "feature": "N8 — Scheduled Scraping Windows", "why": "Auction sites have predictable closing times. Running full scrapes at 3am is wasteful. Running every 2min in the last hour of lots is critical.", "implementation": { "config_keys": [ "scrape_window_enabled (true/false)", "scrape_start_hour (0-23)", "scrape_end_hour (0-23)", "boost_enabled (true/false)", "boost_final_mins (60 = last hour)", "boost_interval_secs (120 = every 2min)" ], "nuclear_engine_change": "Before each cycle: check current hour against window. If in boost window (lots closing within boost_final_mins), override timer with boost_interval_secs." } }, "priority_6": { "feature": "N11 — Fuzzy Cross-site Deduplication", "why": "Same physical item on eBay UK + eBay US + ShopGoodwill fires 3 alerts. Noise kills the signal.", "implementation": { "library": "difflib.SequenceMatcher (standard library, no install needed)", "logic": "In scrape_site(), before saving: query last 100 listings, check SequenceMatcher(None, title.lower(), existing.title.lower()).ratio() > 0.85. If duplicate found: update existing record's link list instead of creating new row.", "db_change": "Add alt_links (Text/JSON) to Listing for storing secondary site URLs of same item" } } }, "debugging_reference": { "common_errors_and_fixes": { "Telegram_400_chat_not_found": "Bot has no open session with user. Fix: open Telegram, find the bot, press START or send /start. For groups: chat_id must start with minus sign e.g. -100123456789", "Telegram_401_Unauthorized": "Token is wrong. 
Check telegram_token in Settings starts with a number:letter format like '7123456789:AAF...'", "networkidle_timeout": "Heavy JS sites timeout on networkidle. Already handled with domcontentloaded fallback. If still failing, increase timeout from 60_000 to 90_000ms.", "no_listings_extracted": "Check browser is not minimised (use show_browser=true to diagnose). Check site has not changed DOM structure. Run GET /api/debug/db to confirm site is enabled and not in cooldown.", "SyntaxError_in_worker": "Almost always an f-string with apostrophe inside regular quotes. Fix: wrap in backtick template literals or use escaped quotes.", "search_box_not_found_Mode_B": "_discover_search_input() tries ARIA role='searchbox', then input[type=search], then placeholder text match, then label association, then ID/class heuristics. Add the site's actual selector to search_selector field as fallback.", "SQLite_database_locked": "Two threads writing simultaneously. Already mitigated with db.flush() before db.commit() and WAL mode. If persisting, switch to PostgreSQL.", "price_shows_as_null": "_extract_price_and_currency() couldn't find a numeric pattern. Check price_text in logs. Add site to JS_EXTRACT selectors if needed.", "closing_alert_not_firing": "Check: 1) closing_alert_enabled='true' in config. 2) Lot has time_left_mins set (not null). 3) Lot timestamp within last 7 days. 4) closing_alert_sent=False. Check via GET /api/debug/db.", "login_check_always_false": "login_check_selector not matching. Use show_browser=true, navigate to site logged-in, inspect the element that only appears when logged in (e.g. username display, logout button). Copy its CSS selector." 
}, "useful_diagnostic_endpoints": { "GET /api/debug/db": "Returns all Config values (token masked), all Sites with health data, all Keywords, listing count", "GET /api/stats": "Engine status, total scanned, total alerts, last cycle time, uptime seconds", "POST /api/telegram/test": "Sends test message, returns full Telegram JSON response body for diagnosing 401/400 errors", "GET /api/listings?limit=5": "Check most recent 5 captured listings to verify extraction is working", "GET /api/listings/countdown-sync": "Verify time_left_mins is being stored correctly" } }, "international_expansion_guide": { "adding_new_auction_sites": { "step_1": "Identify if site has stable search URL (Mode A) or requires homepage search (Mode B)", "step_2_mode_a": "Mode A: find the search results URL pattern, replace search term with {keyword}. Example: invaluable.com/buy/?keywords={keyword}&page=1", "step_2_mode_b": "Mode B: use the homepage URL as url_template, inspect DOM for search input selector", "step_3": "In dashboard → Target Sites → Register New Site. Set Pages to 2-5 for better coverage.", "step_4": "Test with show_browser=true and humanize_level=raw first to confirm scraping works, then switch to heavy for production", "tested_sites": { "eBay UK": "DIRECT — https://www.ebay.co.uk/sch/i.html?_nkw={keyword}&_sop=10 | selector: #gh-ac", "eBay US": "DIRECT — https://www.ebay.com/sch/i.html?_nkw={keyword}&_sop=10 | selector: #gh-ac", "ShopGoodwill": "HOMEPAGE — https://shopgoodwill.com/home | selector: input#st", "HiBid": "HOMEPAGE — https://hibid.com/ | selector: auto-discover. 
NOTE: card-anchored extraction required — price/time outside card container" }, "sites_to_add_next": { "Invaluable": "DIRECT — https://www.invaluable.com/buy/?keywords={keyword} | major international platform", "BidSpotter": "DIRECT — https://www.bidspotter.com/en-us/search?q={keyword} | industrial/estate auctions", "Proxibid": "DIRECT — https://www.proxibid.com/asp/Search.asp?searchStr={keyword} | North American auctions", "i-bidder": "DIRECT — https://www.i-bidder.com/en-gb/search/{keyword} | UK/Europe", "Catawiki": "DIRECT — https://www.catawiki.com/en/search?q={keyword} | European specialist auctions", "Copart": "HOMEPAGE — https://www.copart.com | vehicles — requires login", "IAAI": "HOMEPAGE — https://www.iaai.com | salvage vehicles — requires login", "LiveAuctioneers": "DIRECT — https://www.liveauctioneers.com/search/?keyword={keyword} | art/antiques/collectibles", "Lot-tissimo": "DIRECT — https://www.lot-tissimo.com/en/search/{keyword} | Europe", "Auctionet": "DIRECT — https://auctionet.com/en/search?q={keyword} | Scandinavia", "Drouot": "HOMEPAGE — https://www.drouot.com | France — major European platform", "Bonhams": "DIRECT — https://www.bonhams.com/search/?q={keyword} | high-value lots" } }, "currency_codes_supported": "USD, GBP, EUR, CAD, AUD, JPY, CHF, SEK, NOK, DKK, NZD, HKD, SGD, MXN, BRL, INR, KRW, CNY, ZAR, AED — 20 currencies detected from price strings", "timezone_note": "Browser context uses Europe/London + en-GB locale by default. For US-focused scraping change to America/New_York + en-US in browser.new_context() call in nuclear_engine()" }, "prompt_library": { "description": "Copy-paste prompts for the most common Ghost Node tasks. Each includes the context Claude needs — no re-explaining required.", "GETTING_STARTED": { "label": "Start a new session (use this every time you open a new Claude conversation)", "prompt": "I am continuing the Ghost Node auction sniper project. 
\nThe complete project archive is in ghost_node_ultimate_export.json (uploaded).\nThe current source files are also uploaded: worker.py, models.py, database.py, dashboard.html.\n\nPlease:\n1. Read the 'project_state_for_new_claude' section — this is your full briefing\n2. Read 'what_to_build_next' for the current priority queue\n3. Confirm you're ready by summarising the 5-thread architecture in one sentence each\n4. Tell me which feature you recommend we tackle first\n\nUse the uploaded source files directly — do not re-read them from the JSON." }, "ADD_NEW_AUCTION_SITE": { "label": "Add and configure a new auction site", "prompt": "I want to add a new auction site to Ghost Node. The site is: [SITE NAME AND URL]\n\nPlease:\n1. Determine if it uses Mode A (direct URL with {keyword}) or Mode B (homepage search)\n2. Find the correct search URL template or homepage URL\n3. Find the search box CSS selector if Mode B\n4. Advise on max_pages setting for this site\n5. Note any special extraction challenges (unusual DOM structure, login required, heavy JS, CAPTCHA)\n6. Give me the exact values to enter in the dashboard → Target Sites → Register New Site form\n\nTest this by navigating to the site manually first if needed." }, "DEBUG_ZERO_RESULTS": { "label": "Scraper is running but finding 0 listings", "prompt": "Ghost Node is running but returning 0 new listings. The logs show the engine is cycling but nothing is being saved.\n\nMy setup:\n- Site: [SITE NAME]\n- Keyword: [KEYWORD]\n- Navigation mode: [Mode A direct URL / Mode B homepage]\n- Browser visible: [yes/no]\n- Humanize level: [raw/low/medium/heavy]\n\nRelevant log output:\n[PASTE LOG LINES HERE]\n\nPlease diagnose systematically:\n1. Check if the issue is navigation (can't reach page), extraction (page loads but no items found), or scoring (items found but filtered out)\n2. Check the listing_selectors list in scrape_site() — do any match this site's DOM?\n3. 
Check if score >= 0 filter is filtering everything out\n4. Suggest the most likely fix and implement it" }, "DEBUG_TELEGRAM_NOT_WORKING": { "label": "Telegram alerts have stopped / never worked", "prompt": "My Telegram alerts are not working in Ghost Node.\n\nCurrent situation: [describe — e.g. \"no alerts received\", \"error in logs\", \"test button fails\"]\nLog output from the terminal: [PASTE RELEVANT LINES]\nResult from POST /api/telegram/test (check in browser or curl): [PASTE JSON RESPONSE]\nResult from GET /api/debug/db → config section: [PASTE telegram_token and telegram_chat_id values]\n\nPlease diagnose: is this a token issue, chat_id issue, network issue, or code issue?\nThen fix it." }, "IMPLEMENT_PROXY_ROTATION": { "label": "Add residential proxy rotation (N1 — highest priority unbuilt feature)", "prompt": "Implement N1 — Proxy Rotation for Ghost Node. \n\nMy proxy service is: [oxylabs / brightdata / smartproxy / custom]\nMy proxy endpoint/credentials are: [ENTER OR LEAVE BLANK TO USE PLACEHOLDER]\n\nRequirements (as designed in 'what_to_build_next'):\n1. New config keys: proxy_enabled, proxy_url, proxy_per_site (rotate per site vs per cycle)\n2. Pass proxy to browser.new_context() in nuclear_engine()\n3. New Settings section // PROXY ROTATION with toggle, URL field, and test button\n4. Test button should verify the proxy works by fetching https://api.ipify.org?format=json and showing the IP\n5. If proxy fails mid-scrape, log the error and continue without proxy (fail-open, not fail-closed)\n\nPlease implement across worker.py, models.py (SEED_CONFIG), and dashboard.html." }, "IMPLEMENT_PRICE_FILTERS": { "label": "Add min/max price filters per keyword (N7)", "prompt": "Implement N7 — Price Filters per Keyword for Ghost Node.\n\nRequirements (as designed in 'what_to_build_next'):\n1. Add min_price (Float, nullable) and max_price (Float, nullable) to Keyword model\n2. Add migration in _migrate_schema() for both SQLite and PostgreSQL\n3. 
In scrape_site(), after extracting price: skip listings outside min/max range\n4. In dashboard Keywords tab: add Min £ and Max £ inline editable columns\n5. PUT /api/keywords/{id} endpoint to save the new fields\n6. Make the filter optional — null means no filter on that side\n\nPlease implement across worker.py, models.py, and dashboard.html." }, "IMPLEMENT_EDITABLE_SCORING": { "label": "Add editable scoring rules UI (N6)", "prompt": "Implement N6 — Editable Scoring Rules for Ghost Node.\n\nCurrent state: POSITIVE_SIGNALS and NEGATIVE_SIGNALS are hardcoded lists in models.py.\n\nRequirements (as designed in 'what_to_build_next'):\n1. New ORM model: ScoringRule(id, signal String, delta Int, active Boolean default True)\n2. Seed from current POSITIVE_SIGNALS and NEGATIVE_SIGNALS lists\n3. New API endpoints: GET/POST/PUT/DELETE /api/scoring-rules\n4. Update calculate_attribute_score() to query DB rules instead of hardcoded lists\n - Cache rules in memory for 60s to avoid DB hit on every listing\n5. New dashboard tab or section (within Keywords tab) showing:\n - Table of all rules with signal text, delta value (+10 / -8 etc), active toggle\n - Add new rule form\n - Delete button per row\n6. Positive rules shown in green, negative in red\n\nPlease implement across worker.py, models.py, and dashboard.html." }, "IMPLEMENT_PRICE_NORMALISATION": { "label": "Add price normalisation to USD (N4)", "prompt": "Implement N4 — Price Normalisation to USD for Ghost Node.\n\nRequirements (as designed in 'what_to_build_next'):\n1. Free exchange rate API: https://api.frankfurter.app/latest?from=USD — no key needed\n2. Cache rates in Config table as 'exchange_rates_json' (JSON string) + 'exchange_rates_updated_at'\n - Refresh once per day maximum\n3. Add price_usd Float column to Listing model + migration\n4. In scrape_site(), after extracting price and currency: convert to USD and store in price_usd\n5. Thread D (price refresh) should also update price_usd when it refreshes prices\n6. 
Dashboard listings table: add USD Price column, make it the default sort column\n7. In /api/export/csv and /api/export/html: include price_usd column\n\nPlease implement across worker.py, models.py, and dashboard.html." }, "FIX_SITE_NOT_SCRAPING": { "label": "A specific site is being skipped or showing errors", "prompt": "A specific site in Ghost Node is not being scraped correctly.\n\nSite name: [SITE NAME]\nURL template: [URL]\nError in logs: [PASTE LOG OUTPUT]\nSite health status (from dashboard Sites tab): [OK / N failures / COOLDOWN]\n\nPlease check:\n1. Is the site in cooldown? (check consecutive_failures and cooldown_until via GET /api/debug/db)\n2. Is the site disabled? (enabled field)\n3. Is the URL template correct — does it contain {keyword}?\n4. Is the search selector correct for Mode B sites?\n5. Is there a login requirement blocking access?\n\nThen fix the root cause." }, "ADD_CLOSING_ALERT_FOR_SITE": { "label": "Closing-soon alerts not firing for a specific site", "prompt": "The closing-soon alert (Thread E) is not firing for lots from [SITE NAME].\n\nMy settings:\n- closing_alert_enabled: [true/false]\n- closing_alert_mins: [N]\n\nPlease check:\n1. Is time_left_mins being extracted and stored for lots from this site? (GET /api/listings?limit=10 and check time_left_mins field)\n2. If time_left_mins is null: the scraper is not finding the time-left element. Check the time_left selectors in scrape_site() and add new selectors for this site's DOM\n3. If time_left_mins is set: check closing_alert_loop() — is the remaining time calculation correct?\n4. Check if closing_alert_sent is already True on the lots (would mean alert fired but not received — check Telegram/Discord/Email config)\n\nDiagnose and fix." 
}, "MOVE_TO_NEW_MACHINE": { "label": "Moving Ghost Node to a new Windows machine", "prompt": "I need to move Ghost Node to a new Windows machine and restore all my data.\n\nI have:\n- The Ghost Node source files (worker.py, models.py, database.py, dashboard.html, requirements.txt)\n- A backup of my sniper.db file (downloaded from Settings → Database Backup)\n\nPlease give me the exact step-by-step process to:\n1. Set up Ghost Node on the new machine using setup.bat\n2. Restore my sniper.db backup so I keep all my listings, sites, keywords and settings\n3. Verify everything is working correctly after restore\n4. Any Windows-specific gotchas to watch out for (PATH, firewall, venv activation)" }, "PERFORMANCE_TUNING": { "label": "Scraper is too slow / too fast — tune for my use case", "prompt": "I need to tune Ghost Node's speed settings for my situation.\n\nMy use case: [describe — e.g. \"monitoring 8 sites for 15 keywords, want results within 10 minutes of a listing going live\"]\nCurrent cycle time: [how long each full cycle takes]\nCurrent settings: timer=[N]s, delay_launch=[N], delay_site_open=[N], delay_post_search=[N], delay_page_hold=[N], humanize_level=[level], max_pages=[N]\nCurrent problem: [too slow / getting blocked / missing listings / using too much CPU]\n\nPlease recommend optimal settings for my use case and explain the tradeoffs:\n- Which delays can safely be reduced\n- Which sites need more caution (higher delays, heavy humanize)\n- Recommended timer interval given my site count and keyword count\n- Whether page_hold is worth it for my sites" }, "UPGRADE_EXPORT_FILE": { "label": "Rebuild the project export file with latest changes", "prompt": "The Ghost Node project has been updated since the last export. 
Please rebuild ghost_node_ultimate_export.json with all current changes.\n\nFiles to include (all attached):\n- worker.py (current)\n- models.py (current)\n- database.py (current)\n- dashboard.html (current)\n- requirements.txt (current)\n- setup.bat (current)\n\nPlease:\n1. Update 'final_project_state.files' with the new file contents and line counts\n2. Update 'final_project_state.last_updated' to today's date\n3. Update 'what_to_build_next' — remove any features that have now been built, add any new ones discussed\n4. Update 'project_state_for_new_claude.all_config_keys' if any new config keys were added\n5. Update 'project_state_for_new_claude.all_api_endpoints' if any new endpoints were added\n6. Add a new entry to 'build_history' describing what was built in this session\n7. Save the updated file as ghost_node_ultimate_export.json" } }, "final_project_state": { "description": "DEFINITIVE current source files as of 2026-03-09. These are the files to use when starting on a new account.", "last_updated": "2026-03-09T21:07:29.302891", "file_line_counts": { "worker.py": 3267, "models.py": 260, "database.py": 76, "dashboard.html": 2210, "requirements.txt": 12, "README.md": 106, "setup.bat": 101 }, "note": "Source files are uploaded separately alongside this JSON — do not look for them here." }, "_stripped_sections": { "sessions": "REMOVED — full 94-message conversation history stripped to reduce file size. The original history is NOT in this file; it remains only in the unstripped export on the machine that generated this archive.", "final_project_state.files": "REMOVED — source files are uploaded as separate attachments alongside this JSON." } }