import time
import requests
import threading
import json
import logging
import concurrent.futures  # For concurrent market fetching
from logging.handlers import RotatingFileHandler
from flask import Flask, request
from py_clob_client.client import ClobClient
from py_clob_client.clob_types import OrderArgs, OrderType, MarketOrderArgs
from py_clob_client.order_builder.constants import BUY
from config import Config # Import the config class
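# A minimal sketch of the config.py this file expects (attribute names are taken
# from their usage below; the values are placeholders, not real settings or credentials):
#
#   class Config:
#       HOST = "https://clob.polymarket.com"   # CLOB endpoint
#       KEY = "<private key>"                  # signing key
#       CHAIN_ID = 137                         # Polygon mainnet
#       FUNDER = "<funder wallet address>"
#       DEFAULT_PRICE = 0.99                   # fallback limit price
#       DEFAULT_SIZE = 2.0                     # fallback order size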


app = Flask(__name__)

# --- LOGGING ---
logger = logging.getLogger("MokaBot")
logger.setLevel(logging.INFO)
file_handler = RotatingFileHandler('trade.log', maxBytes=5*1024*1024, backupCount=5)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.addHandler(logging.StreamHandler())


# --- INITIALIZE CLIENT ---
try:
    client = ClobClient(
        Config.HOST, 
        key=Config.KEY, 
        chain_id=Config.CHAIN_ID, 
        signature_type=1, 
        funder=Config.FUNDER
    )
    client.set_api_creds(client.create_or_derive_api_creds())
except Exception as e:
    logger.error(f"CRITICAL: Client init failed: {e}")
    raise  # Fail fast: nothing below can work without a client

# Global State
MARKET_CACHE = {}
CACHE_METADATA = {"last_updated": None, "status": "Initializing", "items_count": 0}  # Exposed by the optional /health route below
# Event flag to signal when cache is ready
CACHE_READY_EVENT = threading.Event() 


# Optional diagnostic routes (uncomment to enable):
#@app.route('/health')
#def health_check():
#    # You can check this URL anytime to see if the cache is stale
#    return CACHE_METADATA

#@app.route('/debug-cache')
#def debug_cache_status():
#    import os
#    return {
#        "pid": os.getpid(),
#        "cache_size": len(MARKET_CACHE),
#        "cache_metadata": CACHE_METADATA,
#        "sample_keys": list(MARKET_CACHE.keys())[:100],
#        "system_time": time.ctime()
#    }


# --- OPTIMIZATION 1: CONNECTION KEEPALIVE ---
def connection_heartbeat():
    """Pings the API every 60s to keep the SSL session alive."""
    while True:
        try:
            client.get_sampling_markets()
        except Exception:
            pass  # Heartbeat failures are non-fatal; retry on the next cycle
        time.sleep(60)

# Start Heartbeat Thread immediately
threading.Thread(target=connection_heartbeat, daemon=True).start()

# --- CACHE LOGIC ---
def fetch_single_market(session, market_time, asset_prefix):
    """Fetch one 15-minute up/down market from the Gamma API; returns {"key", "data"} or None."""
    # Dynamic slug: btc-updown-15m-<ts> or eth-updown-15m-<ts>
    slug = f"{asset_prefix}-updown-15m-{market_time}"
    url = f"https://gamma-api.polymarket.com/markets?slug={slug}"
    try:
        headers = {"User-Agent": "Mozilla/5.0", "Accept": "application/json"}
        response = session.get(url, headers=headers, timeout=5)
        if response.status_code == 200:
            data = response.json()
            if isinstance(data, list) and len(data) > 0:
                market = data[0]
                raw_tokens = market.get("clobTokenIds", "[]")
                # clobTokenIds may arrive as a JSON-encoded string or as a list
                if isinstance(raw_tokens, str):
                    try:
                        tokens = json.loads(raw_tokens)
                    except json.JSONDecodeError:
                        tokens = []
                else:
                    tokens = raw_tokens
                
                if isinstance(tokens, list) and len(tokens) == 2:
                    return {
                        "key": f"{asset_prefix}_{market_time}",  # <--- THIS LINE WAS LIKELY MISSING
                        "data": {"green": tokens[0], "red": tokens[1], "slug": slug}
                    }
    except Exception:
        pass  # Network or parse errors are treated as "market not found"
    return None
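
# Example (hypothetical timestamp): fetch_single_market(session, 1717200000, "btc")
# queries slug "btc-updown-15m-1717200000" and, when two token ids come back, returns
# {"key": "btc_1717200000", "data": {"green": "<token id>", "red": "<token id>", "slug": "btc-updown-15m-1717200000"}}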

def update_market_cache():
    """
    Performs one single update. Does NOT schedule itself.
    Called by the background worker loop.
    """
    global MARKET_CACHE
    logger.info("Updating Market Cache...")
    start_time = time.perf_counter()
    assets = ["btc", "eth"]
    
    now = int(time.time())
    start_timestamp = now - (now % 900)  # Round down to the current 15-minute (900s) boundary
    # Previous interval plus roughly the next 24 hours of 15-minute intervals
    timestamps_to_fetch = [start_timestamp + (i * 900) for i in range(-1, 98)]
    
    session = requests.Session()
    temp_cache = {}
    
    # Parallel fetching for ALL assets
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        futures = []
        # Create tasks for every combination of Asset + Time
        for ts in timestamps_to_fetch:
            for asset in assets:
                futures.append(executor.submit(fetch_single_market, session, ts, asset))
                
        for future in concurrent.futures.as_completed(futures):
            result = future.result()
            if result:
                # Store using the Composite Key
                temp_cache[result['key']] = result['data']
                
    # Swap in the fully built dict so readers never see a partially updated cache
    MARKET_CACHE = temp_cache
    CACHE_METADATA["last_updated"] = time.ctime()
    CACHE_METADATA["items_count"] = len(MARKET_CACHE)
    CACHE_METADATA["status"] = "Healthy"
    
    elapsed = time.perf_counter() - start_time
    logger.info(f"Cache Refresh Completed. Loaded {len(MARKET_CACHE)} markets in {elapsed:.2f}s.")

# --- BACKGROUND WORKER ---
def market_cache_loop():
    """Robust loop that survives crashes."""
    while True:
        try:
            update_market_cache()
            # Signal readiness on first successful run
            if not CACHE_READY_EVENT.is_set():
                CACHE_READY_EVENT.set()
            
            # Sleep 1 hour
            time.sleep(3600)
        except Exception as e:
            logger.error(f"CRITICAL: Cache update crashed: {e}. Retrying in 5 min.")
            CACHE_METADATA["status"] = f"Error: {str(e)}"
            time.sleep(300)

def start_background_worker():
    t = threading.Thread(target=market_cache_loop, daemon=True)
    t.start()
    return t



# --- WEBHOOK (THE TRADING HOT PATH) ---
@app.route('/webhook', methods=['POST'])
def webhook():
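    """Trading hot path: receives a TradingView alert and places a Polymarket order.

    Example payload (field names are the ones parsed below; values are illustrative):
        {
          "action": "buy_no_turn_green",   # or "buy_no_turn_red"
          "slug_ts": 1717200000,           # 15-minute interval timestamp
          "asset": "btc",                  # or "eth"
          "bid_price": 0.55,
          "bid_size": 5
        }
    """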
    t0 = time.perf_counter()
    
    # Parse the incoming JSON payload
    try:
        data = request.json
        action = data.get("action")
        slug_ts = data.get("slug_ts")
        asset = data.get("asset", "btc").lower()  # Default to btc if missing
        # Price and size fall back to Config.DEFAULT_PRICE / Config.DEFAULT_SIZE if missing
        bid_price = float(data.get("bid_price", Config.DEFAULT_PRICE))
        bid_size = float(data.get("bid_size", Config.DEFAULT_SIZE))
 
    except Exception:
        return {"status": "error", "msg": "Invalid Payload"}, 400
        
    if not slug_ts:
        return {"status": "error", "msg": "Missing timestamp"}, 400
        
    # 1. Cache Lookup
    cache_key = f"{asset}_{slug_ts}"
    market_data = MARKET_CACHE.get(cache_key)
    
    if not market_data:
        # Failsafe option: reload the cache here on a miss, but that would add latency
        # to the hot path, so we just log the error and reject.
        logger.error(f"Cache Miss! Key: {cache_key}")
        return {"status": "error", "msg": "Market not found"}, 404

    # 2. Token Selection
    token_id = None
    if action == "buy_no_turn_green":
        token_id = market_data["green"]
    elif action == "buy_no_turn_red":
        token_id = market_data["red"]

    if token_id:        
        try:
            t1 = time.perf_counter()
            
            # --- ORDER CONSTRUCTION ---
            # A plain limit order (OrderArgs) is faster to construct than MarketOrderArgs.
            # Price and size come from the TradingView payload.
            order_args = OrderArgs(
                price=bid_price, 
                size=bid_size,         
                side=BUY,
                token_id=token_id
            )
            
            # Sign (CPU)
            signed_order = client.create_order(order_args)
            t2 = time.perf_counter()
            
            # Send (Network)
            resp = client.post_order(signed_order, OrderType.GTC)
            t3 = time.perf_counter()
            # --- RESPONSE HANDLING ---
            # Extract the success flag and error message from the Polymarket response body
            poly_success = resp.get("success")
            poly_error = resp.get("errorMsg")
            order_id = resp.get("orderID", "N/A")
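            # Illustrative response shape (keys are the ones read above; values are placeholders):
            #   {"success": True, "errorMsg": "", "orderID": "0x123...abc"}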
            
            total_latency = (t3 - t0) * 1000
            signing_time = (t2 - t1) * 1000
            network_time = (t3 - t2) * 1000
            
            if poly_success:
                logger.info(f"TRADE PLACED! | ID: {order_id} | TS: {slug_ts} | {action} | {asset} | Size: {bid_size} @ ${bid_price} | Total: {total_latency:.2f}ms (Sign: {signing_time:.2f}ms, Net: {network_time:.2f}ms)")
                return {"status": "success", "resp": resp}, 200
            else:
                # This log line tells you exactly why the order did not show up on the site
                logger.error(f"POLYMARKET REJECTED: {poly_error} | TS: {slug_ts} | {asset}")
                return {"status": "rejected", "resp": resp}, 200
            
        except Exception as e:
            logger.error(f"Execution Failed: {e}")
            return {"status": "error"}, 500

    return {"status": "ignored"}, 200
    
# --- STARTUP LOGIC ---

# This function runs regardless of whether you use 'python app.py' or 'gunicorn'
def initialize_app():
    logger.info("--- INITIALIZING BOT PROCESS ---")
    # Start the worker thread
    start_background_worker()
    
    # Wait for the first cache fill (Blocking startup until ready)
    logger.info("Waiting for initial market cache fill...")
    CACHE_READY_EVENT.wait() 
    logger.info("Cache Ready. Process is prepared to handle webhooks.")

# Run at import time so both 'python app.py' and gunicorn workers initialize
initialize_app()

if __name__ == '__main__':
    # This block only runs during local development
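    # Local smoke test once the server is running (illustrative values):
    #   curl -X POST http://localhost:80/webhook \
    #        -H "Content-Type: application/json" \
    #        -d '{"action": "buy_no_turn_green", "slug_ts": 1717200000, "asset": "btc"}'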
    app.run(port=80, debug=False)
