

import os
import sys
import time
import asyncio
from datetime import datetime, timezone

# Add parent directories to path
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from API.bonkAPI import BonkAPI
from API.jupAPI import JupAPI
from utils.logger import *
from utils.config import *
from .ClickHouseManager import ClickHouseManager


def get_tokens_info(token_list):
    """
    Resolves migration timestamps and creators for the given tokens.
    Combines data from get_recent_tokens_from_raydium_2() and JupAPI().get_token_pool_info().
    """
    migrated = []
    jup_api = JupAPI()

    bonk_logger.info(f"Fetching info for {len(token_list)} tokens")

    for i, token_data in enumerate(token_list):
        # Add delay between requests to avoid rate limiting
        if i > 0:
            time.sleep(0.5)  # 500ms delay between requests
            
        # token_data is a dict (from get_recent_tokens_from_raydium_2)
        if isinstance(token_data, dict):
            token_mint = token_data.get("mint")
        else:
            # token_data is a plain mint string
            token_mint = token_data
            token_data = {"mint": token_mint}
            
        if not token_mint:
            continue
            
        # Fetch additional token info from JupAPI
        jup_info = jup_api.get_token_pool_info(token_mint)
        if not jup_info:
            bonk_logger.warning(f"No JupAPI info for token {token_mint}")
            continue

        ts = jup_info.get('migration_started')
        if not ts:
            bonk_logger.warning(f"No migration_started for token {token_mint}")
            continue

        # Combine data from both sources
        token_hash = jup_info.get("token_hash") or token_mint
        creator_hash = jup_info.get("creator_hash") or token_data.get("creator")
        migration_started = datetime.fromtimestamp(ts, tz=timezone.utc).replace(tzinfo=None)
        
        # Prefer JupAPI data, fall back to Raydium data
        symbol = jup_info.get("symbol") or token_data.get("symbol")
        icon = jup_info.get("icon") or token_data.get("imgUrl")
        name = token_data.get("name")

        migrated.append({
            "token_hash": token_hash,
            "name": name,
            "symbol": symbol,
            "migration_started": migration_started,
            "creator_hash": creator_hash,
            "icon": icon,
        })
        bonk_logger.info(f"Processed token {i+1}/{len(token_list)}: {symbol or token_mint}")

    return migrated
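
# A minimal, optional sketch of a retry wrapper around the JupAPI lookup used above.
# Assumptions (not confirmed by this codebase): get_token_pool_info() returns a falsy value
# or raises on transient failures such as rate limiting. The helper name is hypothetical and
# is not wired into get_tokens_info(); it only illustrates exponential backoff as an
# alternative to the fixed 0.5s delay.
def get_token_pool_info_with_retry(jup_api, token_mint, retries=3, base_delay=0.5):
    for attempt in range(retries):
        try:
            info = jup_api.get_token_pool_info(token_mint)
            if info:
                return info
        except Exception as e:
            bonk_logger.warning(f"JupAPI attempt {attempt + 1} failed for {token_mint}: {e}")
        # Exponential backoff: 0.5s, 1s, 2s, ...
        time.sleep(base_delay * (2 ** attempt))
    return None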

def insert_tokens_to_database(migrated_tokens, pool='BONK'):
    """
    Insert migrated tokens into the ClickHouse tokens_info.tokens table.
    """
    if not migrated_tokens:
        bonk_logger.warning("No tokens to insert into database")
        return False
    
    try:
        # Initialize ClickHouse manager
        db_manager = ClickHouseManager()
        
        # Check which tokens already exist in the database
        token_addresses = [token.get('token_hash', '') for token in migrated_tokens]
        existing_tokens = set()
        
        if token_addresses:
            # Create a query to check for existing tokens
            placeholders = ','.join([f"'{addr}'" for addr in token_addresses])
            check_query = f"SELECT token_address FROM tokens_info.tokens WHERE token_address IN ({placeholders})"
            
            try:
                result = db_manager.execute_query(check_query)
                if result:
                    existing_tokens = {row[0] for row in result}
                    bonk_logger.info(f"Found {len(existing_tokens)} existing tokens in database")
            except Exception as e:
                pump_logger.warning(f"Could not check for existing tokens: {e}")
        
        # Prepare data for insertion according to tokens table schema
        data_to_insert = []
        current_time = int(time.time())
        skipped_count = 0
        
        for token in migrated_tokens:
            token_address = token.get('token_hash', '')
            
            # Skip if token already exists
            if token_address in existing_tokens:
                bonk_logger.info(f"Skipping existing token: {token_address}")
                skipped_count += 1
                continue
            
            creator = token.get('creator_hash', '')

            # Migration timestamp - convert from datetime to unix timestamp
            migration_time = token.get('migration_started')
            if isinstance(migration_time, datetime):
                migrated = int(migration_time.timestamp())
            else:
                migrated = current_time
            
            processed_date = current_time
            processed = 0  # not yet processed
            quality = 'unsorted'  # default quality label
            shortName = token.get('symbol', '')
            image = token.get('icon', '')
            
            data_to_insert.append((
                token_address,
                creator,
                pool,
                migrated,
                processed_date,
                processed,
                quality,
                shortName,
                image
            ))
        
        if not data_to_insert:
            bonk_logger.info(f"No new tokens to insert. Skipped {skipped_count} existing tokens")
            return True
        
        # Define columns according to the tokens table schema
        columns = [
            'token_address',
            'creator',
            'pool',
            'migrated',
            'processed_date',
            'processed',
            'quality',
            'shortName',
            'image'
        ]
        
        # Execute bulk insert
        success = db_manager.execute_insert('tokens_info.tokens', columns, data_to_insert)
        
        if success:
            bonk_logger.info(f"✅ Successfully inserted {len(data_to_insert)} new migrated tokens into database (skipped {skipped_count} existing)")
            return True
        else:
            bonk_logger.error("❌ Failed to insert tokens into database")
            return False
            
    except Exception as e:
        bonk_logger.error(f"❌ Error inserting tokens into database: {e}")
        return False
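
# The existence check above builds its IN (...) list by interpolating raw strings into SQL.
# Token mints are base58 and should not contain quotes, but a small escaping helper keeps the
# query safe if an unexpected value slips in. This is an illustrative sketch only: the helper
# name is hypothetical and it is not wired into insert_tokens_to_database().
def quote_clickhouse_string(value: str) -> str:
    # ClickHouse string literals use single quotes with backslash escaping.
    escaped = value.replace("\\", "\\\\").replace("'", "\\'")
    return f"'{escaped}'"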

async def bonk_migrated(days=3):
    """
    Fetches Bonk tokens, resolves migration time and creator for each, and stores them in the
    database as: token_hash, migration_started, creator_hash, processed=0, quality='unsorted', pool='BONK'.
    """
    bonk_tokens = BonkAPI().get_recent_tokens_from_raydium_2(days)
    bonk_logger.info(f"Fetched {len(bonk_tokens)} Bonk tokens from Raydium")
    
    # Process each token and fetch additional migration info
    migrated_tokens = get_tokens_info(bonk_tokens)
    bonk_logger.info(f"Successfully processed {len(migrated_tokens)} tokens with migration data")
    
    # Insert all migrated tokens into database
    bonk_logger.info(f"Inserting {len(migrated_tokens)} tokens into database...")
    success = insert_tokens_to_database(migrated_tokens)
    
    if success:
        bonk_logger.info("✅ All migrated tokens successfully added to database")
    else:
        bonk_logger.error("❌ Failed to add some or all tokens to database")
    
    
if __name__ == "__main__":
    asyncio.run(bonk_migrated(7))
