#!/usr/bin/env python3
"""
Precise comparison test: Local proxy vs OpenRouter
Simulating EXACT Health Bot scenario
"""

import os
import sys
sys.path.append(os.getcwd())

from openai import OpenAI
import json

# Get the exact system prompt and tools that Health Bot uses
from slack_bot.llm.gemini import get_system_instruction
from slack_bot.tools.registry import TOOLS_SCHEMA

system_prompt = get_system_instruction()

# Test message (same user message observed in the production logs).
test_message = "今天睡得怎么样？"


def _print_banner(title: str) -> None:
    """Print a '=' framed section banner (leading newline included)."""
    print("\n" + "=" * 80)
    print(title)
    print("=" * 80)


def _run_chat_test(client: "OpenAI", model: str) -> str:
    """Send the exact Health Bot request through *client* and classify it.

    Uses the module-level ``system_prompt``, ``test_message`` and
    ``TOOLS_SCHEMA`` so both endpoints receive byte-identical payloads.

    Returns one of:
      * ``"WORKS"`` - non-empty text content and/or tool calls came back
      * ``"EMPTY"`` - the request succeeded but the model returned nothing
      * ``"ERROR"`` - the request raised an exception
    """
    try:
        print("\nSending request...")
        response = client.chat.completions.create(
            model=model,
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": test_message},
            ],
            tools=TOOLS_SCHEMA,
        )
    except Exception as e:
        # The OpenAI client raises for transport and API errors alike;
        # truncate so a huge HTML error body stays readable on one line.
        print(f"\n❌ Request failed")
        print(f"  Error: {str(e)[:200]}")
        return "ERROR"

    msg = response.choices[0].message
    content = msg.content or ""
    finish_reason = response.choices[0].finish_reason
    # Keep the raw truthy value (tool-call list, None, or False) so the
    # diagnostic line below prints exactly what the API returned.
    has_tool_calls = hasattr(msg, 'tool_calls') and msg.tool_calls

    print(f"\n✅ Request succeeded")
    print(f"  Finish reason: {finish_reason}")
    print(f"  Response length: {len(content)} chars")
    print(f"  Has tool calls: {has_tool_calls}")

    if has_tool_calls:
        print(f"  Tool calls: {[tc.function.name for tc in msg.tool_calls]}")

    if content:
        print(f"  Response preview: {content[:200]}...")
    else:
        print(f"  ⚠️  EMPTY RESPONSE!")

    return "WORKS" if (content or has_tool_calls) else "EMPTY"


# First banner has no leading newline (start of output).
print("=" * 80)
print("EXACT Health Bot Request Comparison Test")
print("=" * 80)

print(f"\nTest scenario:")
print(f"  Message: {test_message}")
print(f"  System prompt: {len(system_prompt)} chars")
print(f"  Tools: {len(TOOLS_SCHEMA)} tools")
print(f"  Context: Empty (first message)")

# ========== CONFIG 1: Local Proxy ==========
_print_banner("CONFIG 1: Local Proxy (.gemini.local.env)")

# SECURITY: API keys used to be hardcoded here and committed to the repo.
# Treat those keys as compromised and rotate them; supply fresh credentials
# via environment variables only.
local_config = {
    "base_url": os.environ.get("LOCAL_PROXY_BASE_URL", "http://127.0.0.1:8045"),
    "api_key": os.environ.get("LOCAL_PROXY_API_KEY", ""),
    "model": "gemini-3-pro-high",
}

print(f"Base URL: {local_config['base_url']}")
print(f"Model: {local_config['model']}")

client_local = OpenAI(
    api_key=local_config['api_key'],
    base_url=f"{local_config['base_url']}/v1",
)

result_local = _run_chat_test(client_local, local_config['model'])

# ========== CONFIG 2: OpenRouter ==========
_print_banner("CONFIG 2: OpenRouter (.gemini.openrouter.env)")

openrouter_config = {
    "base_url": "https://openrouter.ai/api",
    # SECURITY: never hardcode this key (see note above) — env var only.
    "api_key": os.environ.get("OPENROUTER_API_KEY", ""),
    "model": "google/gemini-3-flash-preview",
}

print(f"Base URL: {openrouter_config['base_url']}")
print(f"Model: {openrouter_config['model']}")

# OpenRouter recommends attribution headers on every request.
headers = {
    "HTTP-Referer": "https://github.com/your-username/butler",
    "X-Title": "Butler Health Assistant",
}

client_openrouter = OpenAI(
    api_key=openrouter_config['api_key'],
    base_url=f"{openrouter_config['base_url']}/v1",
    default_headers=headers,
)

result_or = _run_chat_test(client_openrouter, openrouter_config['model'])

# ========== COMPARISON ==========
_print_banner("COMPARISON RESULT")

print(f"\nLocal Proxy:  {result_local}")
print(f"OpenRouter:   {result_or}")

if result_local == "EMPTY" and result_or == "WORKS":
    print("\n🔍 CONFIRMED: Local proxy returns empty, OpenRouter works!")
    print("\nPossible reasons:")
    print("1. Local proxy doesn't support the model 'gemini-3-pro-high'")
    print("2. Local proxy has issues with function calling format")
    print("3. Local proxy has content filtering/limits")
    print("4. Local proxy's Gemini integration is outdated/broken")
elif result_local == "WORKS" and result_or == "WORKS":
    print("\n✅ Both work! Problem might be elsewhere (context, timing, etc)")
elif result_local == "EMPTY" and result_or == "EMPTY":
    print("\n⚠️  Both return empty - issue might be in request format")
else:
    print(f"\n⚠️  Unexpected combination: local={result_local}, or={result_or}")

print("\n" + "=" * 80)
