#!/usr/bin/env python3
import argparse
import json
import re
from dataclasses import dataclass, asdict
from datetime import date, datetime, time, timedelta
from pathlib import Path
from typing import List

import requests
from requests.auth import HTTPBasicAuth
from icalendar import Calendar

# Config
# Skill installation directory; config and cache both live under references/.
SKILL_DIR = Path('/root/.openclaw/workspace/skills/wecom-caldav-calendar')
CONFIG_PATH = SKILL_DIR / 'references' / 'config.json'
CACHE_PATH = SKILL_DIR / 'references' / 'cache.json'
# Import-time side effect: ensure the references/ directory exists so cache
# writes never fail on a fresh install.
CACHE_PATH.parent.mkdir(parents=True, exist_ok=True)
# Timeout (seconds) applied to every HTTP request to the CalDAV server.
TIMEOUT = 20

# Fallback values used when references/config.json is missing or partial.
# SECURITY NOTE(review): a real username/password pair is hardcoded here in
# source. Credentials should live only in config.json (or a secret store) and
# these defaults should be removed and the password rotated — confirm.
DEFAULTS = {
    'base_url': 'https://caldav.wecom.work',
    'discovery_url': 'https://caldav.wecom.work/.well-known/caldav',
    'username': 'powellli@tencent.com',
    'password': 'ZFzx257FauBAiYLh',
    'calendar_path': '/calendar/1688851418287910/',
    'calendar_name': 'powellli的日历',
}

def load_config():
    """Return the effective config: DEFAULTS overlaid with config.json, if present."""
    merged = dict(DEFAULTS)
    if not CONFIG_PATH.exists():
        return merged
    merged.update(json.loads(CONFIG_PATH.read_text()))
    return merged

@dataclass
class Event:
    """One calendar entry; start/end are stored as naive ISO strings ('YYYY-MM-DD HH:MM:SS')."""
    href: str
    etag: str
    summary: str
    start: str
    end: str
    location: str
    description: str
    all_day: bool

    def start_dt(self):
        """Parse the cached start string back into a naive datetime."""
        parsed = datetime.fromisoformat(self.start)
        return parsed

    def end_dt(self):
        """Parse the cached end string back into a naive datetime."""
        parsed = datetime.fromisoformat(self.end)
        return parsed


def auth():
    """Build HTTP Basic auth credentials from the current configuration."""
    conf = load_config()
    user, pwd = conf['username'], conf['password']
    return HTTPBasicAuth(user, pwd)


def load_cache():
    """Read the on-disk cache, or return an empty skeleton when it does not exist yet."""
    if not CACHE_PATH.exists():
        return {"events": {}, "etag": {}, "meta": {}}
    return json.loads(CACHE_PATH.read_text())


def save_cache(cache):
    """Persist the cache dict to disk as pretty-printed, non-ASCII-safe JSON."""
    payload = json.dumps(cache, ensure_ascii=False, indent=2)
    CACHE_PATH.write_text(payload)


def propfind_index():
    """List (href, etag) pairs for every .ics resource in the configured calendar.

    Issues a Depth-1 PROPFIND against the calendar collection and scrapes
    hrefs/etags out of the multistatus XML with regexes.

    NOTE(review): the parser assumes the server emits the literal ``D:``
    namespace prefix; a server using a different prefix would yield zero rows.
    Confirm against actual WeCom CalDAV responses before hardening further.

    Returns:
        list[tuple[str, str]]: (href, etag) per resource; etag is '' if absent.

    Raises:
        requests.HTTPError: if the PROPFIND request fails.
    """
    body = '''<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
  <D:prop>
    <D:getetag/>
    <D:displayname/>
    <D:getcontenttype/>
  </D:prop>
</D:propfind>'''
    # Load the config once; the original re-read it for each URL component.
    cfg = load_config()
    r = requests.request(
        'PROPFIND',
        cfg['base_url'] + cfg['calendar_path'],
        data=body.encode('utf-8'),
        headers={'Depth': '1', 'Content-Type': 'application/xml; charset=utf-8'},
        auth=auth(),
        timeout=TIMEOUT,
    )
    r.raise_for_status()
    rows = []
    for chunk in r.text.split('<D:response>')[1:]:
        href_m = re.search(r'<D:href>(.*?)</D:href>', chunk)
        if not href_m:
            continue
        href = href_m.group(1).strip()
        # Keep only calendar object resources; skips the collection entry itself.
        if not href.endswith('.ics'):
            continue
        etag_m = re.search(r'<D:getetag>(.*?)</D:getetag>', chunk)
        etag = etag_m.group(1).strip() if etag_m else ''
        rows.append((href, etag))
    return rows


def parse_ics(href: str, etag: str, raw: str) -> List[Event]:
    """Convert one raw .ics document into a list of Event records.

    All times are flattened to naive datetimes: tz-aware values simply have
    their tzinfo dropped (wall time kept, no offset conversion), and all-day
    events are pinned to midnight with a default duration of one day.
    NOTE(review): dropping tzinfo assumes the server already sends the
    desired wall-clock times — confirm.
    """
    events = []
    for comp in Calendar.from_ical(raw).walk('VEVENT'):
        dtstart = comp.get('dtstart')
        if not dtstart:
            # An event without DTSTART cannot be placed on the calendar.
            continue
        summary = str(comp.get('summary', '未命名'))
        loc_prop = comp.get('location')
        location = str(loc_prop) if loc_prop else ''
        desc_prop = comp.get('description')
        description = str(desc_prop) if desc_prop else ''
        dtend = comp.get('dtend')
        s_val = dtstart.dt
        e_val = dtend.dt if dtend else None
        # A plain date (not datetime) marks an all-day event.
        is_all_day = isinstance(s_val, date) and not isinstance(s_val, datetime)
        if is_all_day:
            start_dt = datetime.combine(s_val, time.min)
            if isinstance(e_val, date):
                end_dt = datetime.combine(e_val, time.min)
            else:
                end_dt = start_dt + timedelta(days=1)
        else:
            start_dt = s_val.replace(tzinfo=None) if getattr(s_val, 'tzinfo', None) else s_val
            end_dt = start_dt
            if e_val:
                end_dt = e_val.replace(tzinfo=None) if getattr(e_val, 'tzinfo', None) else e_val
        events.append(Event(
            href=href,
            etag=etag,
            summary=summary,
            start=start_dt.isoformat(sep=' '),
            end=end_dt.isoformat(sep=' '),
            location=location,
            description=description,
            all_day=is_all_day,
        ))
    return events


def refresh_index(recent=None):
    """Incrementally refresh the local cache from the CalDAV server.

    Fetches the (href, etag) index, downloads only resources whose etag
    differs from the cached one, and records run metadata. Failed downloads
    and unparseable .ics bodies are skipped best-effort; because their etag
    is not recorded, they are retried on the next run.

    Args:
        recent: if truthy, limit this run to the last N changed resources.

    Returns:
        dict: the updated cache structure (also persisted to disk).
    """
    # Load the config once up front; the original re-read it on every loop
    # iteration and twice more when building the meta block.
    cfg = load_config()
    cache = load_cache()
    rows = propfind_index()
    changed = [(href, etag) for href, etag in rows if cache['etag'].get(href) != etag]
    if recent:
        changed = changed[-recent:]
    for href, etag in changed:
        r = requests.get(cfg['base_url'] + href, auth=auth(), timeout=TIMEOUT)
        if r.status_code != 200:
            continue  # etag stays unrecorded, so this resource is retried later
        try:
            cache['events'][href] = [asdict(x) for x in parse_ics(href, etag, r.text)]
            cache['etag'][href] = etag
        except Exception:
            # Best-effort: one malformed .ics must not abort the whole refresh.
            continue
    cache['meta'] = {
        'updated_at': datetime.utcnow().isoformat() + 'Z',
        'calendar_name': cfg['calendar_name'],
        'calendar_path': cfg['calendar_path'],
        'total_resources': len(rows),
        'last_incremental_count': len(changed),
    }
    save_cache(cache)
    return cache


def all_cached_events():
    """Load every cached event, deduplicated by content, sorted by (start, summary).

    Duplicates (identical times, summary, location, description, all_day flag)
    collapse to the last occurrence; href/etag are not part of the dedup key.
    """
    cache = load_cache()
    unique = {}
    for href, records in cache.get('events', {}).items():
        for record in records:
            ev = Event(**record)
            key = (ev.start, ev.end, ev.summary, ev.location, ev.description, ev.all_day)
            unique[key] = ev
    return sorted(unique.values(), key=lambda e: (e.start, e.summary))


def query_range(start_day: date, end_day: date):
    """Return cached events overlapping the inclusive day range [start_day, end_day]."""
    window_start = datetime.combine(start_day, time.min)
    window_end = datetime.combine(end_day + timedelta(days=1), time.min)
    return [
        ev for ev in all_cached_events()
        if ev.start_dt() < window_end and ev.end_dt() > window_start
    ]


def search_keyword(keyword: str, start_day: date | None = None, end_day: date | None = None):
    """Case-insensitive substring search over summary, location and description.

    When BOTH start_day and end_day are given, the search is restricted to
    events overlapping that inclusive range; a lone bound is silently ignored.
    """
    needle = keyword.strip().lower()
    events = all_cached_events()
    if start_day and end_day:
        lo = datetime.combine(start_day, time.min)
        hi = datetime.combine(end_day + timedelta(days=1), time.min)
        events = [ev for ev in events if ev.start_dt() < hi and ev.end_dt() > lo]
    return [
        ev for ev in events
        if needle in ' '.join([ev.summary, ev.location, ev.description]).lower()
    ]


def normalize_openclaw_title(title: str) -> str:
    """Ensure the title ends with the ' (From OpenClaw)' marker exactly once."""
    marker = ' (From OpenClaw)'
    if title.endswith(marker):
        return title
    return title + marker


def print_events(events: List[Event]):
    """Print events grouped under '## YYYY-MM-DD' day headers; 'NO_EVENTS' if empty."""
    if not events:
        print('NO_EVENTS')
        return
    last_day = None
    for ev in events:
        event_day = ev.start_dt().date()
        if event_day != last_day:
            # New day header each time the date changes (input is pre-sorted).
            last_day = event_day
            print(f'## {event_day.isoformat()}')
        if ev.all_day:
            print(f'- 全天 | {ev.summary}')
        else:
            span = f"{ev.start_dt().strftime('%H:%M')}-{ev.end_dt().strftime('%H:%M')}"
            print(f'- {span} | {ev.summary}')
        if ev.location:
            print(f'  地点: {ev.location}')
        if ev.description:
            flat = ev.description.replace('\r', ' ').replace('\n', ' | ').strip()
            print(f'  备注: {flat}')


def _mode_window(mode):
    """Return (start_day, end_day) for a relative range name, based on the UTC date.

    Supported modes: 'today', 'tomorrow', 'this-week' (Monday..Sunday of the
    current ISO week).
    """
    today = datetime.utcnow().date()
    if mode == 'today':
        return today, today
    if mode == 'tomorrow':
        d = today + timedelta(days=1)
        return d, d
    # 'this-week'
    monday = today - timedelta(days=today.weekday())
    return monday, monday + timedelta(days=6)


def main():
    """CLI entry point: parse the subcommand and print matching events.

    NOTE(review): relative ranges use the UTC date (datetime.utcnow), not the
    local date — confirm this matches user expectations in CST.
    """
    ap = argparse.ArgumentParser()
    sub = ap.add_subparsers(dest='cmd', required=True)

    p1 = sub.add_parser('index')
    p1.add_argument('--recent', type=int, default=300)

    p0 = sub.add_parser('refresh-and-query')
    p0.add_argument('mode', choices=['today', 'tomorrow', 'this-week'])
    p0.add_argument('--recent', type=int, default=80)

    p2 = sub.add_parser('query-day')
    p2.add_argument('day')

    sub.add_parser('today')
    sub.add_parser('tomorrow')

    p3 = sub.add_parser('query-range')
    p3.add_argument('start_day')
    p3.add_argument('end_day')

    p4 = sub.add_parser('search')
    p4.add_argument('keyword')
    p4.add_argument('--start-day')
    p4.add_argument('--end-day')
    p4.add_argument('--refresh', action='store_true')
    p4.add_argument('--recent', type=int, default=80)

    sub.add_parser('this-week')

    args = ap.parse_args()

    if args.cmd == 'index':
        cache = refresh_index(recent=args.recent)
        print(json.dumps(cache['meta'], ensure_ascii=False, indent=2))
    elif args.cmd == 'refresh-and-query':
        refresh_index(recent=args.recent)
        s, e = _mode_window(args.mode)
        print_events(query_range(s, e))
    elif args.cmd in ('today', 'tomorrow', 'this-week'):
        # Shared window computation; the original duplicated this logic here
        # and again inside 'refresh-and-query'.
        s, e = _mode_window(args.cmd)
        print_events(query_range(s, e))
    elif args.cmd == 'query-day':
        d = datetime.strptime(args.day, '%Y-%m-%d').date()
        print_events(query_range(d, d))
    elif args.cmd == 'query-range':
        s = datetime.strptime(args.start_day, '%Y-%m-%d').date()
        e = datetime.strptime(args.end_day, '%Y-%m-%d').date()
        print_events(query_range(s, e))
    elif args.cmd == 'search':
        if args.refresh:
            refresh_index(recent=args.recent)
        s = datetime.strptime(args.start_day, '%Y-%m-%d').date() if args.start_day else None
        e = datetime.strptime(args.end_day, '%Y-%m-%d').date() if args.end_day else None
        print_events(search_keyword(args.keyword, s, e))

# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
