"""
Storage service for health data.

Handles JSON file storage and database indexing for health metrics and activities.
"""

import json
from datetime import date
from pathlib import Path
from typing import Optional, Union, Dict, Any

from pydantic import BaseModel

from health import config
from health.db.repository import HealthRepository
from health.utils.exceptions import StorageError, DataValidationError
from health.utils.logging_config import setup_logger
from health.models.daily_metrics import (
    StepsData,
    HeartRateData,
    SleepData,
    StressData,
    BodyBatteryData,
    SpO2Data,
    RespirationData,
    HydrationData,
    FloorsData,
    IntensityMinutesData,
    HRVData,
    RHRData,
)
from health.models.activity import Activity
from health.models.body_metrics import WeightData

logger = setup_logger(__name__)


class HealthStorage:
    """Service for storing health data to JSON files and database indexes.

    JSON documents are laid out in a date-partitioned tree under
    ``data_dir`` (``<storage_path>/YYYY/MM/YYYY-MM-DD.json`` for daily
    metrics, ``activities/YYYY/MM/<activity_id>.json`` for activities).
    Every successful write is mirrored into a database index via
    ``HealthRepository`` so reads can resolve file paths without scanning
    the filesystem.
    """

    def __init__(self, data_dir: Optional[Path] = None) -> None:
        """Initialize storage service.

        Args:
            data_dir: Optional data directory path (defaults to config.DATA_DIR)
        """
        self.data_dir = data_dir or config.DATA_DIR
        self.repo = HealthRepository()

    def _get_file_path(
        self, storage_path: str, target_date: date, create_dirs: bool = True
    ) -> Path:
        """Get file path for a data item.

        Args:
            storage_path: Relative storage path (e.g., "daily_metrics/steps")
            target_date: Date of the data
            create_dirs: Whether to create directories if they don't exist

        Returns:
            Full path to JSON file
        """
        # Layout: data_dir/storage_path/YYYY/MM/YYYY-MM-DD.json
        year = str(target_date.year)
        month = f"{target_date.month:02d}"
        filename = f"{target_date.isoformat()}.json"

        file_path = self.data_dir / storage_path / year / month / filename

        if create_dirs:
            file_path.parent.mkdir(parents=True, exist_ok=True)

        return file_path

    def _get_activity_file_path(
        self, activity_id: str, activity_date: date, create_dirs: bool = True
    ) -> Path:
        """Get file path for an activity.

        Args:
            activity_id: Activity ID
            activity_date: Date of the activity
            create_dirs: Whether to create directories

        Returns:
            Full path to activity JSON file
        """
        # Layout: data_dir/activities/YYYY/MM/<activity_id>.json
        year = str(activity_date.year)
        month = f"{activity_date.month:02d}"
        filename = f"{activity_id}.json"

        file_path = self.data_dir / "activities" / year / month / filename

        if create_dirs:
            file_path.parent.mkdir(parents=True, exist_ok=True)

        return file_path

    def _write_json(self, file_path: Path, payload: Dict[str, Any]) -> None:
        """Atomically write ``payload`` as pretty-printed JSON to ``file_path``.

        Writes to a temporary sibling file and then renames it over the
        target, so a crash mid-write cannot leave a truncated JSON file
        that the index already points at. The temp file lives in the same
        directory so the rename never crosses a filesystem boundary.

        Args:
            file_path: Destination JSON file (parent must already exist)
            payload: JSON-serializable dictionary to write
        """
        tmp_path = file_path.with_name(file_path.name + ".tmp")
        with open(tmp_path, "w", encoding="utf-8") as f:
            json.dump(payload, f, indent=2, ensure_ascii=False)
        # Path.replace is atomic on POSIX and overwrites on Windows.
        tmp_path.replace(file_path)

    def save_daily_metric(
        self,
        data: Union[
            StepsData,
            HeartRateData,
            SleepData,
            StressData,
            BodyBatteryData,
            SpO2Data,
            RespirationData,
            HydrationData,
            FloorsData,
            IntensityMinutesData,
            HRVData,
            RHRData,
            WeightData,
        ],
        metric_type: str,
    ) -> Path:
        """Save a daily metric to JSON file and update database index.

        Args:
            data: Pydantic model instance
            metric_type: Type of metric (e.g., "steps", "sleep")

        Returns:
            Path to saved file

        Raises:
            StorageError: If save fails or metric_type is unknown
            DataValidationError: If data validation fails
        """
        try:
            # Validate data
            if not isinstance(data, BaseModel):
                raise DataValidationError(f"Data must be a Pydantic model, got {type(data)}")

            # Get storage configuration
            if metric_type not in config.DATA_TYPE_CONFIG:
                raise StorageError(f"Unknown metric type: {metric_type}")

            storage_path = config.DATA_TYPE_CONFIG[metric_type]["storage_path"]

            # Get file path (creates parent directories as needed)
            file_path = self._get_file_path(storage_path, data.date)

            # Serialize the model and write the file atomically
            data_dict = data.model_dump(mode="json", exclude_none=True)
            self._write_json(file_path, data_dict)

            logger.debug(f"Saved {metric_type} data for {data.date} to {file_path}")

            # Update database index
            self.repo.index_daily_metric(
                metric_type=metric_type,
                metric_date=data.date,
                file_path=file_path,
                has_data=True,
            )

            return file_path

        except (DataValidationError, StorageError):
            # Re-raise domain errors untouched: wrapping the StorageError
            # raised above would double-prefix its message and hide the
            # original failure reason from callers.
            raise
        except Exception as e:
            logger.error(f"Failed to save {metric_type} data: {e}")
            raise StorageError(f"Failed to save {metric_type} data: {e}") from e

    def load_daily_metric(
        self, metric_type: str, target_date: date
    ) -> Optional[Dict[str, Any]]:
        """Load a daily metric from JSON file.

        Args:
            metric_type: Type of metric
            target_date: Date of the data

        Returns:
            Data dictionary or None if not found
        """
        try:
            # Prefer the database index; fall back to the computed path so
            # files written before indexing (or re-imported) still resolve.
            indexed_path = self.repo.get_daily_metric_path(metric_type, target_date)

            if indexed_path:
                file_path = Path(indexed_path)
            else:
                storage_path = config.DATA_TYPE_CONFIG[metric_type]["storage_path"]
                file_path = self._get_file_path(
                    storage_path, target_date, create_dirs=False
                )

            if not file_path.exists():
                return None

            with open(file_path, "r", encoding="utf-8") as f:
                data = json.load(f)

            logger.debug(f"Loaded {metric_type} data for {target_date}")
            return data

        except Exception as e:
            # Best-effort read: any failure (missing config entry, bad
            # JSON, I/O error) is logged and reported as "not found".
            logger.error(f"Failed to load {metric_type} data for {target_date}: {e}")
            return None

    def save_activity(self, activity: Activity) -> Path:
        """Save an activity to JSON file and update database index.

        Args:
            activity: Activity model instance

        Returns:
            Path to saved file

        Raises:
            StorageError: If save fails
        """
        try:
            # Get file path (creates parent directories as needed)
            file_path = self._get_activity_file_path(
                activity.activity_id, activity.date
            )

            # Serialize the model and write the file atomically
            data_dict = activity.model_dump(mode="json", exclude_none=True)
            self._write_json(file_path, data_dict)

            logger.debug(
                f"Saved activity {activity.activity_id} ({activity.activity_type}) to {file_path}"
            )

            # Update database index
            self.repo.index_activity(
                activity_id=activity.activity_id,
                activity_type=activity.activity_type,
                activity_date=activity.date,
                file_path=file_path,
                duration_seconds=activity.duration_seconds,
                distance_meters=activity.distance_meters,
            )

            return file_path

        except Exception as e:
            logger.error(f"Failed to save activity {activity.activity_id}: {e}")
            raise StorageError(f"Failed to save activity: {e}") from e

    def load_activity(self, activity_id: str) -> Optional[Dict[str, Any]]:
        """Load an activity from JSON file.

        Args:
            activity_id: Activity ID

        Returns:
            Activity data dictionary or None if not found
        """
        try:
            # NOTE(review): reaches into HealthRepository's private
            # connection helper; a public repo lookup method would be
            # cleaner — confirm with the repository API before refactoring.
            with self.repo._get_conn() as conn:
                cursor = conn.cursor()
                cursor.execute(
                    "SELECT file_path FROM activity_index WHERE activity_id = ?",
                    (activity_id,),
                )
                row = cursor.fetchone()

            if not row:
                return None

            # Mapping-style access — presumably the connection uses
            # sqlite3.Row (or similar) as its row factory; verify in repo.
            file_path = Path(row["file_path"])

            if not file_path.exists():
                # Index and filesystem disagree; treat as missing but warn.
                logger.warning(
                    f"Activity {activity_id} indexed but file not found: {file_path}"
                )
                return None

            with open(file_path, "r", encoding="utf-8") as f:
                data = json.load(f)

            logger.debug(f"Loaded activity {activity_id}")
            return data

        except Exception as e:
            logger.error(f"Failed to load activity {activity_id}: {e}")
            return None

    def metric_exists(self, metric_type: str, target_date: date) -> bool:
        """Check if a metric already exists for a date.

        A metric counts as existing only if it is both indexed and its
        file is still present on disk.

        Args:
            metric_type: Type of metric
            target_date: Date to check

        Returns:
            True if metric exists
        """
        indexed_path = self.repo.get_daily_metric_path(metric_type, target_date)
        return indexed_path is not None and Path(indexed_path).exists()

    def activity_exists(self, activity_id: str) -> bool:
        """Check if an activity already exists.

        Args:
            activity_id: Activity ID

        Returns:
            True if activity exists
        """
        return self.repo.activity_exists(activity_id)
