# Coverage report header: health/services/storage.py — 104 statements, 0% covered
# (coverage.py v7.13.4, created at 2026-03-02 17:44 +0800)
"""
Storage service for health data.

Handles JSON file storage and database indexing for health metrics and activities.
"""
7import json
8from datetime import date
9from pathlib import Path
10from typing import Optional, Union, Dict, Any
12from pydantic import BaseModel
14from health import config
15from health.db.repository import HealthRepository
16from health.utils.exceptions import StorageError, DataValidationError
17from health.utils.logging_config import setup_logger
18from health.models.daily_metrics import (
19 StepsData,
20 HeartRateData,
21 SleepData,
22 StressData,
23 BodyBatteryData,
24 SpO2Data,
25 RespirationData,
26 HydrationData,
27 FloorsData,
28 IntensityMinutesData,
29 HRVData,
30 RHRData,
31 LifestyleLoggingData,
32)
33from health.models.activity import Activity
34from health.models.body_metrics import WeightData
# Module-level logger, configured through the project's shared logging setup.
logger = setup_logger(__name__)
class HealthStorage:
    """Service for storing health data to JSON files and database indexes.

    JSON files under a date-partitioned directory tree (YYYY/MM/) are the
    primary store; every successful write is mirrored into a database index
    via HealthRepository so lookups do not require directory scans.
    """

    def __init__(self, data_dir: Optional[Path] = None) -> None:
        """Initialize storage service.

        Args:
            data_dir: Optional data directory path (defaults to config.DATA_DIR)
        """
        self.data_dir = data_dir or config.DATA_DIR
        self.repo = HealthRepository()

    def _get_file_path(
        self, storage_path: str, target_date: date, create_dirs: bool = True
    ) -> Path:
        """Get file path for a data item.

        Args:
            storage_path: Relative storage path (e.g., "daily_metrics/steps")
            target_date: Date of the data
            create_dirs: Whether to create directories if they don't exist

        Returns:
            Full path to JSON file
        """
        # Layout: data_dir/storage_path/YYYY/MM/YYYY-MM-DD.json
        year = str(target_date.year)
        month = f"{target_date.month:02d}"
        filename = f"{target_date.isoformat()}.json"

        file_path = self.data_dir / storage_path / year / month / filename

        if create_dirs:
            file_path.parent.mkdir(parents=True, exist_ok=True)

        return file_path

    def _get_activity_file_path(
        self, activity_id: str, activity_date: date, create_dirs: bool = True
    ) -> Path:
        """Get file path for an activity.

        Args:
            activity_id: Activity ID
            activity_date: Date of the activity
            create_dirs: Whether to create directories

        Returns:
            Full path to activity JSON file
        """
        # Layout: data_dir/activities/YYYY/MM/<activity_id>.json
        year = str(activity_date.year)
        month = f"{activity_date.month:02d}"
        filename = f"{activity_id}.json"

        file_path = self.data_dir / "activities" / year / month / filename

        if create_dirs:
            file_path.parent.mkdir(parents=True, exist_ok=True)

        return file_path

    def save_daily_metric(
        self,
        data: Union[
            StepsData,
            HeartRateData,
            SleepData,
            StressData,
            BodyBatteryData,
            SpO2Data,
            RespirationData,
            HydrationData,
            FloorsData,
            IntensityMinutesData,
            HRVData,
            RHRData,
            WeightData,
            LifestyleLoggingData,
        ],
        metric_type: str,
    ) -> Path:
        """Save a daily metric to JSON file and update database index.

        Args:
            data: Pydantic model instance
            metric_type: Type of metric (e.g., "steps", "sleep")

        Returns:
            Path to saved file

        Raises:
            StorageError: If metric_type is unknown or the save fails
            DataValidationError: If data validation fails
        """
        try:
            # Validate data
            if not isinstance(data, BaseModel):
                raise DataValidationError(
                    f"Data must be a Pydantic model, got {type(data)}"
                )

            # Get storage configuration
            if metric_type not in config.DATA_TYPE_CONFIG:
                raise StorageError(f"Unknown metric type: {metric_type}")

            storage_path = config.DATA_TYPE_CONFIG[metric_type]["storage_path"]

            # Get file path (creates parent directories as needed)
            file_path = self._get_file_path(storage_path, data.date)

            # Convert to dict and save as JSON
            data_dict = data.model_dump(mode="json", exclude_none=True)

            with open(file_path, "w", encoding="utf-8") as f:
                json.dump(data_dict, f, indent=2, ensure_ascii=False)

            # Lazy %-style args: no string formatting unless DEBUG is enabled
            logger.debug(
                "Saved %s data for %s to %s", metric_type, data.date, file_path
            )

            # Update database index
            self.repo.index_daily_metric(
                metric_type=metric_type,
                metric_date=data.date,
                file_path=file_path,
                has_data=True,
            )

            return file_path

        except (DataValidationError, StorageError):
            # Re-raise domain errors unchanged; previously StorageError fell
            # through to the generic handler below and was double-wrapped
            # ("Failed to save X data: Unknown metric type: X").
            raise
        except Exception as e:
            logger.error("Failed to save %s data: %s", metric_type, e)
            raise StorageError(f"Failed to save {metric_type} data: {e}") from e

    def load_daily_metric(
        self, metric_type: str, target_date: date
    ) -> Optional[Dict[str, Any]]:
        """Load a daily metric from JSON file.

        Args:
            metric_type: Type of metric
            target_date: Date of the data

        Returns:
            Data dictionary, or None if not found or on any load error
        """
        try:
            # Try to get path from database index first
            indexed_path = self.repo.get_daily_metric_path(metric_type, target_date)

            if indexed_path:
                file_path = Path(indexed_path)
            else:
                # Fall back to the path computed from configuration
                storage_path = config.DATA_TYPE_CONFIG[metric_type]["storage_path"]
                file_path = self._get_file_path(
                    storage_path, target_date, create_dirs=False
                )

            if not file_path.exists():
                return None

            with open(file_path, "r", encoding="utf-8") as f:
                data = json.load(f)

            logger.debug("Loaded %s data for %s", metric_type, target_date)
            return data

        except Exception as e:
            # Best-effort read: any failure (unknown metric_type, bad JSON,
            # I/O error) is logged and reported to the caller as "no data".
            logger.error(
                "Failed to load %s data for %s: %s", metric_type, target_date, e
            )
            return None

    def save_activity(self, activity: Activity) -> Path:
        """Save an activity to JSON file and update database index.

        Args:
            activity: Activity model instance

        Returns:
            Path to saved file

        Raises:
            StorageError: If save fails
        """
        try:
            # Get file path (creates parent directories as needed)
            file_path = self._get_activity_file_path(
                activity.activity_id, activity.date
            )

            # Convert to dict and save as JSON
            data_dict = activity.model_dump(mode="json", exclude_none=True)

            with open(file_path, "w", encoding="utf-8") as f:
                json.dump(data_dict, f, indent=2, ensure_ascii=False)

            logger.debug(
                "Saved activity %s (%s) to %s",
                activity.activity_id,
                activity.activity_type,
                file_path,
            )

            # Update database index
            self.repo.index_activity(
                activity_id=activity.activity_id,
                activity_type=activity.activity_type,
                activity_date=activity.date,
                file_path=file_path,
                duration_seconds=activity.duration_seconds,
                distance_meters=activity.distance_meters,
            )

            return file_path

        except Exception as e:
            logger.error("Failed to save activity %s: %s", activity.activity_id, e)
            raise StorageError(f"Failed to save activity: {e}") from e

    def load_activity(self, activity_id: str) -> Optional[Dict[str, Any]]:
        """Load an activity from JSON file.

        Args:
            activity_id: Activity ID

        Returns:
            Activity data dictionary, or None if not found or on load error
        """
        try:
            # Get path from database index.
            # NOTE(review): reaches into the repository's private _get_conn();
            # a public lookup method on HealthRepository would be cleaner.
            with self.repo._get_conn() as conn:
                cursor = conn.cursor()
                cursor.execute(
                    "SELECT file_path FROM activity_index WHERE activity_id = ?",
                    (activity_id,),
                )
                row = cursor.fetchone()

            if not row:
                return None

            file_path = Path(row["file_path"])

            if not file_path.exists():
                # Stale index entry: indexed but the JSON file is gone
                logger.warning(
                    "Activity %s indexed but file not found: %s",
                    activity_id,
                    file_path,
                )
                return None

            with open(file_path, "r", encoding="utf-8") as f:
                data = json.load(f)

            logger.debug("Loaded activity %s", activity_id)
            return data

        except Exception as e:
            logger.error("Failed to load activity %s: %s", activity_id, e)
            return None

    def metric_exists(self, metric_type: str, target_date: date) -> bool:
        """Check if a metric already exists for a date.

        Args:
            metric_type: Type of metric
            target_date: Date to check

        Returns:
            True if the metric is indexed and its file is present on disk
        """
        indexed_path = self.repo.get_daily_metric_path(metric_type, target_date)
        return indexed_path is not None and Path(indexed_path).exists()

    def activity_exists(self, activity_id: str) -> bool:
        """Check if an activity already exists.

        Args:
            activity_id: Activity ID

        Returns:
            True if activity exists
        """
        return self.repo.activity_exists(activity_id)