"""Metadata field definition cache service.

Caches ``metadata_field_definitions`` in Redis to reduce database queries.
"""

import json
import logging
from typing import Any

from app.core.config import get_settings

logger = logging.getLogger(__name__)


class MetadataCacheService:
    """Cache service for per-tenant metadata field definitions.

    Caching strategy:
        - Key: ``metadata:fields:{tenant_id}``
        - Value: JSON-serialized list of field definitions
        - TTL: 1 hour (3600 seconds)
        - Update policy: invalidate on write + periodic refresh

    All cache failures are swallowed (logged) so callers degrade to a
    cache miss rather than crashing.
    """

    CACHE_KEY_PREFIX = "metadata:fields"
    DEFAULT_TTL = 3600  # 1 hour

    def __init__(self):
        self._settings = get_settings()
        # Created lazily by _get_redis() on first use.
        self._redis_client = None
        # When Redis is disabled (by settings or a failed connect), every
        # method becomes a no-op returning the "miss"/"failure" value.
        self._enabled = self._settings.redis_enabled

    async def _get_redis(self):
        """Return the Redis client, creating it lazily.

        Returns:
            The shared async Redis client, or ``None`` if caching is
            disabled or the connection could not be established. A failed
            connect permanently disables caching for this instance.
        """
        if not self._enabled:
            return None
        if self._redis_client is None:
            try:
                # Imported lazily so the module loads even without redis-py.
                import redis.asyncio as redis

                self._redis_client = redis.from_url(
                    self._settings.redis_url, decode_responses=True
                )
            except Exception as e:
                logger.error("[MetadataCache] Failed to connect Redis: %s", e)
                self._enabled = False
                return None
        return self._redis_client

    def _make_key(self, tenant_id: str) -> str:
        """Build the cache key for a tenant."""
        return f"{self.CACHE_KEY_PREFIX}:{tenant_id}"

    async def get_fields(self, tenant_id: str) -> list[dict[str, Any]] | None:
        """Fetch cached field definitions for a tenant.

        Args:
            tenant_id: Tenant ID.

        Returns:
            The cached list of field definitions, or ``None`` on a cache
            miss, disabled cache, or any Redis/JSON error.
        """
        if not self._enabled:
            return None
        try:
            client = await self._get_redis()
            if not client:
                return None
            key = self._make_key(tenant_id)
            cached_data = await client.get(key)
            if cached_data:
                logger.info("[MetadataCache] Cache hit for tenant=%s", tenant_id)
                return json.loads(cached_data)
            logger.info("[MetadataCache] Cache miss for tenant=%s", tenant_id)
            return None
        except Exception as e:
            logger.error("[MetadataCache] Get cache error: %s", e)
            return None

    async def set_fields(
        self,
        tenant_id: str,
        fields: list[dict[str, Any]],
        ttl: int | None = None,
    ) -> bool:
        """Cache field definitions for a tenant.

        Args:
            tenant_id: Tenant ID.
            fields: List of field definitions to cache.
            ttl: Expiry in seconds; defaults to 1 hour.

        Returns:
            ``True`` on success, ``False`` when disabled or on any error.
        """
        if not self._enabled:
            return False
        try:
            client = await self._get_redis()
            if not client:
                return False
            key = self._make_key(tenant_id)
            # NOTE: `or` means ttl=0 also falls back to DEFAULT_TTL
            # (preserves original behavior; setex would reject 0 anyway).
            ttl = ttl or self.DEFAULT_TTL
            # default=str stringifies non-JSON values (e.g. datetimes).
            await client.setex(
                key,
                ttl,
                json.dumps(fields, ensure_ascii=False, default=str),
            )
            logger.info(
                "[MetadataCache] Cached %d fields for tenant=%s, ttl=%ds",
                len(fields),
                tenant_id,
                ttl,
            )
            return True
        except Exception as e:
            logger.error("[MetadataCache] Set cache error: %s", e)
            return False

    async def invalidate(self, tenant_id: str) -> bool:
        """Invalidate a tenant's cache entry (call when definitions change).

        Args:
            tenant_id: Tenant ID.

        Returns:
            ``True`` if an entry was deleted, ``False`` otherwise.
        """
        if not self._enabled:
            return False
        try:
            client = await self._get_redis()
            if not client:
                return False
            key = self._make_key(tenant_id)
            result = await client.delete(key)
            if result:
                logger.info(
                    "[MetadataCache] Invalidated cache for tenant=%s", tenant_id
                )
            return bool(result)
        except Exception as e:
            logger.error("[MetadataCache] Invalidate error: %s", e)
            return False

    async def invalidate_all(self) -> bool:
        """Invalidate every metadata cache entry across all tenants.

        Returns:
            ``True`` on success, ``False`` when disabled or on any error.
        """
        if not self._enabled:
            return False
        try:
            client = await self._get_redis()
            if not client:
                return False
            # Enumerate all metadata cache keys without blocking Redis
            # (SCAN instead of KEYS).
            pattern = f"{self.CACHE_KEY_PREFIX}:*"
            keys = [key async for key in client.scan_iter(match=pattern)]
            if keys:
                await client.delete(*keys)
                logger.info(
                    "[MetadataCache] Invalidated %d cache entries", len(keys)
                )
            return True
        except Exception as e:
            logger.error("[MetadataCache] Invalidate all error: %s", e)
            return False

    async def close(self) -> None:
        """Release the Redis connection if one was opened.

        New, backward-compatible addition: the original never closed the
        lazily-created client, leaking the connection pool on shutdown.
        """
        if self._redis_client is not None:
            try:
                # redis-py 5.x name; older versions raise and are logged.
                await self._redis_client.aclose()
            except Exception as e:
                logger.error("[MetadataCache] Close error: %s", e)
            finally:
                self._redis_client = None


# Module-level singleton instance.
_metadata_cache_service: MetadataCacheService | None = None


async def get_metadata_cache_service() -> MetadataCacheService:
    """Return the process-wide cache service instance (lazy singleton).

    Kept ``async`` for interface compatibility with existing callers,
    even though construction itself does not await.
    """
    global _metadata_cache_service
    if _metadata_cache_service is None:
        _metadata_cache_service = MetadataCacheService()
    return _metadata_cache_service