From cd5fa88ec49e63d9378f9436c5616fd9477b4a31 Mon Sep 17 00:00:00 2001
From: Tian jianyong <11429339@qq.com>
Date: Wed, 24 Dec 2025 20:14:04 +0800
Subject: [PATCH] =?UTF-8?q?=E6=9B=B4=E6=96=B0=20redis=20=E5=BC=82=E6=AD=A5?=
 =?UTF-8?q?=E5=AD=98=E5=8F=96?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../utils/semantic_cache_service.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/ruoyi-fastapi-backend/utils/semantic_cache_service.py b/ruoyi-fastapi-backend/utils/semantic_cache_service.py
index 63de9ca..343097b 100644
--- a/ruoyi-fastapi-backend/utils/semantic_cache_service.py
+++ b/ruoyi-fastapi-backend/utils/semantic_cache_service.py
@@ -230,14 +230,14 @@ class SemanticCacheService:
                 chat_id=chat_id
             )
 
-            existing = redis.get(cache_key)
+            existing = await redis.get(cache_key)
             if existing:
                 logger.debug(f"[SemanticCache] 问题已存在,跳过: {question[:30]}...")
                 return True
 
-            self._cleanup_old_cache(chat_id, redis)
+            await self._cleanup_old_cache(chat_id, redis)
 
-            redis.set(
+            await redis.set(
                 cache_key,
                 json.dumps(entry.to_dict()),
                 ex=self.CACHE_TTL_HOURS * 3600
@@ -245,7 +245,7 @@ class SemanticCacheService:
 
             logger.info(f"[SemanticCache] 已缓存 | key={cache_key} | question={question[:30]}...")
 
-            verify = redis.get(cache_key)
+            verify = await redis.get(cache_key)
             logger.info(f"[SemanticCache] 存储验证 | key={cache_key} | found={verify is not None}")
 
             return True
@@ -253,7 +253,7 @@ class SemanticCacheService:
             logger.error(f"[SemanticCache] 存储失败: {e}")
             return False
 
-    def _cleanup_old_cache(self, chat_id: str, redis_client=None) -> int:
+    async def _cleanup_old_cache(self, chat_id: str, redis_client=None) -> int:
         """
         清理旧缓存(当缓存过多时)
 
@@ -268,14 +268,14 @@ class SemanticCacheService:
 
         try:
             pattern = f"{self.CACHE_PREFIX}:{chat_id}:*"
-            cache_keys = redis.keys(pattern)
+            cache_keys = await redis.keys(pattern)
 
             if len(cache_keys) <= self.MAX_CACHE_SIZE:
                 return 0
 
             cache_info = []
             for key in cache_keys:
-                data = redis.get(key)
+                data = await redis.get(key)
                 if data:
                     try:
                         entry = CacheEntry.from_dict(json.loads(data))
@@ -288,7 +288,7 @@ class SemanticCacheService:
 
             deleted = 0
             for key, _ in cache_info[:delete_count]:
-                redis.delete(key)
+                await redis.delete(key)
                 deleted += 1
 
             if deleted > 0: