修改异步问题

This commit is contained in:
Tian jianyong 2025-12-24 20:06:34 +08:00
parent 4cc990c477
commit 564c712e36

View File

@@ -212,12 +212,10 @@ class SemanticCacheService:
return False
try:
# 过滤无效答案
if not answer or len(answer.strip()) < 10:
logger.info(f"[SemanticCache] 答案太短,不缓存: {answer[:20]}...")
return False
# 构建缓存条目
normalized = question.lower().strip()
question_hash = hashlib.md5(normalized.encode('utf-8')).hexdigest()[:16]
cache_key = self._build_cache_key(chat_id, question_hash)
@@ -232,17 +230,14 @@ class SemanticCacheService:
chat_id=chat_id
)
# 检查是否已存在(避免重复存储)
existing = await redis.get(cache_key)
existing = redis.get(cache_key)
if existing:
logger.debug(f"[SemanticCache] 问题已存在,跳过: {question[:30]}...")
return True
# 清理旧缓存如果该chat_id缓存过多
await self._cleanup_old_cache(chat_id, redis)
self._cleanup_old_cache(chat_id, redis)
# 存储缓存
await redis.set(
redis.set(
cache_key,
json.dumps(entry.to_dict()),
ex=self.CACHE_TTL_HOURS * 3600
@@ -250,8 +245,7 @@ class SemanticCacheService:
logger.info(f"[SemanticCache] 已缓存 | key={cache_key} | question={question[:30]}...")
# 验证存储是否成功
verify = await redis.get(cache_key)
verify = redis.get(cache_key)
logger.info(f"[SemanticCache] 存储验证 | key={cache_key} | found={verify is not None}")
return True
@@ -259,7 +253,7 @@ class SemanticCacheService:
logger.error(f"[SemanticCache] 存储失败: {e}")
return False
async def _cleanup_old_cache(self, chat_id: str, redis_client=None) -> int:
def _cleanup_old_cache(self, chat_id: str, redis_client=None) -> int:
"""
清理旧缓存当缓存过多时
@@ -274,15 +268,14 @@ class SemanticCacheService:
try:
pattern = f"{self.CACHE_PREFIX}:{chat_id}:*"
cache_keys = await redis.keys(pattern)
cache_keys = redis.keys(pattern)
if len(cache_keys) <= self.MAX_CACHE_SIZE:
return 0
# 获取所有缓存的创建时间
cache_info = []
for key in cache_keys:
data = await redis.get(key)
data = redis.get(key)
if data:
try:
entry = CacheEntry.from_dict(json.loads(data))
@@ -290,13 +283,12 @@ class SemanticCacheService:
except:
pass
# 按创建时间排序,删除最旧的
cache_info.sort(key=lambda x: x[1])
delete_count = len(cache_keys) - self.MAX_CACHE_SIZE
deleted = 0
for key, _ in cache_info[:delete_count]:
await redis.delete(key)
redis.delete(key)
deleted += 1
if deleted > 0: