FC/FFAI/catch.py

import os
import json
import hashlib
from datetime import datetime
from typing import Optional


class CacheManager:
    def __init__(self, cache_dir=".cache"):
        self.cache_dir = cache_dir
        os.makedirs(cache_dir, exist_ok=True)

    def _get_cache_path(self, query: str) -> str:
        """Build a cache file name derived from the query content."""
        query_hash = hashlib.md5(query.encode('utf-8')).hexdigest()
        return os.path.join(self.cache_dir, f"{query_hash}.json")

    def save_to_cache(self, query: str, data: dict) -> bool:
        """Save data to the cache, together with a timestamp."""
        cache_data = {
            'timestamp': datetime.now().isoformat(),
            'query': query,
            'data': data
        }
        try:
            with open(self._get_cache_path(query), 'w', encoding='utf-8') as f:
                json.dump(cache_data, f, ensure_ascii=False, indent=2)
            return True
        except Exception as e:
            print(f"Failed to save cache: {e}")
            return False

    def load_from_cache(self, query: str, max_age_hours=24) -> Optional[dict]:
        """Load data from the cache, honoring a configurable maximum age."""
        cache_file = self._get_cache_path(query)
        if not os.path.exists(cache_file):
            return None
        try:
            with open(cache_file, 'r', encoding='utf-8') as f:
                cache_data = json.load(f)
            # Discard the cached entry if it is older than the allowed age.
            cache_time = datetime.fromisoformat(cache_data['timestamp'])
            if (datetime.now() - cache_time).total_seconds() > max_age_hours * 3600:
                return None
            return cache_data['data']
        except Exception as e:
            print(f"Failed to read cache: {e}")
            return None
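

# Minimal usage sketch: assumes CacheManager is used to cache crawler/search
# results keyed by the query string. The query text and the fallback payload
# below are illustrative placeholders, not values from the repository.
if __name__ == "__main__":
    cache = CacheManager(cache_dir=".cache")
    query = "artificial intelligence"

    # Try the cache first; on a miss, build a placeholder result and store it.
    result = cache.load_from_cache(query, max_age_hours=24)
    if result is None:
        result = {"source": "fresh fetch", "items": []}  # placeholder payload
        cache.save_to_cache(query, result)

    print(result)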