renamed: FFAI/__pycache__/catch.cpython-313.pyc -> FFAICilent/__pycache__/catch.cpython-313.pyc renamed: FFAI/__pycache__/crawlers.cpython-313.pyc -> FFAICilent/__pycache__/crawlers.cpython-313.pyc renamed: FFAI/__pycache__/crawlers_core.cpython-313.pyc -> FFAICilent/__pycache__/crawlers_core.cpython-313.pyc renamed: FFAI/analyzer.py -> FFAICilent/analyzer.py renamed: FFAI/catch.py -> FFAICilent/catch.py new file: FFAICilent/cloud.py new file: FFAICilent/config/config.ini new file: FFAICilent/config/configloder.py renamed: FFAI/crawlers.py -> FFAICilent/crawlers.py renamed: FFAI/crawlers_core.py -> FFAICilent/crawlers_core.py new file: FFAICilent/local.py new file: FFAICilent/logger.py renamed: FFAI/main.py -> FFAICilent/main.py new file: FFAICilent/manger.py
39 lines
1.3 KiB
Python
39 lines
1.3 KiB
Python
import os
import re
from collections import Counter
|
|
|
|
class PureAnalyzer:
    """Pure-Python text analyzer backed by a simple on-disk cache.

    Cached queries are stored as ``<query>.txt`` files inside a cache
    directory; no third-party dependencies are required.
    """

    @staticmethod
    def search_in_cache(query: str, cache_dir: str = "cache") -> list:
        """Search the cache directory for history records related to *query*.

        A cache file is considered related when the (lower-cased) query is a
        substring of its filename.

        Args:
            query: Search term to match against cached filenames.
            cache_dir: Directory holding the cached ``.txt`` files.

        Returns:
            A list of ``{"query": <filename stem>, "content": <file text>}``
            dicts; empty if the cache directory does not exist or nothing
            matches.
        """
        if not os.path.exists(cache_dir):
            return []

        related_files = []
        safe_query = query.lower()
        for filename in os.listdir(cache_dir):
            if safe_query in filename.lower():
                # Open the matched file itself (path built from `filename`,
                # joined portably instead of hard-coding a separator).
                with open(os.path.join(cache_dir, filename), "r", encoding="utf-8") as f:
                    content = f.read()
                related_files.append({
                    "query": filename.replace(".txt", ""),
                    "content": content
                })
        return related_files

    @staticmethod
    def analyze(data: list, query: str) -> dict:
        """Analyze freshly crawled *data* together with cached history.

        Args:
            data: List of dicts; each may carry ``"text"`` (page text) and
                ``"url"`` (source location).
            query: The search query, also used to look up cached history.

        Returns:
            Dict with ``summary`` (first ~200 chars of the combined text),
            ``keywords`` (top-10 most frequent word tokens), ``sources``
            (URLs of the input records) and ``related_history`` (queries of
            matching cache entries).
        """
        # Pull in any related records cached from earlier runs.
        history = PureAnalyzer.search_in_cache(query)

        # Merge new and historical text into one corpus.
        all_text = " ".join(d.get("text", "") for d in data)
        if history:
            all_text += " " + " ".join(h["content"] for h in history)

        # Summary: a short leading excerpt of the combined text.
        summary = all_text[:200] + ("..." if len(all_text) > 200 else "")

        # Keywords: the 10 most common word tokens (case-insensitive).
        words = re.findall(r"\w+", all_text.lower())
        keywords = [word for word, _ in Counter(words).most_common(10)]

        return {
            "summary": summary,
            "keywords": keywords,
            # .get keeps records without a "url" key from raising KeyError,
            # consistent with the tolerant d.get("text", "") above.
            "sources": [d.get("url", "") for d in data],
            "related_history": [h["query"] for h in history]
        }