renamed: FFAI/__pycache__/analyzer.cpython-313.pyc -> FFAICilent/__pycache__/analyzer.cpython-313.pyc

renamed:    FFAI/__pycache__/catch.cpython-313.pyc -> FFAICilent/__pycache__/catch.cpython-313.pyc
	renamed:    FFAI/__pycache__/crawlers.cpython-313.pyc -> FFAICilent/__pycache__/crawlers.cpython-313.pyc
	renamed:    FFAI/__pycache__/crawlers_core.cpython-313.pyc -> FFAICilent/__pycache__/crawlers_core.cpython-313.pyc
	renamed:    FFAI/analyzer.py -> FFAICilent/analyzer.py
	renamed:    FFAI/catch.py -> FFAICilent/catch.py
	new file:   FFAICilent/cloud.py
	new file:   FFAICilent/config/config.ini
	new file:   FFAICilent/config/configloder.py
	renamed:    FFAI/crawlers.py -> FFAICilent/crawlers.py
	renamed:    FFAI/crawlers_core.py -> FFAICilent/crawlers_core.py
	new file:   FFAICilent/local.py
	new file:   FFAICilent/logger.py
	renamed:    FFAI/main.py -> FFAICilent/main.py
	new file:   FFAICilent/manger.py
This commit is contained in:
Friendfeng 2025-06-15 13:29:11 +08:00
parent 44cce38817
commit b3b036429a
15 changed files with 172 additions and 1 deletion

37
FFAICilent/cloud.py Normal file
View File

@ -0,0 +1,37 @@
import requests
from ..security import decrypt_secret
from logger import log
class CloudConnector:
    """HTTP client for the cloud execution backend.

    Reads endpoint/timeout/retry settings from the parsed config and
    authenticates every request with a bearer token obtained from the
    project helper ``decrypt_secret`` (implementation not visible here).
    """

    def __init__(self, config):
        # NOTE(review): a missing section/key in config.ini raises KeyError
        # here -- confirm that is the intended failure mode.
        self.endpoint = config['Cloud']['endpoint']
        self.timeout = int(config['Deployment']['cloud_timeout'])
        self.retry = int(config['Cloud']['retry_times'])
        self.token = decrypt_secret('api_token')

    def _make_request(self, data):
        """POST *data* as JSON to the endpoint and return the decoded body.

        Raises:
            ConnectionError: on any transport or HTTP-status failure.
        """
        headers = {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json"
        }
        try:
            resp = requests.post(
                self.endpoint,
                json=data,
                headers=headers,
                timeout=self.timeout
            )
            resp.raise_for_status()
            return resp.json()
        except requests.exceptions.RequestException as e:
            log.error(f"Cloud request failed: {str(e)}")
            # Chain the original exception so the root cause survives.
            raise ConnectionError("Cloud service unavailable") from e

    def execute(self, command):
        """Run *command* remotely, retrying up to ``self.retry`` times.

        NOTE(review): if retry_times is configured as 0 the loop body never
        runs and this silently returns None -- confirm config validation.
        """
        for attempt in range(self.retry):
            try:
                return self._make_request({"command": command})
            except ConnectionError:
                if attempt == self.retry - 1:
                    raise  # retries exhausted -- propagate to caller
                log.warning(f"Retrying... ({attempt + 1}/{self.retry})")

View File

@ -0,0 +1,12 @@
[Deployment]
default_mode = cloud
cloud_timeout = 10
local_fallback = true
[Cloud]
endpoint = https://api.yourservice.com/v1
retry_times = 3
[Local]
db_path = ./local_data.db
max_rows = 1000

View File

@ -0,0 +1,11 @@
import configparser
import os
from pathlib import Path
def load_config():
    """Load and return the package configuration as a ConfigParser.

    Reads ``config/config.ini`` relative to the package root (two levels
    above this file).  ``ConfigParser.read`` silently skips files that do
    not exist, so a missing config file yields an empty parser rather
    than an error.

    TODO(review): an encrypted ``config/secrets.enc`` was previously
    listed here (commented out); ConfigParser cannot parse an encrypted
    blob, so it must be decrypted before being merged in.
    """
    config = configparser.ConfigParser()
    config.read([
        Path(__file__).parent.parent / 'config/config.ini',
    ])
    return config

17
FFAICilent/local.py Normal file
View File

@ -0,0 +1,17 @@
import sqlite3
from logger import log
class LocalConnector:
    """Connector for the local SQLite fallback store."""

    def __init__(self, config):
        # Path and row cap come from the [Local] section of config.ini.
        self.db_path = config['Local']['db_path']
        self.max_rows = int(config['Local']['max_rows'])

    def execute(self, query):
        """Run *query* and return at most ``self.max_rows`` result rows.

        Raises:
            ConnectionError: when the database cannot be opened or the
                query fails at the SQLite level.
        """
        try:
            # The connection context manager commits on success and rolls
            # back on error; a fresh connection is opened per call.
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute(query)
                return cursor.fetchmany(self.max_rows)
        # Narrowed from bare `except Exception`: programming errors
        # (e.g. TypeError) should surface, only DB failures are wrapped.
        except sqlite3.Error as e:
            log.error(f"Local DB error: {str(e)}")
            raise ConnectionError("Local storage unavailable") from e

27
FFAICilent/logger.py Normal file
View File

@ -0,0 +1,27 @@
from logger import log
def main():
    # Demo entry point showing the intended logging usage for this package.
    log.info("程序启动")
    try:
        # Placeholder: your application code goes here.
        # NOTE(review): `some_var` is never defined anywhere in this file --
        # this line raises NameError at runtime; the `type: ignore` only
        # silences the type checker, it does not fix the bug.
        log.debug("调试信息: %s", some_var)  # type: ignore
    except Exception as e:
        # Log with full traceback, then re-raise so callers still see it.
        log.error("操作失败: %s", str(e), exc_info=True)
        raise
# Example: building a custom, module-specific logger.
# NOTE(review): neither `setup_logging` nor `logging` is imported or defined
# in this file -- importing this module raises NameError as written. Also,
# this file is logger.py yet it does `from logger import log` (self-import).
custom_log = setup_logging(
    name="MyModule",
    log_dir="custom_logs",
    file_level=logging.INFO,
    console_level=logging.DEBUG,
    max_bytes=5*1024*1024,  # 5MB per log file before rotation
    backup_count=3
)
# Usage example
custom_log.warning("自定义日志记录")
# Usage example
# NOTE(review): the two lines above are duplicated verbatim here -- likely
# an accidental paste; the same message is logged twice on import.
custom_log.warning("自定义日志记录")
if __name__ == "__main__":
    main()

View File

@ -2,7 +2,8 @@
from analyzer import PureAnalyzer # type: ignore
from crawlers_core import CrawlerEngine
from catch import CacheManager
from manger import ConnectionManager
from utils.logger import setup_logging
class PureInfoHunter:
def __init__(self):
@ -41,6 +42,11 @@ class PureInfoHunter:
f.write(content)
print(f"报告已保存到 reports/{safe_query}_report.txt")
def sync_local_cache(self):
    """Pull all pending updates from the cloud into the local cache.

    Only acts when the hunter is in local mode; otherwise a no-op and
    returns None either way.

    NOTE(review): the LocalConnector added in this commit exposes no
    ``save_cache`` method -- confirm what object ``self.local`` actually
    is here, otherwise this call raises AttributeError.
    """
    if self.mode == 'local':
        cloud_data = self.cloud.execute("get_all_updates")
        self.local.save_cache(cloud_data)
if __name__ == "__main__":
import sys
import os
@ -59,6 +65,27 @@ if __name__ == "__main__":
hunter = PureInfoHunter()
setup_logging()
parser = argparse.ArgumentParser()
parser.add_argument('command', help='执行指令或查询')
parser.add_argument('--local', action='store_true',
help='强制使用本地模式')
args = parser.parse_args()
manager = ConnectionManager()
try:
if args.local:
manager.mode = 'local'
result = manager.execute(args.command)
print(f"✅ 执行成功 (模式: {manager.mode.upper()})")
print(result)
except Exception as e:
print(f"❌ 执行失败: {str(e)}")
exit(1)
if force_update:
print("强制更新模式(忽略缓存)")
data = hunter.crawler.crawl(query) # 使用实际存在的方法名

40
FFAICilent/manger.py Normal file
View File

@ -0,0 +1,40 @@
from cloud import CloudConnector
from local import LocalConnector
from ..FFAICilent.config.configloder import load_config
class ConnectionManager:
    """Routes commands to the cloud or local backend.

    The starting mode comes from ``[Deployment] default_mode`` in the
    config; when a cloud call fails and ``local_fallback`` is enabled,
    the manager degrades to local mode for the rest of its lifetime.
    """

    def __init__(self):
        self.config = load_config()
        self.mode = self.config['Deployment']['default_mode']
        self._init_connectors()

    def _init_connectors(self):
        # Both connectors are built eagerly so a later fallback needs no
        # extra setup work.
        self.cloud = CloudConnector(self.config)
        self.local = LocalConnector(self.config)

    def execute(self, command):
        """Run *command* in the current mode, falling back if configured.

        Raises:
            ConnectionError: cloud failed and local_fallback is disabled,
                or the local store itself is unavailable.
        """
        # Prefer the configured default mode.
        if self.mode == 'cloud':
            try:
                return self.cloud.execute(command)
            except ConnectionError:
                if self.config.getboolean('Deployment', 'local_fallback'):
                    return self._fallback_to_local(command)
                raise
        else:
            return self.local.execute(command)

    def _fallback_to_local(self, command):
        """Degrade to local mode and re-run *command* against SQLite."""
        from logger import log
        log.warning("Falling back to local mode")
        self.mode = 'local'
        # Translate the cloud command into a local query.
        local_query = self._adapt_command(command)
        return self.local.execute(local_query)

    def _adapt_command(self, cloud_cmd):
        """Translate a cloud API command into a local SQL query (example).

        SECURITY(review): *cloud_cmd* is interpolated directly into the
        SQL text -- a single quote in the command breaks out of the string
        literal (SQL injection). Switch to a parameterized query once
        LocalConnector.execute accepts bind values; the current
        string-only interface prevents fixing it here.
        """
        # Add your real business translation rules here.
        return f"SELECT * FROM cache WHERE key='{cloud_cmd}'"