From b3b036429ad14a6ed10024388afdf967aa6c7de1 Mon Sep 17 00:00:00 2001
From: Friendfeng <3880261409@qq.com>
Date: Sun, 15 Jun 2025 13:29:11 +0800
Subject: [PATCH] renamed: FFAI/__pycache__/analyzer.cpython-313.pyc ->
 FFAICilent/__pycache__/analyzer.cpython-313.pyc
 renamed: FFAI/__pycache__/catch.cpython-313.pyc -> FFAICilent/__pycache__/catch.cpython-313.pyc
 renamed: FFAI/__pycache__/crawlers.cpython-313.pyc -> FFAICilent/__pycache__/crawlers.cpython-313.pyc
 renamed: FFAI/__pycache__/crawlers_core.cpython-313.pyc -> FFAICilent/__pycache__/crawlers_core.cpython-313.pyc
 renamed: FFAI/analyzer.py -> FFAICilent/analyzer.py
 renamed: FFAI/catch.py -> FFAICilent/catch.py
 new file: FFAICilent/cloud.py
 new file: FFAICilent/config/config.ini
 new file: FFAICilent/config/configloder.py
 renamed: FFAI/crawlers.py -> FFAICilent/crawlers.py
 renamed: FFAI/crawlers_core.py -> FFAICilent/crawlers_core.py
 new file: FFAICilent/local.py
 new file: FFAICilent/logger.py
 renamed: FFAI/main.py -> FFAICilent/main.py
 new file: FFAICilent/manger.py
---
 .../__pycache__/analyzer.cpython-313.pyc      | Bin
 .../__pycache__/catch.cpython-313.pyc         | Bin
 .../__pycache__/crawlers.cpython-313.pyc      | Bin
 .../__pycache__/crawlers_core.cpython-313.pyc | Bin
 {FFAI => FFAICilent}/analyzer.py              |   0
 {FFAI => FFAICilent}/catch.py                 |   0
 FFAICilent/cloud.py                           |  37 ++++++++++++++++
 FFAICilent/config/config.ini                  |  12 ++++++
 FFAICilent/config/configloder.py              |  11 +++++
 {FFAI => FFAICilent}/crawlers.py              |   0
 {FFAI => FFAICilent}/crawlers_core.py         |   0
 FFAICilent/local.py                           |  17 ++++++++
 FFAICilent/logger.py                          |  27 ++++++++++++
 {FFAI => FFAICilent}/main.py                  |  29 ++++++++++++-
 FFAICilent/manger.py                          |  40 ++++++++++++++++++
 15 files changed, 172 insertions(+), 1 deletion(-)
 rename {FFAI => FFAICilent}/__pycache__/analyzer.cpython-313.pyc (100%)
 rename {FFAI => FFAICilent}/__pycache__/catch.cpython-313.pyc (100%)
 rename {FFAI => FFAICilent}/__pycache__/crawlers.cpython-313.pyc (100%)
 rename {FFAI => FFAICilent}/__pycache__/crawlers_core.cpython-313.pyc (100%)
 rename {FFAI => FFAICilent}/analyzer.py (100%)
 rename {FFAI => FFAICilent}/catch.py (100%)
 create mode 100644 FFAICilent/cloud.py
 create mode 100644 FFAICilent/config/config.ini
 create mode 100644 FFAICilent/config/configloder.py
 rename {FFAI => FFAICilent}/crawlers.py (100%)
 rename {FFAI => FFAICilent}/crawlers_core.py (100%)
 create mode 100644 FFAICilent/local.py
 create mode 100644 FFAICilent/logger.py
 rename {FFAI => FFAICilent}/main.py (73%)
 create mode 100644 FFAICilent/manger.py

diff --git a/FFAI/__pycache__/analyzer.cpython-313.pyc b/FFAICilent/__pycache__/analyzer.cpython-313.pyc
similarity index 100%
rename from FFAI/__pycache__/analyzer.cpython-313.pyc
rename to FFAICilent/__pycache__/analyzer.cpython-313.pyc
diff --git a/FFAI/__pycache__/catch.cpython-313.pyc b/FFAICilent/__pycache__/catch.cpython-313.pyc
similarity index 100%
rename from FFAI/__pycache__/catch.cpython-313.pyc
rename to FFAICilent/__pycache__/catch.cpython-313.pyc
diff --git a/FFAI/__pycache__/crawlers.cpython-313.pyc b/FFAICilent/__pycache__/crawlers.cpython-313.pyc
similarity index 100%
rename from FFAI/__pycache__/crawlers.cpython-313.pyc
rename to FFAICilent/__pycache__/crawlers.cpython-313.pyc
diff --git a/FFAI/__pycache__/crawlers_core.cpython-313.pyc b/FFAICilent/__pycache__/crawlers_core.cpython-313.pyc
similarity index 100%
rename from FFAI/__pycache__/crawlers_core.cpython-313.pyc
rename to FFAICilent/__pycache__/crawlers_core.cpython-313.pyc
diff --git a/FFAI/analyzer.py b/FFAICilent/analyzer.py
similarity index 100%
rename from FFAI/analyzer.py
rename to FFAICilent/analyzer.py
diff --git a/FFAI/catch.py b/FFAICilent/catch.py
similarity index 100%
rename from FFAI/catch.py
rename to FFAICilent/catch.py
diff --git a/FFAICilent/cloud.py b/FFAICilent/cloud.py
new file mode 100644
index 0000000..df17a9e
--- /dev/null
+++ b/FFAICilent/cloud.py
@@ -0,0 +1,37 @@
+import requests
+from ..security import decrypt_secret
+from logger import log
+
+class CloudConnector:
+    def __init__(self, config):
+        self.endpoint = config['Cloud']['endpoint']
+        self.timeout = int(config['Deployment']['cloud_timeout'])
+        self.retry = int(config['Cloud']['retry_times'])
+        self.token = decrypt_secret('api_token')
+
+    def _make_request(self, data):
+        headers = {
+            "Authorization": f"Bearer {self.token}",
+            "Content-Type": "application/json"
+        }
+        try:
+            resp = requests.post(
+                self.endpoint,
+                json=data,
+                headers=headers,
+                timeout=self.timeout
+            )
+            resp.raise_for_status()
+            return resp.json()
+        except requests.exceptions.RequestException as e:
+            log.error(f"Cloud request failed: {str(e)}")
+            raise ConnectionError("Cloud service unavailable")
+
+    def execute(self, command):
+        for attempt in range(self.retry):
+            try:
+                return self._make_request({"command": command})
+            except ConnectionError:
+                if attempt == self.retry - 1:
+                    raise
+                log.warning(f"Retrying... ({attempt + 1}/{self.retry})")
\ No newline at end of file
diff --git a/FFAICilent/config/config.ini b/FFAICilent/config/config.ini
new file mode 100644
index 0000000..01745fb
--- /dev/null
+++ b/FFAICilent/config/config.ini
@@ -0,0 +1,12 @@
+[Deployment]
+default_mode = cloud
+cloud_timeout = 10
+local_fallback = true
+
+[Cloud]
+endpoint = https://api.yourservice.com/v1
+retry_times = 3
+
+[Local]
+db_path = ./local_data.db
+max_rows = 1000
\ No newline at end of file
diff --git a/FFAICilent/config/configloder.py b/FFAICilent/config/configloder.py
new file mode 100644
index 0000000..612a308
--- /dev/null
+++ b/FFAICilent/config/configloder.py
@@ -0,0 +1,11 @@
+import configparser
+import os
+from pathlib import Path
+
+def load_config():
+    config = configparser.ConfigParser()
+    config.read([
+        Path(__file__).parent.parent / 'config/config.ini',
+        # Path(__file__).parent.parent / 'config/secrets.enc'
+    ])
+    return config
\ No newline at end of file
diff --git a/FFAI/crawlers.py b/FFAICilent/crawlers.py
similarity index 100%
rename from FFAI/crawlers.py
rename to FFAICilent/crawlers.py
diff --git a/FFAI/crawlers_core.py b/FFAICilent/crawlers_core.py
similarity index 100%
rename from FFAI/crawlers_core.py
rename to FFAICilent/crawlers_core.py
diff --git a/FFAICilent/local.py b/FFAICilent/local.py
new file mode 100644
index 0000000..1c7ebd8
--- /dev/null
+++ b/FFAICilent/local.py
@@ -0,0 +1,17 @@
+import sqlite3
+from logger import log
+
+class LocalConnector:
+    def __init__(self, config):
+        self.db_path = config['Local']['db_path']
+        self.max_rows = int(config['Local']['max_rows'])
+
+    def execute(self, query):
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.cursor()
+                cursor.execute(query)
+                return cursor.fetchmany(self.max_rows)
+        except Exception as e:
+            log.error(f"Local DB error: {str(e)}")
+            raise ConnectionError("Local storage unavailable")
\ No newline at end of file
diff --git a/FFAICilent/logger.py b/FFAICilent/logger.py
new file mode 100644
index 0000000..ed7ff8d
--- /dev/null
+++ b/FFAICilent/logger.py
@@ -0,0 +1,27 @@
+from logger import log
+
+def main():
+    log.info("Program starting")
+    try:
+        # your code goes here
+        log.debug("Debug info: %s", some_var)  # type: ignore
+    except Exception as e:
+        log.error("Operation failed: %s", str(e), exc_info=True)
+        raise
+# Custom logger
+custom_log = setup_logging(
+    name="MyModule",
+    log_dir="custom_logs",
+    file_level=logging.INFO,
+    console_level=logging.DEBUG,
+    max_bytes=5*1024*1024,  # 5MB
+    backup_count=3
+)
+
+# Usage example
+custom_log.warning("Custom log message")
+# Usage example
+custom_log.warning("Custom log message")
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/FFAI/main.py b/FFAICilent/main.py
similarity index 73%
rename from FFAI/main.py
rename to FFAICilent/main.py
index 80c0d2a..410b8dd 100644
--- a/FFAI/main.py
+++ b/FFAICilent/main.py
@@ -2,7 +2,8 @@
 from analyzer import PureAnalyzer  # type: ignore
 from crawlers_core import CrawlerEngine
 from catch import CacheManager
-
+from manger import ConnectionManager
+from utils.logger import setup_logging
 
 class PureInfoHunter:
     def __init__(self):
@@ -41,6 +42,11 @@ class PureInfoHunter:
             f.write(content)
         print(f"Report saved to reports/{safe_query}_report.txt")
 
+    def sync_local_cache(self):
+        if self.mode == 'local':
+            cloud_data = self.cloud.execute("get_all_updates")
+            self.local.save_cache(cloud_data)
+
 if __name__ == "__main__":
     import sys
     import os
@@ -59,6 +65,27 @@ if __name__ == "__main__":
 
     hunter = PureInfoHunter()
 
+    setup_logging()
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('command', help='Command or query to execute')
+    parser.add_argument('--local', action='store_true',
+                        help='Force local mode')
+    args = parser.parse_args()
+
+    manager = ConnectionManager()
+    try:
+        if args.local:
+            manager.mode = 'local'
+
+        result = manager.execute(args.command)
+        print(f"✅ Execution succeeded (mode: {manager.mode.upper()})")
+        print(result)
+
+    except Exception as e:
+        print(f"❌ Execution failed: {str(e)}")
+        exit(1)
+
     if force_update:
         print("Force update mode (cache ignored)")
         data = hunter.crawler.crawl(query)  # use the method name that actually exists
diff --git a/FFAICilent/manger.py b/FFAICilent/manger.py
new file mode 100644
index 0000000..78e44e8
--- /dev/null
+++ b/FFAICilent/manger.py
@@ -0,0 +1,40 @@
+from cloud import CloudConnector
+from local import LocalConnector
+from ..FFAICilent.config.configloder import load_config
+
+class ConnectionManager:
+    def __init__(self):
+        self.config = load_config()
+        self.mode = self.config['Deployment']['default_mode']
+        self._init_connectors()
+
+    def _init_connectors(self):
+        self.cloud = CloudConnector(self.config)
+        self.local = LocalConnector(self.config)
+
+    def execute(self, command):
+        # try the default mode first
+        if self.mode == 'cloud':
+            try:
+                return self.cloud.execute(command)
+            except ConnectionError as e:
+                if self.config.getboolean('Deployment', 'local_fallback'):
+                    return self._fallback_to_local(command)
+                raise
+        else:
+            return self.local.execute(command)
+
+    def _fallback_to_local(self, command):
+        """Automatically fall back to local mode"""
+        from logger import log
+        log.warning("Falling back to local mode")
+        self.mode = 'local'
+
+        # translate the cloud command into a local query
+        local_query = self._adapt_command(command)
+        return self.local.execute(local_query)
+
+    def _adapt_command(self, cloud_cmd):
+        """Convert a cloud API command into a local query (example)"""
+        # add your business-logic translation rules here
+        return f"SELECT * FROM cache WHERE key='{cloud_cmd}'"
\ No newline at end of file
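
A minimal sketch (not part of the patch) of the cloud-first / local-fallback flow that ConnectionManager in FFAICilent/manger.py implements. StubCloud, StubLocal and FallbackManager are stand-in names invented for this illustration; the real module wires CloudConnector and LocalConnector from config.ini instead.

# Illustration only: stand-in connectors mimic CloudConnector / LocalConnector.
class StubCloud:
    def execute(self, command):
        # Simulate an unreachable cloud endpoint.
        raise ConnectionError("Cloud service unavailable")

class StubLocal:
    def execute(self, query):
        # Pretend the local SQLite cache answered the adapted query.
        return [("cache", query)]

class FallbackManager:
    def __init__(self, local_fallback=True):
        self.mode = 'cloud'                  # mirrors default_mode in config.ini
        self.local_fallback = local_fallback  # mirrors Deployment/local_fallback
        self.cloud = StubCloud()
        self.local = StubLocal()

    def execute(self, command):
        if self.mode == 'cloud':
            try:
                return self.cloud.execute(command)
            except ConnectionError:
                if not self.local_fallback:
                    raise
                self.mode = 'local'          # degrade for the rest of the session
                # adapt the cloud command to a local cache query, as _adapt_command does
                return self.local.execute(f"SELECT * FROM cache WHERE key='{command}'")
        return self.local.execute(command)

if __name__ == "__main__":
    mgr = FallbackManager()
    print(mgr.execute("get_all_updates"))  # served from the local stub after the cloud "outage"
    print(mgr.mode)                        # -> local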