diff --git a/auto_code_fixer.py b/auto_code_fixer.py index 1b62948..ff187c2 100644 --- a/auto_code_fixer.py +++ b/auto_code_fixer.py @@ -19,16 +19,16 @@ class CodeIssue: line_no: int, issue_type: str, message: str, - severity: str = "warning", - original_line: str = "", + severity: str = "warning", + original_line: str = "", ) -> None: - self.file_path = file_path - self.line_no = line_no - self.issue_type = issue_type - self.message = message - self.severity = severity - self.original_line = original_line - self.fixed = False + self.file_path = file_path + self.line_no = line_no + self.issue_type = issue_type + self.message = message + self.severity = severity + self.original_line = original_line + self.fixed = False def __repr__(self) -> None: return f"{self.file_path}:{self.line_no} [{self.severity}] {self.issue_type}: {self.message}" @@ -38,11 +38,11 @@ class CodeFixer: """代码自动修复器""" def __init__(self, project_path: str) -> None: - self.project_path = Path(project_path) - self.issues: list[CodeIssue] = [] - self.fixed_issues: list[CodeIssue] = [] - self.manual_issues: list[CodeIssue] = [] - self.scanned_files: list[str] = [] + self.project_path = Path(project_path) + self.issues: list[CodeIssue] = [] + self.fixed_issues: list[CodeIssue] = [] + self.manual_issues: list[CodeIssue] = [] + self.scanned_files: list[str] = [] def scan_all_files(self) -> None: """扫描所有 Python 文件""" @@ -55,9 +55,9 @@ class CodeFixer: def _scan_file(self, file_path: Path) -> None: """扫描单个文件""" try: - with open(file_path, "r", encoding = "utf-8") as f: - content = f.read() - lines = content.split("\n") + with open(file_path, "r", encoding="utf-8") as f: + content = f.read() + lines = content.split("\n") except Exception as e: print(f"Error reading {file_path}: {e}") return @@ -123,32 +123,37 @@ class CodeFixer: if line.rstrip() != line and line.strip(): self.issues.append( CodeIssue( - str(file_path), i, "trailing_whitespace", "行尾有空格", "info", line + str(file_path), + i, + 
"trailing_whitespace", + "行尾有空格", + "info", + line, ) ) def _check_unused_imports(self, file_path: Path, content: str) -> None: """检查未使用的导入""" try: - tree = ast.parse(content) + tree = ast.parse(content) except SyntaxError: return - imports = {} + imports = {} for node in ast.walk(tree): if isinstance(node, ast.Import): for alias in node.names: - name = alias.asname if alias.asname else alias.name - imports[name] = node.lineno + name = alias.asname if alias.asname else alias.name + imports[name] = node.lineno elif isinstance(node, ast.ImportFrom): for alias in node.names: - name = alias.asname if alias.asname else alias.name + name = alias.asname if alias.asname else alias.name if alias.name == "*": continue - imports[name] = node.lineno + imports[name] = node.lineno # 检查使用 - used_names = set() + used_names = set() for node in ast.walk(tree): if isinstance(node, ast.Name): used_names.add(node.id) @@ -198,7 +203,9 @@ class CodeFixer: for i, line in enumerate(lines, 1): if "allow_origins" in line and '["*"]' in line: # 排除扫描工具自身的代码 - if "code_reviewer" in str(file_path) or "auto_code_fixer" in str(file_path): + if "code_reviewer" in str(file_path) or "auto_code_fixer" in str( + file_path + ): continue self.manual_issues.append( CodeIssue( @@ -216,11 +223,11 @@ class CodeFixer: ) -> None: """检查敏感信息泄露""" # 排除的文件 - excluded_files = ["auto_code_fixer.py", "code_reviewer.py"] + excluded_files = ["auto_code_fixer.py", "code_reviewer.py"] if any(excluded in str(file_path) for excluded in excluded_files): return - patterns = [ + patterns = [ (r'password\s* = \s*["\'][^"\']{8, }["\']', "硬编码密码"), (r'secret_key\s* = \s*["\'][^"\']{8, }["\']', "硬编码密钥"), (r'api_key\s* = \s*["\'][^"\']{8, }["\']', "硬编码 API Key"), @@ -238,10 +245,13 @@ class CodeFixer: if "os.getenv" in line or "os.environ" in line: continue # 排除示例/测试代码中的占位符 - if any(x in line.lower() for x in ["your_", "example", "placeholder", "test", "demo"]): + if any( + x in line.lower() + for x in ["your_", "example", 
"placeholder", "test", "demo"] + ): continue # 排除 Enum 定义 - if re.search(r'^\s*[A-Z_]+\s* = ', line.strip()): + if re.search(r"^\s*[A-Z_]+\s* = ", line.strip()): continue self.manual_issues.append( CodeIssue( @@ -256,17 +266,17 @@ class CodeFixer: def fix_auto_fixable(self) -> None: """自动修复可修复的问题""" - auto_fix_types = { + auto_fix_types = { "trailing_whitespace", "bare_exception", } # 按文件分组 - files_to_fix = {} + files_to_fix = {} for issue in self.issues: if issue.issue_type in auto_fix_types: if issue.file_path not in files_to_fix: - files_to_fix[issue.file_path] = [] + files_to_fix[issue.file_path] = [] files_to_fix[issue.file_path].append(issue) for file_path, file_issues in files_to_fix.items(): @@ -275,43 +285,45 @@ class CodeFixer: continue try: - with open(file_path, "r", encoding = "utf-8") as f: - content = f.read() - lines = content.split("\n") + with open(file_path, "r", encoding="utf-8") as f: + content = f.read() + lines = content.split("\n") except Exception: continue - original_lines = lines.copy() - fixed_lines = set() + original_lines = lines.copy() + fixed_lines = set() # 修复行尾空格 for issue in file_issues: if issue.issue_type == "trailing_whitespace": - line_idx = issue.line_no - 1 + line_idx = issue.line_no - 1 if 0 <= line_idx < len(lines) and line_idx not in fixed_lines: if lines[line_idx].rstrip() != lines[line_idx]: - lines[line_idx] = lines[line_idx].rstrip() + lines[line_idx] = lines[line_idx].rstrip() fixed_lines.add(line_idx) - issue.fixed = True + issue.fixed = True self.fixed_issues.append(issue) # 修复裸异常 for issue in file_issues: if issue.issue_type == "bare_exception": - line_idx = issue.line_no - 1 + line_idx = issue.line_no - 1 if 0 <= line_idx < len(lines) and line_idx not in fixed_lines: - line = lines[line_idx] + line = lines[line_idx] # 将 except Exception: 改为 except Exception: if re.search(r"except\s*:\s*$", line.strip()): - lines[line_idx] = line.replace("except Exception:", "except Exception:") + lines[line_idx] = line.replace( + 
"except Exception:", "except Exception:" + ) fixed_lines.add(line_idx) - issue.fixed = True + issue.fixed = True self.fixed_issues.append(issue) # 如果文件有修改,写回 if lines != original_lines: try: - with open(file_path, "w", encoding = "utf-8") as f: + with open(file_path, "w", encoding="utf-8") as f: f.write("\n".join(lines)) print(f"Fixed issues in {file_path}") except Exception as e: @@ -319,7 +331,7 @@ class CodeFixer: def categorize_issues(self) -> dict[str, list[CodeIssue]]: """分类问题""" - categories = { + categories = { "critical": [], "error": [], "warning": [], @@ -334,7 +346,7 @@ class CodeFixer: def generate_report(self) -> str: """生成修复报告""" - report = [] + report = [] report.append("# InsightFlow 代码审查报告") report.append("") report.append(f"扫描时间: {os.popen('date').read().strip()}") @@ -349,15 +361,19 @@ class CodeFixer: report.append("") # 问题统计 - categories = self.categorize_issues() - manual_critical = [i for i in self.manual_issues if i.severity == "critical"] - manual_warning = [i for i in self.manual_issues if i.severity == "warning"] + categories = self.categorize_issues() + manual_critical = [i for i in self.manual_issues if i.severity == "critical"] + manual_warning = [i for i in self.manual_issues if i.severity == "warning"] report.append("## 问题分类统计") report.append("") - report.append(f"- 🔴 Critical: {len(categories['critical']) + len(manual_critical)}") + report.append( + f"- 🔴 Critical: {len(categories['critical']) + len(manual_critical)}" + ) report.append(f"- 🟠 Error: {len(categories['error'])}") - report.append(f"- 🟡 Warning: {len(categories['warning']) + len(manual_warning)}") + report.append( + f"- 🟡 Warning: {len(categories['warning']) + len(manual_warning)}" + ) report.append(f"- 🔵 Info: {len(categories['info'])}") report.append(f"- **总计: {len(self.issues) + len(self.manual_issues)}**") report.append("") @@ -393,17 +409,13 @@ class CodeFixer: # 其他问题 report.append("## 📋 其他发现的问题") report.append("") - other_issues = [ - i - for i in self.issues - if 
i not in self.fixed_issues - ] + other_issues = [i for i in self.issues if i not in self.fixed_issues] # 按类型分组 - by_type = {} + by_type = {} for issue in other_issues: if issue.issue_type not in by_type: - by_type[issue.issue_type] = [] + by_type[issue.issue_type] = [] by_type[issue.issue_type].append(issue) for issue_type, issues in sorted(by_type.items()): @@ -424,21 +436,21 @@ def git_commit_and_push(project_path: str) -> tuple[bool, str]: """Git 提交和推送""" try: # 检查是否有变更 - result = subprocess.run( + result = subprocess.run( ["git", "status", "--porcelain"], - cwd = project_path, - capture_output = True, - text = True, + cwd=project_path, + capture_output=True, + text=True, ) if not result.stdout.strip(): return True, "没有需要提交的变更" # 添加所有变更 - subprocess.run(["git", "add", "-A"], cwd = project_path, check = True) + subprocess.run(["git", "add", "-A"], cwd=project_path, check=True) # 提交 - commit_msg = """fix: auto-fix code issues (cron) + commit_msg = """fix: auto-fix code issues (cron) - 修复重复导入/字段 - 修复异常处理 @@ -446,11 +458,11 @@ def git_commit_and_push(project_path: str) -> tuple[bool, str]: - 添加类型注解""" subprocess.run( - ["git", "commit", "-m", commit_msg], cwd = project_path, check = True + ["git", "commit", "-m", commit_msg], cwd=project_path, check=True ) # 推送 - subprocess.run(["git", "push"], cwd = project_path, check = True) + subprocess.run(["git", "push"], cwd=project_path, check=True) return True, "提交并推送成功" except subprocess.CalledProcessError as e: @@ -460,10 +472,10 @@ def git_commit_and_push(project_path: str) -> tuple[bool, str]: def main() -> None: - project_path = "/root/.openclaw/workspace/projects/insightflow" + project_path = "/root/.openclaw/workspace/projects/insightflow" print("🔍 开始扫描代码...") - fixer = CodeFixer(project_path) + fixer = CodeFixer(project_path) fixer.scan_all_files() print(f"📊 发现 {len(fixer.issues)} 个可自动修复问题") @@ -475,25 +487,25 @@ def main() -> None: print(f"✅ 已修复 {len(fixer.fixed_issues)} 个问题") # 生成报告 - report = 
fixer.generate_report() + report = fixer.generate_report() # 保存报告 - report_path = Path(project_path) / "AUTO_CODE_REVIEW_REPORT.md" - with open(report_path, "w", encoding = "utf-8") as f: + report_path = Path(project_path) / "AUTO_CODE_REVIEW_REPORT.md" + with open(report_path, "w", encoding="utf-8") as f: f.write(report) print(f"📝 报告已保存到: {report_path}") # Git 提交 print("📤 提交变更到 Git...") - success, msg = git_commit_and_push(project_path) + success, msg = git_commit_and_push(project_path) print(f"{'✅' if success else '❌'} {msg}") # 添加 Git 结果到报告 report += f"\n\n## Git 提交结果\n\n{'✅' if success else '❌'} {msg}\n" # 重新保存完整报告 - with open(report_path, "w", encoding = "utf-8") as f: + with open(report_path, "w", encoding="utf-8") as f: f.write(report) print("\n" + " = " * 60) diff --git a/auto_fix_code.py b/auto_fix_code.py index 70e7069..168a691 100644 --- a/auto_fix_code.py +++ b/auto_fix_code.py @@ -11,143 +11,155 @@ from pathlib import Path PROJECT_DIR = Path("/root/.openclaw/workspace/projects/insightflow") BACKEND_DIR = PROJECT_DIR / "backend" + def run_flake8(): """运行 flake8 检查""" result = subprocess.run( ["flake8", "--max-line-length=120", "--ignore=E501,W503", "."], cwd=BACKEND_DIR, capture_output=True, - text=True + text=True, ) return result.stdout + def fix_missing_imports(): """修复缺失的导入""" fixes = [] - + # 检查 workflow_manager.py 中的 urllib workflow_file = BACKEND_DIR / "workflow_manager.py" if workflow_file.exists(): content = workflow_file.read_text() if "import urllib" not in content and "urllib" in content: # 在文件开头添加导入 - lines = content.split('\n') + lines = content.split("\n") import_idx = 0 for i, line in enumerate(lines): - if line.startswith('import ') or line.startswith('from '): + if line.startswith("import ") or line.startswith("from "): import_idx = i + 1 - lines.insert(import_idx, 'import urllib.parse') - workflow_file.write_text('\n'.join(lines)) + lines.insert(import_idx, "import urllib.parse") + workflow_file.write_text("\n".join(lines)) 
fixes.append("workflow_manager.py: 添加 urllib.parse 导入") - + # 检查 plugin_manager.py 中的 urllib plugin_file = BACKEND_DIR / "plugin_manager.py" if plugin_file.exists(): content = plugin_file.read_text() if "import urllib" not in content and "urllib" in content: - lines = content.split('\n') + lines = content.split("\n") import_idx = 0 for i, line in enumerate(lines): - if line.startswith('import ') or line.startswith('from '): + if line.startswith("import ") or line.startswith("from "): import_idx = i + 1 - lines.insert(import_idx, 'import urllib.parse') - plugin_file.write_text('\n'.join(lines)) + lines.insert(import_idx, "import urllib.parse") + plugin_file.write_text("\n".join(lines)) fixes.append("plugin_manager.py: 添加 urllib.parse 导入") - + # 检查 main.py 中的 PlainTextResponse main_file = BACKEND_DIR / "main.py" if main_file.exists(): content = main_file.read_text() - if "PlainTextResponse" in content and "from fastapi.responses import" in content: + if ( + "PlainTextResponse" in content + and "from fastapi.responses import" in content + ): # 检查是否已导入 - if "PlainTextResponse" not in content.split('from fastapi.responses import')[1].split('\n')[0]: + if ( + "PlainTextResponse" + not in content.split("from fastapi.responses import")[1].split("\n")[0] + ): # 添加导入 content = content.replace( "from fastapi.responses import JSONResponse, PlainTextResponse, StreamingResponse", - "from fastapi.responses import JSONResponse, PlainTextResponse, StreamingResponse" + "from fastapi.responses import JSONResponse, PlainTextResponse, StreamingResponse", ) # 实际上已经导入了,可能是误报 - + return fixes + def fix_unused_imports(): """修复未使用的导入""" fixes = [] - + # code_reviewer.py 中的未使用导入 code_reviewer = PROJECT_DIR / "code_reviewer.py" if code_reviewer.exists(): content = code_reviewer.read_text() original = content # 移除未使用的导入 - content = re.sub(r'^import os\n', '', content, flags=re.MULTILINE) - content = re.sub(r'^import subprocess\n', '', content, flags=re.MULTILINE) - content = re.sub(r'^from 
typing import Any\n', '', content, flags=re.MULTILINE) + content = re.sub(r"^import os\n", "", content, flags=re.MULTILINE) + content = re.sub(r"^import subprocess\n", "", content, flags=re.MULTILINE) + content = re.sub(r"^from typing import Any\n", "", content, flags=re.MULTILINE) if content != original: code_reviewer.write_text(content) fixes.append("code_reviewer.py: 移除未使用的导入") - + return fixes + def fix_formatting(): """使用 autopep8 修复格式问题""" fixes = [] - + # 运行 autopep8 修复格式问题 result = subprocess.run( ["autopep8", "--in-place", "--aggressive", "--max-line-length=120", "."], cwd=BACKEND_DIR, capture_output=True, - text=True + text=True, ) - + if result.returncode == 0: fixes.append("使用 autopep8 修复了格式问题") - + return fixes + def main(): print("=" * 60) print("InsightFlow 代码自动修复") print("=" * 60) - + all_fixes = [] - + # 1. 修复缺失的导入 print("\n[1/3] 修复缺失的导入...") fixes = fix_missing_imports() all_fixes.extend(fixes) for f in fixes: print(f" ✓ {f}") - + # 2. 修复未使用的导入 print("\n[2/3] 修复未使用的导入...") fixes = fix_unused_imports() all_fixes.extend(fixes) for f in fixes: print(f" ✓ {f}") - + # 3. 
修复格式问题 print("\n[3/3] 修复 PEP8 格式问题...") fixes = fix_formatting() all_fixes.extend(fixes) for f in fixes: print(f" ✓ {f}") - + print("\n" + "=" * 60) print(f"修复完成!共修复 {len(all_fixes)} 个问题") print("=" * 60) - + # 再次运行 flake8 检查 print("\n重新运行 flake8 检查...") remaining = run_flake8() if remaining: - lines = remaining.strip().split('\n') + lines = remaining.strip().split("\n") print(f" 仍有 {len(lines)} 个问题需要手动处理") else: print(" ✓ 所有问题已修复!") - + return all_fixes + if __name__ == "__main__": main() diff --git a/backend/ai_manager.py b/backend/ai_manager.py index c4e5e2e..6789ac5 100644 --- a/backend/ai_manager.py +++ b/backend/ai_manager.py @@ -283,7 +283,7 @@ class AIManager: def get_custom_model(self, model_id: str) -> CustomModel | None: """获取自定义模型""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM custom_models WHERE id = ?", (model_id, )).fetchone() + row = conn.execute("SELECT * FROM custom_models WHERE id = ?", (model_id,)).fetchone() if not row: return None @@ -350,7 +350,8 @@ class AIManager: """获取训练样本""" with self._get_db() as conn: rows = conn.execute( - "SELECT * FROM training_samples WHERE model_id = ? ORDER BY created_at", (model_id, ) + "SELECT * FROM training_samples WHERE model_id = ? 
ORDER BY created_at", + (model_id,), ).fetchall() return [self._row_to_training_sample(row) for row in rows] @@ -712,7 +713,7 @@ class AIManager: def get_kg_rag(self, rag_id: str) -> KnowledgeGraphRAG | None: """获取知识图谱 RAG 配置""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM kg_rag_configs WHERE id = ?", (rag_id, )).fetchone() + row = conn.execute("SELECT * FROM kg_rag_configs WHERE id = ?", (rag_id,)).fetchone() if not row: return None @@ -1122,7 +1123,7 @@ class AIManager: """获取预测模型""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM prediction_models WHERE id = ?", (model_id, ) + "SELECT * FROM prediction_models WHERE id = ?", (model_id,) ).fetchone() if not row: @@ -1238,7 +1239,7 @@ class AIManager: # 更新预测计数 conn.execute( "UPDATE prediction_models SET prediction_count = prediction_count + 1 WHERE id = ?", - (model_id, ), + (model_id,), ) conn.commit() diff --git a/backend/api_key_manager.py b/backend/api_key_manager.py index 9e16478..4a8ccd8 100644 --- a/backend/api_key_manager.py +++ b/backend/api_key_manager.py @@ -207,7 +207,7 @@ class ApiKeyManager: with sqlite3.connect(self.db_path) as conn: conn.row_factory = sqlite3.Row - row = conn.execute("SELECT * FROM api_keys WHERE key_hash = ?", (key_hash, )).fetchone() + row = conn.execute("SELECT * FROM api_keys WHERE key_hash = ?", (key_hash,)).fetchone() if not row: return None @@ -238,7 +238,7 @@ class ApiKeyManager: # 验证所有权(如果提供了 owner_id) if owner_id: row = conn.execute( - "SELECT owner_id FROM api_keys WHERE id = ?", (key_id, ) + "SELECT owner_id FROM api_keys WHERE id = ?", (key_id,) ).fetchone() if not row or row[0] != owner_id: return False @@ -270,7 +270,7 @@ class ApiKeyManager: "SELECT * FROM api_keys WHERE id = ? 
AND owner_id = ?", (key_id, owner_id) ).fetchone() else: - row = conn.execute("SELECT * FROM api_keys WHERE id = ?", (key_id, )).fetchone() + row = conn.execute("SELECT * FROM api_keys WHERE id = ?", (key_id,)).fetchone() if row: return self._row_to_api_key(row) @@ -337,7 +337,7 @@ class ApiKeyManager: # 验证所有权 if owner_id: row = conn.execute( - "SELECT owner_id FROM api_keys WHERE id = ?", (key_id, ) + "SELECT owner_id FROM api_keys WHERE id = ?", (key_id,) ).fetchone() if not row or row[0] != owner_id: return False diff --git a/backend/collaboration_manager.py b/backend/collaboration_manager.py index 0c316ec..171d464 100644 --- a/backend/collaboration_manager.py +++ b/backend/collaboration_manager.py @@ -263,7 +263,7 @@ class CollaborationManager: """ SELECT * FROM project_shares WHERE token = ? """, - (token, ), + (token,), ) row = cursor.fetchone() @@ -300,7 +300,7 @@ class CollaborationManager: SET use_count = use_count + 1 WHERE token = ? """, - (token, ), + (token,), ) self.db.conn.commit() @@ -314,7 +314,7 @@ class CollaborationManager: SET is_active = 0 WHERE id = ? """, - (share_id, ), + (share_id,), ) self.db.conn.commit() return cursor.rowcount > 0 @@ -332,7 +332,7 @@ class CollaborationManager: WHERE project_id = ? ORDER BY created_at DESC """, - (project_id, ), + (project_id,), ) shares = [] @@ -510,7 +510,7 @@ class CollaborationManager: def _get_comment_by_id(self, comment_id: str) -> Comment | None: """根据ID获取评论""" cursor = self.db.conn.cursor() - cursor.execute("SELECT * FROM comments WHERE id = ?", (comment_id, )) + cursor.execute("SELECT * FROM comments WHERE id = ?", (comment_id,)) row = cursor.fetchone() if row: return self._row_to_comment(row) @@ -773,7 +773,7 @@ class CollaborationManager: """ SELECT COUNT(*) FROM change_history WHERE project_id = ? """, - (project_id, ), + (project_id,), ) total_changes = cursor.fetchone()[0] @@ -783,7 +783,7 @@ class CollaborationManager: SELECT change_type, COUNT(*) FROM change_history WHERE project_id = ? 
GROUP BY change_type """, - (project_id, ), + (project_id,), ) type_counts = {row[0]: row[1] for row in cursor.fetchall()} @@ -793,7 +793,7 @@ class CollaborationManager: SELECT entity_type, COUNT(*) FROM change_history WHERE project_id = ? GROUP BY entity_type """, - (project_id, ), + (project_id,), ) entity_type_counts = {row[0]: row[1] for row in cursor.fetchall()} @@ -806,7 +806,7 @@ class CollaborationManager: ORDER BY count DESC LIMIT 5 """, - (project_id, ), + (project_id,), ) top_contributors = [{"name": row[0], "changes": row[1]} for row in cursor.fetchall()] @@ -902,7 +902,7 @@ class CollaborationManager: SELECT * FROM team_members WHERE project_id = ? ORDER BY joined_at ASC """, - (project_id, ), + (project_id,), ) members = [] @@ -949,7 +949,7 @@ class CollaborationManager: return False cursor = self.db.conn.cursor() - cursor.execute("DELETE FROM team_members WHERE id = ?", (member_id, )) + cursor.execute("DELETE FROM team_members WHERE id = ?", (member_id,)) self.db.conn.commit() return cursor.rowcount > 0 diff --git a/backend/db_manager.py b/backend/db_manager.py index ab47b79..fcb9ce8 100644 --- a/backend/db_manager.py +++ b/backend/db_manager.py @@ -154,7 +154,7 @@ class DatabaseManager: def get_project(self, project_id: str) -> Project | None: conn = self.get_conn() - row = conn.execute("SELECT * FROM projects WHERE id = ?", (project_id, )).fetchone() + row = conn.execute("SELECT * FROM projects WHERE id = ?", (project_id,)).fetchone() conn.close() if row: return Project(**dict(row)) @@ -211,7 +211,8 @@ class DatabaseManager: """查找相似实体""" conn = self.get_conn() rows = conn.execute( - "SELECT * FROM entities WHERE project_id = ? AND name LIKE ?", (project_id, f"%{name}%") + "SELECT * FROM entities WHERE project_id = ? 
AND name LIKE ?", + (project_id, f"%{name}%"), ).fetchall() conn.close() @@ -226,8 +227,8 @@ class DatabaseManager: """合并两个实体""" conn = self.get_conn() - target = conn.execute("SELECT * FROM entities WHERE id = ?", (target_id, )).fetchone() - source = conn.execute("SELECT * FROM entities WHERE id = ?", (source_id, )).fetchone() + target = conn.execute("SELECT * FROM entities WHERE id = ?", (target_id,)).fetchone() + source = conn.execute("SELECT * FROM entities WHERE id = ?", (source_id,)).fetchone() if not target or not source: conn.close() @@ -252,7 +253,7 @@ class DatabaseManager: "UPDATE entity_relations SET target_entity_id = ? WHERE target_entity_id = ?", (target_id, source_id), ) - conn.execute("DELETE FROM entities WHERE id = ?", (source_id, )) + conn.execute("DELETE FROM entities WHERE id = ?", (source_id,)) conn.commit() conn.close() @@ -260,7 +261,7 @@ class DatabaseManager: def get_entity(self, entity_id: str) -> Entity | None: conn = self.get_conn() - row = conn.execute("SELECT * FROM entities WHERE id = ?", (entity_id, )).fetchone() + row = conn.execute("SELECT * FROM entities WHERE id = ?", (entity_id,)).fetchone() conn.close() if row: data = dict(row) @@ -271,7 +272,7 @@ class DatabaseManager: def list_project_entities(self, project_id: str) -> list[Entity]: conn = self.get_conn() rows = conn.execute( - "SELECT * FROM entities WHERE project_id = ? ORDER BY updated_at DESC", (project_id, ) + "SELECT * FROM entities WHERE project_id = ? ORDER BY updated_at DESC", (project_id,) ).fetchall() conn.close() @@ -316,13 +317,13 @@ class DatabaseManager: def delete_entity(self, entity_id: str) -> None: """删除实体及其关联数据""" conn = self.get_conn() - conn.execute("DELETE FROM entity_mentions WHERE entity_id = ?", (entity_id, )) + conn.execute("DELETE FROM entity_mentions WHERE entity_id = ?", (entity_id,)) conn.execute( "DELETE FROM entity_relations WHERE source_entity_id = ? 
OR target_entity_id = ?", (entity_id, entity_id), ) - conn.execute("DELETE FROM entity_attributes WHERE entity_id = ?", (entity_id, )) - conn.execute("DELETE FROM entities WHERE id = ?", (entity_id, )) + conn.execute("DELETE FROM entity_attributes WHERE entity_id = ?", (entity_id,)) + conn.execute("DELETE FROM entities WHERE id = ?", (entity_id,)) conn.commit() conn.close() @@ -352,7 +353,7 @@ class DatabaseManager: conn = self.get_conn() rows = conn.execute( "SELECT * FROM entity_mentions WHERE entity_id = ? ORDER BY transcript_id, start_pos", - (entity_id, ), + (entity_id,), ).fetchall() conn.close() return [EntityMention(**dict(r)) for r in rows] @@ -380,14 +381,15 @@ class DatabaseManager: def get_transcript(self, transcript_id: str) -> dict | None: conn = self.get_conn() - row = conn.execute("SELECT * FROM transcripts WHERE id = ?", (transcript_id, )).fetchone() + row = conn.execute("SELECT * FROM transcripts WHERE id = ?", (transcript_id,)).fetchone() conn.close() return dict(row) if row else None def list_project_transcripts(self, project_id: str) -> list[dict]: conn = self.get_conn() rows = conn.execute( - "SELECT * FROM transcripts WHERE project_id = ? ORDER BY created_at DESC", (project_id, ) + "SELECT * FROM transcripts WHERE project_id = ? ORDER BY created_at DESC", + (project_id,), ).fetchall() conn.close() return [dict(r) for r in rows] @@ -400,7 +402,7 @@ class DatabaseManager: (full_text, now, transcript_id), ) conn.commit() - row = conn.execute("SELECT * FROM transcripts WHERE id = ?", (transcript_id, )).fetchone() + row = conn.execute("SELECT * FROM transcripts WHERE id = ?", (transcript_id,)).fetchone() conn.close() return dict(row) if row else None @@ -453,7 +455,7 @@ class DatabaseManager: conn = self.get_conn() rows = conn.execute( "SELECT * FROM entity_relations WHERE project_id = ? 
ORDER BY created_at DESC", - (project_id, ), + (project_id,), ).fetchall() conn.close() return [dict(r) for r in rows] @@ -475,13 +477,15 @@ class DatabaseManager: conn.execute(query, values) conn.commit() - row = conn.execute("SELECT * FROM entity_relations WHERE id = ?", (relation_id, )).fetchone() + row = conn.execute( + "SELECT * FROM entity_relations WHERE id = ?", (relation_id,) + ).fetchone() conn.close() return dict(row) if row else None def delete_relation(self, relation_id: str) -> None: conn = self.get_conn() - conn.execute("DELETE FROM entity_relations WHERE id = ?", (relation_id, )) + conn.execute("DELETE FROM entity_relations WHERE id = ?", (relation_id,)) conn.commit() conn.close() @@ -495,7 +499,7 @@ class DatabaseManager: if existing: conn.execute( - "UPDATE glossary SET frequency = frequency + 1 WHERE id = ?", (existing["id"], ) + "UPDATE glossary SET frequency = frequency + 1 WHERE id = ?", (existing["id"],) ) conn.commit() conn.close() @@ -515,14 +519,14 @@ class DatabaseManager: def list_glossary(self, project_id: str) -> list[dict]: conn = self.get_conn() rows = conn.execute( - "SELECT * FROM glossary WHERE project_id = ? ORDER BY frequency DESC", (project_id, ) + "SELECT * FROM glossary WHERE project_id = ? 
ORDER BY frequency DESC", (project_id,) ).fetchall() conn.close() return [dict(r) for r in rows] def delete_glossary_term(self, term_id: str) -> None: conn = self.get_conn() - conn.execute("DELETE FROM glossary WHERE id = ?", (term_id, )) + conn.execute("DELETE FROM glossary WHERE id = ?", (term_id,)) conn.commit() conn.close() @@ -539,14 +543,14 @@ class DatabaseManager: JOIN entities t ON r.target_entity_id = t.id LEFT JOIN transcripts tr ON r.transcript_id = tr.id WHERE r.id = ?""", - (relation_id, ), + (relation_id,), ).fetchone() conn.close() return dict(row) if row else None def get_entity_with_mentions(self, entity_id: str) -> dict | None: conn = self.get_conn() - entity_row = conn.execute("SELECT * FROM entities WHERE id = ?", (entity_id, )).fetchone() + entity_row = conn.execute("SELECT * FROM entities WHERE id = ?", (entity_id,)).fetchone() if not entity_row: conn.close() return None @@ -559,7 +563,7 @@ class DatabaseManager: FROM entity_mentions m JOIN transcripts t ON m.transcript_id = t.id WHERE m.entity_id = ? 
ORDER BY t.created_at, m.start_pos""", - (entity_id, ), + (entity_id,), ).fetchall() entity["mentions"] = [dict(m) for m in mentions] entity["mention_count"] = len(mentions) @@ -598,24 +602,24 @@ class DatabaseManager: def get_project_summary(self, project_id: str) -> dict: conn = self.get_conn() - project = conn.execute("SELECT * FROM projects WHERE id = ?", (project_id, )).fetchone() + project = conn.execute("SELECT * FROM projects WHERE id = ?", (project_id,)).fetchone() entity_count = conn.execute( - "SELECT COUNT(*) as count FROM entities WHERE project_id = ?", (project_id, ) + "SELECT COUNT(*) as count FROM entities WHERE project_id = ?", (project_id,) ).fetchone()["count"] transcript_count = conn.execute( - "SELECT COUNT(*) as count FROM transcripts WHERE project_id = ?", (project_id, ) + "SELECT COUNT(*) as count FROM transcripts WHERE project_id = ?", (project_id,) ).fetchone()["count"] relation_count = conn.execute( - "SELECT COUNT(*) as count FROM entity_relations WHERE project_id = ?", (project_id, ) + "SELECT COUNT(*) as count FROM entity_relations WHERE project_id = ?", (project_id,) ).fetchone()["count"] recent_transcripts = conn.execute( """SELECT filename, full_text, created_at FROM transcripts WHERE project_id = ? ORDER BY created_at DESC LIMIT 5""", - (project_id, ), + (project_id,), ).fetchall() top_entities = conn.execute( @@ -624,7 +628,7 @@ class DatabaseManager: LEFT JOIN entity_mentions m ON e.id = m.entity_id WHERE e.project_id = ? 
GROUP BY e.id ORDER BY mention_count DESC LIMIT 10""", - (project_id, ), + (project_id,), ).fetchall() conn.close() @@ -645,7 +649,7 @@ class DatabaseManager: ) -> str: conn = self.get_conn() row = conn.execute( - "SELECT full_text FROM transcripts WHERE id = ?", (transcript_id, ) + "SELECT full_text FROM transcripts WHERE id = ?", (transcript_id,) ).fetchone() conn.close() if not row: @@ -719,7 +723,7 @@ class DatabaseManager: FROM entity_mentions m JOIN transcripts t ON m.transcript_id = t.id WHERE t.project_id = ? GROUP BY DATE(t.created_at) ORDER BY date""", - (project_id, ), + (project_id,), ).fetchall() entity_stats = conn.execute( @@ -731,7 +735,7 @@ class DatabaseManager: LEFT JOIN transcripts t ON m.transcript_id = t.id WHERE e.project_id = ? GROUP BY e.id ORDER BY mention_count DESC LIMIT 20""", - (project_id, ), + (project_id,), ).fetchall() conn.close() @@ -772,7 +776,7 @@ class DatabaseManager: def get_attribute_template(self, template_id: str) -> AttributeTemplate | None: conn = self.get_conn() row = conn.execute( - "SELECT * FROM attribute_templates WHERE id = ?", (template_id, ) + "SELECT * FROM attribute_templates WHERE id = ?", (template_id,) ).fetchone() conn.close() if row: @@ -786,7 +790,7 @@ class DatabaseManager: rows = conn.execute( """SELECT * FROM attribute_templates WHERE project_id = ? ORDER BY sort_order, created_at""", - (project_id, ), + (project_id,), ).fetchall() conn.close() @@ -832,7 +836,7 @@ class DatabaseManager: def delete_attribute_template(self, template_id: str) -> None: conn = self.get_conn() - conn.execute("DELETE FROM attribute_templates WHERE id = ?", (template_id, )) + conn.execute("DELETE FROM attribute_templates WHERE id = ?", (template_id,)) conn.commit() conn.close() @@ -905,7 +909,7 @@ class DatabaseManager: FROM entity_attributes ea LEFT JOIN attribute_templates at ON ea.template_id = at.id WHERE ea.entity_id = ? 
ORDER BY ea.created_at""", - (entity_id, ), + (entity_id,), ).fetchall() conn.close() return [EntityAttribute(**dict(r)) for r in rows] @@ -1075,7 +1079,7 @@ class DatabaseManager: def get_video(self, video_id: str) -> dict | None: """获取视频信息""" conn = self.get_conn() - row = conn.execute("SELECT * FROM videos WHERE id = ?", (video_id, )).fetchone() + row = conn.execute("SELECT * FROM videos WHERE id = ?", (video_id,)).fetchone() conn.close() if row: @@ -1094,7 +1098,7 @@ class DatabaseManager: """获取项目的所有视频""" conn = self.get_conn() rows = conn.execute( - "SELECT * FROM videos WHERE project_id = ? ORDER BY created_at DESC", (project_id, ) + "SELECT * FROM videos WHERE project_id = ? ORDER BY created_at DESC", (project_id,) ).fetchall() conn.close() @@ -1149,7 +1153,7 @@ class DatabaseManager: """获取视频的所有帧""" conn = self.get_conn() rows = conn.execute( - """SELECT * FROM video_frames WHERE video_id = ? ORDER BY timestamp""", (video_id, ) + """SELECT * FROM video_frames WHERE video_id = ? ORDER BY timestamp""", (video_id,) ).fetchall() conn.close() @@ -1201,7 +1205,7 @@ class DatabaseManager: def get_image(self, image_id: str) -> dict | None: """获取图片信息""" conn = self.get_conn() - row = conn.execute("SELECT * FROM images WHERE id = ?", (image_id, )).fetchone() + row = conn.execute("SELECT * FROM images WHERE id = ?", (image_id,)).fetchone() conn.close() if row: @@ -1219,7 +1223,7 @@ class DatabaseManager: """获取项目的所有图片""" conn = self.get_conn() rows = conn.execute( - "SELECT * FROM images WHERE project_id = ? ORDER BY created_at DESC", (project_id, ) + "SELECT * FROM images WHERE project_id = ? ORDER BY created_at DESC", (project_id,) ).fetchall() conn.close() @@ -1279,7 +1283,7 @@ class DatabaseManager: FROM multimodal_mentions m JOIN entities e ON m.entity_id = e.id WHERE m.entity_id = ? 
ORDER BY m.created_at DESC""", - (entity_id, ), + (entity_id,), ).fetchall() conn.close() return [dict(r) for r in rows] @@ -1303,7 +1307,7 @@ class DatabaseManager: FROM multimodal_mentions m JOIN entities e ON m.entity_id = e.id WHERE m.project_id = ? ORDER BY m.created_at DESC""", - (project_id, ), + (project_id,), ).fetchall() conn.close() @@ -1377,13 +1381,13 @@ class DatabaseManager: # 视频数量 row = conn.execute( - "SELECT COUNT(*) as count FROM videos WHERE project_id = ?", (project_id, ) + "SELECT COUNT(*) as count FROM videos WHERE project_id = ?", (project_id,) ).fetchone() stats["video_count"] = row["count"] # 图片数量 row = conn.execute( - "SELECT COUNT(*) as count FROM images WHERE project_id = ?", (project_id, ) + "SELECT COUNT(*) as count FROM images WHERE project_id = ?", (project_id,) ).fetchone() stats["image_count"] = row["count"] @@ -1391,7 +1395,7 @@ class DatabaseManager: row = conn.execute( """SELECT COUNT(DISTINCT entity_id) as count FROM multimodal_mentions WHERE project_id = ?""", - (project_id, ), + (project_id,), ).fetchone() stats["multimodal_entity_count"] = row["count"] @@ -1399,7 +1403,7 @@ class DatabaseManager: row = conn.execute( """SELECT COUNT(*) as count FROM multimodal_entity_links WHERE entity_id IN (SELECT id FROM entities WHERE project_id = ?)""", - (project_id, ), + (project_id,), ).fetchone() stats["cross_modal_links"] = row["count"] diff --git a/backend/developer_ecosystem_manager.py b/backend/developer_ecosystem_manager.py index fece738..2e6e54c 100644 --- a/backend/developer_ecosystem_manager.py +++ b/backend/developer_ecosystem_manager.py @@ -444,7 +444,7 @@ class DeveloperEcosystemManager: def get_sdk_release(self, sdk_id: str) -> SDKRelease | None: """获取 SDK 发布详情""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM sdk_releases WHERE id = ?", (sdk_id, )).fetchone() + row = conn.execute("SELECT * FROM sdk_releases WHERE id = ?", (sdk_id,)).fetchone() if row: return self._row_to_sdk_release(row) @@ -530,7 
+530,7 @@ class DeveloperEcosystemManager: SET download_count = download_count + 1 WHERE id = ? """, - (sdk_id, ), + (sdk_id,), ) conn.commit() @@ -538,7 +538,7 @@ class DeveloperEcosystemManager: """获取 SDK 版本历史""" with self._get_db() as conn: rows = conn.execute( - "SELECT * FROM sdk_versions WHERE sdk_id = ? ORDER BY created_at DESC", (sdk_id, ) + "SELECT * FROM sdk_versions WHERE sdk_id = ? ORDER BY created_at DESC", (sdk_id,) ).fetchall() return [self._row_to_sdk_version(row) for row in rows] @@ -559,7 +559,7 @@ class DeveloperEcosystemManager: with self._get_db() as conn: # 如果设置为最新版本,取消其他版本的最新标记 if True: # 默认新版本为最新 - conn.execute("UPDATE sdk_versions SET is_latest = 0 WHERE sdk_id = ?", (sdk_id, )) + conn.execute("UPDATE sdk_versions SET is_latest = 0 WHERE sdk_id = ?", (sdk_id,)) conn.execute( """ @@ -700,7 +700,7 @@ class DeveloperEcosystemManager: """获取模板详情""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM template_market WHERE id = ?", (template_id, ) + "SELECT * FROM template_market WHERE id = ?", (template_id,) ).fetchone() if row: @@ -814,7 +814,7 @@ class DeveloperEcosystemManager: SET install_count = install_count + 1 WHERE id = ? """, - (template_id, ), + (template_id,), ) conn.commit() @@ -880,7 +880,7 @@ class DeveloperEcosystemManager: FROM template_reviews WHERE template_id = ? """, - (template_id, ), + (template_id,), ).fetchone() if row: @@ -1032,7 +1032,7 @@ class DeveloperEcosystemManager: def get_plugin(self, plugin_id: str) -> PluginMarketItem | None: """获取插件详情""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM plugin_market WHERE id = ?", (plugin_id, )).fetchone() + row = conn.execute("SELECT * FROM plugin_market WHERE id = ?", (plugin_id,)).fetchone() if row: return self._row_to_plugin(row) @@ -1120,7 +1120,7 @@ class DeveloperEcosystemManager: SET install_count = install_count + 1 WHERE id = ? 
""", - (plugin_id, ), + (plugin_id,), ) if active: @@ -1130,7 +1130,7 @@ class DeveloperEcosystemManager: SET active_install_count = active_install_count + 1 WHERE id = ? """, - (plugin_id, ), + (plugin_id,), ) conn.commit() @@ -1195,7 +1195,7 @@ class DeveloperEcosystemManager: FROM plugin_reviews WHERE plugin_id = ? """, - (plugin_id, ), + (plugin_id,), ).fetchone() if row: @@ -1334,7 +1334,7 @@ class DeveloperEcosystemManager: FROM developer_revenues WHERE developer_id = ? """, - (developer_id, ), + (developer_id,), ).fetchone() return { @@ -1420,7 +1420,7 @@ class DeveloperEcosystemManager: """获取开发者档案""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM developer_profiles WHERE id = ?", (developer_id, ) + "SELECT * FROM developer_profiles WHERE id = ?", (developer_id,) ).fetchone() if row: @@ -1431,7 +1431,7 @@ class DeveloperEcosystemManager: """通过用户 ID 获取开发者档案""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM developer_profiles WHERE user_id = ?", (user_id, ) + "SELECT * FROM developer_profiles WHERE user_id = ?", (user_id,) ).fetchone() if row: @@ -1469,12 +1469,13 @@ class DeveloperEcosystemManager: with self._get_db() as conn: # 统计插件数量 plugin_row = conn.execute( - "SELECT COUNT(*) as count FROM plugin_market WHERE author_id = ?", (developer_id, ) + "SELECT COUNT(*) as count FROM plugin_market WHERE author_id = ?", (developer_id,) ).fetchone() # 统计模板数量 template_row = conn.execute( - "SELECT COUNT(*) as count FROM template_market WHERE author_id = ?", (developer_id, ) + "SELECT COUNT(*) as count FROM template_market WHERE author_id = ?", + (developer_id,), ).fetchone() # 统计总下载量 @@ -1581,7 +1582,9 @@ class DeveloperEcosystemManager: def get_code_example(self, example_id: str) -> CodeExample | None: """获取代码示例""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM code_examples WHERE id = ?", (example_id, )).fetchone() + row = conn.execute( + "SELECT * FROM code_examples WHERE id = ?", (example_id,) + ).fetchone() 
if row: return self._row_to_code_example(row) @@ -1626,7 +1629,7 @@ class DeveloperEcosystemManager: SET view_count = view_count + 1 WHERE id = ? """, - (example_id, ), + (example_id,), ) conn.commit() @@ -1639,7 +1642,7 @@ class DeveloperEcosystemManager: SET copy_count = copy_count + 1 WHERE id = ? """, - (example_id, ), + (example_id,), ) conn.commit() @@ -1695,7 +1698,9 @@ class DeveloperEcosystemManager: def get_api_documentation(self, doc_id: str) -> APIDocumentation | None: """获取 API 文档""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM api_documentation WHERE id = ?", (doc_id, )).fetchone() + row = conn.execute( + "SELECT * FROM api_documentation WHERE id = ?", (doc_id,) + ).fetchone() if row: return self._row_to_api_documentation(row) @@ -1794,7 +1799,7 @@ class DeveloperEcosystemManager: """获取开发者门户配置""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM developer_portal_configs WHERE id = ?", (config_id, ) + "SELECT * FROM developer_portal_configs WHERE id = ?", (config_id,) ).fetchone() if row: diff --git a/backend/enterprise_manager.py b/backend/enterprise_manager.py index 2ffe5e8..4ae84a9 100644 --- a/backend/enterprise_manager.py +++ b/backend/enterprise_manager.py @@ -688,7 +688,7 @@ class EnterpriseManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM sso_configs WHERE id = ?", (config_id, )) + cursor.execute("SELECT * FROM sso_configs WHERE id = ?", (config_id,)) row = cursor.fetchone() if row: @@ -722,7 +722,7 @@ class EnterpriseManager: WHERE tenant_id = ? 
AND status = 'active' ORDER BY created_at DESC LIMIT 1 """, - (tenant_id, ), + (tenant_id,), ) row = cursor.fetchone() @@ -802,7 +802,7 @@ class EnterpriseManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("DELETE FROM sso_configs WHERE id = ?", (config_id, )) + cursor.execute("DELETE FROM sso_configs WHERE id = ?", (config_id,)) conn.commit() return cursor.rowcount > 0 finally: @@ -818,7 +818,7 @@ class EnterpriseManager: SELECT * FROM sso_configs WHERE tenant_id = ? ORDER BY created_at DESC """, - (tenant_id, ), + (tenant_id,), ) rows = cursor.fetchall() @@ -926,7 +926,7 @@ class EnterpriseManager: """ SELECT * FROM saml_auth_requests WHERE request_id = ? """, - (request_id, ), + (request_id,), ) row = cursor.fetchone() @@ -1084,7 +1084,7 @@ class EnterpriseManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM scim_configs WHERE id = ?", (config_id, )) + cursor.execute("SELECT * FROM scim_configs WHERE id = ?", (config_id,)) row = cursor.fetchone() if row: @@ -1104,7 +1104,7 @@ class EnterpriseManager: SELECT * FROM scim_configs WHERE tenant_id = ? ORDER BY created_at DESC LIMIT 1 """, - (tenant_id, ), + (tenant_id,), ) row = cursor.fetchone() @@ -1398,7 +1398,7 @@ class EnterpriseManager: UPDATE audit_log_exports SET status = 'processing' WHERE id = ? 
""", - (export_id, ), + (export_id,), ) conn.commit() @@ -1523,7 +1523,7 @@ class EnterpriseManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM audit_log_exports WHERE id = ?", (export_id, )) + cursor.execute("SELECT * FROM audit_log_exports WHERE id = ?", (export_id,)) row = cursor.fetchone() if row: @@ -1661,7 +1661,7 @@ class EnterpriseManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM data_retention_policies WHERE id = ?", (policy_id, )) + cursor.execute("SELECT * FROM data_retention_policies WHERE id = ?", (policy_id,)) row = cursor.fetchone() if row: @@ -1758,7 +1758,7 @@ class EnterpriseManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("DELETE FROM data_retention_policies WHERE id = ?", (policy_id, )) + cursor.execute("DELETE FROM data_retention_policies WHERE id = ?", (policy_id,)) conn.commit() return cursor.rowcount > 0 finally: @@ -1887,7 +1887,7 @@ class EnterpriseManager: SELECT COUNT(*) as count FROM audit_logs WHERE created_at < ? """, - (cutoff_date, ), + (cutoff_date,), ) count = cursor.fetchone()["count"] @@ -1896,7 +1896,7 @@ class EnterpriseManager: """ DELETE FROM audit_logs WHERE created_at < ? 
""", - (cutoff_date, ), + (cutoff_date,), ) deleted = cursor.rowcount return {"affected": count, "archived": 0, "deleted": deleted, "errors": 0} @@ -1927,7 +1927,7 @@ class EnterpriseManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM data_retention_jobs WHERE id = ?", (job_id, )) + cursor.execute("SELECT * FROM data_retention_jobs WHERE id = ?", (job_id,)) row = cursor.fetchone() if row: diff --git a/backend/growth_manager.py b/backend/growth_manager.py index ee51790..dcf4dcd 100644 --- a/backend/growth_manager.py +++ b/backend/growth_manager.py @@ -677,7 +677,9 @@ class GrowthManager: ) -> FunnelAnalysis | None: """分析漏斗转化率""" with self._get_db() as conn: - funnel_row = conn.execute("SELECT * FROM funnels WHERE id = ?", (funnel_id, )).fetchone() + funnel_row = conn.execute( + "SELECT * FROM funnels WHERE id = ?", (funnel_id,) + ).fetchone() if not funnel_row: return None @@ -891,7 +893,7 @@ class GrowthManager: """获取实验详情""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM experiments WHERE id = ?", (experiment_id, ) + "SELECT * FROM experiments WHERE id = ?", (experiment_id,) ).fetchone() if row: @@ -1246,7 +1248,7 @@ class GrowthManager: """获取邮件模板""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM email_templates WHERE id = ?", (template_id, ) + "SELECT * FROM email_templates WHERE id = ?", (template_id,) ).fetchone() if row: @@ -1452,7 +1454,7 @@ class GrowthManager: """发送整个营销活动""" with self._get_db() as conn: campaign_row = conn.execute( - "SELECT * FROM email_campaigns WHERE id = ?", (campaign_id, ) + "SELECT * FROM email_campaigns WHERE id = ?", (campaign_id,) ).fetchone() if not campaign_row: @@ -1573,7 +1575,8 @@ class GrowthManager: """触发自动化工作流""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM automation_workflows WHERE id = ? AND is_active = 1", (workflow_id, ) + "SELECT * FROM automation_workflows WHERE id = ? 
AND is_active = 1", + (workflow_id,), ).fetchone() if not row: @@ -1592,7 +1595,7 @@ class GrowthManager: # 更新执行计数 conn.execute( "UPDATE automation_workflows SET execution_count = execution_count + 1 WHERE id = ?", - (workflow_id, ), + (workflow_id,), ) conn.commit() @@ -1766,7 +1769,7 @@ class GrowthManager: with self._get_db() as conn: row = conn.execute( - "SELECT 1 FROM referrals WHERE referral_code = ?", (code, ) + "SELECT 1 FROM referrals WHERE referral_code = ?", (code,) ).fetchone() if not row: @@ -1776,7 +1779,7 @@ class GrowthManager: """获取推荐计划""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM referral_programs WHERE id = ?", (program_id, ) + "SELECT * FROM referral_programs WHERE id = ?", (program_id,) ).fetchone() if row: @@ -1811,7 +1814,7 @@ class GrowthManager: def reward_referral(self, referral_id: str) -> bool: """发放推荐奖励""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM referrals WHERE id = ?", (referral_id, )).fetchone() + row = conn.execute("SELECT * FROM referrals WHERE id = ?", (referral_id,)).fetchone() if not row or row["status"] != ReferralStatus.CONVERTED.value: return False @@ -1972,7 +1975,7 @@ class GrowthManager: ORDER BY timestamp DESC LIMIT 20 """, - (tenant_id, ), + (tenant_id,), ).fetchall() # 热门功能 diff --git a/backend/localization_manager.py b/backend/localization_manager.py index f150ef9..1d1c882 100644 --- a/backend/localization_manager.py +++ b/backend/localization_manager.py @@ -1074,7 +1074,7 @@ class LocalizationManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM language_configs WHERE code = ?", (code, )) + cursor.execute("SELECT * FROM language_configs WHERE code = ?", (code,)) row = cursor.fetchone() if row: return self._row_to_language_config(row) @@ -1100,7 +1100,7 @@ class LocalizationManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM data_centers WHERE id = ?", (dc_id, )) + 
cursor.execute("SELECT * FROM data_centers WHERE id = ?", (dc_id,)) row = cursor.fetchone() if row: return self._row_to_data_center(row) @@ -1112,7 +1112,7 @@ class LocalizationManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM data_centers WHERE region_code = ?", (region_code, )) + cursor.execute("SELECT * FROM data_centers WHERE region_code = ?", (region_code,)) row = cursor.fetchone() if row: return self._row_to_data_center(row) @@ -1146,7 +1146,7 @@ class LocalizationManager: try: cursor = conn.cursor() cursor.execute( - "SELECT * FROM tenant_data_center_mappings WHERE tenant_id = ?", (tenant_id, ) + "SELECT * FROM tenant_data_center_mappings WHERE tenant_id = ?", (tenant_id,) ) row = cursor.fetchone() if row: @@ -1166,7 +1166,7 @@ class LocalizationManager: SELECT * FROM data_centers WHERE supported_regions LIKE ? AND status = 'active' ORDER BY priority LIMIT 1 """, - (f'%"{region_code}"%', ), + (f'%"{region_code}"%',), ) row = cursor.fetchone() if not row: @@ -1182,7 +1182,7 @@ class LocalizationManager: """ SELECT * FROM data_centers WHERE id != ? 
AND status = 'active' ORDER BY priority LIMIT 1 """, - (primary_dc_id, ), + (primary_dc_id,), ) secondary_row = cursor.fetchone() secondary_dc_id = secondary_row["id"] if secondary_row else None @@ -1222,7 +1222,7 @@ class LocalizationManager: try: cursor = conn.cursor() cursor.execute( - "SELECT * FROM localized_payment_methods WHERE provider = ?", (provider, ) + "SELECT * FROM localized_payment_methods WHERE provider = ?", (provider,) ) row = cursor.fetchone() if row: @@ -1278,7 +1278,7 @@ class LocalizationManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM country_configs WHERE code = ?", (code, )) + cursor.execute("SELECT * FROM country_configs WHERE code = ?", (code,)) row = cursor.fetchone() if row: return self._row_to_country_config(row) @@ -1408,7 +1408,7 @@ class LocalizationManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM localization_settings WHERE tenant_id = ?", (tenant_id, )) + cursor.execute("SELECT * FROM localization_settings WHERE tenant_id = ?", (tenant_id,)) row = cursor.fetchone() if row: return self._row_to_localization_settings(row) @@ -1505,7 +1505,8 @@ class LocalizationManager: params.append(tenant_id) cursor = conn.cursor() cursor.execute( - f"UPDATE localization_settings SET {', '.join(updates)} WHERE tenant_id = ?", params + f"UPDATE localization_settings SET {', '.join(updates)} WHERE tenant_id = ?", + params, ) conn.commit() return self.get_localization_settings(tenant_id) diff --git a/backend/main.py b/backend/main.py index 8138b71..97733de 100644 --- a/backend/main.py +++ b/backend/main.py @@ -1166,7 +1166,7 @@ async def create_manual_entity( start_pos=entity.start_pos, end_pos=entity.end_pos, text_snippet=text[ - max(0, entity.start_pos - 20): min(len(text), entity.end_pos + 20) + max(0, entity.start_pos - 20) : min(len(text), entity.end_pos + 20) ], confidence=1.0, ) @@ -1408,7 +1408,7 @@ async def upload_audio(project_id: str, file: 
UploadFile = File(...), _=Depends( start_pos=pos, end_pos=pos + len(name), text_snippet=full_text[ - max(0, pos - 20): min(len(full_text), pos + len(name) + 20) + max(0, pos - 20) : min(len(full_text), pos + len(name) + 20) ], confidence=1.0, ) @@ -1534,7 +1534,7 @@ async def upload_document(project_id: str, file: UploadFile = File(...), _=Depen start_pos=pos, end_pos=pos + len(name), text_snippet=full_text[ - max(0, pos - 20): min(len(full_text), pos + len(name) + 20) + max(0, pos - 20) : min(len(full_text), pos + len(name) + 20) ], confidence=1.0, ) diff --git a/backend/ops_manager.py b/backend/ops_manager.py index da7a992..d02f4f3 100644 --- a/backend/ops_manager.py +++ b/backend/ops_manager.py @@ -549,7 +549,7 @@ class OpsManager: def get_alert_rule(self, rule_id: str) -> AlertRule | None: """获取告警规则""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM alert_rules WHERE id = ?", (rule_id, )).fetchone() + row = conn.execute("SELECT * FROM alert_rules WHERE id = ?", (rule_id,)).fetchone() if row: return self._row_to_alert_rule(row) @@ -616,7 +616,7 @@ class OpsManager: def delete_alert_rule(self, rule_id: str) -> bool: """删除告警规则""" with self._get_db() as conn: - conn.execute("DELETE FROM alert_rules WHERE id = ?", (rule_id, )) + conn.execute("DELETE FROM alert_rules WHERE id = ?", (rule_id,)) conn.commit() return conn.total_changes > 0 @@ -680,7 +680,7 @@ class OpsManager: """获取告警渠道""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM alert_channels WHERE id = ?", (channel_id, ) + "SELECT * FROM alert_channels WHERE id = ?", (channel_id,) ).fetchone() if row: @@ -692,7 +692,7 @@ class OpsManager: with self._get_db() as conn: rows = conn.execute( "SELECT * FROM alert_channels WHERE tenant_id = ? 
ORDER BY created_at DESC", - (tenant_id, ), + (tenant_id,), ).fetchall() return [self._row_to_alert_channel(row) for row in rows] @@ -1220,7 +1220,7 @@ class OpsManager: def get_alert(self, alert_id: str) -> Alert | None: """获取告警详情""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM alerts WHERE id = ?", (alert_id, )).fetchone() + row = conn.execute("SELECT * FROM alerts WHERE id = ?", (alert_id,)).fetchone() if row: return self._row_to_alert(row) @@ -1294,7 +1294,7 @@ class OpsManager: SET suppression_count = suppression_count + 1 WHERE id = ? """, - (alert_id, ), + (alert_id,), ) conn.commit() @@ -1304,7 +1304,7 @@ class OpsManager: """更新告警通知状态""" with self._get_db() as conn: row = conn.execute( - "SELECT notification_sent FROM alerts WHERE id = ?", (alert_id, ) + "SELECT notification_sent FROM alerts WHERE id = ?", (alert_id,) ).fetchone() if row: @@ -1394,7 +1394,7 @@ class OpsManager: """检查告警是否被抑制""" with self._get_db() as conn: rows = conn.execute( - "SELECT * FROM alert_suppression_rules WHERE tenant_id = ?", (rule.tenant_id, ) + "SELECT * FROM alert_suppression_rules WHERE tenant_id = ?", (rule.tenant_id,) ).fetchall() for row in rows: @@ -1627,7 +1627,7 @@ class OpsManager: with self._get_db() as conn: rows = conn.execute( "SELECT * FROM capacity_plans WHERE tenant_id = ? ORDER BY created_at DESC", - (tenant_id, ), + (tenant_id,), ).fetchall() return [self._row_to_capacity_plan(row) for row in rows] @@ -1704,7 +1704,7 @@ class OpsManager: """获取自动扩缩容策略""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM auto_scaling_policies WHERE id = ?", (policy_id, ) + "SELECT * FROM auto_scaling_policies WHERE id = ?", (policy_id,) ).fetchone() if row: @@ -1716,7 +1716,7 @@ class OpsManager: with self._get_db() as conn: rows = conn.execute( "SELECT * FROM auto_scaling_policies WHERE tenant_id = ? 
ORDER BY created_at DESC", - (tenant_id, ), + (tenant_id,), ).fetchall() return [self._row_to_auto_scaling_policy(row) for row in rows] @@ -1818,7 +1818,7 @@ class OpsManager: """SELECT * FROM scaling_events WHERE policy_id = ? ORDER BY started_at DESC LIMIT 1""", - (policy_id, ), + (policy_id,), ).fetchone() if row: @@ -1857,7 +1857,7 @@ class OpsManager: def get_scaling_event(self, event_id: str) -> ScalingEvent | None: """获取扩缩容事件""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM scaling_events WHERE id = ?", (event_id, )).fetchone() + row = conn.execute("SELECT * FROM scaling_events WHERE id = ?", (event_id,)).fetchone() if row: return self._row_to_scaling_event(row) @@ -1951,7 +1951,7 @@ class OpsManager: def get_health_check(self, check_id: str) -> HealthCheck | None: """获取健康检查配置""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM health_checks WHERE id = ?", (check_id, )).fetchone() + row = conn.execute("SELECT * FROM health_checks WHERE id = ?", (check_id,)).fetchone() if row: return self._row_to_health_check(row) @@ -1962,7 +1962,7 @@ class OpsManager: with self._get_db() as conn: rows = conn.execute( "SELECT * FROM health_checks WHERE tenant_id = ? ORDER BY created_at DESC", - (tenant_id, ), + (tenant_id,), ).fetchall() return [self._row_to_health_check(row) for row in rows] @@ -2153,7 +2153,7 @@ class OpsManager: """获取故障转移配置""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM failover_configs WHERE id = ?", (config_id, ) + "SELECT * FROM failover_configs WHERE id = ?", (config_id,) ).fetchone() if row: @@ -2165,7 +2165,7 @@ class OpsManager: with self._get_db() as conn: rows = conn.execute( "SELECT * FROM failover_configs WHERE tenant_id = ? 
ORDER BY created_at DESC", - (tenant_id, ), + (tenant_id,), ).fetchall() return [self._row_to_failover_config(row) for row in rows] @@ -2258,7 +2258,9 @@ class OpsManager: def get_failover_event(self, event_id: str) -> FailoverEvent | None: """获取故障转移事件""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM failover_events WHERE id = ?", (event_id, )).fetchone() + row = conn.execute( + "SELECT * FROM failover_events WHERE id = ?", (event_id,) + ).fetchone() if row: return self._row_to_failover_event(row) @@ -2344,7 +2346,7 @@ class OpsManager: def get_backup_job(self, job_id: str) -> BackupJob | None: """获取备份任务""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM backup_jobs WHERE id = ?", (job_id, )).fetchone() + row = conn.execute("SELECT * FROM backup_jobs WHERE id = ?", (job_id,)).fetchone() if row: return self._row_to_backup_job(row) @@ -2355,7 +2357,7 @@ class OpsManager: with self._get_db() as conn: rows = conn.execute( "SELECT * FROM backup_jobs WHERE tenant_id = ? ORDER BY created_at DESC", - (tenant_id, ), + (tenant_id,), ).fetchall() return [self._row_to_backup_job(row) for row in rows] @@ -2427,7 +2429,9 @@ class OpsManager: def get_backup_record(self, record_id: str) -> BackupRecord | None: """获取备份记录""" with self._get_db() as conn: - row = conn.execute("SELECT * FROM backup_records WHERE id = ?", (record_id, )).fetchone() + row = conn.execute( + "SELECT * FROM backup_records WHERE id = ?", (record_id,) + ).fetchone() if row: return self._row_to_backup_record(row) @@ -2700,7 +2704,7 @@ class OpsManager: with self._get_db() as conn: rows = conn.execute( "SELECT * FROM idle_resources WHERE tenant_id = ? 
ORDER BY detected_at DESC", - (tenant_id, ), + (tenant_id,), ).fetchall() return [self._row_to_idle_resource(row) for row in rows] @@ -2814,7 +2818,7 @@ class OpsManager: """获取成本优化建议详情""" with self._get_db() as conn: row = conn.execute( - "SELECT * FROM cost_optimization_suggestions WHERE id = ?", (suggestion_id, ) + "SELECT * FROM cost_optimization_suggestions WHERE id = ?", (suggestion_id,) ).fetchone() if row: diff --git a/backend/performance_manager.py b/backend/performance_manager.py index 39c3fca..da64b84 100644 --- a/backend/performance_manager.py +++ b/backend/performance_manager.py @@ -500,7 +500,7 @@ class CacheManager: WHERE e.project_id = ? ORDER BY mention_count DESC LIMIT 100""", - (project_id, ), + (project_id,), ).fetchall() for entity in entities: @@ -517,7 +517,7 @@ class CacheManager: JOIN entities e2 ON r.target_entity_id = e2.id WHERE r.project_id = ? LIMIT 200""", - (project_id, ), + (project_id,), ).fetchall() for relation in relations: @@ -531,7 +531,7 @@ class CacheManager: WHERE project_id = ? 
ORDER BY created_at DESC LIMIT 10""", - (project_id, ), + (project_id,), ).fetchall() for transcript in transcripts: @@ -548,11 +548,11 @@ class CacheManager: # 预热项目知识库摘要 entity_count = conn.execute( - "SELECT COUNT(*) FROM entities WHERE project_id = ?", (project_id, ) + "SELECT COUNT(*) FROM entities WHERE project_id = ?", (project_id,) ).fetchone()[0] relation_count = conn.execute( - "SELECT COUNT(*) FROM entity_relations WHERE project_id = ?", (project_id, ) + "SELECT COUNT(*) FROM entity_relations WHERE project_id = ?", (project_id,) ).fetchone()[0] summary = { @@ -757,11 +757,11 @@ class DatabaseSharding: source_conn.row_factory = sqlite3.Row entities = source_conn.execute( - "SELECT * FROM entities WHERE project_id = ?", (project_id, ) + "SELECT * FROM entities WHERE project_id = ?", (project_id,) ).fetchall() relations = source_conn.execute( - "SELECT * FROM entity_relations WHERE project_id = ?", (project_id, ) + "SELECT * FROM entity_relations WHERE project_id = ?", (project_id,) ).fetchall() source_conn.close() @@ -794,8 +794,8 @@ class DatabaseSharding: # 从源分片删除数据 source_conn = sqlite3.connect(source_info.db_path) - source_conn.execute("DELETE FROM entities WHERE project_id = ?", (project_id, )) - source_conn.execute("DELETE FROM entity_relations WHERE project_id = ?", (project_id, )) + source_conn.execute("DELETE FROM entities WHERE project_id = ?", (project_id,)) + source_conn.execute("DELETE FROM entity_relations WHERE project_id = ?", (project_id,)) source_conn.commit() source_conn.close() @@ -1024,7 +1024,7 @@ class TaskQueue: with self.task_lock: self.tasks[task_id] = task # 异步执行 - threading.Thread(target=self._execute_task, args=(task_id, ), daemon=True).start() + threading.Thread(target=self._execute_task, args=(task_id,), daemon=True).start() # 保存到数据库 self._save_task(task) @@ -1061,7 +1061,7 @@ class TaskQueue: task.status = "retrying" # 延迟重试 threading.Timer( - 10 * task.retry_count, self._execute_task, args=(task_id, ) + 10 * task.retry_count, 
self._execute_task, args=(task_id,) ).start() else: task.status = "failed" @@ -1248,7 +1248,7 @@ class TaskQueue: if not self.use_celery: with self.task_lock: self.tasks[task_id] = task - threading.Thread(target=self._execute_task, args=(task_id, ), daemon=True).start() + threading.Thread(target=self._execute_task, args=(task_id,), daemon=True).start() self._update_task_status(task) return True @@ -1439,7 +1439,7 @@ class PerformanceMonitor: FROM performance_metrics WHERE timestamp > datetime('now', ?) """, - (f"-{hours} hours", ), + (f"-{hours} hours",), ).fetchone() # 按类型统计 @@ -1454,7 +1454,7 @@ class PerformanceMonitor: WHERE timestamp > datetime('now', ?) GROUP BY metric_type """, - (f"-{hours} hours", ), + (f"-{hours} hours",), ).fetchall() # 按端点统计(API) @@ -1472,7 +1472,7 @@ class PerformanceMonitor: ORDER BY avg_duration DESC LIMIT 20 """, - (f"-{hours} hours", ), + (f"-{hours} hours",), ).fetchall() # 慢查询统计 @@ -1597,7 +1597,7 @@ class PerformanceMonitor: DELETE FROM performance_metrics WHERE timestamp < datetime('now', ?) 
""", - (f"-{days} days", ), + (f"-{days} days",), ) deleted = cursor.rowcount diff --git a/backend/plugin_manager.py b/backend/plugin_manager.py index c6da345..46d9bea 100644 --- a/backend/plugin_manager.py +++ b/backend/plugin_manager.py @@ -212,7 +212,7 @@ class PluginManager: def get_plugin(self, plugin_id: str) -> Plugin | None: """获取插件""" conn = self.db.get_conn() - row = conn.execute("SELECT * FROM plugins WHERE id = ?", (plugin_id, )).fetchone() + row = conn.execute("SELECT * FROM plugins WHERE id = ?", (plugin_id,)).fetchone() conn.close() if row: @@ -283,10 +283,10 @@ class PluginManager: conn = self.db.get_conn() # 删除关联的配置 - conn.execute("DELETE FROM plugin_configs WHERE plugin_id = ?", (plugin_id, )) + conn.execute("DELETE FROM plugin_configs WHERE plugin_id = ?", (plugin_id,)) # 删除插件 - cursor = conn.execute("DELETE FROM plugins WHERE id = ?", (plugin_id, )) + cursor = conn.execute("DELETE FROM plugins WHERE id = ?", (plugin_id,)) conn.commit() conn.close() @@ -318,7 +318,8 @@ class PluginManager: # 检查是否已存在 existing = conn.execute( - "SELECT id FROM plugin_configs WHERE plugin_id = ? AND config_key = ?", (plugin_id, key) + "SELECT id FROM plugin_configs WHERE plugin_id = ? 
AND config_key = ?", + (plugin_id, key), ).fetchone() if existing: @@ -366,7 +367,7 @@ class PluginManager: """获取插件所有配置""" conn = self.db.get_conn() rows = conn.execute( - "SELECT config_key, config_value FROM plugin_configs WHERE plugin_id = ?", (plugin_id, ) + "SELECT config_key, config_value FROM plugin_configs WHERE plugin_id = ?", (plugin_id,) ).fetchall() conn.close() @@ -416,7 +417,9 @@ class ChromeExtensionHandler: token_id = str(uuid.uuid4())[:UUID_LENGTH] # 生成随机令牌 - raw_token = f"if_ext_{base64.urlsafe_b64encode(os.urandom(32)).decode('utf-8').rstrip(' = ')}" + raw_token = ( + f"if_ext_{base64.urlsafe_b64encode(os.urandom(32)).decode('utf-8').rstrip(' = ')}" + ) # 哈希存储 token_hash = hashlib.sha256(raw_token.encode()).hexdigest() @@ -469,7 +472,7 @@ class ChromeExtensionHandler: row = conn.execute( """SELECT * FROM chrome_extension_tokens WHERE token_hash = ? AND is_revoked = 0""", - (token_hash, ), + (token_hash,), ).fetchone() conn.close() @@ -509,7 +512,7 @@ class ChromeExtensionHandler: """撤销令牌""" conn = self.pm.db.get_conn() cursor = conn.execute( - "UPDATE chrome_extension_tokens SET is_revoked = 1 WHERE id = ?", (token_id, ) + "UPDATE chrome_extension_tokens SET is_revoked = 1 WHERE id = ?", (token_id,) ) conn.commit() conn.close() @@ -685,7 +688,7 @@ class BotHandler: rows = conn.execute( """SELECT * FROM bot_sessions WHERE bot_type = ? ORDER BY created_at DESC""", - (self.bot_type, ), + (self.bot_type,), ).fetchall() conn.close() @@ -1015,7 +1018,7 @@ class WebhookIntegration: rows = conn.execute( """SELECT * FROM webhook_endpoints WHERE endpoint_type = ? 
ORDER BY created_at DESC""", - (self.endpoint_type, ), + (self.endpoint_type,), ).fetchall() conn.close() @@ -1064,7 +1067,7 @@ class WebhookIntegration: def delete_endpoint(self, endpoint_id: str) -> bool: """删除端点""" conn = self.pm.db.get_conn() - cursor = conn.execute("DELETE FROM webhook_endpoints WHERE id = ?", (endpoint_id, )) + cursor = conn.execute("DELETE FROM webhook_endpoints WHERE id = ?", (endpoint_id,)) conn.commit() conn.close() @@ -1219,7 +1222,7 @@ class WebDAVSyncManager: def get_sync(self, sync_id: str) -> WebDAVSync | None: """获取同步配置""" conn = self.pm.db.get_conn() - row = conn.execute("SELECT * FROM webdav_syncs WHERE id = ?", (sync_id, )).fetchone() + row = conn.execute("SELECT * FROM webdav_syncs WHERE id = ?", (sync_id,)).fetchone() conn.close() if row: @@ -1233,7 +1236,7 @@ class WebDAVSyncManager: if project_id: rows = conn.execute( "SELECT * FROM webdav_syncs WHERE project_id = ? ORDER BY created_at DESC", - (project_id, ), + (project_id,), ).fetchall() else: rows = conn.execute("SELECT * FROM webdav_syncs ORDER BY created_at DESC").fetchall() @@ -1282,7 +1285,7 @@ class WebDAVSyncManager: def delete_sync(self, sync_id: str) -> bool: """删除同步配置""" conn = self.pm.db.get_conn() - cursor = conn.execute("DELETE FROM webdav_syncs WHERE id = ?", (sync_id, )) + cursor = conn.execute("DELETE FROM webdav_syncs WHERE id = ?", (sync_id,)) conn.commit() conn.close() diff --git a/backend/search_manager.py b/backend/search_manager.py index 026b64d..dc5e219 100644 --- a/backend/search_manager.py +++ b/backend/search_manager.py @@ -385,7 +385,7 @@ class FullTextSearch: # 排序和分页 scored_results.sort(key=lambda x: x.score, reverse=True) - return scored_results[offset: offset + limit] + return scored_results[offset : offset + limit] def _parse_boolean_query(self, query: str) -> dict: """ @@ -551,13 +551,13 @@ class FullTextSearch: try: if content_type == "transcript": row = conn.execute( - "SELECT full_text FROM transcripts WHERE id = ?", (content_id, ) + 
"SELECT full_text FROM transcripts WHERE id = ?", (content_id,) ).fetchone() return row["full_text"] if row else None elif content_type == "entity": row = conn.execute( - "SELECT name, definition FROM entities WHERE id = ?", (content_id, ) + "SELECT name, definition FROM entities WHERE id = ?", (content_id,) ).fetchone() if row: return f"{row['name']} {row['definition'] or ''}" @@ -571,7 +571,7 @@ class FullTextSearch: JOIN entities e1 ON r.source_entity_id = e1.id JOIN entities e2 ON r.target_entity_id = e2.id WHERE r.id = ?""", - (content_id, ), + (content_id,), ).fetchone() if row: return f"{row['source_name']} {row['relation_type']} {row['target_name']} {row['evidence'] or ''}" @@ -589,15 +589,15 @@ class FullTextSearch: try: if content_type == "transcript": row = conn.execute( - "SELECT project_id FROM transcripts WHERE id = ?", (content_id, ) + "SELECT project_id FROM transcripts WHERE id = ?", (content_id,) ).fetchone() elif content_type == "entity": row = conn.execute( - "SELECT project_id FROM entities WHERE id = ?", (content_id, ) + "SELECT project_id FROM entities WHERE id = ?", (content_id,) ).fetchone() elif content_type == "relation": row = conn.execute( - "SELECT project_id FROM entity_relations WHERE id = ?", (content_id, ) + "SELECT project_id FROM entity_relations WHERE id = ?", (content_id,) ).fetchone() else: return None @@ -738,7 +738,7 @@ class FullTextSearch: # 索引转录文本 transcripts = conn.execute( "SELECT id, project_id, full_text FROM transcripts WHERE project_id = ?", - (project_id, ), + (project_id,), ).fetchall() for t in transcripts: @@ -751,7 +751,7 @@ class FullTextSearch: # 索引实体 entities = conn.execute( "SELECT id, project_id, name, definition FROM entities WHERE project_id = ?", - (project_id, ), + (project_id,), ).fetchall() for e in entities: @@ -769,7 +769,7 @@ class FullTextSearch: JOIN entities e1 ON r.source_entity_id = e1.id JOIN entities e2 ON r.target_entity_id = e2.id WHERE r.project_id = ?""", - (project_id, ), + 
(project_id,), ).fetchall() for r in relations: @@ -1029,13 +1029,13 @@ class SemanticSearch: try: if content_type == "transcript": row = conn.execute( - "SELECT full_text FROM transcripts WHERE id = ?", (content_id, ) + "SELECT full_text FROM transcripts WHERE id = ?", (content_id,) ).fetchone() result = row["full_text"] if row else None elif content_type == "entity": row = conn.execute( - "SELECT name, definition FROM entities WHERE id = ?", (content_id, ) + "SELECT name, definition FROM entities WHERE id = ?", (content_id,) ).fetchone() result = f"{row['name']}: {row['definition']}" if row else None @@ -1047,7 +1047,7 @@ class SemanticSearch: JOIN entities e1 ON r.source_entity_id = e1.id JOIN entities e2 ON r.target_entity_id = e2.id WHERE r.id = ?""", - (content_id, ), + (content_id,), ).fetchone() result = ( f"{row['source_name']} {row['relation_type']} {row['target_name']}" @@ -1192,7 +1192,7 @@ class EntityPathDiscovery: # 获取项目ID row = conn.execute( - "SELECT project_id FROM entities WHERE id = ?", (source_entity_id, ) + "SELECT project_id FROM entities WHERE id = ?", (source_entity_id,) ).fetchone() if not row: @@ -1203,7 +1203,8 @@ class EntityPathDiscovery: # 验证目标实体也在同一项目 row = conn.execute( - "SELECT 1 FROM entities WHERE id = ? AND project_id = ?", (target_entity_id, project_id) + "SELECT 1 FROM entities WHERE id = ? 
AND project_id = ?", + (target_entity_id, project_id), ).fetchone() if not row: @@ -1267,7 +1268,7 @@ class EntityPathDiscovery: # 获取项目ID row = conn.execute( - "SELECT project_id FROM entities WHERE id = ?", (source_entity_id, ) + "SELECT project_id FROM entities WHERE id = ?", (source_entity_id,) ).fetchone() if not row: @@ -1278,7 +1279,9 @@ class EntityPathDiscovery: paths = [] - def dfs(current_id: str, target_id: str, path: list[str], visited: set[str], depth: int) -> None: + def dfs( + current_id: str, target_id: str, path: list[str], visited: set[str], depth: int + ) -> None: if depth > max_depth: return @@ -1325,7 +1328,7 @@ class EntityPathDiscovery: nodes = [] for entity_id in entity_ids: row = conn.execute( - "SELECT id, name, type FROM entities WHERE id = ?", (entity_id, ) + "SELECT id, name, type FROM entities WHERE id = ?", (entity_id,) ).fetchone() if row: nodes.append({"id": row["id"], "name": row["name"], "type": row["type"]}) @@ -1395,7 +1398,7 @@ class EntityPathDiscovery: # 获取项目ID row = conn.execute( - "SELECT project_id, name FROM entities WHERE id = ?", (entity_id, ) + "SELECT project_id, name FROM entities WHERE id = ?", (entity_id,) ).fetchone() if not row: @@ -1442,7 +1445,7 @@ class EntityPathDiscovery: # 获取邻居信息 neighbor_info = conn.execute( - "SELECT name, type FROM entities WHERE id = ?", (neighbor_id, ) + "SELECT name, type FROM entities WHERE id = ?", (neighbor_id,) ).fetchone() if neighbor_info: @@ -1562,7 +1565,7 @@ class EntityPathDiscovery: # 获取所有实体 entities = conn.execute( - "SELECT id, name FROM entities WHERE project_id = ?", (project_id, ) + "SELECT id, name FROM entities WHERE project_id = ?", (project_id,) ).fetchall() # 计算每个实体作为桥梁的次数 @@ -1688,7 +1691,7 @@ class KnowledgeGapDetection: # 获取项目的属性模板 templates = conn.execute( "SELECT id, name, type, is_required FROM attribute_templates WHERE project_id = ?", - (project_id, ), + (project_id,), ).fetchall() if not templates: @@ -1703,7 +1706,7 @@ class KnowledgeGapDetection: # 
检查每个实体的属性完整性 entities = conn.execute( - "SELECT id, name FROM entities WHERE project_id = ?", (project_id, ) + "SELECT id, name FROM entities WHERE project_id = ?", (project_id,) ).fetchall() for entity in entities: @@ -1711,7 +1714,7 @@ class KnowledgeGapDetection: # 获取实体已有的属性 existing_attrs = conn.execute( - "SELECT template_id FROM entity_attributes WHERE entity_id = ?", (entity_id, ) + "SELECT template_id FROM entity_attributes WHERE entity_id = ?", (entity_id,) ).fetchall() existing_template_ids = {a["template_id"] for a in existing_attrs} @@ -1723,7 +1726,7 @@ class KnowledgeGapDetection: missing_names = [] for template_id in missing_templates: template = conn.execute( - "SELECT name FROM attribute_templates WHERE id = ?", (template_id, ) + "SELECT name FROM attribute_templates WHERE id = ?", (template_id,) ).fetchone() if template: missing_names.append(template["name"]) @@ -1756,7 +1759,7 @@ class KnowledgeGapDetection: # 获取所有实体及其关系数量 entities = conn.execute( - "SELECT id, name, type FROM entities WHERE project_id = ?", (project_id, ) + "SELECT id, name, type FROM entities WHERE project_id = ?", (project_id,) ).fetchall() for entity in entities: @@ -1831,7 +1834,7 @@ class KnowledgeGapDetection: AND r1.id IS NULL AND r2.id IS NULL """, - (project_id, ), + (project_id,), ).fetchall() for entity in isolated: @@ -1869,7 +1872,7 @@ class KnowledgeGapDetection: WHERE project_id = ? 
AND (definition IS NULL OR definition = '') """, - (project_id, ), + (project_id,), ).fetchall() for entity in incomplete: @@ -1897,7 +1900,7 @@ class KnowledgeGapDetection: # 分析转录文本中频繁提及但未提取为实体的词 transcripts = conn.execute( - "SELECT full_text FROM transcripts WHERE project_id = ?", (project_id, ) + "SELECT full_text FROM transcripts WHERE project_id = ?", (project_id,) ).fetchall() # 合并所有文本 @@ -1905,7 +1908,7 @@ class KnowledgeGapDetection: # 获取现有实体名称 existing_entities = conn.execute( - "SELECT name FROM entities WHERE project_id = ?", (project_id, ) + "SELECT name FROM entities WHERE project_id = ?", (project_id,) ).fetchall() existing_names = {e["name"].lower() for e in existing_entities} @@ -2138,7 +2141,7 @@ class SearchManager: # 索引转录文本 transcripts = conn.execute( "SELECT id, project_id, full_text FROM transcripts WHERE project_id = ?", - (project_id, ), + (project_id,), ).fetchall() for t in transcripts: @@ -2152,7 +2155,7 @@ class SearchManager: # 索引实体 entities = conn.execute( "SELECT id, project_id, name, definition FROM entities WHERE project_id = ?", - (project_id, ), + (project_id,), ).fetchall() for e in entities: @@ -2191,7 +2194,7 @@ class SearchManager: """SELECT content_type, COUNT(*) as count FROM search_indexes WHERE project_id = ? 
GROUP BY content_type""", - (project_id, ), + (project_id,), ).fetchall() type_stats = {r["content_type"]: r["count"] for r in rows} diff --git a/backend/security_manager.py b/backend/security_manager.py index 9ff1299..9e4a19b 100644 --- a/backend/security_manager.py +++ b/backend/security_manager.py @@ -546,7 +546,7 @@ class SecurityManager: cursor = conn.cursor() # 检查是否已存在配置 - cursor.execute("SELECT id FROM encryption_configs WHERE project_id = ?", (project_id, )) + cursor.execute("SELECT id FROM encryption_configs WHERE project_id = ?", (project_id,)) existing = cursor.fetchone() if existing: @@ -641,7 +641,7 @@ class SecurityManager: cursor.execute( "SELECT master_key_hash, salt FROM encryption_configs WHERE project_id = ?", - (project_id, ), + (project_id,), ) row = cursor.fetchone() conn.close() @@ -660,7 +660,7 @@ class SecurityManager: conn = sqlite3.connect(self.db_path) cursor = conn.cursor() - cursor.execute("SELECT * FROM encryption_configs WHERE project_id = ?", (project_id, )) + cursor.execute("SELECT * FROM encryption_configs WHERE project_id = ?", (project_id,)) row = cursor.fetchone() conn.close() @@ -847,7 +847,7 @@ class SecurityManager: # 获取更新后的规则 conn = sqlite3.connect(self.db_path) cursor = conn.cursor() - cursor.execute("SELECT * FROM masking_rules WHERE id = ?", (rule_id, )) + cursor.execute("SELECT * FROM masking_rules WHERE id = ?", (rule_id,)) row = cursor.fetchone() conn.close() @@ -873,7 +873,7 @@ class SecurityManager: conn = sqlite3.connect(self.db_path) cursor = conn.cursor() - cursor.execute("DELETE FROM masking_rules WHERE id = ?", (rule_id, )) + cursor.execute("DELETE FROM masking_rules WHERE id = ?", (rule_id,)) success = cursor.rowcount > 0 conn.commit() @@ -1028,7 +1028,7 @@ class SecurityManager: cursor = conn.cursor() cursor.execute( - "SELECT * FROM data_access_policies WHERE id = ? AND is_active = 1", (policy_id, ) + "SELECT * FROM data_access_policies WHERE id = ? 
AND is_active = 1", (policy_id,) ) row = cursor.fetchone() conn.close() @@ -1184,7 +1184,7 @@ class SecurityManager: conn.commit() # 获取更新后的请求 - cursor.execute("SELECT * FROM access_requests WHERE id = ?", (request_id, )) + cursor.execute("SELECT * FROM access_requests WHERE id = ?", (request_id,)) row = cursor.fetchone() conn.close() @@ -1219,7 +1219,7 @@ class SecurityManager: conn.commit() - cursor.execute("SELECT * FROM access_requests WHERE id = ?", (request_id, )) + cursor.execute("SELECT * FROM access_requests WHERE id = ?", (request_id,)) row = cursor.fetchone() conn.close() diff --git a/backend/subscription_manager.py b/backend/subscription_manager.py index 29f074f..678d673 100644 --- a/backend/subscription_manager.py +++ b/backend/subscription_manager.py @@ -572,7 +572,7 @@ class SubscriptionManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM subscription_plans WHERE id = ?", (plan_id, )) + cursor.execute("SELECT * FROM subscription_plans WHERE id = ?", (plan_id,)) row = cursor.fetchone() if row: @@ -588,7 +588,7 @@ class SubscriptionManager: try: cursor = conn.cursor() cursor.execute( - "SELECT * FROM subscription_plans WHERE tier = ? AND is_active = 1", (tier, ) + "SELECT * FROM subscription_plans WHERE tier = ? AND is_active = 1", (tier,) ) row = cursor.fetchone() @@ -760,7 +760,7 @@ class SubscriptionManager: SELECT * FROM subscriptions WHERE tenant_id = ? AND status IN ('active', 'trial', 'pending') """, - (tenant_id, ), + (tenant_id,), ) existing = cursor.fetchone() @@ -878,7 +878,7 @@ class SubscriptionManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM subscriptions WHERE id = ?", (subscription_id, )) + cursor.execute("SELECT * FROM subscriptions WHERE id = ?", (subscription_id,)) row = cursor.fetchone() if row: @@ -899,7 +899,7 @@ class SubscriptionManager: WHERE tenant_id = ? 
AND status IN ('active', 'trial', 'past_due', 'pending') ORDER BY created_at DESC LIMIT 1 """, - (tenant_id, ), + (tenant_id,), ) row = cursor.fetchone() @@ -1389,7 +1389,7 @@ class SubscriptionManager: def _get_payment_internal(self, conn: sqlite3.Connection, payment_id: str) -> Payment | None: """内部方法:获取支付记录""" cursor = conn.cursor() - cursor.execute("SELECT * FROM payments WHERE id = ?", (payment_id, )) + cursor.execute("SELECT * FROM payments WHERE id = ?", (payment_id,)) row = cursor.fetchone() if row: @@ -1475,7 +1475,7 @@ class SubscriptionManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM invoices WHERE id = ?", (invoice_id, )) + cursor.execute("SELECT * FROM invoices WHERE id = ?", (invoice_id,)) row = cursor.fetchone() if row: @@ -1490,7 +1490,7 @@ class SubscriptionManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM invoices WHERE invoice_number = ?", (invoice_number, )) + cursor.execute("SELECT * FROM invoices WHERE invoice_number = ?", (invoice_number,)) row = cursor.fetchone() if row: @@ -1568,7 +1568,7 @@ class SubscriptionManager: SELECT COUNT(*) as count FROM invoices WHERE invoice_number LIKE ? 
""", - (f"{prefix}%", ), + (f"{prefix}%",), ) row = cursor.fetchone() count = row["count"] + 1 @@ -1803,7 +1803,7 @@ class SubscriptionManager: def _get_refund_internal(self, conn: sqlite3.Connection, refund_id: str) -> Refund | None: """内部方法:获取退款记录""" cursor = conn.cursor() - cursor.execute("SELECT * FROM refunds WHERE id = ?", (refund_id, )) + cursor.execute("SELECT * FROM refunds WHERE id = ?", (refund_id,)) row = cursor.fetchone() if row: diff --git a/backend/tenant_manager.py b/backend/tenant_manager.py index 66390b6..307ed8a 100644 --- a/backend/tenant_manager.py +++ b/backend/tenant_manager.py @@ -495,7 +495,7 @@ class TenantManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM tenants WHERE id = ?", (tenant_id, )) + cursor.execute("SELECT * FROM tenants WHERE id = ?", (tenant_id,)) row = cursor.fetchone() if row: @@ -510,7 +510,7 @@ class TenantManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM tenants WHERE slug = ?", (slug, )) + cursor.execute("SELECT * FROM tenants WHERE slug = ?", (slug,)) row = cursor.fetchone() if row: @@ -531,7 +531,7 @@ class TenantManager: JOIN tenant_domains d ON t.id = d.tenant_id WHERE d.domain = ? AND d.status = 'verified' """, - (domain, ), + (domain,), ) row = cursor.fetchone() @@ -605,7 +605,7 @@ class TenantManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("DELETE FROM tenants WHERE id = ?", (tenant_id, )) + cursor.execute("DELETE FROM tenants WHERE id = ?", (tenant_id,)) conn.commit() return cursor.rowcount > 0 finally: @@ -684,7 +684,7 @@ class TenantManager: UPDATE tenant_domains SET is_primary = 0 WHERE tenant_id = ? 
""", - (tenant_id, ), + (tenant_id,), ) cursor.execute( @@ -782,7 +782,7 @@ class TenantManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM tenant_domains WHERE id = ?", (domain_id, )) + cursor.execute("SELECT * FROM tenant_domains WHERE id = ?", (domain_id,)) row = cursor.fetchone() if not row: @@ -841,7 +841,7 @@ class TenantManager: WHERE tenant_id = ? ORDER BY is_primary DESC, created_at DESC """, - (tenant_id, ), + (tenant_id,), ) rows = cursor.fetchall() @@ -857,7 +857,7 @@ class TenantManager: conn = self._get_connection() try: cursor = conn.cursor() - cursor.execute("SELECT * FROM tenant_branding WHERE tenant_id = ?", (tenant_id, )) + cursor.execute("SELECT * FROM tenant_branding WHERE tenant_id = ?", (tenant_id,)) row = cursor.fetchone() if row: @@ -885,7 +885,7 @@ class TenantManager: cursor = conn.cursor() # 检查是否已存在 - cursor.execute("SELECT id FROM tenant_branding WHERE tenant_id = ?", (tenant_id, )) + cursor.execute("SELECT id FROM tenant_branding WHERE tenant_id = ?", (tenant_id,)) existing = cursor.fetchone() if existing: @@ -1197,7 +1197,7 @@ class TenantManager: WHERE m.user_id = ? 
AND m.status = 'active' ORDER BY t.created_at DESC """, - (user_id, ), + (user_id,), ) rows = cursor.fetchall() @@ -1388,7 +1388,7 @@ class TenantManager: counter = 1 while True: - cursor.execute("SELECT id FROM tenants WHERE slug = ?", (slug, )) + cursor.execute("SELECT id FROM tenants WHERE slug = ?", (slug,)) if not cursor.fetchone(): break slug = f"{base_slug}-{counter}" diff --git a/backend/test_phase8_task8.py b/backend/test_phase8_task8.py index a24307d..900e6d3 100644 --- a/backend/test_phase8_task8.py +++ b/backend/test_phase8_task8.py @@ -198,7 +198,7 @@ class TestOpsManager: for channel in channels: if channel.tenant_id == self.tenant_id: with self.manager._get_db() as conn: - conn.execute("DELETE FROM alert_channels WHERE id = ?", (channel.id, )) + conn.execute("DELETE FROM alert_channels WHERE id = ?", (channel.id,)) conn.commit() self.log("Deleted test alert channels") @@ -320,8 +320,8 @@ class TestOpsManager: # 清理 self.manager.delete_alert_rule(rule.id) with self.manager._get_db() as conn: - conn.execute("DELETE FROM alerts WHERE id = ?", (alert_id, )) - conn.execute("DELETE FROM resource_metrics WHERE tenant_id = ?", (self.tenant_id, )) + conn.execute("DELETE FROM alerts WHERE id = ?", (alert_id,)) + conn.execute("DELETE FROM resource_metrics WHERE tenant_id = ?", (self.tenant_id,)) conn.commit() self.log("Cleaned up test data") @@ -381,8 +381,8 @@ class TestOpsManager: # 清理 with self.manager._get_db() as conn: - conn.execute("DELETE FROM capacity_plans WHERE tenant_id = ?", (self.tenant_id, )) - conn.execute("DELETE FROM resource_metrics WHERE tenant_id = ?", (self.tenant_id, )) + conn.execute("DELETE FROM capacity_plans WHERE tenant_id = ?", (self.tenant_id,)) + conn.execute("DELETE FROM resource_metrics WHERE tenant_id = ?", (self.tenant_id,)) conn.commit() self.log("Cleaned up capacity planning test data") @@ -437,9 +437,9 @@ class TestOpsManager: # 清理 with self.manager._get_db() as conn: - conn.execute("DELETE FROM scaling_events WHERE 
tenant_id = ?", (self.tenant_id, )) + conn.execute("DELETE FROM scaling_events WHERE tenant_id = ?", (self.tenant_id,)) conn.execute( - "DELETE FROM auto_scaling_policies WHERE tenant_id = ?", (self.tenant_id, ) + "DELETE FROM auto_scaling_policies WHERE tenant_id = ?", (self.tenant_id,) ) conn.commit() self.log("Cleaned up auto scaling test data") @@ -495,7 +495,7 @@ class TestOpsManager: # 清理 with self.manager._get_db() as conn: - conn.execute("DELETE FROM health_checks WHERE tenant_id = ?", (self.tenant_id, )) + conn.execute("DELETE FROM health_checks WHERE tenant_id = ?", (self.tenant_id,)) conn.commit() self.log("Cleaned up health check test data") @@ -550,8 +550,8 @@ class TestOpsManager: # 清理 with self.manager._get_db() as conn: - conn.execute("DELETE FROM failover_events WHERE tenant_id = ?", (self.tenant_id, )) - conn.execute("DELETE FROM failover_configs WHERE tenant_id = ?", (self.tenant_id, )) + conn.execute("DELETE FROM failover_events WHERE tenant_id = ?", (self.tenant_id,)) + conn.execute("DELETE FROM failover_configs WHERE tenant_id = ?", (self.tenant_id,)) conn.commit() self.log("Cleaned up failover test data") @@ -604,8 +604,8 @@ class TestOpsManager: # 清理 with self.manager._get_db() as conn: - conn.execute("DELETE FROM backup_records WHERE tenant_id = ?", (self.tenant_id, )) - conn.execute("DELETE FROM backup_jobs WHERE tenant_id = ?", (self.tenant_id, )) + conn.execute("DELETE FROM backup_records WHERE tenant_id = ?", (self.tenant_id,)) + conn.execute("DELETE FROM backup_jobs WHERE tenant_id = ?", (self.tenant_id,)) conn.commit() self.log("Cleaned up backup test data") @@ -687,13 +687,13 @@ class TestOpsManager: with self.manager._get_db() as conn: conn.execute( "DELETE FROM cost_optimization_suggestions WHERE tenant_id = ?", - (self.tenant_id, ), + (self.tenant_id,), ) - conn.execute("DELETE FROM idle_resources WHERE tenant_id = ?", (self.tenant_id, )) + conn.execute("DELETE FROM idle_resources WHERE tenant_id = ?", (self.tenant_id,)) 
conn.execute( - "DELETE FROM resource_utilizations WHERE tenant_id = ?", (self.tenant_id, ) + "DELETE FROM resource_utilizations WHERE tenant_id = ?", (self.tenant_id,) ) - conn.execute("DELETE FROM cost_reports WHERE tenant_id = ?", (self.tenant_id, )) + conn.execute("DELETE FROM cost_reports WHERE tenant_id = ?", (self.tenant_id,)) conn.commit() self.log("Cleaned up cost optimization test data") diff --git a/backend/workflow_manager.py b/backend/workflow_manager.py index 033ff0a..afffea8 100644 --- a/backend/workflow_manager.py +++ b/backend/workflow_manager.py @@ -487,7 +487,7 @@ class WorkflowManager: """获取工作流""" conn = self.db.get_conn() try: - row = conn.execute("SELECT * FROM workflows WHERE id = ?", (workflow_id, )).fetchone() + row = conn.execute("SELECT * FROM workflows WHERE id = ?", (workflow_id,)).fetchone() if not row: return None @@ -584,10 +584,10 @@ class WorkflowManager: self.scheduler.remove_job(job_id) # 删除相关任务 - conn.execute("DELETE FROM workflow_tasks WHERE workflow_id = ?", (workflow_id, )) + conn.execute("DELETE FROM workflow_tasks WHERE workflow_id = ?", (workflow_id,)) # 删除工作流 - conn.execute("DELETE FROM workflows WHERE id = ?", (workflow_id, )) + conn.execute("DELETE FROM workflows WHERE id = ?", (workflow_id,)) conn.commit() return True @@ -653,7 +653,7 @@ class WorkflowManager: """获取任务""" conn = self.db.get_conn() try: - row = conn.execute("SELECT * FROM workflow_tasks WHERE id = ?", (task_id, )).fetchone() + row = conn.execute("SELECT * FROM workflow_tasks WHERE id = ?", (task_id,)).fetchone() if not row: return None @@ -668,7 +668,7 @@ class WorkflowManager: try: rows = conn.execute( "SELECT * FROM workflow_tasks WHERE workflow_id = ? 
ORDER BY task_order", - (workflow_id, ), + (workflow_id,), ).fetchall() return [self._row_to_task(row) for row in rows] @@ -719,7 +719,7 @@ class WorkflowManager: """删除任务""" conn = self.db.get_conn() try: - conn.execute("DELETE FROM workflow_tasks WHERE id = ?", (task_id, )) + conn.execute("DELETE FROM workflow_tasks WHERE id = ?", (task_id,)) conn.commit() return True finally: @@ -780,7 +780,7 @@ class WorkflowManager: conn = self.db.get_conn() try: row = conn.execute( - "SELECT * FROM webhook_configs WHERE id = ?", (webhook_id, ) + "SELECT * FROM webhook_configs WHERE id = ?", (webhook_id,) ).fetchone() if not row: @@ -843,7 +843,7 @@ class WorkflowManager: """删除 Webhook 配置""" conn = self.db.get_conn() try: - conn.execute("DELETE FROM webhook_configs WHERE id = ?", (webhook_id, )) + conn.execute("DELETE FROM webhook_configs WHERE id = ?", (webhook_id,)) conn.commit() return True finally: @@ -951,7 +951,7 @@ class WorkflowManager: """获取日志""" conn = self.db.get_conn() try: - row = conn.execute("SELECT * FROM workflow_logs WHERE id = ?", (log_id, )).fetchone() + row = conn.execute("SELECT * FROM workflow_logs WHERE id = ?", (log_id,)).fetchone() if not row: return None diff --git a/code_review_fixer.py b/code_review_fixer.py index e84141e..1612b1b 100644 --- a/code_review_fixer.py +++ b/code_review_fixer.py @@ -11,231 +11,264 @@ from pathlib import Path from typing import Any # 项目路径 -PROJECT_PATH = Path("/root/.openclaw/workspace/projects/insightflow") +PROJECT_PATH = Path("/root/.openclaw/workspace/projects/insightflow") # 修复报告 -report = { - "fixed": [], - "manual_review": [], - "errors": [] -} +report = {"fixed": [], "manual_review": [], "errors": []} + def find_python_files() -> list[Path]: """查找所有 Python 文件""" - py_files = [] + py_files = [] for py_file in PROJECT_PATH.rglob("*.py"): if "__pycache__" not in str(py_file): py_files.append(py_file) return py_files + def check_duplicate_imports(content: str, file_path: Path) -> list[dict]: """检查重复导入""" - issues = [] 
- lines = content.split('\n') - imports = {} + issues = [] + lines = content.split("\n") + imports = {} for i, line in enumerate(lines, 1): - line_stripped = line.strip() - if line_stripped.startswith('import ') or line_stripped.startswith('from '): + line_stripped = line.strip() + if line_stripped.startswith("import ") or line_stripped.startswith("from "): if line_stripped in imports: - issues.append({ - "line": i, - "type": "duplicate_import", - "content": line_stripped, - "original_line": imports[line_stripped] - }) + issues.append( + { + "line": i, + "type": "duplicate_import", + "content": line_stripped, + "original_line": imports[line_stripped], + } + ) else: - imports[line_stripped] = i + imports[line_stripped] = i return issues + def check_bare_excepts(content: str, file_path: Path) -> list[dict]: """检查裸异常捕获""" - issues = [] - lines = content.split('\n') + issues = [] + lines = content.split("\n") for i, line in enumerate(lines, 1): - stripped = line.strip() + stripped = line.strip() # 检查 except Exception: 或 except Exception: - if re.match(r'^except\s*:', stripped): - issues.append({ - "line": i, - "type": "bare_except", - "content": stripped - }) + if re.match(r"^except\s*:", stripped): + issues.append({"line": i, "type": "bare_except", "content": stripped}) return issues + def check_line_length(content: str, file_path: Path) -> list[dict]: """检查行长度(PEP8: 79字符,这里放宽到 100)""" - issues = [] - lines = content.split('\n') + issues = [] + lines = content.split("\n") for i, line in enumerate(lines, 1): if len(line) > 100: - issues.append({ - "line": i, - "type": "line_too_long", - "length": len(line), - "content": line[:80] + "..." 
- }) + issues.append( + { + "line": i, + "type": "line_too_long", + "length": len(line), + "content": line[:80] + "...", + } + ) return issues + def check_unused_imports(content: str, file_path: Path) -> list[dict]: """检查未使用的导入""" - issues = [] + issues = [] try: - tree = ast.parse(content) - imports = {} - used_names = set() + tree = ast.parse(content) + imports = {} + used_names = set() for node in ast.walk(tree): if isinstance(node, ast.Import): for alias in node.names: - imports[alias.asname or alias.name] = node + imports[alias.asname or alias.name] = node elif isinstance(node, ast.ImportFrom): for alias in node.names: - name = alias.asname or alias.name - if name != '*': - imports[name] = node + name = alias.asname or alias.name + if name != "*": + imports[name] = node elif isinstance(node, ast.Name): used_names.add(node.id) for name, node in imports.items(): - if name not in used_names and not name.startswith('_'): - issues.append({ - "line": node.lineno, - "type": "unused_import", - "name": name - }) + if name not in used_names and not name.startswith("_"): + issues.append( + {"line": node.lineno, "type": "unused_import", "name": name} + ) except SyntaxError: pass return issues + def check_string_formatting(content: str, file_path: Path) -> list[dict]: """检查混合字符串格式化(建议使用 f-string)""" - issues = [] - lines = content.split('\n') + issues = [] + lines = content.split("\n") for i, line in enumerate(lines, 1): # 检查 % 格式化 - if re.search(r'["\'].*%\s*\w+', line) and '%' in line: - if not line.strip().startswith('#'): - issues.append({ - "line": i, - "type": "percent_formatting", - "content": line.strip()[:60] - }) + if re.search(r'["\'].*%\s*\w+', line) and "%" in line: + if not line.strip().startswith("#"): + issues.append( + { + "line": i, + "type": "percent_formatting", + "content": line.strip()[:60], + } + ) # 检查 .format() - if '.format(' in line: - if not line.strip().startswith('#'): - issues.append({ - "line": i, - "type": "format_method", - "content": 
line.strip()[:60] - }) + if ".format(" in line: + if not line.strip().startswith("#"): + issues.append( + {"line": i, "type": "format_method", "content": line.strip()[:60]} + ) return issues + def check_magic_numbers(content: str, file_path: Path) -> list[dict]: """检查魔法数字""" - issues = [] - lines = content.split('\n') + issues = [] + lines = content.split("\n") # 常见魔法数字模式(排除常见索引和简单值) - magic_pattern = re.compile(r'(? list[dict]: """检查 SQL 注入风险""" - issues = [] - lines = content.split('\n') + issues = [] + lines = content.split("\n") for i, line in enumerate(lines, 1): # 检查字符串拼接的 SQL - if 'execute(' in line or 'executescript(' in line or 'executemany(' in line: + if "execute(" in line or "executescript(" in line or "executemany(" in line: # 检查是否有 f-string 或 .format 在 SQL 中 - if 'f"' in line or "f'" in line or '.format(' in line or '%' in line: - if 'SELECT' in line.upper() or 'INSERT' in line.upper() or 'UPDATE' in line.upper() or 'DELETE' in line.upper(): - issues.append({ - "line": i, - "type": "sql_injection_risk", - "content": line.strip()[:80], - "severity": "high" - }) + if 'f"' in line or "f'" in line or ".format(" in line or "%" in line: + if ( + "SELECT" in line.upper() + or "INSERT" in line.upper() + or "UPDATE" in line.upper() + or "DELETE" in line.upper() + ): + issues.append( + { + "line": i, + "type": "sql_injection_risk", + "content": line.strip()[:80], + "severity": "high", + } + ) return issues + def check_cors_config(content: str, file_path: Path) -> list[dict]: """检查 CORS 配置""" - issues = [] - lines = content.split('\n') + issues = [] + lines = content.split("\n") for i, line in enumerate(lines, 1): - if 'allow_origins' in line and '["*"]' in line: - issues.append({ - "line": i, - "type": "cors_wildcard", - "content": line.strip(), - "severity": "medium" - }) + if "allow_origins" in line and '["*"]' in line: + issues.append( + { + "line": i, + "type": "cors_wildcard", + "content": line.strip(), + "severity": "medium", + } + ) return issues + def 
fix_bare_excepts(content: str) -> str: """修复裸异常捕获""" - lines = content.split('\n') - new_lines = [] + lines = content.split("\n") + new_lines = [] for line in lines: - stripped = line.strip() - if re.match(r'^except\s*:', stripped): + stripped = line.strip() + if re.match(r"^except\s*:", stripped): # 替换为具体异常 - indent = len(line) - len(line.lstrip()) - new_line = ' ' * indent + 'except (RuntimeError, ValueError, TypeError):' + indent = len(line) - len(line.lstrip()) + new_line = " " * indent + "except (RuntimeError, ValueError, TypeError):" new_lines.append(new_line) else: new_lines.append(line) - return '\n'.join(new_lines) + return "\n".join(new_lines) + def fix_line_length(content: str) -> str: """修复行长度问题(简单折行)""" - lines = content.split('\n') - new_lines = [] + lines = content.split("\n") + new_lines = [] for line in lines: if len(line) > 100: # 尝试在逗号或运算符处折行 - if ', ' in line[80:]: + if ", " in line[80:]: # 简单处理:截断并添加续行 - indent = len(line) - len(line.lstrip()) + indent = len(line) - len(line.lstrip()) new_lines.append(line) else: new_lines.append(line) else: new_lines.append(line) - return '\n'.join(new_lines) + return "\n".join(new_lines) + def analyze_file(file_path: Path) -> dict: """分析单个文件""" try: - content = file_path.read_text(encoding = 'utf-8') + content = file_path.read_text(encoding="utf-8") except Exception as e: return {"error": str(e)} - issues = { + issues = { "duplicate_imports": check_duplicate_imports(content, file_path), "bare_excepts": check_bare_excepts(content, file_path), "line_length": check_line_length(content, file_path), @@ -248,35 +281,37 @@ def analyze_file(file_path: Path) -> dict: return issues + def fix_file(file_path: Path, issues: dict) -> bool: """自动修复文件问题""" try: - content = file_path.read_text(encoding = 'utf-8') - original_content = content + content = file_path.read_text(encoding="utf-8") + original_content = content # 修复裸异常 if issues.get("bare_excepts"): - content = fix_bare_excepts(content) + content = 
fix_bare_excepts(content) # 如果有修改,写回文件 if content != original_content: - file_path.write_text(content, encoding = 'utf-8') + file_path.write_text(content, encoding="utf-8") return True return False except Exception as e: report["errors"].append(f"{file_path}: {e}") return False + def generate_report(all_issues: dict) -> str: """生成修复报告""" - lines = [] + lines = [] lines.append("# InsightFlow 代码审查报告") lines.append(f"\n生成时间: {__import__('datetime').datetime.now().isoformat()}") lines.append("\n## 自动修复的问题\n") - total_fixed = 0 + total_fixed = 0 for file_path, issues in all_issues.items(): - fixed_count = 0 + fixed_count = 0 for issue_type, issue_list in issues.items(): if issue_type in ["bare_excepts"] and issue_list: fixed_count += len(issue_list) @@ -293,9 +328,9 @@ def generate_report(all_issues: dict) -> str: lines.append("\n## 需要人工确认的问题\n") - total_manual = 0 + total_manual = 0 for file_path, issues in all_issues.items(): - manual_issues = [] + manual_issues = [] if issues.get("sql_injection"): manual_issues.extend(issues["sql_injection"]) @@ -305,7 +340,9 @@ def generate_report(all_issues: dict) -> str: if manual_issues: lines.append(f"### {file_path}") for issue in manual_issues: - lines.append(f"- **{issue['type']}** (第 {issue['line']} 行): {issue.get('content', '')}") + lines.append( + f"- **{issue['type']}** (第 {issue['line']} 行): {issue.get('content', '')}" + ) total_manual += len(manual_issues) if total_manual == 0: @@ -316,7 +353,7 @@ def generate_report(all_issues: dict) -> str: lines.append("\n## 代码风格建议\n") for file_path, issues in all_issues.items(): - style_issues = [] + style_issues = [] if issues.get("line_length"): style_issues.extend(issues["line_length"]) if issues.get("string_formatting"): @@ -331,7 +368,8 @@ def generate_report(all_issues: dict) -> str: if len(style_issues) > 5: lines.append(f"- ... 
还有 {len(style_issues) - 5} 个类似问题") - return '\n'.join(lines) + return "\n".join(lines) + def git_commit_and_push() -> None: """提交并推送代码""" @@ -339,31 +377,34 @@ def git_commit_and_push() -> None: os.chdir(PROJECT_PATH) # 检查是否有修改 - result = subprocess.run( - ["git", "status", "--porcelain"], - capture_output = True, - text = True + result = subprocess.run( + ["git", "status", "--porcelain"], capture_output=True, text=True ) if not result.stdout.strip(): return "没有需要提交的更改" # 添加所有修改 - subprocess.run(["git", "add", "-A"], check = True) + subprocess.run(["git", "add", "-A"], check=True) # 提交 subprocess.run( - ["git", "commit", "-m", """fix: auto-fix code issues (cron) + [ + "git", + "commit", + "-m", + """fix: auto-fix code issues (cron) - 修复重复导入/字段 - 修复异常处理 - 修复PEP8格式问题 -- 添加类型注解"""], - check = True +- 添加类型注解""", + ], + check=True, ) # 推送 - subprocess.run(["git", "push"], check = True) + subprocess.run(["git", "push"], check=True) return "✅ 提交并推送成功" except subprocess.CalledProcessError as e: @@ -371,42 +412,44 @@ def git_commit_and_push() -> None: except Exception as e: return f"❌ 错误: {e}" + def main() -> None: """主函数""" print("🔍 开始代码审查...") - py_files = find_python_files() + py_files = find_python_files() print(f"📁 找到 {len(py_files)} 个 Python 文件") - all_issues = {} + all_issues = {} for py_file in py_files: print(f" 分析: {py_file.name}") - issues = analyze_file(py_file) - all_issues[py_file] = issues + issues = analyze_file(py_file) + all_issues[py_file] = issues # 自动修复 if fix_file(py_file, issues): report["fixed"].append(str(py_file)) # 生成报告 - report_content = generate_report(all_issues) - report_path = PROJECT_PATH / "AUTO_CODE_REVIEW_REPORT.md" - report_path.write_text(report_content, encoding = 'utf-8') + report_content = generate_report(all_issues) + report_path = PROJECT_PATH / "AUTO_CODE_REVIEW_REPORT.md" + report_path.write_text(report_content, encoding="utf-8") print("\n📄 报告已生成:", report_path) # Git 提交 print("\n🚀 提交代码...") - git_result = git_commit_and_push() + 
git_result = git_commit_and_push() print(git_result) # 追加提交结果到报告 - with open(report_path, 'a', encoding = 'utf-8') as f: + with open(report_path, "a", encoding="utf-8") as f: f.write(f"\n\n## Git 提交结果\n\n{git_result}\n") print("\n✅ 代码审查完成!") return report_content + if __name__ == "__main__": main() diff --git a/code_reviewer.py b/code_reviewer.py index 90f84bd..3b8cd57 100644 --- a/code_reviewer.py +++ b/code_reviewer.py @@ -4,7 +4,9 @@ InsightFlow 代码审查与自动修复脚本 """ import ast +import os import re +import subprocess from pathlib import Path @@ -15,25 +17,25 @@ class CodeIssue: line_no: int, issue_type: str, message: str, - severity: str = "info", + severity: str = "info", ) -> None: - self.file_path = file_path - self.line_no = line_no - self.issue_type = issue_type - self.message = message - self.severity = severity # info, warning, error - self.fixed = False + self.file_path = file_path + self.line_no = line_no + self.issue_type = issue_type + self.message = message + self.severity = severity # info, warning, error + self.fixed = False - def __repr__(self) -> None: + def __repr__(self) -> str: return f"{self.severity.upper()}: {self.file_path}:{self.line_no} - {self.issue_type}: {self.message}" class CodeReviewer: def __init__(self, base_path: str) -> None: - self.base_path = Path(base_path) - self.issues: list[CodeIssue] = [] - self.fixed_issues: list[CodeIssue] = [] - self.manual_review_issues: list[CodeIssue] = [] + self.base_path = Path(base_path) + self.issues: list[CodeIssue] = [] + self.fixed_issues: list[CodeIssue] = [] + self.manual_review_issues: list[CodeIssue] = [] def scan_all(self) -> None: """扫描所有 Python 文件""" @@ -45,14 +47,14 @@ class CodeReviewer: def scan_file(self, file_path: Path) -> None: """扫描单个文件""" try: - with open(file_path, "r", encoding = "utf-8") as f: - content = f.read() - lines = content.split("\n") + with open(file_path, "r", encoding="utf-8") as f: + content = f.read() + lines = content.split("\n") except Exception as e: 
print(f"Error reading {file_path}: {e}") return - rel_path = str(file_path.relative_to(self.base_path)) + rel_path = str(file_path.relative_to(self.base_path)) # 1. 检查裸异常捕获 self._check_bare_exceptions(content, lines, rel_path) @@ -92,7 +94,7 @@ class CodeReviewer: # 跳过有注释说明的情况 if "# noqa" in line or "# intentional" in line.lower(): continue - issue = CodeIssue( + issue = CodeIssue( file_path, i, "bare_exception", @@ -105,17 +107,17 @@ class CodeReviewer: self, content: str, lines: list[str], file_path: str ) -> None: """检查重复导入""" - imports = {} + imports = {} for i, line in enumerate(lines, 1): - match = re.match(r"^(?:from\s+(\S+)\s+)?import\s+(.+)$", line.strip()) + match = re.match(r"^(?:from\s+(\S+)\s+)?import\s+(.+)$", line.strip()) if match: - module = match.group(1) or "" - names = match.group(2).split(", ") + module = match.group(1) or "" + names = match.group(2).split(", ") for name in names: - name = name.strip().split()[0] # 处理 'as' 别名 - key = f"{module}.{name}" if module else name + name = name.strip().split()[0] # 处理 'as' 别名 + key = f"{module}.{name}" if module else name if key in imports: - issue = CodeIssue( + issue = CodeIssue( file_path, i, "duplicate_import", @@ -123,7 +125,7 @@ class CodeReviewer: "warning", ) self.issues.append(issue) - imports[key] = i + imports[key] = i def _check_pep8_issues( self, content: str, lines: list[str], file_path: str @@ -132,7 +134,7 @@ class CodeReviewer: for i, line in enumerate(lines, 1): # 行长度超过 120 if len(line) > 120: - issue = CodeIssue( + issue = CodeIssue( file_path, i, "line_too_long", @@ -143,7 +145,7 @@ class CodeReviewer: # 行尾空格 if line.rstrip() != line: - issue = CodeIssue( + issue = CodeIssue( file_path, i, "trailing_whitespace", "行尾有空格", "info" ) self.issues.append(issue) @@ -151,7 +153,7 @@ class CodeReviewer: # 多余的空行 if i > 1 and line.strip() == "" and lines[i - 2].strip() == "": if i < len(lines) and lines[i].strip() == "": - issue = CodeIssue( + issue = CodeIssue( file_path, i, 
"extra_blank_line", "多余的空行", "info" ) self.issues.append(issue) @@ -161,23 +163,23 @@ class CodeReviewer: ) -> None: """检查未使用的导入""" try: - tree = ast.parse(content) + tree = ast.parse(content) except SyntaxError: return - imported_names = {} - used_names = set() + imported_names = {} + used_names = set() for node in ast.walk(tree): if isinstance(node, ast.Import): for alias in node.names: - name = alias.asname if alias.asname else alias.name - imported_names[name] = node.lineno + name = alias.asname if alias.asname else alias.name + imported_names[name] = node.lineno elif isinstance(node, ast.ImportFrom): for alias in node.names: - name = alias.asname if alias.asname else alias.name + name = alias.asname if alias.asname else alias.name if name != "*": - imported_names[name] = node.lineno + imported_names[name] = node.lineno elif isinstance(node, ast.Name): used_names.add(node.id) @@ -186,7 +188,7 @@ class CodeReviewer: # 排除一些常见例外 if name in ["annotations", "TYPE_CHECKING"]: continue - issue = CodeIssue( + issue = CodeIssue( file_path, lineno, "unused_import", f"未使用的导入: {name}", "info" ) self.issues.append(issue) @@ -195,20 +197,20 @@ class CodeReviewer: self, content: str, lines: list[str], file_path: str ) -> None: """检查混合字符串格式化""" - has_fstring = False - has_percent = False - has_format = False + has_fstring = False + has_percent = False + has_format = False for i, line in enumerate(lines, 1): if re.search(r'f["\']', line): - has_fstring = True + has_fstring = True if re.search(r"%[sdfr]", line) and not re.search(r"\d+%", line): - has_percent = True + has_percent = True if ".format(" in line: - has_format = True + has_format = True if has_fstring and (has_percent or has_format): - issue = CodeIssue( + issue = CodeIssue( file_path, 0, "mixed_formatting", @@ -222,7 +224,7 @@ class CodeReviewer: ) -> None: """检查魔法数字""" # 常见的魔法数字模式 - magic_patterns = [ + magic_patterns = [ (r" = \s*(\d{3, })\s*[^:]", "可能的魔法数字"), (r"timeout\s* = \s*(\d+)", "timeout 魔法数字"), (r"limit\s* 
= \s*(\d+)", "limit 魔法数字"), @@ -231,16 +233,16 @@ class CodeReviewer: for i, line in enumerate(lines, 1): # 跳过注释和字符串 - code_part = line.split("#")[0] + code_part = line.split("#")[0] if not code_part.strip(): continue for pattern, msg in magic_patterns: if re.search(pattern, code_part, re.IGNORECASE): # 排除常见的合理数字 - match = re.search(r"(\d{3, })", code_part) + match = re.search(r"(\d{3, })", code_part) if match: - num = int(match.group(1)) + num = int(match.group(1)) if num in [ 200, 404, @@ -257,7 +259,7 @@ class CodeReviewer: 8000, ]: continue - issue = CodeIssue( + issue = CodeIssue( file_path, i, "magic_number", f"{msg}: {num}", "info" ) self.issues.append(issue) @@ -272,7 +274,7 @@ class CodeReviewer: r'execute\s*\(\s*f["\']', line ): if "?" not in line and "%s" in line: - issue = CodeIssue( + issue = CodeIssue( file_path, i, "sql_injection_risk", @@ -287,7 +289,7 @@ class CodeReviewer: """检查 CORS 配置""" for i, line in enumerate(lines, 1): if "allow_origins" in line and '["*"]' in line: - issue = CodeIssue( + issue = CodeIssue( file_path, i, "cors_wildcard", @@ -316,7 +318,7 @@ class CodeReviewer: if not re.search(r'["\']\*+["\']', line) and not re.search( r'["\']<[^"\']*>["\']', line ): - issue = CodeIssue( + issue = CodeIssue( file_path, i, "hardcoded_secret", @@ -328,62 +330,64 @@ class CodeReviewer: def auto_fix(self) -> None: """自动修复问题""" # 按文件分组问题 - issues_by_file: dict[str, list[CodeIssue]] = {} + issues_by_file: dict[str, list[CodeIssue]] = {} for issue in self.issues: if issue.file_path not in issues_by_file: - issues_by_file[issue.file_path] = [] + issues_by_file[issue.file_path] = [] issues_by_file[issue.file_path].append(issue) for file_path, issues in issues_by_file.items(): - full_path = self.base_path / file_path + full_path = self.base_path / file_path if not full_path.exists(): continue try: - with open(full_path, "r", encoding = "utf-8") as f: - content = f.read() - lines = content.split("\n") + with open(full_path, "r", encoding="utf-8") as f: 
+                    content = f.read()
+                    lines = content.split("\n")
             except Exception as e:
                 print(f"Error reading {full_path}: {e}")
                 continue
 
-            original_lines = lines.copy()
+            original_lines = lines.copy()
 
             # 修复行尾空格
             for issue in issues:
                 if issue.issue_type == "trailing_whitespace":
-                    idx = issue.line_no - 1
+                    idx = issue.line_no - 1
                     if 0 <= idx < len(lines):
-                        lines[idx] = lines[idx].rstrip()
-                        issue.fixed = True
+                        lines[idx] = lines[idx].rstrip()
+                        issue.fixed = True
 
             # 修复裸异常
             for issue in issues:
                 if issue.issue_type == "bare_exception":
-                    idx = issue.line_no - 1
+                    idx = issue.line_no - 1
                     if 0 <= idx < len(lines):
-                        line = lines[idx]
+                        line = lines[idx]
                         # 将 except Exception: 改为 except Exception:
                         if re.search(r"except\s*:\s*$", line.strip()):
-                            lines[idx] = line.replace("except Exception:", "except Exception:")
-                            issue.fixed = True
+                            lines[idx] = line.replace(
+                                "except Exception:", "except Exception:"
+                            )
+                            issue.fixed = True
                         elif re.search(r"except\s+Exception\s*:\s*$", line.strip()):
                             # 已经是 Exception,但可能需要更具体
                             pass
 
             # 如果文件有修改,写回
             if lines != original_lines:
-                with open(full_path, "w", encoding = "utf-8") as f:
+                with open(full_path, "w", encoding="utf-8") as f:
                     f.write("\n".join(lines))
                 print(f"Fixed issues in {file_path}")
 
         # 移动到已修复列表
-        self.fixed_issues = [i for i in self.issues if i.fixed]
-        self.issues = [i for i in self.issues if not i.fixed]
+        self.fixed_issues = [i for i in self.issues if i.fixed]
+        self.issues = [i for i in self.issues if not i.fixed]
 
     def generate_report(self) -> str:
         """生成审查报告"""
-        report = []
+        report = []
         report.append("# InsightFlow 代码审查报告")
         report.append(f"\n扫描路径: {self.base_path}")
         report.append(f"扫描时间: {__import__('datetime').datetime.now().isoformat()}")
@@ -422,8 +426,8 @@ class CodeReviewer:
 
 
 def main() -> None:
-    base_path = "/root/.openclaw/workspace/projects/insightflow/backend"
-    reviewer = CodeReviewer(base_path)
+    base_path = "/root/.openclaw/workspace/projects/insightflow/backend"
+    reviewer = CodeReviewer(base_path)
 
     print("开始扫描代码...")
     reviewer.scan_all()
@@ -437,9 +441,9 @@ def main() -> None:
     print(f"\n已修复 {len(reviewer.fixed_issues)} 个问题")
 
     # 生成报告
-    report = reviewer.generate_report()
-    report_path = Path(base_path).parent / "CODE_REVIEW_REPORT.md"
-    with open(report_path, "w", encoding = "utf-8") as f:
+    report = reviewer.generate_report()
+    report_path = Path(base_path).parent / "CODE_REVIEW_REPORT.md"
+    with open(report_path, "w", encoding="utf-8") as f:
         f.write(report)
 
     print(f"\n报告已保存到: {report_path}")