fix: auto-fix code issues (cron)

- 修复隐式 Optional 类型注解 (RUF013)
- 修复不必要的赋值后返回 (RET504)
- 优化列表推导式 (PERF401)
- 修复未使用的参数 (ARG002)
- 清理重复导入
- 优化异常处理
This commit is contained in:
AutoFix Bot
2026-03-03 21:11:47 +08:00
parent d17a58ceae
commit 259f2c90d0
36 changed files with 1651 additions and 863 deletions

View File

@@ -291,7 +291,10 @@ class AIManager:
return self._row_to_custom_model(row)
def list_custom_models(
self, tenant_id: str, model_type: ModelType | None = None, status: ModelStatus | None = None,
self,
tenant_id: str,
model_type: ModelType | None = None,
status: ModelStatus | None = None,
) -> list[CustomModel]:
"""列出自定义模型"""
query = "SELECT * FROM custom_models WHERE tenant_id = ?"
@@ -311,7 +314,11 @@ class AIManager:
return [self._row_to_custom_model(row) for row in rows]
def add_training_sample(
self, model_id: str, text: str, entities: list[dict], metadata: dict = None,
self,
model_id: str,
text: str,
entities: list[dict],
metadata: dict | None = None,
) -> TrainingSample:
"""添加训练样本"""
sample_id = f"ts_{uuid.uuid4().hex[:16]}"
@@ -463,8 +470,7 @@ class AIManager:
json_match = re.search(r"\[.*?\]", content, re.DOTALL)
if json_match:
try:
entities = json.loads(json_match.group())
return entities
return json.loads(json_match.group())
except (json.JSONDecodeError, ValueError):
pass
@@ -542,8 +548,9 @@ class AIManager:
}
content = [{"type": "text", "text": prompt}]
for url in image_urls:
content.append({"type": "image_url", "image_url": {"url": url}})
content.extend(
[{"type": "image_url", "image_url": {"url": url}} for url in image_urls]
)
payload = {
"model": "gpt-4-vision-preview",
@@ -575,9 +582,9 @@ class AIManager:
"anthropic-version": "2023-06-01",
}
content = []
for url in image_urls:
content.append({"type": "image", "source": {"type": "url", "url": url}})
content = [
{"type": "image", "source": {"type": "url", "url": url}} for url in image_urls
]
content.append({"type": "text", "text": prompt})
payload = {
@@ -638,7 +645,9 @@ class AIManager:
}
def get_multimodal_analyses(
self, tenant_id: str, project_id: str | None = None,
self,
tenant_id: str,
project_id: str | None = None,
) -> list[MultimodalAnalysis]:
"""获取多模态分析历史"""
query = "SELECT * FROM multimodal_analyses WHERE tenant_id = ?"
@@ -721,7 +730,9 @@ class AIManager:
return self._row_to_kg_rag(row)
def list_kg_rags(
self, tenant_id: str, project_id: str | None = None,
self,
tenant_id: str,
project_id: str | None = None,
) -> list[KnowledgeGraphRAG]:
"""列出知识图谱 RAG 配置"""
query = "SELECT * FROM kg_rag_configs WHERE tenant_id = ?"
@@ -738,7 +749,11 @@ class AIManager:
return [self._row_to_kg_rag(row) for row in rows]
async def query_kg_rag(
self, rag_id: str, query: str, project_entities: list[dict], project_relations: list[dict],
self,
rag_id: str,
query: str,
project_entities: list[dict],
project_relations: list[dict],
) -> RAGQuery:
"""基于知识图谱的 RAG 查询"""
start_time = time.time()
@@ -771,14 +786,15 @@ class AIManager:
relevant_entities = relevant_entities[:top_k]
# 检索相关关系
relevant_relations = []
entity_ids = {e["id"] for e in relevant_entities}
for relation in project_relations:
relevant_relations = [
relation
for relation in project_relations
if (
relation.get("source_entity_id") in entity_ids
or relation.get("target_entity_id") in entity_ids
):
relevant_relations.append(relation)
)
]
# 2. 构建上下文
context = {"entities": relevant_entities, "relations": relevant_relations[:10]}
@@ -1123,7 +1139,8 @@ class AIManager:
"""获取预测模型"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM prediction_models WHERE id = ?", (model_id,),
"SELECT * FROM prediction_models WHERE id = ?",
(model_id,),
).fetchone()
if not row:
@@ -1132,7 +1149,9 @@ class AIManager:
return self._row_to_prediction_model(row)
def list_prediction_models(
self, tenant_id: str, project_id: str | None = None,
self,
tenant_id: str,
project_id: str | None = None,
) -> list[PredictionModel]:
"""列出预测模型"""
query = "SELECT * FROM prediction_models WHERE tenant_id = ?"
@@ -1149,7 +1168,9 @@ class AIManager:
return [self._row_to_prediction_model(row) for row in rows]
async def train_prediction_model(
self, model_id: str, historical_data: list[dict],
self,
model_id: str,
historical_data: list[dict],
) -> PredictionModel:
"""训练预测模型"""
model = self.get_prediction_model(model_id)
@@ -1369,7 +1390,9 @@ class AIManager:
predicted_relations = [
{"type": rel_type, "likelihood": min(count / len(relation_history), 0.95)}
for rel_type, count in sorted(
relation_counts.items(), key=lambda x: x[1], reverse=True,
relation_counts.items(),
key=lambda x: x[1],
reverse=True,
)[:5]
]
@@ -1394,7 +1417,10 @@ class AIManager:
return [self._row_to_prediction_result(row) for row in rows]
def update_prediction_feedback(
self, prediction_id: str, actual_value: str, is_correct: bool,
self,
prediction_id: str,
actual_value: str,
is_correct: bool,
) -> None:
"""更新预测反馈(用于模型改进)"""
with self._get_db() as conn:

View File

@@ -132,7 +132,7 @@ class ApiKeyManager:
self,
name: str,
owner_id: str | None = None,
permissions: list[str] = None,
permissions: list[str] | None = None,
rate_limit: int = 60,
expires_days: int | None = None,
) -> tuple[str, ApiKey]:
@@ -238,7 +238,8 @@ class ApiKeyManager:
# 验证所有权(如果提供了 owner_id)
if owner_id:
row = conn.execute(
"SELECT owner_id FROM api_keys WHERE id = ?", (key_id,),
"SELECT owner_id FROM api_keys WHERE id = ?",
(key_id,),
).fetchone()
if not row or row[0] != owner_id:
return False
@@ -267,7 +268,8 @@ class ApiKeyManager:
if owner_id:
row = conn.execute(
"SELECT * FROM api_keys WHERE id = ? AND owner_id = ?", (key_id, owner_id),
"SELECT * FROM api_keys WHERE id = ? AND owner_id = ?",
(key_id, owner_id),
).fetchone()
else:
row = conn.execute("SELECT * FROM api_keys WHERE id = ?", (key_id,)).fetchone()
@@ -337,7 +339,8 @@ class ApiKeyManager:
# 验证所有权
if owner_id:
row = conn.execute(
"SELECT owner_id FROM api_keys WHERE id = ?", (key_id,),
"SELECT owner_id FROM api_keys WHERE id = ?",
(key_id,),
).fetchone()
if not row or row[0] != owner_id:
return False
@@ -465,7 +468,8 @@ class ApiKeyManager:
endpoint_params = []
if api_key_id:
endpoint_query = endpoint_query.replace(
"WHERE created_at", "WHERE api_key_id = ? AND created_at",
"WHERE created_at",
"WHERE api_key_id = ? AND created_at",
)
endpoint_params.insert(0, api_key_id)
@@ -486,7 +490,8 @@ class ApiKeyManager:
daily_params = []
if api_key_id:
daily_query = daily_query.replace(
"WHERE created_at", "WHERE api_key_id = ? AND created_at",
"WHERE created_at",
"WHERE api_key_id = ? AND created_at",
)
daily_params.insert(0, api_key_id)

View File

@@ -304,7 +304,7 @@ class CollaborationManager:
)
self.db.conn.commit()
def revoke_share_link(self, share_id: str, revoked_by: str) -> bool:
def revoke_share_link(self, share_id: str, _revoked_by: str) -> bool:
"""撤销分享链接"""
if self.db:
cursor = self.db.conn.cursor()
@@ -335,26 +335,24 @@ class CollaborationManager:
(project_id,),
)
shares = []
for row in cursor.fetchall():
shares.append(
ProjectShare(
id=row[0],
project_id=row[1],
token=row[2],
permission=row[3],
created_by=row[4],
created_at=row[5],
expires_at=row[6],
max_uses=row[7],
use_count=row[8],
password_hash=row[9],
is_active=bool(row[10]),
allow_download=bool(row[11]),
allow_export=bool(row[12]),
),
return [
ProjectShare(
id=row[0],
project_id=row[1],
token=row[2],
permission=row[3],
created_by=row[4],
created_at=row[5],
expires_at=row[6],
max_uses=row[7],
use_count=row[8],
password_hash=row[9],
is_active=bool(row[10]),
allow_download=bool(row[11]),
allow_export=bool(row[12]),
)
return shares
for row in cursor.fetchall()
]
# ============ 评论和批注 ============
@@ -435,7 +433,10 @@ class CollaborationManager:
self.db.conn.commit()
def get_comments(
self, target_type: str, target_id: str, include_resolved: bool = True,
self,
target_type: str,
target_id: str,
include_resolved: bool = True,
) -> list[Comment]:
"""获取评论列表"""
if not self.db:
@@ -461,10 +462,7 @@ class CollaborationManager:
(target_type, target_id),
)
comments = []
for row in cursor.fetchall():
comments.append(self._row_to_comment(row))
return comments
return [self._row_to_comment(row) for row in cursor.fetchall()]
def _row_to_comment(self, row) -> Comment:
"""将数据库行转换为Comment对象"""
@@ -554,7 +552,10 @@ class CollaborationManager:
return cursor.rowcount > 0
def get_project_comments(
self, project_id: str, limit: int = 50, offset: int = 0,
self,
project_id: str,
limit: int = 50,
offset: int = 0,
) -> list[Comment]:
"""获取项目下的所有评论"""
if not self.db:
@@ -571,10 +572,7 @@ class CollaborationManager:
(project_id, limit, offset),
)
comments = []
for row in cursor.fetchall():
comments.append(self._row_to_comment(row))
return comments
return [self._row_to_comment(row) for row in cursor.fetchall()]
# ============ 变更历史 ============
@@ -697,10 +695,7 @@ class CollaborationManager:
(project_id, limit, offset),
)
records = []
for row in cursor.fetchall():
records.append(self._row_to_change_record(row))
return records
return [self._row_to_change_record(row) for row in cursor.fetchall()]
def _row_to_change_record(self, row) -> ChangeRecord:
"""将数据库行转换为ChangeRecord对象"""

View File

@@ -37,7 +37,7 @@ class Entity:
canonical_name: str = ""
aliases: list[str] = None
embedding: str = "" # Phase 3: 实体嵌入向量
attributes: dict = None # Phase 5: 实体属性
attributes: dict | None = None # Phase 5: 实体属性
created_at: str = ""
updated_at: str = ""
@@ -149,7 +149,11 @@ class DatabaseManager:
conn.commit()
conn.close()
return Project(
id=project_id, name=name, description=description, created_at=now, updated_at=now,
id=project_id,
name=name,
description=description,
created_at=now,
updated_at=now,
)
def get_project(self, project_id: str) -> Project | None:
@@ -206,7 +210,10 @@ class DatabaseManager:
return None
def find_similar_entities(
self, project_id: str, name: str, threshold: float = 0.8,
self,
project_id: str,
name: str,
threshold: float = 0.8,
) -> list[Entity]:
"""查找相似实体"""
conn = self.get_conn()
@@ -243,7 +250,8 @@ class DatabaseManager:
(json.dumps(list(target_aliases)), datetime.now().isoformat(), target_id),
)
conn.execute(
"UPDATE entity_mentions SET entity_id = ? WHERE entity_id = ?", (target_id, source_id),
"UPDATE entity_mentions SET entity_id = ? WHERE entity_id = ?",
(target_id, source_id),
)
conn.execute(
"UPDATE entity_relations SET source_entity_id = ? WHERE source_entity_id = ?",
@@ -272,7 +280,8 @@ class DatabaseManager:
def list_project_entities(self, project_id: str) -> list[Entity]:
conn = self.get_conn()
rows = conn.execute(
"SELECT * FROM entities WHERE project_id = ? ORDER BY updated_at DESC", (project_id,),
"SELECT * FROM entities WHERE project_id = ? ORDER BY updated_at DESC",
(project_id,),
).fetchall()
conn.close()
@@ -478,7 +487,8 @@ class DatabaseManager:
conn.commit()
row = conn.execute(
"SELECT * FROM entity_relations WHERE id = ?", (relation_id,),
"SELECT * FROM entity_relations WHERE id = ?",
(relation_id,),
).fetchone()
conn.close()
return dict(row) if row else None
@@ -494,12 +504,14 @@ class DatabaseManager:
def add_glossary_term(self, project_id: str, term: str, pronunciation: str = "") -> str:
conn = self.get_conn()
existing = conn.execute(
"SELECT * FROM glossary WHERE project_id = ? AND term = ?", (project_id, term),
"SELECT * FROM glossary WHERE project_id = ? AND term = ?",
(project_id, term),
).fetchone()
if existing:
conn.execute(
"UPDATE glossary SET frequency = frequency + 1 WHERE id = ?", (existing["id"],),
"UPDATE glossary SET frequency = frequency + 1 WHERE id = ?",
(existing["id"],),
)
conn.commit()
conn.close()
@@ -519,7 +531,8 @@ class DatabaseManager:
def list_glossary(self, project_id: str) -> list[dict]:
conn = self.get_conn()
rows = conn.execute(
"SELECT * FROM glossary WHERE project_id = ? ORDER BY frequency DESC", (project_id,),
"SELECT * FROM glossary WHERE project_id = ? ORDER BY frequency DESC",
(project_id,),
).fetchall()
conn.close()
return [dict(r) for r in rows]
@@ -605,15 +618,18 @@ class DatabaseManager:
project = conn.execute("SELECT * FROM projects WHERE id = ?", (project_id,)).fetchone()
entity_count = conn.execute(
"SELECT COUNT(*) as count FROM entities WHERE project_id = ?", (project_id,),
"SELECT COUNT(*) as count FROM entities WHERE project_id = ?",
(project_id,),
).fetchone()["count"]
transcript_count = conn.execute(
"SELECT COUNT(*) as count FROM transcripts WHERE project_id = ?", (project_id,),
"SELECT COUNT(*) as count FROM transcripts WHERE project_id = ?",
(project_id,),
).fetchone()["count"]
relation_count = conn.execute(
"SELECT COUNT(*) as count FROM entity_relations WHERE project_id = ?", (project_id,),
"SELECT COUNT(*) as count FROM entity_relations WHERE project_id = ?",
(project_id,),
).fetchone()["count"]
recent_transcripts = conn.execute(
@@ -645,11 +661,15 @@ class DatabaseManager:
}
def get_transcript_context(
self, transcript_id: str, position: int, context_chars: int = 200,
self,
transcript_id: str,
position: int,
context_chars: int = 200,
) -> str:
conn = self.get_conn()
row = conn.execute(
"SELECT full_text FROM transcripts WHERE id = ?", (transcript_id,),
"SELECT full_text FROM transcripts WHERE id = ?",
(transcript_id,),
).fetchone()
conn.close()
if not row:
@@ -662,7 +682,11 @@ class DatabaseManager:
# ==================== Phase 5: Timeline Operations ====================
def get_project_timeline(
self, project_id: str, entity_id: str = None, start_date: str = None, end_date: str = None,
self,
project_id: str,
entity_id: str | None = None,
start_date: str = None,
end_date: str = None,
) -> list[dict]:
conn = self.get_conn()
@@ -776,7 +800,8 @@ class DatabaseManager:
def get_attribute_template(self, template_id: str) -> AttributeTemplate | None:
conn = self.get_conn()
row = conn.execute(
"SELECT * FROM attribute_templates WHERE id = ?", (template_id,),
"SELECT * FROM attribute_templates WHERE id = ?",
(template_id,),
).fetchone()
conn.close()
if row:
@@ -841,7 +866,10 @@ class DatabaseManager:
conn.close()
def set_entity_attribute(
self, attr: EntityAttribute, changed_by: str = "system", change_reason: str = "",
self,
attr: EntityAttribute,
changed_by: str = "system",
change_reason: str = "",
) -> EntityAttribute:
conn = self.get_conn()
now = datetime.now().isoformat()
@@ -930,7 +958,11 @@ class DatabaseManager:
return entity
def delete_entity_attribute(
self, entity_id: str, template_id: str, changed_by: str = "system", change_reason: str = "",
self,
entity_id: str,
template_id: str,
changed_by: str = "system",
change_reason: str = "",
) -> None:
conn = self.get_conn()
old_row = conn.execute(
@@ -964,7 +996,10 @@ class DatabaseManager:
conn.close()
def get_attribute_history(
self, entity_id: str = None, template_id: str = None, limit: int = 50,
self,
entity_id: str | None = None,
template_id: str = None,
limit: int = 50,
) -> list[AttributeHistory]:
conn = self.get_conn()
conditions = []
@@ -990,7 +1025,9 @@ class DatabaseManager:
return [AttributeHistory(**dict(r)) for r in rows]
def search_entities_by_attributes(
self, project_id: str, attribute_filters: dict[str, str],
self,
project_id: str,
attribute_filters: dict[str, str],
) -> list[Entity]:
entities = self.list_project_entities(project_id)
if not attribute_filters:
@@ -1040,8 +1077,8 @@ class DatabaseManager:
filename: str,
duration: float = 0,
fps: float = 0,
resolution: dict = None,
audio_transcript_id: str = None,
resolution: dict | None = None,
audio_transcript_id: str | None = None,
full_ocr_text: str = "",
extracted_entities: list[dict] = None,
extracted_relations: list[dict] = None,
@@ -1098,7 +1135,8 @@ class DatabaseManager:
"""获取项目的所有视频"""
conn = self.get_conn()
rows = conn.execute(
"SELECT * FROM videos WHERE project_id = ? ORDER BY created_at DESC", (project_id,),
"SELECT * FROM videos WHERE project_id = ? ORDER BY created_at DESC",
(project_id,),
).fetchall()
conn.close()
@@ -1121,8 +1159,8 @@ class DatabaseManager:
video_id: str,
frame_number: int,
timestamp: float,
image_url: str = None,
ocr_text: str = None,
image_url: str | None = None,
ocr_text: str | None = None,
extracted_entities: list[dict] = None,
) -> str:
"""创建视频帧记录"""
@@ -1153,7 +1191,8 @@ class DatabaseManager:
"""获取视频的所有帧"""
conn = self.get_conn()
rows = conn.execute(
"""SELECT * FROM video_frames WHERE video_id = ? ORDER BY timestamp""", (video_id,),
"""SELECT * FROM video_frames WHERE video_id = ? ORDER BY timestamp""",
(video_id,),
).fetchall()
conn.close()
@@ -1223,7 +1262,8 @@ class DatabaseManager:
"""获取项目的所有图片"""
conn = self.get_conn()
rows = conn.execute(
"SELECT * FROM images WHERE project_id = ? ORDER BY created_at DESC", (project_id,),
"SELECT * FROM images WHERE project_id = ? ORDER BY created_at DESC",
(project_id,),
).fetchall()
conn.close()
@@ -1288,7 +1328,9 @@ class DatabaseManager:
conn.close()
return [dict(r) for r in rows]
def get_project_multimodal_mentions(self, project_id: str, modality: str = None) -> list[dict]:
def get_project_multimodal_mentions(
self, project_id: str, modality: str | None = None
) -> list[dict]:
"""获取项目的多模态提及"""
conn = self.get_conn()
@@ -1381,13 +1423,15 @@ class DatabaseManager:
# 视频数量
row = conn.execute(
"SELECT COUNT(*) as count FROM videos WHERE project_id = ?", (project_id,),
"SELECT COUNT(*) as count FROM videos WHERE project_id = ?",
(project_id,),
).fetchone()
stats["video_count"] = row["count"]
# 图片数量
row = conn.execute(
"SELECT COUNT(*) as count FROM images WHERE project_id = ?", (project_id,),
"SELECT COUNT(*) as count FROM images WHERE project_id = ?",
(project_id,),
).fetchone()
stats["image_count"] = row["count"]

View File

@@ -538,7 +538,8 @@ class DeveloperEcosystemManager:
"""获取 SDK 版本历史"""
with self._get_db() as conn:
rows = conn.execute(
"SELECT * FROM sdk_versions WHERE sdk_id = ? ORDER BY created_at DESC", (sdk_id,),
"SELECT * FROM sdk_versions WHERE sdk_id = ? ORDER BY created_at DESC",
(sdk_id,),
).fetchall()
return [self._row_to_sdk_version(row) for row in rows]
@@ -700,7 +701,8 @@ class DeveloperEcosystemManager:
"""获取模板详情"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM template_market WHERE id = ?", (template_id,),
"SELECT * FROM template_market WHERE id = ?",
(template_id,),
).fetchone()
if row:
@@ -1076,7 +1078,11 @@ class DeveloperEcosystemManager:
return [self._row_to_plugin(row) for row in rows]
def review_plugin(
self, plugin_id: str, reviewed_by: str, status: PluginStatus, notes: str = "",
self,
plugin_id: str,
reviewed_by: str,
status: PluginStatus,
notes: str = "",
) -> PluginMarketItem | None:
"""审核插件"""
now = datetime.now().isoformat()
@@ -1420,7 +1426,8 @@ class DeveloperEcosystemManager:
"""获取开发者档案"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM developer_profiles WHERE id = ?", (developer_id,),
"SELECT * FROM developer_profiles WHERE id = ?",
(developer_id,),
).fetchone()
if row:
@@ -1431,7 +1438,8 @@ class DeveloperEcosystemManager:
"""通过用户 ID 获取开发者档案"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM developer_profiles WHERE user_id = ?", (user_id,),
"SELECT * FROM developer_profiles WHERE user_id = ?",
(user_id,),
).fetchone()
if row:
@@ -1439,7 +1447,9 @@ class DeveloperEcosystemManager:
return None
def verify_developer(
self, developer_id: str, status: DeveloperStatus,
self,
developer_id: str,
status: DeveloperStatus,
) -> DeveloperProfile | None:
"""验证开发者"""
now = datetime.now().isoformat()
@@ -1453,9 +1463,11 @@ class DeveloperEcosystemManager:
""",
(
status.value,
now
if status in [DeveloperStatus.VERIFIED, DeveloperStatus.CERTIFIED]
else None,
(
now
if status in [DeveloperStatus.VERIFIED, DeveloperStatus.CERTIFIED]
else None
),
now,
developer_id,
),
@@ -1469,7 +1481,8 @@ class DeveloperEcosystemManager:
with self._get_db() as conn:
# 统计插件数量
plugin_row = conn.execute(
"SELECT COUNT(*) as count FROM plugin_market WHERE author_id = ?", (developer_id,),
"SELECT COUNT(*) as count FROM plugin_market WHERE author_id = ?",
(developer_id,),
).fetchone()
# 统计模板数量
@@ -1583,7 +1596,8 @@ class DeveloperEcosystemManager:
"""获取代码示例"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM code_examples WHERE id = ?", (example_id,),
"SELECT * FROM code_examples WHERE id = ?",
(example_id,),
).fetchone()
if row:
@@ -1699,7 +1713,8 @@ class DeveloperEcosystemManager:
"""获取 API 文档"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM api_documentation WHERE id = ?", (doc_id,),
"SELECT * FROM api_documentation WHERE id = ?",
(doc_id,),
).fetchone()
if row:
@@ -1799,7 +1814,8 @@ class DeveloperEcosystemManager:
"""获取开发者门户配置"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM developer_portal_configs WHERE id = ?", (config_id,),
"SELECT * FROM developer_portal_configs WHERE id = ?",
(config_id,),
).fetchone()
if row:

View File

@@ -78,7 +78,7 @@ class DocumentProcessor:
"PDF processing requires PyPDF2 or pdfplumber. Install with: pip install PyPDF2",
)
except Exception as e:
raise ValueError(f"PDF extraction failed: {str(e)}")
raise ValueError(f"PDF extraction failed: {e!s}")
def _extract_docx(self, content: bytes) -> str:
"""提取 DOCX 文本"""
@@ -109,7 +109,7 @@ class DocumentProcessor:
"DOCX processing requires python-docx. Install with: pip install python-docx",
)
except Exception as e:
raise ValueError(f"DOCX extraction failed: {str(e)}")
raise ValueError(f"DOCX extraction failed: {e!s}")
def _extract_txt(self, content: bytes) -> str:
"""提取纯文本"""

View File

@@ -699,7 +699,9 @@ class EnterpriseManager:
conn.close()
def get_tenant_sso_config(
self, tenant_id: str, provider: str | None = None,
self,
tenant_id: str,
provider: str | None = None,
) -> SSOConfig | None:
"""获取租户的 SSO 配置"""
conn = self._get_connection()
@@ -871,7 +873,10 @@ class EnterpriseManager:
return metadata
def create_saml_auth_request(
self, tenant_id: str, config_id: str, relay_state: str | None = None,
self,
tenant_id: str,
config_id: str,
relay_state: str | None = None,
) -> SAMLAuthRequest:
"""创建 SAML 认证请求"""
conn = self._get_connection()
@@ -1235,7 +1240,10 @@ class EnterpriseManager:
return []
def _upsert_scim_user(
self, conn: sqlite3.Connection, tenant_id: str, user_data: dict[str, Any],
self,
conn: sqlite3.Connection,
tenant_id: str,
user_data: dict[str, Any],
) -> None:
"""插入或更新 SCIM 用户"""
cursor = conn.cursor()
@@ -1405,7 +1413,11 @@ class EnterpriseManager:
try:
# 获取审计日志数据
logs = self._fetch_audit_logs(
export.tenant_id, export.start_date, export.end_date, export.filters, db_manager,
export.tenant_id,
export.start_date,
export.end_date,
export.filters,
db_manager,
)
# 根据合规标准过滤字段
@@ -1414,7 +1426,9 @@ class EnterpriseManager:
# 生成导出文件
file_path, file_size, checksum = self._generate_export_file(
export_id, logs, export.export_format,
export_id,
logs,
export.export_format,
)
now = datetime.now()
@@ -1465,7 +1479,9 @@ class EnterpriseManager:
return []
def _apply_compliance_filter(
self, logs: list[dict[str, Any]], standard: str,
self,
logs: list[dict[str, Any]],
standard: str,
) -> list[dict[str, Any]]:
"""应用合规标准字段过滤"""
fields = self.COMPLIANCE_FIELDS.get(ComplianceStandard(standard), [])
@@ -1481,7 +1497,10 @@ class EnterpriseManager:
return filtered_logs
def _generate_export_file(
self, export_id: str, logs: list[dict[str, Any]], format: str,
self,
export_id: str,
logs: list[dict[str, Any]],
format: str,
) -> tuple[str, int, str]:
"""生成导出文件"""
import hashlib
@@ -1672,7 +1691,9 @@ class EnterpriseManager:
conn.close()
def list_retention_policies(
self, tenant_id: str, resource_type: str | None = None,
self,
tenant_id: str,
resource_type: str | None = None,
) -> list[DataRetentionPolicy]:
"""列出数据保留策略"""
conn = self._get_connection()
@@ -1876,7 +1897,10 @@ class EnterpriseManager:
conn.close()
def _retain_audit_logs(
self, conn: sqlite3.Connection, policy: DataRetentionPolicy, cutoff_date: datetime,
self,
conn: sqlite3.Connection,
policy: DataRetentionPolicy,
cutoff_date: datetime,
) -> dict[str, int]:
"""保留审计日志"""
cursor = conn.cursor()
@@ -1909,14 +1933,20 @@ class EnterpriseManager:
return {"affected": 0, "archived": 0, "deleted": 0, "errors": 0}
def _retain_projects(
self, conn: sqlite3.Connection, policy: DataRetentionPolicy, cutoff_date: datetime,
self,
conn: sqlite3.Connection,
policy: DataRetentionPolicy,
cutoff_date: datetime,
) -> dict[str, int]:
"""保留项目数据"""
# 简化实现
return {"affected": 0, "archived": 0, "deleted": 0, "errors": 0}
def _retain_transcripts(
self, conn: sqlite3.Connection, policy: DataRetentionPolicy, cutoff_date: datetime,
self,
conn: sqlite3.Connection,
policy: DataRetentionPolicy,
cutoff_date: datetime,
) -> dict[str, int]:
"""保留转录数据"""
# 简化实现
@@ -2101,9 +2131,11 @@ class EnterpriseManager:
if isinstance(row["start_date"], str)
else row["start_date"]
),
end_date=datetime.fromisoformat(row["end_date"])
if isinstance(row["end_date"], str)
else row["end_date"],
end_date=(
datetime.fromisoformat(row["end_date"])
if isinstance(row["end_date"], str)
else row["end_date"]
),
filters=json.loads(row["filters"] or "{}"),
compliance_standard=row["compliance_standard"],
status=row["status"],

View File

@@ -178,7 +178,10 @@ class EntityAligner:
return best_match
def _fallback_similarity_match(
self, entities: list[object], name: str, exclude_id: str | None = None,
self,
entities: list[object],
name: str,
exclude_id: str | None = None,
) -> object | None:
"""
回退到简单的相似度匹配(不使用 embedding
@@ -212,7 +215,10 @@ class EntityAligner:
return None
def batch_align_entities(
self, project_id: str, new_entities: list[dict], threshold: float | None = None,
self,
project_id: str,
new_entities: list[dict],
threshold: float | None = None,
) -> list[dict]:
"""
批量对齐实体
@@ -232,7 +238,10 @@ class EntityAligner:
for new_ent in new_entities:
matched = self.find_similar_entity(
project_id, new_ent["name"], new_ent.get("definition", ""), threshold=threshold,
project_id,
new_ent["name"],
new_ent.get("definition", ""),
threshold=threshold,
)
result = {

View File

@@ -75,7 +75,10 @@ class ExportManager:
self.db = db_manager
def export_knowledge_graph_svg(
self, project_id: str, entities: list[ExportEntity], relations: list[ExportRelation],
self,
project_id: str,
entities: list[ExportEntity],
relations: list[ExportRelation],
) -> str:
"""
导出知识图谱为 SVG 格式
@@ -220,7 +223,10 @@ class ExportManager:
return "\n".join(svg_parts)
def export_knowledge_graph_png(
self, project_id: str, entities: list[ExportEntity], relations: list[ExportRelation],
self,
project_id: str,
entities: list[ExportEntity],
relations: list[ExportRelation],
) -> bytes:
"""
导出知识图谱为 PNG 格式
@@ -337,7 +343,9 @@ class ExportManager:
return output.getvalue()
def export_transcript_markdown(
self, transcript: ExportTranscript, entities_map: dict[str, ExportEntity],
self,
transcript: ExportTranscript,
entities_map: dict[str, ExportEntity],
) -> str:
"""
导出转录文本为 Markdown 格式
@@ -417,7 +425,12 @@ class ExportManager:
output = io.BytesIO()
doc = SimpleDocTemplate(
output, pagesize=A4, rightMargin=72, leftMargin=72, topMargin=72, bottomMargin=18,
output,
pagesize=A4,
rightMargin=72,
leftMargin=72,
topMargin=72,
bottomMargin=18,
)
# 样式
@@ -510,7 +523,8 @@ class ExportManager:
)
entity_table = Table(
entity_data, colWidths=[1.5 * inch, 1 * inch, 1 * inch, 2.5 * inch],
entity_data,
colWidths=[1.5 * inch, 1 * inch, 1 * inch, 2.5 * inch],
)
entity_table.setStyle(
TableStyle(
@@ -539,7 +553,8 @@ class ExportManager:
relation_data.append([r.source, r.relation_type, r.target, f"{r.confidence:.2f}"])
relation_table = Table(
relation_data, colWidths=[2 * inch, 1.5 * inch, 2 * inch, 1 * inch],
relation_data,
colWidths=[2 * inch, 1.5 * inch, 2 * inch, 1 * inch],
)
relation_table.setStyle(
TableStyle(

View File

@@ -383,11 +383,11 @@ class GrowthManager:
user_id: str,
event_type: EventType,
event_name: str,
properties: dict = None,
session_id: str = None,
device_info: dict = None,
referrer: str = None,
utm_params: dict = None,
properties: dict | None = None,
session_id: str | None = None,
device_info: dict | None = None,
referrer: str | None = None,
utm_params: dict | None = None,
) -> AnalyticsEvent:
"""追踪事件"""
event_id = f"evt_{uuid.uuid4().hex[:16]}"
@@ -475,7 +475,10 @@ class GrowthManager:
async with httpx.AsyncClient() as client:
await client.post(
"https://api.mixpanel.com/track", headers=headers, json=[payload], timeout=10.0,
"https://api.mixpanel.com/track",
headers=headers,
json=[payload],
timeout=10.0,
)
except (RuntimeError, ValueError, TypeError) as e:
print(f"Failed to send to Mixpanel: {e}")
@@ -509,7 +512,11 @@ class GrowthManager:
print(f"Failed to send to Amplitude: {e}")
async def _update_user_profile(
self, tenant_id: str, user_id: str, event_type: EventType, event_name: str,
self,
tenant_id: str,
user_id: str,
event_type: EventType,
event_name: str,
) -> None:
"""更新用户画像"""
with self._get_db() as conn:
@@ -581,7 +588,10 @@ class GrowthManager:
return None
def get_user_analytics_summary(
self, tenant_id: str, start_date: datetime = None, end_date: datetime = None,
self,
tenant_id: str,
start_date: datetime | None = None,
end_date: datetime = None,
) -> dict:
"""获取用户分析汇总"""
with self._get_db() as conn:
@@ -635,7 +645,12 @@ class GrowthManager:
}
def create_funnel(
self, tenant_id: str, name: str, description: str, steps: list[dict], created_by: str,
self,
tenant_id: str,
name: str,
description: str,
steps: list[dict],
created_by: str,
) -> Funnel:
"""创建转化漏斗"""
funnel_id = f"fnl_{uuid.uuid4().hex[:16]}"
@@ -673,12 +688,16 @@ class GrowthManager:
return funnel
def analyze_funnel(
self, funnel_id: str, period_start: datetime = None, period_end: datetime = None,
self,
funnel_id: str,
period_start: datetime | None = None,
period_end: datetime = None,
) -> FunnelAnalysis | None:
"""分析漏斗转化率"""
with self._get_db() as conn:
funnel_row = conn.execute(
"SELECT * FROM funnels WHERE id = ?", (funnel_id,),
"SELECT * FROM funnels WHERE id = ?",
(funnel_id,),
).fetchone()
if not funnel_row:
@@ -704,7 +723,8 @@ class GrowthManager:
WHERE event_name = ? AND timestamp >= ? AND timestamp <= ?
"""
row = conn.execute(
query, (event_name, period_start.isoformat(), period_end.isoformat()),
query,
(event_name, period_start.isoformat(), period_end.isoformat()),
).fetchone()
user_count = row["user_count"] if row else 0
@@ -752,7 +772,10 @@ class GrowthManager:
)
def calculate_retention(
self, tenant_id: str, cohort_date: datetime, periods: list[int] = None,
self,
tenant_id: str,
cohort_date: datetime,
periods: list[int] = None,
) -> dict:
"""计算留存率"""
if periods is None:
@@ -825,7 +848,7 @@ class GrowthManager:
secondary_metrics: list[str],
min_sample_size: int = 100,
confidence_level: float = 0.95,
created_by: str = None,
created_by: str | None = None,
) -> Experiment:
"""创建 A/B 测试实验"""
experiment_id = f"exp_{uuid.uuid4().hex[:16]}"
@@ -893,14 +916,17 @@ class GrowthManager:
"""获取实验详情"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM experiments WHERE id = ?", (experiment_id,),
"SELECT * FROM experiments WHERE id = ?",
(experiment_id,),
).fetchone()
if row:
return self._row_to_experiment(row)
return None
def list_experiments(self, tenant_id: str, status: ExperimentStatus = None) -> list[Experiment]:
def list_experiments(
self, tenant_id: str, status: ExperimentStatus | None = None
) -> list[Experiment]:
"""列出实验"""
query = "SELECT * FROM experiments WHERE tenant_id = ?"
params = [tenant_id]
@@ -916,7 +942,10 @@ class GrowthManager:
return [self._row_to_experiment(row) for row in rows]
def assign_variant(
self, experiment_id: str, user_id: str, user_attributes: dict = None,
self,
experiment_id: str,
user_id: str,
user_attributes: dict | None = None,
) -> str | None:
"""为用户分配实验变体"""
experiment = self.get_experiment(experiment_id)
@@ -939,11 +968,15 @@ class GrowthManager:
variant_id = self._random_allocation(experiment.variants, experiment.traffic_split)
elif experiment.traffic_allocation == TrafficAllocationType.STRATIFIED:
variant_id = self._stratified_allocation(
experiment.variants, experiment.traffic_split, user_attributes,
experiment.variants,
experiment.traffic_split,
user_attributes,
)
else: # TARGETED
variant_id = self._targeted_allocation(
experiment.variants, experiment.target_audience, user_attributes,
experiment.variants,
experiment.target_audience,
user_attributes,
)
if variant_id:
@@ -978,7 +1011,10 @@ class GrowthManager:
return random.choices(variant_ids, weights=normalized_weights, k=1)[0]
def _stratified_allocation(
self, variants: list[dict], traffic_split: dict[str, float], user_attributes: dict,
self,
variants: list[dict],
traffic_split: dict[str, float],
user_attributes: dict,
) -> str:
"""分层分配(基于用户属性)"""
# 简化的分层分配:根据用户 ID 哈希值分配
@@ -991,7 +1027,10 @@ class GrowthManager:
return self._random_allocation(variants, traffic_split)
def _targeted_allocation(
self, variants: list[dict], target_audience: dict, user_attributes: dict,
self,
variants: list[dict],
target_audience: dict,
user_attributes: dict,
) -> str | None:
"""定向分配(基于目标受众条件)"""
# 检查用户是否符合目标受众条件
@@ -1005,7 +1044,14 @@ class GrowthManager:
user_value = user_attributes.get(attr_name) if user_attributes else None
if operator == "equals" and user_value != value or operator == "not_equals" and user_value == value or operator == "in" and user_value not in value:
if (
operator == "equals"
and user_value != value
or operator == "not_equals"
and user_value == value
or operator == "in"
and user_value not in value
):
matches = False
break
@@ -1177,11 +1223,11 @@ class GrowthManager:
template_type: EmailTemplateType,
subject: str,
html_content: str,
text_content: str = None,
text_content: str | None = None,
variables: list[str] = None,
from_name: str = None,
from_email: str = None,
reply_to: str = None,
from_name: str | None = None,
from_email: str | None = None,
reply_to: str | None = None,
) -> EmailTemplate:
"""创建邮件模板"""
template_id = f"et_{uuid.uuid4().hex[:16]}"
@@ -1242,7 +1288,8 @@ class GrowthManager:
"""获取邮件模板"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM email_templates WHERE id = ?", (template_id,),
"SELECT * FROM email_templates WHERE id = ?",
(template_id,),
).fetchone()
if row:
@@ -1250,7 +1297,9 @@ class GrowthManager:
return None
def list_email_templates(
self, tenant_id: str, template_type: EmailTemplateType = None,
self,
tenant_id: str,
template_type: EmailTemplateType | None = None,
) -> list[EmailTemplate]:
"""列出邮件模板"""
query = "SELECT * FROM email_templates WHERE tenant_id = ? AND is_active = 1"
@@ -1297,7 +1346,7 @@ class GrowthManager:
name: str,
template_id: str,
recipient_list: list[dict],
scheduled_at: datetime = None,
scheduled_at: datetime | None = None,
) -> EmailCampaign:
"""创建邮件营销活动"""
campaign_id = f"ec_{uuid.uuid4().hex[:16]}"
@@ -1377,7 +1426,12 @@ class GrowthManager:
return campaign
async def send_email(
self, campaign_id: str, user_id: str, email: str, template_id: str, variables: dict,
self,
campaign_id: str,
user_id: str,
email: str,
template_id: str,
variables: dict,
) -> bool:
"""发送单封邮件"""
template = self.get_email_template(template_id)
@@ -1448,7 +1502,8 @@ class GrowthManager:
"""发送整个营销活动"""
with self._get_db() as conn:
campaign_row = conn.execute(
"SELECT * FROM email_campaigns WHERE id = ?", (campaign_id,),
"SELECT * FROM email_campaigns WHERE id = ?",
(campaign_id,),
).fetchone()
if not campaign_row:
@@ -1478,7 +1533,11 @@ class GrowthManager:
variables = self._get_user_variables(log["tenant_id"], log["user_id"])
success = await self.send_email(
campaign_id, log["user_id"], log["email"], log["template_id"], variables,
campaign_id,
log["user_id"],
log["email"],
log["template_id"],
variables,
)
if success:
@@ -1763,7 +1822,8 @@ class GrowthManager:
with self._get_db() as conn:
row = conn.execute(
"SELECT 1 FROM referrals WHERE referral_code = ?", (code,),
"SELECT 1 FROM referrals WHERE referral_code = ?",
(code,),
).fetchone()
if not row:
@@ -1773,7 +1833,8 @@ class GrowthManager:
"""获取推荐计划"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM referral_programs WHERE id = ?", (program_id,),
"SELECT * FROM referral_programs WHERE id = ?",
(program_id,),
).fetchone()
if row:
@@ -1859,7 +1920,8 @@ class GrowthManager:
"expired": stats["expired"] or 0,
"unique_referrers": stats["unique_referrers"] or 0,
"conversion_rate": round(
(stats["converted"] or 0) / max(stats["total_referrals"] or 1, 1), 4,
(stats["converted"] or 0) / max(stats["total_referrals"] or 1, 1),
4,
),
}
@@ -1922,7 +1984,10 @@ class GrowthManager:
return incentive
def check_team_incentive_eligibility(
self, tenant_id: str, current_tier: str, team_size: int,
self,
tenant_id: str,
current_tier: str,
team_size: int,
) -> list[TeamIncentive]:
"""检查团队激励资格"""
with self._get_db() as conn:

View File

@@ -96,7 +96,7 @@ class ImageProcessor:
"other": "其他",
}
def __init__(self, temp_dir: str = None) -> None:
def __init__(self, temp_dir: str | None = None) -> None:
"""
初始化图片处理器
@@ -106,7 +106,7 @@ class ImageProcessor:
self.temp_dir = temp_dir or os.path.join(os.getcwd(), "temp", "images")
os.makedirs(self.temp_dir, exist_ok=True)
def preprocess_image(self, image, image_type: str = None) -> None:
def preprocess_image(self, image, image_type: str | None = None) -> None:
"""
预处理图片以提高OCR质量
@@ -328,7 +328,10 @@ class ImageProcessor:
return unique_entities
def generate_description(
self, image_type: str, ocr_text: str, entities: list[ImageEntity],
self,
image_type: str,
ocr_text: str,
entities: list[ImageEntity],
) -> str:
"""
生成图片描述
@@ -361,8 +364,8 @@ class ImageProcessor:
def process_image(
self,
image_data: bytes,
filename: str = None,
image_id: str = None,
filename: str | None = None,
image_id: str | None = None,
detect_type: bool = True,
) -> ImageProcessingResult:
"""
@@ -487,7 +490,9 @@ class ImageProcessor:
return relations
def process_batch(
self, images_data: list[tuple[bytes, str]], project_id: str = None,
self,
images_data: list[tuple[bytes, str]],
project_id: str | None = None,
) -> BatchProcessingResult:
"""
批量处理图片
@@ -561,7 +566,7 @@ class ImageProcessor:
_image_processor = None
def get_image_processor(temp_dir: str = None) -> ImageProcessor:
def get_image_processor(temp_dir: str | None = None) -> ImageProcessor:
"""获取图片处理器单例"""
global _image_processor
if _image_processor is None:

View File

@@ -51,7 +51,7 @@ class InferencePath:
class KnowledgeReasoner:
"""知识推理引擎"""
def __init__(self, api_key: str = None, base_url: str = None) -> None:
def __init__(self, api_key: str | None = None, base_url: str = None) -> None:
self.api_key = api_key or KIMI_API_KEY
self.base_url = base_url or KIMI_BASE_URL
self.headers = {
@@ -82,7 +82,11 @@ class KnowledgeReasoner:
return result["choices"][0]["message"]["content"]
async def enhanced_qa(
self, query: str, project_context: dict, graph_data: dict, reasoning_depth: str = "medium",
self,
query: str,
project_context: dict,
graph_data: dict,
reasoning_depth: str = "medium",
) -> ReasoningResult:
"""
增强问答 - 结合图谱推理的问答
@@ -139,7 +143,10 @@ class KnowledgeReasoner:
return {"type": "factual", "entities": [], "intent": "general", "complexity": "simple"}
async def _causal_reasoning(
self, query: str, project_context: dict, graph_data: dict,
self,
query: str,
project_context: dict,
graph_data: dict,
) -> ReasoningResult:
"""因果推理 - 分析原因和影响"""
@@ -200,7 +207,10 @@ class KnowledgeReasoner:
)
async def _comparative_reasoning(
self, query: str, project_context: dict, graph_data: dict,
self,
query: str,
project_context: dict,
graph_data: dict,
) -> ReasoningResult:
"""对比推理 - 比较实体间的异同"""
@@ -254,7 +264,10 @@ class KnowledgeReasoner:
)
async def _temporal_reasoning(
self, query: str, project_context: dict, graph_data: dict,
self,
query: str,
project_context: dict,
graph_data: dict,
) -> ReasoningResult:
"""时序推理 - 分析时间线和演变"""
@@ -308,7 +321,10 @@ class KnowledgeReasoner:
)
async def _associative_reasoning(
self, query: str, project_context: dict, graph_data: dict,
self,
query: str,
project_context: dict,
graph_data: dict,
) -> ReasoningResult:
"""关联推理 - 发现实体间的隐含关联"""
@@ -362,7 +378,11 @@ class KnowledgeReasoner:
)
def find_inference_paths(
self, start_entity: str, end_entity: str, graph_data: dict, max_depth: int = 3,
self,
start_entity: str,
end_entity: str,
graph_data: dict,
max_depth: int = 3,
) -> list[InferencePath]:
"""
发现两个实体之间的推理路径
@@ -449,7 +469,10 @@ class KnowledgeReasoner:
return length_factor * confidence_factor
async def summarize_project(
self, project_context: dict, graph_data: dict, summary_type: str = "comprehensive",
self,
project_context: dict,
graph_data: dict,
summary_type: str = "comprehensive",
) -> dict:
"""
项目智能总结

View File

@@ -43,7 +43,7 @@ class RelationExtractionResult:
class LLMClient:
"""Kimi API 客户端"""
def __init__(self, api_key: str = None, base_url: str = None) -> None:
def __init__(self, api_key: str | None = None, base_url: str = None) -> None:
self.api_key = api_key or KIMI_API_KEY
self.base_url = base_url or KIMI_BASE_URL
self.headers = {
@@ -52,7 +52,10 @@ class LLMClient:
}
async def chat(
self, messages: list[ChatMessage], temperature: float = 0.3, stream: bool = False,
self,
messages: list[ChatMessage],
temperature: float = 0.3,
stream: bool = False,
) -> str:
"""发送聊天请求"""
if not self.api_key:
@@ -77,7 +80,9 @@ class LLMClient:
return result["choices"][0]["message"]["content"]
async def chat_stream(
self, messages: list[ChatMessage], temperature: float = 0.3,
self,
messages: list[ChatMessage],
temperature: float = 0.3,
) -> AsyncGenerator[str, None]:
"""流式聊天请求"""
if not self.api_key:
@@ -90,13 +95,16 @@ class LLMClient:
"stream": True,
}
async with httpx.AsyncClient() as client, client.stream(
"POST",
f"{self.base_url}/v1/chat/completions",
headers=self.headers,
json=payload,
timeout=120.0,
) as response:
async with (
httpx.AsyncClient() as client,
client.stream(
"POST",
f"{self.base_url}/v1/chat/completions",
headers=self.headers,
json=payload,
timeout=120.0,
) as response,
):
response.raise_for_status()
async for line in response.aiter_lines():
if line.startswith("data: "):
@@ -112,7 +120,8 @@ class LLMClient:
pass
async def extract_entities_with_confidence(
self, text: str,
self,
text: str,
) -> tuple[list[EntityExtractionResult], list[RelationExtractionResult]]:
"""提取实体和关系,带置信度分数"""
prompt = f"""从以下会议文本中提取关键实体和它们之间的关系,以 JSON 格式返回:
@@ -189,7 +198,8 @@ class LLMClient:
messages = [
ChatMessage(
role="system", content="你是一个专业的项目分析助手,擅长从会议记录中提取洞察。",
role="system",
content="你是一个专业的项目分析助手,擅长从会议记录中提取洞察。",
),
ChatMessage(role="user", content=prompt),
]

View File

@@ -963,7 +963,11 @@ class LocalizationManager:
self._close_if_file_db(conn)
def get_translation(
self, key: str, language: str, namespace: str = "common", fallback: bool = True,
self,
key: str,
language: str,
namespace: str = "common",
fallback: bool = True,
) -> str | None:
conn = self._get_connection()
try:
@@ -979,7 +983,10 @@ class LocalizationManager:
lang_config = self.get_language_config(language)
if lang_config and lang_config.fallback_language:
return self.get_translation(
key, lang_config.fallback_language, namespace, False,
key,
lang_config.fallback_language,
namespace,
False,
)
if language != "en":
return self.get_translation(key, "en", namespace, False)
@@ -1019,7 +1026,11 @@ class LocalizationManager:
self._close_if_file_db(conn)
def _get_translation_internal(
self, conn: sqlite3.Connection, key: str, language: str, namespace: str,
self,
conn: sqlite3.Connection,
key: str,
language: str,
namespace: str,
) -> Translation | None:
cursor = conn.cursor()
cursor.execute(
@@ -1121,7 +1132,9 @@ class LocalizationManager:
self._close_if_file_db(conn)
def list_data_centers(
self, status: str | None = None, region: str | None = None,
self,
status: str | None = None,
region: str | None = None,
) -> list[DataCenter]:
conn = self._get_connection()
try:
@@ -1146,7 +1159,8 @@ class LocalizationManager:
try:
cursor = conn.cursor()
cursor.execute(
"SELECT * FROM tenant_data_center_mappings WHERE tenant_id = ?", (tenant_id,),
"SELECT * FROM tenant_data_center_mappings WHERE tenant_id = ?",
(tenant_id,),
)
row = cursor.fetchone()
if row:
@@ -1156,7 +1170,10 @@ class LocalizationManager:
self._close_if_file_db(conn)
def set_tenant_data_center(
self, tenant_id: str, region_code: str, data_residency: str = "regional",
self,
tenant_id: str,
region_code: str,
data_residency: str = "regional",
) -> TenantDataCenterMapping:
conn = self._get_connection()
try:
@@ -1222,7 +1239,8 @@ class LocalizationManager:
try:
cursor = conn.cursor()
cursor.execute(
"SELECT * FROM localized_payment_methods WHERE provider = ?", (provider,),
"SELECT * FROM localized_payment_methods WHERE provider = ?",
(provider,),
)
row = cursor.fetchone()
if row:
@@ -1232,7 +1250,10 @@ class LocalizationManager:
self._close_if_file_db(conn)
def list_payment_methods(
self, country_code: str | None = None, currency: str | None = None, active_only: bool = True,
self,
country_code: str | None = None,
currency: str | None = None,
active_only: bool = True,
) -> list[LocalizedPaymentMethod]:
conn = self._get_connection()
try:
@@ -1255,7 +1276,9 @@ class LocalizationManager:
self._close_if_file_db(conn)
def get_localized_payment_methods(
self, country_code: str, language: str = "en",
self,
country_code: str,
language: str = "en",
) -> list[dict[str, Any]]:
methods = self.list_payment_methods(country_code=country_code)
result = []
@@ -1287,7 +1310,9 @@ class LocalizationManager:
self._close_if_file_db(conn)
def list_country_configs(
self, region: str | None = None, active_only: bool = True,
self,
region: str | None = None,
active_only: bool = True,
) -> list[CountryConfig]:
conn = self._get_connection()
try:
@@ -1345,14 +1370,19 @@ class LocalizationManager:
return dt.strftime("%Y-%m-%d %H:%M")
def format_number(
self, number: float, language: str = "en", decimal_places: int | None = None,
self,
number: float,
language: str = "en",
decimal_places: int | None = None,
) -> str:
try:
if BABEL_AVAILABLE:
try:
locale = Locale.parse(language.replace("_", "-"))
return numbers.format_decimal(
number, locale=locale, decimal_quantization=(decimal_places is not None),
number,
locale=locale,
decimal_quantization=(decimal_places is not None),
)
except (ValueError, AttributeError):
pass
@@ -1514,7 +1544,9 @@ class LocalizationManager:
self._close_if_file_db(conn)
def detect_user_preferences(
self, accept_language: str | None = None, ip_country: str | None = None,
self,
accept_language: str | None = None,
ip_country: str | None = None,
) -> dict[str, str]:
preferences = {"language": "en", "country": "US", "timezone": "UTC", "currency": "USD"}
if accept_language:

File diff suppressed because it is too large Load Diff

View File

@@ -30,7 +30,7 @@ class MultimodalEntity:
source_id: str
mention_context: str
confidence: float
modality_features: dict = None # 模态特定特征
modality_features: dict | None = None # 模态特定特征
def __post_init__(self) -> None:
if self.modality_features is None:
@@ -137,7 +137,8 @@ class MultimodalEntityLinker:
"""
# 名称相似度
name_sim = self.calculate_string_similarity(
entity1.get("name", ""), entity2.get("name", ""),
entity1.get("name", ""),
entity2.get("name", ""),
)
# 如果名称完全匹配
@@ -158,7 +159,8 @@ class MultimodalEntityLinker:
# 定义相似度
def_sim = self.calculate_string_similarity(
entity1.get("definition", ""), entity2.get("definition", ""),
entity1.get("definition", ""),
entity2.get("definition", ""),
)
# 综合相似度
@@ -170,7 +172,10 @@ class MultimodalEntityLinker:
return combined_sim, "none"
def find_matching_entity(
self, query_entity: dict, candidate_entities: list[dict], exclude_ids: set[str] = None,
self,
query_entity: dict,
candidate_entities: list[dict],
exclude_ids: set[str] = None,
) -> AlignmentResult | None:
"""
在候选实体中查找匹配的实体
@@ -270,7 +275,10 @@ class MultimodalEntityLinker:
return links
def fuse_entity_knowledge(
self, entity_id: str, linked_entities: list[dict], multimodal_mentions: list[dict],
self,
entity_id: str,
linked_entities: list[dict],
multimodal_mentions: list[dict],
) -> FusionResult:
"""
融合多模态实体知识
@@ -394,7 +402,9 @@ class MultimodalEntityLinker:
return conflicts
def suggest_entity_merges(
self, entities: list[dict], existing_links: list[EntityLink] = None,
self,
entities: list[dict],
existing_links: list[EntityLink] = None,
) -> list[dict]:
"""
建议实体合并
@@ -510,9 +520,9 @@ class MultimodalEntityLinker:
"total_multimodal_records": len(multimodal_entities),
"unique_entities": len(entity_modalities),
"cross_modal_entities": cross_modal_count,
"cross_modal_ratio": cross_modal_count / len(entity_modalities)
if entity_modalities
else 0,
"cross_modal_ratio": (
cross_modal_count / len(entity_modalities) if entity_modalities else 0
),
}

View File

@@ -74,7 +74,7 @@ class VideoInfo:
transcript_id: str = ""
status: str = "pending"
error_message: str = ""
metadata: dict = None
metadata: dict | None = None
def __post_init__(self) -> None:
if self.metadata is None:
@@ -97,7 +97,7 @@ class VideoProcessingResult:
class MultimodalProcessor:
"""多模态处理器 - 处理视频文件"""
def __init__(self, temp_dir: str = None, frame_interval: int = 5) -> None:
def __init__(self, temp_dir: str | None = None, frame_interval: int = 5) -> None:
"""
初始化多模态处理器
@@ -130,10 +130,12 @@ class MultimodalProcessor:
if FFMPEG_AVAILABLE:
probe = ffmpeg.probe(video_path)
video_stream = next(
(s for s in probe["streams"] if s["codec_type"] == "video"), None,
(s for s in probe["streams"] if s["codec_type"] == "video"),
None,
)
audio_stream = next(
(s for s in probe["streams"] if s["codec_type"] == "audio"), None,
(s for s in probe["streams"] if s["codec_type"] == "audio"),
None,
)
if video_stream:
@@ -165,9 +167,9 @@ class MultimodalProcessor:
return {
"duration": float(data["format"].get("duration", 0)),
"width": int(data["streams"][0].get("width", 0)) if data["streams"] else 0,
"height": int(data["streams"][0].get("height", 0))
if data["streams"]
else 0,
"height": (
int(data["streams"][0].get("height", 0)) if data["streams"] else 0
),
"fps": 30.0, # 默认值
"has_audio": len(data["streams"]) > 1,
"bitrate": int(data["format"].get("bit_rate", 0)),
@@ -177,7 +179,7 @@ class MultimodalProcessor:
return {"duration": 0, "width": 0, "height": 0, "fps": 0, "has_audio": False, "bitrate": 0}
def extract_audio(self, video_path: str, output_path: str = None) -> str:
def extract_audio(self, video_path: str, output_path: str | None = None) -> str:
"""
从视频中提取音频
@@ -223,7 +225,9 @@ class MultimodalProcessor:
print(f"Error extracting audio: {e}")
raise
def extract_keyframes(self, video_path: str, video_id: str, interval: int = None) -> list[str]:
def extract_keyframes(
self, video_path: str, video_id: str, interval: int | None = None
) -> list[str]:
"""
从视频中提取关键帧
@@ -260,7 +264,8 @@ class MultimodalProcessor:
if frame_number % frame_interval_frames == 0:
timestamp = frame_number / fps
frame_path = os.path.join(
video_frames_dir, f"frame_{frame_number:06d}_{timestamp:.2f}.jpg",
video_frames_dir,
f"frame_{frame_number:06d}_{timestamp:.2f}.jpg",
)
cv2.imwrite(frame_path, frame)
frame_paths.append(frame_path)
@@ -333,7 +338,11 @@ class MultimodalProcessor:
return "", 0.0
def process_video(
self, video_data: bytes, filename: str, project_id: str, video_id: str = None,
self,
video_data: bytes,
filename: str,
project_id: str,
video_id: str | None = None,
) -> VideoProcessingResult:
"""
处理视频文件提取音频、关键帧、OCR
@@ -426,7 +435,7 @@ class MultimodalProcessor:
error_message=str(e),
)
def cleanup(self, video_id: str = None) -> None:
def cleanup(self, video_id: str | None = None) -> None:
"""
清理临时文件
@@ -457,7 +466,9 @@ class MultimodalProcessor:
_multimodal_processor = None
def get_multimodal_processor(temp_dir: str = None, frame_interval: int = 5) -> MultimodalProcessor:
def get_multimodal_processor(
temp_dir: str | None = None, frame_interval: int = 5
) -> MultimodalProcessor:
"""获取多模态处理器单例"""
global _multimodal_processor
if _multimodal_processor is None:

View File

@@ -37,7 +37,7 @@ class GraphEntity:
type: str
definition: str = ""
aliases: list[str] = None
properties: dict = None
properties: dict | None = None
def __post_init__(self) -> None:
if self.aliases is None:
@@ -55,7 +55,7 @@ class GraphRelation:
target_id: str
relation_type: str
evidence: str = ""
properties: dict = None
properties: dict | None = None
def __post_init__(self) -> None:
if self.properties is None:
@@ -95,7 +95,7 @@ class CentralityResult:
class Neo4jManager:
"""Neo4j 图数据库管理器"""
def __init__(self, uri: str = None, user: str = None, password: str = None) -> None:
def __init__(self, uri: str | None = None, user: str = None, password: str = None) -> None:
self.uri = uri or NEO4J_URI
self.user = user or NEO4J_USER
self.password = password or NEO4J_PASSWORD
@@ -179,7 +179,10 @@ class Neo4jManager:
# ==================== 数据同步 ====================
def sync_project(
self, project_id: str, project_name: str, project_description: str = "",
self,
project_id: str,
project_name: str,
project_description: str = "",
) -> None:
"""同步项目节点到 Neo4j"""
if not self._driver:
@@ -352,7 +355,10 @@ class Neo4jManager:
# ==================== 复杂图查询 ====================
def find_shortest_path(
self, source_id: str, target_id: str, max_depth: int = 10,
self,
source_id: str,
target_id: str,
max_depth: int = 10,
) -> PathResult | None:
"""
查找两个实体之间的最短路径
@@ -404,11 +410,17 @@ class Neo4jManager:
]
return PathResult(
nodes=nodes, relationships=relationships, length=len(path.relationships),
nodes=nodes,
relationships=relationships,
length=len(path.relationships),
)
def find_all_paths(
self, source_id: str, target_id: str, max_depth: int = 5, limit: int = 10,
self,
source_id: str,
target_id: str,
max_depth: int = 5,
limit: int = 10,
) -> list[PathResult]:
"""
查找两个实体之间的所有路径
@@ -460,14 +472,19 @@ class Neo4jManager:
paths.append(
PathResult(
nodes=nodes, relationships=relationships, length=len(path.relationships),
nodes=nodes,
relationships=relationships,
length=len(path.relationships),
),
)
return paths
def find_neighbors(
self, entity_id: str, relation_type: str = None, limit: int = 50,
self,
entity_id: str,
relation_type: str | None = None,
limit: int = 50,
) -> list[dict]:
"""
查找实体的邻居节点
@@ -752,7 +769,10 @@ class Neo4jManager:
results.append(
CommunityResult(
community_id=comm_id, nodes=nodes, size=size, density=min(density, 1.0),
community_id=comm_id,
nodes=nodes,
size=size,
density=min(density, 1.0),
),
)
@@ -761,7 +781,9 @@ class Neo4jManager:
return results
def find_central_entities(
self, project_id: str, metric: str = "degree",
self,
project_id: str,
metric: str = "degree",
) -> list[CentralityResult]:
"""
查找中心实体
@@ -896,9 +918,11 @@ class Neo4jManager:
"type_distribution": types,
"average_degree": round(avg_degree, 2) if avg_degree else 0,
"relation_type_distribution": relation_types,
"density": round(relation_count / (entity_count * (entity_count - 1)), 4)
if entity_count > 1
else 0,
"density": (
round(relation_count / (entity_count * (entity_count - 1)), 4)
if entity_count > 1
else 0
),
}
def get_subgraph(self, entity_ids: list[str], depth: int = 1) -> dict:
@@ -993,7 +1017,10 @@ def close_neo4j_manager() -> None:
def sync_project_to_neo4j(
project_id: str, project_name: str, entities: list[dict], relations: list[dict],
project_id: str,
project_name: str,
entities: list[dict],
relations: list[dict],
) -> None:
"""
同步整个项目到 Neo4j

View File

@@ -680,7 +680,8 @@ class OpsManager:
"""获取告警渠道"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM alert_channels WHERE id = ?", (channel_id,),
"SELECT * FROM alert_channels WHERE id = ?",
(channel_id,),
).fetchone()
if row:
@@ -819,7 +820,9 @@ class OpsManager:
for rule in rules:
# 获取相关指标
metrics = self.get_recent_metrics(
tenant_id, rule.metric, seconds=rule.duration + rule.evaluation_interval,
tenant_id,
rule.metric,
seconds=rule.duration + rule.evaluation_interval,
)
# 评估规则
@@ -1129,7 +1132,9 @@ class OpsManager:
async with httpx.AsyncClient() as client:
response = await client.post(
"https://events.pagerduty.com/v2/enqueue", json=message, timeout=30.0,
"https://events.pagerduty.com/v2/enqueue",
json=message,
timeout=30.0,
)
success = response.status_code == 202
self._update_channel_stats(channel.id, success)
@@ -1299,12 +1304,16 @@ class OpsManager:
conn.commit()
def _update_alert_notification_status(
self, alert_id: str, channel_id: str, success: bool,
self,
alert_id: str,
channel_id: str,
success: bool,
) -> None:
"""更新告警通知状态"""
with self._get_db() as conn:
row = conn.execute(
"SELECT notification_sent FROM alerts WHERE id = ?", (alert_id,),
"SELECT notification_sent FROM alerts WHERE id = ?",
(alert_id,),
).fetchone()
if row:
@@ -1394,7 +1403,8 @@ class OpsManager:
"""检查告警是否被抑制"""
with self._get_db() as conn:
rows = conn.execute(
"SELECT * FROM alert_suppression_rules WHERE tenant_id = ?", (rule.tenant_id,),
"SELECT * FROM alert_suppression_rules WHERE tenant_id = ?",
(rule.tenant_id,),
).fetchall()
for row in rows:
@@ -1436,7 +1446,7 @@ class OpsManager:
metric_name: str,
metric_value: float,
unit: str,
metadata: dict = None,
metadata: dict | None = None,
) -> ResourceMetric:
"""记录资源指标"""
metric_id = f"rm_{uuid.uuid4().hex[:16]}"
@@ -1479,7 +1489,10 @@ class OpsManager:
return metric
def get_recent_metrics(
self, tenant_id: str, metric_name: str, seconds: int = 3600,
self,
tenant_id: str,
metric_name: str,
seconds: int = 3600,
) -> list[ResourceMetric]:
"""获取最近的指标数据"""
cutoff_time = (datetime.now() - timedelta(seconds=seconds)).isoformat()
@@ -1531,7 +1544,9 @@ class OpsManager:
# 基于历史数据预测
metrics = self.get_recent_metrics(
tenant_id, f"{resource_type.value}_usage", seconds=30 * 24 * 3600,
tenant_id,
f"{resource_type.value}_usage",
seconds=30 * 24 * 3600,
)
if metrics:
@@ -1704,7 +1719,8 @@ class OpsManager:
"""获取自动扩缩容策略"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM auto_scaling_policies WHERE id = ?", (policy_id,),
"SELECT * FROM auto_scaling_policies WHERE id = ?",
(policy_id,),
).fetchone()
if row:
@@ -1721,7 +1737,10 @@ class OpsManager:
return [self._row_to_auto_scaling_policy(row) for row in rows]
def evaluate_scaling_policy(
self, policy_id: str, current_instances: int, current_utilization: float,
self,
policy_id: str,
current_instances: int,
current_utilization: float,
) -> ScalingEvent | None:
"""评估扩缩容策略"""
policy = self.get_auto_scaling_policy(policy_id)
@@ -1826,7 +1845,10 @@ class OpsManager:
return None
def update_scaling_event_status(
self, event_id: str, status: str, error_message: str = None,
self,
event_id: str,
status: str,
error_message: str | None = None,
) -> ScalingEvent | None:
"""更新扩缩容事件状态"""
now = datetime.now().isoformat()
@@ -1864,7 +1886,10 @@ class OpsManager:
return None
def list_scaling_events(
self, tenant_id: str, policy_id: str = None, limit: int = 100,
self,
tenant_id: str,
policy_id: str | None = None,
limit: int = 100,
) -> list[ScalingEvent]:
"""列出租户的扩缩容事件"""
query = "SELECT * FROM scaling_events WHERE tenant_id = ?"
@@ -2056,7 +2081,8 @@ class OpsManager:
start_time = time.time()
try:
reader, writer = await asyncio.wait_for(
asyncio.open_connection(host, port), timeout=check.timeout,
asyncio.open_connection(host, port),
timeout=check.timeout,
)
response_time = (time.time() - start_time) * 1000
writer.close()
@@ -2101,7 +2127,7 @@ class OpsManager:
failover_trigger: str,
auto_failover: bool = False,
failover_timeout: int = 300,
health_check_id: str = None,
health_check_id: str | None = None,
) -> FailoverConfig:
"""创建故障转移配置"""
config_id = f"fc_{uuid.uuid4().hex[:16]}"
@@ -2153,7 +2179,8 @@ class OpsManager:
"""获取故障转移配置"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM failover_configs WHERE id = ?", (config_id,),
"SELECT * FROM failover_configs WHERE id = ?",
(config_id,),
).fetchone()
if row:
@@ -2259,7 +2286,8 @@ class OpsManager:
"""获取故障转移事件"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM failover_events WHERE id = ?", (event_id,),
"SELECT * FROM failover_events WHERE id = ?",
(event_id,),
).fetchone()
if row:
@@ -2290,7 +2318,7 @@ class OpsManager:
retention_days: int = 30,
encryption_enabled: bool = True,
compression_enabled: bool = True,
storage_location: str = None,
storage_location: str | None = None,
) -> BackupJob:
"""创建备份任务"""
job_id = f"bj_{uuid.uuid4().hex[:16]}"
@@ -2410,7 +2438,9 @@ class OpsManager:
return record
def _complete_backup(self, record_id: str, size_bytes: int, checksum: str = None) -> None:
def _complete_backup(
self, record_id: str, size_bytes: int, checksum: str | None = None
) -> None:
"""完成备份"""
now = datetime.now().isoformat()
checksum = checksum or hashlib.sha256(str(time.time()).encode()).hexdigest()[:16]
@@ -2430,7 +2460,8 @@ class OpsManager:
"""获取备份记录"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM backup_records WHERE id = ?", (record_id,),
"SELECT * FROM backup_records WHERE id = ?",
(record_id,),
).fetchone()
if row:
@@ -2438,7 +2469,10 @@ class OpsManager:
return None
def list_backup_records(
self, tenant_id: str, job_id: str = None, limit: int = 100,
self,
tenant_id: str,
job_id: str | None = None,
limit: int = 100,
) -> list[BackupRecord]:
"""列出租户的备份记录"""
query = "SELECT * FROM backup_records WHERE tenant_id = ?"
@@ -2624,7 +2658,9 @@ class OpsManager:
return util
def get_resource_utilizations(
self, tenant_id: str, report_period: str,
self,
tenant_id: str,
report_period: str,
) -> list[ResourceUtilization]:
"""获取资源利用率列表"""
with self._get_db() as conn:
@@ -2709,7 +2745,8 @@ class OpsManager:
return [self._row_to_idle_resource(row) for row in rows]
def generate_cost_optimization_suggestions(
self, tenant_id: str,
self,
tenant_id: str,
) -> list[CostOptimizationSuggestion]:
"""生成成本优化建议"""
suggestions = []
@@ -2777,7 +2814,9 @@ class OpsManager:
return suggestions
def get_cost_optimization_suggestions(
self, tenant_id: str, is_applied: bool = None,
self,
tenant_id: str,
is_applied: bool | None = None,
) -> list[CostOptimizationSuggestion]:
"""获取成本优化建议"""
query = "SELECT * FROM cost_optimization_suggestions WHERE tenant_id = ?"
@@ -2794,7 +2833,8 @@ class OpsManager:
return [self._row_to_cost_optimization_suggestion(row) for row in rows]
def apply_cost_optimization_suggestion(
self, suggestion_id: str,
self,
suggestion_id: str,
) -> CostOptimizationSuggestion | None:
"""应用成本优化建议"""
now = datetime.now().isoformat()
@@ -2813,12 +2853,14 @@ class OpsManager:
return self.get_cost_optimization_suggestion(suggestion_id)
def get_cost_optimization_suggestion(
self, suggestion_id: str,
self,
suggestion_id: str,
) -> CostOptimizationSuggestion | None:
"""获取成本优化建议详情"""
with self._get_db() as conn:
row = conn.execute(
"SELECT * FROM cost_optimization_suggestions WHERE id = ?", (suggestion_id,),
"SELECT * FROM cost_optimization_suggestions WHERE id = ?",
(suggestion_id,),
).fetchone()
if row:

View File

@@ -444,7 +444,8 @@ class CacheManager:
"memory_size_bytes": self.current_memory_size,
"max_memory_size_bytes": self.max_memory_size,
"memory_usage_percent": round(
self.current_memory_size / self.max_memory_size * 100, 2,
self.current_memory_size / self.max_memory_size * 100,
2,
),
"cache_entries": len(self.memory_cache),
},
@@ -548,11 +549,13 @@ class CacheManager:
# 预热项目知识库摘要
entity_count = conn.execute(
"SELECT COUNT(*) FROM entities WHERE project_id = ?", (project_id,),
"SELECT COUNT(*) FROM entities WHERE project_id = ?",
(project_id,),
).fetchone()[0]
relation_count = conn.execute(
"SELECT COUNT(*) FROM entity_relations WHERE project_id = ?", (project_id,),
"SELECT COUNT(*) FROM entity_relations WHERE project_id = ?",
(project_id,),
).fetchone()[0]
summary = {
@@ -757,11 +760,13 @@ class DatabaseSharding:
source_conn.row_factory = sqlite3.Row
entities = source_conn.execute(
"SELECT * FROM entities WHERE project_id = ?", (project_id,),
"SELECT * FROM entities WHERE project_id = ?",
(project_id,),
).fetchall()
relations = source_conn.execute(
"SELECT * FROM entity_relations WHERE project_id = ?", (project_id,),
"SELECT * FROM entity_relations WHERE project_id = ?",
(project_id,),
).fetchall()
source_conn.close()
@@ -1061,7 +1066,9 @@ class TaskQueue:
task.status = "retrying"
# 延迟重试
threading.Timer(
10 * task.retry_count, self._execute_task, args=(task_id,),
10 * task.retry_count,
self._execute_task,
args=(task_id,),
).start()
else:
task.status = "failed"
@@ -1163,7 +1170,10 @@ class TaskQueue:
return self.tasks.get(task_id)
def list_tasks(
self, status: str | None = None, task_type: str | None = None, limit: int = 100,
self,
status: str | None = None,
task_type: str | None = None,
limit: int = 100,
) -> list[TaskInfo]:
"""列出任务"""
conn = sqlite3.connect(self.db_path)
@@ -1635,7 +1645,7 @@ def cached(
cache_key = key_func(*args, **kwargs)
else:
# 默认使用函数名和参数哈希
key_data = f"{func.__name__}:{str(args)}:{str(kwargs)}"
key_data = f"{func.__name__}:{args!s}:{kwargs!s}"
cache_key = f"{key_prefix}:{hashlib.md5(key_data.encode()).hexdigest()[:16]}"
# 尝试从缓存获取
@@ -1754,12 +1764,16 @@ _performance_manager = None
def get_performance_manager(
db_path: str = "insightflow.db", redis_url: str | None = None, enable_sharding: bool = False,
db_path: str = "insightflow.db",
redis_url: str | None = None,
enable_sharding: bool = False,
) -> PerformanceManager:
"""获取性能管理器单例"""
global _performance_manager
if _performance_manager is None:
_performance_manager = PerformanceManager(
db_path=db_path, redis_url=redis_url, enable_sharding=enable_sharding,
db_path=db_path,
redis_url=redis_url,
enable_sharding=enable_sharding,
)
return _performance_manager

View File

@@ -220,7 +220,10 @@ class PluginManager:
return None
def list_plugins(
self, project_id: str = None, plugin_type: str = None, status: str = None,
self,
project_id: str | None = None,
plugin_type: str = None,
status: str = None,
) -> list[Plugin]:
"""列出插件"""
conn = self.db.get_conn()
@@ -241,7 +244,8 @@ class PluginManager:
where_clause = " AND ".join(conditions) if conditions else "1 = 1"
rows = conn.execute(
f"SELECT * FROM plugins WHERE {where_clause} ORDER BY created_at DESC", params,
f"SELECT * FROM plugins WHERE {where_clause} ORDER BY created_at DESC",
params,
).fetchall()
conn.close()
@@ -310,7 +314,11 @@ class PluginManager:
# ==================== Plugin Config ====================
def set_plugin_config(
self, plugin_id: str, key: str, value: str, is_encrypted: bool = False,
self,
plugin_id: str,
key: str,
value: str,
is_encrypted: bool = False,
) -> PluginConfig:
"""设置插件配置"""
conn = self.db.get_conn()
@@ -367,7 +375,8 @@ class PluginManager:
"""获取插件所有配置"""
conn = self.db.get_conn()
rows = conn.execute(
"SELECT config_key, config_value FROM plugin_configs WHERE plugin_id = ?", (plugin_id,),
"SELECT config_key, config_value FROM plugin_configs WHERE plugin_id = ?",
(plugin_id,),
).fetchall()
conn.close()
@@ -377,7 +386,8 @@ class PluginManager:
"""删除插件配置"""
conn = self.db.get_conn()
cursor = conn.execute(
"DELETE FROM plugin_configs WHERE plugin_id = ? AND config_key = ?", (plugin_id, key),
"DELETE FROM plugin_configs WHERE plugin_id = ? AND config_key = ?",
(plugin_id, key),
)
conn.commit()
conn.close()
@@ -408,10 +418,10 @@ class ChromeExtensionHandler:
def create_token(
self,
name: str,
user_id: str = None,
project_id: str = None,
user_id: str | None = None,
project_id: str | None = None,
permissions: list[str] = None,
expires_days: int = None,
expires_days: int | None = None,
) -> ChromeExtensionToken:
"""创建 Chrome 扩展令牌"""
token_id = str(uuid.uuid4())[:UUID_LENGTH]
@@ -512,7 +522,8 @@ class ChromeExtensionHandler:
"""撤销令牌"""
conn = self.pm.db.get_conn()
cursor = conn.execute(
"UPDATE chrome_extension_tokens SET is_revoked = 1 WHERE id = ?", (token_id,),
"UPDATE chrome_extension_tokens SET is_revoked = 1 WHERE id = ?",
(token_id,),
)
conn.commit()
conn.close()
@@ -520,7 +531,9 @@ class ChromeExtensionHandler:
return cursor.rowcount > 0
def list_tokens(
self, user_id: str = None, project_id: str = None,
self,
user_id: str | None = None,
project_id: str = None,
) -> list[ChromeExtensionToken]:
"""列出令牌"""
conn = self.pm.db.get_conn()
@@ -569,7 +582,7 @@ class ChromeExtensionHandler:
url: str,
title: str,
content: str,
html_content: str = None,
html_content: str | None = None,
) -> dict:
"""导入网页内容"""
if not token.project_id:
@@ -616,7 +629,7 @@ class BotHandler:
self,
session_id: str,
session_name: str,
project_id: str = None,
project_id: str | None = None,
webhook_url: str = "",
secret: str = "",
) -> BotSession:
@@ -674,7 +687,7 @@ class BotHandler:
return self._row_to_session(row)
return None
def list_sessions(self, project_id: str = None) -> list[BotSession]:
def list_sessions(self, project_id: str | None = None) -> list[BotSession]:
"""列出会话"""
conn = self.pm.db.get_conn()
@@ -849,7 +862,7 @@ class BotHandler:
}
except Exception as e:
return {"success": False, "error": f"Failed to process audio: {str(e)}"}
return {"success": False, "error": f"Failed to process audio: {e!s}"}
async def _handle_file_message(self, session: BotSession, message: dict) -> dict:
"""处理文件消息"""
@@ -897,12 +910,17 @@ class BotHandler:
async with httpx.AsyncClient() as client:
response = await client.post(
session.webhook_url, json=payload, headers={"Content-Type": "application/json"},
session.webhook_url,
json=payload,
headers={"Content-Type": "application/json"},
)
return response.status_code == 200
async def _send_dingtalk_message(
self, session: BotSession, message: str, msg_type: str,
self,
session: BotSession,
message: str,
msg_type: str,
) -> bool:
"""发送钉钉消息"""
timestamp = str(round(time.time() * 1000))
@@ -928,7 +946,9 @@ class BotHandler:
async with httpx.AsyncClient() as client:
response = await client.post(
url, json=payload, headers={"Content-Type": "application/json"},
url,
json=payload,
headers={"Content-Type": "application/json"},
)
return response.status_code == 200
@@ -944,9 +964,9 @@ class WebhookIntegration:
self,
name: str,
endpoint_url: str,
project_id: str = None,
project_id: str | None = None,
auth_type: str = "none",
auth_config: dict = None,
auth_config: dict | None = None,
trigger_events: list[str] = None,
) -> WebhookEndpoint:
"""创建 Webhook 端点"""
@@ -1004,7 +1024,7 @@ class WebhookIntegration:
return self._row_to_endpoint(row)
return None
def list_endpoints(self, project_id: str = None) -> list[WebhookEndpoint]:
def list_endpoints(self, project_id: str | None = None) -> list[WebhookEndpoint]:
"""列出端点"""
conn = self.pm.db.get_conn()
@@ -1115,7 +1135,10 @@ class WebhookIntegration:
async with httpx.AsyncClient() as client:
response = await client.post(
endpoint.endpoint_url, json=payload, headers=headers, timeout=30.0,
endpoint.endpoint_url,
json=payload,
headers=headers,
timeout=30.0,
)
success = response.status_code in [200, 201, 202]
@@ -1229,7 +1252,7 @@ class WebDAVSyncManager:
return self._row_to_sync(row)
return None
def list_syncs(self, project_id: str = None) -> list[WebDAVSync]:
def list_syncs(self, project_id: str | None = None) -> list[WebDAVSync]:
"""列出同步配置"""
conn = self.pm.db.get_conn()

View File

@@ -120,7 +120,10 @@ class RateLimiter:
await counter.add_request()
return RateLimitInfo(
allowed=True, remaining=remaining - 1, reset_time=reset_time, retry_after=0,
allowed=True,
remaining=remaining - 1,
reset_time=reset_time,
retry_after=0,
)
async def get_limit_info(self, key: str) -> RateLimitInfo:
@@ -145,9 +148,9 @@ class RateLimiter:
allowed=current_count < config.requests_per_minute,
remaining=remaining,
reset_time=reset_time,
retry_after=max(0, config.window_size)
if current_count >= config.requests_per_minute
else 0,
retry_after=(
max(0, config.window_size) if current_count >= config.requests_per_minute else 0
),
)
def reset(self, key: str | None = None) -> None:

View File

@@ -385,7 +385,7 @@ class FullTextSearch:
# 排序和分页
scored_results.sort(key=lambda x: x.score, reverse=True)
return scored_results[offset: offset + limit]
return scored_results[offset : offset + limit]
def _parse_boolean_query(self, query: str) -> dict:
"""
@@ -545,19 +545,24 @@ class FullTextSearch:
return results
def _get_content_by_id(
self, conn: sqlite3.Connection, content_id: str, content_type: str,
self,
conn: sqlite3.Connection,
content_id: str,
content_type: str,
) -> str | None:
"""根据ID获取内容"""
try:
if content_type == "transcript":
row = conn.execute(
"SELECT full_text FROM transcripts WHERE id = ?", (content_id,),
"SELECT full_text FROM transcripts WHERE id = ?",
(content_id,),
).fetchone()
return row["full_text"] if row else None
elif content_type == "entity":
row = conn.execute(
"SELECT name, definition FROM entities WHERE id = ?", (content_id,),
"SELECT name, definition FROM entities WHERE id = ?",
(content_id,),
).fetchone()
if row:
return f"{row['name']} {row['definition'] or ''}"
@@ -583,21 +588,27 @@ class FullTextSearch:
return None
def _get_project_id(
self, conn: sqlite3.Connection, content_id: str, content_type: str,
self,
conn: sqlite3.Connection,
content_id: str,
content_type: str,
) -> str | None:
"""获取内容所属的项目ID"""
try:
if content_type == "transcript":
row = conn.execute(
"SELECT project_id FROM transcripts WHERE id = ?", (content_id,),
"SELECT project_id FROM transcripts WHERE id = ?",
(content_id,),
).fetchone()
elif content_type == "entity":
row = conn.execute(
"SELECT project_id FROM entities WHERE id = ?", (content_id,),
"SELECT project_id FROM entities WHERE id = ?",
(content_id,),
).fetchone()
elif content_type == "relation":
row = conn.execute(
"SELECT project_id FROM entity_relations WHERE id = ?", (content_id,),
"SELECT project_id FROM entity_relations WHERE id = ?",
(content_id,),
).fetchone()
else:
return None
@@ -880,7 +891,11 @@ class SemanticSearch:
return None
def index_embedding(
self, content_id: str, content_type: str, project_id: str, text: str,
self,
content_id: str,
content_type: str,
project_id: str,
text: str,
) -> bool:
"""
为内容生成并保存 embedding
@@ -1029,13 +1044,15 @@ class SemanticSearch:
try:
if content_type == "transcript":
row = conn.execute(
"SELECT full_text FROM transcripts WHERE id = ?", (content_id,),
"SELECT full_text FROM transcripts WHERE id = ?",
(content_id,),
).fetchone()
result = row["full_text"] if row else None
elif content_type == "entity":
row = conn.execute(
"SELECT name, definition FROM entities WHERE id = ?", (content_id,),
"SELECT name, definition FROM entities WHERE id = ?",
(content_id,),
).fetchone()
result = f"{row['name']}: {row['definition']}" if row else None
@@ -1067,7 +1084,10 @@ class SemanticSearch:
return None
def find_similar_content(
self, content_id: str, content_type: str, top_k: int = 5,
self,
content_id: str,
content_type: str,
top_k: int = 5,
) -> list[SemanticSearchResult]:
"""
查找与指定内容相似的内容
@@ -1175,7 +1195,10 @@ class EntityPathDiscovery:
return conn
def find_shortest_path(
self, source_entity_id: str, target_entity_id: str, max_depth: int = 5,
self,
source_entity_id: str,
target_entity_id: str,
max_depth: int = 5,
) -> EntityPath | None:
"""
查找两个实体之间的最短路径BFS算法
@@ -1192,7 +1215,8 @@ class EntityPathDiscovery:
# 获取项目ID
row = conn.execute(
"SELECT project_id FROM entities WHERE id = ?", (source_entity_id,),
"SELECT project_id FROM entities WHERE id = ?",
(source_entity_id,),
).fetchone()
if not row:
@@ -1250,7 +1274,11 @@ class EntityPathDiscovery:
return None
def find_all_paths(
self, source_entity_id: str, target_entity_id: str, max_depth: int = 4, max_paths: int = 10,
self,
source_entity_id: str,
target_entity_id: str,
max_depth: int = 4,
max_paths: int = 10,
) -> list[EntityPath]:
"""
查找两个实体之间的所有路径(限制数量和深度)
@@ -1268,7 +1296,8 @@ class EntityPathDiscovery:
# 获取项目ID
row = conn.execute(
"SELECT project_id FROM entities WHERE id = ?", (source_entity_id,),
"SELECT project_id FROM entities WHERE id = ?",
(source_entity_id,),
).fetchone()
if not row:
@@ -1280,7 +1309,11 @@ class EntityPathDiscovery:
paths = []
def dfs(
current_id: str, target_id: str, path: list[str], visited: set[str], depth: int,
current_id: str,
target_id: str,
path: list[str],
visited: set[str],
depth: int,
) -> None:
if depth > max_depth:
return
@@ -1328,7 +1361,8 @@ class EntityPathDiscovery:
nodes = []
for entity_id in entity_ids:
row = conn.execute(
"SELECT id, name, type FROM entities WHERE id = ?", (entity_id,),
"SELECT id, name, type FROM entities WHERE id = ?",
(entity_id,),
).fetchone()
if row:
nodes.append({"id": row["id"], "name": row["name"], "type": row["type"]})
@@ -1398,7 +1432,8 @@ class EntityPathDiscovery:
# 获取项目ID
row = conn.execute(
"SELECT project_id, name FROM entities WHERE id = ?", (entity_id,),
"SELECT project_id, name FROM entities WHERE id = ?",
(entity_id,),
).fetchone()
if not row:
@@ -1445,7 +1480,8 @@ class EntityPathDiscovery:
# 获取邻居信息
neighbor_info = conn.execute(
"SELECT name, type FROM entities WHERE id = ?", (neighbor_id,),
"SELECT name, type FROM entities WHERE id = ?",
(neighbor_id,),
).fetchone()
if neighbor_info:
@@ -1458,7 +1494,10 @@ class EntityPathDiscovery:
"relation_type": neighbor["relation_type"],
"evidence": neighbor["evidence"],
"path": self._get_path_to_entity(
entity_id, neighbor_id, project_id, conn,
entity_id,
neighbor_id,
project_id,
conn,
),
},
)
@@ -1470,7 +1509,11 @@ class EntityPathDiscovery:
return relations
def _get_path_to_entity(
self, source_id: str, target_id: str, project_id: str, conn: sqlite3.Connection,
self,
source_id: str,
target_id: str,
project_id: str,
conn: sqlite3.Connection,
) -> list[str]:
"""获取从源实体到目标实体的路径(简化版)"""
# BFS 找路径
@@ -1565,7 +1608,8 @@ class EntityPathDiscovery:
# 获取所有实体
entities = conn.execute(
"SELECT id, name FROM entities WHERE project_id = ?", (project_id,),
"SELECT id, name FROM entities WHERE project_id = ?",
(project_id,),
).fetchall()
# 计算每个实体作为桥梁的次数
@@ -1706,7 +1750,8 @@ class KnowledgeGapDetection:
# 检查每个实体的属性完整性
entities = conn.execute(
"SELECT id, name FROM entities WHERE project_id = ?", (project_id,),
"SELECT id, name FROM entities WHERE project_id = ?",
(project_id,),
).fetchall()
for entity in entities:
@@ -1714,7 +1759,8 @@ class KnowledgeGapDetection:
# 获取实体已有的属性
existing_attrs = conn.execute(
"SELECT template_id FROM entity_attributes WHERE entity_id = ?", (entity_id,),
"SELECT template_id FROM entity_attributes WHERE entity_id = ?",
(entity_id,),
).fetchall()
existing_template_ids = {a["template_id"] for a in existing_attrs}
@@ -1726,7 +1772,8 @@ class KnowledgeGapDetection:
missing_names = []
for template_id in missing_templates:
template = conn.execute(
"SELECT name FROM attribute_templates WHERE id = ?", (template_id,),
"SELECT name FROM attribute_templates WHERE id = ?",
(template_id,),
).fetchone()
if template:
missing_names.append(template["name"])
@@ -1759,7 +1806,8 @@ class KnowledgeGapDetection:
# 获取所有实体及其关系数量
entities = conn.execute(
"SELECT id, name, type FROM entities WHERE project_id = ?", (project_id,),
"SELECT id, name, type FROM entities WHERE project_id = ?",
(project_id,),
).fetchall()
for entity in entities:
@@ -1900,7 +1948,8 @@ class KnowledgeGapDetection:
# 分析转录文本中频繁提及但未提取为实体的词
transcripts = conn.execute(
"SELECT full_text FROM transcripts WHERE project_id = ?", (project_id,),
"SELECT full_text FROM transcripts WHERE project_id = ?",
(project_id,),
).fetchall()
# 合并所有文本
@@ -1908,7 +1957,8 @@ class KnowledgeGapDetection:
# 获取现有实体名称
existing_entities = conn.execute(
"SELECT name FROM entities WHERE project_id = ?", (project_id,),
"SELECT name FROM entities WHERE project_id = ?",
(project_id,),
).fetchall()
existing_names = {e["name"].lower() for e in existing_entities}
@@ -2146,7 +2196,10 @@ class SearchManager:
for t in transcripts:
if t["full_text"] and self.semantic_search.index_embedding(
t["id"], "transcript", t["project_id"], t["full_text"],
t["id"],
"transcript",
t["project_id"],
t["full_text"],
):
semantic_stats["indexed"] += 1
else:
@@ -2179,12 +2232,14 @@ class SearchManager:
# 全文索引统计
fulltext_count = conn.execute(
f"SELECT COUNT(*) as count FROM search_indexes {where_clause}", params,
f"SELECT COUNT(*) as count FROM search_indexes {where_clause}",
params,
).fetchone()["count"]
# 语义索引统计
semantic_count = conn.execute(
f"SELECT COUNT(*) as count FROM embeddings {where_clause}", params,
f"SELECT COUNT(*) as count FROM embeddings {where_clause}",
params,
).fetchone()["count"]
# 按类型统计
@@ -2225,7 +2280,9 @@ def get_search_manager(db_path: str = "insightflow.db") -> SearchManager:
def fulltext_search(
query: str, project_id: str | None = None, limit: int = 20,
query: str,
project_id: str | None = None,
limit: int = 20,
) -> list[SearchResult]:
"""全文搜索便捷函数"""
manager = get_search_manager()
@@ -2233,7 +2290,9 @@ def fulltext_search(
def semantic_search(
query: str, project_id: str | None = None, top_k: int = 10,
query: str,
project_id: str | None = None,
top_k: int = 10,
) -> list[SemanticSearchResult]:
"""语义搜索便捷函数"""
manager = get_search_manager()

View File

@@ -464,7 +464,9 @@ class SecurityManager:
return logs
def get_audit_stats(
self, start_time: str | None = None, end_time: str | None = None,
self,
start_time: str | None = None,
end_time: str | None = None,
) -> dict[str, Any]:
"""获取审计统计"""
conn = sqlite3.connect(self.db_path)
@@ -882,7 +884,10 @@ class SecurityManager:
return success
def apply_masking(
self, text: str, project_id: str, rule_types: list[MaskingRuleType] | None = None,
self,
text: str,
project_id: str,
rule_types: list[MaskingRuleType] | None = None,
) -> str:
"""应用脱敏规则到文本"""
rules = self.get_masking_rules(project_id)
@@ -906,7 +911,9 @@ class SecurityManager:
return masked_text
def apply_masking_to_entity(
self, entity_data: dict[str, Any], project_id: str,
self,
entity_data: dict[str, Any],
project_id: str,
) -> dict[str, Any]:
"""对实体数据应用脱敏"""
masked_data = entity_data.copy()
@@ -982,7 +989,9 @@ class SecurityManager:
return policy
def get_access_policies(
self, project_id: str, active_only: bool = True,
self,
project_id: str,
active_only: bool = True,
) -> list[DataAccessPolicy]:
"""获取数据访问策略"""
conn = sqlite3.connect(self.db_path)
@@ -1021,14 +1030,18 @@ class SecurityManager:
return policies
def check_access_permission(
self, policy_id: str, user_id: str, user_ip: str | None = None,
self,
policy_id: str,
user_id: str,
user_ip: str | None = None,
) -> tuple[bool, str | None]:
"""检查访问权限"""
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute(
"SELECT * FROM data_access_policies WHERE id = ? AND is_active = 1", (policy_id,),
"SELECT * FROM data_access_policies WHERE id = ? AND is_active = 1",
(policy_id,),
)
row = cursor.fetchone()
conn.close()
@@ -1163,7 +1176,10 @@ class SecurityManager:
return request
def approve_access_request(
self, request_id: str, approved_by: str, expires_hours: int = 24,
self,
request_id: str,
approved_by: str,
expires_hours: int = 24,
) -> AccessRequest | None:
"""批准访问请求"""
conn = sqlite3.connect(self.db_path)

View File

@@ -588,7 +588,8 @@ class SubscriptionManager:
try:
cursor = conn.cursor()
cursor.execute(
"SELECT * FROM subscription_plans WHERE tier = ? AND is_active = 1", (tier,),
"SELECT * FROM subscription_plans WHERE tier = ? AND is_active = 1",
(tier,),
)
row = cursor.fetchone()
@@ -963,7 +964,9 @@ class SubscriptionManager:
conn.close()
def cancel_subscription(
self, subscription_id: str, at_period_end: bool = True,
self,
subscription_id: str,
at_period_end: bool = True,
) -> Subscription | None:
"""取消订阅"""
conn = self._get_connection()
@@ -1017,7 +1020,10 @@ class SubscriptionManager:
conn.close()
def change_plan(
self, subscription_id: str, new_plan_id: str, prorate: bool = True,
self,
subscription_id: str,
new_plan_id: str,
prorate: bool = True,
) -> Subscription | None:
"""更改订阅计划"""
conn = self._get_connection()
@@ -1125,7 +1131,10 @@ class SubscriptionManager:
conn.close()
def get_usage_summary(
self, tenant_id: str, start_date: datetime | None = None, end_date: datetime | None = None,
self,
tenant_id: str,
start_date: datetime | None = None,
end_date: datetime | None = None,
) -> dict[str, Any]:
"""获取用量汇总"""
conn = self._get_connection()
@@ -1268,7 +1277,9 @@ class SubscriptionManager:
conn.close()
def confirm_payment(
self, payment_id: str, provider_payment_id: str | None = None,
self,
payment_id: str,
provider_payment_id: str | None = None,
) -> Payment | None:
"""确认支付完成"""
conn = self._get_connection()
@@ -1361,7 +1372,11 @@ class SubscriptionManager:
conn.close()
def list_payments(
self, tenant_id: str, status: str | None = None, limit: int = 100, offset: int = 0,
self,
tenant_id: str,
status: str | None = None,
limit: int = 100,
offset: int = 0,
) -> list[Payment]:
"""列出支付记录"""
conn = self._get_connection()
@@ -1501,7 +1516,11 @@ class SubscriptionManager:
conn.close()
def list_invoices(
self, tenant_id: str, status: str | None = None, limit: int = 100, offset: int = 0,
self,
tenant_id: str,
status: str | None = None,
limit: int = 100,
offset: int = 0,
) -> list[Invoice]:
"""列出发票"""
conn = self._get_connection()
@@ -1581,7 +1600,12 @@ class SubscriptionManager:
# ==================== 退款管理 ====================
def request_refund(
self, tenant_id: str, payment_id: str, amount: float, reason: str, requested_by: str,
self,
tenant_id: str,
payment_id: str,
amount: float,
reason: str,
requested_by: str,
) -> Refund:
"""申请退款"""
conn = self._get_connection()
@@ -1690,7 +1714,9 @@ class SubscriptionManager:
conn.close()
def complete_refund(
self, refund_id: str, provider_refund_id: str | None = None,
self,
refund_id: str,
provider_refund_id: str | None = None,
) -> Refund | None:
"""完成退款"""
conn = self._get_connection()
@@ -1775,7 +1801,11 @@ class SubscriptionManager:
conn.close()
def list_refunds(
self, tenant_id: str, status: str | None = None, limit: int = 100, offset: int = 0,
self,
tenant_id: str,
status: str | None = None,
limit: int = 100,
offset: int = 0,
) -> list[Refund]:
"""列出退款记录"""
conn = self._get_connection()
@@ -1902,7 +1932,10 @@ class SubscriptionManager:
}
def create_alipay_order(
self, tenant_id: str, plan_id: str, billing_cycle: str = "monthly",
self,
tenant_id: str,
plan_id: str,
billing_cycle: str = "monthly",
) -> dict[str, Any]:
"""创建支付宝订单(占位实现)"""
# 这里应该集成支付宝 SDK
@@ -1919,7 +1952,10 @@ class SubscriptionManager:
}
def create_wechat_order(
self, tenant_id: str, plan_id: str, billing_cycle: str = "monthly",
self,
tenant_id: str,
plan_id: str,
billing_cycle: str = "monthly",
) -> dict[str, Any]:
"""创建微信支付订单(占位实现)"""
# 这里应该集成微信支付 SDK

View File

@@ -433,7 +433,8 @@ class TenantManager:
TenantTier(tier) if tier in [t.value for t in TenantTier] else TenantTier.FREE
)
resource_limits = self.DEFAULT_LIMITS.get(
tier_enum, self.DEFAULT_LIMITS[TenantTier.FREE],
tier_enum,
self.DEFAULT_LIMITS[TenantTier.FREE],
)
tenant = Tenant(
@@ -612,7 +613,11 @@ class TenantManager:
conn.close()
def list_tenants(
self, status: str | None = None, tier: str | None = None, limit: int = 100, offset: int = 0,
self,
status: str | None = None,
tier: str | None = None,
limit: int = 100,
offset: int = 0,
) -> list[Tenant]:
"""列出租户"""
conn = self._get_connection()
@@ -1103,7 +1108,11 @@ class TenantManager:
conn.close()
def update_member_role(
self, tenant_id: str, member_id: str, role: str, permissions: list[str] | None = None,
self,
tenant_id: str,
member_id: str,
role: str,
permissions: list[str] | None = None,
) -> bool:
"""更新成员角色"""
conn = self._get_connection()
@@ -1268,7 +1277,10 @@ class TenantManager:
conn.close()
def get_usage_stats(
self, tenant_id: str, start_date: datetime | None = None, end_date: datetime | None = None,
self,
tenant_id: str,
start_date: datetime | None = None,
end_date: datetime | None = None,
) -> dict[str, Any]:
"""获取使用统计"""
conn = self._get_connection()
@@ -1314,23 +1326,28 @@ class TenantManager:
"limits": limits,
"usage_percentages": {
"storage": self._calc_percentage(
row["total_storage"] or 0, limits.get("max_storage_mb", 0) * 1024 * 1024,
row["total_storage"] or 0,
limits.get("max_storage_mb", 0) * 1024 * 1024,
),
"transcription": self._calc_percentage(
row["total_transcription"] or 0,
limits.get("max_transcription_minutes", 0) * 60,
),
"api_calls": self._calc_percentage(
row["total_api_calls"] or 0, limits.get("max_api_calls_per_day", 0),
row["total_api_calls"] or 0,
limits.get("max_api_calls_per_day", 0),
),
"projects": self._calc_percentage(
row["max_projects"] or 0, limits.get("max_projects", 0),
row["max_projects"] or 0,
limits.get("max_projects", 0),
),
"entities": self._calc_percentage(
row["max_entities"] or 0, limits.get("max_entities", 0),
row["max_entities"] or 0,
limits.get("max_entities", 0),
),
"members": self._calc_percentage(
row["max_members"] or 0, limits.get("max_team_members", 0),
row["max_members"] or 0,
limits.get("max_team_members", 0),
),
},
}
@@ -1406,8 +1423,10 @@ class TenantManager:
def _validate_domain(self, domain: str) -> bool:
"""验证域名格式"""
pattern = (r"^(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]{0, 61}[a-zA-Z0-9])?\.)*"
r"[a-zA-Z0-9](?:[a-zA-Z0-9-]{0, 61}[a-zA-Z0-9])$")
pattern = (
r"^(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]{0, 61}[a-zA-Z0-9])?\.)*"
r"[a-zA-Z0-9](?:[a-zA-Z0-9-]{0, 61}[a-zA-Z0-9])$"
)
return bool(re.match(pattern, domain))
def _check_domain_verification(self, domain: str, token: str, method: str) -> bool:

View File

@@ -159,7 +159,8 @@ def test_cache_manager() -> None:
# 批量操作
cache.set_many(
{"batch_key_1": "value1", "batch_key_2": "value2", "batch_key_3": "value3"}, ttl=60,
{"batch_key_1": "value1", "batch_key_2": "value2", "batch_key_3": "value3"},
ttl=60,
)
print(" ✓ 批量设置缓存")
@@ -208,7 +209,8 @@ def test_task_queue() -> None:
# 提交任务
task_id = queue.submit(
task_type="test_task", payload={"test": "data", "timestamp": time.time()},
task_type="test_task",
payload={"test": "data", "timestamp": time.time()},
)
print(" ✓ 提交任务: {task_id}")

View File

@@ -29,7 +29,10 @@ def test_tenant_management() -> None:
# 1. 创建租户
print("\n1.1 创建租户...")
tenant = manager.create_tenant(
name="Test Company", owner_id="user_001", tier="pro", description="A test company tenant",
name="Test Company",
owner_id="user_001",
tier="pro",
description="A test company tenant",
)
print(f"✅ 租户创建成功: {tenant.id}")
print(f" - 名称: {tenant.name}")
@@ -53,7 +56,9 @@ def test_tenant_management() -> None:
# 4. 更新租户
print("\n1.4 更新租户信息...")
updated = manager.update_tenant(
tenant_id=tenant.id, name="Test Company Updated", tier="enterprise",
tenant_id=tenant.id,
name="Test Company Updated",
tier="enterprise",
)
assert updated is not None, "更新租户失败"
print(f"✅ 租户更新成功: {updated.name}, 层级: {updated.tier}")
@@ -163,7 +168,10 @@ def test_member_management(tenant_id: str) -> None:
# 1. 邀请成员
print("\n4.1 邀请成员...")
member1 = manager.invite_member(
tenant_id=tenant_id, email="admin@test.com", role="admin", invited_by="user_001",
tenant_id=tenant_id,
email="admin@test.com",
role="admin",
invited_by="user_001",
)
print(f"✅ 成员邀请成功: {member1.email}")
print(f" - ID: {member1.id}")
@@ -171,7 +179,10 @@ def test_member_management(tenant_id: str) -> None:
print(f" - 权限: {member1.permissions}")
member2 = manager.invite_member(
tenant_id=tenant_id, email="member@test.com", role="member", invited_by="user_001",
tenant_id=tenant_id,
email="member@test.com",
role="member",
invited_by="user_001",
)
print(f"✅ 成员邀请成功: {member2.email}")

View File

@@ -205,7 +205,8 @@ def test_subscription_manager() -> None:
# 更改计划
changed = manager.change_plan(
subscription_id=subscription.id, new_plan_id=enterprise_plan.id,
subscription_id=subscription.id,
new_plan_id=enterprise_plan.id,
)
print(f"✓ 更改计划: {changed.plan_id} (Enterprise)")

View File

@@ -181,14 +181,16 @@ async def test_predictions(trend_model_id: str, anomaly_model_id: str) -> None:
# 2. 趋势预测
print("2. 趋势预测...")
trend_result = await manager.predict(
trend_model_id, {"historical_values": [10, 12, 15, 14, 18, 20, 22]},
trend_model_id,
{"historical_values": [10, 12, 15, 14, 18, 20, 22]},
)
print(f" 预测结果: {trend_result.prediction_data}")
# 3. 异常检测
print("3. 异常检测...")
anomaly_result = await manager.predict(
anomaly_model_id, {"value": 50, "historical_values": [10, 12, 11, 13, 12, 14, 13]},
anomaly_model_id,
{"value": 50, "historical_values": [10, 12, 11, 13, 12, 14, 13]},
)
print(f" 检测结果: {anomaly_result.prediction_data}")

View File

@@ -525,7 +525,8 @@ class TestGrowthManager:
try:
referral = self.manager.generate_referral_code(
program_id=program_id, referrer_id="referrer_user_001",
program_id=program_id,
referrer_id="referrer_user_001",
)
if referral:
@@ -551,7 +552,8 @@ class TestGrowthManager:
try:
success = self.manager.apply_referral_code(
referral_code=referral_code, referee_id="new_user_001",
referral_code=referral_code,
referee_id="new_user_001",
)
if success:
@@ -618,7 +620,9 @@ class TestGrowthManager:
try:
incentives = self.manager.check_team_incentive_eligibility(
tenant_id=self.test_tenant_id, current_tier="free", team_size=5,
tenant_id=self.test_tenant_id,
current_tier="free",
team_size=5,
)
self.log(f"找到 {len(incentives)} 个符合条件的激励")

View File

@@ -162,7 +162,7 @@ class TestDeveloperEcosystem:
self.log(f"Created SDK: {sdk_js.name} ({sdk_js.id})")
except Exception as e:
self.log(f"Failed to create SDK: {str(e)}", success=False)
self.log(f"Failed to create SDK: {e!s}", success=False)
def test_sdk_list(self) -> None:
"""测试列出 SDK"""
@@ -179,7 +179,7 @@ class TestDeveloperEcosystem:
self.log(f"Search found {len(search_results)} SDKs")
except Exception as e:
self.log(f"Failed to list SDKs: {str(e)}", success=False)
self.log(f"Failed to list SDKs: {e!s}", success=False)
def test_sdk_get(self) -> None:
"""测试获取 SDK 详情"""
@@ -191,19 +191,20 @@ class TestDeveloperEcosystem:
else:
self.log("SDK not found", success=False)
except Exception as e:
self.log(f"Failed to get SDK: {str(e)}", success=False)
self.log(f"Failed to get SDK: {e!s}", success=False)
def test_sdk_update(self) -> None:
"""测试更新 SDK"""
try:
if self.created_ids["sdk"]:
sdk = self.manager.update_sdk_release(
self.created_ids["sdk"][0], description="Updated description",
self.created_ids["sdk"][0],
description="Updated description",
)
if sdk:
self.log(f"Updated SDK: {sdk.name}")
except Exception as e:
self.log(f"Failed to update SDK: {str(e)}", success=False)
self.log(f"Failed to update SDK: {e!s}", success=False)
def test_sdk_publish(self) -> None:
"""测试发布 SDK"""
@@ -213,7 +214,7 @@ class TestDeveloperEcosystem:
if sdk:
self.log(f"Published SDK: {sdk.name} (status: {sdk.status.value})")
except Exception as e:
self.log(f"Failed to publish SDK: {str(e)}", success=False)
self.log(f"Failed to publish SDK: {e!s}", success=False)
def test_sdk_version_add(self) -> None:
"""测试添加 SDK 版本"""
@@ -230,7 +231,7 @@ class TestDeveloperEcosystem:
)
self.log(f"Added SDK version: {version.version}")
except Exception as e:
self.log(f"Failed to add SDK version: {str(e)}", success=False)
self.log(f"Failed to add SDK version: {e!s}", success=False)
def test_template_create(self) -> None:
"""测试创建模板"""
@@ -273,7 +274,7 @@ class TestDeveloperEcosystem:
self.log(f"Created free template: {template_free.name}")
except Exception as e:
self.log(f"Failed to create template: {str(e)}", success=False)
self.log(f"Failed to create template: {e!s}", success=False)
def test_template_list(self) -> None:
"""测试列出模板"""
@@ -290,7 +291,7 @@ class TestDeveloperEcosystem:
self.log(f"Found {len(free_templates)} free templates")
except Exception as e:
self.log(f"Failed to list templates: {str(e)}", success=False)
self.log(f"Failed to list templates: {e!s}", success=False)
def test_template_get(self) -> None:
"""测试获取模板详情"""
@@ -300,19 +301,20 @@ class TestDeveloperEcosystem:
if template:
self.log(f"Retrieved template: {template.name}")
except Exception as e:
self.log(f"Failed to get template: {str(e)}", success=False)
self.log(f"Failed to get template: {e!s}", success=False)
def test_template_approve(self) -> None:
"""测试审核通过模板"""
try:
if self.created_ids["template"]:
template = self.manager.approve_template(
self.created_ids["template"][0], reviewed_by="admin_001",
self.created_ids["template"][0],
reviewed_by="admin_001",
)
if template:
self.log(f"Approved template: {template.name}")
except Exception as e:
self.log(f"Failed to approve template: {str(e)}", success=False)
self.log(f"Failed to approve template: {e!s}", success=False)
def test_template_publish(self) -> None:
"""测试发布模板"""
@@ -322,7 +324,7 @@ class TestDeveloperEcosystem:
if template:
self.log(f"Published template: {template.name}")
except Exception as e:
self.log(f"Failed to publish template: {str(e)}", success=False)
self.log(f"Failed to publish template: {e!s}", success=False)
def test_template_review(self) -> None:
"""测试添加模板评价"""
@@ -338,7 +340,7 @@ class TestDeveloperEcosystem:
)
self.log(f"Added template review: {review.rating} stars")
except Exception as e:
self.log(f"Failed to add template review: {str(e)}", success=False)
self.log(f"Failed to add template review: {e!s}", success=False)
def test_plugin_create(self) -> None:
"""测试创建插件"""
@@ -384,7 +386,7 @@ class TestDeveloperEcosystem:
self.log(f"Created free plugin: {plugin_free.name}")
except Exception as e:
self.log(f"Failed to create plugin: {str(e)}", success=False)
self.log(f"Failed to create plugin: {e!s}", success=False)
def test_plugin_list(self) -> None:
"""测试列出插件"""
@@ -397,7 +399,7 @@ class TestDeveloperEcosystem:
self.log(f"Found {len(integration_plugins)} integration plugins")
except Exception as e:
self.log(f"Failed to list plugins: {str(e)}", success=False)
self.log(f"Failed to list plugins: {e!s}", success=False)
def test_plugin_get(self) -> None:
"""测试获取插件详情"""
@@ -407,7 +409,7 @@ class TestDeveloperEcosystem:
if plugin:
self.log(f"Retrieved plugin: {plugin.name}")
except Exception as e:
self.log(f"Failed to get plugin: {str(e)}", success=False)
self.log(f"Failed to get plugin: {e!s}", success=False)
def test_plugin_review(self) -> None:
"""测试审核插件"""
@@ -422,7 +424,7 @@ class TestDeveloperEcosystem:
if plugin:
self.log(f"Reviewed plugin: {plugin.name} ({plugin.status.value})")
except Exception as e:
self.log(f"Failed to review plugin: {str(e)}", success=False)
self.log(f"Failed to review plugin: {e!s}", success=False)
def test_plugin_publish(self) -> None:
"""测试发布插件"""
@@ -432,7 +434,7 @@ class TestDeveloperEcosystem:
if plugin:
self.log(f"Published plugin: {plugin.name}")
except Exception as e:
self.log(f"Failed to publish plugin: {str(e)}", success=False)
self.log(f"Failed to publish plugin: {e!s}", success=False)
def test_plugin_review_add(self) -> None:
"""测试添加插件评价"""
@@ -448,7 +450,7 @@ class TestDeveloperEcosystem:
)
self.log(f"Added plugin review: {review.rating} stars")
except Exception as e:
self.log(f"Failed to add plugin review: {str(e)}", success=False)
self.log(f"Failed to add plugin review: {e!s}", success=False)
def test_developer_profile_create(self) -> None:
"""测试创建开发者档案"""
@@ -479,7 +481,7 @@ class TestDeveloperEcosystem:
self.log(f"Created developer profile: {profile2.display_name}")
except Exception as e:
self.log(f"Failed to create developer profile: {str(e)}", success=False)
self.log(f"Failed to create developer profile: {e!s}", success=False)
def test_developer_profile_get(self) -> None:
"""测试获取开发者档案"""
@@ -489,19 +491,20 @@ class TestDeveloperEcosystem:
if profile:
self.log(f"Retrieved developer profile: {profile.display_name}")
except Exception as e:
self.log(f"Failed to get developer profile: {str(e)}", success=False)
self.log(f"Failed to get developer profile: {e!s}", success=False)
def test_developer_verify(self) -> None:
"""测试验证开发者"""
try:
if self.created_ids["developer"]:
profile = self.manager.verify_developer(
self.created_ids["developer"][0], DeveloperStatus.VERIFIED,
self.created_ids["developer"][0],
DeveloperStatus.VERIFIED,
)
if profile:
self.log(f"Verified developer: {profile.display_name} ({profile.status.value})")
except Exception as e:
self.log(f"Failed to verify developer: {str(e)}", success=False)
self.log(f"Failed to verify developer: {e!s}", success=False)
def test_developer_stats_update(self) -> None:
"""测试更新开发者统计"""
@@ -513,7 +516,7 @@ class TestDeveloperEcosystem:
f"Updated developer stats: {profile.plugin_count} plugins, {profile.template_count} templates",
)
except Exception as e:
self.log(f"Failed to update developer stats: {str(e)}", success=False)
self.log(f"Failed to update developer stats: {e!s}", success=False)
def test_code_example_create(self) -> None:
"""测试创建代码示例"""
@@ -562,7 +565,7 @@ console.log('Upload complete:', result.id);
self.log(f"Created code example: {example_js.title}")
except Exception as e:
self.log(f"Failed to create code example: {str(e)}", success=False)
self.log(f"Failed to create code example: {e!s}", success=False)
def test_code_example_list(self) -> None:
"""测试列出代码示例"""
@@ -575,7 +578,7 @@ console.log('Upload complete:', result.id);
self.log(f"Found {len(python_examples)} Python examples")
except Exception as e:
self.log(f"Failed to list code examples: {str(e)}", success=False)
self.log(f"Failed to list code examples: {e!s}", success=False)
def test_code_example_get(self) -> None:
"""测试获取代码示例详情"""
@@ -587,7 +590,7 @@ console.log('Upload complete:', result.id);
f"Retrieved code example: {example.title} (views: {example.view_count})",
)
except Exception as e:
self.log(f"Failed to get code example: {str(e)}", success=False)
self.log(f"Failed to get code example: {e!s}", success=False)
def test_portal_config_create(self) -> None:
"""测试创建开发者门户配置"""
@@ -608,7 +611,7 @@ console.log('Upload complete:', result.id);
self.log(f"Created portal config: {config.name}")
except Exception as e:
self.log(f"Failed to create portal config: {str(e)}", success=False)
self.log(f"Failed to create portal config: {e!s}", success=False)
def test_portal_config_get(self) -> None:
"""测试获取开发者门户配置"""
@@ -624,7 +627,7 @@ console.log('Upload complete:', result.id);
self.log(f"Active portal config: {active_config.name}")
except Exception as e:
self.log(f"Failed to get portal config: {str(e)}", success=False)
self.log(f"Failed to get portal config: {e!s}", success=False)
def test_revenue_record(self) -> None:
"""测试记录开发者收益"""
@@ -644,7 +647,7 @@ console.log('Upload complete:', result.id);
self.log(f" - Platform fee: {revenue.platform_fee}")
self.log(f" - Developer earnings: {revenue.developer_earnings}")
except Exception as e:
self.log(f"Failed to record revenue: {str(e)}", success=False)
self.log(f"Failed to record revenue: {e!s}", success=False)
def test_revenue_summary(self) -> None:
"""测试获取开发者收益汇总"""
@@ -659,7 +662,7 @@ console.log('Upload complete:', result.id);
self.log(f" - Total earnings: {summary['total_earnings']}")
self.log(f" - Transaction count: {summary['transaction_count']}")
except Exception as e:
self.log(f"Failed to get revenue summary: {str(e)}", success=False)
self.log(f"Failed to get revenue summary: {e!s}", success=False)
def print_summary(self) -> None:
"""打印测试摘要"""

View File

@@ -129,7 +129,9 @@ class TestOpsManager:
# 更新告警规则
updated_rule = self.manager.update_alert_rule(
rule1.id, threshold=85.0, description="更新后的描述",
rule1.id,
threshold=85.0,
description="更新后的描述",
)
assert updated_rule.threshold == 85.0
self.log(f"Updated alert rule threshold to {updated_rule.threshold}")
@@ -421,7 +423,9 @@ class TestOpsManager:
# 模拟扩缩容评估
event = self.manager.evaluate_scaling_policy(
policy_id=policy.id, current_instances=3, current_utilization=0.85,
policy_id=policy.id,
current_instances=3,
current_utilization=0.85,
)
if event:
@@ -439,7 +443,8 @@ class TestOpsManager:
with self.manager._get_db() as conn:
conn.execute("DELETE FROM scaling_events WHERE tenant_id = ?", (self.tenant_id,))
conn.execute(
"DELETE FROM auto_scaling_policies WHERE tenant_id = ?", (self.tenant_id,),
"DELETE FROM auto_scaling_policies WHERE tenant_id = ?",
(self.tenant_id,),
)
conn.commit()
self.log("Cleaned up auto scaling test data")
@@ -530,7 +535,8 @@ class TestOpsManager:
# 发起故障转移
event = self.manager.initiate_failover(
config_id=config.id, reason="Primary region health check failed",
config_id=config.id,
reason="Primary region health check failed",
)
if event:
@@ -638,7 +644,9 @@ class TestOpsManager:
# 生成成本报告
now = datetime.now()
report = self.manager.generate_cost_report(
tenant_id=self.tenant_id, year=now.year, month=now.month,
tenant_id=self.tenant_id,
year=now.year,
month=now.month,
)
self.log(f"Generated cost report: {report.id}")
@@ -691,7 +699,8 @@ class TestOpsManager:
)
conn.execute("DELETE FROM idle_resources WHERE tenant_id = ?", (self.tenant_id,))
conn.execute(
"DELETE FROM resource_utilizations WHERE tenant_id = ?", (self.tenant_id,),
"DELETE FROM resource_utilizations WHERE tenant_id = ?",
(self.tenant_id,),
)
conn.execute("DELETE FROM cost_reports WHERE tenant_id = ?", (self.tenant_id,))
conn.commit()

View File

@@ -19,7 +19,11 @@ class TingwuClient:
raise ValueError("ALI_ACCESS_KEY and ALI_SECRET_KEY required")
def _sign_request(
self, method: str, uri: str, query: str = "", body: str = "",
self,
method: str,
uri: str,
query: str = "",
body: str = "",
) -> dict[str, str]:
"""阿里云签名 V3"""
timestamp = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
@@ -43,7 +47,8 @@ class TingwuClient:
from alibabacloud_tingwu20230930.client import Client as TingwuSDKClient
config = open_api_models.Config(
access_key_id=self.access_key, access_key_secret=self.secret_key,
access_key_id=self.access_key,
access_key_secret=self.secret_key,
)
config.endpoint = "tingwu.cn-beijing.aliyuncs.com"
client = TingwuSDKClient(config)
@@ -53,7 +58,8 @@ class TingwuClient:
input=tingwu_models.Input(source="OSS", file_url=audio_url),
parameters=tingwu_models.Parameters(
transcription=tingwu_models.Transcription(
diarization_enabled=True, sentence_max_length=20,
diarization_enabled=True,
sentence_max_length=20,
),
),
)
@@ -73,7 +79,10 @@ class TingwuClient:
return f"mock_task_{int(time.time())}"
def get_task_result(
self, task_id: str, max_retries: int = 60, interval: int = 5,
self,
task_id: str,
max_retries: int = 60,
interval: int = 5,
) -> dict[str, Any]:
"""获取任务结果"""
try:
@@ -83,7 +92,8 @@ class TingwuClient:
from alibabacloud_tingwu20230930.client import Client as TingwuSDKClient
config = open_api_models.Config(
access_key_id=self.access_key, access_key_secret=self.secret_key,
access_key_id=self.access_key,
access_key_secret=self.secret_key,
)
config.endpoint = "tingwu.cn-beijing.aliyuncs.com"
client = TingwuSDKClient(config)

View File

@@ -264,7 +264,9 @@ class WebhookNotifier:
secret_enc = config.secret.encode("utf-8")
string_to_sign = f"{timestamp}\n{config.secret}"
hmac_code = hmac.new(
secret_enc, string_to_sign.encode("utf-8"), digestmod=hashlib.sha256,
secret_enc,
string_to_sign.encode("utf-8"),
digestmod=hashlib.sha256,
).digest()
sign = urllib.parse.quote_plus(base64.b64encode(hmac_code))
url = f"{config.url}&timestamp = {timestamp}&sign = {sign}"
@@ -497,7 +499,10 @@ class WorkflowManager:
conn.close()
def list_workflows(
self, project_id: str = None, status: str = None, workflow_type: str = None,
self,
project_id: str | None = None,
status: str = None,
workflow_type: str = None,
) -> list[Workflow]:
"""列出工作流"""
conn = self.db.get_conn()
@@ -518,7 +523,8 @@ class WorkflowManager:
where_clause = " AND ".join(conditions) if conditions else "1 = 1"
rows = conn.execute(
f"SELECT * FROM workflows WHERE {where_clause} ORDER BY created_at DESC", params,
f"SELECT * FROM workflows WHERE {where_clause} ORDER BY created_at DESC",
params,
).fetchall()
return [self._row_to_workflow(row) for row in rows]
@@ -780,7 +786,8 @@ class WorkflowManager:
conn = self.db.get_conn()
try:
row = conn.execute(
"SELECT * FROM webhook_configs WHERE id = ?", (webhook_id,),
"SELECT * FROM webhook_configs WHERE id = ?",
(webhook_id,),
).fetchone()
if not row:
@@ -962,9 +969,9 @@ class WorkflowManager:
def list_logs(
self,
workflow_id: str = None,
task_id: str = None,
status: str = None,
workflow_id: str | None = None,
task_id: str | None = None,
status: str | None = None,
limit: int = 100,
offset: int = 0,
) -> list[WorkflowLog]:
@@ -1074,7 +1081,7 @@ class WorkflowManager:
# ==================== Workflow Execution ====================
async def execute_workflow(self, workflow_id: str, input_data: dict = None) -> dict:
async def execute_workflow(self, workflow_id: str, input_data: dict | None = None) -> dict:
"""执行工作流"""
workflow = self.get_workflow(workflow_id)
if not workflow:
@@ -1159,7 +1166,10 @@ class WorkflowManager:
raise
async def _execute_tasks_with_deps(
self, tasks: list[WorkflowTask], input_data: dict, log_id: str,
self,
tasks: list[WorkflowTask],
input_data: dict,
log_id: str,
) -> dict:
"""按依赖顺序执行任务"""
results = {}
@@ -1413,7 +1423,10 @@ class WorkflowManager:
# ==================== Notification ====================
async def _send_workflow_notification(
self, workflow: Workflow, results: dict, success: bool = True,
self,
workflow: Workflow,
results: dict,
success: bool = True,
) -> None:
"""发送工作流执行通知"""
if not workflow.webhook_ids: