diff --git a/backend/developer_ecosystem_manager.py b/backend/developer_ecosystem_manager.py
index 8a0bdc5..2b170e6 100644
--- a/backend/developer_ecosystem_manager.py
+++ b/backend/developer_ecosystem_manager.py
@@ -661,8 +661,10 @@ class DeveloperEcosystemManager:
(id, name, description, category, subcategory, tags, author_id, author_name,
status, price, currency, preview_image_url, demo_url, documentation_url,
download_url, install_count, rating, rating_count, review_count, version,
- min_platform_version, file_size, checksum, created_at, updated_at, published_at)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ min_platform_version, file_size, checksum, created_at, updated_at,
+ published_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
+ ?, ?)
""",
(
template.id,
diff --git a/backend/document_processor.py b/backend/document_processor.py
index 61d396b..88c29a5 100644
--- a/backend/document_processor.py
+++ b/backend/document_processor.py
@@ -75,7 +75,8 @@ class DocumentProcessor:
return "\n\n".join(text_parts)
except ImportError:
raise ImportError(
- "PDF processing requires PyPDF2 or pdfplumber. Install with: pip install PyPDF2",
+ "PDF processing requires PyPDF2 or pdfplumber. "
+ "Install with: pip install PyPDF2",
)
except Exception as e:
raise ValueError(f"PDF extraction failed: {e!s}")
diff --git a/backend/enterprise_manager.py b/backend/enterprise_manager.py
index 938c388..cafdd1e 100644
--- a/backend/enterprise_manager.py
+++ b/backend/enterprise_manager.py
@@ -407,7 +407,8 @@ class EnterpriseManager:
processed INTEGER DEFAULT 0,
processed_at TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- FOREIGN KEY (request_id) REFERENCES saml_auth_requests(request_id) ON DELETE CASCADE,
+ FOREIGN KEY (request_id) REFERENCES saml_auth_requests(request_id)
+ ON DELETE CASCADE,
FOREIGN KEY (tenant_id) REFERENCES tenants(id) ON DELETE CASCADE
)
""")
@@ -522,7 +523,8 @@ class EnterpriseManager:
error_count INTEGER DEFAULT 0,
details TEXT DEFAULT '{}',
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- FOREIGN KEY (policy_id) REFERENCES data_retention_policies(id) ON DELETE CASCADE,
+ FOREIGN KEY (policy_id) REFERENCES data_retention_policies(id)
+ ON DELETE CASCADE,
FOREIGN KEY (tenant_id) REFERENCES tenants(id) ON DELETE CASCADE
)
""")
@@ -531,40 +533,51 @@ class EnterpriseManager:
cursor.execute("CREATE INDEX IF NOT EXISTS idx_sso_tenant ON sso_configs(tenant_id)")
cursor.execute("CREATE INDEX IF NOT EXISTS idx_sso_provider ON sso_configs(provider)")
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_saml_requests_config ON saml_auth_requests(sso_config_id)",
+ "CREATE INDEX IF NOT EXISTS idx_saml_requests_config "
+ "ON saml_auth_requests(sso_config_id)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_saml_requests_expires ON saml_auth_requests(expires_at)",
+ "CREATE INDEX IF NOT EXISTS idx_saml_requests_expires "
+ "ON saml_auth_requests(expires_at)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_saml_responses_request ON saml_auth_responses(request_id)",
+ "CREATE INDEX IF NOT EXISTS idx_saml_responses_request "
+ "ON saml_auth_responses(request_id)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_scim_config_tenant ON scim_configs(tenant_id)",
+ "CREATE INDEX IF NOT EXISTS idx_scim_config_tenant "
+ "ON scim_configs(tenant_id)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_scim_users_tenant ON scim_users(tenant_id)",
+ "CREATE INDEX IF NOT EXISTS idx_scim_users_tenant "
+ "ON scim_users(tenant_id)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_scim_users_external ON scim_users(external_id)",
+ "CREATE INDEX IF NOT EXISTS idx_scim_users_external "
+ "ON scim_users(external_id)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_audit_export_tenant ON audit_log_exports(tenant_id)",
+ "CREATE INDEX IF NOT EXISTS idx_audit_export_tenant "
+ "ON audit_log_exports(tenant_id)",
)
cursor.execute(
"CREATE INDEX IF NOT EXISTS idx_audit_export_status ON audit_log_exports(status)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_retention_tenant ON data_retention_policies(tenant_id)",
+ "CREATE INDEX IF NOT EXISTS idx_retention_tenant "
+ "ON data_retention_policies(tenant_id)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_retention_type ON data_retention_policies(resource_type)",
+ "CREATE INDEX IF NOT EXISTS idx_retention_type "
+ "ON data_retention_policies(resource_type)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_retention_jobs_policy ON data_retention_jobs(policy_id)",
+ "CREATE INDEX IF NOT EXISTS idx_retention_jobs_policy "
+ "ON data_retention_jobs(policy_id)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_retention_jobs_status ON data_retention_jobs(status)",
+ "CREATE INDEX IF NOT EXISTS idx_retention_jobs_status "
+ "ON data_retention_jobs(status)",
)
conn.commit()
@@ -901,7 +914,8 @@ class EnterpriseManager:
cursor.execute(
"""
INSERT INTO saml_auth_requests
- (id, tenant_id, sso_config_id, request_id, relay_state, created_at, expires_at, used)
+ (id, tenant_id, sso_config_id, request_id, relay_state,
+ created_at, expires_at, used)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
""",
(
diff --git a/backend/export_manager.py b/backend/export_manager.py
index c91b3e9..f57683e 100644
--- a/backend/export_manager.py
+++ b/backend/export_manager.py
@@ -153,20 +153,21 @@ class ExportManager:
y2 = y2 - dy * offset / dist
svg_parts.append(
- f'',
+ f'',
)
# 关系标签
mid_x = (x1 + x2) / 2
mid_y = (y1 + y2) / 2
svg_parts.append(
- f'',
+ f'',
)
svg_parts.append(
- f'{rel.relation_type}',
+ f'{rel.relation_type}',
)
# 绘制实体节点
@@ -177,19 +178,21 @@ class ExportManager:
# 节点圆圈
svg_parts.append(
- f'',
+ f'',
)
# 实体名称
svg_parts.append(
- f'{entity.name[:8]}',
+ f''
+ f'{entity.name[:8]}',
)
# 实体类型
svg_parts.append(
- f'{entity.type}',
+ f'{entity.type}',
)
# 图例
diff --git a/backend/growth_manager.py b/backend/growth_manager.py
index c94ce8e..c667b9b 100644
--- a/backend/growth_manager.py
+++ b/backend/growth_manager.py
@@ -1405,7 +1405,8 @@ class GrowthManager:
conn.execute(
"""
INSERT INTO email_logs
- (id, campaign_id, tenant_id, user_id, email, template_id, status, subject, created_at)
+ (id, campaign_id, tenant_id, user_id, email, template_id,
+ status, subject, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -1647,7 +1648,8 @@ class GrowthManager:
# 更新执行计数
conn.execute(
- "UPDATE automation_workflows SET execution_count = execution_count + 1 WHERE id = ?",
+ """UPDATE automation_workflows
+ SET execution_count = execution_count + 1 WHERE id = ?""",
(workflow_id,),
)
conn.commit()
diff --git a/backend/llm_client.py b/backend/llm_client.py
index 2603c7c..6d6a6d5 100644
--- a/backend/llm_client.py
+++ b/backend/llm_client.py
@@ -12,8 +12,6 @@ from dataclasses import dataclass
import httpx
-# re is already imported above
-
KIMI_API_KEY = os.getenv("KIMI_API_KEY", "")
KIMI_BASE_URL = os.getenv("KIMI_BASE_URL", "https://api.kimi.com/coding")
diff --git a/backend/localization_manager.py b/backend/localization_manager.py
index 8d8679b..f7c4dbe 100644
--- a/backend/localization_manager.py
+++ b/backend/localization_manager.py
@@ -756,10 +756,11 @@ class LocalizationManager:
cursor.execute("""
CREATE TABLE IF NOT EXISTS language_configs (
code TEXT PRIMARY KEY, name TEXT NOT NULL, name_local TEXT NOT NULL,
- is_rtl INTEGER DEFAULT 0, is_active INTEGER DEFAULT 1, is_default INTEGER DEFAULT 0,
- fallback_language TEXT, date_format TEXT, time_format TEXT, datetime_format TEXT,
- number_format TEXT, currency_format TEXT, first_day_of_week INTEGER DEFAULT 1,
- calendar_type TEXT DEFAULT 'gregorian'
+ is_rtl INTEGER DEFAULT 0, is_active INTEGER DEFAULT 1,
+ is_default INTEGER DEFAULT 0, fallback_language TEXT,
+ date_format TEXT, time_format TEXT, datetime_format TEXT,
+ number_format TEXT, currency_format TEXT,
+ first_day_of_week INTEGER DEFAULT 1, calendar_type TEXT DEFAULT 'gregorian'
)
""")
cursor.execute("""
@@ -773,8 +774,10 @@ class LocalizationManager:
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS tenant_data_center_mappings (
- id TEXT PRIMARY KEY, tenant_id TEXT NOT NULL UNIQUE, primary_dc_id TEXT NOT NULL,
- secondary_dc_id TEXT, region_code TEXT NOT NULL, data_residency TEXT DEFAULT 'regional',
+ id TEXT PRIMARY KEY, tenant_id TEXT NOT NULL UNIQUE,
+ primary_dc_id TEXT NOT NULL, secondary_dc_id TEXT,
+ region_code TEXT NOT NULL,
+ data_residency TEXT DEFAULT 'regional',
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (tenant_id) REFERENCES tenants(id) ON DELETE CASCADE,
@@ -788,43 +791,52 @@ class LocalizationManager:
name_local TEXT DEFAULT '{}', supported_countries TEXT DEFAULT '[]',
supported_currencies TEXT DEFAULT '[]', is_active INTEGER DEFAULT 1,
config TEXT DEFAULT '{}', icon_url TEXT, display_order INTEGER DEFAULT 0,
- min_amount REAL, max_amount REAL, created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ min_amount REAL, max_amount REAL,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS country_configs (
code TEXT PRIMARY KEY, code3 TEXT NOT NULL, name TEXT NOT NULL,
- name_local TEXT DEFAULT '{}', region TEXT NOT NULL, default_language TEXT NOT NULL,
- supported_languages TEXT DEFAULT '[]', default_currency TEXT NOT NULL,
- supported_currencies TEXT DEFAULT '[]', timezone TEXT NOT NULL,
- calendar_type TEXT DEFAULT 'gregorian', date_format TEXT, time_format TEXT,
- number_format TEXT, address_format TEXT, phone_format TEXT, vat_rate REAL,
+ name_local TEXT DEFAULT '{}', region TEXT NOT NULL,
+ default_language TEXT NOT NULL, supported_languages TEXT DEFAULT '[]',
+ default_currency TEXT NOT NULL, supported_currencies TEXT DEFAULT '[]',
+ timezone TEXT NOT NULL, calendar_type TEXT DEFAULT 'gregorian',
+ date_format TEXT, time_format TEXT, number_format TEXT,
+ address_format TEXT, phone_format TEXT, vat_rate REAL,
is_active INTEGER DEFAULT 1
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS timezone_configs (
- id TEXT PRIMARY KEY, timezone TEXT NOT NULL UNIQUE, utc_offset TEXT NOT NULL, dst_offset TEXT,
- country_code TEXT NOT NULL, region TEXT NOT NULL, is_active INTEGER DEFAULT 1
+ id TEXT PRIMARY KEY, timezone TEXT NOT NULL UNIQUE,
+ utc_offset TEXT NOT NULL, dst_offset TEXT, country_code TEXT NOT NULL,
+ region TEXT NOT NULL, is_active INTEGER DEFAULT 1
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS currency_configs (
- code TEXT PRIMARY KEY, name TEXT NOT NULL, name_local TEXT DEFAULT '{}', symbol TEXT NOT NULL,
+ code TEXT PRIMARY KEY, name TEXT NOT NULL,
+ name_local TEXT DEFAULT '{}', symbol TEXT NOT NULL,
decimal_places INTEGER DEFAULT 2, decimal_separator TEXT DEFAULT '.',
thousands_separator TEXT DEFAULT ', ', is_active INTEGER DEFAULT 1
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS localization_settings (
- id TEXT PRIMARY KEY, tenant_id TEXT NOT NULL UNIQUE, default_language TEXT DEFAULT 'en',
- supported_languages TEXT DEFAULT '["en"]', default_currency TEXT DEFAULT 'USD',
- supported_currencies TEXT DEFAULT '["USD"]', default_timezone TEXT DEFAULT 'UTC',
- default_date_format TEXT, default_time_format TEXT, default_number_format TEXT,
- calendar_type TEXT DEFAULT 'gregorian', first_day_of_week INTEGER DEFAULT 1,
- region_code TEXT DEFAULT 'global', data_residency TEXT DEFAULT 'regional',
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ id TEXT PRIMARY KEY, tenant_id TEXT NOT NULL UNIQUE,
+ default_language TEXT DEFAULT 'en',
+ supported_languages TEXT DEFAULT '["en"]',
+ default_currency TEXT DEFAULT 'USD',
+ supported_currencies TEXT DEFAULT '["USD"]',
+ default_timezone TEXT DEFAULT 'UTC',
+ default_date_format TEXT, default_time_format TEXT,
+ default_number_format TEXT, calendar_type TEXT DEFAULT 'gregorian',
+ first_day_of_week INTEGER DEFAULT 1, region_code TEXT DEFAULT 'global',
+ data_residency TEXT DEFAULT 'regional',
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (tenant_id) REFERENCES tenants(id) ON DELETE CASCADE
)
""")
@@ -838,22 +850,28 @@ class LocalizationManager:
cursor.execute("CREATE INDEX IF NOT EXISTS idx_dc_region ON data_centers(region_code)")
cursor.execute("CREATE INDEX IF NOT EXISTS idx_dc_status ON data_centers(status)")
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_tenant_dc ON tenant_data_center_mappings(tenant_id)",
+ "CREATE INDEX IF NOT EXISTS idx_tenant_dc "
+ "ON tenant_data_center_mappings(tenant_id)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_payment_provider ON localized_payment_methods(provider)",
+ "CREATE INDEX IF NOT EXISTS idx_payment_provider "
+ "ON localized_payment_methods(provider)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_payment_active ON localized_payment_methods(is_active)",
+ "CREATE INDEX IF NOT EXISTS idx_payment_active "
+ "ON localized_payment_methods(is_active)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_country_region ON country_configs(region)",
+ "CREATE INDEX IF NOT EXISTS idx_country_region "
+ "ON country_configs(region)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_tz_country ON timezone_configs(country_code)",
+ "CREATE INDEX IF NOT EXISTS idx_tz_country "
+ "ON timezone_configs(country_code)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_locale_settings_tenant ON localization_settings(tenant_id)",
+ "CREATE INDEX IF NOT EXISTS idx_locale_settings_tenant "
+ "ON localization_settings(tenant_id)",
)
conn.commit()
logger.info("Localization tables initialized successfully")
@@ -898,7 +916,8 @@ class LocalizationManager:
cursor.execute(
"""
INSERT OR IGNORE INTO data_centers
- (id, region_code, name, location, endpoint, priority, supported_regions, capabilities)
+ (id, region_code, name, location, endpoint, priority,
+ supported_regions, capabilities)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -1188,7 +1207,9 @@ class LocalizationManager:
row = cursor.fetchone()
if not row:
cursor.execute("""
- SELECT * FROM data_centers WHERE supported_regions LIKE '%"global"%' AND status = 'active'
+ SELECT * FROM data_centers
+ WHERE supported_regions LIKE '%"global"%'
+ AND status = 'active'
ORDER BY priority LIMIT 1
""")
row = cursor.fetchone()
@@ -1197,7 +1218,9 @@ class LocalizationManager:
primary_dc_id = row["id"]
cursor.execute(
"""
- SELECT * FROM data_centers WHERE id != ? AND status = 'active' ORDER BY priority LIMIT 1
+ SELECT * FROM data_centers
+ WHERE id != ? AND status = 'active'
+ ORDER BY priority LIMIT 1
""",
(primary_dc_id,),
)
@@ -1468,9 +1491,10 @@ class LocalizationManager:
cursor.execute(
"""
INSERT INTO localization_settings
- (id, tenant_id, default_language, supported_languages, default_currency, supported_currencies,
- default_timezone, default_date_format, default_time_format, default_number_format, calendar_type,
- first_day_of_week, region_code, data_residency, created_at, updated_at)
+ (id, tenant_id, default_language, supported_languages, default_currency,
+ supported_currencies, default_timezone, default_date_format, default_time_format,
+ default_number_format, calendar_type, first_day_of_week, region_code,
+ data_residency, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
diff --git a/backend/main.py b/backend/main.py
index 1456d02..1d22ae2 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -1250,8 +1250,10 @@ def extract_entities_with_llm(text: str) -> tuple[list[dict], list[dict]]:
文本:{text[:3000]}
要求:
-1. entities: 每个实体包含 name(名称), type(类型: PROJECT/TECH/PERSON/ORG/OTHER), definition(一句话定义)
-2. relations: 每个关系包含 source(源实体名), target(目标实体名), type(关系类型: belongs_to/works_with/depends_on/mentions/related)
+1. entities: 每个实体包含 name(名称), type(类型: PROJECT/TECH/PERSON/ORG/OTHER),
+ definition(一句话定义)
+2. relations: 每个关系包含 source(源实体名), target(目标实体名),
+ type(关系类型: belongs_to/works_with/depends_on/mentions/related)
3. 只返回 JSON 对象,格式: {{"entities": [...], "relations": [...]}}
示例:
@@ -2053,7 +2055,8 @@ async def agent_suggest(project_id: str, _=Depends(verify_api_key)):
2. 建议的操作(如合并相似实体、补充定义等)
3. 值得关注的关键信息
-返回 JSON 格式:{{"suggestions": [{{"type": "insight|action", "title": "...", "description": "..."}}]}}"""
+返回 JSON 格式:{{"suggestions": [{{"type": "insight|action", "title": "...",
+"description": "..."}}]}}"""
messages = [ChatMessage(role="user", content=prompt)]
content = await llm.chat(messages, temperature=0.3)
@@ -2593,7 +2596,8 @@ async def set_entity_attribute_endpoint(
# 记录历史
conn.execute(
"""INSERT INTO attribute_history
- (id, entity_id, attribute_name, old_value, new_value, changed_by, changed_at, change_reason)
+ (id, entity_id, attribute_name, old_value, new_value,
+ changed_by, changed_at, change_reason)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
(
str(uuid.uuid4())[:UUID_LENGTH],
@@ -2628,7 +2632,8 @@ async def set_entity_attribute_endpoint(
# 记录历史
conn.execute(
"""INSERT INTO attribute_history
- (id, entity_id, attribute_name, old_value, new_value, changed_by, changed_at, change_reason)
+ (id, entity_id, attribute_name, old_value, new_value,
+ changed_by, changed_at, change_reason)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
(
str(uuid.uuid4())[:UUID_LENGTH],
@@ -3292,7 +3297,9 @@ async def export_transcript_markdown_endpoint(transcript_id: str, _=Depends(veri
io.BytesIO(markdown_content.encode("utf-8")),
media_type="text/markdown",
headers={
- "Content-Disposition": f"attachment; filename=insightflow-transcript-{transcript_id}.md",
+ "Content-Disposition": (
+ f"attachment; filename=insightflow-transcript-{transcript_id}.md"
+ ),
},
)
@@ -3395,7 +3402,9 @@ async def neo4j_sync_project(request: Neo4jSyncRequest, _=Depends(verify_api_key
"project_id": request.project_id,
"entities_synced": len(entities_data),
"relations_synced": len(relations_data),
- "message": f"Synced {len(entities_data)} entities and {len(relations_data)} relations to Neo4j",
+ "message": (
+ f"Synced {len(entities_data)} entities and {len(relations_data)} relations to Neo4j"
+ ),
}
@@ -4516,7 +4525,8 @@ async def upload_video_endpoint(
for frame in result.frames:
conn.execute(
"""INSERT INTO video_frames
- (id, video_id, frame_number, timestamp, image_url, ocr_text, extracted_entities, created_at)
+ (id, video_id, frame_number, timestamp, image_url, ocr_text,
+ extracted_entities, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
(
frame.id,
@@ -4560,7 +4570,8 @@ async def upload_video_endpoint(
conn = db.get_conn()
conn.execute(
"""INSERT OR REPLACE INTO multimodal_mentions
- (id, project_id, entity_id, modality, source_id, source_type, text_snippet, confidence, created_at)
+ (id, project_id, entity_id, modality, source_id, source_type,
+ text_snippet, confidence, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
(
str(uuid.uuid4())[:UUID_LENGTH],
@@ -4718,7 +4729,8 @@ async def upload_image_endpoint(
conn = db.get_conn()
conn.execute(
"""INSERT OR REPLACE INTO multimodal_mentions
- (id, project_id, entity_id, modality, source_id, source_type, text_snippet, confidence, created_at)
+ (id, project_id, entity_id, modality, source_id, source_type,
+ text_snippet, confidence, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
(
str(uuid.uuid4())[:UUID_LENGTH],
@@ -4925,7 +4937,8 @@ async def align_multimodal_entities_endpoint(
for link in links:
conn.execute(
"""INSERT OR REPLACE INTO multimodal_entity_links
- (id, entity_id, linked_entity_id, link_type, confidence, evidence, modalities, created_at)
+ (id, entity_id, linked_entity_id, link_type, confidence,
+ evidence, modalities, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
(
link.id,
@@ -5012,7 +5025,8 @@ async def get_multimodal_stats_endpoint(project_id: str, _=Depends(verify_api_ke
modality_dist = {}
for modality in ["audio", "video", "image", "document"]:
count = conn.execute(
- "SELECT COUNT(*) as count FROM multimodal_mentions WHERE project_id = ? AND modality = ?",
+ """SELECT COUNT(*) as count FROM multimodal_mentions
+ WHERE project_id = ? AND modality = ?""",
(project_id, modality),
).fetchone()["count"]
modality_dist[modality] = count
@@ -5300,7 +5314,10 @@ class PluginCreate(BaseModel):
name: str = Field(..., description="插件名称")
plugin_type: str = Field(
...,
- description="插件类型: chrome_extension, feishu_bot, dingtalk_bot, zapier, make, webdav, custom",
+ description=(
+ "插件类型: chrome_extension, feishu_bot, dingtalk_bot, "
+ "zapier, make, webdav, custom"
+ ),
)
project_id: str = Field(..., description="关联项目ID")
config: dict = Field(default_factory=dict, description="插件配置")
diff --git a/backend/neo4j_manager.py b/backend/neo4j_manager.py
index 9ac9c44..1e60024 100644
--- a/backend/neo4j_manager.py
+++ b/backend/neo4j_manager.py
@@ -378,7 +378,8 @@ class Neo4jManager:
result = session.run(
"""
MATCH path = shortestPath(
- (source:Entity {id: $source_id})-[*1..$max_depth]-(target:Entity {id: $target_id})
+ (source:Entity {id: $source_id})-[*1..$max_depth]-
+ (target:Entity {id: $target_id})
)
RETURN path
""",
@@ -440,7 +441,8 @@ class Neo4jManager:
with self._driver.session() as session:
result = session.run(
"""
- MATCH path = (source:Entity {id: $source_id})-[*1..$max_depth]-(target:Entity {id: $target_id})
+ MATCH path = (source:Entity {id: $source_id})-[*1..$max_depth]-
+ (target:Entity {id: $target_id})
WHERE source <> target
RETURN path
LIMIT $limit
@@ -504,7 +506,9 @@ class Neo4jManager:
if relation_type:
result = session.run(
"""
- MATCH (e:Entity {id: $entity_id})-[r:RELATES_TO {relation_type: $relation_type}]-(neighbor:Entity)
+ MATCH (e:Entity {id: $entity_id})-
+ [r:RELATES_TO {relation_type: $relation_type}]-
+ (neighbor:Entity)
RETURN neighbor, r.relation_type as rel_type, r.evidence as evidence
LIMIT $limit
""",
@@ -555,7 +559,8 @@ class Neo4jManager:
with self._driver.session() as session:
result = session.run(
"""
- MATCH (e1:Entity {id: $id1})-[:RELATES_TO]-(common:Entity)-[:RELATES_TO]-(e2:Entity {id: $id2})
+ MATCH (e1:Entity {id: $id1})-[:RELATES_TO]-(common:Entity)-
+ [:RELATES_TO]-(e2:Entity {id: $id2})
RETURN DISTINCT common
""",
id1=entity_id1,
@@ -593,7 +598,8 @@ class Neo4jManager:
CALL gds.graph.exists('project-graph-$project_id') YIELD exists
WITH exists
CALL apoc.do.when(exists,
- 'CALL gds.graph.drop("project-graph-$project_id") YIELD graphName RETURN graphName',
+                'CALL gds.graph.drop("project-graph-$project_id") ' +
+                'YIELD graphName RETURN graphName',
'RETURN "none" as graphName',
{}
) YIELD value RETURN value
@@ -1069,7 +1075,8 @@ def sync_project_to_neo4j(
manager.sync_relations_batch(graph_relations)
logger.info(
- f"Synced project {project_id} to Neo4j: {len(entities)} entities, {len(relations)} relations",
+ f"Synced project {project_id} to Neo4j: {len(entities)} entities, "
+ f"{len(relations)} relations",
)
diff --git a/backend/ops_manager.py b/backend/ops_manager.py
index 4814a25..4a381a4 100644
--- a/backend/ops_manager.py
+++ b/backend/ops_manager.py
@@ -1690,7 +1690,8 @@ class OpsManager:
INSERT INTO auto_scaling_policies
(id, tenant_id, name, resource_type, min_instances, max_instances,
target_utilization, scale_up_threshold, scale_down_threshold,
- scale_up_step, scale_down_step, cooldown_period, is_enabled, created_at, updated_at)
+ scale_up_step, scale_down_step, cooldown_period, is_enabled, created_at,
+ updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -2153,7 +2154,8 @@ class OpsManager:
"""
INSERT INTO failover_configs
(id, tenant_id, name, primary_region, secondary_regions, failover_trigger,
- auto_failover, failover_timeout, health_check_id, is_enabled, created_at, updated_at)
+ auto_failover, failover_timeout, health_check_id, is_enabled, created_at,
+ updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -2545,7 +2547,8 @@ class OpsManager:
conn.execute(
"""
INSERT INTO cost_reports
- (id, tenant_id, report_period, total_cost, currency, breakdown, trends, anomalies, created_at)
+ (id, tenant_id, report_period, total_cost, currency, breakdown,
+ trends, anomalies, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -2786,7 +2789,8 @@ class OpsManager:
"""
INSERT INTO cost_optimization_suggestions
(id, tenant_id, category, title, description, potential_savings, currency,
- confidence, difficulty, implementation_steps, risk_level, is_applied, created_at)
+ confidence, difficulty, implementation_steps, risk_level, is_applied,
+ created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
diff --git a/backend/performance_manager.py b/backend/performance_manager.py
index be485f9..689acd8 100644
--- a/backend/performance_manager.py
+++ b/backend/performance_manager.py
@@ -496,7 +496,8 @@ class CacheManager:
# 预热实体数据
entities = conn.execute(
"""SELECT e.*,
- (SELECT COUNT(*) FROM entity_mentions m WHERE m.entity_id = e.id) as mention_count
+ (SELECT COUNT(*) FROM entity_mentions m WHERE m.entity_id = e.id)
+ as mention_count
FROM entities e
WHERE e.project_id = ?
ORDER BY mention_count DESC
@@ -788,7 +789,8 @@ class DatabaseSharding:
target_conn.execute(
"""
INSERT OR REPLACE INTO entity_relations
- (id, project_id, source_entity_id, target_entity_id, relation_type, evidence, created_at)
+ (id, project_id, source_entity_id, target_entity_id, relation_type,
+ evidence, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?)
""",
tuple(relation),
@@ -1409,7 +1411,10 @@ class PerformanceMonitor:
"type": "performance_alert",
"metric": metric.to_dict(),
"threshold": self.alert_threshold,
- "message": f"{metric.metric_type} exceeded threshold: {metric.duration_ms}ms > {self.alert_threshold}ms",
+ "message": (
+ f"{metric.metric_type} exceeded threshold: "
+ f"{metric.duration_ms}ms > {self.alert_threshold}ms"
+ ),
}
for handler in self.alert_handlers:
diff --git a/backend/plugin_manager.py b/backend/plugin_manager.py
index 9e0114f..80832a6 100644
--- a/backend/plugin_manager.py
+++ b/backend/plugin_manager.py
@@ -188,7 +188,8 @@ class PluginManager:
conn.execute(
"""INSERT INTO plugins
- (id, name, plugin_type, project_id, status, config, created_at, updated_at, use_count)
+ (id, name, plugin_type, project_id, status, config, created_at, updated_at,
+ use_count)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
(
plugin.id,
@@ -1204,7 +1205,8 @@ class WebDAVSyncManager:
conn.execute(
"""INSERT INTO webdav_syncs
(id, name, project_id, server_url, username, password, remote_path,
- sync_mode, sync_interval, last_sync_status, is_active, created_at, updated_at, sync_count)
+ sync_mode, sync_interval, last_sync_status, is_active, created_at, updated_at,
+ sync_count)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
(
sync_id,
diff --git a/backend/search_manager.py b/backend/search_manager.py
index 6b6ba5e..1f6db0f 100644
--- a/backend/search_manager.py
+++ b/backend/search_manager.py
@@ -233,7 +233,8 @@ class FullTextSearch:
# 创建索引
conn.execute(
- "CREATE INDEX IF NOT EXISTS idx_search_content ON search_indexes(content_id, content_type)",
+ """CREATE INDEX IF NOT EXISTS idx_search_content
+ ON search_indexes(content_id, content_type)""",
)
conn.execute("CREATE INDEX IF NOT EXISTS idx_search_project ON search_indexes(project_id)")
conn.execute("CREATE INDEX IF NOT EXISTS idx_term_freq_term ON search_term_freq(term)")
@@ -310,7 +311,8 @@ class FullTextSearch:
conn.execute(
"""
INSERT OR REPLACE INTO search_indexes
- (id, content_id, content_type, project_id, tokens, token_positions, created_at, updated_at)
+ (id, content_id, content_type, project_id, tokens, token_positions,
+ created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -579,7 +581,10 @@ class FullTextSearch:
(content_id,),
).fetchone()
if row:
- return f"{row['source_name']} {row['relation_type']} {row['target_name']} {row['evidence'] or ''}"
+ return (
+ f"{row['source_name']} {row['relation_type']} "
+ f"{row['target_name']} {row['evidence'] or ''}"
+ )
return None
return None
@@ -784,7 +789,10 @@ class FullTextSearch:
).fetchall()
for r in relations:
- text = f"{r['source_name']} {r['relation_type']} {r['target_name']} {r['evidence'] or ''}"
+ text = (
+ f"{r['source_name']} {r['relation_type']} "
+ f"{r['target_name']} {r['evidence'] or ''}"
+ )
if self.index_content(r["id"], "relation", r["project_id"], text):
stats["relations"] += 1
else:
@@ -854,7 +862,8 @@ class SemanticSearch:
""")
conn.execute(
- "CREATE INDEX IF NOT EXISTS idx_embedding_content ON embeddings(content_id, content_type)",
+ """CREATE INDEX IF NOT EXISTS idx_embedding_content
+ ON embeddings(content_id, content_type)""",
)
conn.execute("CREATE INDEX IF NOT EXISTS idx_embedding_project ON embeddings(project_id)")
@@ -1107,7 +1116,8 @@ class SemanticSearch:
conn = self._get_conn()
row = conn.execute(
- "SELECT embedding, project_id FROM embeddings WHERE content_id = ? AND content_type = ?",
+ """SELECT embedding, project_id FROM embeddings
+ WHERE content_id = ? AND content_type = ?""",
(content_id, content_type),
).fetchone()
diff --git a/backend/security_manager.py b/backend/security_manager.py
index 9e6a7e6..8f7a274 100644
--- a/backend/security_manager.py
+++ b/backend/security_manager.py
@@ -315,7 +315,8 @@ class SecurityManager:
"CREATE INDEX IF NOT EXISTS idx_masking_project ON masking_rules(project_id)",
)
cursor.execute(
- "CREATE INDEX IF NOT EXISTS idx_access_policy_project ON data_access_policies(project_id)",
+ """CREATE INDEX IF NOT EXISTS idx_access_policy_project
+ ON data_access_policies(project_id)""",
)
conn.commit()
diff --git a/backend/subscription_manager.py b/backend/subscription_manager.py
index c66e4df..f517e69 100644
--- a/backend/subscription_manager.py
+++ b/backend/subscription_manager.py
@@ -1108,7 +1108,8 @@ class SubscriptionManager:
cursor.execute(
"""
INSERT INTO usage_records
- (id, tenant_id, resource_type, quantity, unit, recorded_at, cost, description, metadata)
+ (id, tenant_id, resource_type, quantity, unit, recorded_at,
+ cost, description, metadata)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -1780,7 +1781,9 @@ class SubscriptionManager:
cursor.execute(
"""
UPDATE refunds
- SET status = 'rejected', metadata = json_set(metadata, '$.rejection_reason', ?), updated_at = ?
+ SET status = 'rejected',
+ metadata = json_set(metadata, '$.rejection_reason', ?),
+ updated_at = ?
WHERE id = ?
""",
(reason, now, refund_id),
@@ -1860,7 +1863,8 @@ class SubscriptionManager:
cursor.execute(
"""
INSERT INTO billing_history
- (id, tenant_id, type, amount, currency, description, reference_id, balance_after, created_at, metadata)
+ (id, tenant_id, type, amount, currency, description,
+ reference_id, balance_after, created_at, metadata)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -1943,7 +1947,8 @@ class SubscriptionManager:
amount = plan.price_yearly if billing_cycle == "yearly" else plan.price_monthly
return {
- "order_id": f"ALI{datetime.now().strftime('%Y%m%d%H%M%S')}{uuid.uuid4().hex[:8].upper()}",
+ "order_id": f"ALI{datetime.now().strftime('%Y%m%d%H%M%S')}"
+ f"{uuid.uuid4().hex[:8].upper()}",
"amount": amount,
"currency": plan.currency,
"qr_code_url": f"https://qr.alipay.com/mock/{uuid.uuid4().hex[:16]}",
@@ -1963,7 +1968,8 @@ class SubscriptionManager:
amount = plan.price_yearly if billing_cycle == "yearly" else plan.price_monthly
return {
- "order_id": f"WX{datetime.now().strftime('%Y%m%d%H%M%S')}{uuid.uuid4().hex[:8].upper()}",
+ "order_id": f"WX{datetime.now().strftime('%Y%m%d%H%M%S')}"
+ f"{uuid.uuid4().hex[:8].upper()}",
"amount": amount,
"currency": plan.currency,
"prepay_id": f"wx{uuid.uuid4().hex[:32]}",
diff --git a/backend/tenant_manager.py b/backend/tenant_manager.py
index d68c8c8..174eee6 100644
--- a/backend/tenant_manager.py
+++ b/backend/tenant_manager.py
@@ -457,7 +457,8 @@ class TenantManager:
cursor.execute(
"""
INSERT INTO tenants (id, name, slug, description, tier, status, owner_id,
- created_at, updated_at, expires_at, settings, resource_limits, metadata)
+ created_at, updated_at, expires_at, settings,
+ resource_limits, metadata)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -1451,7 +1452,10 @@ class TenantManager:
# TODO: 实现 HTTP 文件验证
# import requests
# try:
- # response = requests.get(f"http://{domain}/.well-known/insightflow-verify.txt", timeout = 10)
+ # response = requests.get(
+ # f"http://{domain}/.well-known/insightflow-verify.txt",
+ # timeout = 10
+ # )
# if response.status_code == 200 and token in response.text:
# return True
# except (ImportError, Exception):
diff --git a/backend/test_phase8_task6.py b/backend/test_phase8_task6.py
index 2572433..0c6d65a 100644
--- a/backend/test_phase8_task6.py
+++ b/backend/test_phase8_task6.py
@@ -513,7 +513,8 @@ class TestDeveloperEcosystem:
self.manager.update_developer_stats(self.created_ids["developer"][0])
profile = self.manager.get_developer_profile(self.created_ids["developer"][0])
self.log(
- f"Updated developer stats: {profile.plugin_count} plugins, {profile.template_count} templates",
+ f"Updated developer stats: {profile.plugin_count} plugins, "
+ f"{profile.template_count} templates",
)
except Exception as e:
self.log(f"Failed to update developer stats: {e!s}", success=False)
diff --git a/backend/test_phase8_task8.py b/backend/test_phase8_task8.py
index 5356e55..039ae70 100644
--- a/backend/test_phase8_task8.py
+++ b/backend/test_phase8_task8.py
@@ -275,7 +275,8 @@ class TestOpsManager:
"""
INSERT INTO alerts
(id, rule_id, tenant_id, severity, status, title, description,
- metric, value, threshold, labels, annotations, started_at, notification_sent, suppression_count)
+ metric, value, threshold, labels, annotations, started_at,
+ notification_sent, suppression_count)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
@@ -343,7 +344,8 @@ class TestOpsManager:
conn.execute(
"""
INSERT INTO resource_metrics
- (id, tenant_id, resource_type, resource_id, metric_name, metric_value, unit, timestamp)
+ (id, tenant_id, resource_type, resource_id, metric_name,
+ metric_value, unit, timestamp)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
""",
(
diff --git a/backend/tingwu_client.py b/backend/tingwu_client.py
index 4f1e621..1fbe97a 100644
--- a/backend/tingwu_client.py
+++ b/backend/tingwu_client.py
@@ -35,7 +35,8 @@ class TingwuClient:
"x-acs-action": "CreateTask",
"x-acs-version": "2023-09-30",
"x-acs-date": timestamp,
- "Authorization": f"ACS3-HMAC-SHA256 Credential = {self.access_key}/acs/tingwu/cn-beijing",
+ "Authorization": f"ACS3-HMAC-SHA256 Credential = {self.access_key}"
+ "/acs/tingwu/cn-beijing",
}
def create_task(self, audio_url: str, language: str = "zh") -> str:
diff --git a/backend/workflow_manager.py b/backend/workflow_manager.py
index 5ce6447..e067b8f 100644
--- a/backend/workflow_manager.py
+++ b/backend/workflow_manager.py
@@ -1019,20 +1019,23 @@ class WorkflowManager:
# 成功次数
success = conn.execute(
- "SELECT COUNT(*) FROM workflow_logs WHERE workflow_id = ? AND status = 'success' AND created_at > ?",
+ """SELECT COUNT(*) FROM workflow_logs
+ WHERE workflow_id = ? AND status = 'success' AND created_at > ?""",
(workflow_id, since),
).fetchone()[0]
# 失败次数
failed = conn.execute(
- "SELECT COUNT(*) FROM workflow_logs WHERE workflow_id = ? AND status = 'failed' AND created_at > ?",
+ """SELECT COUNT(*) FROM workflow_logs
+ WHERE workflow_id = ? AND status = 'failed' AND created_at > ?""",
(workflow_id, since),
).fetchone()[0]
# 平均执行时间
avg_duration = (
conn.execute(
- "SELECT AVG(duration_ms) FROM workflow_logs WHERE workflow_id = ? AND created_at > ?",
+ """SELECT AVG(duration_ms) FROM workflow_logs
+ WHERE workflow_id = ? AND created_at > ?""",
(workflow_id, since),
).fetchone()[0]
or 0