fix: auto-fix code issues (cron)
- Fix duplicate imports/fields
- Fix exception handling
- Fix PEP 8 formatting issues
- Add type annotations
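For reference, the PEP 8 fix applied throughout this diff is pycodestyle E251 (no spaces around "=" in keyword arguments). A minimal before/after sketch, not taken from the diff itself:

def post(url, json=None, timeout=None):
    """Stand-in for an HTTP call; only the call-site style matters here."""
    return url, json, timeout

# Before: flagged by pycodestyle E251 (spaces around "=" in keyword arguments).
post("https://example.com", json = {"k": "v"}, timeout = 120.0)

# After: the style this commit applies.
post("https://example.com", json={"k": "v"}, timeout=120.0)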
@@ -12,8 +12,8 @@ from dataclasses import dataclass
 import httpx

 KIMI_API_KEY = os.getenv("KIMI_API_KEY", "")
 KIMI_BASE_URL = os.getenv("KIMI_BASE_URL", "https://api.kimi.com/coding")
-KIMI_API_KEY = os.getenv("KIMI_API_KEY", "")
-KIMI_BASE_URL = os.getenv("KIMI_BASE_URL", "https://api.kimi.com/coding")


 @dataclass
@@ -41,22 +41,22 @@ class RelationExtractionResult:
 class LLMClient:
     """Kimi API client."""

     def __init__(self, api_key: str = None, base_url: str = None) -> None:
         self.api_key = api_key or KIMI_API_KEY
         self.base_url = base_url or KIMI_BASE_URL
         self.headers = {
             "Authorization": f"Bearer {self.api_key}",
             "Content-Type": "application/json",
         }

     async def chat(
         self, messages: list[ChatMessage], temperature: float = 0.3, stream: bool = False
     ) -> str:
         """Send a chat completion request."""
         if not self.api_key:
             raise ValueError("KIMI_API_KEY not set")

         payload = {
             "model": "k2p5",
             "messages": [{"role": m.role, "content": m.content} for m in messages],
             "temperature": temperature,
@@ -64,24 +64,24 @@ class LLMClient:
         }

         async with httpx.AsyncClient() as client:
             response = await client.post(
                 f"{self.base_url}/v1/chat/completions",
-                headers = self.headers,
-                json = payload,
-                timeout = 120.0,
+                headers=self.headers,
+                json=payload,
+                timeout=120.0,
             )
             response.raise_for_status()
             result = response.json()
             return result["choices"][0]["message"]["content"]

     async def chat_stream(
         self, messages: list[ChatMessage], temperature: float = 0.3
     ) -> AsyncGenerator[str, None]:
         """Send a streaming chat request."""
         if not self.api_key:
             raise ValueError("KIMI_API_KEY not set")

         payload = {
             "model": "k2p5",
             "messages": [{"role": m.role, "content": m.content} for m in messages],
             "temperature": temperature,
@@ -92,19 +92,19 @@ class LLMClient:
             async with client.stream(
                 "POST",
                 f"{self.base_url}/v1/chat/completions",
-                headers = self.headers,
-                json = payload,
-                timeout = 120.0,
+                headers=self.headers,
+                json=payload,
+                timeout=120.0,
             ) as response:
                 response.raise_for_status()
                 async for line in response.aiter_lines():
                     if line.startswith("data: "):
                         data = line[6:]
                         if data == "[DONE]":
                             break
                         try:
                             chunk = json.loads(data)
                             delta = chunk["choices"][0]["delta"]
                             if "content" in delta:
                                 yield delta["content"]
                         except (json.JSONDecodeError, KeyError, IndexError):
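For context, the streaming method above parses Server-Sent Events: it strips the "data: " prefix from each line, stops at the "[DONE]" sentinel, and yields content deltas as they arrive. A minimal consumer sketch; the llm_client import path is an assumption, not shown in the diff:

import asyncio

from llm_client import ChatMessage, get_llm_client  # assumed import path

async def main() -> None:
    client = get_llm_client()
    messages = [ChatMessage(role="user", content="Summarize today's standup.")]
    # Fragments arrive incrementally as the SSE stream is consumed.
    async for fragment in client.chat_stream(messages, temperature=0.3):
        print(fragment, end="", flush=True)

asyncio.run(main())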
@@ -114,7 +114,7 @@ class LLMClient:
         self, text: str
     ) -> tuple[list[EntityExtractionResult], list[RelationExtractionResult]]:
         """Extract entities and relations, with confidence scores."""
         prompt = f"""Extract the key entities and the relations between them from the following meeting text, and return them as JSON:

 Text: {text[:3000]}

@@ -139,30 +139,30 @@ class LLMClient:
   ]
 }}"""

-        messages = [ChatMessage(role = "user", content = prompt)]
-        content = await self.chat(messages, temperature = 0.1)
+        messages = [ChatMessage(role="user", content=prompt)]
+        content = await self.chat(messages, temperature=0.1)

         json_match = re.search(r"\{{.*?\}}", content, re.DOTALL)
         if not json_match:
             return [], []

         try:
             data = json.loads(json_match.group())
             entities = [
                 EntityExtractionResult(
-                    name = e["name"],
-                    type = e.get("type", "OTHER"),
-                    definition = e.get("definition", ""),
-                    confidence = e.get("confidence", 0.8),
+                    name=e["name"],
+                    type=e.get("type", "OTHER"),
+                    definition=e.get("definition", ""),
+                    confidence=e.get("confidence", 0.8),
                 )
                 for e in data.get("entities", [])
             ]
             relations = [
                 RelationExtractionResult(
-                    source = r["source"],
-                    target = r["target"],
-                    type = r.get("type", "related"),
-                    confidence = r.get("confidence", 0.8),
+                    source=r["source"],
+                    target=r["target"],
+                    type=r.get("type", "related"),
+                    confidence=r.get("confidence", 0.8),
                 )
                 for r in data.get("relations", [])
             ]
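Two notes on the parsing above. First, the regex r"\{{.*?\}}" matches a literal "{{...}}" pair rather than a plain JSON object "{...}"; the doubled braces look like leftovers from f-string escaping, so the method may silently return ([], []) even when the model answers correctly. Second, a reply of roughly this shape (illustrative values only) is what the comprehensions expect:

# Illustrative only: the structure the extraction method parses out of the model reply.
sample = {
    "entities": [
        {"name": "Alpha Launch", "type": "PROJECT", "definition": "Q3 release", "confidence": 0.9},
    ],
    "relations": [
        {"source": "Alpha Launch", "target": "Backend Team", "type": "related", "confidence": 0.85},
    ],
}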
@@ -173,10 +173,10 @@ class LLMClient:

     async def rag_query(self, query: str, context: str, project_context: dict) -> str:
         """RAG Q&A: answer a question from the project context."""
         prompt = f"""You are a professional project-analysis assistant. Answer the question based on the following project information:

 ## Project information
-{json.dumps(project_context, ensure_ascii = False, indent = 2)}
+{json.dumps(project_context, ensure_ascii=False, indent=2)}

 ## Relevant context
 {context[:4000]}
@@ -186,21 +186,21 @@ class LLMClient:

 Please answer in Chinese, concise and professional. If the information is insufficient, say so explicitly."""

         messages = [
             ChatMessage(
-                role = "system", content = "You are a professional project-analysis assistant, skilled at extracting insights from meeting notes."
+                role="system", content="You are a professional project-analysis assistant, skilled at extracting insights from meeting notes."
             ),
-            ChatMessage(role = "user", content = prompt),
+            ChatMessage(role="user", content=prompt),
         ]

-        return await self.chat(messages, temperature = 0.3)
+        return await self.chat(messages, temperature=0.3)

     async def agent_command(self, command: str, project_context: dict) -> dict:
         """Parse an agent command: turn a natural-language instruction into a structured operation."""
         prompt = f"""Parse the following user instruction into a structured operation:

 ## Project information
-{json.dumps(project_context, ensure_ascii = False, indent = 2)}
+{json.dumps(project_context, ensure_ascii=False, indent=2)}

 ## User instruction
 {command}
@@ -221,10 +221,10 @@ class LLMClient:
 - create_relation: create a relation; params include source (source entity), target (target entity), relation_type (relation type)
 """

-        messages = [ChatMessage(role = "user", content = prompt)]
-        content = await self.chat(messages, temperature = 0.1)
+        messages = [ChatMessage(role="user", content=prompt)]
+        content = await self.chat(messages, temperature=0.1)

         json_match = re.search(r"\{{.*?\}}", content, re.DOTALL)
         if not json_match:
             return {"intent": "unknown", "explanation": "Unable to parse the instruction"}

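For reference, a successful parse resolves to a small intent dict; on failure the method falls back to {"intent": "unknown", ...}. An illustrative success value, with the field values assumed (the visible prompt lines only define intent, params, and the create_relation parameter names):

# Illustrative only: one plausible parse of "assign the backend team to own Alpha Launch".
result = {
    "intent": "create_relation",
    "params": {"source": "Alpha Launch", "target": "Backend Team", "relation_type": "owned_by"},
    "explanation": "Create an ownership relation between the two entities.",
}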
@@ -235,14 +235,14 @@ class LLMClient:

     async def analyze_entity_evolution(self, entity_name: str, mentions: list[dict]) -> str:
         """Analyze how an entity evolves in the project and how attitudes toward it shift."""
         mentions_text = "\n".join(
             [
                 f"[{m.get('created_at', 'unknown time')}] {m.get('text_snippet', '')}"
                 for m in mentions[:20]
             ]  # cap the number of mentions
         )

         prompt = f"""Analyze how the entity "{entity_name}" evolved in the project and how attitudes toward it changed:

 ## Mention history
 {mentions_text}
@@ -255,16 +255,16 @@ class LLMClient:

 Answer in Chinese, with a clear structure."""

-        messages = [ChatMessage(role = "user", content = prompt)]
-        return await self.chat(messages, temperature = 0.3)
+        messages = [ChatMessage(role="user", content=prompt)]
+        return await self.chat(messages, temperature=0.3)


 # Singleton instance
 _llm_client = None


 def get_llm_client() -> LLMClient:
     global _llm_client
     if _llm_client is None:
         _llm_client = LLMClient()
     return _llm_client
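A minimal usage sketch of the singleton accessor; the import path and the project payload are assumptions for illustration:

import asyncio

from llm_client import get_llm_client  # assumed import path

async def demo() -> None:
    client = get_llm_client()  # one LLMClient shared per process
    answer = await client.rag_query(
        query="What changed in the launch plan?",
        context="...retrieved meeting snippets...",
        project_context={"name": "Alpha Launch", "status": "active"},
    )
    print(answer)

asyncio.run(demo())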