| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101 |
- import base64
- import json
- from openai import AsyncOpenAI
- import logging
- from typing import List, Any
- logger = logging.getLogger(__name__)
- async def analyze_image(
- base_url: str,
- api_key: str,
- model_name: str,
- rules: List[Any],
- image_bytes: bytes
- ) -> str:
- try:
- client = AsyncOpenAI(
- base_url=base_url,
- api_key=api_key,
- )
-
- # Prepare rules text
- # Handle both dicts and Pydantic models
- rules_data = []
- for r in rules:
- if hasattr(r, 'model_dump'):
- rules_data.append(r.model_dump())
- elif hasattr(r, 'dict'):
- rules_data.append(r.dict())
- else:
- rules_data.append(r)
-
- rules_text = json.dumps(rules_data, ensure_ascii=False, indent=2)
-
- system_prompt = """你是一个智能安防助手。
- 请根据提供的监控画面和告警规则列表,逐项检查是否触发告警。
- 请严格按照 JSON 格式返回结果,返回一个对象列表。
- 每个对象包含以下字段:
- - "alarm_name": 告警规则名称 (String)
- - "alarm_content": 详细描述发现的情况,如果没有异常请填 "正常" (String)
- - "is_alarm": 是否触发告警 (Boolean, true/false)
- - "area": 异常发生的区域描述,如果无异常可填 "全局" (String)
- 不要包含 markdown 格式标记(如 ```json),直接返回 JSON 字符串。
- """
- user_content = f"请根据以下规则检查画面:\n{rules_text}"
- b64_image = base64.b64encode(image_bytes).decode('utf-8')
-
- response = await client.chat.completions.create(
- model=model_name,
- messages=[
- {
- "role": "system",
- "content": system_prompt
- },
- {
- "role": "user",
- "content": [
- {"type": "text", "text": user_content},
- {
- "type": "image_url",
- "image_url": {
- "url": f"data:image/jpeg;base64,{b64_image}"
- },
- },
- ],
- }
- ],
- max_tokens=1000
- )
-
- return response.choices[0].message.content
- except Exception as e:
- logger.error(f"LLM Error: {e}")
- return json.dumps([{"alarm_name": "System Error", "alarm_content": str(e), "is_alarm": True, "time": "", "area": ""}])
async def test_connection(
    base_url: str,
    api_key: str,
    model_name: str
) -> str:
    """Smoke-test connectivity to an OpenAI-compatible chat endpoint.

    Sends a minimal chat completion request and returns the model's reply.

    Args:
        base_url: Base URL of the OpenAI-compatible endpoint.
        api_key: API key for that endpoint.
        model_name: Chat model to probe.

    Returns:
        The model's reply text on success.

    Raises:
        Exception: whatever the client raised is logged and re-raised so the
        caller can surface the connection failure.
    """
    try:
        client = AsyncOpenAI(
            base_url=base_url,
            api_key=api_key,
        )

        response = await client.chat.completions.create(
            model=model_name,
            messages=[
                {"role": "user", "content": "Hello, this is a connection test."}
            ],
            max_tokens=10
        )

        return response.choices[0].message.content
    except Exception as e:
        logger.exception("LLM Test Error: %s", e)
        # Bare raise preserves the original traceback; `raise e` would reset it.
        raise
|