PhysicsCorrection/scripts/test_llm_api.py

#!/usr/bin/env python3
"""
LLM API endpoint test script.
Tests different LLM API endpoint configurations.
"""
import os
from typing import Any, Dict, Optional

from coze_coding_dev_sdk import LLMClient, Config
from coze_coding_utils.runtime_ctx.context import new_context
from langchain_core.messages import HumanMessage
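
# A minimal usage sketch: this assumes the coze_coding_dev_sdk and
# coze_coding_utils packages are installed and that, for the custom-endpoint
# tests, COZE_API_KEY is set in the environment. The invocation path below is
# an assumption based on the file location:
#
#   export COZE_API_KEY=<your key>
#   python PhysicsCorrection/scripts/test_llm_api.py
#
# The script probes each candidate endpoint in turn and prints a pass/fail summary.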


def test_llm_endpoint(base_url: Optional[str] = None) -> Dict[str, Any]:
    """
    Test an LLM endpoint.

    Args:
        base_url: Custom base URL; None uses the default endpoint.

    Returns:
        A dict describing the test result.
    """
    result = {
        "base_url": base_url or "default",
        "success": False,
        "error": None,
        "response": None,
    }
    try:
        # Create the runtime context
        ctx = new_context(method="invoke")

        # Build the client: use a custom Config when a base URL is given,
        # otherwise fall back to the SDK's default endpoint
        if base_url:
            config = Config(
                api_key=os.environ.get("COZE_API_KEY", ""),
                base_url=base_url,
                timeout=30,
            )
            client = LLMClient(config=config, ctx=ctx)
        else:
            client = LLMClient(ctx=ctx)

        # Try a minimal LLM call
        messages = [
            HumanMessage(content="Hello, please reply with 'test successful'")
        ]
        print(f"Testing endpoint: {base_url or 'default'}...")
        response = client.invoke(
            messages=messages,
            model="doubao-seed-2-0-lite-260215",  # lightweight model for a quick test
            temperature=0.1,
            max_completion_tokens=100,
        )
        result["success"] = True
        result["response"] = str(response.content)[:100]
        print(f"✅ Success! Response: {result['response']}")
    except Exception as e:
        result["error"] = str(e)[:200]
        print(f"❌ Failed! Error: {result['error']}")
    return result
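
# Example of probing a single endpoint directly (a sketch; importing from the
# scripts/ directory interactively is an assumption, not part of this script):
#
#   >>> from test_llm_api import test_llm_endpoint
#   >>> r = test_llm_endpoint("https://api.coze.cn/v1")
#   >>> r["success"], r["error"]
#
# The returned dict always has the keys "base_url", "success", "error", "response".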


def main():
    print("=" * 80)
    print("LLM API endpoint test")
    print("=" * 80)
    print()

    # Candidate endpoints to test
    test_endpoints = [
        None,  # default endpoint
        "https://integration.coze.cn/api",  # possible integration endpoint
        "https://api.coze.cn/v1",  # v1 API
        "https://api.coze.cn/v1/chat",  # chat API
    ]

    results = []
    for endpoint in test_endpoints:
        result = test_llm_endpoint(endpoint)
        results.append(result)
        print()
    # Print the summary
    print("=" * 80)
    print("Test summary")
    print("=" * 80)
    print()
    for result in results:
        status = "✅ success" if result["success"] else "❌ failed"
        print(f"{status} - {result['base_url']}")
        if result["error"]:
            print(f"    Error: {result['error']}")
        if result["response"]:
            print(f"    Response: {result['response']}")
        print()
    # Find the endpoints that worked
    successful_endpoints = [r for r in results if r["success"]]
    if successful_endpoints:
        print("=" * 80)
        print("Recommendation")
        print("=" * 80)
        print()
        print("The following endpoints passed and are recommended:")
        for r in successful_endpoints:
            print(f"  - {r['base_url']}")
        print()
        print("To use a custom endpoint, configure it in code:")
        print("""
    from coze_coding_dev_sdk import LLMClient, Config

    config = Config(
        base_url="<the correct endpoint>",
        timeout=30
    )
    client = LLMClient(config=config, ctx=ctx)
""")
    else:
        print("=" * 80)
        print("Diagnosis")
        print("=" * 80)
        print()
        print("All endpoint tests failed. Possible causes:")
        print("1. Missing or invalid credentials")
        print("2. Network connectivity problems")
        print("3. The LLM service is unavailable")
        print("4. Incorrect model ID")
        print()
        print("Please check:")
        print("- Make sure the LLM service is configured correctly on the Coze platform")
        print("- Check the network connection")
        print("- Contact Coze technical support for the correct API endpoint")


if __name__ == "__main__":
    main()