修复 kimi模型 max token导致执行失败问题

This commit is contained in:
小肥羊 2024-11-18 11:35:09 +08:00
parent f627891634
commit d477051c06
1 changed file with 3 additions and 3 deletions

View File

@ -86,7 +86,7 @@ namespace VideoAnalysisCore.AICore.ChatGPT.KIMI
var modelId = reqTokenCount > 32 * 1000 ? "moonshot-v1-128k" : "moonshot-v1-32k";
var chatRep = new ChatReq
{
max_tokens = reqTokenCount * 2,
max_tokens = 32*1024 - (reqTokenCount + 20),
temperature = 0.3f,
frequency_penalty = 0,
presence_penalty = 0,
@ -99,8 +99,8 @@ namespace VideoAnalysisCore.AICore.ChatGPT.KIMI
var chatResp = await moonshotClient.Chat(chatRep);
RedisExpand.SetTaskGPTCached(task, chatResp);
if (chatResp is null || chatResp.error != null)
throw new Exception($"KIMI模型返回异常 Chat 请求参数: {System.Text.Json.JsonSerializer.Serialize(chatRep)} " +
$" chatResp {System.Text.Json.JsonSerializer.Serialize(chatResp)}");
throw new Exception($"KIMI模型返回异常 Chat 返回参数: " +
$" {System.Text.Json.JsonSerializer.Serialize(chatResp)}");
var chatResContent = chatResp?.choices.FirstOrDefault()?.message.content;