Fix ChatGPT analysis errors caused by the earlier optimization

parent 836ba81a73
commit 94aa7572bf
@@ -64,9 +64,7 @@ onBeforeMount(() => {
 });
 });
 onMounted(() => {
-    nextTick(async () => {
-        // appStyle();
-    });
+    window.addEventListener("resize", appStyle);
 });
 onUnmounted(() => {});
 // defineExpose({
@@ -51,6 +51,8 @@ namespace VideoAnalysisCore.AICore.GPT
         /// <para>This feature is in Beta. If specified, our system makes a best effort to sample deterministically, so that repeated requests with the same seed and parameters should return the same result. Determinism is not guaranteed; refer to the system_fingerprint response parameter to monitor backend changes.</para>
         /// </summary>
         public int? seed { get; set; } = null;
+        [json]
+        public string? title { get; set; } = null;
         /// <summary>
         /// Reasoning model (deepseek-reasoner)
         /// </summary>
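The new title property rides along on the request object so later code can build cache keys from it (see var time = chatRep.title + ... further down). A minimal sketch of the idea, assuming the truncated [json] attribute is a serializer-ignore attribute such as System.Text.Json's [JsonIgnore] — an assumption; the full attribute name is not visible in this diff:

    using System.Text.Json.Serialization;

    public class ChatRequestSketch
    {
        // Deterministic-sampling knob from the hunk above (Beta; determinism not guaranteed).
        public int? seed { get; set; } = null;

        // Hypothetical: if the attribute is [JsonIgnore], title stays available for
        // logging and cache keys but is never serialized into the API payload.
        [JsonIgnore]
        public string? title { get; set; } = null;
    }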
@@ -23,7 +23,7 @@ namespace VideoAnalysisCore.AICore.GPT.ChatGPT
         private readonly IHttpClientFactory _httpClientFactory;
         private readonly RedisManager redisManager;

-        public ChatGPTClient(IHttpClientFactory httpClientFactory, RedisManager redisManager):base(httpClientFactory, redisManager)
+        public ChatGPTClient(IHttpClientFactory httpClientFactory, RedisManager redisManager) : base(httpClientFactory, redisManager)
         {
             _httpClientFactory = httpClientFactory;
             this.redisManager = redisManager;
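Both dependencies are forwarded to the base class as well as stored locally. Under a typical ASP.NET Core composition root (an assumption — registration code is not part of this commit) the wiring would look roughly like:

    // Hypothetical DI sketch; actual registration is not shown in the commit.
    services.AddHttpClient();                  // provides IHttpClientFactory
    services.AddSingleton<RedisManager>();
    services.AddSingleton<ChatGPTClient>();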
@@ -41,15 +41,27 @@ namespace VideoAnalysisCore.AICore.GPT.ChatGPT
         /// <param name="max_tokens">Maximum tokens <para>defaults to the maximum (16000/8000) when not set</para></param>
         /// <returns></returns>
         /// <exception cref="Exception"></exception>
-        public async Task<T> ChatAsync<T>(string task, string postMessages, string title, string model =null, int max_tokens = 8000)
+        public async Task<T> ChatAsync<T>(string task, string postMessages, string title, string model = null, int max_tokens = 8000)
         {
-            //chatReq.modalities = null;
-            //chatReq.max_tokens = null;
-            //chatReq.top_p = null;
-
-            return await base.ChatAsync<T>(task, postMessages, title, model ?? ChatGPTType.GPT5_mini, max_tokens);
+            Message[] messageArr = [
+                new Message(postMessages,"user"),
+            ];
+            messageArr = messageArr.Where(s => s != null).ToArray();
+            var chatReq = new ChatRequest
+            {
+                taskId = task,
+                model = model,
+                max_tokens = model == "deepseek-reasoner" ? 16000 : 8000,
+                stream = true,
+                temperature = 0.2f,
+                messages = messageArr
+            };
+
+            chatReq.modalities = null;
+            chatReq.max_tokens = null;
+            chatReq.top_p = null;
+
+            return await base.ChatAsync<T>(chatReq);
         }

     }
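A hedged caller sketch (the result type MyAnalysis and the prompt text are illustrative, not from the commit): the public signature is unchanged, so existing call sites keep compiling while the body now funnels everything through a single ChatRequest:

    var client = new ChatGPTClient(httpClientFactory, redisManager);
    var analysis = await client.ChatAsync<MyAnalysis>(
        task: "task-001",
        postMessages: "Analyze this transcript ...",
        title: "video-analysis");

Worth noting from the hunk itself: max_tokens is computed in the initializer and then immediately nulled again (together with modalities and top_p), so the provider's server-side defaults end up applying.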
@@ -44,9 +44,20 @@ namespace VideoAnalysisCore.AICore.GPT.ChatGPT
         /// <exception cref="Exception"></exception>
         public async Task<T> ChatAsync<T>(string task, string postMessages, string title, string model =null, int max_tokens = 8000)
         {
-            model = model ?? ChatGPTType.Deepseek_Reasoner;
-            max_tokens = model == ChatGPTType.Deepseek_Reasoner ? 16000 : max_tokens;
-            return await base.ChatAsync<T>(task, postMessages, title, model, max_tokens);
+            Message[] messageArr = [
+                new Message(postMessages,"user"),
+            ];
+            messageArr = messageArr.Where(s => s != null).ToArray();
+            var chatReq = new ChatRequest
+            {
+                taskId = task,
+                model = model ?? ChatGPTType.Deepseek_Reasoner,
+                max_tokens = model == ChatGPTType.Deepseek_Reasoner ? 16000 : max_tokens,
+                stream = true,
+                temperature = 0.2f,
+                messages = messageArr
+            };
+            return await base.ChatAsync<T>(chatReq);
         }

     }
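One behavioral subtlety: the old body coalesced model before sizing the budget, while the new object initializer's ternary still reads the original parameter, so a null model now defaults to the reasoner yet keeps the smaller budget. A minimal sketch of the difference, assuming ChatGPTType.Deepseek_Reasoner is a string constant:

    string? model = null;
    int max_tokens = 8000;

    // Old behavior: coalesce first, then size the budget.
    var oldModel  = model ?? ChatGPTType.Deepseek_Reasoner;
    var oldBudget = oldModel == ChatGPTType.Deepseek_Reasoner ? 16000 : max_tokens; // 16000

    // New behavior (as committed): the ternary still sees the original null.
    var newModel  = model ?? ChatGPTType.Deepseek_Reasoner;
    var newBudget = model == ChatGPTType.Deepseek_Reasoner ? 16000 : max_tokens;    // 8000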
@@ -150,31 +150,16 @@ namespace VideoAnalysisCore.AICore.GPT
         /// <param name="max_tokens">Maximum tokens <para>defaults to the maximum (16000/8000) when not set</para></param>
         /// <returns></returns>
         /// <exception cref="Exception"></exception>
-        public async Task<T> ChatAsync<T>(string task, string postMessages, string title, string model = "deepseek-reasoner", int max_tokens = -1)
+        public async Task<T> ChatAsync<T>(ChatRequest chatRep)
         {
-            Message[] messageArr = [
-                new Message(postMessages,"user"),
-            ];
-            messageArr = messageArr.Where(s => s != null).ToArray();
-            var chatRep = new ChatRequest
-            {
-                taskId = task,
-                model = model,
-                max_tokens = model == "deepseek-reasoner" ? 16000 : 8000,
-                stream = true,
-                temperature = 0.2f,
-                messages = messageArr
-            };
-            if (max_tokens != -1)
-                chatRep.max_tokens = max_tokens;
             var tryCount = 10;
             while (tryCount-- > 0)
             {
                 try
                 {
-                    var time = title + DateTime.Now.ToString("MMddHHmmss");
+                    var time = chatRep.title + DateTime.Now.ToString("MMddHHmmss");
                     var redisCached = new object[2] { chatRep, null };
-                    redisManager.SetTaskGPTCached(task, time, chatRep);
+                    redisManager.SetTaskGPTCached(chatRep.taskId, time, chatRep);
                     var chatResp = await Chat(chatRep);
                     var chatResContent = chatResp?.res;
                     if (string.IsNullOrEmpty(chatResContent))
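The signature change moves all request construction out of the base class; what remains is a bounded retry loop. A stripped-down sketch of that pattern with illustrative names (CallAndParseAsync is a hypothetical stand-in for Chat(chatRep) plus the JSON extraction below):

    using System;
    using System.Threading.Tasks;

    static class GptRetrySketch
    {
        // Bounded retry: try up to 10 times, log each failure, then fail loudly.
        public static async Task<T> RetryAsync<T>(Func<Task<T?>> callAndParse) where T : class
        {
            var tryCount = 10;
            while (tryCount-- > 0)
            {
                try
                {
                    var parsed = await callAndParse();   // stand-in for Chat(chatRep) + JSON extraction
                    if (parsed != null) return parsed;   // success: stop retrying
                }
                catch (Exception ex)
                {
                    Console.WriteLine($"GPT result parse error, retries left {tryCount}: {ex.Message}");
                }
            }
            throw new Exception("too many failed GPT requests");
        }
    }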
@@ -182,7 +167,7 @@ namespace VideoAnalysisCore.AICore.GPT
                     if (chatResp != null)
                     {
                         redisCached[1] = new object[] { chatResp.Value.res, chatResp.Value.u, chatResp.Value.reasoning };
-                        redisManager.SetTaskGPTCached(task, time, redisCached);
+                        redisManager.SetTaskGPTCached(chatRep.taskId, time, redisCached);
                     }
                     chatResContent = chatResContent?.ExtractJsonStrings()?.FirstOrDefault();
                     chatResContent = chatResContent?.Replace("\n", "");
@@ -214,10 +199,10 @@ namespace VideoAnalysisCore.AICore.GPT
                 }
                 catch (Exception ex)
                 {
-                    await redisManager.AddTaskLog(task, $"=>GPT result parse error, retries left {tryCount} {ex.Message}");
+                    await redisManager.AddTaskLog(chatRep.taskId, $"=>GPT result parse error, retries left {tryCount} {ex.Message}");
                 }
             }
-            await redisManager.AddTaskLog(task, $"=>Too many failed GPT requests!!!");
+            await redisManager.AddTaskLog(chatRep.taskId, $"=>Too many failed GPT requests!!!");
             throw new Exception(DateTime.Now + "=>Too many failed GPT requests!!!");
         }
@@ -215,6 +215,9 @@ namespace VideoAnalysisCore.Common
         /// <param name="msg">Content</param>
         public async Task AddTaskLog(object taskId, string msg)
         {
+#if DEBUG
+            Console.WriteLine($"{DateTime.Now.ToString("MM-dd HH:mm:ss")} => {taskId} \r\n{msg}\r\n");
+#endif
             await Redis.RPushAsync(RedisExpandKey.TaskLog,
                 new TaskLog()
                 {
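The added lines use plain C# conditional compilation: the console echo exists only in Debug builds and is compiled out of Release builds entirely. A self-contained illustration:

    using System;

    static class DebugEchoSketch
    {
        public static void Log(object taskId, string msg)
        {
    #if DEBUG
            // Compiled only when the DEBUG symbol is defined (Debug configuration).
            Console.WriteLine($"{DateTime.Now:MM-dd HH:mm:ss} => {taskId} \r\n{msg}\r\n");
    #endif
        }
    }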
@@ -222,17 +225,18 @@ namespace VideoAnalysisCore.Common
                     CreateTime = DateTime.Now,
                     Message = msg
                 });
+            var count = 50;
             lock (RedisExpandKey.TaskLog)
             {
                 var oldTaskCount = Redis.LLen(RedisExpandKey.TaskLog);
-                if (oldTaskCount > 100)
+                if (oldTaskCount > count)
                 {
                     try
                     {
-                        var insertData = Redis.LRange<TaskLog>(RedisExpandKey.TaskLog, 0, 99);
+                        var insertData = Redis.LRange<TaskLog>(RedisExpandKey.TaskLog, 0, count -1);
                         taskLogDB.AsInsertable(insertData).ExecuteCommand();
                         // also delete the flushed entries from redis
-                        Redis.LTrim(RedisExpandKey.TaskLog, 100, 1000);
+                        Redis.LTrim(RedisExpandKey.TaskLog, count, 1000);
                     }
                     catch (Exception ex)
                     {
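The magic numbers 100/99/100 are replaced by a single count threshold. Reading the Redis calls together (list indexes are zero-based, and LRange/LTrim bounds are inclusive): once the list holds more than count entries, the oldest count items are batch-inserted into the database and then trimmed away:

    const int count = 50;
    // LRange(key, 0, count - 1)  -> items 0..49, the oldest 50, go to the DB.
    // LTrim(key, count, 1000)    -> keep items 50..1000, dropping exactly those 50.

Note the change also halves the flush batch from 100 to 50 entries.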