优化 gpt的注入方式

This commit is contained in:
小肥羊 2025-11-07 11:02:37 +08:00
parent d437ff0a46
commit 836ba81a73
12 changed files with 187 additions and 342 deletions

View File

@ -86,12 +86,7 @@ namespace Learn.VideoAnalysis
builder.Services.AddHttpClient();
builder.Services.AddHttpContextAccessor();
builder.Services.AddSingleton<ChatGPTClient>();
builder.Services.AddSingleton<DeepSeekGPTClient>();
//builder.Services.AddSingleton<IBserGPT, KIMI_GPT>();
//builder.Services.AddSingleton<IBserGPT, Chat_GPT>();
builder.Services.AddSingleton<IBserGPT, DeepSeek_GPT>();
builder.Services.AddGPTService();
builder.Services.AddTaskSubscribe();
var app = builder.Build();

View File

@ -12,6 +12,15 @@ export default {
rank: 0
},
children: [
{
path: "/welcome/runningTask",
name: "runningTask",
component: () => import("@/views/welcome/runningTask.vue"),
meta: {
title: "进行中任务",
showLink: true
}
},
{
path: "/welcome",
name: "Welcome",
@ -30,18 +39,9 @@ export default {
showLink: false
}
},
{
path: "/welcome/runningTask",
name: "runningTask",
component: () => import("@/views/welcome/runningTask.vue"),
meta: {
title: "进行中任务",
showLink: true
}
},
{
path: "/welcome/errorTask",
name: "runningTask",
name: "errorTask",
component: () => import("@/views/welcome/errorTask.vue"),
meta: {
title: "错误任务",

View File

@ -44,11 +44,13 @@
"ChatGpt": {
//"Host": "https://api.g4f.icu/",
"Host": "https://api.oaibest.com/",
"ApiKey": "sk-D15tBln31N7dI9Fi7lds7OySFv5tOEK7DMNsG5rY2E6DCr4s"
"ApiKey": "sk-D15tBln31N7dI9Fi7lds7OySFv5tOEK7DMNsG5rY2E6DCr4s",
"Path": "v1/chat/completions"
},
"DeepSeek": {
"Host": "https://api.deepseek.com/chat/completions",
"ApiKey": "sk-88d3d2bc3dae4d50854b2569b281cf76"
"Host": "https://api.deepseek.com/",
"ApiKey": "sk-88d3d2bc3dae4d50854b2569b281cf76",
"Path": "chat/completions"
},
"aliyun": {
"Host": "https://dashscope.aliyuncs.com/compatible-mode/",

View File

@ -7,6 +7,9 @@ using VideoAnalysisCore.AICore.GPT.Dto;
using VideoAnalysisCore.AICore.SherpaOnnx;
using VideoAnalysisCore.Common;
using Whisper.net;
using Microsoft.Extensions.DependencyInjection;
using VideoAnalysisCore.AICore.GPT.ChatGPT;
using VideoAnalysisCore.AICore.GPT.DeepSeek;
namespace VideoAnalysisCore.AICore.GPT
{
@ -173,47 +176,21 @@ namespace VideoAnalysisCore.AICore.GPT
public string reasoning_content { get; set; }
public string refusal { get; set; }
}
public static class GPTHttpClientExp
public static class GPTExpand
{
public static async Task<HttpResponseMessage> PostJsonStreamAsync(
this IHttpClientFactory _httpClientFactory,
string path, string json,string apiKey, bool readAll = false)
/// <summary>
/// 注册GPT服务
/// </summary>
/// <param name="services"></param>
public static void AddGPTService(this IServiceCollection services)
{
var uriBuilder = new UriBuilder(path);
var maxRestart = 4;
var errorMSG = new Exception[maxRestart];
for (int i = 0; i < maxRestart; i++)
{
try
{
var client = _httpClientFactory.CreateClient();
client.DefaultRequestHeaders.Authorization =
new AuthenticationHeaderValue("Bearer", apiKey);
client.Timeout = TimeSpan.FromSeconds(60 * 20);//超时时间20分钟
client.DefaultRequestVersion = HttpVersion.Version20;
client.DefaultVersionPolicy = HttpVersionPolicy.RequestVersionOrLower;
client.DefaultRequestHeaders.ConnectionClose = true;
services.AddSingleton<DeepSeekGPTClient>();
services.AddSingleton<ChatGPTClient>();
services.AddSingleton<IBserGPT, DeepSeek_GPT>();
var request = new HttpRequestMessage(HttpMethod.Post, uriBuilder.Uri);
request.Content = new StringContent(json, Encoding.UTF8, "application/json");
if (readAll)
return await client.SendAsync(request);
return await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
}
catch (Exception e)
{
errorMSG[i] = e;
Console.WriteLine("====================[请求异常,重试]====================");
Console.WriteLine(uriBuilder.Uri);
Console.WriteLine(e.Message);
Console.WriteLine(e.StackTrace);
Console.WriteLine("==============================================");
Thread.Sleep(1000);
}
}
throw errorMSG.Last(s => s != null);
}
}
}

View File

@ -15,94 +15,21 @@ using System.Text.Json;
namespace VideoAnalysisCore.AICore.GPT.ChatGPT
{
public class ChatGPTClient
public class ChatGPTClient : GPTClient
{
public static string Host = AppCommon.Config.ChatGpt.ChatGpt.Host;
public static string ApiKey = AppCommon.Config.ChatGpt.ChatGpt.ApiKey;
public override GptConfig Config { get; set; } = AppCommon.Config.ChatGpt.ChatGpt;
private readonly IHttpClientFactory _httpClientFactory;
private readonly RedisManager redisManager;
public ChatGPTClient(IHttpClientFactory httpClientFactory, RedisManager redisManager)
public ChatGPTClient(IHttpClientFactory httpClientFactory, RedisManager redisManager):base(httpClientFactory, redisManager)
{
_httpClientFactory = httpClientFactory;
this.redisManager = redisManager;
}
/// <summary>
/// ChatSSE[流式传输 更稳定]
/// </summary>
/// <param name="chatReq"></param>
/// <returns>Return HttpResponseMessage for SSE</returns>
public async Task<(Usage u, string res)?> ChatSSE(ChatRequest chatReq)
{
var requestBody = chatReq.ToJson();
PostJsonStream:
var chatResp = await _httpClientFactory.PostJsonStreamAsync(Host+"v1/chat/completions", requestBody, ApiKey);
if (!chatResp.IsSuccessStatusCode)
{
await redisManager.AddTaskLog(chatReq.taskId, "请求GPT服务器异常 " + chatResp?.StatusCode + await chatResp.Content.ReadAsStringAsync());
goto PostJsonStream;
}
using var stream = chatResp.Content.ReadAsStream();
using var reader = new StreamReader(stream, Encoding.UTF8);
string line;
var messageBuilder = new StringBuilder();
var messageBuilder1 = new StringBuilder();
var lastChat = new ChatResSSE();
var splitCount = "data:".Length;
var maxLoop = 60 * 100000;
int threshold = 0;
while (maxLoop > 0)
{
line = reader.ReadLine();
if (line is null || string.IsNullOrEmpty(line) || line.StartsWith(": keep-alive"))
{
Thread.Sleep(10);
maxLoop--;
continue;
}
else if (line.EndsWith("[DONE]"))
{
// 表示一条消息结束
string message = messageBuilder.ToString();
string message2 = messageBuilder1.ToString();
messageBuilder.Clear();
messageBuilder1.Clear();
var u = lastChat?.usage;
if (u == null || string.IsNullOrEmpty(message))
return null;
return (u, message);
//return (u, message, message2);
}
else if (line.StartsWith("data:"))
{
try
{
var data = System.Text.Json.JsonSerializer.Deserialize<ChatResSSE>(line.Substring(splitCount).Trim());
lastChat = data;
var delta = data?.choices.FirstOrDefault()?.delta;
var str = delta?.content;
var strReasoning = delta?.reasoning_content;
if (!string.IsNullOrEmpty(str))
messageBuilder.Append(str);
if (!string.IsNullOrEmpty(strReasoning))
messageBuilder1.Append(strReasoning);
var steamCount = messageBuilder.Length + messageBuilder1.Length;
if (++threshold % 30 == 0)
redisManager.SetTaskProgress(chatReq.taskId, "steam=>" + steamCount);
}
catch (Exception e)
{
await redisManager.AddTaskLog(chatReq.taskId, "异常 ChatSSE=>" + line + "\r\n" + e.Message + "\r\n" + e.StackTrace);
}
}
}
await redisManager.AddTaskLog(chatReq.taskId, DateTime.Now + "=>AI请求超时 " + chatReq.taskId);
return null;
}
/// <summary>
/// 请求AI
/// </summary>
@ -114,101 +41,16 @@ namespace VideoAnalysisCore.AICore.GPT.ChatGPT
/// <param name="max_tokens">最大token <para>不设置默认最大值 16000/8000</para></param>
/// <returns></returns>
/// <exception cref="Exception"></exception>
public async Task<T> ChatAsync<T>(string task, string postMessages, string title, string model =null, int max_tokens = -1)
public async Task<T> ChatAsync<T>(string task, string postMessages, string title, string model =null, int max_tokens = 8000)
{
Message[] messageArr = [
new Message(postMessages,"user"),
];
messageArr = messageArr.Where(s => s != null).ToArray();
var chatRep = new ChatRequest
{
taskId = task,
model = model ?? ChatGPTType.GPT5_mini,
max_tokens = 8000,
stream = true,
temperature = 0.2f,
messages = messageArr
};
if (max_tokens != -1)
chatRep.max_tokens = max_tokens;
var tryCount = 10;
while (tryCount-- > 0)
{
try
{
var time = title + DateTime.Now.ToString("MMddHHmmss");
var redisCached = new object[2] { chatRep, null };
redisManager.SetTaskGPTCached(task, time, chatRep);
var chatResp = await Chat(chatRep);
var chatResContent = chatResp?.res;
if (string.IsNullOrEmpty(chatResContent))
throw new Exception("GPT返回message无效结果");
if (chatResp != null)
{
redisCached[1] = new object[] { chatResp.Value.res, chatResp.Value.u, chatResp.Value };
redisManager.SetTaskGPTCached(task, time, redisCached);
}
chatResContent = chatResContent?.ExtractJsonStrings()?.FirstOrDefault();
chatResContent = chatResContent?.Replace("\n", "");
chatResContent = chatResContent?.Replace("```json", "");
chatResContent = chatResContent?.Replace("```", "");
chatResContent = chatResContent?.Replace("}{", "},{");
chatResContent = chatResContent?.Replace("}|{", "},{");
chatResContent = chatResContent?.Trim();
if (string.IsNullOrEmpty(chatResContent))
throw new Exception("ChatGPT返回结果无有效JSON");
var startsStr = typeof(T).IsArray ? "[" : "{";
var endStr = typeof(T).IsArray ? "]" : "}";
if (!chatResContent.StartsWith(startsStr))
chatResContent = startsStr + chatResContent;
if (!chatResContent.EndsWith(endStr))
chatResContent = chatResContent + endStr;
var options = new JsonSerializerOptions
{
// 允许解析不严格符合 JSON 规范的字符串
AllowTrailingCommas = true,
// 处理不匹配的 JSON 字符
ReadCommentHandling = JsonCommentHandling.Skip
};
var questionRes = System.Text.Json.JsonSerializer.Deserialize<T>(chatResContent, options);
if (questionRes is null)
throw new Exception("ChatGPT返回无效结果");
return questionRes;
}
catch (Exception ex)
{
await redisManager.AddTaskLog(task, $"ChatGPT结果解析错误 重试剩余{tryCount}" + ex.Message);
}
}
throw new Exception(DateTime.Now + "=>ChatGPT请求失败次数过多!!!");
//chatReq.modalities = null;
//chatReq.max_tokens = null;
//chatReq.top_p = null;
return await base.ChatAsync<T>(task, postMessages, title, model ?? ChatGPTType.GPT5_mini, max_tokens);
}
/// <summary>
/// Chat
/// </summary>
/// <param name="chatReq"></param>
/// <returns>Return HttpResponseMessage for SSE</returns>
public async Task<(Usage u, string res)?> Chat(ChatRequest chatReq)
{
chatReq.modalities =null;
chatReq.max_tokens = null;
chatReq.top_p = null;
if (chatReq.stream) return await ChatSSE(chatReq);
var requestBody = chatReq.ToJson();
var chatResp = await _httpClientFactory.PostJsonStreamAsync(Host+"v1/chat/completions", requestBody,ApiKey,true);
var res = await chatResp.Content.ReadFromJsonAsync<ChatRes>();
var chatResContent = res?.choices.FirstOrDefault()?.message.content.Trim();
if (res is null || res.error != null)
throw new Exception($" ChatGPT模型返回异常 返回参数: " +
$" {res?.ToJson()}");
if (string.IsNullOrEmpty(chatResContent))
return null;
return (res.usage, chatResContent);
}
}
}

View File

@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace VideoAnalysisCore.AICore.GPT.ChatGPT
{
}

View File

@ -1,31 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace VideoAnalysisCore.AICore.GPT.ChatGPT
{
public class ChatGPTType
{
public static string GPT5_mini = "gpt-5-mini-2025-08-07";
public static string GPT5 = "gpt-5-2025-08-07";
public static string GPT5_nano = "gpt-5-nano-2025-08-07";
/// <summary>
/// o1 系列模型通过强化学习进行训练以执行复杂的推理。o1 模型在回答之前会思考,在回应用户之前会产生一个漫长的内部思维链。在我们的推理指南中了解 o1 模型的功能。
///<para>目前有两种型号可供选择:</para>
///<para>O1旨在解决跨领域的难题的推理模型</para>
///<para>O1-Mini用于专业任务的快速且经济实惠的推理模型</para>
/// </summary>
public static string GPTo1 = "o1";
/// <summary>
/// o1 系列模型通过强化学习进行训练以执行复杂的推理。o1 模型在回答之前会思考,在回应用户之前会产生一个漫长的内部思维链。在我们的推理指南中了解 o1 模型的功能。
///<para>目前有两种型号可供选择:</para>
///<para>O1旨在解决跨领域的难题的推理模型</para>
///<para>O1-Mini用于专业任务的快速且经济实惠的推理模型</para>
/// </summary>
public static string GPTo1Mini = "o1-mini";
}
}

View File

@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace VideoAnalysisCore.AICore.GPT
{
/// <summary>
/// Well-known model identifiers accepted by the GPT endpoints
/// (OpenAI GPT-5 family and DeepSeek chat/reasoner).
/// </summary>
public class ChatGPTType
{
    /// <summary>OpenAI GPT-5 mini snapshot.</summary>
    public static readonly string GPT5_mini = "gpt-5-mini-2025-08-07";
    /// <summary>OpenAI GPT-5 full-size snapshot.</summary>
    public static readonly string GPT5 = "gpt-5-2025-08-07";
    /// <summary>OpenAI GPT-5 nano snapshot.</summary>
    public static readonly string GPT5_nano = "gpt-5-nano-2025-08-07";
    /// <summary>DeepSeek reasoning model (long chain-of-thought output).</summary>
    public static readonly string Deepseek_Reasoner = "deepseek-reasoner";
    /// <summary>DeepSeek standard chat model.</summary>
    public static readonly string Deepseek_Chat = "deepseek-chat";
}
}

View File

@ -0,0 +1,53 @@
using VideoAnalysisCore.Common;
using System.Net.Http.Headers;
using System.Text;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json.Linq;
using System.Net.Http;
using Newtonsoft.Json;
using System.Net.Http.Json;
using System.Net;
using VideoAnalysisCore.AICore.GPT.DeepSeek;
using VideoAnalysisCore.AICore.GPT;
using System.Text.Json;
namespace VideoAnalysisCore.AICore.GPT.ChatGPT
{
/// <summary>
/// GPT client bound to the DeepSeek endpoint. All transport logic lives in
/// <see cref="GPTClient"/>; this subclass only supplies the DeepSeek
/// configuration and model/token defaults.
/// </summary>
public class DeepSeekGPTClient : GPTClient
{
    /// <summary>DeepSeek endpoint settings (host, API key, request path) from app config.</summary>
    public override GptConfig Config { get; set; } = AppCommon.Config.ChatGpt.DeepSeek;

    // NOTE: the factory and redis manager are held by the base class; no
    // duplicate private fields are kept here (the previous copies were unused).
    public DeepSeekGPTClient(IHttpClientFactory httpClientFactory, RedisManager redisManager)
        : base(httpClientFactory, redisManager)
    {
    }
    /// <summary>
    /// Sends a prompt to DeepSeek and deserializes the JSON answer.
    /// </summary>
    /// <typeparam name="T">Type the returned JSON is deserialized into.</typeparam>
    /// <param name="task">Task id (used for logging/progress in redis).</param>
    /// <param name="postMessages">Prompt text.</param>
    /// <param name="title">Task category label.</param>
    /// <param name="model">Model id; defaults to <see cref="ChatGPTType.Deepseek_Reasoner"/> when null.</param>
    /// <param name="max_tokens">Max completion tokens; forced to 16000 for the reasoner model.</param>
    /// <returns>The deserialized response.</returns>
    /// <exception cref="Exception">Thrown by the base when retries are exhausted.</exception>
    public async Task<T> ChatAsync<T>(string task, string postMessages, string title, string model = null, int max_tokens = 8000)
    {
        model = model ?? ChatGPTType.Deepseek_Reasoner;
        // The reasoner model emits long reasoning output, so give it a larger window.
        max_tokens = model == ChatGPTType.Deepseek_Reasoner ? 16000 : max_tokens;
        return await base.ChatAsync<T>(task, postMessages, title, model, max_tokens);
    }
}
}

View File

@ -1,11 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace VideoAnalysisCore.AICore.GPT.DeepSeek
{
}

View File

@ -11,74 +11,39 @@ using System.Threading;
using System;
using System.IO;
using VideoAnalysisCore.AICore.GPT.ChatGPT;
using VideoAnalysisCore.AICore.GPT;
using System.Threading.Tasks;
using System.Text.Json;
namespace VideoAnalysisCore.AICore.GPT.DeepSeek
namespace VideoAnalysisCore.AICore.GPT
{
public class DeepSeekGPTClient
public abstract class GPTClient
{
//private readonly string Path = "v1/chat/completions";
//public static string Host = AppCommon.Config.ChatGpt.aliyun.Host;
//public static string ApiKey = AppCommon.Config.ChatGpt.aliyun.ApiKey;
private readonly string Path = "";
public static string Host = AppCommon.Config.ChatGpt.DeepSeek.Host;
public static string ApiKey = AppCommon.Config.ChatGpt.DeepSeek.ApiKey;
//public static string Host = AppCommon.Config.ChatGpt.ChatGpt.Host;
//public static string ApiKey = AppCommon.Config.ChatGpt.ChatGpt.ApiKey;
//private readonly string Path = "v1/chat/completions";
public virtual GptConfig Config { get; set; }
private readonly IHttpClientFactory _httpClientFactory;
private readonly RedisManager redisManager;
public DeepSeekGPTClient(IHttpClientFactory httpClientFactory, RedisManager redisManager)
public GPTClient(IHttpClientFactory httpClientFactory, RedisManager redisManager)
{
_httpClientFactory = httpClientFactory;
this.redisManager = redisManager;
}
/// <summary>
/// Chat
/// </summary>
/// <param name="chatReq"></param>
/// <returns>Return HttpResponseMessage for SSE</returns>
public async Task<(Usage u, string res,string reasoning)?> Chat(ChatRequest chatReq)
public async Task<(Usage u, string res, string reasoning)?> Chat(ChatRequest chatReq)
{
//chatReq.model = "deepseek-r1";
if (chatReq.stream) return await ChatSSE(chatReq);
if (chatReq.stream) return await ChatSSE(chatReq);
throw new NotImplementedException();
postStar:
var requestBody = chatReq.ToJson();
HttpResponseMessage chatResp = await _httpClientFactory.PostJsonStreamAsync(Host+Path, requestBody, ApiKey,true);
var res1 = await chatResp.Content.ReadAsStringAsync();
if (res1 == null || string.IsNullOrEmpty(res1)|| !chatResp.IsSuccessStatusCode)
{
await redisManager.AddTaskLog(chatReq.taskId,$"=>GPT请求失败重试 Code = {chatResp.StatusCode} Res={res1}");
goto postStar;
}
await redisManager.AddTaskLog(chatReq.taskId, $"=>GPT请求头获取成功 Code = {chatResp.StatusCode} Res={res1}");
var res = await chatResp.Content.ReadFromJsonAsync<ChatRes>();
if (res is null || res.error != null)
throw new Exception($" GPT模型返回异常 返回参数: " +
$" {res.ToJson()}");
var d = thinkMSG(res?.choices.FirstOrDefault()?.message);
var chatResContent = d.m1;
var chatResReasoning = d.m2;
if (string.IsNullOrEmpty(chatResContent))
return null;
return (res.usage, chatResContent, chatResReasoning);
}
private (string m1, string m2) thinkMSG(Message? m)
private (string m1, string m2) thinkMSG(Message? m)
{
var chatResContent = m?.content.Trim();
var chatResReasoning = string.Empty;
@ -94,9 +59,6 @@ namespace VideoAnalysisCore.AICore.GPT.DeepSeek
}
/// <summary>
/// ChatSSE[流式传输 更稳定]
/// </summary>
@ -105,12 +67,17 @@ namespace VideoAnalysisCore.AICore.GPT.DeepSeek
public async Task<(Usage u, string res, string reasoning)?> ChatSSE(ChatRequest chatReq)
{
var requestBody = chatReq.ToJson();
PostJsonStream:
var chatResp = await _httpClientFactory.PostJsonStreamAsync(Host, requestBody,ApiKey);
if (!chatResp.IsSuccessStatusCode)
var i = 5;
PostJsonStream:
var chatResp = await PostJsonStreamAsync(Config.Host + Config.Path, requestBody, Config.ApiKey);
if (!chatResp.IsSuccessStatusCode)
{
await redisManager.AddTaskLog(chatReq.taskId,"=>请求GPT服务器异常 " + chatResp?.StatusCode +" "+ await chatResp.Content.ReadAsStringAsync());
goto PostJsonStream;
await redisManager.AddTaskLog(chatReq.taskId, "=>请求GPT服务器异常 " + chatResp?.StatusCode + " " + await chatResp.Content.ReadAsStringAsync());
if (--i < 0)
{
throw new Exception("请求GPT服务器失败次数过多");
}
goto PostJsonStream;
}
using var stream = chatResp.Content.ReadAsStream();
using var reader = new StreamReader(stream, Encoding.UTF8);
@ -119,15 +86,16 @@ namespace VideoAnalysisCore.AICore.GPT.DeepSeek
var messageBuilder1 = new StringBuilder();
var lastChat = new ChatResSSE();
var splitCount = "data:".Length;
var maxLoop = 60*100000;
var maxLoop = 60 * 100000;
int threshold = 0;
while (maxLoop>0)
while (maxLoop > 0)
{
line = reader.ReadLine();
if (line is null || string.IsNullOrEmpty(line)|| line.StartsWith(": keep-alive")) {
if (line is null || string.IsNullOrEmpty(line) || line.StartsWith(": keep-alive"))
{
Thread.Sleep(10);
maxLoop--;
continue;
continue;
}
else if (line.EndsWith("[DONE]"))
{
@ -136,7 +104,7 @@ namespace VideoAnalysisCore.AICore.GPT.DeepSeek
string message2 = messageBuilder1.ToString();
messageBuilder.Clear();
messageBuilder1.Clear();
var d =thinkMSG(new Message() { content = message, reasoning_content = message2 });
var d = thinkMSG(new Message() { content = message, reasoning_content = message2 });
message = d.m1;
message2 = d.m2;
var u = lastChat?.usage;
@ -158,8 +126,8 @@ namespace VideoAnalysisCore.AICore.GPT.DeepSeek
if (!string.IsNullOrEmpty(strReasoning))
messageBuilder1.Append(strReasoning);
var steamCount = messageBuilder.Length + messageBuilder1.Length;
if (++threshold%30==0)
redisManager.SetTaskProgress(chatReq.taskId, "steam=>"+ steamCount);
if (++threshold % 30 == 0)
redisManager.SetTaskProgress(chatReq.taskId, "steam=>" + steamCount);
}
catch (Exception e)
{
@ -171,7 +139,6 @@ namespace VideoAnalysisCore.AICore.GPT.DeepSeek
return null;
}
/// <summary>
/// 请求AI
/// </summary>
@ -226,7 +193,7 @@ namespace VideoAnalysisCore.AICore.GPT.DeepSeek
chatResContent = chatResContent?.Trim();
if (string.IsNullOrEmpty(chatResContent))
throw new Exception("ChatGPT返回结果无有效JSON");
throw new Exception("GPT返回结果无有效JSON");
var startsStr = typeof(T).IsArray ? "[" : "{";
var endStr = typeof(T).IsArray ? "]" : "}";
if (!chatResContent.StartsWith(startsStr))
@ -242,17 +209,54 @@ namespace VideoAnalysisCore.AICore.GPT.DeepSeek
};
var questionRes = System.Text.Json.JsonSerializer.Deserialize<T>(chatResContent, options);
if (questionRes is null)
throw new Exception("ChatGPT返回无效结果");
throw new Exception("GPT返回无效结果");
return questionRes;
}
catch (Exception ex)
{
await redisManager.AddTaskLog(task, $"=>ChatGPT结果解析错误 重试剩余{tryCount} {ex.Message}");
await redisManager.AddTaskLog(task, $"=>GPT结果解析错误 重试剩余{tryCount} {ex.Message}");
}
}
await redisManager.AddTaskLog(task, $"=>ChatGPT请求失败次数过多!!!");
throw new Exception(DateTime.Now + "=>ChatGPT请求失败次数过多!!!");
await redisManager.AddTaskLog(task, $"=>GPT请求失败次数过多!!!");
throw new Exception(DateTime.Now + "=>GPT请求失败次数过多!!!");
}
/// <summary>
/// POSTs a JSON payload with Bearer auth, retrying up to 4 times on transport
/// exceptions. By default returns as soon as response headers arrive (SSE
/// streaming); pass <paramref name="readAll"/> = true to buffer the full body.
/// </summary>
/// <param name="path">Absolute request URL.</param>
/// <param name="json">JSON request body.</param>
/// <param name="apiKey">Bearer token for the Authorization header.</param>
/// <param name="readAll">When true, waits for the complete response body.</param>
/// <returns>The HTTP response (headers-only when streaming).</returns>
/// <exception cref="Exception">Rethrows the last transport error after all retries fail.</exception>
public async Task<HttpResponseMessage> PostJsonStreamAsync(
    string path, string json, string apiKey, bool readAll = false)
{
    var uriBuilder = new UriBuilder(path);
    var maxRestart = 4;
    var errorMSG = new Exception[maxRestart];
    for (int i = 0; i < maxRestart; i++)
    {
        try
        {
            var client = _httpClientFactory.CreateClient();
            client.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Bearer", apiKey);
            client.Timeout = TimeSpan.FromSeconds(60 * 20);//超时时间20分钟 (20-minute timeout for long generations)
            client.DefaultRequestVersion = HttpVersion.Version20;
            client.DefaultVersionPolicy = HttpVersionPolicy.RequestVersionOrLower;
            client.DefaultRequestHeaders.ConnectionClose = true;
            var request = new HttpRequestMessage(HttpMethod.Post, uriBuilder.Uri);
            request.Content = new StringContent(json, Encoding.UTF8, "application/json");
            if (readAll)
                return await client.SendAsync(request);
            // ResponseHeadersRead lets the caller consume the SSE stream incrementally.
            return await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
        }
        catch (Exception e)
        {
            errorMSG[i] = e;
            Console.WriteLine("====================[请求异常,重试]====================");
            Console.WriteLine(uriBuilder.Uri);
            Console.WriteLine(e.Message);
            Console.WriteLine(e.StackTrace);
            Console.WriteLine("==============================================");
            // Non-blocking backoff: Thread.Sleep in an async method ties up a pool thread.
            await Task.Delay(1000);
        }
    }
    throw errorMSG.Last(s => s != null);
}
}
}

View File

@ -133,6 +133,10 @@ namespace VideoAnalysisCore.Common
/// api的密钥
/// </summary>
public string ApiKey { get; set; } = string.Empty;
/// <summary>
/// API的路径
/// </summary>
public string Path { get; set; } = string.Empty;
}
/// <summary>
/// 文本模型 配置