using VideoAnalysisCore.Common;
using System.Net.Http.Headers;
using System.Text;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json.Linq;
using System.Net.Http;
using Newtonsoft.Json;
using System.Net.Http.Json;
using System.Net;
using System.Text.Json;

namespace VideoAnalysisCore.AICore.GPT.DeepSeek
{
    /// <summary>
    /// GPT client implementation targeting the DeepSeek API.
    /// Uses the DeepSeek section of the application configuration.
    /// </summary>
    public class DeepSeekGPTClient : GPTClient
    {
        public override GptConfig Config { get; set; } = AppCommon.Config.ChatGpt.DeepSeek;

        private readonly IHttpClientFactory _httpClientFactory;
        private readonly RedisManager _redisManager;

        public DeepSeekGPTClient(IHttpClientFactory httpClientFactory, RedisManager redisManager)
            : base(httpClientFactory, redisManager)
        {
            _httpClientFactory = httpClientFactory;
            _redisManager = redisManager;
        }

        /// <summary>
        /// Sends a single-turn chat request to the DeepSeek API and returns the response
        /// (JSON result — see the <see cref="ChatRequest"/> overload this delegates to).
        /// </summary>
        /// <param name="task">Task id.</param>
        /// <param name="postMessages">Prompt text; sent as one "user" role message.</param>
        /// <param name="title">Task type / title.</param>
        /// <param name="model">GPT model version; <c>null</c> falls back to deepseek-chat.</param>
        /// <param name="max_tokens">
        /// Maximum completion tokens. Clamped to 8000 for the chat model; ignored for the
        /// reasoner model, which always uses a 32000-token budget.
        /// </param>
        public override async Task ChatAsync(string task, string postMessages, string title, string model = ChatGPTType.Deepseek_Chat, int max_tokens = 8000)
        {
            // Single user message; the array is built from known non-null elements,
            // so no null-filtering pass is needed.
            Message[] messageArr =
            [
                new Message(postMessages, "user"),
            ];

            // deepseek-chat caps completion tokens at 8000 — clamp oversized requests.
            if (max_tokens > 8000 && (model is null || model == ChatGPTType.Deepseek_Chat))
                max_tokens = 8000;

            var chatReq = new ChatRequest
            {
                taskId = task,
                title = title,
                model = model ?? ChatGPTType.Deepseek_Chat,
                // The reasoner model always gets its larger fixed budget.
                max_tokens = model == ChatGPTType.Deepseek_Reasoner ? 32000 : max_tokens,
                stream = true,
                messages = messageArr
            };

            return await ChatAsync(chatReq);
        }
    }
}