using Admin.NET.Core.Ai.Models;
using Admin.NET.Core.Ai.Option;
using Admin.NET.Core.Ai.Utils;
using Newtonsoft.Json;

namespace Admin.NET.Core.Ai.Services.Infrastructure;

/// <summary>
/// OpenRouter client. Posts chat-completion requests to the endpoint configured in
/// <see cref="LLMCustomOptions"/> and returns the content of the first choice.
/// </summary>
public class LLMOpenRouterClient
{
    private readonly HttpClient _httpClient;
    private readonly LLMCustomOptions _options;

    public LLMOpenRouterClient(HttpClient httpClient, IOptions<LLMCustomOptions> options)
    {
        _httpClient = httpClient;
        _options = options.Value;
        // Request timeout comes from configuration (seconds).
        _httpClient.Timeout = TimeSpan.FromSeconds(_options.Timeout);
    }

    /// <summary>
    /// Gets the LLM answer for a one-shot prompt. If the caller did not supply a
    /// system message, the configured prompt system message is prepended.
    /// </summary>
    /// <param name="messages">Prompt messages.</param>
    /// <returns>The LLM answer text.</returns>
    /// <exception cref="ArgumentException">The message list is null/empty, or a message exceeds the length limit.</exception>
    /// <exception cref="Exception">Network request error, JSON parsing error, or non-OK server response.</exception>
    public async Task<string> GetPromptAsync(List<LLMInputMessage> messages)
    {
        return await GetLLMResponseAsync(messages, msgs =>
        {
            if (!msgs.Any(m => string.Equals(m.Role, "system", StringComparison.Ordinal)))
            {
                msgs.Insert(0, new LLMInputMessage { Role = "system", Content = _options.InitSystemPromptMessage });
            }
        });
    }

    /// <summary>
    /// Gets the LLM answer for a chat history. If the caller did not supply a
    /// system message, the configured chat system message is prepended.
    /// </summary>
    /// <param name="messages">Chat history messages.</param>
    /// <returns>The LLM answer text.</returns>
    /// <exception cref="ArgumentException">The message list is null/empty, or a message exceeds the length limit.</exception>
    /// <exception cref="Exception">Network request error, JSON parsing error, or non-OK server response.</exception>
    public async Task<string> GetChatAsync(List<LLMInputMessage> messages)
    {
        return await GetLLMResponseAsync(messages, msgs =>
        {
            if (!msgs.Any(m => string.Equals(m.Role, "system", StringComparison.Ordinal)))
            {
                msgs.Insert(0, new LLMInputMessage { Role = "system", Content = _options.InitSystemChatMessage });
            }
        });
    }

    /// <summary>
    /// Sends the messages to the configured endpoint and returns the LLM answer.
    /// </summary>
    /// <param name="messages">Message list to send.</param>
    /// <param name="beforeSendAction">Optional hook that may modify the message list before it is serialized and sent.</param>
    /// <returns>The content of the first choice in the response.</returns>
    /// <exception cref="ArgumentException">The message list is null/empty, or a message exceeds the length limit.</exception>
    /// <exception cref="Exception">Network request error, JSON parsing error, or non-OK server response.</exception>
    private async Task<string> GetLLMResponseAsync(List<LLMInputMessage> messages, Action<List<LLMInputMessage>> beforeSendAction = null)
    {
        // Validation stays outside the try block so callers receive the documented
        // ArgumentException instead of a re-wrapped "Unknown error".
        if (messages == null || !messages.Any())
            throw new ArgumentException("Message list cannot be empty");
        if (messages.Any(m => m.Content.Length > 2000))
            throw new ArgumentException("Message content exceeds the maximum length limit");

        var defaultLLM = _options.SupportLLMList.Find(item => item.Desciption.Equals(_options.ModelProvider));
        if (defaultLLM == null)
        {
            throw new Exception("Default LLM not found, please check if the ModelProvider in ai.json is set incorrectly?");
        }

        // BUG FIX: the original serialized the body BEFORE invoking beforeSendAction,
        // so the system message inserted by the hook never reached the request payload.
        // The hook must run first, then the (possibly modified) list is serialized.
        beforeSendAction?.Invoke(messages);

        var inputBody = new LLMInputBody
        {
            Model = defaultLLM.Model,
            Messages = messages
        };
        var strBody = LLMJsonTools.SerializeObject(inputBody);

        try
        {
            using var content = new StringContent(strBody, Encoding.UTF8, "application/json");
            using var response = await _httpClient.PostAsync(_options.BaseUrl, content);

            if (response.StatusCode != System.Net.HttpStatusCode.OK)
            {
                throw new Exception("Failed to get LLM response: " +
                                    $"Status code: {response.StatusCode}" + " " +
                                    $"Error message: {response.ReasonPhrase}" + " " +
                                    $"Error content: {await response.Content.ReadAsStringAsync()}");
            }

            var strResponse = await response.Content.ReadAsStringAsync();
            // NOTE(review): generic argument reconstructed from context (stripped in the
            // original text) — presumably the counterpart of LLMInputBody; verify the type name.
            var output = LLMJsonTools.DeserializeObject<LLMOutputBody>(strResponse);
            return output.Choices[0].Message.Content;
        }
        catch (HttpRequestException ex)
        {
            // Preserve the original exception as InnerException so the stack trace survives.
            throw new Exception($"Network request error: {ex.Message}", ex);
        }
        catch (JsonException ex)
        {
            throw new Exception($"JSON parsing error: {ex.Message}", ex);
        }
    }
}