fix(llm): remove the custom LLM integration; special LLM providers are now handled uniformly via Microsoft's official solution

PZ688 2025-06-19 05:00:40 +08:00
parent 4cdb10b2bb
commit b681c5fe48
6 changed files with 0 additions and 251 deletions
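For context, a minimal sketch of the migration target (not part of this diff): with Microsoft Semantic Kernel, an OpenAI-compatible provider such as OpenRouter can be registered through the stock OpenAI connector, replacing the hand-rolled client removed below. The model id and endpoint are taken from the removed LLMCustom config; the custom-endpoint overload of AddOpenAIChatCompletion is assumed to be available in the Semantic Kernel version in use, and the API key is a placeholder.

```csharp
// Sketch only: the Semantic Kernel path this commit migrates to.
// Assumes the OpenAI connector overload that accepts a custom endpoint.
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
    modelId: "deepseek/deepseek-r1-distill-llama-70b:free", // from the removed SupportLLMList
    endpoint: new Uri("https://openrouter.ai/api/v1"),      // OpenAI-compatible base URL
    apiKey: "<your-api-key>");                              // placeholder
var kernel = builder.Build();

// The removed GetChatAsync/GetPromptAsync flow collapses into one service call.
var chat = kernel.GetRequiredService<IChatCompletionService>();
var history = new ChatHistory("You are an experienced AI assistant.");
history.AddUserMessage("Hello");
var reply = await chat.GetChatMessageContentAsync(history);
Console.WriteLine(reply.Content);
```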

View File

@@ -156,36 +156,5 @@
}
}
]
},
"LLMCustom": {
"LLMType": "openrouter",
"ApiKey": "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",
"BaseUrl": "https://openrouter.ai/api/v1/chat/completions",
"InitSystemChatMessage": "你是一个经验丰富的AI助手请根据用户的问题给出最准确的回答,每个回答都以markdown格式输出",
"InitSystemPromptMessage": "你是一个经验丰富的AI助手请根据用户的问题给出最准确的回答",
"CanUserSwitchLLM": false,
"ModelProvider": "DeepSeek: R1",
"MaxHistory": 10,
"Timeout": 30,
"IsUserProxy": true,
"ProxyUrl": "http://127.0.0.1:10809",
"SupportLLMList": [
{
"Desciption": "OpenAI: GPT-3.5 Turbo",
"Model": "openai/gpt-3.5-turbo"
},
{
"Desciption": "Google: LearnLM 1.5 Pro",
"Model": "google/learnlm-1.5-pro-experimental:free"
},
{
"Desciption": "Meta: Llama 3.2 11B",
"Model": "meta-llama/llama-3.2-11b-vision-instruct:free"
},
{
"Desciption": "DeepSeek: R1",
"Model": "deepseek/deepseek-r1-distill-llama-70b:free"
}
]
}
}

View File

@@ -1,23 +0,0 @@
using System.Net.Http.Headers;
using Admin.NET.Core.Ai.Option;
namespace Admin.NET.Core.Ai.Handlers;
public class LLMDelegatingHandler: DelegatingHandler
{
private readonly LLMCustomOptions _options;
public LLMDelegatingHandler(IOptions<LLMCustomOptions> options)
{
_options = options.Value;
}
protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
// Set the Authorization header
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", _options.ApiKey);
return await base.SendAsync(request, cancellationToken);
}
}

View File

@@ -1,29 +0,0 @@
namespace Admin.NET.Core.Ai.Option;
/// <summary>
/// Configuration options for the hand-rolled (custom) LLM integration
/// </summary>
public class LLMCustomOptions: IConfigurableOptions
{
public string LLMType { get; set; } = "openrouter";
public string ApiKey { get; set; }
public string BaseUrl { get; set; } = "https://openrouter.ai/api/v1/chat/completions";
public string InitSystemChatMessage { get; set; } = "You are an experienced AI assistant. Answer the user's question as accurately as possible, and format every answer in markdown";
public string InitSystemPromptMessage { get; set; } = "You are an experienced AI assistant. Answer the user's question as accurately as possible";
public bool CanUserSwitchLLM { get; set; } = false;
public string ModelProvider { get; set; }
public int MaxHistory { get; set; } = 10;
public bool IsUserProxy { get; set; } = false;
public string ProxyUrl { get; set; } = "";
public int Timeout { get; set; } = 30;
public List<LLMItem> SupportLLMList { get; set; }
}
/// <summary>
/// Options for a single supported LLM entry
/// </summary>
public class LLMItem
{
public string Desciption { get; set; }
public string Model { get; set; }
}

View File

@@ -1,122 +0,0 @@
using Admin.NET.Core.Ai.Models;
using Admin.NET.Core.Ai.Option;
using Admin.NET.Core.Ai.Utils;
using Newtonsoft.Json;
namespace Admin.NET.Core.Ai.Services.Infrastructure;
/// <summary>
/// OpenRouter client
/// </summary>
public class LLMOpenRouterClient
{
private readonly HttpClient _httpClient;
private readonly LLMCustomOptions _options;
public LLMOpenRouterClient(HttpClient httpClient, IOptions<LLMCustomOptions> options)
{
_httpClient = httpClient;
_options = options.Value;
_httpClient.Timeout = TimeSpan.FromSeconds(_options.Timeout);
}
/// <summary>
/// Gets the LLM's answer to a prompt
/// </summary>
/// <param name="messages">prompt messages</param>
/// <returns>the LLM's answer to the prompt</returns>
/// <exception cref="ArgumentException">the message list is empty</exception>
/// <exception cref="Exception">network request error</exception>
/// <exception cref="JsonException">JSON parsing error</exception>
public async Task<string> GetPromptAsync(List<LLMInputMessage> messages)
{
return await GetLLMResponseAsync(messages, (messages) =>
{
if (!messages.Any(m => m.Role.Equals("system")))
{
messages.Insert(0, new LLMInputMessage()
{
Role = "system",
Content = _options.InitSystemPromptMessage
});
}
});
}
/// <summary>
/// Gets the LLM's answer to a chat history
/// </summary>
/// <param name="messages">chat history</param>
/// <returns>the LLM's answer to the chat history</returns>
/// <exception cref="ArgumentException">the message list is empty</exception>
/// <exception cref="Exception">network request error</exception>
public async Task<string> GetChatAsync(List<LLMInputMessage> messages)
{
return await GetLLMResponseAsync(messages, (messages) =>
{
if (!messages.Any(m => m.Role.Equals("system")))
{
messages.Insert(0, new LLMInputMessage()
{
Role = "system",
Content = _options.InitSystemChatMessage
});
}
});
}
/// <summary>
/// Gets the LLM's answer
/// </summary>
/// <param name="messages">message list</param>
/// <param name="beforeSendAction">callback that can modify the messages before the request is sent</param>
/// <returns>the LLM's answer</returns>
/// <exception cref="ArgumentException">the message list is empty</exception>
/// <exception cref="Exception">network request error</exception>
/// <exception cref="JsonException">JSON parsing error</exception>
private async Task<string> GetLLMResponseAsync(List<LLMInputMessage> messages, Action<List<LLMInputMessage>> beforeSendAction = null)
{
try
{
if (messages == null || !messages.Any())
throw new ArgumentException("Message list cannot be empty");
if (messages.Any(m => m.Content.Length > 2000))
throw new ArgumentException("Message content exceeds the maximum length limit");
var defaultLLM = _options.SupportLLMList.Find(item => item.Desciption.Equals(_options.ModelProvider));
if (defaultLLM == null)
{
throw new Exception("Default LLM not found; please check whether ModelProvider in ai.json is set correctly.");
}
var inputBody = new LLMInputBody();
inputBody.Model = defaultLLM.Model;
inputBody.Messages = messages;
beforeSendAction?.Invoke(messages); // Let the caller adjust the messages (e.g. inject a system prompt) before they are serialized
var strBody = LLMJsonTools.SerializeObject(inputBody);
using (var content = new StringContent(strBody, Encoding.UTF8, "application/json"))
using (var response = await _httpClient.PostAsync(_options.BaseUrl, content))
{
if (response.StatusCode == System.Net.HttpStatusCode.OK)
{
var strResponse = await response.Content.ReadAsStringAsync();
var output = LLMJsonTools.DeserializeObject<LLMOutput>(strResponse);
return output.Choices[0].Message.Content;
}
else
{
throw new Exception("Failed to get LLM response: " + $"Status code: {response.StatusCode}" + " " + $"Error message: {response.ReasonPhrase}" + " " + $"Error content: {await response.Content.ReadAsStringAsync()}");
}
}
}
catch (HttpRequestException ex)
{
throw new Exception($"Network request error: {ex.Message}");
}
catch (JsonException ex)
{
throw new Exception($"JSON parsing error: {ex.Message}");
}
catch (Exception ex)
{
throw new Exception($"Unknown error: {ex.Message}");
}
}
}

View File

@@ -1,45 +0,0 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Admin.NET.Core.Ai.Utils;
/// <summary>
/// LLM JSON helper class
/// </summary>
public class LLMJsonTools
{
/// <summary>
/// Serializes an object to snake_case JSON
/// </summary>
/// <param name="obj">the object to serialize</param>
/// <returns>JSON string</returns>
public static string SerializeObject(object obj)
{
var settings = new JsonSerializerSettings
{
ContractResolver = new DefaultContractResolver
{
NamingStrategy = new SnakeCaseNamingStrategy()
}
};
return JsonConvert.SerializeObject(obj, settings);
}
/// <summary>
/// Deserializes a snake_case JSON string
/// </summary>
/// <typeparam name="T">target type</typeparam>
/// <param name="json">JSON string</param>
/// <returns>the deserialized object</returns>
public static T DeserializeObject<T>(string json)
{
var settings = new JsonSerializerSettings
{
ContractResolver = new DefaultContractResolver
{
NamingStrategy = new SnakeCaseNamingStrategy()
}
};
return JsonConvert.DeserializeObject<T>(json, settings);
}
}
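A hedged aside on why the removed helper pinned SnakeCaseNamingStrategy: OpenAI-compatible endpoints expect snake_case JSON fields, so PascalCase C# properties had to be renamed on the wire. For illustration (the anonymous type is hypothetical):

```csharp
// Illustration: SnakeCaseNamingStrategy maps PascalCase properties to the
// snake_case fields OpenAI-compatible APIs expect.
var body = new { Model = "openai/gpt-3.5-turbo", MaxTokens = 256 };
Console.WriteLine(LLMJsonTools.SerializeObject(body));
// -> {"model":"openai/gpt-3.5-turbo","max_tokens":256}
```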

View File

@@ -44,7 +44,6 @@ public static class ProjectOptions
services.Configure<ClientRateLimitOptions>(App.Configuration.GetSection("ClientRateLimiting"));
services.Configure<ClientRateLimitPolicies>(App.Configuration.GetSection("ClientRateLimitPolicies"));
services.AddConfigurableOptions<LLMCustomOptions>(); // configuration options for the hand-rolled LLM integration
services.AddConfigurableOptions<LLMOptions>(); // based on Microsoft Semantic Kernel; the application's default implementation
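The removed options also carried proxy and timeout settings (IsUserProxy, ProxyUrl, Timeout). Under the Semantic Kernel route those concerns move onto a caller-supplied HttpClient; below is a minimal sketch, assuming the connector overload that accepts an HttpClient (shown against the default OpenAI endpoint; the key is a placeholder).

```csharp
// Sketch: replicating the removed proxy/timeout settings via a
// pre-configured HttpClient handed to the OpenAI connector.
using System.Net;
using Microsoft.SemanticKernel;

var handler = new HttpClientHandler
{
    Proxy = new WebProxy("http://127.0.0.1:10809"), // ProxyUrl from the removed config
    UseProxy = true                                 // removed IsUserProxy flag
};
var httpClient = new HttpClient(handler)
{
    Timeout = TimeSpan.FromSeconds(30)              // removed Timeout setting
};

var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
    modelId: "gpt-3.5-turbo",
    apiKey: "<your-api-key>",  // placeholder
    httpClient: httpClient);   // proxy + timeout now live on the HttpClient
var kernel = builder.Build();
```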