42 lines
1.4 KiB
C#
42 lines
1.4 KiB
C#
|
|
using Microsoft.SemanticKernel.ChatCompletion;
|
|
|
|
namespace Admin.NET.Application.Service.LLM;
|
|
|
|
[ApiDescriptionSettings(Name = "LLMTest", Description = "LLM测试,不可以切换模型")]
public class LLMTestService : IDynamicApiController, ITransient
{
    // Kernel resolved from DI; bound to the app's configured default model.
    private readonly Kernel _kernel;

    public LLMTestService(Kernel kernel)
    {
        _kernel = kernel;
    }

    /// <summary>
    /// Demonstrates plain LLM usage through the injected kernel. Only the
    /// configured default model is used; switching models is not supported here.
    /// </summary>
    /// <returns>The model's reply rendered as a string.</returns>
    [HttpPost]
    [ApiDescriptionSettings(Name = "Test", Description = "测试")]
    public async Task<string> TestAsync()
    {
        // 1. Non-streaming: invoke the prompt once and return the whole answer.
        var answer = await _kernel.InvokePromptAsync("请介绍自己");
        return answer.ToString();

        // 2. Streaming alternative, kept for reference:
        // var chat = _kernel.GetRequiredService<IChatCompletionService>();
        // ChatHistory chatHistory = [];
        // chatHistory.AddUserMessage("请介绍自己");
        // var response = chat.GetStreamingChatMessageContentsAsync(
        //     chatHistory: chatHistory,
        //     kernel: _kernel
        // );
        // var result = "";
        // await foreach (var chunk in response)
        // {
        //     result += chunk.Content ?? "";
        //     Console.WriteLine(chunk.Content);
        // }
        // return result;
    }
}