Kernel 内核 #
概述 #
Kernel 是 Semantic Kernel 的核心引擎,是所有操作的协调中心。它负责管理 AI 服务、插件、函数执行和依赖注入。
创建 Kernel #
方式一:Builder 模式 #
csharp
using Microsoft.SemanticKernel;
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "gpt-4",
apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")
);
var kernel = builder.Build();
方式二:依赖注入 #
csharp
using Microsoft.Extensions.DependencyInjection;
var services = new ServiceCollection();
services.AddKernel()
.AddOpenAIChatCompletion(
modelId: "gpt-4",
apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")
);
var serviceProvider = services.BuildServiceProvider();
var kernel = serviceProvider.GetRequiredService<Kernel>();
方式三:直接实例化 #
csharp
// 注意:Kernel 实例构建后其服务集合不可变,不能在 Kernel 对象上直接添加 AI 服务;
// 需先配置好 IServiceProvider,再通过构造函数传入
var services = new ServiceCollection();
services.AddOpenAIChatCompletion(
modelId: "gpt-4",
apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")
);
var kernel = new Kernel(services.BuildServiceProvider());
服务配置 #
添加多个 AI 服务 #
csharp
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
serviceId: "openai-gpt4",
modelId: "gpt-4",
apiKey: "openai-api-key"
);
builder.AddOpenAIChatCompletion(
serviceId: "openai-gpt35",
modelId: "gpt-3.5-turbo",
apiKey: "openai-api-key"
);
builder.AddAzureOpenAIChatCompletion(
serviceId: "azure-gpt4",
deploymentName: "gpt-4-deployment",
endpoint: "https://your-resource.openai.azure.com/",
apiKey: "azure-api-key"
);
var kernel = builder.Build();
选择特定服务 #
csharp
// InvokePromptAsync 没有 serviceName 参数,
// 应通过 PromptExecutionSettings.ServiceId 指定要使用的服务
var settings = new OpenAIPromptExecutionSettings { ServiceId = "azure-gpt4" };
var arguments = new KernelArguments(settings);
var result = await kernel.InvokePromptAsync("你好", arguments);
配置执行设置 #
csharp
using Microsoft.SemanticKernel.Connectors.OpenAI;
var executionSettings = new OpenAIPromptExecutionSettings
{
MaxTokens = 1000,
Temperature = 0.7,
TopP = 0.9,
FrequencyPenalty = 0.5,
PresencePenalty = 0.5,
StopSequences = new[] { "###", "END" }
};
var arguments = new KernelArguments(executionSettings);
var result = await kernel.InvokePromptAsync("写一首诗", arguments);
嵌入服务配置 #
csharp
builder.AddOpenAITextEmbeddingGeneration(
modelId: "text-embedding-3-small",
apiKey: "api-key"
);
builder.AddAzureOpenAITextEmbeddingGeneration(
deploymentName: "text-embedding-ada-002",
endpoint: "https://your-resource.openai.azure.com/",
apiKey: "api-key"
);
插件管理 #
注册插件 #
csharp
// 方式 1:从类型注册
kernel.Plugins.AddFromType<MathPlugin>("Math");
// 方式 2:从对象注册
var mathPlugin = new MathPlugin();
kernel.Plugins.AddFromObject(mathPlugin, "Math");
// 方式 3:从函数集合创建
var functions = new[]
{
kernel.CreateFunctionFromMethod(MathPlugin.Add, "Add"),
kernel.CreateFunctionFromMethod(MathPlugin.Multiply, "Multiply")
};
kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("Math", functions));
// 方式 4:从提示词目录导入
kernel.ImportPluginFromPromptDirectory("./Prompts", "MyPlugin");
// 方式 5:从 OpenAPI 规范导入
await kernel.ImportPluginFromOpenApiAsync("MyApiPlugin", "openapi.json");
管理插件 #
csharp
// 获取所有插件
foreach (var plugin in kernel.Plugins)
{
Console.WriteLine($"Plugin: {plugin.Name}");
foreach (var function in plugin)
{
Console.WriteLine($" - {function.Name}: {function.Description}");
}
}
// 获取特定插件
var mathPlugin = kernel.Plugins["Math"];
// 获取插件中的函数
var addFunction = kernel.Plugins["Math"]["Add"];
// 检查插件是否存在
if (kernel.Plugins.TryGetPlugin("Math", out var plugin))
{
// 使用插件
}
// 移除插件(Remove 接收 KernelPlugin 实例,而非插件名)
if (kernel.Plugins.TryGetPlugin("Math", out var pluginToRemove))
{
    kernel.Plugins.Remove(pluginToRemove);
}
函数执行 #
执行提示词 #
csharp
// 简单提示词
var result = await kernel.InvokePromptAsync("你好");
// 带参数的提示词
var arguments = new KernelArguments
{
["name"] = "小明",
["topic"] = "人工智能"
};
var result = await kernel.InvokePromptAsync(
"你好{{$name}},请介绍一下{{$topic}}",
arguments
);
// 带执行设置的提示词
var settings = new OpenAIPromptExecutionSettings
{
Temperature = 0.8,
MaxTokens = 500
};
var arguments = new KernelArguments(settings)
{
["topic"] = "机器学习"
};
var result = await kernel.InvokePromptAsync("介绍{{$topic}}", arguments);
执行插件函数 #
csharp
// 通过名称执行
var result = await kernel.InvokeAsync(
"Math",
"Add",
new KernelArguments { ["a"] = 5, ["b"] = 3 }
);
// 通过函数引用执行
var function = kernel.Plugins["Math"]["Add"];
var result = await kernel.InvokeAsync(
function,
new KernelArguments { ["a"] = 5, ["b"] = 3 }
);
// 链式调用
var result = await function.InvokeAsync(
kernel,
new KernelArguments { ["a"] = 5, ["b"] = 3 }
);
流式执行 #
csharp
// 流式提示词执行
await foreach (var chunk in kernel.InvokePromptStreamingAsync("写一首诗"))
{
Console.Write(chunk);
}
// 流式函数执行
var function = kernel.Plugins["Text"]["Summarize"];
await foreach (var chunk in kernel.InvokeStreamingAsync(function, arguments))
{
Console.Write(chunk);
}
依赖注入集成 #
ASP.NET Core 集成 #
csharp
// Program.cs
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddKernel()
.AddAzureOpenAIChatCompletion(
deploymentName: builder.Configuration["AzureOpenAI:DeploymentName"]!,
endpoint: builder.Configuration["AzureOpenAI:Endpoint"]!,
apiKey: builder.Configuration["AzureOpenAI:ApiKey"]!
);
builder.Services.AddTransient<MyService>();
var app = builder.Build();
// MyService.cs
public class MyService
{
private readonly Kernel _kernel;
public MyService(Kernel kernel)
{
_kernel = kernel;
}
public async Task<string> ProcessAsync(string input)
{
    // InvokePromptAsync 返回 FunctionResult,需转换为字符串
    var result = await _kernel.InvokePromptAsync(input);
    return result.ToString();
}
}
手动依赖注入 #
csharp
var services = new ServiceCollection();
services.AddKernel()
.AddOpenAIChatCompletion("gpt-4", "api-key");
services.AddSingleton<IMyService, MyService>();
var serviceProvider = services.BuildServiceProvider();
var kernel = serviceProvider.GetRequiredService<Kernel>();
高级配置 #
自定义 HttpClient #
csharp
var handler = new HttpClientHandler
{
Proxy = new WebProxy("http://proxy:8080"),
UseProxy = true
};
var httpClient = new HttpClient(handler);
builder.AddOpenAIChatCompletion(
modelId: "gpt-4",
apiKey: "api-key",
httpClient: httpClient
);
配置重试策略 #
csharp
using Microsoft.Extensions.Http.Resilience;
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "gpt-4",
apiKey: "api-key",
httpClient: new HttpClient(
new ResilienceHandler(
new HttpRetryStrategyOptions
{
MaxRetryAttempts = 3,
BackoffType = DelayBackoffType.Exponential
}
)
{
InnerHandler = new HttpClientHandler()
}
)
);
配置超时 #
csharp
var httpClient = new HttpClient
{
Timeout = TimeSpan.FromMinutes(5)
};
builder.AddOpenAIChatCompletion(
modelId: "gpt-4",
apiKey: "api-key",
httpClient: httpClient
);
KernelArguments 详解 #
基本用法 #
csharp
var arguments = new KernelArguments();
arguments["name"] = "小明";
arguments["age"] = 25;
arguments["items"] = new[] { "苹果", "香蕉", "橙子" };
var result = await kernel.InvokePromptAsync(
"你好{{$name}},你今年{{$age}}岁,你喜欢:{{$items}}",
arguments
);
结合执行设置 #
csharp
var settings = new OpenAIPromptExecutionSettings
{
Temperature = 0.7,
MaxTokens = 500
};
var arguments = new KernelArguments(settings)
{
["topic"] = "人工智能"
};
动态更新参数 #
csharp
var arguments = new KernelArguments
{
["name"] = "小明"
};
// 更新参数
arguments["name"] = "小红";
// 添加新参数
arguments["language"] = "中文";
// 移除参数
arguments.Remove("name");
事件与钩子 #
函数调用事件 #
csharp
// 注意:FunctionInvoking / FunctionInvoked 事件在 Semantic Kernel 1.x 中已标记为过时,
// 新代码建议改用下文介绍的过滤器机制
kernel.FunctionInvoking += (sender, e) =>
{
    Console.WriteLine($"即将调用: {e.Function.Name}");
};
kernel.FunctionInvoked += (sender, e) =>
{
    Console.WriteLine($"调用完成: {e.Function.Name}");
    Console.WriteLine($"结果: {e.Result}");
};
使用过滤器 #
csharp
public class LoggingFilter : IFunctionInvocationFilter
{
    public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func<FunctionInvocationContext, Task> next)
    {
        Console.WriteLine($"[Before] {context.Function.Name}");
        await next(context);
        Console.WriteLine($"[After] {context.Function.Name}: {context.Result}");
    }
}
kernel.FunctionInvocationFilters.Add(new LoggingFilter());
最佳实践 #
1. 使用依赖注入 #
csharp
// 推荐:使用 DI
services.AddKernel()
.AddAzureOpenAIChatCompletion(...);
// 不推荐:硬编码
var kernel = Kernel.CreateBuilder()
.AddOpenAIChatCompletion("gpt-4", "hardcoded-key")
.Build();
2. 配置外部化 #
csharp
// appsettings.json
{
"OpenAI": {
"ModelId": "gpt-4",
"ApiKey": ""
}
}
// 代码
builder.AddOpenAIChatCompletion(
modelId: configuration["OpenAI:ModelId"]!,
apiKey: configuration["OpenAI:ApiKey"]!
);
3. 合理设置超时 #
csharp
var httpClient = new HttpClient
{
Timeout = TimeSpan.FromSeconds(30) // 根据实际情况设置
};
4. 错误处理 #
csharp
try
{
var result = await kernel.InvokePromptAsync("问题");
}
catch (HttpOperationException ex)
{
// HTTP 错误
}
catch (KernelException ex)
{
// Kernel 错误
}
catch (Exception ex)
{
// 其他错误
}
下一步 #
现在你已经掌握了 Kernel 的核心用法,接下来学习 插件系统,了解如何开发和使用插件!
最后更新:2026-04-04