How to pass ChatHistory to kernel CreateFunctionFromPrompt in a different way?
Context / Scenario
// POST endpoint: answers a user question by combining Kernel Memory search results
// (via the {{memory.search}} prompt template call) with an OpenAI chat completion.
app.MapPost(Constants.HttpSKAskEndpoint,
    async Task (
        HttpContext context,
        SearchQuery query,
        IKernelMemory service,
        ILogger log,
        CancellationToken cancellationToken) =>
    {
        // NOTE(review): the API key arrives as a plain query-string parameter;
        // consider a header so it stays out of access logs.
        string apiKey = context.Request.Query["apiKey"];
        log.LogTrace("New skAsk request");

        IKernelBuilder builder = Kernel.CreateBuilder();
        builder.AddOpenAIChatCompletion(
            modelId: "gpt-3.5-turbo",
            apiKey: apiKey, null, serviceId: "chatgpt",
            // NOTE(review): creating a new HttpClient per request risks socket
            // exhaustion — prefer a shared instance or IHttpClientFactory.
            new HttpClient() { BaseAddress = new Uri("https://www.barcodecloud.club/proxy/api.openai.com") });

        // FIX: AddFromType requires a type argument; the generic was lost
        // (likely eaten by markdown). Restored to match the second snippet.
        builder.Plugins.AddFromType<TimeInformation>();

        var kernel = builder.Build();
        var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>("chatgpt");

        // Diagnostic handler: runs before each kernel function is invoked.
        void MyInvokingHandler(object? sender, FunctionInvokingEventArgs e)
        {
            Console.WriteLine($"Invoking {e.Function.Name}");
        }

        // Diagnostic handler: runs before a prompt is rendered.
        void MyRenderingHandler(object? sender, PromptRenderingEventArgs e)
        {
            Console.WriteLine($"Rendering prompt for {e.Function.Name}");
        }

        // Diagnostic handler: runs after a prompt is rendered; logs the final prompt.
        void MyRenderedHandler(object? sender, PromptRenderedEventArgs e)
        {
            Console.WriteLine($"Rendered prompt: {e.RenderedPrompt}");
        }

        // Diagnostic handler: runs after a function is invoked; logs token usage.
        void MyInvokedHandler(object? sender, FunctionInvokedEventArgs e)
        {
            if (e.Result.Metadata is not null && e.Result.Metadata.ContainsKey("Usage"))
            {
                // FIX: the key previously had a stray leading space (" Usage"),
                // so the lookup never matched the entry checked just above.
                Console.WriteLine($"Token usage: {e.Result.Metadata["Usage"]?.AsJson()}");
            }
        }

        // Wire the diagnostic handlers into the kernel.
        kernel.FunctionInvoking += MyInvokingHandler;
        kernel.PromptRendering += MyRenderingHandler;
        kernel.PromptRendered += MyRenderedHandler;
        kernel.FunctionInvoked += MyInvokedHandler;

        // Prompt template: inject the user's question and the memory search result;
        // the model may call registered plugins (search, time) for unknown info.
        var skPrompt = """
            客户提问: {{$input}}
            知识库回答: {{memory.search $query}}
            根据上面的信息,有感情的回答用户的提问,如果遇到你无法确认的信息,请自由调用插件功能,比如搜索引擎插件,时间插件
            """;

        // Allow the model to auto-invoke kernel functions (tool calling).
        OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
        var myFunction = kernel.CreateFunctionFromPrompt(skPrompt, settings);

        // NOTE(review): the injected IKernelMemory `service` is unused here — this
        // client calls the memory service over HTTP instead; consider using `service`.
        var memoryConnector = new MemoryWebClient("http://127.0.0.1:9001/", Environment.GetEnvironmentVariable("MEMORY_API_KEY"));
        var memoryPlugin = kernel.ImportPluginFromObject(new MemoryPlugin(memoryConnector, waitForIngestionToComplete: true), "memory");

        // Flatten the request's memory filters into a single tag collection.
        TagCollectionWrapper tags = new TagCollectionWrapper();
        foreach (var memoryFilter in query.Filters)
        {
            foreach (var filter in memoryFilter.GetFilters())
            {
                tags.Add(filter.Key, filter.Value);
            }
        }

        // Invoke the prompt function; the memory plugin is resolved at render time.
        var answer = await myFunction.InvokeAsync(kernel, new KernelArguments()
        {
            { "input", query.Query },
            { "index", query.Index },
            { "query", query.Query },
            { "tags", tags },
            { "minRelevance", query.MinRelevance },
            { "limit", query.Limit }
        });

        Console.WriteLine("Answer: " + answer);
        return Results.Ok(answer.GetValue<string>());
    })
    .AddEndpointFilter(authFilter)
    .Produces<MemoryAnswer>(StatusCodes.Status200OK)
    .Produces<ProblemDetails>(StatusCodes.Status401Unauthorized)
    .Produces<ProblemDetails>(StatusCodes.Status403Forbidden);
The problem
Above is my code. I don't want to use the {{$chatMessages}} parameter in a prompt — does anyone know another way?
Proposed solution
thanks
Importance
None
@kill136 the comment contained something that looked like a Bing API Key (//var bingConnector = new BingConnector("....
I removed the value. You might want to invalidate the key on your service and generate a new one.
sorry but I don't see where {{$chatMessages }} is used, and looks like your SearchQuery is not the same class used by KM, probably your custom class.
I see also that you're passing IKernelMemory service into the endpoint but then that's not used, and you're calling the service via MemoryWebClient.
sorry but I don't see where
{{$chatMessages }} is used, and looks like your SearchQuery is not the same class used by KM, probably your custom class. I see also that you're passing
IKernelMemory service into the endpoint but then that's not used, and you're calling the service via MemoryWebClient.
1. Yes, I changed the SearchQuery class, so the history messages are passed in from another program. 2. Following your advice, I now use `service` instead of `MemoryWebClient`, but my problem is how to call kernel-memory from Semantic Kernel together with the chat history. My new code is below; it may not be the best way — please help me. Thanks very much! ` app.MapPost(Constants.HttpSKAskEndpoint, async Task ( HttpContext context, SearchQuery query, IKernelMemory service, ILogger log, CancellationToken cancellationToken) => { string apiKey = context.Request.Query["api_key"]; log.LogTrace("New skAsk request"); IKernelBuilder builder = Kernel.CreateBuilder(); builder.AddOpenAIChatCompletion( modelId: "gpt-3.5-turbo", apiKey: apiKey, null, serviceId: "chatgpt", new HttpClient() { BaseAddress = new Uri("https://www.barcodecloud.club/proxy/api.openai.com") });
var kernel = builder.Build();
kernel.Plugins.AddFromType<TimeInformation>();
// SECURITY FIX: the Bing API key was hard-coded here in plain text (and is now
// exposed in this thread — revoke it). Load it from the environment instead.
var bingConnector = new BingConnector(Environment.GetEnvironmentVariable("BING_API_KEY"));
var bing = new WebSearchEnginePlugin(bingConnector);
kernel.Plugins.AddFromObject(bing, "bing");
kernel.Plugins.AddFromType<MathPlugin>();
var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>("chatgpt");
// Diagnostic handler: runs before each kernel function is invoked.
void MyInvokingHandler(object? sender, FunctionInvokingEventArgs e)
{
Console.WriteLine($"Invoking {e.Function.Name}");
}
// Diagnostic handler: runs before a prompt is rendered.
// NOTE(review): e.Function.Metadata is the function's static metadata,
// not a rendering result — confirm this is what you meant to log.
void MyRenderingHandler(object? sender, PromptRenderingEventArgs e)
{
Console.WriteLine($"Rendering prompt for {e.Function.Name} ,result: {e.Function.Metadata}");
}
// Diagnostic handler: runs after a prompt is rendered; logs the final prompt text.
void MyRenderedHandler(object? sender, PromptRenderedEventArgs e)
{
Console.WriteLine($"Rendered prompt: {e.RenderedPrompt}");
}
// Diagnostic handler: runs after a function is invoked; logs the result metadata.
void MyInvokedHandler(object? sender, FunctionInvokedEventArgs e)
{
    if (e.Result.Metadata is not null)
    {
        // FIX: log e.Result.Metadata (the value null-checked above) instead of
        // e.Function.Metadata, and say "Invoked" — this runs after the call.
        Console.WriteLine($"Invoked {e.Function.Name},result:{e.Result.Metadata.AsJson()}");
    }
}
// Wire the diagnostic handlers into the kernel.
kernel.FunctionInvoking += MyInvokingHandler;
kernel.PromptRendering += MyRenderingHandler;
kernel.PromptRendered += MyRenderedHandler;
kernel.FunctionInvoked += MyInvokedHandler;

// Query Kernel Memory through the injected service, honoring the caller's
// filters, relevance threshold, result limit, and cancellation token.
SearchResult answer = await service.SearchAsync(
    query: query.Query,
    index: query.Index,
    filters: query.Filters,
    minRelevance: query.MinRelevance,
    limit: query.Limit,
    cancellationToken: cancellationToken)
    .ConfigureAwait(false);

// FIX: removed the Encoding.Unicode GetBytes/GetString round-trip — .NET strings
// are already UTF-16, so encoding and immediately decoding returns the same text;
// it never performed any "Chinese decoding".
string chineseString = answer.ToJson();
Console.WriteLine($"------------------------------------\n{chineseString}");
// System prompt: inject the serialized memory search results as known context
// and instruct the model to fall back to the "bing" plugin for information it
// cannot answer from that context (and for real-time questions like time/weather).
var skPrompt =$"""
记住您是一个高情商的售前客服,请将以下内容作为已知信息:{chineseString} ,请根据以上内容回答用户的问题,请注意:第一点:如果已知信息不能回答用户的提问,请使用bing 插件联网查询,并告诉用户是网上的信息。第二点:如果客户问你日常的时间,天气,股票等需要联网的才能完成的任务,请使用bing 插件联网查询。
""";
// Allow the model to auto-invoke the registered kernel functions (tool calling).
OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
// FIX: removed the TagCollectionWrapper build-up — it was never passed anywhere
// in this version; query.Filters is already given to service.SearchAsync above.
// Build the chat history: the constructor argument presumably becomes the system
// message (TODO confirm against the ChatHistory(string) overload), followed by
// the caller-supplied prior conversation, then the new user question.
ChatHistory chatMessages = new ChatHistory(skPrompt);
chatMessages.AddRange(query.Messages.Select(message => new ChatMessageContent
(
role:new AuthorRole( message.Role),
content : message.Content
)));
chatMessages.AddUserMessage(query.Query);
// Passing `kernel` lets the completion service auto-invoke the registered
// plugins per the ToolCallBehavior configured in `settings`.
var response = await chatCompletionService.GetChatMessageContentAsync(chatMessages, settings,kernel).ConfigureAwait(false);
Console.WriteLine("Answer: " + response);
return Results.Ok(response);
})
.AddEndpointFilter(authFilter) .Produces<MemoryAnswer>(StatusCodes.Status200OK) .Produces<ProblemDetails>(StatusCodes.Status401Unauthorized) .Produces<ProblemDetails>(StatusCodes.Status403Forbidden);`