Bump Azure.AI.OpenAI to 1.0.0-beta.9
Oceania2018 committed Nov 13, 2023
1 parent 8378af8 commit 57870ae
Showing 8 changed files with 27 additions and 37 deletions.
2 changes: 1 addition & 1 deletion Directory.Build.props
@@ -2,7 +2,7 @@
<PropertyGroup>
<LangVersion>10.0</LangVersion>
<OutputPath>..\..\..\packages</OutputPath>
-<BotSharpVersion>0.18.0</BotSharpVersion>
+<BotSharpVersion>0.19.0</BotSharpVersion>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
</PropertyGroup>
</Project>
5 changes: 3 additions & 2 deletions README.md
@@ -1,5 +1,5 @@
-# The Open Source LLM Application Framework
-## Connect LLMs to your existing application
+# The Open Source AI Agent Application Framework
+## Connect LLMs to your existing application focused on your business

[![Discord](https://img.shields.io/discord/1106946823282761851?label=Discord)](https://discord.gg/qRVm82fKTS)
[![QQ群聊](https://img.shields.io/static/v1?label=QQ&message=群聊&color=brightgreen)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=sN9VVMwbWjs5L0ATpizKKxOcZdEPMrp8&authKey=RLDw41bLTrEyEgZZi%2FzT4pYk%2BwmEFgFcrhs8ZbkiVY7a4JFckzJefaYNW6Lk4yPX&noverify=0&group_code=985366726)
@@ -52,6 +52,7 @@ BotSharp uses component design, the kernel is kept to a minimum, and business fu
- BotSharp.Plugin.MetaAI
- BotSharp.Plugin.HuggingFace
- BotSharp.Plugin.LLamaSharp
+- BotSharp.Plugin.SemanticKernel

#### Messaging / Channel
- BotSharp.OpenAPI
4 changes: 2 additions & 2 deletions docs/conf.py
@@ -64,9 +64,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.18'
+version = '0.19'
# The full version, including alpha/beta/rc tags.
-release = '0.18.0'
+release = '0.19.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -1,4 +1,4 @@
-<Project Sdk="Microsoft.NET.Sdk">
+<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
@@ -10,7 +10,7 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.8" />
<PackageReference Include="Azure.AI.OpenAI" Version="1.0.0-beta.9" />
</ItemGroup>

<ItemGroup>
@@ -47,8 +47,8 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con

var client = ProviderHelper.GetClient(_model, _settings);
var (prompt, chatCompletionsOptions) = PrepareOptions(agent, conversations);

-var response = client.GetChatCompletions(_model, chatCompletionsOptions);
+chatCompletionsOptions.DeploymentName = _model;
+var response = client.GetChatCompletions(chatCompletionsOptions);
var choice = response.Value.Choices[0];
var message = choice.Message;

@@ -110,7 +110,8 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
var client = ProviderHelper.GetClient(_model, _settings);
var (prompt, chatCompletionsOptions) = PrepareOptions(agent, conversations);

-var response = await client.GetChatCompletionsAsync(_model, chatCompletionsOptions);
+chatCompletionsOptions.DeploymentName = _model;
+var response = await client.GetChatCompletionsAsync(chatCompletionsOptions);
var choice = response.Value.Choices[0];
var message = choice.Message;

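Note on the two hunks above: in Azure.AI.OpenAI 1.0.0-beta.9 the deployment (or model) name is no longer a method argument; it travels on `ChatCompletionsOptions.DeploymentName`, and `GetChatCompletions`/`GetChatCompletionsAsync` take only the options object. A minimal sketch of the new call shape, assuming the client and options are built elsewhere (the `GetReplyAsync` wrapper below is illustrative, not BotSharp's actual provider code):

```csharp
using System.Threading.Tasks;
using Azure;
using Azure.AI.OpenAI;

public static class ChatCompletionMigrationSketch
{
    // Illustrative helper mirroring the committed change.
    public static async Task<string> GetReplyAsync(
        OpenAIClient client, ChatCompletionsOptions options, string deploymentName)
    {
        // beta.8: await client.GetChatCompletionsAsync(deploymentName, options);
        // beta.9: the deployment name rides on the options object instead.
        options.DeploymentName = deploymentName;

        Response<ChatCompletions> response = await client.GetChatCompletionsAsync(options);
        return response.Value.Choices[0].Message.Content;
    }
}
```
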
@@ -162,39 +163,27 @@ public async Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleD
{
var client = ProviderHelper.GetClient(_model, _settings);
var (prompt, chatCompletionsOptions) = PrepareOptions(agent, conversations);

-var response = await client.GetChatCompletionsStreamingAsync(_model, chatCompletionsOptions);
-using StreamingChatCompletions streaming = response.Value;
+chatCompletionsOptions.DeploymentName = _model;
+var response = await client.GetChatCompletionsStreamingAsync(chatCompletionsOptions);

string output = "";
-await foreach (var choice in streaming.GetChoicesStreaming())
+await foreach (var choice in response)
{
if (choice.FinishReason == CompletionsFinishReason.FunctionCall)
{
-var args = "";
-await foreach (var message in choice.GetMessageStreaming())
-{
-if (message.FunctionCall == null || message.FunctionCall.Arguments == null)
-continue;
-Console.Write(message.FunctionCall.Arguments);
-args += message.FunctionCall.Arguments;
+Console.Write(choice.FunctionArgumentsUpdate);

-}
-await onMessageReceived(new RoleDialogModel(ChatRole.Assistant.ToString(), args));
+await onMessageReceived(new RoleDialogModel(ChatRole.Assistant.ToString(), choice.FunctionArgumentsUpdate));
continue;
}

-await foreach (var message in choice.GetMessageStreaming())
-{
-if (message.Content == null)
-continue;
-Console.Write(message.Content);
-output += message.Content;
+if (choice.ContentUpdate == null)
+continue;
+Console.Write(choice.ContentUpdate);

-_logger.LogInformation(message.Content);
+_logger.LogInformation(choice.ContentUpdate);

-await onMessageReceived(new RoleDialogModel(message.Role.ToString(), message.Content));
-}
+await onMessageReceived(new RoleDialogModel(choice.Role.ToString(), choice.ContentUpdate));

output = "";
}
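The streaming hunk above reflects beta.9's reworked streaming surface: the awaited result of `GetChatCompletionsStreamingAsync` is itself async-enumerable, and the nested `GetChoicesStreaming()`/`GetMessageStreaming()` enumerators are replaced by per-update delta properties (`ContentUpdate`, `FunctionArgumentsUpdate`). A sketch of the same consumption pattern, with the client and options assumed to come from elsewhere:

```csharp
using System;
using System.Threading.Tasks;
using Azure.AI.OpenAI;

public static class StreamingMigrationSketch
{
    // Illustrative helper mirroring the committed streaming loop, not the exact provider code.
    public static async Task StreamReplyAsync(
        OpenAIClient client, ChatCompletionsOptions options, string deploymentName)
    {
        options.DeploymentName = deploymentName;   // beta.9: deployment name set on the options
        var response = await client.GetChatCompletionsStreamingAsync(options);

        await foreach (var update in response)     // each item is an incremental update
        {
            if (update.FinishReason == CompletionsFinishReason.FunctionCall)
            {
                // Function-call arguments arrive as deltas via FunctionArgumentsUpdate.
                Console.Write(update.FunctionArgumentsUpdate);
                continue;
            }

            if (update.ContentUpdate != null)
            {
                // Assistant text arrives as deltas via ContentUpdate.
                Console.Write(update.ContentUpdate);
            }
        }
    }
}
```
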
@@ -77,10 +77,8 @@ public async Task<string> GetCompletion(string text, string agentId, string mess
var samplingFactor = float.Parse(state.GetState("sampling_factor", "0.5"));
completionsOptions.Temperature = temperature;
completionsOptions.NucleusSamplingFactor = samplingFactor;

-var response = await client.GetCompletionsAsync(
-deploymentOrModelName: _model,
-completionsOptions);
+completionsOptions.DeploymentName = _model;
+var response = await client.GetCompletionsAsync(completionsOptions);

// OpenAI
var completion = "";
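The same migration applies to the legacy (non-chat) completions call above: the `deploymentOrModelName` parameter is replaced by `CompletionsOptions.DeploymentName`. A brief sketch, again with client and options assumed to be prepared elsewhere:

```csharp
using System.Threading.Tasks;
using Azure;
using Azure.AI.OpenAI;

public static class TextCompletionMigrationSketch
{
    // Illustrative helper showing the beta.9 call shape for text completions.
    public static async Task<string> GetTextAsync(
        OpenAIClient client, CompletionsOptions options, string deploymentName)
    {
        // beta.8: await client.GetCompletionsAsync(deploymentOrModelName: deploymentName, options);
        options.DeploymentName = deploymentName;

        Response<Completions> response = await client.GetCompletionsAsync(options);
        return response.Value.Choices[0].Text;
    }
}
```
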
3 changes: 2 additions & 1 deletion src/WebStarter/WebStarter.csproj
@@ -59,11 +59,12 @@
<ProjectReference Include="..\Plugins\BotSharp.Plugin.Qdrant\BotSharp.Plugin.Qdrant.csproj" />
<ProjectReference Include="..\Plugins\BotSharp.Plugin.RoutingSpeeder\BotSharp.Plugin.RoutingSpeeder.csproj" />
<ProjectReference Include="..\Plugins\BotSharp.Plugin.WeChat\BotSharp.Plugin.WeChat.csproj" />
<ProjectReference Include="..\Plugins\BotSharp.Plugin.SemanticKernel\BotSharp.Plugin.SemanticKernel.csproj" />
<ProjectReference Include="..\Plugins\BotSharp.Plugin.Twilio\BotSharp.Plugin.Twilio.csproj" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\..\tests\BotSharp.Plugin.PizzaBot\BotSharp.Plugin.PizzaBot.csproj" />
<ProjectReference Include="..\Plugins\BotSharp.Plugin.Twilio\BotSharp.Plugin.Twilio.csproj" />
</ItemGroup>

</Project>
1 change: 1 addition & 0 deletions src/WebStarter/appsettings.json
@@ -142,6 +142,7 @@
// "BotSharp.Plugin.Twilio",
"BotSharp.Plugin.HuggingFace",
"BotSharp.Plugin.LLamaSharp",
// "BotSharp.Plugin.SemanticKernel",
"BotSharp.Plugin.KnowledgeBase",
"BotSharp.Plugin.Qdrant",
"BotSharp.Plugin.PaddleSharp",
