Skip to content

Commit c9822dd

Browse files
committed
Add TextCompletion support for the google-ai provider
1 parent caf6069 commit c9822dd

File tree

11 files changed

+178
-95
lines changed

11 files changed

+178
-95
lines changed

Directory.Build.props

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
<PropertyGroup>
33
<LangVersion>10.0</LangVersion>
44
<OutputPath>..\..\..\packages</OutputPath>
5-
<BotSharpVersion>0.15.1</BotSharpVersion>
5+
<BotSharpVersion>0.16.0</BotSharpVersion>
66
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
77
</PropertyGroup>
88
</Project>

src/Infrastructure/BotSharp.Abstraction/Routing/Settings/RoutingSettings.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ public class RoutingSettings
1212
public string Description { get; set; } = string.Empty;
1313

1414
public bool EnableReasoning { get; set; } = false;
15+
public bool UseTextCompletion { get; set; } = false;
1516

1617
public string Provider { get; set; } = string.Empty;
1718

src/Infrastructure/BotSharp.Abstraction/Utilities/StringExtensions.cs

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -18,21 +18,6 @@ public static string SubstringMax(this string str, int maxLength)
1818
return str;
1919
}
2020

21-
public static string CleanPhoneNumber(this string phoneNumber)
22-
{
23-
if (phoneNumber != null && !phoneNumber.All(char.IsDigit))
24-
{
25-
phoneNumber = Regex.Replace(phoneNumber, @"[^\d]", "");
26-
}
27-
28-
if (phoneNumber != null && phoneNumber.Length > 10)
29-
{
30-
phoneNumber = phoneNumber.Substring(1);
31-
}
32-
33-
return phoneNumber;
34-
}
35-
3621
public static string[] SplitByNewLine(this string input)
3722
{
3823
return input.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);

src/Infrastructure/BotSharp.Core/Infrastructures/CompletionProvider.cs

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,4 +31,32 @@ public static IChatCompletion GetChatCompletion(IServiceProvider services, strin
3131

3232
return completer;
3333
}
34+
35+
/// <summary>
/// Resolves the registered <see cref="ITextCompletion"/> implementation for the
/// requested provider and configures it with the requested model name.
/// </summary>
/// <param name="services">Service provider used to resolve completions and conversation state.</param>
/// <param name="provider">Completion provider key (e.g. "azure-openai"); falls back to conversation state, then "azure-openai".</param>
/// <param name="model">Model name; falls back to conversation state, then "gpt-3.5-turbo".</param>
/// <returns>The matching text completion service with its model name set.</returns>
/// <exception cref="InvalidOperationException">No registered provider matches <paramref name="provider"/>.</exception>
public static ITextCompletion GetTextCompletion(IServiceProvider services, string? provider = null, string? model = null)
{
    var completions = services.GetServices<ITextCompletion>();

    var state = services.GetRequiredService<IConversationStateService>();

    // Caller arguments win; otherwise fall back to per-conversation state,
    // and finally to hard-coded defaults.
    if (string.IsNullOrEmpty(provider))
    {
        provider = state.GetState("provider", "azure-openai");
    }

    if (string.IsNullOrEmpty(model))
    {
        model = state.GetState("model", "gpt-3.5-turbo");
    }

    var completer = completions.FirstOrDefault(x => x.Provider == provider);
    if (completer == null)
    {
        var logger = services.GetRequiredService<ILogger<CompletionProvider>>();
        logger.LogError($"Can't resolve completion provider by {provider}");
        // The original fell through and dereferenced the null completer,
        // surfacing as a NullReferenceException. Fail fast with a meaningful
        // message so misconfiguration is diagnosable.
        throw new InvalidOperationException($"Can't resolve text completion provider by {provider}");
    }

    completer.SetModelName(model);

    return completer;
}
3462
}

src/Infrastructure/BotSharp.Core/Routing/RoutingService.GetNextInstruction.cs

Lines changed: 20 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
using BotSharp.Abstraction.Functions.Models;
2-
using BotSharp.Abstraction.Routing;
32
using BotSharp.Abstraction.Routing.Models;
43
using System.Drawing;
54
using System.Text.RegularExpressions;
@@ -19,16 +18,30 @@ public async Task<FunctionCallFromLlm> GetNextInstruction(string prompt)
1918
var content = $"{prompt} Response must be in JSON format {responseFormat}";
2019

2120
var state = _services.GetRequiredService<IConversationStateService>();
22-
var provider = state.GetState("provider", _settings.Provider);
23-
var model = state.GetState("model", _settings.Model);
24-
var chatCompletion = CompletionProvider.GetChatCompletion(_services,
25-
provider: provider,
26-
model: model);
21+
2722

28-
var response = chatCompletion.GetChatCompletions(_routerInstance.Router, new List<RoleDialogModel>
23+
RoleDialogModel response = default;
24+
if (_settings.UseTextCompletion)
25+
{
26+
var completion = CompletionProvider.GetTextCompletion(_services,
27+
provider: _settings.Provider,
28+
model: _settings.Model);
29+
30+
content = _routerInstance.Router.Instruction + "\r\n\r\n" + content + "\r\nResponse: ";
31+
var text = await completion.GetCompletion(content);
32+
response = new RoleDialogModel(AgentRole.Assistant, text);
33+
}
34+
else
35+
{
36+
var completion = CompletionProvider.GetChatCompletion(_services,
37+
provider: _settings.Provider,
38+
model: _settings.Model);
39+
40+
response = completion.GetChatCompletions(_routerInstance.Router, new List<RoleDialogModel>
2941
{
3042
new RoleDialogModel(AgentRole.User, content)
3143
});
44+
}
3245

3346
var args = new FunctionCallFromLlm();
3447
try

src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/ChatCompletionProvider.cs

Lines changed: 6 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -37,59 +37,15 @@ public ChatCompletionProvider(AzureOpenAiSettings settings,
3737
_tokenStatistics = tokenStatistics;
3838
}
3939

40-
protected virtual (OpenAIClient, string) GetClient()
41-
{
42-
if (_model == "gpt-4")
43-
{
44-
var client = new OpenAIClient(new Uri(_settings.GPT4.Endpoint), new AzureKeyCredential(_settings.GPT4.ApiKey));
45-
return (client, _settings.GPT4.DeploymentModel);
46-
}
47-
else
48-
{
49-
var client = new OpenAIClient(new Uri(_settings.Endpoint), new AzureKeyCredential(_settings.ApiKey));
50-
return (client, _settings.DeploymentModel.ChatCompletionModel);
51-
}
52-
}
53-
54-
public List<RoleDialogModel> GetChatSamples(string sampleText)
55-
{
56-
var samples = new List<RoleDialogModel>();
57-
if (string.IsNullOrEmpty(sampleText))
58-
{
59-
return samples;
60-
}
61-
62-
var lines = sampleText.Split('\n');
63-
for (int i = 0; i < lines.Length; i++)
64-
{
65-
var line = lines[i];
66-
if (string.IsNullOrEmpty(line.Trim()))
67-
{
68-
continue;
69-
}
70-
var role = line.Substring(0, line.IndexOf(' ') - 1).Trim();
71-
var content = line.Substring(line.IndexOf(' ') + 1).Trim();
72-
73-
// comments
74-
if (role == "##")
75-
{
76-
continue;
77-
}
78-
79-
samples.Add(new RoleDialogModel(role, content));
80-
}
81-
82-
return samples;
83-
}
84-
8540
public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> conversations)
8641
{
87-
var (client, deploymentModel) = GetClient();
42+
var (client, deploymentModel) = ProviderHelper.GetClient(_model, _settings);
8843
var chatCompletionsOptions = PrepareOptions(agent, conversations);
8944

9045
_tokenStatistics.StartTimer();
9146
var response = client.GetChatCompletions(deploymentModel, chatCompletionsOptions);
9247
_tokenStatistics.StopTimer();
48+
9349
var choice = response.Value.Choices[0];
9450
var message = choice.Message;
9551

@@ -104,7 +60,7 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
10460

10561
if (choice.FinishReason == CompletionsFinishReason.FunctionCall)
10662
{
107-
_logger.LogInformation($"[{agent.Name}]: {message.FunctionCall.Name} => {message.FunctionCall.Arguments}");
63+
_logger.LogInformation($"[{agent.Name}]: {message.FunctionCall.Name}({message.FunctionCall.Arguments})");
10864

10965
var funcContextIn = new RoleDialogModel(AgentRole.Function, message.Content)
11066
{
@@ -137,7 +93,7 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
13793
Func<RoleDialogModel, Task> onMessageReceived,
13894
Func<RoleDialogModel, Task> onFunctionExecuting)
13995
{
140-
var (client, deploymentModel) = GetClient();
96+
var (client, deploymentModel) = ProviderHelper.GetClient(_model, _settings);
14197
var chatCompletionsOptions = PrepareOptions(agent, conversations);
14298

14399
var response = await client.GetChatCompletionsAsync(deploymentModel, chatCompletionsOptions);
@@ -155,7 +111,7 @@ public async Task<bool> GetChatCompletionsAsync(Agent agent,
155111

156112
if (choice.FinishReason == CompletionsFinishReason.FunctionCall)
157113
{
158-
_logger.LogInformation($"[{agent.Name}]: {message.FunctionCall.Name} => {message.FunctionCall.Arguments}");
114+
_logger.LogInformation($"[{agent.Name}]: {message.FunctionCall.Name}({message.FunctionCall.Arguments})");
159115

160116
var funcContextIn = new RoleDialogModel(AgentRole.Function, message.Content)
161117
{
@@ -246,7 +202,7 @@ protected ChatCompletionsOptions PrepareOptions(Agent agent, List<RoleDialogMode
246202
chatCompletionsOptions.Messages.Add(new ChatMessage(ChatRole.System, agent.Knowledges));
247203
}
248204

249-
var samples = GetChatSamples(agent.Samples);
205+
var samples = ProviderHelper.GetChatSamples(agent.Samples);
250206
foreach (var message in samples)
251207
{
252208
chatCompletionsOptions.Messages.Add(new ChatMessage(message.Role, message.Content));
Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
using Azure;
using Azure.AI.OpenAI;
using BotSharp.Abstraction.Conversations.Models;
using BotSharp.Plugin.AzureOpenAI.Settings;
using System;
using System.Collections.Generic;

namespace BotSharp.Plugin.AzureOpenAI.Providers;

/// <summary>
/// Helpers shared by the Azure OpenAI chat and text completion providers:
/// client construction and parsing of sample dialog text.
/// </summary>
public class ProviderHelper
{
    /// <summary>
    /// Builds an <see cref="OpenAIClient"/> for the requested model and returns it
    /// together with the Azure deployment name to call.
    /// </summary>
    /// <param name="model">Logical model name; "gpt-4" selects the dedicated GPT-4 resource.</param>
    /// <param name="settings">Azure OpenAI endpoint/key/deployment configuration.</param>
    public static (OpenAIClient, string) GetClient(string model, AzureOpenAiSettings settings)
    {
        if (model == "gpt-4")
        {
            // GPT-4 is hosted in its own Azure resource with its own deployment.
            var client = new OpenAIClient(new Uri(settings.GPT4.Endpoint), new AzureKeyCredential(settings.GPT4.ApiKey));
            return (client, settings.GPT4.DeploymentModel);
        }
        else
        {
            var client = new OpenAIClient(new Uri(settings.Endpoint), new AzureKeyCredential(settings.ApiKey));
            return (client, settings.DeploymentModel.ChatCompletionModel);
        }
    }

    /// <summary>
    /// Parses sample dialog text — one "role: content" pair per line — into role
    /// dialog models. Blank lines and "##" comment lines are skipped.
    /// </summary>
    /// <param name="sampleText">Raw sample text; null/empty yields an empty list.</param>
    public static List<RoleDialogModel> GetChatSamples(string sampleText)
    {
        var samples = new List<RoleDialogModel>();
        if (string.IsNullOrEmpty(sampleText))
        {
            return samples;
        }

        var lines = sampleText.Split('\n');
        for (int i = 0; i < lines.Length; i++)
        {
            var line = lines[i];
            if (string.IsNullOrEmpty(line.Trim()))
            {
                continue;
            }

            // Comment lines. The original compared the parsed role against "##",
            // which never matches a "## comment" line (the parse yields "#"),
            // so check the raw line prefix instead.
            if (line.TrimStart().StartsWith("##"))
            {
                continue;
            }

            // Guard malformed lines: without a separator space, IndexOf(' ')
            // returns -1 and Substring(0, -2) would throw ArgumentOutOfRangeException.
            var separator = line.IndexOf(' ');
            if (separator <= 0)
            {
                continue;
            }

            // "user: hello" -> role "user" (trailing ':' dropped), content "hello".
            var role = line.Substring(0, separator - 1).Trim();
            var content = line.Substring(separator + 1).Trim();

            samples.Add(new RoleDialogModel(role, content));
        }

        return samples;
    }
}
Lines changed: 31 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,43 +1,68 @@
11
using Azure.AI.OpenAI;
2-
using Azure;
32
using BotSharp.Abstraction.MLTasks;
43
using System;
54
using System.Threading.Tasks;
65
using BotSharp.Plugin.AzureOpenAI.Settings;
76
using Microsoft.Extensions.Logging;
7+
using BotSharp.Abstraction.Conversations;
8+
using Microsoft.Extensions.DependencyInjection;
9+
using BotSharp.Abstraction.Conversations.Models;
810

911
namespace BotSharp.Plugin.AzureOpenAI.Providers;
1012

1113
public class TextCompletionProvider : ITextCompletion
1214
{
15+
private readonly IServiceProvider _services;
1316
private readonly AzureOpenAiSettings _settings;
1417
private readonly ILogger _logger;
15-
bool _useAzureOpenAI = true;
18+
private readonly ITokenStatistics _tokenStatistics;
1619
private string _model;
1720
public string Provider => "azure-openai";
1821

19-
public TextCompletionProvider(AzureOpenAiSettings settings, ILogger<TextCompletionProvider> logger)
22+
public TextCompletionProvider(IServiceProvider services,
23+
AzureOpenAiSettings settings,
24+
ILogger<TextCompletionProvider> logger,
25+
ITokenStatistics tokenStatistics)
2026
{
27+
_services = services;
2128
_settings = settings;
2229
_logger = logger;
30+
_tokenStatistics = tokenStatistics;
2331
}
2432

2533
public async Task<string> GetCompletion(string text)
2634
{
27-
var client = GetOpenAIClient();
35+
var (client, _) = ProviderHelper.GetClient(_model, _settings);
36+
2837
var completionsOptions = new CompletionsOptions()
2938
{
3039
Prompts =
3140
{
3241
text
3342
},
34-
Temperature = 0.7f,
3543
MaxTokens = 256
3644
};
3745

46+
var state = _services.GetRequiredService<IConversationStateService>();
47+
var temperature = float.Parse(state.GetState("temperature", "0.5"));
48+
var samplingFactor = float.Parse(state.GetState("sampling_factor", "0.5"));
49+
completionsOptions.Temperature = temperature;
50+
completionsOptions.NucleusSamplingFactor = samplingFactor;
51+
52+
_tokenStatistics.StartTimer();
3853
var response = await client.GetCompletionsAsync(
3954
deploymentOrModelName: _settings.DeploymentModel.TextCompletionModel,
4055
completionsOptions);
56+
_tokenStatistics.StopTimer();
57+
58+
_tokenStatistics.AddToken(new TokenStatsModel
59+
{
60+
Model = _model,
61+
PromptCount = response.Value.Usage.PromptTokens,
62+
CompletionCount = response.Value.Usage.CompletionTokens,
63+
PromptCost = 0.0015f,
64+
CompletionCost = 0.002f
65+
});
4166

4267
// OpenAI
4368
var completion = "";
@@ -46,7 +71,7 @@ public async Task<string> GetCompletion(string text)
4671
completion += t.Text;
4772
};
4873

49-
_logger.LogInformation(text + completion);
74+
_logger.LogInformation(text);
5075

5176
return completion.Trim();
5277
}
@@ -55,14 +80,4 @@ public void SetModelName(string model)
5580
{
5681
_model = model;
5782
}
58-
59-
private OpenAIClient GetOpenAIClient()
60-
{
61-
OpenAIClient client = _useAzureOpenAI
62-
? new OpenAIClient(
63-
new Uri(_settings.Endpoint),
64-
new AzureKeyCredential(_settings.ApiKey))
65-
: new OpenAIClient("your-api-key-from-platform.openai.com");
66-
return client;
67-
}
6883
}

src/Plugins/BotSharp.Plugin.GoogleAI/Providers/ChatCompletionProvider.cs

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,10 +29,9 @@ public ChatCompletionProvider(IServiceProvider services,
2929
public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> conversations)
3030
{
3131
var client = new GooglePalmClient(apiKey: _settings.PaLM.ApiKey);
32-
List<PalmChatMessage> messages = new()
33-
{
34-
new(conversations.Last().Content, "user"),
35-
};
32+
var messages = conversations.Select(c => new PalmChatMessage(c.Content, c.Role == AgentRole.User ? "user" : "AI"))
33+
.ToList();
34+
3635
_tokenStatistics.StartTimer();
3736
var response = client.ChatAsync(messages, agent.Instruction, null).Result;
3837
_tokenStatistics.StopTimer();

0 commit comments

Comments
 (0)