Features/refine json format #562

Merged 2 commits on Jul 25, 2024
@@ -17,6 +17,11 @@ public class LlmModelSetting
/// </summary>
public string Version { get; set; } = "1106-Preview";

/// <summary>
/// Api version
/// </summary>
public string? ApiVersion { get; set; }

/// <summary>
/// Deployment same functional model in a group.
/// It can be used to deploy same model in different regions.
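Note: a minimal sketch of how the new property could be set alongside the existing Version, assuming LlmModelSetting keeps a parameterless constructor; the "2024-02-01" value is illustrative, not taken from this PR.

// Sketch only: pins a per-model Azure OpenAI api-version.
var modelSetting = new LlmModelSetting
{
    Version = "1106-Preview",
    ApiVersion = "2024-02-01" // hypothetical value; when null, the provider default applies
};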
@@ -0,0 +1,41 @@
using Newtonsoft.Json.Linq;
using Newtonsoft.Json;

namespace BotSharp.Abstraction.Utilities;

public static class JsonExtensions
{
public static string FormatJson(this string? json, Formatting format = Formatting.Indented)
{
if (string.IsNullOrWhiteSpace(json))
{
return "{}";
}

try
{
var parsedJson = JObject.Parse(json);
foreach (var item in parsedJson)
{
try
{
var key = item.Key;
var value = parsedJson[key].ToString();
var parsedValue = JObject.Parse(value);
parsedJson[key] = parsedValue;
}
catch { continue; }
}

var jsonSettings = new JsonSerializerSettings
{
Formatting = format
};
return JsonConvert.SerializeObject(parsedJson, jsonSettings);
}
catch
{
return json;
}
}
}
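A quick usage sketch of the new extension (the input string below is made up): string values that themselves contain JSON are expanded into nested objects before indented serialization, null or empty input yields "{}", and unparseable input is returned unchanged.

// using BotSharp.Abstraction.Utilities;
// Hypothetical function-call arguments where "filter" holds JSON as an escaped string.
var raw = "{\"name\":\"search\",\"filter\":\"{\\\"city\\\":\\\"Tokyo\\\"}\"}";
var pretty = raw.FormatJson();
// "filter" is now an indented nested object instead of an escaped string.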
@@ -132,11 +132,11 @@ private async Task<TextCompletionResponse> GetTextCompletion(string apiUrl, stri

private string BuildApiUrl(LlmModelSetting modelSetting)
{
var url = string.Empty;
var apiVersion = !string.IsNullOrWhiteSpace(modelSetting.ApiVersion) ? modelSetting.ApiVersion : "2023-09-15-preview";
var endpoint = modelSetting.Endpoint.EndsWith("/") ?
modelSetting.Endpoint.Substring(0, modelSetting.Endpoint.Length - 1) : modelSetting.Endpoint;

url = $"{endpoint}/openai/deployments/{_model}/completions?api-version=2023-09-15-preview";
var url = $"{endpoint}/openai/deployments/{_model}/completions?api-version={apiVersion}";
return url;
}

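For reference, a standalone sketch of the URL shape produced above (endpoint and deployment name are made up):

// Illustrative values only.
var endpoint = "https://my-resource.openai.azure.com";
var deployment = "gpt-35-turbo-instruct";
var apiVersion = "2024-02-01"; // when ApiVersion is not configured, "2023-09-15-preview" is used
var url = $"{endpoint}/openai/deployments/{deployment}/completions?api-version={apiVersion}";
// => https://my-resource.openai.azure.com/openai/deployments/gpt-35-turbo-instruct/completions?api-version=2024-02-01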
@@ -1,5 +1,3 @@
using BotSharp.Abstraction.Messaging.Enums;
using BotSharp.Abstraction.Options;
using Microsoft.AspNetCore.SignalR;

namespace BotSharp.Plugin.ChatHub.Hooks;
45 changes: 1 addition & 44 deletions src/Plugins/BotSharp.Plugin.ChatHub/Hooks/StreamingLogHook.cs
@@ -1,11 +1,3 @@
using BotSharp.Abstraction.Agents.Models;
using BotSharp.Abstraction.Functions.Models;
using BotSharp.Abstraction.Loggers;
using BotSharp.Abstraction.Loggers.Enums;
using BotSharp.Abstraction.Loggers.Models;
using BotSharp.Abstraction.Options;
using BotSharp.Abstraction.Repositories;
using BotSharp.Abstraction.Routing;
using Microsoft.AspNetCore.SignalR;
using System.Text.Encodings.Web;
using System.Text.Unicode;
@@ -126,7 +118,7 @@ public override async Task OnFunctionExecuting(RoleDialogModel message)

var agent = await _agentService.LoadAgent(message.CurrentAgentId);
message.FunctionArgs = message.FunctionArgs ?? "{}";
var args = FormatJson(message.FunctionArgs);
var args = message.FunctionArgs.FormatJson();
var log = $"{message.FunctionName} <u>executing</u>\r\n```json\r\n{args}\r\n```";

var input = new ContentLogInputModel(conversationId, message)
@@ -567,40 +559,5 @@ private JsonSerializerOptions InitLocalJsonOptions(BotSharpOptions options)

return localOptions;
}

private string FormatJson(string? json)
{
var defaultJson = "{}";
if (string.IsNullOrWhiteSpace(json))
{
return defaultJson;
}

try
{
var parsedJson = JObject.Parse(json);
foreach (var item in parsedJson)
{
try
{
var key = item.Key;
var value = parsedJson[key].ToString();
var parsedValue = JObject.Parse(value);
parsedJson[key] = parsedValue;
}
catch { continue; }
}

var jsonSettings = new JsonSerializerSettings
{
Formatting = Newtonsoft.Json.Formatting.Indented
};
return JsonConvert.SerializeObject(parsedJson, jsonSettings) ?? defaultJson;
}
catch
{
return defaultJson;
}
}
#endregion
}
4 changes: 0 additions & 4 deletions src/Plugins/BotSharp.Plugin.ChatHub/Hooks/WelcomeHook.cs
@@ -1,8 +1,4 @@
using BotSharp.Abstraction.Messaging.Models.RichContent;
using BotSharp.Abstraction.Messaging;
using BotSharp.Abstraction.Templating;
using Microsoft.AspNetCore.SignalR;
using BotSharp.Abstraction.Options;

namespace BotSharp.Plugin.ChatHub.Hooks;

14 changes: 13 additions & 1 deletion src/Plugins/BotSharp.Plugin.ChatHub/Using.cs
@@ -19,4 +19,16 @@
global using BotSharp.Abstraction.Agents.Enums;
global using BotSharp.Abstraction.Conversations.Models;
global using BotSharp.OpenAPI.ViewModels.Conversations;
global using BotSharp.OpenAPI.ViewModels.Users;
global using BotSharp.OpenAPI.ViewModels.Users;
global using BotSharp.Abstraction.Agents.Models;
global using BotSharp.Abstraction.Functions.Models;
global using BotSharp.Abstraction.Loggers;
global using BotSharp.Abstraction.Loggers.Enums;
global using BotSharp.Abstraction.Loggers.Models;
global using BotSharp.Abstraction.Options;
global using BotSharp.Abstraction.Repositories;
global using BotSharp.Abstraction.Routing;
global using BotSharp.Abstraction.Messaging;
global using BotSharp.Abstraction.Messaging.Enums;
global using BotSharp.Abstraction.Messaging.Models.RichContent;
global using BotSharp.Abstraction.Templating;
@@ -78,8 +78,9 @@ private async Task<IEnumerable<MessageFileModel>> GetConversationFiles()
var conversationId = convService.ConversationId;
var dialogs = convService.GetDialogHistory(fromBreakpoint: false);
var messageIds = dialogs.Select(x => x.MessageId).Distinct().ToList();
var files = fileService.GetMessageFiles(conversationId, messageIds, FileSourceType.User);
return await SelectFiles(files, dialogs);
var userFiles = fileService.GetMessageFiles(conversationId, messageIds, FileSourceType.User);
var botFiles = fileService.GetMessageFiles(conversationId, messageIds, FileSourceType.Bot);
return await SelectFiles(userFiles.Concat(botFiles), dialogs);
}

private async Task<IEnumerable<MessageFileModel>> SelectFiles(IEnumerable<MessageFileModel> files, List<RoleDialogModel> dialogs)
@@ -94,7 +95,7 @@ private async Task<IEnumerable<MessageFileModel>> SelectFiles(IEnumerable<Messag
{
var promptFiles = files.Select((x, idx) =>
{
return $"id: {idx + 1}, file_name: {x.FileName}.{x.FileType}, content_type: {x.ContentType}";
return $"id: {idx + 1}, file_name: {x.FileName}.{x.FileType}, author: {x.FileSource}, content_type: {x.ContentType}";
}).ToList();
var prompt = db.GetAgentTemplate(BuiltInAgentId.UtilityAssistant, "select_attachment_prompt");
prompt = render.Render(prompt, new Dictionary<string, object>
@@ -114,7 +115,8 @@ private async Task<IEnumerable<MessageFileModel>> SelectFiles(IEnumerable<Messag
var completion = CompletionProvider.GetChatCompletion(_services, provider: provider, model: model.Name);
var response = await completion.GetChatCompletions(agent, dialogs);
var content = response?.Content ?? string.Empty;
var fids = JsonSerializer.Deserialize<List<int>>(content) ?? new List<int>();
var selecteds = JsonSerializer.Deserialize<LlmContextOut>(content);
var fids = selecteds?.Selecteds ?? new List<int>();
return files.Where((x, idx) => fids.Contains(idx + 1)).ToList();
}
catch (Exception ex)
@@ -146,6 +148,6 @@ private async Task<string> SendEmailBySMTP(MimeMessage mailMessage)
await smtpClient.ConnectAsync(_emailSettings.SMTPServer, _emailSettings.SMTPPort, SecureSocketOptions.StartTls);
await smtpClient.AuthenticateAsync(_emailSettings.EmailAddress, _emailSettings.Password);
var response = await smtpClient.SendAsync(mailMessage);
return response;
return response ?? "Email sent";
}
}
@@ -0,0 +1,10 @@
using System.Text.Json.Serialization;

namespace BotSharp.Plugin.EmailHandler.LlmContexts;

public class LlmContextOut
{
[JsonPropertyName("selected_ids")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public IEnumerable<int> Selecteds { get; set; }
}
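A minimal sketch of how SelectFiles consumes this contract (the response string is illustrative):

using System.Text.Json;

var content = "{ \"selected_ids\": [1, 3] }";       // hypothetical LLM response
var selecteds = JsonSerializer.Deserialize<LlmContextOut>(content);
var fids = selecteds?.Selecteds ?? new List<int>(); // [1, 3]; an empty "{}" response yields an empty list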
@@ -1,2 +1,2 @@
Please call handle_email_request if user wants to send out an email.
** Please take a look at the conversation, and decide whether user wants to send email with attachments or not.
Please call handle_email_request if user wants to send email.
** Please take a look at the conversation and decide whether user wants to send email with files/attachments/images or not.
@@ -1,18 +1,42 @@
Please take a look at the files in the [FILES] section from the conversation and select the files based on the conversation with user.
Your response must be a list of file ids.
** Please only output the list. Do not prepend or append anything.

For example:
Suppose there are three files:
** Ensure the output is only in JSON format without any additional text.
** If no files are selected, you must output an empty list [].
** You may need to look at the file_name as a reference to find the correct file id.

id: 1, file_name: example_file.png, content_type: image/png
id: 2, file_name: example_file.jpeg, content_type: image/jpeg
id: 3, file_name: example_file.pdf, content_type: application/pdf
Here is the JSON format to use:
{
"selected_ids": a list of id selected from the [FILES] section
}

If user wants the first file and the third file, the ouput should be [1, 3].
If user wants the all the images, the output should be [1, 2].
If user wants the pdf file, the output should be [3].
If user does not want any files, the ouput should be [];
Suppose there are 4 files:

id: 1, file_name: example_file.jpg, author: user, content_type: image/jpeg
id: 2, file_name: example_file.pdf, author: user, content_type: application/pdf
id: 3, file_name: example_file.png, author: bot, content_type: image/png
id: 4, file_name: example_file.png, author: bot, content_type: image/png

=====
Example 1:
USER: I want to send the first file and the third file.
OUTPUT: { "selected_ids": [1, 3] }

Example 2:
USER: Send all the images.
OUTPUT: { "selected_ids": [1, 2, 4] }

Example 3:
USER: Send all the images I uploaded.
OUTPUT: { "selected_ids": [1] }

Example 4:
USER: Send the image and the pdf file.
OUTPUT: { "selected_ids": [1, 2] }

Example 5:
USER: Send the images generated by bot
OUTPUT: { "selected_ids": [3, 4] }
=====

[FILES]
{% for file in file_list -%}
25 changes: 15 additions & 10 deletions src/Plugins/BotSharp.Plugin.FileHandler/Functions/EditImageFn.cs
@@ -28,7 +28,7 @@ public async Task<bool> Execute(RoleDialogModel message)
Init(message);
SetImageOptions();

var image = await SelectConversationImage();
var image = await SelectConversationImage(descrpition);
var response = await GetImageEditGeneration(message, descrpition, image);
message.Content = response;
return true;
@@ -48,17 +48,17 @@ private void SetImageOptions()
state.SetState("image_count", "1");
}

private async Task<MessageFileModel?> SelectConversationImage()
private async Task<MessageFileModel?> SelectConversationImage(string? description)
{
var convService = _services.GetRequiredService<IConversationService>();
var fileService = _services.GetRequiredService<IBotSharpFileService>();
var dialogs = convService.GetDialogHistory(fromBreakpoint: false);
var dialogs = convService.GetDialogHistory();
var messageIds = dialogs.Select(x => x.MessageId).Distinct().ToList();
var images = fileService.GetMessageFiles(_conversationId, messageIds, FileSourceType.User, imageOnly: true);
return await SelectImage(images, dialogs);
var userImages = fileService.GetMessageFiles(_conversationId, messageIds, FileSourceType.User, imageOnly: true);
return await SelectImage(userImages, dialogs.LastOrDefault(), description);
}

private async Task<MessageFileModel?> SelectImage(IEnumerable<MessageFileModel> images, List<RoleDialogModel> dialogs)
private async Task<MessageFileModel?> SelectImage(IEnumerable<MessageFileModel> images, RoleDialogModel message, string? description)
{
if (images.IsNullOrEmpty()) return null;

@@ -91,10 +91,15 @@ private void SetImageOptions()
var provider = llmProviderService.GetProviders().FirstOrDefault(x => x == "openai");
var model = llmProviderService.GetProviderModel(provider: provider, id: "gpt-4");
var completion = CompletionProvider.GetChatCompletion(_services, provider: provider, model: model.Name);
var response = await completion.GetChatCompletions(agent, dialogs);

var text = !string.IsNullOrWhiteSpace(description) ? description : message.Content;
var dialog = RoleDialogModel.From(message, AgentRole.User, text);

var response = await completion.GetChatCompletions(agent, new List<RoleDialogModel> { dialog });
var content = response?.Content ?? string.Empty;
var fid = JsonSerializer.Deserialize<int?>(content);
return images.Where((x, idx) => idx == fid - 1).FirstOrDefault();
var selected = JsonSerializer.Deserialize<LlmContextOut>(content);
var fid = selected?.Selected ?? -1;
return fid > 0 ? images.Where((x, idx) => idx == fid - 1).FirstOrDefault() : null;
}
catch (Exception ex)
{
@@ -126,7 +131,7 @@ private async Task<string> GetImageEditGeneration(RoleDialogModel message, strin
stream.Close();
SaveGeneratedImage(result?.GeneratedImages?.FirstOrDefault());

return !string.IsNullOrWhiteSpace(result?.Content) ? result.Content : "Image edit is completed.";
return $"Image \"{image.FileName}.{image.FileType}\" is successfylly editted.";
}
catch (Exception ex)
{
@@ -0,0 +1,10 @@
using System.Text.Json.Serialization;

namespace BotSharp.Plugin.FileHandler.LlmContexts;

public class LlmContextOut
{
[JsonPropertyName("selected_id")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? Selected { get; set; }
}
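And a sketch of the single-selection fallback used in EditImageFn (response strings are illustrative): a missing selected_id leaves Selected null, so the id defaults to -1 and no image is returned.

using System.Text.Json;

var picked = JsonSerializer.Deserialize<LlmContextOut>("{ \"selected_id\": 2 }");
var fid = picked?.Selected ?? -1;  // 2 -> the second image is chosen
picked = JsonSerializer.Deserialize<LlmContextOut>("{}");
fid = picked?.Selected ?? -1;      // -1 -> SelectImage returns null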
@@ -1,30 +1,38 @@
Please take a look at the images in the [IMAGES] section from the conversation and select ONLY one image based on the conversation with user.
Your response must be an interger number.
** Please ONLY output the interger number. Do not prepend or append anything else.
** If you think user requests multiple images. Please ONLY select the first image and output its id.

** Ensure the output is only in JSON format without any additional text.
** You may need to look at the image_name as a reference to find the correct image id.

Here is the JSON format to use:
{
"selected_id": the id selected from the [IMAGES] section
}


Suppose there are four images:

id: 1, image_name: example_image_a.png
id: 2, image_name: example_image_b.png
id: 3, image_name: example_image_c.png
id: 4, image_name: example_image_d.png

=====
Example 1:
USER: I want to add a dog in the first file.
OUTPUT: 1
OUTPUT: { "selected_id": 1 }

Example 2:
USER: Add a coffee cup in the second image I uploaded.
OUTPUT: 2
OUTPUT: { "selected_id": 2 }

Example 3:
USER: Please remove the left tree in the third and the first images.
OUTPUT: 3
OUTPUT: { "selected_id": 3 }

Example 4:
USER: Add a boat in the images.
OUTPUT: 1
USER: Circle the head of the dog in example_image_b.png.
OUTPUT: { "selected_id": 4 }
=====

