Change to modelType when getting realtime model. #986

Merged: 2 commits, Apr 2, 2025.

This change replaces the boolean `realTime` flag on `ILlmProviderService.GetProviderModel` (and the `RealTime` properties on `LlmConfigOptions` and `LlmModelSetting`) with the `LlmModelType` enum, adding a `Realtime` member, so realtime models are selected by model type.

```diff
@@ -6,7 +6,7 @@ public interface ILlmProviderService
 {
     LlmModelSetting GetSetting(string provider, string model);
     List<string> GetProviders();
-    LlmModelSetting GetProviderModel(string provider, string id, bool? multiModal = null, bool realTime = false, bool imageGenerate = false);
+    LlmModelSetting GetProviderModel(string provider, string id, bool? multiModal = null, LlmModelType? modelType = null, bool imageGenerate = false);
     List<LlmModelSetting> GetProviderModels(string provider);
     List<LlmProviderSetting> GetLlmConfigs(LlmConfigOptions? options = null);
 }
```

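For existing callers the migration is a one-argument swap. A minimal sketch, assuming an `ILlmProviderService` instance resolved from DI and reusing the `"openai"`/`"gpt-4o"` identifiers that appear at the call sites in this PR (usings omitted):

```csharp
public static string? ResolveRealtimeModelName(ILlmProviderService llmProviderService)
{
    // Before this PR, realtime capability was requested with a boolean flag:
    //   llmProviderService.GetProviderModel("openai", "gpt-4o", realTime: true);

    // After this PR, the same request is expressed through the model type filter.
    var setting = llmProviderService.GetProviderModel("openai", "gpt-4o", modelType: LlmModelType.Realtime);
    return setting?.Name;
}
```
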
```diff
@@ -4,6 +4,5 @@ public class LlmConfigOptions
 {
     public LlmModelType? Type { get; set; }
     public bool? MultiModal { get; set; }
-    public bool? RealTime { get; set; }
     public bool? ImageGeneration { get; set; }
 }
```

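Callers that built `LlmConfigOptions` with `RealTime = true` need the same treatment. A hedged sketch of the replacement, assuming the pre-existing `Type` option is what `GetLlmConfigs` filters on (`llmProviderService` is again an injected `ILlmProviderService`):

```csharp
// Old shape (no longer compiles, the RealTime option was removed):
//   var options = new LlmConfigOptions { RealTime = true };

// New shape: express the requirement through the model type instead.
var options = new LlmConfigOptions { Type = LlmModelType.Realtime };
var configs = llmProviderService.GetLlmConfigs(options);
```
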
```diff
@@ -37,11 +37,6 @@ public class LlmModelSetting
     /// </summary>
     public bool MultiModal { get; set; }
 
-    /// <summary>
-    /// If true, allow real-time interaction
-    /// </summary>
-    public bool RealTime { get; set; }
-
     /// <summary>
     /// If true, allow generating images
     /// </summary>
@@ -84,5 +79,6 @@ public enum LlmModelType
     Chat = 2,
     Image = 3,
     Embedding = 4,
-    Audio = 5
+    Audio = 5,
+    Realtime = 6,
 }
```

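With `Realtime` added to the enum, a realtime-capable model is described by its `Type` in the provider settings rather than by the removed `RealTime` flag. A sketch of such an entry; the `Name` value is illustrative and not taken from this PR:

```csharp
var realtimeModel = new LlmModelSetting
{
    Id = "gpt-4o",                    // id used by the GetProviderModel lookups in this PR
    Name = "gpt-4o-realtime-preview", // illustrative model/deployment name
    Type = LlmModelType.Realtime,     // replaces the removed RealTime property
    MultiModal = true
};
```
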
```diff
@@ -1,3 +1,4 @@
+using BotSharp.Abstraction.MLTasks.Settings;
 using BotSharp.Core.Infrastructures;
 
 namespace BotSharp.Core.Realtime.Services;
@@ -76,7 +77,7 @@ private async Task ConnectToModel(WebSocket userWebSocket)
         if (agent.Profiles.Contains("realtime"))
         {
             var llmProviderService = _services.GetRequiredService<ILlmProviderService>();
-            model = llmProviderService.GetProviderModel("openai", "gpt-4o", realTime: true).Name;
+            model = llmProviderService.GetProviderModel("openai", "gpt-4o", modelType: LlmModelType.Realtime).Name;
         }
 
         _completer.SetModelName(model);
```

```diff
@@ -32,6 +32,10 @@ public static object GetCompletion(IServiceProvider services,
         {
             return GetAudioTranscriber(services, provider: provider, model: model);
         }
+        else if (settings.Type == LlmModelType.Realtime)
+        {
+            return GetRealTimeCompletion(services, provider: provider, model: model);
+        }
         else
         {
             return GetChatCompletion(services, provider: provider, model: model, agentConfig: agentConfig);
@@ -172,7 +176,7 @@ public static IRealTimeCompletion GetRealTimeCompletion(IServiceProvider service
         var completions = services.GetServices<IRealTimeCompletion>();
         (provider, model) = GetProviderAndModel(services, provider: provider, model: model, modelId: modelId,
             multiModal: multiModal,
-            realTime: true,
+            modelType: LlmModelType.Realtime,
             agentConfig: agentConfig);
 
         var completer = completions.FirstOrDefault(x => x.Provider == provider);
@@ -191,7 +195,7 @@ private static (string, string) GetProviderAndModel(IServiceProvider services,
        string? model = null,
        string? modelId = null,
        bool? multiModal = null,
-       bool realTime = false,
+       LlmModelType? modelType = null,
        bool imageGenerate = false,
        AgentLlmConfig? agentConfig = null)
     {
@@ -217,7 +221,7 @@ private static (string, string) GetProviderAndModel(IServiceProvider services,
            var llmProviderService = services.GetRequiredService<ILlmProviderService>();
            model = llmProviderService.GetProviderModel(provider, modelIdentity,
                multiModal: multiModal,
-               realTime: realTime,
+               modelType: modelType,
                imageGenerate: imageGenerate)?.Name;
         }
     }
```

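The net effect in `CompletionProvider` is that the realtime path is now chosen from the model's configured `Type`. A usage sketch, assuming `GetCompletion` keeps its existing `provider`/`model` parameters and that the resolved settings for this model are typed `Realtime` (the model name is illustrative):

```csharp
var completion = CompletionProvider.GetCompletion(services,
    provider: "openai",
    model: "gpt-4o-realtime-preview");

if (completion is IRealTimeCompletion realtime)
{
    // Dispatched here because settings.Type == LlmModelType.Realtime,
    // not because of a realTime flag on the call.
}
```
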
```diff
@@ -44,7 +44,7 @@ public List<LlmModelSetting> GetProviderModels(string provider)
             ?.Models ?? new List<LlmModelSetting>();
     }
 
-    public LlmModelSetting GetProviderModel(string provider, string id, bool? multiModal = null, bool realTime = false, bool imageGenerate = false)
+    public LlmModelSetting GetProviderModel(string provider, string id, bool? multiModal = null, LlmModelType? modelType = null, bool imageGenerate = false)
     {
         var models = GetProviderModels(provider)
             .Where(x => x.Id == id);
@@ -54,7 +54,10 @@ public LlmModelSetting GetProviderModel(string provider, string id, bool? multiM
             models = models.Where(x => x.MultiModal == multiModal);
         }
 
-        models = models.Where(x => x.RealTime == realTime);
+        if (modelType.HasValue)
+        {
+            models = models.Where(x => x.Type == modelType.Value);
+        }
 
         models = models.Where(x => x.ImageGeneration == imageGenerate);
 
@@ -130,11 +133,6 @@ public List<LlmProviderSetting> GetLlmConfigs(LlmConfigOptions? options = null)
                 models = models.Where(x => x.ImageGeneration == options.ImageGeneration.Value).ToList();
             }
 
-            if (options.RealTime.HasValue)
-            {
-                models = models.Where(x => x.RealTime == options.RealTime.Value).ToList();
-            }
-
             if (models.IsNullOrEmpty())
             {
                 continue;
```

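The practical gain from the new filter is disambiguation: two settings entries can share an `Id` and differ only in `Type`, and `GetProviderModel` now selects by type instead of a boolean. A standalone sketch mirroring the `Id` and `Type` filters above (entries and names are illustrative; usings for LINQ and collections omitted):

```csharp
var models = new List<LlmModelSetting>
{
    new LlmModelSetting { Id = "gpt-4o", Name = "gpt-4o",                  Type = LlmModelType.Chat },
    new LlmModelSetting { Id = "gpt-4o", Name = "gpt-4o-realtime-preview", Type = LlmModelType.Realtime }
};

LlmModelType? modelType = LlmModelType.Realtime;

var candidates = models.Where(x => x.Id == "gpt-4o");
if (modelType.HasValue)
{
    candidates = candidates.Where(x => x.Type == modelType.Value);
}

// Picks the realtime entry; with modelType == null both entries remain candidates,
// matching the new "no type filter unless one is requested" behavior.
var selected = candidates.FirstOrDefault();
```
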