From f5d58dd93f981fce45d0b61cebe0cf01b6e70c12 Mon Sep 17 00:00:00 2001 From: srebrek Date: Tue, 13 Jan 2026 10:13:41 +0100 Subject: [PATCH 1/6] feat: Implement tool calling to local LLMs --- .../Examples/Chat/ChatExampleToolsSimple.cs | 8 +- .../Chat/ChatExampleToolsSimpleLocalLLM.cs | 27 ++ Examples/Examples/Program.cs | 4 +- .../Services/LLMService/LLMService.cs | 329 +++++++++++++++++- 4 files changed, 360 insertions(+), 8 deletions(-) create mode 100644 Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs diff --git a/Examples/Examples/Chat/ChatExampleToolsSimple.cs b/Examples/Examples/Chat/ChatExampleToolsSimple.cs index d3a6e37..bc246ef 100644 --- a/Examples/Examples/Chat/ChatExampleToolsSimple.cs +++ b/Examples/Examples/Chat/ChatExampleToolsSimple.cs @@ -10,11 +10,13 @@ public async Task Start() { OpenAiExample.Setup(); //We need to provide OpenAi API key - Console.WriteLine("(OpenAi) ChatExample with tools is running!"); + Console.WriteLine("(OpenAi) ChatExample with tools is running!"); + + var model = AIHub.Model(); await AIHub.Chat() .WithModel("gpt-5-nano") - .WithMessage("What time is it right now?") + .WithMessage("What time is it right now? Use tool provided.") .WithTools(new ToolsConfigurationBuilder() .AddTool( name: "get_current_time", @@ -24,4 +26,4 @@ await AIHub.Chat() .Build()) .CompleteAsync(interactive: true); } -} \ No newline at end of file +} diff --git a/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs b/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs new file mode 100644 index 0000000..c947332 --- /dev/null +++ b/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs @@ -0,0 +1,27 @@ +using Examples.Utils; +using MaIN.Core.Hub; +using MaIN.Core.Hub.Utils; + +namespace Examples.Chat; + +public class ChatExampleToolsSimpleLocalLLM : IExample +{ + public async Task Start() + { + Console.WriteLine("Local LLM ChatExample with tools is running!"); + + var model = AIHub.Model(); + + await AIHub.Chat() + .WithModel("gemma3:4b") + .WithMessage("What time is it right now? 
Use tool provided.") + .WithTools(new ToolsConfigurationBuilder() + .AddTool( + name: "get_current_time", + description: "Get the current date and time", + execute: Tools.GetCurrentTime) + .WithToolChoice("auto") + .Build()) + .CompleteAsync(interactive: true); + } +} \ No newline at end of file diff --git a/Examples/Examples/Program.cs b/Examples/Examples/Program.cs index f1b2bdc..627261c 100644 --- a/Examples/Examples/Program.cs +++ b/Examples/Examples/Program.cs @@ -51,6 +51,7 @@ static void RegisterExamples(IServiceCollection services) services.AddTransient(); services.AddTransient(); services.AddTransient(); + services.AddTransient(); services.AddTransient(); services.AddTransient(); services.AddTransient(); @@ -161,6 +162,7 @@ public class ExampleRegistry(IServiceProvider serviceProvider) ("\u25a0 Chat with Files from stream", serviceProvider.GetRequiredService()), ("\u25a0 Chat with Vision", serviceProvider.GetRequiredService()), ("\u25a0 Chat with Tools (simple)", serviceProvider.GetRequiredService()), + ("\u25a0 Chat with Tools (simple Local LLM)", serviceProvider.GetRequiredService()), ("\u25a0 Chat with Image Generation", serviceProvider.GetRequiredService()), ("\u25a0 Chat from Existing", serviceProvider.GetRequiredService()), ("\u25a0 Chat with reasoning", serviceProvider.GetRequiredService()), @@ -197,4 +199,4 @@ public class ExampleRegistry(IServiceProvider serviceProvider) ]; } }; -} \ No newline at end of file +} diff --git a/src/MaIN.Services/Services/LLMService/LLMService.cs b/src/MaIN.Services/Services/LLMService/LLMService.cs index ceb539e..78aade7 100644 --- a/src/MaIN.Services/Services/LLMService/LLMService.cs +++ b/src/MaIN.Services/Services/LLMService/LLMService.cs @@ -1,5 +1,6 @@ using System.Collections.Concurrent; using System.Text; +using System.Text.Json; using LLama; using LLama.Batched; using LLama.Common; @@ -8,6 +9,7 @@ using MaIN.Domain.Configuration; using MaIN.Domain.Entities; using MaIN.Domain.Exceptions.Models; +using MaIN.Domain.Entities.Tools; using MaIN.Domain.Models; using MaIN.Services.Constants; using MaIN.Services.Services.Abstract; @@ -26,6 +28,7 @@ public class LLMService : ILLMService { private const string DEFAULT_MODEL_ENV_PATH = "MaIN_ModelsPath"; private static readonly ConcurrentDictionary _sessionCache = new(); + private const int MaxToolIterations = 5; private readonly MaINSettings options; private readonly INotificationService notificationService; @@ -62,6 +65,11 @@ public LLMService( return await AskMemory(chat, memoryOptions, requestOptions, cancellationToken); } + if (chat.ToolsConfiguration?.Tools != null && chat.ToolsConfiguration.Tools.Any()) + { + return await ProcessWithToolsAsync(chat, requestOptions, cancellationToken); + } + var model = KnownModels.GetModel(chat.Model); var tokens = await ProcessChatRequest(chat, model, lastMsg, requestOptions, cancellationToken); lastMsg.MarkProcessed(); @@ -319,16 +327,26 @@ private static void ProcessTextMessage(Conversation conversation, var template = new LLamaTemplate(llmModel); var finalPrompt = ChatHelper.GetFinalPrompt(lastMsg, model, isNewConversation); + var hasTools = chat.ToolsConfiguration?.Tools != null && chat.ToolsConfiguration.Tools.Any(); + if (isNewConversation) { - foreach (var messageToProcess in chat.Messages - .Where(x => x.Properties.ContainsKey(Message.UnprocessedMessageProperty)) - .SkipLast(1)) + var messagesToProcess = hasTools + ? 
chat.Messages.SkipLast(1) + : chat.Messages.Where(x => x.Properties.ContainsKey(Message.UnprocessedMessageProperty)).SkipLast(1); + + foreach (var messageToProcess in messagesToProcess) { template.Add(messageToProcess.Role, messageToProcess.Content); } } + if (hasTools) + { + var toolsPrompt = FormatToolsForPrompt(chat.ToolsConfiguration!); + finalPrompt = $"{toolsPrompt}\n\n{finalPrompt}"; + } + template.Add(ServiceConstants.Roles.User, finalPrompt); template.AddAssistant = true; @@ -340,6 +358,151 @@ private static void ProcessTextMessage(Conversation conversation, conversation.Prompt(tokens); } + private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig) + { + var sb = new StringBuilder(); + sb.AppendLine("## TOOLS"); + sb.AppendLine("You can call these tools if needed. To call a tool, respond with a JSON object inside tags."); + + foreach (var tool in toolsConfig.Tools) + { + // TODO: refactor to not allow null Function + sb.AppendLine($"- {tool.Function.Name}: {tool.Function.Description}"); + sb.AppendLine($" Parameters: {JsonSerializer.Serialize(tool.Function.Parameters)}"); + } + + sb.AppendLine("\n## RESPONSE FORMAT"); + sb.AppendLine("1. For normal conversation, just respond with plain text."); + sb.AppendLine("2. For tool calls, use this format:"); + sb.AppendLine(""); + sb.AppendLine("{\"tool_calls\": [{\"id\": \"abc\", \"type\": \"function\", \"function\": {\"name\": \"fn\", \"arguments\": \"{\\\"p\\\":\\\"v\\\"}\"}}]}"); + sb.AppendLine(""); + + return sb.ToString(); + } + + private List? ParseToolCalls(string response) + { + if (string.IsNullOrWhiteSpace(response)) return null; + + try + { + string jsonContent = ExtractJsonContent(response); + if (string.IsNullOrEmpty(jsonContent)) return null; + + using var doc = JsonDocument.Parse(jsonContent); + var root = doc.RootElement; + + // OpenAI standard { "tool_calls": [...] } + if (root.ValueKind == JsonValueKind.Object && root.TryGetProperty("tool_calls", out var toolCallsProp)) + { + var calls = toolCallsProp.Deserialize>(new JsonSerializerOptions { PropertyNameCaseInsensitive = true }); + return NormalizeToolCalls(calls); + } + + // TODO: test if those formats are used by any model + // model returned table [ { ... }, { ... } ] + if (root.ValueKind == JsonValueKind.Array) + { + var calls = root.Deserialize>(new JsonSerializerOptions { PropertyNameCaseInsensitive = true }); + return NormalizeToolCalls(calls); + } + + // flat format { "tool_name": "...", "arguments": {...} } + if (root.ValueKind == JsonValueKind.Object && (root.TryGetProperty("tool_name", out _) || root.TryGetProperty("function", out _))) + { + var singleCall = ParseSingleLegacyCall(root); + if (singleCall != null) return new List { singleCall }; + } + } + catch (Exception) + { + // No tool calls found + } + + return null; + } + + private string ExtractJsonContent(string text) + { + text = text.Trim(); + + int firstBrace = text.IndexOf('{'); + int firstBracket = text.IndexOf('['); + int startIndex = (firstBrace >= 0 && firstBracket >= 0) ? Math.Min(firstBrace, firstBracket) : Math.Max(firstBrace, firstBracket); + + int lastBrace = text.LastIndexOf('}'); + int lastBracket = text.LastIndexOf(']'); + int endIndex = Math.Max(lastBrace, lastBracket); + + if (startIndex >= 0 && endIndex > startIndex) + { + return text.Substring(startIndex, endIndex - startIndex + 1); + } + + return text; + } + + private ToolCall? 
ParseSingleLegacyCall(JsonElement root) + { + string name = string.Empty; + if (root.TryGetProperty("tool_name", out var tn)) name = tn.GetString(); + else if (root.TryGetProperty("function", out var fn) && fn.ValueKind == JsonValueKind.String) name = fn.GetString(); + else if (root.TryGetProperty("function", out var fnObj) && fnObj.TryGetProperty("name", out var n)) name = n.GetString(); + + if (string.IsNullOrEmpty(name)) return null; + + string? args = "{}"; + if (root.TryGetProperty("arguments", out var argProp)) + { + args = argProp.ValueKind == JsonValueKind.String ? argProp.GetString() : argProp.GetRawText(); + } + else if (root.TryGetProperty("parameters", out var paramProp)) + { + args = paramProp.GetRawText(); + } + + return new ToolCall + { + Id = Guid.NewGuid().ToString().Substring(0, 8), + Type = "function", + Function = new FunctionCall { Name = name, Arguments = args! } + }; + } + + private List NormalizeToolCalls(List? calls) + { + if (calls == null) return new List(); + foreach (var call in calls) + { + if (string.IsNullOrEmpty(call.Id)) call.Id = Guid.NewGuid().ToString().Substring(0, 8); + if (string.IsNullOrEmpty(call.Type)) call.Type = "function"; + if (call.Function == null) call.Function = new FunctionCall(); + } + return calls; + } + + public class ToolCall + { + [System.Text.Json.Serialization.JsonPropertyName("id")] + public string Id { get; set; } = Guid.NewGuid().ToString(); + + [System.Text.Json.Serialization.JsonPropertyName("type")] + public string Type { get; set; } = "function"; + + [System.Text.Json.Serialization.JsonPropertyName("function")] + public FunctionCall Function { get; set; } = new(); + } + + public class FunctionCall + { + [System.Text.Json.Serialization.JsonPropertyName("name")] + public string Name { get; set; } = string.Empty; + + [System.Text.Json.Serialization.JsonPropertyName("arguments")] + public string Arguments { get; set; } = "{}"; + } + private async Task<(List Tokens, bool IsComplete, bool HasFailed)> ProcessTokens( Chat chat, Conversation conversation, @@ -477,4 +640,162 @@ await notificationService.DispatchNotification( NotificationMessageBuilder.CreateChatCompletion(chatId, token, isComplete), ServiceConstants.Notifications.ReceiveMessageUpdate); } -} \ No newline at end of file + + private async Task ProcessWithToolsAsync( + Chat chat, + ChatRequestOptions requestOptions, + CancellationToken cancellationToken) + { + var model = KnownModels.GetModel(chat.Model); + var tokens = new List(); + var fullResponseBuilder = new StringBuilder(); + var iterations = 0; + + while (iterations < MaxToolIterations) + { + if (iterations > 0 && requestOptions.InteractiveUpdates && fullResponseBuilder.Length > 0) + { + var spaceToken = new LLMTokenValue { Text = " ", Type = TokenType.Message }; + tokens.Add(spaceToken); + + requestOptions.TokenCallback?.Invoke(spaceToken); + + await notificationService.DispatchNotification( + NotificationMessageBuilder.CreateChatCompletion(chat.Id, spaceToken, false), + ServiceConstants.Notifications.ReceiveMessageUpdate); + } + + var lastMsg = chat.Messages.Last(); + var iterationTokens = await ProcessChatRequest(chat, model, lastMsg, requestOptions, cancellationToken); + + var responseText = string.Concat(iterationTokens.Select(x => x.Text)); + + if (fullResponseBuilder.Length > 0) + { + fullResponseBuilder.Append(" "); + } + fullResponseBuilder.Append(responseText); + tokens.AddRange(iterationTokens); + + var toolCalls = ParseToolCalls(responseText); + + if (toolCalls == null || !toolCalls.Any()) + { + break; 
+ } + + var assistantMessage = new Message + { + Content = responseText, + Role = AuthorRole.Assistant.ToString(), + Type = MessageType.LocalLLM, + Tool = true + }; + assistantMessage.Properties[ToolCallsProperty] = JsonSerializer.Serialize(toolCalls); + chat.Messages.Add(assistantMessage.MarkProcessed()); + + foreach (var toolCall in toolCalls) + { + if (chat.Properties.CheckProperty(ServiceConstants.Properties.AgentIdProperty)) + { + await notificationService.DispatchNotification( + NotificationMessageBuilder.ProcessingTools( + chat.Properties[ServiceConstants.Properties.AgentIdProperty], + string.Empty, + toolCall.Function.Name), + ServiceConstants.Notifications.ReceiveAgentUpdate); + } + + var executor = chat.ToolsConfiguration?.GetExecutor(toolCall.Function.Name); + + if (executor == null) + { + var errorMessage = $"No executor found for tool: {toolCall.Function.Name}"; + throw new InvalidOperationException(errorMessage); + } + + + try + { + requestOptions.ToolCallback?.Invoke(new ToolInvocation + { + ToolName = toolCall.Function.Name, + Arguments = toolCall.Function.Arguments, + Done = false + }); + + var toolResult = await executor(toolCall.Function.Arguments); + + requestOptions.ToolCallback?.Invoke(new ToolInvocation + { + ToolName = toolCall.Function.Name, + Arguments = toolCall.Function.Arguments, + Done = true + }); + + var toolMessage = new Message + { + Content = $"Tool result for {toolCall.Function.Name}: {toolResult}", + Role = ServiceConstants.Roles.Tool, + Type = MessageType.LocalLLM, + Tool = true + }; + toolMessage.Properties[ToolCallIdProperty] = toolCall.Id; + toolMessage.Properties[ToolNameProperty] = toolCall.Function.Name; + chat.Messages.Add(toolMessage.MarkProcessed()); + } + catch (Exception ex) + { + var errorResult = JsonSerializer.Serialize(new { error = ex.Message }); + var toolMessage = new Message + { + Content = $"Tool error for {toolCall.Function.Name}: {errorResult}", + Role = ServiceConstants.Roles.Tool, + Type = MessageType.LocalLLM, + Tool = true + }; + toolMessage.Properties[ToolCallIdProperty] = toolCall.Id; + toolMessage.Properties[ToolNameProperty] = toolCall.Function.Name; + chat.Messages.Add(toolMessage.MarkProcessed()); + } + } + + iterations++; + } + + if (iterations >= MaxToolIterations) + { + } + + var finalResponse = fullResponseBuilder.ToString(); + var finalToken = new LLMTokenValue { Text = finalResponse, Type = TokenType.FullAnswer }; + tokens.Add(finalToken); + + if (requestOptions.InteractiveUpdates) + { + await notificationService.DispatchNotification( + NotificationMessageBuilder.CreateChatCompletion(chat.Id, finalToken, true), + ServiceConstants.Notifications.ReceiveMessageUpdate); + } + + chat.Messages.Last().MarkProcessed(); + + return new ChatResult + { + Done = true, + CreatedAt = DateTime.Now, + Model = chat.Model, + Message = new Message + { + Content = finalResponse, + Tokens = tokens, + Role = AuthorRole.Assistant.ToString(), + Type = MessageType.LocalLLM, + }.MarkProcessed() + }; + } + + private const string ToolCallsProperty = "ToolCalls"; + private const string ToolCallIdProperty = "ToolCallId"; + private const string ToolNameProperty = "ToolName"; +} From 8d6851da02a38acbe66771daea45bcc778401003 Mon Sep 17 00:00:00 2001 From: srebrek Date: Wed, 21 Jan 2026 17:19:50 +0100 Subject: [PATCH 2/6] Fix tool calling logic in multiple iterations loop - Prevent tool definition duplication in the system prompt during subsequent loop iterations. - Refine system prompt to enforce format more effectively. 
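For illustration only (not part of the diff): with the example get_current_time tool, the refined prompt asks the local model to reply with a single JSON block shaped like the sample below, and the loop detects it with System.Text.Json much as ParseToolCalls does. The reply literal here is a made-up sample, not captured model output.

    using System;
    using System.Text.Json;

    // Hypothetical raw reply from the local model, in the format the
    // refined system prompt demands (one tool_calls block, no surrounding text).
    var reply = "{\"tool_calls\": [{\"id\": \"call_1\", \"type\": \"function\", "
              + "\"function\": {\"name\": \"get_current_time\", \"arguments\": \"{}\"}}]}";

    using var doc = JsonDocument.Parse(reply);
    if (doc.RootElement.TryGetProperty("tool_calls", out var calls))
    {
        foreach (var call in calls.EnumerateArray())
        {
            var fn = call.GetProperty("function");
            var name = fn.GetProperty("name").GetString();
            var args = fn.GetProperty("arguments").GetString();
            // The service would dispatch to the executor registered under this tool name.
            Console.WriteLine($"{name} <- {args}");
        }
    }

If no valid tool_calls JSON is found in the reply, it is treated as the model's final plain-text answer instead.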
--- .../Examples/Chat/ChatExampleToolsSimple.cs | 2 +- .../Chat/ChatExampleToolsSimpleLocalLLM.cs | 2 +- .../Services/LLMService/LLMService.cs | 83 +++++++++++-------- 3 files changed, 49 insertions(+), 38 deletions(-) diff --git a/Examples/Examples/Chat/ChatExampleToolsSimple.cs b/Examples/Examples/Chat/ChatExampleToolsSimple.cs index bc246ef..4c90a93 100644 --- a/Examples/Examples/Chat/ChatExampleToolsSimple.cs +++ b/Examples/Examples/Chat/ChatExampleToolsSimple.cs @@ -16,7 +16,7 @@ public async Task Start() await AIHub.Chat() .WithModel("gpt-5-nano") - .WithMessage("What time is it right now? Use tool provided.") + .WithMessage("What time is it right now?") .WithTools(new ToolsConfigurationBuilder() .AddTool( name: "get_current_time", diff --git a/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs b/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs index c947332..03ff7f3 100644 --- a/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs +++ b/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs @@ -14,7 +14,7 @@ public async Task Start() await AIHub.Chat() .WithModel("gemma3:4b") - .WithMessage("What time is it right now? Use tool provided.") + .WithMessage("What time is it right now?") .WithTools(new ToolsConfigurationBuilder() .AddTool( name: "get_current_time", diff --git a/src/MaIN.Services/Services/LLMService/LLMService.cs b/src/MaIN.Services/Services/LLMService/LLMService.cs index 78aade7..44eb00a 100644 --- a/src/MaIN.Services/Services/LLMService/LLMService.cs +++ b/src/MaIN.Services/Services/LLMService/LLMService.cs @@ -36,6 +36,11 @@ public class LLMService : ILLMService private readonly IMemoryFactory memoryFactory; private readonly string modelsPath; + private readonly JsonSerializerOptions _jsonToolOptions = new() + { + PropertyNameCaseInsensitive = true, + }; + public LLMService( MaINSettings options, INotificationService notificationService, @@ -341,9 +346,10 @@ private static void ProcessTextMessage(Conversation conversation, } } - if (hasTools) + if (hasTools && isNewConversation) { var toolsPrompt = FormatToolsForPrompt(chat.ToolsConfiguration!); + // Dodaj to jako wiadomość systemową lub na początku pierwszego promptu użytkownika finalPrompt = $"{toolsPrompt}\n\n{finalPrompt}"; } @@ -371,11 +377,14 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig) sb.AppendLine($" Parameters: {JsonSerializer.Serialize(tool.Function.Parameters)}"); } - sb.AppendLine("\n## RESPONSE FORMAT"); + sb.AppendLine("\n## RESPONSE FORMAT (YOU HAVE TO CHOOSE ONE FORMAT AND CANNOT MIX THEM)##"); sb.AppendLine("1. For normal conversation, just respond with plain text."); - sb.AppendLine("2. For tool calls, use this format:"); + sb.AppendLine("2. For tool calls, use this format. " + + "You cannot respond with plain text before or after format. " + + "If you want to call multiple functions, you have to combine them into one array." 
+ + "Your response MUST contain only one tool call block:"); sb.AppendLine(""); - sb.AppendLine("{\"tool_calls\": [{\"id\": \"abc\", \"type\": \"function\", \"function\": {\"name\": \"fn\", \"arguments\": \"{\\\"p\\\":\\\"v\\\"}\"}}]}"); + sb.AppendLine("{\"tool_calls\": [{\"id\": \"call_1\", \"type\": \"function\", \"function\": {\"name\": \"tool_name\", \"arguments\": \"{\\\"param\\\":\\\"value\\\"}\"}},{\"id\": \"call_2\", \"type\": \"function\", \"function\": {\"name\": \"tool2_name\", \"arguments\": \"{\\\"param1\\\":\\\"value1\\\",\\\"param2\\\":\\\"value2\\\"}\"}}]}"); sb.AppendLine(""); return sb.ToString(); @@ -385,9 +394,9 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig) { if (string.IsNullOrWhiteSpace(response)) return null; + string jsonContent = ExtractJsonContent(response); try { - string jsonContent = ExtractJsonContent(response); if (string.IsNullOrEmpty(jsonContent)) return null; using var doc = JsonDocument.Parse(jsonContent); @@ -396,7 +405,7 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig) // OpenAI standard { "tool_calls": [...] } if (root.ValueKind == JsonValueKind.Object && root.TryGetProperty("tool_calls", out var toolCallsProp)) { - var calls = toolCallsProp.Deserialize>(new JsonSerializerOptions { PropertyNameCaseInsensitive = true }); + var calls = toolCallsProp.Deserialize>(_jsonToolOptions); return NormalizeToolCalls(calls); } @@ -417,7 +426,7 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig) } catch (Exception) { - // No tool calls found + // No tool calls found no need to throw nor log } return null; @@ -429,14 +438,14 @@ private string ExtractJsonContent(string text) int firstBrace = text.IndexOf('{'); int firstBracket = text.IndexOf('['); - int startIndex = (firstBrace >= 0 && firstBracket >= 0) ? Math.Min(firstBrace, firstBracket) : Math.Max(firstBrace, firstBracket); + int startIndex = (firstBrace >= 0 && firstBracket >= 0) ? Math.Min(firstBrace, firstBracket) : Math.Max(firstBrace, firstBracket); int lastBrace = text.LastIndexOf('}'); int lastBracket = text.LastIndexOf(']'); - int endIndex = Math.Max(lastBrace, lastBracket); + int endIndex = Math.Max(lastBrace, lastBracket); - if (startIndex >= 0 && endIndex > startIndex) - { + if (startIndex >= 0 && endIndex > startIndex) + { return text.Substring(startIndex, endIndex - startIndex + 1); } @@ -645,34 +654,35 @@ private async Task ProcessWithToolsAsync( Chat chat, ChatRequestOptions requestOptions, CancellationToken cancellationToken) - { + { + NativeLogConfig.llama_log_set((level, message) => { + if (level == LLamaLogLevel.Error) + { + Console.Error.Write(message); + } + }); // Remove llama native logging + var model = KnownModels.GetModel(chat.Model); var tokens = new List(); var fullResponseBuilder = new StringBuilder(); var iterations = 0; while (iterations < MaxToolIterations) - { - if (iterations > 0 && requestOptions.InteractiveUpdates && fullResponseBuilder.Length > 0) - { - var spaceToken = new LLMTokenValue { Text = " ", Type = TokenType.Message }; - tokens.Add(spaceToken); - - requestOptions.TokenCallback?.Invoke(spaceToken); - - await notificationService.DispatchNotification( - NotificationMessageBuilder.CreateChatCompletion(chat.Id, spaceToken, false), - ServiceConstants.Notifications.ReceiveMessageUpdate); - } - + { var lastMsg = chat.Messages.Last(); + await SendNotification(chat.Id, new LLMTokenValue + { + Type = TokenType.FullAnswer, + Text = $"Processing with tools... 
iteration {iterations + 1}\n\n" + }, false); + requestOptions.InteractiveUpdates = false; var iterationTokens = await ProcessChatRequest(chat, model, lastMsg, requestOptions, cancellationToken); var responseText = string.Concat(iterationTokens.Select(x => x.Text)); if (fullResponseBuilder.Length > 0) { - fullResponseBuilder.Append(" "); + fullResponseBuilder.Append('\n'); } fullResponseBuilder.Append(responseText); tokens.AddRange(iterationTokens); @@ -681,6 +691,12 @@ await notificationService.DispatchNotification( if (toolCalls == null || !toolCalls.Any()) { + requestOptions.InteractiveUpdates = true; + await SendNotification(chat.Id, new LLMTokenValue + { + Type = TokenType.FullAnswer, + Text = responseText + }, false); break; } @@ -765,19 +781,14 @@ await notificationService.DispatchNotification( if (iterations >= MaxToolIterations) { + await SendNotification(chat.Id, new LLMTokenValue + { + Type = TokenType.FullAnswer, + Text = "Maximum tool invocation iterations reached. Ending the conversation." + }, false); } var finalResponse = fullResponseBuilder.ToString(); - var finalToken = new LLMTokenValue { Text = finalResponse, Type = TokenType.FullAnswer }; - tokens.Add(finalToken); - - if (requestOptions.InteractiveUpdates) - { - await notificationService.DispatchNotification( - NotificationMessageBuilder.CreateChatCompletion(chat.Id, finalToken, true), - ServiceConstants.Notifications.ReceiveMessageUpdate); - } - chat.Messages.Last().MarkProcessed(); return new ChatResult From 580d53680a0ff00b9d29853c84480860cd072de2 Mon Sep 17 00:00:00 2001 From: srebrek Date: Thu, 22 Jan 2026 14:46:29 +0100 Subject: [PATCH 3/6] fix(LLMService): minor fixes and code cleanup - Add invalid chat responses during ToolCalling to the chat, so it sees them in the next prompts. - Mark first message as processed after processing. - Removeunnecessary parsing methods. - Add messages about invalid formating to the promt so model sees what went wrong. - Format and clean the code. --- .../Services/LLMService/LLMService.cs | 287 +++++++++--------- 1 file changed, 141 insertions(+), 146 deletions(-) diff --git a/src/MaIN.Services/Services/LLMService/LLMService.cs b/src/MaIN.Services/Services/LLMService/LLMService.cs index 44eb00a..8a90a35 100644 --- a/src/MaIN.Services/Services/LLMService/LLMService.cs +++ b/src/MaIN.Services/Services/LLMService/LLMService.cs @@ -1,6 +1,7 @@ -using System.Collections.Concurrent; +using System.Collections.Concurrent; using System.Text; using System.Text.Json; +using System.Text.Json.Serialization; using LLama; using LLama.Batched; using LLama.Common; @@ -349,7 +350,6 @@ private static void ProcessTextMessage(Conversation conversation, if (hasTools && isNewConversation) { var toolsPrompt = FormatToolsForPrompt(chat.ToolsConfiguration!); - // Dodaj to jako wiadomość systemową lub na początku pierwszego promptu użytkownika finalPrompt = $"{toolsPrompt}\n\n{finalPrompt}"; } @@ -366,152 +366,130 @@ private static void ProcessTextMessage(Conversation conversation, private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig) { - var sb = new StringBuilder(); - sb.AppendLine("## TOOLS"); - sb.AppendLine("You can call these tools if needed. 
To call a tool, respond with a JSON object inside tags."); - + var toolsList = new StringBuilder(); foreach (var tool in toolsConfig.Tools) { - // TODO: refactor to not allow null Function - sb.AppendLine($"- {tool.Function.Name}: {tool.Function.Description}"); - sb.AppendLine($" Parameters: {JsonSerializer.Serialize(tool.Function.Parameters)}"); + if (tool.Function == null) + continue; + + toolsList.AppendLine($"- {tool.Function.Name}: {tool.Function.Description}"); + toolsList.AppendLine($" Parameters: {JsonSerializer.Serialize(tool.Function.Parameters)}"); } - sb.AppendLine("\n## RESPONSE FORMAT (YOU HAVE TO CHOOSE ONE FORMAT AND CANNOT MIX THEM)##"); - sb.AppendLine("1. For normal conversation, just respond with plain text."); - sb.AppendLine("2. For tool calls, use this format. " + - "You cannot respond with plain text before or after format. " + - "If you want to call multiple functions, you have to combine them into one array." + - "Your response MUST contain only one tool call block:"); - sb.AppendLine(""); - sb.AppendLine("{\"tool_calls\": [{\"id\": \"call_1\", \"type\": \"function\", \"function\": {\"name\": \"tool_name\", \"arguments\": \"{\\\"param\\\":\\\"value\\\"}\"}},{\"id\": \"call_2\", \"type\": \"function\", \"function\": {\"name\": \"tool2_name\", \"arguments\": \"{\\\"param1\\\":\\\"value1\\\",\\\"param2\\\":\\\"value2\\\"}\"}}]}"); - sb.AppendLine(""); - - return sb.ToString(); + return $$$""" + ## TOOLS + You can call these tools if needed. To call a tool, respond with a JSON object inside tags. + + {{{toolsList}}} + + ## RESPONSE FORMAT (YOU HAVE TO CHOOSE ONE FORMAT AND CANNOT MIX THEM)## + 1. For normal conversation, just respond with plain text. + 2. For tool calls, use this format. You cannot respond with plain text before or after format. If you want to call multiple functions, you have to combine them into one array. Your response MUST contain only one tool call block: + + {"tool_calls": [{"id": "call_1", "type": "function", "function": {"name": "tool_name", "arguments": "{\"param\":\"value\"}"}},{"id": "call_2", "type": "function", "function": {"name": "tool2_name", "arguments": "{\"param1\":\"value1\",\"param2\":\"value2\"}"}}]} + + """; } - private List? ParseToolCalls(string response) + private ToolParseResult ParseToolCalls(string response) { - if (string.IsNullOrWhiteSpace(response)) return null; + if (string.IsNullOrWhiteSpace(response)) + return ToolParseResult.Failure("Response is empty."); - string jsonContent = ExtractJsonContent(response); - try - { - if (string.IsNullOrEmpty(jsonContent)) return null; + var jsonContent = ExtractJsonContent(response); - using var doc = JsonDocument.Parse(jsonContent); - var root = doc.RootElement; + if (string.IsNullOrEmpty(jsonContent)) + return ToolParseResult.ToolNotFound(); - // OpenAI standard { "tool_calls": [...] } - if (root.ValueKind == JsonValueKind.Object && root.TryGetProperty("tool_calls", out var toolCallsProp)) - { - var calls = toolCallsProp.Deserialize>(_jsonToolOptions); - return NormalizeToolCalls(calls); - } + try + { + var wrapper = JsonSerializer.Deserialize(jsonContent, _jsonToolOptions); - // TODO: test if those formats are used by any model - // model returned table [ { ... }, { ... 
} ] - if (root.ValueKind == JsonValueKind.Array) - { - var calls = root.Deserialize>(new JsonSerializerOptions { PropertyNameCaseInsensitive = true }); - return NormalizeToolCalls(calls); - } + if (wrapper?.ToolCalls != null && wrapper.ToolCalls.Any()) + return ToolParseResult.Success(NormalizeToolCalls(wrapper.ToolCalls)); - // flat format { "tool_name": "...", "arguments": {...} } - if (root.ValueKind == JsonValueKind.Object && (root.TryGetProperty("tool_name", out _) || root.TryGetProperty("function", out _))) - { - var singleCall = ParseSingleLegacyCall(root); - if (singleCall != null) return new List { singleCall }; - } + return ToolParseResult.Failure("JSON parsed correctly but 'tool_calls' property is missing or empty."); } - catch (Exception) + catch (JsonException ex) { - // No tool calls found no need to throw nor log + return ToolParseResult.Failure($"Invalid JSON format: {ex.Message}"); } - - return null; } - private string ExtractJsonContent(string text) + private static string? ExtractJsonContent(string text) { text = text.Trim(); - int firstBrace = text.IndexOf('{'); - int firstBracket = text.IndexOf('['); - int startIndex = (firstBrace >= 0 && firstBracket >= 0) ? Math.Min(firstBrace, firstBracket) : Math.Max(firstBrace, firstBracket); + var firstBrace = text.IndexOf('{'); + var firstBracket = text.IndexOf('['); + var startIndex = (firstBrace >= 0 && firstBracket >= 0) ? Math.Min(firstBrace, firstBracket) : Math.Max(firstBrace, firstBracket); - int lastBrace = text.LastIndexOf('}'); - int lastBracket = text.LastIndexOf(']'); - int endIndex = Math.Max(lastBrace, lastBracket); + var lastBrace = text.LastIndexOf('}'); + var lastBracket = text.LastIndexOf(']'); + var endIndex = Math.Max(lastBrace, lastBracket); - if (startIndex >= 0 && endIndex > startIndex) - { + if (startIndex >= 0 && endIndex > startIndex) return text.Substring(startIndex, endIndex - startIndex + 1); - } - return text; + return null; } - private ToolCall? ParseSingleLegacyCall(JsonElement root) + private static List NormalizeToolCalls(List? calls) { - string name = string.Empty; - if (root.TryGetProperty("tool_name", out var tn)) name = tn.GetString(); - else if (root.TryGetProperty("function", out var fn) && fn.ValueKind == JsonValueKind.String) name = fn.GetString(); - else if (root.TryGetProperty("function", out var fnObj) && fnObj.TryGetProperty("name", out var n)) name = n.GetString(); - - if (string.IsNullOrEmpty(name)) return null; + if (calls == null) + return []; - string? args = "{}"; - if (root.TryGetProperty("arguments", out var argProp)) - { - args = argProp.ValueKind == JsonValueKind.String ? argProp.GetString() : argProp.GetRawText(); - } - else if (root.TryGetProperty("parameters", out var paramProp)) + foreach (var call in calls) { - args = paramProp.GetRawText(); - } + if (string.IsNullOrEmpty(call.Id)) + call.Id = Guid.NewGuid().ToString()[..8]; - return new ToolCall - { - Id = Guid.NewGuid().ToString().Substring(0, 8), - Type = "function", - Function = new FunctionCall { Name = name, Arguments = args! } - }; - } + if (string.IsNullOrEmpty(call.Type)) + call.Type = "function"; - private List NormalizeToolCalls(List? 
calls) - { - if (calls == null) return new List(); - foreach (var call in calls) - { - if (string.IsNullOrEmpty(call.Id)) call.Id = Guid.NewGuid().ToString().Substring(0, 8); - if (string.IsNullOrEmpty(call.Type)) call.Type = "function"; - if (call.Function == null) call.Function = new FunctionCall(); + call.Function ??= new FunctionCall(); } return calls; } public class ToolCall { - [System.Text.Json.Serialization.JsonPropertyName("id")] + [JsonPropertyName("id")] public string Id { get; set; } = Guid.NewGuid().ToString(); - [System.Text.Json.Serialization.JsonPropertyName("type")] + [JsonPropertyName("type")] public string Type { get; set; } = "function"; - [System.Text.Json.Serialization.JsonPropertyName("function")] + [JsonPropertyName("function")] public FunctionCall Function { get; set; } = new(); } public class FunctionCall { - [System.Text.Json.Serialization.JsonPropertyName("name")] + [JsonPropertyName("name")] public string Name { get; set; } = string.Empty; - [System.Text.Json.Serialization.JsonPropertyName("arguments")] + [JsonPropertyName("arguments")] public string Arguments { get; set; } = "{}"; } + private class ToolResponseWrapper + { + [JsonPropertyName("tool_calls")] + public List? ToolCalls { get; set; } + } + + private record ToolParseResult + { + public bool IsSuccess { get; init; } + public List? ToolCalls { get; init; } + public string? ErrorMessage { get; init; } + + public static ToolParseResult Success(List calls) => new() { IsSuccess = true, ToolCalls = calls }; + public static ToolParseResult Failure(string error) => new() { IsSuccess = false, ErrorMessage = error }; + public static ToolParseResult ToolNotFound() => new() { IsSuccess = false }; + } + private async Task<(List Tokens, bool IsComplete, bool HasFailed)> ProcessTokens( Chat chat, Conversation conversation, @@ -663,9 +641,9 @@ private async Task ProcessWithToolsAsync( }); // Remove llama native logging var model = KnownModels.GetModel(chat.Model); - var tokens = new List(); - var fullResponseBuilder = new StringBuilder(); var iterations = 0; + var lastResponseTokens = new List(); + var lastResponse = string.Empty; while (iterations < MaxToolIterations) { @@ -676,39 +654,50 @@ private async Task ProcessWithToolsAsync( Text = $"Processing with tools... 
iteration {iterations + 1}\n\n" }, false); requestOptions.InteractiveUpdates = false; - var iterationTokens = await ProcessChatRequest(chat, model, lastMsg, requestOptions, cancellationToken); - - var responseText = string.Concat(iterationTokens.Select(x => x.Text)); - - if (fullResponseBuilder.Length > 0) + lastResponseTokens = await ProcessChatRequest(chat, model, lastMsg, requestOptions, cancellationToken); + lastMsg.MarkProcessed(); + lastResponse = string.Concat(lastResponseTokens.Select(x => x.Text)); + var responseMessage = new Message { - fullResponseBuilder.Append('\n'); - } - fullResponseBuilder.Append(responseText); - tokens.AddRange(iterationTokens); + Content = lastResponse, + Role = AuthorRole.Assistant.ToString(), + Type = MessageType.LocalLLM, + }; + chat.Messages.Add(responseMessage.MarkProcessed()); - var toolCalls = ParseToolCalls(responseText); + var parseResult = ParseToolCalls(lastResponse); - if (toolCalls == null || !toolCalls.Any()) + // Tool not found or invalid JSON + if (!parseResult.IsSuccess) { - requestOptions.InteractiveUpdates = true; - await SendNotification(chat.Id, new LLMTokenValue + if (parseResult.ErrorMessage is not null) // Invalid JSON, self correction { - Type = TokenType.FullAnswer, - Text = responseText - }, false); - break; + var errorMsg = new Message + { + Content = $"System Error: The tool call JSON was invalid. {parseResult.ErrorMessage}. Please correct the JSON format.", + Role = ServiceConstants.Roles.Tool, + Type = MessageType.LocalLLM, + Tool = true + }; + chat.Messages.Add(errorMsg.MarkProcessed()); + + iterations++; + continue; + } + else // Final response + { + requestOptions.InteractiveUpdates = true; + await SendNotification(chat.Id, new LLMTokenValue + { + Type = TokenType.FullAnswer, + Text = lastResponse + }, false); + break; + } } - var assistantMessage = new Message - { - Content = responseText, - Role = AuthorRole.Assistant.ToString(), - Type = MessageType.LocalLLM, - Tool = true - }; - assistantMessage.Properties[ToolCallsProperty] = JsonSerializer.Serialize(toolCalls); - chat.Messages.Add(assistantMessage.MarkProcessed()); + var toolCalls = parseResult.ToolCalls!; + responseMessage.Properties[ToolCallsProperty] = JsonSerializer.Serialize(toolCalls); foreach (var toolCall in toolCalls) { @@ -733,21 +722,27 @@ await notificationService.DispatchNotification( try { - requestOptions.ToolCallback?.Invoke(new ToolInvocation + if (requestOptions.ToolCallback is not null) { - ToolName = toolCall.Function.Name, - Arguments = toolCall.Function.Arguments, - Done = false - }); + await requestOptions.ToolCallback.Invoke(new ToolInvocation + { + ToolName = toolCall.Function.Name, + Arguments = toolCall.Function.Arguments, + Done = false + }); + } var toolResult = await executor(toolCall.Function.Arguments); - requestOptions.ToolCallback?.Invoke(new ToolInvocation + if (requestOptions.ToolCallback is not null) { - ToolName = toolCall.Function.Name, - Arguments = toolCall.Function.Arguments, - Done = true - }); + await requestOptions.ToolCallback.Invoke(new ToolInvocation + { + ToolName = toolCall.Function.Name, + Arguments = toolCall.Function.Arguments, + Done = true + }); + } var toolMessage = new Message { @@ -781,28 +776,28 @@ await notificationService.DispatchNotification( if (iterations >= MaxToolIterations) { + var errorMessage = "Maximum tool invocation iterations reached. 
Ending the conversation."; + var iterationMessage = new Message + { + Content = errorMessage, + Role = AuthorRole.System.ToString(), + Type = MessageType.LocalLLM, + }; + chat.Messages.Add(iterationMessage.MarkProcessed()); + await SendNotification(chat.Id, new LLMTokenValue { Type = TokenType.FullAnswer, - Text = "Maximum tool invocation iterations reached. Ending the conversation." + Text = errorMessage }, false); } - var finalResponse = fullResponseBuilder.ToString(); - chat.Messages.Last().MarkProcessed(); - return new ChatResult { Done = true, CreatedAt = DateTime.Now, Model = chat.Model, - Message = new Message - { - Content = finalResponse, - Tokens = tokens, - Role = AuthorRole.Assistant.ToString(), - Type = MessageType.LocalLLM, - }.MarkProcessed() + Message = chat.Messages.Last() }; } From 8600dcb1da7e8ad63af02d2fcecf66b2297b25a5 Mon Sep 17 00:00:00 2001 From: srebrek Date: Mon, 26 Jan 2026 17:45:44 +0100 Subject: [PATCH 4/6] refactor: tool code cleanup - OpenAICompatibleService and LLMService use Tool Classes Defined in the Domain. - Extract Json-Tool parse logic to helper function. --- .../Entities/Tools/FunctionCall.cs | 15 ++- src/MaIN.Domain/Entities/Tools/ToolCall.cs | 15 +++ .../Entities/Tools/ToolDefinition.cs | 6 +- .../Services/LLMService/LLMService.cs | 107 +----------------- .../LLMService/OpenAiCompatibleService.cs | 35 ------ .../LLMService/Utils/ToolCallsHelper.cs | 94 +++++++++++++++ 6 files changed, 125 insertions(+), 147 deletions(-) create mode 100644 src/MaIN.Domain/Entities/Tools/ToolCall.cs create mode 100644 src/MaIN.Services/Services/LLMService/Utils/ToolCallsHelper.cs diff --git a/src/MaIN.Domain/Entities/Tools/FunctionCall.cs b/src/MaIN.Domain/Entities/Tools/FunctionCall.cs index b3ecead..065c173 100644 --- a/src/MaIN.Domain/Entities/Tools/FunctionCall.cs +++ b/src/MaIN.Domain/Entities/Tools/FunctionCall.cs @@ -1,7 +1,12 @@ -namespace MaIN.Domain.Entities.Tools; +using System.Text.Json.Serialization; -public class FunctionCall +namespace MaIN.Domain.Entities.Tools; + +public sealed record FunctionCall { - public string Name { get; set; } = null!; - public string Arguments { get; set; } = null!; -} \ No newline at end of file + [JsonPropertyName("name")] + public string Name { get; init; } = string.Empty; + + [JsonPropertyName("arguments")] + public string Arguments { get; init; } = "{}"; +} diff --git a/src/MaIN.Domain/Entities/Tools/ToolCall.cs b/src/MaIN.Domain/Entities/Tools/ToolCall.cs new file mode 100644 index 0000000..8eb5776 --- /dev/null +++ b/src/MaIN.Domain/Entities/Tools/ToolCall.cs @@ -0,0 +1,15 @@ +using System.Text.Json.Serialization; + +namespace MaIN.Domain.Entities.Tools; + +public sealed record ToolCall +{ + [JsonPropertyName("id")] + public string Id { get; init; } = string.Empty; + + [JsonPropertyName("type")] + public string Type { get; init; } = "function"; + + [JsonPropertyName("function")] + public FunctionCall Function { get; init; } = new(); +} diff --git a/src/MaIN.Domain/Entities/Tools/ToolDefinition.cs b/src/MaIN.Domain/Entities/Tools/ToolDefinition.cs index e6c68f9..cfd73fd 100644 --- a/src/MaIN.Domain/Entities/Tools/ToolDefinition.cs +++ b/src/MaIN.Domain/Entities/Tools/ToolDefinition.cs @@ -1,8 +1,12 @@ -namespace MaIN.Domain.Entities.Tools; +using System.Text.Json.Serialization; + +namespace MaIN.Domain.Entities.Tools; public class ToolDefinition { public string Type { get; set; } = "function"; public FunctionDefinition? Function { get; set; } + + [JsonIgnore] public Func>? 
Execute { get; set; } } \ No newline at end of file diff --git a/src/MaIN.Services/Services/LLMService/LLMService.cs b/src/MaIN.Services/Services/LLMService/LLMService.cs index 8a90a35..e86b440 100644 --- a/src/MaIN.Services/Services/LLMService/LLMService.cs +++ b/src/MaIN.Services/Services/LLMService/LLMService.cs @@ -1,7 +1,6 @@ using System.Collections.Concurrent; using System.Text; using System.Text.Json; -using System.Text.Json.Serialization; using LLama; using LLama.Batched; using LLama.Common; @@ -37,11 +36,6 @@ public class LLMService : ILLMService private readonly IMemoryFactory memoryFactory; private readonly string modelsPath; - private readonly JsonSerializerOptions _jsonToolOptions = new() - { - PropertyNameCaseInsensitive = true, - }; - public LLMService( MaINSettings options, INotificationService notificationService, @@ -391,105 +385,6 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig) """; } - private ToolParseResult ParseToolCalls(string response) - { - if (string.IsNullOrWhiteSpace(response)) - return ToolParseResult.Failure("Response is empty."); - - var jsonContent = ExtractJsonContent(response); - - if (string.IsNullOrEmpty(jsonContent)) - return ToolParseResult.ToolNotFound(); - - try - { - var wrapper = JsonSerializer.Deserialize(jsonContent, _jsonToolOptions); - - if (wrapper?.ToolCalls != null && wrapper.ToolCalls.Any()) - return ToolParseResult.Success(NormalizeToolCalls(wrapper.ToolCalls)); - - return ToolParseResult.Failure("JSON parsed correctly but 'tool_calls' property is missing or empty."); - } - catch (JsonException ex) - { - return ToolParseResult.Failure($"Invalid JSON format: {ex.Message}"); - } - } - - private static string? ExtractJsonContent(string text) - { - text = text.Trim(); - - var firstBrace = text.IndexOf('{'); - var firstBracket = text.IndexOf('['); - var startIndex = (firstBrace >= 0 && firstBracket >= 0) ? Math.Min(firstBrace, firstBracket) : Math.Max(firstBrace, firstBracket); - - var lastBrace = text.LastIndexOf('}'); - var lastBracket = text.LastIndexOf(']'); - var endIndex = Math.Max(lastBrace, lastBracket); - - if (startIndex >= 0 && endIndex > startIndex) - return text.Substring(startIndex, endIndex - startIndex + 1); - - return null; - } - - private static List NormalizeToolCalls(List? calls) - { - if (calls == null) - return []; - - foreach (var call in calls) - { - if (string.IsNullOrEmpty(call.Id)) - call.Id = Guid.NewGuid().ToString()[..8]; - - if (string.IsNullOrEmpty(call.Type)) - call.Type = "function"; - - call.Function ??= new FunctionCall(); - } - return calls; - } - - public class ToolCall - { - [JsonPropertyName("id")] - public string Id { get; set; } = Guid.NewGuid().ToString(); - - [JsonPropertyName("type")] - public string Type { get; set; } = "function"; - - [JsonPropertyName("function")] - public FunctionCall Function { get; set; } = new(); - } - - public class FunctionCall - { - [JsonPropertyName("name")] - public string Name { get; set; } = string.Empty; - - [JsonPropertyName("arguments")] - public string Arguments { get; set; } = "{}"; - } - - private class ToolResponseWrapper - { - [JsonPropertyName("tool_calls")] - public List? ToolCalls { get; set; } - } - - private record ToolParseResult - { - public bool IsSuccess { get; init; } - public List? ToolCalls { get; init; } - public string? 
ErrorMessage { get; init; } - - public static ToolParseResult Success(List calls) => new() { IsSuccess = true, ToolCalls = calls }; - public static ToolParseResult Failure(string error) => new() { IsSuccess = false, ErrorMessage = error }; - public static ToolParseResult ToolNotFound() => new() { IsSuccess = false }; - } - private async Task<(List Tokens, bool IsComplete, bool HasFailed)> ProcessTokens( Chat chat, Conversation conversation, @@ -665,7 +560,7 @@ private async Task ProcessWithToolsAsync( }; chat.Messages.Add(responseMessage.MarkProcessed()); - var parseResult = ParseToolCalls(lastResponse); + var parseResult = ToolCallParser.ParseToolCalls(lastResponse); // Tool not found or invalid JSON if (!parseResult.IsSuccess) diff --git a/src/MaIN.Services/Services/LLMService/OpenAiCompatibleService.cs b/src/MaIN.Services/Services/LLMService/OpenAiCompatibleService.cs index 1c3fc35..b0ae2d1 100644 --- a/src/MaIN.Services/Services/LLMService/OpenAiCompatibleService.cs +++ b/src/MaIN.Services/Services/LLMService/OpenAiCompatibleService.cs @@ -1001,41 +1001,6 @@ private static string DetectImageMimeType(byte[] imageBytes) } } - -public class ToolDefinition -{ - public string Type { get; set; } = "function"; - public FunctionDefinition Function { get; set; } = null!; - - [System.Text.Json.Serialization.JsonIgnore] - public Func>? Execute { get; set; } -} - -public class FunctionDefinition -{ - public string Name { get; set; } = null!; - public string? Description { get; set; } - public object Parameters { get; set; } = null!; -} - -public class ToolCall -{ - [JsonPropertyName("id")] - public string Id { get; set; } = null!; - [JsonPropertyName("type")] - public string Type { get; set; } = "function"; - [JsonPropertyName("function")] - public FunctionCall Function { get; set; } = null!; -} - -public class FunctionCall -{ - [JsonPropertyName("name")] - public string Name { get; set; } = null!; - [JsonPropertyName("arguments")] - public string Arguments { get; set; } = null!; -} - internal class ChatMessage { public string Role { get; set; } diff --git a/src/MaIN.Services/Services/LLMService/Utils/ToolCallsHelper.cs b/src/MaIN.Services/Services/LLMService/Utils/ToolCallsHelper.cs new file mode 100644 index 0000000..41df73f --- /dev/null +++ b/src/MaIN.Services/Services/LLMService/Utils/ToolCallsHelper.cs @@ -0,0 +1,94 @@ +using System.Text.Json; +using System.Text.Json.Serialization; +using MaIN.Domain.Entities.Tools; + +namespace MaIN.Services.Services.LLMService.Utils; + +public static class ToolCallParser +{ + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNameCaseInsensitive = true, + }; + + public static ToolParseResult ParseToolCalls(string response) + { + if (string.IsNullOrWhiteSpace(response)) + return ToolParseResult.Failure("Response is empty."); + + var jsonContent = ExtractJsonContent(response); + + if (string.IsNullOrEmpty(jsonContent)) + return ToolParseResult.ToolNotFound(); + + try + { + var wrapper = JsonSerializer.Deserialize(jsonContent, JsonOptions); + + if (wrapper?.ToolCalls is not null && wrapper.ToolCalls.Count != 0) + return ToolParseResult.Success(NormalizeToolCalls(wrapper.ToolCalls)); + + return ToolParseResult.Failure("JSON parsed correctly but 'tool_calls' property is missing or empty."); + } + catch (JsonException ex) + { + return ToolParseResult.Failure($"Invalid JSON format: {ex.Message}"); + } + } + + private static string? 
ExtractJsonContent(string text) + { + text = text.Trim(); + + var firstBrace = text.IndexOf('{'); + var firstBracket = text.IndexOf('['); + var startIndex = (firstBrace >= 0 && firstBracket >= 0) + ? Math.Min(firstBrace, firstBracket) + : Math.Max(firstBrace, firstBracket); + + var lastBrace = text.LastIndexOf('}'); + var lastBracket = text.LastIndexOf(']'); + var endIndex = Math.Max(lastBrace, lastBracket); + + if (startIndex >= 0 && endIndex > startIndex) + return text.Substring(startIndex, endIndex - startIndex + 1); + + return null; + } + + private static List NormalizeToolCalls(List? calls) + { + if (calls is null) + return []; + + var normalizedCalls = new List(); + + foreach (var call in calls) + { + var id = string.IsNullOrEmpty(call.Id) ? Guid.NewGuid().ToString()[..8] : call.Id; + var type = string.IsNullOrEmpty(call.Type) ? "function" : call.Type; + var function = call.Function ?? new FunctionCall(); + + normalizedCalls.Add(call with { Id = id, Type = type, Function = function }); + } + + return normalizedCalls; + } + + private sealed record ToolResponseWrapper + { + [JsonPropertyName("tool_calls")] + public List? ToolCalls { get; init; } + } +} + +public record ToolParseResult +{ + public bool IsSuccess { get; init; } + public List? ToolCalls { get; init; } + public string? ErrorMessage { get; init; } + + public static ToolParseResult Success(List calls) => new() { IsSuccess = true, ToolCalls = calls }; + public static ToolParseResult Failure(string error) => new() { IsSuccess = false, ErrorMessage = error }; + public static ToolParseResult ToolNotFound() => new() { IsSuccess = false }; +} From f1e847d5c1d594df0c130b7c506986ff33281ae3 Mon Sep 17 00:00:00 2001 From: srebrek Date: Tue, 27 Jan 2026 09:46:12 +0100 Subject: [PATCH 5/6] fix: minor fixes related to tool-calling --- Examples/Examples/Chat/ChatExampleToolsSimple.cs | 4 +--- .../Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs | 2 -- src/MaIN.Services/Services/LLMService/LLMService.cs | 10 +++++++++- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/Examples/Examples/Chat/ChatExampleToolsSimple.cs b/Examples/Examples/Chat/ChatExampleToolsSimple.cs index 4c90a93..9681976 100644 --- a/Examples/Examples/Chat/ChatExampleToolsSimple.cs +++ b/Examples/Examples/Chat/ChatExampleToolsSimple.cs @@ -9,11 +9,9 @@ public class ChatExampleToolsSimple : IExample public async Task Start() { OpenAiExample.Setup(); //We need to provide OpenAi API key - + Console.WriteLine("(OpenAi) ChatExample with tools is running!"); - var model = AIHub.Model(); - await AIHub.Chat() .WithModel("gpt-5-nano") .WithMessage("What time is it right now?") diff --git a/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs b/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs index 03ff7f3..9694c1a 100644 --- a/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs +++ b/Examples/Examples/Chat/ChatExampleToolsSimpleLocalLLM.cs @@ -10,8 +10,6 @@ public async Task Start() { Console.WriteLine("Local LLM ChatExample with tools is running!"); - var model = AIHub.Model(); - await AIHub.Chat() .WithModel("gemma3:4b") .WithMessage("What time is it right now?") diff --git a/src/MaIN.Services/Services/LLMService/LLMService.cs b/src/MaIN.Services/Services/LLMService/LLMService.cs index e86b440..deb441f 100644 --- a/src/MaIN.Services/Services/LLMService/LLMService.cs +++ b/src/MaIN.Services/Services/LLMService/LLMService.cs @@ -55,7 +55,9 @@ public LLMService( CancellationToken cancellationToken = default) { if (chat.Messages.Count == 0) + { return 
null; + } var lastMsg = chat.Messages.Last(); @@ -90,7 +92,9 @@ public Task GetCurrentModels() public Task CleanSessionCache(string? id) { if (string.IsNullOrEmpty(id) || !_sessionCache.TryRemove(id, out var session)) + { return Task.CompletedTask; + } session.Executor.Context.Dispose(); return Task.CompletedTask; @@ -310,7 +314,9 @@ private static async Task ProcessImageMessage(Conversation conversation, conversation.Prompt(imageEmbeddings!); while (executor.BatchedTokenCount > 0) + { await executor.Infer(cancellationToken); + } var prompt = llmModel.Tokenize($"USER: {lastMsg.Content}\nASSISTANT:", true, false, Encoding.UTF8); conversation.Prompt(prompt); @@ -364,7 +370,9 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig) foreach (var tool in toolsConfig.Tools) { if (tool.Function == null) + { continue; + } toolsList.AppendLine($"- {tool.Function.Name}: {tool.Function.Description}"); toolsList.AppendLine($" Parameters: {JsonSerializer.Serialize(tool.Function.Parameters)}"); @@ -671,7 +679,7 @@ await requestOptions.ToolCallback.Invoke(new ToolInvocation if (iterations >= MaxToolIterations) { - var errorMessage = "Maximum tool invocation iterations reached. Ending the conversation."; + var errorMessage = "Maximum tool invocation iterations reached. Ending the tool-loop prematurely."; var iterationMessage = new Message { Content = errorMessage, From 89f3382ec008b34b22eea79da7e2589d29db5518 Mon Sep 17 00:00:00 2001 From: srebrek Date: Tue, 27 Jan 2026 11:27:30 +0100 Subject: [PATCH 6/6] chore: bump version to 0.9.0 and update release notes --- Releases/0.9.0.md | 3 +++ src/MaIN.Core/.nuspec | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 Releases/0.9.0.md diff --git a/Releases/0.9.0.md b/Releases/0.9.0.md new file mode 100644 index 0000000..dc98b6e --- /dev/null +++ b/Releases/0.9.0.md @@ -0,0 +1,3 @@ +# 0.9.0 release + +- Add tool calling to local models \ No newline at end of file diff --git a/src/MaIN.Core/.nuspec b/src/MaIN.Core/.nuspec index 5135042..c4b8816 100644 --- a/src/MaIN.Core/.nuspec +++ b/src/MaIN.Core/.nuspec @@ -2,7 +2,7 @@ MaIN.NET - 0.8.1 + 0.9.0 Wisedev Wisedev favicon.png