Skip to content

Ollama provider returns Bad request #559

@PontiacGTX

Description

@PontiacGTX

Describe the bug

I tried using v0.017.1dev28 with .NET 8.0, but it returns a Bad Request error when using the latest version of Ollama on Windows 11.

Steps to reproduce the bug

Using the code example below, the server returns HTTP 400 when this tool is sent in the request:

```csharp
using LangChain.Providers.OpenAI.Predefined;
using LangChain.Providers.Ollama;
using System.Text.RegularExpressions;
using Ollama;
using System.IO;
using LlmAssist.Core.DataContextEmbeddeding;
using LlmAssist.Core.Tools;
using Microsoft.Extensions.Options;
using System.Reflection;
using Tool = Ollama.Tool;
using Message = Ollama.Message;
using LlmAssist.Core.ToolExtension;

namespace LlmAssist.Core.Services;

public class LlmAssistSvc
{
const string Sys_Message = "You are a helpful coding assistant, complete the work without comments, dont explain the code. dont write the thought process. strictly do what was prompted";

// Ollama API client used for all model/chat/embedding calls.
OllamaApiClient ollama { get; }

// Models discovered at construction time, keyed by their position in the list response.
Dictionary<int, Ollama.Model> Models { get; }

// Last embedding response produced by GeneratedEmbeddings.
GenerateEmbeddingResponse Embeddings { get; set; }

public string SelectedModel { get; set; }

// FIX: the original `List`/`new List()` had no type argument and does not
// compile; the chat history holds Ollama messages (see the
// AsSystemMessage/AsUserMessage usage in QueryChat).
public List<Message> Messages { get; set; } = new List<Message>();

bool ModelSelected => !string.IsNullOrEmpty(SelectedModel);

// FIX: `async` without `await` only produced warnings on GetModels, and
// `async Task` cannot return a value at all in GetEmbeddings (compile error);
// wrap the already-available values in completed tasks instead.
public Task<List<Ollama.Model>> GetModels() => Task.FromResult(Models.Values.ToList());
public Task<GenerateEmbeddingResponse> GetEmbeddings() => Task.FromResult(Embeddings);

// Shared tool instance so each query does not allocate a new HttpClient.
static HtmlDownloadTool htmlDownloadTool = new HtmlDownloadTool();

/// <summary>Selects the model at <paramref name="index"/> as the active chat model.</summary>
/// <exception cref="KeyNotFoundException">No model exists at the given index.</exception>
public Task SetModel(int index)
{
    // FIX: TryGetValue avoids the ContainsKey + indexer double lookup, and a
    // specific exception type replaces the bare `new Exception(...)`.
    if (!Models.TryGetValue(index, out var model))
        throw new KeyNotFoundException($"Index not found: {index}");

    SelectedModel = model.Model1;
    // FIX: the original was `async` with no `await` (warning + needless
    // state-machine allocation); return a completed task instead.
    return Task.CompletedTask;
}

/// <summary>
/// Creates the service and eagerly loads the available model list from the
/// Ollama server (default endpoint when <paramref name="uriOllama"/> is null or empty).
/// </summary>
public LlmAssistSvc(string uriOllama = null)
{
    ollama = string.IsNullOrEmpty(uriOllama)
        ? new OllamaApiClient()
        : new OllamaApiClient(baseUri: new Uri(uriOllama));

    // NOTE(review): .Result blocks synchronously inside a constructor
    // (deadlock risk on a UI SynchronizationContext, thread-pool pressure
    // elsewhere); consider an async factory method instead.
    var listed = ollama.Models.ListModelsAsync().Result!;

    // FIX: a named tuple replaces the original anonymous-type projection —
    // same result, less indirection, no trailing null-forgiving operator.
    Models = listed.Models
        .Select((model, idx) => (Index: idx, Model: model))
        .ToDictionary(x => x.Index, x => x.Model);
}

/// <summary>
/// Creates the service, loads the model list, pulls the model at
/// <paramref name="index"/> and selects it as the active model.
/// </summary>
/// <exception cref="KeyNotFoundException">No model exists at the given index.</exception>
public LlmAssistSvc(string uriOllama = null, int index = 1) : this(uriOllama)
{
    // FIX: the original duplicated the whole model-listing logic of the
    // single-argument constructor; chaining to it removes the duplication.
    if (!Models.ContainsKey(index))
        throw new KeyNotFoundException($"Index not found: {index}");

    // NOTE(review): sync-over-async (.Result / .Wait) in a constructor is a
    // deadlock risk; consider an async factory method.
    ollama = GetClient(Models[index].Model1).Result;
    SetModel(index).Wait();
}

/// <summary>
/// Builds a long-timeout Ollama client and ensures <paramref name="model"/>
/// is pulled locally before returning.
/// </summary>
/// <param name="model">Model name to pull; skipped when null or empty.</param>
/// <param name="baseUri">
/// Server endpoint; defaults to the local Ollama instance. FIX: the original
/// always hard-coded http://127.0.0.1:11434/api, silently discarding any
/// custom uriOllama given to the constructor — the optional parameter keeps
/// existing callers working while allowing a custom endpoint.
/// </param>
async Task<OllamaApiClient> GetClient(string model, Uri baseUri = null)
{
    // set OLLAMA_HOST=...:11434
    var apiClient = new OllamaApiClient(
        httpClient: new HttpClient
        {
            // Model pulls can take a long time on first download.
            Timeout = TimeSpan.FromMinutes(10),
        },
        baseUri: baseUri ?? new Uri("http://127.0.0.1:11434/api"));

    if (!string.IsNullOrEmpty(model))
    {
        await apiClient.Models.PullModelAsync(model).EnsureSuccessAsync();
    }

    return apiClient;
}

/// <summary>
/// Generates embeddings for <paramref name="input"/> with the selected model
/// and caches the response in <see cref="Embeddings"/>.
/// </summary>
public async Task GeneratedEmbeddings(string input)
{
    // FIX: the original passed prompt: "" and ignored `input`, so every call
    // produced embeddings for the empty string.
    Embeddings = await ollama.Embeddings.GenerateEmbeddingAsync(
        model: SelectedModel,
        prompt: input);
}


/// <summary>
/// Sends <paramref name="userInput"/> to the selected model with the HTML
/// download tool attached, and returns the (optionally sanitized) reply.
/// </summary>
/// <param name="userInput">The user prompt.</param>
/// <param name="includeThought">When false, &lt;think&gt; blocks are stripped from the reply.</param>
/// <param name="useStream">Currently unused. TODO: implement streaming.</param>
/// <param name="ctx">Currently unused. TODO: wire the chat context into the request.</param>
/// <param name="toolSets">Currently unused. TODO: forward these tools instead of the built-in HTML tool.</param>
public async Task<QueryResponseContext> QueryChat(string userInput, bool includeThought = false, bool useStream = false, DataChatContext ctx = null, List<ExtendedTool> toolSets = null)
{
    // FIX: reuse the Sys_Message constant instead of duplicating the literal.
    Messages.Add(Sys_Message.AsSystemMessage());
    Messages.Add(userInput.AsUserMessage());

    // FIX: reuse the shared static tool instance instead of allocating a new
    // HtmlDownloadTool (and a new HttpClient) per query; the original also
    // computed `tools`/`calls` locals that were never used.
    var lastResponse = ollama.Chat(
        model: SelectedModel,
        systemMessage: Sys_Message,
        autoCallTools: true);
    lastResponse.AddToolService(htmlDownloadTool.AsTools().AsOllamaTools(), htmlDownloadTool.AsCalls());

    var res = await lastResponse.SendAsync(userInput);
    string response = res.Content;
    response = includeThought ? response : Sanitize(ref response);

    return new QueryResponseContext
    {
        Response = response
    };
}

// Strips <think>...</think> blocks from the model output and drops the single
// leading blank line the removal can leave behind. Mutates `input` in place
// (ref) and also returns the cleaned value.
string Sanitize(ref string input)
{
    input = Regex.Replace(input, "<think>.*?</think>", "", RegexOptions.Singleline);
    if (input.StartsWith("\n\n"))
    {
        return input.Substring(2);
    }
    return input;
}

// Writes every known model to the console as "<index> <name>", one per line.
public async Task PrintModels()
{
    var models = await GetModels();
    var lines = models.Select((model, idx) => $"{idx} {model.Model1}");
    Console.WriteLine(string.Join(Environment.NewLine, lines));
}

}
```

The tool implementation:

```csharp
[GenerateJsonSchema]
public interface IHTMLDownloadTool
{
    /// <summary>Downloads the HTML document at the given URL.</summary>
    // FIX: the original declared a non-generic `Task` return while the
    // implementing class returns Task<string>; an implicit interface
    // implementation requires an exact return-type match, so HtmlDownloadTool
    // did not satisfy this member and the type failed to compile.
    [Description("Downloads or fetchs data in HTML")]
    Task<string> DownloadHtmlAsync(
        [Description("Url string to request to fetch HTML from")] string uri,
        global::System.Threading.CancellationToken cancellationToken = default);
}
public class HtmlDownloadTool: IHTMLDownloadTool
{
// Shared HTTP client for all downloads performed by this tool instance.
// NOTE(review): one HttpClient per tool instance is fine while the tool is
// used as a singleton (see the static field in LlmAssistSvc); avoid creating
// many short-lived instances, which exhausts sockets.
private readonly HttpClient _httpClient;

 public HtmlDownloadTool()
 {
     _httpClient = new HttpClient();
 }

 /// <summary>Fetches the raw HTML at <paramref name="uri"/>.</summary>
 /// <param name="uri">Absolute URL to request.</param>
 /// <param name="cancellationToken">Cancels the HTTP request.</param>
 /// <returns>The response body, or "" when cancellation was already requested.</returns>
 public async Task<string> DownloadHtmlAsync(string uri ,global::System.Threading.CancellationToken cancellationToken = default)
 {
     // FIX: CancellationToken is a struct, so the original `!= null` check
     // compiled to a lifted comparison that is always true — only the
     // IsCancellationRequested test ever mattered.
     if (cancellationToken.IsCancellationRequested)
         return "";

     // FIX: flow the token into the request so an in-flight download can be
     // cancelled (GetStringAsync(Uri, CancellationToken) overload, .NET 5+).
     var response = await _httpClient.GetStringAsync(new Uri(uri), cancellationToken);

     return response;
 }

}
```

Expected behavior

returns the response without errors

Screenshots

No response

NuGet package version

No response

Additional context

No response

Metadata

Metadata

Assignees

No one assigned

    Labels

    bug — Something isn't working

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions