// Unit tests for the IOllamaClient interface: reflection-based contract checks
// plus Moq-backed behavioral tests of each interface member.
using ChatBot.Services;
using ChatBot.Services.Interfaces;
using ChatBot.Tests.TestUtilities;
using FluentAssertions;
using Moq;
using OllamaSharp.Models;
using OllamaSharp.Models.Chat;

namespace ChatBot.Tests.Services.Interfaces;

/// <summary>
/// Contract tests for <see cref="IOllamaClient"/>: verifies the interface shape via
/// reflection and exercises Moq-based substitutes for each member (property get/set,
/// streaming chat, and local-model listing), including null/empty edge cases.
/// </summary>
public class IOllamaClientTests : UnitTestBase
{
    [Fact]
    public void IOllamaClient_ShouldHaveCorrectMethodSignatures()
    {
        // Arrange & Act
        var interfaceType = typeof(IOllamaClient);
        var methods = interfaceType.GetMethods();
        var properties = interfaceType.GetProperties();

        // Assert
        // GetMethods() includes property accessors, so the four methods are:
        // ChatAsync, ListLocalModelsAsync, get_SelectedModel, set_SelectedModel.
        methods.Should().HaveCount(4);
        properties.Should().HaveCount(1);

        // SelectedModel: read/write string property.
        var selectedModelProperty = properties.FirstOrDefault(p => p.Name == "SelectedModel");
        selectedModelProperty.Should().NotBeNull();
        selectedModelProperty!.PropertyType.Should().Be(typeof(string));
        selectedModelProperty.CanRead.Should().BeTrue();
        selectedModelProperty.CanWrite.Should().BeTrue();

        // ChatAsync(ChatRequest) -> IAsyncEnumerable<ChatResponseStream?>.
        var chatAsyncMethod = methods.FirstOrDefault(m => m.Name == "ChatAsync");
        chatAsyncMethod.Should().NotBeNull();
        chatAsyncMethod!.ReturnType.Should().Be(typeof(IAsyncEnumerable<ChatResponseStream?>));
        chatAsyncMethod.GetParameters().Should().HaveCount(1);
        chatAsyncMethod.GetParameters()[0].ParameterType.Should().Be(typeof(ChatRequest));

        // ListLocalModelsAsync() -> Task<IEnumerable<Model>>, parameterless.
        var listLocalModelsAsyncMethod = methods.FirstOrDefault(m =>
            m.Name == "ListLocalModelsAsync"
        );
        listLocalModelsAsyncMethod.Should().NotBeNull();
        listLocalModelsAsyncMethod!.ReturnType.Should().Be(typeof(Task<IEnumerable<Model>>));
        listLocalModelsAsyncMethod.GetParameters().Should().BeEmpty();
    }

    [Fact]
    public void IOllamaClient_ShouldBeImplementedByOllamaClientAdapter()
    {
        // Arrange & Act
        var ollamaClientAdapterType = typeof(OllamaClientAdapter);
        var interfaceType = typeof(IOllamaClient);

        // Assert: the production adapter actually implements the interface.
        interfaceType.IsAssignableFrom(ollamaClientAdapterType).Should().BeTrue();
    }

    [Fact]
    public void IOllamaClient_SelectedModel_ShouldBeReadableAndWritable()
    {
        // Arrange
        var mock = new Mock<IOllamaClient>();
        var expectedModel = "llama2:7b";

        // SetupProperty gives the mock property backing-store semantics.
        mock.SetupProperty(x => x.SelectedModel, "default-model");

        // Act
        mock.Object.SelectedModel = expectedModel;
        var result = mock.Object.SelectedModel;

        // Assert
        result.Should().Be(expectedModel);
        mock.VerifySet(x => x.SelectedModel = expectedModel, Times.Once);
        mock.VerifyGet(x => x.SelectedModel, Times.Once);
    }

    [Fact]
    public async Task IOllamaClient_ChatAsync_ShouldReturnAsyncEnumerable()
    {
        // Arrange
        var mock = new Mock<IOllamaClient>();
        var request = new ChatRequest
        {
            Model = "llama2:7b",
            Messages = new List<Message>
            {
                new() { Role = "user", Content = "Hello" },
            },
        };

        // A typical stream: one content chunk followed by the terminal Done marker.
        var expectedResponse = new List<ChatResponseStream?>
        {
            new()
            {
                Message = new Message
                {
                    Role = "assistant",
                    Content = "Hello! How can I help you?",
                },
            },
            new() { Done = true },
        };

        mock.Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(CreateAsyncEnumerable(expectedResponse));

        // Act
        var result = mock.Object.ChatAsync(request);
        var responses = new List<ChatResponseStream?>();
        await foreach (var response in result)
        {
            responses.Add(response);
        }

        // Assert
        responses.Should().HaveCount(2);
        responses[0]?.Message?.Content.Should().Be("Hello! How can I help you?");
        responses[1]?.Done.Should().BeTrue();
        mock.Verify(x => x.ChatAsync(request), Times.Once);
    }

    [Fact]
    public async Task IOllamaClient_ChatAsync_ShouldHandleEmptyResponse()
    {
        // Arrange: a stream that completes without yielding any chunks.
        var mock = new Mock<IOllamaClient>();
        var request = new ChatRequest { Model = "llama2:7b", Messages = new List<Message>() };

        var expectedResponse = new List<ChatResponseStream?>();

        mock.Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(CreateAsyncEnumerable(expectedResponse));

        // Act
        var result = mock.Object.ChatAsync(request);
        var responses = new List<ChatResponseStream?>();
        await foreach (var response in result)
        {
            responses.Add(response);
        }

        // Assert
        responses.Should().BeEmpty();
        mock.Verify(x => x.ChatAsync(request), Times.Once);
    }

    [Fact]
    public async Task IOllamaClient_ChatAsync_ShouldHandleNullResponse()
    {
        // Arrange: the element type is nullable, so null chunks must flow through.
        var mock = new Mock<IOllamaClient>();
        var request = new ChatRequest
        {
            Model = "llama2:7b",
            Messages = new List<Message>
            {
                new() { Role = "user", Content = "Test" },
            },
        };

        var expectedResponse = new List<ChatResponseStream?>
        {
            null,
            new()
            {
                Message = new Message { Role = "assistant", Content = "Response" },
            },
        };

        mock.Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(CreateAsyncEnumerable(expectedResponse));

        // Act
        var result = mock.Object.ChatAsync(request);
        var responses = new List<ChatResponseStream?>();
        await foreach (var response in result)
        {
            responses.Add(response);
        }

        // Assert
        responses.Should().HaveCount(2);
        responses[0].Should().BeNull();
        responses[1]?.Message?.Content.Should().Be("Response");
        mock.Verify(x => x.ChatAsync(request), Times.Once);
    }

    [Fact]
    public async Task IOllamaClient_ListLocalModelsAsync_ShouldReturnModels()
    {
        // Arrange
        var mock = new Mock<IOllamaClient>();
        var expectedModels = new List<Model>
        {
            new()
            {
                Name = "llama2:7b",
                Size = 3825819519,
                ModifiedAt = DateTime.UtcNow,
            },
            new()
            {
                Name = "codellama:7b",
                Size = 3825819519,
                ModifiedAt = DateTime.UtcNow,
            },
        };

        mock.Setup(x => x.ListLocalModelsAsync()).ReturnsAsync(expectedModels);

        // Act
        var result = await mock.Object.ListLocalModelsAsync();

        // Assert
        result.Should().BeEquivalentTo(expectedModels);
        mock.Verify(x => x.ListLocalModelsAsync(), Times.Once);
    }

    [Fact]
    public async Task IOllamaClient_ListLocalModelsAsync_ShouldReturnEmptyList()
    {
        // Arrange
        var mock = new Mock<IOllamaClient>();
        var expectedModels = new List<Model>();

        mock.Setup(x => x.ListLocalModelsAsync()).ReturnsAsync(expectedModels);

        // Act
        var result = await mock.Object.ListLocalModelsAsync();

        // Assert
        result.Should().BeEmpty();
        mock.Verify(x => x.ListLocalModelsAsync(), Times.Once);
    }

    [Fact]
    public async Task IOllamaClient_ListLocalModelsAsync_ShouldHandleNullModels()
    {
        // Arrange: documents the (questionable) possibility of a null model list.
        // NOTE(review): consider tightening the interface contract to never return null.
        var mock = new Mock<IOllamaClient>();
        IEnumerable<Model>? expectedModels = null;

        mock.Setup(x => x.ListLocalModelsAsync()).ReturnsAsync(expectedModels!);

        // Act
        var result = await mock.Object.ListLocalModelsAsync();

        // Assert
        result.Should().BeNull();
        mock.Verify(x => x.ListLocalModelsAsync(), Times.Once);
    }

    [Fact]
    public void IOllamaClient_SelectedModel_ShouldHandleNullValue()
    {
        // Arrange
        var mock = new Mock<IOllamaClient>();
        string? expectedModel = null;

        mock.SetupProperty(x => x.SelectedModel, "default-model");

        // Act
        mock.Object.SelectedModel = expectedModel!;
        var result = mock.Object.SelectedModel;

        // Assert
        result.Should().BeNull();
        mock.VerifySet(x => x.SelectedModel = expectedModel!, Times.Once);
        mock.VerifyGet(x => x.SelectedModel, Times.Once);
    }

    [Fact]
    public void IOllamaClient_SelectedModel_ShouldHandleEmptyString()
    {
        // Arrange
        var mock = new Mock<IOllamaClient>();
        var expectedModel = "";

        mock.SetupProperty(x => x.SelectedModel, "default-model");

        // Act
        mock.Object.SelectedModel = expectedModel;
        var result = mock.Object.SelectedModel;

        // Assert
        result.Should().Be(expectedModel);
        mock.VerifySet(x => x.SelectedModel = expectedModel, Times.Once);
        mock.VerifyGet(x => x.SelectedModel, Times.Once);
    }

    [Fact]
    public void IOllamaClient_ShouldBePublicInterface()
    {
        // Arrange & Act
        var interfaceType = typeof(IOllamaClient);

        // Assert
        interfaceType.IsPublic.Should().BeTrue();
        interfaceType.IsInterface.Should().BeTrue();
    }

    [Fact]
    public void IOllamaClient_ShouldHaveCorrectNamespace()
    {
        // Arrange & Act
        var interfaceType = typeof(IOllamaClient);

        // Assert
        interfaceType.Namespace.Should().Be("ChatBot.Services.Interfaces");
    }

    [Fact]
    public void IOllamaClient_ShouldHaveCorrectGenericConstraints()
    {
        // Arrange & Act
        var interfaceType = typeof(IOllamaClient);
        var methods = interfaceType.GetMethods();
        var properties = interfaceType.GetProperties();

        // Assert
        // All methods should be public (interface members always are; this guards
        // against the interface accidentally becoming a class with non-public members).
        methods.All(m => m.IsPublic).Should().BeTrue();

        // All property accessors should be public.
        properties.All(p => p.GetGetMethod()?.IsPublic == true).Should().BeTrue();
        properties.All(p => p.GetSetMethod()?.IsPublic == true).Should().BeTrue();
    }

    [Fact]
    public async Task IOllamaClient_ChatAsync_ShouldHandleLargeRequest()
    {
        // Arrange
        var mock = new Mock<IOllamaClient>();
        var messages = new List<Message>();
        // Build a long alternating user/assistant conversation history.
        for (int i = 0; i < 100; i++)
        {
            messages.Add(
                new Message { Role = i % 2 == 0 ? "user" : "assistant", Content = $"Message {i}" }
            );
        }

        var request = new ChatRequest { Model = "llama2:7b", Messages = messages };

        var expectedResponse = new List<ChatResponseStream?>
        {
            new()
            {
                Message = new Message { Role = "assistant", Content = "Large response" },
            },
        };

        mock.Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(CreateAsyncEnumerable(expectedResponse));

        // Act
        var result = mock.Object.ChatAsync(request);
        var responses = new List<ChatResponseStream?>();
        await foreach (var response in result)
        {
            responses.Add(response);
        }

        // Assert
        responses.Should().HaveCount(1);
        responses[0]?.Message?.Content.Should().Be("Large response");
        mock.Verify(x => x.ChatAsync(request), Times.Once);
    }

    /// <summary>
    /// Wraps a materialized list in an <see cref="IAsyncEnumerable{T}"/> so mock
    /// setups can return a streaming response.
    /// </summary>
    /// <param name="items">The chunks (possibly null elements) to yield in order.</param>
    private static async IAsyncEnumerable<ChatResponseStream?> CreateAsyncEnumerable(
        List<ChatResponseStream?> items
    )
    {
        // Yield control once so this iterator is genuinely asynchronous; without an
        // await the compiler emits warning CS1998 and the sequence runs synchronously.
        await Task.Yield();

        foreach (var item in items)
        {
            yield return item;
        }
    }
}
|