using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using ChatBot.Common.Constants;
using ChatBot.Models.Configuration;
using ChatBot.Models.Dto;
using ChatBot.Services;
using ChatBot.Services.Interfaces;
using ChatBot.Tests.TestUtilities;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using OllamaSharp.Models.Chat;
using Xunit;

namespace ChatBot.Tests.Services;

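/// <summary>
/// Unit tests for <see cref="AIService"/>: plain chat completion, the error
/// fallback path, and the optional history-compression flow, with every
/// collaborator mocked out.
/// </summary>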
public class AIServiceTests : UnitTestBase
{
    private readonly Mock<ILogger<AIService>> _loggerMock;
    private readonly Mock<ModelService> _modelServiceMock;
    private readonly Mock<IOllamaClient> _ollamaClientMock;
    private readonly Mock<SystemPromptService> _systemPromptServiceMock;
    private readonly Mock<IHistoryCompressionService> _compressionServiceMock;
    private readonly AISettings _aiSettings;
    private readonly AIService _aiService;

    public AIServiceTests()
    {
        _loggerMock = TestDataBuilder.Mocks.CreateLoggerMock<AIService>();
        var ollamaSettings = TestDataBuilder.Configurations.CreateOllamaSettings();
        var ollamaOptionsMock = TestDataBuilder.Mocks.CreateOptionsMock(ollamaSettings);
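
        // ModelService and SystemPromptService are mocked as concrete classes,
        // which only works if the members stubbed later (GetCurrentModel,
        // GetSystemPromptAsync) are virtual; Moq cannot intercept non-virtual members.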
        _modelServiceMock = new Mock<ModelService>(
            Mock.Of<ILogger<ModelService>>(),
            ollamaOptionsMock.Object
        );
        _ollamaClientMock = TestDataBuilder.Mocks.CreateOllamaClientMock();
        _systemPromptServiceMock = new Mock<SystemPromptService>(
            Mock.Of<ILogger<SystemPromptService>>(),
            TestDataBuilder
                .Mocks.CreateOptionsMock(TestDataBuilder.Configurations.CreateAISettings())
                .Object
        );
        _compressionServiceMock = TestDataBuilder.Mocks.CreateCompressionServiceMock();
        _aiSettings = TestDataBuilder.Configurations.CreateAISettings();

        var optionsMock = TestDataBuilder.Mocks.CreateOptionsMock(_aiSettings);

        _aiService = new AIService(
            _loggerMock.Object,
            _modelServiceMock.Object,
            _ollamaClientMock.Object,
            optionsMock.Object,
            _systemPromptServiceMock.Object,
            _compressionServiceMock.Object
        );
    }

    [Fact]
    public async Task GenerateChatCompletionAsync_ShouldReturnResponse_WhenSuccessful()
    {
        // Arrange
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(2);
        var expectedResponse = "Test AI response";
        var model = "llama3.2";

        _modelServiceMock.Setup(x => x.GetCurrentModel()).Returns(model);
        _systemPromptServiceMock.Setup(x => x.GetSystemPromptAsync()).ReturnsAsync("System prompt");

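        // A single streamed chunk suffices: the service aggregates the
        // ChatResponseStream messages it receives into the final reply string.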
        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()))
            .Returns(
                TestDataBuilder.Mocks.CreateAsyncEnumerable(
                    new List<OllamaSharp.Models.Chat.ChatResponseStream>
                    {
                        new OllamaSharp.Models.Chat.ChatResponseStream
                        {
                            Message = new Message(ChatRole.Assistant, expectedResponse),
                        },
                    }
                )
            );

        // Act
        var result = await _aiService.GenerateChatCompletionAsync(messages);

        // Assert
        result.Should().Be(expectedResponse);
        _ollamaClientMock.Verify(
            x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()),
            Times.Once
        );
    }

    [Fact]
    public async Task GenerateChatCompletionAsync_ShouldReturnDefaultErrorMessage_WhenOllamaClientThrows()
    {
        // Arrange
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(2);
        var model = "llama3.2";

        _modelServiceMock.Setup(x => x.GetCurrentModel()).Returns(model);
        _systemPromptServiceMock.Setup(x => x.GetSystemPromptAsync()).ReturnsAsync("System prompt");

        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()))
            .Throws(new Exception("Ollama client error"));
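
        // The service catches client failures instead of propagating them, so
        // callers receive the configured fallback message rather than an exception.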

        // Act
        var result = await _aiService.GenerateChatCompletionAsync(messages);

        // Assert
        result.Should().Be(AIResponseConstants.DefaultErrorMessage);
    }

    [Fact]
    public async Task GenerateChatCompletionWithCompressionAsync_ShouldUseCompression_WhenEnabled()
    {
        // Arrange
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(10);
        var expectedResponse = "Test AI response with compression";
        var model = "llama3.2";

        _modelServiceMock.Setup(x => x.GetCurrentModel()).Returns(model);
        _systemPromptServiceMock.Setup(x => x.GetSystemPromptAsync()).ReturnsAsync("System prompt");
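
        // The literals 20, 10, and 5 mirror the test data builder:
        // CreateMessageHistory(10) appears to produce 20 messages (10 user/assistant
        // pairs), CreateAISettings is assumed to set a compression threshold of 10,
        // and 5 messages are kept after compression.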
        _compressionServiceMock.Setup(x => x.ShouldCompress(20, 10)).Returns(true);
        _compressionServiceMock
            .Setup(x =>
                x.CompressHistoryAsync(
                    It.IsAny<List<ChatMessage>>(),
                    5,
                    It.IsAny<CancellationToken>()
                )
            )
            .ReturnsAsync(messages.TakeLast(5).ToList());

        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()))
            .Returns(
                TestDataBuilder.Mocks.CreateAsyncEnumerable(
                    new List<OllamaSharp.Models.Chat.ChatResponseStream>
                    {
                        new OllamaSharp.Models.Chat.ChatResponseStream
                        {
                            Message = new Message(ChatRole.Assistant, expectedResponse),
                        },
                    }
                )
            );

        // Act
        var result = await _aiService.GenerateChatCompletionWithCompressionAsync(messages);

        // Assert
        result.Should().Be(expectedResponse);
        _compressionServiceMock.Verify(x => x.ShouldCompress(20, 10), Times.Once);
        _compressionServiceMock.Verify(
            x =>
                x.CompressHistoryAsync(
                    It.IsAny<List<ChatMessage>>(),
                    5,
                    It.IsAny<CancellationToken>()
                ),
            Times.Once
        );
    }

    [Fact]
    public async Task GenerateChatCompletionWithCompressionAsync_ShouldNotUseCompression_WhenNotNeeded()
    {
        // Arrange
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(3);
        var expectedResponse = "Test AI response without compression";
        var model = "llama3.2";

        _modelServiceMock.Setup(x => x.GetCurrentModel()).Returns(model);
        _systemPromptServiceMock.Setup(x => x.GetSystemPromptAsync()).ReturnsAsync("System prompt");
        _compressionServiceMock.Setup(x => x.ShouldCompress(6, 10)).Returns(false);
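
        // Six messages (3 pairs) fall below the assumed threshold of 10, so the
        // history should be passed through to the client uncompressed.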

        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()))
            .Returns(
                TestDataBuilder.Mocks.CreateAsyncEnumerable(
                    new List<OllamaSharp.Models.Chat.ChatResponseStream>
                    {
                        new OllamaSharp.Models.Chat.ChatResponseStream
                        {
                            Message = new Message(ChatRole.Assistant, expectedResponse),
                        },
                    }
                )
            );

        // Act
        var result = await _aiService.GenerateChatCompletionWithCompressionAsync(messages);

        // Assert
        result.Should().Be(expectedResponse);
        _compressionServiceMock.Verify(x => x.ShouldCompress(6, 10), Times.Once);
        _compressionServiceMock.Verify(
            x =>
                x.CompressHistoryAsync(
                    It.IsAny<List<ChatMessage>>(),
                    It.IsAny<int>(),
                    It.IsAny<CancellationToken>()
                ),
            Times.Never
        );
    }
}