add tests
This commit is contained in:
224
ChatBot.Tests/Services/HistoryCompressionServiceTests.cs
Normal file
224
ChatBot.Tests/Services/HistoryCompressionServiceTests.cs
Normal file
@@ -0,0 +1,224 @@
|
||||
using System.Linq;
using ChatBot.Models.Configuration;
using ChatBot.Models.Dto;
using ChatBot.Services;
using ChatBot.Services.Interfaces;
using ChatBot.Tests.TestUtilities;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using OllamaSharp.Models.Chat;

namespace ChatBot.Tests.Services;

/// <summary>
/// Unit tests for <see cref="HistoryCompressionService"/>: the threshold check
/// (<c>ShouldCompress</c>), LLM-backed history compression, the fallback path when
/// the Ollama client faults, and the short-circuit cases (small/empty histories).
/// </summary>
public class HistoryCompressionServiceTests : UnitTestBase
{
    private readonly Mock<ILogger<HistoryCompressionService>> _loggerMock;
    private readonly Mock<IOllamaClient> _ollamaClientMock;
    private readonly AISettings _aiSettings;
    private readonly HistoryCompressionService _compressionService;

    public HistoryCompressionServiceTests()
    {
        _loggerMock = TestDataBuilder.Mocks.CreateLoggerMock<HistoryCompressionService>();
        _ollamaClientMock = TestDataBuilder.Mocks.CreateOllamaClientMock();
        _aiSettings = TestDataBuilder.Configurations.CreateAISettings();

        var optionsMock = TestDataBuilder.Mocks.CreateOptionsMock(_aiSettings);

        _compressionService = new HistoryCompressionService(
            _loggerMock.Object,
            optionsMock.Object,
            _ollamaClientMock.Object
        );
    }

    // Boundary behavior: compression triggers only when the message count is
    // STRICTLY greater than the threshold (count == threshold must not compress).
    [Theory]
    [InlineData(15, 10, true)] // above threshold -> compress
    [InlineData(5, 10, false)] // below threshold -> keep as-is
    [InlineData(10, 10, false)] // exactly at threshold -> keep as-is (strict comparison)
    public void ShouldCompress_ShouldCompareMessageCountStrictlyAgainstThreshold(
        int messageCount,
        int threshold,
        bool expected
    )
    {
        // Act
        var result = _compressionService.ShouldCompress(messageCount, threshold);

        // Assert
        result.Should().Be(expected);
    }

    [Fact]
    public async Task CompressHistoryAsync_ShouldReturnCompressedMessages_WhenSuccessful()
    {
        // Arrange: 10-message history, compress down to the 5 most recent.
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(10);
        var targetCount = 5;
        var expectedResponse = "Compressed summary of previous messages";

        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(
                TestDataBuilder.Mocks.CreateAsyncEnumerable(
                    new List<ChatResponseStream>
                    {
                        new ChatResponseStream
                        {
                            Message = new Message(ChatRole.Assistant, expectedResponse),
                        },
                    }
                )
            );

        // Act
        var result = await _compressionService.CompressHistoryAsync(messages, targetCount);

        // Assert: summary pair ("[Сжато:" markers) followed by the untouched tail.
        result.Should().NotBeNull();
        result.Should().HaveCount(7); // 2 compressed messages + 5 recent messages
        result.Should().Contain(m => m.Role == ChatRole.User && m.Content.Contains("[Сжато:"));
        result.Should().Contain(m => m.Role == ChatRole.Assistant && m.Content.Contains("[Сжато:"));
        result.Should().Contain(m => m.Role == ChatRole.User && m.Content == "User message 9");
        result
            .Should()
            .Contain(m => m.Role == ChatRole.Assistant && m.Content == "Assistant response 9");
    }

    [Fact]
    public async Task CompressHistoryAsync_ShouldFallbackToSimpleTrimming_WhenOllamaClientThrows()
    {
        // Arrange: the client stream faults on first advance; the service must
        // catch the exception and fall back to simple trimming.
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(10);
        var targetCount = 5;

        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(ThrowAsyncEnumerable(new Exception("Ollama client error")));

        // Act
        var result = await _compressionService.CompressHistoryAsync(messages, targetCount);

        // Assert
        result.Should().NotBeNull();
        result.Should().HaveCount(7); // 2 compressed messages + 5 recent messages (exception is caught and handled)
        result.Should().Contain(m => m.Role == ChatRole.User && m.Content.Contains("[Сжато:"));
        result.Should().Contain(m => m.Role == ChatRole.Assistant && m.Content.Contains("[Сжато:"));
        result.Should().Contain(m => m.Role == ChatRole.User && m.Content == "User message 9");
        result
            .Should()
            .Contain(m => m.Role == ChatRole.Assistant && m.Content == "Assistant response 9");
    }

    [Fact]
    public async Task CompressHistoryAsync_ShouldReturnOriginalMessages_WhenTargetCountIsGreaterThanOrEqual()
    {
        // Arrange: history already fits within the target — no LLM call expected.
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(5);
        var targetCount = 10;

        // Act
        var result = await _compressionService.CompressHistoryAsync(messages, targetCount);

        // Assert
        result.Should().BeEquivalentTo(messages);
        _ollamaClientMock.Verify(x => x.ChatAsync(It.IsAny<ChatRequest>()), Times.Never);
    }

    [Fact]
    public async Task CompressHistoryAsync_ShouldHandleEmptyMessages()
    {
        // Arrange
        var messages = new List<ChatMessage>();
        var targetCount = 5;

        // Act
        var result = await _compressionService.CompressHistoryAsync(messages, targetCount);

        // Assert: empty in, empty out — and no LLM round-trip.
        result.Should().BeEmpty();
        _ollamaClientMock.Verify(x => x.ChatAsync(It.IsAny<ChatRequest>()), Times.Never);
    }

    /// <summary>
    /// Builds an <see cref="IAsyncEnumerable{T}"/> that throws
    /// <paramref name="exception"/> on the first <c>MoveNextAsync</c>, simulating a
    /// faulting Ollama response stream. Replaces the previous hand-rolled
    /// enumerable/enumerator class pair with a C# 8 async iterator.
    /// </summary>
    private static async IAsyncEnumerable<ChatResponseStream> ThrowAsyncEnumerable(
        Exception exception
    )
    {
        // Yield once so the fault surfaces asynchronously, like a real stream.
        await Task.Yield();

        if (exception is not null)
        {
            throw exception;
        }

        // Required so the compiler treats this method as an async iterator;
        // never reached when an exception is supplied.
        yield break;
    }
}
|
||||
Reference in New Issue
Block a user