add tests

Leonid Pershin
2025-10-17 05:47:18 +03:00
parent f7e3024e7e
commit 03eb0f22a2
41 changed files with 4001 additions and 30 deletions


@@ -0,0 +1,224 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using ChatBot.Models.Configuration;
using ChatBot.Models.Dto;
using ChatBot.Services;
using ChatBot.Services.Interfaces;
using ChatBot.Tests.TestUtilities;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using OllamaSharp.Models.Chat;
using Xunit;

namespace ChatBot.Tests.Services;
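
// Unit tests for HistoryCompressionService: the ShouldCompress threshold check and
// CompressHistoryAsync, which summarizes older history via Ollama and falls back when the model call fails.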
public class HistoryCompressionServiceTests : UnitTestBase
{
private readonly Mock<ILogger<HistoryCompressionService>> _loggerMock;
private readonly Mock<IOllamaClient> _ollamaClientMock;
private readonly AISettings _aiSettings;
private readonly HistoryCompressionService _compressionService;

public HistoryCompressionServiceTests()
{
_loggerMock = TestDataBuilder.Mocks.CreateLoggerMock<HistoryCompressionService>();
_ollamaClientMock = TestDataBuilder.Mocks.CreateOllamaClientMock();
_aiSettings = TestDataBuilder.Configurations.CreateAISettings();
var optionsMock = TestDataBuilder.Mocks.CreateOptionsMock(_aiSettings);
_compressionService = new HistoryCompressionService(
_loggerMock.Object,
optionsMock.Object,
_ollamaClientMock.Object
);
}

[Fact]
public void ShouldCompress_ShouldReturnTrue_WhenMessageCountExceedsThreshold()
{
// Arrange
var messageCount = 15;
var threshold = 10;
// Act
var result = _compressionService.ShouldCompress(messageCount, threshold);
// Assert
result.Should().BeTrue();
}

[Fact]
public void ShouldCompress_ShouldReturnFalse_WhenMessageCountIsBelowThreshold()
{
// Arrange
var messageCount = 5;
var threshold = 10;
// Act
var result = _compressionService.ShouldCompress(messageCount, threshold);
// Assert
result.Should().BeFalse();
}

[Fact]
public void ShouldCompress_ShouldReturnFalse_WhenMessageCountEqualsThreshold()
{
// Arrange
var messageCount = 10;
var threshold = 10;
// Act
var result = _compressionService.ShouldCompress(messageCount, threshold);
// Assert
result.Should().BeFalse();
}

[Fact]
public async Task CompressHistoryAsync_ShouldReturnCompressedMessages_WhenSuccessful()
{
// Arrange
var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(10);
var targetCount = 5;
var expectedResponse = "Compressed summary of previous messages";
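// The mocked Ollama client streams back a single assistant chunk containing the summary text.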
_ollamaClientMock
.Setup(x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()))
.Returns(
TestDataBuilder.Mocks.CreateAsyncEnumerable(
new List<OllamaSharp.Models.Chat.ChatResponseStream>
{
new OllamaSharp.Models.Chat.ChatResponseStream
{
Message = new Message(ChatRole.Assistant, expectedResponse),
},
}
)
);
// Act
var result = await _compressionService.CompressHistoryAsync(messages, targetCount);
// Assert
result.Should().NotBeNull();
result.Should().HaveCount(7); // 2 compressed messages + 5 recent messages
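// "[Сжато:" is Russian for "[Compressed:", the prefix the service adds to the summarized part of the history.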
result.Should().Contain(m => m.Role == ChatRole.User && m.Content.Contains("[Сжато:"));
result.Should().Contain(m => m.Role == ChatRole.Assistant && m.Content.Contains("[Сжато:"));
result.Should().Contain(m => m.Role == ChatRole.User && m.Content == "User message 9");
result
.Should()
.Contain(m => m.Role == ChatRole.Assistant && m.Content == "Assistant response 9");
}

[Fact]
public async Task CompressHistoryAsync_ShouldFallbackToSimpleTrimming_WhenOllamaClientThrows()
{
// Arrange
var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(10);
var targetCount = 5;
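// Make the mocked Ollama stream throw as soon as it is enumerated, forcing the fallback path.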
_ollamaClientMock
.Setup(x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()))
.Returns(ThrowAsyncEnumerable(new Exception("Ollama client error")));
// Act
var result = await _compressionService.CompressHistoryAsync(messages, targetCount);
// Assert
result.Should().NotBeNull();
result.Should().HaveCount(7); // 2 compressed messages + 5 recent messages (exception is caught and handled)
result.Should().Contain(m => m.Role == ChatRole.User && m.Content.Contains("[Сжато:"));
result.Should().Contain(m => m.Role == ChatRole.Assistant && m.Content.Contains("[Сжато:"));
result.Should().Contain(m => m.Role == ChatRole.User && m.Content == "User message 9");
result
.Should()
.Contain(m => m.Role == ChatRole.Assistant && m.Content == "Assistant response 9");
}

[Fact]
public async Task CompressHistoryAsync_ShouldReturnOriginalMessages_WhenTargetCountIsGreaterThanOrEqual()
{
// Arrange
var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(5);
var targetCount = 10;
// Act
var result = await _compressionService.CompressHistoryAsync(messages, targetCount);
// Assert
result.Should().BeEquivalentTo(messages);
_ollamaClientMock.Verify(
x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()),
Times.Never
);
}

[Fact]
public async Task CompressHistoryAsync_ShouldHandleEmptyMessages()
{
// Arrange
var messages = new List<ChatMessage>();
var targetCount = 5;
// Act
var result = await _compressionService.CompressHistoryAsync(messages, targetCount);
// Assert
result.Should().BeEmpty();
_ollamaClientMock.Verify(
x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()),
Times.Never
);
}
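
// Helpers that produce an IAsyncEnumerable which throws as soon as enumeration starts,
// simulating a failing Ollama response stream.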
private static IAsyncEnumerable<OllamaSharp.Models.Chat.ChatResponseStream> ThrowAsyncEnumerable(
Exception exception
)
{
return new ThrowingAsyncEnumerable(exception);
}

private class ThrowingAsyncEnumerable
: IAsyncEnumerable<OllamaSharp.Models.Chat.ChatResponseStream>
{
private readonly Exception _exception;
public ThrowingAsyncEnumerable(Exception exception)
{
_exception = exception;
}
public IAsyncEnumerator<OllamaSharp.Models.Chat.ChatResponseStream> GetAsyncEnumerator(
CancellationToken cancellationToken = default
)
{
return new ThrowingAsyncEnumerator(_exception);
}
}

private class ThrowingAsyncEnumerator
: IAsyncEnumerator<OllamaSharp.Models.Chat.ChatResponseStream>
{
private readonly Exception _exception;
public ThrowingAsyncEnumerator(Exception exception)
{
_exception = exception;
}
public OllamaSharp.Models.Chat.ChatResponseStream Current =>
throw new InvalidOperationException();
public ValueTask DisposeAsync()
{
return ValueTask.CompletedTask;
}
public ValueTask<bool> MoveNextAsync()
{
throw _exception;
}
}
}
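
For orientation, the assertions above pin down a fairly specific contract: compression triggers only strictly above the threshold, the oldest messages are replaced by a user/assistant pair prefixed with "[Сжато:" ("[Compressed:"), the most recent targetCount messages are kept, and a failed model call degrades to a plain trim rather than an error. Below is a minimal sketch of that contract, not the repository's implementation; SketchMessage and the summarize delegate are placeholders standing in for the project's ChatMessage DTO and IOllamaClient.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

// Placeholder message shape for this sketch only.
public sealed record SketchMessage(string Role, string Content);

public static class HistoryCompressionSketch
{
    // Compress strictly above the threshold: 15 > 10 compresses, 10 == 10 and 5 < 10 do not.
    public static bool ShouldCompress(int messageCount, int threshold) => messageCount > threshold;

    public static async Task<List<SketchMessage>> CompressHistoryAsync(
        IReadOnlyList<SketchMessage> messages,
        int targetCount,
        Func<IReadOnlyList<SketchMessage>, Task<string>> summarize)
    {
        // Nothing to do for short or empty histories; the tests verify no model call happens here.
        if (messages.Count <= targetCount)
            return messages.ToList();

        var older = messages.Take(messages.Count - targetCount).ToList();
        var recent = messages.Skip(messages.Count - targetCount).ToList();

        string summary;
        try
        {
            summary = await summarize(older); // e.g. a chat call that returns a summary of the older messages
        }
        catch (Exception)
        {
            // Fallback: keep the same result shape but without an AI-generated summary.
            summary = $"{older.Count} earlier messages trimmed";
        }

        // Only the "[Сжато:" prefix is asserted by the tests; the wording after it is illustrative.
        var result = new List<SketchMessage>
        {
            new("user", $"[Сжато: {summary}]"),
            new("assistant", $"[Сжато: {summary}]"),
        };
        result.AddRange(recent);
        return result;
    }
}

The fallback branch mirrors the second compression test: when the summarizer throws, only the summary text changes, not the shape of the returned history.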