using System.Linq;
using ChatBot.Models.Configuration;
using ChatBot.Models.Dto;
using ChatBot.Services;
using ChatBot.Services.Interfaces;
using ChatBot.Tests.TestUtilities;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using OllamaSharp.Models.Chat;

namespace ChatBot.Tests.Services;

public class HistoryCompressionServiceTests : UnitTestBase
{
    private readonly Mock<ILogger<HistoryCompressionService>> _loggerMock;

    // NOTE: the generic type arguments in this file were lost in the original markup and are
    // restored from context. IOllamaClientService is an assumed interface name (expected to live
    // in ChatBot.Services.Interfaces), with ChatAsync assumed to take a single ChatRequest.
    private readonly Mock<IOllamaClientService> _ollamaClientMock;
    private readonly AISettings _aiSettings;
    private readonly HistoryCompressionService _compressionService;

    public HistoryCompressionServiceTests()
    {
        _loggerMock = TestDataBuilder.Mocks.CreateLoggerMock();
        _ollamaClientMock = TestDataBuilder.Mocks.CreateOllamaClientMock();
        _aiSettings = TestDataBuilder.Configurations.CreateAISettings();
        var optionsMock = TestDataBuilder.Mocks.CreateOptionsMock(_aiSettings);

        _compressionService = new HistoryCompressionService(
            _loggerMock.Object,
            optionsMock.Object,
            _ollamaClientMock.Object
        );
    }

    [Fact]
    public void ShouldCompress_ShouldReturnTrue_WhenMessageCountExceedsThreshold()
    {
        // Arrange
        var messageCount = 15;
        var threshold = 10;

        // Act
        var result = _compressionService.ShouldCompress(messageCount, threshold);

        // Assert
        result.Should().BeTrue();
    }

    [Fact]
    public void ShouldCompress_ShouldReturnFalse_WhenMessageCountIsBelowThreshold()
    {
        // Arrange
        var messageCount = 5;
        var threshold = 10;

        // Act
        var result = _compressionService.ShouldCompress(messageCount, threshold);

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public void ShouldCompress_ShouldReturnFalse_WhenMessageCountEqualsThreshold()
    {
        // Arrange
        var messageCount = 10;
        var threshold = 10;

        // Act
        var result = _compressionService.ShouldCompress(messageCount, threshold);

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public async Task CompressHistoryAsync_ShouldReturnCompressedMessages_WhenSuccessful()
    {
        // Arrange
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(10);
        var targetCount = 5;
        var expectedResponse = "Compressed summary of previous messages";

        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(
                TestDataBuilder.Mocks.CreateAsyncEnumerable(
                    new List<OllamaSharp.Models.Chat.ChatResponseStream>
                    {
                        new OllamaSharp.Models.Chat.ChatResponseStream
                        {
                            Message = new Message(ChatRole.Assistant, expectedResponse),
                        },
                    }
                )
            );

        // Act
        var result = await _compressionService.CompressHistoryAsync(messages, targetCount);

        // Assert
        result.Should().NotBeNull();
        result.Should().HaveCount(7); // 2 compressed messages + 5 recent messages
        // "[Сжато:" is the "[Compressed:" prefix the service prepends to summarized history.
        result.Should().Contain(m => m.Role == ChatRole.User && m.Content.Contains("[Сжато:"));
        result.Should().Contain(m => m.Role == ChatRole.Assistant && m.Content.Contains("[Сжато:"));
        result.Should().Contain(m => m.Role == ChatRole.User && m.Content == "User message 9");
        result
            .Should()
            .Contain(m => m.Role == ChatRole.Assistant && m.Content == "Assistant response 9");
    }

    [Fact]
    public async Task CompressHistoryAsync_ShouldFallbackToSimpleTrimming_WhenOllamaClientThrows()
    {
        // Arrange
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(10);
        var targetCount = 5;

        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(ThrowAsyncEnumerable(new Exception("Ollama client error")));

        // Act
        var result = await _compressionService.CompressHistoryAsync(messages, targetCount);

        // Assert
        result.Should().NotBeNull();
        result.Should().HaveCount(7); // 2 compressed messages + 5 recent messages (exception is caught and handled)
        result.Should().Contain(m => m.Role == ChatRole.User && m.Content.Contains("[Сжато:"));
        result.Should().Contain(m => m.Role == ChatRole.Assistant && m.Content.Contains("[Сжато:"));
        result.Should().Contain(m => m.Role == ChatRole.User && m.Content == "User message 9");
        result
            .Should()
            .Contain(m => m.Role == ChatRole.Assistant && m.Content == "Assistant response 9");
    }

    [Fact]
    public async Task CompressHistoryAsync_ShouldReturnOriginalMessages_WhenTargetCountIsGreaterThanOrEqual()
    {
        // Arrange
        var messages = TestDataBuilder.ChatMessages.CreateMessageHistory(5);
        var targetCount = 10;

        // Act
        var result = await _compressionService.CompressHistoryAsync(messages, targetCount);

        // Assert
        result.Should().BeEquivalentTo(messages);
        _ollamaClientMock.Verify(
            x => x.ChatAsync(It.IsAny<ChatRequest>()),
            Times.Never
        );
    }

    [Fact]
    public async Task CompressHistoryAsync_ShouldHandleEmptyMessages()
    {
        // Arrange
        // Message element type assumed from the Role/Content assertions in the tests above.
        var messages = new List<Message>();
        var targetCount = 5;

        // Act
        var result = await _compressionService.CompressHistoryAsync(messages, targetCount);

        // Assert
        result.Should().BeEmpty();
        _ollamaClientMock.Verify(
            x => x.ChatAsync(It.IsAny<ChatRequest>()),
            Times.Never
        );
    }

    private static IAsyncEnumerable<OllamaSharp.Models.Chat.ChatResponseStream> ThrowAsyncEnumerable(
        Exception exception
    )
    {
        return new ThrowingAsyncEnumerable(exception);
    }

    // Async enumerable that throws on the first MoveNextAsync, used to simulate a
    // streaming failure from the Ollama client.
    private class ThrowingAsyncEnumerable : IAsyncEnumerable<OllamaSharp.Models.Chat.ChatResponseStream>
    {
        private readonly Exception _exception;

        public ThrowingAsyncEnumerable(Exception exception)
        {
            _exception = exception;
        }

        public IAsyncEnumerator<OllamaSharp.Models.Chat.ChatResponseStream> GetAsyncEnumerator(
            CancellationToken cancellationToken = default
        )
        {
            return new ThrowingAsyncEnumerator(_exception);
        }
    }

    private class ThrowingAsyncEnumerator : IAsyncEnumerator<OllamaSharp.Models.Chat.ChatResponseStream>
    {
        private readonly Exception _exception;

        public ThrowingAsyncEnumerator(Exception exception)
        {
            _exception = exception;
        }

        public OllamaSharp.Models.Chat.ChatResponseStream Current =>
            throw new InvalidOperationException();

        public ValueTask DisposeAsync()
        {
            return ValueTask.CompletedTask;
        }

        public ValueTask<bool> MoveNextAsync()
        {
            throw _exception;
        }
    }
}