using ChatBot.Models.Configuration;
using ChatBot.Services;
using ChatBot.Services.Interfaces;
using ChatBot.Services.Telegram.Commands;
using ChatBot.Tests.TestUtilities;
using FluentAssertions;
using Microsoft.Extensions.Options;
using Moq;
using OllamaSharp.Models.Chat;

namespace ChatBot.Tests.Telegram.Commands;

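/// <summary>
/// Unit tests for <see cref="StatusCommand"/>: verify the /status reply when the
/// Ollama backend responds normally and when it is unavailable.
/// </summary>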
public class StatusCommandTests : UnitTestBase
{
    private readonly Mock<IOptions<OllamaSettings>> _ollamaOptionsMock;
    private readonly Mock<IOllamaClient> _ollamaClientMock;
    private readonly StatusCommand _statusCommand;

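    // ChatService and ModelService are mocked as concrete classes, so Moq can only
    // override their virtual members; the remaining dependencies come from the shared
    // TestDataBuilder helpers.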
    public StatusCommandTests()
    {
        var ollamaSettings = TestDataBuilder.Configurations.CreateOllamaSettings();
        _ollamaOptionsMock = TestDataBuilder.Mocks.CreateOptionsMock(ollamaSettings);

        _ollamaClientMock = TestDataBuilder.Mocks.CreateOllamaClientMock();

        var chatServiceMock = new Mock<ChatService>(
            TestDataBuilder.Mocks.CreateLoggerMock<ChatService>().Object,
            TestDataBuilder.Mocks.CreateAIServiceMock().Object,
            TestDataBuilder.Mocks.CreateSessionStorageMock().Object,
            TestDataBuilder.Mocks
                .CreateOptionsMock(TestDataBuilder.Configurations.CreateAISettings())
                .Object,
            TestDataBuilder.Mocks.CreateCompressionServiceMock().Object
        );
        var modelServiceMock = new Mock<ModelService>(
            TestDataBuilder.Mocks.CreateLoggerMock<ModelService>().Object,
            _ollamaOptionsMock.Object
        );
        var aiSettingsMock = TestDataBuilder.Mocks.CreateOptionsMock(new AISettings());

        _statusCommand = new StatusCommand(
            chatServiceMock.Object,
            modelServiceMock.Object,
            aiSettingsMock.Object,
            _ollamaClientMock.Object
        );
    }

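    // The /status reply is localized in Russian, hence the Russian substrings
    // ("Статус", "Ошибка", "Доступен") asserted below.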
    [Fact]
    public async Task ExecuteAsync_ShouldReturnStatusMessage_WhenBothServicesAreHealthy()
    {
        // Arrange
        var context = new TelegramCommandContext
        {
            ChatId = 12345,
            Username = "testuser",
            MessageText = "/status",
            ChatType = "private",
            ChatTitle = "Test Chat",
        };

        // Mock a healthy Ollama: ChatAsync streams back a single assistant message.
        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()))
            .Returns(
                TestDataBuilder.Mocks.CreateAsyncEnumerable(
                    new List<OllamaSharp.Models.Chat.ChatResponseStream>
                    {
                        new OllamaSharp.Models.Chat.ChatResponseStream
                        {
                            Message = new OllamaSharp.Models.Chat.Message(
                                ChatRole.Assistant,
                                "Test response"
                            ),
                        },
                    }
                )
            );

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("Статус");
        result.Should().Contain("API");
        result.Should().Contain("системы");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnErrorStatus_WhenOllamaIsUnavailable()
    {
        // Arrange
        var context = new TelegramCommandContext
        {
            ChatId = 12345,
            Username = "testuser",
            MessageText = "/status",
            ChatType = "private",
            ChatTitle = "Test Chat",
        };

        // Mock an Ollama outage: any ChatAsync call throws.
        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()))
            .Throws(new Exception("Ollama unavailable"));

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("Статус");
        result.Should().Contain("API");
        result.Should().Contain("Ошибка");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnErrorStatus_WhenTelegramIsUnavailable()
    {
        // Arrange
        var context = new TelegramCommandContext
        {
            ChatId = 12345,
            Username = "testuser",
            MessageText = "/status",
            ChatType = "private",
            ChatTitle = "Test Chat",
        };

        // Ollama itself responds normally in this scenario.
        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<OllamaSharp.Models.Chat.ChatRequest>()))
            .Returns(
                TestDataBuilder.Mocks.CreateAsyncEnumerable(
                    new List<OllamaSharp.Models.Chat.ChatResponseStream>
                    {
                        new OllamaSharp.Models.Chat.ChatResponseStream
                        {
                            Message = new OllamaSharp.Models.Chat.Message(
                                ChatRole.Assistant,
                                "Test response"
                            ),
                        },
                    }
                )
            );

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("Статус");
        result.Should().Contain("системы");
        result.Should().Contain("Доступен");
    }
}