// StatusCommandTests.cs — unit tests for the Telegram /status command.
using ChatBot.Models.Configuration;
using ChatBot.Services;
using ChatBot.Services.Interfaces;
using ChatBot.Services.Telegram.Commands;
using ChatBot.Tests.TestUtilities;
using FluentAssertions;
using Microsoft.Extensions.Options;
using Moq;
using OllamaSharp.Models.Chat;

namespace ChatBot.Tests.Telegram.Commands;

/// <summary>
/// Unit tests for <see cref="StatusCommand"/>: healthy-path status reporting,
/// Ollama transport failures, specific HTTP error codes (429/500/502/503/504),
/// request timeouts, empty model responses, session info rendering, and the
/// command's name/description metadata.
/// </summary>
public class StatusCommandTests : UnitTestBase
{
    private readonly Mock<IOptions<OllamaSettings>> _ollamaOptionsMock;
    private readonly Mock<IOllamaClient> _ollamaClientMock;
    private readonly StatusCommand _statusCommand;

    public StatusCommandTests()
    {
        var ollamaSettings = TestDataBuilder.Configurations.CreateOllamaSettings();
        _ollamaOptionsMock = TestDataBuilder.Mocks.CreateOptionsMock(ollamaSettings);
        _ollamaClientMock = TestDataBuilder.Mocks.CreateOllamaClientMock();

        var aiSettingsMock = TestDataBuilder.Mocks.CreateOptionsMock(new AISettings());

        _statusCommand = new StatusCommand(
            CreateChatServiceMock().Object,
            CreateModelServiceMock().Object,
            aiSettingsMock.Object,
            _ollamaClientMock.Object
        );
    }

    // ---------- helpers (shared arrange code, extracted to avoid 10x duplication) ----------

    /// <summary>Builds a standard private-chat command context used by every test.</summary>
    private static TelegramCommandContext CreateContext() =>
        new()
        {
            ChatId = 12345,
            Username = "testuser",
            MessageText = "/status",
            ChatType = "private",
            ChatTitle = "Test Chat",
        };

    /// <summary>Creates a <see cref="ChatService"/> mock wired with default collaborator mocks.</summary>
    private static Mock<ChatService> CreateChatServiceMock() =>
        new(
            TestDataBuilder.Mocks.CreateLoggerMock<ChatService>().Object,
            TestDataBuilder.Mocks.CreateAIServiceMock().Object,
            TestDataBuilder.Mocks.CreateSessionStorageMock().Object,
            TestDataBuilder
                .Mocks.CreateOptionsMock(TestDataBuilder.Configurations.CreateAISettings())
                .Object,
            TestDataBuilder.Mocks.CreateCompressionServiceMock().Object
        );

    /// <summary>Creates a <see cref="ModelService"/> mock bound to the shared Ollama options.</summary>
    private Mock<ModelService> CreateModelServiceMock() =>
        new(
            TestDataBuilder.Mocks.CreateLoggerMock<ModelService>().Object,
            _ollamaOptionsMock.Object
        );

    /// <summary>
    /// Makes the Ollama client mock stream a single assistant message with the
    /// given text, simulating a healthy model response.
    /// </summary>
    private void SetupOllamaResponse(string text) =>
        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(
                TestDataBuilder.Mocks.CreateAsyncEnumerable(
                    new List<ChatResponseStream>
                    {
                        new() { Message = new Message(ChatRole.Assistant, text) },
                    }
                )
            );

    /// <summary>
    /// Makes the Ollama client mock throw an <see cref="HttpRequestException"/>
    /// whose message carries the given HTTP status code, mirroring the message
    /// format produced by <c>HttpResponseMessage.EnsureSuccessStatusCode</c>.
    /// </summary>
    private void SetupOllamaHttpError(string statusCode) =>
        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Throws(
                new HttpRequestException(
                    $"Response status code does not indicate success: {statusCode}"
                )
            );

    // ---------- tests ----------

    [Fact]
    public async Task ExecuteAsync_ShouldReturnStatusMessage_WhenBothServicesAreHealthy()
    {
        // Arrange
        var context = CreateContext();
        SetupOllamaResponse("Test response");

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("Статус");
        result.Should().Contain("API");
        result.Should().Contain("системы");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnErrorStatus_WhenOllamaIsUnavailable()
    {
        // Arrange: any chat attempt against Ollama fails outright.
        var context = CreateContext();
        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Throws(new Exception("Ollama unavailable"));

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("Статус");
        result.Should().Contain("API");
        result.Should().Contain("Ошибка");
    }

    // NOTE(review): despite the name, this test never arranges a Telegram failure —
    // it mocks a healthy Ollama response and asserts "Доступен". Either the name
    // or the arrange step should be revisited; behavior is kept as-is here.
    [Fact]
    public async Task ExecuteAsync_ShouldReturnErrorStatus_WhenTelegramIsUnavailable()
    {
        // Arrange
        var context = CreateContext();
        SetupOllamaResponse("Test response");

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("Статус");
        result.Should().Contain("системы");
        result.Should().Contain("Доступен");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnTimeoutStatus_WhenRequestTimesOut()
    {
        // Arrange: a cancelled task is how HttpClient surfaces a timeout.
        var context = CreateContext();
        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Throws<TaskCanceledException>();

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("Таймаут");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnHttpError502_WhenBadGateway()
    {
        // Arrange
        var context = CreateContext();
        SetupOllamaHttpError("502");

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("502");
        result.Should().Contain("Bad Gateway");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnHttpError503_WhenServiceUnavailable()
    {
        // Arrange
        var context = CreateContext();
        SetupOllamaHttpError("503");

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("503");
        result.Should().Contain("Service Unavailable");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnHttpError504_WhenGatewayTimeout()
    {
        // Arrange
        var context = CreateContext();
        SetupOllamaHttpError("504");

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("504");
        result.Should().Contain("Gateway Timeout");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnHttpError429_WhenTooManyRequests()
    {
        // Arrange
        var context = CreateContext();
        SetupOllamaHttpError("429");

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("429");
        result.Should().Contain("Too Many Requests");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnHttpError500_WhenInternalServerError()
    {
        // Arrange
        var context = CreateContext();
        SetupOllamaHttpError("500");

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("500");
        result.Should().Contain("Internal Server Error");
    }

    [Fact]
    public async Task ExecuteAsync_ShouldReturnNoResponseStatus_WhenResponseIsEmpty()
    {
        // Arrange: the stream yields a chunk whose Message is null,
        // i.e. the model produced no usable content.
        var context = CreateContext();
        _ollamaClientMock
            .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>()))
            .Returns(
                TestDataBuilder.Mocks.CreateAsyncEnumerable(
                    new List<ChatResponseStream> { new() { Message = null! } }
                )
            );

        // Act
        var result = await _statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("Нет ответа");
    }

    [Fact]
    public async Task ExecuteAsync_WithSession_ShouldShowSessionInfo()
    {
        // Arrange: a chat service with an existing session holding one user message,
        // so the status output must include the session block.
        var chatServiceMock = CreateChatServiceMock();
        var session = TestDataBuilder.ChatSessions.CreateBasicSession(12345, "private");
        session.AddUserMessage("Test", "user");
        chatServiceMock.Setup(x => x.GetSessionAsync(12345)).ReturnsAsync(session);

        var statusCommand = new StatusCommand(
            chatServiceMock.Object,
            CreateModelServiceMock().Object,
            TestDataBuilder.Mocks.CreateOptionsMock(new AISettings()).Object,
            _ollamaClientMock.Object
        );

        var context = CreateContext();
        SetupOllamaResponse("Test");

        // Act
        var result = await statusCommand.ExecuteAsync(context);

        // Assert
        result.Should().NotBeNull();
        result.Should().Contain("Сессия");
        result.Should().Contain("Сообщений в истории");
    }

    [Fact]
    public void CommandName_ShouldReturnCorrectName()
    {
        // Act & Assert
        _statusCommand.CommandName.Should().Be("/status");
    }

    [Fact]
    public void Description_ShouldReturnCorrectDescription()
    {
        // Act & Assert
        _statusCommand.Description.Should().Be("Показать статус системы и API");
    }
}