diff --git a/ChatBot/ChatBot.csproj b/ChatBot/ChatBot.csproj
index 29a43fe..851424a 100644
--- a/ChatBot/ChatBot.csproj
+++ b/ChatBot/ChatBot.csproj
@@ -7,7 +7,7 @@
-
+
@@ -17,6 +17,9 @@
+
+
+
diff --git a/ChatBot/Common/Constants/AIResponseConstants.cs b/ChatBot/Common/Constants/AIResponseConstants.cs
new file mode 100644
index 0000000..f43e12c
--- /dev/null
+++ b/ChatBot/Common/Constants/AIResponseConstants.cs
@@ -0,0 +1,19 @@
+namespace ChatBot.Common.Constants
+{
+ ///
+ /// Constants for AI response handling
+ ///
+ public static class AIResponseConstants
+ {
+ ///
+ /// Marker for empty AI responses that should be ignored
+ ///
+ public const string EmptyResponseMarker = "{empty}";
+
+ ///
+ /// Default error message for failed generation
+ ///
+ public const string DefaultErrorMessage =
+ "Извините, произошла ошибка при генерации ответа.";
+ }
+}
diff --git a/ChatBot/Common/Constants/ChatRoles.cs b/ChatBot/Common/Constants/ChatRoles.cs
new file mode 100644
index 0000000..e3a20f5
--- /dev/null
+++ b/ChatBot/Common/Constants/ChatRoles.cs
@@ -0,0 +1,12 @@
+namespace ChatBot.Common.Constants
+{
+ ///
+ /// Constants for chat message roles
+ ///
+ public static class ChatRoles
+ {
+ public const string System = "system";
+ public const string User = "user";
+ public const string Assistant = "assistant";
+ }
+}
diff --git a/ChatBot/Common/Constants/ChatTypes.cs b/ChatBot/Common/Constants/ChatTypes.cs
new file mode 100644
index 0000000..16ef31e
--- /dev/null
+++ b/ChatBot/Common/Constants/ChatTypes.cs
@@ -0,0 +1,13 @@
+namespace ChatBot.Common.Constants
+{
+ ///
+ /// Constants for chat types
+ ///
+ public static class ChatTypes
+ {
+ public const string Private = "private";
+ public const string Group = "group";
+ public const string SuperGroup = "supergroup";
+ public const string Channel = "channel";
+ }
+}
diff --git a/ChatBot/Common/Constants/RetryConstants.cs b/ChatBot/Common/Constants/RetryConstants.cs
new file mode 100644
index 0000000..bb8272d
--- /dev/null
+++ b/ChatBot/Common/Constants/RetryConstants.cs
@@ -0,0 +1,12 @@
+namespace ChatBot.Common.Constants
+{
+ ///
+ /// Constants for retry logic
+ ///
+ public static class RetryConstants
+ {
+ public const int DefaultMaxRetries = 3;
+ public const int DefaultBaseDelaySeconds = 1;
+ public const int DefaultMaxJitterMs = 2000;
+ }
+}
diff --git a/ChatBot/Common/Results/Result.cs b/ChatBot/Common/Results/Result.cs
new file mode 100644
index 0000000..4f498f8
--- /dev/null
+++ b/ChatBot/Common/Results/Result.cs
@@ -0,0 +1,39 @@
+namespace ChatBot.Common.Results
+{
+ ///
+ /// Represents the result of an operation that can succeed or fail
+ ///
+ public class Result
+ {
+ public bool IsSuccess { get; }
+ public string Error { get; }
+
+ protected Result(bool isSuccess, string error)
+ {
+ IsSuccess = isSuccess;
+ Error = error;
+ }
+
+ public static Result Success() => new(true, string.Empty);
+
+ public static Result Failure(string error) => new(false, error);
+ }
+
+ ///
+ /// Represents the result of an operation that returns a value
+ ///
+ public class Result : Result
+ {
+ public T? Value { get; }
+
+ private Result(T? value, bool isSuccess, string error)
+ : base(isSuccess, error)
+ {
+ Value = value;
+ }
+
+ public static Result Success(T value) => new(value, true, string.Empty);
+
+ public static new Result Failure(string error) => new(default, false, error);
+ }
+}
diff --git a/ChatBot/Models/AvailableModels.cs b/ChatBot/Models/AvailableModels.cs
deleted file mode 100644
index abaee68..0000000
--- a/ChatBot/Models/AvailableModels.cs
+++ /dev/null
@@ -1,43 +0,0 @@
-namespace ChatBot.Models
-{
- ///
- /// Available AI models for OpenRouter
- ///
- public static class AvailableModels
- {
- ///
- /// List of available models with their descriptions
- ///
- public static readonly Dictionary Models = new()
- {
- // Verified Working Model
- ["qwen/qwen3-4b:free"] = "Qwen 3 4B - FREE, Verified working model",
- };
-
- ///
- /// Get model description
- ///
- public static string GetModelDescription(string modelName)
- {
- return Models.TryGetValue(modelName, out var description)
- ? description
- : "Unknown model";
- }
-
- ///
- /// Check if model is available
- ///
- public static bool IsModelAvailable(string modelName)
- {
- return Models.ContainsKey(modelName);
- }
-
- ///
- /// Get all available model names
- ///
- public static IEnumerable GetAllModelNames()
- {
- return Models.Keys;
- }
- }
-}
diff --git a/ChatBot/Models/ChatSession.cs b/ChatBot/Models/ChatSession.cs
index 38b6d58..23dbc89 100644
--- a/ChatBot/Models/ChatSession.cs
+++ b/ChatBot/Models/ChatSession.cs
@@ -145,28 +145,5 @@ namespace ChatBot.Models
MessageHistory.Clear();
LastUpdatedAt = DateTime.UtcNow;
}
-
- ///
- /// Load system prompt from file
- ///
- public static string LoadSystemPrompt(string filePath)
- {
- if (!File.Exists(filePath))
- {
- throw new FileNotFoundException($"System prompt file not found: {filePath}");
- }
-
- try
- {
- return File.ReadAllText(filePath, System.Text.Encoding.UTF8);
- }
- catch (Exception ex)
- {
- throw new InvalidOperationException(
- $"Failed to read system prompt file '{filePath}': {ex.Message}",
- ex
- );
- }
- }
}
}
diff --git a/ChatBot/Models/Configuration/AppSettings.cs b/ChatBot/Models/Configuration/AppSettings.cs
index 1511e5f..da1af0a 100644
--- a/ChatBot/Models/Configuration/AppSettings.cs
+++ b/ChatBot/Models/Configuration/AppSettings.cs
@@ -11,9 +11,9 @@ namespace ChatBot.Models.Configuration
public TelegramBotSettings TelegramBot { get; set; } = new();
///
- /// Настройки OpenRouter API
+ /// Настройки Ollama API
///
- public OpenRouterSettings OpenRouter { get; set; } = new();
+ public OllamaSettings Ollama { get; set; } = new();
///
/// Настройки логирования Serilog
diff --git a/ChatBot/Models/Configuration/OllamaSettings.cs b/ChatBot/Models/Configuration/OllamaSettings.cs
new file mode 100644
index 0000000..789a87d
--- /dev/null
+++ b/ChatBot/Models/Configuration/OllamaSettings.cs
@@ -0,0 +1,38 @@
+namespace ChatBot.Models.Configuration
+{
+ ///
+ /// Настройки Ollama API
+ ///
+ public class OllamaSettings
+ {
+ ///
+ /// URL эндпоинта Ollama API
+ ///
+ public string Url { get; set; } = "http://localhost:11434";
+
+ ///
+ /// Настройки для каждой модели отдельно
+ ///
+ public List ModelConfigurations { get; set; } = new();
+
+ ///
+ /// Максимальное количество повторных попыток при ошибках
+ ///
+ public int MaxRetries { get; set; } = 3;
+
+ ///
+ /// Максимальное количество токенов в ответе (по умолчанию, если не задано для конкретной модели)
+ ///
+ public int MaxTokens { get; set; } = 1000;
+
+ ///
+ /// Температура генерации по умолчанию (креативность ответов от 0.0 до 2.0)
+ ///
+ public double Temperature { get; set; } = 0.7;
+
+ ///
+ /// Путь к файлу с системным промтом
+ ///
+ public string SystemPromptFilePath { get; set; } = "system-prompt.txt";
+ }
+}
diff --git a/ChatBot/Models/Configuration/OpenRouterSettings.cs b/ChatBot/Models/Configuration/OpenRouterSettings.cs
deleted file mode 100644
index 94c9f17..0000000
--- a/ChatBot/Models/Configuration/OpenRouterSettings.cs
+++ /dev/null
@@ -1,79 +0,0 @@
-namespace ChatBot.Models.Configuration
-{
- ///
- /// Настройки OpenRouter API
- ///
- public class OpenRouterSettings
- {
- ///
- /// API токен для аутентификации в OpenRouter
- ///
- public string Token { get; set; } = string.Empty;
-
- ///
- /// URL эндпоинта OpenRouter API
- ///
- public string Url { get; set; } = string.Empty;
-
- ///
- /// Список доступных моделей ИИ (для обратной совместимости)
- ///
- public List AvailableModels { get; set; } = new();
-
- ///
- /// Настройки для каждой модели отдельно
- ///
- public List ModelConfigurations { get; set; } = new();
-
- ///
- /// Модель по умолчанию для генерации ответов
- ///
- public string DefaultModel { get; set; } = string.Empty;
-
- ///
- /// Максимальное количество повторных попыток при ошибках
- ///
- public int MaxRetries { get; set; } = 3;
-
- ///
- /// Максимальное количество токенов в ответе (по умолчанию, если не задано для конкретной модели)
- ///
- public int MaxTokens { get; set; } = 1000;
-
- ///
- /// Температура генерации по умолчанию (креативность ответов от 0.0 до 2.0)
- ///
- public double Temperature { get; set; } = 0.7;
-
- ///
- /// Настройки случайной задержки перед ответом AI модели
- ///
- public ResponseDelaySettings ResponseDelay { get; set; } = new();
-
- ///
- /// Путь к файлу с системным промтом
- ///
- public string SystemPromptFilePath { get; set; } = "system-prompt.txt";
- }
-
- ///
- /// Настройки случайной задержки ответа
- ///
- public class ResponseDelaySettings
- {
- ///
- /// Включена ли случайная задержка
- ///
- public bool IsEnabled { get; set; } = false;
-
- ///
- /// Минимальная задержка в миллисекундах
- ///
- public int MinDelayMs { get; set; } = 1000;
-
- ///
- /// Максимальная задержка в миллисекундах
- ///
- public int MaxDelayMs { get; set; } = 3000;
- }
-}
diff --git a/ChatBot/Models/Configuration/Validators/ConfigurationValidator.cs b/ChatBot/Models/Configuration/Validators/ConfigurationValidator.cs
index bf438b9..e8a8967 100644
--- a/ChatBot/Models/Configuration/Validators/ConfigurationValidator.cs
+++ b/ChatBot/Models/Configuration/Validators/ConfigurationValidator.cs
@@ -20,9 +20,9 @@ namespace ChatBot.Models.Configuration.Validators
var telegramResult = ValidateTelegramBotSettings(settings.TelegramBot);
errors.AddRange(telegramResult.Errors);
- // Валидация настроек OpenRouter
- var openRouterResult = ValidateOpenRouterSettings(settings.OpenRouter);
- errors.AddRange(openRouterResult.Errors);
+ // Валидация настроек Ollama
+ var ollamaResult = ValidateOllamaSettings(settings.Ollama);
+ errors.AddRange(ollamaResult.Errors);
return new ValidationResult { IsValid = !errors.Any(), Errors = errors };
}
@@ -56,46 +56,24 @@ namespace ChatBot.Models.Configuration.Validators
}
///
- /// Валидирует настройки OpenRouter
+ /// Валидирует настройки Ollama
///
- /// Настройки OpenRouter
+ /// Настройки Ollama
/// Результат валидации
- public static ValidationResult ValidateOpenRouterSettings(OpenRouterSettings settings)
+ public static ValidationResult ValidateOllamaSettings(OllamaSettings settings)
{
var errors = new List();
- // Валидация всех компонентов настроек OpenRouter
- ValidateToken(settings.Token, errors);
+ // Валидация основных компонентов настроек Ollama
ValidateUrl(settings.Url, errors);
- ValidateAvailableModels(settings.AvailableModels, errors);
ValidateModelConfigurations(settings.ModelConfigurations, errors);
- ValidateDefaultModel(settings.DefaultModel, settings.AvailableModels, errors);
ValidateNumericSettings(settings, errors);
return new ValidationResult { IsValid = !errors.Any(), Errors = errors };
}
///
- /// Валидирует токен OpenRouter
- ///
- /// Токен для проверки
- /// Список ошибок валидации
- private static void ValidateToken(string token, List errors)
- {
- // Проверка наличия токена
- if (string.IsNullOrWhiteSpace(token))
- {
- errors.Add("OpenRouter:Token is required");
- }
- // Проверка формата токена (должен начинаться с 'sk-')
- else if (!token.StartsWith("sk-", StringComparison.OrdinalIgnoreCase))
- {
- errors.Add("OpenRouter:Token appears to be invalid (should start with 'sk-')");
- }
- }
-
- ///
- /// Валидирует URL OpenRouter
+ /// Валидирует URL Ollama
///
/// URL для проверки
/// Список ошибок валидации
@@ -104,7 +82,7 @@ namespace ChatBot.Models.Configuration.Validators
// Проверка наличия URL
if (string.IsNullOrWhiteSpace(url))
{
- errors.Add("OpenRouter:Url is required");
+ errors.Add("Ollama:Url is required");
}
// Проверка корректности URL (должен быть валидным HTTP/HTTPS URL)
else if (
@@ -112,34 +90,12 @@ namespace ChatBot.Models.Configuration.Validators
|| (uri.Scheme != "http" && uri.Scheme != "https")
)
{
- errors.Add("OpenRouter:Url must be a valid HTTP/HTTPS URL");
+ errors.Add("Ollama:Url must be a valid HTTP/HTTPS URL");
}
}
///
- /// Валидирует список доступных моделей
- ///
- /// Список моделей для проверки
- /// Список ошибок валидации
- private static void ValidateAvailableModels(IEnumerable models, List errors)
- {
- // Проверка наличия хотя бы одной модели
- if (models == null || !models.Any())
- {
- errors.Add("OpenRouter:AvailableModels must contain at least one model");
- return;
- }
-
- // Проверка на пустые названия моделей
- var emptyModels = models.Where(string.IsNullOrWhiteSpace).ToList();
- if (emptyModels.Any())
- {
- errors.Add("OpenRouter:AvailableModels contains empty model name");
- }
- }
-
- ///
- /// /// Валидирует конфигурации моделей
+ /// Валидирует конфигурации моделей
///
/// Конфигурации моделей
/// Список ошибок валидации
@@ -157,76 +113,49 @@ namespace ChatBot.Models.Configuration.Validators
{
if (string.IsNullOrWhiteSpace(modelConfig.Name))
{
- errors.Add("OpenRouter:ModelConfigurations contains model with empty name");
+ errors.Add("ModelConfigurations contains model with empty name");
+ continue;
}
if (modelConfig.MaxTokens < 1 || modelConfig.MaxTokens > 100000)
{
errors.Add(
- $"OpenRouter:ModelConfigurations model '{modelConfig.Name}' MaxTokens must be between 1 and 100000"
+ $"ModelConfigurations model '{modelConfig.Name}' MaxTokens must be between 1 and 100000"
);
}
if (modelConfig.Temperature < 0.0 || modelConfig.Temperature > 2.0)
{
errors.Add(
- $"OpenRouter:ModelConfigurations model '{modelConfig.Name}' Temperature must be between 0.0 and 2.0"
+ $"ModelConfigurations model '{modelConfig.Name}' Temperature must be between 0.0 and 2.0"
);
}
}
}
///
- /// Валидирует модель по умолчанию
+ /// Валидирует числовые параметры настроек Ollama
///
- /// Модель по умолчанию
- /// Список доступных моделей
+ /// Настройки Ollama
/// Список ошибок валидации
- private static void ValidateDefaultModel(
- string defaultModel,
- IEnumerable availableModels,
- List errors
- )
- {
- // Проверка, что модель по умолчанию присутствует в списке доступных
- if (
- !string.IsNullOrWhiteSpace(defaultModel)
- && availableModels != null
- && !availableModels.Contains(defaultModel)
- )
- {
- errors.Add(
- $"OpenRouter:DefaultModel '{defaultModel}' is not in AvailableModels list"
- );
- }
- }
-
- ///
- /// Валидирует числовые параметры настроек OpenRouter
- ///
- /// Настройки OpenRouter
- /// Список ошибок валидации
- private static void ValidateNumericSettings(
- OpenRouterSettings settings,
- List errors
- )
+ private static void ValidateNumericSettings(OllamaSettings settings, List errors)
{
// Проверка количества повторных попыток (1-10)
if (settings.MaxRetries < 1 || settings.MaxRetries > 10)
{
- errors.Add("OpenRouter:MaxRetries must be between 1 and 10");
+ errors.Add("Ollama:MaxRetries must be between 1 and 10");
}
// Проверка максимального количества токенов (1-100000)
if (settings.MaxTokens < 1 || settings.MaxTokens > 100000)
{
- errors.Add("OpenRouter:MaxTokens must be between 1 and 100000");
+ errors.Add("Ollama:MaxTokens must be between 1 and 100000");
}
// Проверка температуры (0.0-2.0)
if (settings.Temperature < 0.0 || settings.Temperature > 2.0)
{
- errors.Add("OpenRouter:Temperature must be between 0.0 and 2.0");
+ errors.Add("Ollama:Temperature must be between 0.0 and 2.0");
}
}
}
diff --git a/ChatBot/Models/Configuration/Validators/OllamaSettingsValidator.cs b/ChatBot/Models/Configuration/Validators/OllamaSettingsValidator.cs
new file mode 100644
index 0000000..a9e53c2
--- /dev/null
+++ b/ChatBot/Models/Configuration/Validators/OllamaSettingsValidator.cs
@@ -0,0 +1,77 @@
+using Microsoft.Extensions.Options;
+
+namespace ChatBot.Models.Configuration.Validators
+{
+ ///
+ /// Validator for OllamaSettings
+ ///
+ public class OllamaSettingsValidator : IValidateOptions
+ {
+ public ValidateOptionsResult Validate(string? name, OllamaSettings options)
+ {
+ var errors = new List();
+
+ ValidateUrl(options, errors);
+ ValidateRetryAndTokenSettings(options, errors);
+ ValidateSystemPromptPath(options, errors);
+ ValidateModelConfigurations(options, errors);
+
+ return errors.Count > 0
+ ? ValidateOptionsResult.Fail(errors)
+ : ValidateOptionsResult.Success;
+ }
+
+ private static void ValidateUrl(OllamaSettings options, List errors)
+ {
+ if (string.IsNullOrWhiteSpace(options.Url))
+ errors.Add("Ollama URL is required");
+ else if (!Uri.TryCreate(options.Url, UriKind.Absolute, out _))
+ errors.Add($"Invalid Ollama URL format: {options.Url}");
+ }
+
+ private static void ValidateRetryAndTokenSettings(
+ OllamaSettings options,
+ List errors
+ )
+ {
+ if (options.MaxRetries < 1)
+ errors.Add($"MaxRetries must be at least 1, got: {options.MaxRetries}");
+
+ if (options.MaxRetries > 10)
+ errors.Add($"MaxRetries should not exceed 10, got: {options.MaxRetries}");
+
+ if (options.MaxTokens < 1)
+ errors.Add($"MaxTokens must be at least 1, got: {options.MaxTokens}");
+
+ if (options.Temperature < 0 || options.Temperature > 2)
+ errors.Add($"Temperature must be between 0 and 2, got: {options.Temperature}");
+ }
+
+ private static void ValidateSystemPromptPath(OllamaSettings options, List errors)
+ {
+ if (string.IsNullOrWhiteSpace(options.SystemPromptFilePath))
+ errors.Add("SystemPromptFilePath is required");
+ }
+
+ private static void ValidateModelConfigurations(OllamaSettings options, List errors)
+ {
+ if (options.ModelConfigurations.Count == 0)
+ {
+ errors.Add("At least one model configuration is required");
+ return;
+ }
+
+ foreach (var model in options.ModelConfigurations)
+ {
+ if (string.IsNullOrWhiteSpace(model.Name))
+ errors.Add("Model name cannot be empty");
+
+ if (model.MaxTokens < 1)
+ errors.Add($"Model '{model.Name}': MaxTokens must be at least 1");
+
+ if (model.Temperature < 0 || model.Temperature > 2)
+ errors.Add($"Model '{model.Name}': Temperature must be between 0 and 2");
+ }
+ }
+ }
+}
diff --git a/ChatBot/Models/Configuration/Validators/TelegramBotSettingsValidator.cs b/ChatBot/Models/Configuration/Validators/TelegramBotSettingsValidator.cs
new file mode 100644
index 0000000..b71f0d2
--- /dev/null
+++ b/ChatBot/Models/Configuration/Validators/TelegramBotSettingsValidator.cs
@@ -0,0 +1,28 @@
+using Microsoft.Extensions.Options;
+
+namespace ChatBot.Models.Configuration.Validators
+{
+ ///
+ /// Validator for TelegramBotSettings
+ ///
+ public class TelegramBotSettingsValidator : IValidateOptions
+ {
+ public ValidateOptionsResult Validate(string? name, TelegramBotSettings options)
+ {
+ var errors = new List();
+
+ if (string.IsNullOrWhiteSpace(options.BotToken))
+ {
+ errors.Add("Telegram bot token is required");
+ }
+ else if (options.BotToken.Length < 40)
+ {
+ errors.Add("Telegram bot token appears to be invalid (too short)");
+ }
+
+ return errors.Count > 0
+ ? ValidateOptionsResult.Fail(errors)
+ : ValidateOptionsResult.Success;
+ }
+ }
+}
diff --git a/ChatBot/Models/Dto/ChatMessage.cs b/ChatBot/Models/Dto/ChatMessage.cs
index a1f6222..5a5015e 100644
--- a/ChatBot/Models/Dto/ChatMessage.cs
+++ b/ChatBot/Models/Dto/ChatMessage.cs
@@ -1,43 +1,18 @@
-using System;
-using System.Collections.Generic;
-using System.Runtime.Serialization;
-
namespace ChatBot.Models.Dto
{
///
- /// Сообщение чата.
+ /// Represents a chat message in a conversation
///
- [DataContract]
public class ChatMessage
{
///
- /// Содержимое сообщения.
+ /// The content of the message
///
- [DataMember(Name = "content")]
public required string Content { get; set; }
///
- /// Роль автора этого сообщения.
+ /// The role of the message author (system, user, assistant)
///
- [DataMember(Name = "role")]
public required string Role { get; set; }
-
- ///
- /// Имя и аргументы функции, которую следует вызвать, как сгенерировано моделью.
- ///
- [DataMember(Name = "function_call")]
- public FunctionCall? FunctionCall { get; set; }
-
- ///
- /// Вызовы инструментов, сгенерированные моделью, такие как вызовы функций.
- ///
- [DataMember(Name = "tool_calls")]
- public List ToolCalls { get; set; } = new List();
-
- ///
- /// Имя автора этого сообщения. Имя обязательно, если роль - функция, и должно быть именем функции, ответ которой содержится в контенте.
- ///
- [DataMember(Name = "name")]
- public string? Name { get; set; }
}
}
diff --git a/ChatBot/Models/Dto/Choice.cs b/ChatBot/Models/Dto/Choice.cs
deleted file mode 100644
index b1fb3bd..0000000
--- a/ChatBot/Models/Dto/Choice.cs
+++ /dev/null
@@ -1,37 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Runtime.Serialization;
-
-namespace ChatBot.Models.Dto
-{
- ///
- /// Вариант завершения чата, сгенерированный моделью.
- ///
- [DataContract]
- public class Choice
- {
- ///
- /// Причина, по которой модель остановила генерацию токенов. Это будет stop, если модель достигла естественной точки остановки или предоставленной последовательности остановки, length, если было достигнуто максимальное количество токенов, указанное в запросе, content_filter, если контент был опущен из-за флага наших фильтров контента, tool_calls, если модель вызвала инструмент
- ///
- [DataMember(Name = "finish_reason")]
- public required string FinishReason { get; set; }
-
- ///
- /// Индекс варианта в списке вариантов.
- ///
- [DataMember(Name = "index")]
- public int Index { get; set; }
-
- ///
- /// Сообщение завершения чата, сгенерированное моделью.
- ///
- [DataMember(Name = "message")]
- public required ChoiceMessage Message { get; set; }
-
- ///
- /// Информация о логарифмической вероятности для варианта.
- ///
- [DataMember(Name = "logprobs")]
- public LogProbs? LogProbs { get; set; }
- }
-}
diff --git a/ChatBot/Models/Dto/ChoiceMessage.cs b/ChatBot/Models/Dto/ChoiceMessage.cs
deleted file mode 100644
index f55bc69..0000000
--- a/ChatBot/Models/Dto/ChoiceMessage.cs
+++ /dev/null
@@ -1,37 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Runtime.Serialization;
-
-namespace ChatBot.Models.Dto
-{
- ///
- /// Сообщение завершения чата, сгенерированное моделью.
- ///
- [DataContract]
- public class ChoiceMessage
- {
- ///
- /// Содержимое сообщения.
- ///
- [DataMember(Name = "content")]
- public required string Content { get; set; }
-
- ///
- /// Вызовы инструментов, сгенерированные моделью, такие как вызовы функций.
- ///
- [DataMember(Name = "tool_calls")]
- public List ToolCalls { get; set; } = new List();
-
- ///
- /// Роль автора этого сообщения.
- ///
- [DataMember(Name = "role")]
- public required string Role { get; set; }
-
- ///
- /// Имя и аргументы функции, которую следует вызвать, как сгенерировано моделью.
- ///
- [DataMember(Name = "function_call")]
- public FunctionCall? FunctionCall { get; set; }
- }
-}
diff --git a/ChatBot/Models/Dto/LogProbs.cs b/ChatBot/Models/Dto/LogProbs.cs
deleted file mode 100644
index 090b47c..0000000
--- a/ChatBot/Models/Dto/LogProbs.cs
+++ /dev/null
@@ -1,75 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Runtime.Serialization;
-
-namespace ChatBot.Models.Dto
-{
- ///
- /// Информация о логарифмической вероятности для варианта.
- ///
- [DataContract]
- public class LogProbs
- {
- ///
- /// Список токенов содержимого сообщения с информацией о логарифмической вероятности.
- ///
- [DataMember(Name = "content")]
- public List Content { get; set; } = new List();
- }
-
- ///
- /// Информация о логарифмической вероятности для токена содержимого сообщения.
- ///
- [DataContract]
- public class LogProbContent
- {
- ///
- /// Токен.
- ///
- [DataMember(Name = "token")]
- public required string Token { get; set; }
-
- ///
- /// Логарифмическая вероятность этого токена, если он входит в топ-20 наиболее вероятных токенов.
- ///
- [DataMember(Name = "logprob")]
- public double LogProb { get; set; }
-
- ///
- /// Список целых чисел, представляющих UTF-8 байтовое представление токена. Полезно в случаях, когда символы представлены несколькими токенами и их байтовые смещения должны быть известны для вычисления границ.
- ///
- [DataMember(Name = "bytes")]
- public List Bytes { get; set; } = new List();
-
- ///
- /// Список наиболее вероятных токенов и их логарифмических вероятностей в этой позиции токена. В редких случаях может быть возвращено меньше токенов, чем запрошено top_logprobs.
- ///
- [DataMember(Name = "top_logprobs")]
- public List TopLogProbs { get; set; } = new List();
- }
-
- ///
- /// Информация о логарифмической вероятности для токена с высокой логарифмической вероятностью.
- ///
- [DataContract]
- public class TopLogProb
- {
- ///
- /// Токен.
- ///
- [DataMember(Name = "token")]
- public required string Token { get; set; }
-
- ///
- /// Логарифмическая вероятность этого токена, если он входит в топ-20 наиболее вероятных токенов.
- ///
- [DataMember(Name = "logprob")]
- public double LogProb { get; set; }
-
- ///
- /// Список целых чисел, представляющих UTF-8 байтовое представление токена. Полезно в случаях, когда символы представлены несколькими токенами и их байтовые смещения должны быть известны для вычисления границ.
- ///
- [DataMember(Name = "bytes")]
- public List Bytes { get; set; } = new List();
- }
-}
diff --git a/ChatBot/Models/Dto/OpenAiChatCompletion.cs b/ChatBot/Models/Dto/OpenAiChatCompletion.cs
deleted file mode 100644
index 31a1ae3..0000000
--- a/ChatBot/Models/Dto/OpenAiChatCompletion.cs
+++ /dev/null
@@ -1,103 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Runtime.Serialization;
-
-namespace ChatBot.Models.Dto
-{
- ///
- /// Модель запроса завершения чата OpenAI
- ///
- [DataContract]
- public class OpenAiChatCompletion
- {
- ///
- /// Список сообщений, составляющих разговор на данный момент.
- ///
- [DataMember(Name = "messages")]
- public List Messages { get; set; } = new List();
-
- ///
- /// Идентификатор модели для использования.
- ///
- [DataMember(Name = "model")]
- public required string Model { get; set; }
-
- ///
- /// Число от -2.0 до 2.0. Положительные значения штрафуют новые токены на основе их существующей частоты в тексте, уменьшая вероятность того, что модель повторит ту же строку дословно.
- ///
- [DataMember(Name = "frequency_penalty")]
- public double? FrequencyPenalty { get; set; }
-
- ///
- /// Изменить вероятность появления указанных токенов в завершении.
- ///
- [DataMember(Name = "logit_bias")]
- public Dictionary LogitBias { get; set; } = new Dictionary();
-
- ///
- /// Максимальное количество токенов для генерации в завершении чата.
- ///
- [DataMember(Name = "max_tokens")]
- public int? MaxTokens { get; set; }
-
- ///
- /// Сколько вариантов завершения чата генерировать для каждого входного сообщения.
- ///
- [DataMember(Name = "n")]
- public int? N { get; set; }
-
- ///
- /// Число от -2.0 до 2.0. Положительные значения штрафуют новые токены на основе того, появлялись ли они в тексте, увеличивая вероятность того, что модель будет говорить о новых темах.
- ///
- [DataMember(Name = "presence_penalty")]
- public double? PresencePenalty { get; set; }
-
- ///
- /// Объект, указывающий формат, который должна выводить модель.
- ///
- [DataMember(Name = "response_format")]
- public ResponseFormat? ResponseFormat { get; set; }
-
- ///
- /// Эта функция находится в бета-версии. Если указано, наша система приложит максимальные усилия для детерминированной выборки, так что повторные запросы с одинаковым семенем и параметрами должны возвращать тот же результат. Детерминизм не гарантируется, и вы должны обращаться к параметру ответа system_fingerprint для мониторинга изменений в бэкенде.
- ///
- [DataMember(Name = "seed")]
- public int? Seed { get; set; }
-
- ///
- /// До 4 последовательностей, на которых API остановит генерацию дальнейших токенов.
- ///
- [DataMember(Name = "stop")]
- public object? Stop { get; set; }
-
- ///
- /// Какая температура выборки использовать, от 0 до 2. Более высокие значения, такие как 0.8, сделают вывод более случайным, а более низкие значения, такие как 0.2, сделают его более сфокусированным и детерминированным.
- ///
- [DataMember(Name = "temperature")]
- public double? Temperature { get; set; }
-
- ///
- /// Альтернатива выборке с температурой, называемая ядерной выборкой, где модель рассматривает результаты токенов с вероятностной массой top_p. Так, 0.1 означает, что рассматриваются только токены, составляющие топ-10% вероятностной массы.
- ///
- [DataMember(Name = "top_p")]
- public double? TopP { get; set; }
-
- ///
- /// Список инструментов, которые может вызывать модель. В настоящее время в качестве инструмента поддерживаются только функции.
- ///
- [DataMember(Name = "tools")]
- public List Tools { get; set; } = new List();
-
- ///
- /// Управляет тем, какая (если есть) функция вызывается моделью.
- ///
- [DataMember(Name = "tool_choice")]
- public object? ToolChoice { get; set; }
-
- ///
- /// Уникальный идентификатор, представляющий вашего конечного пользователя, который может помочь OpenAI мониторить и обнаруживать злоупотребления.
- ///
- [DataMember(Name = "user")]
- public string? User { get; set; }
- }
-}
diff --git a/ChatBot/Models/Dto/OpenAiChatResponse.cs b/ChatBot/Models/Dto/OpenAiChatResponse.cs
deleted file mode 100644
index 18149ef..0000000
--- a/ChatBot/Models/Dto/OpenAiChatResponse.cs
+++ /dev/null
@@ -1,55 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Runtime.Serialization;
-
-namespace ChatBot.Models.Dto
-{
- ///
- /// Объект ответа для запросов завершения чата OpenAI
- ///
- [DataContract]
- public class OpenAiChatResponse
- {
- ///
- /// Уникальный идентификатор для завершения чата.
- ///
- [DataMember(Name = "id")]
- public required string Id { get; set; }
-
- ///
- /// Тип объекта, который всегда "chat.completion".
- ///
- [DataMember(Name = "object")]
- public required string Object { get; set; }
-
- ///
- /// Unix-временная метка (в секундах) создания завершения чата.
- ///
- [DataMember(Name = "created")]
- public long Created { get; set; }
-
- ///
- /// Модель, использованная для завершения чата.
- ///
- [DataMember(Name = "model")]
- public required string Model { get; set; }
-
- ///
- /// Список вариантов завершения чата. Может быть больше одного, если n больше 1.
- ///
- [DataMember(Name = "choices")]
- public List Choices { get; set; } = new List();
-
- ///
- /// Статистика использования для запроса завершения.
- ///
- [DataMember(Name = "usage")]
- public required Usage Usage { get; set; }
-
- ///
- /// Этот отпечаток представляет конфигурацию бэкенда, с которой работает модель.
- ///
- [DataMember(Name = "system_fingerprint")]
- public required string SystemFingerprint { get; set; }
- }
-}
diff --git a/ChatBot/Models/Dto/ResponseFormat.cs b/ChatBot/Models/Dto/ResponseFormat.cs
deleted file mode 100644
index 7f6afde..0000000
--- a/ChatBot/Models/Dto/ResponseFormat.cs
+++ /dev/null
@@ -1,18 +0,0 @@
-using System;
-using System.Runtime.Serialization;
-
-namespace ChatBot.Models.Dto
-{
- ///
- /// Объект, указывающий формат, который должна выводить модель.
- ///
- [DataContract]
- public class ResponseFormat
- {
- ///
- /// Должно быть одним из: text или json_object.
- ///
- [DataMember(Name = "type")]
- public required string Type { get; set; }
- }
-}
diff --git a/ChatBot/Models/Dto/Tool.cs b/ChatBot/Models/Dto/Tool.cs
deleted file mode 100644
index e614364..0000000
--- a/ChatBot/Models/Dto/Tool.cs
+++ /dev/null
@@ -1,50 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Runtime.Serialization;
-
-namespace ChatBot.Models.Dto
-{
- ///
- /// Инструмент, который может вызывать модель.
- ///
- [DataContract]
- public class Tool
- {
- ///
- /// Тип инструмента. В настоящее время поддерживается только функция.
- ///
- [DataMember(Name = "type")]
- public required string Type { get; set; }
-
- ///
- /// Определение функции.
- ///
- [DataMember(Name = "function")]
- public required ToolFunction Function { get; set; }
- }
-
- ///
- /// Определение функции.
- ///
- [DataContract]
- public class ToolFunction
- {
- ///
- /// Имя функции для вызова. Должно содержать a-z, A-Z, 0-9 или подчеркивания и тире, с максимальной длиной 64 символа.
- ///
- [DataMember(Name = "name")]
- public required string Name { get; set; }
-
- ///
- /// Описание того, что делает функция, используется моделью для выбора, когда и как вызывать функцию.
- ///
- [DataMember(Name = "description")]
- public required string Description { get; set; }
-
- ///
- /// Параметры, которые принимает функция, описанные как объект JSON Schema.
- ///
- [DataMember(Name = "parameters")]
- public required object Parameters { get; set; }
- }
-}
diff --git a/ChatBot/Models/Dto/ToolCall.cs b/ChatBot/Models/Dto/ToolCall.cs
deleted file mode 100644
index a45b4e7..0000000
--- a/ChatBot/Models/Dto/ToolCall.cs
+++ /dev/null
@@ -1,50 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Runtime.Serialization;
-
-namespace ChatBot.Models.Dto
-{
- ///
- /// Вызов инструмента, сгенерированный моделью.
- ///
- [DataContract]
- public class ToolCall
- {
- ///
- /// Идентификатор вызова инструмента.
- ///
- [DataMember(Name = "id")]
- public required string Id { get; set; }
-
- ///
- /// Тип инструмента. В настоящее время поддерживается только функция.
- ///
- [DataMember(Name = "type")]
- public required string Type { get; set; }
-
- ///
- /// Функция, которую вызвала модель.
- ///
- [DataMember(Name = "function")]
- public required FunctionCall Function { get; set; }
- }
-
- ///
- /// Функция, которую вызвала модель.
- ///
- [DataContract]
- public class FunctionCall
- {
- ///
- /// Имя функции для вызова.
- ///
- [DataMember(Name = "name")]
- public required string Name { get; set; }
-
- ///
- /// Аргументы для вызова функции, сгенерированные моделью в формате JSON.
- ///
- [DataMember(Name = "arguments")]
- public required string Arguments { get; set; }
- }
-}
diff --git a/ChatBot/Models/Dto/Usage.cs b/ChatBot/Models/Dto/Usage.cs
deleted file mode 100644
index 6cf81db..0000000
--- a/ChatBot/Models/Dto/Usage.cs
+++ /dev/null
@@ -1,30 +0,0 @@
-using System;
-using System.Runtime.Serialization;
-
-namespace ChatBot.Models.Dto
-{
- ///
- /// Usage statistics for the completion request.
- ///
- [DataContract]
- public class Usage
- {
- ///
- /// Number of tokens in the generated completion.
- ///
- [DataMember(Name = "completion_tokens")]
- public int CompletionTokens { get; set; }
-
- ///
- /// Number of tokens in the prompt.
- ///
- [DataMember(Name = "prompt_tokens")]
- public int PromptTokens { get; set; }
-
- ///
- /// Total number of tokens used in the request (prompt + completion).
- ///
- [DataMember(Name = "total_tokens")]
- public int TotalTokens { get; set; }
- }
-}
diff --git a/ChatBot/Models/Validation/ChatMessageValidator.cs b/ChatBot/Models/Validation/ChatMessageValidator.cs
new file mode 100644
index 0000000..3d41768
--- /dev/null
+++ b/ChatBot/Models/Validation/ChatMessageValidator.cs
@@ -0,0 +1,33 @@
+using ChatBot.Common.Constants;
+using ChatBot.Models.Dto;
+using FluentValidation;
+
+namespace ChatBot.Models.Validation
+{
+ ///
+ /// Validator for ChatMessage
+ ///
+    public class ChatMessageValidator : AbstractValidator<ChatMessage>
+ {
+ public ChatMessageValidator()
+ {
+ RuleFor(x => x.Content)
+ .NotEmpty()
+ .WithMessage("Message content cannot be empty")
+ .MaximumLength(10000)
+ .WithMessage("Message content is too long (max 10000 characters)");
+
+ RuleFor(x => x.Role)
+ .NotEmpty()
+ .WithMessage("Message role cannot be empty")
+ .Must(role =>
+ role == ChatRoles.System
+ || role == ChatRoles.User
+ || role == ChatRoles.Assistant
+ )
+ .WithMessage(
+ $"Invalid message role. Must be one of: {ChatRoles.System}, {ChatRoles.User}, {ChatRoles.Assistant}"
+ );
+ }
+ }
+}
diff --git a/ChatBot/Program.cs b/ChatBot/Program.cs
index e033cde..7d56627 100644
--- a/ChatBot/Program.cs
+++ b/ChatBot/Program.cs
@@ -1,10 +1,17 @@
using ChatBot.Models.Configuration;
using ChatBot.Models.Configuration.Validators;
+using ChatBot.Models.Validation;
using ChatBot.Services;
+using ChatBot.Services.ErrorHandlers;
+using ChatBot.Services.HealthChecks;
+using ChatBot.Services.Interfaces;
using ChatBot.Services.Telegram.Commands;
using ChatBot.Services.Telegram.Interfaces;
using ChatBot.Services.Telegram.Services;
+using FluentValidation;
+using Microsoft.Extensions.Options;
using Serilog;
+using Telegram.Bot;
var builder = Host.CreateApplicationBuilder(args);
@@ -21,21 +28,34 @@ try
// Добавляем Serilog в DI контейнер
builder.Services.AddSerilog();
- // Конфигурируем настройки
+ // Конфигурируем настройки с валидацией
builder.Services.Configure(builder.Configuration);
- builder.Services.Configure(
- builder.Configuration.GetSection("TelegramBot")
- );
- builder.Services.Configure(options =>
- {
- builder.Configuration.GetSection("OpenRouter").Bind(options);
- builder
- .Configuration.GetSection("ModelConfigurations")
- .Bind(options, o => o.BindNonPublicProperties = false);
- });
+
+ builder
+ .Services.Configure(builder.Configuration.GetSection("TelegramBot"))
+ .AddSingleton, TelegramBotSettingsValidator>();
+
+ builder
+ .Services.Configure(options =>
+ {
+ builder.Configuration.GetSection("Ollama").Bind(options);
+ var modelConfigs = builder
+ .Configuration.GetSection("ModelConfigurations")
+ .Get>();
+ if (modelConfigs != null)
+ {
+ options.ModelConfigurations = modelConfigs;
+ }
+ })
+ .AddSingleton, OllamaSettingsValidator>();
+
builder.Services.Configure(builder.Configuration.GetSection("Serilog"));
- // Валидируем конфигурацию
+ // Валидируем конфигурацию при старте
+ builder.Services.AddOptions().ValidateOnStart();
+ builder.Services.AddOptions().ValidateOnStart();
+
+ // Валидируем конфигурацию (старый способ для совместимости)
var appSettings = builder.Configuration.Get();
if (appSettings == null)
{
@@ -54,31 +74,68 @@ try
return;
}
- Log.ForContext().Information("Configuration validation passed");
+ Log.ForContext().Debug("Configuration validation passed");
+
+ // Регистрируем FluentValidation валидаторы
+ builder.Services.AddValidatorsFromAssemblyContaining();
+
+ // Регистрируем IOllamaClient
+ builder.Services.AddSingleton(sp =>
+ {
+        var settings = sp.GetRequiredService<IOptions<OllamaSettings>>();
+ return new OllamaClientAdapter(settings.Value.Url);
+ });
+
+ // Регистрируем интерфейсы и сервисы
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+
+ // Регистрируем error handlers
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+
+ // Регистрируем retry policy (использует error handlers)
+ builder.Services.AddSingleton();
// Регистрируем основные сервисы
builder.Services.AddSingleton();
- builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
builder.Services.AddSingleton();
+ // Регистрируем Telegram команды
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+
// Регистрируем Telegram сервисы
+ builder.Services.AddSingleton(provider =>
+ {
+        var settings = provider.GetRequiredService<IOptions<TelegramBotSettings>>();
+ return new TelegramBotClient(settings.Value.BotToken);
+ });
builder.Services.AddSingleton();
builder.Services.AddSingleton();
builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
builder.Services.AddSingleton();
builder.Services.AddSingleton();
builder.Services.AddSingleton();
builder.Services.AddHostedService();
+ // Регистрируем Health Checks
+ builder
+ .Services.AddHealthChecks()
+ .AddCheck("ollama", tags: new[] { "api", "ollama" })
+ .AddCheck("telegram", tags: new[] { "api", "telegram" });
+
var host = builder.Build();
// Инициализируем ModelService
var modelService = host.Services.GetRequiredService();
await modelService.InitializeAsync();
- // Инициализируем команды
- var commandRegistry = host.Services.GetRequiredService();
- commandRegistry.RegisterCommandsFromAssembly(typeof(Program).Assembly, host.Services);
+ Log.ForContext().Information("All services initialized successfully");
await host.RunAsync();
}
diff --git a/ChatBot/Services/AIService.cs b/ChatBot/Services/AIService.cs
index 286e384..e84d829 100644
--- a/ChatBot/Services/AIService.cs
+++ b/ChatBot/Services/AIService.cs
@@ -1,202 +1,107 @@
-using ChatBot.Models.Configuration;
+using System.Text;
+using ChatBot.Common.Constants;
using ChatBot.Models.Dto;
-using Microsoft.Extensions.Options;
-using ServiceStack;
+using ChatBot.Services.Interfaces;
+using OllamaSharp.Models.Chat;
namespace ChatBot.Services
{
- public class AIService
+ ///
+ /// Service for AI text generation using Ollama
+ ///
+ public class AIService : IAIService
{
private readonly ILogger _logger;
- private readonly OpenRouterSettings _openRouterSettings;
private readonly ModelService _modelService;
- private readonly JsonApiClient _client;
+ private readonly IOllamaClient _client;
- public AIService(
- ILogger logger,
- IOptions openRouterSettings,
- ModelService modelService
- )
+ public AIService(ILogger logger, ModelService modelService, IOllamaClient client)
{
_logger = logger;
- _openRouterSettings = openRouterSettings.Value;
_modelService = modelService;
- _client = new JsonApiClient(_openRouterSettings.Url)
- {
- BearerToken = _openRouterSettings.Token,
- };
+ _client = client;
- // Log available configuration
- _logger.LogInformation(
- "AIService initialized with URL: {Url}",
- _openRouterSettings.Url
- );
+ _logger.LogInformation("AIService initialized");
}
- public async Task GenerateTextAsync(
- string prompt,
- string role,
- int? maxTokens = null
+ ///
+ /// Generate chat completion using Ollama Chat API
+ ///
+        public async Task<string> GenerateChatCompletionAsync(
+            List<ChatMessage> messages,
+ int? maxTokens = null,
+ double? temperature = null,
+ CancellationToken cancellationToken = default
)
{
var modelSettings = _modelService.GetCurrentModelSettings();
- var tokens = maxTokens ?? modelSettings.MaxTokens;
var model = modelSettings.Name;
try
{
- var result = await _client.PostAsync(
- "/v1/chat/completions",
- new OpenAiChatCompletion
- {
- Model = model,
- Messages = [new() { Role = role, Content = prompt }],
- MaxTokens = tokens,
- Temperature = modelSettings.Temperature,
- }
+ _logger.LogInformation("Generating response using model {Model}", model);
+
+ var result = await ExecuteGenerationAsync(messages, model, cancellationToken);
+
+ _logger.LogInformation(
+ "Response generated successfully, length: {Length} characters",
+ result.Length
);
- return result.Choices[0].Message.Content;
+
+ return result;
}
catch (Exception ex)
{
- _logger.LogError(ex, "Error generating text with model {Model}", model);
-
- // Пытаемся переключиться на другую модель
- if (_modelService.TrySwitchToNextModel())
- {
- _logger.LogInformation(
- "Retrying with alternative model: {Model}",
- _modelService.GetCurrentModel()
- );
- return await GenerateTextAsync(prompt, role, tokens);
- }
-
- return string.Empty;
+ _logger.LogError(ex, "Failed to generate chat completion for model {Model}", model);
+ return AIResponseConstants.DefaultErrorMessage;
}
}
///
- /// Generate text using conversation history
+ /// Execute a single generation attempt
///
- public async Task GenerateTextAsync(
+        private async Task<string> ExecuteGenerationAsync(
List messages,
- int? maxTokens = null,
- double? temperature = null
+ string model,
+ CancellationToken cancellationToken
)
{
- var modelSettings = _modelService.GetCurrentModelSettings();
- var tokens = maxTokens ?? modelSettings.MaxTokens;
- var temp = temperature ?? modelSettings.Temperature;
- var model = modelSettings.Name;
+ _client.SelectedModel = model;
- for (int attempt = 1; attempt <= _openRouterSettings.MaxRetries; attempt++)
+ var chatMessages = messages
+ .Select(m => new Message(ConvertRole(m.Role), m.Content))
+ .ToList();
+
+            var chatRequest = new ChatRequest { Model = model, Messages = chatMessages, Stream = true };
+ var response = new StringBuilder();
+
+ await foreach (
+ var chatResponse in _client
+ .ChatAsync(chatRequest)
+ .WithCancellation(cancellationToken)
+ )
{
- try
+ if (chatResponse?.Message?.Content != null)
{
- var result = await _client.PostAsync(
- "/v1/chat/completions",
- new OpenAiChatCompletion
- {
- Model = model,
- Messages = messages,
- MaxTokens = tokens,
- Temperature = temp,
- }
- );
- return result.Choices[0].Message.Content;
- }
- catch (Exception ex)
- when (ex.Message.Contains("429") || ex.Message.Contains("Too Many Requests"))
- {
- _logger.LogWarning(
- ex,
- "Rate limit exceeded (429) on attempt {Attempt}/{MaxRetries} for model {Model}. Retrying...",
- attempt,
- _openRouterSettings.MaxRetries,
- model
- );
-
- if (attempt == _openRouterSettings.MaxRetries)
- {
- _logger.LogError(
- ex,
- "Failed to generate text after {MaxRetries} attempts due to rate limiting for model {Model}",
- _openRouterSettings.MaxRetries,
- model
- );
- return string.Empty;
- }
-
- // Calculate delay: exponential backoff with jitter
- var baseDelay = TimeSpan.FromSeconds(Math.Pow(2, attempt - 1)); // 1s, 2s, 4s...
- var jitter = TimeSpan.FromMilliseconds(Random.Shared.Next(0, 2000)); // Add up to 2s random jitter
- var delay = baseDelay.Add(jitter);
-
- _logger.LogInformation(
- "Waiting {Delay} before retry {NextAttempt}/{MaxRetries}",
- delay,
- attempt + 1,
- _openRouterSettings.MaxRetries
- );
-
- await Task.Delay(delay);
- }
- catch (Exception ex)
- {
- _logger.LogError(
- ex,
- "Error generating text with conversation history. Model: {Model}, Messages count: {MessageCount}",
- model,
- messages.Count
- );
-
- // Пытаемся переключиться на другую модель
- if (_modelService.TrySwitchToNextModel())
- {
- _logger.LogInformation(
- "Retrying with alternative model: {Model}",
- _modelService.GetCurrentModel()
- );
- model = _modelService.GetCurrentModel();
- continue;
- }
-
- return string.Empty;
+ response.Append(chatResponse.Message.Content);
}
}
- return string.Empty;
+ return response.ToString();
}
///
- /// Генерирует случайную задержку на основе настроек
+ /// Convert string role to OllamaSharp ChatRole
///
- public async Task ApplyRandomDelayAsync(CancellationToken cancellationToken = default)
+ private static ChatRole ConvertRole(string role)
{
- if (!_openRouterSettings.ResponseDelay.IsEnabled)
+            return role.ToLowerInvariant() switch
{
- return;
- }
-
- var minDelay = _openRouterSettings.ResponseDelay.MinDelayMs;
- var maxDelay = _openRouterSettings.ResponseDelay.MaxDelayMs;
-
- if (minDelay >= maxDelay)
- {
- _logger.LogWarning(
- "Invalid delay settings: MinDelayMs ({MinDelay}) >= MaxDelayMs ({MaxDelay}). Skipping delay.",
- minDelay,
- maxDelay
- );
- return;
- }
-
- var randomDelay = Random.Shared.Next(minDelay, maxDelay + 1);
- var delay = TimeSpan.FromMilliseconds(randomDelay);
-
- _logger.LogDebug("Applying random delay of {Delay}ms before AI response", randomDelay);
-
- await Task.Delay(delay, cancellationToken);
+ ChatRoles.System => ChatRole.System,
+ ChatRoles.User => ChatRole.User,
+ ChatRoles.Assistant => ChatRole.Assistant,
+ _ => ChatRole.User,
+ };
}
}
}
diff --git a/ChatBot/Services/ChatService.cs b/ChatBot/Services/ChatService.cs
index 273069a..5be99a2 100644
--- a/ChatBot/Services/ChatService.cs
+++ b/ChatBot/Services/ChatService.cs
@@ -1,7 +1,6 @@
-using System.Collections.Concurrent;
+using ChatBot.Common.Constants;
using ChatBot.Models;
-using ChatBot.Models.Configuration;
-using Microsoft.Extensions.Options;
+using ChatBot.Services.Interfaces;
namespace ChatBot.Services
{
@@ -11,19 +10,18 @@ namespace ChatBot.Services
public class ChatService
{
private readonly ILogger _logger;
- private readonly AIService _aiService;
- private readonly OpenRouterSettings _openRouterSettings;
- private readonly ConcurrentDictionary _sessions = new();
+ private readonly IAIService _aiService;
+ private readonly ISessionStorage _sessionStorage;
public ChatService(
ILogger logger,
- AIService aiService,
- IOptions openRouterSettings
+ IAIService aiService,
+ ISessionStorage sessionStorage
)
{
_logger = logger;
_aiService = aiService;
- _openRouterSettings = openRouterSettings.Value;
+ _sessionStorage = sessionStorage;
}
///
@@ -31,52 +29,11 @@ namespace ChatBot.Services
///
public ChatSession GetOrCreateSession(
long chatId,
- string chatType = "private",
+ string chatType = ChatTypes.Private,
string chatTitle = ""
)
{
- if (!_sessions.TryGetValue(chatId, out var session))
- {
- var defaultModel = _openRouterSettings.DefaultModel;
-
- try
- {
- session = new ChatSession
- {
- ChatId = chatId,
- ChatType = chatType,
- ChatTitle = chatTitle,
- Model = defaultModel,
- MaxTokens = _openRouterSettings.MaxTokens,
- Temperature = _openRouterSettings.Temperature,
- SystemPrompt = ChatSession.LoadSystemPrompt(
- _openRouterSettings.SystemPromptFilePath
- ),
- };
- }
- catch (Exception ex)
- {
- _logger.LogError(
- ex,
- "Failed to load system prompt from file: {FilePath}",
- _openRouterSettings.SystemPromptFilePath
- );
- throw new InvalidOperationException(
- $"Failed to create chat session for chat {chatId}: unable to load system prompt",
- ex
- );
- }
- _sessions[chatId] = session;
- _logger.LogInformation(
- "Created new chat session for chat {ChatId}, type {ChatType}, title: {ChatTitle}, model: {Model}",
- chatId,
- chatType,
- chatTitle,
- defaultModel
- );
- }
-
- return session;
+ return _sessionStorage.GetOrCreate(chatId, chatType, chatTitle);
}
///
@@ -86,8 +43,9 @@ namespace ChatBot.Services
long chatId,
string username,
string message,
- string chatType = "private",
- string chatTitle = ""
+ string chatType = ChatTypes.Private,
+ string chatTitle = "",
+ CancellationToken cancellationToken = default
)
{
try
@@ -105,39 +63,44 @@ namespace ChatBot.Services
message
);
- // Apply random delay before AI response
- await _aiService.ApplyRandomDelayAsync();
-
// Get AI response
- var response = await _aiService.GenerateTextAsync(
+ var response = await _aiService.GenerateChatCompletionAsync(
session.GetAllMessages(),
session.MaxTokens,
- session.Temperature
+ session.Temperature,
+ cancellationToken
);
if (!string.IsNullOrEmpty(response))
{
- // Check for {empty} response
- if (response.Trim().Equals("{empty}", StringComparison.OrdinalIgnoreCase))
+ // Check for {empty} response - special marker to ignore the message
+ if (
+ response
+ .Trim()
+ .Equals(
+ AIResponseConstants.EmptyResponseMarker,
+ StringComparison.OrdinalIgnoreCase
+ )
+ )
{
_logger.LogInformation(
- "AI returned empty response for chat {ChatId}, ignoring message",
+ "AI returned empty response marker for chat {ChatId}, ignoring message",
chatId
);
- return string.Empty; // Return empty string to ignore the message
+ return string.Empty;
}
// Add AI response to history
session.AddAssistantMessage(response);
- _logger.LogInformation(
- "AI response generated for chat {ChatId}: {Response}",
+ _logger.LogDebug(
+ "AI response generated for chat {ChatId} (length: {Length})",
chatId,
- response
+ response.Length
);
}
- return response ?? "Извините, произошла ошибка при генерации ответа.";
+ return response ?? AIResponseConstants.DefaultErrorMessage;
}
catch (Exception ex)
{
@@ -157,7 +120,8 @@ namespace ChatBot.Services
string? systemPrompt = null
)
{
- if (_sessions.TryGetValue(chatId, out var session))
+ var session = _sessionStorage.Get(chatId);
+ if (session != null)
{
if (!string.IsNullOrEmpty(model))
session.Model = model;
@@ -178,7 +142,8 @@ namespace ChatBot.Services
///
public void ClearHistory(long chatId)
{
- if (_sessions.TryGetValue(chatId, out var session))
+ var session = _sessionStorage.Get(chatId);
+ if (session != null)
{
session.ClearHistory();
_logger.LogInformation("Cleared history for chat {ChatId}", chatId);
@@ -190,8 +155,7 @@ namespace ChatBot.Services
///
public ChatSession? GetSession(long chatId)
{
- _sessions.TryGetValue(chatId, out var session);
- return session;
+ return _sessionStorage.Get(chatId);
}
///
@@ -199,12 +163,7 @@ namespace ChatBot.Services
///
public bool RemoveSession(long chatId)
{
- var removed = _sessions.TryRemove(chatId, out _);
- if (removed)
- {
- _logger.LogInformation("Removed session for chat {ChatId}", chatId);
- }
- return removed;
+ return _sessionStorage.Remove(chatId);
}
///
@@ -212,7 +171,7 @@ namespace ChatBot.Services
///
public int GetActiveSessionsCount()
{
- return _sessions.Count;
+ return _sessionStorage.GetActiveSessionsCount();
}
///
@@ -220,23 +179,7 @@ namespace ChatBot.Services
///
public int CleanupOldSessions(int hoursOld = 24)
{
- var cutoffTime = DateTime.UtcNow.AddHours(-hoursOld);
- var sessionsToRemove = _sessions
- .Where(kvp => kvp.Value.LastUpdatedAt < cutoffTime)
- .Select(kvp => kvp.Key)
- .ToList();
-
- foreach (var chatId in sessionsToRemove)
- {
- _sessions.TryRemove(chatId, out _);
- }
-
- if (sessionsToRemove.Count > 0)
- {
- _logger.LogInformation("Cleaned up {Count} old sessions", sessionsToRemove.Count);
- }
-
- return sessionsToRemove.Count;
+ return _sessionStorage.CleanupOldSessions(hoursOld);
}
}
}
diff --git a/ChatBot/Services/ErrorHandlers/NetworkErrorHandler.cs b/ChatBot/Services/ErrorHandlers/NetworkErrorHandler.cs
new file mode 100644
index 0000000..011e7c6
--- /dev/null
+++ b/ChatBot/Services/ErrorHandlers/NetworkErrorHandler.cs
@@ -0,0 +1,49 @@
+using ChatBot.Services.Interfaces;
+
+namespace ChatBot.Services.ErrorHandlers
+{
+ ///
+ /// Error handler for network-related errors
+ ///
+ public class NetworkErrorHandler : IErrorHandler
+ {
+ private readonly ILogger _logger;
+
+ public NetworkErrorHandler(ILogger logger)
+ {
+ _logger = logger;
+ }
+
+ public bool CanHandle(Exception exception)
+ {
+ return exception is HttpRequestException
+                || (exception is TaskCanceledException tce && !tce.CancellationToken.IsCancellationRequested)
+ || exception.Message.Contains("timeout", StringComparison.OrdinalIgnoreCase)
+ || exception.Message.Contains("connection", StringComparison.OrdinalIgnoreCase);
+ }
+
+ public async Task HandleAsync(
+ Exception exception,
+ int attempt,
+ string currentModel,
+ CancellationToken cancellationToken = default
+ )
+ {
+ _logger.LogWarning(
+ exception,
+ "Network error on attempt {Attempt} for model {Model}",
+ attempt,
+ currentModel
+ );
+
+ // Apply exponential backoff for network errors
+ var delay = TimeSpan.FromSeconds(Math.Pow(2, attempt - 1));
+
+ _logger.LogInformation("Waiting {Delay} before retry due to network error", delay);
+
+ await Task.Delay(delay, cancellationToken);
+
+ return ErrorHandlingResult.Retry();
+ }
+ }
+}
diff --git a/ChatBot/Services/ErrorHandlers/RateLimitErrorHandler.cs b/ChatBot/Services/ErrorHandlers/RateLimitErrorHandler.cs
new file mode 100644
index 0000000..38dc039
--- /dev/null
+++ b/ChatBot/Services/ErrorHandlers/RateLimitErrorHandler.cs
@@ -0,0 +1,68 @@
+using ChatBot.Services.Interfaces;
+
+namespace ChatBot.Services.ErrorHandlers
+{
+ ///
+ /// Error handler for rate limit errors (HTTP 429)
+ ///
+ public class RateLimitErrorHandler : IErrorHandler
+ {
+ private readonly ModelService _modelService;
+ private readonly ILogger _logger;
+
+ public RateLimitErrorHandler(
+ ModelService modelService,
+ ILogger logger
+ )
+ {
+ _modelService = modelService;
+ _logger = logger;
+ }
+
+ public bool CanHandle(Exception exception)
+ {
+ return exception.Message.Contains("429")
+ || exception.Message.Contains("Too Many Requests")
+ || exception.Message.Contains("rate limit", StringComparison.OrdinalIgnoreCase);
+ }
+
+ public async Task HandleAsync(
+ Exception exception,
+ int attempt,
+ string currentModel,
+ CancellationToken cancellationToken = default
+ )
+ {
+ _logger.LogWarning(
+ exception,
+ "Rate limit exceeded on attempt {Attempt} for model {Model}",
+ attempt,
+ currentModel
+ );
+
+ // Try to switch to another model
+ if (_modelService.TrySwitchToNextModel())
+ {
+ var newModel = _modelService.GetCurrentModel();
+ _logger.LogInformation(
+ "Switching to alternative model: {Model} due to rate limiting",
+ newModel
+ );
+ return ErrorHandlingResult.Retry(newModel);
+ }
+
+ // If can't switch, apply exponential backoff
+ var delay = TimeSpan.FromSeconds(Math.Pow(2, attempt - 1));
+ var jitter = TimeSpan.FromMilliseconds(Random.Shared.Next(0, 2000));
+
+ _logger.LogInformation(
+ "No alternative model available, waiting {Delay} before retry",
+ delay.Add(jitter)
+ );
+
+ await Task.Delay(delay.Add(jitter), cancellationToken);
+
+ return ErrorHandlingResult.Retry();
+ }
+ }
+}
diff --git a/ChatBot/Services/ExponentialBackoffRetryPolicy.cs b/ChatBot/Services/ExponentialBackoffRetryPolicy.cs
new file mode 100644
index 0000000..7936a47
--- /dev/null
+++ b/ChatBot/Services/ExponentialBackoffRetryPolicy.cs
@@ -0,0 +1,111 @@
+using ChatBot.Common.Constants;
+using ChatBot.Models.Configuration;
+using ChatBot.Services.Interfaces;
+using Microsoft.Extensions.Options;
+
+namespace ChatBot.Services
+{
+ ///
+ /// Retry policy with exponential backoff and jitter
+ ///
+ public class ExponentialBackoffRetryPolicy : IRetryPolicy
+ {
+ private readonly int _maxRetries;
+ private readonly ILogger _logger;
+        private readonly IEnumerable<IErrorHandler> _errorHandlers;
+
+ public ExponentialBackoffRetryPolicy(
+ IOptions settings,
+ ILogger logger,
+            IEnumerable<IErrorHandler> errorHandlers
+ )
+ {
+ _maxRetries = settings.Value.MaxRetries;
+ _logger = logger;
+ _errorHandlers = errorHandlers;
+ }
+
+ public async Task ExecuteAsync(
+ Func> action,
+ CancellationToken cancellationToken = default
+ )
+ {
+ Exception? lastException = null;
+
+ for (int attempt = 1; attempt <= _maxRetries; attempt++)
+ {
+ try
+ {
+ return await action();
+ }
+ catch (Exception ex) when (attempt < _maxRetries)
+ {
+ lastException = ex;
+ LogAttemptFailure(ex, attempt);
+
+ if (!await HandleErrorAndDecideRetry(ex, attempt, cancellationToken))
+ break;
+ }
+ catch (Exception ex)
+ {
+ lastException = ex;
+ _logger.LogError(ex, "All {MaxRetries} attempts failed", _maxRetries);
+ }
+ }
+
+ throw new InvalidOperationException(
+ $"Failed after {_maxRetries} attempts",
+ lastException
+ );
+ }
+
+ private void LogAttemptFailure(Exception ex, int attempt)
+ {
+ _logger.LogWarning(ex, "Attempt {Attempt}/{MaxRetries} failed", attempt, _maxRetries);
+ }
+
+ private async Task HandleErrorAndDecideRetry(
+ Exception ex,
+ int attempt,
+ CancellationToken cancellationToken
+ )
+ {
+ var handler = _errorHandlers.FirstOrDefault(h => h.CanHandle(ex));
+ if (handler == null)
+ {
+ await DelayWithBackoff(attempt, cancellationToken);
+ return true;
+ }
+
+ var result = await handler.HandleAsync(ex, attempt, string.Empty, cancellationToken);
+
+ if (result.IsFatal)
+ {
+ _logger.LogError("Fatal error occurred: {ErrorMessage}", result.ErrorMessage);
+ return false;
+ }
+
+ return result.ShouldRetry;
+ }
+
+ private async Task DelayWithBackoff(int attempt, CancellationToken cancellationToken)
+ {
+ var baseDelay = TimeSpan.FromSeconds(
+ Math.Pow(2, attempt - 1) * RetryConstants.DefaultBaseDelaySeconds
+ );
+ var jitter = TimeSpan.FromMilliseconds(
+ Random.Shared.Next(0, RetryConstants.DefaultMaxJitterMs)
+ );
+ var delay = baseDelay.Add(jitter);
+
+ _logger.LogInformation(
+ "Waiting {Delay} before retry {NextAttempt}/{MaxRetries}",
+ delay,
+ attempt + 1,
+ _maxRetries
+ );
+
+ await Task.Delay(delay, cancellationToken);
+ }
+ }
+}
diff --git a/ChatBot/Services/FileSystemPromptProvider.cs b/ChatBot/Services/FileSystemPromptProvider.cs
new file mode 100644
index 0000000..0d8ce38
--- /dev/null
+++ b/ChatBot/Services/FileSystemPromptProvider.cs
@@ -0,0 +1,58 @@
+using System.Text;
+using ChatBot.Models.Configuration;
+using ChatBot.Services.Interfaces;
+using Microsoft.Extensions.Options;
+
+namespace ChatBot.Services
+{
+ ///
+ /// System prompt provider that loads prompt from file
+ ///
+ public class FileSystemPromptProvider : ISystemPromptProvider
+ {
+ private readonly string _filePath;
+ private readonly ILogger _logger;
+        private readonly Lazy<string> _cachedPrompt;
+
+ public FileSystemPromptProvider(
+ IOptions settings,
+ ILogger logger
+ )
+ {
+ _filePath = settings.Value.SystemPromptFilePath;
+ _logger = logger;
+            _cachedPrompt = new Lazy<string>(LoadPrompt);
+ }
+
+ public string GetSystemPrompt() => _cachedPrompt.Value;
+
+ private string LoadPrompt()
+ {
+ if (!File.Exists(_filePath))
+ {
+                _logger.LogError("System prompt file not found: {FilePath}", _filePath);
+                throw new FileNotFoundException(
+                    $"System prompt file not found: {_filePath}");
+ }
+
+ try
+ {
+ var prompt = File.ReadAllText(_filePath, Encoding.UTF8);
+ _logger.LogInformation(
+ "System prompt loaded from {FilePath} ({Length} characters)",
+ _filePath,
+ prompt.Length
+ );
+ return prompt;
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to read system prompt file: {FilePath}", _filePath);
+ throw new InvalidOperationException(
+ $"Failed to read system prompt file '{_filePath}': {ex.Message}",
+ ex
+ );
+ }
+ }
+ }
+}
diff --git a/ChatBot/Services/HealthChecks/OllamaHealthCheck.cs b/ChatBot/Services/HealthChecks/OllamaHealthCheck.cs
new file mode 100644
index 0000000..99406f3
--- /dev/null
+++ b/ChatBot/Services/HealthChecks/OllamaHealthCheck.cs
@@ -0,0 +1,56 @@
+using ChatBot.Services.Interfaces;
+using Microsoft.Extensions.Diagnostics.HealthChecks;
+
+namespace ChatBot.Services.HealthChecks
+{
+ ///
+ /// Health check for Ollama API connectivity
+ ///
+ public class OllamaHealthCheck : IHealthCheck
+ {
+ private readonly IOllamaClient _client;
+ private readonly ILogger _logger;
+
+ public OllamaHealthCheck(IOllamaClient client, ILogger logger)
+ {
+ _client = client;
+ _logger = logger;
+ }
+
+ public async Task CheckHealthAsync(
+ HealthCheckContext context,
+ CancellationToken cancellationToken = default
+ )
+ {
+ try
+ {
+                var models = await _client.ListLocalModelsAsync(cancellationToken);
+ var modelCount = models.Count();
+
+ _logger.LogDebug(
+ "Ollama health check passed. Available models: {Count}",
+ modelCount
+ );
+
+ return HealthCheckResult.Healthy(
+ $"Ollama is accessible. Available models: {modelCount}",
+ new Dictionary { { "modelCount", modelCount } }
+ );
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, "Ollama health check failed");
+
+ return HealthCheckResult.Unhealthy(
+ "Cannot connect to Ollama API",
+ ex,
+ new Dictionary
+ {
+ { "error", ex.Message },
+ { "exceptionType", ex.GetType().Name },
+ }
+ );
+ }
+ }
+ }
+}
diff --git a/ChatBot/Services/HealthChecks/TelegramBotHealthCheck.cs b/ChatBot/Services/HealthChecks/TelegramBotHealthCheck.cs
new file mode 100644
index 0000000..525e643
--- /dev/null
+++ b/ChatBot/Services/HealthChecks/TelegramBotHealthCheck.cs
@@ -0,0 +1,59 @@
+using Microsoft.Extensions.Diagnostics.HealthChecks;
+using Telegram.Bot;
+
+namespace ChatBot.Services.HealthChecks
+{
+ ///
+ /// Health check for Telegram Bot API connectivity
+ ///
+ public class TelegramBotHealthCheck : IHealthCheck
+ {
+ private readonly ITelegramBotClient _botClient;
+ private readonly ILogger _logger;
+
+ public TelegramBotHealthCheck(
+ ITelegramBotClient botClient,
+ ILogger logger
+ )
+ {
+ _botClient = botClient;
+ _logger = logger;
+ }
+
+ public async Task CheckHealthAsync(
+ HealthCheckContext context,
+ CancellationToken cancellationToken = default
+ )
+ {
+ try
+ {
+ var me = await _botClient.GetMe(cancellationToken: cancellationToken);
+
+ _logger.LogDebug("Telegram health check passed. Bot: @{Username}", me.Username);
+
+ return HealthCheckResult.Healthy(
+ $"Telegram bot is accessible: @{me.Username}",
+ new Dictionary
+ {
+ { "botUsername", me.Username ?? "unknown" },
+ { "botId", me.Id },
+ }
+ );
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, "Telegram health check failed");
+
+ return HealthCheckResult.Unhealthy(
+ "Cannot connect to Telegram Bot API",
+ ex,
+ new Dictionary
+ {
+ { "error", ex.Message },
+ { "exceptionType", ex.GetType().Name },
+ }
+ );
+ }
+ }
+ }
+}
diff --git a/ChatBot/Services/InMemorySessionStorage.cs b/ChatBot/Services/InMemorySessionStorage.cs
new file mode 100644
index 0000000..19c31ab
--- /dev/null
+++ b/ChatBot/Services/InMemorySessionStorage.cs
@@ -0,0 +1,115 @@
+using System.Collections.Concurrent;
+using ChatBot.Models;
+using ChatBot.Models.Configuration;
+using ChatBot.Services.Interfaces;
+using Microsoft.Extensions.Options;
+
+namespace ChatBot.Services
+{
+    /// <summary>
+    /// In-memory implementation of session storage
+    /// </summary>
+    public class InMemorySessionStorage : ISessionStorage
+    {
+        private readonly ConcurrentDictionary<long, ChatSession> _sessions = new();
+        private readonly ILogger<InMemorySessionStorage> _logger;
+        private readonly ISystemPromptProvider _systemPromptProvider;
+        private readonly OllamaSettings _ollamaSettings;
+
+        public InMemorySessionStorage(
+            ILogger<InMemorySessionStorage> logger,
+            ISystemPromptProvider systemPromptProvider,
+            IOptions<OllamaSettings> ollamaSettings
+        )
+        {
+            _logger = logger;
+            _systemPromptProvider = systemPromptProvider;
+            _ollamaSettings = ollamaSettings.Value;
+        }
+
+        public ChatSession GetOrCreate(
+            long chatId,
+            string chatType = "private",
+            string chatTitle = ""
+        )
+        {
+            if (!_sessions.TryGetValue(chatId, out var session))
+            {
+                try
+                {
+                    session = new ChatSession
+                    {
+                        ChatId = chatId,
+                        ChatType = chatType,
+                        ChatTitle = chatTitle,
+                        Model = string.Empty, // Will be set by ModelService
+                        MaxTokens = _ollamaSettings.MaxTokens,
+                        Temperature = _ollamaSettings.Temperature,
+                        SystemPrompt = _systemPromptProvider.GetSystemPrompt(),
+                    };
+
+                    _sessions[chatId] = session;
+
+                    _logger.LogInformation(
+                        "Created new chat session for chat {ChatId}, type: {ChatType}, title: {ChatTitle}",
+                        chatId,
+                        chatType,
+                        chatTitle
+                    );
+                }
+                catch (Exception ex)
+                {
+                    _logger.LogError(ex, "Failed to create chat session for chat {ChatId}", chatId);
+                    throw new InvalidOperationException(
+                        $"Failed to create chat session for chat {chatId}",
+                        ex
+                    );
+                }
+            }
+
+            return session;
+        }
+
+        public ChatSession? Get(long chatId)
+        {
+            _sessions.TryGetValue(chatId, out var session);
+            return session;
+        }
+
+        public bool Remove(long chatId)
+        {
+            var removed = _sessions.TryRemove(chatId, out _);
+            if (removed)
+            {
+                _logger.LogInformation("Removed session for chat {ChatId}", chatId);
+            }
+            return removed;
+        }
+
+        public int GetActiveSessionsCount()
+        {
+            return _sessions.Count;
+        }
+
+        public int CleanupOldSessions(int hoursOld = 24)
+        {
+            var cutoffTime = DateTime.UtcNow.AddHours(-hoursOld);
+            var sessionsToRemove = _sessions
+                .Where(kvp => kvp.Value.LastUpdatedAt < cutoffTime)
+                .Select(kvp => kvp.Key)
+                .ToList();
+
+            foreach (var chatId in sessionsToRemove)
+            {
+                _sessions.TryRemove(chatId, out _);
+            }
+
+            if (sessionsToRemove.Count > 0)
+            {
+                _logger.LogInformation("Cleaned up {Count} old sessions", sessionsToRemove.Count);
+            }
+
+            return sessionsToRemove.Count;
+        }
+    }
+}
diff --git a/ChatBot/Services/Interfaces/IAIService.cs b/ChatBot/Services/Interfaces/IAIService.cs
new file mode 100644
index 0000000..fde44f1
--- /dev/null
+++ b/ChatBot/Services/Interfaces/IAIService.cs
@@ -0,0 +1,20 @@
+using ChatBot.Models.Dto;
+
+namespace ChatBot.Services.Interfaces
+{
+    /// <summary>
+    /// Interface for AI text generation service
+    /// </summary>
+    public interface IAIService
+    {
+        /// <summary>
+        /// Generate chat completion using AI
+        /// </summary>
+        Task<string> GenerateChatCompletionAsync(
+            List<ChatMessageDto> messages,
+            int? maxTokens = null,
+            double? temperature = null,
+            CancellationToken cancellationToken = default
+        );
+    }
+}
diff --git a/ChatBot/Services/Interfaces/IErrorHandler.cs b/ChatBot/Services/Interfaces/IErrorHandler.cs
new file mode 100644
index 0000000..7906df7
--- /dev/null
+++ b/ChatBot/Services/Interfaces/IErrorHandler.cs
@@ -0,0 +1,44 @@
+using ChatBot.Common.Results;
+
+namespace ChatBot.Services.Interfaces
+{
+    /// <summary>
+    /// Interface for error handling strategy
+    /// </summary>
+    public interface IErrorHandler
+    {
+        /// <summary>
+        /// Check if this handler can handle the exception
+        /// </summary>
+        bool CanHandle(Exception exception);
+
+        /// <summary>
+        /// Handle the exception and return result
+        /// </summary>
+        Task<ErrorHandlingResult> HandleAsync(
+            Exception exception,
+            int attempt,
+            string currentModel,
+            CancellationToken cancellationToken = default
+        );
+    }
+
+    /// <summary>
+    /// Result of error handling
+    /// </summary>
+    public class ErrorHandlingResult
+    {
+        public bool ShouldRetry { get; set; }
+        public string? NewModel { get; set; }
+        public bool IsFatal { get; set; }
+        public string? ErrorMessage { get; set; }
+
+        public static ErrorHandlingResult Retry(string? newModel = null) =>
+            new() { ShouldRetry = true, NewModel = newModel };
+
+        public static ErrorHandlingResult Fatal(string errorMessage) =>
+            new() { IsFatal = true, ErrorMessage = errorMessage };
+
+        public static ErrorHandlingResult NoRetry() => new() { ShouldRetry = false };
+    }
+}
diff --git a/ChatBot/Services/Interfaces/IOllamaClient.cs b/ChatBot/Services/Interfaces/IOllamaClient.cs
new file mode 100644
index 0000000..d3f7d43
--- /dev/null
+++ b/ChatBot/Services/Interfaces/IOllamaClient.cs
@@ -0,0 +1,26 @@
+using OllamaSharp.Models;
+using OllamaSharp.Models.Chat;
+
+namespace ChatBot.Services.Interfaces
+{
+    /// <summary>
+    /// Interface for Ollama API client
+    /// </summary>
+    public interface IOllamaClient
+    {
+        /// <summary>
+        /// Selected model name
+        /// </summary>
+        string SelectedModel { get; set; }
+
+        /// <summary>
+        /// Stream chat completion
+        /// </summary>
+        IAsyncEnumerable<ChatResponseStream?> ChatAsync(ChatRequest request);
+
+        /// <summary>
+        /// List available local models
+        /// </summary>
+        Task<IEnumerable<Model>> ListLocalModelsAsync();
+    }
+}
diff --git a/ChatBot/Services/Interfaces/IRetryPolicy.cs b/ChatBot/Services/Interfaces/IRetryPolicy.cs
new file mode 100644
index 0000000..abb557a
--- /dev/null
+++ b/ChatBot/Services/Interfaces/IRetryPolicy.cs
@@ -0,0 +1,16 @@
+namespace ChatBot.Services.Interfaces
+{
+    /// <summary>
+    /// Interface for retry policy
+    /// </summary>
+    public interface IRetryPolicy
+    {
+        /// <summary>
+        /// Execute an action with retry logic
+        /// </summary>
+        Task<T> ExecuteAsync<T>(
+            Func<Task<T>> action,
+            CancellationToken cancellationToken = default
+        );
+    }
+}
diff --git a/ChatBot/Services/Interfaces/ISessionStorage.cs b/ChatBot/Services/Interfaces/ISessionStorage.cs
new file mode 100644
index 0000000..8b23691
--- /dev/null
+++ b/ChatBot/Services/Interfaces/ISessionStorage.cs
@@ -0,0 +1,35 @@
+using ChatBot.Models;
+
+namespace ChatBot.Services.Interfaces
+{
+    /// <summary>
+    /// Interface for chat session storage
+    /// </summary>
+    public interface ISessionStorage
+    {
+        /// <summary>
+        /// Get or create a chat session
+        /// </summary>
+        ChatSession GetOrCreate(long chatId, string chatType = "private", string chatTitle = "");
+
+        /// <summary>
+        /// Get a session by chat ID
+        /// </summary>
+        ChatSession? Get(long chatId);
+
+        /// <summary>
+        /// Remove a session
+        /// </summary>
+        bool Remove(long chatId);
+
+        /// <summary>
+        /// Get count of active sessions
+        /// </summary>
+        int GetActiveSessionsCount();
+
+        /// <summary>
+        /// Clean up old sessions
+        /// </summary>
+        int CleanupOldSessions(int hoursOld = 24);
+    }
+}
diff --git a/ChatBot/Services/Interfaces/ISystemPromptProvider.cs b/ChatBot/Services/Interfaces/ISystemPromptProvider.cs
new file mode 100644
index 0000000..3d3ac49
--- /dev/null
+++ b/ChatBot/Services/Interfaces/ISystemPromptProvider.cs
@@ -0,0 +1,13 @@
+namespace ChatBot.Services.Interfaces
+{
+    /// <summary>
+    /// Interface for system prompt provider
+    /// </summary>
+    public interface ISystemPromptProvider
+    {
+        /// <summary>
+        /// Get the system prompt
+        /// </summary>
+        string GetSystemPrompt();
+    }
+}
diff --git a/ChatBot/Services/ModelService.cs b/ChatBot/Services/ModelService.cs
index d753835..b6bcd7e 100644
--- a/ChatBot/Services/ModelService.cs
+++ b/ChatBot/Services/ModelService.cs
@@ -1,37 +1,36 @@
using ChatBot.Models.Configuration;
using Microsoft.Extensions.Options;
-using ServiceStack;
+using OllamaSharp;
namespace ChatBot.Services
{
+ ///
+ /// Service for managing AI models and model selection
+ ///
public class ModelService
{
private readonly ILogger _logger;
- private readonly OpenRouterSettings _openRouterSettings;
- private readonly JsonApiClient _client;
+ private readonly OllamaSettings _ollamaSettings;
+ private readonly OllamaApiClient _client;
private List _availableModels = new();
private int _currentModelIndex = 0;
- public ModelService(
- ILogger logger,
- IOptions openRouterSettings
- )
+ public ModelService(ILogger logger, IOptions ollamaSettings)
{
_logger = logger;
- _openRouterSettings = openRouterSettings.Value;
- _client = new JsonApiClient(_openRouterSettings.Url)
- {
- BearerToken = _openRouterSettings.Token,
- };
+ _ollamaSettings = ollamaSettings.Value;
+ _client = new OllamaApiClient(new Uri(_ollamaSettings.Url));
}
+ ///
+ /// Initialize the service by loading available models
+ ///
public async Task InitializeAsync()
{
try
{
var models = await LoadModelsFromApiAsync();
- _availableModels =
- models.Count > 0 ? models : _openRouterSettings.AvailableModels.ToList();
+ _availableModels = models.Count > 0 ? models : GetConfiguredModelNames();
SetDefaultModel();
_logger.LogInformation("Current model: {Model}", GetCurrentModel());
@@ -39,133 +38,104 @@ namespace ChatBot.Services
catch (Exception ex)
{
_logger.LogError(ex, "Failed to initialize models, using configuration fallback");
- _availableModels = _openRouterSettings.AvailableModels.ToList();
+ _availableModels = GetConfiguredModelNames();
_currentModelIndex = 0;
}
}
+ ///
+ /// Load models from Ollama API
+ ///
private async Task> LoadModelsFromApiAsync()
{
- var response = await _client.GetAsync("/v1/models");
- if (response == null)
+ try
{
- _logger.LogInformation(
- "Using {Count} models from configuration (API unavailable)",
- _openRouterSettings.AvailableModels.Count
+ var models = await _client.ListLocalModelsAsync();
+ var modelNames = models.Select(m => m.Name).ToList();
+
+ if (modelNames.Count > 0)
+ {
+ _logger.LogInformation(
+ "Loaded {Count} models from Ollama API: {Models}",
+ modelNames.Count,
+ string.Join(", ", modelNames)
+ );
+ return modelNames;
+ }
+
+ _logger.LogInformation("No models found in Ollama API, using configured models");
+ return new List();
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(
+ ex,
+ "Failed to load models from Ollama API, using configuration fallback"
);
return new List();
}
-
- var models = ParseModelsFromResponse(response);
- if (models.Count > 0)
- {
- _logger.LogInformation(
- "Loaded {Count} models from OpenRouter API",
- (int)models.Count
- );
- return models;
- }
-
- _logger.LogInformation(
- "Using {Count} models from configuration",
- _openRouterSettings.AvailableModels.Count
- );
- return new List();
- }
-
- private static List ParseModelsFromResponse(dynamic response)
- {
- var models = new List();
-
- if (response is not System.Text.Json.JsonElement jsonElement)
- return models;
-
- if (
- !jsonElement.TryGetProperty("data", out var dataElement)
- || dataElement.ValueKind != System.Text.Json.JsonValueKind.Array
- )
- return models;
-
- foreach (var modelElement in dataElement.EnumerateArray())
- {
- if (modelElement.TryGetProperty("id", out var idElement))
- {
- var modelId = idElement.GetString();
- if (!string.IsNullOrEmpty(modelId))
- {
- models.Add(modelId);
- }
- }
- }
-
- return models;
}
+ ///
+ /// Set the default model based on configuration
+ ///
private void SetDefaultModel()
{
- if (
- string.IsNullOrEmpty(_openRouterSettings.DefaultModel)
- || !_availableModels.Contains(_openRouterSettings.DefaultModel)
- )
+ if (_availableModels.Count == 0)
{
- _currentModelIndex = 0;
+ _logger.LogWarning("No models available");
return;
}
- _currentModelIndex = _availableModels.IndexOf(_openRouterSettings.DefaultModel);
+ // Try to find a model from configuration
+ var configuredModels = _ollamaSettings
+ .ModelConfigurations.Where(m => m.IsEnabled)
+ .Select(m => m.Name)
+ .ToList();
+
+ if (configuredModels.Count > 0)
+ {
+ var firstConfiguredModel = configuredModels[0];
+ var index = _availableModels.FindIndex(m =>
+ m.Equals(firstConfiguredModel, StringComparison.OrdinalIgnoreCase)
+ );
+
+ if (index >= 0)
+ {
+ _currentModelIndex = index;
+ _logger.LogInformation("Using configured model: {Model}", firstConfiguredModel);
+ return;
+ }
+ }
+
+ // Fallback to first available model
+ _currentModelIndex = 0;
+ _logger.LogInformation("Using first available model: {Model}", _availableModels[0]);
}
+ ///
+ /// Get the name of the currently selected model
+ ///
public string GetCurrentModel()
{
return _availableModels.Count > 0 ? _availableModels[_currentModelIndex] : string.Empty;
}
///
- /// Получает настройки для текущей модели
+ /// Get all available model names
///
- /// Настройки модели или настройки по умолчанию
- public ModelSettings GetCurrentModelSettings()
+ public List GetAvailableModels()
{
- var currentModel = GetCurrentModel();
- if (string.IsNullOrEmpty(currentModel))
- {
- return GetDefaultModelSettings();
- }
-
- // Ищем настройки для текущей модели
- var modelConfig = _openRouterSettings.ModelConfigurations.FirstOrDefault(m =>
- m.Name.Equals(currentModel, StringComparison.OrdinalIgnoreCase)
- );
-
- if (modelConfig != null)
- {
- return modelConfig;
- }
-
- // Если настройки не найдены, возвращаем настройки по умолчанию
- return GetDefaultModelSettings();
+ return new List(_availableModels);
}
///
- /// Получает настройки по умолчанию
+ /// Switch to the next available model (round-robin)
///
- /// Настройки по умолчанию
- private ModelSettings GetDefaultModelSettings()
- {
- return new ModelSettings
- {
- Name = GetCurrentModel(),
- MaxTokens = _openRouterSettings.MaxTokens,
- Temperature = _openRouterSettings.Temperature,
- IsEnabled = true,
- };
- }
-
public bool TrySwitchToNextModel()
{
if (_availableModels.Count <= 1)
{
- _logger.LogWarning("No alternative models available for switching");
return false;
}
@@ -174,14 +144,83 @@ namespace ChatBot.Services
return true;
}
- public List GetAvailableModels()
+ ///
+ /// Switch to a specific model by name
+ ///
+ public bool TrySwitchToModel(string modelName)
{
- return _availableModels.ToList();
+ var index = _availableModels.FindIndex(m =>
+ m.Equals(modelName, StringComparison.OrdinalIgnoreCase)
+ );
+
+ if (index >= 0)
+ {
+ _currentModelIndex = index;
+ _logger.LogInformation("Switched to model: {Model}", modelName);
+ return true;
+ }
+
+ _logger.LogWarning("Model {Model} not found in available models", modelName);
+ return false;
}
- public bool HasAlternativeModels()
+ ///
+ /// Get settings for the current model
+ ///
+ public ModelSettings GetCurrentModelSettings()
{
- return _availableModels.Count > 1;
+ var currentModel = GetCurrentModel();
+ if (string.IsNullOrEmpty(currentModel))
+ {
+ return GetDefaultModelSettings();
+ }
+
+ // Find settings for the current model
+ var modelConfig = _ollamaSettings.ModelConfigurations.FirstOrDefault(m =>
+ m.Name.Equals(currentModel, StringComparison.OrdinalIgnoreCase)
+ );
+
+ return modelConfig ?? GetDefaultModelSettings();
+ }
+
+ ///
+ /// Get default model settings
+ ///
+ private ModelSettings GetDefaultModelSettings()
+ {
+ return new ModelSettings
+ {
+ Name = GetCurrentModel(),
+ MaxTokens = _ollamaSettings.MaxTokens,
+ Temperature = _ollamaSettings.Temperature,
+ IsEnabled = true,
+ };
+ }
+
+ ///
+ /// Get list of configured model names
+ ///
+ private List GetConfiguredModelNames()
+ {
+ var models = _ollamaSettings
+ .ModelConfigurations.Where(m => m.IsEnabled)
+ .Select(m => m.Name)
+ .ToList();
+
+ if (models.Count > 0)
+ {
+ _logger.LogInformation(
+ "Using {Count} configured models: {Models}",
+ models.Count,
+ string.Join(", ", models)
+ );
+ }
+ else
+ {
+ _logger.LogWarning("No configured models found");
+ }
+
+ return models;
}
}
}
diff --git a/ChatBot/Services/OllamaClientAdapter.cs b/ChatBot/Services/OllamaClientAdapter.cs
new file mode 100644
index 0000000..eb17798
--- /dev/null
+++ b/ChatBot/Services/OllamaClientAdapter.cs
@@ -0,0 +1,39 @@
+using ChatBot.Services.Interfaces;
+using OllamaSharp;
+using OllamaSharp.Models;
+using OllamaSharp.Models.Chat;
+
+namespace ChatBot.Services
+{
+    /// <summary>
+    /// Adapter for OllamaSharp client to implement IOllamaClient interface
+    /// </summary>
+    public class OllamaClientAdapter : IOllamaClient
+    {
+        private readonly OllamaApiClient _client;
+
+        public OllamaClientAdapter(string url)
+        {
+            if (string.IsNullOrWhiteSpace(url))
+                throw new ArgumentException("URL cannot be empty", nameof(url));
+
+            _client = new OllamaApiClient(new Uri(url));
+        }
+
+        public string SelectedModel
+        {
+            get => _client.SelectedModel;
+            set => _client.SelectedModel = value;
+        }
+
+        public IAsyncEnumerable<ChatResponseStream?> ChatAsync(ChatRequest request)
+        {
+            return _client.ChatAsync(request);
+        }
+
+        public Task<IEnumerable<Model>> ListLocalModelsAsync()
+        {
+            return _client.ListLocalModelsAsync();
+        }
+    }
+}
diff --git a/ChatBot/Services/Telegram/Commands/CommandRegistry.cs b/ChatBot/Services/Telegram/Commands/CommandRegistry.cs
index 38daa2a..e1dbd8c 100644
--- a/ChatBot/Services/Telegram/Commands/CommandRegistry.cs
+++ b/ChatBot/Services/Telegram/Commands/CommandRegistry.cs
@@ -11,15 +11,26 @@ namespace ChatBot.Services.Telegram.Commands
private readonly Dictionary _commands = new();
private readonly ILogger _logger;
- public CommandRegistry(ILogger logger)
+ public CommandRegistry(
+ ILogger logger,
+ IEnumerable commands
+ )
{
_logger = logger;
+
+ // Register all commands
+ foreach (var command in commands)
+ {
+ RegisterCommand(command);
+ }
+
+ _logger.LogInformation("Registered {Count} commands", _commands.Count);
}
///
/// Регистрирует команду
///
- public void RegisterCommand(ITelegramCommand command)
+ private void RegisterCommand(ITelegramCommand command)
{
if (command == null)
{
@@ -37,47 +48,6 @@ namespace ChatBot.Services.Telegram.Commands
_logger.LogDebug("Registered command: {CommandName}", commandName);
}
- ///
- /// Регистрирует все команды из сборки
- ///
- public void RegisterCommandsFromAssembly(
- Assembly assembly,
- IServiceProvider serviceProvider
- )
- {
- var commandTypes = assembly
- .GetTypes()
- .Where(t =>
- t.IsClass && !t.IsAbstract && typeof(ITelegramCommand).IsAssignableFrom(t)
- )
- .Where(t => t.GetCustomAttribute() != null)
- .OrderBy(t => t.GetCustomAttribute()?.Priority ?? 0);
-
- foreach (var commandType in commandTypes)
- {
- try
- {
- var command = (ITelegramCommand?)
- Activator.CreateInstance(
- commandType,
- GetConstructorParameters(commandType, serviceProvider)
- );
- if (command != null)
- {
- RegisterCommand(command);
- }
- }
- catch (Exception ex)
- {
- _logger.LogError(
- ex,
- "Failed to register command {CommandType}",
- commandType.Name
- );
- }
- }
- }
-
///
/// Получает команду по имени
///
@@ -96,7 +66,7 @@ namespace ChatBot.Services.Telegram.Commands
}
///
- /// /// Получает все команды с их описаниями, отсортированные по приоритету
+ /// Получает все команды с их описаниями, отсортированные по приоритету
///
public IEnumerable<(string CommandName, string Description)> GetCommandsWithDescriptions()
{
@@ -114,38 +84,5 @@ namespace ChatBot.Services.Telegram.Commands
{
return _commands.Values.FirstOrDefault(cmd => cmd.CanHandle(messageText));
}
-
- ///
- /// Получает параметры конструктора для создания команды
- ///
- private object[] GetConstructorParameters(
- Type commandType,
- IServiceProvider serviceProvider
- )
- {
- var constructor = commandType.GetConstructors().FirstOrDefault();
- if (constructor == null)
- {
- return Array.Empty