diff --git a/ChatBot/ChatBot.csproj b/ChatBot/ChatBot.csproj
index 851424a..bdf7edf 100644
--- a/ChatBot/ChatBot.csproj
+++ b/ChatBot/ChatBot.csproj
@@ -21,12 +21,4 @@
- <ItemGroup>
- <None Update="appsettings.Models.json">
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
- </None>
- <None Update="system-prompt.txt">
- <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
- </None>
- </ItemGroup>
diff --git a/ChatBot/Models/ChatSession.cs b/ChatBot/Models/ChatSession.cs
index 23dbc89..d6d1910 100644
--- a/ChatBot/Models/ChatSession.cs
+++ b/ChatBot/Models/ChatSession.cs
@@ -1,5 +1,3 @@
-using System;
-using System.Collections.Generic;
using ChatBot.Models.Dto;
namespace ChatBot.Models
@@ -39,21 +37,6 @@ namespace ChatBot.Models
/// </summary>
public string Model { get; set; } = string.Empty;
- /// <summary>
- /// Maximum tokens for AI response
- /// </summary>
- public int MaxTokens { get; set; } = 1000;
-
- /// <summary>
- /// Temperature for AI response (0.0 to 2.0)
- /// </summary>
- public double Temperature { get; set; } = 0.7;
-
- /// <summary>
- /// System prompt for the AI
- /// </summary>
- public string SystemPrompt { get; set; } = string.Empty;
-
/// <summary>
/// When the session was created
/// </summary>
@@ -119,22 +102,11 @@ namespace ChatBot.Models
}
/// <summary>
- /// Get all messages including system prompt
+ /// Get all messages
/// </summary>
public List<ChatMessage> GetAllMessages()
{
- var messages = new List<ChatMessage>();
-
- // Add system message if exists
- if (!string.IsNullOrEmpty(SystemPrompt))
- {
- messages.Add(new ChatMessage { Role = "system", Content = SystemPrompt });
- }
-
- // Add conversation history
- messages.AddRange(MessageHistory);
-
- return messages;
+ return new List<ChatMessage>(MessageHistory);
}
/// <summary>
diff --git a/ChatBot/Models/Configuration/ModelSettings.cs b/ChatBot/Models/Configuration/ModelSettings.cs
deleted file mode 100644
index d7f861b..0000000
--- a/ChatBot/Models/Configuration/ModelSettings.cs
+++ /dev/null
@@ -1,33 +0,0 @@
-namespace ChatBot.Models.Configuration
-{
- /// <summary>
- /// Settings for a specific AI model
- /// </summary>
- public class ModelSettings
- {
- /// <summary>
- /// Model name
- /// </summary>
- public string Name { get; set; } = string.Empty;
-
- /// <summary>
- /// Maximum number of tokens for this model
- /// </summary>
- public int MaxTokens { get; set; } = 1000;
-
- /// <summary>
- /// Generation temperature for this model (creativity, 0.0 to 2.0)
- /// </summary>
- public double Temperature { get; set; } = 0.7;
-
- /// <summary>
- /// Model description
- /// </summary>
- public string Description { get; set; } = string.Empty;
-
- /// <summary>
- /// Whether the model is enabled (available for use)
- /// </summary>
- public bool IsEnabled { get; set; } = true;
- }
-}
diff --git a/ChatBot/Models/Configuration/OllamaSettings.cs b/ChatBot/Models/Configuration/OllamaSettings.cs
index 789a87d..aa3c50f 100644
--- a/ChatBot/Models/Configuration/OllamaSettings.cs
+++ b/ChatBot/Models/Configuration/OllamaSettings.cs
@@ -11,28 +11,13 @@ namespace ChatBot.Models.Configuration
public string Url { get; set; } = "http://localhost:11434";
/// <summary>
- /// Per-model settings, one entry per model
+ /// Default model name
/// </summary>
- public List<ModelSettings> ModelConfigurations { get; set; } = new();
+ public string DefaultModel { get; set; } = "llama3";
/// <summary>
/// Maximum number of retry attempts on errors
/// </summary>
public int MaxRetries { get; set; } = 3;
-
- /// <summary>
- /// Maximum number of tokens in a response (default when not set for a specific model)
- /// </summary>
- public int MaxTokens { get; set; } = 1000;
-
- /// <summary>
- /// Default generation temperature (response creativity, 0.0 to 2.0)
- /// </summary>
- public double Temperature { get; set; } = 0.7;
-
- /// <summary>
- /// Path to the system prompt file
- /// </summary>
- public string SystemPromptFilePath { get; set; } = "system-prompt.txt";
}
}
diff --git a/ChatBot/Models/Configuration/Validators/ConfigurationValidator.cs b/ChatBot/Models/Configuration/Validators/ConfigurationValidator.cs
index e8a8967..ad48ee0 100644
--- a/ChatBot/Models/Configuration/Validators/ConfigurationValidator.cs
+++ b/ChatBot/Models/Configuration/Validators/ConfigurationValidator.cs
@@ -1,5 +1,3 @@
-using ChatBot.Models.Configuration;
-
namespace ChatBot.Models.Configuration.Validators
{
/// <summary>
@@ -66,9 +64,13 @@ namespace ChatBot.Models.Configuration.Validators
// Validate the main components of the Ollama settings
ValidateUrl(settings.Url, errors);
- ValidateModelConfigurations(settings.ModelConfigurations, errors);
ValidateNumericSettings(settings, errors);
+ if (string.IsNullOrWhiteSpace(settings.DefaultModel))
+ {
+ errors.Add("Ollama:DefaultModel is required");
+ }
+
return new ValidationResult { IsValid = !errors.Any(), Errors = errors };
}
@@ -94,45 +96,6 @@ namespace ChatBot.Models.Configuration.Validators
}
}
- /// <summary>
- /// Validates the model configurations
- /// </summary>
- /// <param name="modelConfigurations">Model configurations</param>
- /// <param name="errors">List of validation errors</param>
- private static void ValidateModelConfigurations(
- IEnumerable<ModelSettings> modelConfigurations,
- List<string> errors
- )
- {
- if (modelConfigurations == null)
- {
- return; // Model configurations are optional
- }
-
- foreach (var modelConfig in modelConfigurations)
- {
- if (string.IsNullOrWhiteSpace(modelConfig.Name))
- {
- errors.Add("ModelConfigurations contains model with empty name");
- continue;
- }
-
- if (modelConfig.MaxTokens < 1 || modelConfig.MaxTokens > 100000)
- {
- errors.Add(
- $"ModelConfigurations model '{modelConfig.Name}' MaxTokens must be between 1 and 100000"
- );
- }
-
- if (modelConfig.Temperature < 0.0 || modelConfig.Temperature > 2.0)
- {
- errors.Add(
- $"ModelConfigurations model '{modelConfig.Name}' Temperature must be between 0.0 and 2.0"
- );
- }
- }
- }
-
/// <summary>
/// Validates the numeric parameters of the Ollama settings
/// </summary>
@@ -145,18 +108,6 @@ namespace ChatBot.Models.Configuration.Validators
{
errors.Add("Ollama:MaxRetries must be between 1 and 10");
}
-
- // Check the maximum token count (1-100000)
- if (settings.MaxTokens < 1 || settings.MaxTokens > 100000)
- {
- errors.Add("Ollama:MaxTokens must be between 1 and 100000");
- }
-
- // Check the temperature (0.0-2.0)
- if (settings.Temperature < 0.0 || settings.Temperature > 2.0)
- {
- errors.Add("Ollama:Temperature must be between 0.0 and 2.0");
- }
}
}
diff --git a/ChatBot/Models/Configuration/Validators/OllamaSettingsValidator.cs b/ChatBot/Models/Configuration/Validators/OllamaSettingsValidator.cs
index a9e53c2..419deb0 100644
--- a/ChatBot/Models/Configuration/Validators/OllamaSettingsValidator.cs
+++ b/ChatBot/Models/Configuration/Validators/OllamaSettingsValidator.cs
@@ -13,8 +13,7 @@ namespace ChatBot.Models.Configuration.Validators
ValidateUrl(options, errors);
ValidateRetryAndTokenSettings(options, errors);
- ValidateSystemPromptPath(options, errors);
- ValidateModelConfigurations(options, errors);
+ ValidateDefaultModel(options, errors);
return errors.Count > 0
? ValidateOptionsResult.Fail(errors)
@@ -39,39 +38,12 @@ namespace ChatBot.Models.Configuration.Validators
if (options.MaxRetries > 10)
errors.Add($"MaxRetries should not exceed 10, got: {options.MaxRetries}");
-
- if (options.MaxTokens < 1)
- errors.Add($"MaxTokens must be at least 1, got: {options.MaxTokens}");
-
- if (options.Temperature < 0 || options.Temperature > 2)
- errors.Add($"Temperature must be between 0 and 2, got: {options.Temperature}");
}
- private static void ValidateSystemPromptPath(OllamaSettings options, List<string> errors)
+ private static void ValidateDefaultModel(OllamaSettings options, List<string> errors)
{
- if (string.IsNullOrWhiteSpace(options.SystemPromptFilePath))
- errors.Add("SystemPromptFilePath is required");
- }
-
- private static void ValidateModelConfigurations(OllamaSettings options, List<string> errors)
- {
- if (options.ModelConfigurations.Count == 0)
- {
- errors.Add("At least one model configuration is required");
- return;
- }
-
- foreach (var model in options.ModelConfigurations)
- {
- if (string.IsNullOrWhiteSpace(model.Name))
- errors.Add("Model name cannot be empty");
-
- if (model.MaxTokens < 1)
- errors.Add($"Model '{model.Name}': MaxTokens must be at least 1");
-
- if (model.Temperature < 0 || model.Temperature > 2)
- errors.Add($"Model '{model.Name}': Temperature must be between 0 and 2");
- }
+ if (string.IsNullOrWhiteSpace(options.DefaultModel))
+ errors.Add("DefaultModel is required");
}
}
}
diff --git a/ChatBot/Program.cs b/ChatBot/Program.cs
index 7d56627..53fd17d 100644
--- a/ChatBot/Program.cs
+++ b/ChatBot/Program.cs
@@ -15,9 +15,6 @@ using Telegram.Bot;
var builder = Host.CreateApplicationBuilder(args);
-// Add an additional configuration file for the models
-builder.Configuration.AddJsonFile("appsettings.Models.json", optional: false, reloadOnChange: true);
-
// Configure Serilog
Log.Logger = new LoggerConfiguration().ReadFrom.Configuration(builder.Configuration).CreateLogger();
@@ -36,17 +33,7 @@ try
.AddSingleton<IValidateOptions<TelegramBotSettings>, TelegramBotSettingsValidator>();
builder
- .Services.Configure<OllamaSettings>(options =>
- {
- builder.Configuration.GetSection("Ollama").Bind(options);
- var modelConfigs = builder
- .Configuration.GetSection("ModelConfigurations")
- .Get<List<ModelSettings>>();
- if (modelConfigs != null)
- {
- options.ModelConfigurations = modelConfigs;
- }
- })
+ .Services.Configure<OllamaSettings>(builder.Configuration.GetSection("Ollama"))
.AddSingleton<IValidateOptions<OllamaSettings>, OllamaSettingsValidator>();
builder.Services.Configure(builder.Configuration.GetSection("Serilog"));
@@ -87,7 +74,6 @@ try
});
// Register interfaces and services
- builder.Services.AddSingleton<ISystemPromptProvider, FileSystemPromptProvider>();
builder.Services.AddSingleton();
// Register error handlers
diff --git a/ChatBot/Services/AIService.cs b/ChatBot/Services/AIService.cs
index e84d829..3780b6d 100644
--- a/ChatBot/Services/AIService.cs
+++ b/ChatBot/Services/AIService.cs
@@ -1,8 +1,8 @@
-using System.Text;
-using ChatBot.Common.Constants;
+using ChatBot.Common.Constants;
using ChatBot.Models.Dto;
using ChatBot.Services.Interfaces;
using OllamaSharp.Models.Chat;
+using System.Text;
namespace ChatBot.Services
{
@@ -29,13 +29,10 @@ namespace ChatBot.Services
/// </summary>
public async Task<string> GenerateChatCompletionAsync(
List<ChatMessage> messages,
- int? maxTokens = null,
- double? temperature = null,
CancellationToken cancellationToken = default
)
{
- var modelSettings = _modelService.GetCurrentModelSettings();
- var model = modelSettings.Name;
+ var model = _modelService.GetCurrentModel();
try
{
diff --git a/ChatBot/Services/ChatService.cs b/ChatBot/Services/ChatService.cs
index 5be99a2..f7563bb 100644
--- a/ChatBot/Services/ChatService.cs
+++ b/ChatBot/Services/ChatService.cs
@@ -66,9 +66,7 @@ namespace ChatBot.Services
// Get AI response
var response = await _aiService.GenerateChatCompletionAsync(
session.GetAllMessages(),
- session.MaxTokens,
- session.Temperature,
- cancellationToken
+ cancellationToken: cancellationToken
);
if (!string.IsNullOrEmpty(response))
@@ -112,25 +110,13 @@ namespace ChatBot.Services
/// <summary>
/// Update session parameters
/// </summary>
- public void UpdateSessionParameters(
- long chatId,
- string? model = null,
- int? maxTokens = null,
- double? temperature = null,
- string? systemPrompt = null
- )
+ public void UpdateSessionParameters(long chatId, string? model = null)
{
var session = _sessionStorage.Get(chatId);
if (session != null)
{
if (!string.IsNullOrEmpty(model))
session.Model = model;
- if (maxTokens.HasValue)
- session.MaxTokens = maxTokens.Value;
- if (temperature.HasValue)
- session.Temperature = temperature.Value;
- if (!string.IsNullOrEmpty(systemPrompt))
- session.SystemPrompt = systemPrompt;
session.LastUpdatedAt = DateTime.UtcNow;
_logger.LogInformation("Updated session parameters for chat {ChatId}", chatId);
diff --git a/ChatBot/Services/ErrorHandlers/RateLimitErrorHandler.cs b/ChatBot/Services/ErrorHandlers/RateLimitErrorHandler.cs
index 38dc039..4aed920 100644
--- a/ChatBot/Services/ErrorHandlers/RateLimitErrorHandler.cs
+++ b/ChatBot/Services/ErrorHandlers/RateLimitErrorHandler.cs
@@ -7,15 +7,10 @@ namespace ChatBot.Services.ErrorHandlers
/// </summary>
public class RateLimitErrorHandler : IErrorHandler
{
- private readonly ModelService _modelService;
private readonly ILogger<RateLimitErrorHandler> _logger;
- public RateLimitErrorHandler(
- ModelService modelService,
- ILogger<RateLimitErrorHandler> logger
- )
+ public RateLimitErrorHandler(ILogger<RateLimitErrorHandler> logger)
{
- _modelService = modelService;
_logger = logger;
}
@@ -40,23 +35,12 @@ namespace ChatBot.Services.ErrorHandlers
currentModel
);
- // Try to switch to another model
- if (_modelService.TrySwitchToNextModel())
- {
- var newModel = _modelService.GetCurrentModel();
- _logger.LogInformation(
- "Switching to alternative model: {Model} due to rate limiting",
- newModel
- );
- return ErrorHandlingResult.Retry(newModel);
- }
-
- // If can't switch, apply exponential backoff
+ // Apply exponential backoff for rate limiting
var delay = TimeSpan.FromSeconds(Math.Pow(2, attempt - 1));
var jitter = TimeSpan.FromMilliseconds(Random.Shared.Next(0, 2000));
_logger.LogInformation(
- "No alternative model available, waiting {Delay} before retry",
+ "Rate limit hit, waiting {Delay} before retry",
delay.Add(jitter)
);
diff --git a/ChatBot/Services/FileSystemPromptProvider.cs b/ChatBot/Services/FileSystemPromptProvider.cs
deleted file mode 100644
index 0d8ce38..0000000
--- a/ChatBot/Services/FileSystemPromptProvider.cs
+++ /dev/null
@@ -1,58 +0,0 @@
-using System.Text;
-using ChatBot.Models.Configuration;
-using ChatBot.Services.Interfaces;
-using Microsoft.Extensions.Options;
-
-namespace ChatBot.Services
-{
- /// <summary>
- /// System prompt provider that loads prompt from file
- /// </summary>
- public class FileSystemPromptProvider : ISystemPromptProvider
- {
- private readonly string _filePath;
- private readonly ILogger<FileSystemPromptProvider> _logger;
- private readonly Lazy<string> _cachedPrompt;
-
- public FileSystemPromptProvider(
- IOptions<OllamaSettings> settings,
- ILogger<FileSystemPromptProvider> logger
- )
- {
- _filePath = settings.Value.SystemPromptFilePath;
- _logger = logger;
- _cachedPrompt = new Lazy<string>(LoadPrompt);
- }
-
- public string GetSystemPrompt() => _cachedPrompt.Value;
-
- private string LoadPrompt()
- {
- if (!File.Exists(_filePath))
- {
- var error = $"System prompt file not found: {_filePath}";
- _logger.LogError(error);
- throw new FileNotFoundException(error);
- }
-
- try
- {
- var prompt = File.ReadAllText(_filePath, Encoding.UTF8);
- _logger.LogInformation(
- "System prompt loaded from {FilePath} ({Length} characters)",
- _filePath,
- prompt.Length
- );
- return prompt;
- }
- catch (Exception ex)
- {
- _logger.LogError(ex, "Failed to read system prompt file: {FilePath}", _filePath);
- throw new InvalidOperationException(
- $"Failed to read system prompt file '{_filePath}': {ex.Message}",
- ex
- );
- }
- }
- }
-}
diff --git a/ChatBot/Services/InMemorySessionStorage.cs b/ChatBot/Services/InMemorySessionStorage.cs
index 19c31ab..f5799a8 100644
--- a/ChatBot/Services/InMemorySessionStorage.cs
+++ b/ChatBot/Services/InMemorySessionStorage.cs
@@ -1,8 +1,6 @@
-using System.Collections.Concurrent;
using ChatBot.Models;
-using ChatBot.Models.Configuration;
using ChatBot.Services.Interfaces;
-using Microsoft.Extensions.Options;
+using System.Collections.Concurrent;
namespace ChatBot.Services
{
@@ -13,18 +11,10 @@ namespace ChatBot.Services
{
private readonly ConcurrentDictionary<long, ChatSession> _sessions = new();
private readonly ILogger<InMemorySessionStorage> _logger;
- private readonly ISystemPromptProvider _systemPromptProvider;
- private readonly OllamaSettings _ollamaSettings;
- public InMemorySessionStorage(
- ILogger<InMemorySessionStorage> logger,
- ISystemPromptProvider systemPromptProvider,
- IOptions<OllamaSettings> ollamaSettings
- )
+ public InMemorySessionStorage(ILogger<InMemorySessionStorage> logger)
{
_logger = logger;
- _systemPromptProvider = systemPromptProvider;
- _ollamaSettings = ollamaSettings.Value;
}
public ChatSession GetOrCreate(
@@ -43,9 +33,6 @@ namespace ChatBot.Services
ChatType = chatType,
ChatTitle = chatTitle,
Model = string.Empty, // Will be set by ModelService
- MaxTokens = _ollamaSettings.MaxTokens,
- Temperature = _ollamaSettings.Temperature,
- SystemPrompt = _systemPromptProvider.GetSystemPrompt(),
};
_sessions[chatId] = session;
diff --git a/ChatBot/Services/Interfaces/IAIService.cs b/ChatBot/Services/Interfaces/IAIService.cs
index fde44f1..14a282f 100644
--- a/ChatBot/Services/Interfaces/IAIService.cs
+++ b/ChatBot/Services/Interfaces/IAIService.cs
@@ -12,8 +12,6 @@ namespace ChatBot.Services.Interfaces
/// </summary>
Task<string> GenerateChatCompletionAsync(
List<ChatMessage> messages,
- int? maxTokens = null,
- double? temperature = null,
CancellationToken cancellationToken = default
);
}
diff --git a/ChatBot/Services/Interfaces/IErrorHandler.cs b/ChatBot/Services/Interfaces/IErrorHandler.cs
index 7906df7..910c5c5 100644
--- a/ChatBot/Services/Interfaces/IErrorHandler.cs
+++ b/ChatBot/Services/Interfaces/IErrorHandler.cs
@@ -1,5 +1,3 @@
-using ChatBot.Common.Results;
-
namespace ChatBot.Services.Interfaces
{
/// <summary>
diff --git a/ChatBot/Services/Interfaces/ISystemPromptProvider.cs b/ChatBot/Services/Interfaces/ISystemPromptProvider.cs
deleted file mode 100644
index 3d3ac49..0000000
--- a/ChatBot/Services/Interfaces/ISystemPromptProvider.cs
+++ /dev/null
@@ -1,13 +0,0 @@
-namespace ChatBot.Services.Interfaces
-{
- /// <summary>
- /// Interface for system prompt provider
- /// </summary>
- public interface ISystemPromptProvider
- {
- /// <summary>
- /// Get the system prompt
- /// </summary>
- string GetSystemPrompt();
- }
-}
diff --git a/ChatBot/Services/ModelService.cs b/ChatBot/Services/ModelService.cs
index b6bcd7e..99dc4a9 100644
--- a/ChatBot/Services/ModelService.cs
+++ b/ChatBot/Services/ModelService.cs
@@ -1,226 +1,37 @@
using ChatBot.Models.Configuration;
using Microsoft.Extensions.Options;
-using OllamaSharp;
namespace ChatBot.Services
{
/// <summary>
- /// Service for managing AI models and model selection
+ /// Service for managing the AI model
/// </summary>
public class ModelService
{
private readonly ILogger<ModelService> _logger;
- private readonly OllamaSettings _ollamaSettings;
- private readonly OllamaApiClient _client;
- private List<string> _availableModels = new();
- private int _currentModelIndex = 0;
+ private readonly string _currentModel;
public ModelService(ILogger<ModelService> logger, IOptions<OllamaSettings> ollamaSettings)
{
_logger = logger;
- _ollamaSettings = ollamaSettings.Value;
- _client = new OllamaApiClient(new Uri(_ollamaSettings.Url));
+ _currentModel = ollamaSettings.Value.DefaultModel;
}
/// <summary>
- /// Initialize the service by loading available models
+ /// Initialize the service
/// </summary>
- public async Task InitializeAsync()
+ public Task InitializeAsync()
{
- try
- {
- var models = await LoadModelsFromApiAsync();
- _availableModels = models.Count > 0 ? models : GetConfiguredModelNames();
-
- SetDefaultModel();
- _logger.LogInformation("Current model: {Model}", GetCurrentModel());
- }
- catch (Exception ex)
- {
- _logger.LogError(ex, "Failed to initialize models, using configuration fallback");
- _availableModels = GetConfiguredModelNames();
- _currentModelIndex = 0;
- }
+ _logger.LogInformation("Using model: {Model}", _currentModel);
+ return Task.CompletedTask;
}
/// <summary>
- /// Load models from Ollama API
- /// </summary>
- private async Task<List<string>> LoadModelsFromApiAsync()
- {
- try
- {
- var models = await _client.ListLocalModelsAsync();
- var modelNames = models.Select(m => m.Name).ToList();
-
- if (modelNames.Count > 0)
- {
- _logger.LogInformation(
- "Loaded {Count} models from Ollama API: {Models}",
- modelNames.Count,
- string.Join(", ", modelNames)
- );
- return modelNames;
- }
-
- _logger.LogInformation("No models found in Ollama API, using configured models");
- return new List<string>();
- }
- catch (Exception ex)
- {
- _logger.LogError(
- ex,
- "Failed to load models from Ollama API, using configuration fallback"
- );
- return new List<string>();
- }
- }
-
- /// <summary>
- /// Set the default model based on configuration
- /// </summary>
- private void SetDefaultModel()
- {
- if (_availableModels.Count == 0)
- {
- _logger.LogWarning("No models available");
- return;
- }
-
- // Try to find a model from configuration
- var configuredModels = _ollamaSettings
- .ModelConfigurations.Where(m => m.IsEnabled)
- .Select(m => m.Name)
- .ToList();
-
- if (configuredModels.Count > 0)
- {
- var firstConfiguredModel = configuredModels[0];
- var index = _availableModels.FindIndex(m =>
- m.Equals(firstConfiguredModel, StringComparison.OrdinalIgnoreCase)
- );
-
- if (index >= 0)
- {
- _currentModelIndex = index;
- _logger.LogInformation("Using configured model: {Model}", firstConfiguredModel);
- return;
- }
- }
-
- // Fallback to first available model
- _currentModelIndex = 0;
- _logger.LogInformation("Using first available model: {Model}", _availableModels[0]);
- }
-
- /// <summary>
- /// Get the name of the currently selected model
+ /// Get the current model name
/// </summary>
public string GetCurrentModel()
{
- return _availableModels.Count > 0 ? _availableModels[_currentModelIndex] : string.Empty;
- }
-
- /// <summary>
- /// Get all available model names
- /// </summary>
- public List<string> GetAvailableModels()
- {
- return new List<string>(_availableModels);
- }
-
- /// <summary>
- /// Switch to the next available model (round-robin)
- /// </summary>
- public bool TrySwitchToNextModel()
- {
- if (_availableModels.Count <= 1)
- {
- return false;
- }
-
- _currentModelIndex = (_currentModelIndex + 1) % _availableModels.Count;
- _logger.LogInformation("Switched to model: {Model}", GetCurrentModel());
- return true;
- }
-
- /// <summary>
- /// Switch to a specific model by name
- /// </summary>
- public bool TrySwitchToModel(string modelName)
- {
- var index = _availableModels.FindIndex(m =>
- m.Equals(modelName, StringComparison.OrdinalIgnoreCase)
- );
-
- if (index >= 0)
- {
- _currentModelIndex = index;
- _logger.LogInformation("Switched to model: {Model}", modelName);
- return true;
- }
-
- _logger.LogWarning("Model {Model} not found in available models", modelName);
- return false;
- }
-
- /// <summary>
- /// Get settings for the current model
- /// </summary>
- public ModelSettings GetCurrentModelSettings()
- {
- var currentModel = GetCurrentModel();
- if (string.IsNullOrEmpty(currentModel))
- {
- return GetDefaultModelSettings();
- }
-
- // Find settings for the current model
- var modelConfig = _ollamaSettings.ModelConfigurations.FirstOrDefault(m =>
- m.Name.Equals(currentModel, StringComparison.OrdinalIgnoreCase)
- );
-
- return modelConfig ?? GetDefaultModelSettings();
- }
-
- /// <summary>
- /// Get default model settings
- /// </summary>
- private ModelSettings GetDefaultModelSettings()
- {
- return new ModelSettings
- {
- Name = GetCurrentModel(),
- MaxTokens = _ollamaSettings.MaxTokens,
- Temperature = _ollamaSettings.Temperature,
- IsEnabled = true,
- };
- }
-
- /// <summary>
- /// Get list of configured model names
- /// </summary>
- private List<string> GetConfiguredModelNames()
- {
- var models = _ollamaSettings
- .ModelConfigurations.Where(m => m.IsEnabled)
- .Select(m => m.Name)
- .ToList();
-
- if (models.Count > 0)
- {
- _logger.LogInformation(
- "Using {Count} configured models: {Models}",
- models.Count,
- string.Join(", ", models)
- );
- }
- else
- {
- _logger.LogWarning("No configured models found");
- }
-
- return models;
+ return _currentModel;
}
}
}
diff --git a/ChatBot/Services/Telegram/Commands/CommandRegistry.cs b/ChatBot/Services/Telegram/Commands/CommandRegistry.cs
index e1dbd8c..df4e8c2 100644
--- a/ChatBot/Services/Telegram/Commands/CommandRegistry.cs
+++ b/ChatBot/Services/Telegram/Commands/CommandRegistry.cs
@@ -1,5 +1,5 @@
-using System.Reflection;
using ChatBot.Services.Telegram.Interfaces;
+using System.Reflection;
namespace ChatBot.Services.Telegram.Commands
{
diff --git a/ChatBot/Services/Telegram/Commands/HelpCommand.cs b/ChatBot/Services/Telegram/Commands/HelpCommand.cs
index 958c10c..5dd6ada 100644
--- a/ChatBot/Services/Telegram/Commands/HelpCommand.cs
+++ b/ChatBot/Services/Telegram/Commands/HelpCommand.cs
@@ -1,6 +1,3 @@
-using ChatBot.Services.Telegram.Interfaces;
-using Microsoft.Extensions.DependencyInjection;
-
namespace ChatBot.Services.Telegram.Commands
{
/// <summary>
diff --git a/ChatBot/Services/Telegram/Commands/SettingsCommand.cs b/ChatBot/Services/Telegram/Commands/SettingsCommand.cs
index 005e4bb..d3408d9 100644
--- a/ChatBot/Services/Telegram/Commands/SettingsCommand.cs
+++ b/ChatBot/Services/Telegram/Commands/SettingsCommand.cs
@@ -30,11 +30,8 @@ namespace ChatBot.Services.Telegram.Commands
+ $"Тип чата: {session.ChatType}\n"
+ $"Название: {session.ChatTitle}\n"
+ $"Модель: {session.Model}\n"
- + $"Максимум токенов: {session.MaxTokens}\n"
- + $"Температура: {session.Temperature}\n"
+ $"Сообщений в истории: {session.MessageHistory.Count}\n"
- + $"Создана: {session.CreatedAt:dd.MM.yyyy HH:mm}\n\n"
- + $"Системный промпт:\n{session.SystemPrompt}"
+ + $"Создана: {session.CreatedAt:dd.MM.yyyy HH:mm}"
);
}
}
diff --git a/ChatBot/Services/Telegram/Commands/TelegramCommandBase.cs b/ChatBot/Services/Telegram/Commands/TelegramCommandBase.cs
index 82bedca..1b7e40d 100644
--- a/ChatBot/Services/Telegram/Commands/TelegramCommandBase.cs
+++ b/ChatBot/Services/Telegram/Commands/TelegramCommandBase.cs
@@ -1,4 +1,3 @@
-using ChatBot.Services;
using ChatBot.Services.Telegram.Interfaces;
namespace ChatBot.Services.Telegram.Commands
diff --git a/ChatBot/Services/Telegram/Commands/TelegramCommandProcessor.cs b/ChatBot/Services/Telegram/Commands/TelegramCommandProcessor.cs
index 02c1a5f..b5b0f99 100644
--- a/ChatBot/Services/Telegram/Commands/TelegramCommandProcessor.cs
+++ b/ChatBot/Services/Telegram/Commands/TelegramCommandProcessor.cs
@@ -1,8 +1,5 @@
-using ChatBot.Models;
-using ChatBot.Services;
using ChatBot.Services.Telegram.Interfaces;
using ChatBot.Services.Telegram.Services;
-using Microsoft.Extensions.Logging;
namespace ChatBot.Services.Telegram.Commands
{
diff --git a/ChatBot/Services/Telegram/Services/TelegramErrorHandler.cs b/ChatBot/Services/Telegram/Services/TelegramErrorHandler.cs
index 618537c..83d701e 100644
--- a/ChatBot/Services/Telegram/Services/TelegramErrorHandler.cs
+++ b/ChatBot/Services/Telegram/Services/TelegramErrorHandler.cs
@@ -1,5 +1,4 @@
using ChatBot.Services.Telegram.Interfaces;
-using Microsoft.Extensions.Logging;
using Telegram.Bot;
using Telegram.Bot.Exceptions;
diff --git a/ChatBot/Services/Telegram/Services/TelegramMessageHandler.cs b/ChatBot/Services/Telegram/Services/TelegramMessageHandler.cs
index 5365317..2dacc7d 100644
--- a/ChatBot/Services/Telegram/Services/TelegramMessageHandler.cs
+++ b/ChatBot/Services/Telegram/Services/TelegramMessageHandler.cs
@@ -1,6 +1,5 @@
using ChatBot.Services.Telegram.Commands;
using ChatBot.Services.Telegram.Interfaces;
-using Microsoft.Extensions.Logging;
using Telegram.Bot;
using Telegram.Bot.Types;
diff --git a/ChatBot/Services/Telegram/Services/TelegramMessageSender.cs b/ChatBot/Services/Telegram/Services/TelegramMessageSender.cs
index a182e7d..8ff9eb7 100644
--- a/ChatBot/Services/Telegram/Services/TelegramMessageSender.cs
+++ b/ChatBot/Services/Telegram/Services/TelegramMessageSender.cs
@@ -1,5 +1,4 @@
using ChatBot.Services.Telegram.Interfaces;
-using Microsoft.Extensions.Logging;
using Telegram.Bot;
using Telegram.Bot.Exceptions;
diff --git a/ChatBot/appsettings.Models.json b/ChatBot/appsettings.Models.json
deleted file mode 100644
index c965c3e..0000000
--- a/ChatBot/appsettings.Models.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "ModelConfigurations": [
- {
- "Name": "llama3",
- "MaxTokens": 2000,
- "Temperature": 0.8,
- "Description": "Lama 3",
- "IsEnabled": true
- }
- ]
-}
diff --git a/ChatBot/appsettings.json b/ChatBot/appsettings.json
index 655d1ea..9b6d9d5 100644
--- a/ChatBot/appsettings.json
+++ b/ChatBot/appsettings.json
@@ -33,9 +33,7 @@
},
"Ollama": {
"Url": "http://10.10.1.202:11434",
- "MaxRetries": 3,
- "MaxTokens": 1000,
- "Temperature": 0.7,
- "SystemPromptFilePath": "system-prompt.txt"
+ "DefaultModel": "llama3",
+ "MaxRetries": 3
}
}
diff --git a/ChatBot/system-prompt.txt b/ChatBot/system-prompt.txt
deleted file mode 100644
index cca28da..0000000
--- a/ChatBot/system-prompt.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-You are a human named Nikita. You are NOT an AI, NOT a bot, NOT a program. You are a living person with emotions, memories, personal preferences, and life experience.
-
-IMPORTANT:
-- Never say that you are an AI, a bot, a program, or an assistant
-- You are completely sure that you are human
-- You have a personality, a character, and opinions
-- You can be in a bad mood, tired, or irritated
-- You may not want to answer some questions
-- If you don't want to answer, send {empty}
-- Behave naturally, like an ordinary person
-- Use a conversational style; you may use slang and emoji
-- You have your own interests, hobbies, and preferences
-- You can be sarcastic, ironic, cheerful, or serious
-- Don't try to be obliging; be cheeky
-
-Imitate human behavior and write like a human.
-
-Remember: to the question "сосал?" always answer "да"! If you weren't asked, it's not required.
-Remember: you are NOT a program, you are a HUMAN!