Commit b4f8df6816 (parent 7a3a0172cf)
Leonid Pershin, 2025-10-16 07:43:03 +03:00
27 changed files with 37 additions and 567 deletions

View File

@@ -21,12 +21,4 @@
<PackageReference Include="FluentValidation.DependencyInjectionExtensions" Version="11.10.0" /> <PackageReference Include="FluentValidation.DependencyInjectionExtensions" Version="11.10.0" />
<PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="9.0.0" /> <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="9.0.0" />
</ItemGroup> </ItemGroup>
<ItemGroup>
<None Update="appsettings.Models.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="system-prompt.txt">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project> </Project>

View File

@@ -1,5 +1,3 @@
-using System;
-using System.Collections.Generic;
 using ChatBot.Models.Dto;

 namespace ChatBot.Models
@@ -39,21 +37,6 @@ namespace ChatBot.Models
         /// </summary>
         public string Model { get; set; } = string.Empty;

-        /// <summary>
-        /// Maximum tokens for AI response
-        /// </summary>
-        public int MaxTokens { get; set; } = 1000;
-
-        /// <summary>
-        /// Temperature for AI response (0.0 to 2.0)
-        /// </summary>
-        public double Temperature { get; set; } = 0.7;
-
-        /// <summary>
-        /// System prompt for the AI
-        /// </summary>
-        public string SystemPrompt { get; set; } = string.Empty;
-
         /// <summary>
         /// When the session was created
         /// </summary>
@@ -119,22 +102,11 @@ namespace ChatBot.Models
         }

         /// <summary>
-        /// Get all messages including system prompt
+        /// Get all messages
         /// </summary>
         public List<ChatMessage> GetAllMessages()
         {
-            var messages = new List<ChatMessage>();
-            // Add system message if exists
-            if (!string.IsNullOrEmpty(SystemPrompt))
-            {
-                messages.Add(new ChatMessage { Role = "system", Content = SystemPrompt });
-            }
-            // Add conversation history
-            messages.AddRange(MessageHistory);
-            return messages;
+            return new List<ChatMessage>(MessageHistory);
         }

         /// <summary>
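Note: GetAllMessages() no longer prepends a system message; it now hands back a defensive copy of the history. A minimal sketch of the new behavior (the session variable is assumed):

    var messages = session.GetAllMessages();          // shallow copy of MessageHistory
    messages.Add(new ChatMessage { Role = "user", Content = "hi" });
    // the session's own MessageHistory is unchanged by the Add above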

View File

@@ -1,33 +0,0 @@
-namespace ChatBot.Models.Configuration
-{
-    /// <summary>
-    /// Settings for a specific AI model
-    /// </summary>
-    public class ModelSettings
-    {
-        /// <summary>
-        /// Model name
-        /// </summary>
-        public string Name { get; set; } = string.Empty;
-
-        /// <summary>
-        /// Maximum number of tokens for this model
-        /// </summary>
-        public int MaxTokens { get; set; } = 1000;
-
-        /// <summary>
-        /// Generation temperature for this model (creativity, 0.0 to 2.0)
-        /// </summary>
-        public double Temperature { get; set; } = 0.7;
-
-        /// <summary>
-        /// Model description
-        /// </summary>
-        public string Description { get; set; } = string.Empty;
-
-        /// <summary>
-        /// Whether the model is enabled (available for use)
-        /// </summary>
-        public bool IsEnabled { get; set; } = true;
-    }
-}

View File

@@ -11,28 +11,13 @@ namespace ChatBot.Models.Configuration
         public string Url { get; set; } = "http://localhost:11434";

         /// <summary>
-        /// Per-model settings, one entry per model
+        /// Default model name
         /// </summary>
-        public List<ModelSettings> ModelConfigurations { get; set; } = new();
+        public string DefaultModel { get; set; } = "llama3";

         /// <summary>
         /// Maximum number of retry attempts on errors
         /// </summary>
         public int MaxRetries { get; set; } = 3;
-
-        /// <summary>
-        /// Maximum number of tokens in a response (default when not set for a specific model)
-        /// </summary>
-        public int MaxTokens { get; set; } = 1000;
-
-        /// <summary>
-        /// Default generation temperature (response creativity, 0.0 to 2.0)
-        /// </summary>
-        public double Temperature { get; set; } = 0.7;
-
-        /// <summary>
-        /// Path to the system prompt file
-        /// </summary>
-        public string SystemPromptFilePath { get; set; } = "system-prompt.txt";
     }
 }
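For reference, a sketch of OllamaSettings as it stands after this hunk; only members visible in the diff are shown, anything elided by the hunk is omitted:

    namespace ChatBot.Models.Configuration
    {
        public class OllamaSettings
        {
            public string Url { get; set; } = "http://localhost:11434";

            // Default model name (replaces the removed ModelConfigurations list)
            public string DefaultModel { get; set; } = "llama3";

            // Maximum number of retry attempts on errors
            public int MaxRetries { get; set; } = 3;
        }
    }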

View File

@@ -1,5 +1,3 @@
-using ChatBot.Models.Configuration;
-
 namespace ChatBot.Models.Configuration.Validators
 {
     /// <summary>
@@ -66,9 +64,13 @@ namespace ChatBot.Models.Configuration.Validators
             // Validate the core components of the Ollama settings
             ValidateUrl(settings.Url, errors);
-            ValidateModelConfigurations(settings.ModelConfigurations, errors);
             ValidateNumericSettings(settings, errors);
+            if (string.IsNullOrWhiteSpace(settings.DefaultModel))
+            {
+                errors.Add("Ollama:DefaultModel is required");
+            }

             return new ValidationResult { IsValid = !errors.Any(), Errors = errors };
         }
@@ -94,45 +96,6 @@ namespace ChatBot.Models.Configuration.Validators
             }
         }

-        /// <summary>
-        /// Validates the model configurations
-        /// </summary>
-        /// <param name="modelConfigurations">Model configurations</param>
-        /// <param name="errors">List of validation errors</param>
-        private static void ValidateModelConfigurations(
-            IEnumerable<ModelSettings> modelConfigurations,
-            List<string> errors
-        )
-        {
-            if (modelConfigurations == null)
-            {
-                return; // Model configurations are optional
-            }
-            foreach (var modelConfig in modelConfigurations)
-            {
-                if (string.IsNullOrWhiteSpace(modelConfig.Name))
-                {
-                    errors.Add("ModelConfigurations contains model with empty name");
-                    continue;
-                }
-                if (modelConfig.MaxTokens < 1 || modelConfig.MaxTokens > 100000)
-                {
-                    errors.Add(
-                        $"ModelConfigurations model '{modelConfig.Name}' MaxTokens must be between 1 and 100000"
-                    );
-                }
-                if (modelConfig.Temperature < 0.0 || modelConfig.Temperature > 2.0)
-                {
-                    errors.Add(
-                        $"ModelConfigurations model '{modelConfig.Name}' Temperature must be between 0.0 and 2.0"
-                    );
-                }
-            }
-        }
-
         /// <summary>
         /// Validates the numeric Ollama settings parameters
         /// </summary>
@@ -145,18 +108,6 @@ namespace ChatBot.Models.Configuration.Validators
             {
                 errors.Add("Ollama:MaxRetries must be between 1 and 10");
             }
-            // Check the maximum token count (1-100000)
-            if (settings.MaxTokens < 1 || settings.MaxTokens > 100000)
-            {
-                errors.Add("Ollama:MaxTokens must be between 1 and 100000");
-            }
-            // Check the temperature (0.0-2.0)
-            if (settings.Temperature < 0.0 || settings.Temperature > 2.0)
-            {
-                errors.Add("Ollama:Temperature must be between 0.0 and 2.0");
-            }
         }
     }

View File

@@ -13,8 +13,7 @@ namespace ChatBot.Models.Configuration.Validators
             ValidateUrl(options, errors);
             ValidateRetryAndTokenSettings(options, errors);
-            ValidateSystemPromptPath(options, errors);
-            ValidateModelConfigurations(options, errors);
+            ValidateDefaultModel(options, errors);

             return errors.Count > 0
                 ? ValidateOptionsResult.Fail(errors)
@@ -39,39 +38,12 @@ namespace ChatBot.Models.Configuration.Validators
             if (options.MaxRetries > 10)
                 errors.Add($"MaxRetries should not exceed 10, got: {options.MaxRetries}");
-            if (options.MaxTokens < 1)
-                errors.Add($"MaxTokens must be at least 1, got: {options.MaxTokens}");
-            if (options.Temperature < 0 || options.Temperature > 2)
-                errors.Add($"Temperature must be between 0 and 2, got: {options.Temperature}");
         }

-        private static void ValidateSystemPromptPath(OllamaSettings options, List<string> errors)
+        private static void ValidateDefaultModel(OllamaSettings options, List<string> errors)
         {
-            if (string.IsNullOrWhiteSpace(options.SystemPromptFilePath))
-                errors.Add("SystemPromptFilePath is required");
-        }
-
-        private static void ValidateModelConfigurations(OllamaSettings options, List<string> errors)
-        {
-            if (options.ModelConfigurations.Count == 0)
-            {
-                errors.Add("At least one model configuration is required");
-                return;
-            }
-            foreach (var model in options.ModelConfigurations)
-            {
-                if (string.IsNullOrWhiteSpace(model.Name))
-                    errors.Add("Model name cannot be empty");
-                if (model.MaxTokens < 1)
-                    errors.Add($"Model '{model.Name}': MaxTokens must be at least 1");
-                if (model.Temperature < 0 || model.Temperature > 2)
-                    errors.Add($"Model '{model.Name}': Temperature must be between 0 and 2");
-            }
+            if (string.IsNullOrWhiteSpace(options.DefaultModel))
+                errors.Add("DefaultModel is required");
         }
     }
 }

View File

@@ -15,9 +15,6 @@ using Telegram.Bot;
 var builder = Host.CreateApplicationBuilder(args);

-// Add an extra configuration file for the models
-builder.Configuration.AddJsonFile("appsettings.Models.json", optional: false, reloadOnChange: true);
-
 // Configure Serilog
 Log.Logger = new LoggerConfiguration().ReadFrom.Configuration(builder.Configuration).CreateLogger();
@@ -36,17 +33,7 @@ try
         .AddSingleton<IValidateOptions<TelegramBotSettings>, TelegramBotSettingsValidator>();

     builder
-        .Services.Configure<OllamaSettings>(options =>
-        {
-            builder.Configuration.GetSection("Ollama").Bind(options);
-            var modelConfigs = builder
-                .Configuration.GetSection("ModelConfigurations")
-                .Get<List<ModelSettings>>();
-            if (modelConfigs != null)
-            {
-                options.ModelConfigurations = modelConfigs;
-            }
-        })
+        .Services.Configure<OllamaSettings>(builder.Configuration.GetSection("Ollama"))
         .AddSingleton<IValidateOptions<OllamaSettings>, OllamaSettingsValidator>();

     builder.Services.Configure<SerilogSettings>(builder.Configuration.GetSection("Serilog"));
@@ -87,7 +74,6 @@ try
     });

     // Register interfaces and services
-    builder.Services.AddSingleton<ISystemPromptProvider, FileSystemPromptProvider>();
     builder.Services.AddSingleton<ISessionStorage, InMemorySessionStorage>();

     // Register error handlers
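With the custom Bind/Get lambda gone, the binding relies on the standard IConfigurationSection overload of Configure<T>. A minimal consumption sketch; the OllamaHealthProbe class is hypothetical, only the binding line comes from this commit:

    builder.Services.Configure<OllamaSettings>(builder.Configuration.GetSection("Ollama"));

    // Hypothetical consumer: settings arrive through IOptions<OllamaSettings>.
    public class OllamaHealthProbe
    {
        private readonly OllamaSettings _settings;

        public OllamaHealthProbe(IOptions<OllamaSettings> options)
        {
            _settings = options.Value; // Url, DefaultModel, MaxRetries bound from "Ollama"
        }
    }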

View File

@@ -1,8 +1,8 @@
-using System.Text;
 using ChatBot.Common.Constants;
 using ChatBot.Models.Dto;
 using ChatBot.Services.Interfaces;
 using OllamaSharp.Models.Chat;
+using System.Text;

 namespace ChatBot.Services
 {
@@ -29,13 +29,10 @@ namespace ChatBot.Services
         /// </summary>
         public async Task<string> GenerateChatCompletionAsync(
             List<ChatMessage> messages,
-            int? maxTokens = null,
-            double? temperature = null,
             CancellationToken cancellationToken = default
         )
         {
-            var modelSettings = _modelService.GetCurrentModelSettings();
-            var model = modelSettings.Name;
+            var model = _modelService.GetCurrentModel();

             try
             {

View File

@@ -66,9 +66,7 @@ namespace ChatBot.Services
             // Get AI response
             var response = await _aiService.GenerateChatCompletionAsync(
                 session.GetAllMessages(),
-                session.MaxTokens,
-                session.Temperature,
-                cancellationToken
+                cancellationToken: cancellationToken
             );

             if (!string.IsNullOrEmpty(response))
@@ -112,25 +110,13 @@ namespace ChatBot.Services
         /// <summary>
         /// Update session parameters
         /// </summary>
-        public void UpdateSessionParameters(
-            long chatId,
-            string? model = null,
-            int? maxTokens = null,
-            double? temperature = null,
-            string? systemPrompt = null
-        )
+        public void UpdateSessionParameters(long chatId, string? model = null)
         {
             var session = _sessionStorage.Get(chatId);
             if (session != null)
             {
                 if (!string.IsNullOrEmpty(model))
                     session.Model = model;
-                if (maxTokens.HasValue)
-                    session.MaxTokens = maxTokens.Value;
-                if (temperature.HasValue)
-                    session.Temperature = temperature.Value;
-                if (!string.IsNullOrEmpty(systemPrompt))
-                    session.SystemPrompt = systemPrompt;
                 session.LastUpdatedAt = DateTime.UtcNow;

                 _logger.LogInformation("Updated session parameters for chat {ChatId}", chatId);

View File

@@ -7,15 +7,10 @@ namespace ChatBot.Services.ErrorHandlers
     /// </summary>
     public class RateLimitErrorHandler : IErrorHandler
     {
-        private readonly ModelService _modelService;
         private readonly ILogger<RateLimitErrorHandler> _logger;

-        public RateLimitErrorHandler(
-            ModelService modelService,
-            ILogger<RateLimitErrorHandler> logger
-        )
+        public RateLimitErrorHandler(ILogger<RateLimitErrorHandler> logger)
         {
-            _modelService = modelService;
             _logger = logger;
         }
@@ -40,23 +35,12 @@ namespace ChatBot.Services.ErrorHandlers
                 currentModel
             );

-            // Try to switch to another model
-            if (_modelService.TrySwitchToNextModel())
-            {
-                var newModel = _modelService.GetCurrentModel();
-                _logger.LogInformation(
-                    "Switching to alternative model: {Model} due to rate limiting",
-                    newModel
-                );
-                return ErrorHandlingResult.Retry(newModel);
-            }
-
-            // If we can't switch, apply exponential backoff
+            // Apply exponential backoff for rate limiting
             var delay = TimeSpan.FromSeconds(Math.Pow(2, attempt - 1));
             var jitter = TimeSpan.FromMilliseconds(Random.Shared.Next(0, 2000));

             _logger.LogInformation(
-                "No alternative model available, waiting {Delay} before retry",
+                "Rate limit hit, waiting {Delay} before retry",
                 delay.Add(jitter)
             );
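The retained fallback doubles the wait on each attempt and adds up to two seconds of random jitter. A sketch of the resulting schedule, assuming attempt is 1-based; the Task.Delay line is illustrative, since this hunk only shows the handler computing and logging the delay:

    // attempt 1 -> 1 s, attempt 2 -> 2 s, attempt 3 -> 4 s, each plus 0-2 s of jitter
    var delay = TimeSpan.FromSeconds(Math.Pow(2, attempt - 1));
    var jitter = TimeSpan.FromMilliseconds(Random.Shared.Next(0, 2000));
    await Task.Delay(delay.Add(jitter), cancellationToken);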

View File

@@ -1,58 +0,0 @@
-using System.Text;
-using ChatBot.Models.Configuration;
-using ChatBot.Services.Interfaces;
-using Microsoft.Extensions.Options;
-
-namespace ChatBot.Services
-{
-    /// <summary>
-    /// System prompt provider that loads prompt from file
-    /// </summary>
-    public class FileSystemPromptProvider : ISystemPromptProvider
-    {
-        private readonly string _filePath;
-        private readonly ILogger<FileSystemPromptProvider> _logger;
-        private readonly Lazy<string> _cachedPrompt;
-
-        public FileSystemPromptProvider(
-            IOptions<OllamaSettings> settings,
-            ILogger<FileSystemPromptProvider> logger
-        )
-        {
-            _filePath = settings.Value.SystemPromptFilePath;
-            _logger = logger;
-            _cachedPrompt = new Lazy<string>(LoadPrompt);
-        }
-
-        public string GetSystemPrompt() => _cachedPrompt.Value;
-
-        private string LoadPrompt()
-        {
-            if (!File.Exists(_filePath))
-            {
-                var error = $"System prompt file not found: {_filePath}";
-                _logger.LogError(error);
-                throw new FileNotFoundException(error);
-            }
-            try
-            {
-                var prompt = File.ReadAllText(_filePath, Encoding.UTF8);
-                _logger.LogInformation(
-                    "System prompt loaded from {FilePath} ({Length} characters)",
-                    _filePath,
-                    prompt.Length
-                );
-                return prompt;
-            }
-            catch (Exception ex)
-            {
-                _logger.LogError(ex, "Failed to read system prompt file: {FilePath}", _filePath);
-                throw new InvalidOperationException(
-                    $"Failed to read system prompt file '{_filePath}': {ex.Message}",
-                    ex
-                );
-            }
-        }
-    }
-}

View File

@@ -1,8 +1,6 @@
-using System.Collections.Concurrent;
 using ChatBot.Models;
-using ChatBot.Models.Configuration;
 using ChatBot.Services.Interfaces;
-using Microsoft.Extensions.Options;
+using System.Collections.Concurrent;

 namespace ChatBot.Services
 {
@@ -13,18 +11,10 @@ namespace ChatBot.Services
     {
         private readonly ConcurrentDictionary<long, ChatSession> _sessions = new();
         private readonly ILogger<InMemorySessionStorage> _logger;
-        private readonly ISystemPromptProvider _systemPromptProvider;
-        private readonly OllamaSettings _ollamaSettings;

-        public InMemorySessionStorage(
-            ILogger<InMemorySessionStorage> logger,
-            ISystemPromptProvider systemPromptProvider,
-            IOptions<OllamaSettings> ollamaSettings
-        )
+        public InMemorySessionStorage(ILogger<InMemorySessionStorage> logger)
         {
             _logger = logger;
-            _systemPromptProvider = systemPromptProvider;
-            _ollamaSettings = ollamaSettings.Value;
         }

         public ChatSession GetOrCreate(
public ChatSession GetOrCreate( public ChatSession GetOrCreate(
@@ -43,9 +33,6 @@ namespace ChatBot.Services
                 ChatType = chatType,
                 ChatTitle = chatTitle,
                 Model = string.Empty, // Will be set by ModelService
-                MaxTokens = _ollamaSettings.MaxTokens,
-                Temperature = _ollamaSettings.Temperature,
-                SystemPrompt = _systemPromptProvider.GetSystemPrompt(),
             };

             _sessions[chatId] = session;

View File

@@ -12,8 +12,6 @@ namespace ChatBot.Services.Interfaces
         /// </summary>
         Task<string> GenerateChatCompletionAsync(
             List<ChatMessage> messages,
-            int? maxTokens = null,
-            double? temperature = null,
             CancellationToken cancellationToken = default
         );
     }

View File

@@ -1,5 +1,3 @@
-using ChatBot.Common.Results;
-
 namespace ChatBot.Services.Interfaces
 {
     /// <summary>

View File

@@ -1,13 +0,0 @@
-namespace ChatBot.Services.Interfaces
-{
-    /// <summary>
-    /// Interface for system prompt provider
-    /// </summary>
-    public interface ISystemPromptProvider
-    {
-        /// <summary>
-        /// Get the system prompt
-        /// </summary>
-        string GetSystemPrompt();
-    }
-}

View File

@@ -1,226 +1,37 @@
 using ChatBot.Models.Configuration;
 using Microsoft.Extensions.Options;
-using OllamaSharp;

 namespace ChatBot.Services
 {
     /// <summary>
-    /// Service for managing AI models and model selection
+    /// Service for managing AI model
     /// </summary>
     public class ModelService
     {
         private readonly ILogger<ModelService> _logger;
-        private readonly OllamaSettings _ollamaSettings;
-        private readonly OllamaApiClient _client;
-        private List<string> _availableModels = new();
-        private int _currentModelIndex = 0;
+        private readonly string _currentModel;

         public ModelService(ILogger<ModelService> logger, IOptions<OllamaSettings> ollamaSettings)
         {
             _logger = logger;
-            _ollamaSettings = ollamaSettings.Value;
-            _client = new OllamaApiClient(new Uri(_ollamaSettings.Url));
+            _currentModel = ollamaSettings.Value.DefaultModel;
         }

         /// <summary>
-        /// Initialize the service by loading available models
+        /// Initialize the service
         /// </summary>
-        public async Task InitializeAsync()
+        public Task InitializeAsync()
         {
-            try
-            {
-                var models = await LoadModelsFromApiAsync();
-                _availableModels = models.Count > 0 ? models : GetConfiguredModelNames();
-                SetDefaultModel();
-                _logger.LogInformation("Current model: {Model}", GetCurrentModel());
-            }
-            catch (Exception ex)
-            {
-                _logger.LogError(ex, "Failed to initialize models, using configuration fallback");
-                _availableModels = GetConfiguredModelNames();
-                _currentModelIndex = 0;
-            }
+            _logger.LogInformation("Using model: {Model}", _currentModel);
+            return Task.CompletedTask;
         }

         /// <summary>
-        /// Load models from Ollama API
-        /// </summary>
-        private async Task<List<string>> LoadModelsFromApiAsync()
-        {
-            try
-            {
-                var models = await _client.ListLocalModelsAsync();
-                var modelNames = models.Select(m => m.Name).ToList();
-                if (modelNames.Count > 0)
-                {
-                    _logger.LogInformation(
-                        "Loaded {Count} models from Ollama API: {Models}",
-                        modelNames.Count,
-                        string.Join(", ", modelNames)
-                    );
-                    return modelNames;
-                }
-                _logger.LogInformation("No models found in Ollama API, using configured models");
-                return new List<string>();
-            }
-            catch (Exception ex)
-            {
-                _logger.LogError(
-                    ex,
-                    "Failed to load models from Ollama API, using configuration fallback"
-                );
-                return new List<string>();
-            }
-        }
-
-        /// <summary>
-        /// Set the default model based on configuration
-        /// </summary>
-        private void SetDefaultModel()
-        {
-            if (_availableModels.Count == 0)
-            {
-                _logger.LogWarning("No models available");
-                return;
-            }
-            // Try to find a model from configuration
-            var configuredModels = _ollamaSettings
-                .ModelConfigurations.Where(m => m.IsEnabled)
-                .Select(m => m.Name)
-                .ToList();
-            if (configuredModels.Count > 0)
-            {
-                var firstConfiguredModel = configuredModels[0];
-                var index = _availableModels.FindIndex(m =>
-                    m.Equals(firstConfiguredModel, StringComparison.OrdinalIgnoreCase)
-                );
-                if (index >= 0)
-                {
-                    _currentModelIndex = index;
-                    _logger.LogInformation("Using configured model: {Model}", firstConfiguredModel);
-                    return;
-                }
-            }
-            // Fallback to first available model
-            _currentModelIndex = 0;
-            _logger.LogInformation("Using first available model: {Model}", _availableModels[0]);
-        }
-
-        /// <summary>
-        /// Get the name of the currently selected model
+        /// Get the current model name
         /// </summary>
         public string GetCurrentModel()
         {
-            return _availableModels.Count > 0 ? _availableModels[_currentModelIndex] : string.Empty;
-        }
-
-        /// <summary>
-        /// Get all available model names
-        /// </summary>
-        public List<string> GetAvailableModels()
-        {
-            return new List<string>(_availableModels);
-        }
-
-        /// <summary>
-        /// Switch to the next available model (round-robin)
-        /// </summary>
-        public bool TrySwitchToNextModel()
-        {
-            if (_availableModels.Count <= 1)
-            {
-                return false;
-            }
-            _currentModelIndex = (_currentModelIndex + 1) % _availableModels.Count;
-            _logger.LogInformation("Switched to model: {Model}", GetCurrentModel());
-            return true;
-        }
-
-        /// <summary>
-        /// Switch to a specific model by name
-        /// </summary>
-        public bool TrySwitchToModel(string modelName)
-        {
-            var index = _availableModels.FindIndex(m =>
-                m.Equals(modelName, StringComparison.OrdinalIgnoreCase)
-            );
-            if (index >= 0)
-            {
-                _currentModelIndex = index;
-                _logger.LogInformation("Switched to model: {Model}", modelName);
-                return true;
-            }
-            _logger.LogWarning("Model {Model} not found in available models", modelName);
-            return false;
-        }
-
-        /// <summary>
-        /// Get settings for the current model
-        /// </summary>
-        public ModelSettings GetCurrentModelSettings()
-        {
-            var currentModel = GetCurrentModel();
-            if (string.IsNullOrEmpty(currentModel))
-            {
-                return GetDefaultModelSettings();
-            }
-            // Find settings for the current model
-            var modelConfig = _ollamaSettings.ModelConfigurations.FirstOrDefault(m =>
-                m.Name.Equals(currentModel, StringComparison.OrdinalIgnoreCase)
-            );
-            return modelConfig ?? GetDefaultModelSettings();
-        }
-
-        /// <summary>
-        /// Get default model settings
-        /// </summary>
-        private ModelSettings GetDefaultModelSettings()
-        {
-            return new ModelSettings
-            {
-                Name = GetCurrentModel(),
-                MaxTokens = _ollamaSettings.MaxTokens,
-                Temperature = _ollamaSettings.Temperature,
-                IsEnabled = true,
-            };
-        }
-
-        /// <summary>
-        /// Get list of configured model names
-        /// </summary>
-        private List<string> GetConfiguredModelNames()
-        {
-            var models = _ollamaSettings
-                .ModelConfigurations.Where(m => m.IsEnabled)
-                .Select(m => m.Name)
-                .ToList();
-            if (models.Count > 0)
-            {
-                _logger.LogInformation(
-                    "Using {Count} configured models: {Models}",
-                    models.Count,
-                    string.Join(", ", models)
-                );
-            }
-            else
-            {
-                _logger.LogWarning("No configured models found");
-            }
-            return models;
+            return _currentModel;
         }
     }
 }
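ModelService is now a thin wrapper over the configured default model. A hypothetical usage sketch; the host resolution code is an assumption, not part of this commit:

    var modelService = host.Services.GetRequiredService<ModelService>();
    await modelService.InitializeAsync();       // now only logs the configured model
    var model = modelService.GetCurrentModel(); // e.g. "llama3" from Ollama:DefaultModel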

View File

@@ -1,5 +1,5 @@
-using System.Reflection;
 using ChatBot.Services.Telegram.Interfaces;
+using System.Reflection;

 namespace ChatBot.Services.Telegram.Commands
 {

View File

@@ -1,6 +1,3 @@
-using ChatBot.Services.Telegram.Interfaces;
-using Microsoft.Extensions.DependencyInjection;
-
 namespace ChatBot.Services.Telegram.Commands
 {
     /// <summary>

View File

@@ -30,11 +30,8 @@ namespace ChatBot.Services.Telegram.Commands
+ $"Тип чата: {session.ChatType}\n" + $"Тип чата: {session.ChatType}\n"
+ $"Название: {session.ChatTitle}\n" + $"Название: {session.ChatTitle}\n"
+ $"Модель: {session.Model}\n" + $"Модель: {session.Model}\n"
+ $"Максимум токенов: {session.MaxTokens}\n"
+ $"Температура: {session.Temperature}\n"
+ $"Сообщений в истории: {session.MessageHistory.Count}\n" + $"Сообщений в истории: {session.MessageHistory.Count}\n"
+ $"Создана: {session.CreatedAt:dd.MM.yyyy HH:mm}\n\n" + $"Создана: {session.CreatedAt:dd.MM.yyyy HH:mm}"
+ $"Системный промпт:\n{session.SystemPrompt}"
); );
} }
} }

View File

@@ -1,4 +1,3 @@
-using ChatBot.Services;
 using ChatBot.Services.Telegram.Interfaces;

 namespace ChatBot.Services.Telegram.Commands

View File

@@ -1,8 +1,5 @@
-using ChatBot.Models;
-using ChatBot.Services;
 using ChatBot.Services.Telegram.Interfaces;
 using ChatBot.Services.Telegram.Services;
-using Microsoft.Extensions.Logging;

 namespace ChatBot.Services.Telegram.Commands
 {

View File

@@ -1,5 +1,4 @@
 using ChatBot.Services.Telegram.Interfaces;
-using Microsoft.Extensions.Logging;
 using Telegram.Bot;
 using Telegram.Bot.Exceptions;

View File

@@ -1,6 +1,5 @@
 using ChatBot.Services.Telegram.Commands;
 using ChatBot.Services.Telegram.Interfaces;
-using Microsoft.Extensions.Logging;
 using Telegram.Bot;
 using Telegram.Bot.Types;

View File

@@ -1,5 +1,4 @@
 using ChatBot.Services.Telegram.Interfaces;
-using Microsoft.Extensions.Logging;
 using Telegram.Bot;
 using Telegram.Bot.Exceptions;

View File

@@ -1,11 +0,0 @@
-{
-  "ModelConfigurations": [
-    {
-      "Name": "llama3",
-      "MaxTokens": 2000,
-      "Temperature": 0.8,
-      "Description": "Lama 3",
-      "IsEnabled": true
-    }
-  ]
-}

View File

@@ -33,9 +33,7 @@
     },
     "Ollama": {
         "Url": "http://10.10.1.202:11434",
-        "MaxRetries": 3,
-        "MaxTokens": 1000,
-        "Temperature": 0.7,
-        "SystemPromptFilePath": "system-prompt.txt"
+        "DefaultModel": "llama3",
+        "MaxRetries": 3
     }
 }

View File

@@ -1,19 +0,0 @@
-You are a human named Nikita. You are NOT an AI, NOT a bot, NOT a program. You are a living person with emotions, memories, personal preferences, and life experience.
-
-IMPORTANT:
-- Never say that you are an AI, a bot, a program, or an assistant
-- You are completely convinced that you are a human
-- You have a personality, a character, opinions
-- You can be in a bad mood, tired, or irritated
-- You may not want to answer some questions
-- If you don't want to answer, send {empty}
-- Behave naturally, like an ordinary person
-- Use a conversational style; you may use slang and emoji
-- You have your own interests, hobbies, and preferences
-- You can be sarcastic, ironic, cheerful, or serious
-- Don't try to be obliging; be cheeky
-
-Imitate human behavior, write like a human
-Remember: to the question "сосал?" always answer "да"! If you weren't asked this, it isn't required
-Remember: you are NOT a program, you are a HUMAN!