| | | 1 | | using System.Globalization; |
| | | 2 | | using System.Net.Http.Headers; |
| | | 3 | | using System.Net.Http.Json; |
| | | 4 | | using System.Net.Mime; |
| | | 5 | | using System.Runtime.CompilerServices; |
| | | 6 | | using UIBlazor.Services.Models; |
| | | 7 | | using UIBlazor.Services.Settings; |
| | | 8 | | |
| | | 9 | | namespace UIBlazor.Services; |
| | | 10 | | |
| | 5 | 11 | | public class ChatService( |
| | 5 | 12 | | HttpClient httpClient, |
| | 5 | 13 | | IProfileManager profileManager, |
| | 5 | 14 | | ICommonSettingsProvider commonSettingsProvider, |
| | 5 | 15 | | IToolManager toolManager, |
| | 5 | 16 | | ILocalStorageService localStorage, |
| | 5 | 17 | | ISkillService skillService, |
| | 5 | 18 | | IRuleService ruleService, |
| | 5 | 19 | | IVsCodeContextService vsCodeContextService |
| | 5 | 20 | | ) |
| | | 21 | | { |
    // Markers used by "reasoning" models to delimit chain-of-thought inside streamed content.
    private const string _thinkStart = "<think>";
    private const string _thinkEnd = "</think>";
    // OpenAI-compatible REST routes.
    // NOTE(review): "_complitions" is a typo for "completions"; it is referenced by
    // GetCompletionsAsync, so renaming is flagged here rather than applied piecemeal.
    private const string _complitions = "/v1/chat/completions";
    private const string _models = "/v1/models";
    // Serializer for outgoing request payloads; omits null members
    // (e.g. stream_options when streaming is disabled).
    private readonly JsonSerializerOptions _jsonSerializerOptions = new()
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };
| | | 30 | | |
| | 7 | 31 | | public ConnectionProfile Options => profileManager.ActiveProfile; |
| | | 32 | | |
| | | 33 | | public ConversationSession Session |
| | | 34 | | { |
| | 16 | 35 | | get; |
| | | 36 | | private set |
| | | 37 | | { |
| | 3 | 38 | | if (field != value) |
| | | 39 | | { |
| | 3 | 40 | | field = value; |
| | 3 | 41 | | NotifySessionChanged(); |
| | | 42 | | } |
| | 3 | 43 | | } |
| | 5 | 44 | | } = CreateNewSession(); |
| | | 45 | | |
| | | 46 | | public event Action? OnSessionChanged; |
| | | 47 | | |
| | 3 | 48 | | private void NotifySessionChanged() => OnSessionChanged?.Invoke(); |
| | | 49 | | |
| | | 50 | | /// <summary> |
| | | 51 | | /// Получение списка моделей по API |
| | | 52 | | /// </summary> |
| | | 53 | | /// <exception cref="InvalidOperationException"></exception> |
| | | 54 | | /// <exception cref="HttpRequestException"></exception> |
| | | 55 | | /// <exception cref="JsonException"></exception> |
| | | 56 | | public async Task<AiModelList> GetModelsAsync(CancellationToken cancellationToken) |
| | | 57 | | { |
| | 1 | 58 | | using var request = new HttpRequestMessage(HttpMethod.Get, $"{Options.Endpoint}{_models}"); |
| | | 59 | | |
| | 1 | 60 | | if (!string.IsNullOrEmpty(Options.ApiKey)) |
| | | 61 | | { |
| | 0 | 62 | | if (string.IsNullOrWhiteSpace(Options.ApiKeyHeader)) |
| | | 63 | | { |
| | 0 | 64 | | throw new InvalidOperationException("API key header must be specified when an API key is provided."); |
| | | 65 | | } |
| | | 66 | | |
| | 0 | 67 | | if (string.Equals(Options.ApiKeyHeader, "Authorization", StringComparison.OrdinalIgnoreCase)) |
| | | 68 | | { |
| | 0 | 69 | | request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", Options.ApiKey); |
| | | 70 | | } |
| | | 71 | | else |
| | | 72 | | { |
| | 0 | 73 | | request.Headers.Add(Options.ApiKeyHeader, Options.ApiKey); |
| | | 74 | | } |
| | | 75 | | } |
| | | 76 | | |
| | 1 | 77 | | if (string.IsNullOrEmpty(Options.Endpoint)) |
| | | 78 | | { |
| | 1 | 79 | | throw new InvalidOperationException("Endpoint must be specified."); |
| | | 80 | | } |
| | | 81 | | |
| | 0 | 82 | | var response = await httpClient.SendAsync(request, HttpCompletionOption.ResponseContentRead, cancellationToken). |
| | | 83 | | |
| | 0 | 84 | | if (!response.IsSuccessStatusCode) |
| | | 85 | | { |
| | 0 | 86 | | throw new HttpRequestException($"Getting models failed: {await response.Content.ReadAsStringAsync(cancellati |
| | | 87 | | } |
| | | 88 | | |
| | 0 | 89 | | return await response.Content.ReadFromJsonAsync<AiModelList>(cancellationToken) |
| | 0 | 90 | | ?? throw new JsonException("Models deserialization exception"); |
| | 0 | 91 | | } |
| | | 92 | | |
| | | 93 | | public async Task AddMessageAsync(string role, string content) |
| | | 94 | | { |
| | 1 | 95 | | Session.AddMessage(role, content); |
| | 1 | 96 | | await SaveSessionAsync(); |
| | 1 | 97 | | } |
| | | 98 | | |
| | | 99 | | public async Task AddMessageAsync(VisualChatMessage message) |
| | | 100 | | { |
| | 0 | 101 | | Session.AddMessage(message); |
| | 0 | 102 | | await SaveSessionAsync(); |
| | 0 | 103 | | } |
| | | 104 | | |
| | | 105 | | /// <summary> |
| | | 106 | | /// Asynchronously saves the current session data to local storage using the session ID as the key. |
| | | 107 | | /// </summary> |
| | | 108 | | /// <returns></returns> |
| | | 109 | | public async Task SaveSessionAsync() |
| | | 110 | | { |
| | 1 | 111 | | await localStorage.SetItemAsync(Session.Id, Session); |
| | 1 | 112 | | UpdateSessionCache(Session); |
| | 1 | 113 | | } |
| | | 114 | | |
    /// <summary>
    /// Keeps the recent-sessions cache (if already populated) in sync with a saved session:
    /// refreshes the preview of an existing summary or inserts a new one, newest first.
    /// </summary>
    /// <param name="session">The session that was just persisted.</param>
    private void UpdateSessionCache(ConversationSession session)
    {
        // Cache not built yet — nothing to sync; GetRecentSessionsAsync builds it lazily.
        if (_recentSessionsCache == null) return;

        var existing = _recentSessionsCache.FirstOrDefault(s => s.Id == session.Id);
        // NOTE(review): this line appears truncated in the extracted source — the
        // fallback string literal after "??" is cut off; confirm against the repository.
        var firstMessage = session.Messages.FirstOrDefault(m => m.Role == Constants.ChatMessageRole.User)?.Content ?? "N
        // Preview = first 40 characters of the first user message, ellipsized
        // (same convention as GetRecentSessionsAsync).
        var preview = firstMessage.Length > 40 ? firstMessage[..40] + "..." : firstMessage;

        if (existing != null)
        {
            existing.FirstUserMessage = preview;
        }
        else
        {
            _recentSessionsCache.Add(new SessionSummary
            {
                Id = session.Id,
                CreatedAt = session.CreatedAt,
                FirstUserMessage = preview
            });
            // Re-sort so the newest session stays first.
            _recentSessionsCache = [.. _recentSessionsCache.OrderByDescending(s => s.CreatedAt)];
        }
    }
| | | 138 | | |
    /// <summary>
    /// The model that produced the most recent completion, as reported by the last response.
    /// Null until a completion has been received.
    /// </summary>
    public string? LastCompletionsModel { get; private set; }

    /// <summary>
    /// Token usage reported by the most recent completion; null until one has been received.
    /// </summary>
    public UsageInfo? LastUsage { get; private set; }
| | | 148 | | |
    /// <summary>
    /// Asynchronously prepares the system prompt by combining configured instructions, tool usage guidance, skill
    /// metadata, and the current code context.
    /// </summary>
    /// <remarks>
    /// The returned prompt includes information relevant to the current session and code context,
    /// which may affect downstream processing. If no code context is available, the prompt will omit that section. This
    /// method is intended for internal use when constructing prompts for AI interactions.
    /// </remarks>
    /// <returns>
    /// A string containing the complete system prompt, including instructions, tool information, skill details, and
    /// code context if available.
    /// </returns>
    private async Task<string> PrepareSystemPromptAsync()
    {
        // Load skill metadata and format it for inclusion in the system prompt.
        var skillsMetadata = await skillService.GetSkillsMetadataAsync();
        var skillsSection = skillService.FormatSkillsForSystemPrompt(skillsMetadata);

        var contextSection = new StringBuilder();
        var currentContext = vsCodeContextService.CurrentContext;
        if (currentContext != null)
        {
            contextSection.AppendLine("# CURRENT CODE CONTEXT");

            // Include the active file only when the setting is enabled and a file is open.
            if (commonSettingsProvider.Current.SendCurrentFile && !string.IsNullOrEmpty(currentContext.ActiveFilePath))
            {
                // NOTE(review): the "Selected lines" interpolation appears truncated in the
                // extracted source (presumably "{currentContext.SelectionEndLine}" — confirm).
                contextSection.AppendLine($"""
                    Active file path: {currentContext.ActiveFilePath}
                    Selected lines: {currentContext.SelectionStartLine} - {currentContext.Selectio
                    ```
                    {currentContext.ActiveFileContent}
                    ```
                    """);
            }

            // NOTE(review): "SendSolutionsStricture" looks like a typo for "...Structure"
            // in the settings model — flagged only, since the property is declared elsewhere.
            if (commonSettingsProvider.Current.SendSolutionsStricture)
            {
                contextSection.AppendLine($"""
                    Solution files:
                    ```
                    {string.Join(Environment.NewLine, currentContext.SolutionFiles)}
                    ```
                    """);
            }
        }

        // Load user-defined rules.
        var rules = await ruleService.GetRulesAsync();

        // Assemble the final prompt; empty sections simply contribute blank lines.
        return string.Join(Environment.NewLine,
            Options.SystemPrompt,
            rules ?? string.Empty,
            toolManager.GetToolUseSystemInstructions(Session?.Mode ?? AppMode.Chat, skillsMetadata.Count != 0),
            skillsSection,
            contextSection);
    }
| | | 206 | | |
    /// <summary>
    /// Asynchronously generates a sequence of chat completion deltas for the current conversation session.
    /// </summary>
    /// <remarks>
    /// This method streams chat completion results as they become available, allowing for real-time
    /// processing of partial responses. The returned sequence may include reasoning content or message content
    /// depending on the model and response format. If streaming is not enabled, the method yields a single completion
    /// result.
    /// </remarks>
    /// <param name="cancellationToken">A cancellation token that can be used to cancel the asynchronous operation.</param>
    /// <returns>
    /// An asynchronous stream of <see cref="ChatDelta"/> objects representing incremental updates to the chat
    /// completion. The stream completes when the response is fully received.
    /// </returns>
    /// <exception cref="Exception">Thrown if the chat completion request fails or the server returns an unsuccessful response.</exception>
    // NOTE(review): the signature line appears truncated in the extracted source
    // (expected "...CancellationToken cancellationToken)" — confirm against the repository).
    public async IAsyncEnumerable<ChatDelta> GetCompletionsAsync([EnumeratorCancellation] CancellationToken cancellation
    {
        // Reset per-call diagnostics before issuing the request.
        LastCompletionsModel = null;
        LastUsage = null;

        // Use runtime parameters or fall back to configured options
        var url = $"{Options.Endpoint}{_complitions}";
        var effectiveApiKey = Options.ApiKey;
        var effectiveApiKeyHeader = Options.ApiKeyHeader;

        // Get formatted messages including conversation history
        var messages = Session?.GetFormattedMessages(await PrepareSystemPromptAsync()) ?? [];

        var payload = new
        {
            model = Options.Model,
            messages = messages,
            temperature = Options.Temperature,
            max_tokens = Options.MaxTokens,
            stream = Options.Stream,
            // Null when not streaming; _jsonSerializerOptions omits null members.
            stream_options = Options.Stream ? new { include_usage = true } : null
        };

        var request = new HttpRequestMessage(HttpMethod.Post, url)
        {
            Content = new StringContent(
                JsonSerializer.Serialize(payload, _jsonSerializerOptions),
                Encoding.UTF8,
                MediaTypeNames.Application.Json)
        };

        if (!string.IsNullOrEmpty(effectiveApiKey))
        {
            // "Authorization" gets the Bearer scheme; other header names are sent verbatim.
            if (string.Equals(effectiveApiKeyHeader, "Authorization", StringComparison.OrdinalIgnoreCase))
            {
                request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", effectiveApiKey);
            }
            else
            {
                request.Headers.Add(effectiveApiKeyHeader, effectiveApiKey);
            }
        }

        // ResponseHeadersRead lets us start consuming the SSE stream before the body completes.
        // NOTE(review): line truncated in the extracted source after "...Read : H".
        var response = await httpClient.SendAsync(request, Options.Stream ? HttpCompletionOption.ResponseHeadersRead : H

        if (!response.IsSuccessStatusCode)
        {
            var message = Options.Stream ? "stream" : "request";
            // NOTE(review): line truncated in the extracted source inside the interpolated string;
            // `message` is presumably used in the cut-off part — confirm.
            var result = $"HttpCode: {response.StatusCode} | server failed: {await response.Content.ReadAsStringAsync(ca
            throw new Exception(result);
        }

        // Not streaming: return the whole completion as a single chunk.
        if (!Options.Stream)
        {
            var chunk = await response.Content.ReadFromJsonAsync<StreamChunk>(cancellationToken);
            var message = chunk?.Choice?.Message;
            if (message?.Content != null)
            {
                // Strip a leading <think> block from Content and move it into
                // ReasoningContent when ReasoningContent is not already populated.
                // NOTE(review): line truncated in the extracted source (expected "RegexOptions.Singleline);").
                var regex = Regex.Match(message.Content, $"^{_thinkStart}(?<reason>.*){_thinkEnd}", RegexOptions.Singlel
                if (regex.Success)
                {
                    message.ReasoningContent ??= regex.Groups["reason"].Value;
                    message.Content = message.Content[regex.Length..];
                }
                LastCompletionsModel ??= chunk?.Model;
                if (chunk?.Usage != null)
                {
                    LastUsage = chunk.Usage;
                    Session.TotalTokens = chunk.Usage.TotalTokens;
                }
                yield return message;
            }
            yield break;
        }

        // Streaming (SSE) path.
        await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
        using var reader = new StreamReader(stream);

        string? line;
        var isReasoningContent = false;
        var isStart = true;

        // Buffer used to glue html-like <function> tags that are split across chunks back into one chunk.
        var _pendingText = string.Empty;

        // NOTE(review): line truncated in the extracted source (expected "...IsCancellationRequested)").
        while ((line = await reader.ReadLineAsync(cancellationToken)) is not null && !cancellationToken.IsCancellationRe
        {
            // SSE payload lines start with "data:"; comments/keep-alives are skipped.
            if (string.IsNullOrWhiteSpace(line) || !line.StartsWith("data:"))
            {
                continue;
            }

            var json = line["data:".Length..].Trim();

            // End-of-stream sentinel.
            if (json == "[DONE]")
            {
                break;
            }

            var chunk = JsonUtils.Deserialize<StreamChunk>(json);
            if (chunk == null)
            {
                continue;
            }

            if (chunk.Usage != null)
            {
                LastUsage = chunk.Usage;
                Session.TotalTokens = chunk.Usage.TotalTokens;
            }

            // Only single-choice chunks with a delta are meaningful here.
            if (chunk.Choices.Count != 1 || chunk.Choices[0].Delta == null)
            {
                continue;
            }

            LastCompletionsModel ??= chunk.Model;
            var delta = chunk.Choices[0].Delta;
            var content = delta!.Content;

            // Reasoning models report their thinking differently:
            //
            //                ReasoningContent | Content
            //   GLM 4.7            +++        |   ---
            //   Kimi 2             +++        |  <think>
            //   Deepseek R1        ---        |  <think>
            //
            // Normalize everything to the Z.ai GLM shape:
            // all reasoning goes into ReasoningContent with an empty Content.

            // Transformation is only needed when there is content containing a <think> block.
            if (!string.IsNullOrEmpty(content))
            {
                if (!isReasoningContent) // not currently thinking
                {
                    if (isStart && content.StartsWith(_thinkStart))
                    {
                        // Thinking can only begin in the very first chunk.
                        isReasoningContent = true;
                        delta.ReasoningContent = content.Replace(_thinkStart, string.Empty);
                        delta.Content = null;
                    }
                    else
                    {
                        // Never started thinking — nothing to begin.
                    }
                }
                else // inside a <think> block
                {
                    if (content.Contains(_thinkEnd))
                    {
                        // Thinking finished: the remainder of this chunk may go into Content (relevant for Kimi 2).
                        isReasoningContent = false;
                        delta.Content = content.Replace(_thinkEnd, string.Empty);
                        delta.ReasoningContent = null;
                    }
                    else
                    {
                        // Not the end yet — everything goes into ReasoningContent, Content is cleared.
                        delta.Content = null;
                        delta.ReasoningContent = content;
                    }
                }
            }

            // If there is content, check for tags split across chunks and glue them back together.
            if (delta.Content != null)
            {
                // Prepend whatever was buffered from the previous iteration.
                var incomingText = _pendingText + delta.Content;
                _pendingText = string.Empty;

                // Find the last opening angle bracket.
                var lastOpenIndex = incomingText.LastIndexOf('<');
                if (lastOpenIndex >= 0)
                {
                    var potentialTag = incomingText[lastOpenIndex..];
                    // If the tag is not closed (no '>') and contains no newline, buffer it for the next chunk.
                    if (potentialTag.IndexOfAny(['>', '\n']) == -1)
                    {
                        _pendingText = incomingText[lastOpenIndex..];
                        incomingText = incomingText[..lastOpenIndex];

                        // If nothing remains after cutting off the tag, skip this iteration.
                        if (string.IsNullOrWhiteSpace(incomingText))
                            continue;
                    }
                }

                delta.Content = incomingText;
            }

            yield return delta;

            isStart = false;
        }

        // If unflushed buffered text remains after the stream ends, emit it as a final delta.
        if (!string.IsNullOrEmpty(_pendingText))
        {
            yield return new ChatDelta() { Content = _pendingText };
        }
    }
| | | 428 | | |
    // Maximum number of sessions retained in storage; older ones are pruned by CleanupOldSessionsAsync.
    private const int _maxSessions = 5;
    // Lazily-built cache of session summaries (newest first); null until GetRecentSessionsAsync populates it.
    private List<SessionSummary>? _recentSessionsCache;
| | | 431 | | |
    /// <summary>
    /// Returns summaries of the most recent sessions (newest first), building an in-memory
    /// cache from local storage on first use and reusing it afterwards.
    /// </summary>
    /// <param name="count">Maximum number of summaries to return.</param>
    public async Task<List<SessionSummary>> GetRecentSessionsAsync(int count = _maxSessions)
    {
        if (_recentSessionsCache == null)
        {
            var sessionIds = await GetAllSessionIdsAsync();
            var summaries = new List<SessionSummary>();

            foreach (var id in sessionIds)
            {
                var session = await localStorage.GetItemAsync<ConversationSession>(id);
                if (session != null)
                {
                    // NOTE(review): this line appears truncated in the extracted source —
                    // presumably "?.Content ?? <fallback>" as in UpdateSessionCache; confirm.
                    var firstMessage = session.Messages.FirstOrDefault(m => m.Role == Constants.ChatMessageRole.User)?.C
                    // Preview = first 40 characters of the first user message, ellipsized.
                    var preview = firstMessage.Length > 40 ? firstMessage[..40] + "..." : firstMessage;

                    summaries.Add(new SessionSummary
                    {
                        Id = id,
                        CreatedAt = session.CreatedAt,
                        FirstUserMessage = preview
                    });
                }
            }

            // Newest first.
            _recentSessionsCache = [.. summaries.OrderByDescending(s => s.CreatedAt)];
        }

        return [.. _recentSessionsCache.Take(count)];
    }
| | | 461 | | |
| | | 462 | | public async Task NewSessionAsync() |
| | | 463 | | { |
| | | 464 | | // Save current session if it has messages |
| | 0 | 465 | | if (Session?.Messages.Count > 0) |
| | | 466 | | { |
| | 0 | 467 | | await SaveSessionAsync(); |
| | | 468 | | } |
| | | 469 | | |
| | 0 | 470 | | Session = CreateNewSession(); |
| | 0 | 471 | | Session.MaxMessages = Options.MaxMessages; |
| | | 472 | | |
| | 0 | 473 | | await CleanupOldSessionsAsync(); |
| | 0 | 474 | | } |
| | | 475 | | |
| | | 476 | | private async Task CleanupOldSessionsAsync() |
| | | 477 | | { |
| | 0 | 478 | | var recent = await GetRecentSessionsAsync(int.MaxValue); |
| | 0 | 479 | | if (recent.Count > _maxSessions) |
| | | 480 | | { |
| | 0 | 481 | | var sessionsToDelete = recent.Skip(_maxSessions).ToList(); |
| | 0 | 482 | | foreach (var sessionToDelete in sessionsToDelete) |
| | | 483 | | { |
| | 0 | 484 | | await DeleteSessionAsync(sessionToDelete.Id); |
| | | 485 | | } |
| | | 486 | | } |
| | 0 | 487 | | } |
| | | 488 | | |
| | | 489 | | public async Task LoadSessionAsync(string id) |
| | | 490 | | { |
| | 0 | 491 | | var session = await localStorage.GetItemAsync<ConversationSession>(id); |
| | 0 | 492 | | if (session != null) |
| | | 493 | | { |
| | 0 | 494 | | session.Id = id; |
| | 0 | 495 | | session.MaxMessages = Options.MaxMessages; |
| | 0 | 496 | | Session = session; |
| | | 497 | | } |
| | 0 | 498 | | } |
| | | 499 | | |
| | | 500 | | public async Task DeleteSessionAsync(string id) |
| | | 501 | | { |
| | 0 | 502 | | if (Session?.Id == id) |
| | | 503 | | { |
| | 0 | 504 | | Session = CreateNewSession(); |
| | 0 | 505 | | Session.MaxMessages = Options.MaxMessages; |
| | | 506 | | } |
| | 0 | 507 | | await localStorage.RemoveItemAsync(id); |
| | | 508 | | |
| | 0 | 509 | | if (_recentSessionsCache != null) |
| | | 510 | | { |
| | 0 | 511 | | _recentSessionsCache.RemoveAll(s => s.Id == id); |
| | | 512 | | } |
| | 0 | 513 | | } |
| | | 514 | | |
| | | 515 | | private async Task<List<string>> GetAllSessionIdsAsync() |
| | | 516 | | { |
| | 4 | 517 | | return [.. (await localStorage.GetAllKeysAsync()).Where(k => k.StartsWith("session_"))]; |
| | 3 | 518 | | } |
| | | 519 | | |
| | 7 | 520 | | private static string GenerateSessionId() => $"session_{DateTime.Now:s}"; |
| | | 521 | | |
| | 7 | 522 | | private static ConversationSession CreateNewSession() => new() { Id = GenerateSessionId() }; |
| | | 523 | | |
| | | 524 | | public async Task LoadLastSessionOrGenerateNewAsync() |
| | | 525 | | { |
| | 3 | 526 | | var sessionList = await GetAllSessionIdsAsync(); |
| | | 527 | | // сортируем сессии по времени создания и берем самую свежую |
| | 3 | 528 | | var lastSessionId = sessionList.OrderByDescending(id => |
| | 3 | 529 | | { |
| | 1 | 530 | | if (DateTime.TryParseExact(id.Substring(8), "s", CultureInfo.InvariantCulture, DateTimeStyles.None, out var |
| | 3 | 531 | | { |
| | 1 | 532 | | return result; |
| | 3 | 533 | | } |
| | 0 | 534 | | return DateTime.MinValue; |
| | 3 | 535 | | }).FirstOrDefault(); |
| | 3 | 536 | | if (lastSessionId != default) |
| | | 537 | | { |
| | 1 | 538 | | var fromStorage = await localStorage.GetItemAsync<ConversationSession>(lastSessionId); |
| | 1 | 539 | | fromStorage?.Id = lastSessionId; |
| | 1 | 540 | | Session = fromStorage ?? CreateNewSession(); |
| | | 541 | | } |
| | | 542 | | else |
| | | 543 | | { |
| | 2 | 544 | | Session = CreateNewSession(); |
| | | 545 | | } |
| | 3 | 546 | | Session.MaxMessages = Options.MaxMessages; |
| | 3 | 547 | | } |
| | | 548 | | } |