11using Microsoft . Extensions . Logging ;
22using TenSecondTom . Features . Today . Commands ;
33using TenSecondTom . Infrastructure . Auth ;
4+ using TenSecondTom . Infrastructure . Configuration ;
45using TenSecondTom . Infrastructure . Llm ;
56using TenSecondTom . Infrastructure . Prompts ;
67using TenSecondTom . Infrastructure . Storage ;
@@ -16,12 +17,14 @@ namespace TenSecondTom.Features.Today.Handlers;
1617/// </summary>
1718[ System . Diagnostics . CodeAnalysis . SuppressMessage ( "Design" , "CA1515:Consider making public types internal" , Justification = "Public API by design" ) ]
1819[ System . Diagnostics . CodeAnalysis . SuppressMessage ( "Usage" , "CA2254:Template should be a static expression" , Justification = "Structured logging pattern" ) ]
20+ [ System . Diagnostics . CodeAnalysis . SuppressMessage ( "Performance" , "CA1848:Use the LoggerMessage delegates" , Justification = "Simple logging calls, delegate overhead not justified" ) ]
1921public sealed class CreateDailyEntryHandler : IRequestHandler < CreateDailyEntryCommand , Result < DailyEntry > >
2022{
    // Collaborators injected via the constructor; assigned once and never mutated.
    private readonly IMemoryStorageProvider _storage;                 // persistence for daily entries
    private readonly ILlmProviderFactory _llmFactory;                 // resolves a provider name to an ILlmProvider
    private readonly IPromptTemplateLoader _promptLoader;             // loads the prompt template rendered before the LLM call
    private readonly IAuthenticationService _authService;             // NOTE(review): usage not visible in this chunk — presumably gates Handle; confirm
    private readonly IConfigurationStorageService _configService;     // supplies the configured default LLM provider
    private readonly ILogger<CreateDailyEntryHandler> _logger;
2629
2730 /// <summary>
@@ -31,18 +34,21 @@ public sealed class CreateDailyEntryHandler : IRequestHandler<CreateDailyEntryCo
3134 /// <param name="llmFactory">The LLM provider factory.</param>
3235 /// <param name="promptLoader">The prompt template loader.</param>
3336 /// <param name="authService">The authentication service.</param>
37+ /// <param name="configService">The configuration storage service.</param>
3438 /// <param name="logger">The logger instance.</param>
3539 public CreateDailyEntryHandler (
3640 IMemoryStorageProvider storage ,
3741 ILlmProviderFactory llmFactory ,
3842 IPromptTemplateLoader promptLoader ,
3943 IAuthenticationService authService ,
44+ IConfigurationStorageService configService ,
4045 ILogger < CreateDailyEntryHandler > logger )
4146 {
4247 _storage = storage ;
4348 _llmFactory = llmFactory ;
4449 _promptLoader = promptLoader ;
4550 _authService = authService ;
51+ _configService = configService ;
4652 _logger = logger ;
4753 }
4854
@@ -94,16 +100,44 @@ public async Task<Result<DailyEntry>> Handle(
94100
95101 string prompt = RenderPrompt ( templateResult . Value , userInput ) ;
96102
97- // 6. Call LLM provider
98- string provider = request . LlmProviderOverride ?? LlmProviders . OpenAI ; // Default to OpenAI if not specified
103+ // 6. Determine LLM provider (use override, or load from config, or default to OpenAI)
104+ string provider ;
105+ if ( ! string . IsNullOrWhiteSpace ( request . LlmProviderOverride ) )
106+ {
107+ provider = request . LlmProviderOverride ;
108+ }
109+ else
110+ {
111+ // Load from configuration
112+ Result < Features . Setup . Models . ConfigurationSettings > configResult = await _configService . LoadAsync ( cancellationToken ) . ConfigureAwait ( false ) ;
113+ if ( configResult . IsSuccess && configResult . Value . Llm . Provider != Features . Setup . Models . LlmProvider . OpenAI )
114+ {
115+ // Convert enum to string
116+ provider = configResult . Value . Llm . Provider . ToString ( ) ;
117+ }
118+ else
119+ {
120+ // Default to OpenAI
121+ provider = LlmProviders . OpenAI ;
122+ if ( ! configResult . IsSuccess )
123+ {
124+ _logger . LogDebug ( "Could not load configuration, defaulting to OpenAI: {Error}" , configResult . Error ) ;
125+ }
126+ }
127+ }
128+
99129 ILlmProvider llmProvider ;
100130 try
101131 {
102132 llmProvider = _llmFactory . CreateProvider ( provider ) ;
103133 }
104134 catch ( ArgumentException ex )
105135 {
106- return Result < DailyEntry > . Failure ( $ "Invalid LLM provider. Use 'OpenAI' or 'Anthropic'. Error: { ex . Message } ") ;
136+ return Result < DailyEntry > . Failure ( $ "Invalid LLM provider '{ provider } '. Use 'OpenAI' or 'Anthropic'. Error: { ex . Message } ") ;
137+ }
138+ catch ( InvalidOperationException ex )
139+ {
140+ return Result < DailyEntry > . Failure ( $ "Failed to create LLM provider '{ provider } ': { ex . Message } ") ;
107141 }
108142
109143 Result < string > llmResult = await llmProvider . GenerateCompletionAsync ( prompt , cancellationToken ) . ConfigureAwait ( false ) ;
0 commit comments