1use serde::{Deserialize, Serialize};
7
8use crate::domain::InferenceConfig;
9
/// Fallback port for the proxy when `Settings::proxy_port` is unset
/// (see `Settings::effective_proxy_port`).
pub const DEFAULT_PROXY_PORT: u16 = 8080;

/// Fallback base port for llama instances when `Settings::llama_base_port`
/// is unset (see `Settings::effective_llama_base_port`).
pub const DEFAULT_LLAMA_BASE_PORT: u16 = 9000;

/// Default context size, in tokens, used by `Settings::with_defaults`.
pub const DEFAULT_CONTEXT_SIZE: u64 = 4096;
18
/// User-configurable application settings.
///
/// Every field is optional so a partially-populated settings document still
/// deserializes; `#[serde(default)]` fills any missing field with `None`.
/// Effective values for the ports fall back to the `DEFAULT_*` constants
/// via the `effective_*` accessors.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
#[serde(default)]
pub struct Settings {
    // Destination path for downloads; validated non-blank when present.
    pub default_download_path: Option<String>,

    // Context size in tokens; validated against 512..=1_000_000.
    pub default_context_size: Option<u64>,

    // Proxy port; validated >= 1024, falls back to DEFAULT_PROXY_PORT.
    pub proxy_port: Option<u16>,

    // Base port for llama instances; validated >= 1024,
    // falls back to DEFAULT_LLAMA_BASE_PORT.
    pub llama_base_port: Option<u16>,

    // Download queue capacity; validated against 1..=50.
    pub max_download_queue_size: Option<u32>,

    // UI toggle — presumably controls memory-fit badges; only stored here.
    pub show_memory_fit_indicators: Option<bool>,

    // Agent loop cap; defaults derive from crate::domain::agent constants.
    pub max_tool_iterations: Option<u32>,

    // Agent stagnation cap; defaults derive from crate::domain::agent constants.
    pub max_stagnation_steps: Option<u32>,

    // Database id of the preferred model; no default.
    pub default_model_id: Option<i64>,

    // Baseline inference parameters; validated by validate_inference_config.
    #[serde(default)]
    pub inference_defaults: Option<InferenceConfig>,

    // Whether first-run setup has finished; tri-state (unset/false/true).
    pub setup_completed: Option<bool>,

    // Custom prompt for title generation; NOTE(review): semantics of the
    // prompt text are defined by the consumer, not validated here.
    pub title_generation_prompt: Option<String>,
}
67
68impl Settings {
69 #[must_use]
71 pub const fn with_defaults() -> Self {
72 Self {
73 default_download_path: None,
74 default_context_size: Some(DEFAULT_CONTEXT_SIZE),
75 proxy_port: Some(DEFAULT_PROXY_PORT),
76 llama_base_port: Some(DEFAULT_LLAMA_BASE_PORT),
77 max_download_queue_size: Some(10),
78 show_memory_fit_indicators: Some(true),
79 #[allow(clippy::cast_possible_truncation)] max_tool_iterations: Some(crate::domain::agent::DEFAULT_MAX_ITERATIONS as u32),
81 #[allow(clippy::cast_possible_truncation)]
82 max_stagnation_steps: Some(crate::domain::agent::DEFAULT_MAX_STAGNATION_STEPS as u32),
83 default_model_id: None,
84 inference_defaults: None,
85 setup_completed: None,
86 title_generation_prompt: None,
87 }
88 }
89
90 #[must_use]
92 pub const fn effective_proxy_port(&self) -> u16 {
93 match self.proxy_port {
94 Some(port) => port,
95 None => DEFAULT_PROXY_PORT,
96 }
97 }
98
99 #[must_use]
101 pub const fn effective_llama_base_port(&self) -> u16 {
102 match self.llama_base_port {
103 Some(port) => port,
104 None => DEFAULT_LLAMA_BASE_PORT,
105 }
106 }
107
108 pub fn merge(&mut self, other: &SettingsUpdate) {
110 if let Some(ref path) = other.default_download_path {
111 self.default_download_path.clone_from(path);
112 }
113 if let Some(ref ctx_size) = other.default_context_size {
114 self.default_context_size = *ctx_size;
115 }
116 if let Some(ref port) = other.proxy_port {
117 self.proxy_port = *port;
118 }
119 if let Some(ref port) = other.llama_base_port {
120 self.llama_base_port = *port;
121 }
122 if let Some(ref queue_size) = other.max_download_queue_size {
123 self.max_download_queue_size = *queue_size;
124 }
125 if let Some(ref show_fit) = other.show_memory_fit_indicators {
126 self.show_memory_fit_indicators = *show_fit;
127 }
128 if let Some(ref iters) = other.max_tool_iterations {
129 self.max_tool_iterations = *iters;
130 }
131 if let Some(ref steps) = other.max_stagnation_steps {
132 self.max_stagnation_steps = *steps;
133 }
134 if let Some(ref model_id) = other.default_model_id {
135 self.default_model_id = *model_id;
136 }
137 if let Some(ref inference_defaults) = other.inference_defaults {
138 self.inference_defaults.clone_from(inference_defaults);
139 }
140 if let Some(ref v) = other.setup_completed {
141 self.setup_completed = *v;
142 }
143 if let Some(ref v) = other.title_generation_prompt {
144 self.title_generation_prompt.clone_from(v);
145 }
146 }
147}
148
/// Partial update applied to [`Settings`] via `Settings::merge`.
///
/// Every field is a double `Option`: the outer level says whether the
/// field is being updated at all (`None` = leave unchanged), while the
/// inner value is the new setting (`Some(None)` clears it).
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct SettingsUpdate {
    pub default_download_path: Option<Option<String>>,
    pub default_context_size: Option<Option<u64>>,
    pub proxy_port: Option<Option<u16>>,
    pub llama_base_port: Option<Option<u16>>,
    pub max_download_queue_size: Option<Option<u32>>,
    pub show_memory_fit_indicators: Option<Option<bool>>,
    pub max_tool_iterations: Option<Option<u32>>,
    pub max_stagnation_steps: Option<Option<u32>>,
    pub default_model_id: Option<Option<i64>>,
    pub inference_defaults: Option<Option<InferenceConfig>>,
    pub setup_completed: Option<Option<bool>>,
    pub title_generation_prompt: Option<Option<String>>,
}
170
/// Validation failures reported by [`validate_settings`].
#[derive(Debug, Clone, thiserror::Error)]
pub enum SettingsError {
    /// `default_context_size` is outside the accepted 512..=1_000_000 range.
    #[error("Context size must be between 512 and 1,000,000, got {0}")]
    InvalidContextSize(u64),

    /// A configured port lies in the privileged (< 1024) range; raised for
    /// both `proxy_port` and `llama_base_port`.
    #[error("Port should be >= 1024 (privileged ports require root), got {0}")]
    InvalidPort(u16),

    /// `max_download_queue_size` is outside 1..=50.
    #[error("Max download queue size must be between 1 and 50, got {0}")]
    InvalidQueueSize(u32),

    /// `default_download_path` is present but empty or whitespace-only.
    #[error("Download path cannot be empty")]
    EmptyDownloadPath,

    /// `inference_defaults` failed [`validate_inference_config`]; the
    /// payload is that function's error message.
    #[error("Invalid inference parameter: {0}")]
    InvalidInferenceConfig(String),
}
189
190pub fn validate_settings(settings: &Settings) -> Result<(), SettingsError> {
192 if let Some(ctx_size) = settings.default_context_size
194 && !(512..=1_000_000).contains(&ctx_size)
195 {
196 return Err(SettingsError::InvalidContextSize(ctx_size));
197 }
198
199 if let Some(port) = settings.proxy_port
201 && port < 1024
202 {
203 return Err(SettingsError::InvalidPort(port));
204 }
205
206 if let Some(port) = settings.llama_base_port
208 && port < 1024
209 {
210 return Err(SettingsError::InvalidPort(port));
211 }
212
213 if let Some(queue_size) = settings.max_download_queue_size
215 && !(1..=50).contains(&queue_size)
216 {
217 return Err(SettingsError::InvalidQueueSize(queue_size));
218 }
219
220 if settings
222 .default_download_path
223 .as_ref()
224 .is_some_and(|p| p.trim().is_empty())
225 {
226 return Err(SettingsError::EmptyDownloadPath);
227 }
228
229 if let Some(ref inference_config) = settings.inference_defaults {
231 validate_inference_config(inference_config)
232 .map_err(SettingsError::InvalidInferenceConfig)?;
233 }
234
235 Ok(())
236}
237
238pub fn validate_inference_config(config: &InferenceConfig) -> Result<(), String> {
242 if let Some(temp) = config.temperature
244 && !(0.0..=2.0).contains(&temp)
245 {
246 return Err(format!(
247 "Temperature must be between 0.0 and 2.0, got {temp}"
248 ));
249 }
250
251 if let Some(top_p) = config.top_p
253 && !(0.0..=1.0).contains(&top_p)
254 {
255 return Err(format!("Top P must be between 0.0 and 1.0, got {top_p}"));
256 }
257
258 if let Some(top_k) = config.top_k
260 && top_k <= 0
261 {
262 return Err(format!("Top K must be positive, got {top_k}"));
263 }
264
265 if let Some(max_tokens) = config.max_tokens
267 && max_tokens == 0
268 {
269 return Err("Max tokens must be positive".to_string());
270 }
271
272 if let Some(repeat_penalty) = config.repeat_penalty
274 && repeat_penalty <= 0.0
275 {
276 return Err(format!(
277 "Repeat penalty must be positive, got {repeat_penalty}"
278 ));
279 }
280
281 Ok(())
282}
283
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_default_settings() {
        let settings = Settings::with_defaults();
        assert_eq!(settings.default_context_size, Some(4096));
        assert_eq!(settings.proxy_port, Some(DEFAULT_PROXY_PORT));
        assert_eq!(settings.llama_base_port, Some(DEFAULT_LLAMA_BASE_PORT));
        assert_eq!(settings.default_download_path, None);
        assert_eq!(settings.max_download_queue_size, Some(10));
        assert_eq!(settings.show_memory_fit_indicators, Some(true));
    }

    #[test]
    fn test_validate_settings_valid() {
        let settings = Settings::with_defaults();
        assert!(validate_settings(&settings).is_ok());
    }

    #[test]
    fn test_validate_context_size_too_small() {
        let settings = Settings {
            default_context_size: Some(100),
            ..Default::default()
        };
        assert!(matches!(
            validate_settings(&settings),
            Err(SettingsError::InvalidContextSize(100))
        ));
    }

    #[test]
    fn test_validate_context_size_too_large() {
        let settings = Settings {
            default_context_size: Some(2_000_000),
            ..Default::default()
        };
        assert!(matches!(
            validate_settings(&settings),
            Err(SettingsError::InvalidContextSize(2_000_000))
        ));
    }

    #[test]
    fn test_validate_port_too_low() {
        let settings = Settings {
            proxy_port: Some(80),
            ..Default::default()
        };
        assert!(matches!(
            validate_settings(&settings),
            Err(SettingsError::InvalidPort(80))
        ));
    }

    #[test]
    fn test_validate_empty_path() {
        let settings = Settings {
            default_download_path: Some(String::new()),
            ..Default::default()
        };
        assert!(matches!(
            validate_settings(&settings),
            Err(SettingsError::EmptyDownloadPath)
        ));
    }

    #[test]
    fn test_validate_inference_config_valid() {
        let config = InferenceConfig {
            temperature: Some(0.7),
            top_p: Some(0.9),
            top_k: Some(40),
            max_tokens: Some(2048),
            repeat_penalty: Some(1.1),
        };
        assert!(validate_inference_config(&config).is_ok());
    }

    #[test]
    fn test_validate_inference_config_temperature_out_of_range() {
        // Above the upper bound.
        let config = InferenceConfig {
            temperature: Some(2.5),
            ..Default::default()
        };
        assert!(validate_inference_config(&config).is_err());

        // Below the lower bound.
        let config = InferenceConfig {
            temperature: Some(-0.1),
            ..Default::default()
        };
        assert!(validate_inference_config(&config).is_err());
    }

    #[test]
    fn test_validate_inference_config_top_p_out_of_range() {
        let config = InferenceConfig {
            top_p: Some(1.5),
            ..Default::default()
        };
        assert!(validate_inference_config(&config).is_err());

        let config = InferenceConfig {
            top_p: Some(-0.1),
            ..Default::default()
        };
        assert!(validate_inference_config(&config).is_err());
    }

    #[test]
    fn test_validate_inference_config_negative_values() {
        let config = InferenceConfig {
            top_k: Some(-1),
            ..Default::default()
        };
        assert!(validate_inference_config(&config).is_err());

        // Zero repeat penalty counts as non-positive.
        let config = InferenceConfig {
            repeat_penalty: Some(0.0),
            ..Default::default()
        };
        assert!(validate_inference_config(&config).is_err());
    }

    #[test]
    fn test_settings_with_valid_inference_defaults() {
        let settings = Settings {
            inference_defaults: Some(InferenceConfig {
                temperature: Some(0.8),
                top_p: Some(0.95),
                ..Default::default()
            }),
            ..Settings::with_defaults()
        };
        assert!(validate_settings(&settings).is_ok());
    }

    #[test]
    fn test_settings_with_invalid_inference_defaults() {
        let settings = Settings {
            inference_defaults: Some(InferenceConfig {
                temperature: Some(3.0),
                ..Default::default()
            }),
            ..Settings::with_defaults()
        };
        assert!(validate_settings(&settings).is_err());
    }

    #[test]
    fn test_validate_queue_size_too_small() {
        let settings = Settings {
            max_download_queue_size: Some(0),
            ..Default::default()
        };
        assert!(matches!(
            validate_settings(&settings),
            Err(SettingsError::InvalidQueueSize(0))
        ));
    }

    #[test]
    fn test_validate_queue_size_too_large() {
        let settings = Settings {
            max_download_queue_size: Some(100),
            ..Default::default()
        };
        assert!(matches!(
            validate_settings(&settings),
            Err(SettingsError::InvalidQueueSize(100))
        ));
    }

    #[test]
    fn test_merge_settings() {
        let mut settings = Settings::with_defaults();
        let update = SettingsUpdate {
            default_context_size: Some(Some(8192)),
            // Some(None) clears the setting entirely.
            proxy_port: Some(None),
            ..Default::default()
        };
        settings.merge(&update);

        assert_eq!(settings.default_context_size, Some(8192));
        assert_eq!(settings.proxy_port, None);
        // Untouched fields keep their previous values.
        assert_eq!(settings.llama_base_port, Some(DEFAULT_LLAMA_BASE_PORT));
    }

    #[test]
    fn test_effective_ports() {
        let settings = Settings::with_defaults();
        assert_eq!(settings.effective_proxy_port(), DEFAULT_PROXY_PORT);
        assert_eq!(
            settings.effective_llama_base_port(),
            DEFAULT_LLAMA_BASE_PORT
        );

        // With no ports configured the accessors fall back to the consts.
        let settings_none = Settings::default();
        assert_eq!(settings_none.effective_proxy_port(), DEFAULT_PROXY_PORT);
        assert_eq!(
            settings_none.effective_llama_base_port(),
            DEFAULT_LLAMA_BASE_PORT
        );
    }
}