use std::path::PathBuf;

use super::error::PathError;
use super::platform::resource_root;

11pub fn gglib_data_dir() -> Result<PathBuf, PathError> {
16 Ok(resource_root()?.join(".llama"))
17}
18
19pub fn llama_server_path() -> Result<PathBuf, PathError> {
21 let gglib_dir = gglib_data_dir()?;
22
23 #[cfg(target_os = "windows")]
24 let binary_name = "llama-server.exe";
25
26 #[cfg(not(target_os = "windows"))]
27 let binary_name = "llama-server";
28
29 Ok(gglib_dir.join("bin").join(binary_name))
30}
31
32pub fn llama_cpp_dir() -> Result<PathBuf, PathError> {
34 let gglib_dir = gglib_data_dir()?;
35 Ok(gglib_dir.join("llama.cpp"))
36}
37
38pub fn llama_config_path() -> Result<PathBuf, PathError> {
40 let gglib_dir = gglib_data_dir()?;
41 Ok(gglib_dir.join("llama-config.json"))
42}
43
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_llama_server_path() {
        // Resolving the path must succeed on every supported platform.
        let path = llama_server_path().expect("llama_server_path should resolve");

        // The file name is platform-dependent: Windows gets the `.exe` suffix.
        let expected_suffix = if cfg!(target_os = "windows") {
            "llama-server.exe"
        } else {
            "llama-server"
        };
        assert!(path.to_string_lossy().ends_with(expected_suffix));
    }
}
60}