gglib_core/paths/llama.rs

use std::path::PathBuf;

use super::error::PathError;
use super::platform::resource_root;

/// Returns the gglib data directory: the platform resource root joined with `.llama`.
pub fn gglib_data_dir() -> Result<PathBuf, PathError> {
    Ok(resource_root()?.join(".llama"))
}

/// Returns the expected path of the `llama-server` binary (with an `.exe`
/// suffix on Windows) under the data directory's `bin` subdirectory.
pub fn llama_server_path() -> Result<PathBuf, PathError> {
    let gglib_dir = gglib_data_dir()?;

    #[cfg(target_os = "windows")]
    let binary_name = "llama-server.exe";

    #[cfg(not(target_os = "windows"))]
    let binary_name = "llama-server";

    Ok(gglib_dir.join("bin").join(binary_name))
}

/// Returns the expected path of the `llama-cli` binary (with an `.exe`
/// suffix on Windows) under the data directory's `bin` subdirectory.
pub fn llama_cli_path() -> Result<PathBuf, PathError> {
    let gglib_dir = gglib_data_dir()?;

    #[cfg(target_os = "windows")]
    let binary_name = "llama-cli.exe";

    #[cfg(not(target_os = "windows"))]
    let binary_name = "llama-cli";

    Ok(gglib_dir.join("bin").join(binary_name))
}

/// Returns the `llama.cpp` directory inside the gglib data directory.
pub fn llama_cpp_dir() -> Result<PathBuf, PathError> {
    let gglib_dir = gglib_data_dir()?;
    Ok(gglib_dir.join("llama.cpp"))
}

/// Returns the path of the `llama-config.json` file inside the gglib data directory.
pub fn llama_config_path() -> Result<PathBuf, PathError> {
    let gglib_dir = gglib_data_dir()?;
    Ok(gglib_dir.join("llama-config.json"))
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_llama_server_path() {
        let result = llama_server_path();
        assert!(result.is_ok());

        let path = result.unwrap();
        #[cfg(target_os = "windows")]
        assert!(path.to_string_lossy().ends_with("llama-server.exe"));

        #[cfg(not(target_os = "windows"))]
        assert!(path.to_string_lossy().ends_with("llama-server"));
    }

    #[test]
    fn test_llama_cli_path() {
        let result = llama_cli_path();
        assert!(result.is_ok());

        let path = result.unwrap();
        #[cfg(target_os = "windows")]
        assert!(path.to_string_lossy().ends_with("llama-cli.exe"));

        #[cfg(not(target_os = "windows"))]
        assert!(path.to_string_lossy().ends_with("llama-cli"));
    }
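
    // Sketched-in companion tests for the remaining helpers, mirroring the
    // style of the tests above. Like those tests, they assume resource_root()
    // resolves successfully in the test environment.
    #[test]
    fn test_gglib_data_dir() {
        let result = gglib_data_dir();
        assert!(result.is_ok());
        assert!(result.unwrap().to_string_lossy().ends_with(".llama"));
    }

    #[test]
    fn test_llama_cpp_dir() {
        let result = llama_cpp_dir();
        assert!(result.is_ok());
        assert!(result.unwrap().to_string_lossy().ends_with("llama.cpp"));
    }

    #[test]
    fn test_llama_config_path() {
        let result = llama_config_path();
        assert!(result.is_ok());
        assert!(result
            .unwrap()
            .to_string_lossy()
            .ends_with("llama-config.json"));
    }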
}