// gglib_core/paths/llama.rs

//! Llama.cpp binary path resolution.
//!
//! Provides paths to the managed llama-server binary,
//! as well as the llama.cpp repository and configuration files.

use std::path::PathBuf;

use super::error::PathError;
use super::platform::resource_root;

11/// Get the gglib data directory containing llama binaries.
12///
13/// Returns the `.llama/` directory containing helper binaries.
14/// In dev, this is in the repo. In release, this is in the user data dir.
15pub fn gglib_data_dir() -> Result<PathBuf, PathError> {
16    Ok(resource_root()?.join(".llama"))
17}
19/// Get the path to the managed llama-server binary.
20pub fn llama_server_path() -> Result<PathBuf, PathError> {
21    let gglib_dir = gglib_data_dir()?;
22
23    #[cfg(target_os = "windows")]
24    let binary_name = "llama-server.exe";
25
26    #[cfg(not(target_os = "windows"))]
27    let binary_name = "llama-server";
28
29    Ok(gglib_dir.join("bin").join(binary_name))
30}
32/// Get the path to the llama.cpp repository directory.
33pub fn llama_cpp_dir() -> Result<PathBuf, PathError> {
34    let gglib_dir = gglib_data_dir()?;
35    Ok(gglib_dir.join("llama.cpp"))
36}
38/// Get the path to the llama build configuration file.
39pub fn llama_config_path() -> Result<PathBuf, PathError> {
40    let gglib_dir = gglib_data_dir()?;
41    Ok(gglib_dir.join("llama-config.json"))
42}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_llama_server_path() {
        // Resolution must succeed and end with the platform binary name.
        let path = llama_server_path().expect("llama-server path should resolve");

        let expected_suffix = if cfg!(target_os = "windows") {
            "llama-server.exe"
        } else {
            "llama-server"
        };
        assert!(path.to_string_lossy().ends_with(expected_suffix));
    }
}
60}