diff --git a/Cargo.toml b/Cargo.toml index e27d3b3..4f76383 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,3 +28,5 @@ bindgen = "0.57" xmas-elf = "0.8" bitflags = "1.3" shlex = "1.0" +remove_dir_all = "0.7" +cmake = "0.1" diff --git a/src/bindgen.rs b/src/bindgen.rs index 22ef1cd..0771e2a 100644 --- a/src/bindgen.rs +++ b/src/bindgen.rs @@ -1,55 +1,102 @@ +use std::ffi::OsStr; use std::path::{Path, PathBuf}; -use std::process::Command; use std::{env, fs}; use anyhow::*; -use crate::pio::project::SconsVariables; use crate::utils::OsStrExt; +use crate::{cargo, cli, cmake, cmd, cmd_output, pio}; pub const VAR_BINDINGS_FILE: &str = "EMBUILD_GENERATED_BINDINGS_FILE"; -#[cfg(windows)] -const EXE_SUFFIX: &str = ".exe"; - -#[cfg(not(windows))] -const EXE_SUFFIX: &str = ""; - -#[cfg(windows)] -const FS_CASE_INSENSITIVE: bool = true; - -#[cfg(not(windows))] -const FS_CASE_INSENSITIVE: bool = false; - #[derive(Clone, Default, Debug)] pub struct Factory { pub clang_args: Vec, pub linker: Option, pub mcu: Option, + pub force_cpp: bool, + pub sysroot: Option, } impl Factory { - pub fn from_scons_vars(scons_vars: &SconsVariables) -> Result { + pub fn from_scons_vars(scons_vars: &pio::project::SconsVariables) -> Result { + let clang_args = cli::NativeCommandArgs::new(&scons_vars.incflags) + .chain(cli::NativeCommandArgs::new( + scons_vars + .clangargs + .as_deref() + .unwrap_or_default(), + )) + .collect(); + Ok(Self { - clang_args: Self::get_pio_clang_args( - &scons_vars.incflags, - scons_vars.clangargs.clone(), - ), + clang_args, linker: Some(scons_vars.full_path(scons_vars.link.clone())?), mcu: Some(scons_vars.mcu.clone()), + force_cpp: false, + sysroot: None, }) } - pub fn builder(&self) -> Result { + pub fn from_cmake(compile_group: &cmake::codemodel::target::CompileGroup) -> Result { + use crate::cmake::codemodel::Language; + assert!( + compile_group.language == Language::C || compile_group.language == Language::Cpp, + "Generating bindings for languages other than C/C++ is not supported" + ); + + let clang_args = compile_group + .defines + .iter() + .map(|d| format!("-D{}", d.define)) + .chain( + compile_group + .includes + .iter() + .map(|i| format!("-I{}", &i.path)), + ) + .collect(); + + Ok(Self { + clang_args, + linker: None, + force_cpp: compile_group.language == Language::Cpp, + mcu: None, + sysroot: compile_group.sysroot.as_ref().map(|s| s.path.clone()), + }) + } + + /// Set the linker used to determine the sysroot to be used for generating bindings. + pub fn with_linker(mut self, linker: impl Into) -> Self { + self.linker = Some(linker.into()); + self + } + + pub fn builder(self) -> Result { self.create_builder(false) } - pub fn cpp_builder(&self) -> Result { + pub fn cpp_builder(self) -> Result { self.create_builder(true) } - fn create_builder(&self, cpp: bool) -> Result { - let sysroot = self.get_sysroot()?; + fn create_builder(self, cpp: bool) -> Result { + let cpp = self.force_cpp || cpp; + let sysroot = self + .sysroot + .clone() + .map_or_else(|| try_get_sysroot(&self.linker), Ok)?; + + let sysroot_args = [ + format!("--sysroot={}", sysroot.try_to_str()?), + format!("-I{}", sysroot.join("include").try_to_str()?), + ]; + + let cpp_args = if cpp { + get_cpp_includes(&sysroot)? 
+ } else { + vec![] + }; let builder = bindgen::Builder::default() .use_core() @@ -58,130 +105,25 @@ impl Factory { .derive_default(true) //.ctypes_prefix(c_types) .clang_arg("-D__bindgen") - .clang_arg(format!("--sysroot={}", sysroot.display())) - .clang_arg(format!("-I{}", sysroot.join("include").try_to_str()?)) + .clang_args(sysroot_args) .clang_args(&["-x", if cpp { "c++" } else { "c" }]) - .clang_args(if cpp { - Self::get_cpp_includes(sysroot)? - } else { - Vec::new() - }) + .clang_args(cpp_args) .clang_args(&self.clang_args); - eprintln!( + log::debug!( "Bindgen builder factory flags: {:?}", builder.command_line_flags() ); Ok(builder) } - - fn get_sysroot(&self) -> Result { - let linker = if let Some(linker) = self.linker.as_ref() { - linker - .clone() - .into_os_string() - .into_string() - .map_err(|_| anyhow!("Cannot convert the linker variable to String"))? - } else if let Ok(linker) = env::var("RUSTC_LINKER") { - linker - } else { - bail!("No explicit linker, and env var RUSTC_LINKER not defined either"); - }; - - let gcc = format!("gcc{}", EXE_SUFFIX); - let gcc_suffix = format!("-{}", gcc); - - let linker_canonicalized = if FS_CASE_INSENSITIVE { - linker.to_lowercase() - } else { - linker.clone() - }; - - let linker = if linker_canonicalized == gcc || linker_canonicalized.ends_with(&gcc_suffix) { - // For whatever reason, --print-sysroot does not work with GCC - // Change it to LD - format!("{}ld{}", &linker[0..linker.len() - gcc.len()], EXE_SUFFIX) - } else { - linker - }; - - let output = Command::new(linker).arg("--print-sysroot").output()?; - - let path_str = String::from_utf8(output.stdout)?; - - Ok(PathBuf::from(path_str.trim())) - } - - fn get_cpp_includes(sysroot: impl AsRef) -> Result> { - let sysroot = sysroot.as_ref(); - let cpp_includes_root = sysroot.join("include").join("c++"); - - let cpp_version = fs::read_dir(&cpp_includes_root)? - .map(|dir_entry_r| dir_entry_r.map(|dir_entry| dir_entry.path())) - .fold(None, |ao: Option, sr: Result| { - if let Some(a) = ao.as_ref() { - sr.ok() - .map_or(ao.clone(), |s| if a >= &s { ao.clone() } else { Some(s) }) - } else { - sr.ok() - } - }); - - if let Some(cpp_version) = cpp_version { - let mut cpp_include_paths = vec![ - format!("-I{}", cpp_version.try_to_str()?), - format!("-I{}", cpp_version.join("backward").try_to_str()?), - ]; - - if let Some(sysroot_last_segment) = fs::canonicalize(sysroot)?.file_name() { - cpp_include_paths.push(format!( - "-I{}", - cpp_version.join(sysroot_last_segment).try_to_str()? 
- )); - } - - Ok(cpp_include_paths) - } else { - Ok(Vec::new()) - } - } - - fn get_pio_clang_args( - incflags: impl AsRef, - extra_args: Option>, - ) -> Vec { - let mut result = incflags - .as_ref() - .split(' ') - .map(str::to_string) - .collect::>(); - - if let Some(extra_args) = extra_args { - result.append( - &mut extra_args - .as_ref() - .split(' ') - .map(str::to_string) - .collect::>(), - ); - } - - result - } } pub fn run(builder: bindgen::Builder) -> Result<()> { let output_file = PathBuf::from(env::var("OUT_DIR")?).join("bindings.rs"); - run_for_file(builder, &output_file)?; - println!( - "cargo:rustc-env={}={}", - VAR_BINDINGS_FILE, - output_file.display() - ); - + cargo::set_rustc_env(VAR_BINDINGS_FILE, output_file.try_to_str()?); Ok(()) } @@ -199,12 +141,77 @@ pub fn run_for_file(builder: bindgen::Builder, output_file: impl AsRef) -> // Run rustfmt on the generated bindings separately, because custom toolchains often do not have rustfmt // Hence why we need to use the rustfmt from the stable buildchain (where the assumption is, it is already installed) - Command::new("rustup") - .arg("run") - .arg("stable") - .arg("rustfmt") - .arg(output_file) - .status()?; - + cmd!("rustup", "run", "stable", "rustfmt", output_file)?; Ok(()) } + +fn try_get_sysroot(linker: &Option>) -> Result { + let linker = if let Some(ref linker) = linker { + linker.as_ref().to_owned() + } else if let Some(linker) = env::var_os("RUSTC_LINKER") { + PathBuf::from(linker) + } else { + bail!("Could not determine linker: No explicit linker and `RUSTC_LINKER` not set"); + }; + + let gcc_file_stem = linker + .file_stem() + .and_then(OsStr::to_str) + .filter(|&s| s == "gcc" || s.ends_with("-gcc")); + + // For whatever reason, --print-sysroot does not work with GCC + // Change it to LD + let linker = if let Some(stem) = gcc_file_stem { + let mut ld_linker = + linker.with_file_name(format!("{}{}", stem.strip_suffix("gcc").unwrap(), "ld")); + if let Some(ext) = linker.extension() { + ld_linker.set_extension(ext); + } + ld_linker + } else { + linker + }; + + cmd_output!(linker, "--print-sysroot") + .with_context(|| { + anyhow!( + "Could not determine sysroot from linker '{}'", + linker.display() + ) + }) + .map(PathBuf::from) +} + +fn get_cpp_includes(sysroot: impl AsRef) -> Result> { + let sysroot = sysroot.as_ref(); + let cpp_includes_root = sysroot.join("include").join("c++"); + + let cpp_version = fs::read_dir(&cpp_includes_root)? + .map(|dir_entry_r| dir_entry_r.map(|dir_entry| dir_entry.path())) + .fold(None, |ao: Option, sr: Result| { + if let Some(a) = ao.as_ref() { + sr.ok() + .map_or(ao.clone(), |s| if a >= &s { ao.clone() } else { Some(s) }) + } else { + sr.ok() + } + }); + + if let Some(cpp_version) = cpp_version { + let mut cpp_include_paths = vec![ + format!("-I{}", cpp_version.try_to_str()?), + format!("-I{}", cpp_version.join("backward").try_to_str()?), + ]; + + if let Some(sysroot_last_segment) = fs::canonicalize(sysroot)?.file_name() { + cpp_include_paths.push(format!( + "-I{}", + cpp_version.join(sysroot_last_segment).try_to_str()? 
+ )); + } + + Ok(cpp_include_paths) + } else { + Ok(Vec::new()) + } +} diff --git a/src/build.rs b/src/build.rs index 5f7bcdc..1036e14 100644 --- a/src/build.rs +++ b/src/build.rs @@ -137,17 +137,22 @@ pub struct LinkArgsBuilder { } impl LinkArgsBuilder { - pub fn force_ldproxy(&mut self, value: bool) -> &mut Self { + pub fn force_ldproxy(mut self, value: bool) -> Self { self.force_ldproxy = value; self } + + pub fn linker(mut self, path: impl Into) -> Self { + self.linker = Some(path.into()); + self + } - pub fn working_directory(&mut self, dir: impl AsRef) -> &mut Self { + pub fn working_directory(mut self, dir: impl AsRef) -> Self { self.working_directory = Some(dir.as_ref().to_owned()); self } - pub fn dedup_libs(&mut self, dedup: bool) -> &mut Self { + pub fn dedup_libs(mut self, dedup: bool) -> Self { self.dedup_libs = dedup; self } @@ -166,11 +171,11 @@ impl LinkArgsBuilder { .unwrap_or(false); if self.force_ldproxy && !detected_ldproxy { - print_warning(concat!( - "The linker arguments force the usage of `ldproxy` but the linker used ", - "by cargo is different. Please set the linker to `ldproxy` in your cargo config ", - "or set `force_ldproxy` to `false`." - )); + print_warning( + "The linker arguments force the usage of `ldproxy` but the linker used \ + by cargo is different. Please set the linker to `ldproxy` in your cargo config \ + or set `force_ldproxy` to `false`." + ); } if self.force_ldproxy || detected_ldproxy { diff --git a/src/cargo.rs b/src/cargo.rs index b1018e8..4ad3ebe 100644 --- a/src/cargo.rs +++ b/src/cargo.rs @@ -7,8 +7,8 @@ use anyhow::*; use cargo_toml::{Manifest, Product}; use log::*; -use crate::cmd; use crate::utils::OsStrExt; +use crate::{cargo, cmd}; #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub enum CargoCmd { @@ -43,9 +43,9 @@ impl Crate { debug!("Generating new Cargo crate in path {}", self.0.display()); cmd!( - "cargo", if init { "init" } else {"new"}; - args=(options), - arg=(&self.0) + "cargo", if init { "init" } else {"new"}, + @options, + &self.0 )?; Ok(()) } @@ -248,7 +248,7 @@ build-std-features = ["panic_immediate_abort"] Some(b) => b == binary, _ => false, }) - .ok_or(anyhow!("Cannot locate binary with name {}", binary))? + .ok_or_else(|| anyhow!("Cannot locate binary with name {}", binary))? } else { if bin_products.len() > 1 { bail!( @@ -331,3 +331,32 @@ pub fn set_rustc_env(key: impl Display, value: impl Display) { pub fn print_warning(warning: impl Display) { println!("cargo:warning={}", warning); } + +pub trait IntoWarning { + type T; + + /// Print a cargo warning for this error. 
+ fn into_warning(self) -> Self::T; +} + +impl IntoWarning for Error { + type T = (); + fn into_warning(self) { + for line in format!("{:#}", self.context("error turned into warning")).lines() { + cargo::print_warning(line); + } + } +} + +impl IntoWarning for Result { + type T = Option; + fn into_warning(self) -> Option { + match self { + Ok(v) => Some(v), + Err(e) => { + e.into_warning(); + None + } + } + } +} diff --git a/src/cli/separate_args.rs b/src/cli/separate_args.rs index 58847a9..8379fd7 100644 --- a/src/cli/separate_args.rs +++ b/src/cli/separate_args.rs @@ -156,6 +156,11 @@ impl<'a> Iterator for WindowsCommandArgs<'a> { pub use shlex::Shlex as UnixCommandArgs; +#[cfg(windows)] +pub type NativeCommandArgs<'a> = WindowsCommandArgs<'a>; +#[cfg(unix)] +pub type NativeCommandArgs<'a> = UnixCommandArgs<'a>; + #[cfg(test)] mod test { use super::*; diff --git a/src/cmake.rs b/src/cmake.rs new file mode 100644 index 0000000..b44bfc0 --- /dev/null +++ b/src/cmake.rs @@ -0,0 +1,123 @@ +use std::collections::HashMap; +use std::convert::TryFrom; +use std::env; +use std::ffi::OsString; +use std::fs::File; +use std::io::Write; +use std::path::Path; + +use anyhow::{Error, Result}; + +use crate::build::{CInclArgs, LinkArgsBuilder}; +use crate::cli::NativeCommandArgs; +use crate::cmd_output; + +mod file_api; + +pub use ::cmake::*; +pub use file_api::*; + +/// Get all variables defined in the `cmake_script_file`. +/// +/// #### Note +/// This will run the script using `cmake -P`, beware of any side effects. Variables that +/// cmake itself sets will also be returned. +pub fn get_script_variables( + cmake_script_file: impl AsRef, +) -> Result> { + let mut temp_file = tempfile::NamedTempFile::new()?; + std::io::copy(&mut File::open(cmake_script_file)?, &mut temp_file)?; + + temp_file.write_all( + r#" +message(STATUS "VARIABLE_DUMP_START") +get_cmake_property(_variableNames VARIABLES) +list (SORT _variableNames) +foreach (_variableName ${_variableNames}) + message(STATUS "${_variableName}=${${_variableName}}") +endforeach() + "# + .as_bytes(), + )?; + + temp_file.as_file().sync_all()?; + let temp_file = temp_file.into_temp_path(); + + let output = cmd_output!(cmake(), "-P", &temp_file)?; + drop(temp_file); + + Ok(output + .lines() + .filter_map(|l| l.strip_prefix("-- ")) + .skip_while(|&l| l != "VARIABLE_DUMP_START") + .skip(1) + .map(|l| { + if let Some((name, value)) = l.split_once('=') { + (name.to_owned(), value.to_owned()) + } else { + (l.to_owned(), String::new()) + } + }) + .collect()) +} + +/// The cmake executable used. 
+pub fn cmake() -> OsString { + env::var_os("CMAKE").unwrap_or_else(|| "cmake".into()) +} + +impl TryFrom<&codemodel::target::Link> for LinkArgsBuilder { + type Error = Error; + + fn try_from(link: &codemodel::target::Link) -> Result { + let linkflags = link + .command_fragments + .iter() + .map(|f| NativeCommandArgs::new(&f.fragment)) + .flatten() + .collect(); + Ok(LinkArgsBuilder { + linkflags, + ..Default::default() + }) + } +} + +impl TryFrom<&codemodel::target::CompileGroup> for CInclArgs { + type Error = Error; + + fn try_from(value: &codemodel::target::CompileGroup) -> Result { + let flags = value + .defines + .iter() + .map(|d| format!("-D{}", d.define)) + .chain(value.includes.iter().map(|i| format!("\"-I{}\"", i.path))) + .collect::>() + .join(" "); + Ok(Self(flags)) + } +} + +#[cfg(test)] +mod tests { + use std::io::Write; + + use super::*; + + #[test] + fn test_get_script_variables() { + let mut script = tempfile::NamedTempFile::new().unwrap(); + write!(&mut script, "set(VAR \"some string\")").unwrap(); + + let script_path = script.into_temp_path(); + let vars = get_script_variables(&script_path).unwrap(); + + println!("{:?}", vars); + + let var = vars + .iter() + .map(|(k, v)| (k.as_str(), v.as_str())) + .find(|&(k, _)| k == "VAR"); + assert_eq!(var, Some(("VAR", "some string"))); + } +} diff --git a/src/cmake/file_api.rs b/src/cmake/file_api.rs new file mode 100644 index 0000000..0d85b13 --- /dev/null +++ b/src/cmake/file_api.rs @@ -0,0 +1,84 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use anyhow::Result; +use serde::Deserialize; + +use crate::path_buf; + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct Version { + pub major: u32, + pub minor: u32, + #[serde(default)] + pub patch: u32, + #[serde(default)] + pub suffix: String, + #[serde(default)] + pub is_dirty: bool, +} + +impl std::fmt::Display for Version { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}.{}.{}{}{}", + self.major, + self.minor, + self.patch, + if !self.suffix.is_empty() { "-" } else { "" }, + self.suffix + ) + } +} + +#[derive(Clone, Debug)] +pub struct Query<'a> { + api_dir: PathBuf, + client_name: String, + kinds: &'a [ObjKind], +} + +impl Query<'_> { + /// Create a new query. + pub fn new( + cmake_build_dir: impl AsRef, + client_name: impl Into, + kinds: &[ObjKind], + ) -> Result { + let client_name = client_name.into(); + let api_dir = path_buf![cmake_build_dir, ".cmake", "api", "v1"]; + + let client_dir = path_buf![&api_dir, "query", format!("client-{}", &client_name)]; + fs::create_dir_all(&client_dir)?; + + for kind in kinds { + fs::File::create(client_dir.join(format!( + "{}-v{}", + kind.as_str(), + kind.supported_version() + )))?; + } + + Ok(Query { + api_dir, + client_name, + kinds, + }) + } + + /// Try to get all replies from this query. 
+ pub fn get_replies(&self) -> Result { + Replies::from_query(self) + } +} + +pub mod cache; +pub mod codemodel; +mod index; +pub mod toolchains; + +pub use cache::Cache; +pub use codemodel::Codemodel; +pub use index::*; diff --git a/src/cmake/file_api/cache.rs b/src/cmake/file_api/cache.rs new file mode 100644 index 0000000..43d6872 --- /dev/null +++ b/src/cmake/file_api/cache.rs @@ -0,0 +1,89 @@ +use std::convert::TryFrom; +use std::fs; + +use anyhow::{anyhow, Context, Error}; +use serde::Deserialize; + +use super::{index, ObjKind, Version}; + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +pub struct Cache { + pub version: Version, + pub entries: Vec, +} + +impl TryFrom<&index::Reply> for Cache { + type Error = Error; + fn try_from(value: &index::Reply) -> Result { + assert!(value.kind == ObjKind::Cache); + ObjKind::Cache + .check_version_supported(value.version.major) + .unwrap(); + + serde_json::from_reader(&fs::File::open(&value.json_file)?).with_context(|| { + anyhow!( + "Parsing cmake-file-api cache object file '{}' failed", + value.json_file.display() + ) + }) + } +} + +impl Cache { + pub fn linker(&self) -> Option<&String> { + self.entries + .iter() + .find(|e| e.name == "CMAKE_LINKER") + .map(|e| &e.value) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +pub struct Entry { + pub name: String, + pub value: String, + #[serde(rename = "type")] + pub entry_type: Type, + pub properties: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(from = "String")] +pub enum Type { + Bool, + Path, + Filepath, + String, + Internal, + Static, + Uninitialized, + Other(String), +} + +impl From for Type { + fn from(s: String) -> Self { + match s.as_str() { + "BOOL" => Self::Bool, + "PATH" => Self::Path, + "FILEPATH" => Self::Filepath, + "STRING" => Self::String, + "INTERNAL" => Self::Internal, + "STATIC" => Self::Static, + "UNINITIALIZED" => Self::Uninitialized, + _ => Self::Other(s), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(rename_all = "UPPERCASE", tag = "name", content = "value")] +pub enum Property { + Advanced(String), + Helpstring(String), + Modified(String), + Strings(String), + Type(Type), + Value(String), + #[serde(other)] + Unknown, +} diff --git a/src/cmake/file_api/codemodel.rs b/src/cmake/file_api/codemodel.rs new file mode 100644 index 0000000..f2b7f13 --- /dev/null +++ b/src/cmake/file_api/codemodel.rs @@ -0,0 +1,237 @@ +use std::convert::TryFrom; +use std::fs; +use std::path::PathBuf; +use std::sync::Arc; + +use anyhow::{anyhow, Context, Error, Result}; +use serde::Deserialize; + +use super::index::{self, ObjKind}; +use super::Version; + +#[derive(Debug, Deserialize, Clone)] +pub struct Codemodel { + #[serde(skip)] + codemodel_dir: Arc, + pub version: Version, + pub paths: Paths, + pub configurations: Vec, +} + +impl TryFrom<&index::Reply> for Codemodel { + type Error = Error; + fn try_from(value: &index::Reply) -> Result { + assert!(value.kind == ObjKind::Codemodel); + ObjKind::Codemodel + .check_version_supported(value.version.major) + .unwrap(); + + let mut codemodel: Codemodel = serde_json::from_reader(&fs::File::open(&value.json_file)?) 
+ .with_context(|| { + anyhow!( + "Parsing cmake-file-api codemodel object file '{}' failed", + value.json_file.display() + ) + })?; + + codemodel.codemodel_dir = Arc::new(value.json_file.parent().unwrap().to_owned()); + for conf in codemodel.configurations.iter_mut() { + conf.codemodel_dir = codemodel.codemodel_dir.clone(); + } + + Ok(codemodel) + } +} + +impl Codemodel { + pub fn into_conf(self) -> Vec { + self.configurations + } + + pub fn into_first_conf(self) -> Configuration { + self.configurations + .into_iter() + .next() + .expect("no configurations") + } + + /// The path to the directory containing the file represented by this + /// [`Codemodel`] instance. + pub fn dir_path(&self) -> &PathBuf { + &self.codemodel_dir + } +} + +#[derive(Debug, Deserialize, Clone)] +pub struct Paths { + /// The absolute path to the top-level source directory. + pub source: PathBuf, + /// The absolute path to the top-level build directory. + pub build: PathBuf, +} + +#[derive(Debug, Deserialize, Clone)] +pub struct Configuration { + #[serde(skip)] + codemodel_dir: Arc, + pub name: String, + #[serde(rename = "targets")] + pub target_refs: Vec, +} + +impl Configuration { + pub fn get_target(&self, name: impl AsRef) -> Option> { + self.target_refs + .iter() + .find(|t| t.name == name.as_ref()) + .map(|t| t.deref(self)) + } + + pub fn targets(&self) -> impl Iterator> + '_ { + self.target_refs.iter().map(move |t| t.deref(self)) + } +} + +#[derive(Debug, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TargetRef { + pub name: String, + pub directory_index: usize, + pub project_index: usize, + pub json_file: String, +} + +impl TargetRef { + pub fn deref(&self, cfg: &Configuration) -> Result { + target::Target::from_file(cfg.codemodel_dir.join(&self.json_file)) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] +pub enum Language { + C, + #[serde(rename = "CXX")] + Cpp, + #[serde(rename = "CUDA")] + Cuda, + #[serde(rename = "OBJCXX")] + ObjectiveCpp, + #[serde(rename = "HIP")] + Hip, + #[serde(rename = "ISPC")] + Ispc, + #[serde(rename = "ASM")] + Assembly, +} + +pub use target::Target; + +pub mod target { + use std::path::{Path, PathBuf}; + + use anyhow::*; + use serde::Deserialize; + + use super::Language; + + #[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Hash)] + #[serde(rename_all = "SCREAMING_SNAKE_CASE")] + pub enum Type { + Executable, + StaticLibrary, + SharedLibrary, + ModuleLibrary, + ObjectLibrary, + InterfaceLibrary, + Utility, + } + + #[derive(Debug, Deserialize, Clone)] + #[serde(rename_all = "camelCase")] + pub struct Target { + pub name: String, + pub link: Link, + pub compile_groups: Vec, + #[serde(rename = "type")] + pub target_type: Type, + } + + impl Target { + pub fn from_file(file_path: impl AsRef) -> Result { + let file = std::fs::File::open(&file_path)?; + let value: Target = serde_json::from_reader(file).with_context(|| { + anyhow!( + "Failed to parse the cmake-file-api target file '{}'", + file_path.as_ref().display() + ) + })?; + + Ok(value) + } + } + + #[derive(Debug, Deserialize, Clone)] + #[serde(rename_all = "camelCase")] + pub struct CompileGroup { + pub language: Language, + #[serde(default)] + pub compile_command_fragments: Vec, + #[serde(default)] + pub includes: Vec, + #[serde(default)] + pub defines: Vec, + pub sysroot: Option, + } + + #[derive(Debug, Deserialize, Clone)] + pub struct Fragment { + pub fragment: String, + } + + #[derive(Debug, Deserialize, Clone)] + pub struct Define { + pub define: String, + } + + 
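// Illustrative sketch, not part of the diff: how a compile group from a cmake-file-api
// target reply might deserialize into the structs in this module. The JSON values and
// the function name are invented, and `CompileGroup::sysroot` is assumed to be
// `Option<Sysroot>`. Something like this could live in a unit test next to the structs.
fn example_compile_group() -> anyhow::Result<CompileGroup> {
    let json = r#"{
        "language": "C",
        "defines": [ { "define": "FOO=1" } ],
        "includes": [ { "path": "/opt/sdk/include", "isSystem": true } ],
        "sysroot": { "path": "/opt/sdk/sysroot" }
    }"#;

    // `language` maps onto the `Language` enum, `isSystem` onto `Include::is_system`
    // via the `camelCase` rename, and the missing `compileCommandFragments` field
    // falls back to the `#[serde(default)]` empty vec.
    let group: CompileGroup = serde_json::from_str(json)?;
    assert_eq!(group.language, Language::C);
    assert!(group.includes[0].is_system);
    Ok(group)
}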
#[derive(Debug, Deserialize, Clone)] + #[serde(rename_all = "camelCase")] + pub struct Include { + pub path: String, + #[serde(default)] + pub is_system: bool, + } + + #[derive(Debug, Deserialize, Clone)] + #[serde(rename_all = "camelCase")] + pub struct Link { + pub language: String, + pub command_fragments: Vec, + #[serde(default)] + pub lto: bool, + pub sysroot: Option, + } + + #[derive(Debug, Deserialize, Clone)] + pub struct CommandFragment { + pub fragment: String, + pub role: Role, + } + + #[derive(Debug, PartialEq, Eq, Deserialize, Clone, Copy)] + #[serde(rename_all = "camelCase")] + pub enum Role { + /// Link flags + Flags, + /// Link library file paths or flags + Libraries, + /// Library search path flags + LibraryPath, + /// MacOS framework search path flags + FrameworkPath, + } + + #[derive(Debug, Deserialize, Clone)] + pub struct Sysroot { + pub path: PathBuf, + } +} diff --git a/src/cmake/file_api/index.rs b/src/cmake/file_api/index.rs new file mode 100644 index 0000000..adbb864 --- /dev/null +++ b/src/cmake/file_api/index.rs @@ -0,0 +1,278 @@ +use std::collections::HashMap; +use std::convert::TryFrom; +use std::fs; +use std::path::PathBuf; + +use anyhow::{anyhow, bail, Context, Result}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +use super::cache::Cache; +use super::codemodel::Codemodel; +use super::toolchains::Toolchains; +use super::{Query, Version}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize)] +pub enum PathsKey { + #[serde(rename = "cmake")] + CMake, + #[serde(rename = "ctest")] + CTest, + #[serde(rename = "cpack")] + CPack, + #[serde(rename = "root")] + Root, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Generator { + pub multi_config: bool, + pub name: String, + pub platform: Option, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CMake { + pub version: Version, + pub paths: HashMap, + pub generator: Generator, +} + +#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, Copy, Hash)] +#[serde(rename_all = "camelCase")] +pub enum ObjKind { + Codemodel, + Cache, + CmakeFiles, + Toolchains, +} + +impl ObjKind { + /// Get the supported major version of this object kind. + pub(crate) const fn supported_version(self) -> u32 { + match self { + Self::Codemodel => 2, + Self::Cache => 2, + Self::CmakeFiles => 1, + Self::Toolchains => 1, + } + } + + /// Check if `object_version` is supported by this library + pub fn check_version_supported(self, object_version: u32) -> Result<()> { + let expected_version = self.supported_version(); + if object_version != expected_version { + bail!( + "cmake {} object version not supported (expected {}, got {})", + self.as_str(), + expected_version, + object_version + ); + } else { + Ok(()) + } + } + + /// Get the minimum required cmake version for this object kind. 
+    pub fn min_cmake_version(self) -> Version {
+        let (major, minor) = match self {
+            Self::Codemodel => (3, 14),
+            Self::Cache => (3, 14),
+            Self::CmakeFiles => (3, 14),
+            Self::Toolchains => (3, 20),
+        };
+        Version {
+            major,
+            minor,
+            ..Version::default()
+        }
+    }
+
+    pub fn as_str(self) -> &'static str {
+        match self {
+            Self::Codemodel => "codemodel",
+            Self::Cache => "cache",
+            Self::CmakeFiles => "cmakeFiles",
+            Self::Toolchains => "toolchains",
+        }
+    }
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Reply {
+    pub json_file: PathBuf,
+    pub kind: ObjKind,
+    pub version: Version,
+}
+
+impl Reply {
+    pub fn codemodel(&self) -> Result {
+        Codemodel::try_from(self)
+    }
+
+    pub fn cache(&self) -> Result {
+        Cache::try_from(self)
+    }
+
+    pub fn toolchains(&self) -> Result {
+        Toolchains::try_from(self)
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct Replies {
+    pub cmake: CMake,
+    pub replies: HashMap,
+}
+
+impl Replies {
+    pub fn from_query(query: &Query) -> Result {
+        let reply_dir = query.api_dir.join("reply");
+
+        let index_file = fs::read_dir(&reply_dir)
+            .context("Failed to list cmake-file-api reply directory")?
+            .filter_map(
+                |file| match (&file, file.as_ref().ok().and_then(|f| f.file_type().ok())) {
+                    (Ok(f), Some(file_type))
+                        if file_type.is_file()
+                            && f.file_name().to_string_lossy().starts_with("index-") =>
+                    {
+                        Some(f.path())
+                    }
+                    _ => None,
+                },
+            )
+            .max()
+            .ok_or_else(|| {
+                anyhow!(
+                    "No cmake-file-api index file found in '{}' \
+                     (cmake version must be at least 3.14)",
+                    reply_dir.display()
+                )
+            })?;
+
+        #[derive(Deserialize)]
+        struct Index {
+            cmake: CMake,
+            reply: HashMap,
+        }
+
+        let base_error = || {
+            anyhow!(
+                "Failed to parse the cmake-file-api index file '{}'",
+                index_file.display()
+            )
+        };
+        let Index { cmake, reply } =
+            serde_json::from_reader(&fs::File::open(&index_file)?).with_context(&base_error)?;
+
+        for kind in query.kinds {
+            let min_cmake_version = kind.min_cmake_version();
+            if cmake.version.major < min_cmake_version.major
+                || cmake.version.minor < min_cmake_version.minor
+            {
+                bail!(
+                    "cmake-file-api {} object not supported: cmake version mismatch, \
+                     expected at least version {}, got version {} instead",
+                    kind.as_str(),
+                    min_cmake_version,
+                    &cmake.version
+                );
+            }
+        }
+
+        let client = format!("client-{}", &query.client_name);
+        let (_, reply) = reply
+            .into_iter()
+            .find(|(k, _)| k == &client)
+            .ok_or_else(|| anyhow!("Reply for client '{}' not found", &query.client_name))
+            .with_context(&base_error)?;
+
+        #[derive(Deserialize)]
+        #[serde(untagged)]
+        enum ReplyOrError {
+            Reply(Reply),
+            Error { error: String },
+        }
+
+        let mut errors = vec![];
+        let replies: HashMap =
+            serde_json::from_value::>(reply)
+                .with_context(&base_error)?
+                .into_iter()
+                .filter_map(|(_, v)| match v {
+                    ReplyOrError::Reply(mut r) => {
+                        if let Err(err) = r.kind.check_version_supported(r.version.major) {
+                            errors.push(err.to_string());
+                            None
+                        } else {
+                            r.json_file = reply_dir.join(r.json_file);
+                            Some((r.kind, r))
+                        }
+                    }
+                    ReplyOrError::Error { error } => {
+                        errors.push(error);
+                        None
+                    }
+                })
+                .collect();
+
+        let not_found = query
+            .kinds
+            .iter()
+            .filter(|k| !replies.contains_key(k))
+            .map(|k| k.as_str())
+            .collect::>();
+
+        if !not_found.is_empty() {
+            let error = anyhow!(
+                "Objects {} could not be deserialized{}",
+                not_found.join(", "),
+                if errors.is_empty() {
+                    String::new()
+                } else {
+                    format!(":\n{}", errors.join(",\n"))
+                }
+            );
+            return Err(error
+                .context(format!(
+                    "Could not deserialize all requested objects ({:?})",
+                    query.kinds
+                ))
+                .context(base_error()));
+        } else if !errors.is_empty() {
+            log::debug!(
+                "Errors while deserializing cmake-file-api index `{:?}`: {}",
+                index_file,
+                errors.join(",\n")
+            );
+        }
+
+        Ok(Replies { cmake, replies })
+    }
+
+    pub fn get_kind(&self, kind: ObjKind) -> Result<&Reply> {
+        self.replies.get(&kind).ok_or_else(|| {
+            anyhow!(
+                "Object {:?} (version {}) not found in cmake-file-api reply index",
+                kind,
+                kind.supported_version()
+            )
+        })
+    }
+
+    pub fn get_codemodel(&self) -> Result {
+        self.get_kind(ObjKind::Codemodel)?.codemodel()
+    }
+
+    pub fn get_cache(&self) -> Result {
+        self.get_kind(ObjKind::Cache)?.cache()
+    }
+
+    pub fn get_toolchains(&self) -> Result {
+        self.get_kind(ObjKind::Toolchains)?.toolchains()
+    }
+}
diff --git a/src/cmake/file_api/toolchains.rs b/src/cmake/file_api/toolchains.rs
new file mode 100644
index 0000000..35f849d
--- /dev/null
+++ b/src/cmake/file_api/toolchains.rs
@@ -0,0 +1,64 @@
+use std::convert::TryFrom;
+use std::fs;
+use std::path::PathBuf;
+
+use anyhow::{anyhow, Context, Error};
+use serde::Deserialize;
+
+use super::codemodel::Language;
+use super::{index, ObjKind, Version};
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct Toolchains {
+    pub version: Version,
+    pub toolchains: Vec,
+}
+
+impl TryFrom<&index::Reply> for Toolchains {
+    type Error = Error;
+    fn try_from(value: &index::Reply) -> Result {
+        assert!(value.kind == ObjKind::Toolchains);
+        ObjKind::Toolchains
+            .check_version_supported(value.version.major)
+            .unwrap();
+
+        serde_json::from_reader(&fs::File::open(&value.json_file)?).with_context(|| {
+            anyhow!(
+                "Parsing cmake-file-api toolchains object file '{}' failed",
+                value.json_file.display()
+            )
+        })
+    }
+}
+
+impl Toolchains {
+    pub fn get(&self, lang: Language) -> Option<&Toolchain> {
+        self.toolchains.iter().find(|t| t.language == lang)
+    }
+
+    pub fn take(&mut self, lang: Language) -> Option {
+        let (i, _) = self
+            .toolchains
+            .iter()
+            .enumerate()
+            .find(|(_, t)| t.language == lang)?;
+        Some(self.toolchains.swap_remove(i))
+    }
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct Toolchain {
+    pub language: Language,
+    pub compiler: Compiler,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Compiler {
+    pub path: Option,
+    pub id: Option,
+    pub version: Option,
+    pub target: Option,
+    #[serde(default)]
+    pub source_file_extensions: Vec,
+}
diff --git a/src/fs.rs b/src/fs.rs
new file mode 100644
index 0000000..52bc1bc
--- /dev/null
+++ b/src/fs.rs
@@ -0,0 +1,88 @@
+//!
Filesystem utilities + +use std::fs::{self, File}; +use std::io::{self, Read, Seek}; +use std::path::Path; + +use anyhow::Result; + +pub use remove_dir_all::*; + +/// Copy `src_file` to `dest_file_or_dir` if `src_file` is different or the destination +/// file doesn't exist. +pub fn copy_file_if_different( + src_file: impl AsRef, + dest_file_or_dir: impl AsRef, +) -> Result<()> { + let src_file: &Path = src_file.as_ref(); + let dest_file_or_dir: &Path = dest_file_or_dir.as_ref(); + + assert!(src_file.is_file()); + + let mut src_fd = fs::File::open(src_file)?; + + let (dest_fd, dest_file) = if dest_file_or_dir.exists() { + if dest_file_or_dir.is_dir() { + let dest_file = dest_file_or_dir.join(src_file.file_name().unwrap()); + if dest_file.exists() { + (Some(fs::File::open(&dest_file)?), dest_file) + } else { + (None, dest_file) + } + } else { + ( + Some(fs::File::open(dest_file_or_dir)?), + dest_file_or_dir.to_owned(), + ) + } + } else { + (None, dest_file_or_dir.to_owned()) + }; + + if let Some(mut dest_fd) = dest_fd { + if !is_file_eq(&mut src_fd, &mut dest_fd)? { + drop(dest_fd); + drop(src_fd); + fs::copy(src_file, dest_file)?; + } + } else { + fs::copy(src_file, dest_file)?; + } + Ok(()) +} + +/// Whether the file type and contents of `file` are equal to `other`. +pub fn is_file_eq(file: &mut File, other: &mut File) -> Result { + let file_meta = file.metadata()?; + let other_meta = other.metadata()?; + + if file_meta.file_type() == other_meta.file_type() && file_meta.len() == other_meta.len() { + let mut file_bytes = io::BufReader::new(&*file).bytes(); + let mut other_bytes = io::BufReader::new(&*other).bytes(); + + // TODO: check performance + let result = loop { + match (file_bytes.next(), other_bytes.next()) { + (Some(Ok(b0)), Some(Ok(b1))) => { + if b0 != b1 { + break Ok(false); + } + } + (None, None) => break Ok(true), + (None, Some(_)) | (Some(_), None) => break Ok(false), + (Some(Err(e)), _) | (_, Some(Err(e))) => return Err(e.into()), + } + }; + drop(file_bytes); + drop(other_bytes); + + // rewind files + // TODO: is this needed? + file.seek(io::SeekFrom::Start(0))?; + other.seek(io::SeekFrom::Start(0))?; + + result + } else { + Ok(false) + } +} diff --git a/src/git.rs b/src/git.rs new file mode 100644 index 0000000..2a127fd --- /dev/null +++ b/src/git.rs @@ -0,0 +1,296 @@ +//! Git utilities + +use std::ffi::OsStr; +use std::num::NonZeroU64; +use std::path::{Path, PathBuf}; + +use anyhow::{anyhow, Result}; + +use crate::fs::remove_dir_all; +use crate::{cmd, cmd_output}; + +pub const GIT: &str = "git"; + +pub struct Repository { + git_dir: PathBuf, + worktree: PathBuf, + remote_name: Option, +} + +impl Repository { + pub fn new(dir: impl AsRef) -> Repository { + Repository { + git_dir: dir.as_ref().join(".git"), + worktree: dir.as_ref().to_owned(), + remote_name: None, + } + } + + /// Get the remote name from which this repository was cloned. + pub fn origin(&self) -> Option<&String> { + self.remote_name.as_ref() + } + + fn git_args(&self) -> [&OsStr; 4] { + [ + OsStr::new("--git-dir"), + self.git_dir.as_os_str(), + OsStr::new("--work-tree"), + self.worktree.as_os_str(), + ] + } + + /// Get all remote names and their urls. + pub fn get_remotes(&self) -> Result> { + Ok(cmd_output!("git", @self.git_args(), "remote", "show")? + .lines() + .filter_map(|l| { + let remote = l.trim().to_owned(); + cmd_output!(GIT, @self.git_args(), "remote", "get-url", &remote) + .ok() + .map(|url| (remote, url)) + }) + .collect()) + } + + /// Get the default branch name of `remote`. 
+ pub fn get_default_branch_of(&self, remote: &str) -> Result { + cmd_output!(GIT, @self.git_args(), "symbolic-ref", format!("refs/remotes/{}/HEAD", remote))? + .rsplit('/') + .next() + .map(str::to_owned) + .ok_or_else(|| anyhow!("'git symbolic-ref' yielded invalid output")) + } + + /// Get the default branch of this repository's origin. + /// + /// Returns [`None`] if [`Self::origin`] returns [`None`]. + pub fn get_default_branch(&self) -> Result> { + if let Some(r) = self.origin() { + Ok(Some(self.get_default_branch_of(r)?)) + } else { + Ok(None) + } + } + + /// Query whether the work-tree is clean ignoring any untracked files and recursing + /// through all submodules. + pub fn is_clean(&self) -> Result { + Ok( + cmd_output!(GIT, @self.git_args(), "status", "-s", "-uno", "--ignore-submodules=untracked", "--ignored=no")? + .trim() + .is_empty() + ) + } + + /// Get a human readable name based on all available refs in the `refs/` namespace. + /// + /// Calls `git describe --all --exact-match`. + pub fn describe(&self) -> Result { + cmd_output!(GIT, @self.git_args(), "describe", "--all", "--exact-match") + } + + /// Clone the repository with the default options and return if the repository was modified. + pub fn clone(&mut self, url: &str) -> Result { + self.clone_ext(url, CloneOptions::default()) + } + + /// Clone the repository with `options` and return if the repository was modified. + pub fn clone_ext(&mut self, url: &str, options: CloneOptions) -> Result { + let (should_remove, should_clone, modified) = if !self.git_dir.exists() { + (self.worktree.exists(), true, true) + } else if let Some((remote, _)) = self + .get_remotes() + .ok() + .and_then(|r| r.into_iter().find(|(_, r_url)| r_url == url)) + { + let force_ref = if let Some(force_ref) = &options.force_ref { + force_ref.clone() + } else { + Ref::Branch(self.get_default_branch_of(&remote)?) + }; + self.remote_name = Some(remote); + + match force_ref { + Ref::Branch(b) => { + if self.describe()? == format!("heads/{}", b) + && (!options.force_clean || self.is_clean()?) + { + let modified = if let Some(reset_mode) = options.branch_update_action { + cmd!(GIT, @self.git_args(), "reset", reset_mode.to_string())?; + cmd!(GIT, @self.git_args(), "pull", "--ff-only")?; + true + } else { + false + }; + + (false, false, modified) + } else { + (true, true, true) + } + } + Ref::Tag(t) => { + if self.describe()? == format!("tags/{}", t) + && (!options.force_clean || self.is_clean()?) + { + (false, false, false) + } else { + (true, true, true) + } + } + Ref::Commit(c) => { + if cmd_output!(GIT, @self.git_args(), "rev-parse", "HEAD")? == c + && (!options.force_clean || self.is_clean()?) + { + (false, false, false) + } else { + (true, true, true) + } + } + } + } else { + (true, true, true) + }; + + if should_remove { + remove_dir_all(&self.worktree)?; + } + + if should_clone { + let (depth, branch) = match &options.force_ref { + None | Some(Ref::Commit(_)) => (None, None), + Some(Ref::Branch(s) | Ref::Tag(s)) => ( + options.depth.map(|i| ["--depth".to_owned(), i.to_string()]), + Some(["--branch", s]), + ), + }; + + let depth = depth.iter().flatten(); + let branch = branch.iter().flatten(); + + cmd!(GIT, "clone", "--recursive", @depth, @branch, &url, &self.worktree)?; + + if let Some(Ref::Commit(s)) = options.force_ref { + cmd!(GIT, @self.git_args(), "checkout", s)?; + } + self.remote_name = Some(String::from("origin")); + } + + Ok(modified) + } + + /// Apply all patches to this repository. 
+    pub fn apply(&self, patches: impl IntoIterator>) -> Result<()> {
+        cmd!(GIT, @self.git_args(), "apply"; args=(patches.into_iter()), current_dir=(&self.worktree))?;
+        Ok(())
+    }
+
+    /// Apply all patches to this repository only if they were not applied already.
+    ///
+    /// Uses [`is_applied`](Self::is_applied) to determine if the patches were already applied.
+    pub fn apply_once(
+        &self,
+        patches: impl Iterator> + Clone,
+    ) -> Result<()> {
+        if !self.is_applied(patches.clone())? {
+            self.apply(patches)?;
+        }
+        Ok(())
+    }
+
+    /// Whether all `patches` are already applied to this repository.
+    ///
+    /// This runs `git apply --check --reverse` with all `patches`; if that succeeds, git
+    /// could reverse all `patches` successfully, which implies that they are already
+    /// applied.
+    pub fn is_applied(&self, patches: impl IntoIterator>) -> Result {
+        Ok(cmd!(
+            GIT, @self.git_args(), "apply", "--check", "-R";
+            status,
+            args=(patches.into_iter()),
+            current_dir=(&self.worktree)
+        )?
+        .success())
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum ResetMode {
+    Soft,
+    Mixed,
+    Hard,
+    Merge,
+    Keep,
+}
+
+impl std::fmt::Display for ResetMode {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.write_str(match self {
+            Self::Soft => "--soft",
+            Self::Mixed => "--mixed",
+            Self::Hard => "--hard",
+            Self::Merge => "--merge",
+            Self::Keep => "--keep",
+        })
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum Ref {
+    Tag(String),
+    Branch(String),
+    Commit(String),
+}
+
+#[derive(Debug)]
+pub struct CloneOptions {
+    /// Force the working directory to be this specific tag, branch or commit.
+    ///
+    /// TODO: document what it does (i.e. commit mismatch, branch/tag mismatch).
+    pub force_ref: Option,
+    /// The mode that is passed to `git reset` when the branch is updated.
+    /// If `None`, the working directory is never updated when it is on a branch.
+    pub branch_update_action: Option,
+    /// If the working directory is not clean and `force_clean` is `true`, the git repo
+    /// will be cloned from scratch.
+    pub force_clean: bool,
+    /// The depth that should be cloned; if `None`, the full repository is cloned.
+ pub depth: Option, +} + +impl Default for CloneOptions { + fn default() -> Self { + Self { + force_ref: None, + branch_update_action: None, + force_clean: false, + depth: None, + } + } +} + +impl CloneOptions { + pub fn new() -> Self { + Self::default() + } + + pub fn force_ref(mut self, force_ref: Ref) -> Self { + self.force_ref = Some(force_ref); + self + } + + pub fn branch_update_action(mut self, reset_mode: ResetMode) -> Self { + self.branch_update_action = Some(reset_mode); + self + } + + pub fn force_clean(mut self) -> Self { + self.force_clean = true; + self + } + + pub fn depth(mut self, depth: u64) -> Self { + self.depth = Some(NonZeroU64::new(depth).expect("depth must be greater than zero")); + self + } +} diff --git a/src/kconfig.rs b/src/kconfig.rs index 5a39001..b599d0d 100644 --- a/src/kconfig.rs +++ b/src/kconfig.rs @@ -1,6 +1,7 @@ /// A quick and dirty parser for the .config files generated by kconfig systems like /// the ESP-IDF one use std::{ + collections::HashMap, convert::TryFrom, env, fmt::Display, @@ -80,6 +81,23 @@ impl TryFrom<&Path> for CfgArgs { } impl CfgArgs { + pub fn try_from_json(path: impl AsRef) -> Result { + let values: HashMap = + serde_json::from_reader(fs::File::open(path)?)?; + + let cfgs = values + .into_iter() + .filter_map(|(k, v)| match v { + serde_json::Value::Bool(true) => Some((k, Value::Tristate(Tristate::True))), + serde_json::Value::Bool(false) => Some((k, Value::Tristate(Tristate::False))), + serde_json::Value::String(value) => Some((k, Value::String(value))), + _ => None, + }) + .collect(); + + Ok(CfgArgs(cfgs)) + } + /// Add configuration options from the parsed kconfig output file. /// /// All options will consist of `_
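// Illustrative sketch, not part of the diff: how the pieces introduced here could fit
// together in a downstream build script, assuming the crate is named `embuild` and the
// re-exports shown in src/cmake.rs and src/cmake/file_api.rs. The target name "app" and
// client name "cargo" are made up, and cmake itself is assumed to run (e.g. via the
// re-exported `cmake::Config`) between creating the query and reading the replies.
use anyhow::{anyhow, Result};

fn generate_bindings(cmake_build_dir: &std::path::Path) -> Result<()> {
    use embuild::cmake::{ObjKind, Query};

    // Register interest in the codemodel object before cmake configures the project.
    let query = Query::new(cmake_build_dir, "cargo", &[ObjKind::Codemodel])?;

    // ... configure/build the CMake project here so the reply files get written ...

    // Pick a target from the first configuration of the codemodel reply.
    let target = query
        .get_replies()?
        .get_codemodel()?
        .into_first_conf()
        .get_target("app")
        .ok_or_else(|| anyhow!("target `app` not found"))??;

    // Use the target's first compile group (assumed here to be the C/C++ one) to
    // configure bindgen and generate the bindings into OUT_DIR.
    let builder = embuild::bindgen::Factory::from_cmake(&target.compile_groups[0])?.builder()?;
    embuild::bindgen::run(builder)
}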