rust: simplify derive macro usage

Laszlo Nagy
2025-12-13 00:37:52 +11:00
parent cc022c77b5
commit 56bcc0caab
9 changed files with 36 additions and 37 deletions
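
The recurring edit across these files is in the derive list itself: each `#[derive(...)]` names the serde macros by their full path, so the `use serde::{Deserialize, Serialize};` import can be dropped or trimmed to what the module still needs. A minimal sketch of the before/after pattern, using an illustrative struct rather than one from this repository (assumes serde with the derive feature enabled):

// Before: the derive macros must be imported into scope.
// use serde::{Deserialize, Serialize};
// #[derive(Debug, PartialEq, Deserialize, Serialize)]
// pub struct Example { pub name: String }

// After: fully-qualified paths in the derive list, no import required.
#[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct Example {
    pub name: String,
}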

View File

@@ -71,11 +71,11 @@ pub use types::*;
pub use validation::Validator;
mod types {
-use serde::{Deserialize, Serialize};
+use serde::Deserialize;
use std::path::PathBuf;
/// Represents the application configuration with flattened structure.
-#[derive(Debug, PartialEq, Deserialize, Serialize)]
+#[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct Main {
#[serde(deserialize_with = "validate_schema_version")]
pub schema: String,
@@ -105,7 +105,7 @@ mod types {
}
/// Simplified intercept configuration with mode and directory.
-#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
+#[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)]
#[serde(tag = "mode")]
pub enum Intercept {
#[serde(rename = "wrapper")]
@@ -147,7 +147,7 @@ mod types {
}
/// Represents compiler configuration matching the YAML format.
-#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
+#[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct Compiler {
pub path: PathBuf,
#[serde(rename = "as", skip_serializing_if = "Option::is_none")]
@@ -157,7 +157,7 @@ mod types {
}
/// Compiler types that we can recognize and configure
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
+#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "lowercase")]
pub enum CompilerType {
#[serde(alias = "gcc", alias = "gnu")]
@@ -186,7 +186,7 @@ mod types {
}
/// Action to take for files matching a directory rule
-#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[derive(Copy, Clone, Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "lowercase")]
pub enum DirectoryAction {
Include,
@@ -194,7 +194,7 @@ mod types {
}
/// A rule that specifies how to handle files within a directory
-#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[derive(Clone, Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct DirectoryRule {
pub path: PathBuf,
pub action: DirectoryAction,
@@ -217,7 +217,7 @@ mod types {
///
/// **Important**: For matching to work correctly, rule paths should use the same format as
/// configured in `format.paths.file`. This consistency is the user's responsibility.
-#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct SourceFilter {
#[serde(default = "default_enabled")]
pub only_existing_files: bool,
@@ -235,7 +235,7 @@ mod types {
}
/// Duplicate filter configuration matching the YAML format.
-#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct DuplicateFilter {
pub match_on: Vec<OutputFields>,
}
@@ -249,7 +249,7 @@ mod types {
}
/// Represent the fields of the JSON compilation database record.
-#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
+#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, serde::Deserialize, serde::Serialize)]
pub enum OutputFields {
#[serde(rename = "directory")]
Directory,
@@ -264,7 +264,7 @@ mod types {
}
/// Format configuration matching the YAML format.
-#[derive(Clone, Debug, Default, PartialEq, Deserialize, Serialize)]
+#[derive(Clone, Debug, Default, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct Format {
#[serde(default)]
pub paths: PathFormat,
@@ -273,7 +273,7 @@ mod types {
}
/// Format configuration of paths in the JSON compilation database.
-#[derive(Clone, Debug, Default, PartialEq, Deserialize, Serialize)]
+#[derive(Clone, Debug, Default, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct PathFormat {
#[serde(default)]
pub directory: PathResolver,
@@ -282,7 +282,7 @@ mod types {
}
/// Path resolver options matching the YAML format.
-#[derive(Clone, Debug, Default, PartialEq, Deserialize, Serialize)]
+#[derive(Copy, Clone, Debug, Default, PartialEq, serde::Deserialize, serde::Serialize)]
pub enum PathResolver {
/// Leave the path as is without any transformation. (Default)
#[default]
@@ -300,7 +300,7 @@ mod types {
}
/// Configuration for formatting output entries matching the YAML format.
-#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct EntryFormat {
#[serde(default = "default_enabled")]
pub use_array_format: bool,

View File

@@ -12,7 +12,7 @@ use super::Entry;
use crate::config;
use thiserror::Error;
-#[derive(Clone, Debug)]
+#[derive(Debug)]
pub struct DuplicateEntryFilter {
/// The fields to use for filtering duplicate entries.
fields: Vec<config::OutputFields>,
@@ -67,7 +67,7 @@ impl TryFrom<config::DuplicateFilter> for DuplicateEntryFilter {
let mut already_seen = HashSet::new();
for field in &config.match_on {
if !already_seen.insert(field) {
-return Err(ConfigurationError::DuplicateField(field.clone()));
+return Err(ConfigurationError::DuplicateField(*field));
}
}
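
The `*field` change works because `OutputFields` now derives `Copy`: dereferencing the borrowed field copies it, so no `.clone()` call is needed. (The `Clone` derive dropped from `DuplicateEntryFilter` relates to the writer change further down, where the filter is moved rather than cloned.) A small sketch of the idiom, with a simplified enum and error type standing in for the real ones:

use std::collections::HashSet;

// Stand-ins for config::OutputFields and ConfigurationError.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
enum Field {
    Directory,
    File,
}

#[derive(Debug)]
enum ConfigError {
    DuplicateField(Field),
}

fn check_unique(fields: &[Field]) -> Result<(), ConfigError> {
    let mut already_seen = HashSet::new();
    for field in fields {
        // `*field` copies the enum value out of the reference.
        if !already_seen.insert(*field) {
            return Err(ConfigError::DuplicateField(*field));
        }
    }
    Ok(())
}

fn main() {
    let fields = [Field::Directory, Field::File, Field::Directory];
    assert!(check_unique(&fields).is_err());
}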

View File

@@ -39,7 +39,7 @@ use thiserror::Error;
/// A filter that determines which compilation database entries should be included
/// based on source file paths and directory-based rules.
-#[derive(Clone, Debug)]
+#[derive(Debug)]
pub struct SourceEntryFilter {
/// The source filter configuration containing directory rules.
config: SourceFilter,

View File

@@ -16,7 +16,6 @@ mod filter_duplicates;
mod filter_sources;
mod format;
-use serde::{Deserialize, Serialize};
use shell_words;
use std::path;
use thiserror::Error;
@@ -28,7 +27,7 @@ pub use filter_sources::{SourceEntryFilter, SourceFilterError};
pub use format::{ConfigurablePathFormatter, FormatConfigurationError, FormatError, PathFormatter};
/// Represents an entry of the compilation database.
-#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct Entry {
/// The main translation unit source processed by this compilation step.
/// This is used by tools as the key into the compilation database.
@@ -175,7 +174,7 @@ mod tests {
assert!(entry.command.is_empty());
assert!(!entry.arguments.is_empty());
-assert!(entry.clone().validate().is_ok());
+assert!(entry.validate().is_ok());
}
#[test]
@@ -185,7 +184,7 @@ mod tests {
assert!(!entry.command.is_empty());
assert!(entry.arguments.is_empty());
-assert!(entry.clone().validate().is_ok());
+assert!(entry.validate().is_ok());
}
#[test]
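
With `Clone` no longer derived for `Entry`, the tests call `validate` on the entry directly; the extra `clone()` before validation added nothing to the assertion. The real `validate` signature is not visible in this diff, so the sketch below simply assumes a borrowing version:

#[derive(Debug, Eq, PartialEq)]
struct Entry {
    file: String,
    arguments: Vec<String>,
}

impl Entry {
    // Hypothetical, simplified check; only the call shape matters here.
    fn validate(&self) -> Result<(), String> {
        if self.file.is_empty() {
            return Err("entry has no source file".to_string());
        }
        if self.arguments.is_empty() {
            return Err("entry has no arguments".to_string());
        }
        Ok(())
    }
}

fn main() {
    let entry = Entry {
        file: "src/main.c".to_string(),
        arguments: vec!["gcc".to_string(), "-c".to_string()],
    };
    // No clone is needed before validating.
    assert!(entry.validate().is_ok());
}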

View File

@@ -50,9 +50,9 @@ impl TryFrom<(&args::BuildSemantic, &config::Main)> for OutputWriter {
let temp_path = &args.path.with_extension("tmp");
let base_writer = ClangOutputWriter::create(temp_path)?;
-let unique_writer = UniqueOutputWriter::create(base_writer, &config.duplicates)?;
+let unique_writer = UniqueOutputWriter::create(base_writer, config.duplicates.clone())?;
let source_filter_writer =
-SourceFilterOutputWriter::create(unique_writer, &config.sources)?;
+SourceFilterOutputWriter::create(unique_writer, config.sources.clone())?;
let atomic_writer =
AtomicClangOutputWriter::new(source_filter_writer, temp_path, final_path);
let append_writer = AppendClangOutputWriter::new(atomic_writer, final_path, args.append);
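
The writer constructors now take their filter configuration by value, so the one clone still required happens at this call site (where `config` is only borrowed as `&config::Main`) instead of inside `create`. A sketch of that ownership pattern with placeholder types:

// Placeholder types; the real ones are config::DuplicateFilter and the writers.
#[derive(Clone, Debug)]
struct FilterConfig {
    match_on: Vec<String>,
}

struct UniqueWriter {
    fields: Vec<String>,
}

impl UniqueWriter {
    // Taking the config by value lets the constructor move out of it
    // instead of cloning internally.
    fn create(config: FilterConfig) -> UniqueWriter {
        UniqueWriter {
            fields: config.match_on,
        }
    }
}

fn main() {
    let shared = FilterConfig {
        match_on: vec!["file".to_string()],
    };
    // The caller clones only when it still needs the original afterwards.
    let from_clone = UniqueWriter::create(shared.clone());
    let from_move = UniqueWriter::create(shared);
    assert_eq!(from_clone.fields, from_move.fields);
}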

View File

@@ -144,9 +144,9 @@ pub(super) struct UniqueOutputWriter<T: IteratorWriter<clang::Entry>> {
impl<T: IteratorWriter<clang::Entry>> UniqueOutputWriter<T> {
pub(super) fn create(
writer: T,
-config: &config::DuplicateFilter,
+config: config::DuplicateFilter,
) -> Result<Self, WriterCreationError> {
-let filter = clang::DuplicateEntryFilter::try_from(config.clone())
+let filter = clang::DuplicateEntryFilter::try_from(config)
.map_err(|err| WriterCreationError::Configuration(err.to_string()))?;
Ok(Self { writer, filter })
@@ -155,7 +155,7 @@ impl<T: IteratorWriter<clang::Entry>> UniqueOutputWriter<T> {
impl<T: IteratorWriter<clang::Entry>> IteratorWriter<clang::Entry> for UniqueOutputWriter<T> {
fn write(self, entries: impl Iterator<Item = clang::Entry>) -> Result<(), WriterError> {
-let mut filter = self.filter.clone();
+let mut filter = self.filter;
let filtered_entries = entries.filter(move |entry| filter.unique(entry));
self.writer.write(filtered_entries)
@@ -175,9 +175,9 @@ pub(super) struct SourceFilterOutputWriter<T: IteratorWriter<clang::Entry>> {
impl<T: IteratorWriter<clang::Entry>> SourceFilterOutputWriter<T> {
pub(super) fn create(
writer: T,
-config: &config::SourceFilter,
+config: config::SourceFilter,
) -> Result<Self, WriterCreationError> {
-let filter = clang::SourceEntryFilter::try_from(config.clone())
+let filter = clang::SourceEntryFilter::try_from(config)
.map_err(|err| WriterCreationError::Configuration(err.to_string()))?;
Ok(Self { writer, filter })
@@ -335,7 +335,7 @@ mod tests {
],
};
-let writer = SourceFilterOutputWriter::create(MockWriter, &config).unwrap();
+let writer = SourceFilterOutputWriter::create(MockWriter, config).unwrap();
let entries = vec![
clang::Entry::from_arguments_str("src/main.c", vec!["gcc", "-c"], "/project", None),
@@ -360,7 +360,7 @@ mod tests {
directories: vec![],
};
-let writer = SourceFilterOutputWriter::create(MockWriter, &config).unwrap();
+let writer = SourceFilterOutputWriter::create(MockWriter, config).unwrap();
let entries = vec![clang::Entry::from_arguments_str(
"any/file.c",
@@ -392,7 +392,7 @@ mod tests {
],
};
-let writer = SourceFilterOutputWriter::create(MockWriter, &config).unwrap();
+let writer = SourceFilterOutputWriter::create(MockWriter, config).unwrap();
let entries = vec![
clang::Entry::from_arguments_str("./src/main.c", vec!["gcc", "-c"], "/project", None),
@@ -435,9 +435,9 @@ mod tests {
// Build the writer pipeline: base -> unique -> source_filter
let base_writer = ClangOutputWriter::create(&output_path).unwrap();
-let unique_writer = UniqueOutputWriter::create(base_writer, &duplicate_config).unwrap();
+let unique_writer = UniqueOutputWriter::create(base_writer, duplicate_config).unwrap();
let source_filter_writer =
-SourceFilterOutputWriter::create(unique_writer, &source_config).unwrap();
+SourceFilterOutputWriter::create(unique_writer, source_config).unwrap();
// Test entries: some should be filtered, some should pass through
let entries = vec![
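
Because `write` consumes `self`, the filter can be moved out of the writer and made mutable in place; the previous `self.filter.clone()`, and the `Clone` derive on the filter that it required, are no longer needed. A minimal illustration, assuming a simplified filter that just tracks values it has already seen:

use std::collections::HashSet;

struct Filter {
    seen: HashSet<String>,
}

impl Filter {
    fn unique(&mut self, entry: &str) -> bool {
        self.seen.insert(entry.to_string())
    }
}

struct UniqueWriter {
    filter: Filter,
}

impl UniqueWriter {
    // `self` is taken by value, so `self.filter` is moved, not cloned.
    fn write(self, entries: impl Iterator<Item = String>) -> Vec<String> {
        let mut filter = self.filter;
        entries.filter(move |entry| filter.unique(entry)).collect()
    }
}

fn main() {
    let writer = UniqueWriter {
        filter: Filter { seen: HashSet::new() },
    };
    let entries = vec!["a.c".to_string(), "a.c".to_string(), "b.c".to_string()];
    assert_eq!(writer.write(entries.into_iter()).len(), 2);
}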

View File

@@ -36,7 +36,7 @@ impl OtherArguments {
impl Arguments for OtherArguments {
fn kind(&self) -> ArgumentKind {
-self.kind.clone()
+self.kind
}
fn as_arguments(&self, _path_updater: &dyn Fn(&Path) -> Cow<Path>) -> Vec<String> {

View File

@@ -123,7 +123,7 @@ pub trait Arguments: std::fmt::Debug {
/// - `Output`: An output file or related argument (e.g., `-o output.o`).
/// - `Other`: Any other argument not classified above (e.g., compiler switches like `-Wall`).
/// Can optionally specify which compiler pass the argument affects.
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ArgumentKind {
Compiler,
Source,
@@ -132,7 +132,7 @@ pub enum ArgumentKind {
}
/// Represents different compiler passes that an argument might affect.
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CompilerPass {
Info,
Preprocessing,
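
`Copy` can be derived on these enums because their variants carry at most other `Copy` data (for `ArgumentKind::Other`, presumably an optional `CompilerPass`, per the doc comment above), and it is what lets the `kind()` accessors in the surrounding files return `self.kind` by value instead of cloning it. A small sketch of the getter pattern, with simplified names:

#[derive(Clone, Copy, Debug, PartialEq)]
enum Pass {
    Preprocessing,
    Compilation,
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum Kind {
    Compiler,
    Source,
    Output,
    // Copy still works because Option<Pass> is itself Copy.
    Other(Option<Pass>),
}

struct Argument {
    kind: Kind,
}

impl Argument {
    // `Kind` is `Copy`, so returning `self.kind` copies the value;
    // no `.clone()` call and no borrow of `self` escapes the method.
    fn kind(&self) -> Kind {
        self.kind
    }
}

fn main() {
    let args = [
        Argument { kind: Kind::Compiler },
        Argument { kind: Kind::Source },
        Argument { kind: Kind::Output },
        Argument { kind: Kind::Other(Some(Pass::Preprocessing)) },
    ];
    assert_eq!(args[1].kind(), Kind::Source);
    assert_ne!(args[3].kind(), Kind::Other(Some(Pass::Compilation)));
}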

View File

@@ -27,7 +27,7 @@ impl TestArguments {
impl Arguments for TestArguments {
fn kind(&self) -> ArgumentKind {
-self.kind.clone()
+self.kind
}
fn as_arguments(&self, _path_updater: &dyn Fn(&Path) -> Cow<Path>) -> Vec<String> {