diff --git a/dev_docs/project_plan.md b/dev_docs/project_plan.md index 349e8b5..8f9ef48 100644 --- a/dev_docs/project_plan.md +++ b/dev_docs/project_plan.md @@ -256,6 +256,13 @@ This plan outlines the major development phases and tasks for building `dela`, a ## Phase 9: Increase Task Runner Coverage +- [ ] **Composed / Included Task Definitions** + - [x] [DTKT-197] Define shared recursive discovery primitives and allowlist/source-attribution handling for composed task definitions so Make, Taskfile, and Turborepo can reuse the same traversal semantics. + - [x] [DTKT-198] Support Makefile `include`, `-include`, and `sinclude` during task discovery, including recursively loading referenced makefiles and covering the behavior with unit and integration tests. These will be parsed using the lossless parser and the fallback regex parser. + - [x] [DTKT-199] Preserve source-file attribution for tasks discovered from included makefiles so `dela list`, allowlists, and MCP metadata point at the defining file, with tests covering duplicates and allowlist decisions from included files. + - [x] [DTKT-200] Support Taskfile `includes` so tasks from included Taskfiles are discovered and exposed by dela, with unit and integration coverage for recursive includes and duplicate task names. + - [x] [DTKT-201] Support Turborepo workspace-local `turbo.json` configs that extend the root config so inherited package tasks are discovered, with unit and integration coverage for recursive config resolution. 
+ - [ ] **Additional Task Runners support** - [x] [DTKT-119] Implement Maven `pom.xml` parser and task discovery - [x] [DTKT-120] Implement Gradle `build.gradle`/`build.gradle.kts` parser diff --git a/src/allowlist.rs b/src/allowlist.rs index 8c17f64..0d69639 100644 --- a/src/allowlist.rs +++ b/src/allowlist.rs @@ -59,19 +59,25 @@ fn path_matches(task_path: &Path, allowlist_path: &Path, allow_subdirs: bool) -> } } -/// Check if a given task is explicitly allowed or denied by the allowlist -/// Returns (explicitly_allowed, explicitly_denied) - both false means not found in allowlist -/// This is the core allowlist checking logic without any prompting or persistence -pub fn is_task_allowed(task: &Task) -> Result<(bool, bool), String> { - // Only proceed with allowlist operations if dela is initialized - let allowlist = load_allowlist()?; +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AllowlistMatch { + Allowed, + Denied, + NotFound, +} + +/// Evaluate a task against an already-loaded allowlist. +/// This is shared between the CLI and MCP surfaces so composed definitions use +/// identical path and precedence semantics everywhere. 
+pub fn evaluate_task_against_allowlist(task: &Task, allowlist: &Allowlist) -> AllowlistMatch { + let task_path = task.allowlist_path(); // First pass: Check for deny entries (highest precedence) for entry in &allowlist.entries { if let AllowScope::Deny = entry.scope - && path_matches(&task.file_path, &entry.path, true) + && path_matches(task_path, &entry.path, true) { - return Ok((false, true)); + return AllowlistMatch::Denied; } } @@ -79,32 +85,55 @@ pub fn is_task_allowed(task: &Task) -> Result<(bool, bool), String> { for entry in &allowlist.entries { match entry.scope { AllowScope::Directory => { - if path_matches(&task.file_path, &entry.path, true) { - return Ok((true, false)); + if path_matches(task_path, &entry.path, true) { + return AllowlistMatch::Allowed; } } AllowScope::File => { - if path_matches(&task.file_path, &entry.path, false) { - return Ok((true, false)); + if path_matches(task_path, &entry.path, false) { + return AllowlistMatch::Allowed; } } AllowScope::Task => { - if path_matches(&task.file_path, &entry.path, false) + if path_matches(task_path, &entry.path, false) && let Some(ref tasks) = entry.tasks && tasks.contains(&task.name) { - return Ok((true, false)); + return AllowlistMatch::Allowed; } } - AllowScope::Deny | AllowScope::Once => { - // Already handled deny in first pass, skip Once - continue; - } + AllowScope::Deny | AllowScope::Once => continue, } } - // If no matching entry found, return (false, false) - not found - Ok((false, false)) + AllowlistMatch::NotFound +} + +pub fn allowlist_entry_for_task(task: &Task, scope: AllowScope) -> AllowlistEntry { + let tasks = if scope == AllowScope::Task { + Some(vec![task.name.clone()]) + } else { + None + }; + + AllowlistEntry { + path: task.allowlist_path().to_path_buf(), + scope, + tasks, + } +} + +/// Check if a given task is explicitly allowed or denied by the allowlist +/// Returns (explicitly_allowed, explicitly_denied) - both false means not found in allowlist +/// This is the core 
allowlist checking logic without any prompting or persistence +pub fn is_task_allowed(task: &Task) -> Result<(bool, bool), String> { + // Only proceed with allowlist operations if dela is initialized + let allowlist = load_allowlist()?; + Ok(match evaluate_task_against_allowlist(task, &allowlist) { + AllowlistMatch::Allowed => (true, false), + AllowlistMatch::Denied => (false, true), + AllowlistMatch::NotFound => (false, false), + }) } /// Check if a given task is allowed, based on the loaded allowlist @@ -134,20 +163,10 @@ pub fn check_task_allowed(task: &Task) -> Result { Ok(true) } scope => { - // Create a new allowlist entry - let mut entry = AllowlistEntry { - path: task.file_path.clone(), - scope: scope.clone(), - tasks: None, - }; - - // For Task scope, add the specific task name - if scope == AllowScope::Task { - entry.tasks = Some(vec![task.name.clone()]); - } - // Add the entry and save - allowlist.entries.push(entry); + allowlist + .entries + .push(allowlist_entry_for_task(task, scope.clone())); save_allowlist(&allowlist)?; Ok(true) } @@ -155,11 +174,7 @@ pub fn check_task_allowed(task: &Task) -> Result { } AllowDecision::Deny => { // Add a deny entry and save - let entry = AllowlistEntry { - path: task.file_path.clone(), - scope: AllowScope::Deny, - tasks: None, - }; + let entry = allowlist_entry_for_task(task, AllowScope::Deny); allowlist.entries.push(entry); save_allowlist(&allowlist)?; Ok(false) @@ -172,20 +187,10 @@ pub fn check_task_allowed_with_scope(task: &Task, scope: AllowScope) -> Result Result<(), String> { "No tasks found in the current directory.".yellow() ))?; } else { - // Collect file paths for each runner for reference - let mut runner_files: HashMap = HashMap::new(); - for task in &discovered.tasks { - let runner_name = task.runner.short_name().to_string(); - runner_files.insert(runner_name, task.file_path.to_string_lossy().to_string()); - } - // Calculate max task name width across all runners let max_task_name_width = discovered 
.tasks @@ -261,25 +255,19 @@ pub fn execute(verbose: bool) -> Result<(), String> { None }; - // Get file path for this runner - let empty_string = String::new(); - let file_path = runner_files.get(&runner).unwrap_or(&empty_string); - - // For GitHub Actions, show the full relative path instead of just the filename - let display_path = if runner == "act" { - let path = std::path::Path::new(file_path); - if let Ok(relative_path) = path.strip_prefix(&current_dir) { - relative_path.to_string_lossy().to_string() - } else { - file_path.clone() - } + let runner_paths: HashSet<_> = + sorted_tasks.iter().map(|task| &task.file_path).collect(); + let display_path = if runner_paths.len() == 1 { + format_runner_path_for_display(&runner, &sorted_tasks[0].file_path, &current_dir) } else { - // For other runners, show just the filename - std::path::Path::new(file_path) - .file_name() - .map(|f| f.to_string_lossy().to_string()) - .unwrap_or_else(|| file_path.clone()) + "multiple files".to_string() }; + let show_task_sources = sorted_tasks + .iter() + .map(|task| task.definition_path().to_path_buf()) + .collect::<HashSet<_>>() + .len() + > 1; // Write section header let colored_runner = if tool_not_installed { @@ -315,6 +303,11 @@ pub fn execute(verbose: bool) -> Result<(), String> { // Format the task entry let formatted_task = format_task_entry(task, is_ambiguous, display_width); + let source_label = show_task_sources.then(|| { + format_definition_path_for_display(task.definition_path(), &current_dir) + }); + let formatted_task = + format_task_entry_with_source(formatted_task, source_label.as_deref()); write_line(&format!(" {}", formatted_task))?; } } @@ -449,6 +442,37 @@ fn format_task_entry(task: &Task, is_ambiguous: bool, name_width: usize) -> Stri format!("{} {}", padded_name, colored_description) } +fn format_task_entry_with_source(formatted_task: String, source_label: Option<&str>) -> String { + match source_label { + Some(source_label) if !source_label.is_empty() => { + format!( + "{} {}", + 
formatted_task, + format!("[{}]", source_label).dimmed() + ) + } + _ => formatted_task, + } +} + +fn format_definition_path_for_display(path: &Path, current_dir: &Path) -> String { + if let Ok(relative_path) = path.strip_prefix(current_dir) { + relative_path.to_string_lossy().to_string() + } else { + path.to_string_lossy().to_string() + } +} + +fn format_runner_path_for_display(runner: &str, path: &Path, current_dir: &Path) -> String { + if runner == "act" { + format_definition_path_for_display(path, current_dir) + } else { + path.file_name() + .map(|file_name| file_name.to_string_lossy().to_string()) + .unwrap_or_else(|| format_definition_path_for_display(path, current_dir)) + } +} + #[cfg(test)] mod tests { use super::*; @@ -457,7 +481,7 @@ mod tests { use serial_test::serial; use std::fs::{self, File}; use std::io::{self, Write}; - use std::path::PathBuf; + use std::path::{Path, PathBuf}; use tempfile::TempDir; // Custom writer for testing @@ -490,6 +514,7 @@ mod tests { Task { name: name.to_string(), file_path, + definition_path: None, definition_type: match runner { TaskRunner::Make => TaskDefinitionType::Makefile, TaskRunner::NodeNpm @@ -737,6 +762,7 @@ mod tests { let task = Task { name: "build".to_string(), file_path: makefile_path, + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "build".to_string(), @@ -888,6 +914,7 @@ mod tests { let task = Task { name: "integration".to_string(), file_path: PathBuf::from(".github/workflows"), + definition_path: None, definition_type: TaskDefinitionType::GitHubActions, runner: TaskRunner::Act, source_name: "integration".to_string(), @@ -951,4 +978,42 @@ mod tests { "Should show task description" ); } + + #[test] + fn test_runner_path_display_uses_execution_context_for_composed_tasks() { + let current_dir = Path::new("/project"); + let runner_path = Path::new("/project/Makefile"); + let definition_path = Path::new("/project/mk/common.mk"); + + assert_eq!( + 
format_runner_path_for_display("make", runner_path, current_dir), + "Makefile" + ); + assert_eq!( + format_definition_path_for_display(definition_path, current_dir), + "mk/common.mk" + ); + } + + #[test] + fn test_task_entry_source_suffix_uses_definition_path_for_composed_tasks() { + let task = Task { + name: "included-task".to_string(), + file_path: PathBuf::from("/project/Makefile"), + definition_path: Some(PathBuf::from("/project/mk/common.mk")), + definition_type: TaskDefinitionType::Makefile, + runner: TaskRunner::Make, + source_name: "included-task".to_string(), + description: Some("Included task".to_string()), + shadowed_by: None, + disambiguated_name: None, + }; + + let formatted = format_task_entry(&task, false, 18); + let formatted = format_task_entry_with_source(formatted, Some("mk/common.mk")); + + assert!(formatted.contains("included-task")); + assert!(formatted.contains("Included task")); + assert!(formatted.contains("[mk/common.mk]")); + } } diff --git a/src/composed_paths.rs b/src/composed_paths.rs new file mode 100644 index 0000000..b7d90dd --- /dev/null +++ b/src/composed_paths.rs @@ -0,0 +1,218 @@ +use crate::types::Task; +use std::collections::HashSet; +use std::ffi::OsString; +use std::path::{Component, Path, PathBuf}; + +/// Shared path metadata for tasks discovered through composed definitions such +/// as includes or inherited configs. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ComposedDefinitionSource { + runner_path: PathBuf, + definition_path: PathBuf, +} + +impl ComposedDefinitionSource { + /// Create a source where the runner path and defining file are the same. + pub fn direct(path: impl Into<PathBuf>) -> Self { + let path = normalize_path(path.into()); + Self { + runner_path: path.clone(), + definition_path: path, + } + } + + /// Create a source where the runner path differs from the defining file. 
+ pub fn composed(runner_path: impl Into<PathBuf>, definition_path: impl Into<PathBuf>) -> Self { + Self { + runner_path: normalize_path(runner_path.into()), + definition_path: normalize_path(definition_path.into()), + } + } + + pub fn definition_path(&self) -> &Path { + &self.definition_path + } + + /// Resolve a referenced child definition relative to the current defining file. + pub fn resolve_child(&self, referenced_path: impl AsRef<Path>) -> PathBuf { + resolve_nested_definition_path(&self.definition_path, referenced_path.as_ref()) + } + + /// Apply the source metadata to a discovered task. + pub fn apply_to_task(&self, task: &mut Task) { + task.file_path = self.runner_path.clone(); + if self.runner_path == self.definition_path { + task.definition_path = None; + } else { + task.definition_path = Some(self.definition_path.clone()); + } + } +} + +/// Track visited definitions for recursive discovery so include cycles can be +/// handled consistently across runners. +#[derive(Debug, Clone, Default)] +pub struct RecursiveDiscoveryState { + visited: HashSet<PathBuf>, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum VisitState { + New(PathBuf), + AlreadyVisited(PathBuf), +} + +impl RecursiveDiscoveryState { + pub fn new() -> Self { + Self::default() + } + + pub fn mark_visited(&mut self, path: impl AsRef<Path>) -> VisitState { + let normalized = normalize_path(path.as_ref()); + if self.visited.insert(normalized.clone()) { + VisitState::New(normalized) + } else { + VisitState::AlreadyVisited(normalized) + } + } +} + +/// Resolve a referenced definition path relative to the file that included it. 
+pub fn resolve_nested_definition_path( + current_definition_path: &Path, + referenced_path: &Path, +) -> PathBuf { + if referenced_path.is_absolute() { + return normalize_path(referenced_path); + } + + let base_dir = current_definition_path.parent().unwrap_or(Path::new(".")); + normalize_path(base_dir.join(referenced_path)) +} + +fn normalize_path(path: impl AsRef<Path>) -> PathBuf { + let path = path.as_ref(); + let mut normalized_parts: Vec<OsString> = Vec::new(); + let is_absolute = path.is_absolute(); + + for component in path.components() { + match component { + Component::RootDir => {} + Component::CurDir => {} + Component::ParentDir => { + if let Some(last) = normalized_parts.last() { + if last != ".." { + normalized_parts.pop(); + } else if !is_absolute { + normalized_parts.push(OsString::from("..")); + } + } else if !is_absolute { + normalized_parts.push(OsString::from("..")); + } + } + Component::Normal(part) => normalized_parts.push(part.to_os_string()), + Component::Prefix(prefix) => normalized_parts.push(prefix.as_os_str().to_os_string()), + } + } + + let mut normalized = if is_absolute { + PathBuf::from(std::path::MAIN_SEPARATOR.to_string()) + } else { + PathBuf::new() + }; + + for part in normalized_parts { + normalized.push(part); + } + + if normalized.as_os_str().is_empty() && !is_absolute { + PathBuf::from(".") + } else { + normalized + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::types::{Task, TaskDefinitionType, TaskRunner}; + use std::path::PathBuf; + + fn sample_task() -> Task { + Task { + name: "build".to_string(), + file_path: PathBuf::from("/tmp/runner"), + definition_path: None, + definition_type: TaskDefinitionType::Makefile, + runner: TaskRunner::Make, + source_name: "build".to_string(), + description: None, + shadowed_by: None, + disambiguated_name: None, + } + } + + #[test] + fn test_direct_source_uses_same_path_for_runner_and_definition() { + let source = ComposedDefinitionSource::direct("/repo/Makefile"); + + 
assert_eq!(source.runner_path, PathBuf::from("/repo/Makefile")); + assert_eq!(source.definition_path(), Path::new("/repo/Makefile")); + } + + #[test] + fn test_composed_source_applies_runner_and_definition_paths_to_task() { + let source = ComposedDefinitionSource::composed( + "/repo/.github/workflows", + "/repo/.github/workflows/ci.yml", + ); + let mut task = sample_task(); + + source.apply_to_task(&mut task); + + assert_eq!(task.file_path, PathBuf::from("/repo/.github/workflows")); + assert_eq!( + task.definition_path(), + Path::new("/repo/.github/workflows/ci.yml") + ); + assert_eq!( + task.allowlist_path(), + Path::new("/repo/.github/workflows/ci.yml") + ); + } + + #[test] + fn test_resolve_nested_definition_path_normalizes_relative_segments() { + let resolved = resolve_nested_definition_path( + Path::new("/repo/make/Makefile"), + Path::new("../shared/tasks.mk"), + ); + + assert_eq!(resolved, PathBuf::from("/repo/shared/tasks.mk")); + } + + #[test] + fn test_recursive_discovery_state_detects_duplicate_paths_after_normalization() { + let mut state = RecursiveDiscoveryState::new(); + + assert_eq!( + state.mark_visited("/repo/make/../shared/tasks.mk"), + VisitState::New(PathBuf::from("/repo/shared/tasks.mk")) + ); + assert_eq!( + state.mark_visited("/repo/shared/tasks.mk"), + VisitState::AlreadyVisited(PathBuf::from("/repo/shared/tasks.mk")) + ); + } + + #[test] + fn test_composed_source_resolves_children_relative_to_definition_file() { + let source = + ComposedDefinitionSource::composed("/repo/Makefile", "/repo/includes/common/tasks.mk"); + + assert_eq!( + source.resolve_child("../../other.mk"), + PathBuf::from("/repo/other.mk") + ); + } +} diff --git a/src/lib.rs b/src/lib.rs index 4546996..f46c866 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -4,6 +4,7 @@ pub mod allowlist; pub mod builtins; pub mod colors; pub mod commands; +pub mod composed_paths; pub mod config; pub mod environment; pub mod mcp; diff --git a/src/main.rs b/src/main.rs index 15ea2cb..e1a8ea4 
100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,6 +3,7 @@ use clap::{Parser, Subcommand}; mod allowlist; mod builtins; mod commands; +mod composed_paths; mod config; mod environment; mod mcp; diff --git a/src/mcp/allowlist.rs b/src/mcp/allowlist.rs index fd3d25b..0074d26 100644 --- a/src/mcp/allowlist.rs +++ b/src/mcp/allowlist.rs @@ -1,6 +1,7 @@ -use crate::allowlist::load_allowlist; -use crate::types::{AllowScope, Allowlist, Task}; -use std::path::Path; +use crate::allowlist::{AllowlistMatch, evaluate_task_against_allowlist, load_allowlist}; +#[cfg(test)] +use crate::types::AllowScope; +use crate::types::{Allowlist, Task}; /// MCP-specific allowlist evaluator for task permissions /// @@ -33,54 +34,10 @@ impl McpAllowlistEvaluator { /// 4. Task scope allow entries /// 5. Not found in allowlist (deny by default for MCP) pub fn is_task_allowed(&self, task: &Task) -> Result { - // First pass: Check for deny entries (highest precedence) - for entry in &self.allowlist.entries { - if let AllowScope::Deny = entry.scope - && self.path_matches(&task.file_path, &entry.path, true) - { - return Ok(false); - } - } - - // Second pass: Check for allow entries - for entry in &self.allowlist.entries { - match entry.scope { - AllowScope::Directory => { - if self.path_matches(&task.file_path, &entry.path, true) { - return Ok(true); - } - } - AllowScope::File => { - if self.path_matches(&task.file_path, &entry.path, false) { - return Ok(true); - } - } - AllowScope::Task => { - if self.path_matches(&task.file_path, &entry.path, false) - && let Some(ref tasks) = entry.tasks - && tasks.contains(&task.name) - { - return Ok(true); - } - } - AllowScope::Deny | AllowScope::Once => { - // Already handled deny in first pass, skip Once (not applicable for MCP) - continue; - } - } - } - - // If no matching entry found, deny by default for MCP - Ok(false) - } - - /// Check if two paths match, considering directory scope - fn path_matches(&self, task_path: &Path, allowlist_path: &Path, 
allow_subdirs: bool) -> bool { - if allow_subdirs { - task_path.starts_with(allowlist_path) - } else { - task_path == allowlist_path - } + Ok(matches!( + evaluate_task_against_allowlist(task, &self.allowlist), + AllowlistMatch::Allowed + )) } /// Get the number of entries in the allowlist @@ -111,6 +68,7 @@ mod tests { Task { name: name.to_string(), file_path, + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: name.to_string(), diff --git a/src/mcp/dto.rs b/src/mcp/dto.rs index 06bff81..06a462a 100644 --- a/src/mcp/dto.rs +++ b/src/mcp/dto.rs @@ -56,7 +56,7 @@ impl TaskDto { command: task.runner.get_command(task), runner_available: is_runner_available(&task.runner), allowlisted: false, // Default to false for legacy method - file_path: task.file_path.to_string_lossy().to_string(), + file_path: task.definition_path().to_string_lossy().to_string(), description: task.description.clone(), } } @@ -78,7 +78,7 @@ impl TaskDto { command: task.runner.get_command(task), runner_available: is_runner_available_for_mcp(&task.runner), allowlisted: allowlist_evaluator.is_task_allowed(task).unwrap_or(false), - file_path: task.file_path.to_string_lossy().to_string(), + file_path: task.definition_path().to_string_lossy().to_string(), description: task.description.clone(), } } @@ -104,6 +104,7 @@ mod tests { let task = Task { name: "build".to_string(), file_path: PathBuf::from("/project/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "build".to_string(), @@ -131,6 +132,7 @@ mod tests { let task = Task { name: "test".to_string(), file_path: PathBuf::from("/project/package.json"), + definition_path: None, definition_type: TaskDefinitionType::PackageJson, runner: TaskRunner::NodeNpm, source_name: "test".to_string(), @@ -157,6 +159,7 @@ mod tests { let task = Task { name: "clean".to_string(), file_path: PathBuf::from("/project/Makefile"), + 
definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "clean".to_string(), @@ -203,6 +206,7 @@ mod tests { let task = Task { name: "build".to_string(), file_path: PathBuf::from("/project/file"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: runner.clone(), source_name: "build".to_string(), @@ -237,6 +241,7 @@ mod tests { let task = Task { name: "serve".to_string(), file_path: PathBuf::from("/home/user/projects/my-app/package.json"), + definition_path: None, definition_type: TaskDefinitionType::PackageJson, runner: TaskRunner::NodeNpm, source_name: "serve".to_string(), @@ -260,12 +265,33 @@ mod tests { ); } + #[test] + fn test_taskdto_uses_definition_path_for_composed_make_task() { + let task = Task { + name: "included-task".to_string(), + file_path: PathBuf::from("/project/Makefile"), + definition_path: Some(PathBuf::from("/project/mk/common.mk")), + definition_type: TaskDefinitionType::Makefile, + runner: TaskRunner::Make, + source_name: "included-task".to_string(), + description: Some("Included task".to_string()), + shadowed_by: None, + disambiguated_name: None, + }; + + let dto = TaskDto::from_task(&task); + + assert_eq!(dto.command, "make included-task"); + assert_eq!(dto.file_path, "/project/mk/common.mk"); + } + #[test] fn test_taskdto_serialization() { // Arrange let task = Task { name: "test".to_string(), file_path: PathBuf::from("/project/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "test".to_string(), @@ -291,6 +317,7 @@ mod tests { Task { name: "test".to_string(), file_path: PathBuf::from("/project/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "test".to_string(), @@ -301,6 +328,7 @@ mod tests { Task { name: "test".to_string(), file_path: PathBuf::from("/project/package.json"), + definition_path: None, 
definition_type: TaskDefinitionType::PackageJson, runner: TaskRunner::NodeNpm, source_name: "test".to_string(), @@ -381,6 +409,7 @@ mod tests { let task = Task { name: "build".to_string(), file_path: PathBuf::from("/project/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "build".to_string(), @@ -441,6 +470,7 @@ mod tests { let task = Task { name: task_name.to_string(), file_path: PathBuf::from("/project/file"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: runner.clone(), source_name: task_name.to_string(), @@ -482,6 +512,7 @@ mod tests { let task = Task { name: task_name.to_string(), file_path: PathBuf::from("/project/docker-compose.yml"), + definition_path: None, definition_type: TaskDefinitionType::DockerCompose, runner: TaskRunner::DockerCompose, source_name: task_name.to_string(), @@ -510,6 +541,7 @@ mod tests { let task = Task { name: "test".to_string(), file_path: PathBuf::from("/project/.travis.yml"), + definition_path: None, definition_type: TaskDefinitionType::TravisCi, runner: TaskRunner::TravisCi, source_name: "test".to_string(), @@ -543,6 +575,7 @@ mod tests { let task = Task { name: "build-all".to_string(), file_path: PathBuf::from("/project/CMakeLists.txt"), + definition_path: None, definition_type: TaskDefinitionType::CMake, runner: TaskRunner::CMake, source_name: "build-all".to_string(), diff --git a/src/mcp/server.rs b/src/mcp/server.rs index 093837d..e16dc92 100644 --- a/src/mcp/server.rs +++ b/src/mcp/server.rs @@ -774,7 +774,7 @@ impl DelaMcpServer { env: args.env.clone(), cwd: args.cwd.as_ref().map(PathBuf::from), command: task.runner.get_command(task), - file_path: task.file_path.clone(), + file_path: task.definition_path().to_path_buf(), }; let exit_state = JobState::Exited(exit_code.unwrap_or(-1)); @@ -831,7 +831,7 @@ impl DelaMcpServer { env: args.env.clone(), cwd: args.cwd.as_ref().map(PathBuf::from), command: 
task.runner.get_command(task), - file_path: task.file_path.clone(), + file_path: task.definition_path().to_path_buf(), }; // Start background job management diff --git a/src/parsers/parse_cmake.rs b/src/parsers/parse_cmake.rs index 6be11cf..41e9c95 100644 --- a/src/parsers/parse_cmake.rs +++ b/src/parsers/parse_cmake.rs @@ -64,6 +64,7 @@ fn parse_cmake_string(content: &str, file_path: &Path) -> Result, Stri let task = Task { name: target_name.to_string(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::CMake, runner: TaskRunner::CMake, source_name: target_name.to_string(), diff --git a/src/parsers/parse_docker_compose.rs b/src/parsers/parse_docker_compose.rs index a6ef125..0ecde5c 100644 --- a/src/parsers/parse_docker_compose.rs +++ b/src/parsers/parse_docker_compose.rs @@ -49,6 +49,7 @@ pub fn parse(path: &PathBuf) -> Result, String> { tasks.push(Task { name: "up".to_string(), file_path: path.clone(), + definition_path: None, definition_type: TaskDefinitionType::DockerCompose, runner: TaskRunner::DockerCompose, source_name: "up".to_string(), @@ -61,6 +62,7 @@ pub fn parse(path: &PathBuf) -> Result, String> { tasks.push(Task { name: "down".to_string(), file_path: path.clone(), + definition_path: None, definition_type: TaskDefinitionType::DockerCompose, runner: TaskRunner::DockerCompose, source_name: "down".to_string(), @@ -82,6 +84,7 @@ pub fn parse(path: &PathBuf) -> Result, String> { tasks.push(Task { name: service_name.clone(), file_path: path.clone(), + definition_path: None, definition_type: TaskDefinitionType::DockerCompose, runner: TaskRunner::DockerCompose, source_name: service_name, diff --git a/src/parsers/parse_github_actions.rs b/src/parsers/parse_github_actions.rs index 15fec04..9510246 100644 --- a/src/parsers/parse_github_actions.rs +++ b/src/parsers/parse_github_actions.rs @@ -65,6 +65,7 @@ fn parse_workflow_string(content: &str, file_path: &Path) -> Result, S let task = Task { name: 
task_name.clone(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::GitHubActions, runner: TaskRunner::Act, source_name: task_name, // Source name is the same as the task name (entire workflow) diff --git a/src/parsers/parse_gradle.rs b/src/parsers/parse_gradle.rs index 3124e51..62c1ba8 100644 --- a/src/parsers/parse_gradle.rs +++ b/src/parsers/parse_gradle.rs @@ -48,6 +48,7 @@ fn add_common_tasks(tasks: &mut Vec, file_path: &Path) { tasks.push(Task { name: task_name.to_string(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::Gradle, runner: TaskRunner::Gradle, source_name: task_name.to_string(), @@ -82,6 +83,7 @@ fn extract_custom_tasks( tasks.push(Task { name: task_name.as_str().to_string(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::Gradle, runner: TaskRunner::Gradle, source_name: task_name.as_str().to_string(), @@ -98,6 +100,7 @@ fn extract_custom_tasks( tasks.push(Task { name: task_name.as_str().to_string(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::Gradle, runner: TaskRunner::Gradle, source_name: task_name.as_str().to_string(), @@ -114,6 +117,7 @@ fn extract_custom_tasks( tasks.push(Task { name: task_name.as_str().to_string(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::Gradle, runner: TaskRunner::Gradle, source_name: task_name.as_str().to_string(), @@ -226,6 +230,7 @@ fn extract_plugin_tasks( tasks.push(Task { name: task_name.to_string(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::Gradle, runner: TaskRunner::Gradle, source_name: task_name.to_string(), diff --git a/src/parsers/parse_justfile.rs b/src/parsers/parse_justfile.rs index ad66769..ce2f7a3 100644 --- a/src/parsers/parse_justfile.rs +++ b/src/parsers/parse_justfile.rs @@ -45,6 +45,7 @@ 
pub fn parse(path: &PathBuf) -> Result<Vec<Task>, String> { tasks.push(Task { name: task_name.clone(), file_path: path.clone(), + definition_path: None, definition_type: TaskDefinitionType::Justfile, runner: TaskRunner::Just, source_name: task_name, diff --git a/src/parsers/parse_makefile.rs b/src/parsers/parse_makefile.rs index 347ac4b..573fdad 100644 --- a/src/parsers/parse_makefile.rs +++ b/src/parsers/parse_makefile.rs @@ -3,6 +3,13 @@ use makefile_lossless::Makefile; use regex::Regex; use std::collections::HashMap; use std::path::Path; +use std::path::PathBuf; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct MakefileInclude { + pub path: PathBuf, + pub optional: bool, +} /// Parse a Makefile at the given path and extract tasks pub fn parse(path: &Path) -> Result<Vec<Task>, String> { @@ -71,6 +78,7 @@ fn extract_tasks(makefile: &Makefile, path: &Path) -> Result<Vec<Task>, String> Task { name: name.clone(), file_path: path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: name, @@ -86,6 +94,135 @@ fn extract_tasks(makefile: &Makefile, path: &Path) -> Result<Vec<Task>, String> Ok(tasks_map.into_values().collect()) } +/// Extract Makefile include directives from a file. 
+pub fn extract_include_directives(path: &Path) -> Result<Vec<MakefileInclude>, String> { + let content = + std::fs::read_to_string(path).map_err(|e| format!("Failed to read Makefile: {}", e))?; + Ok(extract_include_directives_from_str(&content)) +} + +fn extract_include_directives_from_str(content: &str) -> Vec<MakefileInclude> { + let normalized = collapse_line_continuations(content); + let mut includes = Vec::new(); + + for line in normalized.lines() { + if line.starts_with('\t') { + continue; + } + + let trimmed = line.trim_start(); + if trimmed.is_empty() || trimmed.starts_with('#') { + continue; + } + + let (optional, remainder) = if let Some(rest) = trimmed.strip_prefix("include") { + (false, rest) + } else if let Some(rest) = trimmed.strip_prefix("-include") { + (true, rest) + } else if let Some(rest) = trimmed.strip_prefix("sinclude") { + (true, rest) + } else { + continue; + }; + + if !remainder.chars().next().is_some_and(char::is_whitespace) { + continue; + } + + let comment_stripped = strip_trailing_comment(remainder).trim(); + if comment_stripped.is_empty() { + continue; + } + + let path_tokens = shell_words::split(comment_stripped) + .unwrap_or_else(|_| split_include_fallback(comment_stripped)); + + for token in path_tokens { + if token.is_empty() || contains_dynamic_make_syntax(&token) { + continue; + } + + includes.push(MakefileInclude { + path: PathBuf::from(token), + optional, + }); + } + } + + includes +} + +fn collapse_line_continuations(content: &str) -> String { + let mut collapsed = String::new(); + let mut current = String::new(); + let mut continuing = false; + + for line in content.lines() { + let trimmed_end = line.trim_end(); + let continues = trimmed_end.ends_with('\\'); + let segment = if continues { + trimmed_end.trim_end_matches('\\').trim_end() + } else { + trimmed_end + }; + + if !current.is_empty() { + current.push(' '); + } + if continuing { + current.push_str(segment.trim_start()); + } else { + current.push_str(segment); + } + + if continues { + continuing = true; + 
continue; + } + + collapsed.push_str(&current); + collapsed.push('\n'); + current.clear(); + continuing = false; + } + + if !current.is_empty() { + collapsed.push_str(&current); + } + + collapsed +} + +fn strip_trailing_comment(input: &str) -> &str { + let mut in_single = false; + let mut in_double = false; + let mut escape = false; + + for (idx, ch) in input.char_indices() { + match ch { + '\\' if !escape => { + escape = true; + continue; + } + '\'' if !escape && !in_double => in_single = !in_single, + '"' if !escape && !in_single => in_double = !in_double, + '#' if !escape && !in_single && !in_double => return &input[..idx], + _ => {} + } + escape = false; + } + + input +} + +fn split_include_fallback(input: &str) -> Vec<String> { + input.split_whitespace().map(str::to_string).collect() +} + +fn contains_dynamic_make_syntax(token: &str) -> bool { + token.contains('$') || token.contains('*') || token.contains('?') || token.contains('[') +} + /// Extract tasks using regex as a fallback method when standard parsing fails fn extract_tasks_regex(content: &str, path: &Path) -> Result<Vec<Task>, String> { + let mut tasks_map: HashMap<String, Task> = HashMap::new(); @@ -133,6 +270,7 @@ fn extract_tasks_regex(content: &str, path: &Path) -> Result<Vec<Task>, String> Task { name: name.clone(), file_path: path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: name, @@ -614,4 +752,42 @@ OTHER_VAR ::= value assert_eq!(tasks.len(), 0); } + + #[test] + fn test_extract_include_directives() { + let content = r#" +include common.mk more.mk
-include .env
sinclude config/local.mk
"#; + + let includes = extract_include_directives_from_str(content); + + assert_eq!(includes.len(), 4); + assert_eq!(includes[0].path, PathBuf::from("common.mk")); + assert!(!includes[0].optional); + assert_eq!(includes[1].path, PathBuf::from("more.mk")); + assert!(!includes[1].optional); + assert_eq!(includes[2].path, PathBuf::from(".env")); + assert!(includes[2].optional); + 
assert_eq!(includes[3].path, PathBuf::from("config/local.mk")); + assert!(includes[3].optional); + } + + #[test] + fn test_extract_include_directives_ignores_recipes_and_dynamic_paths() { + let content = r#" +include first.mk \ + second.mk +include $(GENERATED_MAKEFILES) *.mk +build: + @include from-recipe.mk +"#; + + let includes = extract_include_directives_from_str(content); + + assert_eq!(includes.len(), 2); + assert_eq!(includes[0].path, PathBuf::from("first.mk")); + assert_eq!(includes[1].path, PathBuf::from("second.mk")); + } } diff --git a/src/parsers/parse_package_json.rs b/src/parsers/parse_package_json.rs index 6b848bf..d43f1a4 100644 --- a/src/parsers/parse_package_json.rs +++ b/src/parsers/parse_package_json.rs @@ -27,6 +27,7 @@ pub fn parse(path: &PathBuf) -> Result, String> { tasks.push(Task { name: name.clone(), file_path: path.clone(), + definition_path: None, definition_type: TaskDefinitionType::PackageJson, runner: runner.clone(), source_name: name.clone(), diff --git a/src/parsers/parse_pom_xml.rs b/src/parsers/parse_pom_xml.rs index cc56e41..92270b7 100644 --- a/src/parsers/parse_pom_xml.rs +++ b/src/parsers/parse_pom_xml.rs @@ -41,6 +41,7 @@ fn add_default_maven_goals(tasks: &mut Vec, file_path: &Path) { tasks.push(Task { name: goal.to_string(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::MavenPom, runner: TaskRunner::Maven, source_name: goal.to_string(), @@ -67,6 +68,7 @@ fn add_profile_tasks(tasks: &mut Vec, root: Node, file_path: &Path) -> Res tasks.push(Task { name: format!("profile:{}", profile_id), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::MavenPom, runner: TaskRunner::Maven, source_name: profile_id.clone(), @@ -121,6 +123,7 @@ fn add_plugin_tasks(tasks: &mut Vec, root: Node, file_path: &Path) -> Resu tasks.push(Task { name: task_name.clone(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: 
TaskDefinitionType::MavenPom, runner: TaskRunner::Maven, source_name: task_name, diff --git a/src/parsers/parse_pyproject_toml.rs b/src/parsers/parse_pyproject_toml.rs index c64d2f5..b6368a8 100644 --- a/src/parsers/parse_pyproject_toml.rs +++ b/src/parsers/parse_pyproject_toml.rs @@ -24,6 +24,7 @@ pub fn parse(path: &Path) -> Result, String> { tasks.push(Task { name: name.clone(), file_path: path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::PyprojectToml, runner: TaskRunner::PythonUv, source_name: name.clone(), @@ -57,6 +58,7 @@ pub fn parse(path: &Path) -> Result, String> { tasks.push(Task { name: name.clone(), file_path: path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::PyprojectToml, runner: TaskRunner::PythonPoetry, source_name: name.clone(), @@ -98,6 +100,7 @@ pub fn parse(path: &Path) -> Result, String> { tasks.push(Task { name: name.clone(), file_path: path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::PyprojectToml, runner: TaskRunner::PythonPoe, source_name: name.clone(), diff --git a/src/parsers/parse_taskfile.rs b/src/parsers/parse_taskfile.rs index 2fa3387..a3296f7 100644 --- a/src/parsers/parse_taskfile.rs +++ b/src/parsers/parse_taskfile.rs @@ -1,7 +1,20 @@ use crate::types::{Task, TaskDefinitionType, TaskRunner}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; + +pub const SUPPORTED_TASKFILE_NAMES: [&str; 8] = [ + "Taskfile.yml", + "taskfile.yml", + "Taskfile.yaml", + "taskfile.yaml", + "Taskfile.dist.yml", + "taskfile.dist.yml", + "Taskfile.dist.yaml", + "taskfile.dist.yaml", +]; + +const DEFAULT_TASKFILE_NAME: &str = SUPPORTED_TASKFILE_NAMES[0]; #[derive(Debug, Serialize, Deserialize)] #[serde(untagged)] @@ -26,27 +39,78 @@ struct TaskfileTask { } #[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +enum TaskfileIncludeEntry { + Shorthand(String), + 
Detailed(TaskfileIncludeConfig), +} + +#[derive(Debug, Serialize, Deserialize)] +#[allow(dead_code)] // We parse a broader subset of the schema than DTKT-200 consumes. +struct TaskfileIncludeConfig { + taskfile: String, + optional: Option, + flatten: Option, + internal: Option, + excludes: Option>, + aliases: Option>, + dir: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TaskfileInclude { + pub namespace: String, + pub taskfile: PathBuf, + pub optional: bool, + pub flatten: bool, + pub internal: bool, + pub excludes: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +#[allow(dead_code)] // Some top-level schema fields are currently only parsed for compatibility. struct Taskfile { version: Option, + #[serde(default)] + includes: HashMap, + #[serde(default)] tasks: HashMap, } -/// Parse a Taskfile.yml file at the given path and extract tasks -pub fn parse(path: &PathBuf) -> Result, String> { - let file_name = path - .file_name() - .and_then(|n| n.to_str()) - .unwrap_or("Taskfile"); +pub fn find_taskfile_in_dir(dir: &Path) -> Option { + for filename in SUPPORTED_TASKFILE_NAMES { + let path = dir.join(filename); + if path.is_file() { + return Some(path); + } + } - let contents = std::fs::read_to_string(path) - .map_err(|e| format!("Failed to read {}: {}", file_name, e))?; + None +} - let taskfile: Taskfile = serde_yaml::from_str(&contents) - .map_err(|e| format!("Failed to parse {}: {}", file_name, e))?; +pub fn resolve_taskfile_include_path(candidate: &Path) -> PathBuf { + if candidate.is_dir() { + return find_taskfile_in_dir(candidate) + .unwrap_or_else(|| candidate.join(DEFAULT_TASKFILE_NAME)); + } + + if candidate.is_file() || looks_like_taskfile_file(candidate) || candidate.extension().is_some() + { + return candidate.to_path_buf(); + } + + candidate.join(DEFAULT_TASKFILE_NAME) +} + +/// Parse a Taskfile.yml file at the given path and extract tasks +pub fn parse(path: &Path) -> Result, String> { + let taskfile = load_taskfile(path)?; + let mut 
task_entries: Vec<_> = taskfile.tasks.into_iter().collect(); + task_entries.sort_by(|a, b| a.0.cmp(&b.0)); let mut tasks = Vec::new(); - for (name, task_def) in taskfile.tasks { + for (name, task_def) in task_entries { // Skip tasks marked as internal if task_def.internal.unwrap_or(false) { continue; @@ -70,7 +134,8 @@ pub fn parse(path: &PathBuf) -> Result, String> { tasks.push(Task { name: name.clone(), - file_path: path.clone(), + file_path: path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::Taskfile, runner: TaskRunner::Task, source_name: name, @@ -83,6 +148,68 @@ pub fn parse(path: &PathBuf) -> Result, String> { Ok(tasks) } +pub fn extract_include_directives(path: &Path) -> Result, String> { + let taskfile = load_taskfile(path)?; + let mut include_entries: Vec<_> = taskfile.includes.into_iter().collect(); + include_entries.sort_by(|a, b| a.0.cmp(&b.0)); + + let mut includes = Vec::new(); + + for (namespace, include) in include_entries { + let include = match include { + TaskfileIncludeEntry::Shorthand(taskfile) => TaskfileInclude { + namespace, + taskfile: PathBuf::from(taskfile), + optional: false, + flatten: false, + internal: false, + excludes: Vec::new(), + }, + TaskfileIncludeEntry::Detailed(config) => TaskfileInclude { + namespace, + taskfile: PathBuf::from(config.taskfile), + optional: config.optional.unwrap_or(false), + flatten: config.flatten.unwrap_or(false), + internal: config.internal.unwrap_or(false), + excludes: config.excludes.unwrap_or_default(), + }, + }; + + if should_skip_non_local_include(&include.taskfile) { + continue; + } + + includes.push(include); + } + + Ok(includes) +} + +fn load_taskfile(path: &Path) -> Result { + let file_name = path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("Taskfile"); + + let contents = std::fs::read_to_string(path) + .map_err(|e| format!("Failed to read {}: {}", file_name, e))?; + + let taskfile = serde_yaml::from_str(&contents) + .map_err(|e| format!("Failed to 
parse {}: {}", file_name, e))?; + Ok(taskfile) +} + +fn looks_like_taskfile_file(path: &Path) -> bool { + path.file_name() + .and_then(|name| name.to_str()) + .is_some_and(|name| SUPPORTED_TASKFILE_NAMES.contains(&name)) +} + +fn should_skip_non_local_include(path: &Path) -> bool { + let path = path.to_string_lossy(); + path.contains("://") || path.contains("{{") || path.contains("}}") +} + #[cfg(test)] mod tests { use super::*; @@ -276,4 +403,77 @@ tasks: ); assert_eq!(args_task.runner, TaskRunner::Task); } + + #[test] + fn test_extract_include_directives() { + let temp_dir = TempDir::new().unwrap(); + let taskfile_path = temp_dir.path().join("Taskfile.yml"); + + std::fs::write( + &taskfile_path, + r#" +version: '3' +includes: + docs: ./docs + shared: + taskfile: ./shared/Taskfile.dist.yml + optional: true + flatten: true + internal: true + excludes: [build] + remote: + taskfile: https://example.com/tasks.yml + templated: ./Taskfile_{{OS}}.yml +"#, + ) + .unwrap(); + + let includes = extract_include_directives(&taskfile_path).unwrap(); + assert_eq!(includes.len(), 2); + + assert_eq!(includes[0].namespace, "docs"); + assert_eq!(includes[0].taskfile, PathBuf::from("./docs")); + assert!(!includes[0].optional); + assert!(!includes[0].flatten); + assert!(!includes[0].internal); + assert!(includes[0].excludes.is_empty()); + + assert_eq!(includes[1].namespace, "shared"); + assert_eq!( + includes[1].taskfile, + PathBuf::from("./shared/Taskfile.dist.yml") + ); + assert!(includes[1].optional); + assert!(includes[1].flatten); + assert!(includes[1].internal); + assert_eq!(includes[1].excludes, vec!["build".to_string()]); + } + + #[test] + fn test_find_taskfile_in_dir_and_resolve_include_path() { + let temp_dir = TempDir::new().unwrap(); + let include_dir = temp_dir.path().join("docs"); + std::fs::create_dir_all(&include_dir).unwrap(); + std::fs::write(include_dir.join("taskfile.yaml"), "version: '3'\n").unwrap(); + + let resolved_path = 
find_taskfile_in_dir(&include_dir).unwrap(); + assert!( + resolved_path == include_dir.join("Taskfile.yaml") + || resolved_path == include_dir.join("taskfile.yaml") + ); + let include_path = resolve_taskfile_include_path(&include_dir); + assert!( + include_path == include_dir.join("Taskfile.yaml") + || include_path == include_dir.join("taskfile.yaml") + ); + + let missing_dir = temp_dir.path().join("missing"); + assert_eq!( + resolve_taskfile_include_path(&missing_dir), + missing_dir.join("Taskfile.yml") + ); + + let explicit_file = temp_dir.path().join("Shared.yml"); + assert_eq!(resolve_taskfile_include_path(&explicit_file), explicit_file); + } } diff --git a/src/parsers/parse_travis_ci.rs b/src/parsers/parse_travis_ci.rs index 4c2f69b..d8149bb 100644 --- a/src/parsers/parse_travis_ci.rs +++ b/src/parsers/parse_travis_ci.rs @@ -43,6 +43,7 @@ fn parse_travis_string(content: &str, file_path: &Path) -> Result, Str let task = Task { name: job_name.clone(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::TravisCi, runner: TaskRunner::TravisCi, source_name: job_name.clone(), @@ -73,6 +74,7 @@ fn parse_travis_string(content: &str, file_path: &Path) -> Result, Str let task = Task { name: job_name.clone(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::TravisCi, runner: TaskRunner::TravisCi, source_name: job_name.clone(), @@ -90,6 +92,7 @@ fn parse_travis_string(content: &str, file_path: &Path) -> Result, Str let task = Task { name: job_name.clone(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::TravisCi, runner: TaskRunner::TravisCi, source_name: job_name.clone(), @@ -110,6 +113,7 @@ fn parse_travis_string(content: &str, file_path: &Path) -> Result, Str let task = Task { name: "travis".to_string(), file_path: file_path.to_path_buf(), + definition_path: None, definition_type: TaskDefinitionType::TravisCi, runner: 
TaskRunner::TravisCi, source_name: "travis".to_string(), diff --git a/src/parsers/parse_turbo_json.rs b/src/parsers/parse_turbo_json.rs index 7d82bc3..c9469c7 100644 --- a/src/parsers/parse_turbo_json.rs +++ b/src/parsers/parse_turbo_json.rs @@ -1,34 +1,127 @@ use crate::types::{Task, TaskDefinitionType, TaskRunner}; -use std::path::PathBuf; +use serde_json::Value; +use std::collections::BTreeMap; +use std::path::Path; -pub fn parse(path: &PathBuf) -> Result, String> { +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TurboTaskConfig { + pub inherits: bool, + pub declared_locally: bool, + pub has_local_configuration: bool, +} + +impl TurboTaskConfig { + pub fn is_effective_task_definition(&self) -> bool { + self.declared_locally && (self.inherits || self.has_local_configuration) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TurboConfig { + pub extends: Vec, + pub tasks: BTreeMap, +} + +impl TurboConfig { + pub fn task_names(&self) -> impl Iterator { + self.tasks + .iter() + .filter(|(_, task)| task.is_effective_task_definition()) + .map(|(name, _)| name) + } +} + +pub fn load_config(path: &Path) -> Result { let contents = std::fs::read_to_string(path).map_err(|e| format!("Failed to read turbo.json: {}", e))?; - let json: serde_json::Value = serde_json::from_str(&contents) + let json: Value = serde_json::from_str(&contents) .map_err(|e| format!("Failed to parse turbo.json: {}", e))?; - let tasks = json - .get("tasks") - .or_else(|| json.get("pipeline")) - .and_then(|value| value.as_object()) - .map(|task_map| { - task_map - .keys() - .map(|name| Task { - name: name.clone(), - file_path: path.clone(), - definition_type: TaskDefinitionType::TurboJson, - runner: TaskRunner::Turbo, - source_name: name.clone(), - description: None, - shadowed_by: None, - disambiguated_name: None, - }) + let extends = json + .get("extends") + .and_then(Value::as_array) + .map(|entries| { + entries + .iter() + .filter_map(Value::as_str) + .map(str::to_string) .collect() }) 
.unwrap_or_default(); - Ok(tasks) + let tasks = if let Some(value) = json.get("tasks") { + parse_task_map("tasks", value)? + } else if let Some(value) = json.get("pipeline") { + parse_task_map("pipeline", value)? + } else { + BTreeMap::new() + }; + + Ok(TurboConfig { extends, tasks }) +} + +pub fn parse(path: &Path) -> Result, String> { + let config = load_config(path)?; + + Ok(config + .task_names() + .map(|name| Task { + name: name.clone(), + file_path: path.to_path_buf(), + definition_path: None, + definition_type: TaskDefinitionType::TurboJson, + runner: TaskRunner::Turbo, + source_name: name.clone(), + description: None, + shadowed_by: None, + disambiguated_name: None, + }) + .collect()) +} + +fn parse_task_config(value: &Value) -> TurboTaskConfig { + let Some(object) = value.as_object() else { + return TurboTaskConfig { + inherits: true, + declared_locally: false, + has_local_configuration: false, + }; + }; + + TurboTaskConfig { + inherits: object + .get("extends") + .and_then(Value::as_bool) + .unwrap_or(true), + declared_locally: true, + has_local_configuration: object.keys().any(|key| key != "extends"), + } +} + +fn parse_task_map(key: &str, value: &Value) -> Result, String> { + let Some(task_map) = value.as_object() else { + return Err(format!( + "Failed to parse turbo.json: '{}' must be an object, found {}", + key, + json_type_name(value) + )); + }; + + Ok(task_map + .iter() + .map(|(name, value)| (name.clone(), parse_task_config(value))) + .collect()) +} + +fn json_type_name(value: &Value) -> &'static str { + match value { + Value::Null => "null", + Value::Bool(_) => "boolean", + Value::Number(_) => "number", + Value::String(_) => "string", + Value::Array(_) => "array", + Value::Object(_) => "object", + } } #[cfg(test)] @@ -68,6 +161,118 @@ mod tests { ); } + #[test] + fn test_load_turbo_json_tracks_extends_and_task_level_extends_false() { + let temp_dir = TempDir::new().unwrap(); + let turbo_json_path = temp_dir.path().join("turbo.json"); + 
std::fs::write( + &turbo_json_path, + r#"{ + "extends": ["//", "shared-config"], + "tasks": { + "build": {}, + "test": { + "extends": false + }, + "lint": { + "extends": false, + "outputs": [] + } + } +}"#, + ) + .unwrap(); + + let config = load_config(&turbo_json_path).unwrap(); + + assert_eq!(config.extends, vec!["//", "shared-config"]); + assert_eq!( + config.tasks.get("build"), + Some(&TurboTaskConfig { + inherits: true, + declared_locally: true, + has_local_configuration: false, + }) + ); + assert_eq!( + config.tasks.get("test"), + Some(&TurboTaskConfig { + inherits: false, + declared_locally: true, + has_local_configuration: false, + }) + ); + assert_eq!( + config.tasks.get("lint"), + Some(&TurboTaskConfig { + inherits: false, + declared_locally: true, + has_local_configuration: true, + }) + ); + } + + #[test] + fn test_parse_turbo_json_excludes_task_with_only_extends_false() { + let temp_dir = TempDir::new().unwrap(); + let turbo_json_path = temp_dir.path().join("turbo.json"); + std::fs::write( + &turbo_json_path, + r#"{ + "extends": ["//"], + "tasks": { + "build": {}, + "test": { + "extends": false + }, + "lint": { + "extends": false, + "dependsOn": [] + } + } +}"#, + ) + .unwrap(); + + let tasks = parse(&turbo_json_path).unwrap(); + let task_names: Vec<_> = tasks.iter().map(|task| task.name.as_str()).collect(); + + assert_eq!(task_names, vec!["build", "lint"]); + assert!(!task_names.contains(&"test")); + } + + #[test] + fn test_parse_turbo_json_treats_non_object_task_as_inherit_only() { + let temp_dir = TempDir::new().unwrap(); + let turbo_json_path = temp_dir.path().join("turbo.json"); + std::fs::write( + &turbo_json_path, + r#"{ + "tasks": { + "build": null, + "test": {} + } +}"#, + ) + .unwrap(); + + let config = load_config(&turbo_json_path).unwrap(); + assert_eq!( + config.tasks.get("build"), + Some(&TurboTaskConfig { + inherits: true, + declared_locally: false, + has_local_configuration: false, + }) + ); + + let tasks = 
parse(&turbo_json_path).unwrap(); + let task_names: Vec<_> = tasks.iter().map(|task| task.name.as_str()).collect(); + + assert_eq!(task_names, vec!["test"]); + assert!(!task_names.contains(&"build")); + } + #[test] fn test_parse_turbo_json_legacy_pipeline() { let temp_dir = TempDir::new().unwrap(); @@ -89,6 +294,30 @@ mod tests { assert!(tasks.iter().any(|task| task.name == "check-types")); } + #[test] + fn test_parse_turbo_json_errors_when_tasks_is_not_an_object() { + let temp_dir = TempDir::new().unwrap(); + let turbo_json_path = temp_dir.path().join("turbo.json"); + std::fs::write(&turbo_json_path, r#"{"tasks":["build"]}"#).unwrap(); + + let err = parse(&turbo_json_path).unwrap_err(); + + assert!(err.contains("'tasks' must be an object")); + assert!(err.contains("array")); + } + + #[test] + fn test_parse_turbo_json_errors_when_pipeline_is_not_an_object() { + let temp_dir = TempDir::new().unwrap(); + let turbo_json_path = temp_dir.path().join("turbo.json"); + std::fs::write(&turbo_json_path, r#"{"pipeline":"build"}"#).unwrap(); + + let err = parse(&turbo_json_path).unwrap_err(); + + assert!(err.contains("'pipeline' must be an object")); + assert!(err.contains("string")); + } + #[test] fn test_parse_turbo_json_without_tasks() { let temp_dir = TempDir::new().unwrap(); diff --git a/src/prompt.rs b/src/prompt.rs index be3fc36..ba05ecb 100644 --- a/src/prompt.rs +++ b/src/prompt.rs @@ -69,7 +69,7 @@ fn prompt_for_task_fallback(task: &Task) -> Result { println!( "\nTask '{}' from '{}' requires approval.", task.name, - task.file_path.display() + task.definition_path().display() ); if let Some(desc) = &task.description { println!("Description: {}", desc); @@ -189,7 +189,7 @@ fn ui(f: &mut Frame, task: &Task, options: &[(&str, AllowDecision)], selected: u .add_modifier(Modifier::BOLD), )]), Line::from(vec![Span::styled( - format!(" {}", task.file_path.display()), + format!(" {}", task.definition_path().display()), Style::default() .fg(Color::Yellow) 
.add_modifier(Modifier::BOLD), diff --git a/src/task_discovery.rs b/src/task_discovery.rs index 3a696a9..451003f 100644 --- a/src/task_discovery.rs +++ b/src/task_discovery.rs @@ -1,3 +1,4 @@ +use crate::composed_paths::{ComposedDefinitionSource, RecursiveDiscoveryState, VisitState}; use crate::parsers::{ parse_cmake, parse_docker_compose, parse_github_actions, parse_gradle, parse_justfile, parse_makefile, parse_package_json, parse_pom_xml, parse_pyproject_toml, parse_taskfile, @@ -6,7 +7,7 @@ use crate::parsers::{ use crate::repo_root::find_git_repo_root; use crate::task_shadowing::check_shadowing; use crate::types::{Task, TaskDefinitionFile, TaskDefinitionType, TaskFileStatus, TaskRunner}; -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use std::fs; use std::path::Path; use std::path::PathBuf; @@ -223,7 +224,7 @@ pub fn format_ambiguous_task_error(task_name: &str, matching_tasks: &[&Task]) -> " • {} ({} from {})\n", disambiguated, task.runner.short_name(), - task.file_path.display() + task.definition_path().display() )); } } @@ -308,21 +309,109 @@ fn discover_makefile_tasks(dir: &Path, discovered: &mut DiscoveredTasks) -> Resu return Ok(()); } - match parse_makefile::parse(&makefile_path) { - Ok(tasks) => { - handle_discovery_success( - tasks, - makefile_path, - TaskDefinitionType::Makefile, - discovered, - ); + let root_source = ComposedDefinitionSource::direct(makefile_path.clone()); + let mut traversal_state = RecursiveDiscoveryState::new(); + let mut seen_task_names = std::collections::HashSet::new(); + let mut tasks = Vec::new(); + let mut include_errors = Vec::new(); + + let result = collect_makefile_tasks_recursive( + &makefile_path, + &root_source, + &mut traversal_state, + &mut seen_task_names, + &mut tasks, + &mut include_errors, + ); + + for task in &mut tasks { + task.shadowed_by = check_shadowing(&task.name); + } + + discovered.tasks.extend(tasks); + discovered.errors.extend(include_errors); + + let status = match result { 
+ Ok(()) => TaskFileStatus::Parsed, + Err(error) => { + discovered.errors.push(format!( + "Failed to parse {}: {}", + makefile_path.display(), + error + )); + TaskFileStatus::ParseError(error) } - Err(e) => { - handle_discovery_error(e, makefile_path, TaskDefinitionType::Makefile, discovered); + }; + discovered.definitions.makefile = Some(TaskDefinitionFile { + path: makefile_path, + definition_type: TaskDefinitionType::Makefile, + status, + }); + + Ok(()) +} + +fn collect_makefile_tasks_recursive( + root_makefile_path: &Path, + current_source: &ComposedDefinitionSource, + traversal_state: &mut RecursiveDiscoveryState, + seen_task_names: &mut std::collections::HashSet, + collected_tasks: &mut Vec, + include_errors: &mut Vec, +) -> Result<(), String> { + match traversal_state.mark_visited(current_source.definition_path()) { + VisitState::AlreadyVisited(_) => return Ok(()), + VisitState::New(_) => {} + } + + let mut first_error: Option = None; + + let mut tasks = parse_makefile::parse(current_source.definition_path())?; + for task in &mut tasks { + current_source.apply_to_task(task); + } + for task in tasks { + if seen_task_names.insert(task.name.clone()) { + collected_tasks.push(task); } } - Ok(()) + let includes = parse_makefile::extract_include_directives(current_source.definition_path())?; + + for include in includes { + let resolved_include = current_source.resolve_child(&include.path); + + if !resolved_include.is_file() { + continue; + } + + let include_source = + ComposedDefinitionSource::composed(root_makefile_path, resolved_include.clone()); + if let Err(error) = collect_makefile_tasks_recursive( + root_makefile_path, + &include_source, + traversal_state, + seen_task_names, + collected_tasks, + include_errors, + ) { + let error = format!( + "Failed to parse included makefile '{}': {}", + resolved_include.display(), + error + ); + include_errors.push(error.clone()); + if first_error.is_none() { + first_error = Some(error); + } + } + } + + if let Some(error) = 
first_error { + Err(error) + } else { + Ok(()) + } } fn discover_npm_tasks(dir: &Path, discovered: &mut DiscoveredTasks) -> Result<(), String> { @@ -389,63 +478,184 @@ fn discover_python_tasks(dir: &Path, discovered: &mut DiscoveredTasks) -> Result } fn discover_taskfile_tasks(dir: &Path, discovered: &mut DiscoveredTasks) -> Result<(), String> { - // List of possible Taskfile paths in order of priority - let possible_taskfiles = [ - "Taskfile.yml", - "taskfile.yml", - "Taskfile.yaml", - "taskfile.yaml", - "Taskfile.dist.yml", - "taskfile.dist.yml", - "Taskfile.dist.yaml", - "taskfile.dist.yaml", - ]; - - // Try to find the first existing Taskfile - let mut taskfile_path = None; - for filename in &possible_taskfiles { - let path = dir.join(filename); - if path.exists() { - taskfile_path = Some(path); - break; + let default_path = dir.join(parse_taskfile::SUPPORTED_TASKFILE_NAMES[0]); + let Some(taskfile_path) = parse_taskfile::find_taskfile_in_dir(dir) else { + discovered.definitions.taskfile = Some(TaskDefinitionFile { + path: default_path, + definition_type: TaskDefinitionType::Taskfile, + status: TaskFileStatus::NotFound, + }); + return Ok(()); + }; + + let root_source = ComposedDefinitionSource::direct(taskfile_path.clone()); + let mut traversal_state = RecursiveDiscoveryState::new(); + let mut seen_task_names = std::collections::HashSet::new(); + let mut tasks = Vec::new(); + let mut include_errors = Vec::new(); + let no_excludes = std::collections::HashSet::new(); + let mut traversal = TaskfileTraversal { + root_taskfile_path: &taskfile_path, + traversal_state: &mut traversal_state, + seen_task_names: &mut seen_task_names, + collected_tasks: &mut tasks, + include_errors: &mut include_errors, + }; + + let result = collect_taskfile_tasks_recursive( + &root_source, + "", + None, + false, + &no_excludes, + &mut traversal, + ); + + for task in &mut tasks { + task.shadowed_by = check_shadowing(&task.name); + } + + discovered.tasks.extend(tasks); + 
discovered.errors.extend(include_errors); + + let status = match result { + Ok(()) => TaskFileStatus::Parsed, + Err(error) => { + discovered.errors.push(format!( + "Failed to parse {}: {}", + taskfile_path.display(), + error + )); + TaskFileStatus::ParseError(error) } + }; + discovered.definitions.taskfile = Some(TaskDefinitionFile { + path: taskfile_path, + definition_type: TaskDefinitionType::Taskfile, + status, + }); + + Ok(()) +} + +struct TaskfileTraversal<'a> { + root_taskfile_path: &'a Path, + traversal_state: &'a mut RecursiveDiscoveryState, + seen_task_names: &'a mut std::collections::HashSet, + collected_tasks: &'a mut Vec, + include_errors: &'a mut Vec, +} + +fn collect_taskfile_tasks_recursive( + current_source: &ComposedDefinitionSource, + namespace_prefix: &str, + include_label: Option<&str>, + hide_tasks: bool, + excluded_tasks: &std::collections::HashSet, + traversal: &mut TaskfileTraversal<'_>, +) -> Result<(), String> { + match traversal + .traversal_state + .mark_visited(current_source.definition_path()) + { + VisitState::AlreadyVisited(_) => return Ok(()), + VisitState::New(_) => {} } - // Use a default path for reporting if no Taskfile was found - let default_path = dir.join("Taskfile.yml"); + let mut first_error: Option = None; - // If a Taskfile was found, parse it - if let Some(taskfile_path) = taskfile_path { - let mut definition = TaskDefinitionFile { - path: taskfile_path.clone(), - definition_type: TaskDefinitionType::Taskfile, - status: TaskFileStatus::NotImplemented, - }; + let mut tasks = parse_taskfile::parse(current_source.definition_path())?; + tasks.sort_by(|a, b| a.name.cmp(&b.name)); - match parse_taskfile::parse(&taskfile_path) { - Ok(tasks) => { - definition.status = TaskFileStatus::Parsed; - discovered.tasks.extend(tasks); + if !hide_tasks { + for mut task in tasks { + let original_name = task.name.clone(); + if excluded_tasks.contains(&original_name) { + continue; } - Err(e) => { - definition.status = 
TaskFileStatus::ParseError(e.clone()); - discovered - .errors - .push(format!("Error parsing {}: {}", taskfile_path.display(), e)); + + let effective_name = prefix_taskfile_task_name(namespace_prefix, &original_name); + task.name = effective_name.clone(); + task.source_name = effective_name; + current_source.apply_to_task(&mut task); + + if !traversal.seen_task_names.insert(task.name.clone()) { + let error = match include_label { + Some(include_label) => { + format!( + "Found multiple tasks ({}) included by \"{}\"", + task.name, include_label + ) + } + None => format!("Found multiple Taskfile tasks named '{}'", task.name), + }; + traversal.include_errors.push(error.clone()); + if first_error.is_none() { + first_error = Some(error); + } + continue; + } + + traversal.collected_tasks.push(task); + } + } + + let includes = parse_taskfile::extract_include_directives(current_source.definition_path())?; + + for include in includes { + let resolved_candidate = current_source.resolve_child(&include.taskfile); + let resolved_include = parse_taskfile::resolve_taskfile_include_path(&resolved_candidate); + + if !resolved_include.is_file() { + continue; + } + + let child_source = ComposedDefinitionSource::composed( + traversal.root_taskfile_path, + resolved_include.clone(), + ); + let child_namespace = if include.flatten { + namespace_prefix.to_string() + } else { + prefix_taskfile_task_name(namespace_prefix, &include.namespace) + }; + let child_include_label = prefix_taskfile_task_name(namespace_prefix, &include.namespace); + let child_hide_tasks = hide_tasks || include.internal; + let child_excludes = include.excludes.into_iter().collect(); + + if let Err(error) = collect_taskfile_tasks_recursive( + &child_source, + &child_namespace, + Some(child_include_label.as_str()), + child_hide_tasks, + &child_excludes, + traversal, + ) { + let error = format!( + "Failed to parse included Taskfile '{}': {}", + resolved_include.display(), + error + ); + 
traversal.include_errors.push(error.clone()); + if first_error.is_none() { + first_error = Some(error); } } + } - set_definition(discovered, definition); + if let Some(error) = first_error { + Err(error) } else { - // No Taskfile found, set status as NotFound - discovered.definitions.taskfile = Some(TaskDefinitionFile { - path: default_path, - definition_type: TaskDefinitionType::Taskfile, - status: TaskFileStatus::NotFound, - }); + Ok(()) } +} - Ok(()) +fn prefix_taskfile_task_name(namespace_prefix: &str, task_name: &str) -> String { + if namespace_prefix.is_empty() { + task_name.to_string() + } else { + format!("{}:{}", namespace_prefix, task_name) + } } fn discover_turbo_tasks(dir: &Path, discovered: &mut DiscoveredTasks) -> Result<(), String> { @@ -461,16 +671,339 @@ fn discover_turbo_tasks(dir: &Path, discovered: &mut DiscoveredTasks) -> Result< return Ok(()); } - match parse_turbo_json::parse(&turbo_json) { - Ok(tasks) => { - handle_discovery_success(tasks, turbo_json, TaskDefinitionType::TurboJson, discovered); + let mut tasks_by_name = BTreeMap::new(); + let mut config_errors = Vec::new(); + + let result = collect_turbo_tasks_for_context( + &repo_root, + dir, + &turbo_json, + &mut tasks_by_name, + &mut config_errors, + ); + + let mut tasks: Vec<_> = tasks_by_name.into_values().collect(); + for task in &mut tasks { + task.shadowed_by = check_shadowing(&task.name); + } + + discovered.tasks.extend(tasks); + discovered.errors.extend(config_errors); + + let status = match result { + Ok(()) => TaskFileStatus::Parsed, + Err(error) => { + discovered.errors.push(format!( + "Failed to parse {}: {}", + turbo_json.display(), + error + )); + TaskFileStatus::ParseError(error) } - Err(e) => { - handle_discovery_error(e, turbo_json, TaskDefinitionType::TurboJson, discovered); + }; + discovered.definitions.turbo_json = Some(TaskDefinitionFile { + path: turbo_json, + definition_type: TaskDefinitionType::TurboJson, + status, + }); + + Ok(()) +} + +fn 
collect_turbo_tasks_for_context( + repo_root: &Path, + dir: &Path, + root_turbo_json: &Path, + collected_tasks: &mut BTreeMap, + config_errors: &mut Vec, +) -> Result<(), String> { + let root_source = ComposedDefinitionSource::direct(root_turbo_json.to_path_buf()); + let mut package_configs_by_name = None; + let root_tasks = resolve_effective_turbo_tasks( + &root_source, + repo_root, + root_turbo_json, + &mut package_configs_by_name, + &mut RecursiveDiscoveryState::new(), + )?; + collected_tasks.extend(root_tasks); + + let mut first_error = None; + + if dir == repo_root { + for config_path in collect_descendant_turbo_config_paths(repo_root) { + let config_source = + ComposedDefinitionSource::composed(root_turbo_json, config_path.clone()); + match resolve_effective_turbo_tasks( + &config_source, + repo_root, + root_turbo_json, + &mut package_configs_by_name, + &mut RecursiveDiscoveryState::new(), + ) { + Ok(tasks) => { + for (name, task) in tasks { + collected_tasks.entry(name).or_insert(task); + } + } + Err(error) => { + let error = format!( + "Failed to parse workspace-local turbo config '{}': {}", + config_path.display(), + error + ); + config_errors.push(error.clone()); + if first_error.is_none() { + first_error = Some(error); + } + } + } + } + } else { + for config_path in collect_turbo_ancestor_config_paths(dir, repo_root) { + let config_source = + ComposedDefinitionSource::composed(root_turbo_json, config_path.clone()); + match resolve_effective_turbo_tasks( + &config_source, + repo_root, + root_turbo_json, + &mut package_configs_by_name, + &mut RecursiveDiscoveryState::new(), + ) { + Ok(tasks) if !tasks.is_empty() => { + *collected_tasks = tasks; + break; + } + Ok(_) => {} + Err(error) => { + let error = format!( + "Failed to parse workspace-local turbo config '{}': {}", + config_path.display(), + error + ); + config_errors.push(error.clone()); + if first_error.is_none() { + first_error = Some(error); + } + break; + } + } } } - Ok(()) + if let Some(error) = 
first_error { + Err(error) + } else { + Ok(()) + } +} + +fn resolve_effective_turbo_tasks( + current_source: &ComposedDefinitionSource, + repo_root: &Path, + root_turbo_json: &Path, + package_configs_by_name: &mut Option>, + traversal_state: &mut RecursiveDiscoveryState, +) -> Result, String> { + match traversal_state.mark_visited(current_source.definition_path()) { + VisitState::AlreadyVisited(_) => return Ok(BTreeMap::new()), + VisitState::New(_) => {} + } + + let config = parse_turbo_json::load_config(current_source.definition_path())?; + + if current_source.definition_path() != root_turbo_json && config.extends.is_empty() { + return Ok(BTreeMap::new()); + } + + let mut tasks = BTreeMap::new(); + + if current_source.definition_path() != root_turbo_json { + for extend_entry in &config.extends { + let Some(parent_config_path) = resolve_turbo_extends_entry( + current_source, + extend_entry, + repo_root, + root_turbo_json, + package_configs_by_name, + ) else { + continue; + }; + + if !parent_config_path.is_file() { + continue; + } + + let parent_source = + ComposedDefinitionSource::composed(root_turbo_json, parent_config_path.clone()); + let inherited_tasks = resolve_effective_turbo_tasks( + &parent_source, + repo_root, + root_turbo_json, + package_configs_by_name, + traversal_state, + )?; + tasks.extend(inherited_tasks); + } + } + + for (name, task_config) in &config.tasks { + if !task_config.is_effective_task_definition() { + tasks.remove(name.as_str()); + } + } + + let mut local_tasks = parse_turbo_json::parse(current_source.definition_path())?; + for task in &mut local_tasks { + current_source.apply_to_task(task); + } + for task in local_tasks { + tasks.insert(task.name.clone(), task); + } + + Ok(tasks) +} + +fn resolve_turbo_extends_entry( + current_source: &ComposedDefinitionSource, + extend_entry: &str, + repo_root: &Path, + root_turbo_json: &Path, + package_configs_by_name: &mut Option>, +) -> Option { + if extend_entry == "//" { + return 
Some(root_turbo_json.to_path_buf()); + } + + if looks_like_turbo_config_path(extend_entry) { + let candidate = current_source.resolve_child(extend_entry); + return Some(resolve_turbo_config_path_candidate(&candidate)); + } + + let package_configs_by_name = + package_configs_by_name.get_or_insert_with(|| build_turbo_package_config_index(repo_root)); + package_configs_by_name.get(extend_entry).cloned() +} + +fn collect_turbo_ancestor_config_paths(dir: &Path, repo_root: &Path) -> Vec { + let mut current = dir.to_path_buf(); + let mut config_paths = Vec::new(); + + while current.starts_with(repo_root) && current != repo_root { + let candidate = current.join("turbo.json"); + if candidate.is_file() { + config_paths.push(candidate); + } + + if !current.pop() { + break; + } + } + + config_paths +} + +fn collect_descendant_turbo_config_paths(repo_root: &Path) -> Vec { + let mut config_paths = Vec::new(); + collect_descendant_turbo_config_paths_recursive(repo_root, repo_root, &mut config_paths); + config_paths.sort(); + config_paths +} + +fn collect_descendant_turbo_config_paths_recursive( + repo_root: &Path, + current_dir: &Path, + config_paths: &mut Vec, +) { + let Ok(entries) = fs::read_dir(current_dir) else { + return; + }; + + for entry in entries.flatten() { + let Ok(file_type) = entry.file_type() else { + continue; + }; + if file_type.is_symlink() || !file_type.is_dir() { + continue; + } + + let path = entry.path(); + + let Some(file_name) = path.file_name().and_then(|name| name.to_str()) else { + continue; + }; + if should_skip_turbo_config_scan(file_name) { + continue; + } + + let candidate = path.join("turbo.json"); + if candidate.is_file() && candidate != repo_root.join("turbo.json") { + config_paths.push(candidate); + } + + collect_descendant_turbo_config_paths_recursive(repo_root, &path, config_paths); + } +} + +fn should_skip_turbo_config_scan(file_name: &str) -> bool { + matches!(file_name, ".git" | "node_modules") +} + +fn 
looks_like_turbo_config_path(extend_entry: &str) -> bool { + let extend_path = Path::new(extend_entry); + extend_path.is_absolute() + || extend_entry.starts_with('.') + || extend_entry.contains(std::path::MAIN_SEPARATOR) + || extend_entry.contains('/') + || extend_entry.contains('\\') +} + +fn resolve_turbo_config_path_candidate(candidate: &Path) -> PathBuf { + if candidate + .file_name() + .and_then(|name| name.to_str()) + .is_some_and(|name| name == "turbo.json") + { + return candidate.to_path_buf(); + } + + if candidate.extension().is_none() || candidate.is_dir() { + return candidate.join("turbo.json"); + } + + candidate.to_path_buf() +} + +fn build_turbo_package_config_index(repo_root: &Path) -> HashMap { + let mut package_configs = HashMap::new(); + + let root_turbo_json = repo_root.join("turbo.json"); + if root_turbo_json.is_file() + && let Some(package_name) = read_package_name(repo_root) + { + package_configs.insert(package_name, root_turbo_json); + } + + for config_path in collect_descendant_turbo_config_paths(repo_root) { + let Some(config_dir) = config_path.parent() else { + continue; + }; + let Some(package_name) = read_package_name(config_dir) else { + continue; + }; + package_configs.entry(package_name).or_insert(config_path); + } + + package_configs +} + +fn read_package_name(dir: &Path) -> Option { + let package_json_path = dir.join("package.json"); + let contents = fs::read_to_string(package_json_path).ok()?; + let json: serde_json::Value = serde_json::from_str(&contents).ok()?; + json.get("name") + .and_then(serde_json::Value::as_str) + .map(str::to_string) } fn discover_maven_tasks(dir: &Path, discovered: &mut DiscoveredTasks) -> Result<(), String> { @@ -639,10 +1172,11 @@ fn discover_github_actions_tasks( for file_path in workflow_files { match parse_github_actions(&file_path) { Ok(mut tasks) => { - // Override the file path to use the common parent directory - // instead of individual workflow files + let source = + 
ComposedDefinitionSource::composed(workflows_parent.clone(), file_path); for task in &mut tasks { - task.file_path = workflows_parent.clone(); + source.apply_to_task(task); + task.shadowed_by = check_shadowing(&task.name); } all_tasks.extend(tasks); } @@ -838,6 +1372,7 @@ fn discover_shell_script_tasks(dir: &Path, discovered: &mut DiscoveredTasks) { discovered.tasks.push(Task { name: name.clone(), file_path: path, + definition_path: None, definition_type: TaskDefinitionType::ShellScript, runner: TaskRunner::ShellScript, source_name, @@ -944,6 +1479,26 @@ mod tests { writeln!(file, "{}", content).unwrap(); } + fn create_test_turbo_json(dir: &Path, content: &str) { + std::fs::create_dir_all(dir).unwrap(); + std::fs::write(dir.join("turbo.json"), content).unwrap(); + } + + fn create_test_package_json(dir: &Path, name: &str) { + std::fs::create_dir_all(dir).unwrap(); + std::fs::write( + dir.join("package.json"), + format!( + r#"{{ + "name": "{}", + "private": true +}}"#, + name + ), + ) + .unwrap(); + } + #[test] fn test_discover_tasks_empty_directory() { let temp_dir = TempDir::new().unwrap(); @@ -971,47 +1526,246 @@ mod tests { )); assert!(matches!( - discovered.definitions.turbo_json.unwrap().status, - TaskFileStatus::NotFound + discovered.definitions.turbo_json.unwrap().status, + TaskFileStatus::NotFound + )); + } + + #[test] + fn test_discover_tasks_with_makefile() { + let temp_dir = TempDir::new().unwrap(); + let content = r#".PHONY: build test + +build: + @echo "Building the project" + cargo build + +test: + @echo "Running tests" + cargo test"#; + create_test_makefile(temp_dir.path(), content); + + let discovered = discover_tasks(temp_dir.path()); + + assert_eq!(discovered.tasks.len(), 2); + assert!(discovered.errors.is_empty()); + + // Check Makefile status + assert!(matches!( + discovered.definitions.makefile.unwrap().status, + TaskFileStatus::Parsed + )); + + // Verify tasks + let build_task = discovered.tasks.iter().find(|t| t.name == "build").unwrap(); + 
assert_eq!(build_task.runner, TaskRunner::Make); + assert_eq!( + build_task.description, + Some("Building the project".to_string()) + ); + + let test_task = discovered.tasks.iter().find(|t| t.name == "test").unwrap(); + assert_eq!(test_task.runner, TaskRunner::Make); + assert_eq!(test_task.description, Some("Running tests".to_string())); + } + + #[test] + fn test_discover_tasks_with_included_makefiles() { + let temp_dir = TempDir::new().unwrap(); + let included_dir = temp_dir.path().join("mk"); + std::fs::create_dir_all(&included_dir).unwrap(); + + create_test_makefile( + temp_dir.path(), + r#"include mk/common.mk + +build: + @echo "Build from root""#, + ); + std::fs::write( + included_dir.join("common.mk"), + r#"include nested.mk + +test: + @echo "Test from common""#, + ) + .unwrap(); + std::fs::write( + included_dir.join("nested.mk"), + r#"lint: + @echo "Lint from nested""#, + ) + .unwrap(); + + let discovered = discover_tasks(temp_dir.path()); + + assert!(discovered.errors.is_empty(), "{:?}", discovered.errors); + assert!(matches!( + discovered.definitions.makefile.unwrap().status, + TaskFileStatus::Parsed + )); + assert_eq!(discovered.tasks.len(), 3); + + let root_makefile = temp_dir.path().join("Makefile"); + let common_makefile = included_dir.join("common.mk"); + let nested_makefile = included_dir.join("nested.mk"); + + let build_task = discovered.tasks.iter().find(|t| t.name == "build").unwrap(); + assert_eq!(build_task.file_path, root_makefile); + assert_eq!(build_task.definition_path(), root_makefile.as_path()); + + let test_task = discovered.tasks.iter().find(|t| t.name == "test").unwrap(); + assert_eq!(test_task.file_path, root_makefile); + assert_eq!(test_task.definition_path(), common_makefile.as_path()); + + let lint_task = discovered.tasks.iter().find(|t| t.name == "lint").unwrap(); + assert_eq!(lint_task.file_path, root_makefile); + assert_eq!(lint_task.definition_path(), nested_makefile.as_path()); + } + + #[test] + #[serial] + fn 
test_duplicate_task_names_from_included_makefile_use_definition_path() { + let temp_dir = TempDir::new().unwrap(); + let included_dir = temp_dir.path().join("mk"); + std::fs::create_dir_all(&included_dir).unwrap(); + + reset_mock(); + enable_mock(); + mock_executable("npm"); + + create_test_makefile( + temp_dir.path(), + r#"include mk/common.mk + +build: + @echo "Build from root""#, + ); + std::fs::write( + included_dir.join("common.mk"), + r#"test: + @echo "Test from included makefile""#, + ) + .unwrap(); + std::fs::write( + temp_dir.path().join("package.json"), + r#"{ + "name": "test-package", + "scripts": { + "test": "jest" + } +}"#, + ) + .unwrap(); + std::fs::write(temp_dir.path().join("package-lock.json"), "{}").unwrap(); + + let discovered = discover_tasks(temp_dir.path()); + let matching_tasks: Vec<&Task> = discovered + .tasks + .iter() + .filter(|task| task.name == "test") + .collect(); + + assert_eq!(matching_tasks.len(), 2); + + let make_task = matching_tasks + .iter() + .copied() + .find(|task| task.runner == TaskRunner::Make) + .unwrap(); + assert_eq!( + make_task.definition_path(), + included_dir.join("common.mk").as_path() + ); + assert_eq!(make_task.file_path, temp_dir.path().join("Makefile")); + assert_eq!(make_task.disambiguated_name.as_deref(), Some("test-m")); + + let error = format_ambiguous_task_error("test", &matching_tasks); + assert!( + error.contains("mk/common.mk"), + "unexpected ambiguous-task error: {}", + error + ); + + reset_mock(); + reset_to_real_environment(); + } + + #[test] + fn test_discover_tasks_with_optional_missing_included_makefile() { + let temp_dir = TempDir::new().unwrap(); + create_test_makefile( + temp_dir.path(), + r#"-include missing.mk + +build: + @echo "Build from root""#, + ); + + let discovered = discover_tasks(temp_dir.path()); + + assert!(matches!( + discovered.definitions.makefile.unwrap().status, + TaskFileStatus::Parsed + )); + assert!(discovered.errors.is_empty(), "{:?}", discovered.errors); + 
assert_eq!(discovered.tasks.len(), 1); + assert!(discovered.tasks.iter().any(|t| t.name == "build")); + } + + #[test] + fn test_discover_tasks_with_recursive_makefile_include_cycle() { + let temp_dir = TempDir::new().unwrap(); + let included_dir = temp_dir.path().join("mk"); + std::fs::create_dir_all(&included_dir).unwrap(); + + create_test_makefile( + temp_dir.path(), + r#"include mk/common.mk + +build: + @echo "Build from root""#, + ); + std::fs::write( + included_dir.join("common.mk"), + r#"include ../Makefile + +test: + @echo "Test from common""#, + ) + .unwrap(); + + let discovered = discover_tasks(temp_dir.path()); + + assert!(matches!( + discovered.definitions.makefile.unwrap().status, + TaskFileStatus::Parsed )); + assert!(discovered.errors.is_empty(), "{:?}", discovered.errors); + assert_eq!(discovered.tasks.len(), 2); + assert!(discovered.tasks.iter().any(|t| t.name == "build")); + assert!(discovered.tasks.iter().any(|t| t.name == "test")); } #[test] - fn test_discover_tasks_with_makefile() { + fn test_discover_tasks_with_missing_required_included_makefile() { let temp_dir = TempDir::new().unwrap(); - let content = r#".PHONY: build test + create_test_makefile( + temp_dir.path(), + r#"include missing.mk build: - @echo "Building the project" - cargo build - -test: - @echo "Running tests" - cargo test"#; - create_test_makefile(temp_dir.path(), content); + @echo "Build from root""#, + ); let discovered = discover_tasks(temp_dir.path()); - assert_eq!(discovered.tasks.len(), 2); - assert!(discovered.errors.is_empty()); - - // Check Makefile status assert!(matches!( discovered.definitions.makefile.unwrap().status, TaskFileStatus::Parsed )); - - // Verify tasks - let build_task = discovered.tasks.iter().find(|t| t.name == "build").unwrap(); - assert_eq!(build_task.runner, TaskRunner::Make); - assert_eq!( - build_task.description, - Some("Building the project".to_string()) - ); - - let test_task = discovered.tasks.iter().find(|t| t.name == "test").unwrap(); - 
assert_eq!(test_task.runner, TaskRunner::Make); - assert_eq!(test_task.description, Some("Running tests".to_string())); + assert!(discovered.errors.is_empty(), "{:?}", discovered.errors); + assert!(discovered.tasks.iter().any(|t| t.name == "build")); } #[test] @@ -1020,8 +1774,8 @@ test: std::fs::create_dir_all(temp_dir.path().join(".git")).unwrap(); std::fs::create_dir_all(temp_dir.path().join("apps").join("web")).unwrap(); - std::fs::write( - temp_dir.path().join("turbo.json"), + create_test_turbo_json( + temp_dir.path(), r#"{ "$schema": "https://turborepo.dev/schema.json", "tasks": { @@ -1031,18 +1785,25 @@ test: } } }"#, - ) - .unwrap(); + ); let discovered = discover_tasks(&temp_dir.path().join("apps").join("web")); let build_task = discovered.tasks.iter().find(|t| t.name == "build").unwrap(); assert_eq!(build_task.runner, TaskRunner::Turbo); assert_eq!(build_task.file_path, temp_dir.path().join("turbo.json")); + assert_eq!( + build_task.definition_path(), + temp_dir.path().join("turbo.json").as_path() + ); let test_task = discovered.tasks.iter().find(|t| t.name == "test").unwrap(); assert_eq!(test_task.runner, TaskRunner::Turbo); assert_eq!(test_task.file_path, temp_dir.path().join("turbo.json")); + assert_eq!( + test_task.definition_path(), + temp_dir.path().join("turbo.json").as_path() + ); assert_eq!( discovered.definitions.turbo_json.unwrap().status, @@ -1050,6 +1811,180 @@ test: ); } + #[test] + fn test_discover_tasks_includes_workspace_local_turbo_tasks_from_repo_root() { + let temp_dir = TempDir::new().unwrap(); + let repo_root = temp_dir.path(); + let web_dir = repo_root.join("apps").join("web"); + std::fs::create_dir_all(repo_root.join(".git")).unwrap(); + + create_test_turbo_json( + repo_root, + r#"{ + "tasks": { + "build": {}, + "test": {} + } +}"#, + ); + create_test_package_json(&web_dir, "web"); + create_test_turbo_json( + &web_dir, + r#"{ + "extends": ["//"], + "tasks": { + "lint": {}, + "test": { + "extends": false + } + } +}"#, + ); + + let 
discovered = discover_tasks(repo_root); + + assert!(discovered.errors.is_empty(), "{:?}", discovered.errors); + assert!(discovered.tasks.iter().any(|task| task.name == "build")); + assert!(discovered.tasks.iter().any(|task| task.name == "test")); + + let lint_task = discovered + .tasks + .iter() + .find(|task| task.name == "lint") + .unwrap(); + assert_eq!(lint_task.runner, TaskRunner::Turbo); + assert_eq!(lint_task.file_path, repo_root.join("turbo.json")); + assert_eq!( + lint_task.definition_path(), + web_dir.join("turbo.json").as_path() + ); + } + + #[test] + fn test_discover_tasks_resolves_workspace_local_turbo_tasks_for_nested_package() { + let temp_dir = TempDir::new().unwrap(); + let repo_root = temp_dir.path(); + let web_dir = repo_root.join("apps").join("web"); + std::fs::create_dir_all(repo_root.join(".git")).unwrap(); + + create_test_turbo_json( + repo_root, + r#"{ + "tasks": { + "build": {}, + "test": {} + } +}"#, + ); + create_test_package_json(&web_dir, "web"); + create_test_turbo_json( + &web_dir, + r#"{ + "extends": ["//"], + "tasks": { + "lint": {}, + "test": { + "extends": false + } + } +}"#, + ); + + let discovered = discover_tasks(&web_dir); + let task_names: Vec<_> = discovered + .tasks + .iter() + .map(|task| task.name.as_str()) + .collect(); + + assert!(discovered.errors.is_empty(), "{:?}", discovered.errors); + assert_eq!(task_names, vec!["build", "lint"]); + + let build_task = discovered + .tasks + .iter() + .find(|task| task.name == "build") + .unwrap(); + assert_eq!( + build_task.definition_path(), + repo_root.join("turbo.json").as_path() + ); + + let lint_task = discovered + .tasks + .iter() + .find(|task| task.name == "lint") + .unwrap(); + assert_eq!(lint_task.file_path, repo_root.join("turbo.json")); + assert_eq!( + lint_task.definition_path(), + web_dir.join("turbo.json").as_path() + ); + } + + #[test] + fn test_discover_tasks_recursively_resolves_turbo_extends_chain() { + let temp_dir = TempDir::new().unwrap(); + let repo_root = 
temp_dir.path(); + let shared_dir = repo_root.join("packages").join("shared-config"); + let web_dir = repo_root.join("apps").join("web"); + std::fs::create_dir_all(repo_root.join(".git")).unwrap(); + + create_test_turbo_json( + repo_root, + r#"{ + "tasks": { + "build": {} + } +}"#, + ); + create_test_package_json(&shared_dir, "shared-config"); + create_test_turbo_json( + &shared_dir, + r#"{ + "extends": ["//"], + "tasks": { + "lint": {} + } +}"#, + ); + create_test_package_json(&web_dir, "web"); + create_test_turbo_json( + &web_dir, + r#"{ + "extends": ["//", "shared-config"], + "tasks": { + "deploy": {} + } +}"#, + ); + + let discovered = discover_tasks(&web_dir); + + assert!(discovered.errors.is_empty(), "{:?}", discovered.errors); + assert!(discovered.tasks.iter().any(|task| task.name == "build")); + + let lint_task = discovered + .tasks + .iter() + .find(|task| task.name == "lint") + .unwrap(); + assert_eq!( + lint_task.definition_path(), + shared_dir.join("turbo.json").as_path() + ); + + let deploy_task = discovered + .tasks + .iter() + .find(|task| task.name == "deploy") + .unwrap(); + assert_eq!( + deploy_task.definition_path(), + web_dir.join("turbo.json").as_path() + ); + } + #[test] fn test_discover_tasks_with_invalid_makefile() { let temp_dir = TempDir::new().unwrap(); @@ -1605,6 +2540,193 @@ tasks: reset_mock(); } + #[test] + #[serial] + fn test_discover_tasks_with_included_taskfiles() { + let temp_dir = TempDir::new().unwrap(); + let docs_dir = temp_dir.path().join("docs"); + let api_dir = docs_dir.join("api"); + std::fs::create_dir_all(&api_dir).unwrap(); + + reset_mock(); + enable_mock(); + mock_executable("task"); + + std::fs::write( + temp_dir.path().join("Taskfile.yml"), + r#"version: '3' +includes: + docs: ./docs +tasks: + build: + desc: Build task + cmds: + - echo "Build""#, + ) + .unwrap(); + std::fs::write( + docs_dir.join("Taskfile.yml"), + r#"version: '3' +includes: + api: ./api +tasks: + serve: + desc: Serve docs + cmds: + - echo "Serve""#, 
+ ) + .unwrap(); + std::fs::write( + api_dir.join("Taskfile.yml"), + r#"version: '3' +tasks: + generate: + desc: Generate API docs + cmds: + - echo "Generate""#, + ) + .unwrap(); + + let discovered = discover_tasks(temp_dir.path()); + + assert!(discovered.errors.is_empty(), "{:?}", discovered.errors); + assert!(matches!( + discovered.definitions.taskfile.unwrap().status, + TaskFileStatus::Parsed + )); + + let root_taskfile = temp_dir.path().join("Taskfile.yml"); + let docs_taskfile = docs_dir.join("Taskfile.yml"); + let api_taskfile = api_dir.join("Taskfile.yml"); + + let build_task = discovered.tasks.iter().find(|t| t.name == "build").unwrap(); + assert_eq!(build_task.runner, TaskRunner::Task); + assert_eq!(build_task.file_path, root_taskfile); + assert_eq!(build_task.definition_path(), root_taskfile.as_path()); + assert_eq!(build_task.source_name, "build"); + + let docs_task = discovered + .tasks + .iter() + .find(|t| t.name == "docs:serve") + .unwrap(); + assert_eq!(docs_task.file_path, root_taskfile); + assert_eq!(docs_task.definition_path(), docs_taskfile.as_path()); + assert_eq!(docs_task.source_name, "docs:serve"); + + let api_task = discovered + .tasks + .iter() + .find(|t| t.name == "docs:api:generate") + .unwrap(); + assert_eq!(api_task.file_path, root_taskfile); + assert_eq!(api_task.definition_path(), api_taskfile.as_path()); + assert_eq!(api_task.source_name, "docs:api:generate"); + + reset_mock(); + reset_to_real_environment(); + } + + #[test] + #[serial] + fn test_discover_tasks_with_missing_required_taskfile_include() { + let temp_dir = TempDir::new().unwrap(); + + reset_mock(); + enable_mock(); + mock_executable("task"); + + std::fs::write( + temp_dir.path().join("Taskfile.yml"), + r#"version: '3' +includes: + docs: ./docs + optional: + taskfile: ./optional + optional: true +tasks: + build: + desc: Build task + cmds: + - echo "Build""#, + ) + .unwrap(); + + let discovered = discover_tasks(temp_dir.path()); + + assert!(discovered.errors.is_empty(), 
"{:?}", discovered.errors); + assert!(matches!( + discovered.definitions.taskfile.unwrap().status, + TaskFileStatus::Parsed + )); + assert_eq!(discovered.tasks.len(), 1); + + let build_task = discovered.tasks.iter().find(|t| t.name == "build").unwrap(); + assert_eq!(build_task.runner, TaskRunner::Task); + assert_eq!( + build_task.definition_path(), + temp_dir.path().join("Taskfile.yml").as_path() + ); + + reset_mock(); + reset_to_real_environment(); + } + + #[test] + #[serial] + fn test_discover_tasks_with_duplicate_flattened_taskfile_include() { + let temp_dir = TempDir::new().unwrap(); + let shared_dir = temp_dir.path().join("shared"); + std::fs::create_dir_all(&shared_dir).unwrap(); + + reset_mock(); + enable_mock(); + mock_executable("task"); + + std::fs::write( + temp_dir.path().join("Taskfile.yml"), + r#"version: '3' +includes: + shared: + taskfile: ./shared + flatten: true +tasks: + build: + desc: Root build + cmds: + - echo "Build""#, + ) + .unwrap(); + std::fs::write( + shared_dir.join("Taskfile.yml"), + r#"version: '3' +tasks: + build: + desc: Shared build + cmds: + - echo "Shared build""#, + ) + .unwrap(); + + let discovered = discover_tasks(temp_dir.path()); + + assert!(matches!( + discovered.definitions.taskfile.unwrap().status, + TaskFileStatus::ParseError(_) + )); + assert!( + discovered + .errors + .iter() + .any(|error| error.contains("Found multiple tasks (build) included by \"shared\"")), + "{:?}", + discovered.errors + ); + + reset_mock(); + reset_to_real_environment(); + } + #[test] fn test_discover_maven_tasks() { let temp_dir = tempfile::tempdir().unwrap(); @@ -1880,8 +3002,19 @@ jobs: // With the new workflow grouping, all tasks should have the same workflow directory let common_path = temp_dir.path().join(".github").join("workflows"); + let expected_definition_paths = [ + github_workflows_dir.join("ci.yml"), + temp_dir.path().join("workflow.yml"), + custom_dir.join("custom.yml"), + ]; + for task in act_tasks { assert_eq!(task.file_path, 
common_path); + assert!( + expected_definition_paths.contains(&task.definition_path().to_path_buf()), + "unexpected workflow definition path: {}", + task.definition_path().display() + ); } } @@ -1895,6 +3028,7 @@ jobs: discovered.tasks.push(Task { name: "test".to_string(), file_path: PathBuf::from("/test/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "test".to_string(), @@ -1907,6 +3041,7 @@ jobs: discovered.tasks.push(Task { name: "ls".to_string(), file_path: PathBuf::from("/test/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "ls".to_string(), @@ -1919,6 +3054,7 @@ jobs: discovered.tasks.push(Task { name: "build".to_string(), file_path: PathBuf::from("/test/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "build".to_string(), @@ -1957,6 +3093,7 @@ jobs: discovered.tasks.push(Task { name: "test".to_string(), file_path: PathBuf::from("/test/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "test".to_string(), @@ -1968,6 +3105,7 @@ jobs: discovered.tasks.push(Task { name: "test".to_string(), file_path: PathBuf::from("/test/package.json"), + definition_path: None, definition_type: TaskDefinitionType::PackageJson, runner: TaskRunner::NodeNpm, source_name: "test".to_string(), @@ -1980,6 +3118,7 @@ jobs: discovered.tasks.push(Task { name: "ls".to_string(), file_path: PathBuf::from("/test/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "ls".to_string(), @@ -1992,6 +3131,7 @@ jobs: discovered.tasks.push(Task { name: "cd".to_string(), file_path: PathBuf::from("/test/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "cd".to_string(), @@ 
-2003,6 +3143,7 @@ jobs: discovered.tasks.push(Task { name: "cd".to_string(), file_path: PathBuf::from("/test/Taskfile.yml"), + definition_path: None, definition_type: TaskDefinitionType::Taskfile, runner: TaskRunner::Task, source_name: "cd".to_string(), @@ -2015,6 +3156,7 @@ jobs: discovered.tasks.push(Task { name: "build".to_string(), file_path: PathBuf::from("/test/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "build".to_string(), @@ -2085,6 +3227,7 @@ jobs: discovered.tasks.push(Task { name: "install".to_string(), file_path: PathBuf::from("/test/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "install".to_string(), @@ -2117,6 +3260,7 @@ jobs: let task = Task { name: "test".to_string(), file_path: PathBuf::from("/path/to/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "test".to_string(), @@ -2153,6 +3297,7 @@ jobs: let task = Task { name: "grep".to_string(), file_path: PathBuf::from("/path/to/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "grep".to_string(), @@ -2191,6 +3336,7 @@ jobs: let task1 = Task { name: "test".to_string(), file_path: PathBuf::from("/path/to/Makefile"), + definition_path: None, definition_type: TaskDefinitionType::Makefile, runner: TaskRunner::Make, source_name: "test".to_string(), @@ -2202,6 +3348,7 @@ jobs: let task2 = Task { name: "test".to_string(), file_path: PathBuf::from("/path/to/package.json"), + definition_path: None, definition_type: TaskDefinitionType::PackageJson, runner: TaskRunner::NodeNpm, source_name: "test".to_string(), diff --git a/src/types.rs b/src/types.rs index b6c0f11..a36747a 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,5 +1,5 @@ use serde::{Deserialize, Serialize}; -use std::path::PathBuf; +use 
std::path::{Path, PathBuf}; /// Information about what shadows a task name #[derive(Debug, Clone, PartialEq)] @@ -160,8 +160,14 @@ pub struct DiscoveredTaskDefinitions { pub struct Task { /// Name of the task (e.g., "build", "test", "start") pub name: String, - /// Path to the file containing this task + /// Path the runner should use for execution context. + /// For simple task definitions this is the same as the defining file. + /// For composed definitions this may point at the root file or directory + /// that the runner executes against. pub file_path: PathBuf, + /// Path to the file that actually defines this task when it differs from + /// the runner path. For simple task definitions this is None. + pub definition_path: Option, /// The type of definition file this task came from pub definition_type: TaskDefinitionType, /// The type of runner needed for this task @@ -176,6 +182,18 @@ pub struct Task { pub disambiguated_name: Option, } +impl Task { + /// Return the file that actually defines this task. + pub fn definition_path(&self) -> &Path { + self.definition_path.as_deref().unwrap_or(&self.file_path) + } + + /// Return the path used for allowlist matching and user-facing source attribution. 
+ pub fn allowlist_path(&self) -> &Path { + self.definition_path() + } +} + impl TaskRunner { /// Get the command to run a task with this runner pub fn get_command(&self, task: &Task) -> String { diff --git a/tests/docker_noinit/test_noinit.sh b/tests/docker_noinit/test_noinit.sh index ec22ec0..f85b72e 100755 --- a/tests/docker_noinit/test_noinit.sh +++ b/tests/docker_noinit/test_noinit.sh @@ -110,6 +110,110 @@ else exit 1 fi +# Test 6b: Verify recursive Taskfile include discovery +echo "\nTest 6b: Testing recursive Taskfile include discovery" +mkdir -p /home/testuser/task_include_project/docs/api +cat > /home/testuser/task_include_project/Taskfile.yml <<'EOF' +version: '3' +includes: + docs: ./docs + missing-required: ./missing + missing-optional: + taskfile: ./optional + optional: true +tasks: + task-root: + desc: Root task + cmds: + - echo "Root" +EOF +cat > /home/testuser/task_include_project/docs/Taskfile.yml <<'EOF' +version: '3' +includes: + api: ./api +tasks: + serve: + desc: Docs serve + cmds: + - echo "Serve docs" +EOF +cat > /home/testuser/task_include_project/docs/api/Taskfile.yml <<'EOF' +version: '3' +tasks: + generate: + desc: Generate docs + cmds: + - echo "Generate docs" +EOF + +cd /home/testuser/task_include_project +if ! 
dela list > task_include_list.txt 2> task_include_stderr.txt; then + echo "${RED}✗ dela list failed for recursive Taskfile includes${NC}" + cat task_include_stderr.txt + exit 1 +fi + +if grep -q "docs:serve" task_include_list.txt && grep -q "docs:api:generate" task_include_list.txt; then + echo "${GREEN}✓ dela list discovers recursively included Taskfile tasks${NC}" +else + echo "${RED}✗ dela list did not discover recursively included Taskfile tasks${NC}" + cat task_include_list.txt + exit 1 +fi + +if [ -s task_include_stderr.txt ]; then + echo "${RED}✗ dela emitted errors for missing included Taskfiles${NC}" + cat task_include_stderr.txt + exit 1 +fi + +output=$(dela get-command docs:api:generate 2>&1) +if echo "$output" | grep -q "task docs:api:generate"; then + echo "${GREEN}✓ get-command works for recursively included Taskfile tasks${NC}" +else + echo "${RED}✗ get-command failed for recursively included Taskfile task${NC}" + echo "Got: $output" + exit 1 +fi + +# Test 6c: Verify flattened Taskfile include duplicate detection +echo "\nTest 6c: Testing duplicate flattened Taskfile include detection" +mkdir -p /home/testuser/task_include_duplicates/shared +cat > /home/testuser/task_include_duplicates/Taskfile.yml <<'EOF' +version: '3' +includes: + shared: + taskfile: ./shared + flatten: true +tasks: + build: + desc: Root build + cmds: + - echo "Root build" +EOF +cat > /home/testuser/task_include_duplicates/shared/Taskfile.yml <<'EOF' +version: '3' +tasks: + build: + desc: Shared build + cmds: + - echo "Shared build" +EOF + +cd /home/testuser/task_include_duplicates +duplicate_output=$(dela list 2>&1) +if echo "$duplicate_output" | grep -q "multiple tasks" \ + && echo "$duplicate_output" | grep -q "build" \ + && echo "$duplicate_output" | grep -q "shared"; then + echo "${GREEN}✓ dela reports duplicate flattened Taskfile task names${NC}" +else + echo "${RED}✗ dela did not report duplicate flattened Taskfile task names${NC}" + echo "$duplicate_output" + exit 1 +fi + 
+cd /home/testuser/test_project + # Test 7: Basic dela list for Maven tasks echo "\nTest 7: Testing dela list for Maven tasks" if list_and_grep "clean" && list_and_grep "compile" && list_and_grep "profile:dev"; then @@ -289,6 +393,24 @@ else exit 1 fi +# Test 19b: Verify allow-command stores the defining workflow path +echo "\nTest 19b: Verifying GitHub Actions allowlist source attribution" +github_allow_stderr=$(mktemp) +if ! echo "2" | dela allow-command test-a >/dev/null 2>"$github_allow_stderr"; then + echo "${RED}✗ allow-command failed for GitHub Actions task${NC}" + cat "$github_allow_stderr" + rm -f "$github_allow_stderr" + exit 1 +fi +rm -f "$github_allow_stderr" +if grep -q "/home/testuser/test_project/.github/workflows/test.yml" /home/testuser/.config/dela/allowlist.toml; then + echo "${GREEN}✓ GitHub Actions allowlist entries use the defining workflow file${NC}" +else + echo "${RED}✗ GitHub Actions allowlist entry did not use the defining workflow file${NC}" + cat /home/testuser/.config/dela/allowlist.toml + exit 1 +fi + # Test 20: Test single argument passing with print-arg-task echo "\nTest 20: Testing single argument passing with print-arg-task" @@ -330,26 +452,57 @@ fi echo "${GREEN}✓ All get-command argument passing tests passed successfully${NC}" -# Test 21b: Discover turbo tasks from a nested package using the git repo root -echo "\nTest 21b: Testing Turborepo discovery from a nested package" +# Test 21b: Discover turbo tasks from workspace-local configs +echo "\nTest 21b: Testing Turborepo workspace-local config discovery" cd /home/testuser/turbo_repo git init >/dev/null 2>&1 + +dela list > turbo_root_list_output.txt 2> turbo_root_stderr.txt +if grep -q "web-lint" turbo_root_list_output.txt && grep -q "apps/web/turbo.json" turbo_root_list_output.txt; then + echo "${GREEN}✓ dela list shows workspace-local turbo tasks and source paths from the repo root${NC}" +else + echo "${RED}✗ dela list failed to show workspace-local turbo tasks from the repo 
root${NC}" + cat turbo_root_list_output.txt + exit 1 +fi + +if [ -s turbo_root_stderr.txt ]; then + echo "${RED}✗ dela emitted errors while listing turbo tasks from the repo root${NC}" + cat turbo_root_stderr.txt + exit 1 +fi + +echo "2" | dela allow-command web-lint >/dev/null 2>&1 +if grep -q "/home/testuser/turbo_repo/apps/web/turbo.json" /home/testuser/.config/dela/allowlist.toml; then + echo "${GREEN}✓ allow-command stores the workspace-local turbo.json path${NC}" +else + echo "${RED}✗ allow-command did not store the workspace-local turbo.json path${NC}" + cat /home/testuser/.config/dela/allowlist.toml + exit 1 +fi + cd /home/testuser/turbo_repo/apps/web -dela list 2>/dev/null > turbo_list_output.txt -if grep -q "build-t" turbo_list_output.txt && grep -q "test-t" turbo_list_output.txt; then - echo "${GREEN}✓ dela list shows turbo tasks from repo root when run in a nested package${NC}" +dela list > turbo_package_list_output.txt 2> turbo_package_stderr.txt +if grep -q "build-t" turbo_package_list_output.txt && grep -q "web-lint-t" turbo_package_list_output.txt && ! 
grep -q "test-t" turbo_package_list_output.txt; then + echo "${GREEN}✓ dela list resolves effective turbo tasks for a nested package${NC}" else - echo "${RED}✗ dela list failed to show turbo tasks from repo root${NC}" - cat turbo_list_output.txt + echo "${RED}✗ dela list did not resolve the expected nested-package turbo tasks${NC}" + cat turbo_package_list_output.txt + exit 1 +fi + +if [ -s turbo_package_stderr.txt ]; then + echo "${RED}✗ dela emitted errors while listing turbo tasks from a nested package${NC}" + cat turbo_package_stderr.txt exit 1 fi -output=$(dela get-command build-t 2>&1) -if echo "$output" | grep -q "turbo run build"; then - echo "${GREEN}✓ get-command returns the turbo command for a nested package task${NC}" +output=$(dela get-command web-lint-t 2>&1) +if echo "$output" | grep -q "turbo run web-lint"; then + echo "${GREEN}✓ get-command returns the turbo command for a workspace-local task${NC}" else - echo "${RED}✗ get-command failed for turbo task${NC}" + echo "${RED}✗ get-command failed for the workspace-local turbo task${NC}" echo "Full output: $output" exit 1 fi @@ -391,6 +544,75 @@ else exit 1 fi +# Test 22c: Verify recursive Makefile include discovery +echo "\nTest 22c: Testing recursive Makefile include discovery" +mkdir -p /home/testuser/make_include_project/mk +cat > /home/testuser/make_include_project/Makefile <<'EOF' +include mk/common.mk +include mk/missing-required.mk +-include mk/missing-optional.mk + +build: + @echo "Build from root" +EOF +cat > /home/testuser/make_include_project/mk/common.mk <<'EOF' +include nested.mk + +included-task: + @echo "Included task" +EOF +cat > /home/testuser/make_include_project/mk/nested.mk <<'EOF' +nested-task: + @echo "Nested task" +EOF + +cd /home/testuser/make_include_project +if ! 
dela list > make_include_list.txt 2> make_include_stderr.txt; then + echo "${RED}✗ dela list failed when a required included makefile was missing${NC}" + cat make_include_stderr.txt + exit 1 +fi + +if grep -q "included-task" make_include_list.txt && grep -q "nested-task" make_include_list.txt; then + echo "${GREEN}✓ dela list discovers tasks from recursively included makefiles${NC}" +else + echo "${RED}✗ dela list did not discover tasks from recursively included makefiles${NC}" + cat make_include_list.txt + exit 1 +fi + +if [ -s make_include_stderr.txt ]; then + echo "${RED}✗ dela emitted errors for missing included makefiles${NC}" + cat make_include_stderr.txt + exit 1 +fi + +if grep -q "mk/common.mk" make_include_list.txt && grep -q "mk/nested.mk" make_include_list.txt; then + echo "${GREEN}✓ dela list shows defining files for included make tasks${NC}" +else + echo "${RED}✗ dela list did not show defining files for included make tasks${NC}" + cat make_include_list.txt + exit 1 +fi + +output=$(dela get-command nested-task 2>&1) +if echo "$output" | grep -q "make nested-task"; then + echo "${GREEN}✓ get-command works for tasks from included makefiles${NC}" +else + echo "${RED}✗ get-command failed for task from included makefiles${NC}" + echo "Got: $output" + exit 1 +fi + +echo "3" | dela allow-command included-task >/dev/null 2>&1 +if grep -q "/home/testuser/make_include_project/mk/common.mk" /home/testuser/.config/dela/allowlist.toml; then + echo "${GREEN}✓ allow-command stores the defining included makefile path${NC}" +else + echo "${RED}✗ allow-command did not store the defining included makefile path${NC}" + cat /home/testuser/.config/dela/allowlist.toml + exit 1 +fi + cd /home/testuser/test_project # Test 23: Verify ambiguous task detection in dela list diff --git a/tests/task_definitions/turbo_repo/apps/web/package.json b/tests/task_definitions/turbo_repo/apps/web/package.json index 598a6e6..2332114 100644 --- 
a/tests/task_definitions/turbo_repo/apps/web/package.json +++ b/tests/task_definitions/turbo_repo/apps/web/package.json @@ -3,6 +3,7 @@ "private": true, "scripts": { "build": "echo building web", - "test": "echo testing web" + "test": "echo testing web", + "web-lint": "echo linting web" } } diff --git a/tests/task_definitions/turbo_repo/apps/web/turbo.json b/tests/task_definitions/turbo_repo/apps/web/turbo.json new file mode 100644 index 0000000..47b3967 --- /dev/null +++ b/tests/task_definitions/turbo_repo/apps/web/turbo.json @@ -0,0 +1,9 @@ +{ + "extends": ["//"], + "tasks": { + "web-lint": {}, + "test": { + "extends": false + } + } +}