From 0c3a3ee1ed83f8cbe9122cf379a3570bb76c0f18 Mon Sep 17 00:00:00 2001
From: Alex Kirszenberg
Date: Mon, 13 Feb 2023 13:07:34 +0100
Subject: [PATCH 01/31] Add support for ctx.params in
 getStaticProps/getServerSideProps (#3696)

Dynamic segments should be passed a single string, while catch-all
segments should be passed a list of strings.

Furthermore, ctx.params was previously undefined because we weren't
passing it forward through the render options.
---
 .../js/src/internal/api-server-handler.ts     |  2 +-
 .../js/src/internal/page-server-handler.tsx   |  1 +
 crates/next-core/js/types/turbopack.d.ts      |  2 +-
 .../src/next_route_matcher/path_regex.rs      | 51 +++++++++++++++----
 .../input/pages/[...segments].tsx             | 26 ++++++++++
 .../catch-all-params/input/pages/index.tsx    |  7 +++
 .../dynamic-params/input/pages/[segment].tsx  | 26 ++++++++++
 .../dynamic-params/input/pages/index.tsx      |  7 +++
 crates/turbopack-node/src/render/mod.rs       |  4 +-
 crates/turbopack-node/src/route_matcher.rs    | 11 +++-
 10 files changed, 123 insertions(+), 14 deletions(-)
 create mode 100644 crates/next-dev-tests/tests/integration/next/router/catch-all-params/input/pages/[...segments].tsx
 create mode 100644 crates/next-dev-tests/tests/integration/next/router/catch-all-params/input/pages/index.tsx
 create mode 100644 crates/next-dev-tests/tests/integration/next/router/dynamic-params/input/pages/[segment].tsx
 create mode 100644 crates/next-dev-tests/tests/integration/next/router/dynamic-params/input/pages/index.tsx

diff --git a/crates/next-core/js/src/internal/api-server-handler.ts b/crates/next-core/js/src/internal/api-server-handler.ts
index d6bd2049dc5c1..0da2c28c52ade 100644
--- a/crates/next-core/js/src/internal/api-server-handler.ts
+++ b/crates/next-core/js/src/internal/api-server-handler.ts
@@ -41,7 +41,7 @@ type Handler = (data: {
   request: IncomingMessage;
   response: ServerResponse;
   query: string;
-  params: Record<string, string>;
+  params: Record<string, string | string[]>;
   path: string;
 }) => Promise<void>;

diff --git a/crates/next-core/js/src/internal/page-server-handler.tsx b/crates/next-core/js/src/internal/page-server-handler.tsx
index fa7332d73ce0b..0c55e6598dc8c 100644
--- a/crates/next-core/js/src/internal/page-server-handler.tsx
+++ b/crates/next-core/js/src/internal/page-server-handler.tsx
@@ -160,6 +160,7 @@ export default function startHandler({
         previewModeEncryptionKey: "",
         previewModeSigningKey: "",
       },
+      params: renderData.params,
       basePath: "",
       // TODO(WEB-583) this isn't correct, instead it should set `dev: true`
       nextExport: true,

diff --git a/crates/next-core/js/types/turbopack.d.ts b/crates/next-core/js/types/turbopack.d.ts
index 927c3e1fcb69c..75ad857ff661c 100644
--- a/crates/next-core/js/types/turbopack.d.ts
+++ b/crates/next-core/js/types/turbopack.d.ts
@@ -1,7 +1,7 @@
 import { NextParsedUrlQuery } from "next/dist/server/request-meta";

 export type RenderData = {
-  params: Record<string, string>;
+  params: Record<string, string | string[]>;
   method: string;
   url: string;
   path: string;

diff --git a/crates/next-core/src/next_route_matcher/path_regex.rs b/crates/next-core/src/next_route_matcher/path_regex.rs
index fb563569a7f25..b1f90d856a47f 100644
--- a/crates/next-core/src/next_route_matcher/path_regex.rs
+++ b/crates/next-core/src/next_route_matcher/path_regex.rs
@@ -1,14 +1,26 @@
 use anyhow::{Context, Result};
 use serde::{Deserialize, Serialize};
 use turbo_tasks::primitives::{BoolVc, Regex};
-use turbopack_node::route_matcher::{ParamsVc, RouteMatcher};
+use turbopack_node::route_matcher::{Param, ParamsVc, RouteMatcher};

 /// A regular expression that matches a path, with named capture groups for the
 /// dynamic parts of the path.
 #[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]
 pub struct PathRegex {
     regex: Regex,
-    named_params: Vec<String>,
+    named_params: Vec<NamedParam>,
+}
+
+#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]
+struct NamedParam {
+    name: String,
+    kind: NamedParamKind,
+}
+
+#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]
+enum NamedParamKind {
+    Single,
+    Multi,
 }

 impl std::fmt::Display for PathRegex {
@@ -27,12 +39,24 @@ impl RouteMatcher for PathRegex {
         self.named_params
             .iter()
             .enumerate()
-            .filter_map(|(idx, name)| {
-                if name.is_empty() {
+            .filter_map(|(idx, param)| {
+                if param.name.is_empty() {
                     return None;
                 }
                 let value = capture.get(idx + 1)?;
-                Some((name.to_string(), value.as_str().to_string()))
+                Some((
+                    param.name.to_string(),
+                    match param.kind {
+                        NamedParamKind::Single => Param::Single(value.as_str().to_string()),
+                        NamedParamKind::Multi => Param::Multi(
+                            value
+                                .as_str()
+                                .split("/")
+                                .map(|segment| segment.to_string())
+                                .collect(),
+                        ),
+                    },
+                ))
             })
             .collect()
     }))
@@ -42,7 +66,7 @@ impl RouteMatcher for PathRegex {
 /// Builder for [PathRegex].
 pub struct PathRegexBuilder {
     regex_str: String,
-    named_params: Vec<String>,
+    named_params: Vec<NamedParam>,
 }

 impl PathRegexBuilder {
@@ -74,7 +98,10 @@ impl PathRegexBuilder {
             "([^?]+)?"
         });
         self.push_str(&regex::escape(rem.as_ref()));
-        self.named_params.push(name.into());
+        self.named_params.push(NamedParam {
+            name: name.into(),
+            kind: NamedParamKind::Multi,
+        });
     }

     /// Pushes a catch all segment to the regex.
@@ -88,7 +115,10 @@ impl PathRegexBuilder {
         }
         self.push_str("([^?]+)");
         self.push_str(&regex::escape(rem.as_ref()));
-        self.named_params.push(name.into());
+        self.named_params.push(NamedParam {
+            name: name.into(),
+            kind: NamedParamKind::Multi,
+        });
     }

     /// Pushes a dynamic segment to the regex.
@@ -102,7 +132,10 @@ impl PathRegexBuilder {
         }
         self.push_str("([^?/]+)");
         self.push_str(&regex::escape(rem.as_ref()));
-        self.named_params.push(name.into());
+        self.named_params.push(NamedParam {
+            name: name.into(),
+            kind: NamedParamKind::Single,
+        });
     }

     /// Pushes a static segment to the regex.

diff --git a/crates/next-dev-tests/tests/integration/next/router/catch-all-params/input/pages/[...segments].tsx b/crates/next-dev-tests/tests/integration/next/router/catch-all-params/input/pages/[...segments].tsx
new file mode 100644
index 0000000000000..6b98ac597574a
--- /dev/null
+++ b/crates/next-dev-tests/tests/integration/next/router/catch-all-params/input/pages/[...segments].tsx
@@ -0,0 +1,26 @@
+import { useEffect } from "react";
+
+export default function Home({ params }: { params: any }) {
+  useEffect(() => {
+    // Only run on client
+    import("@turbo/pack-test-harness").then(() => runTests(params));
+  });
+
+  return <div>Test</div>;
+}
+
+export function getServerSideProps(ctx: { params: any }) {
+  return {
+    props: {
+      params: ctx.params,
+    },
+  };
+}
+
+function runTests(params: any) {
+  describe("catch-all segments", () => {
+    it("should be passed a param array", () => {
+      expect(params.segments).toEqual(["first", "second"]);
+    });
+  });
+}

diff --git a/crates/next-dev-tests/tests/integration/next/router/catch-all-params/input/pages/index.tsx b/crates/next-dev-tests/tests/integration/next/router/catch-all-params/input/pages/index.tsx
new file mode 100644
index 0000000000000..0cdeab7f231a9
--- /dev/null
+++ b/crates/next-dev-tests/tests/integration/next/router/catch-all-params/input/pages/index.tsx
@@ -0,0 +1,7 @@
+export function getServerSideProps() {
+  return {
+    redirect: {
+      destination: "/first/second",
+    },
+  };
+}

diff --git a/crates/next-dev-tests/tests/integration/next/router/dynamic-params/input/pages/[segment].tsx b/crates/next-dev-tests/tests/integration/next/router/dynamic-params/input/pages/[segment].tsx
new file mode 100644
index 0000000000000..af47d64aeaede
--- /dev/null
+++ b/crates/next-dev-tests/tests/integration/next/router/dynamic-params/input/pages/[segment].tsx
@@ -0,0 +1,26 @@
+import { useEffect } from "react";
+
+export default function Home({ params }: { params: any }) {
+  useEffect(() => {
+    // Only run on client
+    import("@turbo/pack-test-harness").then(() => runTests(params));
+  });
+
+  return <div>Test</div>;
+}
+
+export function getServerSideProps(ctx: { params: any }) {
+  return {
+    props: {
+      params: ctx.params,
+    },
+  };
+}
+
+function runTests(params: any) {
+  describe("dynamic segments", () => {
+    it("should be passed a param string", () => {
+      expect(params.segment).toEqual("dynamic-segment");
+    });
+  });
+}

diff --git a/crates/next-dev-tests/tests/integration/next/router/dynamic-params/input/pages/index.tsx b/crates/next-dev-tests/tests/integration/next/router/dynamic-params/input/pages/index.tsx
new file mode 100644
index 0000000000000..33f3f1e179f28
--- /dev/null
+++ b/crates/next-dev-tests/tests/integration/next/router/dynamic-params/input/pages/index.tsx
@@ -0,0 +1,7 @@
+export function getServerSideProps() {
+  return {
+    redirect: {
+      destination: "/dynamic-segment",
+    },
+  };
+}

diff --git a/crates/turbopack-node/src/render/mod.rs b/crates/turbopack-node/src/render/mod.rs
index 0aa110efd6072..3d7c56aadabe8 100644
--- a/crates/turbopack-node/src/render/mod.rs
+++ b/crates/turbopack-node/src/render/mod.rs
@@ -1,7 +1,7 @@
 use indexmap::IndexMap;
 use serde::{Deserialize, Serialize};

-use crate::{ResponseHeaders, StructuredError};
+use crate::{route_matcher::Param, ResponseHeaders, StructuredError};

 pub mod issue;
 pub mod node_api_source;
@@ -12,7 +12,7 @@ pub mod rendered_source;
 #[turbo_tasks::value(shared)]
 #[serde(rename_all = "camelCase")]
 pub struct RenderData {
-    params: IndexMap<String, String>,
+    params: IndexMap<String, Param>,
     method: String,
     url: String,
     raw_query: String,

diff --git a/crates/turbopack-node/src/route_matcher.rs b/crates/turbopack-node/src/route_matcher.rs
index 123776cb76251..8edf2bc564db0 100644
--- a/crates/turbopack-node/src/route_matcher.rs
+++ b/crates/turbopack-node/src/route_matcher.rs
@@ -2,7 +2,16 @@ use indexmap::IndexMap;
 use turbo_tasks::primitives::BoolVc;

 #[turbo_tasks::value(transparent)]
-pub struct Params(Option<IndexMap<String, String>>);
+#[derive(Debug, Clone)]
+#[serde(untagged)]
+pub enum Param {
+    Single(String),
+    Multi(Vec<String>),
+}
+
+#[turbo_tasks::value(transparent)]
+#[derive(Debug, Clone)]
+pub struct Params(Option<IndexMap<String, Param>>);

 /// Extracts parameters from a URL path.
 #[turbo_tasks::value_trait]

From ebc49f6c1bfaa2350c26cc290f43a9d8e45efff2 Mon Sep 17 00:00:00 2001
From: Alex Kirszenberg
Date: Mon, 13 Feb 2023 15:14:12 +0100
Subject: [PATCH 02/31] Implement FromSubTrait for ValueDebug (#3769)

This is necessary in order to pass in a `SomeTraitVc` to a function
which expects `T: ValueDebug`.
---
 crates/turbo-tasks-macros/src/value_trait_macro.rs | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/crates/turbo-tasks-macros/src/value_trait_macro.rs b/crates/turbo-tasks-macros/src/value_trait_macro.rs
index 1063b831c66d4..0ebd9ff12422f 100644
--- a/crates/turbo-tasks-macros/src/value_trait_macro.rs
+++ b/crates/turbo-tasks-macros/src/value_trait_macro.rs
@@ -162,6 +162,12 @@ pub fn value_trait(args: TokenStream, input: TokenStream) -> TokenStream {
                     self.value_debug_format(depth).try_to_value_debug_string().await
                 }
             }
+
+            impl turbo_tasks::FromSubTrait<#ref_ident> for turbo_tasks::debug::ValueDebugVc {
+                fn from_sub_trait(node_ref: #ref_ident) -> Self {
+                    node_ref.node.into()
+                }
+            }
         }
     } else {
         quote! {}

From dae3e2155dcc5dc8e240825da4fd091296486b18 Mon Sep 17 00:00:00 2001
From: Will Binns-Smith
Date: Mon, 13 Feb 2023 08:07:19 -0800
Subject: [PATCH 03/31] Issue Reporters (#3707)

Big thanks to @jridgewell for helping me out with a number of Rust-isms
with this change.
This expands `handle_issues` and the `NextDevServerBuilder` to accept an
arbitrary `IssueReporter` -- a trait implementing `report_issues` which
receives captured issues to send somewhere. This replaces using a fixed
`ConsoleUi` to send issues to stdout/stderr, though `ConsoleUi` now
implements `IssueReporter` and is the default implementation of an issue
reporter if no other is provided.

It also moves the responsibility of detecting fatal errors out of
`ConsoleUi` and into `handle_issues` itself.

This lays the foundation for alternative reporters, such as a test
reporter to snapshot or assert against issues emitted, or a
newline-delimited JSON reporter for other tools to consume.

Co-authored-by: Justin Ridgewell

---------

Co-authored-by: Justin Ridgewell
---
 Cargo.lock                                    |  1 +
 crates/next-dev/src/lib.rs                    | 85 +++++++++++++------
 crates/node-file-trace/src/lib.rs             | 27 +++---
 crates/turbopack-cli-utils/src/issue.rs       | 61 ++++++-------
 crates/turbopack-core/src/issue/mod.rs        | 27 +++++-
 crates/turbopack-dev-server/Cargo.toml        |  1 +
 crates/turbopack-dev-server/src/http.rs       | 11 ++-
 crates/turbopack-dev-server/src/lib.rs        | 45 +++++-----
 .../src/source/resolve.rs                     |  6 +-
 .../turbopack-dev-server/src/update/server.rs | 11 ++-
 10 files changed, 168 insertions(+), 107 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 15aecbe5800b6..1261c0fc29ad3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -7778,6 +7778,7 @@ dependencies = [
  "indexmap",
  "mime",
  "mime_guess",
+ "once_cell",
  "parking_lot",
  "pin-project-lite",
  "serde",

diff --git a/crates/next-dev/src/lib.rs b/crates/next-dev/src/lib.rs
index c8efb922eca65..7095dc54da096 100644
--- a/crates/next-dev/src/lib.rs
+++ b/crates/next-dev/src/lib.rs
@@ -27,14 +27,15 @@ use owo_colors::OwoColorize;
 use turbo_malloc::TurboMalloc;
 use turbo_tasks::{
     util::{FormatBytes, FormatDuration},
-    RawVc, StatsType, TransientInstance, TransientValue, TurboTasks, TurboTasksBackendApi, Value,
+    CollectiblesSource, RawVc, StatsType, TransientInstance, TransientValue, TurboTasks,
+    TurboTasksBackendApi, Value,
 };
 use turbo_tasks_fs::{DiskFileSystemVc, FileSystem, FileSystemVc};
 use turbo_tasks_memory::MemoryBackend;
-use turbopack_cli_utils::issue::{ConsoleUi, ConsoleUiVc, LogOptions};
+use turbopack_cli_utils::issue::{ConsoleUiVc, LogOptions};
 use turbopack_core::{
     environment::ServerAddr,
-    issue::IssueSeverity,
+    issue::{IssueReporter, IssueReporterVc, IssueSeverity, IssueVc},
     resolve::{parse::RequestVc, pattern::QueryMapVc},
     server_fs::ServerFileSystemVc,
 };
@@ -62,6 +63,7 @@ pub struct NextDevServerBuilder {
     entry_requests: Vec<EntryRequest>,
     eager_compile: bool,
     hostname: Option<IpAddr>,
+    issue_reporter: Option<Box<dyn IssueReporterProvider>>,
     port: Option<u16>,
     browserslist_query: String,
     log_level: IssueSeverity,
@@ -83,6 +85,7 @@ impl NextDevServerBuilder {
             entry_requests: vec![],
             eager_compile: false,
             hostname: None,
+            issue_reporter: None,
             port: None,
             browserslist_query: "last 1 Chrome versions, last 1 Firefox versions, last 1 Safari \
                                  versions, last 1 Edge versions"
@@ -139,6 +142,14 @@ impl NextDevServerBuilder {
         self
     }

+    pub fn issue_reporter(
+        mut self,
+        issue_reporter: Box<dyn IssueReporterProvider>,
+    ) -> NextDevServerBuilder {
+        self.issue_reporter = Some(issue_reporter);
+        self
+    }
+
     /// Attempts to find an open port to bind.
     fn find_port(&self, host: IpAddr, port: u16, max_attempts: u16) -> Result<u16> {
         // max_attempts of 1 means we loop 0 times.
@@ -192,17 +203,22 @@ impl NextDevServerBuilder {
         let show_all = self.show_all;
         let log_detail = self.log_detail;
         let browserslist_query = self.browserslist_query;
-        let log_options = LogOptions {
+        let log_options = Arc::new(LogOptions {
             current_dir: current_dir().unwrap(),
             show_all,
             log_detail,
             log_level: self.log_level,
-        };
+        });
         let entry_requests = Arc::new(self.entry_requests);
-        let console_ui = Arc::new(ConsoleUi::new(log_options));
-        let console_ui_to_dev_server = console_ui.clone();
         let server_addr = Arc::new(server.addr);
         let tasks = turbo_tasks.clone();
+        let issue_provider = self.issue_reporter.unwrap_or_else(|| {
+            // Initialize a ConsoleUi reporter if no custom reporter was provided
+            Box::new(move || ConsoleUiVc::new(log_options.clone().into()).into())
+        });
+        let issue_reporter_arc = Arc::new(move || issue_provider.get_issue_reporter());
+
+        let get_issue_reporter = issue_reporter_arc.clone();
         let source = move || {
             source(
                 root_dir.clone(),
@@ -210,22 +226,31 @@ impl NextDevServerBuilder {
                 entry_requests.clone().into(),
                 eager_compile,
                 turbo_tasks.clone().into(),
-                console_ui.clone().into(),
+                get_issue_reporter(),
                 browserslist_query.clone(),
                 server_addr.clone().into(),
             )
         };

-        Ok(server.serve(tasks, source, console_ui_to_dev_server))
+        Ok(server.serve(tasks, source, issue_reporter_arc.clone()))
     }
 }

-async fn handle_issues<T: Into<RawVc>>(source: T, console_ui: ConsoleUiVc) -> Result<()> {
-    let state = console_ui
-        .group_and_display_issues(TransientValue::new(source.into()))
+async fn handle_issues<T: Into<RawVc> + CollectiblesSource + Copy>(
+    source: T,
+    issue_reporter: IssueReporterVc,
+) -> Result<()> {
+    let issues = IssueVc::peek_issues_with_path(source)
+        .await?
+        .strongly_consistent()
         .await?;
-    if state.has_fatal {
+    issue_reporter.report_issues(
+        TransientInstance::new(issues.clone()),
+        TransientValue::new(source.into()),
+    );
+
+    if issues.has_fatal().await? {
         Err(anyhow!("Fatal issue(s) occurred"))
     } else {
         Ok(())
     }
 }

 #[turbo_tasks::function]
-async fn project_fs(project_dir: &str, console_ui: ConsoleUiVc) -> Result<FileSystemVc> {
+async fn project_fs(project_dir: &str, issue_reporter: IssueReporterVc) -> Result<FileSystemVc> {
     let disk_fs = DiskFileSystemVc::new("project".to_string(), project_dir.to_string());
-    handle_issues(disk_fs, console_ui).await?;
+    handle_issues(disk_fs, issue_reporter).await?;
     disk_fs.await?.start_watching()?;
     Ok(disk_fs.into())
 }

 #[turbo_tasks::function]
-async fn output_fs(project_dir: &str, console_ui: ConsoleUiVc) -> Result<FileSystemVc> {
+async fn output_fs(project_dir: &str, issue_reporter: IssueReporterVc) -> Result<FileSystemVc> {
     let disk_fs = DiskFileSystemVc::new("output".to_string(), project_dir.to_string());
-    handle_issues(disk_fs, console_ui).await?;
+    handle_issues(disk_fs, issue_reporter).await?;
     disk_fs.await?.start_watching()?;
     Ok(disk_fs.into())
 }
@@ -256,13 +281,12 @@ async fn source(
     entry_requests: TransientInstance<Vec<EntryRequest>>,
     eager_compile: bool,
     turbo_tasks: TransientInstance<TurboTasks<MemoryBackend>>,
-    console_ui: TransientInstance<ConsoleUi>,
+    issue_reporter: IssueReporterVc,
     browserslist_query: String,
     server_addr: TransientInstance<SocketAddr>,
 ) -> Result<ContentSourceVc> {
-    let console_ui = (*console_ui).clone().cell();
-    let output_fs = output_fs(&project_dir, console_ui);
-    let fs = project_fs(&root_dir, console_ui);
+    let output_fs = output_fs(&project_dir, issue_reporter);
+    let fs = project_fs(&root_dir, issue_reporter);
     let project_relative = project_dir.strip_prefix(&root_dir).unwrap();
     let project_relative = project_relative
         .strip_prefix(MAIN_SEPARATOR)
@@ -372,9 +396,9 @@ async fn source(
         .cell()
         .into();

-    handle_issues(dev_server_fs, console_ui).await?;
-    handle_issues(web_source, console_ui).await?;
-    handle_issues(page_source, console_ui).await?;
+    handle_issues(dev_server_fs, issue_reporter).await?;
+    handle_issues(web_source, issue_reporter).await?;
+    handle_issues(page_source, issue_reporter).await?;

     Ok(source)
 }
@@ -551,3 +575,16 @@ fn profile_timeout(
 ) -> impl Future<Output = T> {
     future
 }
+
+pub trait IssueReporterProvider: Send + Sync + 'static {
+    fn get_issue_reporter(&self) -> IssueReporterVc;
+}
+
+impl<T> IssueReporterProvider for T
+where
+    T: Fn() -> IssueReporterVc + Send + Sync + Clone + 'static,
+{
+    fn get_issue_reporter(&self) -> IssueReporterVc {
+        self()
+    }
+}

diff --git a/crates/node-file-trace/src/lib.rs b/crates/node-file-trace/src/lib.rs
index 1dcf76412009c..b1873526afea8 100644
--- a/crates/node-file-trace/src/lib.rs
+++ b/crates/node-file-trace/src/lib.rs
@@ -37,12 +37,12 @@ use turbopack::{
     resolve_options_context::ResolveOptionsContext, transition::TransitionsByNameVc,
     ModuleAssetContextVc,
 };
-use turbopack_cli_utils::issue::{ConsoleUi, IssueSeverityCliOption, LogOptions};
+use turbopack_cli_utils::issue::{ConsoleUiVc, IssueSeverityCliOption, LogOptions};
 use turbopack_core::{
     asset::{Asset, AssetVc, AssetsVc},
     context::{AssetContext, AssetContextVc},
     environment::{EnvironmentIntention, EnvironmentVc, ExecutionEnvironment, NodeJsEnvironment},
-    issue::{IssueSeverity, IssueVc},
+    issue::{IssueReporter, IssueSeverity, IssueVc},
     reference::all_assets,
     resolve::options::{ImportMapping, ResolvedMap},
     source_asset::SourceAssetVc,
@@ -487,24 +487,27 @@ async fn run>(
     let (sender, mut receiver) = channel(1);
     let dir = current_dir().unwrap();
     let tt = create_tt();
-    let console_ui = Arc::new(ConsoleUi::new(LogOptions {
-        current_dir: dir.clone(),
-        show_all,
-        log_detail,
-        log_level: log_level.map_or_else(|| IssueSeverity::Error, |l| l.0),
-    }));
     let task = tt.spawn_root_task(move || {
+        let console_ui = ConsoleUiVc::new(TransientInstance::new(LogOptions {
+            current_dir: dir.clone(),
+            show_all,
+            log_detail,
+            log_level: log_level.map_or_else(|| IssueSeverity::Error, |l| l.0),
+        }));
         let dir = dir.clone();
         let args = args.clone();
-        let console_ui = console_ui.clone();
         let sender = sender.clone();
         Box::pin(async move {
             let output = main_operation(TransientValue::new(dir.clone()), args.clone().into());

-            let console_ui = (*console_ui).clone().cell();
-            console_ui
-                .group_and_display_issues(TransientValue::new(output.into()))
+            let source = TransientValue::new(output.into());
+            let issues = IssueVc::peek_issues_with_path(output)
+                .await?
+                .strongly_consistent()
                 .await?;
+            console_ui
+                .as_issue_reporter()
+                .report_issues(TransientInstance::new(issues), source);

             if has_return_value {
                 let output_read_ref = output.await?;

diff --git a/crates/turbopack-cli-utils/src/issue.rs b/crates/turbopack-cli-utils/src/issue.rs
index 7f9303dc2df73..52f8a6bd4f310 100644
--- a/crates/turbopack-cli-utils/src/issue.rs
+++ b/crates/turbopack-cli-utils/src/issue.rs
@@ -10,15 +10,17 @@ use std::{
 use anyhow::{anyhow, Result};
 use crossterm::style::{StyledContent, Stylize};
 use owo_colors::{OwoColorize as _, Style};
-use turbo_tasks::{RawVc, TransientValue, TryJoinIterExt, ValueToString};
+use turbo_tasks::{
+    RawVc, ReadRef, TransientInstance, TransientValue, TryJoinIterExt, ValueToString,
+};
 use turbo_tasks_fs::{
     attach::AttachedFileSystemVc,
     source_context::{get_source_context, SourceContextLine},
     to_sys_path, FileLinesContent, FileSystemPathVc,
 };
 use turbopack_core::issue::{
-    Issue, IssueProcessingPathItem, IssueSeverity, IssueVc, OptionIssueProcessingPathItemsVc,
-    PlainIssue, PlainIssueSource,
+    CapturedIssues, Issue, IssueProcessingPathItem, IssueReporter, IssueReporterVc, IssueSeverity,
+    OptionIssueProcessingPathItemsVc, PlainIssue, PlainIssueSource,
 };

 #[derive(Clone, Copy, PartialEq, Eq, Debug)]
@@ -412,41 +414,34 @@ impl PartialEq for ConsoleUi {
     }
 }

-impl ConsoleUi {
-    pub fn new(options: LogOptions) -> Self {
+#[turbo_tasks::value_impl]
+impl ConsoleUiVc {
+    #[turbo_tasks::function]
+    pub fn new(options: TransientInstance<LogOptions>) -> Self {
         ConsoleUi {
-            options,
+            options: (*options).clone(),
             seen: Arc::new(Mutex::new(SeenIssues::new())),
         }
+        .cell()
     }
 }

-#[turbo_tasks::value(transparent)]
-pub struct DisplayIssueState {
-    pub has_fatal: bool,
-    pub has_issues: bool,
-    pub has_new_issues: bool,
-}
-
 #[turbo_tasks::value_impl]
-impl ConsoleUiVc {
+impl IssueReporter for ConsoleUi {
     #[turbo_tasks::function]
-    pub async fn group_and_display_issues(
-        self,
+    async fn report_issues(
+        &self,
+        issues: TransientInstance<ReadRef<CapturedIssues>>,
         source: TransientValue<RawVc>,
-    ) -> Result<DisplayIssueStateVc> {
-        let source = source.into_value();
-        let this = self.await?;
-
-        let issues = IssueVc::peek_issues_with_path(source).await?;
-        let issues = issues.await?;
-        let &LogOptions {
+    ) -> Result<()> {
+        let issues = &*issues;
+        let LogOptions {
             ref current_dir,
             show_all,
             log_detail,
             log_level,
             ..
-        } = &this.options;
+        } = self.options;
         let mut grouped_issues: GroupedIssues = HashMap::new();

         let issues = issues
@@ -464,11 +459,11 @@ impl ConsoleUiVc {
             .iter()
             .map(|(_, _, _, id)| *id)
             .collect::<HashSet<_>>();
-        let mut new_ids = this.seen.lock().unwrap().new_ids(source, issue_ids);
-
-        let mut has_fatal = false;
-        let has_issues = !issues.is_empty();
-        let has_new_issues = !new_ids.is_empty();
+        let mut new_ids = self
+            .seen
+            .lock()
+            .unwrap()
+            .new_ids(source.into_value(), issue_ids);

         for (plain_issue, path, context, id) in issues {
             if !new_ids.remove(&id) {
@@ -479,7 +474,6 @@ impl ConsoleUiVc {
             let context_path = make_relative_to_cwd(context, current_dir).await?;
             let category = &plain_issue.category;
             let title = &plain_issue.title;
-            has_fatal = severity == IssueSeverity::Fatal;
             let severity_map = grouped_issues
                 .entry(severity)
                 .or_insert_with(Default::default);
@@ -612,12 +606,7 @@ impl ConsoleUiVc {
             }
         }

-        Ok(DisplayIssueState {
-            has_fatal,
-            has_issues,
-            has_new_issues,
-        }
-        .cell())
+        Ok(())
     }
 }

diff --git a/crates/turbopack-core/src/issue/mod.rs b/crates/turbopack-core/src/issue/mod.rs
index 6af1c8407171c..1ab17a3d38046 100644
--- a/crates/turbopack-core/src/issue/mod.rs
+++ b/crates/turbopack-core/src/issue/mod.rs
@@ -17,7 +17,8 @@ use futures::FutureExt;
 use turbo_tasks::{
     emit,
     primitives::{BoolVc, StringVc, U64Vc},
-    CollectiblesSource, ReadRef, TryJoinIterExt, ValueToString, ValueToStringVc,
+    CollectiblesSource, RawVc, ReadRef, TransientInstance, TransientValue, TryJoinIterExt,
+    ValueToString, ValueToStringVc,
 };
 use turbo_tasks_fs::{
     FileContent, FileContentReadRef, FileLine, FileLinesContent, FileSystemPathReadRef,
@@ -340,6 +341,21 @@ pub struct CapturedIssues {
     processing_path: ItemIssueProcessingPathVc,
 }

+impl CapturedIssues {
+    pub async fn has_fatal(&self) -> Result<bool> {
+        let mut has_fatal = false;
+
+        for issue in self.issues.iter() {
+            let severity = *issue.severity().await?;
+            if severity == IssueSeverity::Fatal {
+                has_fatal = true;
+                break;
+            }
+        }
+        Ok(has_fatal)
+    }
+}
+
 #[turbo_tasks::value_impl]
 impl CapturedIssuesVc {
     #[turbo_tasks::function]
@@ -568,3 +584,12 @@ impl PlainAssetVc {
             .cell())
     }
 }
+
+#[turbo_tasks::value_trait]
+pub trait IssueReporter {
+    fn report_issues(
+        &self,
+        issues: TransientInstance<ReadRef<CapturedIssues>>,
+        source: TransientValue<RawVc>,
+    );
+}

diff --git a/crates/turbopack-dev-server/Cargo.toml b/crates/turbopack-dev-server/Cargo.toml
index ab19c1e9623c7..28959cea404ea 100644
--- a/crates/turbopack-dev-server/Cargo.toml
+++ b/crates/turbopack-dev-server/Cargo.toml
@@ -17,6 +17,7 @@ hyper-tungstenite = "0.8.1"
 indexmap = { workspace = true, features = ["serde"] }
 mime = "0.3.16"
 mime_guess = "2.0.4"
+once_cell = "1.13.0"
 parking_lot = "0.12.1"
 pin-project-lite = "0.2.9"
 serde = "1.0.136"

diff --git a/crates/turbopack-dev-server/src/http.rs b/crates/turbopack-dev-server/src/http.rs
index 695e40d050fae..91b1851d28cd3 100644
--- a/crates/turbopack-dev-server/src/http.rs
+++ b/crates/turbopack-dev-server/src/http.rs
@@ -4,8 +4,7 @@ use hyper::{header::HeaderName, Request, Response};
 use mime_guess::mime;
 use turbo_tasks::TransientInstance;
 use turbo_tasks_fs::{FileContent, FileContentReadRef};
-use turbopack_cli_utils::issue::ConsoleUiVc;
-use turbopack_core::{asset::AssetContent, version::VersionedContent};
+use turbopack_core::{asset::AssetContent, issue::IssueReporterVc, version::VersionedContent};

 use crate::source::{
     request::SourceRequest,
@@ -30,10 +29,10 @@ enum GetFromSourceResult {
 async fn get_from_source(
     source: ContentSourceVc,
     request:
 TransientInstance<SourceRequest>,
-    console_ui: ConsoleUiVc,
+    issue_reporter: IssueReporterVc,
 ) -> Result<GetFromSourceResultVc> {
     Ok(
-        match &*resolve_source_request(source, request, console_ui).await? {
+        match &*resolve_source_request(source, request, issue_reporter).await? {
             ResolveSourceRequestResult::Static(static_content_vc) => {
                 let static_content = static_content_vc.await?;
                 if let AssetContent::File(file) = &*static_content.content.content().await? {
@@ -60,11 +59,11 @@ async fn get_from_source(
 pub async fn process_request_with_content_source(
     source: ContentSourceVc,
     request: Request<hyper::Body>,
-    console_ui: ConsoleUiVc,
+    issue_reporter: IssueReporterVc,
 ) -> Result<Response<hyper::Body>> {
     let original_path = request.uri().path().to_string();
     let request = http_request_to_source_request(request).await?;
-    let result = get_from_source(source, TransientInstance::new(request), console_ui);
+    let result = get_from_source(source, TransientInstance::new(request), issue_reporter);
     match &*result.strongly_consistent().await? {
         GetFromSourceResult::Static {
             content,

diff --git a/crates/turbopack-dev-server/src/lib.rs b/crates/turbopack-dev-server/src/lib.rs
index edf9a4d1c0c07..9851b65ff5682 100644
--- a/crates/turbopack-dev-server/src/lib.rs
+++ b/crates/turbopack-dev-server/src/lib.rs
@@ -16,16 +16,17 @@ use std::{
     time::{Duration, Instant},
 };

-use anyhow::{bail, Context, Result};
+use anyhow::{anyhow, Context, Result};
 use hyper::{
     server::{conn::AddrIncoming, Builder},
     service::{make_service_fn, service_fn},
     Request, Response, Server,
 };
 use turbo_tasks::{
-    run_once, trace::TraceRawVcs, util::FormatDuration, RawVc, TransientValue, TurboTasksApi,
+    run_once, trace::TraceRawVcs, util::FormatDuration, CollectiblesSource, RawVc,
+    TransientInstance, TransientValue, TurboTasksApi,
 };
-use turbopack_cli_utils::issue::{ConsoleUi, ConsoleUiVc};
+use turbopack_core::issue::{IssueReporter, IssueReporterVc, IssueVc};

 use self::{
     source::{ContentSourceResultVc, ContentSourceVc},
@@ -66,21 +67,27 @@ pub struct DevServer {
     pub future: Pin<Box<dyn Future<Output = Result<()>> + Send + 'static>>,
 }

-// Just print issues to console for now...
-async fn handle_issues<T: Into<RawVc>>(
+async fn handle_issues<T: Into<RawVc> + CollectiblesSource + Copy>(
     source: T,
     path: &str,
     operation: &str,
-    console_ui: ConsoleUiVc,
+    issue_reporter: IssueReporterVc,
 ) -> Result<()> {
-    let state = console_ui
-        .group_and_display_issues(TransientValue::new(source.into()))
+    let issues = IssueVc::peek_issues_with_path(source)
+        .await?
+        .strongly_consistent()
         .await?;

-    if state.has_fatal {
-        bail!("Fatal issue(s) occurred in {path} ({operation}")
-    }
-    Ok(())
+    issue_reporter.report_issues(
+        TransientInstance::new(issues.clone()),
+        TransientValue::new(source.into()),
+    );
+
+    if issues.has_fatal().await? {
+        Err(anyhow!("Fatal issue(s) occurred in {path} ({operation})"))
+    } else {
+        Ok(())
+    }
 }

 impl DevServer {
@@ -106,21 +113,21 @@ impl DevServerBuilder {
         self,
         turbo_tasks: Arc<dyn TurboTasksApi>,
         source_provider: impl SourceProvider + Clone + Send + Sync,
-        console_ui: Arc<ConsoleUi>,
+        get_issue_reporter: Arc<dyn Fn() -> IssueReporterVc + Send + Sync>,
     ) -> DevServer {
         let make_svc = make_service_fn(move |_| {
             let tt = turbo_tasks.clone();
             let source_provider = source_provider.clone();
-            let console_ui = console_ui.clone();
+            let get_issue_reporter = get_issue_reporter.clone();
             async move {
                 let handler = move |request: Request<hyper::Body>| {
-                    let console_ui = console_ui.clone();
                     let start = Instant::now();
                     let tt = tt.clone();
+                    let get_issue_reporter = get_issue_reporter.clone();
                     let source_provider = source_provider.clone();
                     let future = async move {
                         run_once(tt.clone(), async move {
-                            let console_ui = (*console_ui).clone().cell();
+                            let issue_reporter = get_issue_reporter();

                             if hyper_tungstenite::is_upgrade_request(&request) {
                                 let uri = request.uri();
@@ -130,7 +137,7 @@ impl DevServerBuilder {
                                 let (response, websocket) =
                                     hyper_tungstenite::upgrade(request, None)?;
                                 let update_server =
-                                    UpdateServer::new(source_provider, console_ui);
+                                    UpdateServer::new(source_provider, issue_reporter);
                                 update_server.run(&*tt, websocket);
                                 return Ok(response);
                             }
@@ -158,12 +165,12 @@ impl DevServerBuilder {
                             let uri = request.uri();
                             let path = uri.path().to_string();
                             let source = source_provider.get_source();
-                            handle_issues(source, &path, "get source", console_ui).await?;
+                            handle_issues(source, &path, "get source", issue_reporter).await?;
                             let resolved_source = source.resolve_strongly_consistent().await?;
                             let response = http::process_request_with_content_source(
                                 resolved_source,
                                 request,
-                                console_ui,
+                                issue_reporter,
                             )
                             .await?;
                             let status = response.status().as_u16();

diff --git a/crates/turbopack-dev-server/src/source/resolve.rs b/crates/turbopack-dev-server/src/source/resolve.rs
index aa5cd38ab5303..96e827842382a 100644
--- a/crates/turbopack-dev-server/src/source/resolve.rs
+++ b/crates/turbopack-dev-server/src/source/resolve.rs
@@ -6,7 +6,7 @@ use std::{
 use anyhow::{bail, Result};
 use hyper::Uri;
 use turbo_tasks::{TransientInstance, Value};
-use turbopack_cli_utils::issue::ConsoleUiVc;
+use turbopack_core::issue::IssueReporterVc;

 use super::{
     headers::{HeaderValue, Headers},
@@ -36,7 +36,7 @@ pub enum ResolveSourceRequestResult {
 pub async fn resolve_source_request(
     source: ContentSourceVc,
     request: TransientInstance<SourceRequest>,
-    console_ui: ConsoleUiVc,
+    issue_reporter: IssueReporterVc,
 ) -> Result<ResolveSourceRequestResultVc> {
     let mut data = ContentSourceData::default();
     let mut current_source = source;
@@ -50,7 +50,7 @@ pub async fn resolve_source_request(
             result,
             &original_path,
             "get content from source",
-            console_ui,
+            issue_reporter,
         )
         .await?;

diff --git a/crates/turbopack-dev-server/src/update/server.rs b/crates/turbopack-dev-server/src/update/server.rs
index 2ff147f7fca5c..d4719ac194199 100644
--- a/crates/turbopack-dev-server/src/update/server.rs
+++ b/crates/turbopack-dev-server/src/update/server.rs
@@ -12,8 +12,7 @@ use tokio::select;
 use tokio_stream::StreamMap;
 use turbo_tasks::{TransientInstance, TurboTasksApi};
 use turbo_tasks_fs::json::parse_json_with_source_context;
-use turbopack_cli_utils::issue::ConsoleUiVc;
-use turbopack_core::version::Update;
+use turbopack_core::{issue::IssueReporterVc, version::Update};

 use super::{
     protocol::{ClientMessage, ClientUpdateInstruction, Issue, ResourceIdentifier},
@@ -28,15 +27,15 @@ use crate::{
 /// A server that listens for updates and sends them to connected clients.
 pub(crate) struct UpdateServer<P: SourceProvider> {
     source_provider: P,
-    console_ui: ConsoleUiVc,
+    issue_reporter: IssueReporterVc,
 }

 impl<P: SourceProvider + Clone + Send + Sync> UpdateServer<P>

{
     /// Create a new update server with the given websocket and content source.
-    pub fn new(source_provider: P, console_ui: ConsoleUiVc) -> Self {
+    pub fn new(source_provider: P, issue_reporter: IssueReporterVc) -> Self {
         Self {
             source_provider,
-            console_ui,
+            issue_reporter,
         }
     }

@@ -69,7 +68,7 @@ impl<P: SourceProvider + Clone + Send + Sync> UpdateServer<P>

{ resolve_source_request( source, TransientInstance::new(request), - self.console_ui + self.issue_reporter ) } }; From a509b2c499ab1a499210e9eddad8ce0b9379ea30 Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Mon, 13 Feb 2023 17:16:15 +0100 Subject: [PATCH 04/31] fix integration tests on windows (#3737) For windows it's important that the browser is dropped so that the test can complete. To do that we need to cancel the spawned task below (which will drop the browser). For this we are using a JoinSet which cancels all tasks when dropped. --- .cargo/config.toml | 2 +- Cargo.lock | 1 + crates/next-dev-tests/Cargo.toml | 10 ++ crates/next-dev-tests/test-harness/harness.ts | 9 +- crates/next-dev-tests/tests/integration.rs | 65 +++++++--- crates/turbopack-node/src/lib.rs | 1 + crates/turbopack-node/src/pool.rs | 115 ++++++++++++------ 7 files changed, 142 insertions(+), 61 deletions(-) diff --git a/.cargo/config.toml b/.cargo/config.toml index 7b102f4c94237..81f5aceb7c068 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -11,4 +11,4 @@ linker = "rust-lld" xtask = "run --package xtask --" [target.'cfg(all())'] -rustflags = ["-Aclippy::too_many_arguments"] +rustflags = ["--cfg", "tokio_unstable", "-Aclippy::too_many_arguments"] diff --git a/Cargo.lock b/Cargo.lock index 1261c0fc29ad3..48da205fb4ebc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3747,6 +3747,7 @@ version = "0.1.0" dependencies = [ "anyhow", "chromiumoxide", + "console-subscriber", "futures", "httpmock", "lazy_static", diff --git a/crates/next-dev-tests/Cargo.toml b/crates/next-dev-tests/Cargo.toml index be35eca8a2cd8..bf1420a445c5f 100644 --- a/crates/next-dev-tests/Cargo.toml +++ b/crates/next-dev-tests/Cargo.toml @@ -9,6 +9,16 @@ autobenches = false # don't publish this crate publish = false +[features] +tokio_console = [ + "dep:console-subscriber", + "tokio/tracing", + "turbo-tasks/tokio_tracing", +] + +[dependencies] +console-subscriber = { version = "0.1.8", optional = true } + [dev-dependencies] anyhow = "1.0.47" chromiumoxide = { version = "0.4.0", features = [ diff --git a/crates/next-dev-tests/test-harness/harness.ts b/crates/next-dev-tests/test-harness/harness.ts index 66358b5432ae1..ed175f5c3c4bf 100644 --- a/crates/next-dev-tests/test-harness/harness.ts +++ b/crates/next-dev-tests/test-harness/harness.ts @@ -27,9 +27,12 @@ function autoReady unknown>( return (...args) => { if (!isReady) { isReady = true; - setImmediate(() => { - READY(""); - }); + requestIdleCallback( + () => { + READY(""); + }, + { timeout: 20000 } + ); } return fn(...args); }; diff --git a/crates/next-dev-tests/tests/integration.rs b/crates/next-dev-tests/tests/integration.rs index 464f97cb9f2cf..c75afd8beaf7d 100644 --- a/crates/next-dev-tests/tests/integration.rs +++ b/crates/next-dev-tests/tests/integration.rs @@ -4,7 +4,9 @@ extern crate test_generator; use std::{ env, fmt::Write, + future::Future, net::SocketAddr, + panic::{catch_unwind, resume_unwind, AssertUnwindSafe}, path::{Path, PathBuf}, time::Duration, }; @@ -27,7 +29,7 @@ use next_dev::{register, EntryRequest, NextDevServerBuilder}; use owo_colors::OwoColorize; use serde::Deserialize; use test_generator::test_resources; -use tokio::{net::TcpSocket, task::JoinHandle}; +use tokio::{net::TcpSocket, task::JoinSet}; use tungstenite::{error::ProtocolError::ResetWithoutClosingHandshake, Error::Protocol}; use turbo_tasks::TurboTasks; use turbo_tasks_fs::util::sys_to_unix; @@ -53,9 +55,30 @@ lazy_static! 
{
     static ref DEBUG_BROWSER: bool = env::var("TURBOPACK_DEBUG_BROWSER").is_ok();
 }

+fn run_async_test<'a, T>(future: impl Future<Output = T> + Send + 'a) -> T {
+    let runtime = tokio::runtime::Builder::new_multi_thread()
+        .worker_threads(1)
+        .enable_all()
+        .build()
+        .unwrap();
+    let result = catch_unwind(AssertUnwindSafe(|| {
+        runtime.block_on(async move {
+            #[cfg(feature = "tokio_console")]
+            console_subscriber::init();
+            future.await
+        })
+    }));
+    println!("Shutting down runtime...");
+    runtime.shutdown_timeout(Duration::from_secs(5));
+    println!("Shut down runtime");
+    match result {
+        Ok(result) => result,
+        Err(err) => resume_unwind(err),
+    }
+}
+
 #[test_resources("crates/next-dev-tests/tests/integration/*/*/*")]
-#[tokio::main(flavor = "current_thread")]
-async fn test(resource: &str) {
+fn test(resource: &str) {
     if resource.ends_with("__skipped__") || resource.ends_with("__flakey__") {
         // "Skip" directories named `__skipped__`, which include test directories to
         // skip. These tests are not considered truly skipped by `cargo test`, but they
@@ -69,7 +92,7 @@ async fn test(resource: &str) {
         return;
     }

-    let run_result = run_test(resource).await;
+    let run_result = run_async_test(run_test(resource));

     assert!(
         !run_result.test_results.is_empty(),
@@ -100,9 +123,8 @@ async fn test(resource: &str) {

 #[test_resources("crates/next-dev-tests/tests/integration/*/*/__skipped__/*")]
 #[should_panic]
-#[tokio::main]
-async fn test_skipped_fails(resource: &str) {
-    let run_result = run_test(resource).await;
+fn test_skipped_fails(resource: &str) {
+    let run_result = run_async_test(run_test(resource));

     // Assert that this skipped test itself has at least one browser test which
     // fails.
@@ -140,8 +162,9 @@ async fn run_test(resource: &str) -> JestRunResult {
     let mock_dir = path.join("__httpmock__");
     let mock_server_future = get_mock_server_future(&mock_dir);

+    let turbo_tasks = TurboTasks::new(MemoryBackend::default());
     let server = NextDevServerBuilder::new(
-        TurboTasks::new(MemoryBackend::default()),
+        turbo_tasks,
         sys_to_unix(&project_dir.to_string_lossy()).to_string(),
         sys_to_unix(&workspace_root.to_string_lossy()).to_string(),
     )
@@ -178,7 +201,7 @@ async fn run_test(resource: &str) -> JestRunResult {
     result
 }

-async fn create_browser(is_debugging: bool) -> Result<(Browser, JoinHandle<()>)> {
+async fn create_browser(is_debugging: bool) -> Result<(Browser, JoinSet<()>)> {
     let mut config_builder = BrowserConfig::builder();
     if is_debugging {
         config_builder = config_builder
@@ -197,8 +220,14 @@ async fn create_browser(is_debugging: bool) -> Result<(Browser, JoinHandle<()>)>
     )
     .await
     .context("Launching browser failed")?;
+
+    // For windows it's important that the browser is dropped so that the test can
+    // complete. To do that we need to cancel the spawned task below (which will
+    // drop the browser). For this we are using a JoinSet which cancels all tasks
+    // when dropped.
+    let mut set = JoinSet::new();
     // See https://crates.io/crates/chromiumoxide
-    let thread_handle = tokio::task::spawn(async move {
+    set.spawn(async move {
         loop {
             if let Err(Ws(Protocol(ResetWithoutClosingHandshake))) = handler.next().await.unwrap() {
                 // The user has most likely closed the browser. End gracefully.
@@ -207,14 +236,11 @@ async fn create_browser(is_debugging: bool) -> Result<(Browser, JoinSet<()>)>
         }
     });

-    Ok((browser, thread_handle))
+    Ok((browser, set))
 }

 async fn run_browser(addr: SocketAddr) -> Result<JestRunResult> {
-    run_test_browser(addr, *DEBUG_BROWSER).await
-}
-
-async fn run_test_browser(addr: SocketAddr, is_debugging: bool) -> Result<JestRunResult> {
+    let is_debugging = *DEBUG_BROWSER;
     let (browser, mut handle) = create_browser(is_debugging).await?;

     // `browser.new_page()` opens a tab, navigates to the destination, and waits for
@@ -346,9 +372,12 @@ async fn run_test_browser(addr: SocketAddr, is_debugging: bool) -> Result<JestRunResult> {
-            result = handle => {
-                result?;
-                return Err(anyhow!("Browser closed"));
+            result = handle.join_next() => {
+                if let Some(result) = result {
+                    result?;
+                } else {
+                    return Err(anyhow!("Browser closed"));
+                }
             }
             () = tokio::time::sleep(Duration::from_secs(60)) => {
                 if !is_debugging {

diff --git a/crates/turbopack-node/src/lib.rs b/crates/turbopack-node/src/lib.rs
index f3109c7cb8e03..5759fb65edaed 100644
--- a/crates/turbopack-node/src/lib.rs
+++ b/crates/turbopack-node/src/lib.rs
@@ -1,5 +1,6 @@
 #![feature(async_closure)]
 #![feature(min_specialization)]
+#![feature(lint_reasons)]

 use std::{
     collections::{HashMap, HashSet},

diff --git a/crates/turbopack-node/src/pool.rs b/crates/turbopack-node/src/pool.rs
index 0ed7795e1016c..b3d80ba75ef76 100644
--- a/crates/turbopack-node/src/pool.rs
+++ b/crates/turbopack-node/src/pool.rs
@@ -17,10 +17,10 @@ use tokio::{
         BufReader,
     },
     net::{TcpListener, TcpStream},
-    process::{Child, Command},
+    process::{Child, ChildStderr, ChildStdout, Command},
     select,
     sync::{OwnedSemaphorePermit, Semaphore},
-    time::sleep,
+    time::{sleep, timeout},
 };

 enum NodeJsPoolProcess {
@@ -39,6 +39,14 @@ struct SpawnedNodeJsPoolProcess {
 struct RunningNodeJsPoolProcess {
     child: Option<Child>,
     connection: TcpStream,
+    stdout_buf: Vec<u8>,
+    stdout: BufReader<ChildStdout>,
+    shared_stdout: SharedOutputSet,
+    stdout_occurences: HashMap<Arc<[u8]>, u32>,
+    stderr_buf: Vec<u8>,
+    stderr: BufReader<ChildStderr>,
+    shared_stderr: SharedOutputSet,
+    stderr_occurences: HashMap<Arc<[u8]>, u32>,
 }

 const CONNECT_TIMEOUT: Duration = Duration::from_secs(30);

 type SharedOutputSet = Arc<Mutex<IndexSet<(Arc<[u8]>, u32)>>>;

 /// lines that have been emitted by other `handle_output_stream` instances with
 /// the same `shared` before.
 async fn handle_output_stream(
-    stream: impl AsyncRead + Unpin,
-    shared: SharedOutputSet,
+    buffer: &mut Vec<u8>,
+    stream: &mut BufReader<impl AsyncRead + Unpin>,
+    shared: &mut SharedOutputSet,
+    own_output: &mut HashMap<Arc<[u8]>, u32>,
     mut final_stream: impl AsyncWrite + Unpin,
 ) {
-    let mut buffered = BufReader::new(stream);
-    let mut own_output = HashMap::<Arc<[u8]>, u32>::new();
-    let mut buffer = Vec::new();
     loop {
-        match buffered.read_until(b'\n', &mut buffer).await {
+        match stream.read_until(b'\n', buffer).await {
             Ok(0) => {
                 break;
             }
@@ -67,7 +74,7 @@
             }
             Ok(_) => {}
         }
-        let line = Arc::from(take(&mut buffer).into_boxed_slice());
+        let line = Arc::from(take(buffer).into_boxed_slice());
         let occurance_number = *own_output
             .entry(Arc::clone(&line))
             .and_modify(|c| *c += 1)
@@ -185,20 +192,20 @@ impl NodeJsPoolProcess {
                     },
                 };

-            tokio::spawn(handle_output_stream(
-                child.stdout.take().unwrap(),
-                shared_stdout,
-                stdout(),
-            ));
-            tokio::spawn(handle_output_stream(
-                child.stderr.take().unwrap(),
-                shared_stderr,
-                stderr(),
-            ));
+                let stdout = BufReader::new(child.stdout.take().unwrap());
+                let stderr = BufReader::new(child.stderr.take().unwrap());

                 RunningNodeJsPoolProcess {
                     child: Some(child),
                     connection,
+                    stdout,
+                    stdout_buf: Vec::new(),
+                    stdout_occurences: HashMap::new(),
+                    shared_stdout,
+                    stderr,
+                    stderr_buf: Vec::new(),
+                    stderr_occurences: HashMap::new(),
+                    shared_stderr,
                 }
             }
             NodeJsPoolProcess::Running(running) => running,
@@ -208,19 +215,40 @@ impl NodeJsPoolProcess {

 impl RunningNodeJsPoolProcess {
     async fn recv(&mut self) -> Result<Vec<u8>> {
-        let packet_len = self
-            .connection
-            .read_u32()
-            .await
-            .context("reading packet length")?
-            .try_into()
-            .context("storing packet length")?;
-        let mut packet_data = vec![0; packet_len];
-        self.connection
-            .read_exact(&mut packet_data)
-            .await
-            .context("reading packet data")?;
-        Ok(packet_data)
+        let connection = &mut self.connection;
+        let recv_future = async move {
+            let packet_len = connection
+                .read_u32()
+                .await
+                .context("reading packet length")?
+                .try_into()
+                .context("storing packet length")?;
+            let mut packet_data = vec![0; packet_len];
+            connection
+                .read_exact(&mut packet_data)
+                .await
+                .context("reading packet data")?;
+            Ok(packet_data)
+        };
+        let stdout_future = handle_output_stream(
+            &mut self.stdout_buf,
+            &mut self.stdout,
+            &mut self.shared_stdout,
+            &mut self.stdout_occurences,
+            stdout(),
+        );
+        let stderr_future = handle_output_stream(
+            &mut self.stderr_buf,
+            &mut self.stderr,
+            &mut self.shared_stderr,
+            &mut self.stderr_occurences,
+            stderr(),
+        );
+        select! {
+            result = recv_future => result,
+            _ = stdout_future => bail!("stdout stream ended unexpectedly"),
+            _ = stderr_future => bail!("stderr stream ended unexpectedly"),
+        }
     }

     async fn send(&mut self, packet_data: Vec<u8>) -> Result<()> {
@@ -358,20 +386,26 @@ impl NodeJsOperation {
         M: DeserializeOwned,
     {
         let message = self
-            .with_process(
-                |process| async move { process.recv().await.context("receiving message") },
-            )
+            .with_process(|process| async move {
+                timeout(Duration::from_secs(30), process.recv())
+                    .await
+                    .context("timeout while receiving message from process")?
+ .context("failed to receive message") + }) .await?; - serde_json::from_slice(&message).context("deserializing message") + serde_json::from_slice(&message).context("failed to deserialize message") } pub async fn send(&mut self, message: M) -> Result<()> where M: Serialize, { - let message = serde_json::to_vec(&message).context("serializing message")?; + let message = serde_json::to_vec(&message).context("failed to serialize message")?; self.with_process(|process| async move { - process.send(message).await.context("sending message")?; + timeout(Duration::from_secs(30), process.send(message)) + .await + .context("timeout while sending message")? + .context("failed to send message")?; Ok(()) }) .await @@ -390,7 +424,10 @@ impl NodeJsOperation { // Ignore error since we are not sure if the process is still alive let _ = child.start_kill(); - let status = child.wait().await.context("waiting for process end")?; + let status = timeout(Duration::from_secs(30), child.wait()) + .await + .context("timeout while waiting for process end")? + .context("waiting for process end")?; Ok(status) } From dea257ace031859ed92614aa087f8497b2056977 Mon Sep 17 00:00:00 2001 From: Chris Olszewski Date: Mon, 13 Feb 2023 08:18:08 -0800 Subject: [PATCH 05/31] chore: add option to dump args sent to go (#3515) I used this yesterday when debugging package inference and found it helpful for launching the Go debugger with the correct args. I figure it might be helpful for other devs as well. --- crates/turborepo-lib/src/lib.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/crates/turborepo-lib/src/lib.rs b/crates/turborepo-lib/src/lib.rs index ca9d8ba0b4250..d2d088262932f 100644 --- a/crates/turborepo-lib/src/lib.rs +++ b/crates/turborepo-lib/src/lib.rs @@ -8,7 +8,7 @@ mod shim; mod ui; use anyhow::Result; -use log::error; +use log::{debug, error, log_enabled, Level}; pub use crate::cli::Args; use crate::package_manager::PackageManager; @@ -32,7 +32,17 @@ pub fn get_version() -> &'static str { pub fn main() -> Payload { match shim::run() { - Ok(payload) => payload, + Ok(payload) => { + match &payload { + Payload::Go(args) if log_enabled!(Level::Debug) => { + if let Ok(serialized_args) = serde_json::to_string_pretty(&args) { + debug!("Args passed to Go binary:\n{}", serialized_args); + } + } + _ => (), + } + payload + } Err(err) => { error!("{}", err.to_string()); Payload::Rust(Err(err)) From 10686b16d163dded615f605966a82fc543dcb5ab Mon Sep 17 00:00:00 2001 From: Alex Kirszenberg Date: Mon, 13 Feb 2023 17:51:53 +0100 Subject: [PATCH 06/31] Make stringify_str and stringify_module_id use Serialize (#3770) Both `stringify_str` and `stringify_module_id` are now simply `stringify_js`. We could skip this and go directly to `serde_json`, but this hides the dependency behind turbopack-ecmascript. 
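
As a rough standalone sketch of the pattern this patch adopts (not part of the
diff itself, and assuming only `serde` and `serde_json` as dependencies): any
`Serialize` value can be rendered as a valid JavaScript expression, because
JSON is (almost) a subset of JavaScript.

```rust
use serde::Serialize;

/// Minimal sketch of the `stringify_js` idea: serialize any value to JSON,
/// which doubles as a JavaScript expression when spliced into generated code.
fn stringify_js<T: Serialize + ?Sized>(value: &T) -> String {
    serde_json::to_string(value).expect("value must serialize to JSON")
}

fn main() {
    // Strings are quoted and escaped, so they embed safely in JS output.
    assert_eq!(stringify_js("a\"b\nc"), r#""a\"b\nc""#);
    // Numbers and sequences map to JS literals unchanged.
    assert_eq!(stringify_js(&42u32), "42");
    assert_eq!(stringify_js(&["a", "b"]), r#"["a","b"]"#);
}
```

One caveat with this shortcut: JSON string literals may contain raw U+2028 and
U+2029 line separators, which older JavaScript parsers reject, so a hardened
implementation may want to escape those as well.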
--- crates/next-core/src/app_source.rs | 10 +++++----- .../src/next_client_chunks/with_chunks.rs | 5 ++--- .../with_client_chunks.rs | 4 ++-- crates/next-core/src/next_edge/transition.rs | 4 ++-- crates/next-core/src/page_loader.rs | 6 +++--- crates/turbopack-css/src/chunk/mod.rs | 4 ++-- crates/turbopack-css/src/module_asset.rs | 9 ++------- crates/turbopack-css/src/util.rs | 18 +++++++++--------- .../turbopack-ecmascript/src/chunk/loader.rs | 10 +++++----- crates/turbopack-ecmascript/src/chunk/mod.rs | 10 +++++----- crates/turbopack-ecmascript/src/utils.rs | 18 ++++++------------ crates/turbopack-env/src/asset.rs | 4 ++-- crates/turbopack-env/src/embeddable.rs | 6 +++--- crates/turbopack-node/src/bootstrap.rs | 4 ++-- crates/turbopack-static/src/lib.rs | 4 ++-- 15 files changed, 52 insertions(+), 64 deletions(-) diff --git a/crates/next-core/src/app_source.rs b/crates/next-core/src/app_source.rs index aa35852123f66..72a3ffb25a566 100644 --- a/crates/next-core/src/app_source.rs +++ b/crates/next-core/src/app_source.rs @@ -34,7 +34,7 @@ use turbopack_dev_server::{ }, }; use turbopack_ecmascript::{ - chunk::EcmascriptChunkPlaceablesVc, magic_identifier, utils::stringify_str, + chunk::EcmascriptChunkPlaceablesVc, magic_identifier, utils::stringify_js, EcmascriptInputTransformsVc, EcmascriptModuleAssetType, EcmascriptModuleAssetVc, }; use turbopack_env::ProcessEnvAssetVc; @@ -572,7 +572,7 @@ impl AppRendererVc { )); } } - Ok((stringify_str(segment_path), imports)) + Ok((stringify_js(segment_path), imports)) }); futures }) @@ -597,7 +597,7 @@ impl AppRendererVc { "import {}, {{ chunks as {} }} from {};\n", identifier, chunks_identifier, - stringify_str(p) + stringify_js(p) )? } } @@ -607,7 +607,7 @@ impl AppRendererVc { r#"("TURBOPACK {{ transition: next-client }}"); import BOOTSTRAP from {}; "#, - stringify_str(&page) + stringify_js(&page) )?; } @@ -618,7 +618,7 @@ import BOOTSTRAP from {}; writeln!( result, " {key}: {{ module: {identifier}, chunks: {chunks_identifier} }},", - key = stringify_str(key), + key = stringify_js(key), )?; } result += " },"; diff --git a/crates/next-core/src/next_client_chunks/with_chunks.rs b/crates/next-core/src/next_client_chunks/with_chunks.rs index ae552054a8afe..f27478af46b39 100644 --- a/crates/next-core/src/next_client_chunks/with_chunks.rs +++ b/crates/next-core/src/next_client_chunks/with_chunks.rs @@ -8,7 +8,7 @@ use turbopack::ecmascript::{ EcmascriptChunkItemVc, EcmascriptChunkPlaceable, EcmascriptChunkPlaceableVc, EcmascriptChunkVc, EcmascriptExports, EcmascriptExportsVc, }, - utils::stringify_module_id, + utils::stringify_js, }; use turbopack_core::{ asset::{Asset, AssetContentVc, AssetVc}, @@ -122,8 +122,7 @@ impl EcmascriptChunkItem for WithChunksChunkItem { client_chunks.push(Value::String(path.to_string())); } } - let module_id = - stringify_module_id(&*inner.asset.as_chunk_item(self.inner_context).id().await?); + let module_id = stringify_js(&*inner.asset.as_chunk_item(self.inner_context).id().await?); Ok(EcmascriptChunkItemContent { inner_code: format!( "__turbopack_esm__({{ diff --git a/crates/next-core/src/next_client_component/with_client_chunks.rs b/crates/next-core/src/next_client_component/with_client_chunks.rs index 253e4340539a5..097a0e264a17e 100644 --- a/crates/next-core/src/next_client_component/with_client_chunks.rs +++ b/crates/next-core/src/next_client_component/with_client_chunks.rs @@ -8,7 +8,7 @@ use turbopack::ecmascript::{ EcmascriptChunkItemVc, EcmascriptChunkPlaceable, EcmascriptChunkPlaceableVc, EcmascriptChunkVc, 
EcmascriptExports, EcmascriptExportsVc, }, - utils::stringify_module_id, + utils::stringify_js, }; use turbopack_core::{ asset::{Asset, AssetContentVc, AssetVc}, @@ -130,7 +130,7 @@ impl EcmascriptChunkItem for WithClientChunksChunkItem { } } } - let module_id = stringify_module_id(&*inner.asset.as_chunk_item(self.context).id().await?); + let module_id = stringify_js(&*inner.asset.as_chunk_item(self.context).id().await?); Ok(EcmascriptChunkItemContent { inner_code: format!( "__turbopack_esm__({{ diff --git a/crates/next-core/src/next_edge/transition.rs b/crates/next-core/src/next_edge/transition.rs index 7f644f38b3058..ca733a1e04d4f 100644 --- a/crates/next-core/src/next_edge/transition.rs +++ b/crates/next-core/src/next_edge/transition.rs @@ -13,7 +13,7 @@ use turbopack_core::{ environment::EnvironmentVc, virtual_asset::VirtualAssetVc, }; -use turbopack_ecmascript::{chunk_group_files_asset::ChunkGroupFilesAsset, utils::stringify_str}; +use turbopack_ecmascript::{chunk_group_files_asset::ChunkGroupFilesAsset, utils::stringify_js}; use crate::embed_js::next_js_file; @@ -36,7 +36,7 @@ impl Transition for NextEdgeTransition { let mut new_content = RopeBuilder::from( format!( "const PAGE = {};\n", - stringify_str( + stringify_js( self.base_path .await? .get_path_to(&*asset.path().await?) diff --git a/crates/next-core/src/page_loader.rs b/crates/next-core/src/page_loader.rs index 6ecb0692068d7..838d8b16135c5 100644 --- a/crates/next-core/src/page_loader.rs +++ b/crates/next-core/src/page_loader.rs @@ -13,7 +13,7 @@ use turbopack_core::{ }; use turbopack_dev_server::source::{asset_graph::AssetGraphContentSourceVc, ContentSourceVc}; use turbopack_ecmascript::{ - utils::stringify_str, EcmascriptInputTransform, EcmascriptInputTransformsVc, + utils::stringify_js, EcmascriptInputTransform, EcmascriptInputTransformsVc, EcmascriptModuleAssetType, EcmascriptModuleAssetVc, }; @@ -58,7 +58,7 @@ impl PageLoaderAssetVc { writeln!( result, "const PAGE_PATH = {};\n", - stringify_str(&format!("/{}", &*this.pathname.await?)) + stringify_js(&format!("/{}", &*this.pathname.await?)) )?; let base_code = next_js_file("entry/page-loader.ts"); @@ -118,7 +118,7 @@ impl Asset for PageLoaderAsset { let content = format!( "__turbopack_load_page_chunks__({}, {})\n", - stringify_str(&this.pathname.await?), + stringify_js(&this.pathname.await?), Value::Array(data) ); diff --git a/crates/turbopack-css/src/chunk/mod.rs b/crates/turbopack-css/src/chunk/mod.rs index b66dc46dbff81..d3bf9ffeab0ab 100644 --- a/crates/turbopack-css/src/chunk/mod.rs +++ b/crates/turbopack-css/src/chunk/mod.rs @@ -28,7 +28,7 @@ use self::{optimize::CssChunkOptimizerVc, source_map::CssChunkSourceMapAssetRefe use crate::{ embed::{CssEmbed, CssEmbeddable, CssEmbeddableVc}, parse::ParseResultSourceMapVc, - util::stringify_str, + util::stringify_js, ImportAssetReferenceVc, }; @@ -134,7 +134,7 @@ impl CssChunkContentVc { let mut code = CodeBuilder::default(); writeln!(code, "/* chunk {} */", chunk_name.await?)?; for external_import in external_imports { - writeln!(code, "@import {};", stringify_str(&external_import))?; + writeln!(code, "@import {};", stringify_js(&external_import))?; } code.push_code(&body.build()); diff --git a/crates/turbopack-css/src/module_asset.rs b/crates/turbopack-css/src/module_asset.rs index 21f75d69b2cf5..019b2f3a418f5 100644 --- a/crates/turbopack-css/src/module_asset.rs +++ b/crates/turbopack-css/src/module_asset.rs @@ -27,7 +27,7 @@ use turbopack_ecmascript::{ EcmascriptChunkItemVc, EcmascriptChunkPlaceable, 
EcmascriptChunkPlaceableVc, EcmascriptChunkVc, EcmascriptExports, EcmascriptExportsVc, }, - utils::stringify_str, + utils::stringify_js, ParseResultSourceMap, ParseResultSourceMapVc, }; @@ -193,12 +193,7 @@ impl EcmascriptChunkItem for ModuleChunkItem { }) .collect::>() .join(" "); - writeln!( - code, - " {}: {},", - stringify_str(key), - stringify_str(&content) - )?; + writeln!(code, " {}: {},", stringify_js(key), stringify_js(&content))?; } code += "});\n"; EcmascriptChunkItemContent { diff --git a/crates/turbopack-css/src/util.rs b/crates/turbopack-css/src/util.rs index 9c770c3a9b046..8e61cd5731f7d 100644 --- a/crates/turbopack-css/src/util.rs +++ b/crates/turbopack-css/src/util.rs @@ -1,4 +1,4 @@ -pub fn stringify_str(str: &str) -> String { +pub fn stringify_js(str: &str) -> String { let mut escaped = String::with_capacity(str.len()); for char in str.chars() { match char { @@ -22,32 +22,32 @@ pub fn stringify_str(str: &str) -> String { #[cfg(test)] mod tests { - use crate::util::stringify_str; + use crate::util::stringify_js; #[test] fn surrounds_with_double_quotes() { - assert_eq!(stringify_str("foo"), r#""foo""#); + assert_eq!(stringify_js("foo"), r#""foo""#); } #[test] fn escapes_double_quotes() { - assert_eq!(stringify_str(r#""""#), r#""\"\"""#); + assert_eq!(stringify_js(r#""""#), r#""\"\"""#); } #[test] fn escapes_backslash() { - assert_eq!(stringify_str(r#"\"#), r#""\\""#); - assert_eq!(stringify_str(r#"\\"#), r#""\\\\""#); - assert_eq!(stringify_str(r#"\n"#), r#""\\n""#); + assert_eq!(stringify_js(r#"\"#), r#""\\""#); + assert_eq!(stringify_js(r#"\\"#), r#""\\\\""#); + assert_eq!(stringify_js(r#"\n"#), r#""\\n""#); } #[test] fn escapes_newlines() { - assert_eq!(stringify_str("\n"), r#""\n""#); + assert_eq!(stringify_js("\n"), r#""\n""#); } #[test] fn escapes_mixed() { - assert_eq!(stringify_str("\n\r\u{0c}"), r#""\n\r\f""#); + assert_eq!(stringify_js("\n\r\u{0c}"), r#""\n\r\f""#); } } diff --git a/crates/turbopack-ecmascript/src/chunk/loader.rs b/crates/turbopack-ecmascript/src/chunk/loader.rs index 84143e9879066..1e8b907b8009f 100644 --- a/crates/turbopack-ecmascript/src/chunk/loader.rs +++ b/crates/turbopack-ecmascript/src/chunk/loader.rs @@ -19,7 +19,7 @@ use crate::{ EcmascriptChunkItemVc, EcmascriptChunkPlaceable, EcmascriptChunkPlaceableVc, EcmascriptChunkVc, EcmascriptExports, EcmascriptExportsVc, }, - utils::{stringify_module_id, stringify_str}, + utils::stringify_js, }; /// The manifest loader item is shipped in the same chunk that uses the dynamic @@ -125,9 +125,9 @@ __turbopack_export_value__((__turbopack_import__) => {{ return __turbopack_require__({item_id}); }}).then(() => __turbopack_import__({dynamic_id})); }});", - chunk_server_path = stringify_str(chunk_server_path), - item_id = stringify_module_id(item_id), - dynamic_id = stringify_module_id(dynamic_id), + chunk_server_path = stringify_js(chunk_server_path), + item_id = stringify_js(item_id), + dynamic_id = stringify_js(dynamic_id), )?; Ok(EcmascriptChunkItemContent { @@ -274,7 +274,7 @@ impl EcmascriptChunkItem for ManifestChunkItem { let mut code = b"const chunks = [\n".to_vec(); for pathname in chunk_server_paths { - writeln!(code, " {},", stringify_str(&pathname))?; + writeln!(code, " {},", stringify_js(&pathname))?; } writeln!(code, "];")?; diff --git a/crates/turbopack-ecmascript/src/chunk/mod.rs b/crates/turbopack-ecmascript/src/chunk/mod.rs index d98709af822fb..7bedbc5347f55 100644 --- a/crates/turbopack-ecmascript/src/chunk/mod.rs +++ b/crates/turbopack-ecmascript/src/chunk/mod.rs @@ -49,7 +49,7 @@ 
use self::{ use crate::{ parse::ParseResultSourceMapVc, references::esm::EsmExportsVc, - utils::{stringify_module_id, stringify_str, FormatIter}, + utils::{stringify_js, FormatIter}, }; #[turbo_tasks::value] @@ -613,9 +613,9 @@ impl EcmascriptChunkContentVc { let mut code = CodeBuilder::default(); code += "(self.TURBOPACK = self.TURBOPACK || []).push(["; - writeln!(code, "{}, {{", stringify_str(chunk_server_path))?; + writeln!(code, "{}, {{", stringify_js(chunk_server_path))?; for entry in &this.module_factories { - write!(code, "\n{}: ", &stringify_module_id(entry.id()))?; + write!(code, "\n{}: ", &stringify_js(entry.id()))?; code.push_code(entry.code()); code += ","; } @@ -627,7 +627,7 @@ impl EcmascriptChunkContentVc { .chunks_server_paths .await? .iter() - .map(|path| format!(" && loadedChunks.has({})", stringify_str(path))) + .map(|path| format!(" && loadedChunks.has({})", stringify_js(path))) .collect::<Vec<_>>() .join(""); let entries_ids = &*evaluate.entry_modules_ids.await?; .iter() .map(|id| async move { let id = id.await?; - let id = stringify_module_id(&id); + let id = stringify_js(&id); Ok(format!(r#"instantiateRuntimeModule({id});"#)) as Result<_> }) .try_join() diff --git a/crates/turbopack-ecmascript/src/utils.rs b/crates/turbopack-ecmascript/src/utils.rs index 047d2a3c1371a..7867b63e067a1 100644 --- a/crates/turbopack-ecmascript/src/utils.rs +++ b/crates/turbopack-ecmascript/src/utils.rs @@ -5,6 +5,7 @@ use std::{ }; use pin_project_lite::pin_project; +use serde::Serialize; use swc_core::{ common::DUMMY_SP, ecma::ast::{Expr, Lit, Str}, @@ -64,21 +65,14 @@ pub fn module_id_to_lit(module_id: &ModuleId) -> Expr { }) } -pub fn stringify_module_id(id: &ModuleId) -> String { - match id { - ModuleId::Number(n) => stringify_number(*n), - ModuleId::String(s) => stringify_str(s), - } -} - -pub fn stringify_str(s: &str) -> String { +/// Converts a serializable value into a valid JavaScript expression. +pub fn stringify_js<T>(s: &T) -> String +where + T: Serialize + ?Sized, +{ serde_json::to_string(s).unwrap() } -pub fn stringify_number(s: u32) -> String { - s.to_string() -} - pub struct FormatIter<T: Iterator, F: Fn() -> T>(pub F); macro_rules! format_iter { diff --git a/crates/turbopack-env/src/asset.rs b/crates/turbopack-env/src/asset.rs index 5ca918991e69b..cb0a0d858d2d3 100644 --- a/crates/turbopack-env/src/asset.rs +++ b/crates/turbopack-env/src/asset.rs @@ -15,7 +15,7 @@ use turbopack_ecmascript::{ EcmascriptChunkItemVc, EcmascriptChunkPlaceable, EcmascriptChunkPlaceableVc, EcmascriptChunkVc, EcmascriptExports, EcmascriptExportsVc, }, - utils::stringify_str, + utils::stringify_js, }; /// The `process.env` asset, responsible for initializing the env (shared by all @@ -134,7 +134,7 @@ impl EcmascriptChunkItem for ProcessEnvChunkItem { // env can be used to inject live code into the output.
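// (Illustrative aside: `stringify_js` is a thin wrapper around `serde_json::to_string`, so `stringify_js("NODE_ENV")` yields the JS string literal `"NODE_ENV"` and the emitted line below reads like `env["NODE_ENV"] = "development";` -- this assumes `val` was already encoded as a JS expression, as `EmbeddableProcessEnv` further down does.)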
// TODO this is not completely correct as env vars need to ignore casing // So `process.env.path === process.env.PATH === process.env.PaTh` - writeln!(code, "env[{}] = {};", stringify_str(name), val)?; + writeln!(code, "env[{}] = {};", stringify_js(name), val)?; } Ok(EcmascriptChunkItemContent { diff --git a/crates/turbopack-env/src/embeddable.rs b/crates/turbopack-env/src/embeddable.rs index 89a986393c45e..a01b0838aa068 100644 --- a/crates/turbopack-env/src/embeddable.rs +++ b/crates/turbopack-env/src/embeddable.rs @@ -1,7 +1,7 @@ use anyhow::Result; use turbo_tasks::primitives::OptionStringVc; use turbo_tasks_env::{EnvMapVc, ProcessEnv, ProcessEnvVc}; -use turbopack_ecmascript::utils::stringify_str; +use turbopack_ecmascript::utils::stringify_js; /// Encodes values as JS strings so that they can be safely injected into a JS /// output. @@ -26,7 +26,7 @@ impl ProcessEnv for EmbeddableProcessEnv { let encoded = prior .iter() - .map(|(k, v)| (k.clone(), stringify_str(v))) + .map(|(k, v)| (k.clone(), stringify_js(v))) .collect(); Ok(EnvMapVc::cell(encoded)) @@ -35,7 +35,7 @@ impl ProcessEnv for EmbeddableProcessEnv { #[turbo_tasks::function] async fn read(&self, name: &str) -> Result<OptionStringVc> { let prior = self.prior.read(name).await?; - let encoded = prior.as_deref().map(stringify_str); + let encoded = prior.as_deref().map(stringify_js); Ok(OptionStringVc::cell(encoded)) } } diff --git a/crates/turbopack-node/src/bootstrap.rs b/crates/turbopack-node/src/bootstrap.rs index fd7be61e1a976..86e1198ca7225 100644 --- a/crates/turbopack-node/src/bootstrap.rs +++ b/crates/turbopack-node/src/bootstrap.rs @@ -7,7 +7,7 @@ use turbopack_core::{ chunk::{ChunkGroupVc, ChunkReferenceVc}, reference::AssetReferencesVc, }; -use turbopack_ecmascript::utils::stringify_str; +use turbopack_ecmascript::utils::stringify_js; #[turbo_tasks::value(shared)] pub(super) struct NodeJsBootstrapAsset { @@ -34,7 +34,7 @@ impl Asset for NodeJsBootstrapAsset { let path = &*chunk.path().await?; if let Some(p) = context_path.get_relative_path_to(path) { if p.ends_with(".js") { - writeln!(&mut output, "require({});", stringify_str(&p))?; + writeln!(&mut output, "require({});", stringify_js(&p))?; } } } diff --git a/crates/turbopack-static/src/lib.rs b/crates/turbopack-static/src/lib.rs index 7dc29dcb5bfb7..ac5b782a5e9e7 100644 --- a/crates/turbopack-static/src/lib.rs +++ b/crates/turbopack-static/src/lib.rs @@ -29,7 +29,7 @@ use turbopack_ecmascript::{ EcmascriptChunkItemVc, EcmascriptChunkPlaceable, EcmascriptChunkPlaceableVc, EcmascriptChunkVc, EcmascriptExports, EcmascriptExportsVc, }, - utils::stringify_str, + utils::stringify_js, }; #[turbo_tasks::value] @@ -193,7 +193,7 @@ impl EcmascriptChunkItem for ModuleChunkItem { Ok(EcmascriptChunkItemContent { inner_code: format!( "__turbopack_export_value__({path});", - path = stringify_str(&format!("/{}", &*self.static_asset.path().await?)) + path = stringify_js(&format!("/{}", &*self.static_asset.path().await?)) ) .into(), ..Default::default() From e4b1945053e94e8b7d4d1436ff7aabd94dc2997d Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Mon, 13 Feb 2023 18:21:19 +0100 Subject: [PATCH 07/31] chunks in chunk groups no longer reference parallel chunks (#3767) optimized chunks no longer reference unoptimized chunks This fixes a bug where unoptimized chunks were written to the temporary output directory --- .../src/chunk/chunk_in_group.rs | 140 ++++++++++++++++++ crates/turbopack-core/src/chunk/mod.rs | 13 +- crates/turbopack-css/src/chunk/mod.rs | 55 +++++++ .../src/introspect/mod.rs | 12
+- crates/turbopack-ecmascript/src/chunk/mod.rs | 15 +- 5 files changed, 225 insertions(+), 10 deletions(-) create mode 100644 crates/turbopack-core/src/chunk/chunk_in_group.rs diff --git a/crates/turbopack-core/src/chunk/chunk_in_group.rs b/crates/turbopack-core/src/chunk/chunk_in_group.rs new file mode 100644 index 0000000000000..f6b2aad36ecff --- /dev/null +++ b/crates/turbopack-core/src/chunk/chunk_in_group.rs @@ -0,0 +1,140 @@ +use anyhow::Result; +use turbo_tasks::{primitives::StringVc, ValueToString, ValueToStringVc}; +use turbo_tasks_fs::FileSystemPathVc; + +use super::{Chunk, ChunkVc, ParallelChunkReferenceVc}; +use crate::{ + asset::{Asset, AssetContentVc, AssetVc}, + chunk::ParallelChunkReference, + introspect::{ + asset::{children_from_asset_references, content_to_details, IntrospectableAssetVc}, + Introspectable, IntrospectableChildrenVc, IntrospectableVc, + }, + reference::AssetReferencesVc, + version::VersionedContentVc, +}; + +/// A chunk that is part of a [ChunkGroup]. In contrast to the inner chunk, it +/// will not have references to parallel chunks, since these are already +/// handled at the [ChunkGroup] level. +#[turbo_tasks::value] +pub struct ChunkInGroup { + inner: ChunkVc, } + +#[turbo_tasks::value_impl] +impl ChunkInGroupVc { + #[turbo_tasks::function] + pub fn new(inner: ChunkVc) -> Self { + ChunkInGroup { inner }.cell() + } + + /// Returns the inner chunk of this chunk in group. + #[turbo_tasks::function] + pub async fn inner(self) -> Result<ChunkVc> { + Ok(self.await?.inner) + } +} + +#[turbo_tasks::value_impl] +impl Chunk for ChunkInGroup {} + +#[turbo_tasks::value_impl] +impl Asset for ChunkInGroup { + #[turbo_tasks::function] + fn path(&self) -> FileSystemPathVc { + self.inner.path() + } + + #[turbo_tasks::function] + fn content(&self) -> AssetContentVc { + self.inner.content() + } + + #[turbo_tasks::function] + async fn references(&self) -> Result<AssetReferencesVc> { + let refs = &*self.inner.references().await?; + let mut references = Vec::new(); + for reference in refs { + if let Some(parallel_ref) = ParallelChunkReferenceVc::resolve_from(*reference).await? { + if *parallel_ref.is_loaded_in_parallel().await? { + continue; + } + } + references.push(*reference); + } + Ok(AssetReferencesVc::cell(references)) + } + + #[turbo_tasks::function] + fn versioned_content(&self) -> VersionedContentVc { + self.inner.versioned_content() + } +} + +#[turbo_tasks::value_impl] +impl ValueToString for ChunkInGroup { + #[turbo_tasks::function] + fn to_string(&self) -> StringVc { + self.inner.to_string() + } +} + +#[turbo_tasks::function] +fn inner_chunk_key() -> StringVc { + StringVc::cell("inner chunk".to_string()) +} + +#[turbo_tasks::function] +fn base_ty() -> StringVc { + StringVc::cell("chunk in group".to_string()) +} + +#[turbo_tasks::value_impl] +impl Introspectable for ChunkInGroup { + #[turbo_tasks::function] + async fn ty(&self) -> Result<StringVc> { + Ok( + if let Some(chunk) = IntrospectableVc::resolve_from(self.inner).await? { + let ty = chunk.ty().await?; + StringVc::cell(format!("{ty} (in group)")) + } else { + base_ty() + }, + ) + } + + #[turbo_tasks::function] + async fn title(&self) -> Result<StringVc> { + Ok( + if let Some(chunk) = IntrospectableVc::resolve_from(self.inner).await? { + chunk.title() + } else { + self.inner.path().to_string() + }, + ) + } + + #[turbo_tasks::function] + async fn details(&self) -> Result<StringVc> { + Ok( + if let Some(chunk) = IntrospectableVc::resolve_from(self.inner).await?
{ + chunk.details() + } else { + content_to_details(self.inner.content()) + }, + ) + } + + #[turbo_tasks::function] + async fn children(self_vc: ChunkInGroupVc) -> Result { + let mut children = children_from_asset_references(self_vc.references()) + .await? + .clone_value(); + children.insert(( + inner_chunk_key(), + IntrospectableAssetVc::new(self_vc.await?.inner.into()), + )); + Ok(IntrospectableChildrenVc::cell(children)) + } +} diff --git a/crates/turbopack-core/src/chunk/mod.rs b/crates/turbopack-core/src/chunk/mod.rs index b2b92cd07a8a8..3624cdae9f2f6 100644 --- a/crates/turbopack-core/src/chunk/mod.rs +++ b/crates/turbopack-core/src/chunk/mod.rs @@ -1,3 +1,4 @@ +pub mod chunk_in_group; pub mod dev; pub mod optimize; @@ -18,7 +19,7 @@ use turbo_tasks::{ use turbo_tasks_fs::FileSystemPathVc; use turbo_tasks_hash::DeterministicHash; -use self::optimize::optimize; +use self::{chunk_in_group::ChunkInGroupVc, optimize::optimize}; use crate::{ asset::{Asset, AssetVc, AssetsVc}, environment::EnvironmentVc, @@ -149,9 +150,6 @@ impl ChunkGroupVc { Ok(result.into_iter().flatten()) } - // async fn get_chunk_children( - // chunk: ChunkVc, - // ) -> Result>>>>> { async fn get_chunk_children( chunk: ChunkVc, ) -> Result + Send> { @@ -174,6 +172,13 @@ impl ChunkGroupVc { let chunks = ChunksVc::cell(chunks.into_iter().collect()); let chunks = optimize(chunks, self); + let chunks = ChunksVc::cell( + chunks + .await? + .iter() + .map(|&chunk| ChunkInGroupVc::new(chunk).as_chunk()) + .collect(), + ); Ok(chunks) } diff --git a/crates/turbopack-css/src/chunk/mod.rs b/crates/turbopack-css/src/chunk/mod.rs index d3bf9ffeab0ab..e77531f7335f1 100644 --- a/crates/turbopack-css/src/chunk/mod.rs +++ b/crates/turbopack-css/src/chunk/mod.rs @@ -2,6 +2,8 @@ pub(crate) mod optimize; pub mod source_map; pub(crate) mod writer; +use std::fmt::Write; + use anyhow::{anyhow, Result}; use indexmap::IndexSet; use turbo_tasks::{primitives::StringVc, TryJoinIterExt, ValueToString, ValueToStringVc}; @@ -17,6 +19,10 @@ use turbopack_core::{ FromChunkableAsset, ModuleId, ModuleIdVc, }, code_builder::{CodeBuilder, CodeVc}, + introspect::{ + asset::{children_from_asset_references, content_to_details, IntrospectableAssetVc}, + Introspectable, IntrospectableChildrenVc, IntrospectableVc, + }, reference::{AssetReference, AssetReferenceVc, AssetReferencesVc}, resolve::PrimaryResolveResult, source_map::{GenerateSourceMap, GenerateSourceMapVc, SourceMapVc}, @@ -416,3 +422,52 @@ impl FromChunkableAsset for CssChunkItemVc { Ok(None) } } + +#[turbo_tasks::function] +fn introspectable_type() -> StringVc { + StringVc::cell("css chunk".to_string()) +} + +#[turbo_tasks::function] +fn entry_module_key() -> StringVc { + StringVc::cell("entry module".to_string()) +} + +#[turbo_tasks::value_impl] +impl Introspectable for CssChunk { + #[turbo_tasks::function] + fn ty(&self) -> StringVc { + introspectable_type() + } + + #[turbo_tasks::function] + fn title(self_vc: CssChunkVc) -> StringVc { + self_vc.path().to_string() + } + + #[turbo_tasks::function] + async fn details(self_vc: CssChunkVc) -> Result { + let content = content_to_details(self_vc.content()); + let mut details = String::new(); + let this = self_vc.await?; + let chunk_content = css_chunk_content(this.context, this.main_entries).await?; + details += "Chunk items:\n\n"; + for item in chunk_content.chunk_items.iter() { + writeln!(details, "- {}", item.to_string().await?)?; + } + details += "\nContent:\n\n"; + write!(details, "{}", content.await?)?; + Ok(StringVc::cell(details)) + } + + 
#[turbo_tasks::function] + async fn children(self_vc: CssChunkVc) -> Result { + let mut children = children_from_asset_references(self_vc.references()) + .await? + .clone_value(); + for &entry in &*self_vc.await?.main_entries.await? { + children.insert((entry_module_key(), IntrospectableAssetVc::new(entry.into()))); + } + Ok(IntrospectableChildrenVc::cell(children)) + } +} diff --git a/crates/turbopack-dev-server/src/introspect/mod.rs b/crates/turbopack-dev-server/src/introspect/mod.rs index c668989330e25..23eb4c8340204 100644 --- a/crates/turbopack-dev-server/src/introspect/mod.rs +++ b/crates/turbopack-dev-server/src/introspect/mod.rs @@ -1,7 +1,7 @@ use std::{collections::HashSet, fmt::Display}; use anyhow::Result; -use turbo_tasks::{primitives::StringVc, TryJoinIterExt}; +use turbo_tasks::{primitives::StringVc, registry, CellId, RawVc, TryJoinIterExt}; use turbo_tasks_fs::{json::parse_json_with_source_context, File, FileContent}; use turbopack_core::{ asset::AssetContent, @@ -88,6 +88,15 @@ impl ContentSource for IntrospectionSource { } } else { parse_json_with_source_context(path)? + } + .resolve() + .await?; + let raw_vc: RawVc = introspectable.into(); + let internal_ty = if let RawVc::TaskCell(_, CellId { type_id, index }) = raw_vc { + let value_ty = registry::get_value_type(type_id); + format!("{}#{}", value_ty.name, index) + } else { + unreachable!() }; let ty = introspectable.ty().await?; let title = introspectable.title().await?; @@ -130,6 +139,7 @@ impl ContentSource for IntrospectionSource { " {title} +

{internal_ty}

{ty}

{title}

{details} diff --git a/crates/turbopack-ecmascript/src/chunk/mod.rs b/crates/turbopack-ecmascript/src/chunk/mod.rs index 7bedbc5347f55..d12858e3c22cc 100644 --- a/crates/turbopack-ecmascript/src/chunk/mod.rs +++ b/crates/turbopack-ecmascript/src/chunk/mod.rs @@ -21,6 +21,7 @@ use turbopack_core::{ asset::{Asset, AssetContentVc, AssetVc}, chunk::{ chunk_content, chunk_content_split, + chunk_in_group::ChunkInGroupVc, optimize::{ChunkOptimizerVc, OptimizableChunk, OptimizableChunkVc}, Chunk, ChunkContentResult, ChunkGroupReferenceVc, ChunkGroupVc, ChunkItem, ChunkItemVc, ChunkReferenceVc, ChunkVc, ChunkableAsset, ChunkableAssetVc, ChunkingContext, @@ -213,11 +214,15 @@ impl EcmascriptChunkEvaluateVc { let mut chunks_server_paths = Vec::new(); let output_root = context.output_root().await?; for chunk in evaluate_chunks.iter() { - if let Some(ecma_chunk) = EcmascriptChunkVc::resolve_from(chunk).await? { - if ecma_chunk != origin_chunk { - let chunk_path = &*chunk.path().await?; - if let Some(chunk_server_path) = output_root.get_path_to(chunk_path) { - chunks_server_paths.push(chunk_server_path.to_string()); + if let Some(chunk_in_group) = ChunkInGroupVc::resolve_from(chunk).await? { + if let Some(ecma_chunk) = + EcmascriptChunkVc::resolve_from(chunk_in_group.inner()).await? + { + if ecma_chunk != origin_chunk { + let chunk_path = &*chunk.path().await?; + if let Some(chunk_server_path) = output_root.get_path_to(chunk_path) { + chunks_server_paths.push(chunk_server_path.to_string()); + } } } } From e403a44063d02d52c5fccfe80e8dd5336a7755c5 Mon Sep 17 00:00:00 2001 From: chicoworry <121981363+chicoworry@users.noreply.github.com> Date: Mon, 13 Feb 2023 15:22:55 -0300 Subject: [PATCH 08/31] Fix typo (#3763) Just a small typo, I believe :) --- crates/next-dev/benches/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/next-dev/benches/README.md b/crates/next-dev/benches/README.md index 360d56717f99d..3a54903618e66 100644 --- a/crates/next-dev/benches/README.md +++ b/crates/next-dev/benches/README.md @@ -39,7 +39,7 @@ The benchmark suite runs Turbopack and other bundlers in a variety of scenarios. - **bench_startup:** Time from startup (without cache) until the app is rendered in the browser (it doesn't have to be interactive/hydrated for this.) - **bench_hydration:** Time from startup (without cache) until the app is interactive in the browser (it needs to be hydrated for that.) This metric is not captured for CSR since the first render is interactive. -- **bench_hmr_to_eval:** Time from changing a file until the new code is evaluated in the browser. Evaluating the code does not mean the change is visible to the user yet. For instance, when a React component changes, it need to be re-rendered in the browser. This mostly measures the time spent computing the update in the bundler itself and sending it to the client. +- **bench_hmr_to_eval:** Time from changing a file until the new code is evaluated in the browser. Evaluating the code does not mean the change is visible to the user yet. For instance, when a React component changes, it needs to be re-rendered in the browser. This mostly measures the time spent computing the update in the bundler itself and sending it to the client. - **bench_hmr_to_commit:** Time from changing a file until the change is reflected in the browser. We are using a `useEffect` hook within a React component to measure the time it takes for the updated React component to be committed to the DOM.
This is a good measure of the end to end performance perceived by the user. - **bench_startup_cache:** Time from startup with persistent cache until the app is rendered in the browser (it doesn't have to be interactive/hydrated for this.). Turbopack doesn't include a persistent cache yet. (This benchmark is disabled by default and can be enabled with `TURBOPACK_BENCH_CACHED=1`) - **bench_hydration:** Time from startup with persistent cache until the app is interactive in the browser (it needs to be hydrated for that.) This metric is not captured for CSR since the first render is interactive. Turbopack doesn't include a persistent cache yet. (This benchmark is disabled by default and can be enabled with `TURBOPACK_BENCH_CACHED=1`) From 6e2d96c769e3a0047bc429e3961f4e3a220e82c2 Mon Sep 17 00:00:00 2001 From: Mehul Kar Date: Mon, 13 Feb 2023 13:04:59 -0800 Subject: [PATCH 09/31] Revert "chore: add option to dump args sent to go (#3515)" (#3782) This reverts commit dea257ace031859ed92614aa087f8497b2056977. --- crates/turborepo-lib/src/lib.rs | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/crates/turborepo-lib/src/lib.rs b/crates/turborepo-lib/src/lib.rs index d2d088262932f..ca9d8ba0b4250 100644 --- a/crates/turborepo-lib/src/lib.rs +++ b/crates/turborepo-lib/src/lib.rs @@ -8,7 +8,7 @@ mod shim; mod ui; use anyhow::Result; -use log::{debug, error, log_enabled, Level}; +use log::error; pub use crate::cli::Args; use crate::package_manager::PackageManager; @@ -32,17 +32,7 @@ pub fn get_version() -> &'static str { pub fn main() -> Payload { match shim::run() { - Ok(payload) => { - match &payload { - Payload::Go(args) if log_enabled!(Level::Debug) => { - if let Ok(serialized_args) = serde_json::to_string_pretty(&args) { - debug!("Args passed to Go binary:\n{}", serialized_args); - } - } - _ => (), - } - payload - } + Ok(payload) => payload, Err(err) => { error!("{}", err.to_string()); Payload::Rust(Err(err)) From 5b44c9289e7d72f05745342ca7a1c6f63479bf0a Mon Sep 17 00:00:00 2001 From: Mehul Kar Date: Mon, 13 Feb 2023 13:34:16 -0800 Subject: [PATCH 10/31] Composable turbo.json (#2706) This feature allows monorepos to place a `turbo.json` file in workspaces with an `"extends"` key to override task configuration for that workspace. After this PR, individual workspaces should be able to: - add keys to task definitions (e.g. changing `dependsOn` or `outputs` configs) - reset keys in task definitions to defaults (e.g. `dependsOn: []`) - add tasks (e.g. add a `lint` command only in the workspace that implements it)
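As a sketch of the shape this enables (illustrative only -- the `extends` value and the task names below are assumptions for the example, not copied from the fixtures this PR adds):

```jsonc
// apps/my-app/turbo.json -- hypothetical workspace override
{
  // inherit the root pipeline; "//" refers to the monorepo root config
  "extends": ["//"],
  "pipeline": {
    // add a key: this workspace's build also writes dist/
    "build": { "outputs": ["dist/**"] },
    // reset a key back to its default
    "test": { "dependsOn": [] },
    // add a task that only this workspace implements
    "lint": {}
  }
}
```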
--- CONTRIBUTING.md | 12 +- .../basic_monorepo/dry_run.t | 10 + cli/integration_tests/basic_monorepo/run.t | 16 ++ .../composable_config/composing-add-keys.t | 100 +++++++ .../composable_config/composing-add-tasks.t | 18 ++ .../composable_config/composing-bad-json.t | 14 + .../composable_config/composing-cache.t | 102 +++++++ .../composing-config-change.t | 16 ++ .../composing-invalid-config.t | 37 +++ .../composing-missing-workspace-config-deps.t | 44 +++ .../composing-missing-workspace-config.t | 107 +++++++ .../composing-omit-keys-deps.t | 51 ++++ .../composable_config/composing-omit-keys.t | 89 ++++++ .../composing-override-values-deps.t | 44 +++ .../composing-override-values.t | 108 ++++++++ .../composable_config/composing-persistent.t | 60 ++++ .../composable_config/monorepo/.gitignore | 7 + .../monorepo/apps/add-keys/out/.keep | 0 .../monorepo/apps/add-keys/package.json | 10 + .../monorepo/apps/add-keys/src/foo.txt | 1 + .../monorepo/apps/add-keys/turbo.json | 14 + .../monorepo/apps/add-tasks/out/.keep | 0 .../monorepo/apps/add-tasks/package.json | 9 + .../monorepo/apps/add-tasks/src/foo.txt | 1 + .../monorepo/apps/add-tasks/turbo.json | 9 + .../monorepo/apps/bad-json/package.json | 6 + .../monorepo/apps/cached/out/.keep | 0 .../monorepo/apps/cached/package.json | 8 + .../monorepo/apps/cached/turbo.json | 14 + .../monorepo/apps/config-change/package.json | 6 + .../monorepo/apps/config-change/src/foo.txt | 1 + .../apps/config-change/turbo-changed.json | 9 + .../monorepo/apps/config-change/turbo.json | 7 + .../monorepo/apps/invalid-config/package.json | 6 + .../monorepo/apps/invalid-config/turbo.json | 9 + .../apps/missing-workspace-config/out/.keep | 0 .../missing-workspace-config/package.json | 12 + .../apps/missing-workspace-config/src/foo.txt | 1 + .../monorepo/apps/omit-keys/out/.keep | 0 .../monorepo/apps/omit-keys/package.json | 11 + .../monorepo/apps/omit-keys/src/foo.txt | 1 + .../monorepo/apps/omit-keys/turbo.json | 8 + .../monorepo/apps/override-values/lib/.keep | 0 .../monorepo/apps/override-values/out/.keep | 0 .../apps/override-values/package.json | 11 + .../monorepo/apps/override-values/src/foo.txt | 1 + .../monorepo/apps/override-values/turbo.json | 15 + .../monorepo/apps/persistent/package.json | 13 + .../monorepo/apps/persistent/turbo.json | 12 + .../monorepo/package-lock.json | 54 ++++ .../composable_config/monorepo/package.json | 7 + .../monorepo/packages/blank-pkg/package.json | 8 + .../composable_config/monorepo/turbo.json | 99 +++++++ .../composable_config/setup.sh | 7 + .../prune/composable-config.t | 26 ++ .../apps/docs/package.json | 3 + .../apps/docs/turbo.json | 6 + .../task-dependencies/complex.t | 8 +- .../task-dependencies/complex/turbo.json | 2 +- cli/internal/context/context.go | 30 +- cli/internal/core/engine.go | 245 +++++++++++----- cli/internal/fs/turbo_json.go | 262 ++++++++++++++---- cli/internal/fs/turbo_json_test.go | 111 ++++---- cli/internal/graph/graph.go | 61 +++- cli/internal/hashing/package_deps_hash.go | 14 +- cli/internal/prune/prune.go | 18 +- cli/internal/run/dry_run.go | 2 +- cli/internal/run/global_hash.go | 6 +- cli/internal/run/run.go | 69 ++--- cli/internal/scope/filter/filter.go | 6 +- cli/internal/scope/filter/filter_test.go | 37 ++- cli/internal/scope/scope.go | 6 +- cli/internal/scope/scope_test.go | 94 ++++--- cli/internal/taskhash/taskhash.go | 14 +- 74 files changed, 1906 insertions(+), 319 deletions(-) create mode 100644
cli/integration_tests/basic_monorepo/run.t create mode 100644 cli/integration_tests/composable_config/composing-add-keys.t create mode 100644 cli/integration_tests/composable_config/composing-add-tasks.t create mode 100644 cli/integration_tests/composable_config/composing-bad-json.t create mode 100644 cli/integration_tests/composable_config/composing-cache.t create mode 100644 cli/integration_tests/composable_config/composing-config-change.t create mode 100644 cli/integration_tests/composable_config/composing-invalid-config.t create mode 100644 cli/integration_tests/composable_config/composing-missing-workspace-config-deps.t create mode 100644 cli/integration_tests/composable_config/composing-missing-workspace-config.t create mode 100644 cli/integration_tests/composable_config/composing-omit-keys-deps.t create mode 100644 cli/integration_tests/composable_config/composing-omit-keys.t create mode 100644 cli/integration_tests/composable_config/composing-override-values-deps.t create mode 100644 cli/integration_tests/composable_config/composing-override-values.t create mode 100644 cli/integration_tests/composable_config/composing-persistent.t create mode 100644 cli/integration_tests/composable_config/monorepo/.gitignore create mode 100644 cli/integration_tests/composable_config/monorepo/apps/add-keys/out/.keep create mode 100644 cli/integration_tests/composable_config/monorepo/apps/add-keys/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/add-keys/src/foo.txt create mode 100644 cli/integration_tests/composable_config/monorepo/apps/add-keys/turbo.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/add-tasks/out/.keep create mode 100644 cli/integration_tests/composable_config/monorepo/apps/add-tasks/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/add-tasks/src/foo.txt create mode 100644 cli/integration_tests/composable_config/monorepo/apps/add-tasks/turbo.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/bad-json/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/cached/out/.keep create mode 100644 cli/integration_tests/composable_config/monorepo/apps/cached/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/cached/turbo.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/config-change/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/config-change/src/foo.txt create mode 100644 cli/integration_tests/composable_config/monorepo/apps/config-change/turbo-changed.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/config-change/turbo.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/invalid-config/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/invalid-config/turbo.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/out/.keep create mode 100644 cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/src/foo.txt create mode 100644 cli/integration_tests/composable_config/monorepo/apps/omit-keys/out/.keep create mode 100644 cli/integration_tests/composable_config/monorepo/apps/omit-keys/package.json create mode 100644 
cli/integration_tests/composable_config/monorepo/apps/omit-keys/src/foo.txt create mode 100644 cli/integration_tests/composable_config/monorepo/apps/omit-keys/turbo.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/override-values/lib/.keep create mode 100644 cli/integration_tests/composable_config/monorepo/apps/override-values/out/.keep create mode 100644 cli/integration_tests/composable_config/monorepo/apps/override-values/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/override-values/src/foo.txt create mode 100644 cli/integration_tests/composable_config/monorepo/apps/override-values/turbo.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/persistent/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/apps/persistent/turbo.json create mode 100644 cli/integration_tests/composable_config/monorepo/package-lock.json create mode 100644 cli/integration_tests/composable_config/monorepo/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/packages/blank-pkg/package.json create mode 100644 cli/integration_tests/composable_config/monorepo/turbo.json create mode 100755 cli/integration_tests/composable_config/setup.sh create mode 100644 cli/integration_tests/prune/composable-config.t create mode 100644 cli/integration_tests/prune/monorepo_with_root_dep/apps/docs/turbo.json diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 739337421b5a0..4f67396da31ea 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -27,11 +27,7 @@ Thanks for your interest in contributing to Turbo! Dependencies -1. Install `jq` and `sponge` - - On macOS: `brew install sponge jq` - -1. Install [turborepo crate](https://github.com/vercel/turbo/blob/main/crates/turborepo/README.md) build requirements +1. Install [turborepo crate](./crates/turborepo/README.md) build requirements 1. Run `pnpm install` at root @@ -51,6 +47,12 @@ to the build command. This allows for us to build for more platforms, as `native ### Running Turborepo Tests +Dependencies + +1. Install `jq`, `sponge`, and `zstd` + +On macOS: `brew install sponge jq zstd` + #### Go Tests From the root directory, you can diff --git a/cli/integration_tests/basic_monorepo/dry_run.t b/cli/integration_tests/basic_monorepo/dry_run.t index 30562806d57c3..b3e883081904a 100644 --- a/cli/integration_tests/basic_monorepo/dry_run.t +++ b/cli/integration_tests/basic_monorepo/dry_run.t @@ -101,3 +101,13 @@ Check my-app#build output "persistent": false } } + +$ Non-existent tasks don't throw an error + $ ${TURBO} run doesnotexist --dry=json + { + "packages": [ + "my-app", + "util" + ], + "tasks": [] + } diff --git a/cli/integration_tests/basic_monorepo/run.t b/cli/integration_tests/basic_monorepo/run.t new file mode 100644 index 0000000000000..7f5aa7bf53045 --- /dev/null +++ b/cli/integration_tests/basic_monorepo/run.t @@ -0,0 +1,16 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) + +$ running non-existent tasks works + $ ${TURBO} run doesnotexist + \xe2\x80\xa2 Packages in scope: my-app, util (esc) + \xe2\x80\xa2 Running doesnotexist in 2 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + + No tasks were executed as part of this run. 
+ + Tasks: 0 successful, 0 total + Cached: 0 cached, 0 total + Time:\s*[\.0-9]+m?s (re) + \ No newline at end of file diff --git a/cli/integration_tests/composable_config/composing-add-keys.t b/cli/integration_tests/composable_config/composing-add-keys.t new file mode 100644 index 0000000000000..5babbafbfaa83 --- /dev/null +++ b/cli/integration_tests/composable_config/composing-add-keys.t @@ -0,0 +1,100 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +# The add-keys-task in the root turbo.json has no config. This test: +# [x] Tests dependsOn works by asserting that another task runs first +# [x] Tests outputs works by asserting that the right directory is cached +# [x] Tests outputMode by asserting output logs on a second run +# [x] Tests inputs works by changing a file and testing there was a cache miss +# [x] Tests env works by setting an env var and asserting there was a cache miss + +# 1. First run, assert for `dependsOn` and `outputs` keys + $ ${TURBO} run add-keys-task --filter=add-keys > tmp.log + $ cat tmp.log + \xe2\x80\xa2 Packages in scope: add-keys (esc) + \xe2\x80\xa2 Running add-keys-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + add-keys:add-keys-underlying-task: cache miss, executing a33d34272db64281 + add-keys:add-keys-underlying-task: + add-keys:add-keys-underlying-task: > add-keys-underlying-task + add-keys:add-keys-underlying-task: > echo "running add-keys-underlying-task" + add-keys:add-keys-underlying-task: + add-keys:add-keys-underlying-task: running add-keys-underlying-task + add-keys:add-keys-task: cache miss, executing f4bbaa26e53aac6f + add-keys:add-keys-task: + add-keys:add-keys-task: > add-keys-task + add-keys:add-keys-task: > echo "running add-keys-task" > out/foo.min.txt + add-keys:add-keys-task: + + Tasks: 2 successful, 2 total + Cached: 0 cached, 2 total + Time:\s*[\.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "add-keys:add-keys-task.* executing .*" | awk '{print $5}') + $ tar -tf $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + apps/add-keys/.turbo/turbo-add-keys-task.log + apps/add-keys/out/ + apps/add-keys/out/.keep + apps/add-keys/out/foo.min.txt + +# 2. Second run, test there was a cache hit (`cache` config) and `output` was suppressed (`outputMode`) + $ ${TURBO} run add-keys-task --filter=add-keys + \xe2\x80\xa2 Packages in scope: add-keys (esc) + \xe2\x80\xa2 Running add-keys-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + add-keys:add-keys-underlying-task: cache hit, replaying output a33d34272db64281 + add-keys:add-keys-underlying-task: + add-keys:add-keys-underlying-task: > add-keys-underlying-task + add-keys:add-keys-underlying-task: > echo "running add-keys-underlying-task" + add-keys:add-keys-underlying-task: + add-keys:add-keys-underlying-task: running add-keys-underlying-task + add-keys:add-keys-task: cache hit, suppressing output f4bbaa26e53aac6f + + Tasks: 2 successful, 2 total + Cached: 2 cached, 2 total + Time:\s*[\.0-9]+m?s >>> FULL TURBO (re) + +# 3.
Change input file and assert cache miss + $ echo "more text" >> $TARGET_DIR/apps/add-keys/src/foo.txt + $ ${TURBO} run add-keys-task --filter=add-keys + \xe2\x80\xa2 Packages in scope: add-keys (esc) + \xe2\x80\xa2 Running add-keys-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + add-keys:add-keys-underlying-task: cache miss, executing dfc32b367b1c6a9a + add-keys:add-keys-underlying-task: + add-keys:add-keys-underlying-task: > add-keys-underlying-task + add-keys:add-keys-underlying-task: > echo "running add-keys-underlying-task" + add-keys:add-keys-underlying-task: + add-keys:add-keys-underlying-task: running add-keys-underlying-task + add-keys:add-keys-task: cache miss, executing e0596a25ab3888ea + add-keys:add-keys-task: + add-keys:add-keys-task: > add-keys-task + add-keys:add-keys-task: > echo "running add-keys-task" > out/foo.min.txt + add-keys:add-keys-task: + + Tasks: 2 successful, 2 total + Cached: 0 cached, 2 total + Time:\s*[\.0-9]+m?s (re) + +# 4. Set env var and assert cache miss + $ SOME_VAR=somevalue ${TURBO} run add-keys-task --filter=add-keys + \xe2\x80\xa2 Packages in scope: add-keys (esc) + \xe2\x80\xa2 Running add-keys-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + add-keys:add-keys-underlying-task: cache hit, replaying output dfc32b367b1c6a9a + add-keys:add-keys-underlying-task: + add-keys:add-keys-underlying-task: > add-keys-underlying-task + add-keys:add-keys-underlying-task: > echo "running add-keys-underlying-task" + add-keys:add-keys-underlying-task: + add-keys:add-keys-underlying-task: running add-keys-underlying-task + add-keys:add-keys-task: cache miss, executing 2ff6e32f88af5a65 + add-keys:add-keys-task: + add-keys:add-keys-task: > add-keys-task + add-keys:add-keys-task: > echo "running add-keys-task" > out/foo.min.txt + add-keys:add-keys-task: + + Tasks: 2 successful, 2 total + Cached: 1 cached, 2 total + Time:\s*[\.0-9]+m?s (re) + diff --git a/cli/integration_tests/composable_config/composing-add-tasks.t b/cli/integration_tests/composable_config/composing-add-tasks.t new file mode 100644 index 0000000000000..b2a11bca79a5a --- /dev/null +++ b/cli/integration_tests/composable_config/composing-add-tasks.t @@ -0,0 +1,18 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + + $ ${TURBO} run added-task --filter=add-tasks + \xe2\x80\xa2 Packages in scope: add-tasks (esc) + \xe2\x80\xa2 Running added-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + add-tasks:added-task: cache miss, executing 8f82d0bc5ced2f1c + add-tasks:added-task: + add-tasks:added-task: > added-task + add-tasks:added-task: > echo "running added-task" > out/foo.min.txt + add-tasks:added-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s+[.0-9]+m?s (re) + \ No newline at end of file diff --git a/cli/integration_tests/composable_config/composing-bad-json.t b/cli/integration_tests/composable_config/composing-bad-json.t new file mode 100644 index 0000000000000..2a00dd6305cf6 --- /dev/null +++ b/cli/integration_tests/composable_config/composing-bad-json.t @@ -0,0 +1,14 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . 
${TESTDIR}/setup.sh $(pwd) ./monorepo + +# Put some bad JSON into the turbo.json in this app + $ echo '{"pipeline": {"trailing-comma": {},}}' > "$TARGET_DIR/apps/bad-json/turbo.json" +# The test is grepping from a logfile because the list of errors can appear in any order + +# Errors are shown if we run across a malformed turbo.json + $ ${TURBO} run trailing-comma --filter=bad-json > tmp.log 2>&1 + [1] + $ cat tmp.log + ERROR run failed: error preparing engine: turbo.json: invalid character '}' looking for beginning of object key string + Turbo error: error preparing engine: turbo.json: invalid character '}' looking for beginning of object key string diff --git a/cli/integration_tests/composable_config/composing-cache.t b/cli/integration_tests/composable_config/composing-cache.t new file mode 100644 index 0000000000000..b098200530e9c --- /dev/null +++ b/cli/integration_tests/composable_config/composing-cache.t @@ -0,0 +1,102 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +This test covers: +# - `cache:false` in root, override `cache:true` in workspace +# - `cache:true` in root, override to `cache:false` in workspace +# - No `cache` config in root, override `cache:false` in workspace +# - `cache:false` in root still works if workspace has no turbo.json + +# cache:false in root, override to cache:true in workspace + $ ${TURBO} run cached-task-1 --filter=cached > tmp.log + $ cat tmp.log + \xe2\x80\xa2 Packages in scope: cached (esc) + \xe2\x80\xa2 Running cached-task-1 in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + cached:cached-task-1: cache miss, executing e74036fd7badaaf6 + cached:cached-task-1: + cached:cached-task-1: > cached-task-1 + cached:cached-task-1: > echo 'cached-task-1' > out/foo.min.txt + cached:cached-task-1: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s+[.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "cached:cached-task-1.* executing .*" | awk '{print $5}') + $ echo $HASH + [a-z0-9]{16} (re) + $ tar -tf $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + apps/cached/.turbo/turbo-cached-task-1.log + apps/cached/out/ + apps/cached/out/.keep + apps/cached/out/foo.min.txt + +# cache:true in root, override to cache:false in workspace + $ ${TURBO} run cached-task-2 --filter=cached > tmp.log + $ cat tmp.log + \xe2\x80\xa2 Packages in scope: cached (esc) + \xe2\x80\xa2 Running cached-task-2 in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + cached:cached-task-2: cache bypass, force executing a98a2c287f1d2763 + cached:cached-task-2: + cached:cached-task-2: > cached-task-2 + cached:cached-task-2: > echo 'cached-task-2' > out/foo.min.txt + cached:cached-task-2: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s+[.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "cached:cached-task-2.* executing .*" | awk '{print $6}') + $ echo $HASH + [a-z0-9]{16} (re) + $ test -f $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + [1] + +no `cache` config in root, cache:false in workspace + $ ${TURBO} run cached-task-3 --filter=cached > tmp.log + $ cat tmp.log + \xe2\x80\xa2 Packages in scope: cached (esc) + \xe2\x80\xa2 Running cached-task-3 in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + cached:cached-task-3: cache bypass, force executing 8a426151da6db286 + cached:cached-task-3: + cached:cached-task-3: > cached-task-3 + cached:cached-task-3: > echo 'cached-task-3' > out/foo.min.txt + cached:cached-task-3: + + Tasks: 1 successful, 1 total + Cached: 0 cached,
1 total + Time:\s+[.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "cached:cached-task-3.* executing .*" | awk '{print $6}') + $ echo $HASH + [a-z0-9]{16} (re) + $ test -f $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + [1] + +cache:false in root, no turbo.json in workspace. +Note that this is run against another workspace than the other tests, because +we already have a workspace that doesn't have a config + $ ${TURBO} run cached-task-4 --filter=missing-workspace-config > tmp.log + $ cat tmp.log + \xe2\x80\xa2 Packages in scope: missing-workspace-config (esc) + \xe2\x80\xa2 Running cached-task-4 in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + missing-workspace-config:cached-task-4: cache bypass, force executing ecbdd1ee8d9f34a5 + missing-workspace-config:cached-task-4: + missing-workspace-config:cached-task-4: > cached-task-4 + missing-workspace-config:cached-task-4: > echo 'cached-task-4' > out/foo.min.txt + missing-workspace-config:cached-task-4: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s+[.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "missing-workspace-config:cached-task-4.* executing .*" | awk '{print $6}') + $ echo $HASH + [a-z0-9]{16} (re) + $ test -f $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + [1] diff --git a/cli/integration_tests/composable_config/composing-config-change.t b/cli/integration_tests/composable_config/composing-config-change.t new file mode 100644 index 0000000000000..b6e9462a6fb10 --- /dev/null +++ b/cli/integration_tests/composable_config/composing-config-change.t @@ -0,0 +1,16 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +# 1. First run, check the hash + $ ${TURBO} run config-change-task --filter=config-change --dry=json | jq .tasks[0].hash + "b17ced7629048d97" + +2. Run again and assert task hash stays the same + $ ${TURBO} run config-change-task --filter=config-change --dry=json | jq .tasks[0].hash + "b17ced7629048d97" + +3. Change turbo.json and assert that hash changes + $ cp $TARGET_DIR/apps/config-change/turbo-changed.json $TARGET_DIR/apps/config-change/turbo.json + $ ${TURBO} run config-change-task --filter=config-change --dry=json | jq .tasks[0].hash + "6c56b35e06abb856" diff --git a/cli/integration_tests/composable_config/composing-invalid-config.t b/cli/integration_tests/composable_config/composing-invalid-config.t new file mode 100644 index 0000000000000..061ac108837cd --- /dev/null +++ b/cli/integration_tests/composable_config/composing-invalid-config.t @@ -0,0 +1,37 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +# The test is grepping from a logfile because the list of errors can appear in any order + +Errors are shown if we run a task that is misconfigured (invalid-config#build) + $ ${TURBO} run build --filter=invalid-config > tmp.log 2>&1 + [1] + $ cat tmp.log | grep "Invalid turbo.json" + ERROR run failed: error preparing engine: Invalid turbo.json + Turbo error: error preparing engine: Invalid turbo.json + $ cat tmp.log | grep "invalid-config#build" + - "invalid-config#build". Use "build" instead + - "invalid-config#build". Use "build" instead + $ cat tmp.log | grep "//#some-root-task" + - "//#some-root-task". Use "some-root-task" instead + - "//#some-root-task". Use "some-root-task" instead + $ cat tmp.log | grep "extends" + - No "extends" key found + - No "extends" key found + +Same error even if you're running a valid task in the package.
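+# (Both failures happen while "preparing engine": turbo parses and validates every workspace turbo.json when it builds the task graph, so a malformed workspace config fails the run no matter which task is requested. The errors above imply the valid shape: plain task names plus an "extends" key.)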
+ $ ${TURBO} run valid-task --filter=invalid-config > tmp.log 2>&1 + [1] + $ cat tmp.log | grep "Invalid turbo.json" + ERROR run failed: error preparing engine: Invalid turbo.json + Turbo error: error preparing engine: Invalid turbo.json + $ cat tmp.log | grep "invalid-config#build" + - "invalid-config#build". Use "build" instead + - "invalid-config#build". Use "build" instead + $ cat tmp.log | grep "//#some-root-task" + - "//#some-root-task". Use "some-root-task" instead + - "//#some-root-task". Use "some-root-task" instead + $ cat tmp.log | grep "extends" + - No "extends" key found + - No "extends" key found diff --git a/cli/integration_tests/composable_config/composing-missing-workspace-config-deps.t b/cli/integration_tests/composable_config/composing-missing-workspace-config-deps.t new file mode 100644 index 0000000000000..89a9738bb5e79 --- /dev/null +++ b/cli/integration_tests/composable_config/composing-missing-workspace-config-deps.t @@ -0,0 +1,44 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +# The missing-workspace-config-task-with-deps configures dependsOn in the root turbo.json. +# The workspace does not have a turbo.json config. This test checks that both regular dependencies +# and topological dependencies are retained from the root config. + +# 1. First run, assert that dependent tasks run `dependsOn` + $ ${TURBO} run missing-workspace-config-task-with-deps --filter=missing-workspace-config > tmp.log +# Validate in pieces. The `missing-workspace-config-task-with-deps` task has two dependsOn values, and those tasks +# can run in non-deterministic order. So we need to validate the logs in pieces. + $ cat tmp.log | grep "in scope" -A 2 + \xe2\x80\xa2 Packages in scope: missing-workspace-config (esc) + \xe2\x80\xa2 Running missing-workspace-config-task-with-deps in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + + $ cat tmp.log | grep "missing-workspace-config:missing-workspace-config-task-with-deps" + missing-workspace-config:missing-workspace-config-task-with-deps: cache miss, executing 68582686ba468bdb + missing-workspace-config:missing-workspace-config-task-with-deps: + missing-workspace-config:missing-workspace-config-task-with-deps: > missing-workspace-config-task-with-deps + missing-workspace-config:missing-workspace-config-task-with-deps: > echo "running missing-workspace-config-task-with-deps" > out/foo.min.txt + missing-workspace-config:missing-workspace-config-task-with-deps: + + $ cat tmp.log | grep "missing-workspace-config:missing-workspace-config-underlying-task" + missing-workspace-config:missing-workspace-config-underlying-task: cache miss, executing 73dd0ecdcdc4a3f4 + missing-workspace-config:missing-workspace-config-underlying-task: + missing-workspace-config:missing-workspace-config-underlying-task: > missing-workspace-config-underlying-task + missing-workspace-config:missing-workspace-config-underlying-task: > echo "running missing-workspace-config-underlying-task" + missing-workspace-config:missing-workspace-config-underlying-task: + missing-workspace-config:missing-workspace-config-underlying-task: running missing-workspace-config-underlying-task + + $ cat tmp.log | grep "blank-pkg:missing-workspace-config-underlying-topo-task" + blank-pkg:missing-workspace-config-underlying-topo-task: cache miss, executing 7f25337c32f440a0 + blank-pkg:missing-workspace-config-underlying-topo-task: + blank-pkg:missing-workspace-config-underlying-topo-task: > missing-workspace-config-underlying-topo-task +
blank-pkg:missing-workspace-config-underlying-topo-task: > echo "missing-workspace-config-underlying-topo-task from blank-pkg" + blank-pkg:missing-workspace-config-underlying-topo-task: + blank-pkg:missing-workspace-config-underlying-topo-task: missing-workspace-config-underlying-topo-task from blank-pkg + + $ cat tmp.log | grep "Tasks:" -A 2 + Tasks: 3 successful, 3 total + Cached: 0 cached, 3 total + Time:\s*[\.0-9]+m?s (re) diff --git a/cli/integration_tests/composable_config/composing-missing-workspace-config.t b/cli/integration_tests/composable_config/composing-missing-workspace-config.t new file mode 100644 index 0000000000000..3441ab96b6984 --- /dev/null +++ b/cli/integration_tests/composable_config/composing-missing-workspace-config.t @@ -0,0 +1,107 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +# The missing-workspace-config-task task in the root turbo.json has config. The workspace +# does not have a turbo.json config. The tests below use `missing-workspace-config-task` to assert that: +# - `outputs`, `inputs`, `env` are retained from the root. + +# 1. First run, assert for `outputs` + $ ${TURBO} run missing-workspace-config-task --filter=missing-workspace-config > tmp.log + $ cat tmp.log + \xe2\x80\xa2 Packages in scope: missing-workspace-config (esc) + \xe2\x80\xa2 Running missing-workspace-config-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + missing-workspace-config:missing-workspace-config-task: cache miss, executing b4851e92e758d2a8 + missing-workspace-config:missing-workspace-config-task: + missing-workspace-config:missing-workspace-config-task: > missing-workspace-config-task + missing-workspace-config:missing-workspace-config-task: > echo "running missing-workspace-config-task" > out/foo.min.txt + missing-workspace-config:missing-workspace-config-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "missing-workspace-config:missing-workspace-config-task.* executing .*" | awk '{print $5}') + $ tar -tf $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + apps/missing-workspace-config/.turbo/turbo-missing-workspace-config-task.log + apps/missing-workspace-config/out/ + apps/missing-workspace-config/out/.keep + apps/missing-workspace-config/out/foo.min.txt + +2. Run again and assert cache hit, and that output is suppressed + $ ${TURBO} run missing-workspace-config-task --filter=missing-workspace-config + \xe2\x80\xa2 Packages in scope: missing-workspace-config (esc) + \xe2\x80\xa2 Running missing-workspace-config-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + missing-workspace-config:missing-workspace-config-task: cache hit, suppressing output b4851e92e758d2a8 + + Tasks: 1 successful, 1 total + Cached: 1 cached, 1 total + Time:\s*[\.0-9]+m?s >>> FULL TURBO (re) + +3.
Change input file and assert cache miss, and not FULL TURBO + $ echo "more text" >> $TARGET_DIR/apps/missing-workspace-config/src/foo.txt + $ ${TURBO} run missing-workspace-config-task --filter=missing-workspace-config + \xe2\x80\xa2 Packages in scope: missing-workspace-config (esc) + \xe2\x80\xa2 Running missing-workspace-config-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + missing-workspace-config:missing-workspace-config-task: cache miss, executing 1ca45c09eccb3931 + missing-workspace-config:missing-workspace-config-task: + missing-workspace-config:missing-workspace-config-task: > missing-workspace-config-task + missing-workspace-config:missing-workspace-config-task: > echo "running missing-workspace-config-task" > out/foo.min.txt + missing-workspace-config:missing-workspace-config-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + + +3a. Changing a different file (that is not in `inputs` config) gets cache hit and FULL TURBO + $ echo "more text" >> $TARGET_DIR/apps/missing-workspace-config/src/bar.txt + $ ${TURBO} run missing-workspace-config-task --filter=missing-workspace-config + \xe2\x80\xa2 Packages in scope: missing-workspace-config (esc) + \xe2\x80\xa2 Running missing-workspace-config-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + missing-workspace-config:missing-workspace-config-task: cache hit, suppressing output 1ca45c09eccb3931 + + Tasks: 1 successful, 1 total + Cached: 1 cached, 1 total + Time:\s*[\.0-9]+m?s >>> FULL TURBO (re) + +4. Set env var and assert cache miss, and that hash is different from above + $ SOME_VAR=somevalue ${TURBO} run missing-workspace-config-task --filter=missing-workspace-config + \xe2\x80\xa2 Packages in scope: missing-workspace-config (esc) + \xe2\x80\xa2 Running missing-workspace-config-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + missing-workspace-config:missing-workspace-config-task: cache miss, executing 06fd150c6e5e8a1b + missing-workspace-config:missing-workspace-config-task: + missing-workspace-config:missing-workspace-config-task: > missing-workspace-config-task + missing-workspace-config:missing-workspace-config-task: > echo "running missing-workspace-config-task" > out/foo.min.txt + missing-workspace-config:missing-workspace-config-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + +5. 
Assert that task with cache:false doesn't get cached + $ ${TURBO} run cached-task-4 --filter=missing-workspace-config > tmp.log + $ cat tmp.log + \xe2\x80\xa2 Packages in scope: missing-workspace-config (esc) + \xe2\x80\xa2 Running cached-task-4 in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + missing-workspace-config:cached-task-4: cache bypass, force executing aaa8d1d189163b4c + missing-workspace-config:cached-task-4: + missing-workspace-config:cached-task-4: > cached-task-4 + missing-workspace-config:cached-task-4: > echo 'cached-task-4' > out/foo.min.txt + missing-workspace-config:cached-task-4: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "missing-workspace-config:cached-task-4.* executing .*" | awk '{print $6}') + $ echo $HASH + [a-z0-9]{16} (re) + $ test -f $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + [1] diff --git a/cli/integration_tests/composable_config/composing-omit-keys-deps.t b/cli/integration_tests/composable_config/composing-omit-keys-deps.t new file mode 100644 index 0000000000000..ade83979a5c4c --- /dev/null +++ b/cli/integration_tests/composable_config/composing-omit-keys-deps.t @@ -0,0 +1,51 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +# The omit-keys-task-with-deps configures dependsOn. The workspace config +# defines the task, but does not override anything. This test checks +# that both regular dependencies and topological dependencies are retained +# from the root config. + +# 1. First run, assert for `dependsOn` and `outputs` keys + $ ${TURBO} run omit-keys-task-with-deps --filter=omit-keys > tmp.log +# Validate in pieces. The `omit-keys-task-with-deps` task has two dependsOn values, and those tasks +# can run in non-deterministic order. So we need to validate the logs in pieces.
+ $ cat tmp.log | grep "in scope" -A 1 + \xe2\x80\xa2 Packages in scope: omit-keys (esc) + \xe2\x80\xa2 Running omit-keys-task-with-deps in 1 packages (esc) + + $ cat tmp.log | grep "omit-keys:omit-keys-task-with-deps" + omit-keys:omit-keys-task-with-deps: cache miss, executing c12b0d1d341419fd + omit-keys:omit-keys-task-with-deps: + omit-keys:omit-keys-task-with-deps: > omit-keys-task-with-deps + omit-keys:omit-keys-task-with-deps: > echo "running omit-keys-task-with-deps" > out/foo.min.txt + omit-keys:omit-keys-task-with-deps: + + $ cat tmp.log | grep "omit-keys:omit-keys-underlying-task" + omit-keys:omit-keys-underlying-task: cache miss, executing a16948b5c74ccef9 + omit-keys:omit-keys-underlying-task: + omit-keys:omit-keys-underlying-task: > omit-keys-underlying-task + omit-keys:omit-keys-underlying-task: > echo "running omit-keys-underlying-task" + omit-keys:omit-keys-underlying-task: + omit-keys:omit-keys-underlying-task: running omit-keys-underlying-task + + $ cat tmp.log | grep "blank-pkg:omit-keys-underlying-topo-task" + blank-pkg:omit-keys-underlying-topo-task: cache miss, executing 5b3c524f8ead8679 + blank-pkg:omit-keys-underlying-topo-task: + blank-pkg:omit-keys-underlying-topo-task: > omit-keys-underlying-topo-task + blank-pkg:omit-keys-underlying-topo-task: > echo "omit-keys-underlying-topo-task from blank-pkg" + blank-pkg:omit-keys-underlying-topo-task: + blank-pkg:omit-keys-underlying-topo-task: omit-keys-underlying-topo-task from blank-pkg + + $ cat tmp.log | grep "Tasks:" -A 2 + Tasks: 3 successful, 3 total + Cached: 0 cached, 3 total + Time:\s*[\.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "omit-keys:omit-keys-task-with-deps.* executing .*" | awk '{print $5}') + $ tar -tf $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + apps/omit-keys/.turbo/turbo-omit-keys-task-with-deps.log + apps/omit-keys/out/ + apps/omit-keys/out/.keep + apps/omit-keys/out/foo.min.txt diff --git a/cli/integration_tests/composable_config/composing-omit-keys.t b/cli/integration_tests/composable_config/composing-omit-keys.t new file mode 100644 index 0000000000000..a4399471399f0 --- /dev/null +++ b/cli/integration_tests/composable_config/composing-omit-keys.t @@ -0,0 +1,89 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +# The omit-keys-task task in the root turbo.json has ALL the config. The workspace config +# defines the task, but does not override any of the keys. The tests below use `omit-keys-task` +# to assert that `outputs`, `inputs`, `env` are retained from the root. +# These tests use a different task from the composing-omit-keys-deps.t, because +# tasks with dependencies have side effects and can have cache +# misses because of those dependencies. These tests attempt to isolate for configs other than dependsOn. + +# 1. 
First run, assert for `outputs` + $ ${TURBO} run omit-keys-task --filter=omit-keys > tmp.log + $ cat tmp.log + \xe2\x80\xa2 Packages in scope: omit-keys (esc) + \xe2\x80\xa2 Running omit-keys-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + omit-keys:omit-keys-task: cache miss, executing a2c5f2a3a6b20d6e + omit-keys:omit-keys-task: + omit-keys:omit-keys-task: > omit-keys-task + omit-keys:omit-keys-task: > echo "running omit-keys-task" > out/foo.min.txt + omit-keys:omit-keys-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "omit-keys:omit-keys-task.* executing .*" | awk '{print $5}') + $ tar -tf $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + apps/omit-keys/.turbo/turbo-omit-keys-task.log + apps/omit-keys/out/ + apps/omit-keys/out/.keep + apps/omit-keys/out/foo.min.txt + +2. Run again and assert cache hit, and that output is suppressed + $ ${TURBO} run omit-keys-task --filter=omit-keys + \xe2\x80\xa2 Packages in scope: omit-keys (esc) + \xe2\x80\xa2 Running omit-keys-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + omit-keys:omit-keys-task: cache hit, suppressing output a2c5f2a3a6b20d6e + + Tasks: 1 successful, 1 total + Cached: 1 cached, 1 total + Time:\s*[\.0-9]+m?s >>> FULL TURBO (re) + +3. Change input file and assert cache miss, and not FULL TURBO + $ echo "more text" >> $TARGET_DIR/apps/omit-keys/src/foo.txt + $ ${TURBO} run omit-keys-task --filter=omit-keys + \xe2\x80\xa2 Packages in scope: omit-keys (esc) + \xe2\x80\xa2 Running omit-keys-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + omit-keys:omit-keys-task: cache miss, executing b8b6909ecb130e0f + omit-keys:omit-keys-task: + omit-keys:omit-keys-task: > omit-keys-task + omit-keys:omit-keys-task: > echo "running omit-keys-task" > out/foo.min.txt + omit-keys:omit-keys-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + + +3a. Changing a different file (that is not in `inputs` config) gets cache hit and FULL TURBO + $ echo "more text" >> $TARGET_DIR/apps/omit-keys/src/bar.txt + $ ${TURBO} run omit-keys-task --filter=omit-keys + \xe2\x80\xa2 Packages in scope: omit-keys (esc) + \xe2\x80\xa2 Running omit-keys-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + omit-keys:omit-keys-task: cache hit, suppressing output b8b6909ecb130e0f + + Tasks: 1 successful, 1 total + Cached: 1 cached, 1 total + Time:\s*[\.0-9]+m?s >>> FULL TURBO (re) + +4. Set env var and assert cache miss, and that hash is different from above + $ SOME_VAR=somevalue ${TURBO} run omit-keys-task --filter=omit-keys + \xe2\x80\xa2 Packages in scope: omit-keys (esc) + \xe2\x80\xa2 Running omit-keys-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + omit-keys:omit-keys-task: cache miss, executing bb73a08ebe0a4ed6 + omit-keys:omit-keys-task: + omit-keys:omit-keys-task: > omit-keys-task + omit-keys:omit-keys-task: > echo "running omit-keys-task" > out/foo.min.txt + omit-keys:omit-keys-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + diff --git a/cli/integration_tests/composable_config/composing-override-values-deps.t b/cli/integration_tests/composable_config/composing-override-values-deps.t new file mode 100644 index 0000000000000..a68b3a10a639c --- /dev/null +++ b/cli/integration_tests/composable_config/composing-override-values-deps.t @@ -0,0 +1,44 @@ +Setup + $ . 
${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +# The override-values-task-with-deps configures dependsOn in the root turbo.json. +# The workspace does not have a turbo.json config. This test checks that both regular dependencies +# and topological dependencies are retained from the root config. + +# 1. First run, assert that the dependent tasks from `dependsOn` run + $ ${TURBO} run override-values-task-with-deps --filter=override-values > tmp.log +# Validate in pieces. The `override-values-task-with-deps` task has two dependsOn values, and those tasks +# can run in non-deterministic order. So we need to validate the logs in pieces. + $ cat tmp.log | grep "in scope" -A 2 + \xe2\x80\xa2 Packages in scope: override-values (esc) + \xe2\x80\xa2 Running override-values-task-with-deps in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + + $ cat tmp.log | grep "override-values:override-values-task-with-deps" + override-values:override-values-task-with-deps: cache miss, executing cf35abb7b46ffad7 + override-values:override-values-task-with-deps: + override-values:override-values-task-with-deps: > override-values-task-with-deps + override-values:override-values-task-with-deps: > echo "running override-values-task-with-deps" > out/foo.min.txt + override-values:override-values-task-with-deps: + + $ cat tmp.log | grep "override-values:override-values-underlying-task" + override-values:override-values-underlying-task: cache miss, executing 783a94e433071496 + override-values:override-values-underlying-task: + override-values:override-values-underlying-task: > override-values-underlying-task + override-values:override-values-underlying-task: > echo "running override-values-underlying-task" + override-values:override-values-underlying-task: + override-values:override-values-underlying-task: running override-values-underlying-task + + $ cat tmp.log | grep "blank-pkg:override-values-underlying-topo-task" + blank-pkg:override-values-underlying-topo-task: cache miss, executing 0e2630802fda80c3 + blank-pkg:override-values-underlying-topo-task: + blank-pkg:override-values-underlying-topo-task: > override-values-underlying-topo-task + blank-pkg:override-values-underlying-topo-task: > echo "override-values-underlying-topo-task from blank-pkg" + blank-pkg:override-values-underlying-topo-task: + blank-pkg:override-values-underlying-topo-task: override-values-underlying-topo-task from blank-pkg + + $ cat tmp.log | grep "Tasks:" -A 2 + Tasks: 3 successful, 3 total + Cached: 0 cached, 3 total + Time:\s*[\.0-9]+m?s (re) diff --git a/cli/integration_tests/composable_config/composing-override-values.t b/cli/integration_tests/composable_config/composing-override-values.t new file mode 100644 index 0000000000000..9297afdcd8e73 --- /dev/null +++ b/cli/integration_tests/composable_config/composing-override-values.t @@ -0,0 +1,108 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) ./monorepo + +# The override-values-task task in the root turbo.json has ALL the config. The workspace config +# defines the task and overrides all the keys. The tests below use `override-values-task` to assert that: +# - `outputs`, `inputs`, `env`, and `outputMode` are overridden from the root config. + +# 1. First run, assert that the right `outputs` are cached.
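To make the cache assertions below easier to follow, here is an editor's toy model, not turbo's actual hashing: the task hash keys off the effective (merged) inputs and env, so after the workspace override only src/bar.txt and OTHER_VAR participate, while src/foo.txt and SOME_VAR no longer do.

    package main

    import (
        "crypto/sha256"
        "fmt"
    )

    // taskHash is a toy stand-in for turbo's task hashing: it folds the
    // declared input files and env vars into a short digest.
    func taskHash(files, env map[string]string, inputs, envKeys []string) string {
        h := sha256.New()
        for _, f := range inputs {
            h.Write([]byte(f + "=" + files[f]))
        }
        for _, k := range envKeys {
            h.Write([]byte(k + "=" + env[k]))
        }
        return fmt.Sprintf("%x", h.Sum(nil))[:16]
    }

    func main() {
        files := map[string]string{"src/bar.txt": "v1", "src/foo.txt": "v1"}
        env := map[string]string{}
        // The workspace override leaves only these in the effective config.
        inputs, envKeys := []string{"src/bar.txt"}, []string{"OTHER_VAR"}
        before := taskHash(files, env, inputs, envKeys)
        files["src/foo.txt"] = "v2" // not a declared input after the override
        fmt.Println(before == taskHash(files, env, inputs, envKeys)) // true: still a cache hit
    }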
+ $ ${TURBO} run override-values-task --filter=override-values > tmp.log + $ cat tmp.log + \xe2\x80\xa2 Packages in scope: override-values (esc) + \xe2\x80\xa2 Running override-values-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + override-values:override-values-task: cache miss, executing 51d1d668d9adffe5 + override-values:override-values-task: + override-values:override-values-task: > override-values-task + override-values:override-values-task: > echo "running override-values-task" > lib/bar.min.txt + override-values:override-values-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + + $ HASH=$(cat tmp.log | grep -E "override-values:override-values-task.* executing .*" | awk '{print $5}') + $ tar -tf $TARGET_DIR/node_modules/.cache/turbo/$HASH.tar.zst; + apps/override-values/.turbo/turbo-override-values-task.log + apps/override-values/lib/ + apps/override-values/lib/.keep + apps/override-values/lib/bar.min.txt + +2. Run again and assert cache hit, and that full output is displayed + $ ${TURBO} run override-values-task --filter=override-values + \xe2\x80\xa2 Packages in scope: override-values (esc) + \xe2\x80\xa2 Running override-values-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + override-values:override-values-task: cache hit, replaying output 51d1d668d9adffe5 + override-values:override-values-task: + override-values:override-values-task: > override-values-task + override-values:override-values-task: > echo "running override-values-task" > lib/bar.min.txt + override-values:override-values-task: + + Tasks: 1 successful, 1 total + Cached: 1 cached, 1 total + Time:\s*[\.0-9]+m?s >>> FULL TURBO (re) + +3. Change input file and assert cache miss + $ echo "more text" >> $TARGET_DIR/apps/override-values/src/bar.txt + $ ${TURBO} run override-values-task --filter=override-values + \xe2\x80\xa2 Packages in scope: override-values (esc) + \xe2\x80\xa2 Running override-values-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + override-values:override-values-task: cache miss, executing 8f07b7ef52189a94 + override-values:override-values-task: + override-values:override-values-task: > override-values-task + override-values:override-values-task: > echo "running override-values-task" > lib/bar.min.txt + override-values:override-values-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + +3a. Change a file that is declared as input in root config, and assert cache hit and FULL TURBO + $ echo "more text" >> $TARGET_DIR/apps/override-values/src/foo.txt + $ ${TURBO} run override-values-task --filter=override-values + \xe2\x80\xa2 Packages in scope: override-values (esc) + \xe2\x80\xa2 Running override-values-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + override-values:override-values-task: cache hit, replaying output 8f07b7ef52189a94 + override-values:override-values-task: + override-values:override-values-task: > override-values-task + override-values:override-values-task: > echo "running override-values-task" > lib/bar.min.txt + override-values:override-values-task: + + Tasks: 1 successful, 1 total + Cached: 1 cached, 1 total + Time:\s*[\.0-9]+m?s >>> FULL TURBO (re) + +4. 
Set env var and assert cache miss, and that hash is different from above + $ OTHER_VAR=somevalue ${TURBO} run override-values-task --filter=override-values + \xe2\x80\xa2 Packages in scope: override-values (esc) + \xe2\x80\xa2 Running override-values-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + override-values:override-values-task: cache miss, executing 7106c9435e784aaf + override-values:override-values-task: + override-values:override-values-task: > override-values-task + override-values:override-values-task: > echo "running override-values-task" > lib/bar.min.txt + override-values:override-values-task: + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + +4a. Run again with the same env var set, and assert cache hit and FULL TURBO + $ OTHER_VAR=somevalue ${TURBO} run override-values-task --filter=override-values + \xe2\x80\xa2 Packages in scope: override-values (esc) + \xe2\x80\xa2 Running override-values-task in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + override-values:override-values-task: cache hit, replaying output 7106c9435e784aaf + override-values:override-values-task: + override-values:override-values-task: > override-values-task + override-values:override-values-task: > echo "running override-values-task" > lib/bar.min.txt + override-values:override-values-task: + + Tasks: 1 successful, 1 total + Cached: 1 cached, 1 total + Time:\s*[\.0-9]+m?s >>> FULL TURBO (re) + diff --git a/cli/integration_tests/composable_config/composing-persistent.t b/cli/integration_tests/composable_config/composing-persistent.t new file mode 100644 index 0000000000000..54117d17ec83e --- /dev/null +++ b/cli/integration_tests/composable_config/composing-persistent.t @@ -0,0 +1,60 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . 
${TESTDIR}/setup.sh $(pwd) ./monorepo + +This test covers: +- [x] `persistent:true` in root, omit in workspace with turbo.json +- [x] `persistent:true` in root, override to `false` in workspace +- [x] `persistent:true` in root, task exists in workspace, but doesn't touch persistent +- [x] No `persistent` flag in root, add `true` in workspace + +# persistent-task-1-parent dependsOn persistent-task-1 +# persistent-task-1 is persistent:true in the root workspace, and does NOT get overridden in the workspace + $ ${TURBO} run persistent-task-1-parent --filter=persistent + ERROR run failed: error preparing engine: Invalid persistent task dependency: + "persistent#persistent-task-1" is a persistent task, "persistent#persistent-task-1-parent" cannot depend on it + Turbo error: error preparing engine: Invalid persistent task dependency: + "persistent#persistent-task-1" is a persistent task, "persistent#persistent-task-1-parent" cannot depend on it + [1] + +# persistent-task-2-parent dependsOn persistent-task-2 +# persistent-task-2 is persistent:true in the root workspace, and IS overridden to false in the workspace + $ ${TURBO} run persistent-task-2-parent --filter=persistent + \xe2\x80\xa2 Packages in scope: persistent (esc) + \xe2\x80\xa2 Running persistent-task-2-parent in 1 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + persistent:persistent-task-2: cache miss, executing 5b8a1c3719e1add7 + persistent:persistent-task-2: + persistent:persistent-task-2: > persistent-task-2 + persistent:persistent-task-2: > echo 'persistent-task-2' + persistent:persistent-task-2: + persistent:persistent-task-2: persistent-task-2 + persistent:persistent-task-2-parent: cache miss, executing e352678c40ca2536 + persistent:persistent-task-2-parent: + persistent:persistent-task-2-parent: > persistent-task-2-parent + persistent:persistent-task-2-parent: > echo 'persistent-task-2-parent' + persistent:persistent-task-2-parent: + persistent:persistent-task-2-parent: persistent-task-2-parent + + Tasks: 2 successful, 2 total + Cached: 0 cached, 2 total + Time:\s*[\.0-9]+m?s (re) + +# persistent-task-3-parent dependsOn persistent-task-3 +# persistent-task-3 is persistent:true in the root workspace +# persistent-task-3 is defined in workspace, but does NOT have the persistent flag + $ ${TURBO} run persistent-task-3-parent --filter=persistent + ERROR run failed: error preparing engine: Invalid persistent task dependency: + "persistent#persistent-task-3" is a persistent task, "persistent#persistent-task-3-parent" cannot depend on it + Turbo error: error preparing engine: Invalid persistent task dependency: + "persistent#persistent-task-3" is a persistent task, "persistent#persistent-task-3-parent" cannot depend on it + [1] + +# persistent-task-4-parent dependsOn persistent-task-4 +# persistent-task-4 has no config in the root workspace, and is set to true in the workspace + $ ${TURBO} run persistent-task-4-parent --filter=persistent + ERROR run failed: error preparing engine: Invalid persistent task dependency: + "persistent#persistent-task-4" is a persistent task, "persistent#persistent-task-4-parent" cannot depend on it + Turbo error: error preparing engine: Invalid persistent task dependency: + "persistent#persistent-task-4" is a persistent task, "persistent#persistent-task-4-parent" cannot depend on it + [1] diff --git a/cli/integration_tests/composable_config/monorepo/.gitignore b/cli/integration_tests/composable_config/monorepo/.gitignore new file mode 100644 index 0000000000000..8671e9c7a8ab4 --- /dev/null
+++ b/cli/integration_tests/composable_config/monorepo/.gitignore @@ -0,0 +1,7 @@ +node_modules/ +.turbo +.npmrc +apps/**/lib/** +apps/**/out/** +!apps/**/lib/.keep +!apps/**/out/.keep diff --git a/cli/integration_tests/composable_config/monorepo/apps/add-keys/out/.keep b/cli/integration_tests/composable_config/monorepo/apps/add-keys/out/.keep new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/cli/integration_tests/composable_config/monorepo/apps/add-keys/package.json b/cli/integration_tests/composable_config/monorepo/apps/add-keys/package.json new file mode 100644 index 0000000000000..2e576b50e43f7 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/add-keys/package.json @@ -0,0 +1,10 @@ +{ + "name": "add-keys", + "scripts": { + "add-keys-task": "echo \"running add-keys-task\" > out/foo.min.txt", + "add-keys-underlying-task": "echo \"running add-keys-underlying-task\"" + }, + "dependencies": { + "blank-pkg": "*" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/add-keys/src/foo.txt b/cli/integration_tests/composable_config/monorepo/apps/add-keys/src/foo.txt new file mode 100644 index 0000000000000..4491a1e461d4d --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/add-keys/src/foo.txt @@ -0,0 +1 @@ +example text diff --git a/cli/integration_tests/composable_config/monorepo/apps/add-keys/turbo.json b/cli/integration_tests/composable_config/monorepo/apps/add-keys/turbo.json new file mode 100644 index 0000000000000..91bbd1a6ebd4b --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/add-keys/turbo.json @@ -0,0 +1,14 @@ +{ + "extends": ["//"], + + "pipeline": { + "add-keys-task": { + "dependsOn": ["add-keys-underlying-task"], + "inputs": ["src/foo.txt"], + "outputs": ["out/**"], + "env": ["SOME_VAR"], + "outputMode": "new-only" + }, + "add-keys-underlying-task": {} + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/add-tasks/out/.keep b/cli/integration_tests/composable_config/monorepo/apps/add-tasks/out/.keep new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/cli/integration_tests/composable_config/monorepo/apps/add-tasks/package.json b/cli/integration_tests/composable_config/monorepo/apps/add-tasks/package.json new file mode 100644 index 0000000000000..e41cd6efca97a --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/add-tasks/package.json @@ -0,0 +1,9 @@ +{ + "name": "add-tasks", + "scripts": { + "added-task": "echo \"running added-task\" > out/foo.min.txt" + }, + "dependencies": { + "blank-pkg": "*" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/add-tasks/src/foo.txt b/cli/integration_tests/composable_config/monorepo/apps/add-tasks/src/foo.txt new file mode 100644 index 0000000000000..4491a1e461d4d --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/add-tasks/src/foo.txt @@ -0,0 +1 @@ +example text diff --git a/cli/integration_tests/composable_config/monorepo/apps/add-tasks/turbo.json b/cli/integration_tests/composable_config/monorepo/apps/add-tasks/turbo.json new file mode 100644 index 0000000000000..6be0e0aadeac8 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/add-tasks/turbo.json @@ -0,0 +1,9 @@ +{ + "extends": ["//"], + + "pipeline": { + "added-task": { + "outputs": ["out/**"] + } + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/bad-json/package.json 
b/cli/integration_tests/composable_config/monorepo/apps/bad-json/package.json new file mode 100644 index 0000000000000..e118a37603edb --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/bad-json/package.json @@ -0,0 +1,6 @@ +{ + "name": "bad-json", + "scripts": { + "trailing-comma": "echo 'trailing-comma'" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/cached/out/.keep b/cli/integration_tests/composable_config/monorepo/apps/cached/out/.keep new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/cli/integration_tests/composable_config/monorepo/apps/cached/package.json b/cli/integration_tests/composable_config/monorepo/apps/cached/package.json new file mode 100644 index 0000000000000..1ac157237884e --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/cached/package.json @@ -0,0 +1,8 @@ +{ + "name": "cached", + "scripts": { + "cached-task-1": "echo 'cached-task-1' > out/foo.min.txt", + "cached-task-2": "echo 'cached-task-2' > out/foo.min.txt", + "cached-task-3": "echo 'cached-task-3' > out/foo.min.txt" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/cached/turbo.json b/cli/integration_tests/composable_config/monorepo/apps/cached/turbo.json new file mode 100644 index 0000000000000..89633c6bb9bd6 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/cached/turbo.json @@ -0,0 +1,14 @@ +{ + "extends": ["//"], + "pipeline": { + "cached-task-1": { + "cache": true + }, + "cached-task-2": { + "cache": false + }, + "cached-task-3": { + "cache": false + } + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/config-change/package.json b/cli/integration_tests/composable_config/monorepo/apps/config-change/package.json new file mode 100644 index 0000000000000..ea37b9a90f2ee --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/config-change/package.json @@ -0,0 +1,6 @@ +{ + "name": "config-change", + "scripts": { + "config-change-task": "echo 'config-change-task'" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/config-change/src/foo.txt b/cli/integration_tests/composable_config/monorepo/apps/config-change/src/foo.txt new file mode 100644 index 0000000000000..12f00e90b6ef7 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/config-change/src/foo.txt @@ -0,0 +1 @@ +contents diff --git a/cli/integration_tests/composable_config/monorepo/apps/config-change/turbo-changed.json b/cli/integration_tests/composable_config/monorepo/apps/config-change/turbo-changed.json new file mode 100644 index 0000000000000..59794bd4698db --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/config-change/turbo-changed.json @@ -0,0 +1,9 @@ +{ + "extends": ["//"], + "pipeline": { + "config-change-task": {}, + "other-task": { + "env": ["ARBITRARY_CHANGE"] + } + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/config-change/turbo.json b/cli/integration_tests/composable_config/monorepo/apps/config-change/turbo.json new file mode 100644 index 0000000000000..cedcef6a2d303 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/config-change/turbo.json @@ -0,0 +1,7 @@ +{ + "extends": ["//"], + "pipeline": { + "config-change-task": {}, + "other-task": {} + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/invalid-config/package.json b/cli/integration_tests/composable_config/monorepo/apps/invalid-config/package.json new file mode 100644 index 
0000000000000..b033b4cece044 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/invalid-config/package.json @@ -0,0 +1,6 @@ +{ + "name": "invalid-config", + "scripts": { + "build": "echo 'build invalid-config' > out/foo.min.txt" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/invalid-config/turbo.json b/cli/integration_tests/composable_config/monorepo/apps/invalid-config/turbo.json new file mode 100644 index 0000000000000..375a220129953 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/invalid-config/turbo.json @@ -0,0 +1,9 @@ +{ + "pipeline": { + "invalid-config#build": { + "outputs": ["out/**", "lib/**"] + }, + "//#some-root-task": {}, + "valid-task": {} + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/out/.keep b/cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/out/.keep new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/package.json b/cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/package.json new file mode 100644 index 0000000000000..1c4c93e495d16 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/package.json @@ -0,0 +1,12 @@ +{ + "name": "missing-workspace-config", + "scripts": { + "missing-workspace-config-task": "echo \"running missing-workspace-config-task\" > out/foo.min.txt", + "missing-workspace-config-task-with-deps": "echo \"running missing-workspace-config-task-with-deps\" > out/foo.min.txt", + "missing-workspace-config-underlying-task": "echo \"running missing-workspace-config-underlying-task\"", + "cached-task-4": "echo 'cached-task-4' > out/foo.min.txt" + }, + "dependencies": { + "blank-pkg": "*" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/src/foo.txt b/cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/src/foo.txt new file mode 100644 index 0000000000000..4491a1e461d4d --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/missing-workspace-config/src/foo.txt @@ -0,0 +1 @@ +example text diff --git a/cli/integration_tests/composable_config/monorepo/apps/omit-keys/out/.keep b/cli/integration_tests/composable_config/monorepo/apps/omit-keys/out/.keep new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/cli/integration_tests/composable_config/monorepo/apps/omit-keys/package.json b/cli/integration_tests/composable_config/monorepo/apps/omit-keys/package.json new file mode 100644 index 0000000000000..c03cf3bb3aef6 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/omit-keys/package.json @@ -0,0 +1,11 @@ +{ + "name": "omit-keys", + "scripts": { + "omit-keys-task": "echo \"running omit-keys-task\" > out/foo.min.txt", + "omit-keys-task-with-deps": "echo \"running omit-keys-task-with-deps\" > out/foo.min.txt", + "omit-keys-underlying-task": "echo \"running omit-keys-underlying-task\"" + }, + "devDependencies": { + "blank-pkg": "*" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/omit-keys/src/foo.txt b/cli/integration_tests/composable_config/monorepo/apps/omit-keys/src/foo.txt new file mode 100644 index 0000000000000..4491a1e461d4d --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/omit-keys/src/foo.txt @@ -0,0 +1 @@ +example text diff --git 
a/cli/integration_tests/composable_config/monorepo/apps/omit-keys/turbo.json b/cli/integration_tests/composable_config/monorepo/apps/omit-keys/turbo.json new file mode 100644 index 0000000000000..35ddebb5803f9 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/omit-keys/turbo.json @@ -0,0 +1,8 @@ +{ + "extends": ["//"], + + "pipeline": { + "omit-keys-task": {}, + "omit-keys-task-with-deps": {} + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/override-values/lib/.keep b/cli/integration_tests/composable_config/monorepo/apps/override-values/lib/.keep new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/cli/integration_tests/composable_config/monorepo/apps/override-values/out/.keep b/cli/integration_tests/composable_config/monorepo/apps/override-values/out/.keep new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/cli/integration_tests/composable_config/monorepo/apps/override-values/package.json b/cli/integration_tests/composable_config/monorepo/apps/override-values/package.json new file mode 100644 index 0000000000000..3e6243e94b973 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/override-values/package.json @@ -0,0 +1,11 @@ +{ + "name": "override-values", + "scripts": { + "override-values-task": "echo \"running override-values-task\" > lib/bar.min.txt", + "override-values-task-with-deps": "echo \"running override-values-task-with-deps\" > out/foo.min.txt", + "override-values-underlying-task": "echo \"running override-values-underlying-task\"" + }, + "dependencies": { + "blank-pkg": "*" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/override-values/src/foo.txt b/cli/integration_tests/composable_config/monorepo/apps/override-values/src/foo.txt new file mode 100644 index 0000000000000..4491a1e461d4d --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/override-values/src/foo.txt @@ -0,0 +1 @@ +example text diff --git a/cli/integration_tests/composable_config/monorepo/apps/override-values/turbo.json b/cli/integration_tests/composable_config/monorepo/apps/override-values/turbo.json new file mode 100644 index 0000000000000..605abda080544 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/override-values/turbo.json @@ -0,0 +1,15 @@ +{ + "extends": ["//"], + + "pipeline": { + "override-values-task": { + "inputs": ["src/bar.txt"], + "outputs": ["lib/**"], + "env": ["OTHER_VAR"], + "outputMode": "full" + }, + "override-values-task-with-deps": { + "dependsOn": [] + } + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/persistent/package.json b/cli/integration_tests/composable_config/monorepo/apps/persistent/package.json new file mode 100644 index 0000000000000..e88b70e1503ff --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/persistent/package.json @@ -0,0 +1,13 @@ +{ + "name": "persistent", + "scripts": { + "persistent-task-1": "echo 'persistent-task-1'", + "persistent-task-2": "echo 'persistent-task-2'", + "persistent-task-3": "echo 'persistent-task-3'", + "persistent-task-4": "echo 'persistent-task-4'", + "persistent-task-1-parent": "echo 'persistent-task-1-parent'", + "persistent-task-2-parent": "echo 'persistent-task-2-parent'", + "persistent-task-3-parent": "echo 'persistent-task-3-parent'", + "persistent-task-4-parent": "echo 'persistent-task-4-parent'" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/apps/persistent/turbo.json 
b/cli/integration_tests/composable_config/monorepo/apps/persistent/turbo.json new file mode 100644 index 0000000000000..e1f98a5b4f1af --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/apps/persistent/turbo.json @@ -0,0 +1,12 @@ +{ + "extends": ["//"], + "pipeline": { + "persistent-task-2": { + "persistent": false + }, + "persistent-task-3": {}, + "persistent-task-4": { + "persistent": true + } + } +} diff --git a/cli/integration_tests/composable_config/monorepo/package-lock.json b/cli/integration_tests/composable_config/monorepo/package-lock.json new file mode 100644 index 0000000000000..2d4cf0e67ddd4 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/package-lock.json @@ -0,0 +1,54 @@ +{ + "name": "monorepo", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "monorepo", + "workspaces": [ + "apps/**", + "packages/**" + ] + }, + "apps/add-keys": { + "dependencies": { + "blank-pkg": "*" + } + }, + "apps/omit-keys": { + "devDependencies": { + "blank-pkg": "*" + } + }, + "node_modules/add-keys": { + "resolved": "apps/add-keys", + "link": true + }, + "node_modules/blank-pkg": { + "resolved": "packages/blank-pkg", + "link": true + }, + "node_modules/omit-keys": { + "resolved": "apps/omit-keys", + "link": true + }, + "packages/blank-pkg": {} + }, + "dependencies": { + "add-keys": { + "version": "file:apps/add-keys", + "requires": { + "blank-pkg": "*" + } + }, + "blank-pkg": { + "version": "file:packages/blank-pkg" + }, + "omit-keys": { + "version": "file:apps/omit-keys", + "requires": { + "blank-pkg": "*" + } + } + } +} diff --git a/cli/integration_tests/composable_config/monorepo/package.json b/cli/integration_tests/composable_config/monorepo/package.json new file mode 100644 index 0000000000000..85175c18a49b0 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/package.json @@ -0,0 +1,7 @@ +{ + "name": "monorepo", + "workspaces": [ + "apps/**", + "packages/**" + ] +} diff --git a/cli/integration_tests/composable_config/monorepo/packages/blank-pkg/package.json b/cli/integration_tests/composable_config/monorepo/packages/blank-pkg/package.json new file mode 100644 index 0000000000000..90325af41d6d2 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/packages/blank-pkg/package.json @@ -0,0 +1,8 @@ +{ + "name": "blank-pkg", + "scripts": { + "omit-keys-underlying-topo-task": "echo \"omit-keys-underlying-topo-task from blank-pkg\"", + "missing-workspace-config-underlying-topo-task": "echo \"missing-workspace-config-underlying-topo-task from blank-pkg\"", + "override-values-underlying-topo-task": "echo \"override-values-underlying-topo-task from blank-pkg\"" + } +} diff --git a/cli/integration_tests/composable_config/monorepo/turbo.json b/cli/integration_tests/composable_config/monorepo/turbo.json new file mode 100644 index 0000000000000..75828ea4578f6 --- /dev/null +++ b/cli/integration_tests/composable_config/monorepo/turbo.json @@ -0,0 +1,99 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "add-keys-task": {}, + "add-keys-underlying-task": {}, + + "omit-keys-task-with-deps": { + "dependsOn": [ + "omit-keys-underlying-task", + "^omit-keys-underlying-topo-task" + ], + "outputs": ["out/**"] + }, + "omit-keys-underlying-task": {}, + "omit-keys-underlying-topo-task": {}, + + "omit-keys-task": { + "inputs": ["src/foo.txt"], + "outputs": ["out/**"], + "env": ["SOME_VAR"], + "outputMode": "new-only" + }, + + "missing-workspace-config-task-with-deps": { + "dependsOn": [ + 
"missing-workspace-config-underlying-task", + "^missing-workspace-config-underlying-topo-task" + ], + "outputs": ["out/**"] + }, + "missing-workspace-config-underlying-task": {}, + "missing-workspace-config-underlying-topo-task": {}, + + "missing-workspace-config-task": { + "inputs": ["src/foo.txt"], + "outputs": ["out/**"], + "env": ["SOME_VAR"], + "outputMode": "new-only" + }, + + "override-values-task": { + "inputs": ["src/foo.txt"], + "outputs": ["out/**"], + "env": ["SOME_VAR"], + "outputMode": "new-only" + }, + "override-values-task-with-deps": { + "dependsOn": [ + "override-values-underlying-task", + "^override-values-underlying-topo-task" + ] + }, + "override-values-underlying-task": {}, + "override-values-underlying-topo-task": {}, + + "persistent-task-1": { + "persistent": true + }, + "persistent-task-2": { + "persistent": true + }, + "persistent-task-3": { + "persistent": true + }, + "persistent-task-4": {}, + "persistent-task-1-parent": { + "dependsOn": ["persistent-task-1"] + }, + "persistent-task-2-parent": { + "dependsOn": ["persistent-task-2"] + }, + "persistent-task-3-parent": { + "dependsOn": ["persistent-task-3"] + }, + "persistent-task-4-parent": { + "dependsOn": ["persistent-task-4"] + }, + + "cached-task-1": { + "cache": false, + "outputs": ["out/**"] + }, + "cached-task-2": { + "cache": true, + "outputs": ["out/**"] + }, + "cached-task-3": { + "outputs": ["out/**"] + }, + "cached-task-4": { + "cache": false, + "outputs": ["out/**"] + }, + + "config-change-task": { + "inputs": ["src/foo.txt"] + } + } +} diff --git a/cli/integration_tests/composable_config/setup.sh b/cli/integration_tests/composable_config/setup.sh new file mode 100755 index 0000000000000..d771eae6cbad2 --- /dev/null +++ b/cli/integration_tests/composable_config/setup.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +SCRIPT_DIR=$(dirname ${BASH_SOURCE[0]}) +TARGET_DIR=$1 +TEST_DIR=$2 +cp -a ${SCRIPT_DIR}/$TEST_DIR/. ${TARGET_DIR}/ +${SCRIPT_DIR}/../setup_git.sh ${TARGET_DIR} diff --git a/cli/integration_tests/prune/composable-config.t b/cli/integration_tests/prune/composable-config.t new file mode 100644 index 0000000000000..cbea9b6ca7c4d --- /dev/null +++ b/cli/integration_tests/prune/composable-config.t @@ -0,0 +1,26 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . ${TESTDIR}/setup.sh $(pwd) + +Make sure that the internal util package is part of the prune output + $ ${TURBO} prune --scope=docs + Generating pruned monorepo for docs in .*\/out (re) + - Added docs + - Added shared + - Added util + $ cd out && ${TURBO} run new-task + WARNING cannot find a .git folder. Falling back to manual file hashing (which may be slower). If you are running this build in a pruned directory, you can ignore this message. 
Otherwise, please initialize a git repository in the root of your monorepo + \xe2\x80\xa2 Packages in scope: docs, shared, util (esc) + \xe2\x80\xa2 Running new-task in 3 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + docs:new-task: cache miss, executing 89b0cf4ede0c4ae5 + docs:new-task: + docs:new-task: > docs@ new-task .*out/apps/docs (re) + docs:new-task: > echo 'running new task' + docs:new-task: + docs:new-task: running new task + + Tasks: 1 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + diff --git a/cli/integration_tests/prune/monorepo_with_root_dep/apps/docs/package.json b/cli/integration_tests/prune/monorepo_with_root_dep/apps/docs/package.json index 90b47b0bf108d..8a2038fcd1753 100644 --- a/cli/integration_tests/prune/monorepo_with_root_dep/apps/docs/package.json +++ b/cli/integration_tests/prune/monorepo_with_root_dep/apps/docs/package.json @@ -2,5 +2,8 @@ "name": "docs", "dependencies": { "shared": "workspace:*" + }, + "scripts": { + "new-task": "echo 'running new task'" } } diff --git a/cli/integration_tests/prune/monorepo_with_root_dep/apps/docs/turbo.json b/cli/integration_tests/prune/monorepo_with_root_dep/apps/docs/turbo.json new file mode 100644 index 0000000000000..93e9625690ab3 --- /dev/null +++ b/cli/integration_tests/prune/monorepo_with_root_dep/apps/docs/turbo.json @@ -0,0 +1,6 @@ +{ + "extends": ["//"], + "pipeline": { + "new-task": {} + } +} diff --git a/cli/integration_tests/task-dependencies/complex.t b/cli/integration_tests/task-dependencies/complex.t index 41f73c9bb487c..2c864df5bf53a 100644 --- a/cli/integration_tests/task-dependencies/complex.t +++ b/cli/integration_tests/task-dependencies/complex.t @@ -33,14 +33,14 @@ We can scope the run to specific packages Can't depend on unknown tasks $ ${TURBO} run build2 - ERROR run failed: error preparing engine: Could not find task "workspace-a#custom" in pipeline - Turbo error: error preparing engine: Could not find task "workspace-a#custom" in pipeline + ERROR run failed: error preparing engine: Could not find "app-a#custom" in root turbo.json or "app-a" workspace + Turbo error: error preparing engine: Could not find "app-a#custom" in root turbo.json or "app-a" workspace [1] Can't depend on tasks from unknown packages $ ${TURBO} run build3 - ERROR run failed: error preparing engine: Could not find task "unknown#custom" in pipeline - Turbo error: error preparing engine: Could not find task "unknown#custom" in pipeline + ERROR run failed: error preparing engine: Could not find workspace "unknown" from task "unknown#custom" in project + Turbo error: error preparing engine: Could not find workspace "unknown" from task "unknown#custom" in project [1] diff --git a/cli/integration_tests/task-dependencies/complex/turbo.json b/cli/integration_tests/task-dependencies/complex/turbo.json index 3c4147e684a0d..998a9d6c6fd46 100644 --- a/cli/integration_tests/task-dependencies/complex/turbo.json +++ b/cli/integration_tests/task-dependencies/complex/turbo.json @@ -16,7 +16,7 @@ }, "build2": { - "dependsOn": ["workspace-a#custom"] + "dependsOn": ["app-a#custom"] }, "build3": { diff --git a/cli/internal/context/context.go b/cli/internal/context/context.go index 52b87fc21fe27..08c5d8d75041f 100644 --- a/cli/internal/context/context.go +++ b/cli/internal/context/context.go @@ -144,7 +144,10 @@ func isWorkspaceReference(packageVersion string, dependencyVersion string, cwd s // SinglePackageGraph constructs a Context instance from a single package. 
func SinglePackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON) (*Context, error) { - workspaceInfos := map[string]*fs.PackageJSON{util.RootPkgName: rootPackageJSON} + workspaceInfos := graph.WorkspaceInfos{ + PackageJSONs: map[string]*fs.PackageJSON{util.RootPkgName: rootPackageJSON}, + TurboConfigs: map[string]*fs.TurboJSON{}, + } c := &Context{ WorkspaceInfos: workspaceInfos, RootNode: core.ROOT_NODE_NAME, @@ -162,7 +165,10 @@ func SinglePackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON * func BuildPackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON) (*Context, error) { c := &Context{} rootpath := repoRoot.ToStringDuringMigration() - c.WorkspaceInfos = make(graph.WorkspaceInfos) + c.WorkspaceInfos = graph.WorkspaceInfos{ + PackageJSONs: map[string]*fs.PackageJSON{}, + TurboConfigs: map[string]*fs.TurboJSON{}, + } c.RootNode = core.ROOT_NODE_NAME var warnings Warnings @@ -207,7 +213,7 @@ func BuildPackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *f return nil, err } populateGraphWaitGroup := &errgroup.Group{} - for _, pkg := range c.WorkspaceInfos { + for _, pkg := range c.WorkspaceInfos.PackageJSONs { pkg := pkg populateGraphWaitGroup.Go(func() error { return c.populateWorkspaceGraphForPackageJSON(pkg, rootpath, pkg.Name, &warnings) @@ -224,7 +230,7 @@ func BuildPackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *f if err != nil { return nil, fmt.Errorf("failed to resolve dependencies for root package: %v", err) } - c.WorkspaceInfos[util.RootPkgName] = rootPackageJSON + c.WorkspaceInfos.PackageJSONs[util.RootPkgName] = rootPackageJSON return c, warnings.errorOrNil() } @@ -293,7 +299,7 @@ func (c *Context) populateWorkspaceGraphForPackageJSON(pkg *fs.PackageJSON, root // split out internal vs. external deps for depName, depVersion := range depMap { - if item, ok := c.WorkspaceInfos[depName]; ok && isWorkspaceReference(item.Version, depVersion, pkg.Dir.ToStringDuringMigration(), rootpath) { + if item, ok := c.WorkspaceInfos.PackageJSONs[depName]; ok && isWorkspaceReference(item.Version, depVersion, pkg.Dir.ToStringDuringMigration(), rootpath) { internalDepsSet.Add(depName) c.WorkspaceGraph.Connect(dag.BasicEdge(vertexName, depName)) } else { @@ -363,11 +369,11 @@ func (c *Context) parsePackageJSON(repoRoot turbopath.AbsoluteSystemPath, pkgJSO c.WorkspaceGraph.Add(pkg.Name) pkg.PackageJSONPath = turbopath.AnchoredSystemPathFromUpstream(relativePkgJSONPath) pkg.Dir = turbopath.AnchoredSystemPathFromUpstream(filepath.Dir(relativePkgJSONPath)) - if c.WorkspaceInfos[pkg.Name] != nil { - existing := c.WorkspaceInfos[pkg.Name] + if c.WorkspaceInfos.PackageJSONs[pkg.Name] != nil { + existing := c.WorkspaceInfos.PackageJSONs[pkg.Name] return fmt.Errorf("Failed to add workspace \"%s\" from %s, it already exists at %s", pkg.Name, pkg.Dir, existing.Dir) } - c.WorkspaceInfos[pkg.Name] = pkg + c.WorkspaceInfos.PackageJSONs[pkg.Name] = pkg c.WorkspaceNames = append(c.WorkspaceNames, pkg.Name) } return nil @@ -481,12 +487,12 @@ func (c *Context) ChangedPackages(previousLockfile lockfile.Lockfile) ([]string, return false } - changedPkgs := make([]string, 0, len(c.WorkspaceInfos)) + changedPkgs := make([]string, 0, len(c.WorkspaceInfos.PackageJSONs)) // check if prev and current have "global" changes e.g. 
lockfile bump globalChange := c.Lockfile.GlobalChange(previousLockfile) - for pkgName, pkg := range c.WorkspaceInfos { + for pkgName, pkg := range c.WorkspaceInfos.PackageJSONs { if globalChange { break } @@ -500,8 +506,8 @@ func (c *Context) ChangedPackages(previousLockfile lockfile.Lockfile) ([]string, } if globalChange { - changedPkgs = make([]string, 0, len(c.WorkspaceInfos)) - for pkgName := range c.WorkspaceInfos { + changedPkgs = make([]string, 0, len(c.WorkspaceInfos.PackageJSONs)) + for pkgName := range c.WorkspaceInfos.PackageJSONs { changedPkgs = append(changedPkgs, pkgName) } sort.Strings(changedPkgs) diff --git a/cli/internal/core/engine.go b/cli/internal/core/engine.go index 07fadc22259fb..866f5dcbb9c0f 100644 --- a/cli/internal/core/engine.go +++ b/cli/internal/core/engine.go @@ -1,7 +1,9 @@ package core import ( + "errors" "fmt" + "os" "sort" "strings" @@ -33,11 +35,6 @@ type Engine struct { // completeGraph is the CompleteGraph. We need this to look up the Pipeline, etc. completeGraph *graph.CompleteGraph - - // Map of packageName to pipeline. We resolve task definitions from here - // but we don't want to read from the filesystem every time - pipelines map[string]fs.Pipeline - // isSinglePackage is used to load turbo.json correctly isSinglePackage bool } @@ -52,7 +49,6 @@ func NewEngine( TaskGraph: &dag.AcyclicGraph{}, PackageTaskDeps: map[string][]string{}, rootEnabledTasks: make(util.Set), - pipelines: map[string]fs.Pipeline{}, isSinglePackage: isSinglePackage, } } @@ -97,27 +93,61 @@ func (e *Engine) Execute(visitor Visitor, opts EngineExecutionOptions) []error { }) } -func (e *Engine) getTaskDefinition(taskName string, taskID string) (*Task, error) { - pipeline, err := e.getPipelineFromWorkspace(util.RootPkgName) +// MissingTaskError is a specialized Error thrown in the case that we can't find a task. +// We want to allow this error when getting task definitions, so we have to special case it. +type MissingTaskError struct { + workspaceName string + taskID string + taskName string +} + +func (m *MissingTaskError) Error() string { + return fmt.Sprintf("Could not find \"%s\" or \"%s\" in workspace \"%s\"", m.taskName, m.taskID, m.workspaceName) +} + +func (e *Engine) getTaskDefinition(pkg string, taskName string, taskID string) (*Task, error) { + pipeline, err := e.completeGraph.GetPipelineFromWorkspace(pkg, e.isSinglePackage) + if err != nil { + if pkg != util.RootPkgName { + // If there was no turbo.json in the workspace, fallback to the root turbo.json + if errors.Is(err, os.ErrNotExist) { + return e.getTaskDefinition(util.RootPkgName, taskName, taskID) + } + + // otherwise bubble it up + return nil, err + } + return nil, err } if task, ok := pipeline[taskID]; ok { return &Task{ Name: taskName, - TaskDefinition: task, + TaskDefinition: task.TaskDefinition, }, nil } if task, ok := pipeline[taskName]; ok { return &Task{ Name: taskName, - TaskDefinition: task, + TaskDefinition: task.TaskDefinition, }, nil } - return nil, fmt.Errorf("Missing task definition, configure \"%s\" or \"%s\" in turbo.json", taskName, taskID) + // An error here means turbo.json exists, but didn't define the task. + // Fallback to the root pipeline to find the task. 
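	// (Editor's illustration, not part of this patch.) The lookup order that
	// getTaskDefinition implements is, in pseudocode:
	//
	//   def := pipeline(pkg)[taskID] ?? pipeline(pkg)[taskName]
	//   if def == nil && pkg != "//" { retry with pkg = "//" }   // root turbo.json
	//   if def == nil { return &MissingTaskError{...} }
	//
	// A workspace turbo.json therefore shadows the root definition only for
	// the tasks it actually defines; everything else falls back to the root.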
+ if pkg != util.RootPkgName { + return e.getTaskDefinition(util.RootPkgName, taskName, taskID) + } + + // Return this as a custom type so we can ignore it specifically + return nil, &MissingTaskError{ + taskName: taskName, + taskID: taskID, + workspaceName: pkg, + } } // Prepare constructs the Task Graph for a list of packages and tasks @@ -128,20 +158,27 @@ func (e *Engine) Prepare(options *EngineBuildingOptions) error { traversalQueue := []string{} + // Get a list of entry points into our TaskGraph. + // We do this by taking the input taskNames, and pkgs + // and creating a queue of taskIDs that we can traverse and gather dependencies from. for _, pkg := range pkgs { isRootPkg := pkg == util.RootPkgName - for _, taskName := range taskNames { // If it's not a task from the root workspace (i.e. tasks from every other workspace) // or if it's a task that we know is rootEnabled task, add it to the traversal queue. if !isRootPkg || e.rootEnabledTasks.Includes(taskName) { taskID := util.GetTaskId(pkg, taskName) // Skip tasks that don't have a definition - if _, err := e.getTaskDefinition(taskName, taskID); err != nil { - // Initially, non-package tasks are not required to exist, as long as some - // package in the list packages defines it as a package-task. Dependencies - // *are* required to have a definition. - continue + if _, err := e.getTaskDefinition(pkg, taskName, taskID); err != nil { + var e *MissingTaskError + if errors.As(err, &e) { + // Initially, non-package tasks are not required to exist, as long as some + // package in the list packages defines it as a package-task. Dependencies + // *are* required to have a definition. + continue + } + + return err } traversalQueue = append(traversalQueue, taskID) @@ -163,12 +200,21 @@ func (e *Engine) Prepare(options *EngineBuildingOptions) error { return fmt.Errorf("%v needs an entry in turbo.json before it can be depended on because it is a task run from the root package", taskID) } - taskDefinition, err := e.GetResolvedTaskDefinition( - &e.completeGraph.Pipeline, - taskName, - taskID, - ) + if pkg != ROOT_NODE_NAME { + if _, ok := e.completeGraph.WorkspaceInfos.PackageJSONs[pkg]; !ok { + // If we have a pkg it should be in WorkspaceInfos. + // If we're hitting this error something has gone wrong earlier when building WorkspaceInfos + // or the workspace really doesn't exist and turbo.json is misconfigured. + return fmt.Errorf("Could not find workspace \"%s\" from task \"%s\" in project", pkg, taskID) + } + } + + taskDefinitions, err := e.getTaskDefinitionChain(taskID, taskName) + if err != nil { + return err + } + taskDefinition, err := fs.MergeTaskDefinitions(taskDefinitions) if err != nil { return err } @@ -349,21 +395,21 @@ func (e *Engine) ValidatePersistentDependencies(graph *graph.CompleteGraph) erro packageName, taskName := util.GetPackageTaskFromId(depTaskID) // Get the Task Definition so we can check if it is Persistent - // TODO(mehulkar): Do we need to get a resolved taskDefinition here? 
- depTaskDefinition, taskExists := e.getTaskDefinition(taskName, depTaskID) - if taskExists != nil { + depTaskDefinition, taskExists := e.completeGraph.TaskDefinitions[depTaskID] + + if !taskExists { return fmt.Errorf("Cannot find task definition for %v in package %v", depTaskID, packageName) } // Get information about the package - pkg, pkgExists := graph.WorkspaceInfos[packageName] + pkg, pkgExists := graph.WorkspaceInfos.PackageJSONs[packageName] if !pkgExists { return fmt.Errorf("Cannot find package %v", packageName) } _, hasScript := pkg.Scripts[taskName] // If both conditions are true set a value and break out of checking the dependencies - if depTaskDefinition.TaskDefinition.Persistent && hasScript { + if depTaskDefinition.Persistent && hasScript { validationError = fmt.Errorf( "\"%s\" is a persistent task, \"%s\" cannot depend on it", util.GetTaskId(packageName, taskName), @@ -385,11 +431,106 @@ func (e *Engine) ValidatePersistentDependencies(graph *graph.CompleteGraph) erro return validationError } -// GetResolvedTaskDefinition returns a "resolved" TaskDefinition. -// Today, it just looks up the task from the root Pipeline, but in the future -// we will compose the TaskDefinition from workspaces using the `extends` key. -func (e *Engine) GetResolvedTaskDefinition(rootPipeline *fs.Pipeline, taskName string, taskID string) (*fs.TaskDefinition, error) { - return rootPipeline.GetTask(taskID, taskName) +// getTaskDefinitionChain gets a set of TaskDefinitions that apply to the taskID. +// These definitions should be merged by the consumer. +func (e *Engine) getTaskDefinitionChain(taskID string, taskName string) ([]fs.BookkeepingTaskDefinition, error) { + // Start a list of TaskDefinitions we've found for this TaskID + taskDefinitions := []fs.BookkeepingTaskDefinition{} + + rootPipeline, err := e.completeGraph.GetPipelineFromWorkspace(util.RootPkgName, e.isSinglePackage) + if err != nil { + // It should be very unlikely that we can't find a root pipeline. Even for single package repos + // the pipeline is synthesized from package.json, so there should be _something_ here. + return nil, err + } + + // Look for the taskDefinition in the root pipeline. + if rootTaskDefinition, err := rootPipeline.GetTask(taskID, taskName); err == nil { + taskDefinitions = append(taskDefinitions, *rootTaskDefinition) + } + + // If we're in a single package repo, we can just exit with the TaskDefinition in the root pipeline + // since there are no workspaces, and we don't need to follow any extends keys. + if e.isSinglePackage { + if len(taskDefinitions) == 0 { + return nil, fmt.Errorf("Could not find \"%s\" in root turbo.json", taskID) + } + return taskDefinitions, nil + } + + // If the taskID is a root task (e.g. //#build), we don't need to look + // for a workspace task, since these can only be defined in the root turbo.json. + taskIDPackage, _ := util.GetPackageTaskFromId(taskID) + if taskIDPackage != util.RootPkgName && taskIDPackage != ROOT_NODE_NAME { + // If there is an error, we can ignore it, since turbo.json config is not required in the workspace. + if workspaceTurboJSON, err := e.completeGraph.GetTurboConfigFromWorkspace(taskIDPackage, e.isSinglePackage); err != nil { + // swallow the error where the config file doesn't exist, but bubble up other things + if !errors.Is(err, os.ErrNotExist) { + return nil, err + } + } else { + // Run some validations on a workspace turbo.json. Note that these validations are on + // the whole struct, and not relevant to the taskID we're looking at right now. 
+ validationErrors := workspaceTurboJSON.Validate([]fs.TurboJSONValidation{ + validateNoPackageTaskSyntax, + validateExtends, + }) + + if len(validationErrors) > 0 { + fullError := errors.New("Invalid turbo.json") + for _, validationErr := range validationErrors { + fullError = fmt.Errorf("%w\n - %s", fullError, validationErr) + } + + return nil, fullError + } + + // If there are no errors, we can (try to) add the TaskDefinition to our list. + if workspaceDefinition, ok := workspaceTurboJSON.Pipeline[taskName]; ok { + taskDefinitions = append(taskDefinitions, workspaceDefinition) + } + } + } + + if len(taskDefinitions) == 0 { + return nil, fmt.Errorf("Could not find \"%s\" in root turbo.json or \"%s\" workspace", taskID, taskIDPackage) + } + + return taskDefinitions, nil +} + +func validateNoPackageTaskSyntax(turboJSON *fs.TurboJSON) []error { + errors := []error{} + + for taskIDOrName := range turboJSON.Pipeline { + if util.IsPackageTask(taskIDOrName) { + taskName := util.StripPackageName(taskIDOrName) + errors = append(errors, fmt.Errorf("\"%s\". Use \"%s\" instead", taskIDOrName, taskName)) + } + } + + return errors +} + +func validateExtends(turboJSON *fs.TurboJSON) []error { + extendErrors := []error{} + extends := turboJSON.Extends + // TODO(mehulkar): Enable extending from more than one workspace. + if len(extends) > 1 { + extendErrors = append(extendErrors, fmt.Errorf("You can only extend from the root workspace")) + } + + // We don't support this right now + if len(extends) == 0 { + extendErrors = append(extendErrors, fmt.Errorf("No \"extends\" key found")) + } + + // TODO(mehulkar): Enable extending from non-root workspace. + if len(extends) == 1 && extends[0] != util.RootPkgName { + extendErrors = append(extendErrors, fmt.Errorf("You can only extend from the root workspace")) + } + + return extendErrors } // GetTaskGraphAncestors gets all the ancestors for a given task in the graph. @@ -429,43 +570,3 @@ func (e *Engine) GetTaskGraphDescendants(taskID string) ([]string, error) { sort.Strings(stringDescendents) return stringDescendents, nil } - -func (e *Engine) getPipelineFromWorkspace(workspaceName string) (fs.Pipeline, error) { - cachedPipeline, ok := e.pipelines[workspaceName] - if ok { - return cachedPipeline, nil - } - - // Note: dir for the root workspace will be an empty string, and for - // other workspaces, it will be a relative path. - dir := e.completeGraph.WorkspaceInfos[workspaceName].Dir - repoRoot := e.completeGraph.RepoRoot - dirAbsolutePath := dir.RestoreAnchor(repoRoot) - - // We need to a PackageJSON, because LoadTurboConfig requires it as an argument - // so it can synthesize tasks for single-package repos. - // In the root workspace, actually get and use the root package.json. - // For all other workspaces, we don't need the synthesis feature, so we can proceed - // with a default/blank PackageJSON - pkgJSON := &fs.PackageJSON{} - - if workspaceName == util.RootPkgName { - rootPkgJSONPath := dirAbsolutePath.Join("package.json") - rootPkgJSON, err := fs.ReadPackageJSON(rootPkgJSONPath) - if err != nil { - return nil, err - } - pkgJSON = rootPkgJSON - } - - turboConfig, err := fs.LoadTurboConfig(repoRoot, pkgJSON, e.isSinglePackage) - if err != nil { - return nil, err - } - - // Add to internal cache so we don't have to read file system for every task - e.pipelines[workspaceName] = turboConfig.Pipeline - - // Return the config from the workspace. 
- return e.pipelines[workspaceName], nil -} diff --git a/cli/internal/fs/turbo_json.go b/cli/internal/fs/turbo_json.go index 5473807409f4c..9e3b479d66c1a 100644 --- a/cli/internal/fs/turbo_json.go +++ b/cli/internal/fs/turbo_json.go @@ -31,14 +31,31 @@ type rawTurboJSON struct { Pipeline Pipeline `json:"pipeline"` // Configuration options when interfacing with the remote cache RemoteCacheOptions RemoteCacheOptions `json:"remoteCache,omitempty"` + + // Extends can be the name of another workspace + Extends []string `json:"extends,omitempty"` +} + +// pristineTurboJSON is used when marshaling a TurboJSON object into a turbo.json string +// Notably, it includes a PristinePipeline instead of the regular Pipeline. (i.e. TaskDefinition +// instead of BookkeepingTaskDefinition.) +type pristineTurboJSON struct { + GlobalDependencies []string `json:"globalDependencies,omitempty"` + GlobalEnv []string `json:"globalEnv,omitempty"` + Pipeline PristinePipeline `json:"pipeline"` + RemoteCacheOptions RemoteCacheOptions `json:"remoteCache,omitempty"` + Extends []string `json:"extends,omitempty"` } -// TurboJSON is the root turborepo configuration +// TurboJSON represents a turbo.json configuration file type TurboJSON struct { GlobalDeps []string GlobalEnv []string Pipeline Pipeline RemoteCacheOptions RemoteCacheOptions + + // A list of Workspace names + Extends []string } // RemoteCacheOptions is a struct for deserializing .remoteCache of configFile @@ -47,7 +64,10 @@ type RemoteCacheOptions struct { Signature bool `json:"signature,omitempty"` } -type rawTask struct { +// rawTaskWithDefaults exists to Marshal (i.e. turn a TaskDefinition into json). +// We use this for printing ResolvedTaskConfiguration, because we _want_ to show +// the user the default values for keys they have not configured. +type rawTaskWithDefaults struct { Outputs []string `json:"outputs"` Cache *bool `json:"cache"` DependsOn []string `json:"dependsOn"` Inputs []string `json:"inputs"` OutputMode util.TaskOutputMode `json:"outputMode"` Env []string `json:"env"` Persistent bool `json:"persistent"` } +// rawTask exists to Unmarshal from json. When fields are omitted, we _want_ +// them to be missing, so that we can distinguish missing from empty value. +type rawTask struct { + Outputs []string `json:"outputs,omitempty"` + Cache *bool `json:"cache,omitempty"` + DependsOn []string `json:"dependsOn,omitempty"` + Inputs []string `json:"inputs,omitempty"` + OutputMode *util.TaskOutputMode `json:"outputMode,omitempty"` + Env []string `json:"env,omitempty"` + Persistent *bool `json:"persistent,omitempty"` +} + +// PristinePipeline contains original TaskDefinitions without the bookkeeping +type PristinePipeline map[string]TaskDefinition + // Pipeline is a struct for deserializing .pipeline in configFile -type Pipeline map[string]TaskDefinition +type Pipeline map[string]BookkeepingTaskDefinition + +// BookkeepingTaskDefinition holds the underlying TaskDefinition and some bookkeeping data +// about the TaskDefinition. This wrapper struct allows us to leave TaskDefinition untouched. +type BookkeepingTaskDefinition struct { + definedFields util.Set + TaskDefinition TaskDefinition +} // TaskDefinition is a representation of the configFile pipeline for further computation. type TaskDefinition struct { @@ -93,12 +135,12 @@ type TaskDefinition struct { } // GetTask returns a TaskDefinition based on the ID (package#task format) or name (e.g. 
"build") -func (p Pipeline) GetTask(taskID string, taskName string) (*TaskDefinition, error) { +func (pc Pipeline) GetTask(taskID string, taskName string) (*BookkeepingTaskDefinition, error) { // first check for package-tasks - taskDefinition, ok := p[taskID] + taskDefinition, ok := pc[taskID] if !ok { // then check for regular tasks - fallbackTaskDefinition, notcool := p[taskName] + fallbackTaskDefinition, notcool := pc[taskName] // if neither, then bail if !notcool { // Return an empty TaskDefinition @@ -113,7 +155,7 @@ func (p Pipeline) GetTask(taskID string, taskName string) (*TaskDefinition, erro } // LoadTurboConfig loads, or optionally, synthesizes a TurboJSON instance -func LoadTurboConfig(rootPath turbopath.AbsoluteSystemPath, rootPackageJSON *PackageJSON, includeSynthesizedFromRootPackageJSON bool) (*TurboJSON, error) { +func LoadTurboConfig(dir turbopath.AbsoluteSystemPath, rootPackageJSON *PackageJSON, includeSynthesizedFromRootPackageJSON bool) (*TurboJSON, error) { // If the root package.json stil has a `turbo` key, log a warning and remove it. if rootPackageJSON.LegacyTurboConfig != nil { log.Printf("[WARNING] \"turbo\" in package.json is no longer supported. Migrate to %s by running \"npx @turbo/codemod create-turbo-config\"\n", configFile) @@ -121,12 +163,13 @@ func LoadTurboConfig(rootPath turbopath.AbsoluteSystemPath, rootPackageJSON *Pac } var turboJSON *TurboJSON - turboFromFiles, err := ReadTurboConfig(rootPath.UntypedJoin(configFile)) + turboFromFiles, err := ReadTurboConfig(dir.UntypedJoin(configFile)) if !includeSynthesizedFromRootPackageJSON && err != nil { // If the file didn't exist, throw a custom error here instead of propagating if errors.Is(err, os.ErrNotExist) { - return nil, fmt.Errorf("Could not find %s. Follow directions at https://turbo.build/repo/docs to create one: file does not exist", configFile) + return nil, errors.Wrap(err, fmt.Sprintf("Could not find %s. Follow directions at https://turbo.build/repo/docs to create one", configFile)) + } // There was an error, and we don't have any chance of recovering @@ -161,12 +204,35 @@ func LoadTurboConfig(rootPath turbopath.AbsoluteSystemPath, rootPackageJSON *Pac for scriptName := range rootPackageJSON.Scripts { if !turboJSON.Pipeline.HasTask(scriptName) { taskName := util.RootTaskID(scriptName) - turboJSON.Pipeline[taskName] = TaskDefinition{} + // Explicitly set ShouldCache to false in this definition and add the bookkeeping fields + // so downstream we can pretend that it was set on purpose (as if read from a config file) + // rather than defaulting to the 0-value of a boolean field. + turboJSON.Pipeline[taskName] = BookkeepingTaskDefinition{ + definedFields: util.SetFromStrings([]string{"ShouldCache"}), + TaskDefinition: TaskDefinition{ + ShouldCache: false, + }, + } } } return turboJSON, nil } +// TurboJSONValidation is the signature for a validation function passed to Validate() +type TurboJSONValidation func(*TurboJSON) []error + +// Validate calls an array of validation functions on the TurboJSON struct. +// The validations can be customized by the caller. +func (tj *TurboJSON) Validate(validations []TurboJSONValidation) []error { + allErrors := []error{} + for _, validation := range validations { + errors := validation(tj) + allErrors = append(allErrors, errors...) 
+ } + + return allErrors +} + // TaskOutputs represents the patterns for including and excluding files from outputs type TaskOutputs struct { Inclusions []string @@ -197,7 +263,7 @@ func ReadTurboConfig(turboJSONPath turbopath.AbsoluteSystemPath) (*TurboJSON, er } // If there's no turbo.json, return an error. - return nil, errors.Wrapf(os.ErrNotExist, "Could not find %s", configFile) + return nil, os.ErrNotExist } // readTurboJSON reads the configFile in to a struct @@ -224,11 +290,11 @@ func readTurboJSON(path turbopath.AbsoluteSystemPath) (*TurboJSON, error) { // GetTaskDefinition returns a TaskDefinition from a serialized definition in configFile func (pc Pipeline) GetTaskDefinition(taskID string) (TaskDefinition, bool) { if entry, ok := pc[taskID]; ok { - return entry, true + return entry.TaskDefinition, true } _, task := util.GetPackageTaskFromId(taskID) entry, ok := pc[task] - return entry, ok + return entry.TaskDefinition, ok } // HasTask returns true if the given task is defined in the pipeline, either directly or @@ -248,17 +314,88 @@ func (pc Pipeline) HasTask(task string) bool { return false } -// UnmarshalJSON deserializes JSON into a TaskDefinition -func (c *TaskDefinition) UnmarshalJSON(data []byte) error { +// Pristine returns a PristinePipeline +func (pc Pipeline) Pristine() PristinePipeline { + pristine := PristinePipeline{} + for taskName, taskDef := range pc { + pristine[taskName] = taskDef.TaskDefinition + } + return pristine +} + +// hasField checks the internal bookkeeping definedFields field to +// see whether a field was actually in the underlying turbo.json +// or whether it was initialized with its 0-value. +func (btd BookkeepingTaskDefinition) hasField(fieldName string) bool { + return btd.definedFields.Includes(fieldName) +} + +// MergeTaskDefinitions accepts an array of BookkeepingTaskDefinitions and merges them into +// a single TaskDefinition. It uses the bookkeeping definedFields to determine which fields should +// be overwritten and when 0-values should be respected. +func MergeTaskDefinitions(taskDefinitions []BookkeepingTaskDefinition) (*TaskDefinition, error) { + // Start with an empty definition + mergedTaskDefinition := &TaskDefinition{} + + // Set the default, because the 0-value will be false, and if no turbo.jsons had + // this field set for this task, we want it to be true. 
+ mergedTaskDefinition.ShouldCache = true + + // For each of the TaskDefinitions we know of, merge them in + for _, bookkeepingTaskDef := range taskDefinitions { + taskDef := bookkeepingTaskDef.TaskDefinition + if bookkeepingTaskDef.hasField("Outputs") { + mergedTaskDefinition.Outputs = taskDef.Outputs + } + + if bookkeepingTaskDef.hasField("ShouldCache") { + mergedTaskDefinition.ShouldCache = taskDef.ShouldCache + } + + if bookkeepingTaskDef.hasField("EnvVarDependencies") { + mergedTaskDefinition.EnvVarDependencies = taskDef.EnvVarDependencies + } + + if bookkeepingTaskDef.hasField("TopologicalDependencies") { + mergedTaskDefinition.TopologicalDependencies = taskDef.TopologicalDependencies + } + + if bookkeepingTaskDef.hasField("TaskDependencies") { + mergedTaskDefinition.TaskDependencies = taskDef.TaskDependencies + } + + if bookkeepingTaskDef.hasField("Inputs") { + mergedTaskDefinition.Inputs = taskDef.Inputs + } + + if bookkeepingTaskDef.hasField("OutputMode") { + mergedTaskDefinition.OutputMode = taskDef.OutputMode + } + if bookkeepingTaskDef.hasField("Persistent") { + mergedTaskDefinition.Persistent = taskDef.Persistent + } + } + + return mergedTaskDefinition, nil +} + +// UnmarshalJSON deserializes a single task definition from +// turbo.json into a TaskDefinition struct +func (btd *BookkeepingTaskDefinition) UnmarshalJSON(data []byte) error { task := rawTask{} if err := json.Unmarshal(data, &task); err != nil { return err } - var inclusions []string - var exclusions []string + btd.definedFields = util.Set{} if task.Outputs != nil { + var inclusions []string + var exclusions []string + // Assign a bookkeeping field so we know that there really were + // outputs configured in the underlying config file. + btd.definedFields.Add("Outputs") + for _, glob := range task.Outputs { if strings.HasPrefix(glob, "!") { exclusions = append(exclusions, glob[1:]) @@ -266,64 +403,89 @@ func (c *TaskDefinition) UnmarshalJSON(data []byte) error { inclusions = append(inclusions, glob) } } - } - c.Outputs = TaskOutputs{ - Inclusions: inclusions, - Exclusions: exclusions, + btd.TaskDefinition.Outputs = TaskOutputs{ + Inclusions: inclusions, + Exclusions: exclusions, + } + + sort.Strings(btd.TaskDefinition.Outputs.Inclusions) + sort.Strings(btd.TaskDefinition.Outputs.Exclusions) } - sort.Strings(c.Outputs.Inclusions) - sort.Strings(c.Outputs.Exclusions) if task.Cache == nil { - c.ShouldCache = true + btd.TaskDefinition.ShouldCache = true } else { - c.ShouldCache = *task.Cache + btd.definedFields.Add("ShouldCache") + btd.TaskDefinition.ShouldCache = *task.Cache } envVarDependencies := make(util.Set) - c.TopologicalDependencies = []string{} // TODO @mehulkar: this should be a set - c.TaskDependencies = []string{} // TODO @mehulkar: this should be a set + btd.TaskDefinition.TopologicalDependencies = []string{} // TODO @mehulkar: this should be a set + btd.TaskDefinition.TaskDependencies = []string{} // TODO @mehulkar: this should be a set for _, dependency := range task.DependsOn { if strings.HasPrefix(dependency, envPipelineDelimiter) { log.Printf("[DEPRECATED] Declaring an environment variable in \"dependsOn\" is deprecated, found %s. 
Use the \"env\" key or use `npx @turbo/codemod migrate-env-var-dependencies`.\n", dependency) envVarDependencies.Add(strings.TrimPrefix(dependency, envPipelineDelimiter)) } else if strings.HasPrefix(dependency, topologicalPipelineDelimiter) { - c.TopologicalDependencies = append(c.TopologicalDependencies, strings.TrimPrefix(dependency, topologicalPipelineDelimiter)) + // Note: This will get assigned multiple times in the loop, but we only care that it's true + btd.definedFields.Add("TopologicalDependencies") + btd.TaskDefinition.TopologicalDependencies = append(btd.TaskDefinition.TopologicalDependencies, strings.TrimPrefix(dependency, topologicalPipelineDelimiter)) } else { - c.TaskDependencies = append(c.TaskDependencies, dependency) + // Note: This will get assigned multiple times in the loop, but we only care that it's true + btd.definedFields.Add("TaskDependencies") + btd.TaskDefinition.TaskDependencies = append(btd.TaskDefinition.TaskDependencies, dependency) } } - sort.Strings(c.TaskDependencies) - sort.Strings(c.TopologicalDependencies) + + sort.Strings(btd.TaskDefinition.TaskDependencies) + sort.Strings(btd.TaskDefinition.TopologicalDependencies) // Append env key into EnvVarDependencies - for _, value := range task.Env { - if strings.HasPrefix(value, envPipelineDelimiter) { - // Hard error to help people specify this correctly during migration. - // TODO: Remove this error after we have run summary. - return fmt.Errorf("You specified \"%s\" in the \"env\" key. You should not prefix your environment variables with \"$\"", value) + if task.Env != nil { + btd.definedFields.Add("EnvVarDependencies") + for _, value := range task.Env { + if strings.HasPrefix(value, envPipelineDelimiter) { + // Hard error to help people specify this correctly during migration. + // TODO: Remove this error after we have run summary. + return fmt.Errorf("You specified \"%s\" in the \"env\" key. 
You should not prefix your environment variables with \"$\"", value) + } + + envVarDependencies.Add(value) } + } - envVarDependencies.Add(value) + btd.TaskDefinition.EnvVarDependencies = envVarDependencies.UnsafeListOfStrings() + + sort.Strings(btd.TaskDefinition.EnvVarDependencies) + + if task.Inputs != nil { + // Note that we don't require Inputs to be sorted, we're going to + // hash the resulting files and sort that instead + btd.definedFields.Add("Inputs") + btd.TaskDefinition.Inputs = task.Inputs + } + + if task.OutputMode != nil { + btd.definedFields.Add("OutputMode") + btd.TaskDefinition.OutputMode = *task.OutputMode } - c.EnvVarDependencies = envVarDependencies.UnsafeListOfStrings() - sort.Strings(c.EnvVarDependencies) - // Note that we don't require Inputs to be sorted, we're going to - // hash the resulting files and sort that instead - c.Inputs = task.Inputs - c.OutputMode = task.OutputMode - c.Persistent = task.Persistent + if task.Persistent != nil { + btd.definedFields.Add("Persistent") + btd.TaskDefinition.Persistent = *task.Persistent + } else { + btd.TaskDefinition.Persistent = false + } return nil } -// MarshalJSON deserializes JSON into a TaskDefinition +// MarshalJSON serializes TaskDefinition struct into json func (c TaskDefinition) MarshalJSON() ([]byte, error) { // Initialize with empty arrays, so we get empty arrays serialized into JSON - task := rawTask{ + task := rawTaskWithDefaults{ Outputs: []string{}, Inputs: []string{}, Env: []string{}, @@ -353,6 +515,7 @@ func (c TaskDefinition) MarshalJSON() ([]byte, error) { if len(c.TaskDependencies) > 0 { task.DependsOn = append(task.DependsOn, c.TaskDependencies...) } + for _, i := range c.TopologicalDependencies { task.DependsOn = append(task.DependsOn, "^"+i) } @@ -368,7 +531,7 @@ func (c TaskDefinition) MarshalJSON() ([]byte, error) { return json.Marshal(task) } -// UnmarshalJSON deserializes TurboJSON objects into struct +// UnmarshalJSON deserializes the contents of turbo.json into a TurboJSON struct func (c *TurboJSON) UnmarshalJSON(data []byte) error { raw := &rawTurboJSON{} if err := json.Unmarshal(data, &raw); err != nil { @@ -406,6 +569,7 @@ func (c *TurboJSON) UnmarshalJSON(data []byte) error { // copy these over, we don't need any changes here. 
c.Pipeline = raw.Pipeline c.RemoteCacheOptions = raw.RemoteCacheOptions + c.Extends = raw.Extends return nil } @@ -413,10 +577,10 @@ func (c *TurboJSON) UnmarshalJSON(data []byte) error { // MarshalJSON converts a TurboJSON into the equivalent json object in bytes // note: we go via rawTurboJSON so that the output format is correct func (c *TurboJSON) MarshalJSON() ([]byte, error) { - raw := rawTurboJSON{} + raw := pristineTurboJSON{} raw.GlobalDependencies = c.GlobalDeps raw.GlobalEnv = c.GlobalEnv - raw.Pipeline = c.Pipeline + raw.Pipeline = c.Pipeline.Pristine() raw.RemoteCacheOptions = c.RemoteCacheOptions return json.Marshal(&raw) diff --git a/cli/internal/fs/turbo_json_test.go b/cli/internal/fs/turbo_json_test.go index db73222396090..bd2565a307c69 100644 --- a/cli/internal/fs/turbo_json_test.go +++ b/cli/internal/fs/turbo_json_test.go @@ -35,39 +35,51 @@ func Test_ReadTurboConfig(t *testing.T) { t.Fatalf("invalid parse: %#v", turboJSONReadErr) } - pipelineExpected := map[string]TaskDefinition{ + pipelineExpected := map[string]BookkeepingTaskDefinition{ "build": { - Outputs: TaskOutputs{Inclusions: []string{".next/**", "dist/**"}, Exclusions: []string{"dist/assets/**"}}, - TopologicalDependencies: []string{"build"}, - EnvVarDependencies: []string{}, - TaskDependencies: []string{}, - ShouldCache: true, - OutputMode: util.NewTaskOutput, + definedFields: util.SetFromStrings([]string{"Outputs", "OutputMode", "TopologicalDependencies"}), + TaskDefinition: TaskDefinition{ + Outputs: TaskOutputs{Inclusions: []string{".next/**", "dist/**"}, Exclusions: []string{"dist/assets/**"}}, + TopologicalDependencies: []string{"build"}, + EnvVarDependencies: []string{}, + TaskDependencies: []string{}, + ShouldCache: true, + OutputMode: util.NewTaskOutput, + }, }, "lint": { - Outputs: TaskOutputs{}, - TopologicalDependencies: []string{}, - EnvVarDependencies: []string{"MY_VAR"}, - TaskDependencies: []string{}, - ShouldCache: true, - OutputMode: util.NewTaskOutput, + definedFields: util.SetFromStrings([]string{"Outputs", "OutputMode", "ShouldCache"}), + TaskDefinition: TaskDefinition{ + Outputs: TaskOutputs{}, + TopologicalDependencies: []string{}, + EnvVarDependencies: []string{"MY_VAR"}, + TaskDependencies: []string{}, + ShouldCache: true, + OutputMode: util.NewTaskOutput, + }, }, "dev": { - Outputs: TaskOutputs{}, - TopologicalDependencies: []string{}, - EnvVarDependencies: []string{}, - TaskDependencies: []string{}, - ShouldCache: false, - OutputMode: util.FullTaskOutput, + definedFields: util.SetFromStrings([]string{"OutputMode", "ShouldCache"}), + TaskDefinition: TaskDefinition{ + Outputs: TaskOutputs{}, + TopologicalDependencies: []string{}, + EnvVarDependencies: []string{}, + TaskDependencies: []string{}, + ShouldCache: false, + OutputMode: util.FullTaskOutput, + }, }, "publish": { - Outputs: TaskOutputs{Inclusions: []string{"dist/**"}}, - TopologicalDependencies: []string{"build", "publish"}, - EnvVarDependencies: []string{}, - TaskDependencies: []string{"admin#lint", "build"}, - ShouldCache: false, - Inputs: []string{"build/**/*"}, - OutputMode: util.FullTaskOutput, + definedFields: util.SetFromStrings([]string{"Inputs", "Outputs", "TaskDependencies", "TopologicalDependencies", "ShouldCache"}), + TaskDefinition: TaskDefinition{ + Outputs: TaskOutputs{Inclusions: []string{"dist/**"}}, + TopologicalDependencies: []string{"build", "publish"}, + EnvVarDependencies: []string{}, + TaskDependencies: []string{"admin#lint", "build"}, + ShouldCache: false, + Inputs: []string{"build/**/*"}, + OutputMode: 
util.FullTaskOutput, + }, }, } @@ -106,14 +118,17 @@ func Test_LoadTurboConfig_BothCorrectAndLegacy(t *testing.T) { t.Fatalf("invalid parse: %#v", turboJSONReadErr) } - pipelineExpected := map[string]TaskDefinition{ + pipelineExpected := map[string]BookkeepingTaskDefinition{ "build": { - Outputs: TaskOutputs{Inclusions: []string{".next/**", "dist/**"}, Exclusions: []string{"dist/assets/**"}}, - TopologicalDependencies: []string{"build"}, - EnvVarDependencies: []string{}, - TaskDependencies: []string{}, - ShouldCache: true, - OutputMode: util.NewTaskOutput, + definedFields: util.SetFromStrings([]string{"Outputs", "OutputMode", "TopologicalDependencies"}), + TaskDefinition: TaskDefinition{ + Outputs: TaskOutputs{Inclusions: []string{".next/**", "dist/**"}, Exclusions: []string{"dist/assets/**"}}, + TopologicalDependencies: []string{"build"}, + EnvVarDependencies: []string{}, + TaskDependencies: []string{}, + ShouldCache: true, + OutputMode: util.NewTaskOutput, + }, }, } @@ -155,16 +170,16 @@ func Test_ReadTurboConfig_EnvDeclarations(t *testing.T) { } pipeline := turboJSON.Pipeline - assert.EqualValues(t, pipeline["task1"].EnvVarDependencies, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task2"].EnvVarDependencies, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task3"].EnvVarDependencies, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task4"].EnvVarDependencies, sortedArray([]string{"A", "B"})) - assert.EqualValues(t, pipeline["task6"].EnvVarDependencies, sortedArray([]string{"A", "B", "C", "D", "E", "F"})) - assert.EqualValues(t, pipeline["task7"].EnvVarDependencies, sortedArray([]string{"A", "B", "C"})) - assert.EqualValues(t, pipeline["task8"].EnvVarDependencies, sortedArray([]string{"A", "B", "C"})) - assert.EqualValues(t, pipeline["task9"].EnvVarDependencies, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task10"].EnvVarDependencies, sortedArray([]string{"A"})) - assert.EqualValues(t, pipeline["task11"].EnvVarDependencies, sortedArray([]string{"A", "B"})) + assert.EqualValues(t, pipeline["task1"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task2"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task3"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task4"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B"})) + assert.EqualValues(t, pipeline["task6"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B", "C", "D", "E", "F"})) + assert.EqualValues(t, pipeline["task7"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B", "C"})) + assert.EqualValues(t, pipeline["task8"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B", "C"})) + assert.EqualValues(t, pipeline["task9"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task10"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task11"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B"})) // check global env vars also assert.EqualValues(t, sortedArray([]string{"FOO", "BAR", "BAZ", "QUX"}), sortedArray(turboJSON.GlobalEnv)) @@ -182,14 +197,14 @@ func Test_TaskOutputsSort(t *testing.T) { } // Helpers -func validateOutput(t *testing.T, turboJSON *TurboJSON, expectedPipeline map[string]TaskDefinition) { +func validateOutput(t *testing.T, turboJSON *TurboJSON, expectedPipeline 
Pipeline) { t.Helper() assertIsSorted(t, turboJSON.GlobalDeps, "Global Deps") assertIsSorted(t, turboJSON.GlobalEnv, "Global Env") validatePipeline(t, turboJSON.Pipeline, expectedPipeline) } -func validatePipeline(t *testing.T, actual Pipeline, expected map[string]TaskDefinition) { +func validatePipeline(t *testing.T, actual Pipeline, expected Pipeline) { t.Helper() // check top level keys if len(actual) != len(expected) { @@ -206,18 +221,18 @@ func validatePipeline(t *testing.T, actual Pipeline, expected map[string]TaskDef // check individual task definitions for taskName, expectedTaskDefinition := range expected { - actualTaskDefinition, ok := actual[taskName] + bookkeepingTaskDef, ok := actual[taskName] if !ok { t.Errorf("missing expected task: %v", taskName) } + actualTaskDefinition := bookkeepingTaskDef.TaskDefinition assertIsSorted(t, actualTaskDefinition.Outputs.Inclusions, "Task output inclusions") assertIsSorted(t, actualTaskDefinition.Outputs.Exclusions, "Task output exclusions") assertIsSorted(t, actualTaskDefinition.EnvVarDependencies, "Task env vars") assertIsSorted(t, actualTaskDefinition.TopologicalDependencies, "Topo deps") assertIsSorted(t, actualTaskDefinition.TaskDependencies, "Task deps") - assert.EqualValuesf(t, expectedTaskDefinition, actualTaskDefinition, "task definition mismatch for %v", taskName) + assert.EqualValuesf(t, expectedTaskDefinition, bookkeepingTaskDef, "task definition mismatch for %v", taskName) } - } func getTestDir(t *testing.T, testName string) turbopath.AbsoluteSystemPath { diff --git a/cli/internal/graph/graph.go b/cli/internal/graph/graph.go index 97c0c884e00e1..5f27c74da4425 100644 --- a/cli/internal/graph/graph.go +++ b/cli/internal/graph/graph.go @@ -14,7 +14,10 @@ import ( ) // WorkspaceInfos holds information about each workspace in the monorepo. -type WorkspaceInfos map[string]*fs.PackageJSON +type WorkspaceInfos struct { + PackageJSONs map[string]*fs.PackageJSON + TurboConfigs map[string]*fs.TurboJSON +} // CompleteGraph represents the common state inferred from the filesystem and pipeline. // It is not intended to include information specific to a particular run. @@ -35,8 +38,7 @@ type CompleteGraph struct { // Map of TaskDefinitions by taskID TaskDefinitions map[string]*fs.TaskDefinition - - RepoRoot turbopath.AbsoluteSystemPath + RepoRoot turbopath.AbsoluteSystemPath } // GetPackageTaskVisitor wraps a `visitor` function that is used for walking the TaskGraph @@ -45,7 +47,7 @@ type CompleteGraph struct { func (g *CompleteGraph) GetPackageTaskVisitor(ctx gocontext.Context, visitor func(ctx gocontext.Context, packageTask *nodes.PackageTask) error) func(taskID string) error { return func(taskID string) error { packageName, taskName := util.GetPackageTaskFromId(taskID) - pkg, ok := g.WorkspaceInfos[packageName] + pkg, ok := g.WorkspaceInfos.PackageJSONs[packageName] if !ok { return fmt.Errorf("cannot find package %v for task %v", packageName, taskID) } @@ -76,6 +78,57 @@ func (g *CompleteGraph) GetPackageTaskVisitor(ctx gocontext.Context, visitor fun } } +// GetPipelineFromWorkspace returns the Unmarshaled fs.Pipeline struct from turbo.json in the given workspace. 
+func (g *CompleteGraph) GetPipelineFromWorkspace(workspaceName string, isSinglePackage bool) (fs.Pipeline, error) { + turboConfig, err := g.GetTurboConfigFromWorkspace(workspaceName, isSinglePackage) + + if err != nil { + return nil, err + } + + return turboConfig.Pipeline, nil +} + +// GetTurboConfigFromWorkspace returns the Unmarshaled fs.TurboJSON from turbo.json in the given workspace. +func (g *CompleteGraph) GetTurboConfigFromWorkspace(workspaceName string, isSinglePackage bool) (*fs.TurboJSON, error) { + cachedTurboConfig, ok := g.WorkspaceInfos.TurboConfigs[workspaceName] + + if ok { + return cachedTurboConfig, nil + } + + var workspacePackageJSON *fs.PackageJSON + if pkgJSON, err := g.GetPackageJSONFromWorkspace(workspaceName); err == nil { + workspacePackageJSON = pkgJSON + } else { + return nil, err + } + + // Note: pkgJSON.Dir for the root workspace will be an empty string, and for + // other workspaces, it will be a relative path. + workspaceAbsolutePath := workspacePackageJSON.Dir.RestoreAnchor(g.RepoRoot) + turboConfig, err := fs.LoadTurboConfig(workspaceAbsolutePath, workspacePackageJSON, isSinglePackage) + + // If we failed to load a TurboConfig, bubble up the error + if err != nil { + return nil, err + } + + // add to cache + g.WorkspaceInfos.TurboConfigs[workspaceName] = turboConfig + + return g.WorkspaceInfos.TurboConfigs[workspaceName], nil +} + +// GetPackageJSONFromWorkspace returns an Unmarshaled struct of the package.json in the given workspace +func (g *CompleteGraph) GetPackageJSONFromWorkspace(workspaceName string) (*fs.PackageJSON, error) { + if pkgJSON, ok := g.WorkspaceInfos.PackageJSONs[workspaceName]; ok { + return pkgJSON, nil + } + + return nil, fmt.Errorf("No package.json for %s", workspaceName) +} + // repoRelativeLogFile returns the path to the log file for this task execution as a // relative path from the root of the monorepo. func repoRelativeLogFile(pt *nodes.PackageTask) string { diff --git a/cli/internal/hashing/package_deps_hash.go b/cli/internal/hashing/package_deps_hash.go index 9be60ddf8cec8..612df5e6767da 100644 --- a/cli/internal/hashing/package_deps_hash.go +++ b/cli/internal/hashing/package_deps_hash.go @@ -43,12 +43,16 @@ func GetPackageDeps(rootPath turbopath.AbsoluteSystemPath, p *PackageDepsOptions } result = gitLsTreeOutput } else { - - // Add in package.json to input patterns because if the `scripts` in - // the package.json change (i.e. the tasks that turbo executes), we want - // a cache miss, since any existing cache could be invalid. - // Note this package.json will be resolved relative to the pkgPath. + // Add in package.json and turbo.json to input patterns. Both file paths are relative to pkgPath + // + // - package.json is an input because if the `scripts` in + // the package.json change (i.e. the tasks that turbo executes), we want + // a cache miss, since any existing cache could be invalid. + // - turbo.json because it's the definition of the tasks themselves. The root turbo.json + // is similarly included in the global hash. This file may not exist in the workspace, but + // that is ok, because it will get ignored downstream. calculatedInputs = append(calculatedInputs, "package.json") + calculatedInputs = append(calculatedInputs, "turbo.json") // The input patterns are relative to the package. // However, we need to change the globbing to be relative to the repo root. 
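The validation hook added to engine.go above follows a small composable pattern: each rule is a func(*TurboJSON) []error, and Validate simply concatenates whatever the rules return. Below is a minimal, runnable Go sketch of that shape, not the shipped code: turboJSON here is a stand-in with only the field the rule needs, though rootPkgName "//" does match turbo's util.RootPkgName.

package main

import "fmt"

// rootPkgName mirrors util.RootPkgName, turbo's name for the root workspace.
const rootPkgName = "//"

// turboJSON is a stand-in for fs.TurboJSON with just the field we validate.
type turboJSON struct {
	Extends []string
}

// validation mirrors the fs.TurboJSONValidation function type.
type validation func(*turboJSON) []error

// validate runs every rule and concatenates the errors they return.
func (tj *turboJSON) validate(rules []validation) []error {
	var all []error
	for _, rule := range rules {
		all = append(all, rule(tj)...)
	}
	return all
}

// validateExtends condenses the patch's three checks: "extends" must exist,
// contain exactly one entry, and that entry must be the root workspace.
func validateExtends(tj *turboJSON) []error {
	var errs []error
	if len(tj.Extends) == 0 {
		errs = append(errs, fmt.Errorf("no \"extends\" key found"))
	} else if len(tj.Extends) > 1 || tj.Extends[0] != rootPkgName {
		errs = append(errs, fmt.Errorf("you can only extend from the root workspace"))
	}
	return errs
}

func main() {
	bad := &turboJSON{Extends: []string{"web", "docs"}}
	fmt.Println(bad.validate([]validation{validateExtends})) // [you can only extend from the root workspace]

	good := &turboJSON{Extends: []string{rootPkgName}}
	fmt.Println(good.validate([]validation{validateExtends})) // []
}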
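The turbo_json.go changes above hinge on one distinction: a key that was omitted versus a key explicitly set to its zero value (e.g. "cache": false). The sketch below illustrates the mechanism under simplified types. Pointer fields in the raw struct stay nil when encoding/json never sees the key, and a set of defined-field names is recorded during UnmarshalJSON; these types stand in for the real rawTask/BookkeepingTaskDefinition pair, which uses util.Set and carries more fields.

package main

import (
	"encoding/json"
	"fmt"
)

// rawTask stands in for the wire format: pointer types mean a field is nil
// when the JSON key was absent, so false and "missing" stay distinguishable.
type rawTask struct {
	Cache      *bool    `json:"cache,omitempty"`
	Persistent *bool    `json:"persistent,omitempty"`
	Outputs    []string `json:"outputs,omitempty"`
}

// taskDef is the resolved form with plain, non-pointer fields.
type taskDef struct {
	ShouldCache bool
	Persistent  bool
	Outputs     []string
}

// bookkeepingTaskDef pairs a resolved definition with the names of the
// fields that were actually present in the source JSON.
type bookkeepingTaskDef struct {
	definedFields map[string]bool
	def           taskDef
}

func (b *bookkeepingTaskDef) UnmarshalJSON(data []byte) error {
	var raw rawTask
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	b.definedFields = map[string]bool{}
	b.def.ShouldCache = true // default: cache unless explicitly disabled
	if raw.Cache != nil {
		b.definedFields["ShouldCache"] = true
		b.def.ShouldCache = *raw.Cache
	}
	if raw.Persistent != nil {
		b.definedFields["Persistent"] = true
		b.def.Persistent = *raw.Persistent
	}
	if raw.Outputs != nil {
		b.definedFields["Outputs"] = true
		b.def.Outputs = raw.Outputs
	}
	return nil
}

func main() {
	var explicit, implicit bookkeepingTaskDef
	_ = json.Unmarshal([]byte(`{"cache": false}`), &explicit)
	_ = json.Unmarshal([]byte(`{"outputs": ["dist/**"]}`), &implicit)
	fmt.Println(explicit.definedFields, explicit.def.ShouldCache) // map[ShouldCache:true] false
	fmt.Println(implicit.definedFields, implicit.def.ShouldCache) // map[Outputs:true] true
}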
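With that bookkeeping in place, the merge across extended configs becomes mechanical: walk the chain root-to-leaf and let a later definition win only for fields it explicitly defined, so an inherited "cache": false survives a child turbo.json that never mentions "cache". Again a hedged sketch on the same illustrative types, not the MergeTaskDefinitions implementation itself:

package main

import "fmt"

type taskDef struct {
	ShouldCache bool
	Outputs     []string
}

type bookkeepingTaskDef struct {
	definedFields map[string]bool
	def           taskDef
}

// mergeTaskDefs respects explicit zero values: a field is copied only when
// the bookkeeping says it was set on purpose in that config.
func mergeTaskDefs(defs []bookkeepingTaskDef) taskDef {
	merged := taskDef{ShouldCache: true} // default when no config sets it
	for _, d := range defs {
		if d.definedFields["ShouldCache"] {
			merged.ShouldCache = d.def.ShouldCache
		}
		if d.definedFields["Outputs"] {
			merged.Outputs = d.def.Outputs
		}
	}
	return merged
}

func main() {
	root := bookkeepingTaskDef{
		definedFields: map[string]bool{"ShouldCache": true, "Outputs": true},
		def:           taskDef{ShouldCache: false, Outputs: []string{"dist/**"}},
	}
	workspace := bookkeepingTaskDef{ // overrides outputs, says nothing about caching
		definedFields: map[string]bool{"Outputs": true},
		def:           taskDef{Outputs: []string{"build/**"}},
	}
	fmt.Printf("%+v\n", mergeTaskDefs([]bookkeepingTaskDef{root, workspace}))
	// {ShouldCache:false Outputs:[build/**]}
}

This is also why the synthesized root tasks in LoadTurboConfig mark ShouldCache as a defined field: the merge then treats the false as deliberate rather than as an unset default.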
diff --git a/cli/internal/prune/prune.go b/cli/internal/prune/prune.go index 4386431d2da06..d3085393769b0 100644 --- a/cli/internal/prune/prune.go +++ b/cli/internal/prune/prune.go @@ -80,7 +80,7 @@ func (p *prune) prune(opts *turbostate.PrunePayload) error { for _, scope := range opts.Scope { p.base.Logger.Trace("scope", "value", scope) - target, scopeIsValid := ctx.WorkspaceInfos[scope] + target, scopeIsValid := ctx.WorkspaceInfos.PackageJSONs[scope] if !scopeIsValid { return errors.Errorf("invalid scope: package %v not found", scope) } @@ -139,35 +139,35 @@ func (p *prune) prune(opts *turbostate.PrunePayload) error { continue } - workspaces = append(workspaces, ctx.WorkspaceInfos[internalDep].Dir) - originalDir := ctx.WorkspaceInfos[internalDep].Dir.RestoreAnchor(p.base.RepoRoot) + workspaces = append(workspaces, ctx.WorkspaceInfos.PackageJSONs[internalDep].Dir) + originalDir := ctx.WorkspaceInfos.PackageJSONs[internalDep].Dir.RestoreAnchor(p.base.RepoRoot) info, err := originalDir.Lstat() if err != nil { return errors.Wrapf(err, "failed to lstat %s", originalDir) } - targetDir := ctx.WorkspaceInfos[internalDep].Dir.RestoreAnchor(fullDir) + targetDir := ctx.WorkspaceInfos.PackageJSONs[internalDep].Dir.RestoreAnchor(fullDir) if err := targetDir.MkdirAllMode(info.Mode()); err != nil { return errors.Wrapf(err, "failed to create folder %s for %v", targetDir, internalDep) } - if err := fs.RecursiveCopy(ctx.WorkspaceInfos[internalDep].Dir.ToStringDuringMigration(), targetDir.ToStringDuringMigration()); err != nil { + if err := fs.RecursiveCopy(ctx.WorkspaceInfos.PackageJSONs[internalDep].Dir.ToStringDuringMigration(), targetDir.ToStringDuringMigration()); err != nil { return errors.Wrapf(err, "failed to copy %v into %v", internalDep, targetDir) } if opts.Docker { - jsonDir := outDir.UntypedJoin("json", ctx.WorkspaceInfos[internalDep].PackageJSONPath.ToStringDuringMigration()) + jsonDir := outDir.UntypedJoin("json", ctx.WorkspaceInfos.PackageJSONs[internalDep].PackageJSONPath.ToStringDuringMigration()) if err := jsonDir.EnsureDir(); err != nil { return errors.Wrapf(err, "failed to create folder %v for %v", jsonDir, internalDep) } - if err := fs.RecursiveCopy(ctx.WorkspaceInfos[internalDep].PackageJSONPath.ToStringDuringMigration(), jsonDir.ToStringDuringMigration()); err != nil { + if err := fs.RecursiveCopy(ctx.WorkspaceInfos.PackageJSONs[internalDep].PackageJSONPath.ToStringDuringMigration(), jsonDir.ToStringDuringMigration()); err != nil { return errors.Wrapf(err, "failed to copy %v into %v", internalDep, jsonDir) } } - for _, pkg := range ctx.WorkspaceInfos[internalDep].TransitiveDeps { + for _, pkg := range ctx.WorkspaceInfos.PackageJSONs[internalDep].TransitiveDeps { lockfileKeys = append(lockfileKeys, pkg.Key) } - p.base.UI.Output(fmt.Sprintf(" - Added %v", ctx.WorkspaceInfos[internalDep].Name)) + p.base.UI.Output(fmt.Sprintf(" - Added %v", ctx.WorkspaceInfos.PackageJSONs[internalDep].Name)) } p.base.Logger.Trace("new workspaces", "value", workspaces) diff --git a/cli/internal/run/dry_run.go b/cli/internal/run/dry_run.go index 88754fc829efa..d4be4ed65a983 100644 --- a/cli/internal/run/dry_run.go +++ b/cli/internal/run/dry_run.go @@ -208,7 +208,7 @@ func displayDryTextRun(ui cli.Ui, summary *dryRunSummary, workspaceInfos graph.W p := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0) fmt.Fprintln(p, "Name\tPath\t") for _, pkg := range summary.Packages { - fmt.Fprintf(p, "%s\t%s\t\n", pkg, workspaceInfos[pkg].Dir) + fmt.Fprintf(p, "%s\t%s\t\n", pkg, workspaceInfos.PackageJSONs[pkg].Dir) } if 
err := p.Flush(); err != nil { return err diff --git a/cli/internal/run/global_hash.go b/cli/internal/run/global_hash.go index 1ef4fdf16cc99..2b768b8f7dba0 100644 --- a/cli/internal/run/global_hash.go +++ b/cli/internal/run/global_hash.go @@ -84,19 +84,21 @@ func calculateGlobalHash(rootpath turbopath.AbsoluteSystemPath, rootPackageJSON if err != nil { return "", fmt.Errorf("error hashing files: %w", err) } + globalHashable := struct { globalFileHashMap map[turbopath.AnchoredUnixPath]string rootExternalDepsHash string hashedSortedEnvPairs []string globalCacheKey string - pipeline fs.Pipeline + pipeline fs.PristinePipeline }{ globalFileHashMap: globalFileHashMap, rootExternalDepsHash: rootPackageJSON.ExternalDepsHash, hashedSortedEnvPairs: globalHashableEnvPairs, globalCacheKey: _globalCacheKey, - pipeline: pipeline, + pipeline: pipeline.Pristine(), } + globalHash, err := fs.HashObject(globalHashable) if err != nil { return "", fmt.Errorf("error hashing global dependencies %w", err) diff --git a/cli/internal/run/run.go b/cli/internal/run/run.go index 850f0a67aef6f..7f275b7c6c185 100644 --- a/cli/internal/run/run.go +++ b/cli/internal/run/run.go @@ -160,13 +160,6 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { if err != nil { return fmt.Errorf("failed to read package.json: %w", err) } - turboJSON, err := fs.LoadTurboConfig(r.base.RepoRoot, rootPackageJSON, r.opts.runOpts.singlePackage) - if err != nil { - return err - } - - // TODO: these values come from a config file, hopefully viper can help us merge these - r.opts.cacheOpts.RemoteCacheOpts = turboJSON.RemoteCacheOptions var pkgDepGraph *context.Context if r.opts.runOpts.singlePackage { @@ -201,17 +194,26 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { return errors.Wrap(err, "Invalid package dependency graph") } - pipeline := turboJSON.Pipeline - if err := validateTasks(pipeline, targets); err != nil { - location := "" - if r.opts.runOpts.singlePackage { - location = "in `scripts` in \"package.json\"" - } else { - location = "in `pipeline` in \"turbo.json\"" - } - return fmt.Errorf("%s %s. Are you sure you added it?", err, location) + // TODO: consolidate some of these arguments + // Note: not all properties are set here. 
GlobalHash and Pipeline keys are set later + g := &graph.CompleteGraph{ + WorkspaceGraph: pkgDepGraph.WorkspaceGraph, + WorkspaceInfos: pkgDepGraph.WorkspaceInfos, + RootNode: pkgDepGraph.RootNode, + TaskDefinitions: map[string]*fs.TaskDefinition{}, + RepoRoot: r.base.RepoRoot, } + turboJSON, err := g.GetTurboConfigFromWorkspace(util.RootPkgName, r.opts.runOpts.singlePackage) + if err != nil { + return err + } + + // TODO: these values come from a config file, hopefully viper can help us merge these + r.opts.cacheOpts.RemoteCacheOpts = turboJSON.RemoteCacheOptions + + pipeline := turboJSON.Pipeline + g.Pipeline = pipeline scmInstance, err := scm.FromInRepo(r.base.RepoRoot) if err != nil { if errors.Is(err, scm.ErrFallback) { @@ -235,6 +237,7 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { } } } + globalHash, err := calculateGlobalHash( r.base.RepoRoot, rootPackageJSON, @@ -246,25 +249,15 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { r.base.Logger, os.Environ(), ) + + g.GlobalHash = globalHash + if err != nil { return fmt.Errorf("failed to calculate global hash: %v", err) } r.base.Logger.Debug("global hash", "value", globalHash) r.base.Logger.Debug("local cache folder", "path", r.opts.cacheOpts.OverrideDir) - // TODO: consolidate some of these arguments - g := &graph.CompleteGraph{ - WorkspaceGraph: pkgDepGraph.WorkspaceGraph, - // TODO(mehulkar): We can remove pipeline from here eventually - // It is only used by the taskhash tracker to look up taskDefinitions - // but we will eventually replace that - Pipeline: pipeline, - WorkspaceInfos: pkgDepGraph.WorkspaceInfos, - GlobalHash: globalHash, - RootNode: pkgDepGraph.RootNode, - TaskDefinitions: map[string]*fs.TaskDefinition{}, - RepoRoot: r.base.RepoRoot, - } rs := &runSpec{ Targets: targets, FilteredPkgs: filteredPkgs, @@ -281,6 +274,7 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { if err != nil { return errors.Wrap(err, "error preparing engine") } + tracker := taskhash.NewTracker( g.RootNode, g.GlobalHash, @@ -289,7 +283,13 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { g.WorkspaceInfos, ) - err = tracker.CalculateFileHashes(engine.TaskGraph.Vertices(), rs.Opts.runOpts.concurrency, r.base.RepoRoot) + err = tracker.CalculateFileHashes( + engine.TaskGraph.Vertices(), + rs.Opts.runOpts.concurrency, + r.base.RepoRoot, + g, + ) + if err != nil { return errors.Wrap(err, "error hashing package files") } @@ -442,12 +442,3 @@ const ( _dryRunJSONValue = "Json" _dryRunTextValue = "Text" ) - -func validateTasks(pipeline fs.Pipeline, tasks []string) error { - for _, task := range tasks { - if !pipeline.HasTask(task) { - return fmt.Errorf("task `%v` not found", task) - } - } - return nil -} diff --git a/cli/internal/scope/filter/filter.go b/cli/internal/scope/filter/filter.go index 885268f29b6ed..7c3b960d1e11e 100644 --- a/cli/internal/scope/filter/filter.go +++ b/cli/internal/scope/filter/filter.go @@ -269,7 +269,7 @@ func (r *Resolver) filterNodesWithSelector(selector *TargetSelector) (util.Set, } else if matches { entryPackages.Add(pkgName) } - } else if pkg, ok := r.WorkspaceInfos[pkgNameStr]; !ok { + } else if pkg, ok := r.WorkspaceInfos.PackageJSONs[pkgNameStr]; !ok { return nil, fmt.Errorf("missing info for package %v", pkgName) } else if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", 
selector.parentDir, pkg.Dir, err) @@ -287,7 +287,7 @@ func (r *Resolver) filterNodesWithSelector(selector *TargetSelector) (util.Set, if parentDir == "." { entryPackages.Add(util.RootPkgName) } else { - for name, pkg := range r.WorkspaceInfos { + for name, pkg := range r.WorkspaceInfos.PackageJSONs { if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err) } else if matches { @@ -332,7 +332,7 @@ func (r *Resolver) filterSubtreesWithSelector(selector *TargetSelector) (util.Se parentDir := selector.parentDir entryPackages := make(util.Set) - for name, pkg := range r.WorkspaceInfos { + for name, pkg := range r.WorkspaceInfos.PackageJSONs { if parentDir == "" { entryPackages.Add(name) } else if matches, err := doublestar.PathMatch(parentDir.ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { diff --git a/cli/internal/scope/filter/filter_test.go b/cli/internal/scope/filter/filter_test.go index 79ca1b18db4dd..6e49045860ef0 100644 --- a/cli/internal/scope/filter/filter_test.go +++ b/cli/internal/scope/filter/filter_test.go @@ -37,7 +37,10 @@ func Test_filter(t *testing.T) { if err != nil { t.Fatalf("failed to get working directory: %v", err) } - packageJSONs := make(graph.WorkspaceInfos) + workspaceInfos := graph.WorkspaceInfos{ + PackageJSONs: make(map[string]*fs.PackageJSON), + } + packageJSONs := workspaceInfos.PackageJSONs graph := &dag.AcyclicGraph{} graph.Add("project-0") packageJSONs["project-0"] = &fs.PackageJSON{ @@ -291,7 +294,7 @@ func Test_filter(t *testing.T) { t.Run(tc.Name, func(t *testing.T) { r := &Resolver{ Graph: graph, - WorkspaceInfos: packageJSONs, + WorkspaceInfos: workspaceInfos, Cwd: root, Inference: tc.PackageInference, } @@ -306,7 +309,7 @@ func Test_filter(t *testing.T) { t.Run("report unmatched filters", func(t *testing.T) { r := &Resolver{ Graph: graph, - WorkspaceInfos: packageJSONs, + WorkspaceInfos: workspaceInfos, Cwd: root, } pkgs, err := r.getFilteredPackages([]*TargetSelector{ @@ -338,7 +341,10 @@ func Test_matchScopedPackage(t *testing.T) { t.Fatalf("failed to get working directory: %v", err) } - packageJSONs := make(graph.WorkspaceInfos) + workspaceInfos := graph.WorkspaceInfos{ + PackageJSONs: make(map[string]*fs.PackageJSON), + } + packageJSONs := workspaceInfos.PackageJSONs graph := &dag.AcyclicGraph{} graph.Add("@foo/bar") packageJSONs["@foo/bar"] = &fs.PackageJSON{ @@ -347,7 +353,7 @@ func Test_matchScopedPackage(t *testing.T) { } r := &Resolver{ Graph: graph, - WorkspaceInfos: packageJSONs, + WorkspaceInfos: workspaceInfos, Cwd: root, } pkgs, err := r.getFilteredPackages([]*TargetSelector{ @@ -371,7 +377,10 @@ func Test_matchExactPackages(t *testing.T) { t.Fatalf("failed to get working directory: %v", err) } - packageJSONs := make(graph.WorkspaceInfos) + workspaceInfos := graph.WorkspaceInfos{ + PackageJSONs: make(map[string]*fs.PackageJSON), + } + packageJSONs := workspaceInfos.PackageJSONs graph := &dag.AcyclicGraph{} graph.Add("@foo/bar") packageJSONs["@foo/bar"] = &fs.PackageJSON{ @@ -385,7 +394,7 @@ func Test_matchExactPackages(t *testing.T) { } r := &Resolver{ Graph: graph, - WorkspaceInfos: packageJSONs, + WorkspaceInfos: workspaceInfos, Cwd: root, } pkgs, err := r.getFilteredPackages([]*TargetSelector{ @@ -409,7 +418,10 @@ func Test_matchMultipleScopedPackages(t *testing.T) { t.Fatalf("failed to get working directory: %v", err) } - 
packageJSONs := make(graph.WorkspaceInfos) + workspaceInfos := graph.WorkspaceInfos{ + PackageJSONs: make(map[string]*fs.PackageJSON), + } + packageJSONs := workspaceInfos.PackageJSONs graph := &dag.AcyclicGraph{} graph.Add("@foo/bar") packageJSONs["@foo/bar"] = &fs.PackageJSON{ @@ -423,7 +435,7 @@ func Test_matchMultipleScopedPackages(t *testing.T) { } r := &Resolver{ Graph: graph, - WorkspaceInfos: packageJSONs, + WorkspaceInfos: workspaceInfos, Cwd: root, } pkgs, err := r.getFilteredPackages([]*TargetSelector{ @@ -452,7 +464,10 @@ func Test_SCM(t *testing.T) { head1Changed.Add(util.RootPkgName) head2Changed := make(util.Set) head2Changed.Add("package-3") - packageJSONs := make(graph.WorkspaceInfos) + workspaceInfos := graph.WorkspaceInfos{ + PackageJSONs: make(map[string]*fs.PackageJSON), + } + packageJSONs := workspaceInfos.PackageJSONs graph := &dag.AcyclicGraph{} graph.Add("package-1") packageJSONs["package-1"] = &fs.PackageJSON{ @@ -479,7 +494,7 @@ func Test_SCM(t *testing.T) { r := &Resolver{ Graph: graph, - WorkspaceInfos: packageJSONs, + WorkspaceInfos: workspaceInfos, Cwd: root, PackagesChangedInRange: func(fromRef string, toRef string) (util.Set, error) { if fromRef == "HEAD~1" && toRef == "HEAD" { diff --git a/cli/internal/scope/scope.go b/cli/internal/scope/scope.go index 204ef7ed08ae4..a6768de904091 100644 --- a/cli/internal/scope/scope.go +++ b/cli/internal/scope/scope.go @@ -160,7 +160,7 @@ func calculateInference(repoRoot turbopath.AbsoluteSystemPath, rawPkgInferenceDi } logger.Debug(fmt.Sprintf("Using %v as a basis for selecting packages", pkgInferencePath)) fullInferencePath := repoRoot.Join(pkgInferencePath) - for _, pkgInfo := range packageInfos { + for _, pkgInfo := range packageInfos.PackageJSONs { pkgPath := pkgInfo.Dir.RestoreAnchor(repoRoot) inferredPathIsBelow, err := pkgPath.ContainsPath(fullInferencePath) if err != nil { @@ -208,7 +208,7 @@ func (o *Opts) getPackageChangeFunc(scm scm.SCM, cwd turbopath.AbsoluteSystemPat } makeAllPkgs := func() util.Set { allPkgs := make(util.Set) - for pkg := range ctx.WorkspaceInfos { + for pkg := range ctx.WorkspaceInfos.PackageJSONs { allPkgs.Add(pkg) } return allPkgs @@ -350,7 +350,7 @@ func getChangedPackages(changedFiles []string, packageInfos graph.WorkspaceInfos changedPackages := make(util.Set) for _, changedFile := range changedFiles { found := false - for pkgName, pkgInfo := range packageInfos { + for pkgName, pkgInfo := range packageInfos.PackageJSONs { if pkgName != util.RootPkgName && fileInPackage(changedFile, pkgInfo.Dir.ToStringDuringMigration()) { changedPackages.Add(pkgName) found = true diff --git a/cli/internal/scope/scope_test.go b/cli/internal/scope/scope_test.go index c904907c085bf..8ec743b45f0b5 100644 --- a/cli/internal/scope/scope_test.go +++ b/cli/internal/scope/scope_test.go @@ -129,57 +129,59 @@ func TestResolvePackages(t *testing.T) { graph.Connect(dag.BasicEdge("app2", "libC")) graph.Connect(dag.BasicEdge("app2-a", "libC")) workspaceInfos := internalGraph.WorkspaceInfos{ - "//": { - Dir: turbopath.AnchoredSystemPath("").ToSystemPath(), - UnresolvedExternalDeps: map[string]string{"global": "2"}, - TransitiveDeps: []lockfile.Package{{Key: "global2", Version: "2", Found: true}}, - }, - "app0": { - Dir: turbopath.AnchoredUnixPath("app/app0").ToSystemPath(), - Name: "app0", - UnresolvedExternalDeps: map[string]string{"app0-dep": "2"}, - TransitiveDeps: []lockfile.Package{ - {Key: "app0-dep2", Version: "2", Found: true}, - {Key: "app0-util2", Version: "2", Found: true}, + PackageJSONs: 
map[string]*fs.PackageJSON{ + "//": { + Dir: turbopath.AnchoredSystemPath("").ToSystemPath(), + UnresolvedExternalDeps: map[string]string{"global": "2"}, + TransitiveDeps: []lockfile.Package{{Key: "global2", Version: "2", Found: true}}, }, - }, - "app1": { - Dir: turbopath.AnchoredUnixPath("app/app1").ToSystemPath(), - Name: "app1", - }, - "app2": { - Dir: turbopath.AnchoredUnixPath("app/app2").ToSystemPath(), - Name: "app2", - }, - "app2-a": { - Dir: turbopath.AnchoredUnixPath("app/app2-a").ToSystemPath(), - Name: "app2-a", - }, - "libA": { - Dir: turbopath.AnchoredUnixPath("libs/libA").ToSystemPath(), - Name: "libA", - }, - "libB": { - Dir: turbopath.AnchoredUnixPath("libs/libB").ToSystemPath(), - Name: "libB", - UnresolvedExternalDeps: map[string]string{"external": "1"}, - TransitiveDeps: []lockfile.Package{ - {Key: "external-dep-a1", Version: "1", Found: true}, - {Key: "external-dep-b1", Version: "1", Found: true}, - {Key: "external1", Version: "1", Found: true}, + "app0": { + Dir: turbopath.AnchoredUnixPath("app/app0").ToSystemPath(), + Name: "app0", + UnresolvedExternalDeps: map[string]string{"app0-dep": "2"}, + TransitiveDeps: []lockfile.Package{ + {Key: "app0-dep2", Version: "2", Found: true}, + {Key: "app0-util2", Version: "2", Found: true}, + }, + }, + "app1": { + Dir: turbopath.AnchoredUnixPath("app/app1").ToSystemPath(), + Name: "app1", + }, + "app2": { + Dir: turbopath.AnchoredUnixPath("app/app2").ToSystemPath(), + Name: "app2", + }, + "app2-a": { + Dir: turbopath.AnchoredUnixPath("app/app2-a").ToSystemPath(), + Name: "app2-a", + }, + "libA": { + Dir: turbopath.AnchoredUnixPath("libs/libA").ToSystemPath(), + Name: "libA", + }, + "libB": { + Dir: turbopath.AnchoredUnixPath("libs/libB").ToSystemPath(), + Name: "libB", + UnresolvedExternalDeps: map[string]string{"external": "1"}, + TransitiveDeps: []lockfile.Package{ + {Key: "external-dep-a1", Version: "1", Found: true}, + {Key: "external-dep-b1", Version: "1", Found: true}, + {Key: "external1", Version: "1", Found: true}, + }, + }, + "libC": { + Dir: turbopath.AnchoredUnixPath("libs/libC").ToSystemPath(), + Name: "libC", + }, + "libD": { + Dir: turbopath.AnchoredUnixPath("libs/libD").ToSystemPath(), + Name: "libD", }, - }, - "libC": { - Dir: turbopath.AnchoredUnixPath("libs/libC").ToSystemPath(), - Name: "libC", - }, - "libD": { - Dir: turbopath.AnchoredUnixPath("libs/libD").ToSystemPath(), - Name: "libD", }, } packageNames := []string{} - for name := range workspaceInfos { + for name := range workspaceInfos.PackageJSONs { packageNames = append(packageNames, name) } diff --git a/cli/internal/taskhash/taskhash.go b/cli/internal/taskhash/taskhash.go index daadd43ca8a17..616125b1be741 100644 --- a/cli/internal/taskhash/taskhash.go +++ b/cli/internal/taskhash/taskhash.go @@ -92,6 +92,7 @@ func (pfs *packageFileSpec) hash(pkg *fs.PackageJSON, repoRoot turbopath.Absolut } hashObject = manualHashObject } + hashOfFiles, otherErr := fs.HashObject(hashObject) if otherErr != nil { return "", otherErr @@ -158,7 +159,12 @@ type packageFileHashes map[packageFileHashKey]string // CalculateFileHashes hashes each unique package-inputs combination that is present // in the task graph. Must be called before calculating task hashes. 
-func (th *Tracker) CalculateFileHashes(allTasks []dag.Vertex, workerCount int, repoRoot turbopath.AbsoluteSystemPath) error { +func (th *Tracker) CalculateFileHashes( + allTasks []dag.Vertex, + workerCount int, + repoRoot turbopath.AbsoluteSystemPath, + completeGraph *graph.CompleteGraph, +) error { hashTasks := make(util.Set) for _, v := range allTasks { @@ -174,9 +180,7 @@ func (th *Tracker) CalculateFileHashes(allTasks []dag.Vertex, workerCount int, r continue } - // TODO(mehulkar): Once we start composing turbo.json, we need to change this - // to look in the graph for TaskDefinitions, rather than the root pipeline. - taskDefinition, ok := th.pipeline.GetTaskDefinition(taskID) + taskDefinition, ok := completeGraph.TaskDefinitions[taskID] if !ok { return fmt.Errorf("missing pipeline entry %v", taskID) } @@ -196,7 +200,7 @@ func (th *Tracker) CalculateFileHashes(allTasks []dag.Vertex, workerCount int, r for i := 0; i < workerCount; i++ { hashErrs.Go(func() error { for packageFileSpec := range hashQueue { - pkg, ok := th.workspaceInfos[packageFileSpec.pkg] + pkg, ok := th.workspaceInfos.PackageJSONs[packageFileSpec.pkg] if !ok { return fmt.Errorf("cannot find package %v", packageFileSpec.pkg) } From 772fabbe4794c56bfdd1b36a96d7ab11deaf7b4b Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 13 Feb 2023 15:24:01 -0800 Subject: [PATCH 11/31] Warn on absolute paths used in turbo.json (#3658) --- .../absolute_path_warning.t | 10 ++++++++ .../invalid_turbo_json/monorepo/.gitignore | 3 +++ .../monorepo/apps/my-app/package.json | 9 ++++++++ .../invalid_turbo_json/monorepo/package.json | 7 ++++++ .../monorepo/packages/util/package.json | 6 +++++ .../invalid_turbo_json/monorepo/turbo.json | 10 ++++++++ .../invalid_turbo_json/setup.sh | 6 +++++ cli/internal/fs/turbo_json.go | 23 ++++++++++++++++--- cli/internal/fs/turbo_json_test.go | 10 ++++---- 9 files changed, 76 insertions(+), 8 deletions(-) create mode 100644 cli/integration_tests/invalid_turbo_json/absolute_path_warning.t create mode 100644 cli/integration_tests/invalid_turbo_json/monorepo/.gitignore create mode 100644 cli/integration_tests/invalid_turbo_json/monorepo/apps/my-app/package.json create mode 100644 cli/integration_tests/invalid_turbo_json/monorepo/package.json create mode 100644 cli/integration_tests/invalid_turbo_json/monorepo/packages/util/package.json create mode 100644 cli/integration_tests/invalid_turbo_json/monorepo/turbo.json create mode 100755 cli/integration_tests/invalid_turbo_json/setup.sh diff --git a/cli/integration_tests/invalid_turbo_json/absolute_path_warning.t b/cli/integration_tests/invalid_turbo_json/absolute_path_warning.t new file mode 100644 index 0000000000000..83cbc244ea844 --- /dev/null +++ b/cli/integration_tests/invalid_turbo_json/absolute_path_warning.t @@ -0,0 +1,10 @@ +Setup + $ . ${TESTDIR}/../setup.sh + $ . 
${TESTDIR}/setup.sh $(pwd) + +Expect warnings + $ ${TURBO} build -v --dry > /dev/null + [-0-9:.TWZ+]+ \[INFO] turbo: skipping turbod since we appear to be in a non-interactive context (re) + [0-9]{4}/[0-9]{2}/[0-9]{2} [-0-9:.TWZ+]+ \[WARNING] Using an absolute path in "outputs" \(/another/absolute/path\) will not work and will be an error in a future version (re) + [0-9]{4}/[0-9]{2}/[0-9]{2} [-0-9:.TWZ+]+ \[WARNING] Using an absolute path in "inputs" \(/some/absolute/path\) will not work and will be an error in a future version (re) + [0-9]{4}/[0-9]{2}/[0-9]{2} [-0-9:.TWZ+]+ \[WARNING] Using an absolute path in "globalDependencies" \(/an/absolute/path\) will not work and will be an error in a future version (re) diff --git a/cli/integration_tests/invalid_turbo_json/monorepo/.gitignore b/cli/integration_tests/invalid_turbo_json/monorepo/.gitignore new file mode 100644 index 0000000000000..77af9fc60321d --- /dev/null +++ b/cli/integration_tests/invalid_turbo_json/monorepo/.gitignore @@ -0,0 +1,3 @@ +node_modules/ +.turbo +.npmrc diff --git a/cli/integration_tests/invalid_turbo_json/monorepo/apps/my-app/package.json b/cli/integration_tests/invalid_turbo_json/monorepo/apps/my-app/package.json new file mode 100644 index 0000000000000..f2a5d2525f399 --- /dev/null +++ b/cli/integration_tests/invalid_turbo_json/monorepo/apps/my-app/package.json @@ -0,0 +1,9 @@ +{ + "name": "my-app", + "scripts": { + "build": "echo 'building'" + }, + "dependencies": { + "util": "*" + } +} diff --git a/cli/integration_tests/invalid_turbo_json/monorepo/package.json b/cli/integration_tests/invalid_turbo_json/monorepo/package.json new file mode 100644 index 0000000000000..85175c18a49b0 --- /dev/null +++ b/cli/integration_tests/invalid_turbo_json/monorepo/package.json @@ -0,0 +1,7 @@ +{ + "name": "monorepo", + "workspaces": [ + "apps/**", + "packages/**" + ] +} diff --git a/cli/integration_tests/invalid_turbo_json/monorepo/packages/util/package.json b/cli/integration_tests/invalid_turbo_json/monorepo/packages/util/package.json new file mode 100644 index 0000000000000..8d3e121335e16 --- /dev/null +++ b/cli/integration_tests/invalid_turbo_json/monorepo/packages/util/package.json @@ -0,0 +1,6 @@ +{ + "name": "util", + "scripts": { + "build": "echo 'building'" + } +} diff --git a/cli/integration_tests/invalid_turbo_json/monorepo/turbo.json b/cli/integration_tests/invalid_turbo_json/monorepo/turbo.json new file mode 100644 index 0000000000000..2d2a8e28deaae --- /dev/null +++ b/cli/integration_tests/invalid_turbo_json/monorepo/turbo.json @@ -0,0 +1,10 @@ +{ + "$schema": "https://turbo.build/schema.json", + "globalDependencies": ["/an/absolute/path", "some/file"], + "pipeline": { + "build": { + "inputs": ["another/file", "/some/absolute/path"], + "outputs": ["/another/absolute/path", "a/relative/path"] + } + } +} diff --git a/cli/integration_tests/invalid_turbo_json/setup.sh b/cli/integration_tests/invalid_turbo_json/setup.sh new file mode 100755 index 0000000000000..864b7a64a37a6 --- /dev/null +++ b/cli/integration_tests/invalid_turbo_json/setup.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +SCRIPT_DIR=$(dirname ${BASH_SOURCE[0]}) +TARGET_DIR=$1 +cp -a ${SCRIPT_DIR}/monorepo/. 
${TARGET_DIR}/ +${SCRIPT_DIR}/../setup_git.sh ${TARGET_DIR} diff --git a/cli/internal/fs/turbo_json.go b/cli/internal/fs/turbo_json.go index 9e3b479d66c1a..18fb5cab432d8 100644 --- a/cli/internal/fs/turbo_json.go +++ b/cli/internal/fs/turbo_json.go @@ -6,6 +6,7 @@ import ( "io/ioutil" "log" "os" + "path/filepath" "sort" "strings" @@ -163,7 +164,7 @@ func LoadTurboConfig(dir turbopath.AbsoluteSystemPath, rootPackageJSON *PackageJ } var turboJSON *TurboJSON - turboFromFiles, err := ReadTurboConfig(dir.UntypedJoin(configFile)) + turboFromFiles, err := readTurboConfig(dir.UntypedJoin(configFile)) if !includeSynthesizedFromRootPackageJSON && err != nil { // If the file didn't exist, throw a custom error here instead of propagating @@ -250,8 +251,8 @@ func (to TaskOutputs) Sort() TaskOutputs { return TaskOutputs{Inclusions: inclusions, Exclusions: exclusions} } -// ReadTurboConfig reads turbo.json from a provided path -func ReadTurboConfig(turboJSONPath turbopath.AbsoluteSystemPath) (*TurboJSON, error) { +// readTurboConfig reads turbo.json from a provided path +func readTurboConfig(turboJSONPath turbopath.AbsoluteSystemPath) (*TurboJSON, error) { // If the configFile exists, use that if turboJSONPath.FileExists() { turboJSON, err := readTurboJSON(turboJSONPath) @@ -398,8 +399,14 @@ func (btd *BookkeepingTaskDefinition) UnmarshalJSON(data []byte) error { for _, glob := range task.Outputs { if strings.HasPrefix(glob, "!") { + if filepath.IsAbs(glob[1:]) { + log.Printf("[WARNING] Using an absolute path in \"outputs\" (%v) will not work and will be an error in a future version", glob) + } exclusions = append(exclusions, glob[1:]) } else { + if filepath.IsAbs(glob) { + log.Printf("[WARNING] Using an absolute path in \"outputs\" (%v) will not work and will be an error in a future version", glob) + } inclusions = append(inclusions, glob) } } @@ -465,6 +472,12 @@ func (btd *BookkeepingTaskDefinition) UnmarshalJSON(data []byte) error { // Note that we don't require Inputs to be sorted, we're going to // hash the resulting files and sort that instead btd.definedFields.Add("Inputs") + // TODO: during rust port, this should be moved to a post-parse validation step + for _, input := range task.Inputs { + if filepath.IsAbs(input) { + log.Printf("[WARNING] Using an absolute path in \"inputs\" (%v) will not work and will be an error in a future version", input) + } + } btd.TaskDefinition.Inputs = task.Inputs } @@ -551,11 +564,15 @@ func (c *TurboJSON) UnmarshalJSON(data []byte) error { envVarDependencies.Add(value) } + // TODO: In the rust port, warnings should be refactored to a post-parse validation step for _, value := range raw.GlobalDependencies { if strings.HasPrefix(value, envPipelineDelimiter) { log.Printf("[DEPRECATED] Declaring an environment variable in \"globalDependencies\" is deprecated, found %s. 
Use the \"globalEnv\" key or use `npx @turbo/codemod migrate-env-var-dependencies`.\n", value) envVarDependencies.Add(strings.TrimPrefix(value, envPipelineDelimiter)) } else { + if filepath.IsAbs(value) { + log.Printf("[WARNING] Using an absolute path in \"globalDependencies\" (%v) will not work and will be an error in a future version", value) + } globalFileDependencies.Add(value) } } diff --git a/cli/internal/fs/turbo_json_test.go b/cli/internal/fs/turbo_json_test.go index bd2565a307c69..94fd78eb2c15a 100644 --- a/cli/internal/fs/turbo_json_test.go +++ b/cli/internal/fs/turbo_json_test.go @@ -29,7 +29,7 @@ func assertIsSorted(t *testing.T, arr []string, msg string) { func Test_ReadTurboConfig(t *testing.T) { testDir := getTestDir(t, "correct") - turboJSON, turboJSONReadErr := ReadTurboConfig(testDir.UntypedJoin("turbo.json")) + turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) if turboJSONReadErr != nil { t.Fatalf("invalid parse: %#v", turboJSONReadErr) @@ -141,7 +141,7 @@ func Test_LoadTurboConfig_BothCorrectAndLegacy(t *testing.T) { func Test_ReadTurboConfig_InvalidEnvDeclarations1(t *testing.T) { testDir := getTestDir(t, "invalid-env-1") - _, turboJSONReadErr := ReadTurboConfig(testDir.UntypedJoin("turbo.json")) + _, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) expectedErrorMsg := "turbo.json: You specified \"$A\" in the \"env\" key. You should not prefix your environment variables with \"$\"" assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) @@ -149,21 +149,21 @@ func Test_ReadTurboConfig_InvalidEnvDeclarations1(t *testing.T) { func Test_ReadTurboConfig_InvalidEnvDeclarations2(t *testing.T) { testDir := getTestDir(t, "invalid-env-2") - _, turboJSONReadErr := ReadTurboConfig(testDir.UntypedJoin("turbo.json")) + _, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) expectedErrorMsg := "turbo.json: You specified \"$A\" in the \"env\" key. You should not prefix your environment variables with \"$\"" assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) } func Test_ReadTurboConfig_InvalidGlobalEnvDeclarations(t *testing.T) { testDir := getTestDir(t, "invalid-global-env") - _, turboJSONReadErr := ReadTurboConfig(testDir.UntypedJoin("turbo.json")) + _, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) expectedErrorMsg := "turbo.json: You specified \"$QUX\" in the \"env\" key. You should not prefix your environment variables with \"$\"" assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) } func Test_ReadTurboConfig_EnvDeclarations(t *testing.T) { testDir := getTestDir(t, "legacy-env") - turboJSON, turboJSONReadErr := ReadTurboConfig(testDir.UntypedJoin("turbo.json")) + turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) if turboJSONReadErr != nil { t.Fatalf("invalid parse: %#v", turboJSONReadErr) From 0ac37c0f2e5221b1f05c6bafebd2e02d13f744e1 Mon Sep 17 00:00:00 2001 From: OJ Kwon <1210596+kwonoj@users.noreply.github.com> Date: Mon, 13 Feb 2023 16:58:24 -0800 Subject: [PATCH 12/31] ci(workflow): adjust summary report, drain all logs correctly (#3681) Closes WEB-544. PR fixes minor issue to drain logs correctly. 
It also adjusts the summary so that fixed tests are no longer printed: the upstream tests are sometimes flaky, and intermittently displaying a test as `fixed` can cause confusion, so that part of the summary is disabled for now. We still display `new failed` tests, since false negatives occur relatively infrequently there, and we can try it for a while and see.
---
 .../actions/next-integration-stat/index.js | 72 ++++++++++++-------
 .../next-integration-stat/src/index.ts | 70 ++++++++++++------
 .github/workflows/nextjs-integration-test.yml | 1 +
 ...upload-nextjs-integration-test-results.yml | 2 +
 4 files changed, 96 insertions(+), 49 deletions(-)

diff --git a/.github/actions/next-integration-stat/index.js b/.github/actions/next-integration-stat/index.js
index a20e6828e97f9..d9f20d64fbb6c 100644
--- a/.github/actions/next-integration-stat/index.js
+++ b/.github/actions/next-integration-stat/index.js
@@ -16005,10 +16005,11 @@
         })
         .map((logs) => {
           var _a, _b, _c, _d;
-          let failedSplitLogs = logs.split(`failed to pass within`);
+          const failedSplitLogs = logs.split(`failed to pass within`);
+          let logLine = failedSplitLogs.shift();
           const ret = [];
-          while (!!failedSplitLogs && failedSplitLogs.length >= 1) {
-            let failedTest = failedSplitLogs.shift();
+          while (logLine) {
+            let failedTest = logLine;
             // Look for the failed test file name
             failedTest = (
               failedTest === null || failedTest === void 0
@@ -16044,6 +16045,7 @@
                 name: failedTest,
                 data: JSON.parse(testData),
               });
+              logLine = failedSplitLogs.shift();
             } catch (_) {
               console.log(`Failed to parse test data`);
             }
@@ -16323,15 +16325,30 @@
         )).data.tree;
         // If base is main, get the tree under `test-results/main`
         // Otherwise iterate over all the trees under `test-results` then find latest next.js release
-        let baseTree;
+        let testResultJsonTree;
         if (shouldDiffWithMain) {
           console.log("Trying to find latest test results from main branch");
-          baseTree = testResultsTree.find((tree) => tree.path === "main");
+          const baseTree = testResultsTree.find((tree) => tree.path === "main");
+          if (!baseTree || !baseTree.sha) {
+            console.log("There is no base to compare test results against");
+            return null;
+          }
+          console.log("Found base tree", baseTree);
+          // Now tree should point the list of .json for the actual test results
+          testResultJsonTree = (yield octokit.rest.git.getTree(
+            Object.assign(
+              Object.assign(
+                {},
+                _actions_github__WEBPACK_IMPORTED_MODULE_0__.context.repo
+              ),
+              { tree_sha: baseTree.sha }
+            )
+          )).data.tree;
         } else {
           console.log(
             "Trying to find latest test results from next.js release"
           );
-          baseTree = testResultsTree
+          const baseTree = testResultsTree
             .filter((tree) => tree.path !== "main")
             .reduce((acc, value) => {
               if (!acc) {
@@ -16339,22 +16356,14 @@
               }
               return semver.gt(value.path, acc.path) ? 
value : acc; }, null); + if (!baseTree || !baseTree.sha) { + console.log("There is no base to compare test results against"); + return null; + } + console.log("Found base tree", baseTree); + // If the results is for the release, no need to traverse down the tree + testResultJsonTree = [baseTree]; } - if (!baseTree || !baseTree.sha) { - console.log("There is no base to compare test results against"); - return null; - } - console.log("Found base tree", baseTree); - // Now tree should point the list of .json for the actual test results - const testResultJsonTree = (yield octokit.rest.git.getTree( - Object.assign( - Object.assign( - {}, - _actions_github__WEBPACK_IMPORTED_MODULE_0__.context.repo - ), - { tree_sha: baseTree.sha } - ) - )).data.tree; if (!testResultJsonTree) { console.log("There is no test results stored in the base yet"); return null; @@ -16549,11 +16558,15 @@ const newFailedTests = currentTestFailedNames.filter( (name) => !baseTestFailedNames.includes(name) ); - if (fixedTests.length > 0) { - ret += `\n:white_check_mark: **Fixed tests:**\n\n${fixedTests - .map((t) => (t.length > 5 ? `\t- ${t}` : t)) - .join(" \n")}`; - } + /* + //NOTE: upstream test can be flaky, so this can appear intermittently + //even if there aren't actual fix. To avoid confusion, do not display this + //for now. + if (fixedTests.length > 0) { + ret += `\n:white_check_mark: **Fixed tests:**\n\n${fixedTests + .map((t) => (t.length > 5 ? `\t- ${t}` : t)) + .join(" \n")}`; + }*/ if (newFailedTests.length > 0) { ret += `\n:x: **Newly failed tests:**\n\n${newFailedTests .map((t) => (t.length > 5 ? `\t- ${t}` : t)) @@ -16661,6 +16674,7 @@ shouldDiffWithMain ); const postCommentAsync = createCommentPostAsync(octokit, prNumber); + const failedTestLists = []; // Consturct a comment body to post test report with summary & full details. const comments = failedJobResults.result.reduce((acc, value, idx) => { var _a, _b, _c; @@ -16702,6 +16716,7 @@ groupedFails[ancestorKey].push(fail); } commentValues.push(`\`${failedTest}\``); + failedTestLists.push(failedTest); for (const group of Object.keys(groupedFails).sort()) { const fails = groupedFails[group]; commentValues.push(`\n`); @@ -16765,6 +16780,11 @@ if (!prNumber) { return; } + // Store the list of failed test paths to a file + fs.writeFileSync( + "./failed-test-path-list.json", + JSON.stringify(failedTestLists, null, 2) + ); if (failedJobResults.result.length === 0) { console.log("No failed test results found :tada:"); yield postCommentAsync( diff --git a/.github/actions/next-integration-stat/src/index.ts b/.github/actions/next-integration-stat/src/index.ts index d2dbdb5a4e4c4..b7d0b5515ceb0 100644 --- a/.github/actions/next-integration-stat/src/index.ts +++ b/.github/actions/next-integration-stat/src/index.ts @@ -177,11 +177,12 @@ function collectFailedTestResults( return true; }) .map((logs) => { - let failedSplitLogs = logs.split(`failed to pass within`); + const failedSplitLogs = logs.split(`failed to pass within`); + let logLine = failedSplitLogs.shift(); const ret = []; - while (!!failedSplitLogs && failedSplitLogs.length >= 1) { - let failedTest = failedSplitLogs.shift(); + while (logLine) { + let failedTest = logLine; // Look for the failed test file name failedTest = failedTest?.includes("test/") ? 
failedTest?.split("\n").pop()?.trim() @@ -201,6 +202,7 @@ function collectFailedTestResults( name: failedTest, data: JSON.parse(testData), }); + logLine = failedSplitLogs.shift(); } catch (_) { console.log(`Failed to parse test data`); } @@ -420,17 +422,32 @@ async function getTestResultDiffBase( // If base is main, get the tree under `test-results/main` // Otherwise iterate over all the trees under `test-results` then find latest next.js release - let baseTree: + let testResultJsonTree: | Awaited< ReturnType> - >["data"]["tree"][number] + >["data"]["tree"] | undefined; + if (shouldDiffWithMain) { console.log("Trying to find latest test results from main branch"); - baseTree = testResultsTree.find((tree) => tree.path === "main"); + const baseTree = testResultsTree.find((tree) => tree.path === "main"); + + if (!baseTree || !baseTree.sha) { + console.log("There is no base to compare test results against"); + return null; + } + console.log("Found base tree", baseTree); + + // Now tree should point the list of .json for the actual test results + testResultJsonTree = ( + await octokit.rest.git.getTree({ + ...context.repo, + tree_sha: baseTree.sha, + }) + ).data.tree; } else { console.log("Trying to find latest test results from next.js release"); - baseTree = testResultsTree + const baseTree = testResultsTree .filter((tree) => tree.path !== "main") .reduce((acc, value) => { if (!acc) { @@ -438,23 +455,17 @@ async function getTestResultDiffBase( } return semver.gt(value.path, acc.path) ? value : acc; - }, null as any as typeof baseTree); - } + }, null); - if (!baseTree || !baseTree.sha) { - console.log("There is no base to compare test results against"); - return null; - } - - console.log("Found base tree", baseTree); + if (!baseTree || !baseTree.sha) { + console.log("There is no base to compare test results against"); + return null; + } + console.log("Found base tree", baseTree); - // Now tree should point the list of .json for the actual test results - const testResultJsonTree = ( - await octokit.rest.git.getTree({ - ...context.repo, - tree_sha: baseTree.sha, - }) - ).data.tree; + // If the results is for the release, no need to traverse down the tree + testResultJsonTree = [baseTree]; + } if (!testResultJsonTree) { console.log("There is no test results stored in the base yet"); @@ -652,11 +663,15 @@ function getTestSummary( (name) => !baseTestFailedNames.includes(name) ); + /* + //NOTE: upstream test can be flaky, so this can appear intermittently + //even if there aren't actual fix. To avoid confusion, do not display this + //for now. if (fixedTests.length > 0) { ret += `\n:white_check_mark: **Fixed tests:**\n\n${fixedTests .map((t) => (t.length > 5 ? `\t- ${t}` : t)) .join(" \n")}`; - } + }*/ if (newFailedTests.length > 0) { ret += `\n:x: **Newly failed tests:**\n\n${newFailedTests @@ -773,6 +788,8 @@ async function run() { const postCommentAsync = createCommentPostAsync(octokit, prNumber); + const failedTestLists = []; + // Consturct a comment body to post test report with summary & full details. 
const comments = failedJobResults.result.reduce((acc, value, idx) => { const { name: failedTest, data: testData } = value; @@ -797,6 +814,7 @@ async function run() { } commentValues.push(`\`${failedTest}\``); + failedTestLists.push(failedTest); for (const group of Object.keys(groupedFails).sort()) { const fails = groupedFails[group]; @@ -868,6 +886,12 @@ async function run() { return; } + // Store the list of failed test paths to a file + fs.writeFileSync( + "./failed-test-path-list.json", + JSON.stringify(failedTestLists, null, 2) + ); + if (failedJobResults.result.length === 0) { console.log("No failed test results found :tada:"); await postCommentAsync( diff --git a/.github/workflows/nextjs-integration-test.yml b/.github/workflows/nextjs-integration-test.yml index 3a4ec9a9f5918..3859d13596ff4 100644 --- a/.github/workflows/nextjs-integration-test.yml +++ b/.github/workflows/nextjs-integration-test.yml @@ -230,4 +230,5 @@ jobs: name: test-results path: | nextjs-test-results.json + failed-test-path-list.json slack-payload.json diff --git a/.github/workflows/upload-nextjs-integration-test-results.yml b/.github/workflows/upload-nextjs-integration-test-results.yml index 6bfced7de1bef..6e5c7f3568918 100644 --- a/.github/workflows/upload-nextjs-integration-test-results.yml +++ b/.github/workflows/upload-nextjs-integration-test-results.yml @@ -35,6 +35,7 @@ jobs: run: | ls -al ./test-results/main cat ./test-results/main/nextjs-test-results.json + cat ./test-results/main/failed-test-path-list.json echo "NEXTJS_VERSION=$(cat ./test-results/main/nextjs-test-results.json | jq .nextjsVersion | tr -d '"' | cut -d ' ' -f2)" >> $GITHUB_ENV echo "SHA_SHORT=$(git rev-parse --short HEAD)" >> $GITHUB_ENV echo "RESULT_SUBPATH=$(if ${{ inputs.is_main_branch }}; then echo 'main'; else echo ${{ env.NEXTJS_VERSION }}; fi)" >> $GITHUB_ENV @@ -46,6 +47,7 @@ jobs: echo "Configured test result subpath for ${{ env.RESULT_SUBPATH }} / ${{ env.NEXTJS_VERSION }} / ${{ env.SHA_SHORT }}" mkdir -p test-results/${{ env.RESULT_SUBPATH }} mv test-results/main/nextjs-test-results.json test-results/${{ env.RESULT_SUBPATH }}/$(date '+%Y%m%d%H%M')-${{ env.NEXTJS_VERSION }}-${{ env.SHA_SHORT }}.json + mv -f test-results/main/failed-test-path-list.json test-results/${{ env.RESULT_SUBPATH }}/failed-test-path-list.json ls -al ./test-results ls -al ./test-results/${{ env.RESULT_SUBPATH }} From 7f72e4cceca586577e0bae046af63bbc126d6dca Mon Sep 17 00:00:00 2001 From: Anthony Shew Date: Mon, 13 Feb 2023 18:12:00 -0800 Subject: [PATCH 13/31] Update `$schema` for `turbo.json`. (#3789) The in-editor recommendations for `turbo.json` that come from `$schema` seem to have fallen behind the API in a few spots. I also took a moment to update some language for improved clarity. I also had to shave a few yaks to work on this: - Updated `docs#dev` task to ensure it generates the `schema.json` when we run `pnpm run docs` - Improve deprecation note for environment variables - Added `errors-only` to `outputMode` doc --- .../repo/docs/reference/configuration.mdx | 9 +- packages/turbo-types/src/types/config.ts | 119 ++++++++++++------ turbo.json | 3 + 3 files changed, 91 insertions(+), 40 deletions(-) diff --git a/docs/pages/repo/docs/reference/configuration.mdx b/docs/pages/repo/docs/reference/configuration.mdx index a4a8621b706b4..7564ae717f163 100644 --- a/docs/pages/repo/docs/reference/configuration.mdx +++ b/docs/pages/repo/docs/reference/configuration.mdx @@ -5,6 +5,7 @@ description: Learn how to configure Turborepo through `turbo.json`. 
import Callout from "../../../../components/Callout"; import OutputModeTable from "../../../../components/output-mode-table.mdx"; +import Link from 'next/link' # Configuration Options (`turbo.json`) @@ -94,11 +95,9 @@ Prefixing an item in `dependsOn` with a `^` tells `turbo` that this pipeline tas Items in `dependsOn` without `^` prefix, express the relationships between tasks at the workspace level (e.g. "a workspace's `test` and `lint` commands depend on `build` being completed first"). -Prefixing an item in `dependsOn` with a `$` tells `turbo` that this pipeline task depends on the value of that environment variable. - - Using `$` to declare environment variables in the `dependsOn` config is - deprecated. Use the `env` key instead. + As of version 1.5, using `$` to declare environment variables in the `dependsOn` config is + deprecated. Use the `env` key instead. **Example** @@ -286,7 +285,7 @@ Specifying `[]` will cause the task to be rerun when any file in the workspace c ### `outputMode` -`type: "full" | "hash-only" | "new-only" | "none"` +`type: "full" | "hash-only" | "new-only" | "errors-only" | "none"` Set type of output logging. diff --git a/packages/turbo-types/src/types/config.ts b/packages/turbo-types/src/types/config.ts index 59497e3373b1f..8f7a7608124d9 100644 --- a/packages/turbo-types/src/types/config.ts +++ b/packages/turbo-types/src/types/config.ts @@ -4,24 +4,31 @@ export interface Schema { $schema?: string; /** - * A list of globs for implicit global hash dependencies. + * A list of globs to include in the set of implicit global hash dependencies. * * The contents of these files will be included in the global hashing * algorithm and affect the hashes of all tasks. * - * This is useful for busting the cache based on .env files (not in Git), - * or any root level file that impacts package tasks (but are not represented - * in the traditional dependency graph + * This is useful for busting the cache based on: * - * (e.g. a root tsconfig.json, jest.config.js, .eslintrc, etc.)). + * - .env files (not in Git) + * + * - any root level file that impacts package tasks + * that are not represented in the traditional dependency graph + * (e.g. a root tsconfig.json, jest.config.js, .eslintrc, etc.) + * + * Documentation: https://turbo.build/repo/docs/reference/configuration#globaldependencies * * @default [] */ globalDependencies?: string[]; /** - * A list of environment variables, (e.g. GITHUB_TOKEN), - * for implicit global hash dependencies. + * A list of environment variables for implicit global hash dependencies. + * + * The variables included in this list will affect all task hashes. + * + * Documentation: https://turbo.build/repo/docs/reference/configuration#globalenv * * @default [] */ @@ -29,22 +36,26 @@ export interface Schema { /** * An object representing the task dependency graph of your project. turbo interprets - * these conventions to properly schedule, execute, and cache the outputs of tasks in + * these conventions to schedule, execute, and cache the outputs of tasks in * your project. * + * Documentation: https://turbo.build/repo/docs/reference/configuration#pipeline + * * @default {} */ pipeline: { /** - * The name of a task that can be executed by turbo run. If turbo finds a workspace + * The name of a task that can be executed by turbo. If turbo finds a workspace * package with a package.json scripts object with a matching key, it will apply the - * pipeline task configuration to that npm script during execution. 
This allows you to - * use pipeline to set conventions across your entire Turborepo. + * pipeline task configuration to that npm script during execution. */ [script: string]: Pipeline; }; /** - * Configuration options that control how turbo interfaces with the remote Cache. + * Configuration options that control how turbo interfaces with the remote cache. + * + * Documentation: https://turbo.build/repo/docs/core-concepts/remote-caching + * * @default {} */ remoteCache?: RemoteCache; @@ -52,23 +63,32 @@ export interface Schema { export interface Pipeline { /** - * The list of tasks and environment variables that this task depends on. + * The list of tasks that this task depends on. * - * Prefixing an item in dependsOn with a ^ tells turbo that this pipeline task depends - * on the package's topological dependencies completing the task with the ^ prefix first - * (e.g. "a package's build tasks should only run once all of its dependencies and - * devDependencies have completed their own build commands"). + * Prefixing an item in dependsOn with a ^ prefix tells turbo that this task depends + * on the package's topological dependencies completing the task first. + * (e.g. "A package's build tasks should only run once all of its workspace dependencies + * have completed their own build commands.") * - * Items in dependsOn without ^ prefix, express the relationships between tasks at the - * package level (e.g. "a package's test and lint commands depend on build being - * completed first"). + * Items in dependsOn without a ^ prefix express the relationships between tasks within the + * same package (e.g. "A package's test and lint commands depend on its own build being + * completed first.") + * + * Documentation: https://turbo.build/repo/docs/reference/configuration#dependson * * @default [] */ dependsOn?: string[]; /** - * A list of environment variables, **not** prefixed with $ (e.g. $GITHUB_TOKEN), that this task depends on. + * A list of environment variables that this task depends on. + * + * Note: If you are migrating from a turbo version 1.5 or below, + * you may be used to prefixing your variables with a $. + * You no longer need to use the $ prefix. + * (e.g. $GITHUB_TOKEN -> GITHUB_TOKEN) + * + * Documentation: https://turbo.build/repo/docs/reference/configuration#env * * @default [] */ @@ -79,15 +99,20 @@ export interface Pipeline { * * Turborepo captures task logs for all tasks. This enables us to cache tasks whose runs * produce no artifacts other than logs (such as linters). Logs are always treated as a - * cacheable artifact, and never need to be specified. + * cacheable artifact and never need to be specified. + * + * Documentation: https://turbo.build/repo/docs/reference/configuration#outputs * * @default [] */ outputs?: string[]; /** - * Whether or not to cache the task outputs. Setting cache to false is useful for daemon - * or long-running "watch" or development mode tasks that you don't want to cache. + * Whether or not to cache the outputs of the task. + * + * Setting cache to false is useful for long-running "watch" or development mode tasks. + * + * Documentation: https://turbo.build/repo/docs/reference/configuration#cache * * @default true */ @@ -96,29 +121,46 @@ export interface Pipeline { /** * The set of glob patterns to consider as inputs to this task. * - * Changes to files covered by these globs will cause a cache miss and force - * the task to rerun. Changes to files in the package not covered by these globs - * will not cause a cache miss. 
+ * Changes to files covered by these globs will cause a cache miss and
+ * the task will be rerun.
+ *
+ * If a file has been changed that is **not** included in the set of globs,
+ * it will not cause a cache miss.
    *
    * If omitted or empty, all files in the package are considered as inputs.
+   *
+   * Documentation: https://turbo.build/repo/docs/reference/configuration#inputs
+   *
    * @default []
    */
   inputs?: string[];

   /**
-   * The style of output for this task. Use "full" to display the entire output of
-   * the task. Use "hash-only" to show only the computed task hashes. Use "new-only" to
-   * show the full output of cache misses and the computed hashes for cache hits. Use
-   * "none" to hide task output.
+   * Output mode for the task.
+   *
+   * "full": Displays all output
+   *
+   * "hash-only": Show only the hashes of the tasks
+   *
+   * "new-only": Only show output from cache misses
+   *
+   * "errors-only": Only show output from task failures
+   *
+   * "none": Hides all task output
+   *
+   * Documentation: https://turbo.build/repo/docs/reference/command-line-reference#--output-logs
    *
    * @default full
    */
-  outputMode?: string;
+  outputMode?: OutputMode;

   /**
-   * Indicates whether the task exits or not. Setting `persistent` to `true`, tells
-   * Turbo that this is a long-running task. Turbo will ensure that other tasks do not
-   * depend on it.
+   * Indicates whether the task exits or not. Setting `persistent` to `true` tells
+   * turbo that this is a long-running task and will ensure that other tasks
+   * cannot depend on it.
+   *
+   * Documentation: https://turbo.build/repo/docs/reference/configuration#persistent
+   *
    * @default false
    */
   persistent?: boolean;
@@ -135,3 +177,10 @@ export interface RemoteCache {
    */
   signature?: boolean;
 }
+
+export type OutputMode =
+  | "full"
+  | "hash-only"
+  | "new-only"
+  | "errors-only"
+  | "none";
diff --git a/turbo.json b/turbo.json
index 4a62dc8269165..789de19fc4419 100644
--- a/turbo.json
+++ b/turbo.json
@@ -15,6 +15,9 @@
     "dev": {
       "cache": false
     },
+    "docs#dev": {
+      "dependsOn": ["schema"]
+    },
     "//#build:ts": {
       "outputs": ["packages/**/dist"],
       "dependsOn": []

From 3fbb505fb8e2e87164c992dfbc581ec3ba63be1e Mon Sep 17 00:00:00 2001
From: LongYinan
Date: Tue, 14 Feb 2023 11:23:54 +0800
Subject: [PATCH 14/31] Split swc_core/allocator_node out of __swc_core_binding_napi (#3772)

We can only use `turbo_malloc` in `next.js` without `swc_node_base` included

---
 crates/next-binding/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/next-binding/Cargo.toml b/crates/next-binding/Cargo.toml
index 5c781a5c4a395..fccbdf2a886cd 100644
--- a/crates/next-binding/Cargo.toml
+++ b/crates/next-binding/Cargo.toml
@@ -32,7 +32,6 @@ __swc_core_next_core = [

 __swc_core_binding_napi = [
   "__swc_core",
-  "swc_core/allocator_node",
   "swc_core/base_concurrent",
   "swc_core/base_node",
   "swc_core/common_concurrent",
@@ -53,6 +52,7 @@ __swc_core_binding_napi = [
   "swc_core/ecma_visit",
 ]
 __swc_core_binding_napi_plugin = ["swc_core/plugin_transform_host_native"]
+__swc_core_binding_napi_allocator = ["swc_core/allocator_node"]

 __swc_core_binding_wasm = [
   "__swc_core",

From 811f03ce5a4c2e2b47b8044b1d43b8f4671b325f Mon Sep 17 00:00:00 2001
From: Hana
Date: Tue, 14 Feb 2023 15:07:54 +0800
Subject: [PATCH 15/31] perf: skip visiting if it's out of range (#3762)

Visiting is not necessary for those sub-trees that are already out of range. 
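As background for both changes described below, the visitors are kept sorted by their `ast_path`, so every entry matching a given `kind` at the current index forms one contiguous range that can be located with two binary searches. A minimal sketch of that invariant (illustrative only; the stand-in types here are simplified and this is not the actual `find_range` signature from the diff below):

```rust
// Illustrative sketch: `visitors` is sorted by path, so all entries whose
// path component at `index` equals `kind` form one contiguous range.
fn matching_range(
    // (path, visitor id): simplified stand-in for the real visitor pairs.
    visitors: &[(Vec<u8>, usize)],
    kind: u8,
    index: usize,
) -> Option<&[(Vec<u8>, usize)]> {
    // Fast path: `kind` lies entirely outside the sorted range, so the whole
    // sub-tree can be skipped without any binary search.
    if visitors.first()?.0[index] > kind || visitors.last()?.0[index] < kind {
        return None;
    }
    // Two binary searches bound the contiguous range of matching entries.
    let start = visitors.partition_point(|(path, _)| path[index] < kind);
    let end = visitors.partition_point(|(path, _)| path[index] <= kind);
    (start < end).then(|| &visitors[start..end])
}
```

Both code changes below are refinements of this range computation that bail out early instead of visiting sub-trees that can never match.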
## The first code change

For example, suppose we have visitors with `ast_path`s like the following, where the current index is `1` and the current path is `Path::A`:

```rust
enum Path { A, B, C, D, E }

visitors: vec![(vec![Path::B, Path::C], visitor), (vec![Path::B, Path::D], visitor)]
```

Since sub-trees that do not match have already been skipped, it is impossible to end up with something like the following when we are already under `Path::B` and looking for the next path (`ast_path: [Path::B]`):

```rust
visitors: vec![(vec![Path::B, Path::C], visitor), (vec![Path::C, Path::A], visitor)]
```

Combining all of the above, we can skip any further visits whose `kind` is greater than the `kind` at the current index. In the first example, the underlying path is `Path::A`, so we can skip the sub-trees `Path::C` and `Path::D`, as there is nothing the visitor subscribers could do with them.

## The second code change

If the starting point of the partition result is already greater than the underlying `kind`, there is no need to visit the rest either.

---
 .../turbopack-ecmascript/src/path_visitor.rs | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/crates/turbopack-ecmascript/src/path_visitor.rs b/crates/turbopack-ecmascript/src/path_visitor.rs
index 54a2b1d446a3a..895029cbc5092 100644
--- a/crates/turbopack-ecmascript/src/path_visitor.rs
+++ b/crates/turbopack-ecmascript/src/path_visitor.rs
@@ -29,15 +29,29 @@ fn find_range<'a, 'b>(
     index: usize,
 ) -> Option<&'b [(&'a AstPath, &'a dyn VisitorFactory)]> {
     // Precondition: visitors is never empty
-    let start = if visitors.first().unwrap().0[index] >= *kind {
-        // Fast path: It's likely that the whole range is selected
+    if visitors.first().unwrap().0[index] > *kind || visitors.last().unwrap().0[index] < *kind {
+        // Fast path: If ast path of the first visitor is already out of range, then we
+        // can skip the whole visit.
+        return None;
+    }
+
+    let start = if visitors.first().unwrap().0[index] == *kind {
+        // Fast path: It looks like the whole range is selected
         0
     } else {
         visitors.partition_point(|(path, _)| path[index] < *kind)
     };
+
     if start >= visitors.len() {
         return None;
     }
+
+    if visitors[start].0[index] > *kind {
+        // Fast path: If the starting point is greater than the given kind, it's
+        // meaningless to visit later.
+        return None;
+    }
+
     let end = if visitors.last().unwrap().0[index] == *kind {
         // Fast path: It's likely that the whole range is selected
         visitors.len()

From faa10dc79dab5b2bc22827c3b809596175e8b7ef Mon Sep 17 00:00:00 2001
From: Alex Kirszenberg
Date: Tue, 14 Feb 2023 13:33:07 +0100
Subject: [PATCH 16/31] Fix the router when Next.js returns no result (#3741)

I changed the return type of the Next.js router in https://github.com/vercel/next.js/pull/45668 to account for cases where the router does and does not find a matching rewrite. 
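As a rough sketch of the new contract (simplified; the actual `RouterResult` in the diff below also carries headers for rewrites and a full middleware variant), "no result" is now an explicit variant that callers handle by falling back to the inner content source rather than treating it as an error:

```rust
// Simplified sketch of the tri-state router result.
enum RouterResult {
    Rewrite { url: String },
    None,
    Error,
}

fn serve(result: RouterResult, original_path: &str) -> String {
    match result {
        // The router found a matching rewrite: serve the rewritten URL.
        RouterResult::Rewrite { url } => format!("serving rewrite {url}"),
        // No matching rewrite: pass the request through to the inner source.
        RouterResult::None => format!("serving {original_path} from the inner source"),
        // Routing itself failed: surface the error.
        RouterResult::Error => format!("failed to route {original_path}"),
    }
}
```

The same three-way split shows up on the JS side as a tagged union on `type`, which is what the updated `router.ts` below switches over.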
--- crates/next-core/js/package.json | 2 +- crates/next-core/js/src/entry/router.ts | 38 ++-- crates/next-core/src/router.rs | 3 + crates/next-core/src/router_source.rs | 3 + .../next/404/navigate/input/pages/index.tsx | 47 +++-- crates/next-dev-tests/tests/package.json | 2 +- .../benches/bundlers/turbopack/mod.rs | 2 +- pnpm-lock.yaml | 180 +++++++++--------- 8 files changed, 152 insertions(+), 125 deletions(-) diff --git a/crates/next-core/js/package.json b/crates/next-core/js/package.json index 18de24a3193ee..64fbf08563984 100644 --- a/crates/next-core/js/package.json +++ b/crates/next-core/js/package.json @@ -12,7 +12,7 @@ "@vercel/turbopack-runtime": "latest", "anser": "^2.1.1", "css.escape": "^1.5.1", - "next": "13.1.7-canary.8", + "next": "13.1.7-canary.12", "platform": "1.3.6", "react-dom": "^18.2.0", "react": "^18.2.0", diff --git a/crates/next-core/js/src/entry/router.ts b/crates/next-core/js/src/entry/router.ts index 4ea12f0d1cd51..6f36a5c870ff5 100644 --- a/crates/next-core/js/src/entry/router.ts +++ b/crates/next-core/js/src/entry/router.ts @@ -16,10 +16,15 @@ type RouterRequest = { rawQuery: string; }; -type RouteResult = { - url: string; - headers: Record; -}; +type RouteResult = + | { + type: "rewrite"; + url: string; + headers: Record; + } + | { + type: "none"; + }; type IpcOutgoingMessage = { type: "jsonValue"; @@ -36,7 +41,8 @@ type MessageData = | { type: "rewrite"; data: RewriteResponse; - }; + } + | { type: "none" }; type RewriteResponse = { url: string; @@ -136,13 +142,21 @@ async function handleClientResponse( } const data = JSON.parse(buffer) as RouteResult; - return { - type: "rewrite", - data: { - url: data.url, - headers: Object.entries(data.headers).flat(), - }, - }; + + switch (data.type) { + case "rewrite": + return { + type: "rewrite", + data: { + url: data.url, + headers: Object.entries(data.headers).flat(), + }, + }; + case "none": + return { + type: "none", + }; + } } const responseHeaders: MiddlewareHeadersResponse = { diff --git a/crates/next-core/src/router.rs b/crates/next-core/src/router.rs index 4c200f9ae0b36..4a735f644e49d 100644 --- a/crates/next-core/src/router.rs +++ b/crates/next-core/src/router.rs @@ -92,6 +92,7 @@ enum RouterIncomingMessage { FullMiddleware { data: FullMiddlewareResponse, }, + None, Error(StructuredError), } @@ -100,6 +101,7 @@ enum RouterIncomingMessage { pub enum RouterResult { Rewrite(RewriteResponse), FullMiddleware(FullMiddlewareResponse), + None, Error, } @@ -108,6 +110,7 @@ impl From for RouterResult { match value { RouterIncomingMessage::Rewrite { data } => Self::Rewrite(data), RouterIncomingMessage::FullMiddleware { data } => Self::FullMiddleware(data), + RouterIncomingMessage::None => Self::None, _ => Self::Error, } } diff --git a/crates/next-core/src/router_source.rs b/crates/next-core/src/router_source.rs index ae2d5dfe94c8d..a5e2295ad89f1 100644 --- a/crates/next-core/src/router_source.rs +++ b/crates/next-core/src/router_source.rs @@ -90,6 +90,9 @@ impl ContentSource for NextRouterContentSource { this.inner .get(path, Value::new(ContentSourceData::default())) } + RouterResult::None => this + .inner + .get(path, Value::new(ContentSourceData::default())), RouterResult::Rewrite(data) => { // TODO: We can't set response headers on the returned content. 
ContentSourceResultVc::exact( diff --git a/crates/next-dev-tests/tests/integration/next/404/navigate/input/pages/index.tsx b/crates/next-dev-tests/tests/integration/next/404/navigate/input/pages/index.tsx index adfc06d2bbfd5..9ebf461c3f609 100644 --- a/crates/next-dev-tests/tests/integration/next/404/navigate/input/pages/index.tsx +++ b/crates/next-dev-tests/tests/integration/next/404/navigate/input/pages/index.tsx @@ -18,27 +18,38 @@ export default function Page() { type Harness = typeof import("@turbo/pack-test-harness"); function runTests(harness: Harness, iframe: HTMLIFrameElement) { - it("returns a 404 status code", async () => { - const res = await fetch("/not-found"); - expect(res.status).toBe(404); - }); + // These tests requires a longer timeout because we're rendering the 404 page as well. + const TIMEOUT = 20000; + + it( + "returns a 404 status code", + async () => { + const res = await fetch("/not-found"); + expect(res.status).toBe(404); + }, + TIMEOUT + ); - it("navigates to the 404 page", async () => { - await harness.waitForHydration(iframe, "/link"); + it( + "navigates to the 404 page", + async () => { + await harness.waitForHydration(iframe, "/link"); - const link = iframe.contentDocument!.querySelector("a[data-test-link]"); - expect(link).not.toBeNull(); - expect(link!).toBeInstanceOf( - (iframe.contentWindow as any).HTMLAnchorElement - ); - expect(link!.textContent).toBe("Not found"); + const link = iframe.contentDocument!.querySelector("a[data-test-link]"); + expect(link).not.toBeNull(); + expect(link!).toBeInstanceOf( + (iframe.contentWindow as any).HTMLAnchorElement + ); + expect(link!.textContent).toBe("Not found"); - (link as HTMLAnchorElement).click(); + (link as HTMLAnchorElement).click(); - await harness.waitForHydration(iframe, "/not-found"); + await harness.waitForHydration(iframe, "/not-found"); - const error = iframe.contentDocument!.querySelector("[data-test-error]"); - expect(error).not.toBeNull(); - expect(error!.textContent).toBe("static"); - }); + const error = iframe.contentDocument!.querySelector("[data-test-error]"); + expect(error).not.toBeNull(); + expect(error!.textContent).toBe("static"); + }, + TIMEOUT + ); } diff --git a/crates/next-dev-tests/tests/package.json b/crates/next-dev-tests/tests/package.json index 8e106c2584919..5a259d56d7a7d 100644 --- a/crates/next-dev-tests/tests/package.json +++ b/crates/next-dev-tests/tests/package.json @@ -6,7 +6,7 @@ "@turbo/pack-test-harness": "file:../test-harness", "autoprefixer": "^10.4.13", "loader-runner": "^4.3.0", - "next": "13.1.7-canary.8", + "next": "13.1.7-canary.12", "postcss": "^8.4.20", "react": "^18.2.0", "react-dom": "^18.2.0", diff --git a/crates/next-dev/benches/bundlers/turbopack/mod.rs b/crates/next-dev/benches/bundlers/turbopack/mod.rs index 1d92e1e2f4375..f9606b8208085 100644 --- a/crates/next-dev/benches/bundlers/turbopack/mod.rs +++ b/crates/next-dev/benches/bundlers/turbopack/mod.rs @@ -49,7 +49,7 @@ impl Bundler for Turbopack { npm::install( install_dir, &[ - NpmPackage::new("next", "13.1.7-canary.8"), + NpmPackage::new("next", "13.1.7-canary.12"), // Dependency on this is inserted by swc's preset_env NpmPackage::new("@swc/helpers", "^0.4.11"), ], diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b4fdc35498f36..c3751c237b39e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -98,7 +98,7 @@ importers: anser: ^2.1.1 css.escape: ^1.5.1 find-up: ^6.3.0 - next: 13.1.7-canary.8 + next: 13.1.7-canary.12 platform: 1.3.6 react: ^18.2.0 react-dom: ^18.2.0 @@ -109,7 +109,7 @@ importers: 
'@vercel/turbopack-runtime': link:../../turbopack-ecmascript/js anser: 2.1.1 css.escape: 1.5.1 - next: 13.1.7-canary.8_biqbaboplfbrettd7655fr4n2y + next: 13.1.7-canary.12_biqbaboplfbrettd7655fr4n2y platform: 1.3.6 react: 18.2.0 react-dom: 18.2.0_react@18.2.0 @@ -140,7 +140,7 @@ importers: '@types/jest': 29.4.0 autoprefixer: ^10.4.13 loader-runner: ^4.3.0 - next: 13.1.7-canary.8 + next: 13.1.7-canary.12 postcss: ^8.4.20 react: ^18.2.0 react-dom: ^18.2.0 @@ -152,7 +152,7 @@ importers: '@types/jest': 29.4.0 autoprefixer: 10.4.13_postcss@8.4.20 loader-runner: 4.3.0 - next: 13.1.7-canary.8_pjwopsidmaokadturxaafygjp4 + next: 13.1.7-canary.12_pjwopsidmaokadturxaafygjp4 postcss: 8.4.20 react: 18.2.0 react-dom: 18.2.0_react@18.2.0 @@ -1083,7 +1083,7 @@ packages: engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 chalk: 4.1.2 jest-message-util: 27.5.1 jest-util: 27.5.1 @@ -1095,7 +1095,7 @@ packages: engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: '@jest/types': 29.4.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 chalk: 4.1.2 jest-message-util: 29.4.1 jest-util: 29.4.1 @@ -1152,7 +1152,7 @@ packages: dependencies: '@jest/fake-timers': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 jest-mock: 27.5.1 dev: true @@ -1186,7 +1186,7 @@ packages: dependencies: '@jest/types': 27.5.1 '@sinonjs/fake-timers': 8.1.0 - '@types/node': 18.13.0 + '@types/node': 16.11.56 jest-message-util: 27.5.1 jest-mock: 27.5.1 jest-util: 27.5.1 @@ -1198,7 +1198,7 @@ packages: dependencies: '@jest/types': 29.4.1 '@sinonjs/fake-timers': 10.0.2 - '@types/node': 18.13.0 + '@types/node': 16.11.56 jest-message-util: 29.4.1 jest-mock: 29.4.1 jest-util: 29.4.1 @@ -1237,7 +1237,7 @@ packages: '@jest/test-result': 27.5.1 '@jest/transform': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 chalk: 4.1.2 collect-v8-coverage: 1.0.1 exit: 0.1.2 @@ -1390,7 +1390,7 @@ packages: dependencies: '@types/istanbul-lib-coverage': 2.0.4 '@types/istanbul-reports': 3.0.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 '@types/yargs': 16.0.4 chalk: 4.1.2 dev: true @@ -1658,8 +1658,8 @@ packages: /@next/env/13.0.6: resolution: {integrity: sha512-yceT6DCHKqPRS1cAm8DHvDvK74DLIkDQdm5iV+GnIts8h0QbdHvkUIkdOvQoOODgpr6018skbmSQp12z5OWIQQ==} - /@next/env/13.1.7-canary.8: - resolution: {integrity: sha512-+KOlUQXcfBQOKkTm3O5IAq4NZQW6U7WmqsR41GT9ZLwcYKHxAngCTD7umF8zk2NTBjwHQ8KHifJyCaAZVWy3Mg==} + /@next/env/13.1.7-canary.12: + resolution: {integrity: sha512-jHnNukl+IQ11/M8ZsujZZ2cKn5Qd3VEnCml0ORyB18HUgSPYtA+4kHJ0j9p8aXXeoG1+o2zXSVNfpNUHHJZ3XA==} /@next/eslint-plugin-next/12.3.1: resolution: {integrity: sha512-sw+lTf6r6P0j+g/n9y4qdWWI2syPqZx+uc0+B/fRENqfR3KpSid6MIKqc9gNwGhJASazEQ5b3w8h4cAET213jw==} @@ -1702,8 +1702,8 @@ packages: requiresBuild: true optional: true - /@next/swc-android-arm-eabi/13.1.7-canary.8: - resolution: {integrity: sha512-aFvD1+260+hj7C0LeSEpJrbfN3N/nSV9o7+iP2XulxSfY3jvsvqEsPjKeMwUJWvvjFX6a9jpxQSxnGpaJkMjxw==} + /@next/swc-android-arm-eabi/13.1.7-canary.12: + resolution: {integrity: sha512-KzGFaiAzBqD8ilWZXF7fg/t/kCuKba/EW+Eo58yo/uC117xa5ay/F62Td/EQzeo1hy5m7DPBHQxQsMKfaRH78Q==} engines: {node: '>= 10'} cpu: [arm] os: [android] @@ -1727,8 +1727,8 @@ packages: requiresBuild: true optional: true - /@next/swc-android-arm64/13.1.7-canary.8: - resolution: {integrity: sha512-sZnx2W0/cK1xtq/XOlZXhqhotlyMvsQRwIhEkn7/7YH4L3/6vDT+od6m2c1VVK3i9WI63HLaK2kFzikwvI4Wjw==} + 
/@next/swc-android-arm64/13.1.7-canary.12: + resolution: {integrity: sha512-xcaPRD1saddk+//Um8dHNZx86H/DdP+FmwfJqhyIsIZrwCJn78k3I4ppTb93tFCdegmIXbG9eQV2icb/pODFlg==} engines: {node: '>= 10'} cpu: [arm64] os: [android] @@ -1752,8 +1752,8 @@ packages: requiresBuild: true optional: true - /@next/swc-darwin-arm64/13.1.7-canary.8: - resolution: {integrity: sha512-BQn6w5nepX3Xsp+WX2Ns6unIWPua7c654oveD6cjMYbuKX7CO9dI3UlVaokiP1wrEPIc1vwzaoU1Awiqhb54jA==} + /@next/swc-darwin-arm64/13.1.7-canary.12: + resolution: {integrity: sha512-mJZrAW+Pb1tzz7oDyFUJdjez2M7scGowDad+5F1FTe/mwhORwAeEpfcOVJJLhxpdrYUgybLneDjlvfxn4yr8SQ==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] @@ -1777,8 +1777,8 @@ packages: requiresBuild: true optional: true - /@next/swc-darwin-x64/13.1.7-canary.8: - resolution: {integrity: sha512-u7HIqv54KcZU2EdCwBrvL8w7mN3hSZClwrnb9ydCYNX+HFoET7BokYDf3KOn0zKo+BjdayPjYdn+s34dXuaqdw==} + /@next/swc-darwin-x64/13.1.7-canary.12: + resolution: {integrity: sha512-e0cfAL0ekU16Vugr1y31b/5FbqAZdl9x+31GK9P2yI2OgX/AKdyFtyLNtqPtd3Fx7ueaCxmyPprriUf84bu1Bw==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] @@ -1802,8 +1802,8 @@ packages: requiresBuild: true optional: true - /@next/swc-freebsd-x64/13.1.7-canary.8: - resolution: {integrity: sha512-gIqMbBylIKNi4UKNAsn18q3aTMYeMdS8AZOaLIphZ1/c2H/nxijtADA/V91ISqZChRgjkcFmlYyDEEfbEFEEaw==} + /@next/swc-freebsd-x64/13.1.7-canary.12: + resolution: {integrity: sha512-0qWa3f3vM0EDLhGYBPs9ukE/rTCZQGGf0bqIDXL117t8LBFmD3wHlf3kCq9WDL5m5hPKQMCwgH3tjn1FxWAWog==} engines: {node: '>= 10'} cpu: [x64] os: [freebsd] @@ -1827,8 +1827,8 @@ packages: requiresBuild: true optional: true - /@next/swc-linux-arm-gnueabihf/13.1.7-canary.8: - resolution: {integrity: sha512-oA/i/hvMnbQOeR8O6gCS4GYAp9BnPPQbaPxoxP25Jz7Cl12GGlIUp2c0nwaSjpd39xD6j2415cSsb2z7AUreSQ==} + /@next/swc-linux-arm-gnueabihf/13.1.7-canary.12: + resolution: {integrity: sha512-OTSL6QcjH7YVUhgO6KEnlrVAZ2lHghJd7zMd5RmCmzPI//Uf2XuUjjkbasyHjZuvDlohlCNC2Olfd0yxAtPn2Q==} engines: {node: '>= 10'} cpu: [arm] os: [linux] @@ -1852,8 +1852,8 @@ packages: requiresBuild: true optional: true - /@next/swc-linux-arm64-gnu/13.1.7-canary.8: - resolution: {integrity: sha512-C5s73Yr7VzbR6D0GgBgg/54PYNN7XFC6uB7jMOUrY6WZn/pEDRx8Tsyl0iQ/jfUGD3G1OK9x2PDaxnqyFIZwLw==} + /@next/swc-linux-arm64-gnu/13.1.7-canary.12: + resolution: {integrity: sha512-54t8uXD64Bw09E5qQ75dwEcFO1BWfKzLAdof9r7cPmi3h3O2PePqHPnJvhMaKLhL2cKg4+oxYLkcXrWJ9d8THw==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] @@ -1877,8 +1877,8 @@ packages: requiresBuild: true optional: true - /@next/swc-linux-arm64-musl/13.1.7-canary.8: - resolution: {integrity: sha512-jxxtsPYCoBE1OtjQa/KBfP/MXdKZHo2ofKtt8OOnSfL5dkPsXNGOqe37IyKmndkcXy7scY9Fw1BLbf7QlmY3YQ==} + /@next/swc-linux-arm64-musl/13.1.7-canary.12: + resolution: {integrity: sha512-jZIRA/O7mRvaEBUUo0h0XqeyycWszAbQjDF/enH/JUWx1UjQkGClkZJVeWKCYOxrG0NDpv9UIZLsYo0E7w29Ww==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] @@ -1902,8 +1902,8 @@ packages: requiresBuild: true optional: true - /@next/swc-linux-x64-gnu/13.1.7-canary.8: - resolution: {integrity: sha512-sVs89h8pUrpHa9Verlgb2BJ6eK0RFJtQEGs/SEmH4chhMvJJDvjwq4vciaef8PCDV1sBdvIQ46fJhUIroU13Nw==} + /@next/swc-linux-x64-gnu/13.1.7-canary.12: + resolution: {integrity: sha512-qytYxySVDgJNid1L1gcc9RskUfKpPQIds3wi/9BMpi9QtNK0ZGrU6IZs4IgKSX3IJ1qvxadfNCaWYkfEdPdxpQ==} engines: {node: '>= 10'} cpu: [x64] os: [linux] @@ -1927,8 +1927,8 @@ packages: requiresBuild: true optional: true - /@next/swc-linux-x64-musl/13.1.7-canary.8: - resolution: {integrity: 
sha512-cZX0O4GAF4asxI3iUen5uKuaH5HwIeCE7dnI5sVOZ5smKaLTLtlEsMyFS27YIKZC2o10KaekCEhCOYST6d3JSg==} + /@next/swc-linux-x64-musl/13.1.7-canary.12: + resolution: {integrity: sha512-opzV9KuJqe2CqeVRntq5i7pG+Dn62DNAIUI1/S0tj8E4yoqgeNHLH6dWzzn98B58bHSlkNQ0VvgFS9WXyILrpQ==} engines: {node: '>= 10'} cpu: [x64] os: [linux] @@ -1952,8 +1952,8 @@ packages: requiresBuild: true optional: true - /@next/swc-win32-arm64-msvc/13.1.7-canary.8: - resolution: {integrity: sha512-pdQ3WPsAo2DptXX3IkLr+O6S6AUiBmkT9uBd5CU53BxnuiydVYQUTnknxz1uFDVMdbj04aUQIV+10qE57MLLiw==} + /@next/swc-win32-arm64-msvc/13.1.7-canary.12: + resolution: {integrity: sha512-BA1ppRNM6qP44FXRr3fHMUunWFE7G3pG5FI1UvX3NmwUJDzM/ajptsmUIHpghnYjG4CJAozlbzU+a7cmLmanYg==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] @@ -1977,8 +1977,8 @@ packages: requiresBuild: true optional: true - /@next/swc-win32-ia32-msvc/13.1.7-canary.8: - resolution: {integrity: sha512-b973zs34sODsnNjkbZLpkVdQm2+F2oHSqjiawtYSsTZXDVtoAttJ1zwJqPCQaXoSPayAzdZGmxl5Z/atJJNO4Q==} + /@next/swc-win32-ia32-msvc/13.1.7-canary.12: + resolution: {integrity: sha512-lc+vA8oNG9xYa4TpRHckfsdigSPp0riJdOFl+sKKp8lux+/1v5potCAmwhCI59QoxKe9N01BXMY4YAk4zMCopQ==} engines: {node: '>= 10'} cpu: [ia32] os: [win32] @@ -2002,8 +2002,8 @@ packages: requiresBuild: true optional: true - /@next/swc-win32-x64-msvc/13.1.7-canary.8: - resolution: {integrity: sha512-qTim85I7GagGoTbpsvJSNs5SSZC3jF31Rwpn4PHPtwunDQ3pw7EhxpIKtKsITwlUO8xKh78NkcuYVNPKZI6hgA==} + /@next/swc-win32-x64-msvc/13.1.7-canary.12: + resolution: {integrity: sha512-skXx00Cw6kdsWFC475JPw6LRMnhW7fXtkyFtdNWioI9Kc/PDKtVQmiSoEGgF7hgkGOZsoCDbdvMg3TCComkSwQ==} engines: {node: '>= 10'} cpu: [x64] os: [win32] @@ -2465,7 +2465,7 @@ packages: /@types/graceful-fs/4.1.5: resolution: {integrity: sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==} dependencies: - '@types/node': 18.13.0 + '@types/node': 16.11.56 /@types/gradient-string/1.1.2: resolution: {integrity: sha512-zIet2KvHr2dkOCPI5ggQQ+WJVyfBSFaqK9sNelhgDjlE2K3Fu2muuPJwu5aKM3xoWuc3WXudVEMUwI1QWhykEQ==} @@ -2536,7 +2536,7 @@ packages: resolution: {integrity: sha512-Ny/PJkO6nxWAQnaet8q/oWz15lrfwvdvBpuY4treB0CSsBO1CG0fVuNLngR3m3bepQLd+E4c3Y3DlC2okpUvPw==} dependencies: '@types/fined': 1.1.3 - '@types/node': 18.11.11 + '@types/node': 16.11.56 dev: true /@types/loader-runner/2.2.4: @@ -2577,14 +2577,10 @@ packages: /@types/node/16.11.56: resolution: {integrity: sha512-aFcUkv7EddxxOa/9f74DINReQ/celqH8DiB3fRYgVDM2Xm5QJL8sl80QKuAnGvwAsMn+H3IFA6WCrQh1CY7m1A==} - dev: true /@types/node/18.11.11: resolution: {integrity: sha512-KJ021B1nlQUBLopzZmPBVuGU9un7WJd/W4ya7Ih02B4Uwky5Nja0yGYav2EfYIk0RR2Q9oVhf60S2XR1BCWJ2g==} - /@types/node/18.13.0: - resolution: {integrity: sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg==} - /@types/normalize-package-data/2.4.1: resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==} dev: false @@ -2643,7 +2639,7 @@ packages: /@types/through/0.0.30: resolution: {integrity: sha512-FvnCJljyxhPM3gkRgWmxmDZyAQSiBQQWLI0A0VFL0K7W1oRUrPJSqNO0NvTnLkBcotdlp3lKvaT0JrnyRDkzOg==} dependencies: - '@types/node': 18.13.0 + '@types/node': 16.11.56 dev: true /@types/tinycolor2/1.4.3: @@ -2677,7 +2673,7 @@ packages: /@types/whatwg-url/8.2.2: resolution: {integrity: sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA==} dependencies: - '@types/node': 18.13.0 + '@types/node': 16.11.56 
'@types/webidl-conversions': 7.0.0 dev: false @@ -7182,7 +7178,7 @@ packages: '@jest/environment': 27.5.1 '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 chalk: 4.1.2 co: 4.6.0 dedent: 0.7.0 @@ -7361,7 +7357,7 @@ packages: '@jest/environment': 27.5.1 '@jest/fake-timers': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 jest-mock: 27.5.1 jest-util: 27.5.1 jsdom: 16.7.0 @@ -7379,7 +7375,7 @@ packages: '@jest/environment': 27.5.1 '@jest/fake-timers': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 jest-mock: 27.5.1 jest-util: 27.5.1 dev: true @@ -7403,7 +7399,7 @@ packages: dependencies: '@jest/types': 27.5.1 '@types/graceful-fs': 4.1.5 - '@types/node': 18.13.0 + '@types/node': 16.11.56 anymatch: 3.1.2 fb-watchman: 2.0.1 graceful-fs: 4.2.10 @@ -7423,7 +7419,7 @@ packages: dependencies: '@jest/types': 29.4.1 '@types/graceful-fs': 4.1.5 - '@types/node': 18.13.0 + '@types/node': 16.11.56 anymatch: 3.1.2 fb-watchman: 2.0.1 graceful-fs: 4.2.10 @@ -7443,7 +7439,7 @@ packages: '@jest/source-map': 27.5.1 '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 chalk: 4.1.2 co: 4.6.0 expect: 27.5.1 @@ -7545,7 +7541,7 @@ packages: engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 dev: true /jest-mock/29.4.1: @@ -7553,7 +7549,7 @@ packages: engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: '@jest/types': 29.4.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 jest-util: 29.4.1 /jest-pnp-resolver/1.2.2_jest-resolve@27.5.1: @@ -7642,7 +7638,7 @@ packages: '@jest/test-result': 27.5.1 '@jest/transform': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 chalk: 4.1.2 emittery: 0.8.1 graceful-fs: 4.2.10 @@ -7729,7 +7725,7 @@ packages: resolution: {integrity: sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: - '@types/node': 18.13.0 + '@types/node': 16.11.56 graceful-fs: 4.2.10 dev: true @@ -7846,7 +7842,7 @@ packages: dependencies: '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 18.13.0 + '@types/node': 16.11.56 ansi-escapes: 4.3.2 chalk: 4.1.2 jest-util: 27.5.1 @@ -7857,7 +7853,7 @@ packages: resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==} engines: {node: '>= 10.13.0'} dependencies: - '@types/node': 18.13.0 + '@types/node': 16.11.56 merge-stream: 2.0.0 supports-color: 8.1.1 dev: true @@ -7866,7 +7862,7 @@ packages: resolution: {integrity: sha512-O9doU/S1EBe+yp/mstQ0VpPwpv0Clgn68TkNwGxL6/usX/KUW9Arnn4ag8C3jc6qHcXznhsT5Na1liYzAsuAbQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@types/node': 18.13.0 + '@types/node': 16.11.56 jest-util: 29.4.1 merge-stream: 2.0.0 supports-color: 8.1.1 @@ -9281,8 +9277,8 @@ packages: - '@babel/core' - babel-plugin-macros - /next/13.1.7-canary.8_biqbaboplfbrettd7655fr4n2y: - resolution: {integrity: sha512-JqmeYZBiWxFJTmTyrbLPdl+YaeDKz0zI9XMLP21fua428JmH/30CFlmoHnJYjrDyYWzDX1MSLRDlc7LL13crLg==} + /next/13.1.7-canary.12_biqbaboplfbrettd7655fr4n2y: + resolution: {integrity: sha512-Tr3ZESTZckJKsq5oa4P06RhjlpTCvpgMa0lGGML6LpJja1aRxC8SwrPCHJt6xyHr78s5QNrMR0uZXsdGmuzS3w==} engines: {node: '>=14.6.0'} hasBin: true peerDependencies: @@ -9299,7 +9295,7 @@ 
packages: sass: optional: true dependencies: - '@next/env': 13.1.7-canary.8 + '@next/env': 13.1.7-canary.12 '@swc/helpers': 0.4.14 caniuse-lite: 1.0.30001439 postcss: 8.4.14 @@ -9307,26 +9303,26 @@ packages: react-dom: 18.2.0_react@18.2.0 styled-jsx: 5.1.1_react@18.2.0 optionalDependencies: - '@next/swc-android-arm-eabi': 13.1.7-canary.8 - '@next/swc-android-arm64': 13.1.7-canary.8 - '@next/swc-darwin-arm64': 13.1.7-canary.8 - '@next/swc-darwin-x64': 13.1.7-canary.8 - '@next/swc-freebsd-x64': 13.1.7-canary.8 - '@next/swc-linux-arm-gnueabihf': 13.1.7-canary.8 - '@next/swc-linux-arm64-gnu': 13.1.7-canary.8 - '@next/swc-linux-arm64-musl': 13.1.7-canary.8 - '@next/swc-linux-x64-gnu': 13.1.7-canary.8 - '@next/swc-linux-x64-musl': 13.1.7-canary.8 - '@next/swc-win32-arm64-msvc': 13.1.7-canary.8 - '@next/swc-win32-ia32-msvc': 13.1.7-canary.8 - '@next/swc-win32-x64-msvc': 13.1.7-canary.8 + '@next/swc-android-arm-eabi': 13.1.7-canary.12 + '@next/swc-android-arm64': 13.1.7-canary.12 + '@next/swc-darwin-arm64': 13.1.7-canary.12 + '@next/swc-darwin-x64': 13.1.7-canary.12 + '@next/swc-freebsd-x64': 13.1.7-canary.12 + '@next/swc-linux-arm-gnueabihf': 13.1.7-canary.12 + '@next/swc-linux-arm64-gnu': 13.1.7-canary.12 + '@next/swc-linux-arm64-musl': 13.1.7-canary.12 + '@next/swc-linux-x64-gnu': 13.1.7-canary.12 + '@next/swc-linux-x64-musl': 13.1.7-canary.12 + '@next/swc-win32-arm64-msvc': 13.1.7-canary.12 + '@next/swc-win32-ia32-msvc': 13.1.7-canary.12 + '@next/swc-win32-x64-msvc': 13.1.7-canary.12 transitivePeerDependencies: - '@babel/core' - babel-plugin-macros dev: false - /next/13.1.7-canary.8_pjwopsidmaokadturxaafygjp4: - resolution: {integrity: sha512-JqmeYZBiWxFJTmTyrbLPdl+YaeDKz0zI9XMLP21fua428JmH/30CFlmoHnJYjrDyYWzDX1MSLRDlc7LL13crLg==} + /next/13.1.7-canary.12_pjwopsidmaokadturxaafygjp4: + resolution: {integrity: sha512-Tr3ZESTZckJKsq5oa4P06RhjlpTCvpgMa0lGGML6LpJja1aRxC8SwrPCHJt6xyHr78s5QNrMR0uZXsdGmuzS3w==} engines: {node: '>=14.6.0'} hasBin: true peerDependencies: @@ -9343,7 +9339,7 @@ packages: sass: optional: true dependencies: - '@next/env': 13.1.7-canary.8 + '@next/env': 13.1.7-canary.12 '@swc/helpers': 0.4.14 caniuse-lite: 1.0.30001439 postcss: 8.4.14 @@ -9351,19 +9347,19 @@ packages: react-dom: 18.2.0_react@18.2.0 styled-jsx: 5.1.1_2exiyaescjxorpwwmy4ejghgte optionalDependencies: - '@next/swc-android-arm-eabi': 13.1.7-canary.8 - '@next/swc-android-arm64': 13.1.7-canary.8 - '@next/swc-darwin-arm64': 13.1.7-canary.8 - '@next/swc-darwin-x64': 13.1.7-canary.8 - '@next/swc-freebsd-x64': 13.1.7-canary.8 - '@next/swc-linux-arm-gnueabihf': 13.1.7-canary.8 - '@next/swc-linux-arm64-gnu': 13.1.7-canary.8 - '@next/swc-linux-arm64-musl': 13.1.7-canary.8 - '@next/swc-linux-x64-gnu': 13.1.7-canary.8 - '@next/swc-linux-x64-musl': 13.1.7-canary.8 - '@next/swc-win32-arm64-msvc': 13.1.7-canary.8 - '@next/swc-win32-ia32-msvc': 13.1.7-canary.8 - '@next/swc-win32-x64-msvc': 13.1.7-canary.8 + '@next/swc-android-arm-eabi': 13.1.7-canary.12 + '@next/swc-android-arm64': 13.1.7-canary.12 + '@next/swc-darwin-arm64': 13.1.7-canary.12 + '@next/swc-darwin-x64': 13.1.7-canary.12 + '@next/swc-freebsd-x64': 13.1.7-canary.12 + '@next/swc-linux-arm-gnueabihf': 13.1.7-canary.12 + '@next/swc-linux-arm64-gnu': 13.1.7-canary.12 + '@next/swc-linux-arm64-musl': 13.1.7-canary.12 + '@next/swc-linux-x64-gnu': 13.1.7-canary.12 + '@next/swc-linux-x64-musl': 13.1.7-canary.12 + '@next/swc-win32-arm64-msvc': 13.1.7-canary.12 + '@next/swc-win32-ia32-msvc': 13.1.7-canary.12 + '@next/swc-win32-x64-msvc': 13.1.7-canary.12 
transitivePeerDependencies:
      - '@babel/core'
      - babel-plugin-macros

From 6d5fee7f19b229e75d692ef6fbb75056c20511e8 Mon Sep 17 00:00:00 2001
From: LongYinan
Date: Tue, 14 Feb 2023 20:53:56 +0800
Subject: [PATCH 17/31] Revert "Issue Reporters (#3707)" (#3796)

This reverts commit dae3e2155dcc5dc8e240825da4fd091296486b18.

This commit makes `turbotrace` hang; x-ref:
https://vercel.slack.com/archives/C02UJN0A1UL/p1676369060652879

---
 Cargo.lock | 1 -
 crates/next-dev/src/lib.rs | 85 ++++++------------
 crates/node-file-trace/src/lib.rs | 27 +++---
 crates/turbopack-cli-utils/src/issue.rs | 61 +++++++------
 crates/turbopack-core/src/issue/mod.rs | 27 +-----
 crates/turbopack-dev-server/Cargo.toml | 1 -
 crates/turbopack-dev-server/src/http.rs | 11 +--
 crates/turbopack-dev-server/src/lib.rs | 45 +++++-----
 .../src/source/resolve.rs | 6 +-
 .../turbopack-dev-server/src/update/server.rs | 11 +--
 10 files changed, 107 insertions(+), 168 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 48da205fb4ebc..22eae93e2f665 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -7779,7 +7779,6 @@ dependencies = [
  "indexmap",
  "mime",
  "mime_guess",
- "once_cell",
  "parking_lot",
  "pin-project-lite",
  "serde",
diff --git a/crates/next-dev/src/lib.rs b/crates/next-dev/src/lib.rs
index 7095dc54da096..c8efb922eca65 100644
--- a/crates/next-dev/src/lib.rs
+++ b/crates/next-dev/src/lib.rs
@@ -27,15 +27,14 @@ use owo_colors::OwoColorize;
 use turbo_malloc::TurboMalloc;
 use turbo_tasks::{
     util::{FormatBytes, FormatDuration},
-    CollectiblesSource, RawVc, StatsType, TransientInstance, TransientValue, TurboTasks,
-    TurboTasksBackendApi, Value,
+    RawVc, StatsType, TransientInstance, TransientValue, TurboTasks, TurboTasksBackendApi, Value,
 };
 use turbo_tasks_fs::{DiskFileSystemVc, FileSystem, FileSystemVc};
 use turbo_tasks_memory::MemoryBackend;
-use turbopack_cli_utils::issue::{ConsoleUiVc, LogOptions};
+use turbopack_cli_utils::issue::{ConsoleUi, ConsoleUiVc, LogOptions};
 use turbopack_core::{
     environment::ServerAddr,
-    issue::{IssueReporter, IssueReporterVc, IssueSeverity, IssueVc},
+    issue::IssueSeverity,
     resolve::{parse::RequestVc, pattern::QueryMapVc},
     server_fs::ServerFileSystemVc,
 };
@@ -63,7 +62,6 @@ pub struct NextDevServerBuilder {
     entry_requests: Vec,
     eager_compile: bool,
     hostname: Option,
-    issue_reporter: Option>,
     port: Option,
     browserslist_query: String,
     log_level: IssueSeverity,
@@ -85,7 +83,6 @@ impl NextDevServerBuilder {
             entry_requests: vec![],
             eager_compile: false,
             hostname: None,
-            issue_reporter: None,
             port: None,
             browserslist_query: "last 1 Chrome versions, last 1 Firefox versions, last 1 Safari \
                                  versions, last 1 Edge versions"
                 .to_string(),
@@ -142,14 +139,6 @@ impl NextDevServerBuilder {
         self
     }

-    pub fn issue_reporter(
-        mut self,
-        issue_reporter: Box,
-    ) -> NextDevServerBuilder {
-        self.issue_reporter = Some(issue_reporter);
-        self
-    }
-
     /// Attempts to find an open port to bind.
     fn find_port(&self, host: IpAddr, port: u16, max_attempts: u16) -> Result {
         // max_attempts of 1 means we loop 0 times. 
@@ -203,22 +192,17 @@ impl NextDevServerBuilder { let show_all = self.show_all; let log_detail = self.log_detail; let browserslist_query = self.browserslist_query; - let log_options = Arc::new(LogOptions { + let log_options = LogOptions { current_dir: current_dir().unwrap(), show_all, log_detail, log_level: self.log_level, - }); + }; let entry_requests = Arc::new(self.entry_requests); + let console_ui = Arc::new(ConsoleUi::new(log_options)); + let console_ui_to_dev_server = console_ui.clone(); let server_addr = Arc::new(server.addr); let tasks = turbo_tasks.clone(); - let issue_provider = self.issue_reporter.unwrap_or_else(|| { - // Initialize a ConsoleUi reporter if no custom reporter was provided - Box::new(move || ConsoleUiVc::new(log_options.clone().into()).into()) - }); - let issue_reporter_arc = Arc::new(move || issue_provider.get_issue_reporter()); - - let get_issue_reporter = issue_reporter_arc.clone(); let source = move || { source( root_dir.clone(), @@ -226,31 +210,22 @@ impl NextDevServerBuilder { entry_requests.clone().into(), eager_compile, turbo_tasks.clone().into(), - get_issue_reporter(), + console_ui.clone().into(), browserslist_query.clone(), server_addr.clone().into(), ) }; - Ok(server.serve(tasks, source, issue_reporter_arc.clone())) + Ok(server.serve(tasks, source, console_ui_to_dev_server)) } } -async fn handle_issues + CollectiblesSource + Copy>( - source: T, - issue_reporter: IssueReporterVc, -) -> Result<()> { - let issues = IssueVc::peek_issues_with_path(source) - .await? - .strongly_consistent() +async fn handle_issues>(source: T, console_ui: ConsoleUiVc) -> Result<()> { + let state = console_ui + .group_and_display_issues(TransientValue::new(source.into())) .await?; - issue_reporter.report_issues( - TransientInstance::new(issues.clone()), - TransientValue::new(source.into()), - ); - - if issues.has_fatal().await? 
{ + if state.has_fatal { Err(anyhow!("Fatal issue(s) occurred")) } else { Ok(()) @@ -258,17 +233,17 @@ async fn handle_issues + CollectiblesSource + Copy>( } #[turbo_tasks::function] -async fn project_fs(project_dir: &str, issue_reporter: IssueReporterVc) -> Result { +async fn project_fs(project_dir: &str, console_ui: ConsoleUiVc) -> Result { let disk_fs = DiskFileSystemVc::new("project".to_string(), project_dir.to_string()); - handle_issues(disk_fs, issue_reporter).await?; + handle_issues(disk_fs, console_ui).await?; disk_fs.await?.start_watching()?; Ok(disk_fs.into()) } #[turbo_tasks::function] -async fn output_fs(project_dir: &str, issue_reporter: IssueReporterVc) -> Result { +async fn output_fs(project_dir: &str, console_ui: ConsoleUiVc) -> Result { let disk_fs = DiskFileSystemVc::new("output".to_string(), project_dir.to_string()); - handle_issues(disk_fs, issue_reporter).await?; + handle_issues(disk_fs, console_ui).await?; disk_fs.await?.start_watching()?; Ok(disk_fs.into()) } @@ -281,12 +256,13 @@ async fn source( entry_requests: TransientInstance>, eager_compile: bool, turbo_tasks: TransientInstance>, - issue_reporter: IssueReporterVc, + console_ui: TransientInstance, browserslist_query: String, server_addr: TransientInstance, ) -> Result { - let output_fs = output_fs(&project_dir, issue_reporter); - let fs = project_fs(&root_dir, issue_reporter); + let console_ui = (*console_ui).clone().cell(); + let output_fs = output_fs(&project_dir, console_ui); + let fs = project_fs(&root_dir, console_ui); let project_relative = project_dir.strip_prefix(&root_dir).unwrap(); let project_relative = project_relative .strip_prefix(MAIN_SEPARATOR) @@ -396,9 +372,9 @@ async fn source( .cell() .into(); - handle_issues(dev_server_fs, issue_reporter).await?; - handle_issues(web_source, issue_reporter).await?; - handle_issues(page_source, issue_reporter).await?; + handle_issues(dev_server_fs, console_ui).await?; + handle_issues(web_source, console_ui).await?; + handle_issues(page_source, console_ui).await?; Ok(source) } @@ -575,16 +551,3 @@ fn profile_timeout( ) -> impl Future { future } - -pub trait IssueReporterProvider: Send + Sync + 'static { - fn get_issue_reporter(&self) -> IssueReporterVc; -} - -impl IssueReporterProvider for T -where - T: Fn() -> IssueReporterVc + Send + Sync + Clone + 'static, -{ - fn get_issue_reporter(&self) -> IssueReporterVc { - self() - } -} diff --git a/crates/node-file-trace/src/lib.rs b/crates/node-file-trace/src/lib.rs index b1873526afea8..1dcf76412009c 100644 --- a/crates/node-file-trace/src/lib.rs +++ b/crates/node-file-trace/src/lib.rs @@ -37,12 +37,12 @@ use turbopack::{ resolve_options_context::ResolveOptionsContext, transition::TransitionsByNameVc, ModuleAssetContextVc, }; -use turbopack_cli_utils::issue::{ConsoleUiVc, IssueSeverityCliOption, LogOptions}; +use turbopack_cli_utils::issue::{ConsoleUi, IssueSeverityCliOption, LogOptions}; use turbopack_core::{ asset::{Asset, AssetVc, AssetsVc}, context::{AssetContext, AssetContextVc}, environment::{EnvironmentIntention, EnvironmentVc, ExecutionEnvironment, NodeJsEnvironment}, - issue::{IssueReporter, IssueSeverity, IssueVc}, + issue::{IssueSeverity, IssueVc}, reference::all_assets, resolve::options::{ImportMapping, ResolvedMap}, source_asset::SourceAssetVc, @@ -487,27 +487,24 @@ async fn run>( let (sender, mut receiver) = channel(1); let dir = current_dir().unwrap(); let tt = create_tt(); + let console_ui = Arc::new(ConsoleUi::new(LogOptions { + current_dir: dir.clone(), + show_all, + log_detail, + log_level: 
log_level.map_or_else(|| IssueSeverity::Error, |l| l.0), + })); let task = tt.spawn_root_task(move || { - let console_ui = ConsoleUiVc::new(TransientInstance::new(LogOptions { - current_dir: dir.clone(), - show_all, - log_detail, - log_level: log_level.map_or_else(|| IssueSeverity::Error, |l| l.0), - })); let dir = dir.clone(); let args = args.clone(); + let console_ui = console_ui.clone(); let sender = sender.clone(); Box::pin(async move { let output = main_operation(TransientValue::new(dir.clone()), args.clone().into()); - let source = TransientValue::new(output.into()); - let issues = IssueVc::peek_issues_with_path(output) - .await? - .strongly_consistent() - .await?; + let console_ui = (*console_ui).clone().cell(); console_ui - .as_issue_reporter() - .report_issues(TransientInstance::new(issues), source); + .group_and_display_issues(TransientValue::new(output.into())) + .await?; if has_return_value { let output_read_ref = output.await?; diff --git a/crates/turbopack-cli-utils/src/issue.rs b/crates/turbopack-cli-utils/src/issue.rs index 52f8a6bd4f310..7f9303dc2df73 100644 --- a/crates/turbopack-cli-utils/src/issue.rs +++ b/crates/turbopack-cli-utils/src/issue.rs @@ -10,17 +10,15 @@ use std::{ use anyhow::{anyhow, Result}; use crossterm::style::{StyledContent, Stylize}; use owo_colors::{OwoColorize as _, Style}; -use turbo_tasks::{ - RawVc, ReadRef, TransientInstance, TransientValue, TryJoinIterExt, ValueToString, -}; +use turbo_tasks::{RawVc, TransientValue, TryJoinIterExt, ValueToString}; use turbo_tasks_fs::{ attach::AttachedFileSystemVc, source_context::{get_source_context, SourceContextLine}, to_sys_path, FileLinesContent, FileSystemPathVc, }; use turbopack_core::issue::{ - CapturedIssues, Issue, IssueProcessingPathItem, IssueReporter, IssueReporterVc, IssueSeverity, - OptionIssueProcessingPathItemsVc, PlainIssue, PlainIssueSource, + Issue, IssueProcessingPathItem, IssueSeverity, IssueVc, OptionIssueProcessingPathItemsVc, + PlainIssue, PlainIssueSource, }; #[derive(Clone, Copy, PartialEq, Eq, Debug)] @@ -414,34 +412,41 @@ impl PartialEq for ConsoleUi { } } -#[turbo_tasks::value_impl] -impl ConsoleUiVc { - #[turbo_tasks::function] - pub fn new(options: TransientInstance) -> Self { +impl ConsoleUi { + pub fn new(options: LogOptions) -> Self { ConsoleUi { - options: (*options).clone(), + options, seen: Arc::new(Mutex::new(SeenIssues::new())), } - .cell() } } +#[turbo_tasks::value(transparent)] +pub struct DisplayIssueState { + pub has_fatal: bool, + pub has_issues: bool, + pub has_new_issues: bool, +} + #[turbo_tasks::value_impl] -impl IssueReporter for ConsoleUi { +impl ConsoleUiVc { #[turbo_tasks::function] - async fn report_issues( - &self, - issues: TransientInstance>, + pub async fn group_and_display_issues( + self, source: TransientValue, - ) -> Result<()> { - let issues = &*issues; - let LogOptions { + ) -> Result { + let source = source.into_value(); + let this = self.await?; + + let issues = IssueVc::peek_issues_with_path(source).await?; + let issues = issues.await?; + let &LogOptions { ref current_dir, show_all, log_detail, log_level, .. 
- } = self.options; + } = &this.options; let mut grouped_issues: GroupedIssues = HashMap::new(); let issues = issues @@ -459,11 +464,11 @@ impl IssueReporter for ConsoleUi { .iter() .map(|(_, _, _, id)| *id) .collect::>(); - let mut new_ids = self - .seen - .lock() - .unwrap() - .new_ids(source.into_value(), issue_ids); + let mut new_ids = this.seen.lock().unwrap().new_ids(source, issue_ids); + + let mut has_fatal = false; + let has_issues = !issues.is_empty(); + let has_new_issues = !new_ids.is_empty(); for (plain_issue, path, context, id) in issues { if !new_ids.remove(&id) { @@ -474,6 +479,7 @@ impl IssueReporter for ConsoleUi { let context_path = make_relative_to_cwd(context, current_dir).await?; let category = &plain_issue.category; let title = &plain_issue.title; + has_fatal = severity == IssueSeverity::Fatal; let severity_map = grouped_issues .entry(severity) .or_insert_with(Default::default); @@ -606,7 +612,12 @@ impl IssueReporter for ConsoleUi { } } - Ok(()) + Ok(DisplayIssueState { + has_fatal, + has_issues, + has_new_issues, + } + .cell()) } } diff --git a/crates/turbopack-core/src/issue/mod.rs b/crates/turbopack-core/src/issue/mod.rs index 1ab17a3d38046..6af1c8407171c 100644 --- a/crates/turbopack-core/src/issue/mod.rs +++ b/crates/turbopack-core/src/issue/mod.rs @@ -17,8 +17,7 @@ use futures::FutureExt; use turbo_tasks::{ emit, primitives::{BoolVc, StringVc, U64Vc}, - CollectiblesSource, RawVc, ReadRef, TransientInstance, TransientValue, TryJoinIterExt, - ValueToString, ValueToStringVc, + CollectiblesSource, ReadRef, TryJoinIterExt, ValueToString, ValueToStringVc, }; use turbo_tasks_fs::{ FileContent, FileContentReadRef, FileLine, FileLinesContent, FileSystemPathReadRef, @@ -341,21 +340,6 @@ pub struct CapturedIssues { processing_path: ItemIssueProcessingPathVc, } -impl CapturedIssues { - pub async fn has_fatal(&self) -> Result { - let mut has_fatal = false; - - for issue in self.issues.iter() { - let severity = *issue.severity().await?; - if severity == IssueSeverity::Fatal { - has_fatal = true; - break; - } - } - Ok(has_fatal) - } -} - #[turbo_tasks::value_impl] impl CapturedIssuesVc { #[turbo_tasks::function] @@ -584,12 +568,3 @@ impl PlainAssetVc { .cell()) } } - -#[turbo_tasks::value_trait] -pub trait IssueReporter { - fn report_issues( - &self, - issues: TransientInstance>, - source: TransientValue, - ); -} diff --git a/crates/turbopack-dev-server/Cargo.toml b/crates/turbopack-dev-server/Cargo.toml index 28959cea404ea..ab19c1e9623c7 100644 --- a/crates/turbopack-dev-server/Cargo.toml +++ b/crates/turbopack-dev-server/Cargo.toml @@ -17,7 +17,6 @@ hyper-tungstenite = "0.8.1" indexmap = { workspace = true, features = ["serde"] } mime = "0.3.16" mime_guess = "2.0.4" -once_cell = "1.13.0" parking_lot = "0.12.1" pin-project-lite = "0.2.9" serde = "1.0.136" diff --git a/crates/turbopack-dev-server/src/http.rs b/crates/turbopack-dev-server/src/http.rs index 91b1851d28cd3..695e40d050fae 100644 --- a/crates/turbopack-dev-server/src/http.rs +++ b/crates/turbopack-dev-server/src/http.rs @@ -4,7 +4,8 @@ use hyper::{header::HeaderName, Request, Response}; use mime_guess::mime; use turbo_tasks::TransientInstance; use turbo_tasks_fs::{FileContent, FileContentReadRef}; -use turbopack_core::{asset::AssetContent, issue::IssueReporterVc, version::VersionedContent}; +use turbopack_cli_utils::issue::ConsoleUiVc; +use turbopack_core::{asset::AssetContent, version::VersionedContent}; use crate::source::{ request::SourceRequest, @@ -29,10 +30,10 @@ enum GetFromSourceResult { async fn 
get_from_source( source: ContentSourceVc, request: TransientInstance, - issue_repoter: IssueReporterVc, + console_ui: ConsoleUiVc, ) -> Result { Ok( - match &*resolve_source_request(source, request, issue_repoter).await? { + match &*resolve_source_request(source, request, console_ui).await? { ResolveSourceRequestResult::Static(static_content_vc) => { let static_content = static_content_vc.await?; if let AssetContent::File(file) = &*static_content.content.content().await? { @@ -59,11 +60,11 @@ async fn get_from_source( pub async fn process_request_with_content_source( source: ContentSourceVc, request: Request, - issue_reporter: IssueReporterVc, + console_ui: ConsoleUiVc, ) -> Result> { let original_path = request.uri().path().to_string(); let request = http_request_to_source_request(request).await?; - let result = get_from_source(source, TransientInstance::new(request), issue_reporter); + let result = get_from_source(source, TransientInstance::new(request), console_ui); match &*result.strongly_consistent().await? { GetFromSourceResult::Static { content, diff --git a/crates/turbopack-dev-server/src/lib.rs b/crates/turbopack-dev-server/src/lib.rs index 9851b65ff5682..edf9a4d1c0c07 100644 --- a/crates/turbopack-dev-server/src/lib.rs +++ b/crates/turbopack-dev-server/src/lib.rs @@ -16,17 +16,16 @@ use std::{ time::{Duration, Instant}, }; -use anyhow::{anyhow, Context, Result}; +use anyhow::{bail, Context, Result}; use hyper::{ server::{conn::AddrIncoming, Builder}, service::{make_service_fn, service_fn}, Request, Response, Server, }; use turbo_tasks::{ - run_once, trace::TraceRawVcs, util::FormatDuration, CollectiblesSource, RawVc, - TransientInstance, TransientValue, TurboTasksApi, + run_once, trace::TraceRawVcs, util::FormatDuration, RawVc, TransientValue, TurboTasksApi, }; -use turbopack_core::issue::{IssueReporter, IssueReporterVc, IssueVc}; +use turbopack_cli_utils::issue::{ConsoleUi, ConsoleUiVc}; use self::{ source::{ContentSourceResultVc, ContentSourceVc}, @@ -67,27 +66,21 @@ pub struct DevServer { pub future: Pin> + Send + 'static>>, } -async fn handle_issues + CollectiblesSource + Copy>( +// Just print issues to console for now... +async fn handle_issues>( source: T, path: &str, operation: &str, - issue_reporter: IssueReporterVc, + console_ui: ConsoleUiVc, ) -> Result<()> { - let issues = IssueVc::peek_issues_with_path(source) - .await? - .strongly_consistent() + let state = console_ui + .group_and_display_issues(TransientValue::new(source.into())) .await?; - - issue_reporter.report_issues( - TransientInstance::new(issues.clone()), - TransientValue::new(source.into()), - ); - - if issues.has_fatal().await? 
{ - Err(anyhow!("Fatal issue(s) occurred in {path} ({operation})")) - } else { - Ok(()) + if state.has_fatal { + bail!("Fatal issue(s) occurred in {path} ({operation}") } + + Ok(()) } impl DevServer { @@ -113,21 +106,21 @@ impl DevServerBuilder { self, turbo_tasks: Arc, source_provider: impl SourceProvider + Clone + Send + Sync, - get_issue_reporter: Arc IssueReporterVc + Send + Sync>, + console_ui: Arc, ) -> DevServer { let make_svc = make_service_fn(move |_| { let tt = turbo_tasks.clone(); let source_provider = source_provider.clone(); - let get_issue_reporter = get_issue_reporter.clone(); + let console_ui = console_ui.clone(); async move { let handler = move |request: Request| { + let console_ui = console_ui.clone(); let start = Instant::now(); let tt = tt.clone(); - let get_issue_reporter = get_issue_reporter.clone(); let source_provider = source_provider.clone(); let future = async move { run_once(tt.clone(), async move { - let issue_reporter = get_issue_reporter(); + let console_ui = (*console_ui).clone().cell(); if hyper_tungstenite::is_upgrade_request(&request) { let uri = request.uri(); @@ -137,7 +130,7 @@ impl DevServerBuilder { let (response, websocket) = hyper_tungstenite::upgrade(request, None)?; let update_server = - UpdateServer::new(source_provider, issue_reporter); + UpdateServer::new(source_provider, console_ui); update_server.run(&*tt, websocket); return Ok(response); } @@ -165,12 +158,12 @@ impl DevServerBuilder { let uri = request.uri(); let path = uri.path().to_string(); let source = source_provider.get_source(); - handle_issues(source, &path, "get source", issue_reporter).await?; + handle_issues(source, &path, "get source", console_ui).await?; let resolved_source = source.resolve_strongly_consistent().await?; let response = http::process_request_with_content_source( resolved_source, request, - issue_reporter, + console_ui, ) .await?; let status = response.status().as_u16(); diff --git a/crates/turbopack-dev-server/src/source/resolve.rs b/crates/turbopack-dev-server/src/source/resolve.rs index 96e827842382a..aa5cd38ab5303 100644 --- a/crates/turbopack-dev-server/src/source/resolve.rs +++ b/crates/turbopack-dev-server/src/source/resolve.rs @@ -6,7 +6,7 @@ use std::{ use anyhow::{bail, Result}; use hyper::Uri; use turbo_tasks::{TransientInstance, Value}; -use turbopack_core::issue::IssueReporterVc; +use turbopack_cli_utils::issue::ConsoleUiVc; use super::{ headers::{HeaderValue, Headers}, @@ -36,7 +36,7 @@ pub enum ResolveSourceRequestResult { pub async fn resolve_source_request( source: ContentSourceVc, request: TransientInstance, - issue_reporter: IssueReporterVc, + console_ui: ConsoleUiVc, ) -> Result { let mut data = ContentSourceData::default(); let mut current_source = source; @@ -50,7 +50,7 @@ pub async fn resolve_source_request( result, &original_path, "get content from source", - issue_reporter, + console_ui, ) .await?; diff --git a/crates/turbopack-dev-server/src/update/server.rs b/crates/turbopack-dev-server/src/update/server.rs index d4719ac194199..2ff147f7fca5c 100644 --- a/crates/turbopack-dev-server/src/update/server.rs +++ b/crates/turbopack-dev-server/src/update/server.rs @@ -12,7 +12,8 @@ use tokio::select; use tokio_stream::StreamMap; use turbo_tasks::{TransientInstance, TurboTasksApi}; use turbo_tasks_fs::json::parse_json_with_source_context; -use turbopack_core::{issue::IssueReporterVc, version::Update}; +use turbopack_cli_utils::issue::ConsoleUiVc; +use turbopack_core::version::Update; use super::{ protocol::{ClientMessage, ClientUpdateInstruction, 
Issue, ResourceIdentifier},
@@ -27,15 +28,15 @@ use crate::{
 /// A server that listens for updates and sends them to connected clients.
 pub(crate) struct UpdateServer<P: SourceProvider> {
     source_provider: P,
-    issue_reporter: IssueReporterVc,
+    console_ui: ConsoleUiVc,
 }
 
 impl<P: SourceProvider + Clone + Send + Sync> UpdateServer<P> {
     /// Create a new update server with the given websocket and content source.
-    pub fn new(source_provider: P, issue_reporter: IssueReporterVc) -> Self {
+    pub fn new(source_provider: P, console_ui: ConsoleUiVc) -> Self {
         Self {
             source_provider,
-            issue_reporter,
+            console_ui,
         }
     }
 
@@ -68,7 +69,7 @@ impl<P: SourceProvider + Clone + Send + Sync> UpdateServer<P>
{ resolve_source_request( source, TransientInstance::new(request), - self.issue_reporter + self.console_ui ) } }; From d07f0368fc5c0b3a40dbc8c0b045b90928860130 Mon Sep 17 00:00:00 2001 From: LongYinan Date: Tue, 14 Feb 2023 21:19:31 +0800 Subject: [PATCH 18/31] Fix Bench Turbotrace against @vercel/nft ci (#3721) --- .github/workflows/bench-turbotrace-against-node-nft.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/bench-turbotrace-against-node-nft.yml b/.github/workflows/bench-turbotrace-against-node-nft.yml index 6ab7a38750def..7ff7d7fba7e7b 100644 --- a/.github/workflows/bench-turbotrace-against-node-nft.yml +++ b/.github/workflows/bench-turbotrace-against-node-nft.yml @@ -1,4 +1,4 @@ -name: Bench Turbo trace against @vercel/nft +name: Bench Turbotrace against @vercel/nft on: push: @@ -21,7 +21,7 @@ jobs: name: bench env: - BUILD_ARGS: --release -p turbopack --features bench_against_node_nft bench_against_node_nft + BENCH_ARGS: --release -p turbopack --features bench_against_node_nft bench_against_node_nft steps: - uses: actions/checkout@v3 From a66afa5eaa3464fd0fa7b8f899edb800f4b2ddbf Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Tue, 14 Feb 2023 14:21:58 +0100 Subject: [PATCH 19/31] reduce function count (#3768) avoid a few resolve tasks in this hot code --- crates/turbo-tasks-fs/src/attach.rs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/crates/turbo-tasks-fs/src/attach.rs b/crates/turbo-tasks-fs/src/attach.rs index a8efb251cf8d5..f7cafdf2600a3 100644 --- a/crates/turbo-tasks-fs/src/attach.rs +++ b/crates/turbo-tasks-fs/src/attach.rs @@ -53,9 +53,13 @@ impl AttachedFileSystemVc { // already on this filesystem fs if fs == self_fs => Ok(contained_path_vc), // in the root filesystem, just need to rebase on this filesystem - fs if fs == this.root_fs => Ok(self.root().join(&contained_path.path)), + fs if fs == this.root_fs => Ok(self.root().resolve().await?.join(&contained_path.path)), // in the child filesystem, so we expand to the full path by appending to child_path - fs if fs == this.child_fs => Ok(self.child_path().join(&contained_path.path)), + fs if fs == this.child_fs => Ok(self + .child_path() + .resolve() + .await? + .join(&contained_path.path)), _ => bail!( "path {} not part of self, the root fs or the child fs", contained_path_vc.to_string().await? 
             ),
         }
@@ -67,7 +71,7 @@ impl AttachedFileSystemVc { /// [AttachedFileSystem] #[turbo_tasks::function] async fn child_path(self) -> Result { - Ok(self.root().join(&self.await?.child_path)) + Ok(self.root().resolve().await?.join(&self.await?.child_path)) } /// Resolves the local path of the root or child filesystem from a path @@ -88,9 +92,9 @@ impl AttachedFileSystemVc { let child_path = self.child_path().await?; Ok(if let Some(inner_path) = child_path.get_path_to(&path) { - this.child_fs.root().join(inner_path) + this.child_fs.root().resolve().await?.join(inner_path) } else { - this.root_fs.root().join(&path.path) + this.root_fs.root().resolve().await?.join(&path.path) }) } } From 43b4b038c344d111002612d76acdd61e0a743b65 Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Tue, 14 Feb 2023 15:38:55 +0100 Subject: [PATCH 20/31] fix export * warning for client pages (#3795) This gets rid of this warning when using a client component as page: ``` warning - [analyze] [project-with-next]/src/app/client/page.jsx unexpected export * export * used with module [project-with-next]/src/app/client/page.jsx which has no exports Typescript only: Did you want to export only types with `export type { ... } from "..."`? ``` * fix export * warning for client pages * Detect `__turbopack_export_value__` as CJS style exports * fix `__turbopack_cjs__` name * flag turbopackHelper in client proxy --- .../next-core/js/src/entry/app-renderer.tsx | 8 ++++-- .../js/src/entry/app/layout-entry.tsx | 4 +-- .../src/references/esm/export.rs | 2 +- .../src/references/mod.rs | 27 +++++++++++++++++-- .../src/transform/server_to_client_proxy.rs | 15 ++++++++--- ...ot_export-alls_cjs-2_input_index_0ea679.js | 4 +-- ...port-alls_cjs-script_input_index_1cb91d.js | 2 +- 7 files changed, 49 insertions(+), 13 deletions(-) diff --git a/crates/next-core/js/src/entry/app-renderer.tsx b/crates/next-core/js/src/entry/app-renderer.tsx index 9385f38c64166..7dff9a1339f22 100644 --- a/crates/next-core/js/src/entry/app-renderer.tsx +++ b/crates/next-core/js/src/entry/app-renderer.tsx @@ -117,7 +117,11 @@ async function runOperation(renderData: RenderData) { const layoutInfoChunks: Record = {}; const pageItem = LAYOUT_INFO[LAYOUT_INFO.length - 1]; const pageModule = pageItem.page!.module; - let tree: LoaderTree = ["", {}, { page: [() => pageModule, "page.js"] }]; + let tree: LoaderTree = [ + "", + {}, + { page: [() => pageModule.module, "page.js"] }, + ]; layoutInfoChunks["page"] = pageItem.page!.chunks; for (let i = LAYOUT_INFO.length - 2; i >= 0; i--) { const info = LAYOUT_INFO[i]; @@ -127,7 +131,7 @@ async function runOperation(renderData: RenderData) { continue; } const k = key as FileType; - components[k] = [() => info[k]!.module, `${k}${i}.js`]; + components[k] = [() => info[k]!.module.module, `${k}${i}.js`]; layoutInfoChunks[`${k}${i}`] = info[k]!.chunks; } tree = [info.segment, { children: tree }, components]; diff --git a/crates/next-core/js/src/entry/app/layout-entry.tsx b/crates/next-core/js/src/entry/app/layout-entry.tsx index d5b5dd503ff06..4f09dbdb77cd7 100644 --- a/crates/next-core/js/src/entry/app/layout-entry.tsx +++ b/crates/next-core/js/src/entry/app/layout-entry.tsx @@ -8,5 +8,5 @@ import * as serverHooks from "next/dist/client/components/hooks-server-context.j export { serverHooks }; export { renderToReadableStream } from "next/dist/compiled/react-server-dom-webpack/server.browser"; -export { default } from "."; -export * from "."; +import * as module from "."; +export { module }; diff --git 
a/crates/turbopack-ecmascript/src/references/esm/export.rs b/crates/turbopack-ecmascript/src/references/esm/export.rs index 8f14e4bfaaab7..9b9a049ba906c 100644 --- a/crates/turbopack-ecmascript/src/references/esm/export.rs +++ b/crates/turbopack-ecmascript/src/references/esm/export.rs @@ -167,7 +167,7 @@ impl CodeGenerateable for EsmExports { let ident = ReferencedAsset::get_ident_from_placeable(asset).await?; cjs_exports.push(quote_expr!( - "__turbopack__cjs__($arg)", + "__turbopack_cjs__($arg)", arg: Expr = Ident::new(ident.into(), DUMMY_SP).into() )); } diff --git a/crates/turbopack-ecmascript/src/references/mod.rs b/crates/turbopack-ecmascript/src/references/mod.rs index d4f6683076858..e36184ac10894 100644 --- a/crates/turbopack-ecmascript/src/references/mod.rs +++ b/crates/turbopack-ecmascript/src/references/mod.rs @@ -2134,12 +2134,32 @@ async fn resolve_as_webpack_runtime( #[turbo_tasks::value(transparent, serialization = "none")] pub struct AstPath(#[turbo_tasks(trace_ignore)] Vec); +pub static TURBOPACK_HELPER: &str = "__turbopackHelper"; + +pub fn is_turbopack_helper_import(import: &ImportDecl) -> bool { + import.asserts.as_ref().map_or(true, |asserts| { + asserts.props.iter().any(|assert| { + assert + .as_prop() + .and_then(|prop| prop.as_key_value()) + .and_then(|kv| kv.key.as_ident()) + .map_or(true, |ident| &*ident.sym != TURBOPACK_HELPER) + }) + }) +} + fn has_cjs_export(p: &Program) -> bool { use swc_core::ecma::visit::{visit_obj_and_computed, Visit, VisitWith}; if let Program::Module(m) = p { // Check for imports/exports - if m.body.iter().any(ModuleItem::is_module_decl) { + if m.body.iter().any(|item| { + item.as_module_decl().map_or(false, |module_decl| { + module_decl + .as_import() + .map_or(true, |import| !is_turbopack_helper_import(import)) + }) + }) { return false; } } @@ -2152,7 +2172,10 @@ fn has_cjs_export(p: &Program) -> bool { visit_obj_and_computed!(); fn visit_ident(&mut self, i: &Ident) { - if &*i.sym == "module" || &*i.sym == "exports" { + if &*i.sym == "module" + || &*i.sym == "exports" + || &*i.sym == "__turbopack_export_value__" + { self.found = true; } } diff --git a/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs b/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs index 74938c906a68e..f8651ce3b5f30 100644 --- a/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs +++ b/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs @@ -2,14 +2,17 @@ use swc_core::{ common::DUMMY_SP, ecma::{ ast::{ - Expr, ExprStmt, ImportDecl, ImportDefaultSpecifier, ImportSpecifier, Lit, Module, - ModuleDecl, ModuleItem, Program, Stmt, Str, + Expr, ExprStmt, Ident, ImportDecl, ImportDefaultSpecifier, ImportSpecifier, + KeyValueProp, Lit, Module, ModuleDecl, ModuleItem, ObjectLit, Program, Prop, PropName, + PropOrSpread, Stmt, Str, }, utils::private_ident, }, quote, }; +use crate::references::TURBOPACK_HELPER; + macro_rules! 
has_client_directive { ($stmts:expr) => { $stmts @@ -52,7 +55,13 @@ pub fn create_proxy_module(transition_name: &str, target_import: &str) -> Progra })], src: box target_import.into(), type_only: false, - asserts: None, + asserts: Some(box ObjectLit { + span: DUMMY_SP, + props: vec![PropOrSpread::Prop(box Prop::KeyValue(KeyValueProp { + key: PropName::Ident(Ident::new(TURBOPACK_HELPER.into(), DUMMY_SP)), + value: box Expr::Lit(true.into()), + }))], + }), span: DUMMY_SP, })), ModuleItem::Stmt(quote!( diff --git a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_0ea679.js b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_0ea679.js index da554cfec52c0..5d8baa8e0dc76 100644 --- a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_0ea679.js +++ b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_0ea679.js @@ -12,7 +12,7 @@ console.log(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$ __turbopack_esm__({}); var __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$export$2d$alls$2f$cjs$2d$2$2f$input$2f$c$2e$js__ = __turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/input/c.js (ecmascript)"); -__turbopack__cjs__(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$export$2d$alls$2f$cjs$2d$2$2f$input$2f$c$2e$js__); +__turbopack_cjs__(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$export$2d$alls$2f$cjs$2d$2$2f$input$2f$c$2e$js__); "__TURBOPACK__ecmascript__hoisting__location__"; ; @@ -21,7 +21,7 @@ __turbopack__cjs__(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$tur __turbopack_esm__({}); var __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$export$2d$alls$2f$cjs$2d$2$2f$input$2f$commonjs$2e$js__ = __turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/input/commonjs.js (ecmascript)"); -__turbopack__cjs__(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$export$2d$alls$2f$cjs$2d$2$2f$input$2f$commonjs$2e$js__); +__turbopack_cjs__(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$export$2d$alls$2f$cjs$2d$2$2f$input$2f$commonjs$2e$js__); "__TURBOPACK__ecmascript__hoisting__location__"; ; diff --git a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_1cb91d.js b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_1cb91d.js index bbfb8f658c353..0bfd8d5187157 100644 --- a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_1cb91d.js +++ b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_1cb91d.js @@ -12,7 +12,7 @@ console.log(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$ __turbopack_esm__({}); var 
__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$export$2d$alls$2f$cjs$2d$script$2f$input$2f$exported$2e$cjs__ = __turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/input/exported.cjs (ecmascript)"); -__turbopack__cjs__(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$export$2d$alls$2f$cjs$2d$script$2f$input$2f$exported$2e$cjs__); +__turbopack_cjs__(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$export$2d$alls$2f$cjs$2d$script$2f$input$2f$exported$2e$cjs__); "__TURBOPACK__ecmascript__hoisting__location__"; ; console.log('Hoist test'); From e4a195124d6596ab2d48946a521cbd39e962b47c Mon Sep 17 00:00:00 2001 From: Will Binns-Smith Date: Tue, 14 Feb 2023 11:10:12 -0800 Subject: [PATCH 21/31] Restore Issue Reporters (#3803) This restores issue reporters, addressing a bug that prevented turbo-trace from completing. This moves `ConsoleUiVc::new` into an async block to prevent it from stalling. Test Plan: Verify `cargo run --bin node-file-trace -- print path/to/my/app` no longer stalls. --- Cargo.lock | 1 + crates/next-dev/src/lib.rs | 85 +++++++++++++------ crates/node-file-trace/src/lib.rs | 28 +++--- crates/turbopack-cli-utils/src/issue.rs | 61 ++++++------- crates/turbopack-core/src/issue/mod.rs | 27 +++++- crates/turbopack-dev-server/Cargo.toml | 1 + crates/turbopack-dev-server/src/http.rs | 11 ++- crates/turbopack-dev-server/src/lib.rs | 45 +++++----- .../src/source/resolve.rs | 6 +- .../turbopack-dev-server/src/update/server.rs | 11 ++- 10 files changed, 169 insertions(+), 107 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 22eae93e2f665..48da205fb4ebc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7779,6 +7779,7 @@ dependencies = [ "indexmap", "mime", "mime_guess", + "once_cell", "parking_lot", "pin-project-lite", "serde", diff --git a/crates/next-dev/src/lib.rs b/crates/next-dev/src/lib.rs index c8efb922eca65..7095dc54da096 100644 --- a/crates/next-dev/src/lib.rs +++ b/crates/next-dev/src/lib.rs @@ -27,14 +27,15 @@ use owo_colors::OwoColorize; use turbo_malloc::TurboMalloc; use turbo_tasks::{ util::{FormatBytes, FormatDuration}, - RawVc, StatsType, TransientInstance, TransientValue, TurboTasks, TurboTasksBackendApi, Value, + CollectiblesSource, RawVc, StatsType, TransientInstance, TransientValue, TurboTasks, + TurboTasksBackendApi, Value, }; use turbo_tasks_fs::{DiskFileSystemVc, FileSystem, FileSystemVc}; use turbo_tasks_memory::MemoryBackend; -use turbopack_cli_utils::issue::{ConsoleUi, ConsoleUiVc, LogOptions}; +use turbopack_cli_utils::issue::{ConsoleUiVc, LogOptions}; use turbopack_core::{ environment::ServerAddr, - issue::IssueSeverity, + issue::{IssueReporter, IssueReporterVc, IssueSeverity, IssueVc}, resolve::{parse::RequestVc, pattern::QueryMapVc}, server_fs::ServerFileSystemVc, }; @@ -62,6 +63,7 @@ pub struct NextDevServerBuilder { entry_requests: Vec, eager_compile: bool, hostname: Option, + issue_reporter: Option>, port: Option, browserslist_query: String, log_level: IssueSeverity, @@ -83,6 +85,7 @@ impl NextDevServerBuilder { entry_requests: vec![], eager_compile: false, hostname: None, + issue_reporter: None, port: None, browserslist_query: "last 1 Chrome versions, last 1 Firefox versions, last 1 Safari \ versions, last 1 Edge versions" @@ -139,6 +142,14 @@ impl NextDevServerBuilder { self } + pub fn issue_reporter( + mut self, + issue_reporter: Box, + ) -> 
NextDevServerBuilder { + self.issue_reporter = Some(issue_reporter); + self + } + /// Attempts to find an open port to bind. fn find_port(&self, host: IpAddr, port: u16, max_attempts: u16) -> Result { // max_attempts of 1 means we loop 0 times. @@ -192,17 +203,22 @@ impl NextDevServerBuilder { let show_all = self.show_all; let log_detail = self.log_detail; let browserslist_query = self.browserslist_query; - let log_options = LogOptions { + let log_options = Arc::new(LogOptions { current_dir: current_dir().unwrap(), show_all, log_detail, log_level: self.log_level, - }; + }); let entry_requests = Arc::new(self.entry_requests); - let console_ui = Arc::new(ConsoleUi::new(log_options)); - let console_ui_to_dev_server = console_ui.clone(); let server_addr = Arc::new(server.addr); let tasks = turbo_tasks.clone(); + let issue_provider = self.issue_reporter.unwrap_or_else(|| { + // Initialize a ConsoleUi reporter if no custom reporter was provided + Box::new(move || ConsoleUiVc::new(log_options.clone().into()).into()) + }); + let issue_reporter_arc = Arc::new(move || issue_provider.get_issue_reporter()); + + let get_issue_reporter = issue_reporter_arc.clone(); let source = move || { source( root_dir.clone(), @@ -210,22 +226,31 @@ impl NextDevServerBuilder { entry_requests.clone().into(), eager_compile, turbo_tasks.clone().into(), - console_ui.clone().into(), + get_issue_reporter(), browserslist_query.clone(), server_addr.clone().into(), ) }; - Ok(server.serve(tasks, source, console_ui_to_dev_server)) + Ok(server.serve(tasks, source, issue_reporter_arc.clone())) } } -async fn handle_issues>(source: T, console_ui: ConsoleUiVc) -> Result<()> { - let state = console_ui - .group_and_display_issues(TransientValue::new(source.into())) +async fn handle_issues + CollectiblesSource + Copy>( + source: T, + issue_reporter: IssueReporterVc, +) -> Result<()> { + let issues = IssueVc::peek_issues_with_path(source) + .await? + .strongly_consistent() .await?; - if state.has_fatal { + issue_reporter.report_issues( + TransientInstance::new(issues.clone()), + TransientValue::new(source.into()), + ); + + if issues.has_fatal().await? 
{ Err(anyhow!("Fatal issue(s) occurred")) } else { Ok(()) @@ -233,17 +258,17 @@ async fn handle_issues>(source: T, console_ui: ConsoleUiVc) -> Re } #[turbo_tasks::function] -async fn project_fs(project_dir: &str, console_ui: ConsoleUiVc) -> Result { +async fn project_fs(project_dir: &str, issue_reporter: IssueReporterVc) -> Result { let disk_fs = DiskFileSystemVc::new("project".to_string(), project_dir.to_string()); - handle_issues(disk_fs, console_ui).await?; + handle_issues(disk_fs, issue_reporter).await?; disk_fs.await?.start_watching()?; Ok(disk_fs.into()) } #[turbo_tasks::function] -async fn output_fs(project_dir: &str, console_ui: ConsoleUiVc) -> Result { +async fn output_fs(project_dir: &str, issue_reporter: IssueReporterVc) -> Result { let disk_fs = DiskFileSystemVc::new("output".to_string(), project_dir.to_string()); - handle_issues(disk_fs, console_ui).await?; + handle_issues(disk_fs, issue_reporter).await?; disk_fs.await?.start_watching()?; Ok(disk_fs.into()) } @@ -256,13 +281,12 @@ async fn source( entry_requests: TransientInstance>, eager_compile: bool, turbo_tasks: TransientInstance>, - console_ui: TransientInstance, + issue_reporter: IssueReporterVc, browserslist_query: String, server_addr: TransientInstance, ) -> Result { - let console_ui = (*console_ui).clone().cell(); - let output_fs = output_fs(&project_dir, console_ui); - let fs = project_fs(&root_dir, console_ui); + let output_fs = output_fs(&project_dir, issue_reporter); + let fs = project_fs(&root_dir, issue_reporter); let project_relative = project_dir.strip_prefix(&root_dir).unwrap(); let project_relative = project_relative .strip_prefix(MAIN_SEPARATOR) @@ -372,9 +396,9 @@ async fn source( .cell() .into(); - handle_issues(dev_server_fs, console_ui).await?; - handle_issues(web_source, console_ui).await?; - handle_issues(page_source, console_ui).await?; + handle_issues(dev_server_fs, issue_reporter).await?; + handle_issues(web_source, issue_reporter).await?; + handle_issues(page_source, issue_reporter).await?; Ok(source) } @@ -551,3 +575,16 @@ fn profile_timeout( ) -> impl Future { future } + +pub trait IssueReporterProvider: Send + Sync + 'static { + fn get_issue_reporter(&self) -> IssueReporterVc; +} + +impl IssueReporterProvider for T +where + T: Fn() -> IssueReporterVc + Send + Sync + Clone + 'static, +{ + fn get_issue_reporter(&self) -> IssueReporterVc { + self() + } +} diff --git a/crates/node-file-trace/src/lib.rs b/crates/node-file-trace/src/lib.rs index 1dcf76412009c..42697f84de714 100644 --- a/crates/node-file-trace/src/lib.rs +++ b/crates/node-file-trace/src/lib.rs @@ -37,12 +37,12 @@ use turbopack::{ resolve_options_context::ResolveOptionsContext, transition::TransitionsByNameVc, ModuleAssetContextVc, }; -use turbopack_cli_utils::issue::{ConsoleUi, IssueSeverityCliOption, LogOptions}; +use turbopack_cli_utils::issue::{ConsoleUiVc, IssueSeverityCliOption, LogOptions}; use turbopack_core::{ asset::{Asset, AssetVc, AssetsVc}, context::{AssetContext, AssetContextVc}, environment::{EnvironmentIntention, EnvironmentVc, ExecutionEnvironment, NodeJsEnvironment}, - issue::{IssueSeverity, IssueVc}, + issue::{IssueReporter, IssueSeverity, IssueVc}, reference::all_assets, resolve::options::{ImportMapping, ResolvedMap}, source_asset::SourceAssetVc, @@ -487,25 +487,29 @@ async fn run>( let (sender, mut receiver) = channel(1); let dir = current_dir().unwrap(); let tt = create_tt(); - let console_ui = Arc::new(ConsoleUi::new(LogOptions { - current_dir: dir.clone(), - show_all, - log_detail, - log_level: 
log_level.map_or_else(|| IssueSeverity::Error, |l| l.0), - })); let task = tt.spawn_root_task(move || { let dir = dir.clone(); let args = args.clone(); - let console_ui = console_ui.clone(); let sender = sender.clone(); Box::pin(async move { let output = main_operation(TransientValue::new(dir.clone()), args.clone().into()); - let console_ui = (*console_ui).clone().cell(); - console_ui - .group_and_display_issues(TransientValue::new(output.into())) + let source = TransientValue::new(output.into()); + let issues = IssueVc::peek_issues_with_path(output) + .await? + .strongly_consistent() .await?; + let console_ui = ConsoleUiVc::new(TransientInstance::new(LogOptions { + current_dir: dir.clone(), + show_all, + log_detail, + log_level: log_level.map_or_else(|| IssueSeverity::Error, |l| l.0), + })); + console_ui + .as_issue_reporter() + .report_issues(TransientInstance::new(issues), source); + if has_return_value { let output_read_ref = output.await?; let output_iter = output_read_ref.iter().cloned(); diff --git a/crates/turbopack-cli-utils/src/issue.rs b/crates/turbopack-cli-utils/src/issue.rs index 7f9303dc2df73..52f8a6bd4f310 100644 --- a/crates/turbopack-cli-utils/src/issue.rs +++ b/crates/turbopack-cli-utils/src/issue.rs @@ -10,15 +10,17 @@ use std::{ use anyhow::{anyhow, Result}; use crossterm::style::{StyledContent, Stylize}; use owo_colors::{OwoColorize as _, Style}; -use turbo_tasks::{RawVc, TransientValue, TryJoinIterExt, ValueToString}; +use turbo_tasks::{ + RawVc, ReadRef, TransientInstance, TransientValue, TryJoinIterExt, ValueToString, +}; use turbo_tasks_fs::{ attach::AttachedFileSystemVc, source_context::{get_source_context, SourceContextLine}, to_sys_path, FileLinesContent, FileSystemPathVc, }; use turbopack_core::issue::{ - Issue, IssueProcessingPathItem, IssueSeverity, IssueVc, OptionIssueProcessingPathItemsVc, - PlainIssue, PlainIssueSource, + CapturedIssues, Issue, IssueProcessingPathItem, IssueReporter, IssueReporterVc, IssueSeverity, + OptionIssueProcessingPathItemsVc, PlainIssue, PlainIssueSource, }; #[derive(Clone, Copy, PartialEq, Eq, Debug)] @@ -412,41 +414,34 @@ impl PartialEq for ConsoleUi { } } -impl ConsoleUi { - pub fn new(options: LogOptions) -> Self { +#[turbo_tasks::value_impl] +impl ConsoleUiVc { + #[turbo_tasks::function] + pub fn new(options: TransientInstance) -> Self { ConsoleUi { - options, + options: (*options).clone(), seen: Arc::new(Mutex::new(SeenIssues::new())), } + .cell() } } -#[turbo_tasks::value(transparent)] -pub struct DisplayIssueState { - pub has_fatal: bool, - pub has_issues: bool, - pub has_new_issues: bool, -} - #[turbo_tasks::value_impl] -impl ConsoleUiVc { +impl IssueReporter for ConsoleUi { #[turbo_tasks::function] - pub async fn group_and_display_issues( - self, + async fn report_issues( + &self, + issues: TransientInstance>, source: TransientValue, - ) -> Result { - let source = source.into_value(); - let this = self.await?; - - let issues = IssueVc::peek_issues_with_path(source).await?; - let issues = issues.await?; - let &LogOptions { + ) -> Result<()> { + let issues = &*issues; + let LogOptions { ref current_dir, show_all, log_detail, log_level, .. 
- } = &this.options; + } = self.options; let mut grouped_issues: GroupedIssues = HashMap::new(); let issues = issues @@ -464,11 +459,11 @@ impl ConsoleUiVc { .iter() .map(|(_, _, _, id)| *id) .collect::>(); - let mut new_ids = this.seen.lock().unwrap().new_ids(source, issue_ids); - - let mut has_fatal = false; - let has_issues = !issues.is_empty(); - let has_new_issues = !new_ids.is_empty(); + let mut new_ids = self + .seen + .lock() + .unwrap() + .new_ids(source.into_value(), issue_ids); for (plain_issue, path, context, id) in issues { if !new_ids.remove(&id) { @@ -479,7 +474,6 @@ impl ConsoleUiVc { let context_path = make_relative_to_cwd(context, current_dir).await?; let category = &plain_issue.category; let title = &plain_issue.title; - has_fatal = severity == IssueSeverity::Fatal; let severity_map = grouped_issues .entry(severity) .or_insert_with(Default::default); @@ -612,12 +606,7 @@ impl ConsoleUiVc { } } - Ok(DisplayIssueState { - has_fatal, - has_issues, - has_new_issues, - } - .cell()) + Ok(()) } } diff --git a/crates/turbopack-core/src/issue/mod.rs b/crates/turbopack-core/src/issue/mod.rs index 6af1c8407171c..1ab17a3d38046 100644 --- a/crates/turbopack-core/src/issue/mod.rs +++ b/crates/turbopack-core/src/issue/mod.rs @@ -17,7 +17,8 @@ use futures::FutureExt; use turbo_tasks::{ emit, primitives::{BoolVc, StringVc, U64Vc}, - CollectiblesSource, ReadRef, TryJoinIterExt, ValueToString, ValueToStringVc, + CollectiblesSource, RawVc, ReadRef, TransientInstance, TransientValue, TryJoinIterExt, + ValueToString, ValueToStringVc, }; use turbo_tasks_fs::{ FileContent, FileContentReadRef, FileLine, FileLinesContent, FileSystemPathReadRef, @@ -340,6 +341,21 @@ pub struct CapturedIssues { processing_path: ItemIssueProcessingPathVc, } +impl CapturedIssues { + pub async fn has_fatal(&self) -> Result { + let mut has_fatal = false; + + for issue in self.issues.iter() { + let severity = *issue.severity().await?; + if severity == IssueSeverity::Fatal { + has_fatal = true; + break; + } + } + Ok(has_fatal) + } +} + #[turbo_tasks::value_impl] impl CapturedIssuesVc { #[turbo_tasks::function] @@ -568,3 +584,12 @@ impl PlainAssetVc { .cell()) } } + +#[turbo_tasks::value_trait] +pub trait IssueReporter { + fn report_issues( + &self, + issues: TransientInstance>, + source: TransientValue, + ); +} diff --git a/crates/turbopack-dev-server/Cargo.toml b/crates/turbopack-dev-server/Cargo.toml index ab19c1e9623c7..28959cea404ea 100644 --- a/crates/turbopack-dev-server/Cargo.toml +++ b/crates/turbopack-dev-server/Cargo.toml @@ -17,6 +17,7 @@ hyper-tungstenite = "0.8.1" indexmap = { workspace = true, features = ["serde"] } mime = "0.3.16" mime_guess = "2.0.4" +once_cell = "1.13.0" parking_lot = "0.12.1" pin-project-lite = "0.2.9" serde = "1.0.136" diff --git a/crates/turbopack-dev-server/src/http.rs b/crates/turbopack-dev-server/src/http.rs index 695e40d050fae..91b1851d28cd3 100644 --- a/crates/turbopack-dev-server/src/http.rs +++ b/crates/turbopack-dev-server/src/http.rs @@ -4,8 +4,7 @@ use hyper::{header::HeaderName, Request, Response}; use mime_guess::mime; use turbo_tasks::TransientInstance; use turbo_tasks_fs::{FileContent, FileContentReadRef}; -use turbopack_cli_utils::issue::ConsoleUiVc; -use turbopack_core::{asset::AssetContent, version::VersionedContent}; +use turbopack_core::{asset::AssetContent, issue::IssueReporterVc, version::VersionedContent}; use crate::source::{ request::SourceRequest, @@ -30,10 +29,10 @@ enum GetFromSourceResult { async fn get_from_source( source: ContentSourceVc, request: 
TransientInstance, - console_ui: ConsoleUiVc, + issue_repoter: IssueReporterVc, ) -> Result { Ok( - match &*resolve_source_request(source, request, console_ui).await? { + match &*resolve_source_request(source, request, issue_repoter).await? { ResolveSourceRequestResult::Static(static_content_vc) => { let static_content = static_content_vc.await?; if let AssetContent::File(file) = &*static_content.content.content().await? { @@ -60,11 +59,11 @@ async fn get_from_source( pub async fn process_request_with_content_source( source: ContentSourceVc, request: Request, - console_ui: ConsoleUiVc, + issue_reporter: IssueReporterVc, ) -> Result> { let original_path = request.uri().path().to_string(); let request = http_request_to_source_request(request).await?; - let result = get_from_source(source, TransientInstance::new(request), console_ui); + let result = get_from_source(source, TransientInstance::new(request), issue_reporter); match &*result.strongly_consistent().await? { GetFromSourceResult::Static { content, diff --git a/crates/turbopack-dev-server/src/lib.rs b/crates/turbopack-dev-server/src/lib.rs index edf9a4d1c0c07..9851b65ff5682 100644 --- a/crates/turbopack-dev-server/src/lib.rs +++ b/crates/turbopack-dev-server/src/lib.rs @@ -16,16 +16,17 @@ use std::{ time::{Duration, Instant}, }; -use anyhow::{bail, Context, Result}; +use anyhow::{anyhow, Context, Result}; use hyper::{ server::{conn::AddrIncoming, Builder}, service::{make_service_fn, service_fn}, Request, Response, Server, }; use turbo_tasks::{ - run_once, trace::TraceRawVcs, util::FormatDuration, RawVc, TransientValue, TurboTasksApi, + run_once, trace::TraceRawVcs, util::FormatDuration, CollectiblesSource, RawVc, + TransientInstance, TransientValue, TurboTasksApi, }; -use turbopack_cli_utils::issue::{ConsoleUi, ConsoleUiVc}; +use turbopack_core::issue::{IssueReporter, IssueReporterVc, IssueVc}; use self::{ source::{ContentSourceResultVc, ContentSourceVc}, @@ -66,21 +67,27 @@ pub struct DevServer { pub future: Pin> + Send + 'static>>, } -// Just print issues to console for now... -async fn handle_issues>( +async fn handle_issues + CollectiblesSource + Copy>( source: T, path: &str, operation: &str, - console_ui: ConsoleUiVc, + issue_reporter: IssueReporterVc, ) -> Result<()> { - let state = console_ui - .group_and_display_issues(TransientValue::new(source.into())) + let issues = IssueVc::peek_issues_with_path(source) + .await? + .strongly_consistent() .await?; - if state.has_fatal { - bail!("Fatal issue(s) occurred in {path} ({operation}") - } - Ok(()) + issue_reporter.report_issues( + TransientInstance::new(issues.clone()), + TransientValue::new(source.into()), + ); + + if issues.has_fatal().await? 
{ + Err(anyhow!("Fatal issue(s) occurred in {path} ({operation})")) + } else { + Ok(()) + } } impl DevServer { @@ -106,21 +113,21 @@ impl DevServerBuilder { self, turbo_tasks: Arc, source_provider: impl SourceProvider + Clone + Send + Sync, - console_ui: Arc, + get_issue_reporter: Arc IssueReporterVc + Send + Sync>, ) -> DevServer { let make_svc = make_service_fn(move |_| { let tt = turbo_tasks.clone(); let source_provider = source_provider.clone(); - let console_ui = console_ui.clone(); + let get_issue_reporter = get_issue_reporter.clone(); async move { let handler = move |request: Request| { - let console_ui = console_ui.clone(); let start = Instant::now(); let tt = tt.clone(); + let get_issue_reporter = get_issue_reporter.clone(); let source_provider = source_provider.clone(); let future = async move { run_once(tt.clone(), async move { - let console_ui = (*console_ui).clone().cell(); + let issue_reporter = get_issue_reporter(); if hyper_tungstenite::is_upgrade_request(&request) { let uri = request.uri(); @@ -130,7 +137,7 @@ impl DevServerBuilder { let (response, websocket) = hyper_tungstenite::upgrade(request, None)?; let update_server = - UpdateServer::new(source_provider, console_ui); + UpdateServer::new(source_provider, issue_reporter); update_server.run(&*tt, websocket); return Ok(response); } @@ -158,12 +165,12 @@ impl DevServerBuilder { let uri = request.uri(); let path = uri.path().to_string(); let source = source_provider.get_source(); - handle_issues(source, &path, "get source", console_ui).await?; + handle_issues(source, &path, "get source", issue_reporter).await?; let resolved_source = source.resolve_strongly_consistent().await?; let response = http::process_request_with_content_source( resolved_source, request, - console_ui, + issue_reporter, ) .await?; let status = response.status().as_u16(); diff --git a/crates/turbopack-dev-server/src/source/resolve.rs b/crates/turbopack-dev-server/src/source/resolve.rs index aa5cd38ab5303..96e827842382a 100644 --- a/crates/turbopack-dev-server/src/source/resolve.rs +++ b/crates/turbopack-dev-server/src/source/resolve.rs @@ -6,7 +6,7 @@ use std::{ use anyhow::{bail, Result}; use hyper::Uri; use turbo_tasks::{TransientInstance, Value}; -use turbopack_cli_utils::issue::ConsoleUiVc; +use turbopack_core::issue::IssueReporterVc; use super::{ headers::{HeaderValue, Headers}, @@ -36,7 +36,7 @@ pub enum ResolveSourceRequestResult { pub async fn resolve_source_request( source: ContentSourceVc, request: TransientInstance, - console_ui: ConsoleUiVc, + issue_reporter: IssueReporterVc, ) -> Result { let mut data = ContentSourceData::default(); let mut current_source = source; @@ -50,7 +50,7 @@ pub async fn resolve_source_request( result, &original_path, "get content from source", - console_ui, + issue_reporter, ) .await?; diff --git a/crates/turbopack-dev-server/src/update/server.rs b/crates/turbopack-dev-server/src/update/server.rs index 2ff147f7fca5c..d4719ac194199 100644 --- a/crates/turbopack-dev-server/src/update/server.rs +++ b/crates/turbopack-dev-server/src/update/server.rs @@ -12,8 +12,7 @@ use tokio::select; use tokio_stream::StreamMap; use turbo_tasks::{TransientInstance, TurboTasksApi}; use turbo_tasks_fs::json::parse_json_with_source_context; -use turbopack_cli_utils::issue::ConsoleUiVc; -use turbopack_core::version::Update; +use turbopack_core::{issue::IssueReporterVc, version::Update}; use super::{ protocol::{ClientMessage, ClientUpdateInstruction, Issue, ResourceIdentifier}, @@ -28,15 +27,15 @@ use crate::{ /// A server that listens 
for updates and sends them to connected clients.
 pub(crate) struct UpdateServer<P: SourceProvider> {
     source_provider: P,
-    console_ui: ConsoleUiVc,
+    issue_reporter: IssueReporterVc,
 }
 
 impl<P: SourceProvider + Clone + Send + Sync> UpdateServer<P> {
     /// Create a new update server with the given websocket and content source.
-    pub fn new(source_provider: P, console_ui: ConsoleUiVc) -> Self {
+    pub fn new(source_provider: P, issue_reporter: IssueReporterVc) -> Self {
         Self {
             source_provider,
-            console_ui,
+            issue_reporter,
         }
     }
 
@@ -68,7 +68,7 @@ impl<P: SourceProvider + Clone + Send + Sync> UpdateServer<P>
{ resolve_source_request( source, TransientInstance::new(request), - self.console_ui + self.issue_reporter ) } }; From 74f9f6eebc1c5d8323643313280f6d0d3fbcecbd Mon Sep 17 00:00:00 2001 From: OJ Kwon <1210596+kwonoj@users.noreply.github.com> Date: Tue, 14 Feb 2023 12:37:59 -0800 Subject: [PATCH 22/31] ci(workflow): store failed test path list (#3792) Trying to close WEB-589: along with full test results, stores latest test failure list to skip retrying for the known failures. --- .github/actions/next-integration-stat/index.js | 17 +++++++++++++---- .../actions/next-integration-stat/src/index.ts | 17 ++++++++++++----- .../upload-nextjs-integration-test-results.yml | 6 +++--- 3 files changed, 28 insertions(+), 12 deletions(-) diff --git a/.github/actions/next-integration-stat/index.js b/.github/actions/next-integration-stat/index.js index d9f20d64fbb6c..f68f6b23a5477 100644 --- a/.github/actions/next-integration-stat/index.js +++ b/.github/actions/next-integration-stat/index.js @@ -16572,6 +16572,11 @@ .map((t) => (t.length > 5 ? `\t- ${t}` : t)) .join(" \n")}`; } + console.log( + "Newly failed tests", + JSON.stringify(newFailedTests, null, 2) + ); + console.log("Fixed tests", JSON.stringify(fixedTests, null, 2)); // Store a json payload to share via slackapi/slack-github-action into Slack channel if (shouldShareTestSummaryToSlack) { let resultsSummary = ""; @@ -16777,14 +16782,18 @@ ]; const isMultipleComments = comments.length > 1; try { - if (!prNumber) { - return; - } // Store the list of failed test paths to a file fs.writeFileSync( "./failed-test-path-list.json", - JSON.stringify(failedTestLists, null, 2) + JSON.stringify( + failedTestLists.filter((x) => x.length > 5), + null, + 2 + ) ); + if (!prNumber) { + return; + } if (failedJobResults.result.length === 0) { console.log("No failed test results found :tada:"); yield postCommentAsync( diff --git a/.github/actions/next-integration-stat/src/index.ts b/.github/actions/next-integration-stat/src/index.ts index b7d0b5515ceb0..ea139cc3f60e4 100644 --- a/.github/actions/next-integration-stat/src/index.ts +++ b/.github/actions/next-integration-stat/src/index.ts @@ -679,6 +679,9 @@ function getTestSummary( .join(" \n")}`; } + console.log("Newly failed tests", JSON.stringify(newFailedTests, null, 2)); + console.log("Fixed tests", JSON.stringify(fixedTests, null, 2)); + // Store a json payload to share via slackapi/slack-github-action into Slack channel if (shouldShareTestSummaryToSlack) { let resultsSummary = ""; @@ -882,16 +885,20 @@ async function run() { const isMultipleComments = comments.length > 1; try { - if (!prNumber) { - return; - } - // Store the list of failed test paths to a file fs.writeFileSync( "./failed-test-path-list.json", - JSON.stringify(failedTestLists, null, 2) + JSON.stringify( + failedTestLists.filter((x) => x.length > 5), + null, + 2 + ) ); + if (!prNumber) { + return; + } + if (failedJobResults.result.length === 0) { console.log("No failed test results found :tada:"); await postCommentAsync( diff --git a/.github/workflows/upload-nextjs-integration-test-results.yml b/.github/workflows/upload-nextjs-integration-test-results.yml index 6e5c7f3568918..dd5983549909b 100644 --- a/.github/workflows/upload-nextjs-integration-test-results.yml +++ b/.github/workflows/upload-nextjs-integration-test-results.yml @@ -34,7 +34,7 @@ jobs: - name: Print test results run: | ls -al ./test-results/main - cat ./test-results/main/nextjs-test-results.json + echo "Print failed test path list:" cat 
./test-results/main/failed-test-path-list.json echo "NEXTJS_VERSION=$(cat ./test-results/main/nextjs-test-results.json | jq .nextjsVersion | tr -d '"' | cut -d ' ' -f2)" >> $GITHUB_ENV echo "SHA_SHORT=$(git rev-parse --short HEAD)" >> $GITHUB_ENV @@ -46,8 +46,8 @@ jobs: run: | echo "Configured test result subpath for ${{ env.RESULT_SUBPATH }} / ${{ env.NEXTJS_VERSION }} / ${{ env.SHA_SHORT }}" mkdir -p test-results/${{ env.RESULT_SUBPATH }} - mv test-results/main/nextjs-test-results.json test-results/${{ env.RESULT_SUBPATH }}/$(date '+%Y%m%d%H%M')-${{ env.NEXTJS_VERSION }}-${{ env.SHA_SHORT }}.json - mv -f test-results/main/failed-test-path-list.json test-results/${{ env.RESULT_SUBPATH }}/failed-test-path-list.json + mv -v test-results/main/nextjs-test-results.json test-results/${{ env.RESULT_SUBPATH }}/$(date '+%Y%m%d%H%M')-${{ env.NEXTJS_VERSION }}-${{ env.SHA_SHORT }}.json + mv -fvn test-results/main/failed-test-path-list.json test-results/${{ env.RESULT_SUBPATH }}/failed-test-path-list.json ls -al ./test-results ls -al ./test-results/${{ env.RESULT_SUBPATH }} From d66abefa5dfb82b03cdfbe206ed47a170b7de0be Mon Sep 17 00:00:00 2001 From: Mehul Kar Date: Tue, 14 Feb 2023 16:20:17 -0800 Subject: [PATCH 23/31] Add needs:triage label to new issues (#3776) --- .github/ISSUE_TEMPLATE/0-turborepo-bug-report.yml | 2 +- .github/ISSUE_TEMPLATE/1-turbopack-bug-report.yml | 2 +- .github/ISSUE_TEMPLATE/2-feature-request.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/0-turborepo-bug-report.yml b/.github/ISSUE_TEMPLATE/0-turborepo-bug-report.yml index d0814520dd721..141f2b34c0516 100644 --- a/.github/ISSUE_TEMPLATE/0-turborepo-bug-report.yml +++ b/.github/ISSUE_TEMPLATE/0-turborepo-bug-report.yml @@ -1,7 +1,7 @@ name: Turborepo Bug Report description: Create a bug report for the Turborepo team title: "[turborepo] " -labels: ["kind: bug", "area: turborepo"] +labels: ["kind: bug", "area: turborepo", "needs: triage"] body: - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/1-turbopack-bug-report.yml b/.github/ISSUE_TEMPLATE/1-turbopack-bug-report.yml index b551734e29963..a16a2620ca5a7 100644 --- a/.github/ISSUE_TEMPLATE/1-turbopack-bug-report.yml +++ b/.github/ISSUE_TEMPLATE/1-turbopack-bug-report.yml @@ -3,7 +3,7 @@ name: Turbopack Bug Report description: Create a bug report for the Turbopack team title: "[turbopack] " -labels: ["kind: bug", "area: turbopack"] +labels: ["kind: bug", "area: turbopack", "needs: triage"] body: - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/2-feature-request.yml b/.github/ISSUE_TEMPLATE/2-feature-request.yml index aaeae8510683c..398f21ca88b48 100644 --- a/.github/ISSUE_TEMPLATE/2-feature-request.yml +++ b/.github/ISSUE_TEMPLATE/2-feature-request.yml @@ -1,6 +1,6 @@ name: Feature Request description: Create a feature request -labels: ["story"] +labels: ["story", "needs: triage"] body: - type: markdown attributes: From 1274b4d72ec4fcc4d21cef27f11f1b92fddc62c0 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Tue, 14 Feb 2023 16:56:17 -0800 Subject: [PATCH 24/31] Change error log line to debug, it's not an error (#3808) --- cli/internal/fs/fs.go | 7 +++++++ cli/internal/fs/fs_windows_test.go | 18 ++++++++++++++++++ cli/internal/globwatcher/globwatcher.go | 2 +- 3 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 cli/internal/fs/fs_windows_test.go diff --git a/cli/internal/fs/fs.go b/cli/internal/fs/fs.go index 4bf072e929677..77804c0708010 100644 --- a/cli/internal/fs/fs.go +++ 
b/cli/internal/fs/fs.go @@ -6,6 +6,7 @@ import ( "log" "os" "path/filepath" + "runtime" "strings" "github.com/pkg/errors" @@ -39,6 +40,12 @@ var nonRelativeSentinel string = ".." + string(filepath.Separator) // DirContainsPath returns true if the path 'target' is contained within 'dir' // Expects both paths to be absolute and does not verify that either path exists. func DirContainsPath(dir string, target string) (bool, error) { + // On windows, trying to get a relative path between files on different volumes + // is an error. We don't care about the error, it's good enough for us to say + // that one path doesn't contain the other if they're on different volumes. + if runtime.GOOS == "windows" && filepath.VolumeName(dir) != filepath.VolumeName(target) { + return false, nil + } // In Go, filepath.Rel can return a path that starts with "../" or equivalent. // Checking filesystem-level contains can get extremely complicated // (see https://github.com/golang/dep/blob/f13583b555deaa6742f141a9c1185af947720d60/internal/fs/fs.go#L33) diff --git a/cli/internal/fs/fs_windows_test.go b/cli/internal/fs/fs_windows_test.go new file mode 100644 index 0000000000000..4e71e2c816b61 --- /dev/null +++ b/cli/internal/fs/fs_windows_test.go @@ -0,0 +1,18 @@ +//go:build windows +// +build windows + +package fs + +import "testing" + +func TestDifferentVolumes(t *testing.T) { + p1 := "C:\\some\\path" + p2 := "D:\\other\\path" + contains, err := DirContainsPath(p1, p2) + if err != nil { + t.Errorf("DirContainsPath got error %v, want ", err) + } + if contains { + t.Errorf("DirContainsPath got true, want false") + } +} diff --git a/cli/internal/globwatcher/globwatcher.go b/cli/internal/globwatcher/globwatcher.go index 450dbae2021b3..9226cfaeed983 100644 --- a/cli/internal/globwatcher/globwatcher.go +++ b/cli/internal/globwatcher/globwatcher.go @@ -135,7 +135,7 @@ func (g *GlobWatcher) OnFileWatchEvent(ev filewatcher.Event) { absolutePath := ev.Path repoRelativePath, err := g.repoRoot.RelativePathString(absolutePath.ToStringDuringMigration()) if err != nil { - g.logger.Error(fmt.Sprintf("could not get relative path from %v to %v: %v", g.repoRoot, absolutePath, err)) + g.logger.Debug(fmt.Sprintf("could not get relative path from %v to %v: %v", g.repoRoot, absolutePath, err)) return } g.mu.Lock() From d9ec4a1a127481533789c902a44a006a97e6dd29 Mon Sep 17 00:00:00 2001 From: Mehul Kar Date: Tue, 14 Feb 2023 19:40:12 -0800 Subject: [PATCH 25/31] Remove TIMING=1 from eslint command in examples and create-turbo starter (#3813) This is not a cross-platform compatible way to set an environment variable, and it is not critical to have this as part of the examples and starter template. 
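If a project still wants eslint's rule-timing profile, the portable route is to set the
variable through a wrapper rather than shell-prefix syntax. A minimal sketch, assuming the
third-party `cross-env` package were added as a devDependency (none of these examples ship it):

```json
{
  "scripts": {
    "lint": "cross-env TIMING=1 eslint \"src/**/*.ts*\""
  }
}
```

`cross-env` normalizes the assignment so the same script works under cmd.exe, PowerShell,
and POSIX shells.
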
Fixes https://github.com/vercel/turbo/issues/3797 --- examples/basic/packages/ui/package.json | 2 +- examples/design-system/packages/acme-core/package.json | 2 +- examples/design-system/packages/acme-utils/package.json | 2 +- examples/kitchen-sink/apps/admin/package.json | 2 +- examples/kitchen-sink/apps/api/package.json | 2 +- examples/kitchen-sink/apps/storefront/package.json | 2 +- examples/kitchen-sink/packages/logger/package.json | 2 +- examples/kitchen-sink/packages/ui/package.json | 2 +- examples/with-changesets/apps/docs/package.json | 2 +- examples/with-changesets/packages/acme-core/package.json | 2 +- examples/with-changesets/packages/acme-utils/package.json | 2 +- examples/with-create-react-app/apps/docs/package.json | 2 +- examples/with-create-react-app/apps/web/package.json | 2 +- examples/with-create-react-app/packages/ui/package.json | 2 +- examples/with-docker/apps/api/package.json | 2 +- examples/with-docker/packages/logger/package.json | 2 +- examples/with-docker/packages/ui/package.json | 2 +- examples/with-npm/packages/ui/package.json | 2 +- examples/with-prisma/packages/database/package.json | 2 +- examples/with-svelte/apps/docs/package.json | 2 +- examples/with-svelte/apps/web/package.json | 2 +- examples/with-vite/apps/docs/package.json | 2 +- examples/with-vite/apps/web/package.json | 2 +- examples/with-vite/packages/ui/package.json | 2 +- examples/with-yarn/packages/ui/package.json | 2 +- .../create-turbo/templates/_shared_ts/packages/ui/package.json | 2 +- 26 files changed, 26 insertions(+), 26 deletions(-) diff --git a/examples/basic/packages/ui/package.json b/examples/basic/packages/ui/package.json index 8914852733edb..f96974331bf35 100644 --- a/examples/basic/packages/ui/package.json +++ b/examples/basic/packages/ui/package.json @@ -5,7 +5,7 @@ "types": "./index.tsx", "license": "MIT", "scripts": { - "lint": "TIMING=1 eslint \"**/*.ts*\"" + "lint": "eslint \"**/*.ts*\"" }, "devDependencies": { "@types/react": "^17.0.37", diff --git a/examples/design-system/packages/acme-core/package.json b/examples/design-system/packages/acme-core/package.json index 9662ecf1b2ed8..ebe78925a3e72 100644 --- a/examples/design-system/packages/acme-core/package.json +++ b/examples/design-system/packages/acme-core/package.json @@ -12,7 +12,7 @@ "scripts": { "build": "tsup src/index.tsx --format esm,cjs --dts --external react", "dev": "tsup src/index.tsx --format esm,cjs --watch --dts --external react", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "eslint \"src/**/*.ts*\"", "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist" }, "devDependencies": { diff --git a/examples/design-system/packages/acme-utils/package.json b/examples/design-system/packages/acme-utils/package.json index 7d979b699cfef..debaa2acfde42 100644 --- a/examples/design-system/packages/acme-utils/package.json +++ b/examples/design-system/packages/acme-utils/package.json @@ -12,7 +12,7 @@ "scripts": { "build": "tsup src/index.tsx --format esm,cjs --dts --external react", "dev": "tsup src/index.tsx --format esm,cjs --watch --dts --external react", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "eslint \"src/**/*.ts*\"", "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist" }, "devDependencies": { diff --git a/examples/kitchen-sink/apps/admin/package.json b/examples/kitchen-sink/apps/admin/package.json index 276b664042548..727d9693740fc 100644 --- a/examples/kitchen-sink/apps/admin/package.json +++ b/examples/kitchen-sink/apps/admin/package.json @@ -7,7 +7,7 @@ "clean": "rm -rf 
.turbo && rm -rf node_modules && rm -rf dist", "deploy": "vercel deploy dist --team=turborepo --confirm", "dev": "vite --host 0.0.0.0 --port 3001 --clearScreen false", - "lint": "tsc --noEmit && TIMING=1 eslint \"src/**/*.ts*\"" + "lint": "tsc --noEmit && eslint \"src/**/*.ts*\"" }, "dependencies": { "react": "^18.2.0", diff --git a/examples/kitchen-sink/apps/api/package.json b/examples/kitchen-sink/apps/api/package.json index cbf1ae2c5f0cf..0c2a0c0dc00cc 100644 --- a/examples/kitchen-sink/apps/api/package.json +++ b/examples/kitchen-sink/apps/api/package.json @@ -6,7 +6,7 @@ "build": "tsup src/index.ts --format cjs", "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist", "dev": "tsup src/index.ts --format cjs --watch --onSuccess \"node dist/index.js\"", - "lint": "tsc --noEmit && TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "tsc --noEmit && eslint \"src/**/*.ts*\"", "start": "node dist/index.js", "test": "jest --detectOpenHandles" }, diff --git a/examples/kitchen-sink/apps/storefront/package.json b/examples/kitchen-sink/apps/storefront/package.json index e10b1d734bc7e..08800fd70f0cf 100644 --- a/examples/kitchen-sink/apps/storefront/package.json +++ b/examples/kitchen-sink/apps/storefront/package.json @@ -6,7 +6,7 @@ "build": "next build", "clean": "rm -rf .next", "dev": "next dev -p 3002", - "lint": "TIMING=1 next lint", + "lint": "next lint", "start": "next start " }, "dependencies": { diff --git a/examples/kitchen-sink/packages/logger/package.json b/examples/kitchen-sink/packages/logger/package.json index 7d0815c01038e..4b082c7996842 100644 --- a/examples/kitchen-sink/packages/logger/package.json +++ b/examples/kitchen-sink/packages/logger/package.json @@ -12,7 +12,7 @@ "build": "tsc", "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist", "dev": "tsc -w", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "eslint \"src/**/*.ts*\"", "test": "jest" }, "jest": { diff --git a/examples/kitchen-sink/packages/ui/package.json b/examples/kitchen-sink/packages/ui/package.json index 1f815999b78a8..2241c4067a15c 100644 --- a/examples/kitchen-sink/packages/ui/package.json +++ b/examples/kitchen-sink/packages/ui/package.json @@ -14,7 +14,7 @@ "build": "tsup src/index.tsx --format esm,cjs --dts --external react", "clean": "rm -rf dist", "dev": "tsup src/index.tsx --format esm,cjs --watch --dts --external react", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "eslint \"src/**/*.ts*\"", "test": "jest" }, "jest": { diff --git a/examples/with-changesets/apps/docs/package.json b/examples/with-changesets/apps/docs/package.json index 204b146b2b726..ad53e236df72e 100644 --- a/examples/with-changesets/apps/docs/package.json +++ b/examples/with-changesets/apps/docs/package.json @@ -6,7 +6,7 @@ "build": "next build", "start": "next start ", "dev": "next dev -p 3002", - "lint": "TIMING=1 next lint", + "lint": "next lint", "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf .next" }, "dependencies": { diff --git a/examples/with-changesets/packages/acme-core/package.json b/examples/with-changesets/packages/acme-core/package.json index 1a9a7de346acf..8aa79ba75bd57 100644 --- a/examples/with-changesets/packages/acme-core/package.json +++ b/examples/with-changesets/packages/acme-core/package.json @@ -12,7 +12,7 @@ "scripts": { "build": "tsup src/index.tsx --format esm,cjs --dts --external react", "dev": "tsup src/index.tsx --format esm,cjs --watch --dts --external react", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "eslint \"src/**/*.ts*\"", "clean": "rm -rf .turbo && 
rm -rf node_modules && rm -rf dist" }, "devDependencies": { diff --git a/examples/with-changesets/packages/acme-utils/package.json b/examples/with-changesets/packages/acme-utils/package.json index 7bab41f0c14f5..0c540363236b2 100644 --- a/examples/with-changesets/packages/acme-utils/package.json +++ b/examples/with-changesets/packages/acme-utils/package.json @@ -12,7 +12,7 @@ "scripts": { "build": "tsup src/index.tsx --format esm,cjs --dts --external react", "dev": "tsup src/index.tsx --format esm,cjs --watch --dts --external react", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "eslint \"src/**/*.ts*\"", "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist" }, "devDependencies": { diff --git a/examples/with-create-react-app/apps/docs/package.json b/examples/with-create-react-app/apps/docs/package.json index a9aa4e435ca20..97be3e9ae81d7 100644 --- a/examples/with-create-react-app/apps/docs/package.json +++ b/examples/with-create-react-app/apps/docs/package.json @@ -8,7 +8,7 @@ "build": "react-scripts build", "test": "react-scripts test", "eject": "react-scripts eject", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "eslint \"src/**/*.ts*\"", "clean": "rm -rf build" }, "dependencies": { diff --git a/examples/with-create-react-app/apps/web/package.json b/examples/with-create-react-app/apps/web/package.json index 84d5a98ff1cf5..768d1913f359d 100644 --- a/examples/with-create-react-app/apps/web/package.json +++ b/examples/with-create-react-app/apps/web/package.json @@ -8,7 +8,7 @@ "build": "react-scripts build", "test": "react-scripts test", "eject": "react-scripts eject", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "eslint \"src/**/*.ts*\"", "clean": "rm -rf build" }, "dependencies": { diff --git a/examples/with-create-react-app/packages/ui/package.json b/examples/with-create-react-app/packages/ui/package.json index e8da2fd96c779..e9aedabb3c693 100644 --- a/examples/with-create-react-app/packages/ui/package.json +++ b/examples/with-create-react-app/packages/ui/package.json @@ -10,7 +10,7 @@ "build": "tsup src/index.tsx --format cjs --dts --external react", "clean": "rm -rf dist", "dev": "tsup src/index.tsx --format cjs --watch --dts --external react", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"" + "lint": "eslint \"src/**/*.ts*\"" }, "devDependencies": { "@types/react": "^17.0.13", diff --git a/examples/with-docker/apps/api/package.json b/examples/with-docker/apps/api/package.json index 2379d936d364a..abe79b4864495 100644 --- a/examples/with-docker/apps/api/package.json +++ b/examples/with-docker/apps/api/package.json @@ -6,7 +6,7 @@ "build": "tsc", "clean": "rm -rf dist", "dev": "nodemon --exec \"node -r esbuild-register ./src/index.ts\" -e .ts", - "lint": "tsc --noEmit && TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "tsc --noEmit && eslint \"src/**/*.ts*\"", "start": "node -r esbuild-register ./src/index.ts", "test": "jest --detectOpenHandles" }, diff --git a/examples/with-docker/packages/logger/package.json b/examples/with-docker/packages/logger/package.json index d29cc6bc3c39b..17079aeacc90b 100644 --- a/examples/with-docker/packages/logger/package.json +++ b/examples/with-docker/packages/logger/package.json @@ -12,7 +12,7 @@ "build": "tsc", "clean": "rm -rf dist", "dev": "tsc -w", - "lint": "TIMING=1 eslint \"src/**/*.ts*\"", + "lint": "eslint \"src/**/*.ts*\"", "test": "jest" }, "jest": { diff --git a/examples/with-docker/packages/ui/package.json b/examples/with-docker/packages/ui/package.json index 07f541aa556d6..e687c7728296c 100644 --- 
a/examples/with-docker/packages/ui/package.json +++ b/examples/with-docker/packages/ui/package.json @@ -5,7 +5,7 @@ "main": "./index.tsx", "types": "./index.tsx", "scripts": { - "lint": "TIMING=1 eslint \"**/*.ts*\"" + "lint": "eslint \"**/*.ts*\"" }, "devDependencies": { "@types/react": "^17.0.37", diff --git a/examples/with-npm/packages/ui/package.json b/examples/with-npm/packages/ui/package.json index e2c5284be884c..ce16212d086bf 100644 --- a/examples/with-npm/packages/ui/package.json +++ b/examples/with-npm/packages/ui/package.json @@ -5,7 +5,7 @@ "types": "./index.tsx", "license": "MIT", "scripts": { - "lint": "TIMING=1 eslint \"**/*.ts*\"" + "lint": "eslint \"**/*.ts*\"" }, "devDependencies": { "@types/react": "^17.0.37", diff --git a/examples/with-prisma/packages/database/package.json b/examples/with-prisma/packages/database/package.json index 4331724570c25..b7c046c9b7200 100644 --- a/examples/with-prisma/packages/database/package.json +++ b/examples/with-prisma/packages/database/package.json @@ -18,7 +18,7 @@ "dev": "tsup --watch", "format": "prisma format", "generate": "prisma generate", - "lint": "TIMING=1 eslint \"src/**/*.ts\"", + "lint": "eslint \"src/**/*.ts\"", "prebuild": "npm run generate", "predev": "npm run generate", "studio": "prisma studio" diff --git a/examples/with-svelte/apps/docs/package.json b/examples/with-svelte/apps/docs/package.json index 273abb8fb385e..6e633e9220618 100644 --- a/examples/with-svelte/apps/docs/package.json +++ b/examples/with-svelte/apps/docs/package.json @@ -8,7 +8,7 @@ "preview": "vite preview", "check": "svelte-check --tsconfig ./tsconfig.json", "check:watch": "svelte-check --tsconfig ./tsconfig.json --watch", - "lint": "prettier --check --ignore-path=../../.prettierignore . && TIMING=1 eslint \"src\"", + "lint": "prettier --check --ignore-path=../../.prettierignore . && eslint \"src\"", "format": "prettier --write --ignore-path=../../.prettierignore ." }, "dependencies": { diff --git a/examples/with-svelte/apps/web/package.json b/examples/with-svelte/apps/web/package.json index c09f2aee47525..9dee6520b3c73 100644 --- a/examples/with-svelte/apps/web/package.json +++ b/examples/with-svelte/apps/web/package.json @@ -8,7 +8,7 @@ "preview": "vite preview", "check": "svelte-check --tsconfig ./tsconfig.json", "check:watch": "svelte-check --tsconfig ./tsconfig.json --watch", - "lint": "prettier --check --ignore-path=../../.prettierignore . && TIMING=1 eslint \"src\"", + "lint": "prettier --check --ignore-path=../../.prettierignore . && eslint \"src\"", "format": "prettier --write --ignore-path=../../.prettierignore ." 
}, "dependencies": { diff --git a/examples/with-vite/apps/docs/package.json b/examples/with-vite/apps/docs/package.json index fd0719e00074f..c9adc4b0c7266 100644 --- a/examples/with-vite/apps/docs/package.json +++ b/examples/with-vite/apps/docs/package.json @@ -7,7 +7,7 @@ "dev": "vite", "build": "tsc && vite build", "preview": "vite preview", - "lint": "TIMING=1 eslint \"src/**/*.ts\"" + "lint": "eslint \"src/**/*.ts\"" }, "dependencies": { "ui": "workspace:*" } diff --git a/examples/with-vite/apps/web/package.json b/examples/with-vite/apps/web/package.json index 4c1045902d828..de475af9e05a5 100644 --- a/examples/with-vite/apps/web/package.json +++ b/examples/with-vite/apps/web/package.json @@ -7,7 +7,7 @@ "dev": "vite", "build": "tsc && vite build", "preview": "vite preview", - "lint": "TIMING=1 eslint \"src/**/*.ts\"" + "lint": "eslint \"src/**/*.ts\"" }, "dependencies": { "ui": "workspace:*" } diff --git a/examples/with-vite/packages/ui/package.json b/examples/with-vite/packages/ui/package.json index 12860b04721b7..3db9e0d9f293a 100644 --- a/examples/with-vite/packages/ui/package.json +++ b/examples/with-vite/packages/ui/package.json @@ -5,7 +5,7 @@ "types": "./index.ts", "license": "MIT", "scripts": { - "lint": "TIMING=1 eslint \"**/*.ts\"" + "lint": "eslint \"**/*.ts\"" }, "devDependencies": { "eslint": "^7.32.0", diff --git a/examples/with-yarn/packages/ui/package.json b/examples/with-yarn/packages/ui/package.json index e2c5284be884c..ce16212d086bf 100644 --- a/examples/with-yarn/packages/ui/package.json +++ b/examples/with-yarn/packages/ui/package.json @@ -5,7 +5,7 @@ "types": "./index.tsx", "license": "MIT", "scripts": { - "lint": "TIMING=1 eslint \"**/*.ts*\"" + "lint": "eslint \"**/*.ts*\"" }, "devDependencies": { "@types/react": "^17.0.37", diff --git a/packages/create-turbo/templates/_shared_ts/packages/ui/package.json b/packages/create-turbo/templates/_shared_ts/packages/ui/package.json index 60722717d6116..2b92d57c91dba 100644 --- a/packages/create-turbo/templates/_shared_ts/packages/ui/package.json +++ b/packages/create-turbo/templates/_shared_ts/packages/ui/package.json @@ -5,7 +5,7 @@ "types": "./index.tsx", "license": "MIT", "scripts": { - "lint": "TIMING=1 eslint \"**/*.ts*\"" + "lint": "eslint \"**/*.ts*\"" }, "devDependencies": { "@types/react": "^17.0.37", From 3c70451cfcb16859c82469bc166b3b22c415e633 Mon Sep 17 00:00:00 2001 From: Mehul Kar Date: Tue, 14 Feb 2023 19:43:57 -0800 Subject: [PATCH 26/31] add test to verify that turbo.json can have comments (#3812) This doesn't close #3793, but verifies that comments work --- .vscode/settings.json | 3 ++- cli/integration_tests/basic_monorepo/monorepo/turbo.json | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 34c53d4de4149..2e987dcddb62d 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -12,7 +12,8 @@ }, "go.lintTool": "golangci-lint", "files.associations": { - "libturbo.h": "c" + "libturbo.h": "c", + "turbo.json": "jsonc" }, "[cram]": { "editor.trimAutoWhitespace": false, diff --git a/cli/integration_tests/basic_monorepo/monorepo/turbo.json b/cli/integration_tests/basic_monorepo/monorepo/turbo.json index dae8f204b7d3e..9f192ba360728 100644 --- a/cli/integration_tests/basic_monorepo/monorepo/turbo.json +++ b/cli/integration_tests/basic_monorepo/monorepo/turbo.json @@ -4,7 +4,7 @@ "build": { "outputs": [] }, - + // this comment verifies that turbo can read .json files with comments "my-app#build": { "outputs": ["banana.txt", "apple.json"]
} From b9d1cf28a4c07ac986e0b0c983e874568550bde6 Mon Sep 17 00:00:00 2001 From: Mehul Kar Date: Tue, 14 Feb 2023 22:10:13 -0800 Subject: [PATCH 27/31] Use non-high intensity yellow for warnings for readability on white background (#3810) Closes #3799. | bg | before | after | | --- | --- | --- | | dark | CleanShot 2023-02-14 at 16 07 13@2x | CleanShot 2023-02-14 at 16 07 09@2x | | light | CleanShot 2023-02-14 at 16 06 59@2x | CleanShot 2023-02-14 at 16 06 56@2x | the new color is a bit more dull, but it is also more visible. There are a few other options we can pick from without spending too much more time on this: https://github.com/fatih/color/blob/3d5097c6b003cf3a784e670ddb79710cf46e9a07/color.go#L68-L90 --- cli/internal/ui/ui.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cli/internal/ui/ui.go b/cli/internal/ui/ui.go index bbb7dcff97eda..9084c76c5a1c9 100644 --- a/cli/internal/ui/ui.go +++ b/cli/internal/ui/ui.go @@ -115,7 +115,7 @@ func BuildColoredUi(colorMode ColorMode) *cli.ColoredUi { }, OutputColor: cli.UiColorNone, InfoColor: cli.UiColorNone, - WarnColor: cli.UiColorYellow, + WarnColor: cli.UiColor{Code: int(color.FgYellow), Bold: false}, ErrorColor: cli.UiColorRed, } } From 44d6a46066ab3cb1d30500c79461125c82b75e3b Mon Sep 17 00:00:00 2001 From: Alex Kirszenberg Date: Wed, 15 Feb 2023 11:36:09 +0100 Subject: [PATCH 28/31] Add support for CSS module composes: and fix CSS precedence issues (#3771) Adds support for the CSS module `composes:` rule. This also fixes a large issue with our chunk ordering and CSS precedence, where the BFS order of our chunks did not match the expected topological ordering. --- Cargo.lock | 1 + Cargo.toml | 1 + crates/next-core/Cargo.toml | 2 +- crates/next-core/src/manifest.rs | 28 +- crates/turbo-tasks/src/graph/get_children.rs | 121 +++++ crates/turbo-tasks/src/graph/graph_store.rs | 10 + .../turbo-tasks/src/graph/graph_traversal.rs | 131 +++++ crates/turbo-tasks/src/graph/mod.rs | 10 + .../src/graph/non_deterministic.rs | 31 ++ .../src/graph/reverse_topological.rs | 118 +++++ crates/turbo-tasks/src/join_iter_ext.rs | 122 +---- crates/turbo-tasks/src/lib.rs | 3 +- crates/turbopack-core/src/chunk/mod.rs | 96 ++-- crates/turbopack-core/src/reference_type.rs | 1 + crates/turbopack-create-test-app/Cargo.toml | 2 +- crates/turbopack-css/Cargo.toml | 1 + crates/turbopack-css/src/asset.rs | 21 +- crates/turbopack-css/src/chunk/mod.rs | 1 + crates/turbopack-css/src/chunk/optimize.rs | 126 +++-- crates/turbopack-css/src/chunk/writer.rs | 31 +- crates/turbopack-css/src/module_asset.rs | 457 +++++++++++++++--- .../turbopack-css/src/references/compose.rs | 53 ++ crates/turbopack-css/src/references/mod.rs | 1 + crates/turbopack-ecmascript/Cargo.toml | 2 +- .../tests/snapshot/css/css/input/style.css | 4 + .../snapshot/css/css/input/style.module.css | 10 + .../css/css/output/8697f_foo_style.module.css | 2 +- ...sts_snapshot_css_css_input_index_531223.js | 2 + ...snapshot_css_css_input_index_531223.js.map | 4 +- ...sts_tests_snapshot_css_css_input_style.css | 9 + ...tests_snapshot_css_css_input_style.css.map | 10 +- ...ts_snapshot_css_css_input_style.module.css | 8 +- ...napshot_css_css_input_style.module.css.map | 4 +- ...hot_emotion_emotion_input_index_6545dc.js} | 6 +- ...emotion_emotion_input_index_6545dc.js.map} | 0 ...sforms_input_packages_app_index_968e59.js} | 6 +- ...ms_input_packages_app_index_968e59.js.map} | 0 ...ansforms_preset_env_input_index_311eca.js} | 6 +- ...orms_preset_env_input_index_311eca.js.map} | 0 39 files changed, 1125 
insertions(+), 316 deletions(-) create mode 100644 crates/turbo-tasks/src/graph/get_children.rs create mode 100644 crates/turbo-tasks/src/graph/graph_store.rs create mode 100644 crates/turbo-tasks/src/graph/graph_traversal.rs create mode 100644 crates/turbo-tasks/src/graph/mod.rs create mode 100644 crates/turbo-tasks/src/graph/non_deterministic.rs create mode 100644 crates/turbo-tasks/src/graph/reverse_topological.rs create mode 100644 crates/turbopack-css/src/references/compose.rs rename crates/turbopack-tests/tests/snapshot/emotion/emotion/output/{crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_f0bbb5.js => crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_6545dc.js} (98%) rename crates/turbopack-tests/tests/snapshot/emotion/emotion/output/{crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_f0bbb5.js.map => crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_6545dc.js.map} (100%) rename crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/{a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_a3868e.js => a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_968e59.js} (98%) rename crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/{a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_a3868e.js.map => a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_968e59.js.map} (100%) rename crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/{79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_3894a9.js => 79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_311eca.js} (99%) rename crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/{79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_3894a9.js.map => 79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_311eca.js.map} (100%) diff --git a/Cargo.lock b/Cargo.lock index 48da205fb4ebc..18256de23872c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7755,6 +7755,7 @@ dependencies = [ "anyhow", "async-trait", "indexmap", + "indoc", "once_cell", "regex", "serde", diff --git a/Cargo.toml b/Cargo.toml index 33df72cdf08b7..cb524233d8755 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -92,6 +92,7 @@ opt-level = 3 # ref: https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#inheriting-a-dependency-from-a-workspace [workspace.dependencies] indexmap = { version = "1.9.2" } +indoc = "1.0" # Keep consistent with preset_env_base through swc_core browserslist-rs = { version = "0.12.2" } swc_core = { version = "0.59.26" } diff --git a/crates/next-core/Cargo.toml b/crates/next-core/Cargo.toml index 7688666af2def..9cc7e894740cd 100644 --- a/crates/next-core/Cargo.toml +++ b/crates/next-core/Cargo.toml @@ -12,7 +12,7 @@ bench = false anyhow = "1.0.47" auto-hash-map = { path = "../auto-hash-map" } indexmap = { workspace = true, features = ["serde"] } -indoc = "1.0" +indoc = { workspace = true } mime = "0.3.16" once_cell = "1.13.0" qstring = "0.7.2" diff --git a/crates/next-core/src/manifest.rs b/crates/next-core/src/manifest.rs index 21a438b671306..795f96de125e5 100644 --- a/crates/next-core/src/manifest.rs +++ b/crates/next-core/src/manifest.rs @@ -3,8 +3,9 @@ use indexmap::IndexMap; use mime::{APPLICATION_JAVASCRIPT_UTF_8, APPLICATION_JSON}; use serde::Serialize; use turbo_tasks::{ + graph::{GraphTraversal, NonDeterministic}, 
primitives::{StringVc, StringsVc}, - TryFlatMapRecursiveJoinIterExt, TryJoinIterExt, + TryJoinIterExt, }; use turbo_tasks_fs::File; use turbopack_core::asset::AssetContentVc; @@ -60,19 +61,18 @@ impl DevManifestContentSourceVc { Ok(content_source.get_children().await?.clone_value()) } - let mut routes = this - .page_roots - .iter() - .copied() - .try_flat_map_recursive_join(get_content_source_children) - .await? - .into_iter() - .map(content_source_to_pathname) - .try_join() - .await? - .into_iter() - .flatten() - .collect::>(); + let mut routes = GraphTraversal::>::visit( + this.page_roots.iter().copied(), + get_content_source_children, + ) + .await? + .into_iter() + .map(content_source_to_pathname) + .try_join() + .await? + .into_iter() + .flatten() + .collect::>(); routes.sort_by_cached_key(|s| s.split('/').map(PageSortKey::from).collect::>()); diff --git a/crates/turbo-tasks/src/graph/get_children.rs b/crates/turbo-tasks/src/graph/get_children.rs new file mode 100644 index 0000000000000..196e9656f0a88 --- /dev/null +++ b/crates/turbo-tasks/src/graph/get_children.rs @@ -0,0 +1,121 @@ +use std::{collections::HashSet, future::Future}; + +use anyhow::Result; + +/// A trait that allows a graph traversal to get the children of a node. +pub trait GetChildren { + type Children: IntoIterator; + type Future: Future>; + + fn get_children(&mut self, item: &T) -> Option; +} + +// The different `Impl*` here are necessary in order to avoid the `Conflicting +// implementations of trait` error when implementing `GetChildren` on different +// kinds of `FnMut`. +// See https://users.rust-lang.org/t/conflicting-implementation-when-implementing-traits-for-fn/53359/3 + +pub struct ImplRef; + +impl GetChildren for C +where + C: FnMut(&T) -> F, + F: Future>, + CI: IntoIterator, +{ + type Children = CI; + type Future = F; + + fn get_children(&mut self, item: &T) -> Option { + Some((self)(item)) + } +} + +pub struct ImplRefOption; + +impl GetChildren for C +where + C: FnMut(&T) -> Option, + F: Future>, + CI: IntoIterator, +{ + type Children = CI; + type Future = F; + + fn get_children(&mut self, item: &T) -> Option { + (self)(item) + } +} + +pub struct ImplValue; + +impl GetChildren for C +where + T: Copy, + C: FnMut(T) -> F, + F: Future>, + CI: IntoIterator, +{ + type Children = CI; + type Future = F; + + fn get_children(&mut self, item: &T) -> Option { + Some((self)(*item)) + } +} + +pub struct ImplValueOption; + +impl GetChildren for C +where + T: Copy, + C: FnMut(T) -> Option, + F: Future>, + CI: IntoIterator, +{ + type Children = CI; + type Future = F; + + fn get_children(&mut self, item: &T) -> Option { + (self)(*item) + } +} + +/// A [`GetChildren`] implementation that skips nodes that have already been +/// visited. This is necessary to avoid repeated work when traversing non-tree +/// graphs (i.e. where a child can have more than one parent). +#[derive(Debug)] +pub struct SkipDuplicates { + get_children: C, + visited: HashSet, + _phantom: std::marker::PhantomData, +} + +impl SkipDuplicates { + /// Create a new [`SkipDuplicates`] that wraps the given [`GetChildren`]. 
+ pub fn new(get_children: C) -> Self { + Self { + get_children, + visited: HashSet::new(), + _phantom: std::marker::PhantomData, + } + } +} + +impl GetChildren for SkipDuplicates +where + T: Eq + std::hash::Hash + Clone, + C: GetChildren, +{ + type Children = C::Children; + type Future = C::Future; + + fn get_children(&mut self, item: &T) -> Option { + if !self.visited.contains(item) { + self.visited.insert(item.clone()); + self.get_children.get_children(item) + } else { + None + } + } +} diff --git a/crates/turbo-tasks/src/graph/graph_store.rs b/crates/turbo-tasks/src/graph/graph_store.rs new file mode 100644 index 0000000000000..7fda8a5b3f6d6 --- /dev/null +++ b/crates/turbo-tasks/src/graph/graph_store.rs @@ -0,0 +1,10 @@ +/// A graph store is a data structure that will be built up during a graph +/// traversal. It is used to store the results of the traversal. +pub trait GraphStore: Default { + type Handle: Clone; + + // TODO(alexkirsz) An `entry(parent_handle) -> Entry` API would be more + // efficient, as right now we're getting the same key multiple times. + /// Inserts a node into the graph store, and returns a handle to it. + fn insert(&mut self, parent_handle: Option, node: T) -> (Self::Handle, &T); +} diff --git a/crates/turbo-tasks/src/graph/graph_traversal.rs b/crates/turbo-tasks/src/graph/graph_traversal.rs new file mode 100644 index 0000000000000..ff7dac484b044 --- /dev/null +++ b/crates/turbo-tasks/src/graph/graph_traversal.rs @@ -0,0 +1,131 @@ +use std::{future::Future, pin::Pin, task::ready}; + +use anyhow::Result; +use futures::{stream::FuturesUnordered, Stream}; +use pin_project_lite::pin_project; + +use super::{graph_store::GraphStore, GetChildren}; + +/// [`GraphTraversal`] is a utility type that can be used to traverse a graph of +/// nodes, where each node can have a variable number of children. The traversal +/// is done in parallel, and the order of the nodes in the traversal result is +/// determined by the [`GraphStore`] parameter. +pub struct GraphTraversal { + _store: std::marker::PhantomData, +} + +impl GraphTraversal { + /// Visits the graph starting from the given `roots`, and returns a future + /// that will resolve to the traversal result. + pub fn visit(roots: I, mut get_children: C) -> GraphTraversalFuture + where + S: GraphStore, + I: IntoIterator, + C: GetChildren, + { + let mut store = S::default(); + let futures = FuturesUnordered::new(); + for item in roots { + let (parent_handle, item) = store.insert(None, item); + if let Some(future) = get_children.get_children(item) { + futures.push(WithHandle::new(future, parent_handle)); + } + } + GraphTraversalFuture { + store, + futures, + get_children, + } + } +} + +/// A future that resolves to a [`GraphStore`] containing the result of a graph +/// traversal. 
+pub struct GraphTraversalFuture +where + S: GraphStore, + C: GetChildren, +{ + store: S, + futures: FuturesUnordered>, + get_children: C, +} + +impl Future for GraphTraversalFuture +where + S: GraphStore, + C: GetChildren, +{ + type Output = Result; + + fn poll( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let this = unsafe { self.get_unchecked_mut() }; + loop { + let futures = unsafe { Pin::new_unchecked(&mut this.futures) }; + if let Some((parent_handle, result)) = ready!(futures.poll_next(cx)) { + match result { + Ok(children) => { + for item in children { + let (child_handle, item) = + this.store.insert(Some(parent_handle.clone()), item); + + if let Some(future) = this.get_children.get_children(item) { + this.futures.push(WithHandle::new(future, child_handle)); + } + } + } + Err(err) => return std::task::Poll::Ready(Err(err)), + } + } else { + return std::task::Poll::Ready(Ok(std::mem::take(&mut this.store))); + } + } + } +} + +pin_project! { + struct WithHandle + where + T: Future, + { + #[pin] + future: T, + handle: Option
<H>
, + } +} + +impl WithHandle +where + T: Future, +{ + pub fn new(future: T, handle: H) -> Self { + Self { + future, + handle: Some(handle), + } + } +} + +impl Future for WithHandle +where + T: Future, +{ + type Output = (H, T::Output); + + fn poll( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let this = self.project(); + match this.future.poll(cx) { + std::task::Poll::Ready(result) => std::task::Poll::Ready(( + this.handle.take().expect("polled after completion"), + result, + )), + std::task::Poll::Pending => std::task::Poll::Pending, + } + } +} diff --git a/crates/turbo-tasks/src/graph/mod.rs b/crates/turbo-tasks/src/graph/mod.rs new file mode 100644 index 0000000000000..c5505c9366e95 --- /dev/null +++ b/crates/turbo-tasks/src/graph/mod.rs @@ -0,0 +1,10 @@ +mod get_children; +mod graph_store; +mod graph_traversal; +mod non_deterministic; +mod reverse_topological; + +pub use get_children::{GetChildren, SkipDuplicates}; +pub use graph_traversal::GraphTraversal; +pub use non_deterministic::NonDeterministic; +pub use reverse_topological::ReverseTopological; diff --git a/crates/turbo-tasks/src/graph/non_deterministic.rs b/crates/turbo-tasks/src/graph/non_deterministic.rs new file mode 100644 index 0000000000000..c90f59bd991a1 --- /dev/null +++ b/crates/turbo-tasks/src/graph/non_deterministic.rs @@ -0,0 +1,31 @@ +use super::graph_store::GraphStore; + +/// A graph traversal that does not guarantee any particular order, and may not +/// return the same order every time it is run. +pub struct NonDeterministic { + output: Vec, +} + +impl Default for NonDeterministic { + fn default() -> Self { + Self { output: Vec::new() } + } +} + +impl GraphStore for NonDeterministic { + type Handle = (); + + fn insert(&mut self, _parent_handle: Option, node: T) -> (Self::Handle, &T) { + self.output.push(node); + ((), self.output.last().unwrap()) + } +} + +impl IntoIterator for NonDeterministic { + type Item = T; + type IntoIter = as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.output.into_iter() + } +} diff --git a/crates/turbo-tasks/src/graph/reverse_topological.rs b/crates/turbo-tasks/src/graph/reverse_topological.rs new file mode 100644 index 0000000000000..c6e26c9e03a6b --- /dev/null +++ b/crates/turbo-tasks/src/graph/reverse_topological.rs @@ -0,0 +1,118 @@ +use std::collections::{HashMap, HashSet}; + +use super::graph_store::GraphStore; + +/// A graph traversal that returns nodes in reverse topological order. 
+pub struct ReverseTopological +where + T: Eq + std::hash::Hash + Clone, +{ + adjacency_map: HashMap>, + roots: Vec, +} + +impl Default for ReverseTopological +where + T: Eq + std::hash::Hash + Clone, +{ + fn default() -> Self { + Self { + adjacency_map: HashMap::new(), + roots: Vec::new(), + } + } +} + +impl GraphStore for ReverseTopological +where + T: Eq + std::hash::Hash + Clone, +{ + type Handle = T; + + fn insert(&mut self, parent: Option, node: T) -> (Self::Handle, &T) { + let vec = if let Some(parent) = parent { + self.adjacency_map + .entry(parent) + .or_insert_with(|| Vec::with_capacity(1)) + } else { + &mut self.roots + }; + + vec.push(node.clone()); + (node, vec.last().unwrap()) + } +} + +#[derive(Debug)] +enum ReverseTopologicalPass { + Pre, + Post, +} + +impl IntoIterator for ReverseTopological +where + T: Eq + std::hash::Hash + Clone, +{ + type Item = T; + type IntoIter = ReverseTopologicalIntoIter; + + fn into_iter(self) -> Self::IntoIter { + ReverseTopologicalIntoIter { + adjacency_map: self.adjacency_map, + stack: self + .roots + .into_iter() + .map(|root| (ReverseTopologicalPass::Pre, root)) + .collect(), + visited: HashSet::new(), + } + } +} + +pub struct ReverseTopologicalIntoIter +where + T: Eq + std::hash::Hash + Clone, +{ + adjacency_map: HashMap>, + stack: Vec<(ReverseTopologicalPass, T)>, + visited: HashSet, +} + +impl Iterator for ReverseTopologicalIntoIter +where + T: Eq + std::hash::Hash + Clone, +{ + type Item = T; + + fn next(&mut self) -> Option { + let current = loop { + let (pass, current) = self.stack.pop()?; + + match pass { + ReverseTopologicalPass::Post => { + break current; + } + ReverseTopologicalPass::Pre => { + if self.visited.contains(¤t) { + continue; + } + + self.visited.insert(current.clone()); + + let Some(children) = self.adjacency_map.get(¤t) else { + break current; + }; + + self.stack.push((ReverseTopologicalPass::Post, current)); + self.stack.extend( + children + .iter() + .map(|child| (ReverseTopologicalPass::Pre, child.clone())), + ); + } + } + }; + + Some(current) + } +} diff --git a/crates/turbo-tasks/src/join_iter_ext.rs b/crates/turbo-tasks/src/join_iter_ext.rs index 31c3fc7fd1983..263eb5f54705f 100644 --- a/crates/turbo-tasks/src/join_iter_ext.rs +++ b/crates/turbo-tasks/src/join_iter_ext.rs @@ -1,18 +1,10 @@ -use std::{ - future::{Future, IntoFuture}, - hash::Hash, - mem::take, - pin::Pin, - task::ready, -}; +use std::future::{Future, IntoFuture}; use anyhow::Result; use futures::{ future::{join_all, JoinAll}, - stream::FuturesOrdered, - FutureExt, Stream, + FutureExt, }; -use indexmap::IndexSet; /// Future for the [JoinIterExt::join] method. pub struct Join @@ -37,6 +29,16 @@ where } } +pub trait JoinIterExt: Iterator +where + T: Unpin, + F: Future, +{ + /// Returns a future that resolves to a vector of the outputs of the futures + /// in the iterator. + fn join(self) -> Join; +} + /// Future for the [TryJoinIterExt::try_join] method. 
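The `Result`-collecting behavior these combinators expose mirrors the standard `FromIterator` impl for `Result`; a minimal illustration of the "first error wins" semantics with plain iterators (no futures involved):

```rust
fn main() {
    let ok: Result<Vec<i32>, &str> = [Ok(1), Ok(2)].into_iter().collect();
    assert_eq!(ok, Ok(vec![1, 2]));

    // Collection stops at the first `Err`, which becomes the overall result.
    let err: Result<Vec<i32>, &str> =
        [Ok(1), Err("boom"), Err("later")].into_iter().collect();
    assert_eq!(err, Err("boom"));
}
```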
pub struct TryJoin where @@ -65,63 +67,6 @@ where } } -pub struct TryFlatMapRecursiveJoin -where - T: Hash + PartialEq + Eq + Clone, - C: Fn(T) -> F, - F: Future>, - CI: IntoIterator, -{ - set: IndexSet, - futures: FuturesOrdered, - get_children: C, -} - -impl Future for TryFlatMapRecursiveJoin -where - T: Hash + PartialEq + Eq + Clone, - C: Fn(T) -> F, - F: Future>, - CI: IntoIterator, -{ - type Output = Result>; - fn poll( - self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll { - let this = unsafe { self.get_unchecked_mut() }; - loop { - let futures = unsafe { Pin::new_unchecked(&mut this.futures) }; - if let Some(result) = ready!(futures.poll_next(cx)) { - match result { - Ok(children) => { - for item in children { - let (index, new) = this.set.insert_full(item); - if new { - this.futures - .push_back((this.get_children)(this.set[index].clone())); - } - } - } - Err(err) => return std::task::Poll::Ready(Err(err)), - } - } else { - return std::task::Poll::Ready(Ok(take(&mut this.set))); - } - } - } -} - -pub trait JoinIterExt: Iterator -where - T: Unpin, - F: Future, -{ - /// Returns a future that resolves to a vector of the outputs of the futures - /// in the iterator. - fn join(self) -> Join; -} - pub trait TryJoinIterExt: Iterator where T: Unpin, @@ -135,24 +80,6 @@ where fn try_join(self) -> TryJoin; } -pub trait TryFlatMapRecursiveJoinIterExt: Iterator -where - T: Hash + PartialEq + Eq + Clone, - C: Fn(T) -> F, - F: Future>, - CI: IntoIterator, -{ - /// Applies the `get_children` function on each item in the iterator, and on - /// each item that is returned by `get_children`. Collects all items from - /// the iterator and all items returns by `get_children` into an index set. - /// The order of items is equal to a breadth-first traversal of the tree, - /// but `get_children` will execute concurrently. It will handle circular - /// references gracefully. Returns a future that resolve to a - /// [Result]. It will resolve to the first error that occur in - /// breadth-first order. 
- fn try_flat_map_recursive_join(self, get_children: C) -> TryFlatMapRecursiveJoin; -} - impl JoinIterExt for It where T: Unpin, @@ -180,28 +107,3 @@ where } } } - -impl TryFlatMapRecursiveJoinIterExt for It -where - T: Hash + PartialEq + Eq + Clone, - C: Fn(T) -> F, - F: Future>, - CI: IntoIterator, - It: Iterator, -{ - fn try_flat_map_recursive_join(self, get_children: C) -> TryFlatMapRecursiveJoin { - let mut set = IndexSet::new(); - let mut futures = FuturesOrdered::new(); - for item in self { - let (index, new) = set.insert_full(item); - if new { - futures.push_back(get_children(set[index].clone())); - } - } - TryFlatMapRecursiveJoin { - set, - futures, - get_children, - } - } -} diff --git a/crates/turbo-tasks/src/lib.rs b/crates/turbo-tasks/src/lib.rs index 59d2ccd2da647..571cbd140efa1 100644 --- a/crates/turbo-tasks/src/lib.rs +++ b/crates/turbo-tasks/src/lib.rs @@ -39,6 +39,7 @@ mod completion; pub mod debug; mod display; pub mod event; +pub mod graph; mod id; mod id_factory; mod join_iter_ext; @@ -70,7 +71,7 @@ pub use id::{ with_task_id_mapping, without_task_id_mapping, FunctionId, IdMapping, TaskId, TraitTypeId, ValueTypeId, }; -pub use join_iter_ext::{JoinIterExt, TryFlatMapRecursiveJoinIterExt, TryJoinIterExt}; +pub use join_iter_ext::{JoinIterExt, TryJoinIterExt}; pub use manager::{ dynamic_call, emit, get_invalidator, mark_stateful, run_once, spawn_blocking, spawn_thread, trait_call, turbo_tasks, Invalidator, StatsType, TaskIdProvider, TurboTasks, TurboTasksApi, diff --git a/crates/turbopack-core/src/chunk/mod.rs b/crates/turbopack-core/src/chunk/mod.rs index 3624cdae9f2f6..ca05e9f49af4c 100644 --- a/crates/turbopack-core/src/chunk/mod.rs +++ b/crates/turbopack-core/src/chunk/mod.rs @@ -12,9 +12,10 @@ use indexmap::IndexSet; use serde::{Deserialize, Serialize}; use turbo_tasks::{ debug::ValueDebugFormat, + graph::{GraphTraversal, ReverseTopological, SkipDuplicates}, primitives::{BoolVc, StringVc}, trace::TraceRawVcs, - TryFlatMapRecursiveJoinIterExt, TryJoinIterExt, ValueToString, ValueToStringVc, + TryJoinIterExt, ValueToString, ValueToStringVc, }; use turbo_tasks_fs::FileSystemPathVc; use turbo_tasks_hash::DeterministicHash; @@ -125,52 +126,15 @@ impl ChunkGroupVc { /// All chunks should be loaded in parallel. #[turbo_tasks::function] pub async fn chunks(self) -> Result { - async fn reference_to_chunks( - r: AssetReferenceVc, - ) -> Result + Send> { - let mut result = Vec::new(); - if let Some(pc) = ParallelChunkReferenceVc::resolve_from(r).await? { - if *pc.is_loaded_in_parallel().await? { - result = r - .resolve_reference() - .await? - .primary - .iter() - .map(|r| async move { - Ok(if let PrimaryResolveResult::Asset(a) = r { - ChunkVc::resolve_from(a).await? - } else { - None - }) - }) - .try_join() - .await?; - } - } - Ok(result.into_iter().flatten()) - } - - async fn get_chunk_children( - chunk: ChunkVc, - ) -> Result + Send> { - Ok(chunk - .references() - .await? - .iter() - .copied() - .map(reference_to_chunks) - .try_join() - .await? - .into_iter() - .flatten()) - } - - let chunks = [self.await?.entry] - .into_iter() - .try_flat_map_recursive_join(get_chunk_children) - .await?; - - let chunks = ChunksVc::cell(chunks.into_iter().collect()); + let chunks: Vec<_> = GraphTraversal::>::visit( + [self.await?.entry], + SkipDuplicates::new(get_chunk_children), + ) + .await? 
+ .into_iter() + .collect(); + + let chunks = ChunksVc::cell(chunks); let chunks = optimize(chunks, self); let chunks = ChunksVc::cell( chunks @@ -184,6 +148,44 @@ impl ChunkGroupVc { } } +/// Computes the list of all chunk children of a given chunk. +async fn get_chunk_children(parent: ChunkVc) -> Result + Send> { + Ok(parent + .references() + .await? + .iter() + .copied() + .map(reference_to_chunks) + .try_join() + .await? + .into_iter() + .flatten()) +} + +/// Get all parallel chunks from a parallel chunk reference. +async fn reference_to_chunks(r: AssetReferenceVc) -> Result + Send> { + let mut result = Vec::new(); + if let Some(pc) = ParallelChunkReferenceVc::resolve_from(r).await? { + if *pc.is_loaded_in_parallel().await? { + result = r + .resolve_reference() + .await? + .primary + .iter() + .map(|r| async move { + Ok(if let PrimaryResolveResult::Asset(a) = r { + ChunkVc::resolve_from(a).await? + } else { + None + }) + }) + .try_join() + .await?; + } + } + Ok(result.into_iter().flatten()) +} + #[turbo_tasks::value_impl] impl ValueToString for ChunkGroup { #[turbo_tasks::function] diff --git a/crates/turbopack-core/src/reference_type.rs b/crates/turbopack-core/src/reference_type.rs index 26cd954cce458..4b23764f53014 100644 --- a/crates/turbopack-core/src/reference_type.rs +++ b/crates/turbopack-core/src/reference_type.rs @@ -24,6 +24,7 @@ pub enum EcmaScriptModulesReferenceSubType { #[derive(Debug, Clone, PartialOrd, Ord, Hash)] pub enum CssReferenceSubType { AtImport, + Compose, Custom(u8), Undefined, } diff --git a/crates/turbopack-create-test-app/Cargo.toml b/crates/turbopack-create-test-app/Cargo.toml index 8ce8dee06d896..09588cffb0605 100644 --- a/crates/turbopack-create-test-app/Cargo.toml +++ b/crates/turbopack-create-test-app/Cargo.toml @@ -19,7 +19,7 @@ bench = false [dependencies] anyhow = "1.0.47" clap = { version = "4.0.18", features = ["derive"] } -indoc = "1.0" +indoc = { workspace = true } pathdiff = "0.2.1" serde_json = "1.0.85" tempfile = "3.3.0" diff --git a/crates/turbopack-css/Cargo.toml b/crates/turbopack-css/Cargo.toml index 88d25b554fe5b..538738a7c0afb 100644 --- a/crates/turbopack-css/Cargo.toml +++ b/crates/turbopack-css/Cargo.toml @@ -13,6 +13,7 @@ bench = false anyhow = "1.0.47" async-trait = "0.1.56" indexmap = { workspace = true } +indoc = { workspace = true } once_cell = "1.13.0" regex = "1.6.0" serde = "1.0.136" diff --git a/crates/turbopack-css/src/asset.rs b/crates/turbopack-css/src/asset.rs index 5fd32f10ba9d3..e8194d482d754 100644 --- a/crates/turbopack-css/src/asset.rs +++ b/crates/turbopack-css/src/asset.rs @@ -28,7 +28,10 @@ use crate::{ code_gen::{CodeGenerateable, CodeGenerateableVc}, parse::{parse, ParseResult, ParseResultSourceMap, ParseResultVc}, path_visitor::ApplyVisitors, - references::{analyze_css_stylesheet, import::ImportAssetReferenceVc}, + references::{ + analyze_css_stylesheet, compose::CssModuleComposeReferenceVc, + import::ImportAssetReferenceVc, + }, transform::CssInputTransformsVc, CssModuleAssetType, }; @@ -170,17 +173,27 @@ impl CssChunkItem for ModuleChunkItem { let context = self.context; for reference in references.iter() { - if let Some(import) = ImportAssetReferenceVc::resolve_from(reference).await? { - for result in import.resolve_reference().await?.primary.iter() { + if let Some(import_ref) = ImportAssetReferenceVc::resolve_from(reference).await? 
{ + for result in import_ref.resolve_reference().await?.primary.iter() { if let PrimaryResolveResult::Asset(asset) = result { if let Some(placeable) = CssChunkPlaceableVc::resolve_from(asset).await? { imports.push(CssImport::Internal( - import, + import_ref, placeable.as_chunk_item(context), )); } } } + } else if let Some(compose_ref) = + CssModuleComposeReferenceVc::resolve_from(reference).await? + { + for result in compose_ref.resolve_reference().await?.primary.iter() { + if let PrimaryResolveResult::Asset(asset) = result { + if let Some(placeable) = CssChunkPlaceableVc::resolve_from(asset).await? { + imports.push(CssImport::Composes(placeable.as_chunk_item(context))); + } + } + } } } diff --git a/crates/turbopack-css/src/chunk/mod.rs b/crates/turbopack-css/src/chunk/mod.rs index e77531f7335f1..e866c06e6b39f 100644 --- a/crates/turbopack-css/src/chunk/mod.rs +++ b/crates/turbopack-css/src/chunk/mod.rs @@ -388,6 +388,7 @@ pub struct CssChunkPlaceables(Vec); pub enum CssImport { External(StringVc), Internal(ImportAssetReferenceVc, CssChunkItemVc), + Composes(CssChunkItemVc), } #[turbo_tasks::value(shared)] diff --git a/crates/turbopack-css/src/chunk/optimize.rs b/crates/turbopack-css/src/chunk/optimize.rs index c6786cc76b83f..fac6a583100c3 100644 --- a/crates/turbopack-css/src/chunk/optimize.rs +++ b/crates/turbopack-css/src/chunk/optimize.rs @@ -1,11 +1,8 @@ -use std::mem::take; - use anyhow::{bail, Result}; use indexmap::IndexSet; use turbo_tasks::TryJoinIterExt; -use turbo_tasks_fs::FileSystemPathOptionVc; use turbopack_core::chunk::{ - optimize::{optimize_by_common_parent, ChunkOptimizer, ChunkOptimizerVc}, + optimize::{ChunkOptimizer, ChunkOptimizerVc}, ChunkGroupVc, ChunkVc, ChunkingContextVc, ChunksVc, }; @@ -26,7 +23,19 @@ impl CssChunkOptimizerVc { impl ChunkOptimizer for CssChunkOptimizer { #[turbo_tasks::function] async fn optimize(&self, chunks: ChunksVc, _chunk_group: ChunkGroupVc) -> Result { - optimize_by_common_parent(chunks, get_common_parent, optimize_css).await + // The CSS optimizer works under the constraint that the order in which + // CSS chunks are loaded must be preserved, as CSS rules + // precedence is determined by the order in which they are + // loaded. This means that we may not merge chunks that are not + // adjacent to each other in a valid reverse topological order. + + // TODO(alexkirsz) It might be more interesting to only merge adjacent + // chunks when they are part of the same chunk subgraph. + // However, the optimizer currently does not have access to this + // information, as chunks are already fully flattened by the + // time they reach the optimizer. + + merge_adjacent_chunks(chunks).await } } @@ -38,13 +47,11 @@ async fn css(chunk: ChunkVc) -> Result { } } -#[turbo_tasks::function] -async fn get_common_parent(chunk: ChunkVc) -> Result { - Ok(css(chunk).await?.common_parent()) -} - -async fn merge_chunks(first: CssChunkVc, chunks: &[CssChunkVc]) -> Result { - let chunks = chunks.iter().copied().try_join().await?; +async fn merge_chunks( + first: CssChunkVc, + chunks: impl IntoIterator, +) -> Result { + let chunks = chunks.into_iter().copied().try_join().await?; let main_entries = chunks .iter() .map(|c| c.main_entries) @@ -59,49 +66,62 @@ async fn merge_chunks(first: CssChunkVc, chunks: &[CssChunkVc]) -> Result, children: Vec) -> Result { - let mut chunks = Vec::new(); - // TODO optimize - if let Some(local) = local { - // Local chunks have the same common_parent and could be merged into fewer - // chunks. 
(We use a pretty large threshold for that.) - let mut local = local.await?.iter().copied().map(css).try_join().await?; - // Merge all local chunks when they are too many - if local.len() > LOCAL_CHUNK_MERGE_THRESHOLD { - let merged = take(&mut local); - if let Some(first) = merged.first().copied() { - local.push(merge_chunks(first, &merged).await?); - } +/// The maximum number of chunks to exist in a single chunk group. The optimizer +/// will merge chunks into groups until it has at most this number of chunks. +const MAX_CHUNK_COUNT: usize = 20; + +/// Groups adjacent chunks into at most `MAX_CHUNK_COUNT` groups. +fn aggregate_adjacent_chunks(chunks: &[ChunkVc]) -> Vec> { + // Each of the resulting merged chunks will have `chunks_per_merged_chunk` + // chunks in them, except for the first `chunks_mod` chunks, which will have + // one more chunk. + let chunks_per_merged_chunk = chunks.len() / MAX_CHUNK_COUNT; + let mut chunks_mod = chunks.len() % MAX_CHUNK_COUNT; + + let mut chunks_vecs = vec![]; + let mut current_chunks = vec![]; + + for chunk in chunks.iter().copied() { + if current_chunks.len() < chunks_per_merged_chunk { + current_chunks.push(chunk); + } else if current_chunks.len() == chunks_per_merged_chunk && chunks_mod > 0 { + current_chunks.push(chunk); + chunks_mod -= 1; + chunks_vecs.push(std::mem::take(&mut current_chunks)); + } else { + chunks_vecs.push(std::mem::take(&mut current_chunks)); + current_chunks.push(chunk); } - chunks.append(&mut local); } - for children in children { - let mut children = children.await?.iter().copied().map(css).try_join().await?; - chunks.append(&mut children); + + if !current_chunks.is_empty() { + chunks_vecs.push(current_chunks); } - // Multiple very small chunks could be merged to avoid requests. (We use a small - // threshold for that.) - // TODO implement that - - // When there are too many chunks, try hard to reduce the number of chunks to - // limit the request count. - if chunks.len() > TOTAL_CHUNK_MERGE_THRESHOLD { - let size = chunks.len().div_ceil(TOTAL_CHUNK_MERGE_THRESHOLD); - // TODO be smarter in selecting the chunks to merge - // see ecmascript implementation - for merged in take(&mut chunks).chunks(size) { - chunks.push(merge_chunks(*merged.first().unwrap(), merged).await?); - } + + chunks_vecs +} + +/// Merges adjacent chunks into at most `MAX_CHUNK_COUNT` chunks. +async fn merge_adjacent_chunks(chunks_vc: ChunksVc) -> Result { + let chunks = chunks_vc.await?; + + if chunks.len() <= MAX_CHUNK_COUNT { + return Ok(chunks_vc); } - Ok(ChunksVc::cell( - chunks.into_iter().map(|c| c.as_chunk()).collect(), - )) + + let chunks = aggregate_adjacent_chunks(&chunks); + + let chunks = chunks + .into_iter() + .map(|chunks| async move { + let chunks = chunks.iter().copied().map(css).try_join().await?; + merge_chunks(*chunks.first().unwrap(), &chunks).await + }) + .try_join() + .await? 
+ .into_iter() + .map(|chunk| chunk.as_chunk()) + .collect(); + + Ok(ChunksVc::cell(chunks)) } diff --git a/crates/turbopack-css/src/chunk/writer.rs b/crates/turbopack-css/src/chunk/writer.rs index 73782bd522f0e..4f8ffa5297d54 100644 --- a/crates/turbopack-css/src/chunk/writer.rs +++ b/crates/turbopack-css/src/chunk/writer.rs @@ -1,4 +1,7 @@ -use std::{collections::VecDeque, io::Write}; +use std::{ + collections::{HashSet, VecDeque}, + io::Write, +}; use anyhow::Result; use turbo_tasks::{primitives::StringVc, ValueToString}; @@ -18,12 +21,22 @@ pub async fn expand_imports( "".to_string(), )]; let mut external_imports = vec![]; + let mut imported_chunk_items: HashSet<(String, String, CssChunkItemVc)> = HashSet::default(); + let mut composed_chunk_items: HashSet = HashSet::default(); while let Some((chunk_item, imports, close)) = stack.last_mut() { match imports.pop_front() { Some(CssImport::Internal(import, imported_chunk_item)) => { let (open, close) = import.await?.attributes.await?.print_block()?; + if !imported_chunk_items.insert(( + open.clone(), + close.clone(), + imported_chunk_item.resolve().await?, + )) { + continue; + } + let id = &*imported_chunk_item.to_string().await?; writeln!(code, "/* import({}) */", id)?; writeln!(code, "{}", open)?; @@ -36,6 +49,22 @@ pub async fn expand_imports( close, )); } + Some(CssImport::Composes(composed_chunk_item)) => { + if !composed_chunk_items.insert(composed_chunk_item.resolve().await?) { + continue; + } + + let id = &*composed_chunk_item.to_string().await?; + writeln!(code, "/* composes({}) */", id)?; + + let composed_content_vc = composed_chunk_item.content(); + let composed_content = &*composed_content_vc.await?; + stack.push(( + composed_chunk_item, + composed_content.imports.iter().cloned().collect(), + "".to_string(), + )); + } Some(CssImport::External(url_vc)) => { external_imports.push(url_vc); } diff --git a/crates/turbopack-css/src/module_asset.rs b/crates/turbopack-css/src/module_asset.rs index 019b2f3a418f5..1d50aaadf36dd 100644 --- a/crates/turbopack-css/src/module_asset.rs +++ b/crates/turbopack-css/src/module_asset.rs @@ -1,11 +1,13 @@ use std::{fmt::Write, sync::Arc}; use anyhow::Result; +use indexmap::IndexMap; +use indoc::formatdoc; use swc_core::{ common::{BytePos, FileName, LineCol, SourceMap}, css::modules::CssClassName, }; -use turbo_tasks::{primitives::StringVc, ValueToString, ValueToStringVc}; +use turbo_tasks::{primitives::StringVc, Value, ValueToString, ValueToStringVc}; use turbo_tasks_fs::FileSystemPathVc; use turbopack_core::{ asset::{Asset, AssetContentVc, AssetVc}, @@ -15,9 +17,11 @@ use turbopack_core::{ ChunkingTypeOptionVc, }, context::AssetContextVc, + issue::{Issue, IssueSeverity, IssueSeverityVc, IssueVc}, reference::{AssetReference, AssetReferenceVc, AssetReferencesVc}, resolve::{ origin::{ResolveOrigin, ResolveOriginVc}, + parse::RequestVc, ResolveResult, ResolveResultVc, }, }; @@ -31,7 +35,16 @@ use turbopack_ecmascript::{ ParseResultSourceMap, ParseResultSourceMapVc, }; -use crate::{parse::ParseResult, transform::CssInputTransformsVc, CssModuleAssetVc}; +use crate::{ + chunk::{ + CssChunkItem, CssChunkItemContentVc, CssChunkItemVc, CssChunkPlaceable, + CssChunkPlaceableVc, CssChunkVc, + }, + parse::ParseResult, + references::compose::CssModuleComposeReferenceVc, + transform::CssInputTransformsVc, + CssModuleAssetVc, +}; #[turbo_tasks::value] #[derive(Clone)] @@ -62,8 +75,118 @@ impl Asset for ModuleCssModuleAsset { } #[turbo_tasks::function] - fn references(&self) -> AssetReferencesVc { - 
self.inner.references() + async fn references(self_vc: ModuleCssModuleAssetVc) -> Result { + let references = self_vc.await?.inner.references().await?; + let module_references = self_vc.module_references().await?; + + let references: Vec<_> = references + .iter() + .copied() + .chain(module_references.iter().copied()) + .collect(); + + Ok(AssetReferencesVc::cell(references)) + } +} + +/// A CSS class that is exported from a CSS module. +/// +/// See [`ModuleCssClasses`] for more information. +#[turbo_tasks::value(transparent)] +#[derive(Debug, Clone)] +enum ModuleCssClass { + Local { + name: String, + }, + Global { + name: String, + }, + Import { + original: String, + from: CssModuleComposeReferenceVc, + }, +} + +/// A map of CSS classes exported from a CSS module. +/// +/// ## Example +/// +/// ```css +/// :global(.class1) { +/// color: red; +/// } +/// +/// .class2 { +/// color: blue; +/// } +/// +/// .class3 { +/// composes: class4 from "./other.module.css"; +/// } +/// ``` +/// +/// The above CSS module would have the following exports: +/// 1. class1: [Global("exported_class1")] +/// 2. class2: [Local("exported_class2")] +/// 3. class3: [Local("exported_class3), Import("class4", "./other.module.css")] +#[turbo_tasks::value(transparent)] +#[derive(Debug, Clone)] +struct ModuleCssClasses(IndexMap>); + +#[turbo_tasks::value_impl] +impl ModuleCssModuleAssetVc { + #[turbo_tasks::function] + async fn classes(self) -> Result { + let inner = self.await?.inner; + let parse_result = inner.parse().await?; + let mut classes = IndexMap::default(); + + // TODO(alexkirsz) Should we report an error on parse error here? + if let ParseResult::Ok { exports, .. } = &*parse_result { + for (class_name, export_class_names) in exports { + let mut export = Vec::default(); + + for export_class_name in export_class_names { + export.push(match export_class_name { + CssClassName::Import { from, name } => ModuleCssClass::Import { + original: name.to_string(), + from: CssModuleComposeReferenceVc::new( + self.as_resolve_origin(), + RequestVc::parse(Value::new(from.to_string().into())), + ), + }, + CssClassName::Local { name } => ModuleCssClass::Local { + name: name.to_string(), + }, + CssClassName::Global { name } => ModuleCssClass::Global { + name: name.to_string(), + }, + }) + } + + classes.insert(class_name.to_string(), export); + } + } + + Ok(ModuleCssClassesVc::cell(classes)) + } + + #[turbo_tasks::function] + async fn module_references(self) -> Result { + let mut references = vec![]; + + for (_, class_names) in &*self.classes().await? { + for class_name in class_names { + match class_name { + ModuleCssClass::Import { from, .. } => { + references.push((*from).into()); + } + ModuleCssClass::Local { .. } | ModuleCssClass::Global { .. 
+ }
+ }
+ }
+
+ Ok(AssetReferencesVc::cell(references))
+ }
 }
 
@@ -78,10 +201,13 @@ impl ChunkableAsset for ModuleCssModuleAsset {
 
 #[turbo_tasks::value_impl]
 impl EcmascriptChunkPlaceable for ModuleCssModuleAsset {
 #[turbo_tasks::function]
- fn as_chunk_item(&self, context: ChunkingContextVc) -> EcmascriptChunkItemVc {
+ fn as_chunk_item(
+ self_vc: ModuleCssModuleAssetVc,
+ context: ChunkingContextVc,
+ ) -> EcmascriptChunkItemVc {
 ModuleChunkItem {
 context,
- module: self.inner,
+ module: self_vc,
 }
 .cell()
 .into()
@@ -108,7 +234,7 @@ impl ResolveOrigin for ModuleCssModuleAsset {
 
 #[turbo_tasks::value]
 struct ModuleChunkItem {
- module: CssModuleAssetVc,
+ module: ModuleCssModuleAssetVc,
 context: ChunkingContextVc,
 }
 
@@ -118,7 +244,7 @@ impl ValueToString for ModuleChunkItem {
 async fn to_string(&self) -> Result<StringVc> {
 Ok(StringVc::cell(format!(
 "{} (css module)",
- self.module.await?.source.path().to_string().await?
+ self.module.path().to_string().await?
 )))
 }
 }
@@ -126,27 +252,136 @@ impl ValueToString for ModuleChunkItem {
 #[turbo_tasks::value_impl]
 impl ChunkItem for ModuleChunkItem {
 #[turbo_tasks::function]
- fn references(&self) -> AssetReferencesVc {
- AssetReferencesVc::cell(vec![CssProxyToCssAssetReference {
+ async fn references(&self) -> Result<AssetReferencesVc> {
+ // The proxy reference must come first so it is processed before other potential
+ // references inside of the CSS, like `@import` and `composes:`.
+ // This affects the order in which the resulting CSS chunks will be loaded:
+ // later references are processed first in the post-order traversal of the
+ // reference tree, and as such they will be loaded first in the resulting HTML.
+ let mut references = vec![CssProxyToCssAssetReference {
 module: self.module,
- context: self.context,
 }
 .cell()
- .into()])
+ .into()];
+
+ references.extend(self.module.references().await?.iter().copied());
+
+ Ok(AssetReferencesVc::cell(references))
+ }
+}
+
+#[turbo_tasks::value_impl]
+impl EcmascriptChunkItem for ModuleChunkItem {
+ #[turbo_tasks::function]
+ fn chunking_context(&self) -> ChunkingContextVc {
+ self.context
+ }
+
+ #[turbo_tasks::function]
+ fn related_path(&self) -> FileSystemPathVc {
+ self.module.path()
+ }
+
+ #[turbo_tasks::function]
+ async fn content(&self) -> Result<EcmascriptChunkItemContentVc> {
+ let classes = self.module.classes().await?;
+
+ let mut code = "__turbopack_export_value__({\n".to_string();
+ for (export_name, class_names) in &*classes {
+ let mut exported_class_names = Vec::with_capacity(class_names.len());
+
+ for class_name in class_names {
+ match class_name {
+ ModuleCssClass::Import {
+ original: original_name,
+ from,
+ } => {
+ let resolved_module = from.resolve_reference().first_asset().await?;
+
+ let Some(resolved_module) = &*resolved_module else {
+ CssModuleComposesIssue {
+ severity: IssueSeverity::Error.cell(),
+ path: self.module.path(),
+ message: StringVc::cell(formatdoc! {
+ r#"
+ Module {from} referenced in `composes: ... from {from};` can't be resolved.
+ "#,
+ from = &*from.await?.request.to_string().await?
+ }),
+ }.cell().as_issue().emit();
+ continue;
+ };
+
+ let Some(css_module) = ModuleCssModuleAssetVc::resolve_from(resolved_module).await? else {
+ CssModuleComposesIssue {
+ severity: IssueSeverity::Error.cell(),
+ path: self.module.path(),
+ message: StringVc::cell(formatdoc! {
+ r#"
+ Module {from} referenced in `composes: ... from {from};` is not a CSS module.
+ "#,
+ from = &*from.await?.request.to_string().await?
+ }),
+ }.cell().as_issue().emit();
+ continue;
+ };
+
+ // TODO(alexkirsz) We should also warn if `original_name` can't be found in
+ // the target module.
+
+ let Some(placeable) = EcmascriptChunkPlaceableVc::resolve_from(css_module).await? else {
+ unreachable!("ModuleCssModuleAsset implements EcmascriptChunkPlaceableVc");
+ };
+
+ let module_id =
+ stringify_js(&*placeable.as_chunk_item(self.context).id().await?);
+ let original_name = stringify_js(original_name);
+ exported_class_names.push(format! {
+ "__turbopack_import__({module_id})[{original_name}]"
+ });
+ }
+ ModuleCssClass::Local { name: class_name }
+ | ModuleCssClass::Global { name: class_name } => {
+ exported_class_names.push(stringify_js(class_name));
+ }
+ }
+ }
+
+ writeln!(
+ code,
+ " {}: {},",
+ stringify_js(export_name),
+ exported_class_names.join(" + \" \" + ")
+ )?;
+ }
+ code += "});\n";
+ Ok(EcmascriptChunkItemContent {
+ inner_code: code.clone().into(),
+ // We generate a minimal map for runtime code so that the filename is
+ // displayed in dev tools.
+ source_map: Some(generate_minimal_source_map(
+ format!("{}.js", self.module.path().await?.path),
+ code,
+ )),
+ ..Default::default()
+ }
+ .cell())
 }
 }
 
 #[turbo_tasks::value]
 struct CssProxyToCssAssetReference {
- module: CssModuleAssetVc,
- context: ChunkingContextVc,
+ module: ModuleCssModuleAssetVc,
 }
 
 #[turbo_tasks::value_impl]
 impl ValueToString for CssProxyToCssAssetReference {
 #[turbo_tasks::function]
- fn to_string(&self) -> StringVc {
- StringVc::cell("css".to_string())
+ async fn to_string(&self) -> Result<StringVc> {
+ Ok(StringVc::cell(format!(
+ "proxy(css) {}",
+ self.module.path().to_string().await?,
+ )))
 }
 }
 
@@ -154,7 +389,14 @@
 impl AssetReference for CssProxyToCssAssetReference {
 #[turbo_tasks::function]
 fn resolve_reference(&self) -> ResolveResultVc {
- ResolveResult::asset(self.module.into()).cell()
+ ResolveResult::asset(
+ CssProxyModuleAsset {
+ module: self.module,
+ }
+ .cell()
+ .into(),
+ )
+ .cell()
 }
 }
 
@@ -166,62 +408,112 @@
 impl ChunkableAssetReference for CssProxyToCssAssetReference {
 }
 }
 
+/// This structure exists solely in order to extend the `references` returned by
+/// a standard [`CssModuleAsset`] with CSS modules' `composes:` references.
+#[turbo_tasks::value]
+#[derive(Clone)]
+struct CssProxyModuleAsset {
+ module: ModuleCssModuleAssetVc,
+}
+
 #[turbo_tasks::value_impl]
-impl EcmascriptChunkItem for ModuleChunkItem {
+impl Asset for CssProxyModuleAsset {
 #[turbo_tasks::function]
- fn chunking_context(&self) -> ChunkingContextVc {
- self.context
+ fn path(&self) -> FileSystemPathVc {
+ self.module.path()
 }
 
 #[turbo_tasks::function]
- fn related_path(&self) -> FileSystemPathVc {
+ fn content(&self) -> AssetContentVc {
+ self.module.content()
+ }
+
+ #[turbo_tasks::function]
+ async fn references(&self) -> Result<AssetReferencesVc> {
+ // The original references must come first so they're processed before other
+ // potential references inside of the CSS, like `@import` and `composes:`. This
+ // affects the order in which the resulting CSS chunks will be loaded:
+ // later references are processed first in the post-order traversal of
+ // the reference tree, and as such they will be loaded first in the
+ // resulting HTML.
+ let mut references = self.module.await?.inner.references().await?.clone_value();
+
+ references.extend(self.module.module_references().await?.iter().copied());
+
+ Ok(AssetReferencesVc::cell(references))
+ }
+}
+
+#[turbo_tasks::value_impl]
+impl ChunkableAsset for CssProxyModuleAsset {
+ #[turbo_tasks::function]
+ fn as_chunk(self_vc: CssProxyModuleAssetVc, context: ChunkingContextVc) -> ChunkVc {
+ CssChunkVc::new(context, self_vc.into()).into()
+ }
+}
+
+#[turbo_tasks::value_impl]
+impl CssChunkPlaceable for CssProxyModuleAsset {
+ #[turbo_tasks::function]
+ fn as_chunk_item(&self, context: ChunkingContextVc) -> CssChunkItemVc {
+ CssProxyModuleChunkItemVc::cell(CssProxyModuleChunkItem {
+ module: self.module,
+ context,
+ })
+ .into()
+ }
+}
+
+#[turbo_tasks::value_impl]
+impl ResolveOrigin for CssProxyModuleAsset {
+ #[turbo_tasks::function]
+ fn origin_path(&self) -> FileSystemPathVc {
 self.module.path()
 }
 
 #[turbo_tasks::function]
- async fn content(&self) -> Result<EcmascriptChunkItemContentVc> {
- let parsed = self.module.parse().await?;
- Ok(match &*parsed {
- ParseResult::Ok { exports, .. } => {
- let mut code = "__turbopack_export_value__({\n".to_string();
- for (key, elements) in exports {
- let content = elements
- .iter()
- .map(|element| match element {
- CssClassName::Local { name } | CssClassName::Global { name } => &**name,
- CssClassName::Import { .. } => "TODO",
- })
- .collect::<Vec<_>>()
- .join(" ");
- writeln!(code, " {}: {},", stringify_js(key), stringify_js(&content))?;
- }
- code += "});\n";
- EcmascriptChunkItemContent {
- inner_code: code.clone().into(),
- // We generate a minimal map for runtime code so that the filename is
- // displayed in dev tools.
- source_map: Some(generate_minimal_source_map(
- format!("{}.js", self.module.path().await?.path),
- code,
- )),
- ..Default::default()
- }
- }
- ParseResult::NotFound | ParseResult::Unparseable => {
- let code = "__turbopack_export_value__({});\n";
- EcmascriptChunkItemContent {
- inner_code: code.into(),
- // We generate a minimal map for runtime code so that the filename is
- // displayed in dev tools.
- source_map: Some(generate_minimal_source_map(
- format!("{}.js", self.module.path().await?.path),
- code.into(),
- )),
- ..Default::default()
- }
- }
- }
- .cell())
+ fn context(&self) -> AssetContextVc {
+ self.module.context()
+ }
+}
+
+#[turbo_tasks::value]
+struct CssProxyModuleChunkItem {
+ module: ModuleCssModuleAssetVc,
+ context: ChunkingContextVc,
+}
+
+#[turbo_tasks::value_impl]
+impl ValueToString for CssProxyModuleChunkItem {
+ #[turbo_tasks::function]
+ fn to_string(&self) -> StringVc {
+ self.module.as_chunk_item(self.context).to_string()
+ }
+}
+
+#[turbo_tasks::value_impl]
+impl ChunkItem for CssProxyModuleChunkItem {
+ #[turbo_tasks::function]
+ fn references(&self) -> AssetReferencesVc {
+ self.module.references()
+ }
+}
+
+#[turbo_tasks::value_impl]
+impl CssChunkItem for CssProxyModuleChunkItem {
+ #[turbo_tasks::function]
+ async fn content(&self) -> Result<CssChunkItemContentVc> {
+ Ok(self
+ .module
+ .await?
+ .inner
+ .as_chunk_item(self.context)
+ .content())
+ }
+
+ #[turbo_tasks::function]
+ fn chunking_context(&self) -> ChunkingContextVc {
+ self.context
 }
 }
 
@@ -244,3 +536,40 @@ fn generate_minimal_source_map(filename: String, source: String) -> ParseResultS
 let map = ParseResultSourceMap::new(sm, mappings);
 map.cell()
 }
+
+#[turbo_tasks::value(shared)]
+struct CssModuleComposesIssue {
+ severity: IssueSeverityVc,
+ path: FileSystemPathVc,
+ message: StringVc,
+}
+
+#[turbo_tasks::value_impl]
+impl Issue for CssModuleComposesIssue {
+ #[turbo_tasks::function]
+ fn severity(&self) -> IssueSeverityVc {
+ self.severity
+ }
+
+ #[turbo_tasks::function]
+ async fn title(&self) -> Result<StringVc> {
+ Ok(StringVc::cell(
+ "An issue occurred while resolving a CSS module `composes:` rule".to_string(),
+ ))
+ }
+
+ #[turbo_tasks::function]
+ fn category(&self) -> StringVc {
+ StringVc::cell("css".to_string())
+ }
+
+ #[turbo_tasks::function]
+ fn context(&self) -> FileSystemPathVc {
+ self.path
+ }
+
+ #[turbo_tasks::function]
+ fn description(&self) -> StringVc {
+ self.message
+ }
+}
diff --git a/crates/turbopack-css/src/references/compose.rs b/crates/turbopack-css/src/references/compose.rs
new file mode 100644
index 0000000000000..2f5ccc19c0a2a
--- /dev/null
+++ b/crates/turbopack-css/src/references/compose.rs
@@ -0,0 +1,53 @@
+use anyhow::Result;
+use turbo_tasks::{primitives::StringVc, Value, ValueToString, ValueToStringVc};
+use turbopack_core::{
+ chunk::{ChunkableAssetReference, ChunkableAssetReferenceVc},
+ reference::{AssetReference, AssetReferenceVc},
+ reference_type::CssReferenceSubType,
+ resolve::{origin::ResolveOriginVc, parse::RequestVc, ResolveResultVc},
+};
+
+use crate::references::css_resolve;
+
+/// A `composes: ... from ...` CSS module reference.
+#[turbo_tasks::value]
+#[derive(Hash, Debug)]
+pub struct CssModuleComposeReference {
+ pub origin: ResolveOriginVc,
+ pub request: RequestVc,
+}
+
+#[turbo_tasks::value_impl]
+impl CssModuleComposeReferenceVc {
+ /// Creates a new [`CssModuleComposeReference`].
+ #[turbo_tasks::function]
+ pub fn new(origin: ResolveOriginVc, request: RequestVc) -> Self {
+ Self::cell(CssModuleComposeReference { origin, request })
+ }
+}
+
+#[turbo_tasks::value_impl]
+impl AssetReference for CssModuleComposeReference {
+ #[turbo_tasks::function]
+ fn resolve_reference(&self) -> ResolveResultVc {
+ css_resolve(
+ self.origin,
+ self.request,
+ Value::new(CssReferenceSubType::Compose),
+ )
+ }
+}
+
+#[turbo_tasks::value_impl]
+impl ValueToString for CssModuleComposeReference {
+ #[turbo_tasks::function]
+ async fn to_string(&self) -> Result<StringVc> {
+ Ok(StringVc::cell(format!(
+ "compose(url) {}",
+ self.request.to_string().await?,
+ )))
+ }
+}
+
+#[turbo_tasks::value_impl]
+impl ChunkableAssetReference for CssModuleComposeReference {}
diff --git a/crates/turbopack-css/src/references/mod.rs b/crates/turbopack-css/src/references/mod.rs
index dcabbb0d86a55..2828ec1410364 100644
--- a/crates/turbopack-css/src/references/mod.rs
+++ b/crates/turbopack-css/src/references/mod.rs
@@ -27,6 +27,7 @@ use crate::{
 CssInputTransformsVc, CssModuleAssetType,
 };
 
+pub(crate) mod compose;
 pub(crate) mod import;
 pub(crate) mod url;
 
diff --git a/crates/turbopack-ecmascript/Cargo.toml b/crates/turbopack-ecmascript/Cargo.toml
index e79647e9e41fd..42800585bd280 100644
--- a/crates/turbopack-ecmascript/Cargo.toml
+++ b/crates/turbopack-ecmascript/Cargo.toml
@@ -15,7 +15,7 @@ async-trait = "0.1.56"
 easy-error = "1.0.0"
 fxhash = "0.2.1"
 indexmap = { workspace = true }
-indoc = "1.0"
+indoc = { workspace = true }
 lazy_static = "1.4.0"
 next-font = { path = "../next-font" }
 next-transform-dynamic = { path = "../next-transform-dynamic" }
diff --git a/crates/turbopack-tests/tests/snapshot/css/css/input/style.css b/crates/turbopack-tests/tests/snapshot/css/css/input/style.css
index f8017cfb225af..38ceb56638d8e 100644
--- a/crates/turbopack-tests/tests/snapshot/css/css/input/style.css
+++ b/crates/turbopack-tests/tests/snapshot/css/css/input/style.css
@@ -1,3 +1,7 @@
+@import url("./imported.css");
+/* De-duplicate similar imports */
+@import url("../input/imported.css");
+/* But not if they have different attributes */
 @import url("./imported.css") layer(layer) print;
 .style {
 color: yellow;
diff --git a/crates/turbopack-tests/tests/snapshot/css/css/input/style.module.css b/crates/turbopack-tests/tests/snapshot/css/css/input/style.module.css
index fc4e8827b82ac..cd9abdebd64a2 100644
--- a/crates/turbopack-tests/tests/snapshot/css/css/input/style.module.css
+++ b/crates/turbopack-tests/tests/snapshot/css/css/input/style.module.css
@@ -5,3 +5,13 @@
 background: purple;
 }
 }
+
+.composed-module-style {
+ composes: foo-module-style from "foo/style.module.css";
+ color: green;
+}
+
+.another-composed-module-style {
+ composes: foo-module-style from "foo/style.module.css";
+ color: yellow;
+}
\ No newline at end of file
diff --git a/crates/turbopack-tests/tests/snapshot/css/css/output/8697f_foo_style.module.css b/crates/turbopack-tests/tests/snapshot/css/css/output/8697f_foo_style.module.css
index 6e4de4f4868f6..fd1cad7f446ce 100644
--- a/crates/turbopack-tests/tests/snapshot/css/css/output/8697f_foo_style.module.css
+++ b/crates/turbopack-tests/tests/snapshot/css/css/output/8697f_foo_style.module.css
@@ -1,5 +1,5 @@
 /* chunk [workspace]/crates/turbopack-tests/tests/snapshot/css/css/output/8697f_foo_style.module.css */
-@layer s\[project\]\/crates\/turbopack-tests\/tests\/snapshot\/css\/css\/input\/node_modules\/foo\/style\.module\.css\ \(css\) {
+@layer 
s\[project\]\/crates\/turbopack-tests\/tests\/snapshot\/css\/css\/input\/node_modules\/foo\/style\.module\.css\ \(css\ module\) { .foo-module-style__style__abf9e738 { color: blue; } diff --git a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_531223.js b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_531223.js index 9fc4b235675aa..9fbdba47cebd8 100644 --- a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_531223.js +++ b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_531223.js @@ -16,6 +16,8 @@ console.log(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$ "[project]/crates/turbopack-tests/tests/snapshot/css/css/input/style.module.css (css module)": (({ r: __turbopack_require__, x: __turbopack_external_require__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, p: process, g: global, __dirname }) => (() => { __turbopack_export_value__({ + "another-composed-module-style": "another-composed-module-style__style__9bcf751c" + " " + __turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/css/css/input/node_modules/foo/style.module.css (css module)")["foo-module-style"], + "composed-module-style": "composed-module-style__style__9bcf751c" + " " + __turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/css/css/input/node_modules/foo/style.module.css (css module)")["foo-module-style"], "inner": "inner__style__9bcf751c", "module-style": "module-style__style__9bcf751c", }); diff --git a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_531223.js.map b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_531223.js.map index 7b9b68b138e03..73b0d8ea4ae2f 100644 --- a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_531223.js.map +++ b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_531223.js.map @@ -3,6 +3,6 @@ "sections": [ {"offset": {"line": 4, "column": 0}, "map": {"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/index.js"],"sourcesContent":["import \"foo/style.css\";\nimport \"foo\";\nimport \"./style.css\";\nimport fooStyle from \"foo/style.module.css\";\nimport style from \"./style.module.css\";\n\nconsole.log(style, fooStyle);\n"],"names":[],"mappings":";;;;;;;;AAMA,QAAQ,GAAG"}}, {"offset": {"line": 13, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}, - {"offset": {"line": 17, "column": 0}, "map": {"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/style.module.css.js"],"sourcesContent":["__turbopack_export_value__({\n \"inner\": \"inner__style__9bcf751c\",\n \"module-style\": \"module-style__style__9bcf751c\",\n});\n"],"names":[],"mappings":"AAAA;AACA;AACA;AACA"}}, - {"offset": {"line": 21, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}] + {"offset": {"line": 17, "column": 0}, "map": {"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/style.module.css.js"],"sourcesContent":["__turbopack_export_value__({\n 
\"another-composed-module-style\": \"another-composed-module-style__style__9bcf751c\" + \" \" + __turbopack_import__(\"[project]/crates/turbopack-tests/tests/snapshot/css/css/input/node_modules/foo/style.module.css (css module)\")[\"foo-module-style\"],\n \"composed-module-style\": \"composed-module-style__style__9bcf751c\" + \" \" + __turbopack_import__(\"[project]/crates/turbopack-tests/tests/snapshot/css/css/input/node_modules/foo/style.module.css (css module)\")[\"foo-module-style\"],\n \"inner\": \"inner__style__9bcf751c\",\n \"module-style\": \"module-style__style__9bcf751c\",\n});\n"],"names":[],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA"}}, + {"offset": {"line": 23, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}] } \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.css b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.css index 32febf3713d4d..9f2a8ac05b1f6 100644 --- a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.css +++ b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.css @@ -1,4 +1,13 @@ /* chunk [workspace]/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.css */ +/* import([project]/crates/turbopack-tests/tests/snapshot/css/css/input/imported.css (css)) */ + +@layer s\[project\]\/crates\/turbopack-tests\/tests\/snapshot\/css\/css\/input\/imported\.css\ \(css\) { +.imported { + color: cyan; +} +} + + /* import([project]/crates/turbopack-tests/tests/snapshot/css/css/input/imported.css (css)) */ @layer layer { @media print { diff --git a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.css.map b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.css.map index ce2530f0e339f..fec14d9064dfe 100644 --- a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.css.map +++ b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.css.map @@ -1,8 +1,10 @@ { "version": 3, "sections": [ - {"offset": {"line": 5, "column": 0}, "map": {"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/imported.css"],"sourcesContent":[".imported {\n color: cyan;\n}\n"],"names":[],"mappings":"AAAA,CAAC,QAAQ,CAAC,CAAC;EACT,KAAK,EAAE,IAAI;AACb,CAAC"}}, - {"offset": {"line": 7, "column": 1}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}, - {"offset": {"line": 13, "column": 0}, "map": {"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/style.css"],"sourcesContent":["@import url(\"./imported.css\") layer(layer) print;\n.style {\n color: yellow;\n}\n"],"names":[],"mappings":"AACA,CAAC,KAAK,CAAC,CAAC;EACN,KAAK,EAAE,MAAM;AACf,CAAC"}}, - {"offset": {"line": 15, "column": 1}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}] + {"offset": {"line": 4, "column": 0}, "map": {"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/imported.css"],"sourcesContent":[".imported {\n color: cyan;\n}\n"],"names":[],"mappings":"AAAA,CAAC,QAAQ,CAAC,CAAC;EACT,KAAK,EAAE,IAAI;AACb,CAAC"}}, + {"offset": {"line": 6, "column": 1}, "map": 
{"version":3,"sources":[],"names":[],"mappings":"A"}}, + {"offset": {"line": 14, "column": 0}, "map": {"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/imported.css"],"sourcesContent":[".imported {\n color: cyan;\n}\n"],"names":[],"mappings":"AAAA,CAAC,QAAQ,CAAC,CAAC;EACT,KAAK,EAAE,IAAI;AACb,CAAC"}}, + {"offset": {"line": 16, "column": 1}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}, + {"offset": {"line": 22, "column": 0}, "map": {"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/style.css"],"sourcesContent":["@import url(\"./imported.css\");\n/* De-duplicate similar imports */\n@import url(\"../input/imported.css\");\n/* But not if they have different attributes */\n@import url(\"./imported.css\") layer(layer) print;\n.style {\n color: yellow;\n}\n"],"names":[],"mappings":"AAKA,CAAC,KAAK,CAAC,CAAC;EACN,KAAK,EAAE,MAAM;AACf,CAAC"}}, + {"offset": {"line": 24, "column": 1}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}] } \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.module.css b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.module.css index daaf02703a5e8..3f3eed4b19c53 100644 --- a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.module.css +++ b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.module.css @@ -1,5 +1,5 @@ /* chunk [workspace]/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.module.css */ -@layer s\[project\]\/crates\/turbopack-tests\/tests\/snapshot\/css\/css\/input\/style\.module\.css\ \(css\) { +@layer s\[project\]\/crates\/turbopack-tests\/tests\/snapshot\/css\/css\/input\/style\.module\.css\ \(css\ module\) { .module-style__style__9bcf751c { color: magenta; } @@ -7,6 +7,12 @@ .module-style__style__9bcf751c + .inner__style__9bcf751c { background: purple; } +.composed-module-style__style__9bcf751c { + color: green; +} +.another-composed-module-style__style__9bcf751c { + color: yellow; +} } diff --git a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.module.css.map b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.module.css.map index 50a6ef92f1c9a..4aaa78e2bf72a 100644 --- a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.module.css.map +++ b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_style.module.css.map @@ -1,6 +1,6 @@ { "version": 3, "sections": [ - {"offset": {"line": 2, "column": 0}, "map": {"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/style.module.css"],"sourcesContent":[".module-style {\n color: magenta;\n > h1,\n + .inner {\n background: purple;\n }\n}\n"],"names":[],"mappings":"AAAA,CAAC,6BAAY,CAAC,CAAC;EACb,KAAK,EAAE,OAAO;AAKhB,CAAC;AAND,CAAC,6BAAY,CAEX,CAAC,CAAC,EAAE;AAFN,CAAC,6BAAY,CAGX,CAAC,CAAC,CAAC,sBAAK,CAAC,CAAC;EACR,UAAU,EAAE,MAAM;AACpB,CAAC"}}, - {"offset": {"line": 8, "column": 1}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}] + {"offset": {"line": 2, "column": 0}, "map": 
{"version":3,"sources":["/crates/turbopack-tests/tests/snapshot/css/css/input/style.module.css"],"sourcesContent":[".module-style {\n color: magenta;\n > h1,\n + .inner {\n background: purple;\n }\n}\n\n.composed-module-style {\n composes: foo-module-style from \"foo/style.module.css\";\n color: green;\n}\n\n.another-composed-module-style {\n composes: foo-module-style from \"foo/style.module.css\";\n color: yellow;\n}"],"names":[],"mappings":"AAAA,CAAC,6BAAY,CAAC,CAAC;EACb,KAAK,EAAE,OAAO;AAKhB,CAAC;AAND,CAAC,6BAAY,CAEX,CAAC,CAAC,EAAE;AAFN,CAAC,6BAAY,CAGX,CAAC,CAAC,CAAC,sBAAK,CAAC,CAAC;EACR,UAAU,EAAE,MAAM;AACpB,CAAC;AAGH,CAAC,sCAAqB,CAAC,CAAC;EAEtB,KAAK,EAAE,KAAK;AACd,CAAC;AAED,CAAC,8CAA6B,CAAC,CAAC;EAE9B,KAAK,EAAE,MAAM;AACf,CAAC"}}, + {"offset": {"line": 14, "column": 1}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}] } \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_f0bbb5.js b/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_6545dc.js similarity index 98% rename from crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_f0bbb5.js rename to crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_6545dc.js index 9f094626a8880..37c777498018f 100644 --- a/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_f0bbb5.js +++ b/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_6545dc.js @@ -1,4 +1,4 @@ -(self.TURBOPACK = self.TURBOPACK || []).push(["output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_f0bbb5.js", { +(self.TURBOPACK = self.TURBOPACK || []).push(["output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_6545dc.js", { "[project]/crates/turbopack-tests/tests/snapshot/emotion/emotion/input/index.js (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, p: process, g: global, __dirname }) => (() => { @@ -29,7 +29,7 @@ console.log(StyledButton, ClassNameButton); })()), }, ({ loadedChunks, instantiateRuntimeModule }) => { - if(!(true && loadedChunks.has("output/63a02_@emotion_react_jsx-dev-runtime.js") && loadedChunks.has("output/63a02_@emotion_react_index.js") && loadedChunks.has("output/63a02_@emotion_styled_index.js") && loadedChunks.has("output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_ccc906.js"))) return true; + if(!(true && loadedChunks.has("output/63a02_@emotion_styled_index.js") && loadedChunks.has("output/63a02_@emotion_react_index.js") && loadedChunks.has("output/63a02_@emotion_react_jsx-dev-runtime.js") && loadedChunks.has("output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_8c70a7.js"))) return true; instantiateRuntimeModule("[project]/crates/turbopack-tests/tests/snapshot/emotion/emotion/input/index.js (ecmascript)"); }]); (() => { @@ -1079,4 +1079,4 @@ globalThis.TURBOPACK = { })(); -//# sourceMappingURL=crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_f0bbb5.js.map \ No newline at end of file +//# 
sourceMappingURL=crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_6545dc.js.map \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_f0bbb5.js.map b/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_6545dc.js.map similarity index 100% rename from crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_f0bbb5.js.map rename to crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_6545dc.js.map diff --git a/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_a3868e.js b/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_968e59.js similarity index 98% rename from crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_a3868e.js rename to crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_968e59.js index ba449bf058035..3f8a0c2a48181 100644 --- a/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_a3868e.js +++ b/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_968e59.js @@ -1,4 +1,4 @@ -(self.TURBOPACK = self.TURBOPACK || []).push(["output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_a3868e.js", { +(self.TURBOPACK = self.TURBOPACK || []).push(["output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_968e59.js", { "[project]/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/input/packages/app/index.js (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, p: process, g: global, __dirname }) => (() => { @@ -11,7 +11,7 @@ console.log(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$ })()), }, ({ loadedChunks, instantiateRuntimeModule }) => { - if(!(true && loadedChunks.has("output/63a02_react_jsx-dev-runtime.js") && loadedChunks.has("output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_c4293a.js") && loadedChunks.has("output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_component_index.js") && loadedChunks.has("output/7b7bf_third_party_component_index.js"))) return true; + if(!(true && loadedChunks.has("output/63a02_react_jsx-dev-runtime.js") && loadedChunks.has("output/7b7bf_third_party_component_index.js") && loadedChunks.has("output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_component_index.js") && loadedChunks.has("output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_569e79.js"))) return true; 
instantiateRuntimeModule("[project]/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/input/packages/app/index.js (ecmascript)"); }]); (() => { @@ -1061,4 +1061,4 @@ globalThis.TURBOPACK = { })(); -//# sourceMappingURL=a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_a3868e.js.map \ No newline at end of file +//# sourceMappingURL=a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_968e59.js.map \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_a3868e.js.map b/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_968e59.js.map similarity index 100% rename from crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_a3868e.js.map rename to crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_968e59.js.map diff --git a/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_3894a9.js b/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_311eca.js similarity index 99% rename from crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_3894a9.js rename to crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_311eca.js index 1e08534ab1d46..97b3e07e8c5f3 100644 --- a/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_3894a9.js +++ b/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_311eca.js @@ -1,4 +1,4 @@ -(self.TURBOPACK = self.TURBOPACK || []).push(["output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_3894a9.js", { +(self.TURBOPACK = self.TURBOPACK || []).push(["output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_311eca.js", { "[project]/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/input/index.js (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, p: process, g: global, __dirname }) => (() => { @@ -13,7 +13,7 @@ console.log(Foo, [].includes("foo")); })()), }, ({ loadedChunks, instantiateRuntimeModule }) => { - if(!(true && loadedChunks.has("output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_eede4f.js") && loadedChunks.has("output/63a02_@swc_helpers_src__class_call_check.mjs._.js"))) return true; + if(!(true && loadedChunks.has("output/63a02_@swc_helpers_src__class_call_check.mjs._.js") && loadedChunks.has("output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_eede4f.js"))) return true; 
instantiateRuntimeModule("[project]/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/input/index.js (ecmascript)");
 }]);
 (() => {
@@ -1063,4 +1063,4 @@ globalThis.TURBOPACK = {
 
 })();
 
-//# sourceMappingURL=79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_3894a9.js.map
\ No newline at end of file
+//# sourceMappingURL=79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_311eca.js.map
\ No newline at end of file
diff --git a/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_3894a9.js.map b/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_311eca.js.map
similarity index 100%
rename from crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_3894a9.js.map
rename to crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_311eca.js.map

From 7fa70425f2f7d32481a639733ef6ab1225d2d161 Mon Sep 17 00:00:00 2001
From: Nicholas Yang
Date: Wed, 15 Feb 2023 11:09:08 -0500
Subject: [PATCH 29/31] docs: Documented process for adding crate (#3806)

---
 CONTRIBUTING.md | 58 +++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 58 insertions(+)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 4f67396da31ea..b775578c62ba4 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -14,6 +14,7 @@ Thanks for your interest in contributing to Turbo!
 - [Benchmarking Turborepo](#benchmarking-turborepo)
 - [Updating `turbo`](#updating-turbo)
 - [Publishing `turbo` to the npm registry](#publishing-turbo-to-the-npm-registry)
+ - [Adding a new crate](#adding-a-new-crate)
 - [Contributing to Turbopack](#contributing-to-turbopack)
 - [Turbopack Architecture](#turbopack-architecture)
 - [Testing Turbopack](#testing-turbopack)
@@ -132,6 +133,63 @@ These lists are by no means exhaustive. Feel free to add to them with other stra
 
 See [the publishing guide](./release.md#release-turborepo).
 
+## Adding A New Crate
+
+When adding a new crate to the repo, it is essential that it is included in or excluded from the
+relevant workflows. This ensures that changes to the crate are tested by the correct workflows,
+but that they do not trigger unnecessary workflows as well.
+
+First, determine whether the crate is for Turbopack or Turborepo. If it is for Turbopack, then the crate
+should be added to the `default-members` key in the root `Cargo.toml`. If the crate is for Turborepo, the
+crate must be added to the `PATTERNS` list in the "Turborepo related changes" section of the `test.yml`
+workflow file. It must also be excluded from the "Turbopack related changes" section of the
+`test.yml` workflow file.
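+
+As a rough sketch of the Turbopack case (the crate name `turbopack-foo` and the
+surrounding entries are illustrative, not the real list), the `default-members`
+change is a one-line addition in the root `Cargo.toml`:
+
+```toml
+[workspace]
+default-members = [
+  # ...existing Turbopack crates...
+  "crates/turbopack-foo", # the new Turbopack crate
+]
+```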
+
+For the Turborepo case, if we were adding a `turborepo-foo` crate, we would add the following patterns:
+
+```diff
+ - name: Turbopack related changes
+ id: turbopack
+ uses: technote-space/get-diff-action@v6
+ with:
+ PATTERNS: |
+ pnpm-lock.yaml
+ package.json
+ crates/**
+ xtask/**
+ .cargo/**
+ rust-toolchain
+ !crates/turborepo/**
+ !crates/turborepo-lib/**
+ !crates/turborepo-ffi/**
+ !crates/turbo-updater/**
++ !crates/turborepo-foo/**
+ !**.md
+ !**.mdx
+
+ - name: Turborepo related changes
+ id: turborepo
+ uses: technote-space/get-diff-action@v6
+ with:
+ PATTERNS: |
+ pnpm-lock.yaml
+ package.json
+ crates/turborepo/**
+ crates/turborepo-lib/**
+ crates/turborepo-ffi/**
+ crates/turbo-updater/**
++ crates/turborepo-foo/**
+ .cargo/**
+ rust-toolchain
+ !**.md
+ !**.mdx
+```
+
+The crate must also be explicitly excluded from build commands
+when building Turbopack. To do so, add a `--exclude turborepo-foo`
+flag to the build command. Search through `test.yml` and add this
+flag to all cargo commands that already exclude `turborepo-lib`.
+
 ## Contributing to Turbopack
 
 Turbopack uses [Cargo workspaces][workspaces] in the Turbo monorepo. You'll find

From 4198e198417da61a3d03133b3c2110d9d1a3eb41 Mon Sep 17 00:00:00 2001
From: Nicholas Yang
Date: Wed, 15 Feb 2023 11:36:24 -0500
Subject: [PATCH 30/31] fix: Link formatting (#3804)

Realized our ported link command is slightly different from the original Go
one. It's still a little different, but not significantly:

# Old link

[screenshots of the old `npx turbo link` prompt omitted]

# New link:

[screenshots of the new `npx turbo link` prompt omitted]

---
 Cargo.lock | 10 +++++++
 crates/turborepo-lib/Cargo.toml | 2 +-
 crates/turborepo-lib/src/commands/link.rs | 33 ++++++++++++++++++----
 3 files changed, 37 insertions(+), 8 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 18256de23872c..a2085ba7525d9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1636,6 +1636,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "af3c796f3b0b408d9fd581611b47fa850821fcb84aa640b83a3c1a5be2d691f2"
 dependencies = [
 "console",
+ "fuzzy-matcher",
 "shell-words",
 "tempfile",
 "zeroize",
@@ -2256,6 +2257,15 @@ dependencies = [
 "slab",
 ]
 
+[[package]]
+name = "fuzzy-matcher"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94"
+dependencies = [
+ "thread_local",
+]
+
 [[package]]
 name = "fxhash"
 version = "0.2.1"
diff --git a/crates/turborepo-lib/Cargo.toml b/crates/turborepo-lib/Cargo.toml
index ada532f3bc562..37456af29dbb5 100644
--- a/crates/turborepo-lib/Cargo.toml
+++ b/crates/turborepo-lib/Cargo.toml
@@ -28,7 +28,7 @@ clap = { version = "4.0.22", features = ["derive"] }
 clap_complete = "4.0.6"
 config = "0.13"
 console = "0.15.5"
-dialoguer = "0.10.3"
+dialoguer = { version = "0.10.3", features = ["fuzzy-select"] }
 dirs-next = "2.0.0"
 dunce = "1.0"
 env_logger = "0.10.0"
diff --git a/crates/turborepo-lib/src/commands/link.rs b/crates/turborepo-lib/src/commands/link.rs
index 9999404aad3a1..6356c18efe6ab 100644
--- a/crates/turborepo-lib/src/commands/link.rs
+++ b/crates/turborepo-lib/src/commands/link.rs
@@ -11,7 +11,7 @@ use anyhow::{anyhow, Context, Result};
 #[cfg(not(test))]
 use console::Style;
 #[cfg(not(test))]
-use dialoguer::Select;
+use dialoguer::FuzzySelect;
 use dialoguer::{theme::ColorfulTheme, Confirm};
 use dirs_next::home_dir;
 #[cfg(test)]
@@ -105,8 +105,10 @@ pub async fn link(base: &mut CommandBase, modify_gitignore: bool) -> Result<()>
 let homedir = homedir_path.to_string_lossy();
 println!(
 ">>> Remote Caching
+
 {}
- For more info, see {}",
+ For more info, see {}
+ ",
 REMOTE_CACHING_INFO,
 base.ui.apply(UNDERLINE.apply_to(REMOTE_CACHING_URL))
 );
@@ -141,7 +143,7 @@ pub async fn link(base: &mut CommandBase, modify_gitignore: bool) -> Result<()>
 .as_deref()
 .unwrap_or(user_response.user.username.as_str());
 
- let selected_team = select_team(&teams_response.teams, user_display_name)?;
+ let selected_team = select_team(base, &teams_response.teams, user_display_name)?;
 
 let team_id = match selected_team {
 SelectedTeam::User => user_response.user.id.as_str(),
@@ -171,7 +173,7 @@ pub async fn link(base: &mut CommandBase, modify_gitignore: bool) -> Result<()>
 {}
 ",
 base.ui.rainbow(">>> Success!"),
- chosen_team_name,
+ base.ui.apply(BOLD.apply_to(chosen_team_name)),
 GREY.apply_to("To disable Remote Caching, run `npx turbo unlink`")
 );
 Ok(())
@@ -189,7 +191,7 @@ fn should_enable_caching() -> Result<bool> {
 }
 
 #[cfg(test)]
-fn select_team<'a>(teams: &'a [Team], _: &'a str) -> Result<SelectedTeam<'a>> {
+fn select_team<'a>(_: &CommandBase, teams: &'a [Team], _: &'a str) -> Result<SelectedTeam<'a>> {
 let mut rng = rand::thread_rng();
 let idx = rng.gen_range(0..=(teams.len()));
 if idx == teams.len() {
@@ -200,16 +202,33 @@ fn select_team<'a>(teams: &'a [Team], _: &'a str) -> Result<SelectedTeam<'a>> {
 }
 
 #[cfg(not(test))]
-fn select_team<'a>(teams: &'a [Team], user_display_name: &'a str) -> Result<SelectedTeam<'a>> {
+fn select_team<'a>(
+ base: &CommandBase,
+ teams: &'a [Team],
+ user_display_name: &'a str,
+) -> Result<SelectedTeam<'a>> {
 let mut team_names = vec![user_display_name];
 team_names.extend(teams.iter().map(|team| team.name.as_str()));
 
 let theme = ColorfulTheme {
 active_item_style: Style::new().cyan().bold(),
 active_item_prefix: Style::new().cyan().bold().apply_to(">".to_string()),
+ prompt_prefix: Style::new().dim().bold().apply_to("?".to_string()),
+ values_style: Style::new().cyan(),
 ..ColorfulTheme::default()
 };
- let selection = Select::with_theme(&theme)
+
+ let prompt = format!(
+ "{}\n {}",
+ base.ui.apply(BOLD.apply_to(
+ "Which Vercel scope (and Remote Cache) do you want associated with this Turborepo?",
+ )),
+ base.ui
+ .apply(CYAN.apply_to("[Use arrows to move, type to filter]"))
+ );
+
+ let selection = FuzzySelect::with_theme(&theme)
+ .with_prompt(prompt)
 .items(&team_names)
 .default(0)
 .interact()?;

From 805a5ae14f1ab67f3c3e01e758f7b53d44f9d696 Mon Sep 17 00:00:00 2001
From: David Pike <1872727+oddnavy@users.noreply.github.com>
Date: Thu, 16 Feb 2023 04:05:43 +1100
Subject: [PATCH 31/31] use consistent actions/checkout version in examples
 (#3791)

The other example on this page uses `actions/checkout@v3`

---
 docs/pages/repo/docs/ci/github-actions.mdx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/pages/repo/docs/ci/github-actions.mdx b/docs/pages/repo/docs/ci/github-actions.mdx
index ed49564fbcd77..6076abea24552 100644
--- a/docs/pages/repo/docs/ci/github-actions.mdx
+++ b/docs/pages/repo/docs/ci/github-actions.mdx
@@ -223,7 +223,7 @@ jobs:
 
     steps:
       - name: Check out code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
        with:
          fetch-depth: 2
 # ...