From 49a9cdfe44f505a5ca36270b137ea88bb573559a Mon Sep 17 00:00:00 2001 From: Chris Olszewski Date: Mon, 24 Apr 2023 12:43:15 -0700 Subject: [PATCH 01/24] fix: rebuild protobuf code and update ffi callsite (#4685) ### Description Not quite sure how this got in, but I think the generated Go protobuf code got out of sync. This PR is the result of `make turborepo-ffi-proto` on main and then fixing the build error caused by trying to construct `ChangedFilesReq` with a `*string` instead of a `string` in the `ToCommit` field ### Testing Instructions --- cli/internal/ffi/ffi.go | 3 +- cli/internal/ffi/proto/messages.pb.go | 146 +++++++++++++------------- 2 files changed, 73 insertions(+), 76 deletions(-) diff --git a/cli/internal/ffi/ffi.go b/cli/internal/ffi/ffi.go index 7ac15e45ff59e..d767488509414 100644 --- a/cli/internal/ffi/ffi.go +++ b/cli/internal/ffi/ffi.go @@ -118,12 +118,11 @@ func stringToRef(s string) *string { // ChangedFiles returns the files changed in between two commits, the workdir and the index, and optionally untracked files func ChangedFiles(gitRoot string, turboRoot string, fromCommit string, toCommit string) ([]string, error) { fromCommitRef := stringToRef(fromCommit) - toCommitRef := stringToRef(toCommit) req := ffi_proto.ChangedFilesReq{ GitRoot: gitRoot, FromCommit: fromCommitRef, - ToCommit: toCommitRef, + ToCommit: toCommit, TurboRoot: turboRoot, } diff --git a/cli/internal/ffi/proto/messages.pb.go b/cli/internal/ffi/proto/messages.pb.go index 22992d32663a5..666f32a164416 100644 --- a/cli/internal/ffi/proto/messages.pb.go +++ b/cli/internal/ffi/proto/messages.pb.go @@ -273,7 +273,7 @@ type ChangedFilesReq struct { GitRoot string `protobuf:"bytes,1,opt,name=git_root,json=gitRoot,proto3" json:"git_root,omitempty"` TurboRoot string `protobuf:"bytes,2,opt,name=turbo_root,json=turboRoot,proto3" json:"turbo_root,omitempty"` FromCommit *string `protobuf:"bytes,3,opt,name=from_commit,json=fromCommit,proto3,oneof" json:"from_commit,omitempty"` - 
ToCommit *string `protobuf:"bytes,4,opt,name=to_commit,json=toCommit,proto3,oneof" json:"to_commit,omitempty"` + ToCommit string `protobuf:"bytes,4,opt,name=to_commit,json=toCommit,proto3" json:"to_commit,omitempty"` } func (x *ChangedFilesReq) Reset() { @@ -330,8 +330,8 @@ func (x *ChangedFilesReq) GetFromCommit() string { } func (x *ChangedFilesReq) GetToCommit() string { - if x != nil && x.ToCommit != nil { - return *x.ToCommit + if x != nil { + return x.ToCommit } return "" } @@ -1027,84 +1027,82 @@ var file_turborepo_ffi_messages_proto_rawDesc = []byte{ 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x24, 0x0a, 0x0c, 0x47, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x22, - 0xb1, 0x01, 0x0a, 0x0f, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, + 0x9e, 0x01, 0x0a, 0x0f, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x12, 0x19, 0x0a, 0x08, 0x67, 0x69, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x67, 0x69, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x24, 0x0a, 0x0b, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x72, 0x6f, 0x6d, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x08, 0x74, 0x6f, 0x43, 0x6f, 0x6d, 0x6d, - 0x69, 0x74, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, - 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x74, 0x6f, 0x5f, 
0x63, 0x6f, 0x6d, - 0x6d, 0x69, 0x74, 0x22, 0x61, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, - 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x12, 0x29, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, - 0x46, 0x69, 0x6c, 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x66, 0x69, 0x6c, - 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x28, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, - 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x69, - 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, - 0x22, 0x6d, 0x0a, 0x12, 0x50, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x43, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x12, 0x19, 0x0a, 0x08, 0x67, 0x69, 0x74, 0x5f, 0x72, 0x6f, - 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x67, 0x69, 0x74, 0x52, 0x6f, 0x6f, - 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x66, 0x72, 0x6f, 0x6d, 0x43, 0x6f, 0x6d, 0x6d, - 0x69, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x22, - 0x55, 0x0a, 0x13, 0x50, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x43, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x12, 0x1a, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x48, 0x00, 
0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xf0, 0x01, 0x0a, 0x15, 0x54, 0x72, 0x61, 0x6e, 0x73, - 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x0d, - 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x64, 0x69, 0x72, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x44, 0x69, - 0x72, 0x12, 0x53, 0x0a, 0x0f, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, - 0x64, 0x65, 0x70, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x54, 0x72, 0x61, - 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x44, 0x65, 0x70, - 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0e, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, - 0x65, 0x64, 0x44, 0x65, 0x70, 0x73, 0x1a, 0x41, 0x0a, 0x13, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, - 0x6c, 0x76, 0x65, 0x64, 0x44, 0x65, 0x70, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, - 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x70, 0x0a, 0x16, 0x54, 0x72, 0x61, - 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, - 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4c, 0x69, 
0x73, 0x74, 0x48, 0x00, 0x52, 0x08, 0x70, - 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x6f, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x22, 0x61, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x12, 0x29, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, 0x6c, + 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x12, + 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x28, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, + 0x6c, 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x22, 0x6d, 0x0a, + 0x12, 0x50, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x52, 0x65, 0x71, 0x12, 0x19, 0x0a, 0x08, 0x67, 0x69, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x67, 0x69, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x1f, + 0x0a, 0x0b, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0a, 0x66, 0x72, 0x6f, 0x6d, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x12, + 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x22, 0x55, 0x0a, 
0x13, + 0x50, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, + 0x65, 0x73, 0x70, 0x12, 0x1a, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, + 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0xf0, 0x01, 0x0a, 0x15, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, + 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, + 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x77, 0x6f, 0x72, + 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x64, 0x69, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x44, 0x69, 0x72, 0x12, 0x53, + 0x0a, 0x0f, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, 0x64, 0x65, 0x70, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, + 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x44, 0x65, 0x70, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x0e, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x44, + 0x65, 0x70, 0x73, 0x1a, 0x41, 0x0a, 0x13, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, + 0x64, 0x44, 0x65, 0x70, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x02, 0x38, 
0x01, 0x22, 0x70, 0x0a, 0x16, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x32, 0x0a, 0x08, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, + 0x6b, 0x61, 0x67, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x08, 0x70, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, + 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x53, 0x0a, 0x0f, 0x4c, 0x6f, 0x63, 0x6b, + 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, + 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a, + 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x6f, 0x75, 0x6e, 0x64, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x22, 0x3b, 0x0a, + 0x13, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, + 0x4c, 0x69, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, + 0x6b, 0x61, 0x67, 0x65, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x22, 0x69, 0x0a, 0x0f, 0x53, 0x75, + 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, + 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x77, 0x6f, 0x72, + 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 
0x28, 0x09, 0x52, 0x0a, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x63, + 0x6b, 0x61, 0x67, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x63, + 0x6b, 0x61, 0x67, 0x65, 0x73, 0x22, 0x54, 0x0a, 0x10, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, + 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1c, 0x0a, 0x08, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, - 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x53, 0x0a, 0x0f, 0x4c, - 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x6f, - 0x75, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x66, 0x6f, 0x75, 0x6e, 0x64, - 0x22, 0x3b, 0x0a, 0x13, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, - 0x61, 0x67, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, - 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x22, 0x69, 0x0a, - 0x0f, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1e, 0x0a, 0x0a, - 0x77, 
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, - 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, - 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, - 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x22, 0x54, 0x0a, 0x10, 0x53, 0x75, 0x62, 0x67, - 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1c, 0x0a, 0x08, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, - 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x0b, - 0x5a, 0x09, 0x66, 0x66, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x0b, 0x5a, 0x09, 0x66, + 0x66, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( From f91efbe98eff5fe533d3831dfcc34cf6bd831926 Mon Sep 17 00:00:00 2001 From: Alexander Lyon Date: Tue, 25 Apr 2023 09:35:33 +0100 Subject: [PATCH 02/24] Daemon server rust port (#4306) ### Description This ports the daemon server to rust, leaving only run an prune in go-land. This needs some polish but I would like to get some feedback now. I have also added a flag for running the grpc server over http instead, which is useful for debugging. It has support for reflection, so can be used via postman or grpcurl. For code reviews, it should be ok to go in commit-order. 
Outstanding parts: - [x] incomplete tests in glob watch - [x] error handling for watch errors - [ ] logging / log rotation Out of scope (for now): - startup performance pass ### Testing Instructions This is a drop-in replacement for the go server, so `turbo run` in any repo should work identically. --- .github/CODEOWNERS | 1 + .github/workflows/test.yml | 8 +- .gitignore | 3 + Cargo.lock | 301 +++++--- Cargo.toml | 2 + cli/internal/cmd/root.go | 5 +- cli/internal/daemon/connector/connector.go | 13 +- cli/internal/daemon/daemon.go | 223 ------ cli/internal/daemon/daemon_test.go | 262 ------- cli/internal/globwatcher/globwatcher.go | 210 ------ cli/internal/globwatcher/globwatcher_test.go | 232 ------- cli/internal/server/server.go | 192 ------ cli/internal/server/server_test.go | 73 -- cli/internal/turbostate/turbostate.go | 5 +- crates/glob-match/benches/bench.rs | 4 +- crates/glob-match/src/lib.rs | 168 +++-- crates/globwatch/Cargo.toml | 35 + crates/globwatch/examples/cancel.rs | 45 ++ crates/globwatch/readme.md | 27 + crates/globwatch/src/lib.rs | 516 ++++++++++++++ crates/pidlock/Cargo.toml | 4 +- crates/pidlock/src/lib.rs | 63 +- crates/turborepo-lib/Cargo.toml | 14 +- crates/turborepo-lib/build.rs | 1 + crates/turborepo-lib/src/cli.rs | 33 +- crates/turborepo-lib/src/commands/daemon.rs | 36 +- .../turborepo-lib/src/daemon/bump_timeout.rs | 69 ++ .../src/daemon/bump_timeout_layer.rs | 62 ++ crates/turborepo-lib/src/daemon/client.rs | 9 + crates/turborepo-lib/src/daemon/connector.rs | 11 +- crates/turborepo-lib/src/daemon/endpoint.rs | 172 +++++ crates/turborepo-lib/src/daemon/mod.rs | 9 + crates/turborepo-lib/src/daemon/server.rs | 386 +++++++++++ crates/turborepo-lib/src/globwatcher/mod.rs | 642 ++++++++++++++++++ crates/turborepo-lib/src/lib.rs | 1 + crates/turborepo/Cargo.toml | 1 + 36 files changed, 2414 insertions(+), 1424 deletions(-) delete mode 100644 cli/internal/daemon/daemon_test.go delete mode 100644 cli/internal/globwatcher/globwatcher.go delete 
mode 100644 cli/internal/globwatcher/globwatcher_test.go delete mode 100644 cli/internal/server/server.go delete mode 100644 cli/internal/server/server_test.go create mode 100644 crates/globwatch/Cargo.toml create mode 100644 crates/globwatch/examples/cancel.rs create mode 100644 crates/globwatch/readme.md create mode 100644 crates/globwatch/src/lib.rs create mode 100644 crates/turborepo-lib/src/daemon/bump_timeout.rs create mode 100644 crates/turborepo-lib/src/daemon/bump_timeout_layer.rs create mode 100644 crates/turborepo-lib/src/daemon/endpoint.rs create mode 100644 crates/turborepo-lib/src/daemon/server.rs create mode 100644 crates/turborepo-lib/src/globwatcher/mod.rs diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 1a8a2e547f52c..da437bd797088 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -35,6 +35,7 @@ pnpm-lock.yaml /crates @vercel/web-tooling # overrides for crates that are owned by turbo-oss /crates/globwatch @vercel/turbo-oss +/crates/pidlock @vercel/turbo-oss /crates/turbopath @vercel/turbo-oss /crates/turborepo @vercel/turbo-oss /crates/turborepo-api-client @vercel/turbo-oss diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a4649b2c7b3a6..021ddc68c1142 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -95,7 +95,9 @@ jobs: xtask/** .cargo/** rust-toolchain - !glob-watch + !crates/glob-match/** + !crates/globwatch/** + !crates/pidlock/** !crates/turborepo/** !crates/turborepo-api-client/** !crates/turborepo-lib/** @@ -121,7 +123,9 @@ jobs: PATTERNS: | pnpm-lock.yaml package.json - glob-watch + crates/glob-match/** + crates/globwatch/** + crates/pidlock/** crates/turborepo/** crates/turborepo-api-client/** crates/turborepo-lib/** diff --git a/.gitignore b/.gitignore index 8114a80f5b0d2..5883dcd9c8841 100644 --- a/.gitignore +++ b/.gitignore @@ -53,3 +53,6 @@ cli/internal/ffi/libturborepo_ffi*.a *.t.rej *.t.orig .cram_env + +# generated by tonic +file_descriptor_set.bin diff --git 
a/Cargo.lock b/Cargo.lock index 22e94fd3b1d79..cd3cb3e804759 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -259,7 +259,7 @@ dependencies = [ "log", "parking", "polling", - "rustix 0.37.11", + "rustix 0.37.3", "slab", "socket2", "waker-fn", @@ -309,7 +309,7 @@ checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.8", ] [[package]] @@ -369,13 +369,13 @@ checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" [[package]] name = "async-trait" -version = "0.1.68" +version = "0.1.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" +checksum = "86ea188f25f0255d8f92797797c97ebf5631fa88178beb1a46fdf5622c9a00e4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.8", ] [[package]] @@ -1322,9 +1322,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "core-graphics" @@ -1378,9 +1378,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.6" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "280a9f2d8b3a38871a3c8a46fb80db65e5e5ed97da80c4d08bf27fb63e35e181" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" dependencies = [ "libc", ] @@ -1552,9 +1552,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.8" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "cf2b3e8478797446514c91ef04bafcb59faba183e621ad488df88983cc14128c" dependencies = [ 
"cfg-if 1.0.0", "crossbeam-utils", @@ -1735,7 +1735,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.15", + "syn 2.0.8", ] [[package]] @@ -1752,7 +1752,7 @@ checksum = "631569015d0d8d54e6c241733f944042623ab6df7bc3be7466874b05fcdb1c5f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.8", ] [[package]] @@ -2147,13 +2147,13 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +checksum = "50d6a0976c999d473fe89ad888d5a284e55366d9dc9038b1ba2aa15128c4afa0" dependencies = [ "errno-dragonfly", "libc", - "windows-sys 0.48.0", + "windows-sys 0.45.0", ] [[package]] @@ -2207,14 +2207,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.21" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153" +checksum = "8a3de6e8d11b22ff9edc6d916f890800597d60f8b2da1caf2955c274638d6412" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.16", - "windows-sys 0.48.0", + "redox_syscall", + "windows-sys 0.45.0", ] [[package]] @@ -2340,7 +2340,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" dependencies = [ "bitflags 1.3.2", - "fsevent-sys", + "fsevent-sys 2.0.1", ] [[package]] @@ -2352,6 +2352,15 @@ dependencies = [ "libc", ] +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + [[package]] name = "fuchsia-cprng" version = "0.1.1" @@ -2376,9 +2385,9 @@ checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" [[package]] name = "futures" -version = "0.3.28" 
+version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "531ac96c6ff5fd7c62263c5e3c67a603af4fcaee2e1a0ae5565ba3a11e69e549" dependencies = [ "futures-channel", "futures-core", @@ -2391,9 +2400,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.28" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +checksum = "164713a5a0dcc3e7b4b1ed7d3b433cabc18025386f9339346e8daf15963cf7ac" dependencies = [ "futures-core", "futures-sink", @@ -2401,15 +2410,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.28" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +checksum = "86d7a0c1aa76363dac491de0ee99faf6941128376f1cf96f07db7603b7de69dd" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "1997dd9df74cdac935c76252744c1ed5794fac083242ea4fe77ef3ed60ba0f83" dependencies = [ "futures-core", "futures-task", @@ -2418,9 +2427,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.28" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" +checksum = "89d422fa3cbe3b40dca574ab087abb5bc98258ea57eea3fd6f1fa7162c778b91" [[package]] name = "futures-lite" @@ -2439,13 +2448,13 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.28" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" 
+checksum = "3eb14ed937631bd8b8b8977f2c198443447a8355b6e3ca599f38c975e5a963b6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 1.0.109", ] [[package]] @@ -2461,15 +2470,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.28" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" +checksum = "ec93083a4aecafb2a80a885c9de1f0ccae9dbd32c2bb54b0c3a65690e0b8d2f2" [[package]] name = "futures-task" -version = "0.3.28" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +checksum = "fd65540d33b37b16542a0438c12e6aeead10d4ac5d05bd3f805b8f35ab592879" [[package]] name = "futures-timer" @@ -2479,9 +2488,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.28" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +checksum = "3ef6b17e481503ec85211fed8f39d1970f128935ca1f814cd32ac4a6842e84ab" dependencies = [ "futures-channel", "futures-core", @@ -2515,9 +2524,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.7" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", "version_check", @@ -2525,9 +2534,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.9" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" 
dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -2637,6 +2646,28 @@ dependencies = [ "regex", ] +[[package]] +name = "globwatch" +version = "0.1.0" +dependencies = [ + "futures", + "glob-match 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "itertools", + "merge-streams", + "notify 5.1.0", + "notify-debouncer-mini", + "pin-project", + "stop-token", + "test-case", + "tokio", + "tokio-stream", + "tracing", + "tracing-subscriber", + "tracing-test", + "unic-segment", + "walkdir", +] + [[package]] name = "gloo-timers" version = "0.2.6" @@ -2649,6 +2680,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "go-parse-duration" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "558b88954871f5e5b2af0e62e2e176c8bde7a6c2c4ed41b13d138d96da2e2cbd" + [[package]] name = "h2" version = "0.3.16" @@ -2941,9 +2978,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.56" +version = "0.1.54" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c" +checksum = "0c17cc76786e99f8d2f055c11159e7f0091c42474dcc3189fbab96072e873e6d" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -3039,9 +3076,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.9.3" +version = "1.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" dependencies = [ "autocfg", "hashbrown 0.12.3", @@ -3136,13 +3173,13 @@ dependencies = [ [[package]] name = "io-lifetimes" -version = "1.0.10" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" +checksum = "09270fd4fa1111bc614ed2246c7ef56239a3063d5be0d1ec3b589c505d400aeb" dependencies = [ 
"hermit-abi 0.3.1", "libc", - "windows-sys 0.48.0", + "windows-sys 0.45.0", ] [[package]] @@ -3738,6 +3775,16 @@ dependencies = [ "autocfg", ] +[[package]] +name = "merge-streams" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f84f6452969abd246e7ac1fe4fe75906c76e8ec88d898df9aef37e0f3b6a7c2" +dependencies = [ + "futures-core", + "pin-project", +] + [[package]] name = "miette" version = "4.7.1" @@ -4116,7 +4163,7 @@ dependencies = [ "bitflags 1.3.2", "filetime", "fsevent", - "fsevent-sys", + "fsevent-sys 2.0.1", "inotify 0.7.1", "libc", "mio 0.6.23", @@ -4132,7 +4179,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9" dependencies = [ "bitflags 1.3.2", + "crossbeam-channel", "filetime", + "fsevent-sys 4.1.0", "inotify 0.9.6", "kqueue", "libc", @@ -4141,6 +4190,15 @@ dependencies = [ "windows-sys 0.42.0", ] +[[package]] +name = "notify-debouncer-mini" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e23e9fa24f094b143c1eb61f90ac6457de87be6987bc70746e0179f7dbc9007b" +dependencies = [ + "notify 5.1.0", +] + [[package]] name = "ntapi" version = "0.4.0" @@ -4410,7 +4468,7 @@ checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.16", + "redox_syscall", "smallvec", "windows-sys 0.45.0", ] @@ -4585,6 +4643,7 @@ dependencies = [ "log", "rand 0.8.5", "tempdir", + "thiserror", "windows-sys 0.45.0", ] @@ -4863,9 +4922,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.56" +version = "1.0.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" +checksum = "ba466839c78239c09faf015484e5cc04860f88242cff4d03eb038f04b4699b73" dependencies 
= [ "unicode-ident", ] @@ -5162,15 +5221,6 @@ dependencies = [ "bitflags 1.3.2", ] -[[package]] -name = "redox_syscall" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_users" version = "0.4.3" @@ -5178,7 +5228,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ "getrandom", - "redox_syscall 0.2.16", + "redox_syscall", "thiserror", ] @@ -5196,9 +5246,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.7.3" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" +checksum = "cce168fea28d3e05f158bda4576cf0c844d5045bc2cc3620fa0292ed5bb5814c" dependencies = [ "aho-corasick", "memchr", @@ -5431,9 +5481,9 @@ checksum = "cb626abdbed5e93f031baae60d72032f56bc964e11ac2ff65f2ba3ed98d6d3e1" [[package]] name = "rustc-demangle" -version = "0.1.22" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a36c42d1873f9a77c53bde094f9664d9891bc604a45b4798fd2c389ed12e5b" +checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" [[package]] name = "rustc-hash" @@ -5494,16 +5544,16 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.11" +version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85597d61f83914ddeba6a47b3b8ffe7365107221c2e557ed94426489fefb5f77" +checksum = "62b24138615de35e32031d041a09032ef3487a616d901ca4db224e7d557efae2" dependencies = [ "bitflags 1.3.2", - "errno 0.3.1", + "errno 0.3.0", "io-lifetimes", "libc", "linux-raw-sys 0.3.0", - "windows-sys 0.48.0", + "windows-sys 0.45.0", ] [[package]] @@ -5670,9 +5720,9 @@ dependencies = [ [[package]] 
name = "serde" -version = "1.0.160" +version = "1.0.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" +checksum = "771d4d9c4163ee138805e12c710dd365e4f44be8be0503cb1bb9eb989425d9c9" dependencies = [ "serde_derive", ] @@ -5709,20 +5759,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.160" +version = "1.0.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" +checksum = "e801c1712f48475582b7696ac71e0ca34ebb30e09338425384269d9717c62cad" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.8", ] [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea" dependencies = [ "indexmap", "itoa", @@ -6182,6 +6232,18 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "213701ba3370744dcd1a12960caa4843b3d68b4d1c0a5d575e0d65b2ee9d16c0" +[[package]] +name = "stop-token" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af91f480ee899ab2d9f8435bfdfc14d08a5754bd9d3fef1f1a1c23336aad6c8b" +dependencies = [ + "async-channel", + "cfg-if 1.0.0", + "futures-core", + "pin-project-lite", +] + [[package]] name = "string_cache" version = "0.8.7" @@ -7388,9 +7450,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.15" +version = "2.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" +checksum = "bcc02725fd69ab9f26eab07fad303e2497fad6fb9eba4f96c4d1687bdf704ad9" dependencies = [ "proc-macro2", "quote", @@ -7474,15 +7536,15 @@ dependencies 
= [ [[package]] name = "tempfile" -version = "3.5.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95" dependencies = [ "cfg-if 1.0.0", "fastrand", - "redox_syscall 0.3.5", - "rustix 0.37.11", - "windows-sys 0.45.0", + "redox_syscall", + "rustix 0.36.11", + "windows-sys 0.42.0", ] [[package]] @@ -7520,7 +7582,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237" dependencies = [ - "rustix 0.37.11", + "rustix 0.37.3", "windows-sys 0.48.0", ] @@ -7653,7 +7715,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.8", ] [[package]] @@ -7799,13 +7861,14 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.27.0" +version = "1.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0de47a4eecbe11f498978a9b29d792f0d2692d1dd003650c24c76510e3bc001" +checksum = "03201d01c3c27a29c8a5cee5b55a93ddae1ccf6f08f65365c2c918f8c1b76f64" dependencies = [ "autocfg", "bytes", "libc", + "memchr", "mio 0.8.6", "num_cpus", "parking_lot", @@ -7829,13 +7892,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.0.0" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a573bdc87985e9d6ddeed1b3d864e8a302c847e40d647746df2f1de209d1ce" +checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 1.0.109", ] [[package]] @@ -7985,6 +8048,20 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "tonic-reflection" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "67494bad4dda4c9bffae901dfe14e2b2c0f760adb4706dc10beeb81799f7f7b2" +dependencies = [ + "bytes", + "prost", + "prost-types", + "tokio", + "tokio-stream", + "tonic", +] + [[package]] name = "tower" version = "0.4.13" @@ -8090,6 +8167,29 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "tracing-test" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a2c0ff408fe918a94c428a3f2ad04e4afd5c95bbc08fcf868eff750c15728a4" +dependencies = [ + "lazy_static", + "tracing-core", + "tracing-subscriber", + "tracing-test-macro", +] + +[[package]] +name = "tracing-test-macro" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "258bc1c4f8e2e73a977812ab339d503e6feeb92700f6d07a6de4d321522d5c08" +dependencies = [ + "lazy_static", + "quote", + "syn 1.0.109", +] + [[package]] name = "triomphe" version = "0.1.8" @@ -8905,11 +9005,14 @@ dependencies = [ "const_format", "ctrlc", "dialoguer", + "directories", "dirs-next", "dunce", "env_logger 0.10.0", "futures", "glob-match 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "globwatch", + "go-parse-duration", "hex", "hostname", "humantime", @@ -8933,6 +9036,7 @@ dependencies = [ "sha2", "shared_child", "sysinfo", + "tempdir", "tempfile", "test-case", "thiserror", @@ -8942,7 +9046,10 @@ dependencies = [ "tokio-util", "tonic", "tonic-build", + "tonic-reflection", "tower", + "tracing", + "tracing-test", "turbo-updater", "turbopath", "turborepo-api-client", @@ -9954,11 +10061,11 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.48.0" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25" dependencies = [ - 
"windows-targets 0.48.0", + "windows-targets 0.42.2", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 68bdd51491450..3c5efc9294c33 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,8 @@ resolver = "2" members = [ "crates/auto-hash-map", + "crates/globwatch", + "crates/pidlock", "crates/node-file-trace", "crates/swc-ast-explorer", "crates/turbo-binding", diff --git a/cli/internal/cmd/root.go b/cli/internal/cmd/root.go index d8d0e33c25d77..314cb97c99a4b 100644 --- a/cli/internal/cmd/root.go +++ b/cli/internal/cmd/root.go @@ -10,7 +10,6 @@ import ( "github.com/pkg/errors" "github.com/vercel/turbo/cli/internal/cmdutil" - "github.com/vercel/turbo/cli/internal/daemon" "github.com/vercel/turbo/cli/internal/process" "github.com/vercel/turbo/cli/internal/prune" "github.com/vercel/turbo/cli/internal/run" @@ -64,9 +63,7 @@ func RunWithArgs(args *turbostate.ParsedArgsFromRust, turboVersion string) int { var execErr error go func() { command := args.Command - if command.Daemon != nil { - execErr = daemon.ExecuteDaemon(ctx, helper, signalWatcher, args) - } else if command.Prune != nil { + if command.Prune != nil { execErr = prune.ExecutePrune(helper, args) } else if command.Run != nil { execErr = run.ExecuteRun(ctx, helper, signalWatcher, args) diff --git a/cli/internal/daemon/connector/connector.go b/cli/internal/daemon/connector/connector.go index d05ef598c46e6..f8742d317807b 100644 --- a/cli/internal/daemon/connector/connector.go +++ b/cli/internal/daemon/connector/connector.go @@ -26,6 +26,7 @@ var ( // ErrVersionMismatch is returned when the daemon process was spawned by a different version than the connecting client ErrVersionMismatch = errors.New("daemon version does not match client version") errConnectionFailure = errors.New("could not connect to daemon") + errUnavailable = errors.New("the server is not ready yet") // ErrTooManyAttempts is returned when the client fails to connect too many times ErrTooManyAttempts = errors.New("reached maximum number of 
attempts contacting daemon") // ErrDaemonNotRunning is returned when the client cannot contact the daemon and has @@ -124,7 +125,7 @@ func (c *Connector) addr() string { // an error to the user that includes the file location so that // they can resolve it. const ( - _maxAttempts = 3 + _maxAttempts = 10 _shutdownTimeout = 1 * time.Second _socketPollTimeout = 1 * time.Second ) @@ -252,13 +253,9 @@ func (c *Connector) connectInternal(ctx context.Context) (*Client, error) { return nil, err } // Loops back around and tries again. - } else if errors.Is(err, errConnectionFailure) { + } else if errors.Is(err, errUnavailable) { // close the client, see if we can kill the stale daemon - _ = client.Close() - if err := c.killDeadServer(serverPid); err != nil { - return nil, err - } - // if we successfully killed the dead server, loop around and try again + c.Logger.Debug("server not ready yet") } else if err != nil { // Some other error occurred, close the client and // report the error to the user @@ -331,7 +328,7 @@ func (c *Connector) sendHello(ctx context.Context, client turbodprotocol.TurbodC case codes.FailedPrecondition: return ErrVersionMismatch case codes.Unavailable: - return errConnectionFailure + return errUnavailable default: return err } diff --git a/cli/internal/daemon/daemon.go b/cli/internal/daemon/daemon.go index 81d52830b5ef9..0c34485db29c3 100644 --- a/cli/internal/daemon/daemon.go +++ b/cli/internal/daemon/daemon.go @@ -5,37 +5,16 @@ import ( "crypto/sha256" "encoding/hex" "fmt" - "io" - "net" "os" "path/filepath" "strings" - "time" - grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" "github.com/hashicorp/go-hclog" - "github.com/nightlyone/lockfile" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/cmdutil" "github.com/vercel/turbo/cli/internal/daemon/connector" "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/server" - "github.com/vercel/turbo/cli/internal/signals" 
"github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/turbostate" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" ) -type daemon struct { - logger hclog.Logger - repoRoot turbopath.AbsoluteSystemPath - timeout time.Duration - reqCh chan struct{} - timedOutCh chan struct{} -} - func getRepoHash(repoRoot turbopath.AbsoluteSystemPath) string { pathHash := sha256.Sum256([]byte(repoRoot.ToString())) // We grab a substring of the hash because there is a 108-character limit on the length @@ -68,208 +47,6 @@ func getPidFile(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemP return root.UntypedJoin("turbod.pid") } -// logError logs an error and outputs it to the UI. -func (d *daemon) logError(err error) { - d.logger.Error(fmt.Sprintf("error %v", err)) -} - -// we're only appending, and we're creating the file if it doesn't exist. -// we do not need to read the log file. -var _logFileFlags = os.O_WRONLY | os.O_APPEND | os.O_CREATE - -// ExecuteDaemon executes the root daemon command -func ExecuteDaemon(ctx context.Context, helper *cmdutil.Helper, signalWatcher *signals.Watcher, args *turbostate.ParsedArgsFromRust) error { - base, err := helper.GetCmdBase(args) - if err != nil { - return err - } - if args.TestRun { - base.UI.Info("Daemon test run successful") - return nil - } - - idleTimeout := 4 * time.Hour - if args.Command.Daemon.IdleTimeout != "" { - idleTimeout, err = time.ParseDuration(args.Command.Daemon.IdleTimeout) - if err != nil { - return err - } - } - - logFilePath, err := getLogFilePath(base.RepoRoot) - if err != nil { - return err - } - if err := logFilePath.EnsureDir(); err != nil { - return err - } - logFile, err := logFilePath.OpenFile(_logFileFlags, 0644) - if err != nil { - return err - } - defer func() { _ = logFile.Close() }() - logger := hclog.New(&hclog.LoggerOptions{ - Output: io.MultiWriter(logFile, os.Stdout), - Level: hclog.Info, - Color: hclog.ColorOff, 
- Name: "turbod", - }) - - d := &daemon{ - logger: logger, - repoRoot: base.RepoRoot, - timeout: idleTimeout, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - serverName := getRepoHash(base.RepoRoot) - turboServer, err := server.New(serverName, d.logger.Named("rpc server"), base.RepoRoot, base.TurboVersion, logFilePath) - if err != nil { - d.logError(err) - return err - } - defer func() { _ = turboServer.Close() }() - err = d.runTurboServer(ctx, turboServer, signalWatcher) - if err != nil { - d.logError(err) - return err - } - return nil -} - -var errInactivityTimeout = errors.New("turbod shut down from inactivity") - -// tryAcquirePidfileLock attempts to ensure that only one daemon is running from the given pid file path -// at a time. If this process fails to write its PID to the lockfile, it must exit. -func tryAcquirePidfileLock(pidPath turbopath.AbsoluteSystemPath) (lockfile.Lockfile, error) { - if err := pidPath.EnsureDir(); err != nil { - return "", err - } - lockFile, err := lockfile.New(pidPath.ToString()) - if err != nil { - // lockfile.New should only return an error if it wasn't given an absolute path. - // We are attempting to use the type system to enforce that we are passing an - // absolute path. An error here likely means a bug, and we should crash. - panic(err) - } - if err := lockFile.TryLock(); err != nil { - return "", err - } - return lockFile, nil -} - -type rpcServer interface { - Register(grpcServer server.GRPCServer) -} - -func (d *daemon) runTurboServer(parentContext context.Context, rpcServer rpcServer, signalWatcher *signals.Watcher) error { - ctx, cancel := context.WithCancel(parentContext) - defer cancel() - pidPath := getPidFile(d.repoRoot) - lock, err := tryAcquirePidfileLock(pidPath) - if err != nil { - return errors.Wrapf(err, "failed to lock the pid file at %v. Is another turbo daemon running?", lock) - } - // When we're done serving, clean up the pid file. 
- // Also, if *this* goroutine panics, make sure we unlock the pid file. - defer func() { - if err := lock.Unlock(); err != nil { - d.logger.Error(errors.Wrapf(err, "failed unlocking pid file at %v", lock).Error()) - } - }() - // This handler runs in request goroutines. If a request causes a panic, - // this handler will get called after a call to recover(), meaning we are - // no longer panicking. We return a server error and cancel our context, - // which triggers a shutdown of the server. - panicHandler := func(thePanic interface{}) error { - cancel() - d.logger.Error(fmt.Sprintf("Caught panic %v", thePanic)) - return status.Error(codes.Internal, "server panicked") - } - - // If we have the lock, assume that we are the owners of the socket file, - // whether it already exists or not. That means we are free to remove it. - sockPath := getUnixSocket(d.repoRoot) - if err := sockPath.Remove(); err != nil && !errors.Is(err, os.ErrNotExist) { - return err - } - d.logger.Debug(fmt.Sprintf("Using socket path %v (%v)\n", sockPath, len(sockPath))) - lis, err := net.Listen("unix", sockPath.ToString()) - if err != nil { - return err - } - // We don't need to explicitly close 'lis', the grpc server will handle that - s := grpc.NewServer( - grpc.ChainUnaryInterceptor( - d.onRequest, - grpc_recovery.UnaryServerInterceptor(grpc_recovery.WithRecoveryHandler(panicHandler)), - ), - ) - go d.timeoutLoop(ctx) - - rpcServer.Register(s) - errCh := make(chan error) - go func(errCh chan<- error) { - if err := s.Serve(lis); err != nil { - errCh <- err - } - close(errCh) - }(errCh) - - // Note that we aren't deferring s.GracefulStop here because we also need - // to drain the error channel, which isn't guaranteed to happen until - // the server has stopped. That in turn may depend on GracefulStop being - // called. - // Future work could restructure this to make that simpler. 
- var exitErr error - select { - case err, ok := <-errCh: - // The server exited - if ok { - exitErr = err - } - case <-d.timedOutCh: - // This is the inactivity timeout case - exitErr = errInactivityTimeout - s.GracefulStop() - case <-ctx.Done(): - // If a request handler panics, it will cancel this context - s.GracefulStop() - case <-signalWatcher.Done(): - // This is fired if caught a signal - s.GracefulStop() - } - // Wait for the server to exit, if it hasn't already. - // When it does, this channel will close. We don't - // care about the error in this scenario because we've - // either requested a close via cancelling the context, - // an inactivity timeout, or caught a signal. - for range errCh { - } - return exitErr -} - -func (d *daemon) onRequest(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) { - d.reqCh <- struct{}{} - return handler(ctx, req) -} - -func (d *daemon) timeoutLoop(ctx context.Context) { - timeoutCh := time.After(d.timeout) -outer: - for { - select { - case <-d.reqCh: - timeoutCh = time.After(d.timeout) - case <-timeoutCh: - close(d.timedOutCh) - break outer - case <-ctx.Done(): - break outer - } - } -} - // ClientOpts re-exports connector.Ops to encapsulate the connector package type ClientOpts = connector.Opts diff --git a/cli/internal/daemon/daemon_test.go b/cli/internal/daemon/daemon_test.go deleted file mode 100644 index 66a714d3eea55..0000000000000 --- a/cli/internal/daemon/daemon_test.go +++ /dev/null @@ -1,262 +0,0 @@ -package daemon - -import ( - "context" - "errors" - "os/exec" - "runtime" - "strconv" - "sync" - "testing" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/nightlyone/lockfile" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/server" - "github.com/vercel/turbo/cli/internal/signals" - "github.com/vercel/turbo/cli/internal/turbopath" - "google.golang.org/grpc" - 
"google.golang.org/grpc/credentials/insecure" - "google.golang.org/grpc/test/grpc_testing" - "gotest.tools/v3/assert" -) - -// testBin returns a platform-appropriate node binary. -// We need some process to be running and findable by the -// lockfile library, and we don't particularly care what it is. -// Since node is required for turbo development, it makes a decent -// candidate. -func testBin() string { - if runtime.GOOS == "windows" { - return "node.exe" - } - return "node" -} - -func TestPidFileLock(t *testing.T) { - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - pidPath := getPidFile(repoRoot) - // the lockfile library handles removing pids from dead owners - _, err := tryAcquirePidfileLock(pidPath) - assert.NilError(t, err, "acquirePidLock") - - // Start up a node process and fake a pid file for it. - // Ensure that we can't start the daemon while the node process is live - bin := testBin() - node := exec.Command(bin) - err = node.Start() - assert.NilError(t, err, "Start") - stopNode := func() error { - if err := node.Process.Kill(); err != nil { - return err - } - // We expect an error from node, we just sent a kill signal - _ = node.Wait() - return nil - } - // In case we fail the test, still try to kill the node process - t.Cleanup(func() { _ = stopNode() }) - nodePid := node.Process.Pid - err = pidPath.WriteFile([]byte(strconv.Itoa(nodePid)), 0644) - assert.NilError(t, err, "WriteFile") - - _, err = tryAcquirePidfileLock(pidPath) - assert.ErrorIs(t, err, lockfile.ErrBusy) - - // Stop the node process, but leave the pid file there - // This simulates a crash - err = stopNode() - assert.NilError(t, err, "stopNode") - // the lockfile library handles removing pids from dead owners - _, err = tryAcquirePidfileLock(pidPath) - assert.NilError(t, err, "acquirePidLock") -} - -type testRPCServer struct { - grpc_testing.UnimplementedTestServiceServer - registered chan struct{} -} - -func (ts *testRPCServer) EmptyCall(ctx context.Context, req 
*grpc_testing.Empty) (*grpc_testing.Empty, error) { - panic("intended to panic") -} - -func (ts *testRPCServer) Register(grpcServer server.GRPCServer) { - grpc_testing.RegisterTestServiceServer(grpcServer, ts) - ts.registered <- struct{}{} -} - -func newTestRPCServer() *testRPCServer { - return &testRPCServer{ - registered: make(chan struct{}, 1), - } -} - -func waitForFile(t *testing.T, filename turbopath.AbsoluteSystemPath, timeout time.Duration) { - t.Helper() - deadline := time.After(timeout) -outer: - for !filename.FileExists() { - select { - case <-deadline: - break outer - case <-time.After(10 * time.Millisecond): - } - } - if !filename.FileExists() { - t.Errorf("timed out waiting for %v to exist after %v", filename, timeout) - } -} - -func TestDaemonLifecycle(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - ts := newTestRPCServer() - watcher := signals.NewWatcher() - ctx, cancel := context.WithCancel(context.Background()) - - d := &daemon{ - logger: logger, - repoRoot: repoRoot, - timeout: 10 * time.Second, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - - var serverErr error - wg := &sync.WaitGroup{} - wg.Add(1) - go func() { - serverErr = d.runTurboServer(ctx, ts, watcher) - wg.Done() - }() - - sockPath := getUnixSocket(repoRoot) - waitForFile(t, sockPath, 30*time.Second) - pidPath := getPidFile(repoRoot) - waitForFile(t, pidPath, 1*time.Second) - cancel() - wg.Wait() - assert.NilError(t, serverErr, "runTurboServer") - if sockPath.FileExists() { - t.Errorf("%v still exists, should have been cleaned up", sockPath) - } - if pidPath.FileExists() { - t.Errorf("%v still exists, should have been cleaned up", sockPath) - } -} - -func TestTimeout(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - ts := newTestRPCServer() - watcher := signals.NewWatcher() - ctx := 
context.Background() - - d := &daemon{ - logger: logger, - repoRoot: repoRoot, - timeout: 5 * time.Millisecond, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - err := d.runTurboServer(ctx, ts, watcher) - if !errors.Is(err, errInactivityTimeout) { - t.Errorf("server error got %v, want %v", err, errInactivityTimeout) - } -} - -func TestCaughtSignal(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - ts := newTestRPCServer() - watcher := signals.NewWatcher() - ctx := context.Background() - - d := &daemon{ - logger: logger, - repoRoot: repoRoot, - timeout: 5 * time.Second, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - errCh := make(chan error) - go func() { - err := d.runTurboServer(ctx, ts, watcher) - errCh <- err - }() - <-ts.registered - // grpc doesn't provide a signal to know when the server is serving. - // So while this call to Close can race with the call to grpc.Server.Serve, if we've - // registered with the turboserver, we've registered all of our - // signal handlers as well. We just may or may not be serving when Close() - // is called. It shouldn't matter for the purposes of this test: - // Either we are serving, and Serve will return with nil when GracefulStop is - // called, or we aren't serving yet, and the subsequent call to Serve will - // immediately return with grpc.ErrServerStopped. So, both nil and grpc.ErrServerStopped - // are acceptable outcomes for runTurboServer. Any other error, or a timeout, is a - // failure. - watcher.Close() - - err := <-errCh - pidPath := getPidFile(repoRoot) - if pidPath.FileExists() { - t.Errorf("expected to clean up %v, but it still exists", pidPath) - } - // We'll either get nil or ErrServerStopped, depending on whether - // or not we close the signal watcher before grpc.Server.Serve was - // called. 
- if err != nil && !errors.Is(err, grpc.ErrServerStopped) { - t.Errorf("runTurboServer got err %v, want nil or ErrServerStopped", err) - } -} - -func TestCleanupOnPanic(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - ts := newTestRPCServer() - watcher := signals.NewWatcher() - ctx := context.Background() - - d := &daemon{ - logger: logger, - repoRoot: repoRoot, - timeout: 5 * time.Second, - reqCh: make(chan struct{}), - timedOutCh: make(chan struct{}), - } - errCh := make(chan error) - go func() { - err := d.runTurboServer(ctx, ts, watcher) - errCh <- err - }() - <-ts.registered - - creds := insecure.NewCredentials() - sockFile := getUnixSocket(repoRoot) - conn, err := grpc.Dial("unix://"+sockFile.ToString(), grpc.WithTransportCredentials(creds)) - assert.NilError(t, err, "Dial") - - client := grpc_testing.NewTestServiceClient(conn) - _, err = client.EmptyCall(ctx, &grpc_testing.Empty{}) - if err == nil { - t.Error("nil error") - } - // wait for the server to finish - <-errCh - - pidPath := getPidFile(repoRoot) - if pidPath.FileExists() { - t.Errorf("expected to clean up %v, but it still exists", pidPath) - } -} diff --git a/cli/internal/globwatcher/globwatcher.go b/cli/internal/globwatcher/globwatcher.go deleted file mode 100644 index 9226cfaeed983..0000000000000 --- a/cli/internal/globwatcher/globwatcher.go +++ /dev/null @@ -1,210 +0,0 @@ -package globwatcher - -import ( - "errors" - "fmt" - "path/filepath" - "sync" - - "github.com/hashicorp/go-hclog" - "github.com/vercel/turbo/cli/internal/doublestar" - "github.com/vercel/turbo/cli/internal/filewatcher" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - "github.com/vercel/turbo/cli/internal/util" -) - -// ErrClosed is returned when attempting to get changed globs after glob watching has closed -var ErrClosed = errors.New("glob watching is closed") - -type globs struct { - 
Inclusions util.Set - Exclusions util.Set -} - -// GlobWatcher is used to track unchanged globs by hash. Once a glob registers a file change -// it is no longer tracked until a new hash requests it. Once all globs for a particular hash -// have changed, that hash is no longer tracked. -type GlobWatcher struct { - logger hclog.Logger - repoRoot turbopath.AbsoluteSystemPath - cookieWaiter filewatcher.CookieWaiter - - mu sync.RWMutex // protects field below - hashGlobs map[string]globs - globStatus map[string]util.Set // glob -> hashes where this glob hasn't changed - - closed bool -} - -// New returns a new GlobWatcher instance -func New(logger hclog.Logger, repoRoot turbopath.AbsoluteSystemPath, cookieWaiter filewatcher.CookieWaiter) *GlobWatcher { - return &GlobWatcher{ - logger: logger, - repoRoot: repoRoot, - cookieWaiter: cookieWaiter, - hashGlobs: make(map[string]globs), - globStatus: make(map[string]util.Set), - } -} - -func (g *GlobWatcher) setClosed() { - g.mu.Lock() - g.closed = true - g.mu.Unlock() -} - -func (g *GlobWatcher) isClosed() bool { - g.mu.RLock() - defer g.mu.RUnlock() - return g.closed -} - -// WatchGlobs registers the given set of globs to be watched for changes and grouped -// under the given hash. This method pairs with GetChangedGlobs to determine which globs -// out of a set of candidates have changed since WatchGlobs was called for the same hash. -func (g *GlobWatcher) WatchGlobs(hash string, globsToWatch fs.TaskOutputs) error { - if g.isClosed() { - return ErrClosed - } - // Wait for a cookie here - // that will ensure that we have seen all filesystem writes - // *by the calling client*. Other tasks _could_ write to the - // same output directories, however we are relying on task - // execution dependencies to prevent that. 
- if err := g.cookieWaiter.WaitForCookie(); err != nil { - return err - } - g.mu.Lock() - defer g.mu.Unlock() - g.hashGlobs[hash] = globs{ - Inclusions: util.SetFromStrings(globsToWatch.Inclusions), - Exclusions: util.SetFromStrings(globsToWatch.Exclusions), - } - - for _, glob := range globsToWatch.Inclusions { - existing, ok := g.globStatus[glob] - if !ok { - existing = make(util.Set) - } - existing.Add(hash) - g.globStatus[glob] = existing - } - return nil -} - -// GetChangedGlobs returns the subset of the given candidates that we are not currently -// tracking as "unchanged". -func (g *GlobWatcher) GetChangedGlobs(hash string, candidates []string) ([]string, error) { - if g.isClosed() { - // If filewatching has crashed, return all candidates as changed. - return candidates, nil - } - // Wait for a cookie here - // that will ensure that we have seen all filesystem writes - // *by the calling client*. Other tasks _could_ write to the - // same output directories, however we are relying on task - // execution dependencies to prevent that. - if err := g.cookieWaiter.WaitForCookie(); err != nil { - return nil, err - } - // hashGlobs tracks all of the unchanged globs for a given hash - // If hashGlobs doesn't have our hash, either everything has changed, - // or we were never tracking it. Either way, consider all the candidates - // to be changed globs. - g.mu.RLock() - defer g.mu.RUnlock() - globsToCheck, ok := g.hashGlobs[hash] - if !ok { - return candidates, nil - } - allGlobs := util.SetFromStrings(candidates) - diff := allGlobs.Difference(globsToCheck.Inclusions) - - return diff.UnsafeListOfStrings(), nil -} - -// OnFileWatchEvent implements FileWatchClient.OnFileWatchEvent -// On a file change, check if we have a glob that matches this file. Invalidate -// any matching globs, and remove them from the set of unchanged globs for the corresponding -// hashes. If this is the last glob for a hash, remove the hash from being tracked. 
-func (g *GlobWatcher) OnFileWatchEvent(ev filewatcher.Event) { - // At this point, we don't care what the Op is, any Op represents a change - // that should invalidate matching globs - g.logger.Trace(fmt.Sprintf("Got fsnotify event %v", ev)) - absolutePath := ev.Path - repoRelativePath, err := g.repoRoot.RelativePathString(absolutePath.ToStringDuringMigration()) - if err != nil { - g.logger.Debug(fmt.Sprintf("could not get relative path from %v to %v: %v", g.repoRoot, absolutePath, err)) - return - } - g.mu.Lock() - defer g.mu.Unlock() - for glob, hashStatus := range g.globStatus { - matches, err := doublestar.Match(glob, filepath.ToSlash(repoRelativePath)) - if err != nil { - g.logger.Error(fmt.Sprintf("failed to check path %v against glob %v: %v", repoRelativePath, glob, err)) - continue - } - // If this glob matches, we know that it has changed for every hash that included this glob - // and is not excluded by a hash's exclusion globs. - // So, we can delete this glob from every hash tracking it as well as stop watching this glob. - // To stop watching, we unref each of the directories corresponding to this glob. 
- if matches { - for hashUntyped := range hashStatus { - hash := hashUntyped.(string) - hashGlobs, ok := g.hashGlobs[hash] - - if !ok { - g.logger.Warn(fmt.Sprintf("failed to find hash %v referenced from glob %v", hash, glob)) - continue - } - - isExcluded := false - // Check if we've excluded this path by going through exclusion globs - for exclusionGlob := range hashGlobs.Exclusions { - matches, err := doublestar.Match(exclusionGlob.(string), filepath.ToSlash(repoRelativePath)) - if err != nil { - g.logger.Error(fmt.Sprintf("failed to check path %v against glob %v: %v", repoRelativePath, glob, err)) - continue - } - - if matches { - isExcluded = true - break - } - } - - // If we have excluded this path, then we skip it - if isExcluded { - continue - } - - // We delete hash from the globStatus entry - g.globStatus[glob].Delete(hash) - - // If we've deleted the last hash for a glob in globStatus, delete the whole glob entry - if len(g.globStatus[glob]) == 0 { - delete(g.globStatus, glob) - } - - hashGlobs.Inclusions.Delete(glob) - // If we've deleted the last glob for a hash, delete the whole hash entry - if hashGlobs.Inclusions.Len() == 0 { - delete(g.hashGlobs, hash) - } - } - } - } -} - -// OnFileWatchError implements FileWatchClient.OnFileWatchError -func (g *GlobWatcher) OnFileWatchError(err error) { - g.logger.Error(fmt.Sprintf("file watching received an error: %v", err)) -} - -// OnFileWatchClosed implements FileWatchClient.OnFileWatchClosed -func (g *GlobWatcher) OnFileWatchClosed() { - g.setClosed() - g.logger.Warn("GlobWatching is closing due to file watching closing") -} diff --git a/cli/internal/globwatcher/globwatcher_test.go b/cli/internal/globwatcher/globwatcher_test.go deleted file mode 100644 index 6fb89a76c8d71..0000000000000 --- a/cli/internal/globwatcher/globwatcher_test.go +++ /dev/null @@ -1,232 +0,0 @@ -package globwatcher - -import ( - "testing" - - "github.com/hashicorp/go-hclog" - "github.com/vercel/turbo/cli/internal/filewatcher" - 
"github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbopath" - "gotest.tools/v3/assert" -) - -func setup(t *testing.T, repoRoot turbopath.AbsoluteSystemPath) { - // Directory layout: - // / - // my-pkg/ - // irrelevant - // dist/ - // dist-file - // distChild/ - // child-file - // .next/ - // next-file - distPath := repoRoot.UntypedJoin("my-pkg", "dist") - childFilePath := distPath.UntypedJoin("distChild", "child-file") - err := childFilePath.EnsureDir() - assert.NilError(t, err, "EnsureDir") - f, err := childFilePath.Create() - assert.NilError(t, err, "Create") - err = f.Close() - assert.NilError(t, err, "Close") - distFilePath := repoRoot.UntypedJoin("my-pkg", "dist", "dist-file") - f, err = distFilePath.Create() - assert.NilError(t, err, "Create") - err = f.Close() - assert.NilError(t, err, "Close") - nextFilePath := repoRoot.UntypedJoin("my-pkg", ".next", "next-file") - err = nextFilePath.EnsureDir() - assert.NilError(t, err, "EnsureDir") - f, err = nextFilePath.Create() - assert.NilError(t, err, "Create") - err = f.Close() - assert.NilError(t, err, "Close") - irrelevantPath := repoRoot.UntypedJoin("my-pkg", "irrelevant") - f, err = irrelevantPath.Create() - assert.NilError(t, err, "Create") - err = f.Close() - assert.NilError(t, err, "Close") -} - -type noopCookieWaiter struct{} - -func (*noopCookieWaiter) WaitForCookie() error { - return nil -} - -var _noopCookieWaiter = &noopCookieWaiter{} - -func TestTrackOutputs(t *testing.T) { - logger := hclog.Default() - - repoRootRaw := t.TempDir() - repoRoot := fs.AbsoluteSystemPathFromUpstream(repoRootRaw) - - setup(t, repoRoot) - - globWatcher := New(logger, repoRoot, _noopCookieWaiter) - - globs := fs.TaskOutputs{ - Inclusions: []string{ - "my-pkg/dist/**", - "my-pkg/.next/**", - }, - Exclusions: []string{"my-pkg/.next/cache/**"}, - } - - hash := "the-hash" - err := globWatcher.WatchGlobs(hash, globs) - assert.NilError(t, err, "WatchGlobs") - - changed, err := 
globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - // Make an irrelevant change - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", "irrelevant"), - }) - - changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - // Make an excluded change - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.Join("my-pkg", ".next", "cache", "foo"), - }) - - changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - // Make a relevant change - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", "dist", "foo"), - }) - - changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 1, len(changed), "Expected one changed path remaining") - expected := "my-pkg/dist/**" - assert.Equal(t, expected, changed[0], "Expected dist glob to have changed") - - // Change a file matching the other glob - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", ".next", "foo"), - }) - // We should no longer be watching anything, since both globs have - // registered changes - if len(globWatcher.hashGlobs) != 0 { - t.Errorf("expected to not track any hashes, found %v", globWatcher.hashGlobs) - } - - // Both globs have changed, we should have stopped tracking - // this hash - changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.DeepEqual(t, globs.Inclusions, 
changed) -} - -func TestTrackMultipleHashes(t *testing.T) { - logger := hclog.Default() - - repoRootRaw := t.TempDir() - repoRoot := fs.AbsoluteSystemPathFromUpstream(repoRootRaw) - - setup(t, repoRoot) - - globWatcher := New(logger, repoRoot, _noopCookieWaiter) - - globs := fs.TaskOutputs{ - Inclusions: []string{ - "my-pkg/dist/**", - "my-pkg/.next/**", - }, - } - - hash := "the-hash" - err := globWatcher.WatchGlobs(hash, globs) - assert.NilError(t, err, "WatchGlobs") - - secondGlobs := fs.TaskOutputs{ - Inclusions: []string{ - "my-pkg/.next/**", - }, - Exclusions: []string{"my-pkg/.next/cache/**"}, - } - - secondHash := "the-second-hash" - err = globWatcher.WatchGlobs(secondHash, secondGlobs) - assert.NilError(t, err, "WatchGlobs") - - changed, err := globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - changed, err = globWatcher.GetChangedGlobs(secondHash, secondGlobs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - // Make a change that is excluded in one of the hashes but not in the other - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", ".next", "cache", "foo"), - }) - - changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 1, len(changed), "Expected one changed path remaining") - - changed, err = globWatcher.GetChangedGlobs(secondHash, secondGlobs.Inclusions) - assert.NilError(t, err, "GetChangedGlobs") - assert.Equal(t, 0, len(changed), "Expected no changed paths") - - assert.Equal(t, 1, len(globWatcher.globStatus["my-pkg/.next/**"]), "Expected to be still watching `my-pkg/.next/**`") - - // Make a change for secondHash - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: 
repoRoot.UntypedJoin("my-pkg", ".next", "bar"), - }) - - assert.Equal(t, 0, len(globWatcher.globStatus["my-pkg/.next/**"]), "Expected to be no longer watching `my-pkg/.next/**`") -} - -func TestWatchSingleFile(t *testing.T) { - logger := hclog.Default() - - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - - setup(t, repoRoot) - - //watcher := newTestWatcher() - globWatcher := New(logger, repoRoot, _noopCookieWaiter) - globs := fs.TaskOutputs{ - Inclusions: []string{"my-pkg/.next/next-file"}, - Exclusions: []string{}, - } - hash := "the-hash" - err := globWatcher.WatchGlobs(hash, globs) - assert.NilError(t, err, "WatchGlobs") - - assert.Equal(t, 1, len(globWatcher.hashGlobs)) - - // A change to an irrelevant file - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", ".next", "foo"), - }) - assert.Equal(t, 1, len(globWatcher.hashGlobs)) - - // Change the watched file - globWatcher.OnFileWatchEvent(filewatcher.Event{ - EventType: filewatcher.FileAdded, - Path: repoRoot.UntypedJoin("my-pkg", ".next", "next-file"), - }) - assert.Equal(t, 0, len(globWatcher.hashGlobs)) -} diff --git a/cli/internal/server/server.go b/cli/internal/server/server.go deleted file mode 100644 index 5e738ccc14ebd..0000000000000 --- a/cli/internal/server/server.go +++ /dev/null @@ -1,192 +0,0 @@ -package server - -import ( - "context" - "sync" - "time" - - "github.com/hashicorp/go-hclog" - "github.com/pkg/errors" - "github.com/vercel/turbo/cli/internal/filewatcher" - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/globwatcher" - "github.com/vercel/turbo/cli/internal/turbodprotocol" - "github.com/vercel/turbo/cli/internal/turbopath" - "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// Server implements the GRPC serverside of TurbodServer -// Note for the future: we don't yet make use of turbo.json -// or the package 
graph in the server. Once we do, we may need a -// layer of indirection between "the thing that responds to grpc requests" -// and "the thing that holds our persistent data structures" to handle -// changes in the underlying configuration. -type Server struct { - turbodprotocol.UnimplementedTurbodServer - watcher *filewatcher.FileWatcher - globWatcher *globwatcher.GlobWatcher - turboVersion string - started time.Time - logFilePath turbopath.AbsoluteSystemPath - repoRoot turbopath.AbsoluteSystemPath - closerMu sync.Mutex - closer *closer -} - -// GRPCServer is the interface that the turbo server needs to the underlying -// GRPC server. This lets the turbo server register itself, as well as provides -// a hook for shutting down the server. -type GRPCServer interface { - grpc.ServiceRegistrar - GracefulStop() -} - -type closer struct { - grpcServer GRPCServer - once sync.Once -} - -func (c *closer) close() { - // This can get triggered from a request handler (Shutdown). Since - // calling GracefulStop blocks until all request handlers complete, - // we need to run it in a goroutine to let the Shutdown handler complete - // and avoid deadlocking. 
- c.once.Do(func() { - go func() { - c.grpcServer.GracefulStop() - }() - }) -} - -var _defaultCookieTimeout = 500 * time.Millisecond - -// New returns a new instance of Server -func New(serverName string, logger hclog.Logger, repoRoot turbopath.AbsoluteSystemPath, turboVersion string, logFilePath turbopath.AbsoluteSystemPath) (*Server, error) { - cookieDir := fs.GetTurboDataDir().UntypedJoin("cookies", serverName) - cookieJar, err := filewatcher.NewCookieJar(cookieDir, _defaultCookieTimeout) - if err != nil { - return nil, err - } - watcher, err := filewatcher.GetPlatformSpecificBackend(logger) - if err != nil { - return nil, err - } - fileWatcher := filewatcher.New(logger.Named("FileWatcher"), repoRoot, watcher) - globWatcher := globwatcher.New(logger.Named("GlobWatcher"), repoRoot, cookieJar) - server := &Server{ - watcher: fileWatcher, - globWatcher: globWatcher, - turboVersion: turboVersion, - started: time.Now(), - logFilePath: logFilePath, - repoRoot: repoRoot, - } - server.watcher.AddClient(cookieJar) - server.watcher.AddClient(globWatcher) - server.watcher.AddClient(server) - if err := server.watcher.Start(); err != nil { - return nil, errors.Wrapf(err, "watching %v", repoRoot) - } - if err := server.watcher.AddRoot(cookieDir); err != nil { - _ = server.watcher.Close() - return nil, errors.Wrapf(err, "failed to watch cookie directory: %v", cookieDir) - } - return server, nil -} - -func (s *Server) tryClose() bool { - s.closerMu.Lock() - defer s.closerMu.Unlock() - if s.closer != nil { - s.closer.close() - return true - } - return false -} - -// OnFileWatchEvent implements filewatcher.FileWatchClient.OnFileWatchEvent -// In the event that the root of the monorepo is deleted, shut down the server. 
-func (s *Server) OnFileWatchEvent(ev filewatcher.Event) { - if ev.EventType == filewatcher.FileDeleted && ev.Path == s.repoRoot { - _ = s.tryClose() - } -} - -// OnFileWatchError implements filewatcher.FileWatchClient.OnFileWatchError -func (s *Server) OnFileWatchError(err error) {} - -// OnFileWatchClosed implements filewatcher.FileWatchClient.OnFileWatchClosed -func (s *Server) OnFileWatchClosed() {} - -// Close is used for shutting down this copy of the server -func (s *Server) Close() error { - return s.watcher.Close() -} - -// Register registers this server to respond to GRPC requests -func (s *Server) Register(grpcServer GRPCServer) { - s.closerMu.Lock() - s.closer = &closer{ - grpcServer: grpcServer, - } - s.closerMu.Unlock() - turbodprotocol.RegisterTurbodServer(grpcServer, s) -} - -// NotifyOutputsWritten implements the NotifyOutputsWritten rpc from turbo.proto -func (s *Server) NotifyOutputsWritten(ctx context.Context, req *turbodprotocol.NotifyOutputsWrittenRequest) (*turbodprotocol.NotifyOutputsWrittenResponse, error) { - outputs := fs.TaskOutputs{ - Inclusions: req.OutputGlobs, - Exclusions: req.OutputExclusionGlobs, - } - - err := s.globWatcher.WatchGlobs(req.Hash, outputs) - if err != nil { - return nil, err - } - return &turbodprotocol.NotifyOutputsWrittenResponse{}, nil -} - -// GetChangedOutputs implements the GetChangedOutputs rpc from turbo.proto -func (s *Server) GetChangedOutputs(ctx context.Context, req *turbodprotocol.GetChangedOutputsRequest) (*turbodprotocol.GetChangedOutputsResponse, error) { - - changedGlobs, err := s.globWatcher.GetChangedGlobs(req.Hash, req.OutputGlobs) - if err != nil { - return nil, err - } - return &turbodprotocol.GetChangedOutputsResponse{ - ChangedOutputGlobs: changedGlobs, - }, nil -} - -// Hello implements the Hello rpc from turbo.proto -func (s *Server) Hello(ctx context.Context, req *turbodprotocol.HelloRequest) (*turbodprotocol.HelloResponse, error) { - clientVersion := req.Version - if clientVersion != 
s.turboVersion { - err := status.Errorf(codes.FailedPrecondition, "version mismatch. Client %v Server %v", clientVersion, s.turboVersion) - return nil, err - } - return &turbodprotocol.HelloResponse{}, nil -} - -// Shutdown implements the Shutdown rpc from turbo.proto -func (s *Server) Shutdown(ctx context.Context, req *turbodprotocol.ShutdownRequest) (*turbodprotocol.ShutdownResponse, error) { - if s.tryClose() { - return &turbodprotocol.ShutdownResponse{}, nil - } - err := status.Error(codes.NotFound, "shutdown mechanism not found") - return nil, err -} - -// Status implements the Status rpc from turbo.proto -func (s *Server) Status(ctx context.Context, req *turbodprotocol.StatusRequest) (*turbodprotocol.StatusResponse, error) { - uptime := uint64(time.Since(s.started).Milliseconds()) - return &turbodprotocol.StatusResponse{ - DaemonStatus: &turbodprotocol.DaemonStatus{ - LogFile: s.logFilePath.ToString(), - UptimeMsec: uptime, - }, - }, nil -} diff --git a/cli/internal/server/server_test.go b/cli/internal/server/server_test.go deleted file mode 100644 index b7dcf3a15bcbe..0000000000000 --- a/cli/internal/server/server_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package server - -import ( - "context" - "testing" - "time" - - "github.com/hashicorp/go-hclog" - "google.golang.org/grpc" - "gotest.tools/v3/assert" - - turbofs "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbodprotocol" -) - -type mockGrpc struct { - stopped chan struct{} -} - -func (m *mockGrpc) GracefulStop() { - close(m.stopped) -} - -func (m *mockGrpc) RegisterService(desc *grpc.ServiceDesc, impl interface{}) {} - -func TestDeleteRepoRoot(t *testing.T) { - logger := hclog.Default() - logger.SetLevel(hclog.Debug) - repoRootRaw := t.TempDir() - repoRoot := turbofs.AbsoluteSystemPathFromUpstream(repoRootRaw) - - grpcServer := &mockGrpc{ - stopped: make(chan struct{}), - } - - s, err := New("testServer", logger, repoRoot, "some-version", "/log/file/path") - 
assert.NilError(t, err, "New") - s.Register(grpcServer) - - // Delete the repo root, ensure that GracefulStop got called - err = repoRoot.Remove() - assert.NilError(t, err, "Remove") - - select { - case <-grpcServer.stopped: - case <-time.After(2 * time.Second): - t.Error("timed out waiting for graceful stop to be called") - } -} - -func TestShutdown(t *testing.T) { - logger := hclog.Default() - repoRootRaw := t.TempDir() - repoRoot := turbofs.AbsoluteSystemPathFromUpstream(repoRootRaw) - - grpcServer := &mockGrpc{ - stopped: make(chan struct{}), - } - - s, err := New("testServer", logger, repoRoot, "some-version", "/log/file/path") - assert.NilError(t, err, "New") - s.Register(grpcServer) - - ctx := context.Background() - _, err = s.Shutdown(ctx, &turbodprotocol.ShutdownRequest{}) - assert.NilError(t, err, "Shutdown") - // Ensure that graceful stop gets called - select { - case <-grpcServer.stopped: - case <-time.After(2 * time.Second): - t.Error("timed out waiting for graceful stop to be called") - } -} diff --git a/cli/internal/turbostate/turbostate.go b/cli/internal/turbostate/turbostate.go index dad5b470f9c2e..2ed23ddc0f537 100644 --- a/cli/internal/turbostate/turbostate.go +++ b/cli/internal/turbostate/turbostate.go @@ -72,9 +72,8 @@ type RunPayload struct { // Command consists of the data necessary to run a command. // Only one of these fields should be initialized at a time. 
type Command struct { - Daemon *DaemonPayload `json:"daemon"` - Prune *PrunePayload `json:"prune"` - Run *RunPayload `json:"run"` + Prune *PrunePayload `json:"prune"` + Run *RunPayload `json:"run"` } // ParsedArgsFromRust are the parsed command line arguments passed diff --git a/crates/glob-match/benches/bench.rs b/crates/glob-match/benches/bench.rs index e7f1cb54ef63a..b96d867108f0d 100644 --- a/crates/glob-match/benches/bench.rs +++ b/crates/glob-match/benches/bench.rs @@ -17,7 +17,9 @@ fn globset(pat: &str, s: &str) -> bool { } fn glob_match_crate(b: &mut Criterion) { - b.bench_function("mine", |b| b.iter(|| assert!(glob_match(GLOB, PATH)))); + b.bench_function("mine", |b| { + b.iter(|| assert!(glob_match(GLOB, PATH).unwrap_or_default())) + }); } fn glob_crate(b: &mut Criterion) { diff --git a/crates/glob-match/src/lib.rs b/crates/glob-match/src/lib.rs index 6e0da51826cab..0f300608c742d 100644 --- a/crates/glob-match/src/lib.rs +++ b/crates/glob-match/src/lib.rs @@ -24,13 +24,13 @@ struct Wildcard { type Capture = Range; -pub fn glob_match(glob: &str, path: &str) -> bool { +pub fn glob_match(glob: &str, path: &str) -> Option { glob_match_internal(glob, path, None) } pub fn glob_match_with_captures<'a>(glob: &str, path: &'a str) -> Option> { let mut captures = Vec::new(); - if glob_match_internal(glob, path, Some(&mut captures)) { + if let Some(true) = glob_match_internal(glob, path, Some(&mut captures)) { return Some(captures); } None @@ -41,7 +41,7 @@ fn glob_match_internal<'a>( glob_str: &str, path_str: &'a str, mut captures: Option<&mut Vec>, -) -> bool { +) -> Option { let glob = glob_str.as_bytes(); let path = path_str.as_bytes(); @@ -137,10 +137,7 @@ fn glob_match_internal<'a>( && state.glob_index < glob.len() && matches!(glob[state.glob_index], b',' | b'}') { - if state.skip_braces(glob, &mut captures, false) == BraceState::Invalid { - // invalid pattern! 
- return false; - } + state.skip_braces(glob, &mut captures, false)?; } continue; @@ -148,20 +145,14 @@ fn glob_match_internal<'a>( b'?' if state.path_index < path.len() => { if !is_separator(path[state.path_index] as char) { state.glob_index += 1; - let cap = match get_char_slice(path_str, path, &mut state.path_index) { - Some(c) => c, - None => return false, - }; + let cap = get_char_slice(path_str, path, &mut state.path_index)?; state.add_char_capture(&mut captures, cap); continue; } } b'[' if state.path_index < path.len() => { state.glob_index += 1; - let c = match get_char_slice(path_str, path, &mut state.path_index) { - Some(c) => c, - None => return false, - }; + let c = get_char_slice(path_str, path, &mut state.path_index)?; // Check if the character class is negated. let mut negated = false; @@ -174,10 +165,7 @@ fn glob_match_internal<'a>( let mut first = true; let mut is_match = false; while state.glob_index < glob.len() && (first || glob[state.glob_index] != b']') { - let low = match get_char_slice(glob_str, glob, &mut state.glob_index) { - Some(c) => c, - None => return false, - }; + let low = get_char_slice(glob_str, glob, &mut state.glob_index)?; // If there is a - and the following character is not ], read the range end character. let high = if state.glob_index + 1 < glob.len() @@ -185,10 +173,7 @@ fn glob_match_internal<'a>( && glob[state.glob_index + 1] != b']' { state.glob_index += 1; - match get_char_slice(glob_str, glob, &mut state.glob_index) { - Some(c) => c, - None => return false, - } + get_char_slice(glob_str, glob, &mut state.glob_index)? } else { low }; @@ -200,7 +185,7 @@ fn glob_match_internal<'a>( } if state.glob_index >= glob.len() { // invalid pattern! - return false; + return None; } state.glob_index += 1; if is_match != negated { @@ -211,7 +196,7 @@ fn glob_match_internal<'a>( b'{' if state.path_index < path.len() => { if brace_stack.length as usize >= brace_stack.stack.len() { // Invalid pattern! Too many nested braces. 
- return false; + return None; } state.end_capture(&mut captures); @@ -244,7 +229,7 @@ fn glob_match_internal<'a>( // Match escaped characters as literals. if !unescape(&mut c, glob, &mut state.glob_index) { // Invalid pattern! - return false; + return None; } let is_match = if c == b'/' { @@ -283,13 +268,9 @@ fn glob_match_internal<'a>( if brace_stack.length > 0 { // If in braces, find next option and reset path to index where we saw the '{' - match state.skip_braces(glob, &mut captures, true) { - BraceState::Invalid => return false, - BraceState::Comma => { - state.path_index = brace_stack.last().path_index; - continue; - } - BraceState::EndBrace => {} + if let BraceState::Comma = state.skip_braces(glob, &mut captures, true)? { + state.path_index = brace_stack.last().path_index; + continue; } // Hit the end. Pop the stack. @@ -311,7 +292,7 @@ fn glob_match_internal<'a>( } } - return negated; + return Some(negated); } if brace_stack.length > 0 && state.glob_index > 0 && glob[state.glob_index - 1] == b'}' { @@ -319,7 +300,7 @@ fn glob_match_internal<'a>( brace_stack.pop(&state, &mut captures); } - !negated + Some(!negated) } /// gets a slice to a unicode grapheme at the given index @@ -395,7 +376,6 @@ fn unescape(c: &mut u8, glob: &[u8], glob_index: &mut usize) -> bool { #[derive(PartialEq)] enum BraceState { - Invalid, Comma, EndBrace, } @@ -460,7 +440,7 @@ impl State { glob: &[u8], captures: &mut Option<&mut Vec>, stop_on_comma: bool, - ) -> BraceState { + ) -> Option { let mut braces = 1; let mut in_brackets = false; let mut capture_index = self.capture_index + 1; @@ -471,7 +451,7 @@ impl State { b'}' if !in_brackets => braces -= 1, b',' if stop_on_comma && braces == 1 && !in_brackets => { self.glob_index += 1; - return BraceState::Comma; + return Some(BraceState::Comma); } c @ (b'*' | b'?' 
| b'[') if !in_brackets => { if c == b'[' { @@ -502,10 +482,10 @@ impl State { } if braces != 0 { - return BraceState::Invalid; + return None; } - BraceState::EndBrace + Some(BraceState::EndBrace) } } @@ -642,8 +622,9 @@ mod tests { #[test_case("🇩🇪?z", "🇩🇪🇩🇪z" ; "Germany flag wildcard emoji")] #[test_case("j[🇬🇧-🇳🇴]", "j🇬🇧" ; "latin letter with flag emoji range match")] fn unicode(glob: &str, path: &str) { - assert!( + assert_eq!( glob_match(glob, path), + Some(true), "`{}` doesn't match `{}`", path, glob @@ -716,8 +697,9 @@ mod tests { "some/foo/a/bigger/path/to/the/crazy/needle.txt" )] fn basic(path: &str, glob: &str) { - assert!( + assert_eq!( glob_match(path, glob), + Some(true), "`{}` doesn't match `{}`", path, glob @@ -755,7 +737,13 @@ mod tests { "some/foo/d/bigger/path/to/the/crazy/needle.txt" )] fn basic_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path), "`{}` matches `{}`", path, glob); + assert_eq!( + glob_match(glob, path), + Some(false), + "`{}` matches `{}`", + path, + glob + ); } // The below tests are based on Bash and micromatch. 
@@ -772,8 +760,9 @@ mod tests { #[test_case("\\a*", "abd")] #[test_case("\\a*", "abe")] fn bash(glob: &str, path: &str) { - assert!( + assert_eq!( glob_match(glob, path), + Some(true), "`{}` doesn't match `{}`", path, glob @@ -805,13 +794,20 @@ mod tests { #[test_case("\\a*", "dd")] #[test_case("\\a*", "de")] fn bash_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path), "`{}` matches `{}`", path, glob); + assert_eq!( + glob_match(glob, path), + Some(false), + "`{}` matches `{}`", + path, + glob + ); } #[test_case("b*/", "bdir/")] fn bash_directories(glob: &str, path: &str) { - assert!( + assert_eq!( glob_match(glob, path), + Some(true), "`{}` doesn't match `{}`", path, glob @@ -837,7 +833,13 @@ mod tests { #[test_case("b*/", "dd")] #[test_case("b*/", "de")] fn bash_directories_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path), "`{}` matches `{}`", path, glob); + assert_eq!( + glob_match(glob, path), + Some(false), + "`{}` matches `{}`", + path, + glob + ); } #[test_case("\\*", "*" ; "escaped star")] @@ -846,8 +848,9 @@ mod tests { #[test_case("\\**", "*" ; "escaped double star")] #[test_case("\\**", "**" ; "escaped double star 2")] fn bash_escaping(glob: &str, path: &str) { - assert!( + assert_eq!( glob_match(glob, path), + Some(true), "`{}` doesn't match `{}`", path, glob @@ -947,7 +950,13 @@ mod tests { #[test_case("\\**", "dd")] #[test_case("\\**", "de")] fn bash_escaping_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path), "`{}` matches `{}`", path, glob); + assert_eq!( + glob_match(glob, path), + Some(false), + "`{}` matches `{}`", + path, + glob + ); } #[test_case("a*[^c]", "abd")] @@ -991,9 +1000,10 @@ mod tests { #[test_case("[^a-c]*", "BZZ")] #[test_case("[^a-c]*", "BewAre")] fn bash_classes(glob: &str, path: &str) { - assert!( + assert_eq!( glob_match(glob, path), - "`{}` does not match `{}`", + Some(true), + "`{}` doesn't match `{}`", path, glob ); @@ -1189,7 +1199,13 @@ mod tests { #[test_case("[^a-c]*", "bzz")] 
#[test_case("[^a-c]*", "beware" ; "not a to c beware")] fn bash_classes_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path), "`{}` matches `{}`", path, glob); + assert_eq!( + glob_match(glob, path), + Some(false), + "`{}` matches `{}`", + path, + glob + ); } #[test_case("]", "]")] @@ -1200,24 +1216,30 @@ mod tests { #[test_case("t[a-g]n", "ten")] #[test_case("t[^a-g]n", "ton")] fn bash_wildmatch(glob: &str, path: &str) { - assert!(glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(true)); } #[test_case("a[]-]b", "aab")] #[test_case("[ten]", "ten")] fn bash_wildmatch_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path), "`{}` matches `{}`", path, glob); + assert_eq!( + glob_match(glob, path), + Some(false), + "`{}` matches `{}`", + path, + glob + ); } #[test_case("foo[/]bar", "foo/bar")] #[test_case("f[^eiu][^eiu][^eiu][^eiu][^eiu]r", "foo-bar")] fn bash_slashmatch(glob: &str, path: &str) { - assert!(glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(true)); } // #[test_case("f[^eiu][^eiu][^eiu][^eiu][^eiu]r", "f[^eiu][^eiu][^eiu][^eiu][^eiu]r")] // fn bash_slashmatch_not(glob: &str, path: &str) { - // assert!(!glob_match(glob, path), "`{}` matches `{}`", path, glob); + // assert_eq!(glob_match(glob, path), Some(false), "`{}` matches `{}`", path, glob); // } #[test_case("a**c", "abc" ; "a doublestar")] @@ -1246,7 +1268,7 @@ mod tests { #[test_case("a**?**cd**?**??***k**", "abcdecdhjk")] #[test_case("a****c**?**??*****", "abcdecdhjk")] fn bash_extra_stars(glob: &str, path: &str) { - assert!(glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(true)); } #[test_case("a**c", "bbc")] @@ -1256,7 +1278,13 @@ mod tests { #[test_case("a*****?c", "bbc" ; "a 5 star qmark c")] #[test_case("?***?****c", "bbd")] fn bash_extra_stars_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path), "`{}` matches `{}`", path, glob); + assert_eq!( + glob_match(glob, path), + Some(false), + "`{}` matches `{}`", + 
path, + glob + ); } #[test_case("*.js", "z.js")] @@ -1391,7 +1419,7 @@ mod tests { #[test_case("foo/**/bar", "foo/baz/bar" ; "foo doublestar bar")] #[test_case("**/foo", "XXX/foo")] fn stars(glob: &str, path: &str) { - assert!(glob_match(glob, path)) + assert_eq!(glob_match(glob, path), Some(true)); } #[test_case("*.js", "a/b/c/z.js")] @@ -1504,7 +1532,7 @@ mod tests { #[test_case("foo**bar", "foo/baz/bar" ; "foo doublestar bar")] #[test_case("foo*bar", "foo/baz/bar" ; "foo star bar")] fn stars_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path)) + assert_eq!(glob_match(glob, path), Some(false)); } #[test_case("**/*.js", "a/b/c/d.js")] @@ -1628,7 +1656,7 @@ mod tests { #[test_case("a/**/*", "a/b/c/d" ; "a doublestar star a/b/c/d")] #[test_case("a/**/**/*", "a/b/c/d" ; "a doublestar doublestar star a/b/c/d")] fn globstars(glob: &str, path: &str) { - assert!(glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(true)); } #[test_case("a/b/**/*.js", "a/d.js")] @@ -1677,7 +1705,7 @@ mod tests { #[test_case("**/d/*", "a/b/c/d")] #[test_case("b/**", "a/b/c/d")] fn globstars_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(false)); } #[test_case("フ*/**/*", "フォルダ/aaa.js")] @@ -1686,7 +1714,7 @@ mod tests { #[test_case("フ*ル*/**/*", "フォルダ/aaa.js")] #[test_case("フォルダ/**/*", "フォルダ/aaa.js")] fn utf8(glob: &str, path: &str) { - assert!(glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(true)); } #[test_case("*!*.md", "!foo!.md" ; "not star")] @@ -1796,7 +1824,7 @@ mod tests { #[test_case("!*.md", "a/b.md")] #[test_case("!**/*.md", "c.txt")] fn negation(glob: &str, path: &str) { - assert!(glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(true)); } #[test_case("!*", "abc")] @@ -1874,7 +1902,7 @@ mod tests { #[test_case("!*.md", "a.md")] // #[test_case("!**/*.md", "b.md")] fn negation_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path)); + 
assert_eq!(glob_match(glob, path), Some(false)); } #[test_case("?", "a")] @@ -1892,7 +1920,7 @@ mod tests { #[test_case("a/???/c.md", "a/bbb/c.md")] #[test_case("a/????/c.md", "a/bbbb/c.md")] fn question_mark(glob: &str, path: &str) { - assert!(glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(true)); } #[test_case("?", "aa")] @@ -1918,7 +1946,7 @@ mod tests { #[test_case("a/?/c/???/e.md", "a/b/c/d/e.md")] #[test_case("a/?/c.md", "a/bb/c.md")] fn question_mark_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(false)); } #[test_case("{a,b,c}", "a")] @@ -2020,7 +2048,7 @@ mod tests { // #[test_case("a{,.*{foo,db},\\(bar\\)}", "a")] // #[test_case("a{,*.{foo,db},\\(bar\\)}", "a")] fn braces(glob: &str, path: &str) { - assert!(glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(true)); } #[test_case("{a,b,c}", "aa")] @@ -2059,7 +2087,7 @@ mod tests { #[test_case("a/?/e.md", "a/bb/e.md" ; "a qmark e.md")] #[test_case("a/?/**/e.md", "a/bb/e.md" ; "a qmark doublestar e.md")] fn braces_not(glob: &str, path: &str) { - assert!(!glob_match(glob, path)); + assert_eq!(glob_match(glob, path), Some(false)); } #[test_case("a/*[a-z]x/c", "a/yybx/c" => Some(vec!["yy", "b"]))] @@ -2110,6 +2138,6 @@ mod tests { #[test_case("{*{??*{??**,Uz*zz}w**{*{**a,z***b*[!}w??*azzzzzzzz*!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!z[za,z&zz}w**z*z*}")] #[test_case("**** *{*{??*{??***\u{5} *{*{??*{??***\u{5},\0U\0}]*****\u{1},\0***\0,\0\0}w****,\0U\0}]*****\u{1},\0***\0,\0\0}w*****\u{1}***{}*.*\0\0*\0")] fn fuzz_tests(fuzz: &str) { - assert!(!glob_match(fuzz, fuzz)); + assert_eq!(glob_match(fuzz, fuzz), None); } } diff --git a/crates/globwatch/Cargo.toml b/crates/globwatch/Cargo.toml new file mode 100644 index 0000000000000..9af15b20c895b --- /dev/null +++ b/crates/globwatch/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "globwatch" +version = 
"0.1.0" +edition = "2021" +description = "Watch a set of globs efficiently" +license = "MIT OR Apache-2.0" + +[dependencies] +futures = { version = "0.3.26" } +glob-match = "0.2.1" +itertools.workspace = true +merge-streams = "0.1.2" +notify = { version = "5.1.0", default-features = false, features = [ + "macos_fsevent", + "fsevent-sys", +] } +notify-debouncer-mini = { version = "0.2.1", default-features = false } +pin-project = "1.0.12" +stop-token = "0.7.0" +tokio = { version = "1.25.0", features = ["sync"] } +tokio-stream = "0.1.12" +tracing = "0.1.37" +unic-segment = "0.9.0" +walkdir = "2.3.2" + +[dev-dependencies] +test-case = "3.0.0" +tokio = { version = "1.25.0", features = [ + "rt", + "rt-multi-thread", + "time", + "macros", +] } +tracing-subscriber = "0.3.16" +tracing-test = "0.2.4" diff --git a/crates/globwatch/examples/cancel.rs b/crates/globwatch/examples/cancel.rs new file mode 100644 index 0000000000000..5dcc5b9226c8d --- /dev/null +++ b/crates/globwatch/examples/cancel.rs @@ -0,0 +1,45 @@ +use std::{path::PathBuf, time::Duration}; + +use futures::{join, StreamExt}; +use globwatch::GlobWatcher; +use tracing::{info, info_span}; + +#[tokio::main] +async fn main() { + tracing_subscriber::fmt::init(); + let (watcher, config) = GlobWatcher::new("./flush".into()).unwrap(); + let stop = stop_token::StopSource::new(); + let mut stream = watcher.into_stream(stop.token()); + + let watch_fut = async { + let span = info_span!("watch_fut"); + let _ = span.enter(); + while let Some(Ok(e)) = stream.next().await { + info!(parent: &span, "{:?}", e); + } + info!(parent: &span, "done"); + }; + + let config_fut = async { + let span = info_span!("config_fut"); + let _ = span.enter(); + for x in 0..5 { + info!(parent: &span, "iteration {}", x); + config + .include(&PathBuf::try_from(".").unwrap(), "globwatch/src/**") + .await + .unwrap(); + tokio::time::sleep(Duration::from_secs(1)).await; + config + .exclude(&PathBuf::try_from(".").unwrap(), "globwatch/src/**") + .await + 
.unwrap(); + tokio::time::sleep(Duration::from_secs(1)).await; + } + + info!(parent: &span, "dropping stop"); + drop(stop); + }; + + join!(watch_fut, config_fut); +} diff --git a/crates/globwatch/readme.md b/crates/globwatch/readme.md new file mode 100644 index 0000000000000..4288ba29597e2 --- /dev/null +++ b/crates/globwatch/readme.md @@ -0,0 +1,27 @@ +# Globwatch + +> Watch a set of globs + +This library provides an async interface over notify and glob-match to +efficiently watch a list of globs. Where possible it attempts to minimize the +number of watched directories by registering watchers for the minimum possible +set of files / folders by breaking down the glob pattern into a list of folders. + +This is exposed as a `Stream` and a `Sink`. The stream produces `notify` events, +whereas the `Sink` can be used to update the configuration on-the-fly. + +For a basic example see the `examples/cancel.rs`. + +```rust +let (watcher, mut config) = GlobWatcher::new("./flush").unwrap(); +let stop = StopSource::new(); +let mut stream = watcher.into_stream(stop.token()); +config.include(Path::new("/app/css").into()); +config.include(Path::new("/app/html").into()); +while let Some(Ok(e)) = stream.next().await { + debug!("received event: {:?}", e); + + // use the cancellation token to stop the watcher + drop(stop); +} +``` diff --git a/crates/globwatch/src/lib.rs b/crates/globwatch/src/lib.rs new file mode 100644 index 0000000000000..6730e5d9ded57 --- /dev/null +++ b/crates/globwatch/src/lib.rs @@ -0,0 +1,516 @@ +//! A wrapper around notify that allows for glob-based watching. +//! +//! ## What is flushing? +//! +//! On certain filesystems, file events are not guaranteed to be delivered in +//! the correct order, or on time. This can cause issues when trying to +//! determine if a file has changed, as we don't want to register a watcher +//! for a file if we are not 'up to date'. The flushing mechanism allows us to +//! 
watch for a full round trip through the filesystem to ensure the watcher is +//! up to date. + +#![deny( + missing_docs, + missing_debug_implementations, + missing_copy_implementations, + clippy::unwrap_used, + unused_must_use, + unsafe_code +)] +#![feature(drain_filter)] + +use std::{ + collections::HashMap, + fs::File, + path::{Path, PathBuf}, + sync::{ + atomic::{AtomicU64, Ordering}, + Arc, Mutex, + }, +}; + +use futures::{channel::oneshot, future::Either, Stream, StreamExt as _}; +use itertools::Itertools; +use merge_streams::MergeStreams; +pub use notify::{Error, Event, Watcher}; +pub use stop_token::{stream::StreamExt, StopSource, StopToken, TimedOutError}; +use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender}; +use tokio_stream::wrappers::UnboundedReceiverStream; +use tracing::{event, span, trace, warn, Id, Level, Span}; + +/// A wrapper around notify that allows for glob-based watching. +#[derive(Debug)] +pub struct GlobWatcher { + stream: UnboundedReceiver, + flush_dir: PathBuf, + + config: UnboundedReceiver, +} + +impl GlobWatcher { + /// Create a new watcher, using the given flush directory as a temporary + /// storage when flushing file events. For more information on flushing, + /// see the module-level documentation. 
+ #[tracing::instrument] + pub fn new( + flush_dir: PathBuf, + ) -> Result<(Self, WatchConfig), Error> { + let (send_event, receive_event) = tokio::sync::mpsc::unbounded_channel(); + let (send_config, receive_config) = tokio::sync::mpsc::unbounded_channel(); + + // even if this fails, we may still be able to continue + std::fs::create_dir_all(&flush_dir).ok(); + + let mut watcher = notify::recommended_watcher(move |event: Result| { + let span = span!(tracing::Level::TRACE, "watcher"); + let _ = span.enter(); + + let result = event.map(|e| { + trace!(parent: &span, "sending event: {:?}", e); + let tx = send_event.clone(); + futures::executor::block_on(async move { tx.send(e) }) + }); + + match result { + Ok(Ok(_)) => {} + Ok(Err(e)) => { + warn!(parent: &span, "watch server closed: {:?}", e); + } + Err(e) => { + warn!(parent: &span, "error from notify: {:?}", e); + } + } + })?; + + watcher.watch(flush_dir.as_path(), notify::RecursiveMode::Recursive)?; + + let watcher = Arc::new(Mutex::new(watcher)); + + Ok(( + Self { + flush_dir, + stream: receive_event, + config: receive_config, + }, + WatchConfig { + flush: send_config, + watcher, + }, + )) + } +} + +impl GlobWatcher { + /// Convert the watcher into a stream of events, handling config changes and + /// flushing transparently. + /// + /// This is implemented as a zipped stream which processes filesystem events + /// and config changes driven by the same stream. 
This allows us to ensure + /// that anything watching for filesystem is also propagating config changes + #[tracing::instrument(skip(self))] + pub fn into_stream( + self, + token: stop_token::StopToken, + ) -> impl Stream> + Send + Sync + 'static + Unpin { + let flush_id = Arc::new(AtomicU64::new(1)); + let flush_dir = Arc::new(self.flush_dir.clone()); + let flush = Arc::new(Mutex::new(HashMap::>::new())); + + Box::pin( + ( + UnboundedReceiverStream::new(self.stream).map(Either::Left), + UnboundedReceiverStream::new(self.config).map(Either::Right), + ) + .merge() + // apply a filter_map, yielding only valid events and consuming config changes and + // flushes + .filter_map(move |f| { + let span = span!(tracing::Level::TRACE, "stream_processor"); + let _ = span.enter(); + + // clone all the Arcs needed + let flush_id = flush_id.clone(); + let flush_dir = flush_dir.clone(); + let flush = flush.clone(); + + async move { + match f { + Either::Left(mut e) => { + // if we receive an event for a file in the flush dir, we need to + // remove it from the events list, and send a signal to the flush + // requestor. flushes should not be considered as events. 
+ for flush_id in e + .paths + .drain_filter(|p| p.starts_with(flush_dir.as_path())) + .filter_map(|p| { + get_flush_id( + p.strip_prefix(flush_dir.as_path()) + .expect("confirmed above"), + ) + }) + { + trace!("flushing {:?}", flush); + if let Some(tx) = flush + .lock() + .expect("only fails if holder panics") + .remove(&flush_id) + { + // if this fails, it just means the requestor has gone away + // and we can ignore it + tx.send(()).ok(); + } + } + + // if we have any paths left on the event, yield it + if !e.paths.is_empty() { + event!(parent: &span, Level::TRACE, "yielding {:?}", e); + Some(e) + } else { + None + } + } + Either::Right(WatcherCommand::Flush(tx)) => { + // create file in flush dir + let flush_id = flush_id.fetch_add(1, Ordering::SeqCst); + let flush_file = flush_dir.join(flush_id.to_string()); + if let Err(e) = File::create(flush_file) { + warn!("failed to create flush file: {}", e); + } else { + flush + .lock() + .expect("only fails if holder panics") + .insert(flush_id, tx); + } + None + } + } + } + }) + .timeout_at(token), + ) + } +} + +fn get_flush_id(relative_path: &Path) -> Option { + relative_path + .file_name() + .and_then(|p| p.to_str()) + .and_then(|p| p.parse().ok()) +} + +/// A configuration change to the watcher. +#[derive(Debug)] +pub enum WatcherCommand { + /// A request to flush the watcher. + Flush(oneshot::Sender<()>), +} + +/// A change to the watcher configuration. +/// +/// This is used to communicate changes to the watcher +/// from other threads. Can optionally contain the span +/// that the change was made in, for tracing purposes. +#[derive(Debug)] +pub enum WatcherChange { + /// Register a glob to be included by the watcher. + Include(String, Option), + /// Register a glob to be excluded by the watcher. + Exclude(String, Option), +} + +/// A sender for watcher configuration changes. +#[derive(Debug, Clone)] +pub struct WatchConfig { + flush: UnboundedSender, + watcher: Arc>, +} + +/// The server is no longer running. 
+#[derive(Debug)] +pub enum ConfigError { + /// The server is no longer running. + ServerStopped, + /// Watch error + WatchError(Vec), +} + +impl WatchConfig { + /// Register a glob to be included by the watcher. + #[tracing::instrument(skip(self))] + pub async fn include(&self, relative_to: &Path, glob: &str) -> Result<(), ConfigError> { + trace!("including {:?}", glob); + + glob_to_paths(&glob) + .iter() + .map(|p| relative_to.join(p)) + .map(|p| { + trace!("watching {:?}", p); + self.watcher + .lock() + .expect("only fails if poisoned") + .watch(&p, notify::RecursiveMode::Recursive) + }) + .map(|r| match r { + Ok(()) => Ok(()), + Err(Error { + kind: notify::ErrorKind::PathNotFound, + .. + }) => { + // if the path we are trying to watch doesn't exist + // it is not immediately an error; glob_to_paths + // will generate paths that potentially don't exist, + // since it doesn't walk the fs, no no-op + Ok(()) + } + Err(Error { + kind: notify::ErrorKind::Generic(s), + .. + }) if s.contains("No such file or directory") + || s.eq("Input watch path is neither a file nor a directory.") => + { + Ok(()) + } + Err(e) => Err(e), + }) + .fold(Ok(()), |acc, next| match (acc, next) { + (Ok(()), Ok(())) => Ok(()), + (Ok(()), Err(e)) => Err(vec![e]), + (Err(acc), Ok(())) => Err(acc), + (Err(mut acc), Err(e)) => { + acc.push(e); + Err(acc) + } + }) + .map_err(ConfigError::WatchError) + } + + /// Register a glob to be excluded by the watcher. + #[tracing::instrument(skip(self))] + pub async fn exclude(&self, relative_to: &Path, glob: &str) -> Result<(), ConfigError> { + trace!("excluding {:?}", glob); + + for p in glob_to_paths(&glob).iter().map(|p| relative_to.join(p)) { + // we don't care if this fails, it's just a best-effort + self.watcher + .lock() + .expect("only fails if poisoned") + .unwatch(&p) + .ok(); + } + Ok(()) + } + + /// Await a full filesystem flush from the watcher. 
+ pub async fn flush(&self) -> Result<(), ConfigError> { + let (tx, rx) = oneshot::channel(); + self.flush + .send(WatcherCommand::Flush(tx)) + .map_err(|_| ConfigError::ServerStopped)?; + rx.await.map_err(|_| ConfigError::ServerStopped) + } +} + +#[derive(PartialEq, Eq, Debug)] +enum GlobSymbol<'a> { + Char(&'a [u8]), + OpenBracket, + CloseBracket, + OpenBrace, + CloseBrace, + Star, + DoubleStar, + Question, + Negation, + PathSeperator, +} + +/// Gets the minimum set of paths that can be watched for a given glob, +/// specified in minimatch glob syntax. +/// +/// syntax: +/// ? Matches any single character. +/// +/// *  Matches zero or more characters, except for path separators. +/// +/// ** Matches zero or more characters, including path separators. +/// Must match a complete path segment. +/// +/// [ab] Matches one of the characters contained in the brackets. +/// Character ranges, e.g. [a-z] are also supported. Use [!ab] or [^ab] +/// to match any character except those contained in the brackets. +/// +/// {a,b} Matches one of the patterns contained in the braces. Any of the +/// wildcard characters can be used in the sub-patterns. Braces may +/// be nested up to 10 levels deep. +/// +/// ! When at the start of the glob, this negates the result. +/// Multiple ! characters negate the glob multiple times. +/// +/// \ A backslash character may be used to escape any special characters. +/// +/// Of these, we only handle `{` and escaping. +/// +/// note: it is currently extremely conservative, handling only `**`, braces, +/// and `?`. any other case watches the entire directory. 
+fn glob_to_paths(glob: &str) -> Vec { + // get all the symbols and chunk them by path seperator + let chunks = glob_to_symbols(glob).group_by(|s| s != &GlobSymbol::PathSeperator); + let chunks = chunks + .into_iter() + .filter_map(|(not_sep, chunk)| (not_sep).then(|| chunk)); + + // multi cartisian product allows us to get all the possible combinations + // of path components for each chunk. for example, if we have a glob + // `{a,b}/1/{c,d}`, it will lazily yield the following sets of segments: + // ["a", "1", "c"] + // ["a", "1", "d"] + // ["b", "1", "c"] + // ["b", "1", "d"] + + chunks + .map(symbols_to_combinations) // yield all the possible segments for each glob chunk + .take_while(|c| c.is_some()) // if any segment has no possible paths, we can stop + .filter_map(|chunk| chunk) + .multi_cartesian_product() // get all the possible combinations of path segments + .map(|chunks| { + let prefix = if glob.starts_with("/") { "/" } else { "" }; + std::iter::once(prefix) + .chain(chunks.iter().map(|s| s.as_str())) + .collect::() + }) + .collect() +} + +/// given a set of symbols, returns an iterator over the possible path segments +/// that can be generated from them. this currently is very conservative, and +/// simply ejects if it encounters glob-like symbols. in the future, we should +/// handle brackets and braces. 
+/// +/// example: given the symbols "{a,b}b" it will yield ["ab"] and ["bb"] +fn symbols_to_combinations<'a, T: Iterator>>( + symbols: T, +) -> Option + Clone> { + let mut bytes = Vec::new(); + + for symbol in symbols { + match symbol { + GlobSymbol::Char(c) => { + bytes.extend_from_slice(c); + } + GlobSymbol::OpenBracket => return None, // todo handle brackets + GlobSymbol::CloseBracket => return None, + GlobSymbol::OpenBrace => return None, // todo handle braces + GlobSymbol::CloseBrace => return None, + GlobSymbol::Star => return None, + GlobSymbol::DoubleStar => return None, + GlobSymbol::Question => return None, + GlobSymbol::Negation => return None, + GlobSymbol::PathSeperator => return None, + } + } + + Some(std::iter::once( + String::from_utf8(bytes).expect("char is always valid utf8"), + )) +} + +/// parses and escapes a glob, returning an iterator over the symbols +fn glob_to_symbols(glob: &str) -> impl Iterator { + let glob_bytes = glob.as_bytes(); + let mut escaped = false; + let mut cursor = unic_segment::GraphemeCursor::new(0, glob.len()); + + std::iter::from_fn(move || loop { + let start = cursor.cur_cursor(); + if start == glob.len() { + return None; + } + + let end = match cursor.next_boundary(glob, 0) { + Ok(Some(end)) => end, + _ => return None, + }; + + if escaped { + escaped = false; + return if end - start == 1 { + Some(GlobSymbol::Char(match glob_bytes[start] { + b'a' => &[b'\x61'], + b'b' => &[b'\x08'], + b'n' => &[b'\n'], + b'r' => &[b'\r'], + b't' => &[b'\t'], + _ => &glob_bytes[start..end], + })) + } else { + return Some(GlobSymbol::Char(&glob_bytes[start..end])); + }; + } + + return if end - start == 1 { + match glob_bytes[start] { + b'\\' => { + escaped = true; + continue; + } + b'[' => Some(GlobSymbol::OpenBracket), + b']' => Some(GlobSymbol::CloseBracket), + b'{' => Some(GlobSymbol::OpenBrace), + b'}' => Some(GlobSymbol::CloseBrace), + b'*' => { + if glob_bytes.get(end) == Some(&b'*') { + cursor.set_cursor(end + 1); + 
Some(GlobSymbol::DoubleStar) + } else { + Some(GlobSymbol::Star) + } + } + b'?' => Some(GlobSymbol::Question), + b'!' => Some(GlobSymbol::Negation), + b'/' => Some(GlobSymbol::PathSeperator), + _ => Some(GlobSymbol::Char(&glob_bytes[start..end])), + } + } else { + Some(GlobSymbol::Char(&glob_bytes[start..end])) + }; + }) +} + +#[cfg(test)] +mod test { + use std::path::PathBuf; + + use test_case::test_case; + + use super::GlobSymbol::*; + + #[test_case("foo/**", vec!["foo"])] + #[test_case("foo/{a,b}", vec!["foo"])] + #[test_case("foo/*/bar", vec!["foo"])] + #[test_case("foo/[a-d]/bar", vec!["foo"])] + #[test_case("foo/a?/bar", vec!["foo"])] + #[test_case("foo/ab?/bar", vec!["foo"] ; "question marks ")] + #[test_case("foo/{a,b}/ab?", vec!["foo"])] + #[test_case("/abc", vec!["/abc"])] + #[test_case("/abc/abc/*", vec!["/abc/abc"])] + fn test_glob_to_paths(glob: &str, paths_exp: Vec<&str>) { + let mut paths = super::glob_to_paths(glob); + paths.sort(); + assert_eq!( + paths, + paths_exp.iter().map(PathBuf::from).collect::>() + ); + } + + #[test_case("🇳🇴/🇳🇴", vec![Char("🇳🇴".as_bytes()), PathSeperator, Char("🇳🇴".as_bytes())])] + #[test_case("foo/**", vec![Char(b"f"), Char(b"o"), Char(b"o"), PathSeperator, DoubleStar])] + #[test_case("foo/{a,b}", vec![Char(b"f"), Char(b"o"), Char(b"o"), PathSeperator, OpenBrace, Char(b"a"), Char(b","), Char(b"b"), CloseBrace])] + #[test_case("\\f", vec![Char(b"f")])] + #[test_case("\\\\f", vec![Char(b"\\"), Char(b"f")])] + #[test_case("\\🇳🇴", vec![Char("🇳🇴".as_bytes())])] + #[test_case("\\n", vec![Char(b"\n")])] + fn test_glob_to_symbols(glob: &str, symbols_exp: Vec) { + let symbols = super::glob_to_symbols(glob).collect::>(); + assert_eq!(symbols.as_slice(), symbols_exp.as_slice()); + } +} diff --git a/crates/pidlock/Cargo.toml b/crates/pidlock/Cargo.toml index daf814e584329..7edd6597e1544 100644 --- a/crates/pidlock/Cargo.toml +++ b/crates/pidlock/Cargo.toml @@ -24,6 +24,7 @@ strict = [] # Warnings are errors [dependencies] libc = 
"0.2.40" log = "0.4.1" +thiserror = { workspace = true } [target.'cfg(windows)'.dependencies] windows-sys = { version = "0.45.0", features = [ @@ -34,6 +35,3 @@ windows-sys = { version = "0.45.0", features = [ [dev-dependencies] rand = "0.8.2" tempdir = "0.3.7" - -[badges] -circle-ci = { repository = "rockstar/pidlock", branch = "master" } diff --git a/crates/pidlock/src/lib.rs b/crates/pidlock/src/lib.rs index d91d4962c7975..012560e4e1fd8 100644 --- a/crates/pidlock/src/lib.rs +++ b/crates/pidlock/src/lib.rs @@ -9,12 +9,18 @@ use std::{ use log::warn; /// Errors that may occur during the `Pidlock` lifetime. -#[derive(Debug, PartialEq)] +#[derive(Debug, thiserror::Error, PartialEq)] pub enum PidlockError { - #[doc = "A lock already exists"] - LockExists, - #[doc = "An operation was attempted in the wrong state, e.g. releasing before acquiring."] + /// A lock already exists + #[error("lock exists at {0}")] + LockExists(PathBuf), + /// An operation was attempted in the wrong state, e.g. releasing before + /// acquiring. + #[error("invalid state")] InvalidState, + /// The lock is already owned by a running process + #[error("already owned")] + AlreadyOwned, } /// A result from a Pidlock operation @@ -23,11 +29,11 @@ type PidlockResult = Result<(), PidlockError>; /// States a Pidlock can be in during its lifetime. #[derive(Debug, PartialEq)] enum PidlockState { - #[doc = "A new pidlock, unacquired"] + /// A new pidlock, unacquired New, - #[doc = "A lock is acquired"] + /// A lock is acquired Acquired, - #[doc = "A lock is released"] + /// A lock is released Released, } @@ -86,12 +92,6 @@ impl Pidlock { } } - /// Check whether a lock file already exists, and if it does, whether the - /// specified pid is still a valid process id on the system. - fn check_stale(&self) { - self.get_owner(); - } - /// Acquire a lock. 
pub fn acquire(&mut self) -> PidlockResult { match self.state { @@ -100,7 +100,16 @@ impl Pidlock { return Err(PidlockError::InvalidState); } } - self.check_stale(); + + // acquiring something with a valid owner is an error + if self.get_owner().is_some() { + return Err(PidlockError::AlreadyOwned); + } + + if let Some(p) = self.path.parent() { + // even if this fails, the next call might not + std::fs::create_dir_all(p).ok(); + } let mut file = match fs::OpenOptions::new() .create_new(true) @@ -109,7 +118,7 @@ impl Pidlock { { Ok(file) => file, Err(_) => { - return Err(PidlockError::LockExists); + return Err(PidlockError::LockExists(self.path.clone())); } }; file.write_all(&format!("{}", self.pid).into_bytes()[..]) @@ -125,7 +134,7 @@ impl Pidlock { } /// Release the lock. - pub fn release(&mut self) -> PidlockResult { + fn release(&mut self) -> PidlockResult { match self.state { PidlockState::Acquired => {} _ => { @@ -172,6 +181,14 @@ impl Pidlock { } } +impl Drop for Pidlock { + fn drop(&mut self) { + if self.locked() { + self.release().ok(); + } + } +} + #[cfg(test)] mod tests { use std::{fs, io::Write, path::PathBuf}; @@ -225,7 +242,7 @@ mod tests { match pidfile.acquire() { Err(err) => { orig_pidfile.release().unwrap(); - assert_eq!(err, PidlockError::LockExists); + assert_eq!(err, PidlockError::AlreadyOwned); } _ => { orig_pidfile.release().unwrap(); @@ -294,8 +311,8 @@ mod tests { drop(file); - let mut pidfile = Pidlock::new(path); - assert_eq!(pidfile.acquire(), Err(PidlockError::LockExists)); + let mut pidfile = Pidlock::new(path.clone()); + assert_eq!(pidfile.acquire(), Err(PidlockError::LockExists(path))); } #[test] @@ -316,9 +333,9 @@ mod tests { drop(file); - let mut pidfile = Pidlock::new(path); + let mut pidfile = Pidlock::new(path.clone()); - assert_eq!(pidfile.acquire(), Err(PidlockError::LockExists)); + assert_eq!(pidfile.acquire(), Err(PidlockError::LockExists(path))); } #[test] @@ -335,7 +352,7 @@ mod tests { drop(file); - let mut pidfile = 
Pidlock::new(path); - assert_eq!(pidfile.acquire(), Err(PidlockError::LockExists)); + let mut pidfile = Pidlock::new(path.clone()); + assert_eq!(pidfile.acquire(), Err(PidlockError::LockExists(path))); } } diff --git a/crates/turborepo-lib/Cargo.toml b/crates/turborepo-lib/Cargo.toml index 6a156d16910eb..b1161dd8b29e5 100644 --- a/crates/turborepo-lib/Cargo.toml +++ b/crates/turborepo-lib/Cargo.toml @@ -11,6 +11,9 @@ default = ["rustls-tls"] native-tls = ["turborepo-api-client/native-tls", "turbo-updater/native-tls"] rustls-tls = ["turborepo-api-client/rustls-tls", "turbo-updater/rustls-tls"] +# serve the daemon over a port (useful for testing) +http = ["tonic-reflection"] + # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dev-dependencies] assert_cmd = { workspace = true } @@ -19,8 +22,11 @@ itertools = { workspace = true } port_scanner = { workspace = true } pretty_assertions = { workspace = true } rand = { workspace = true } +tempdir = "0.3.7" tempfile = { workspace = true } test-case = "3.0.0" +tracing-test = { version = "0.2.4", features = ["no-env-filter"] } +tracing.workspace = true vercel-api-mock = { workspace = true } [dependencies] @@ -36,15 +42,18 @@ config = "0.13" console = { workspace = true } ctrlc = { version = "3.2.5", features = ["termination"] } dialoguer = { workspace = true, features = ["fuzzy-select"] } +directories = "4.0.1" dirs-next = "2.0.0" dunce = { workspace = true } env_logger = "0.10.0" futures = "0.3.26" glob-match = "0.2.1" +globwatch = { path = "../globwatch" } hex = "0.4.3" hostname = "0.3.1" humantime = "2.1.0" indicatif = { workspace = true } +itertools = "0.10.5" lazy_static = { workspace = true } libc = "0.2.140" log = { workspace = true } @@ -65,14 +74,17 @@ sysinfo = "0.27.7" thiserror = "1.0.38" tiny-gradient = { workspace = true } tokio = { workspace = true, features = ["full", "time"] } -tokio-stream = "0.1.12" +tokio-stream = { version = "0.1.12", features = ["net"] } 
tokio-util = { version = "0.7.7", features = ["compat"] } tonic = { version = "0.8.3", features = ["transport"] } +tonic-reflection = { version = "0.6.0", optional = true } tower = "0.4.13" uds_windows = "1.0.2" url = "2.3.1" const_format = "0.2.30" +go-parse-duration = "0.1.1" +tracing.workspace = true turbo-updater = { workspace = true } turbopath = { workspace = true } turborepo-api-client = { workspace = true } diff --git a/crates/turborepo-lib/build.rs b/crates/turborepo-lib/build.rs index cd1a037f58823..427f4915368ed 100644 --- a/crates/turborepo-lib/build.rs +++ b/crates/turborepo-lib/build.rs @@ -10,6 +10,7 @@ fn main() -> Result<(), Box> { tonic_build::configure() .build_server(true) + .file_descriptor_set_path("src/daemon/file_descriptor_set.bin") .compile(&["turbod.proto"], &["../../cli/internal/turbodprotocol"])?; Ok(()) } diff --git a/crates/turborepo-lib/src/cli.rs b/crates/turborepo-lib/src/cli.rs index 6ad731a68b467..a2489183cbc8c 100644 --- a/crates/turborepo-lib/src/cli.rs +++ b/crates/turborepo-lib/src/cli.rs @@ -243,9 +243,9 @@ pub enum Command { Completion { shell: Shell }, /// Runs the Turborepo background daemon Daemon { - /// Set the idle timeout for turbod (default 4h0m0s) - #[clap(long)] - idle_time: Option, + /// Set the idle timeout for turbod + #[clap(long, default_value_t = String::from("4h0m0s"))] + idle_time: String, #[clap(subcommand)] #[serde(flatten)] command: Option, @@ -520,7 +520,10 @@ pub async fn run(repo_state: Option) -> Result { Ok(Payload::Rust(Ok(0))) } - Command::Link { no_gitignore, target} => { + Command::Link { + no_gitignore, + target, + } => { if clap_args.test_run { println!("Link test run successful"); return Ok(Payload::Rust(Ok(0))); @@ -549,19 +552,17 @@ pub async fn run(repo_state: Option) -> Result { Ok(Payload::Rust(Ok(0))) } - Command::Daemon { - command: Some(command), - .. 
- } => { - let command = *command; - let base = CommandBase::new(clap_args, repo_root, version)?; - daemon::main(&command, &base).await?; + Command::Daemon { command, idle_time } => { + let base = CommandBase::new(clap_args.clone(), repo_root, version)?; + + match command { + Some(command) => daemon::daemon_client(command, &base).await, + None => daemon::daemon_server(&base, idle_time).await, + }?; + Ok(Payload::Rust(Ok(0))) - }, - Command::Prune { .. } - | Command::Run(_) - // the daemon itself still delegates to Go - | Command::Daemon { .. } => Ok(Payload::Go(Box::new(clap_args))), + } + Command::Prune { .. } | Command::Run(_) => Ok(Payload::Go(Box::new(clap_args))), Command::Completion { shell } => { generate(*shell, &mut Args::command(), "turbo", &mut io::stdout()); diff --git a/crates/turborepo-lib/src/commands/daemon.rs b/crates/turborepo-lib/src/commands/daemon.rs index 4cfdc09cade6d..0acc40b66adbe 100644 --- a/crates/turborepo-lib/src/commands/daemon.rs +++ b/crates/turborepo-lib/src/commands/daemon.rs @@ -1,10 +1,15 @@ use std::{path::PathBuf, time::Duration}; +use turbopath::{AbsoluteSystemPathBuf, RelativeSystemPathBuf}; + use super::CommandBase; -use crate::{cli::DaemonCommand, daemon::DaemonConnector}; +use crate::{ + cli::DaemonCommand, + daemon::{DaemonConnector, DaemonError}, +}; /// Runs the daemon command. -pub async fn main(command: &DaemonCommand, base: &CommandBase) -> anyhow::Result<()> { +pub async fn daemon_client(command: &DaemonCommand, base: &CommandBase) -> Result<(), DaemonError> { let (can_start_server, can_kill_server) = match command { DaemonCommand::Status { .. 
} => (false, false), DaemonCommand::Restart | DaemonCommand::Stop => (false, true), @@ -59,6 +64,33 @@ pub async fn main(command: &DaemonCommand, base: &CommandBase) -> anyhow::Result Ok(()) } +pub async fn daemon_server(base: &CommandBase, idle_time: &String) -> Result<(), DaemonError> { + let log_file = { + let directories = directories::ProjectDirs::from("com", "turborepo", "turborepo") + .expect("user has a home dir"); + + let folder = AbsoluteSystemPathBuf::new(directories.data_dir()).expect("absolute"); + + let hash = format!("{}-turbo.log", base.repo_hash()); + + let logs = RelativeSystemPathBuf::new("logs").expect("forward relative"); + let file = RelativeSystemPathBuf::new(hash).expect("forward relative"); + + folder.join_relative(logs).join_relative(file) + }; + + let repo_root = AbsoluteSystemPathBuf::new(base.repo_root.clone()).expect("absolute"); + + let timeout = go_parse_duration::parse_duration(idle_time) + .map_err(|_| DaemonError::InvalidTimeout(idle_time.to_owned())) + .map(|d| Duration::from_nanos(d as u64))?; + + let server = crate::daemon::DaemonServer::new(base, timeout, log_file)?; + server.serve(repo_root).await; + + Ok(()) +} + #[derive(serde::Serialize)] pub struct DaemonStatus { pub uptime_ms: u64, diff --git a/crates/turborepo-lib/src/daemon/bump_timeout.rs b/crates/turborepo-lib/src/daemon/bump_timeout.rs new file mode 100644 index 0000000000000..d06d9a41bf2b6 --- /dev/null +++ b/crates/turborepo-lib/src/daemon/bump_timeout.rs @@ -0,0 +1,69 @@ +use std::{ + sync::atomic::{AtomicU64, Ordering}, + time::Duration, +}; + +use tokio::time::Instant; + +/// A timeout that can be bumped forward in time by calling reset. +/// +/// Calling reset with a new duration will change the deadline +/// to the current time plus the new duration. It is non-mutating +/// and can be called from multiple threads. 
+#[derive(Debug)] +pub struct BumpTimeout { + start: Instant, + increment: Duration, + deadline: AtomicU64, +} + +impl BumpTimeout { + pub fn new(increment: Duration) -> Self { + let start = Instant::now(); + let millis = increment.as_millis(); + Self { + start, + deadline: AtomicU64::new(millis as u64), + increment, + } + } + + pub fn duration(&self) -> Duration { + Duration::from_millis(self.deadline.load(Ordering::Relaxed)) + } + + pub fn deadline(&self) -> Instant { + self.start + self.duration() + } + + pub fn elapsed(&self) -> Duration { + self.start.elapsed() + } + + /// Resets the deadline to the current time plus the given duration. + pub fn reset(&self) { + let duration = self.start.elapsed() + self.increment; + self.deadline + .store(duration.as_millis() as u64, Ordering::Relaxed); + } + + pub fn as_instant(&self) -> Instant { + self.start + self.duration() + } + + /// Waits until the deadline is reached, but if the deadline is + /// changed while waiting, it will wait until the new deadline is reached. + pub async fn wait(&self) { + let mut deadline = self.as_instant(); + loop { + tokio::time::sleep_until(deadline).await; + let new_deadline = self.as_instant(); + + if new_deadline > deadline { + deadline = new_deadline; + } else { + break; + } + } + } +} diff --git a/crates/turborepo-lib/src/daemon/bump_timeout_layer.rs b/crates/turborepo-lib/src/daemon/bump_timeout_layer.rs new file mode 100644 index 0000000000000..1cc8f5fba6447 --- /dev/null +++ b/crates/turborepo-lib/src/daemon/bump_timeout_layer.rs @@ -0,0 +1,62 @@ +//! timeout_middleware +//! +//! This is middleware for tonic that integrates with bump_timeout to +//! continually reset the timeout when a request is received. + +use std::sync::Arc; + +use tonic::transport::NamedService; +use tower::{Layer, Service}; + +use super::bump_timeout::BumpTimeout; + +/// A layer that resets a when a request is received. 
+pub struct BumpTimeoutLayer(Arc); + +impl BumpTimeoutLayer { + pub fn new(timeout: Arc) -> Self { + Self(timeout) + } +} + +impl Layer for BumpTimeoutLayer { + type Service = BumpTimeoutService; + + fn layer(&self, inner: S) -> Self::Service { + BumpTimeoutService { + inner, + timeout: self.0.clone(), + } + } +} + +#[derive(Clone)] +pub struct BumpTimeoutService { + inner: S, + timeout: Arc, +} + +impl Service for BumpTimeoutService +where + S: Service, +{ + type Response = S::Response; + type Error = S::Error; + type Future = S::Future; + + fn poll_ready( + &mut self, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.inner.poll_ready(cx) + } + + fn call(&mut self, req: Request) -> Self::Future { + self.timeout.reset(); + self.inner.call(req) + } +} + +impl NamedService for BumpTimeoutService { + const NAME: &'static str = T::NAME; +} diff --git a/crates/turborepo-lib/src/daemon/client.rs b/crates/turborepo-lib/src/daemon/client.rs index 6e8d9dee76ee1..a407a5fc5a1ac 100644 --- a/crates/turborepo-lib/src/daemon/client.rs +++ b/crates/turborepo-lib/src/daemon/client.rs @@ -137,6 +137,15 @@ pub enum DaemonError { /// There was an issue connecting to the daemon. #[error("unable to connect: {0}")] DaemonConnect(#[from] DaemonConnectorError), + /// The timeout specified was invalid. + #[error("invalid timeout specified ({0})")] + InvalidTimeout(String), + /// The server is unable to start file watching. 
+ #[error("unable to start file watching")] + FileWatching(#[from] globwatch::Error), + + #[error("unable to display output: {0}")] + DisplayError(#[from] serde_json::Error), } impl From for DaemonError { diff --git a/crates/turborepo-lib/src/daemon/connector.rs b/crates/turborepo-lib/src/daemon/connector.rs index b2db064432cb0..af4a9b87ec8c4 100644 --- a/crates/turborepo-lib/src/daemon/connector.rs +++ b/crates/turborepo-lib/src/daemon/connector.rs @@ -67,6 +67,7 @@ impl DaemonConnector { const CONNECT_RETRY_MAX: usize = 3; const SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(1); const SOCKET_TIMEOUT: Duration = Duration::from_secs(1); + const SOCKET_ERROR_WAIT: Duration = Duration::from_millis(50); /// Attempt, with retries, to: /// 1. find (or start) the daemon process @@ -86,7 +87,13 @@ impl DaemonConnector { debug!("got daemon with pid: {}", pid); let conn = match self.get_connection(self.sock_file.clone()).await { - Err(DaemonConnectorError::Watcher(_) | DaemonConnectorError::Socket(_)) => continue, + Err(DaemonConnectorError::Watcher(_)) => continue, + Err(DaemonConnectorError::Socket(e)) => { + // assume the server is not yet ready + debug!("socket error: {}", e); + tokio::time::sleep(DaemonConnector::SOCKET_ERROR_WAIT).await; + continue; + } rest => rest?, }; @@ -95,7 +102,7 @@ impl DaemonConnector { match client.handshake().await { Ok(_) => { return { - debug!("connected in {}ms", time.elapsed().as_micros()); + debug!("connected in {}µs", time.elapsed().as_micros()); Ok(client.with_connect_settings(self)) } } diff --git a/crates/turborepo-lib/src/daemon/endpoint.rs b/crates/turborepo-lib/src/daemon/endpoint.rs new file mode 100644 index 0000000000000..c7ab7ef04779d --- /dev/null +++ b/crates/turborepo-lib/src/daemon/endpoint.rs @@ -0,0 +1,172 @@ +use std::{ + io::ErrorKind, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, + time::Duration, +}; + +use futures::Stream; +use log::debug; +use tokio::io::{AsyncRead, AsyncWrite}; +use 
tonic::transport::server::Connected; +use turbopath::{AbsoluteSystemPathBuf, RelativeSystemPathBuf}; + +#[derive(thiserror::Error, Debug)] +pub enum SocketOpenError { + #[error("IO error: {0}")] + Io(#[from] std::io::Error), + #[error("pidlock error")] + LockError(#[from] pidlock::PidlockError), +} + +const WINDOWS_POLL_DURATION: Duration = Duration::from_millis(1); + +/// Gets a stream of incoming connections from a Unix socket. +/// On windows, this will use the `uds_windows` crate, and +/// poll the result in another thread. +/// +/// note: the running param is used by the windows +/// code path to shut down the non-blocking polling +pub async fn open_socket( + path: AbsoluteSystemPathBuf, + running: Arc, +) -> Result< + ( + pidlock::Pidlock, + impl Stream>, + ), + SocketOpenError, +> { + let pid_path = path.join_relative(RelativeSystemPathBuf::new("turbod.pid").unwrap()); + let sock_path = path.join_relative(RelativeSystemPathBuf::new("turbod.sock").unwrap()); + let mut lock = pidlock::Pidlock::new(pid_path.as_path().to_owned()); + + debug!("opening socket at {} {}", pid_path, sock_path); + + // this will fail if the pid is already owned + lock.acquire()?; + std::fs::remove_file(&sock_path).ok(); + + #[cfg(unix)] + { + Ok(( + lock, + tokio_stream::wrappers::UnixListenerStream::new(tokio::net::UnixListener::bind( + sock_path, + )?), + )) + } + + #[cfg(windows)] + { + use tokio_util::compat::FuturesAsyncReadCompatExt; + + let listener = Arc::new(uds_windows::UnixListener::bind(sock_path)?); + listener.set_nonblocking(true)?; + + let stream = futures::stream::unfold(listener, move |listener| { + let task_running = running.clone(); + async move { + // ensure the underlying thread is aborted on drop + let task_listener = listener.clone(); + let task = tokio::task::spawn_blocking(move || loop { + break match task_listener.accept() { + Err(e) if e.kind() == ErrorKind::WouldBlock => { + std::thread::sleep(WINDOWS_POLL_DURATION); + if 
!task_running.load(Ordering::SeqCst) { + None + } else { + continue; + } + } + res => Some(res), + }; + }); + + let result = task + .await + .expect("no panic")? + .map(|(stream, _)| stream) + .and_then(async_io::Async::new) + .map(FuturesAsyncReadCompatExt::compat) + .map(UdsWindowsStream); + + Some((result, listener)) + } + }); + + Ok((lock, stream)) + } +} + +/// An adaptor over uds_windows that implements AsyncRead and AsyncWrite. +/// +/// It utilizes structural pinning to forward async read and write +/// implementations onto the inner type. +#[cfg(windows)] +struct UdsWindowsStream(T); + +#[cfg(windows)] +impl UdsWindowsStream { + /// Project the (pinned) uds windows stream to get the inner (pinned) type + /// + /// SAFETY + /// + /// structural pinning requires a few invariants to hold which can be seen + /// here https://doc.rust-lang.org/std/pin/#pinning-is-structural-for-field + /// + /// in short: + /// - we cannot implement Unpin for UdsWindowsStream + /// - we cannot use repr packed + /// - we cannot move in the drop impl (the default impl doesn't) + /// - we must uphold the rust 'drop guarantee' + /// - we cannot offer any api to move data out of the pinned value (such as + /// Option::take) + fn project(self: std::pin::Pin<&mut Self>) -> std::pin::Pin<&mut T> { + unsafe { self.map_unchecked_mut(|s| &mut s.0) } + } +} + +#[cfg(windows)] +impl AsyncRead for UdsWindowsStream { + fn poll_read( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut tokio::io::ReadBuf<'_>, + ) -> std::task::Poll> { + self.project().poll_read(cx, buf) + } +} + +#[cfg(windows)] +impl AsyncWrite for UdsWindowsStream { + fn poll_write( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> std::task::Poll> { + self.project().poll_write(cx, buf) + } + + fn poll_flush( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.project().poll_flush(cx) + } + + fn 
poll_shutdown( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.project().poll_shutdown(cx) + } +} + +#[cfg(windows)] +impl Connected for UdsWindowsStream { + type ConnectInfo = (); + fn connect_info(&self) -> Self::ConnectInfo {} +} diff --git a/crates/turborepo-lib/src/daemon/mod.rs b/crates/turborepo-lib/src/daemon/mod.rs index 6aacb0ca47add..3dcb85e6100f0 100644 --- a/crates/turborepo-lib/src/daemon/mod.rs +++ b/crates/turborepo-lib/src/daemon/mod.rs @@ -1,5 +1,14 @@ +mod bump_timeout; +mod bump_timeout_layer; mod client; mod connector; +pub(crate) mod endpoint; +mod server; pub use client::{DaemonClient, DaemonError}; pub use connector::DaemonConnector; +pub use server::DaemonServer; + +pub(crate) mod proto { + tonic::include_proto!("turbodprotocol"); +} diff --git a/crates/turborepo-lib/src/daemon/server.rs b/crates/turborepo-lib/src/daemon/server.rs new file mode 100644 index 0000000000000..9861969c71956 --- /dev/null +++ b/crates/turborepo-lib/src/daemon/server.rs @@ -0,0 +1,386 @@ +//! Daemon Server +//! +//! This module houses the daemon server, some implementation notes for which +//! are below. +//! +//! ## Implementation Notes +//! +//! The basic goals of the daemon are to watch for, and be able to provide +//! details about, filesystem changes. It is organised as an async server, which +//! holds a `HashGlobWatcher` which holds data about hashes, globs to watch for +//! that hash, and files that have been updated for that hash. In addition, this +//! server can be interrogated over grpc to register interest in particular +//! globs, and to query for changes for those globs. 
+ +use std::{ + collections::HashSet, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, + time::{Duration, Instant}, +}; + +use globwatch::{StopSource, Watcher}; +use log::error; +use tokio::{ + select, + signal::ctrl_c, + sync::{ + oneshot::{Receiver, Sender}, + Mutex, + }, +}; +use tonic::transport::{NamedService, Server}; +use tower::ServiceBuilder; +use turbopath::{AbsoluteSystemPathBuf, RelativeSystemPathBuf}; + +use super::{ + bump_timeout::BumpTimeout, + endpoint::SocketOpenError, + proto::{self}, + DaemonError, +}; +use crate::{ + commands::CommandBase, daemon::bump_timeout_layer::BumpTimeoutLayer, get_version, + globwatcher::HashGlobWatcher, +}; + +pub struct DaemonServer { + daemon_root: AbsoluteSystemPathBuf, + log_file: AbsoluteSystemPathBuf, + + start_time: Instant, + timeout: Arc, + + watcher: Arc>, + shutdown: Mutex>>, + shutdown_rx: Option>, + + running: Arc, +} + +#[derive(Debug)] +pub enum CloseReason { + Timeout, + Shutdown, + WatcherClosed, + ServerClosed, + Interrupt, + SocketOpenError(SocketOpenError), +} + +impl DaemonServer { + pub fn new( + base: &CommandBase, + timeout: Duration, + log_file: AbsoluteSystemPathBuf, + ) -> Result { + let daemon_root = base.daemon_file_root(); + + let watcher = Arc::new(HashGlobWatcher::new( + AbsoluteSystemPathBuf::new(base.repo_root.clone()).expect("valid repo root"), + daemon_root + .join_relative(RelativeSystemPathBuf::new("flush").expect("valid forward path")) + .as_path() + .to_owned(), + )?); + + let (send_shutdown, recv_shutdown) = tokio::sync::oneshot::channel::<()>(); + + Ok(Self { + daemon_root, + log_file, + + start_time: Instant::now(), + timeout: Arc::new(BumpTimeout::new(timeout)), + + watcher, + shutdown: Mutex::new(Some(send_shutdown)), + shutdown_rx: Some(recv_shutdown), + + running: Arc::new(AtomicBool::new(true)), + }) + } +} + +impl Drop for DaemonServer { + fn drop(&mut self) { + self.running.store(false, Ordering::SeqCst); + } +} + +impl DaemonServer { + /// Serve the daemon 
server, while also watching for filesystem changes. + pub async fn serve(mut self, repo_root: AbsoluteSystemPathBuf) -> CloseReason { + let stop = StopSource::new(); + let watcher = self.watcher.clone(); + let watcher_fut = watcher.watch(repo_root.as_path().to_owned(), stop.token()); + + let timer = self.timeout.clone(); + let timeout_fut = timer.wait(); + + // if shutdown is available, then listen. otherwise just wait forever + let shutdown_rx = self.shutdown_rx.take(); + let shutdown_fut = async move { + match shutdown_rx { + Some(rx) => { + rx.await.ok(); + } + None => { + futures::pending!(); + } + } + }; + + // when one of these futures complete, let the server gracefully shutdown + let mut shutdown_reason = Option::None; + let shutdown_fut = async { + shutdown_reason = select! { + _ = shutdown_fut => Some(CloseReason::Shutdown), + _ = timeout_fut => Some(CloseReason::Timeout), + _ = ctrl_c() => Some(CloseReason::Interrupt), + }; + }; + + tracing::info!("here"); + + #[cfg(feature = "http")] + let server_fut = { + // set up grpc reflection + let efd = include_bytes!("file_descriptor_set.bin"); + let reflection = tonic_reflection::server::Builder::configure() + .register_encoded_file_descriptor_set(efd) + .build() + .unwrap(); + + let service = ServiceBuilder::new() + .layer(BumpTimeoutLayer::new(self.timeout.clone())) + .service(crate::daemon::proto::turbod_server::TurbodServer::new(self)); + + Server::builder() + .add_service(reflection) + .add_service(service) + .serve_with_shutdown("127.0.0.1:5000".parse().unwrap(), shutdown_fut) + }; + + #[cfg(not(feature = "http"))] + let (_lock, server_fut) = { + let (lock, stream) = match crate::daemon::endpoint::open_socket( + self.daemon_root.clone(), + self.running.clone(), + ) + .await + { + Ok(val) => val, + Err(e) => return CloseReason::SocketOpenError(e), + }; + + tracing::info!("starting server"); + + let service = ServiceBuilder::new() + .layer(BumpTimeoutLayer::new(self.timeout.clone())) + 
.service(crate::daemon::proto::turbod_server::TurbodServer::new(self)); + + ( + lock, + Server::builder() + .add_service(service) + .serve_with_incoming_shutdown(stream, shutdown_fut), + ) + }; + + tracing::info!("select!"); + + select! { + _ = server_fut => { + match shutdown_reason { + Some(reason) => reason, + None => CloseReason::ServerClosed, + } + }, + _ = watcher_fut => CloseReason::WatcherClosed, + } + + // here the stop token is dropped, and the pid lock is dropped + // causing them to be cleaned up + } +} + +#[tonic::async_trait] +impl proto::turbod_server::Turbod for DaemonServer { + async fn hello( + &self, + request: tonic::Request, + ) -> Result, tonic::Status> { + if request.into_inner().version != get_version() { + return Err(tonic::Status::unimplemented("version mismatch")); + } else { + Ok(tonic::Response::new(proto::HelloResponse {})) + } + } + + async fn shutdown( + &self, + _request: tonic::Request, + ) -> Result, tonic::Status> { + self.shutdown.lock().await.take().map(|s| s.send(())); + + // if Some(Ok), then the server is shutting down now + // if Some(Err), then the server is already shutting down + // if None, then someone has already called shutdown + Ok(tonic::Response::new(proto::ShutdownResponse {})) + } + + async fn status( + &self, + _request: tonic::Request, + ) -> Result, tonic::Status> { + Ok(tonic::Response::new(proto::StatusResponse { + daemon_status: Some(proto::DaemonStatus { + uptime_msec: self.start_time.elapsed().as_millis() as u64, + log_file: self.log_file.to_str().unwrap().to_string(), + }), + })) + } + + async fn notify_outputs_written( + &self, + request: tonic::Request, + ) -> Result, tonic::Status> { + let inner = request.into_inner(); + + match self + .watcher + .watch_globs( + Arc::new(inner.hash), + inner.output_globs, + inner.output_exclusion_globs, + ) + .await + { + Ok(_) => Ok(tonic::Response::new(proto::NotifyOutputsWrittenResponse {})), + Err(e) => { + error!("failed to watch globs: {:?}", e); + 
Err(tonic::Status::internal("failed to watch globs")) + } + } + } + + async fn get_changed_outputs( + &self, + request: tonic::Request, + ) -> Result, tonic::Status> { + let inner = request.into_inner(); + let changed = self + .watcher + .changed_globs( + &Arc::new(inner.hash), + HashSet::from_iter(inner.output_globs), + ) + .await; + + Ok(tonic::Response::new(proto::GetChangedOutputsResponse { + changed_output_globs: changed.into_iter().collect(), + })) + } +} + +impl NamedService for DaemonServer { + const NAME: &'static str = "turborepo.Daemon"; +} + +#[cfg(test)] +mod test { + use std::{ + assert_matches, + time::{Duration, Instant}, + }; + + use tokio::select; + use turbopath::{AbsoluteSystemPathBuf, RelativeSystemPathBuf}; + + use super::DaemonServer; + use crate::{commands::CommandBase, Args}; + + // the windows runner starts a new thread to accept uds requests, + // so we need a multi-threaded runtime + #[tokio::test(flavor = "multi_thread")] + #[tracing_test::traced_test] + async fn lifecycle() { + let tempdir = tempfile::tempdir().unwrap(); + let path = AbsoluteSystemPathBuf::new(tempdir.path()).unwrap(); + + tracing::info!("start"); + + let daemon = DaemonServer::new( + &CommandBase::new( + Args { + ..Default::default() + }, + path.as_path().to_path_buf(), + "test", + ) + .unwrap(), + Duration::from_secs(60 * 60), + path.clone(), + ) + .unwrap(); + + tracing::info!("server started"); + + let pid_path = path.join_relative(RelativeSystemPathBuf::new("turbod.pid").unwrap()); + let sock_path = path.join_relative(RelativeSystemPathBuf::new("turbod.sock").unwrap()); + + select! 
{ + _ = daemon.serve(path) => panic!("must not close"), + _ = tokio::time::sleep(Duration::from_millis(10)) => (), + } + + tracing::info!("yay we are done"); + + assert!(!pid_path.exists(), "pid file must be deleted"); + assert!(!sock_path.exists(), "socket file must be deleted"); + + tracing::info!("and files cleaned up") + } + + // the windows runner starts a new thread to accept uds requests, + // so we need a multi-threaded runtime + #[tokio::test(flavor = "multi_thread")] + #[tracing_test::traced_test] + async fn timeout() { + let tempdir = tempfile::tempdir().unwrap(); + let path = AbsoluteSystemPathBuf::new(tempdir.path()).unwrap(); + + let daemon = DaemonServer::new( + &CommandBase::new( + Args { + ..Default::default() + }, + path.as_path().to_path_buf(), + "test", + ) + .unwrap(), + Duration::from_millis(5), + path.clone(), + ) + .unwrap(); + + let pid_path = path.join_relative(RelativeSystemPathBuf::new("turbod.pid").unwrap()); + + let now = Instant::now(); + let close_reason = daemon.serve(path).await; + + assert!( + now.elapsed() >= Duration::from_millis(5), + "must wait at least 5ms" + ); + assert_matches::assert_matches!( + super::CloseReason::Timeout, + close_reason, + "must close due to timeout" + ); + assert!(!pid_path.exists(), "pid file must be deleted"); + } +} diff --git a/crates/turborepo-lib/src/globwatcher/mod.rs b/crates/turborepo-lib/src/globwatcher/mod.rs new file mode 100644 index 0000000000000..fdc482da6f6c0 --- /dev/null +++ b/crates/turborepo-lib/src/globwatcher/mod.rs @@ -0,0 +1,642 @@ +use std::{ + collections::{hash_map::Entry, HashMap, HashSet}, + path::{Path, PathBuf}, + sync::{Arc, Mutex, MutexGuard}, +}; + +use futures::{stream::iter, StreamExt}; +use globwatch::{ConfigError, GlobWatcher, StopToken, WatchConfig, Watcher}; +use itertools::Itertools; +use notify::RecommendedWatcher; +use tracing::{trace, warn}; +use turbopath::AbsoluteSystemPathBuf; + +// these aliases are for readability, but they're just strings. 
it may make +// sense to use a newtype wrapper for these types in the future. +type Glob = Arc; +type Hash = Arc; + +/// Tracks changes for a given hash. A hash is a unique identifier for a set of +/// files. Given a hash and a set of globs to track, this will watch for file +/// changes and allow the user to query for changes. Once all globs for a +/// particular hash have changed, that hash is no longer tracked. +#[derive(Clone)] +pub struct HashGlobWatcher { + relative_to: AbsoluteSystemPathBuf, + + /// maintains the list of to watch for a given hash + hash_globs: Arc>>, + + /// maps a glob to the hashes for which this glob hasn't changed + glob_statuses: Arc>>>, + + watcher: Arc>>, + config: WatchConfig, +} + +#[derive(Clone, Debug)] +pub struct GlobSet { + include: HashSet, + exclude: HashSet, +} + +impl HashGlobWatcher { + pub fn new( + relative_to: AbsoluteSystemPathBuf, + flush_folder: PathBuf, + ) -> Result { + let (watcher, config) = GlobWatcher::new(flush_folder)?; + Ok(Self { + relative_to, + hash_globs: Default::default(), + glob_statuses: Default::default(), + watcher: Arc::new(Mutex::new(Some(watcher))), + config, + }) + } +} + +impl HashGlobWatcher { + /// Watches a given path, using the flush_folder as temporary storage to + /// make sure that file events are handled in the appropriate order. 
+ pub async fn watch(&self, root_folder: PathBuf, token: StopToken) {
+ let start_globs = {
+ let lock = self.hash_globs.lock().expect("only fails if poisoned");
+ lock.iter()
+ .flat_map(|(_, g)| &g.include)
+ .cloned()
+ .collect::>()
+ };
+
+ let mut stream = match self.watcher.lock().expect("only fails if poisoned").take() {
+ Some(watcher) => watcher.into_stream(token),
+ None => {
+ warn!("watcher already consumed");
+ return;
+ }
+ };
+
+ // watch all the globs currently in the map
+ for glob in start_globs {
+ self.config.include(&root_folder, &glob).await.ok();
+ }
+
+ while let Some(Ok(event)) = stream.next().await {
+ trace!("event: {:?}", event);
+
+ let repo_relative_paths = event
+ .paths
+ .iter()
+ .filter_map(|path| path.strip_prefix(&root_folder).ok());
+
+ // put these in a block so we can drop the locks before we await
+ let globs_to_exclude = {
+ let glob_statuses = self.glob_statuses.lock().expect("only fails if poisoned");
+ let hash_globs = self.hash_globs.lock().expect("only fails if poisoned");
+
+ // hash globs is unlocked after this
+ let (hash_globs_to_clear, globs_to_exclude) =
+ populate_hash_globs(&glob_statuses, repo_relative_paths, hash_globs);
+
+ // glob_statuses is unlocked after this
+ clear_hash_globs(glob_statuses, hash_globs_to_clear);
+
+ globs_to_exclude
+ };
+
+ for glob in globs_to_exclude {
+ self.config
+ .exclude(self.relative_to.as_path(), &glob)
+ .await
+ .unwrap();
+ }
+ }
+ }
+
+ /// registers a hash with a set of globs to watch for changes
+ pub async fn watch_globs<
+ Iter: IntoIterator,
+ Iter2: IntoIterator,
+ >(
+ &self,
+ hash: Hash,
+ include: Iter,
+ exclude: Iter2,
+ ) -> Result<(), ConfigError> {
+ // wait for the watcher to flush its events
+ // that will ensure that we have seen all filesystem writes
+ // *by the calling client*. Other tasks _could_ write to the
+ // same output directories, however we are relying on task
+ // execution dependencies to prevent that. 
+ self.config.flush().await.unwrap();
+
+ let include: HashSet<_> = include.into_iter().map(Arc::new).collect();
+ let exclude = exclude.into_iter().map(Arc::new).collect();
+
+ let result: Vec<(Glob, ConfigError)> = iter(include.iter())
+ .then(|glob| async move {
+ (
+ glob.clone(),
+ self.config.include(self.relative_to.as_path(), glob).await,
+ )
+ })
+ .filter_map(|(glob, res)| async {
+ match res {
+ Ok(_) => None,
+ Err(err) => Some((glob, err)),
+ }
+ })
+ .collect()
+ .await;
+
+ {
+ let mut glob_statuses = self.glob_statuses.lock().expect("only fails if poisoned");
+ for glob in include.iter() {
+ glob_statuses
+ .entry(glob.clone())
+ .or_default()
+ .insert(hash.clone());
+ }
+ }
+
+ {
+ let mut hash_globs = self.hash_globs.lock().expect("only fails if poisoned");
+ hash_globs.insert(hash.clone(), GlobSet { include, exclude });
+ }
+
+ if !result.is_empty() {
+ // we now 'undo' the failed watches if we encountered errors watching any
+ // globs, and return an error
+
+ let hash_globs_to_clear = result
+ .iter()
+ .map(|(glob, _)| (hash.clone(), glob.clone()))
+ .collect();
+
+ let glob_statuses = self.glob_statuses.lock().expect("only fails if poisoned");
+ // mutex is consumed here
+ clear_hash_globs(glob_statuses, hash_globs_to_clear);
+
+ use ConfigError::*;
+ Err(result
+ .into_iter()
+ .fold(WatchError(vec![]), |acc, (_, err)| {
+ // accumulate any watch errors, but override if the server stopped
+ match (acc, err) {
+ (WatchError(_), ServerStopped) => ServerStopped,
+ (WatchError(files), WatchError(files2)) => {
+ WatchError(files.into_iter().chain(files2).collect())
+ }
+ (err, _) => err,
+ }
+ }))
+ } else {
+ Ok(())
+ }
+ }
+
+ /// given a hash and a set of candidates, return the subset of candidates
+ /// that have changed. 
+ pub async fn changed_globs(
+ &self,
+ hash: &Hash,
+ mut candidates: HashSet,
+ ) -> HashSet {
+ // wait for the watcher to flush its events
+ // that will ensure that we have seen all filesystem writes
+ // *by the calling client*. Other tasks _could_ write to the
+ // same output directories, however we are relying on task
+ // execution dependencies to prevent that.
+ self.config.flush().await.unwrap();
+
+ // hash_globs tracks all unchanged globs for a given hash.
+ // if a hash is not in globs, then either everything has changed
+ // or it was never registered. either way, we return all candidates
+ let hash_globs = self.hash_globs.lock().expect("only fails if poisoned");
+ match hash_globs.get(hash) {
+ Some(glob) => {
+ candidates.retain(|c| !glob.include.contains(c));
+ candidates
+ }
+ None => candidates,
+ }
+ }
+}
+
+/// iterate each path-glob pair and stop tracking globs whose files have
+/// changed. if a path is not a valid utf8 string, it is ignored. this is
+/// okay, because we don't register any paths that are not valid utf8,
+/// since the source globs are valid utf8
+///
+/// returns a list of hash-glob pairs to clear, and a list of globs to exclude
+///
+/// note: we take a mutex guard to make sure that the mutex is dropped
+/// when the function returns
+fn populate_hash_globs<'a>(
+ glob_statuses: &MutexGuard>>,
+ repo_relative_paths: impl Iterator + Clone,
+ mut hash_globs: MutexGuard>,
+) -> (Vec<(Arc, Arc)>, Vec>) {
+ let mut clear_glob_status = vec![];
+ let mut exclude_globs = vec![];
+
+ for ((glob, hash_status), path) in glob_statuses
+ .iter()
+ .cartesian_product(repo_relative_paths)
+ .filter(|((glob, _), path)| {
+ // ignore paths that don't match the glob, or are not valid utf8
+ path.to_str()
+ .map(|s| glob_match::glob_match(glob, s))
+ .unwrap_or(false)
+ })
+ {
+ let mut stop_watching = true;
+
+ for hash in hash_status.iter() {
+ let globs = match hash_globs.get_mut(hash).filter(|globs| {
+ !globs
+ .exclude
+ .iter()
+ 
.any(|f| glob_match::glob_match(f, path.to_str().unwrap())) + }) { + Some(globs) => globs, + None => { + // if we get here, then the hash is excluded by a glob + // so we don't need to stop watching this glob + stop_watching = false; + continue; + } + }; + + // if we get here, we know that the glob has changed for every hash that + // included this glob and is not excluded by a hash's exclusion globs. + // So, we can delete this glob from every hash tracking it as well as stop + // watching this glob. To stop watching, we unref each of the + // directories corresponding to this glob + + // we can stop tracking that glob + globs.include.remove(glob); + if globs.include.is_empty() { + hash_globs.remove(hash); + } + + clear_glob_status.push((hash.clone(), glob.clone())); + } + + if stop_watching { + // store the hash and glob so we can remove it from the glob_status + exclude_globs.push(glob.to_owned()); + } + } + + (clear_glob_status, exclude_globs) +} + +/// given a list of hash-glob pairs to stop tracking, remove them from the +/// map and remove the entry if the set of globs for that hash is empty +/// +/// note: we take a mutex guard to make sure that the mutex is dropped +/// when the function returns +fn clear_hash_globs( + mut glob_status: MutexGuard>>, + hash_globs_to_clear: Vec<(Hash, Glob)>, +) { + for (hash, glob) in hash_globs_to_clear { + if let Entry::Occupied(mut o) = glob_status.entry(glob) { + let val = o.get_mut(); + val.remove(&hash); + if val.is_empty() { + o.remove(); + } + }; + } +} + +#[cfg(test)] +mod test { + use std::{fs::File, sync::Arc}; + + use globwatch::StopSource; + use turbopath::AbsoluteSystemPathBuf; + + fn setup() -> tempdir::TempDir { + let tmp = tempdir::TempDir::new("globwatch").unwrap(); + + let directories = ["my-pkg/dist/distChild", "my-pkg/.next/cache"]; + + let files = [ + "my-pkg/dist/distChild/dist-file", + "my-pkg/dist/dist-file", + "my-pkg/.next/next-file", + "my-pkg/irrelevant", + ]; + + for dir in directories.iter() { 
+ std::fs::create_dir_all(tmp.path().join(dir)).unwrap(); + } + + for file in files.iter() { + std::fs::File::create(tmp.path().join(file)).unwrap(); + } + + tmp + } + + #[tokio::test] + #[tracing_test::traced_test] + async fn track_outputs() { + let dir = setup(); + let flush = tempdir::TempDir::new("globwatch-flush").unwrap(); + let watcher = Arc::new( + super::HashGlobWatcher::new( + AbsoluteSystemPathBuf::new(dir.path()).unwrap(), + flush.path().to_path_buf(), + ) + .unwrap(), + ); + + let stop = StopSource::new(); + + let task_watcher = watcher.clone(); + let watch_dir = dir.path().to_owned(); + let token = stop.token(); + + // dropped when the test ends + let _s = tokio::task::spawn(async move { task_watcher.watch(watch_dir, token).await }); + + let hash = Arc::new("the-hash".to_string()); + let include = ["my-pkg/dist/**".to_string(), "my-pkg/.next/**".to_string()]; + let exclude = ["my-pkg/.next/cache/**".to_string()]; + + println!("{:?} {:?}", include, exclude); + + watcher + .watch_globs( + hash.clone(), + include.clone().into_iter(), + exclude.clone().into_iter(), + ) + .await + .unwrap(); + + let changed = watcher + .changed_globs(&hash, include.clone().into_iter().collect()) + .await; + + assert!( + changed.is_empty(), + "expected no changed globs, got {:?}", + changed + ); + + // change a file that is neither included nor excluded + + File::create(dir.path().join("my-pkg/irrelevant2")).unwrap(); + let changed = watcher + .changed_globs(&hash, include.clone().into_iter().collect()) + .await; + + assert!( + changed.is_empty(), + "expected no changed globs, got {:?}", + changed + ); + + // change a file that is excluded + + File::create(dir.path().join("my-pkg/.next/cache/next-file2")).unwrap(); + let changed = watcher + .changed_globs(&hash, include.clone().into_iter().collect()) + .await; + + assert!( + changed.is_empty(), + "expected no changed globs, got {:?}", + changed + ); + + // change a file that is included + + 
File::create(dir.path().join("my-pkg/dist/dist-file2")).unwrap(); + let changed = watcher + .changed_globs(&hash, include.clone().into_iter().collect()) + .await; + + assert_eq!( + changed, + ["my-pkg/dist/**".to_string()].into_iter().collect(), + "expected one of the globs to have changed" + ); + + // change a file that is included but with a subdirectory that is excluded + // now both globs should be marked as changed + + File::create(dir.path().join("my-pkg/.next/next-file2")).unwrap(); + let changed = watcher + .changed_globs(&hash, include.clone().into_iter().collect()) + .await; + + assert_eq!( + changed, + include.into_iter().collect(), + "expected both globs to have changed" + ); + + assert!( + watcher.hash_globs.lock().unwrap().is_empty(), + "we should no longer be watching any hashes" + ); + + assert!( + watcher.glob_statuses.lock().unwrap().is_empty(), + "we should no longer be watching any globs: {:?}", + watcher.glob_statuses.lock().unwrap() + ); + } + + #[tokio::test] + #[tracing_test::traced_test] + async fn test_multiple_hashes() { + let dir = setup(); + let flush = tempdir::TempDir::new("globwatch-flush").unwrap(); + let watcher = Arc::new( + super::HashGlobWatcher::new( + AbsoluteSystemPathBuf::new(dir.path()).unwrap(), + flush.path().to_path_buf(), + ) + .unwrap(), + ); + + let stop = StopSource::new(); + + let task_watcher = watcher.clone(); + let watch_dir = dir.path().to_owned(); + let token = stop.token(); + + // dropped when the test ends + let _s = tokio::task::spawn(async move { task_watcher.watch(watch_dir, token).await }); + + let hash1 = Arc::new("the-hash-1".to_string()); + let hash2 = Arc::new("the-hash-2".to_string()); + + let globs1_inclusion = ["my-pkg/dist/**".to_string(), "my-pkg/.next/**".to_string()]; + let globs2_inclusion = ["my-pkg/.next/**".to_string()]; + let globs2_exclusion = ["my-pkg/.next/cache/**".to_string()]; + + watcher + .watch_globs( + hash1.clone(), + globs1_inclusion.clone().into_iter(), + vec![].into_iter(), + 
) + .await + .unwrap(); + + watcher + .watch_globs( + hash2.clone(), + globs2_inclusion.clone().into_iter(), + globs2_exclusion.clone().into_iter(), + ) + .await + .unwrap(); + + let changed = watcher + .changed_globs(&hash1, globs1_inclusion.clone().into_iter().collect()) + .await; + + assert!( + changed.is_empty(), + "expected no changed globs, got {:?}", + changed + ); + + let changed = watcher + .changed_globs(&hash2, globs2_inclusion.clone().into_iter().collect()) + .await; + + assert!( + changed.is_empty(), + "expected no changed globs, got {:?}", + changed + ); + + // make a change excluded in only one of the hashes + + File::create(dir.path().join("my-pkg/.next/cache/next-file2")).unwrap(); + let changed = watcher + .changed_globs(&hash1, globs1_inclusion.clone().into_iter().collect()) + .await; + + assert_eq!( + changed, + ["my-pkg/.next/**".to_string()].into_iter().collect(), + "expected one of the globs to have changed" + ); + + let changed = watcher + .changed_globs(&hash2, globs2_inclusion.clone().into_iter().collect()) + .await; + + assert!( + changed.is_empty(), + "expected no changed globs, got {:?}", + changed + ); + + // make a change for the other hash + + File::create(dir.path().join("my-pkg/.next/next-file2")).unwrap(); + let changed = watcher + .changed_globs(&hash2, globs2_inclusion.clone().into_iter().collect()) + .await; + + assert_eq!( + changed, + ["my-pkg/.next/**".to_string()].into_iter().collect(), + "expected one of the globs to have changed" + ); + + assert_eq!( + watcher.hash_globs.lock().unwrap().keys().len(), + 1, + "we should be watching one hash, got {:?}", + watcher.hash_globs.lock().unwrap() + ); + + assert_eq!( + watcher.glob_statuses.lock().unwrap().keys().len(), + 1, + "we should be watching one glob, got {:?}", + watcher.glob_statuses.lock().unwrap() + ); + } + + #[tokio::test] + #[tracing_test::traced_test] + async fn watch_single_file() { + let dir = setup(); + let flush = 
tempdir::TempDir::new("globwatch-flush").unwrap(); + let watcher = Arc::new( + super::HashGlobWatcher::new( + AbsoluteSystemPathBuf::new(dir.path()).unwrap(), + flush.path().to_path_buf(), + ) + .unwrap(), + ); + + let stop = StopSource::new(); + + let task_watcher = watcher.clone(); + let watch_dir = dir.path().to_owned(); + let token = stop.token(); + + // dropped when the test ends + let _s = tokio::task::spawn(async move { task_watcher.watch(watch_dir, token).await }); + + let hash = Arc::new("the-hash".to_string()); + let inclusions = ["my-pkg/.next/next-file".to_string()]; + + watcher + .watch_globs( + hash.clone(), + inclusions.clone().into_iter(), + vec![].into_iter(), + ) + .await + .unwrap(); + + File::create(dir.path().join("my-pkg/.next/irrelevant")).unwrap(); + let changed = watcher + .changed_globs(&hash, inclusions.clone().into_iter().collect()) + .await; + + assert!( + changed.is_empty(), + "expected no changed globs, got {:?}", + changed + ); + + File::create(dir.path().join("my-pkg/.next/next-file")).unwrap(); + let changed = watcher + .changed_globs(&hash, inclusions.clone().into_iter().collect()) + .await; + + assert_eq!( + changed, + inclusions.clone().into_iter().collect(), + "expected one of the globs to have changed" + ); + + assert!( + watcher.hash_globs.lock().unwrap().is_empty(), + "we should no longer be watching any hashes" + ); + + assert!( + watcher.glob_statuses.lock().unwrap().is_empty(), + "we should no longer be watching any globs: {:?}", + watcher.glob_statuses.lock().unwrap() + ); + } +} diff --git a/crates/turborepo-lib/src/lib.rs b/crates/turborepo-lib/src/lib.rs index c7e8d4a44f139..7aa29855de0ae 100644 --- a/crates/turborepo-lib/src/lib.rs +++ b/crates/turborepo-lib/src/lib.rs @@ -5,6 +5,7 @@ mod cli; mod commands; mod config; mod daemon; +pub(crate) mod globwatcher; mod package_manager; mod shim; mod ui; diff --git a/crates/turborepo/Cargo.toml b/crates/turborepo/Cargo.toml index 5fafeaa8baf31..3e170740b28ed 100644 --- 
a/crates/turborepo/Cargo.toml +++ b/crates/turborepo/Cargo.toml @@ -12,6 +12,7 @@ license = "MPL-2.0" default = ["rustls-tls"] native-tls = ["turborepo-lib/native-tls"] rustls-tls = ["turborepo-lib/rustls-tls"] +http = ["turborepo-lib/http"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [build-dependencies] From 7f381a1f099aaa8966b788201c40dc13e5e23e9e Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Tue, 25 Apr 2023 10:57:38 +0200 Subject: [PATCH 03/24] fix error reporting for get_from_source errors (#4690) ### Description We never reported issues from get_from_source. This leads to cryptic fatal errors from get_from_source without further details. Now we report the actual issues with show the details. --- crates/turbopack-dev-server/src/http.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/turbopack-dev-server/src/http.rs b/crates/turbopack-dev-server/src/http.rs index 768f96d219d3b..681b43196d154 100644 --- a/crates/turbopack-dev-server/src/http.rs +++ b/crates/turbopack-dev-server/src/http.rs @@ -15,10 +15,13 @@ use turbo_tasks_bytes::Bytes; use turbo_tasks_fs::{FileContent, FileContentReadRef}; use turbopack_core::{asset::AssetContent, issue::IssueReporterVc, version::VersionedContent}; -use crate::source::{ - request::SourceRequest, - resolve::{resolve_source_request, ResolveSourceRequestResult}, - Body, ContentSourceVc, HeaderListReadRef, ProxyResultReadRef, +use crate::{ + handle_issues, + source::{ + request::SourceRequest, + resolve::{resolve_source_request, ResolveSourceRequestResult}, + Body, ContentSourceVc, HeaderListReadRef, ProxyResultReadRef, + }, }; #[turbo_tasks::value(serialization = "none")] @@ -75,6 +78,7 @@ pub async fn process_request_with_content_source( let original_path = request.uri().path().to_string(); let request = http_request_to_source_request(request).await?; let result = get_from_source(source, TransientInstance::new(request), 
issue_reporter); + handle_issues(result, &original_path, "get_from_source", issue_reporter).await?; match &*result.strongly_consistent().await? { GetFromSourceResult::Static { content, From 0235c889899de98e4eddb4223ba3747ec46eed84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Donny/=EA=B0=95=EB=8F=99=EC=9C=A4?= Date: Tue, 25 Apr 2023 19:13:22 +0900 Subject: [PATCH 04/24] feat(turbopack): Consider cycles while spliting modules (#4595) ### Description This is optimization for modules with cyclic dependencies between AST nodes. - Fixes WEB-706. ### Testing Instructions Look at `output.mdx` using GitHub diff view, and you can see it rendered by selecting `Open file` from the context menu. --------- Co-authored-by: Alex Kirszenberg --- .../src/tree_shake/graph.rs | 21 +- .../tests/tree-shaker/analyzer/3/config.json | 6 + .../tests/tree-shaker/analyzer/3/input.js | 38 ++ .../tests/tree-shaker/analyzer/3/output.md | 612 ++++++++++++++++++ .../tree-shaker/analyzer/complex/output.md | 202 +++--- 5 files changed, 792 insertions(+), 87 deletions(-) create mode 100644 crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/config.json create mode 100644 crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/input.js create mode 100644 crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/output.md diff --git a/crates/turbopack-ecmascript/src/tree_shake/graph.rs b/crates/turbopack-ecmascript/src/tree_shake/graph.rs index 1fc85723761cb..3c59280c546ff 100644 --- a/crates/turbopack-ecmascript/src/tree_shake/graph.rs +++ b/crates/turbopack-ecmascript/src/tree_shake/graph.rs @@ -370,9 +370,6 @@ impl DepGraph { global_done: &mut FxHashSet, group_done: &mut FxHashSet, ) -> bool { - // TODO(WEB-706): Consider cycles - // - let mut changed = false; // Check deps of `start`. 
@@ -416,6 +413,24 @@ impl DepGraph { } } + // Cycles should form a separate group + for id in self.g.graph_ix.iter() { + let ix = self.g.get_node(id); + + if let Some(cycle) = cycles.iter().find(|v| v.contains(&ix)) { + if cycle.iter().all(|v| !global_done.contains(v)) { + let ids = cycle + .iter() + .map(|&ix| self.g.graph_ix[ix as usize].clone()) + .collect::>(); + + global_done.extend(cycle.iter().copied()); + + groups.push((ids, Default::default())); + } + } + } + // Expand **starting** nodes for (ix, id) in self.g.graph_ix.iter().enumerate() { // If a node is reachable from two or more nodes, it should be in a diff --git a/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/config.json b/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/config.json new file mode 100644 index 0000000000000..ca4995f8a4a3c --- /dev/null +++ b/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/config.json @@ -0,0 +1,6 @@ +{ + "exports":[ + ["c1_3"], + ["c1_3", "c2_2"] + ] +} \ No newline at end of file diff --git a/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/input.js b/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/input.js new file mode 100644 index 0000000000000..ac8511ed5874e --- /dev/null +++ b/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/input.js @@ -0,0 +1,38 @@ + + +function d1() { } + + +function d2() { } + +function d3() { } + + + + +export function c1_1() { + return c1_2() +} + +function c1_2() { + return c1_3(d1) +} +export function c1_3() { + return c1_1(d2) +} + + +function c2_1() { + return c2_2(d3) +} + +export function c2_2() { + return c2_3() +} +function c2_3() { + return c2_1() +} + + +c1_3() +c2_2() \ No newline at end of file diff --git a/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/output.md b/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/output.md new file mode 100644 index 0000000000000..650453250f4ec --- /dev/null +++ 
b/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/3/output.md @@ -0,0 +1,612 @@ +# Items + +Count: 15 + +## Item 1: Stmt 0, `Normal` + +```js +function d1() {} + +``` + +- Hoisted +- Declares: `d1` + +## Item 2: Stmt 1, `Normal` + +```js +function d2() {} + +``` + +- Hoisted +- Declares: `d2` + +## Item 3: Stmt 2, `Normal` + +```js +function d3() {} + +``` + +- Hoisted +- Declares: `d3` + +## Item 4: Stmt 3, `Normal` + +```js +export function c1_1() { + return c1_2(); +} + +``` + +- Hoisted +- Declares: `c1_1` +- Reads (eventual): `c1_2` + +## Item 5: Stmt 4, `Normal` + +```js +function c1_2() { + return c1_3(d1); +} + +``` + +- Hoisted +- Declares: `c1_2` +- Reads (eventual): `c1_3`, `d1` + +## Item 6: Stmt 5, `Normal` + +```js +export function c1_3() { + return c1_1(d2); +} + +``` + +- Hoisted +- Declares: `c1_3` +- Reads (eventual): `c1_1`, `d2` + +## Item 7: Stmt 6, `Normal` + +```js +function c2_1() { + return c2_2(d3); +} + +``` + +- Hoisted +- Declares: `c2_1` +- Reads (eventual): `c2_2`, `d3` + +## Item 8: Stmt 7, `Normal` + +```js +export function c2_2() { + return c2_3(); +} + +``` + +- Hoisted +- Declares: `c2_2` +- Reads (eventual): `c2_3` + +## Item 9: Stmt 8, `Normal` + +```js +function c2_3() { + return c2_1(); +} + +``` + +- Hoisted +- Declares: `c2_3` +- Reads (eventual): `c2_1` + +## Item 10: Stmt 9, `Normal` + +```js +c1_3(); + +``` + +- Side effects +- Reads: `c1_3` + +## Item 11: Stmt 10, `Normal` + +```js +c2_2(); + +``` + +- Side effects +- Reads: `c2_2` + +# Phase 1 +```mermaid +graph TD + Item1; + Item2; + Item3; + Item4; + Item5; + Item6; + Item7; + Item8; + Item9; + Item10; + Item11; + Item12; + Item12["ModuleEvaluation"]; + Item13; + Item13["export c1_1"]; + Item14; + Item14["export c1_3"]; + Item15; + Item15["export c2_2"]; +``` +# Phase 2 +```mermaid +graph TD + Item1; + Item2; + Item3; + Item4; + Item5; + Item6; + Item7; + Item8; + Item9; + Item10; + Item11; + Item12; + Item12["ModuleEvaluation"]; + Item13; + Item13["export 
c1_1"]; + Item14; + Item14["export c1_3"]; + Item15; + Item15["export c2_2"]; + Item10 --> Item6; + Item10 -.-> Item5; + Item10 -.-> Item1; + Item10 -.-> Item4; + Item10 -.-> Item2; + Item10 -.-> Item8; + Item10 -.-> Item3; + Item10 -.-> Item9; + Item10 -.-> Item7; + Item11 --> Item8; + Item11 --> Item10; + Item11 -.-> Item5; + Item11 -.-> Item6; + Item11 -.-> Item1; + Item11 -.-> Item4; + Item11 -.-> Item2; + Item11 -.-> Item3; + Item11 -.-> Item9; + Item11 -.-> Item7; +``` +# Phase 3 +```mermaid +graph TD + Item1; + Item2; + Item3; + Item4; + Item5; + Item6; + Item7; + Item8; + Item9; + Item10; + Item11; + Item12; + Item12["ModuleEvaluation"]; + Item13; + Item13["export c1_1"]; + Item14; + Item14["export c1_3"]; + Item15; + Item15["export c2_2"]; + Item10 --> Item6; + Item10 -.-> Item5; + Item10 -.-> Item1; + Item10 -.-> Item4; + Item10 -.-> Item2; + Item10 -.-> Item8; + Item10 -.-> Item3; + Item10 -.-> Item9; + Item10 -.-> Item7; + Item11 --> Item8; + Item11 --> Item10; + Item11 -.-> Item5; + Item11 -.-> Item6; + Item11 -.-> Item1; + Item11 -.-> Item4; + Item11 -.-> Item2; + Item11 -.-> Item3; + Item11 -.-> Item9; + Item11 -.-> Item7; + Item4 --> Item5; + Item5 --> Item6; + Item5 --> Item1; + Item6 --> Item4; + Item6 --> Item2; + Item7 --> Item8; + Item7 --> Item3; + Item8 --> Item9; + Item9 --> Item7; +``` +# Phase 4 +```mermaid +graph TD + Item1; + Item2; + Item3; + Item4; + Item5; + Item6; + Item7; + Item8; + Item9; + Item10; + Item11; + Item12; + Item12["ModuleEvaluation"]; + Item13; + Item13["export c1_1"]; + Item14; + Item14["export c1_3"]; + Item15; + Item15["export c2_2"]; + Item10 --> Item6; + Item10 -.-> Item5; + Item10 -.-> Item1; + Item10 -.-> Item4; + Item10 -.-> Item2; + Item10 -.-> Item8; + Item10 -.-> Item3; + Item10 -.-> Item9; + Item10 -.-> Item7; + Item11 --> Item8; + Item11 --> Item10; + Item11 -.-> Item5; + Item11 -.-> Item6; + Item11 -.-> Item1; + Item11 -.-> Item4; + Item11 -.-> Item2; + Item11 -.-> Item3; + Item11 -.-> Item9; + Item11 
-.-> Item7; + Item4 --> Item5; + Item5 --> Item6; + Item5 --> Item1; + Item6 --> Item4; + Item6 --> Item2; + Item7 --> Item8; + Item7 --> Item3; + Item8 --> Item9; + Item9 --> Item7; + Item12 --> Item10; + Item12 --> Item11; + Item13 --> Item4; + Item14 --> Item6; + Item15 --> Item8; +``` +# Final +```mermaid +graph TD + N0["Items: [ItemId(ModuleEvaluation), ItemId(9, Normal), ItemId(10, Normal)]"]; + N1["Items: [ItemId(Export((Atom('c1_1' type=inline), #0)))]"]; + N2["Items: [ItemId(Export((Atom('c1_3' type=inline), #0)))]"]; + N3["Items: [ItemId(Export((Atom('c2_2' type=inline), #0)))]"]; + N4["Items: [ItemId(3, Normal), ItemId(4, Normal), ItemId(5, Normal)]"]; + N5["Items: [ItemId(6, Normal), ItemId(7, Normal), ItemId(8, Normal)]"]; + N6["Items: [ItemId(0, Normal)]"]; + N7["Items: [ItemId(1, Normal)]"]; + N8["Items: [ItemId(2, Normal)]"]; + N0 --> N4; + N0 --> N6; + N0 --> N7; + N0 --> N5; + N0 --> N8; + N1 --> N4; + N2 --> N4; + N3 --> N5; + N4 --> N6; + N4 --> N7; + N5 --> N8; +``` +# Modules (dev) +## Part 0 +```js +import { c1_3 } from "entry.js" assert { + __turbopack_chunk__: 4 +}; +import "entry.js" assert { + __turbopack_chunk__: 6 +}; +import "entry.js" assert { + __turbopack_chunk__: 7 +}; +import { c2_2 } from "entry.js" assert { + __turbopack_chunk__: 5 +}; +import "entry.js" assert { + __turbopack_chunk__: 8 +}; +"module evaluation"; +c1_3(); +c2_2(); + +``` +## Part 1 +```js +import { c1_1 } from "entry.js" assert { + __turbopack_chunk__: 4 +}; +export { c1_1 }; + +``` +## Part 2 +```js +import { c1_3 } from "entry.js" assert { + __turbopack_chunk__: 4 +}; +export { c1_3 }; + +``` +## Part 3 +```js +import { c2_2 } from "entry.js" assert { + __turbopack_chunk__: 5 +}; +export { c2_2 }; + +``` +## Part 4 +```js +import { d1 } from "entry.js" assert { + __turbopack_chunk__: 6 +}; +import { d2 } from "entry.js" assert { + __turbopack_chunk__: 7 +}; +function c1_1() { + return c1_2(); +} +function c1_2() { + return c1_3(d1); +} +function c1_3() { + 
return c1_1(d2); +} + +``` +## Part 5 +```js +import { d3 } from "entry.js" assert { + __turbopack_chunk__: 8 +}; +function c2_1() { + return c2_2(d3); +} +function c2_2() { + return c2_3(); +} +function c2_3() { + return c2_1(); +} + +``` +## Part 6 +```js +function d1() {} + +``` +## Part 7 +```js +function d2() {} + +``` +## Part 8 +```js +function d3() {} + +``` +## Merged (module eval) +```js +function d1() {} +function d2() {} +function c1_1() { + return c1_2(); +} +function c1_2() { + return c1_3(d1); +} +function c1_3() { + return c1_1(d2); +} +function d3() {} +function c2_1() { + return c2_2(d3); +} +function c2_2() { + return c2_3(); +} +function c2_3() { + return c2_1(); +} +"module evaluation"; +c1_3(); +c2_2(); + +``` +# Modules (prod) +## Part 0 +```js +import { c1_3 } from "entry.js" assert { + __turbopack_chunk__: 4 +}; +import { c2_2 } from "entry.js" assert { + __turbopack_chunk__: 5 +}; +"module evaluation"; +c1_3(); +c2_2(); + +``` +## Part 1 +```js +import { c1_1 } from "entry.js" assert { + __turbopack_chunk__: 4 +}; +export { c1_1 }; + +``` +## Part 2 +```js +import { c1_3 } from "entry.js" assert { + __turbopack_chunk__: 4 +}; +export { c1_3 }; + +``` +## Part 3 +```js +import { c2_2 } from "entry.js" assert { + __turbopack_chunk__: 5 +}; +export { c2_2 }; + +``` +## Part 4 +```js +import { d1 } from "entry.js" assert { + __turbopack_chunk__: 6 +}; +import { d2 } from "entry.js" assert { + __turbopack_chunk__: 7 +}; +function c1_1() { + return c1_2(); +} +function c1_2() { + return c1_3(d1); +} +function c1_3() { + return c1_1(d2); +} + +``` +## Part 5 +```js +import { d3 } from "entry.js" assert { + __turbopack_chunk__: 8 +}; +function c2_1() { + return c2_2(d3); +} +function c2_2() { + return c2_3(); +} +function c2_3() { + return c2_1(); +} + +``` +## Part 6 +```js +function d1() {} + +``` +## Part 7 +```js +function d2() {} + +``` +## Part 8 +```js +function d3() {} + +``` +## Merged (module eval) +```js +function d1() {} +function d2() 
{} +function c1_1() { + return c1_2(); +} +function c1_2() { + return c1_3(d1); +} +function c1_3() { + return c1_1(d2); +} +function d3() {} +function c2_1() { + return c2_2(d3); +} +function c2_2() { + return c2_3(); +} +function c2_3() { + return c2_1(); +} +"module evaluation"; +c1_3(); +c2_2(); + +``` +## Merged (c1_3) +```js +function d1() {} +function d2() {} +function c1_1() { + return c1_2(); +} +function c1_2() { + return c1_3(d1); +} +function c1_3() { + return c1_1(d2); +} +export { c1_3 }; + +``` +## Merged (c1_3,c2_2) +```js +function d1() {} +function d2() {} +function c1_1() { + return c1_2(); +} +function c1_2() { + return c1_3(d1); +} +function c1_3() { + return c1_1(d2); +} +export { c1_3 }; +function d3() {} +function c2_1() { + return c2_2(d3); +} +function c2_2() { + return c2_3(); +} +function c2_3() { + return c2_1(); +} +export { c2_2 }; + +``` diff --git a/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/complex/output.md b/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/complex/output.md index 32ad876e077c1..8fa78129359f0 100644 --- a/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/complex/output.md +++ b/crates/turbopack-ecmascript/tests/tree-shaker/analyzer/complex/output.md @@ -349,99 +349,83 @@ graph TD ```mermaid graph TD N0["Items: [ItemId(ModuleEvaluation)]"]; - N1["Items: [ItemId(Export((Atom('dogRef' type=inline), #0))), ItemId(3, Normal), ItemId(6, Normal), ItemId(9, VarDeclarator(0))]"]; + N1["Items: [ItemId(Export((Atom('dogRef' type=inline), #0)))]"]; N2["Items: [ItemId(Export((Atom('cat' type=inline), #0)))]"]; N3["Items: [ItemId(Export((Atom('initialCat' type=dynamic), #0))), ItemId(11, VarDeclarator(0))]"]; N4["Items: [ItemId(Export((Atom('getChimera' type=dynamic), #0))), ItemId(12, Normal)]"]; - N5["Items: [ItemId(0, VarDeclarator(0))]"]; - N6["Items: [ItemId(1, Normal)]"]; - N7["Items: [ItemId(2, Normal)]"]; - N8["Items: [ItemId(4, Normal)]"]; - N9["Items: [ItemId(5, Normal)]"]; - N10["Items: 
[ItemId(7, Normal)]"]; - N11["Items: [ItemId(8, Normal)]"]; - N12["Items: [ItemId(10, VarDeclarator(0))]"]; - N0 --> N7; - N0 --> N9; + N5["Items: [ItemId(6, Normal), ItemId(9, VarDeclarator(0))]"]; + N6["Items: [ItemId(0, VarDeclarator(0))]"]; + N7["Items: [ItemId(1, Normal)]"]; + N8["Items: [ItemId(2, Normal)]"]; + N9["Items: [ItemId(3, Normal)]"]; + N10["Items: [ItemId(4, Normal)]"]; + N11["Items: [ItemId(5, Normal)]"]; + N12["Items: [ItemId(7, Normal)]"]; + N13["Items: [ItemId(8, Normal)]"]; + N14["Items: [ItemId(10, VarDeclarator(0))]"]; + N0 --> N8; N0 --> N11; + N0 --> N13; N1 --> N5; - N1 --> N6; - N1 --> N8; - N1 --> N10; - N1 --> N11; - N2 --> N12; - N3 --> N12; - N4 --> N12; - N4 --> N5; + N2 --> N14; + N3 --> N14; + N4 --> N14; N4 --> N6; - N4 --> N8; + N4 --> N7; N4 --> N10; - N7 --> N5; - N7 --> N6; + N4 --> N12; + N5 --> N13; + N5 --> N6; + N5 --> N7; + N5 --> N10; + N5 --> N12; + N5 --> N9; + N8 --> N6; N8 --> N7; - N9 --> N5; N9 --> N6; - N9 --> N8; N9 --> N7; - N10 --> N9; - N11 --> N5; + N9 --> N10; + N9 --> N12; + N10 --> N8; N11 --> N6; - N11 --> N8; - N11 --> N10; N11 --> N7; - N11 --> N9; + N11 --> N10; + N11 --> N8; + N12 --> N11; + N13 --> N6; + N13 --> N7; + N13 --> N10; + N13 --> N12; + N13 --> N8; + N13 --> N11; ``` # Modules (dev) ## Part 0 ```js import "entry.js" assert { - __turbopack_chunk__: 7 + __turbopack_chunk__: 8 }; import "entry.js" assert { - __turbopack_chunk__: 9 + __turbopack_chunk__: 11 }; import "entry.js" assert { - __turbopack_chunk__: 11 + __turbopack_chunk__: 13 }; "module evaluation"; ``` ## Part 1 ```js -import { dog } from "entry.js" assert { +import { dogRef } from "entry.js" assert { __turbopack_chunk__: 5 }; -import "entry.js" assert { - __turbopack_chunk__: 6 -}; -import "entry.js" assert { - __turbopack_chunk__: 8 -}; -import "entry.js" assert { - __turbopack_chunk__: 10 -}; -import "entry.js" assert { - __turbopack_chunk__: 11 -}; -export { dogRef }; -function getDog() { - return dog; -} -function 
setDog(newDog) { - dog = newDog; -} -const dogRef = { - initial: dog, - get: getDog, - set: setDog -}; export { dogRef }; ``` ## Part 2 ```js import { cat } from "entry.js" assert { - __turbopack_chunk__: 12 + __turbopack_chunk__: 14 }; export { cat }; @@ -449,7 +433,7 @@ export { cat }; ## Part 3 ```js import { cat } from "entry.js" assert { - __turbopack_chunk__: 12 + __turbopack_chunk__: 14 }; export { initialCat }; const initialCat = cat; @@ -459,20 +443,20 @@ export { initialCat }; ## Part 4 ```js import { cat } from "entry.js" assert { - __turbopack_chunk__: 12 + __turbopack_chunk__: 14 }; import { dog } from "entry.js" assert { - __turbopack_chunk__: 5 -}; -import "entry.js" assert { __turbopack_chunk__: 6 }; import "entry.js" assert { - __turbopack_chunk__: 8 + __turbopack_chunk__: 7 }; import "entry.js" assert { __turbopack_chunk__: 10 }; +import "entry.js" assert { + __turbopack_chunk__: 12 +}; export { getChimera }; function getChimera() { return cat + dog; @@ -481,86 +465,136 @@ function getChimera() { ``` ## Part 5 ```js +import "entry.js" assert { + __turbopack_chunk__: 13 +}; +import { dog } from "entry.js" assert { + __turbopack_chunk__: 6 +}; +import "entry.js" assert { + __turbopack_chunk__: 7 +}; +import "entry.js" assert { + __turbopack_chunk__: 10 +}; +import "entry.js" assert { + __turbopack_chunk__: 12 +}; +import { getDog } from "entry.js" assert { + __turbopack_chunk__: 9 +}; +function setDog(newDog) { + dog = newDog; +} +const dogRef = { + initial: dog, + get: getDog, + set: setDog +}; +export { dogRef }; + +``` +## Part 6 +```js let dog = "dog"; export { dog }; ``` -## Part 6 +## Part 7 ```js dog += "!"; export { dog }; ``` -## Part 7 +## Part 8 ```js import { dog } from "entry.js" assert { - __turbopack_chunk__: 5 + __turbopack_chunk__: 6 }; import "entry.js" assert { - __turbopack_chunk__: 6 + __turbopack_chunk__: 7 }; console.log(dog); ``` -## Part 8 +## Part 9 ```js +import { dog } from "entry.js" assert { + __turbopack_chunk__: 6 +}; 
import "entry.js" assert { __turbopack_chunk__: 7 }; +import "entry.js" assert { + __turbopack_chunk__: 10 +}; +import "entry.js" assert { + __turbopack_chunk__: 12 +}; +function getDog() { + return dog; +} + +``` +## Part 10 +```js +import "entry.js" assert { + __turbopack_chunk__: 8 +}; dog += "!"; export { dog }; ``` -## Part 9 +## Part 11 ```js import { dog } from "entry.js" assert { - __turbopack_chunk__: 5 + __turbopack_chunk__: 6 }; import "entry.js" assert { - __turbopack_chunk__: 6 + __turbopack_chunk__: 7 }; import "entry.js" assert { - __turbopack_chunk__: 8 + __turbopack_chunk__: 10 }; import "entry.js" assert { - __turbopack_chunk__: 7 + __turbopack_chunk__: 8 }; console.log(dog); ``` -## Part 10 +## Part 12 ```js import "entry.js" assert { - __turbopack_chunk__: 9 + __turbopack_chunk__: 11 }; dog += "!"; export { dog }; ``` -## Part 11 +## Part 13 ```js import { dog } from "entry.js" assert { - __turbopack_chunk__: 5 -}; -import "entry.js" assert { __turbopack_chunk__: 6 }; import "entry.js" assert { - __turbopack_chunk__: 8 + __turbopack_chunk__: 7 }; import "entry.js" assert { __turbopack_chunk__: 10 }; import "entry.js" assert { - __turbopack_chunk__: 7 + __turbopack_chunk__: 12 }; import "entry.js" assert { - __turbopack_chunk__: 9 + __turbopack_chunk__: 8 +}; +import "entry.js" assert { + __turbopack_chunk__: 11 }; console.log(dog); ``` -## Part 12 +## Part 14 ```js let cat = "cat"; export { cat }; From 73ee47c4ff27ca67c55bbd4e074e66c645c47016 Mon Sep 17 00:00:00 2001 From: Mehul Kar Date: Tue, 25 Apr 2023 08:02:16 -0700 Subject: [PATCH 05/24] Use spaceID from turbo.json if available (#4687) The flag will takes precedence over config --- cli/internal/fs/turbo_json.go | 57 +++++++++++++++++++++++------------ cli/internal/run/run.go | 6 ++++ 2 files changed, 43 insertions(+), 20 deletions(-) diff --git a/cli/internal/fs/turbo_json.go b/cli/internal/fs/turbo_json.go index 71ef29d391da8..9887061cf3c9b 100644 --- a/cli/internal/fs/turbo_json.go +++ 
b/cli/internal/fs/turbo_json.go @@ -22,6 +22,12 @@ const ( topologicalPipelineDelimiter = "^" ) +// SpaceConfig is used to marshal and unmarshal the +// `experimentalSpaceId` field in a turbo.json +type SpaceConfig struct { + ID string `json:"id"` +} + type rawTurboJSON struct { // Global root filesystem dependencies GlobalDependencies []string `json:"globalDependencies,omitempty"` @@ -39,9 +45,12 @@ type rawTurboJSON struct { // Extends can be the name of another workspace Extends []string `json:"extends,omitempty"` + + // Configuration for the space + Space *SpaceConfig `json:"experimentalSpaces,omitempty"` } -// pristineTurboJSON is used when marshaling a TurboJSON object into a turbo.json string +// pristineTurboJSON is used when marshaling a TurboJSON object into a json string // Notably, it includes a PristinePipeline instead of the regular Pipeline. (i.e. TaskDefinition // instead of BookkeepingTaskDefinition.) type pristineTurboJSON struct { @@ -51,6 +60,7 @@ type pristineTurboJSON struct { Pipeline PristinePipeline `json:"pipeline"` RemoteCacheOptions RemoteCacheOptions `json:"remoteCache,omitempty"` Extends []string `json:"extends,omitempty"` + Space *SpaceConfig `json:"experimentalSpaces,omitempty"` } // TurboJSON represents a turbo.json configuration file @@ -60,9 +70,8 @@ type TurboJSON struct { GlobalPassthroughEnv []string Pipeline Pipeline RemoteCacheOptions RemoteCacheOptions - - // A list of Workspace names - Extends []string + Extends []string // A list of Workspace names + SpaceID string } // RemoteCacheOptions is a struct for deserializing .remoteCache of configFile @@ -611,7 +620,7 @@ func (c TaskDefinition) MarshalJSON() ([]byte, error) { } // UnmarshalJSON deserializes the contents of turbo.json into a TurboJSON struct -func (c *TurboJSON) UnmarshalJSON(data []byte) error { +func (tj *TurboJSON) UnmarshalJSON(data []byte) error { raw := &rawTurboJSON{} if err := json.Unmarshal(data, &raw); err != nil { return err @@ -642,21 +651,25 @@ func 
(c *TurboJSON) UnmarshalJSON(data []byte) error { } // turn the set into an array and assign to the TurboJSON struct fields. - c.GlobalEnv = envVarDependencies.UnsafeListOfStrings() - sort.Strings(c.GlobalEnv) + tj.GlobalEnv = envVarDependencies.UnsafeListOfStrings() + sort.Strings(tj.GlobalEnv) if raw.GlobalPassthroughEnv != nil { - c.GlobalPassthroughEnv = envVarPassthroughs.UnsafeListOfStrings() - sort.Strings(c.GlobalPassthroughEnv) + tj.GlobalPassthroughEnv = envVarPassthroughs.UnsafeListOfStrings() + sort.Strings(tj.GlobalPassthroughEnv) } - c.GlobalDeps = globalFileDependencies.UnsafeListOfStrings() - sort.Strings(c.GlobalDeps) + tj.GlobalDeps = globalFileDependencies.UnsafeListOfStrings() + sort.Strings(tj.GlobalDeps) // copy these over, we don't need any changes here. - c.Pipeline = raw.Pipeline - c.RemoteCacheOptions = raw.RemoteCacheOptions - c.Extends = raw.Extends + tj.Pipeline = raw.Pipeline + tj.RemoteCacheOptions = raw.RemoteCacheOptions + tj.Extends = raw.Extends + // Directly to SpaceID, we don't need to keep the struct + if raw.Space != nil { + tj.SpaceID = raw.Space.ID + } return nil } @@ -666,13 +679,17 @@ func (c *TurboJSON) UnmarshalJSON(data []byte) error { // This is used by `turbo prune` to generate a pruned turbo.json // and also by --summarize & --dry=json to serialize the known config // into something we can print to screen -func (c *TurboJSON) MarshalJSON() ([]byte, error) { +func (tj *TurboJSON) MarshalJSON() ([]byte, error) { raw := pristineTurboJSON{} - raw.GlobalDependencies = c.GlobalDeps - raw.GlobalEnv = c.GlobalEnv - raw.GlobalPassthroughEnv = c.GlobalPassthroughEnv - raw.Pipeline = c.Pipeline.Pristine() - raw.RemoteCacheOptions = c.RemoteCacheOptions + raw.GlobalDependencies = tj.GlobalDeps + raw.GlobalEnv = tj.GlobalEnv + raw.GlobalPassthroughEnv = tj.GlobalPassthroughEnv + raw.Pipeline = tj.Pipeline.Pristine() + raw.RemoteCacheOptions = tj.RemoteCacheOptions + + if tj.SpaceID != "" { + raw.Space = &SpaceConfig{ID: 
tj.SpaceID} + } return json.Marshal(&raw) } diff --git a/cli/internal/run/run.go b/cli/internal/run/run.go index 2ac114121187d..fb5dda5cb5b61 100644 --- a/cli/internal/run/run.go +++ b/cli/internal/run/run.go @@ -215,6 +215,12 @@ func (r *run) run(ctx gocontext.Context, targets []string) error { // TODO: these values come from a config file, hopefully viper can help us merge these r.opts.cacheOpts.RemoteCacheOpts = turboJSON.RemoteCacheOptions + // If a spaceID wasn't passed as a flag, read it from the turbo.json config. + // If that is not set either, we'll still end up with a blank string. + if r.opts.runOpts.ExperimentalSpaceID == "" { + r.opts.runOpts.ExperimentalSpaceID = turboJSON.SpaceID + } + pipeline := turboJSON.Pipeline g.Pipeline = pipeline scmInstance, err := scm.FromInRepo(r.base.RepoRoot) From f56af36e8319f1dbf587738b15c9c13566bb5d15 Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Tue, 25 Apr 2023 17:08:55 +0200 Subject: [PATCH 06/24] add FixedStaticAsset and TextContentSourceAsset (#4692) ### Description needed for metadata support in next.js (https://github.com/vercel/next.js/pull/48823) --- crates/turbopack-ecmascript/src/lib.rs | 1 + crates/turbopack-ecmascript/src/text/mod.rs | 49 +++++++++++++++++++++ crates/turbopack-static/src/fixed.rs | 39 ++++++++++++++++ crates/turbopack-static/src/lib.rs | 2 + 4 files changed, 91 insertions(+) create mode 100644 crates/turbopack-ecmascript/src/text/mod.rs create mode 100644 crates/turbopack-static/src/fixed.rs diff --git a/crates/turbopack-ecmascript/src/lib.rs b/crates/turbopack-ecmascript/src/lib.rs index d425c843a9d0f..39e887d9f2167 100644 --- a/crates/turbopack-ecmascript/src/lib.rs +++ b/crates/turbopack-ecmascript/src/lib.rs @@ -17,6 +17,7 @@ mod path_visitor; pub(crate) mod references; pub mod resolve; pub(crate) mod special_cases; +pub mod text; pub(crate) mod transform; pub mod tree_shake; pub mod typescript; diff --git a/crates/turbopack-ecmascript/src/text/mod.rs 
b/crates/turbopack-ecmascript/src/text/mod.rs new file mode 100644 index 0000000000000..f7c0aef1689c4 --- /dev/null +++ b/crates/turbopack-ecmascript/src/text/mod.rs @@ -0,0 +1,49 @@ +use anyhow::Result; +use turbo_tasks::primitives::StringVc; +use turbo_tasks_fs::FileContent; +use turbopack_core::{ + asset::{Asset, AssetContentVc, AssetVc}, + ident::AssetIdentVc, +}; + +use crate::utils::StringifyJs; + +#[turbo_tasks::function] +fn modifier() -> StringVc { + StringVc::cell("text content".to_string()) +} + +/// A source asset that exports the string content of an asset as the default +/// export of a JS module. +#[turbo_tasks::value] +pub struct TextContentSourceAsset { + pub source: AssetVc, +} + +#[turbo_tasks::value_impl] +impl TextContentSourceAssetVc { + #[turbo_tasks::function] + pub fn new(source: AssetVc) -> Self { + TextContentSourceAsset { source }.cell() + } +} + +#[turbo_tasks::value_impl] +impl Asset for TextContentSourceAsset { + #[turbo_tasks::function] + fn ident(&self) -> AssetIdentVc { + self.source.ident().with_modifier(modifier()) + } + + #[turbo_tasks::function] + async fn content(&self) -> Result { + let source = self.source.content().file_content(); + let FileContent::Content(content) = &*source.await? else { + return Ok(FileContent::NotFound.cell().into()); + }; + let text = content.content().to_str()?; + let code = format!("export default {};", StringifyJs(&text)); + let content = FileContent::Content(code.into()).cell(); + Ok(content.into()) + } +} diff --git a/crates/turbopack-static/src/fixed.rs b/crates/turbopack-static/src/fixed.rs new file mode 100644 index 0000000000000..2023779d83eb3 --- /dev/null +++ b/crates/turbopack-static/src/fixed.rs @@ -0,0 +1,39 @@ +use anyhow::Result; +use turbo_tasks_fs::FileSystemPathVc; +use turbopack_core::{ + asset::{Asset, AssetContentVc, AssetVc}, + ident::AssetIdentVc, +}; + +/// A static asset that is served at a fixed output path. 
It won't use +/// content hashing to generate a long term cacheable URL. +#[turbo_tasks::value] +pub struct FixedStaticAsset { + output_path: FileSystemPathVc, + source: AssetVc, +} + +#[turbo_tasks::value_impl] +impl FixedStaticAssetVc { + #[turbo_tasks::function] + pub fn new(output_path: FileSystemPathVc, source: AssetVc) -> Self { + FixedStaticAsset { + output_path, + source, + } + .cell() + } +} + +#[turbo_tasks::value_impl] +impl Asset for FixedStaticAsset { + #[turbo_tasks::function] + async fn ident(&self) -> Result { + Ok(AssetIdentVc::from_path(self.output_path)) + } + + #[turbo_tasks::function] + fn content(&self) -> AssetContentVc { + self.source.content() + } +} diff --git a/crates/turbopack-static/src/lib.rs b/crates/turbopack-static/src/lib.rs index 166377fbd5a49..834fa659919a6 100644 --- a/crates/turbopack-static/src/lib.rs +++ b/crates/turbopack-static/src/lib.rs @@ -10,6 +10,8 @@ #![feature(min_specialization)] +pub mod fixed; + use anyhow::{anyhow, Result}; use turbo_tasks::{primitives::StringVc, Value, ValueToString}; use turbo_tasks_fs::FileContent; From 27b654b7c1e1462f4d5023e03f42bbcfafb5e758 Mon Sep 17 00:00:00 2001 From: Nicholas Yang Date: Tue, 25 Apr 2023 15:26:43 -0400 Subject: [PATCH 07/24] port(turborepo): Config (#4520) ### Description Creates an `ExecutionState` struct that gets serialized to the Go code. The goal is to build up more and more of the info, such as config and graph state in this struct, so that we can do more work on the Rust side. 
### Testing Instructions --- cli/cmd/turbo/main.go | 10 +- cli/internal/client/client.go | 32 +--- cli/internal/client/client_test.go | 17 +- cli/internal/cmd/root.go | 16 +- cli/internal/cmdutil/cmdutil.go | 58 +----- cli/internal/cmdutil/cmdutil_test.go | 87 +++------ cli/internal/config/config_file.go | 168 ------------------ cli/internal/config/config_file_test.go | 157 ---------------- cli/internal/prune/prune.go | 8 +- cli/internal/run/run.go | 12 +- cli/internal/run/run_spec.go | 5 - cli/internal/turbostate/turbostate.go | 60 ++----- crates/turborepo-api-client/src/lib.rs | 15 +- crates/turborepo-lib/src/cli.rs | 45 ++--- crates/turborepo-lib/src/commands/mod.rs | 4 + crates/turborepo-lib/src/config/client.rs | 57 +++--- crates/turborepo-lib/src/config/repo.rs | 72 +++++--- crates/turborepo-lib/src/config/user.rs | 30 ++-- crates/turborepo-lib/src/execution_state.rs | 46 +++++ crates/turborepo-lib/src/lib.rs | 11 +- crates/turborepo/src/main.rs | 14 +- .../integration/tests/api-client-config.t | 42 +++++ 22 files changed, 313 insertions(+), 653 deletions(-) delete mode 100644 cli/internal/config/config_file_test.go create mode 100644 crates/turborepo-lib/src/execution_state.rs create mode 100644 turborepo-tests/integration/tests/api-client-config.t diff --git a/cli/cmd/turbo/main.go b/cli/cmd/turbo/main.go index d4155f5e0ffcb..326cb94888cba 100644 --- a/cli/cmd/turbo/main.go +++ b/cli/cmd/turbo/main.go @@ -15,14 +15,14 @@ func main() { os.Exit(1) } - argsString := os.Args[1] - var args turbostate.ParsedArgsFromRust - err := json.Unmarshal([]byte(argsString), &args) + executionStateString := os.Args[1] + var executionState turbostate.ExecutionState + err := json.Unmarshal([]byte(executionStateString), &executionState) if err != nil { - fmt.Printf("Error unmarshalling CLI args: %v\n Arg string: %v\n", err, argsString) + fmt.Printf("Error unmarshalling execution state: %v\n Execution state string: %v\n", err, executionStateString) os.Exit(1) } - exitCode := 
cmd.RunWithArgs(&args, turboVersion) + exitCode := cmd.RunWithExecutionState(&executionState, turboVersion) os.Exit(exitCode) } diff --git a/cli/internal/client/client.go b/cli/internal/client/client.go index 822b2df559885..d177a2da8caf0 100644 --- a/cli/internal/client/client.go +++ b/cli/internal/client/client.go @@ -17,6 +17,7 @@ import ( "github.com/hashicorp/go-hclog" "github.com/hashicorp/go-retryablehttp" "github.com/vercel/turbo/cli/internal/ci" + "github.com/vercel/turbo/cli/internal/turbostate" ) // APIClient is the main interface for making network requests to Vercel @@ -47,31 +48,14 @@ func (c *APIClient) SetToken(token string) { c.token = token } -// RemoteConfig holds the authentication and endpoint details for the API client -type RemoteConfig struct { - Token string - TeamID string - TeamSlug string - APIURL string -} - -// Opts holds values for configuring the behavior of the API client -type Opts struct { - UsePreflight bool - Timeout uint64 -} - -// ClientTimeout Exported ClientTimeout used in run.go -const ClientTimeout uint64 = 20 - // NewClient creates a new APIClient -func NewClient(remoteConfig RemoteConfig, logger hclog.Logger, turboVersion string, opts Opts) *APIClient { +func NewClient(config turbostate.APIClientConfig, logger hclog.Logger, turboVersion string) *APIClient { client := &APIClient{ - baseURL: remoteConfig.APIURL, + baseURL: config.APIURL, turboVersion: turboVersion, HTTPClient: &retryablehttp.Client{ HTTPClient: &http.Client{ - Timeout: time.Duration(opts.Timeout) * time.Second, + Timeout: time.Duration(config.Timeout) * time.Second, }, RetryWaitMin: 2 * time.Second, RetryWaitMax: 10 * time.Second, @@ -79,10 +63,10 @@ func NewClient(remoteConfig RemoteConfig, logger hclog.Logger, turboVersion stri Backoff: retryablehttp.DefaultBackoff, Logger: logger, }, - token: remoteConfig.Token, - teamID: remoteConfig.TeamID, - teamSlug: remoteConfig.TeamSlug, - usePreflight: opts.UsePreflight, + token: config.Token, + teamID: 
config.TeamID, + teamSlug: config.TeamSlug, + usePreflight: config.UsePreflight, } client.HTTPClient.CheckRetry = client.checkRetry return client diff --git a/cli/internal/client/client_test.go b/cli/internal/client/client_test.go index 36ff3fbc7eb87..455861b0d2b3e 100644 --- a/cli/internal/client/client_test.go +++ b/cli/internal/client/client_test.go @@ -12,6 +12,7 @@ import ( "github.com/google/uuid" "github.com/hashicorp/go-hclog" + "github.com/vercel/turbo/cli/internal/turbostate" "github.com/vercel/turbo/cli/internal/util" ) @@ -30,12 +31,12 @@ func Test_sendToServer(t *testing.T) { })) defer ts.Close() - remoteConfig := RemoteConfig{ + apiClientConfig := turbostate.APIClientConfig{ TeamSlug: "my-team-slug", APIURL: ts.URL, Token: "my-token", } - apiClient := NewClient(remoteConfig, hclog.Default(), "v1", Opts{}) + apiClient := NewClient(apiClientConfig, hclog.Default(), "v1") myUUID, err := uuid.NewUUID() if err != nil { @@ -85,12 +86,12 @@ func Test_PutArtifact(t *testing.T) { defer ts.Close() // Set up test expected values - remoteConfig := RemoteConfig{ + apiClientConfig := turbostate.APIClientConfig{ TeamSlug: "my-team-slug", APIURL: ts.URL, Token: "my-token", } - apiClient := NewClient(remoteConfig, hclog.Default(), "v1", Opts{}) + apiClient := NewClient(apiClientConfig, hclog.Default(), "v1") expectedArtifactBody := []byte("My string artifact") // Test Put Artifact @@ -111,12 +112,12 @@ func Test_PutWhenCachingDisabled(t *testing.T) { defer ts.Close() // Set up test expected values - remoteConfig := RemoteConfig{ + apiClientConfig := turbostate.APIClientConfig{ TeamSlug: "my-team-slug", APIURL: ts.URL, Token: "my-token", } - apiClient := NewClient(remoteConfig, hclog.Default(), "v1", Opts{}) + apiClient := NewClient(apiClientConfig, hclog.Default(), "v1") expectedArtifactBody := []byte("My string artifact") // Test Put Artifact err := apiClient.PutArtifact("hash", expectedArtifactBody, 500, "") @@ -138,12 +139,12 @@ func Test_FetchWhenCachingDisabled(t 
*testing.T) { defer ts.Close() // Set up test expected values - remoteConfig := RemoteConfig{ + apiClientConfig := turbostate.APIClientConfig{ TeamSlug: "my-team-slug", APIURL: ts.URL, Token: "my-token", } - apiClient := NewClient(remoteConfig, hclog.Default(), "v1", Opts{}) + apiClient := NewClient(apiClientConfig, hclog.Default(), "v1") // Test Put Artifact resp, err := apiClient.FetchArtifact("hash") cd := &util.CacheDisabledError{} diff --git a/cli/internal/cmd/root.go b/cli/internal/cmd/root.go index 314cb97c99a4b..7b637be86b2be 100644 --- a/cli/internal/cmd/root.go +++ b/cli/internal/cmd/root.go @@ -44,29 +44,29 @@ func initializeOutputFiles(helper *cmdutil.Helper, parsedArgs *turbostate.Parsed return nil } -// RunWithArgs runs turbo with the ParsedArgsFromRust that is passed from the Rust side. -func RunWithArgs(args *turbostate.ParsedArgsFromRust, turboVersion string) int { +// RunWithExecutionState runs turbo with the ParsedArgsFromRust that is passed from the Rust side. +func RunWithExecutionState(executionState *turbostate.ExecutionState, turboVersion string) int { util.InitPrintf() // TODO: replace this with a context signalWatcher := signals.NewWatcher() - helper := cmdutil.NewHelper(turboVersion, args) + helper := cmdutil.NewHelper(turboVersion, &executionState.CLIArgs) ctx := context.Background() - err := initializeOutputFiles(helper, args) + err := initializeOutputFiles(helper, &executionState.CLIArgs) if err != nil { fmt.Printf("%v", err) return 1 } - defer helper.Cleanup(args) + defer helper.Cleanup(&executionState.CLIArgs) doneCh := make(chan struct{}) var execErr error go func() { - command := args.Command + command := executionState.CLIArgs.Command if command.Prune != nil { - execErr = prune.ExecutePrune(helper, args) + execErr = prune.ExecutePrune(helper, executionState) } else if command.Run != nil { - execErr = run.ExecuteRun(ctx, helper, signalWatcher, args) + execErr = run.ExecuteRun(ctx, helper, signalWatcher, executionState) } else { 
execErr = fmt.Errorf("unknown command: %v", command) } diff --git a/cli/internal/cmdutil/cmdutil.go b/cli/internal/cmdutil/cmdutil.go index 0b02392a8a741..112d3964c10a8 100644 --- a/cli/internal/cmdutil/cmdutil.go +++ b/cli/internal/cmdutil/cmdutil.go @@ -7,7 +7,6 @@ import ( "io" "io/ioutil" "os" - "strconv" "sync" "github.com/hashicorp/go-hclog" @@ -39,8 +38,6 @@ type Helper struct { rawRepoRoot string - clientOpts client.Opts - // UserConfigPath is the path to where we expect to find // a user-specific config file, if one is present. Public // to allow overrides in tests @@ -74,12 +71,12 @@ func (h *Helper) Cleanup(cliConfig *turbostate.ParsedArgsFromRust) { } } -func (h *Helper) getUI(cliConfig *turbostate.ParsedArgsFromRust) cli.Ui { +func (h *Helper) getUI(cliArgs *turbostate.ParsedArgsFromRust) cli.Ui { colorMode := ui.GetColorModeFromEnv() - if cliConfig.GetNoColor() { + if cliArgs.NoColor { colorMode = ui.ColorModeSuppressed } - if cliConfig.GetColor() { + if cliArgs.Color { colorMode = ui.ColorModeForced } return ui.BuildColoredUi(colorMode) @@ -134,15 +131,15 @@ func NewHelper(turboVersion string, args *turbostate.ParsedArgsFromRust) *Helper // GetCmdBase returns a CmdBase instance configured with values from this helper. 
// It additionally returns a mechanism to set an error, so -func (h *Helper) GetCmdBase(cliConfig *turbostate.ParsedArgsFromRust) (*CmdBase, error) { +func (h *Helper) GetCmdBase(executionState *turbostate.ExecutionState) (*CmdBase, error) { // terminal is for color/no-color output - terminal := h.getUI(cliConfig) + terminal := h.getUI(&executionState.CLIArgs) // logger is configured with verbosity level using --verbosity flag from end users logger, err := h.getLogger() if err != nil { return nil, err } - cwdRaw, err := cliConfig.GetCwd() + cwdRaw := executionState.CLIArgs.CWD if err != nil { return nil, err } @@ -155,45 +152,12 @@ func (h *Helper) GetCmdBase(cliConfig *turbostate.ParsedArgsFromRust) (*CmdBase, if err != nil { return nil, err } - repoConfig, err := config.ReadRepoConfigFile(config.GetRepoConfigPath(repoRoot), cliConfig) - if err != nil { - return nil, err - } - userConfig, err := config.ReadUserConfigFile(h.UserConfigPath, cliConfig) - if err != nil { - return nil, err - } - remoteConfig := repoConfig.GetRemoteConfig(userConfig.Token()) - if remoteConfig.Token == "" && ui.IsCI { - vercelArtifactsToken := os.Getenv("VERCEL_ARTIFACTS_TOKEN") - vercelArtifactsOwner := os.Getenv("VERCEL_ARTIFACTS_OWNER") - if vercelArtifactsToken != "" { - remoteConfig.Token = vercelArtifactsToken - } - if vercelArtifactsOwner != "" { - remoteConfig.TeamID = vercelArtifactsOwner - } - } - - // Primacy: Arg > Env - timeout, err := cliConfig.GetRemoteCacheTimeout() - if err == nil { - h.clientOpts.Timeout = timeout - } else { - val, ok := os.LookupEnv("TURBO_REMOTE_CACHE_TIMEOUT") - if ok { - number, err := strconv.ParseUint(val, 10, 64) - if err == nil { - h.clientOpts.Timeout = number - } - } - } + apiClientConfig := executionState.APIClientConfig apiClient := client.NewClient( - remoteConfig, + apiClientConfig, logger, h.TurboVersion, - h.clientOpts, ) return &CmdBase{ @@ -201,9 +165,6 @@ func (h *Helper) GetCmdBase(cliConfig *turbostate.ParsedArgsFromRust) (*CmdBase, 
Logger: logger, RepoRoot: repoRoot, APIClient: apiClient, - RepoConfig: repoConfig, - UserConfig: userConfig, - RemoteConfig: remoteConfig, TurboVersion: h.TurboVersion, }, nil } @@ -214,9 +175,6 @@ type CmdBase struct { Logger hclog.Logger RepoRoot turbopath.AbsoluteSystemPath APIClient *client.APIClient - RepoConfig *config.RepoConfig - UserConfig *config.UserConfig - RemoteConfig client.RemoteConfig TurboVersion string } diff --git a/cli/internal/cmdutil/cmdutil_test.go b/cli/internal/cmdutil/cmdutil_test.go index 4e6cf7047bf16..bf54d82952b8d 100644 --- a/cli/internal/cmdutil/cmdutil_test.go +++ b/cli/internal/cmdutil/cmdutil_test.go @@ -5,75 +5,25 @@ import ( "testing" "time" - "github.com/vercel/turbo/cli/internal/fs" "github.com/vercel/turbo/cli/internal/turbostate" "gotest.tools/v3/assert" ) -func TestTokenEnvVar(t *testing.T) { - // Set up an empty config so we're just testing environment variables - userConfigPath := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json") - expectedPrefix := "my-token" - vars := []string{"TURBO_TOKEN", "VERCEL_ARTIFACTS_TOKEN"} - for _, v := range vars { - t.Run(v, func(t *testing.T) { - t.Cleanup(func() { - _ = os.Unsetenv(v) - }) - args := &turbostate.ParsedArgsFromRust{ - CWD: "", - } - h := NewHelper("test-version", args) - h.UserConfigPath = userConfigPath - - expectedToken := expectedPrefix + v - err := os.Setenv(v, expectedToken) - if err != nil { - t.Fatalf("setenv %v", err) - } - - base, err := h.GetCmdBase(args) - if err != nil { - t.Fatalf("failed to get command base %v", err) - } - assert.Equal(t, base.RemoteConfig.Token, expectedToken) - }) +func TestRemoteCacheTimeoutFlag(t *testing.T) { + args := turbostate.ParsedArgsFromRust{ + CWD: "", } -} - -func TestRemoteCacheTimeoutEnvVar(t *testing.T) { - key := "TURBO_REMOTE_CACHE_TIMEOUT" - expectedTimeout := "600" - t.Run(key, func(t *testing.T) { - t.Cleanup(func() { - _ = os.Unsetenv(key) - }) - args := 
&turbostate.ParsedArgsFromRust{ - CWD: "", - } - h := NewHelper("test-version", args) - err := os.Setenv(key, expectedTimeout) - if err != nil { - t.Fatalf("setenv %v", err) - } - - base, err := h.GetCmdBase(args) - if err != nil { - t.Fatalf("failed to get command base %v", err) - } - assert.Equal(t, base.APIClient.HTTPClient.HTTPClient.Timeout, time.Duration(600)*time.Second) - }) -} - -func TestRemoteCacheTimeoutFlag(t *testing.T) { - args := &turbostate.ParsedArgsFromRust{ - CWD: "", - RemoteCacheTimeout: 599, + executionState := turbostate.ExecutionState{ + APIClientConfig: turbostate.APIClientConfig{ + Timeout: 599, + }, + CLIArgs: args, } - h := NewHelper("test-version", args) - base, err := h.GetCmdBase(args) + h := NewHelper("test-version", &args) + + base, err := h.GetCmdBase(&executionState) if err != nil { t.Fatalf("failed to get command base %v", err) } @@ -89,18 +39,23 @@ func TestRemoteCacheTimeoutPrimacy(t *testing.T) { t.Cleanup(func() { _ = os.Unsetenv(key) }) - args := &turbostate.ParsedArgsFromRust{ - CWD: "", - RemoteCacheTimeout: 1, + args := turbostate.ParsedArgsFromRust{ + CWD: "", + } + executionState := turbostate.ExecutionState{ + APIClientConfig: turbostate.APIClientConfig{ + Timeout: 1, + }, + CLIArgs: args, } - h := NewHelper("test-version", args) + h := NewHelper("test-version", &args) err := os.Setenv(key, value) if err != nil { t.Fatalf("setenv %v", err) } - base, err := h.GetCmdBase(args) + base, err := h.GetCmdBase(&executionState) if err != nil { t.Fatalf("failed to get command base %v", err) } diff --git a/cli/internal/config/config_file.go b/cli/internal/config/config_file.go index d3118b8732da2..a5567436cb4d7 100644 --- a/cli/internal/config/config_file.go +++ b/cli/internal/config/config_file.go @@ -1,13 +1,9 @@ package config import ( - "os" - "github.com/spf13/viper" - "github.com/vercel/turbo/cli/internal/client" "github.com/vercel/turbo/cli/internal/fs" "github.com/vercel/turbo/cli/internal/turbopath" - 
"github.com/vercel/turbo/cli/internal/turbostate" ) // RepoConfig is a configuration object for the logged-in turborepo.com user @@ -21,172 +17,8 @@ func (rc *RepoConfig) LoginURL() string { return rc.repoViper.GetString("loginurl") } -// SetTeamID sets the teamID and clears the slug, since it may have been from an old team -func (rc *RepoConfig) SetTeamID(teamID string) error { - // Note that we can't use viper.Set to set a nil value, we have to merge it in - newVals := map[string]interface{}{ - "teamid": teamID, - "teamslug": nil, - } - if err := rc.repoViper.MergeConfigMap(newVals); err != nil { - return err - } - return rc.write() -} - -// GetRemoteConfig produces the necessary values for an API client configuration -func (rc *RepoConfig) GetRemoteConfig(token string) client.RemoteConfig { - return client.RemoteConfig{ - Token: token, - TeamID: rc.repoViper.GetString("teamid"), - TeamSlug: rc.repoViper.GetString("teamslug"), - APIURL: rc.repoViper.GetString("apiurl"), - } -} - -// Internal call to save this config data to the user config file. -func (rc *RepoConfig) write() error { - if err := rc.path.EnsureDir(); err != nil { - return err - } - return rc.repoViper.WriteConfig() -} - -// Delete deletes the config file. This repo config shouldn't be used -// afterwards, it needs to be re-initialized -func (rc *RepoConfig) Delete() error { - return rc.path.Remove() -} - -// UserConfig is a wrapper around the user-specific configuration values -// for Turborepo. 
-type UserConfig struct { - userViper *viper.Viper - path turbopath.AbsoluteSystemPath -} - -// Token returns the Bearer token for this user if it exists -func (uc *UserConfig) Token() string { - return uc.userViper.GetString("token") -} - -// SetToken saves a Bearer token for this user, writing it to the -// user config file, creating it if necessary -func (uc *UserConfig) SetToken(token string) error { - // Technically Set works here, due to how overrides work, but use merge for consistency - if err := uc.userViper.MergeConfigMap(map[string]interface{}{"token": token}); err != nil { - return err - } - return uc.write() -} - -// Internal call to save this config data to the user config file. -func (uc *UserConfig) write() error { - if err := uc.path.EnsureDir(); err != nil { - return err - } - return uc.userViper.WriteConfig() -} - -// Delete deletes the config file. This user config shouldn't be used -// afterwards, it needs to be re-initialized -func (uc *UserConfig) Delete() error { - return uc.path.Remove() -} - -// ReadUserConfigFile creates a UserConfig using the -// specified path as the user config file. Note that the path or its parents -// do not need to exist. On a write to this configuration, they will be created. -func ReadUserConfigFile(path turbopath.AbsoluteSystemPath, cliConfig *turbostate.ParsedArgsFromRust) (*UserConfig, error) { - userViper := viper.New() - userViper.SetConfigFile(path.ToString()) - userViper.SetConfigType("json") - userViper.SetEnvPrefix("turbo") - userViper.MustBindEnv("token") - - token, err := cliConfig.GetToken() - if err != nil { - return nil, err - } - if token != "" { - userViper.Set("token", token) - } - - if err := userViper.ReadInConfig(); err != nil && !os.IsNotExist(err) { - return nil, err - } - return &UserConfig{ - userViper: userViper, - path: path, - }, nil -} - // DefaultUserConfigPath returns the default platform-dependent place that // we store the user-specific configuration. 
func DefaultUserConfigPath() turbopath.AbsoluteSystemPath { return fs.GetUserConfigDir().UntypedJoin("config.json") } - -const ( - _defaultAPIURL = "https://vercel.com/api" - _defaultLoginURL = "https://vercel.com" -) - -// ReadRepoConfigFile creates a RepoConfig using the -// specified path as the repo config file. Note that the path or its -// parents do not need to exist. On a write to this configuration, they -// will be created. -func ReadRepoConfigFile(path turbopath.AbsoluteSystemPath, cliConfig *turbostate.ParsedArgsFromRust) (*RepoConfig, error) { - repoViper := viper.New() - repoViper.SetConfigFile(path.ToString()) - repoViper.SetConfigType("json") - repoViper.SetEnvPrefix("turbo") - repoViper.MustBindEnv("apiurl", "TURBO_API") - repoViper.MustBindEnv("loginurl", "TURBO_LOGIN") - repoViper.MustBindEnv("teamslug", "TURBO_TEAM") - repoViper.MustBindEnv("teamid") - repoViper.SetDefault("apiurl", _defaultAPIURL) - repoViper.SetDefault("loginurl", _defaultLoginURL) - - login, err := cliConfig.GetLogin() - if err != nil { - return nil, err - } - if login != "" { - repoViper.Set("loginurl", login) - } - - api, err := cliConfig.GetAPI() - if err != nil { - return nil, err - } - if api != "" { - repoViper.Set("apiurl", api) - } - - team, err := cliConfig.GetTeam() - if err != nil { - return nil, err - } - if team != "" { - repoViper.Set("teamslug", team) - } - - if err := repoViper.ReadInConfig(); err != nil && !os.IsNotExist(err) { - return nil, err - } - // If team was set via commandline, don't read the teamId from the config file, as it - // won't necessarily match. 
- if team != "" { - repoViper.Set("teamid", "") - } - return &RepoConfig{ - repoViper: repoViper, - path: path, - }, nil -} - -// GetRepoConfigPath reads the user-specific configuration values -func GetRepoConfigPath(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { - return repoRoot.UntypedJoin(".turbo", "config.json") -} diff --git a/cli/internal/config/config_file_test.go b/cli/internal/config/config_file_test.go deleted file mode 100644 index 7a19108269e6e..0000000000000 --- a/cli/internal/config/config_file_test.go +++ /dev/null @@ -1,157 +0,0 @@ -package config - -import ( - "fmt" - "testing" - - "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/turbostate" - "gotest.tools/v3/assert" -) - -func TestReadRepoConfigWhenMissing(t *testing.T) { - testDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("config.json") - args := &turbostate.ParsedArgsFromRust{ - CWD: "", - } - - config, err := ReadRepoConfigFile(testDir, args) - if err != nil { - t.Errorf("got error reading non-existent config file: %v, want ", err) - } - if config == nil { - t.Error("got , wanted config value") - } -} - -func TestReadRepoConfigSetTeamAndAPIFlag(t *testing.T) { - testConfigFile := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json") - - slug := "my-team-slug" - apiURL := "http://my-login-url" - args := &turbostate.ParsedArgsFromRust{ - CWD: "", - Team: slug, - API: apiURL, - } - - teamID := "some-id" - assert.NilError(t, testConfigFile.EnsureDir(), "EnsureDir") - assert.NilError(t, testConfigFile.WriteFile([]byte(fmt.Sprintf(`{"teamId":"%v"}`, teamID)), 0644), "WriteFile") - - config, err := ReadRepoConfigFile(testConfigFile, args) - if err != nil { - t.Errorf("ReadRepoConfigFile err got %v, want ", err) - } - remoteConfig := config.GetRemoteConfig("") - if remoteConfig.TeamID != "" { - t.Errorf("TeamID got %v, want ", remoteConfig.TeamID) - } - if remoteConfig.TeamSlug != slug { - 
t.Errorf("TeamSlug got %v, want %v", remoteConfig.TeamSlug, slug) - } - if remoteConfig.APIURL != apiURL { - t.Errorf("APIURL got %v, want %v", remoteConfig.APIURL, apiURL) - } -} - -func TestRepoConfigIncludesDefaults(t *testing.T) { - testConfigFile := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json") - args := &turbostate.ParsedArgsFromRust{ - CWD: "", - } - - expectedTeam := "my-team" - - assert.NilError(t, testConfigFile.EnsureDir(), "EnsureDir") - assert.NilError(t, testConfigFile.WriteFile([]byte(fmt.Sprintf(`{"teamSlug":"%v"}`, expectedTeam)), 0644), "WriteFile") - - config, err := ReadRepoConfigFile(testConfigFile, args) - if err != nil { - t.Errorf("ReadRepoConfigFile err got %v, want ", err) - } - - remoteConfig := config.GetRemoteConfig("") - if remoteConfig.APIURL != _defaultAPIURL { - t.Errorf("api url got %v, want %v", remoteConfig.APIURL, _defaultAPIURL) - } - if remoteConfig.TeamSlug != expectedTeam { - t.Errorf("team slug got %v, want %v", remoteConfig.TeamSlug, expectedTeam) - } -} - -func TestWriteRepoConfig(t *testing.T) { - repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) - testConfigFile := repoRoot.UntypedJoin(".turbo", "config.json") - args := &turbostate.ParsedArgsFromRust{ - CWD: "", - } - - expectedTeam := "my-team" - - assert.NilError(t, testConfigFile.EnsureDir(), "EnsureDir") - assert.NilError(t, testConfigFile.WriteFile([]byte(fmt.Sprintf(`{"teamSlug":"%v"}`, expectedTeam)), 0644), "WriteFile") - - initial, err := ReadRepoConfigFile(testConfigFile, args) - assert.NilError(t, err, "GetRepoConfig") - // setting the teamID should clear the slug, since it may have been from an old team - expectedTeamID := "my-team-id" - err = initial.SetTeamID(expectedTeamID) - assert.NilError(t, err, "SetTeamID") - - config, err := ReadRepoConfigFile(testConfigFile, args) - if err != nil { - t.Errorf("ReadRepoConfig err got %v, want ", err) - } - - remoteConfig := config.GetRemoteConfig("") - if 
remoteConfig.TeamSlug != "" { - t.Errorf("Expected TeamSlug to be cleared, got %v", remoteConfig.TeamSlug) - } - if remoteConfig.TeamID != expectedTeamID { - t.Errorf("TeamID got %v, want %v", remoteConfig.TeamID, expectedTeamID) - } -} - -func TestWriteUserConfig(t *testing.T) { - configPath := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json") - args := &turbostate.ParsedArgsFromRust{ - CWD: "", - } - - // Non-existent config file should get empty values - userConfig, err := ReadUserConfigFile(configPath, args) - assert.NilError(t, err, "readUserConfigFile") - assert.Equal(t, userConfig.Token(), "") - assert.Equal(t, userConfig.path, configPath) - - expectedToken := "my-token" - err = userConfig.SetToken(expectedToken) - assert.NilError(t, err, "SetToken") - - config, err := ReadUserConfigFile(configPath, args) - assert.NilError(t, err, "readUserConfigFile") - assert.Equal(t, config.Token(), expectedToken) - - err = config.Delete() - assert.NilError(t, err, "deleteConfigFile") - assert.Equal(t, configPath.FileExists(), false, "config file should be deleted") - - final, err := ReadUserConfigFile(configPath, args) - assert.NilError(t, err, "readUserConfigFile") - assert.Equal(t, final.Token(), "") - assert.Equal(t, configPath.FileExists(), false, "config file should be deleted") -} - -func TestUserConfigFlags(t *testing.T) { - configPath := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json") - args := &turbostate.ParsedArgsFromRust{ - CWD: "", - Token: "my-token", - } - - userConfig, err := ReadUserConfigFile(configPath, args) - assert.NilError(t, err, "readUserConfigFile") - assert.Equal(t, userConfig.Token(), "my-token") - assert.Equal(t, userConfig.path, configPath) -} diff --git a/cli/internal/prune/prune.go b/cli/internal/prune/prune.go index a82023f13bd14..2dc541e86b150 100644 --- a/cli/internal/prune/prune.go +++ b/cli/internal/prune/prune.go @@ -28,12 +28,12 @@ type opts struct { } // 
ExecutePrune executes the `prune` command. -func ExecutePrune(helper *cmdutil.Helper, args *turbostate.ParsedArgsFromRust) error { - base, err := helper.GetCmdBase(args) +func ExecutePrune(helper *cmdutil.Helper, executionState *turbostate.ExecutionState) error { + base, err := helper.GetCmdBase(executionState) if err != nil { return err } - if len(args.Command.Prune.Scope) == 0 { + if len(executionState.CLIArgs.Command.Prune.Scope) == 0 { err := errors.New("at least one target must be specified") base.LogError(err.Error()) return err @@ -41,7 +41,7 @@ func ExecutePrune(helper *cmdutil.Helper, args *turbostate.ParsedArgsFromRust) e p := &prune{ base, } - if err := p.prune(args.Command.Prune); err != nil { + if err := p.prune(executionState.CLIArgs.Command.Prune); err != nil { logError(p.base.Logger, p.base.UI, err) return err } diff --git a/cli/internal/run/run.go b/cli/internal/run/run.go index fb5dda5cb5b61..8575e85f6f047 100644 --- a/cli/internal/run/run.go +++ b/cli/internal/run/run.go @@ -32,18 +32,18 @@ import ( ) // ExecuteRun executes the run command -func ExecuteRun(ctx gocontext.Context, helper *cmdutil.Helper, signalWatcher *signals.Watcher, args *turbostate.ParsedArgsFromRust) error { - base, err := helper.GetCmdBase(args) +func ExecuteRun(ctx gocontext.Context, helper *cmdutil.Helper, signalWatcher *signals.Watcher, executionState *turbostate.ExecutionState) error { + base, err := helper.GetCmdBase(executionState) LogTag(base.Logger) if err != nil { return err } - tasks := args.Command.Run.Tasks - passThroughArgs := args.Command.Run.PassThroughArgs + tasks := executionState.CLIArgs.Command.Run.Tasks + passThroughArgs := executionState.CLIArgs.Command.Run.PassThroughArgs if len(tasks) == 0 { return errors.New("at least one task must be specified") } - opts, err := optsFromArgs(args) + opts, err := optsFromArgs(&executionState.CLIArgs) if err != nil { return err } @@ -66,8 +66,6 @@ func optsFromArgs(args *turbostate.ParsedArgsFromRust) (*Opts, error) { 
return nil, err } - // Cache flags - opts.clientOpts.Timeout = args.RemoteCacheTimeout opts.cacheOpts.SkipFilesystem = runPayload.RemoteOnly opts.cacheOpts.OverrideDir = runPayload.CacheDir opts.cacheOpts.Workers = runPayload.CacheWorkers diff --git a/cli/internal/run/run_spec.go b/cli/internal/run/run_spec.go index 14402d39b5c83..8350ee56f0c2b 100644 --- a/cli/internal/run/run_spec.go +++ b/cli/internal/run/run_spec.go @@ -6,7 +6,6 @@ import ( "strings" "github.com/vercel/turbo/cli/internal/cache" - "github.com/vercel/turbo/cli/internal/client" "github.com/vercel/turbo/cli/internal/runcache" "github.com/vercel/turbo/cli/internal/scope" "github.com/vercel/turbo/cli/internal/util" @@ -42,7 +41,6 @@ func (rs *runSpec) ArgsForTask(task string) []string { type Opts struct { runOpts util.RunOpts cacheOpts cache.Opts - clientOpts client.Opts runcacheOpts runcache.Opts scopeOpts scope.Opts } @@ -83,8 +81,5 @@ func getDefaultOptions() *Opts { runOpts: util.RunOpts{ Concurrency: 10, }, - clientOpts: client.Opts{ - Timeout: client.ClientTimeout, - }, } } diff --git a/cli/internal/turbostate/turbostate.go b/cli/internal/turbostate/turbostate.go index 2ed23ddc0f537..3f34df7a45bd8 100644 --- a/cli/internal/turbostate/turbostate.go +++ b/cli/internal/turbostate/turbostate.go @@ -4,18 +4,9 @@ package turbostate import ( - "fmt" - "github.com/vercel/turbo/cli/internal/util" ) -// RepoState is the state for repository. Consists of the root for the repo -// along with the mode (single package or multi package) -type RepoState struct { - Root string `json:"root"` - Mode string `json:"mode"` -} - // DaemonPayload is the extra flags and command that are // passed for the `daemon` subcommand type DaemonPayload struct { @@ -96,45 +87,18 @@ type ParsedArgsFromRust struct { Command Command `json:"command"` } -// GetColor returns the value of the `color` flag. -func (a ParsedArgsFromRust) GetColor() bool { - return a.Color -} - -// GetNoColor returns the value of the `token` flag. 
-func (a ParsedArgsFromRust) GetNoColor() bool { - return a.NoColor -} - -// GetLogin returns the value of the `login` flag. -func (a ParsedArgsFromRust) GetLogin() (string, error) { - return a.Login, nil -} - -// GetAPI returns the value of the `api` flag. -func (a ParsedArgsFromRust) GetAPI() (string, error) { - return a.API, nil -} - -// GetTeam returns the value of the `team` flag. -func (a ParsedArgsFromRust) GetTeam() (string, error) { - return a.Team, nil -} - -// GetToken returns the value of the `token` flag. -func (a ParsedArgsFromRust) GetToken() (string, error) { - return a.Token, nil -} - -// GetCwd returns the value of the `cwd` flag. -func (a ParsedArgsFromRust) GetCwd() (string, error) { - return a.CWD, nil +// ExecutionState is the entire state of a turbo execution that is passed from the Rust shim. +type ExecutionState struct { + APIClientConfig APIClientConfig `json:"remote_config"` + CLIArgs ParsedArgsFromRust `json:"cli_args"` } -// GetRemoteCacheTimeout returns the value of the `remote-cache-timeout` flag. 
-func (a ParsedArgsFromRust) GetRemoteCacheTimeout() (uint64, error) { - if a.RemoteCacheTimeout != 0 { - return a.RemoteCacheTimeout, nil - } - return 0, fmt.Errorf("no remote cache timeout provided") +// APIClientConfig holds the authentication and endpoint details for the API client +type APIClientConfig struct { + Token string `json:"token"` + TeamID string `json:"team_id"` + TeamSlug string `json:"team_slug"` + APIURL string `json:"api_url"` + UsePreflight bool `json:"use_preflight"` + Timeout uint64 `json:"timeout"` } diff --git a/crates/turborepo-api-client/src/lib.rs b/crates/turborepo-api-client/src/lib.rs index ef242b7099178..30c50027713af 100644 --- a/crates/turborepo-api-client/src/lib.rs +++ b/crates/turborepo-api-client/src/lib.rs @@ -308,16 +308,13 @@ impl APIClient { false } - pub fn new( - base_url: impl AsRef, - timeout: Option, - version: &'static str, - ) -> Result { - let client = match timeout { - Some(timeout) => reqwest::Client::builder() + pub fn new(base_url: impl AsRef, timeout: u64, version: &'static str) -> Result { + let client = if timeout != 0 { + reqwest::Client::builder() .timeout(std::time::Duration::from_secs(timeout)) - .build()?, - None => reqwest::Client::builder().build()?, + .build()? + } else { + reqwest::Client::builder().build()? }; let user_agent = format!( diff --git a/crates/turborepo-lib/src/cli.rs b/crates/turborepo-lib/src/cli.rs index a2489183cbc8c..599ff14b45f65 100644 --- a/crates/turborepo-lib/src/cli.rs +++ b/crates/turborepo-lib/src/cli.rs @@ -434,12 +434,12 @@ pub enum LogPrefix { /// returns: Result #[tokio::main] pub async fn run(repo_state: Option) -> Result { - let mut clap_args = Args::new()?; + let mut cli_args = Args::new()?; // If there is no command, we set the command to `Command::Run` with // `self.parsed_args.run_args` as arguments. 
- if clap_args.command.is_none() { - if let Some(run_args) = mem::take(&mut clap_args.run_args) { - clap_args.command = Some(Command::Run(Box::new(run_args))); + if cli_args.command.is_none() { + if let Some(run_args) = mem::take(&mut cli_args.run_args) { + cli_args.command = Some(Command::Run(Box::new(run_args))); } else { return Err(anyhow!("No command specified")); } @@ -447,8 +447,8 @@ pub async fn run(repo_state: Option) -> Result { // If this is a run command, and we know the actual invocation path, set the // inference root, as long as the user hasn't overridden the cwd - if clap_args.cwd.is_none() { - if let Some(Command::Run(run_args)) = &mut clap_args.command { + if cli_args.cwd.is_none() { + if let Some(Command::Run(run_args)) = &mut cli_args.command { if let Ok(invocation_dir) = env::var(INVOCATION_DIR_ENV_VAR) { let invocation_path = Path::new(&invocation_dir); @@ -473,16 +473,16 @@ pub async fn run(repo_state: Option) -> Result { // Do this after the above, since we're now always setting cwd. if let Some(repo_state) = repo_state { - if let Some(Command::Run(run_args)) = &mut clap_args.command { + if let Some(Command::Run(run_args)) = &mut cli_args.command { run_args.single_package = matches!(repo_state.mode, RepoMode::SinglePackage); } - clap_args.cwd = Some(repo_state.root); + cli_args.cwd = Some(repo_state.root); } - let repo_root = if let Some(cwd) = &clap_args.cwd { + let repo_root = if let Some(cwd) = &cli_args.cwd { let canonical_cwd = fs_canonicalize(cwd)?; // Update on clap_args so that Go gets a canonical path. - clap_args.cwd = Some(canonical_cwd.clone()); + cli_args.cwd = Some(canonical_cwd.clone()); canonical_cwd } else { current_dir()? @@ -490,27 +490,27 @@ pub async fn run(repo_state: Option) -> Result { let version = get_version(); - match clap_args.command.as_ref().unwrap() { + match cli_args.command.as_ref().unwrap() { Command::Bin { .. } => { bin::run()?; Ok(Payload::Rust(Ok(0))) } Command::Logout { .. 
} => { - let mut base = CommandBase::new(clap_args, repo_root, version)?; + let mut base = CommandBase::new(cli_args, repo_root, version)?; logout::logout(&mut base)?; Ok(Payload::Rust(Ok(0))) } Command::Login { sso_team } => { - if clap_args.test_run { + if cli_args.test_run { println!("Login test run successful"); return Ok(Payload::Rust(Ok(0))); } let sso_team = sso_team.clone(); - let mut base = CommandBase::new(clap_args, repo_root, version)?; + let mut base = CommandBase::new(cli_args, repo_root, version)?; if let Some(sso_team) = sso_team { login::sso_login(&mut base, &sso_team).await?; @@ -524,36 +524,36 @@ pub async fn run(repo_state: Option) -> Result { no_gitignore, target, } => { - if clap_args.test_run { + if cli_args.test_run { println!("Link test run successful"); return Ok(Payload::Rust(Ok(0))); } let modify_gitignore = !*no_gitignore; let to = *target; - let mut base = CommandBase::new(clap_args, repo_root, version)?; + let mut base = CommandBase::new(cli_args, repo_root, version)?; if let Err(err) = link::link(&mut base, modify_gitignore, to).await { error!("error: {}", err.to_string()) - }; + } Ok(Payload::Rust(Ok(0))) } Command::Unlink { target } => { - if clap_args.test_run { + if cli_args.test_run { println!("Unlink test run successful"); return Ok(Payload::Rust(Ok(0))); } let from = *target; - let mut base = CommandBase::new(clap_args, repo_root, version)?; + let mut base = CommandBase::new(cli_args, repo_root, version)?; unlink::unlink(&mut base, from)?; Ok(Payload::Rust(Ok(0))) } Command::Daemon { command, idle_time } => { - let base = CommandBase::new(clap_args.clone(), repo_root, version)?; + let base = CommandBase::new(cli_args.clone(), repo_root, version)?; match command { Some(command) => daemon::daemon_client(command, &base).await, @@ -562,7 +562,10 @@ pub async fn run(repo_state: Option) -> Result { Ok(Payload::Rust(Ok(0))) } - Command::Prune { .. } | Command::Run(_) => Ok(Payload::Go(Box::new(clap_args))), + Command::Prune { .. 
} | Command::Run(_) => { + let base = CommandBase::new(cli_args, repo_root, version)?; + Ok(Payload::Go(Box::new(base))) + } Command::Completion { shell } => { generate(*shell, &mut Args::command(), "turbo", &mut io::stdout()); diff --git a/crates/turborepo-lib/src/commands/mod.rs b/crates/turborepo-lib/src/commands/mod.rs index db5383c3ee63b..c55bde422648e 100644 --- a/crates/turborepo-lib/src/commands/mod.rs +++ b/crates/turborepo-lib/src/commands/mod.rs @@ -128,6 +128,10 @@ impl CommandBase { Ok(self.client_config.get().unwrap()) } + pub fn args(&self) -> &Args { + &self.args + } + pub fn api_client(&mut self) -> Result { let repo_config = self.repo_config()?; let client_config = self.client_config()?; diff --git a/crates/turborepo-lib/src/config/client.rs b/crates/turborepo-lib/src/config/client.rs index 567b2e0373065..ae77a21cff0b3 100644 --- a/crates/turborepo-lib/src/config/client.rs +++ b/crates/turborepo-lib/src/config/client.rs @@ -13,7 +13,7 @@ pub struct ClientConfig { #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] struct ClientConfigValue { - remote_cache_timeout: Option, + remote_cache_timeout: u64, } #[derive(Debug, Clone)] @@ -23,24 +23,13 @@ pub struct ClientConfigLoader { } impl ClientConfig { - #[allow(dead_code)] - pub fn remote_cache_timeout(&self) -> Option { - match self.config.remote_cache_timeout { - // Pass 0 to get no timeout. - Some(0) => None, - - // Pass any non-zero uint64 to get a timeout of that duration measured in seconds. - Some(other) => Some(other), - - // If the _config_ doesn't have a remote_cache_timeout, give them the default. 
- None => Some(DEFAULT_TIMEOUT), - } + pub fn remote_cache_timeout(&self) -> u64 { + self.config.remote_cache_timeout } } impl ClientConfigLoader { /// Creates a loader that will load the client config - #[allow(dead_code)] pub fn new() -> Self { Self { remote_cache_timeout: None, @@ -49,7 +38,6 @@ impl ClientConfigLoader { } /// Set an override for token that the user provided via the command line - #[allow(dead_code)] pub fn with_remote_cache_timeout(mut self, remote_cache_timeout: Option) -> Self { self.remote_cache_timeout = remote_cache_timeout; self @@ -61,7 +49,6 @@ impl ClientConfigLoader { self } - #[allow(dead_code)] pub fn load(self) -> Result { let Self { remote_cache_timeout, @@ -79,7 +66,7 @@ impl ClientConfigLoader { match config_attempt { Err(_) => Ok(ClientConfig { config: ClientConfigValue { - remote_cache_timeout: None, + remote_cache_timeout: DEFAULT_TIMEOUT, }, }), Ok(config) => Ok(ClientConfig { config }), @@ -107,16 +94,16 @@ mod test { fn test_client_default() -> Result<()> { let config = ClientConfigLoader::new().load()?; - assert_eq!(config.remote_cache_timeout(), Some(DEFAULT_TIMEOUT)); + assert_eq!(config.remote_cache_timeout(), DEFAULT_TIMEOUT); Ok(()) } fn test_client_arg_variable() -> Result<()> { - let arg_value = Some(1); + let arg_value: u64 = 1; let config = ClientConfigLoader::new() - .with_remote_cache_timeout(arg_value) + .with_remote_cache_timeout(Some(arg_value)) .load()?; assert_eq!(config.remote_cache_timeout(), arg_value); @@ -137,7 +124,7 @@ mod test { assert_eq!( config.remote_cache_timeout(), - Some(env_value.parse::().unwrap()) + env_value.parse::().unwrap() ); Ok(()) @@ -145,76 +132,78 @@ mod test { #[test] fn test_client_arg_env_variable() -> Result<()> { + #[derive(Debug)] struct TestCase { arg: Option, env: String, - output: Option, + output: u64, } let tests = [ TestCase { arg: Some(0), env: String::from("0"), - output: None, + output: 0, }, TestCase { arg: Some(0), env: String::from("2"), - output: None, + 
output: 0, }, TestCase { arg: Some(0), env: String::from("garbage"), - output: None, + output: 0, }, TestCase { arg: Some(0), env: String::from(""), - output: None, + output: 0, }, TestCase { arg: Some(1), env: String::from("0"), - output: Some(1), + output: 1, }, TestCase { arg: Some(1), env: String::from("2"), - output: Some(1), + output: 1, }, TestCase { arg: Some(1), env: String::from("garbage"), - output: Some(1), + output: 1, }, TestCase { arg: Some(1), env: String::from(""), - output: Some(1), + output: 1, }, TestCase { arg: None, env: String::from("0"), - output: None, + output: 0, }, TestCase { arg: None, env: String::from("2"), - output: Some(2), + output: 2, }, TestCase { arg: None, env: String::from("garbage"), - output: Some(DEFAULT_TIMEOUT), + output: DEFAULT_TIMEOUT, }, TestCase { arg: None, env: String::from(""), - output: Some(DEFAULT_TIMEOUT), + output: DEFAULT_TIMEOUT, }, ]; for test in &tests { + println!("{:?}", test); let config = ClientConfigLoader::new() .with_remote_cache_timeout(test.arg) .with_environment({ diff --git a/crates/turborepo-lib/src/config/repo.rs b/crates/turborepo-lib/src/config/repo.rs index f960e57dba564..3d4615bf4c0d9 100644 --- a/crates/turborepo-lib/src/config/repo.rs +++ b/crates/turborepo-lib/src/config/repo.rs @@ -1,5 +1,6 @@ use std::{ collections::HashMap, + env, path::{Path, PathBuf}, }; @@ -21,10 +22,14 @@ pub struct RepoConfig { #[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq, Default)] struct RepoConfigValue { - apiurl: Option, - loginurl: Option, - teamslug: Option, - teamid: Option, + #[serde(rename = "apiurl")] + api_url: Option, + #[serde(rename = "loginurl")] + login_url: Option, + #[serde(rename = "teamslug")] + team_slug: Option, + #[serde(rename = "teamid")] + team_id: Option, } #[derive(Debug, Clone)] @@ -32,39 +37,42 @@ pub struct RepoConfigLoader { path: PathBuf, api: Option, login: Option, - teamslug: Option, + team_slug: Option, environment: Option>, } impl RepoConfig { 
#[allow(dead_code)] pub fn api_url(&self) -> &str { - self.config.apiurl.as_deref().unwrap_or(DEFAULT_API_URL) + self.config.api_url.as_deref().unwrap_or(DEFAULT_API_URL) } #[allow(dead_code)] pub fn login_url(&self) -> &str { - self.config.loginurl.as_deref().unwrap_or(DEFAULT_LOGIN_URL) + self.config + .login_url + .as_deref() + .unwrap_or(DEFAULT_LOGIN_URL) } #[allow(dead_code)] pub fn team_slug(&self) -> Option<&str> { - self.config.teamslug.as_deref() + self.config.team_slug.as_deref() } #[allow(dead_code)] pub fn team_id(&self) -> Option<&str> { - self.config.teamid.as_deref() + self.config.team_id.as_deref() } /// Sets the team id and clears the team slug, since it may have been from /// an old team #[allow(dead_code)] pub fn set_team_id(&mut self, team_id: Option) -> Result<()> { - self.disk_config.teamslug = None; - self.config.teamslug = None; - self.disk_config.teamid = team_id.clone(); - self.config.teamid = team_id; + self.disk_config.team_slug = None; + self.config.team_slug = None; + self.disk_config.team_id = team_id.clone(); + self.config.team_id = team_id; self.write_to_disk() } @@ -85,7 +93,7 @@ impl RepoConfigLoader { path, api: None, login: None, - teamslug: None, + team_slug: None, environment: None, } } @@ -104,7 +112,7 @@ impl RepoConfigLoader { #[allow(dead_code)] pub fn with_team_slug(mut self, team_slug: Option) -> Self { - self.teamslug = team_slug; + self.team_slug = team_slug; self } @@ -120,7 +128,7 @@ impl RepoConfigLoader { path, api, login, - teamslug, + team_slug, environment, } = self; let raw_disk_config = Config::builder() @@ -131,7 +139,7 @@ impl RepoConfigLoader { ) .build()?; - let has_teamslug_override = teamslug.is_some(); + let has_team_slug_override = team_slug.is_some(); let mut config: RepoConfigValue = Config::builder() .add_source(raw_disk_config.clone()) @@ -144,7 +152,7 @@ impl RepoConfigLoader { ) .set_override_option("apiurl", api)? .set_override_option("loginurl", login)? 
- .set_override_option("teamslug", teamslug)? + .set_override_option("teamslug", team_slug)? // set teamid to none if teamslug present .build()? .try_deserialize()?; @@ -153,8 +161,13 @@ impl RepoConfigLoader { // If teamid was passed via command line flag we ignore team slug as it // might not match. - if has_teamslug_override { - config.teamid = None; + if has_team_slug_override { + config.team_id = None; + } + + // We don't set this above because it's specific to team_id + if let Ok(vercel_artifacts_owner) = env::var("VERCEL_ARTIFACTS_OWNER") { + config.team_id = Some(vercel_artifacts_owner); } Ok(RepoConfig { @@ -173,6 +186,14 @@ mod test { use super::*; + #[test] + fn test_repo_config_when_missing() -> Result<()> { + let config = RepoConfigLoader::new(PathBuf::from("missing")).load(); + assert!(config.is_ok()); + + Ok(()) + } + #[test] fn test_repo_config_with_team_and_api_flags() -> Result<()> { let mut config_file = NamedTempFile::new()?; @@ -190,6 +211,17 @@ mod test { Ok(()) } + #[test] + fn test_repo_config_includes_defaults() { + let config = RepoConfigLoader::new(PathBuf::from("missing")) + .load() + .unwrap(); + assert_eq!(config.api_url(), DEFAULT_API_URL); + assert_eq!(config.login_url(), DEFAULT_LOGIN_URL); + assert_eq!(config.team_slug(), None); + assert_eq!(config.team_id(), None); + } + #[test] fn test_team_override_clears_id() -> Result<()> { let mut config_file = NamedTempFile::new()?; diff --git a/crates/turborepo-lib/src/config/user.rs b/crates/turborepo-lib/src/config/user.rs index aa03910249096..487bb1b3485fc 100644 --- a/crates/turborepo-lib/src/config/user.rs +++ b/crates/turborepo-lib/src/config/user.rs @@ -92,7 +92,8 @@ impl UserConfigLoader { let config = Config::builder() .add_source(raw_disk_config.clone()) - .add_source(Environment::with_prefix("turbo").source(environment)) + .add_source(Environment::with_prefix("TURBO").source(environment.clone())) + .add_source(Environment::with_prefix("VERCEL_ARTIFACTS").source(environment)) 
.set_override_option("token", token)? .build()? .try_deserialize()?; @@ -144,19 +145,28 @@ mod test { Ok(()) } + static TOKEN_ENV_VARS: [&'static str; 2] = ["TURBO_TOKEN", "VERCEL_ARTIFACTS_TOKEN"]; + #[test] fn test_env_var_trumps_disk() -> Result<()> { let mut config_file = NamedTempFile::new()?; writeln!(&mut config_file, "{{\"token\": \"foo\"}}")?; - let env = { - let mut map = HashMap::new(); - map.insert("TURBO_TOKEN".into(), "bar".into()); - map - }; - let config = UserConfigLoader::new(config_file.path().to_path_buf()) - .with_environment(Some(env)) - .load()?; - assert_eq!(config.token(), Some("bar")); + + for (idx, env_var) in TOKEN_ENV_VARS.into_iter().enumerate() { + let env_var_value = format!("bar{}", idx); + + let env = { + let mut map = HashMap::new(); + map.insert(env_var.into(), env_var_value.clone()); + map + }; + let config = UserConfigLoader::new(config_file.path().to_path_buf()) + .with_environment(Some(env)) + .load()?; + + assert_eq!(config.token(), Some(env_var_value.as_str())); + } + Ok(()) } } diff --git a/crates/turborepo-lib/src/execution_state.rs b/crates/turborepo-lib/src/execution_state.rs new file mode 100644 index 0000000000000..9f6558e8f8c3d --- /dev/null +++ b/crates/turborepo-lib/src/execution_state.rs @@ -0,0 +1,46 @@ +use serde::Serialize; + +use crate::{cli::Args, commands::CommandBase}; + +#[derive(Debug, Serialize)] +pub struct ExecutionState<'a> { + pub api_client_config: APIClientConfig<'a>, + pub cli_args: &'a Args, +} + +#[derive(Debug, Serialize, Default)] +pub struct APIClientConfig<'a> { + // Comes from user config, i.e. $XDG_CONFIG_HOME/turborepo/config.json + pub token: Option<&'a str>, + // Comes from repo config, i.e. 
./.turbo/config.json + pub team_id: Option<&'a str>, + pub team_slug: Option<&'a str>, + pub api_url: &'a str, + pub use_preflight: bool, + pub timeout: u64, +} + +impl<'a> TryFrom<&'a CommandBase> for ExecutionState<'a> { + type Error = anyhow::Error; + + fn try_from(base: &'a CommandBase) -> Result { + let repo_config = base.repo_config()?; + let user_config = base.user_config()?; + let client_config = base.client_config()?; + let args = base.args(); + + let remote_config = APIClientConfig { + token: user_config.token(), + team_id: repo_config.team_id(), + team_slug: repo_config.team_slug(), + api_url: repo_config.api_url(), + use_preflight: args.preflight, + timeout: client_config.remote_cache_timeout(), + }; + + Ok(ExecutionState { + api_client_config: remote_config, + cli_args: base.args(), + }) + } +} diff --git a/crates/turborepo-lib/src/lib.rs b/crates/turborepo-lib/src/lib.rs index 7aa29855de0ae..baa0d97e9c0a1 100644 --- a/crates/turborepo-lib/src/lib.rs +++ b/crates/turborepo-lib/src/lib.rs @@ -5,6 +5,7 @@ mod cli; mod commands; mod config; mod daemon; +mod execution_state; pub(crate) mod globwatcher; mod package_manager; mod shim; @@ -14,15 +15,15 @@ use anyhow::Result; pub use child::spawn_child; use log::error; -pub use crate::cli::Args; -use crate::package_manager::PackageManager; +pub use crate::{cli::Args, execution_state::ExecutionState}; +use crate::{commands::CommandBase, package_manager::PackageManager}; /// The payload from running main, if the program can complete without using Go -/// the Rust variant will be returned. If Go is needed then the args that -/// should be passed to Go will be returned. +/// the Rust variant will be returned. If Go is needed then the execution state +/// that should be passed to Go will be returned. 
pub enum Payload { Rust(Result), - Go(Box), + Go(Box), } pub fn get_version() -> &'static str { diff --git a/crates/turborepo/src/main.rs b/crates/turborepo/src/main.rs index 84ee07a7bde3c..c917dd0b50010 100644 --- a/crates/turborepo/src/main.rs +++ b/crates/turborepo/src/main.rs @@ -7,9 +7,9 @@ use std::{ use anyhow::Result; use dunce::canonicalize as fs_canonicalize; use log::{debug, error, trace}; -use turborepo_lib::{spawn_child, Args, Payload}; +use turborepo_lib::{spawn_child, ExecutionState, Payload}; -fn run_go_binary(args: Args) -> Result { +fn run_go_binary(execution_state: ExecutionState) -> Result { // canonicalize the binary path to ensure we can find go-turbo let turbo_path = fs_canonicalize(current_exe()?)?; let mut go_binary_path = turbo_path.clone(); @@ -41,7 +41,13 @@ fn run_go_binary(args: Args) -> Result { )); } - let serialized_args = serde_json::to_string(&args)?; + if execution_state.cli_args.test_run { + let serialized_args = serde_json::to_string_pretty(&execution_state)?; + println!("{}", serialized_args); + return Ok(0); + } + + let serialized_args = serde_json::to_string(&execution_state)?; trace!("Invoking go binary with {}", serialized_args); let mut command = process::Command::new(go_binary_path); command @@ -60,7 +66,7 @@ fn run_go_binary(args: Args) -> Result { fn main() -> Result<()> { let exit_code = match turborepo_lib::main() { Payload::Rust(res) => res.unwrap_or(1), - Payload::Go(state) => run_go_binary(*state)?, + Payload::Go(base) => run_go_binary((&*base).try_into()?)?, }; process::exit(exit_code) diff --git a/turborepo-tests/integration/tests/api-client-config.t b/turborepo-tests/integration/tests/api-client-config.t new file mode 100644 index 0000000000000..a53706b9e5202 --- /dev/null +++ b/turborepo-tests/integration/tests/api-client-config.t @@ -0,0 +1,42 @@ +Setup + $ . ${TESTDIR}/../../helpers/setup.sh + $ . 
${TESTDIR}/_helpers/setup_monorepo.sh $(pwd) + +Run test run + $ ${TURBO} run build --__test-run | jq .remote_config + { + "token": null, + "team_id": null, + "team_slug": null, + "api_url": "https://vercel.com/api", + "use_preflight": false, + "timeout": 20 + } + +Run test run with api overloaded + $ ${TURBO} run build --__test-run --api http://localhost:8000 | jq .remote_config.api_url + null + +Run test run with token overloaded + $ ${TURBO} run build --__test-run --token 1234567890 | jq .remote_config.token + "1234567890" + +Run test run with token overloaded from both TURBO_TOKEN and VERCEL_ARTIFACTS_TOKEN + $ TURBO_TOKEN=turbo VERCEL_ARTIFACTS_TOKEN=vercel ${TURBO} run build --__test-run | jq .remote_config.token + "vercel" + +Run test run with team overloaded + $ ${TURBO} run build --__test-run --team vercel | jq .remote_config.team_slug + "vercel" + +Run test run with team overloaded from both env and flag (flag should take precedence) + $ TURBO_TEAM=vercel ${TURBO} run build --__test-run --team turbo | jq .remote_config.team_slug + "turbo" + +Run test run with remote cache timeout env variable set + $ TURBO_REMOTE_CACHE_TIMEOUT=123 ${TURBO} run build --__test-run | jq .remote_config.timeout + 123 + +Run test run with remote cache timeout from both env and flag (flag should take precedence) + $ TURBO_REMOTE_CACHE_TIMEOUT=123 ${TURBO} run build --__test-run --remote-cache-timeout 456 | jq .remote_config.timeout + 456 From 85a95ea6eee4e7509b1b92f513c844db9e6e7de4 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 25 Apr 2023 21:36:28 +0100 Subject: [PATCH 08/24] release(turborepo): 1.9.4-canary.3 (#4698) Co-authored-by: Turbobot --- cli/cmd/turbo/version.go | 2 +- packages/create-turbo/package.json | 2 +- packages/eslint-config-turbo/package.json | 2 +- packages/eslint-plugin-turbo/package.json | 2 +- packages/turbo-codemod/package.json | 2 +- packages/turbo-ignore/package.json | 2 +- 
packages/turbo-workspaces/package.json | 2 +- packages/turbo/package.json | 14 +++++++------- version.txt | 2 +- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/cli/cmd/turbo/version.go b/cli/cmd/turbo/version.go index 3febcaaf85643..d9c367fc59d21 100644 --- a/cli/cmd/turbo/version.go +++ b/cli/cmd/turbo/version.go @@ -1,3 +1,3 @@ package main -const turboVersion = "1.9.4-canary.2" +const turboVersion = "1.9.4-canary.3" diff --git a/packages/create-turbo/package.json b/packages/create-turbo/package.json index 9f723bac41437..c5bbbf82e3d80 100644 --- a/packages/create-turbo/package.json +++ b/packages/create-turbo/package.json @@ -1,6 +1,6 @@ { "name": "create-turbo", - "version": "1.9.4-canary.2", + "version": "1.9.4-canary.3", "description": "Create a new Turborepo", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", diff --git a/packages/eslint-config-turbo/package.json b/packages/eslint-config-turbo/package.json index 88717ca14331e..b4e0148c1fa24 100644 --- a/packages/eslint-config-turbo/package.json +++ b/packages/eslint-config-turbo/package.json @@ -1,6 +1,6 @@ { "name": "eslint-config-turbo", - "version": "1.9.4-canary.2", + "version": "1.9.4-canary.3", "description": "ESLint config for Turborepo", "repository": { "type": "git", diff --git a/packages/eslint-plugin-turbo/package.json b/packages/eslint-plugin-turbo/package.json index d19eef2b4f73c..b37f76f257f6d 100644 --- a/packages/eslint-plugin-turbo/package.json +++ b/packages/eslint-plugin-turbo/package.json @@ -1,6 +1,6 @@ { "name": "eslint-plugin-turbo", - "version": "1.9.4-canary.2", + "version": "1.9.4-canary.3", "description": "ESLint plugin for Turborepo", "keywords": [ "turbo", diff --git a/packages/turbo-codemod/package.json b/packages/turbo-codemod/package.json index d16bb0585c324..d3d5c890b2d8e 100644 --- a/packages/turbo-codemod/package.json +++ b/packages/turbo-codemod/package.json @@ -1,6 +1,6 @@ { "name": "@turbo/codemod", - "version": "1.9.4-canary.2", + "version": 
"1.9.4-canary.3", "description": "Provides Codemod transformations to help upgrade your Turborepo codebase when a feature is deprecated.", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", diff --git a/packages/turbo-ignore/package.json b/packages/turbo-ignore/package.json index 0fae0722a5db7..7089f9b12ebf9 100644 --- a/packages/turbo-ignore/package.json +++ b/packages/turbo-ignore/package.json @@ -1,6 +1,6 @@ { "name": "turbo-ignore", - "version": "1.9.4-canary.2", + "version": "1.9.4-canary.3", "description": "", "homepage": "https://turbo.build/repo", "keywords": [], diff --git a/packages/turbo-workspaces/package.json b/packages/turbo-workspaces/package.json index 5dd24902d0d77..f007e4d066285 100644 --- a/packages/turbo-workspaces/package.json +++ b/packages/turbo-workspaces/package.json @@ -1,6 +1,6 @@ { "name": "@turbo/workspaces", - "version": "1.9.4-canary.2", + "version": "1.9.4-canary.3", "description": "Tools for working with package managers", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", diff --git a/packages/turbo/package.json b/packages/turbo/package.json index 4627a334ecabf..f74b1fd71683e 100644 --- a/packages/turbo/package.json +++ b/packages/turbo/package.json @@ -1,6 +1,6 @@ { "name": "turbo", - "version": "1.9.4-canary.2", + "version": "1.9.4-canary.3", "description": "Turborepo is a high-performance build system for JavaScript and TypeScript codebases.", "repository": "https://github.com/vercel/turbo", "bugs": "https://github.com/vercel/turbo/issues", @@ -19,11 +19,11 @@ "install.js" ], "optionalDependencies": { - "turbo-darwin-64": "1.9.4-canary.2", - "turbo-darwin-arm64": "1.9.4-canary.2", - "turbo-linux-64": "1.9.4-canary.2", - "turbo-linux-arm64": "1.9.4-canary.2", - "turbo-windows-64": "1.9.4-canary.2", - "turbo-windows-arm64": "1.9.4-canary.2" + "turbo-darwin-64": "1.9.4-canary.3", + "turbo-darwin-arm64": "1.9.4-canary.3", + "turbo-linux-64": "1.9.4-canary.3", + "turbo-linux-arm64": "1.9.4-canary.3", + 
"turbo-windows-64": "1.9.4-canary.3", + "turbo-windows-arm64": "1.9.4-canary.3" } } diff --git a/version.txt b/version.txt index 445bd2626febb..439cfa4bb7668 100644 --- a/version.txt +++ b/version.txt @@ -1,2 +1,2 @@ -1.9.4-canary.2 +1.9.4-canary.3 canary From 3b3243a4352ed9bbe2df597724d43d5b7a66d369 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Tue, 25 Apr 2023 14:51:32 -0700 Subject: [PATCH 09/24] Existing and error logs behavior (#4656) ### Description - Adds an integration test for existing behavior (`continue.t`) - Fixes behavior of `--output-logs=errors-only` to match behavior for `full` in the case of a task that errors - Fixes `--continue` behavior to report all failed tasks at the end ### Testing Instructions New `continue.t` integration test. I recommend viewing the changes to that file at each commit to see initial behavior and how it changes. Fixes #4504 link WEB-895 --- cli/internal/core/engine.go | 99 ++++++++++++++----- cli/internal/core/engine_test.go | 2 +- cli/internal/run/real_run.go | 11 +-- cli/internal/runcache/runcache.go | 1 + .../monorepo_dependency_error/.gitignore | 3 + .../apps/my-app/package.json | 9 ++ .../apps/other-app/package.json | 9 ++ .../apps/some-lib/package.json | 6 ++ .../monorepo_dependency_error/package.json | 6 ++ .../monorepo_dependency_error/turbo.json | 9 ++ turborepo-tests/integration/tests/continue.t | 85 ++++++++++++++++ 11 files changed, 208 insertions(+), 32 deletions(-) create mode 100644 turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/.gitignore create mode 100644 turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/my-app/package.json create mode 100644 turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/other-app/package.json create mode 100644 turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/some-lib/package.json create mode 100644 turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/package.json 
create mode 100644 turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/turbo.json create mode 100644 turborepo-tests/integration/tests/continue.t diff --git a/cli/internal/core/engine.go b/cli/internal/core/engine.go index 7f08ea8ed7a35..f6e3a7a91d42c 100644 --- a/cli/internal/core/engine.go +++ b/cli/internal/core/engine.go @@ -6,6 +6,7 @@ import ( "os" "sort" "strings" + "sync" "sync/atomic" "github.com/vercel/turbo/cli/internal/fs" @@ -72,40 +73,88 @@ type EngineExecutionOptions struct { Concurrency int } +// StopExecutionSentinel is used to return an error from a graph Walk that indicates that +// all further walking should stop. +type StopExecutionSentinel struct { + err error +} + +// StopExecution wraps the given error in a sentinel error indicating that +// graph traversal should stop. Note that this will stop all tasks, not just +// downstream tasks. +func StopExecution(reason error) *StopExecutionSentinel { + return &StopExecutionSentinel{ + err: reason, + } +} + +// Error implements error.Error for StopExecutionSentinel +func (se *StopExecutionSentinel) Error() string { + return fmt.Sprintf("Execution stopped due to error: %v", se.err) +} + // Execute executes the pipeline, constructing an internal task graph and walking it accordingly. func (e *Engine) Execute(visitor Visitor, opts EngineExecutionOptions) []error { var sema = util.NewSemaphore(opts.Concurrency) var errored int32 - return e.TaskGraph.Walk(func(v dag.Vertex) error { - // If something has already errored, short-circuit. - // There is a race here between concurrent tasks. However, if there is not a - // dependency edge between them, we are not required to have a strict order - // between them, so a failed task can fail to short-circuit a concurrent - // task that happened to be starting at the same time. 
- if atomic.LoadInt32(&errored) != 0 { - return nil - } - // Each vertex in the graph is a taskID (package#task format) - taskID := dag.VertexName(v) - // Always return if it is the root node - if strings.Contains(taskID, ROOT_NODE_NAME) { - return nil - } + // The dag library's behavior is that returning an error from the Walk callback cancels downstream + // tasks, but not unrelated tasks. + // The behavior we want is to either cancel everything or nothing (--continue). So, we do our own + // error handling. Collect any errors that occur in "errors", and report them as the result of + // Execute. panic on any other error returned by Walk. + var errorMu sync.Mutex + var errors []error + recordErr := func(err error) { + errorMu.Lock() + defer errorMu.Unlock() + errors = append(errors, err) + } + unusedErrs := e.TaskGraph.Walk(func(v dag.Vertex) error { + // Use an extra func() to ensure that we are not returning any errors to Walk + func() { + // If something has already errored, short-circuit. + // There is a race here between concurrent tasks. However, if there is not a + // dependency edge between them, we are not required to have a strict order + // between them, so a failed task can fail to short-circuit a concurrent + // task that happened to be starting at the same time. 
+ if atomic.LoadInt32(&errored) != 0 { + return + } + // Each vertex in the graph is a taskID (package#task format) + taskID := dag.VertexName(v) - // Acquire the semaphore unless parallel - if !opts.Parallel { - sema.Acquire() - defer sema.Release() - } + // Always return if it is the root node + if strings.Contains(taskID, ROOT_NODE_NAME) { + return + } - if err := visitor(taskID); err != nil { - // We only ever flip from false to true, so we don't need to compare and swap the atomic - atomic.StoreInt32(&errored, 1) - return err - } + // Acquire the semaphore unless parallel + if !opts.Parallel { + sema.Acquire() + defer sema.Release() + } + + if err := visitor(taskID); err != nil { + if se, ok := err.(*StopExecutionSentinel); ok { + // We only ever flip from false to true, so we don't need to compare and swap the atomic + atomic.StoreInt32(&errored, 1) + recordErr(se.err) + // Note: returning an error here would cancel execution of downstream tasks only, and show + // up in the errors returned from Walk. However, we are doing our own error collection + // and intentionally ignoring errors from walk, so fallthrough and use the "errored" mechanism + // to skip downstream tasks + } else { + recordErr(err) + } + } + }() return nil }) + if len(unusedErrs) > 0 { + panic("we should be handling execution errors via our own errors + errored mechanism") + } + return errors } // MissingTaskError is a specialized Error thrown in the case that we can't find a task. 
diff --git a/cli/internal/core/engine_test.go b/cli/internal/core/engine_test.go index a92264de29cc1..3ce00bf44573f 100644 --- a/cli/internal/core/engine_test.go +++ b/cli/internal/core/engine_test.go @@ -71,7 +71,7 @@ func TestShortCircuiting(t *testing.T) { println(taskID) executed[taskID] = true if taskID == "b#build" { - return expectedErr + return StopExecution(expectedErr) } return nil } diff --git a/cli/internal/run/real_run.go b/cli/internal/run/real_run.go index 32c79659f3e28..9f869e1c7b632 100644 --- a/cli/internal/run/real_run.go +++ b/cli/internal/run/real_run.go @@ -322,7 +322,7 @@ func (ec *execContext) exec(ctx gocontext.Context, packageTask *nodes.PackageTas ec.logError(prettyPrefix, err) if !ec.rs.Opts.runOpts.ContinueOnError { - return nil, errors.Wrapf(err, "failed to capture outputs for \"%v\"", packageTask.TaskID) + return nil, core.StopExecution(errors.Wrapf(err, "failed to capture outputs for \"%v\"", packageTask.TaskID)) } } @@ -381,19 +381,18 @@ func (ec *execContext) exec(ctx gocontext.Context, packageTask *nodes.PackageTas tracer(runsummary.TargetBuildFailed, err, nil) } + // If there was an error, flush the buffered output + taskCache.OnError(prefixedUI, progressLogger) progressLogger.Error(fmt.Sprintf("Error: command finished with error: %v", err)) if !ec.rs.Opts.runOpts.ContinueOnError { prefixedUI.Error(fmt.Sprintf("ERROR: command finished with error: %s", err)) ec.processes.Close() + // We're not continuing, stop graph traversal + err = core.StopExecution(err) } else { prefixedUI.Warn("command finished with error, but continuing...") - // Set to nil so we don't short-circuit any other execution - err = nil } - // If there was an error, flush the buffered output - taskCache.OnError(prefixedUI, progressLogger) - return taskExecutionSummary, err } diff --git a/cli/internal/runcache/runcache.go b/cli/internal/runcache/runcache.go index ba6145b7d9661..1355fb45eed5c 100644 --- a/cli/internal/runcache/runcache.go +++ 
b/cli/internal/runcache/runcache.go @@ -190,6 +190,7 @@ func (tc TaskCache) ReplayLogFile(prefixedUI *cli.PrefixedUi, progressLogger hcl // This is called if the task exited with an non-zero error code. func (tc TaskCache) OnError(terminal *cli.PrefixedUi, logger hclog.Logger) { if tc.taskOutputMode == util.ErrorTaskOutput { + terminal.Output(fmt.Sprintf("cache miss, executing %s", ui.Dim(tc.hash))) tc.ReplayLogFile(terminal, logger) } } diff --git a/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/.gitignore b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/.gitignore new file mode 100644 index 0000000000000..77af9fc60321d --- /dev/null +++ b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/.gitignore @@ -0,0 +1,3 @@ +node_modules/ +.turbo +.npmrc diff --git a/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/my-app/package.json b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/my-app/package.json new file mode 100644 index 0000000000000..acfc61307753d --- /dev/null +++ b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/my-app/package.json @@ -0,0 +1,9 @@ +{ + "name": "my-app", + "scripts": { + "build": "echo 'working'" + }, + "dependencies": { + "some-lib": "*" + } +} diff --git a/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/other-app/package.json b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/other-app/package.json new file mode 100644 index 0000000000000..1985ed79922d3 --- /dev/null +++ b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/other-app/package.json @@ -0,0 +1,9 @@ +{ + "name": "other-app", + "scripts": { + "build": "exit 3" + }, + "dependencies": { + "some-lib": "*" + } +} diff --git a/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/some-lib/package.json 
b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/some-lib/package.json new file mode 100644 index 0000000000000..496cb9f3627d9 --- /dev/null +++ b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/apps/some-lib/package.json @@ -0,0 +1,6 @@ +{ + "name": "some-lib", + "scripts": { + "build": "exit 2" + } +} diff --git a/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/package.json b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/package.json new file mode 100644 index 0000000000000..9557291c8197e --- /dev/null +++ b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/package.json @@ -0,0 +1,6 @@ +{ + "name": "monorepo", + "workspaces": [ + "apps/**" + ] +} diff --git a/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/turbo.json b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/turbo.json new file mode 100644 index 0000000000000..30922a61b1baf --- /dev/null +++ b/turborepo-tests/integration/tests/_fixtures/monorepo_dependency_error/turbo.json @@ -0,0 +1,9 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build": { + "dependsOn": ["^build"], + "outputs": [] + } + } +} diff --git a/turborepo-tests/integration/tests/continue.t b/turborepo-tests/integration/tests/continue.t new file mode 100644 index 0000000000000..f750394d8aea1 --- /dev/null +++ b/turborepo-tests/integration/tests/continue.t @@ -0,0 +1,85 @@ +Setup + $ . ${TESTDIR}/../../helpers/setup.sh + $ . 
${TESTDIR}/_helpers/setup_monorepo.sh $(pwd) monorepo_dependency_error +Run without --continue + $ ${TURBO} build + \xe2\x80\xa2 Packages in scope: my-app, other-app, some-lib (esc) + \xe2\x80\xa2 Running build in 3 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + some-lib:build: cache miss, executing 3494007308f52ad6 + some-lib:build: + some-lib:build: > build + some-lib:build: > exit 2 + some-lib:build: + some-lib:build: npm ERR! Lifecycle script `build` failed with error: + some-lib:build: npm ERR! Error: command failed + some-lib:build: npm ERR! in workspace: some-lib + some-lib:build: npm ERR! at location: (.*)/apps/some-lib (re) + some-lib:build: ERROR: command finished with error: command \((.*)/apps/some-lib\) npm run build exited \(1\) (re) + command \((.*)/apps/some-lib\) npm run build exited \(1\) (re) + + Tasks: 0 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + + ERROR run failed: command exited (1) + [1] + +Run without --continue, and with only errors. + $ ${TURBO} build --output-logs=errors-only + \xe2\x80\xa2 Packages in scope: my-app, other-app, some-lib (esc) + \xe2\x80\xa2 Running build in 3 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + some-lib:build: cache miss, executing 3494007308f52ad6 + some-lib:build: + some-lib:build: > build + some-lib:build: > exit 2 + some-lib:build: + some-lib:build: npm ERR! Lifecycle script `build` failed with error: + some-lib:build: npm ERR! Error: command failed + some-lib:build: npm ERR! in workspace: some-lib + some-lib:build: npm ERR! 
at location: (.*)/apps/some-lib (re) + some-lib:build: ERROR: command finished with error: command \((.*)/apps/some-lib\) npm run build exited \(1\) (re) + command \((.*)/apps/some-lib\) npm run build exited \(1\) (re) + + Tasks: 0 successful, 1 total + Cached: 0 cached, 1 total + Time:\s*[\.0-9]+m?s (re) + + ERROR run failed: command exited (1) + [1] + +Run with --continue + $ ${TURBO} build --output-logs=errors-only --continue + \xe2\x80\xa2 Packages in scope: my-app, other-app, some-lib (esc) + \xe2\x80\xa2 Running build in 3 packages (esc) + \xe2\x80\xa2 Remote caching disabled (esc) + some-lib:build: cache miss, executing 3494007308f52ad6 + some-lib:build: + some-lib:build: > build + some-lib:build: > exit 2 + some-lib:build: + some-lib:build: npm ERR! Lifecycle script `build` failed with error: + some-lib:build: npm ERR! Error: command failed + some-lib:build: npm ERR! in workspace: some-lib + some-lib:build: npm ERR! at location: (.*)/apps/some-lib (re) + some-lib:build: command finished with error, but continuing... + other-app:build: cache miss, executing af6505fe5634a5f5 + other-app:build: + other-app:build: > build + other-app:build: > exit 3 + other-app:build: + other-app:build: npm ERR! Lifecycle script `build` failed with error: + other-app:build: npm ERR! Error: command failed + other-app:build: npm ERR! in workspace: other-app + other-app:build: npm ERR! at location: (.*)/apps/other-app (re) + other-app:build: command finished with error, but continuing... 
+ command \((.*)/apps/some-lib\) npm run build exited \(1\) (re) + command \((.*)/apps/other-app\) npm run build exited \(1\) (re) + + Tasks: 1 successful, 3 total + Cached: 0 cached, 3 total + Time:\s*[\.0-9]+m?s (re) + + ERROR run failed: command exited (1) + [1] From 946307c811065e925929f7c5a2872cf666e3aeeb Mon Sep 17 00:00:00 2001 From: OJ Kwon <1210596+kwonoj@users.noreply.github.com> Date: Tue, 25 Apr 2023 18:03:51 -0700 Subject: [PATCH 10/24] refactor(turbopack): remove next.js specific unsupported warn (#4697) ### Description WEB-953. This PR removes check for next.js specific unsupported packages. Instead, let next-* (next-core) handles it via resolve plugin. See https://github.com/vercel/next.js/pull/48837 for the corresponding change. This change makes turbopack does not need to aware specific next.js package names. --- crates/turbopack/src/lib.rs | 57 ++----------------------------------- 1 file changed, 3 insertions(+), 54 deletions(-) diff --git a/crates/turbopack/src/lib.rs b/crates/turbopack/src/lib.rs index 4e3f495549330..3cc0f0189ec13 100644 --- a/crates/turbopack/src/lib.rs +++ b/crates/turbopack/src/lib.rs @@ -21,7 +21,6 @@ use ecmascript::{ EcmascriptModuleAssetVc, }; use graph::{aggregate, AggregatedGraphNodeContent, AggregatedGraphVc}; -use lazy_static::lazy_static; use module_options::{ ModuleOptionsContextVc, ModuleOptionsVc, ModuleRuleEffect, ModuleType, ModuleTypeVc, }; @@ -36,16 +35,13 @@ use turbopack_core::{ compile_time_info::CompileTimeInfoVc, context::{AssetContext, AssetContextVc}, ident::AssetIdentVc, - issue::{unsupported_module::UnsupportedModuleIssue, Issue, IssueVc}, + issue::{Issue, IssueVc}, plugin::CustomModuleType, reference::all_referenced_assets, reference_type::{EcmaScriptModulesReferenceSubType, ReferenceType}, resolve::{ - options::ResolveOptionsVc, - origin::PlainResolveOriginVc, - parse::{Request, RequestVc}, - pattern::Pattern, - resolve, ModulePartVc, ResolveResultVc, + options::ResolveOptionsVc, 
origin::PlainResolveOriginVc, parse::RequestVc, resolve, + ModulePartVc, ResolveResultVc, }, }; @@ -71,12 +67,6 @@ use self::{ transition::{TransitionVc, TransitionsByNameVc}, }; -lazy_static! { - static ref UNSUPPORTED_PACKAGES: HashSet = ["@vercel/og".to_owned()].into(); - static ref UNSUPPORTED_PACKAGE_PATHS: HashSet<(String, String)> = - [("@next/font".to_owned(), "/local".to_owned())].into(); -} - #[turbo_tasks::value] struct ModuleIssue { ident: AssetIdentVc, @@ -393,8 +383,6 @@ impl AssetContext for ModuleAssetContext { resolve_options: ResolveOptionsVc, reference_type: Value, ) -> Result { - warn_on_unsupported_modules(request, origin_path).await?; - let context_path = origin_path.parent().resolve().await?; let result = resolve(context_path, request, resolve_options); @@ -574,45 +562,6 @@ async fn top_references(list: ReferencesListVc) -> Result { .into()) } -async fn warn_on_unsupported_modules( - request: RequestVc, - origin_path: FileSystemPathVc, -) -> Result<()> { - if let Request::Module { - module, - path, - query: _, - } = &*request.await? - { - // Warn if the package is known not to be supported by Turbopack at the moment. 
- if UNSUPPORTED_PACKAGES.contains(module) { - UnsupportedModuleIssue { - context: origin_path, - package: module.into(), - package_path: None, - } - .cell() - .as_issue() - .emit(); - } - - if let Pattern::Constant(path) = path { - if UNSUPPORTED_PACKAGE_PATHS.contains(&(module.to_string(), path.to_owned())) { - UnsupportedModuleIssue { - context: origin_path, - package: module.into(), - package_path: Some(path.to_owned()), - } - .cell() - .as_issue() - .emit(); - } - } - } - - Ok(()) -} - pub fn register() { turbo_tasks::register(); turbo_tasks_fs::register(); From 1dfda18425a6dcde22cbd04ffa76611212438d01 Mon Sep 17 00:00:00 2001 From: Leah Date: Wed, 26 Apr 2023 08:31:19 +0200 Subject: [PATCH 11/24] add noop refresh runtime to prevent errors after module execution (#4701) ### Description We add some noop handlers to make sure code won't throw errors when something tries to access the refresh runtime outside the initial module execution next.js does this here https://github.com/vercel/next.js/blob/f12aa35b0d85aa28223e06ee6f6a98247262815e/packages/react-refresh-utils/ReactRefreshWebpackPlugin.ts#L16-L30 I'm not 100% sure this is the correct fix fix WEB-823 --- crates/turbopack-dev/js/src/runtime.js | 8 ++++++++ ...tests_snapshot_basic_async_chunk_input_index_0d348e.js | 8 ++++++++ ...sts_tests_snapshot_basic_chunked_input_index_e77e9f.js | 8 ++++++++ ...sts_tests_snapshot_basic_shebang_input_index_b1f0c2.js | 8 ++++++++ ...s_tests_snapshot_comptime_define_input_index_6b0d2b.js | 8 ++++++++ ...snapshot_css_absolute-uri-import_input_index_fa9a30.js | 8 ++++++++ ...ack-tests_tests_snapshot_css_css_input_index_37a138.js | 8 ++++++++ ...s_tests_snapshot_emotion_emotion_input_index_b080c4.js | 8 ++++++++ ...ack-tests_tests_snapshot_env_env_input_index_29a23f.js | 8 ++++++++ ...t_evaluated_entrry_runtime_entry_input_index_f59cc7.js | 8 ++++++++ ...s_tests_snapshot_example_example_input_index_78b6bf.js | 8 ++++++++ ...tests_snapshot_export-alls_cjs-2_input_index_289ae7.js 
| 8 ++++++++ ..._snapshot_export-alls_cjs-script_input_index_3e96b7.js | 8 ++++++++ ...s_tests_snapshot_import-meta_cjs_input_index_537553.js | 8 ++++++++ ...napshot_import-meta_esm-multiple_input_index_c00392.js | 8 ++++++++ ...snapshot_import-meta_esm-mutable_input_index_6c9201.js | 8 ++++++++ ..._snapshot_import-meta_esm-object_input_index_6fcf7d.js | 8 ++++++++ ...s_tests_snapshot_import-meta_esm_input_index_c4c88a.js | 8 ++++++++ ...s_tests_snapshot_import-meta_url_input_index_988b57.js | 8 ++++++++ ...s_tests_snapshot_imports_dynamic_input_index_45c162.js | 8 ++++++++ ...ests_tests_snapshot_imports_json_input_index_961ae2.js | 8 ++++++++ ...apshot_imports_resolve_error_cjs_input_index_f8412b.js | 8 ++++++++ ...apshot_imports_resolve_error_esm_input_index_0b3e45.js | 8 ++++++++ ...pshot_imports_static-and-dynamic_input_index_ec8693.js | 8 ++++++++ ...ts_tests_snapshot_imports_static_input_index_885269.js | 8 ++++++++ ...shot_node_node_protocol_external_input_index_667edf.js | 8 ++++++++ ...led_components_styled_components_input_index_afc482.js | 8 ++++++++ ...rms_mono_transforms_input_packages_app_index_4a3d65.js | 8 ++++++++ ...apshot_swc_transforms_preset_env_input_index_9dcfd0.js | 8 ++++++++ ...shot_typescript_jsconfig-baseurl_input_index_8f1e58.js | 8 ++++++++ ...typescript_tsconfig-baseurl_input_index.ts_0aa04e._.js | 8 ++++++++ 31 files changed, 248 insertions(+) diff --git a/crates/turbopack-dev/js/src/runtime.js b/crates/turbopack-dev/js/src/runtime.js index 5a1d0134b82a0..2bf1fae71adec 100644 --- a/crates/turbopack-dev/js/src/runtime.js +++ b/crates/turbopack-dev/js/src/runtime.js @@ -458,6 +458,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. 
+globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/basic/async_chunk/output/crates_turbopack-tests_tests_snapshot_basic_async_chunk_input_index_0d348e.js b/crates/turbopack-tests/tests/snapshot/basic/async_chunk/output/crates_turbopack-tests_tests_snapshot_basic_async_chunk_input_index_0d348e.js index 84b20ba93016a..527f8d0795703 100644 --- a/crates/turbopack-tests/tests/snapshot/basic/async_chunk/output/crates_turbopack-tests_tests_snapshot_basic_async_chunk_input_index_0d348e.js +++ b/crates/turbopack-tests/tests/snapshot/basic/async_chunk/output/crates_turbopack-tests_tests_snapshot_basic_async_chunk_input_index_0d348e.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/basic/chunked/output/crates_turbopack-tests_tests_snapshot_basic_chunked_input_index_e77e9f.js b/crates/turbopack-tests/tests/snapshot/basic/chunked/output/crates_turbopack-tests_tests_snapshot_basic_chunked_input_index_e77e9f.js index 1c29f7f237b3e..4c7715e666fff 100644 --- a/crates/turbopack-tests/tests/snapshot/basic/chunked/output/crates_turbopack-tests_tests_snapshot_basic_chunked_input_index_e77e9f.js +++ b/crates/turbopack-tests/tests/snapshot/basic/chunked/output/crates_turbopack-tests_tests_snapshot_basic_chunked_input_index_e77e9f.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/basic/shebang/output/crates_turbopack-tests_tests_snapshot_basic_shebang_input_index_b1f0c2.js b/crates/turbopack-tests/tests/snapshot/basic/shebang/output/crates_turbopack-tests_tests_snapshot_basic_shebang_input_index_b1f0c2.js index 96cdccc754c4d..30e6eafd66c15 100644 --- a/crates/turbopack-tests/tests/snapshot/basic/shebang/output/crates_turbopack-tests_tests_snapshot_basic_shebang_input_index_b1f0c2.js +++ b/crates/turbopack-tests/tests/snapshot/basic/shebang/output/crates_turbopack-tests_tests_snapshot_basic_shebang_input_index_b1f0c2.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. 
+globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/comptime/define/output/crates_turbopack-tests_tests_snapshot_comptime_define_input_index_6b0d2b.js b/crates/turbopack-tests/tests/snapshot/comptime/define/output/crates_turbopack-tests_tests_snapshot_comptime_define_input_index_6b0d2b.js index 08b94a22b03f4..25dad08bdaf48 100644 --- a/crates/turbopack-tests/tests/snapshot/comptime/define/output/crates_turbopack-tests_tests_snapshot_comptime_define_input_index_6b0d2b.js +++ b/crates/turbopack-tests/tests/snapshot/comptime/define/output/crates_turbopack-tests_tests_snapshot_comptime_define_input_index_6b0d2b.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/css/absolute-uri-import/output/crates_turbopack-tests_tests_snapshot_css_absolute-uri-import_input_index_fa9a30.js b/crates/turbopack-tests/tests/snapshot/css/absolute-uri-import/output/crates_turbopack-tests_tests_snapshot_css_absolute-uri-import_input_index_fa9a30.js index 95541a48437d0..b2b8890121996 100644 --- a/crates/turbopack-tests/tests/snapshot/css/absolute-uri-import/output/crates_turbopack-tests_tests_snapshot_css_absolute-uri-import_input_index_fa9a30.js +++ b/crates/turbopack-tests/tests/snapshot/css/absolute-uri-import/output/crates_turbopack-tests_tests_snapshot_css_absolute-uri-import_input_index_fa9a30.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_37a138.js b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_37a138.js index 5aa739135d07b..75b34d83f1b3f 100644 --- a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_37a138.js +++ b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_37a138.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. 
+globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_b080c4.js b/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_b080c4.js index d33a648cacbb4..773562ea17e8b 100644 --- a/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_b080c4.js +++ b/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_b080c4.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/env/env/output/crates_turbopack-tests_tests_snapshot_env_env_input_index_29a23f.js b/crates/turbopack-tests/tests/snapshot/env/env/output/crates_turbopack-tests_tests_snapshot_env_env_input_index_29a23f.js index 7b7028df89638..9ab80470c1541 100644 --- a/crates/turbopack-tests/tests/snapshot/env/env/output/crates_turbopack-tests_tests_snapshot_env_env_input_index_29a23f.js +++ b/crates/turbopack-tests/tests/snapshot/env/env/output/crates_turbopack-tests_tests_snapshot_env_env_input_index_29a23f.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/evaluated_entrry/runtime_entry/output/a587c_tests_snapshot_evaluated_entrry_runtime_entry_input_index_f59cc7.js b/crates/turbopack-tests/tests/snapshot/evaluated_entrry/runtime_entry/output/a587c_tests_snapshot_evaluated_entrry_runtime_entry_input_index_f59cc7.js index 4c6c6b1a278e5..50ac5c13d248f 100644 --- a/crates/turbopack-tests/tests/snapshot/evaluated_entrry/runtime_entry/output/a587c_tests_snapshot_evaluated_entrry_runtime_entry_input_index_f59cc7.js +++ b/crates/turbopack-tests/tests/snapshot/evaluated_entrry/runtime_entry/output/a587c_tests_snapshot_evaluated_entrry_runtime_entry_input_index_f59cc7.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. 
+globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/example/example/output/crates_turbopack-tests_tests_snapshot_example_example_input_index_78b6bf.js b/crates/turbopack-tests/tests/snapshot/example/example/output/crates_turbopack-tests_tests_snapshot_example_example_input_index_78b6bf.js index c104f0824d3ca..94549295687e7 100644 --- a/crates/turbopack-tests/tests/snapshot/example/example/output/crates_turbopack-tests_tests_snapshot_example_example_input_index_78b6bf.js +++ b/crates/turbopack-tests/tests/snapshot/example/example/output/crates_turbopack-tests_tests_snapshot_example_example_input_index_78b6bf.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_289ae7.js b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_289ae7.js index 274c8ab0aac89..7e65731fd1b6d 100644 --- a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_289ae7.js +++ b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_289ae7.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_3e96b7.js b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_3e96b7.js index 94db515d8b501..d46afb1b5bfac 100644 --- a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_3e96b7.js +++ b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_3e96b7.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. 
+globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/cjs/output/crates_turbopack-tests_tests_snapshot_import-meta_cjs_input_index_537553.js b/crates/turbopack-tests/tests/snapshot/import-meta/cjs/output/crates_turbopack-tests_tests_snapshot_import-meta_cjs_input_index_537553.js index d7f9e048c31b6..328b3153dbb3a 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/cjs/output/crates_turbopack-tests_tests_snapshot_import-meta_cjs_input_index_537553.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/cjs/output/crates_turbopack-tests_tests_snapshot_import-meta_cjs_input_index_537553.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/esm-multiple/output/79fb1_turbopack-tests_tests_snapshot_import-meta_esm-multiple_input_index_c00392.js b/crates/turbopack-tests/tests/snapshot/import-meta/esm-multiple/output/79fb1_turbopack-tests_tests_snapshot_import-meta_esm-multiple_input_index_c00392.js index 8825803270f20..16a8ed7cf8101 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/esm-multiple/output/79fb1_turbopack-tests_tests_snapshot_import-meta_esm-multiple_input_index_c00392.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/esm-multiple/output/79fb1_turbopack-tests_tests_snapshot_import-meta_esm-multiple_input_index_c00392.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/esm-mutable/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-mutable_input_index_6c9201.js b/crates/turbopack-tests/tests/snapshot/import-meta/esm-mutable/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-mutable_input_index_6c9201.js index 4bbefeb29766f..1d73faabe6126 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/esm-mutable/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-mutable_input_index_6c9201.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/esm-mutable/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-mutable_input_index_6c9201.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/esm-object/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-object_input_index_6fcf7d.js b/crates/turbopack-tests/tests/snapshot/import-meta/esm-object/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-object_input_index_6fcf7d.js index 947e8cee15c51..0099a5b531289 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/esm-object/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-object_input_index_6fcf7d.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/esm-object/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-object_input_index_6fcf7d.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/esm/output/crates_turbopack-tests_tests_snapshot_import-meta_esm_input_index_c4c88a.js b/crates/turbopack-tests/tests/snapshot/import-meta/esm/output/crates_turbopack-tests_tests_snapshot_import-meta_esm_input_index_c4c88a.js index 9a84ce69ca07e..dd419786994af 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/esm/output/crates_turbopack-tests_tests_snapshot_import-meta_esm_input_index_c4c88a.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/esm/output/crates_turbopack-tests_tests_snapshot_import-meta_esm_input_index_c4c88a.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. 
+globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/url/output/crates_turbopack-tests_tests_snapshot_import-meta_url_input_index_988b57.js b/crates/turbopack-tests/tests/snapshot/import-meta/url/output/crates_turbopack-tests_tests_snapshot_import-meta_url_input_index_988b57.js index ad32637e947c7..a85535ae468bc 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/url/output/crates_turbopack-tests_tests_snapshot_import-meta_url_input_index_988b57.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/url/output/crates_turbopack-tests_tests_snapshot_import-meta_url_input_index_988b57.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/imports/dynamic/output/crates_turbopack-tests_tests_snapshot_imports_dynamic_input_index_45c162.js b/crates/turbopack-tests/tests/snapshot/imports/dynamic/output/crates_turbopack-tests_tests_snapshot_imports_dynamic_input_index_45c162.js index 9eabfdc1af6e7..b45dc00c8600e 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/dynamic/output/crates_turbopack-tests_tests_snapshot_imports_dynamic_input_index_45c162.js +++ b/crates/turbopack-tests/tests/snapshot/imports/dynamic/output/crates_turbopack-tests_tests_snapshot_imports_dynamic_input_index_45c162.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/imports/json/output/crates_turbopack-tests_tests_snapshot_imports_json_input_index_961ae2.js b/crates/turbopack-tests/tests/snapshot/imports/json/output/crates_turbopack-tests_tests_snapshot_imports_json_input_index_961ae2.js index d9c568bde64b6..04126d4ed9898 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/json/output/crates_turbopack-tests_tests_snapshot_imports_json_input_index_961ae2.js +++ b/crates/turbopack-tests/tests/snapshot/imports/json/output/crates_turbopack-tests_tests_snapshot_imports_json_input_index_961ae2.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. 
+globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * diff --git a/crates/turbopack-tests/tests/snapshot/imports/resolve_error_cjs/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_cjs_input_index_f8412b.js b/crates/turbopack-tests/tests/snapshot/imports/resolve_error_cjs/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_cjs_input_index_f8412b.js index 8ef5a6e6c4182..c555035fbc30b 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/resolve_error_cjs/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_cjs_input_index_f8412b.js +++ b/crates/turbopack-tests/tests/snapshot/imports/resolve_error_cjs/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_cjs_input_index_f8412b.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/imports/resolve_error_esm/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_esm_input_index_0b3e45.js b/crates/turbopack-tests/tests/snapshot/imports/resolve_error_esm/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_esm_input_index_0b3e45.js index 2289df6ca917c..2df51dfa62c3f 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/resolve_error_esm/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_esm_input_index_0b3e45.js +++ b/crates/turbopack-tests/tests/snapshot/imports/resolve_error_esm/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_esm_input_index_0b3e45.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/imports/static-and-dynamic/output/79fb1_turbopack-tests_tests_snapshot_imports_static-and-dynamic_input_index_ec8693.js b/crates/turbopack-tests/tests/snapshot/imports/static-and-dynamic/output/79fb1_turbopack-tests_tests_snapshot_imports_static-and-dynamic_input_index_ec8693.js index 3019a30b46c89..aca7b71e6bd8f 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/static-and-dynamic/output/79fb1_turbopack-tests_tests_snapshot_imports_static-and-dynamic_input_index_ec8693.js +++ b/crates/turbopack-tests/tests/snapshot/imports/static-and-dynamic/output/79fb1_turbopack-tests_tests_snapshot_imports_static-and-dynamic_input_index_ec8693.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/imports/static/output/crates_turbopack-tests_tests_snapshot_imports_static_input_index_885269.js b/crates/turbopack-tests/tests/snapshot/imports/static/output/crates_turbopack-tests_tests_snapshot_imports_static_input_index_885269.js index 54acf69b2a765..95368f65a2855 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/static/output/crates_turbopack-tests_tests_snapshot_imports_static_input_index_885269.js +++ b/crates/turbopack-tests/tests/snapshot/imports/static/output/crates_turbopack-tests_tests_snapshot_imports_static_input_index_885269.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/node/node_protocol_external/output/79fb1_turbopack-tests_tests_snapshot_node_node_protocol_external_input_index_667edf.js b/crates/turbopack-tests/tests/snapshot/node/node_protocol_external/output/79fb1_turbopack-tests_tests_snapshot_node_node_protocol_external_input_index_667edf.js index b3e748c6a00ca..06e034c13a397 100644 --- a/crates/turbopack-tests/tests/snapshot/node/node_protocol_external/output/79fb1_turbopack-tests_tests_snapshot_node_node_protocol_external_input_index_667edf.js +++ b/crates/turbopack-tests/tests/snapshot/node/node_protocol_external/output/79fb1_turbopack-tests_tests_snapshot_node_node_protocol_external_input_index_667edf.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/styled_components/styled_components/output/a587c_tests_snapshot_styled_components_styled_components_input_index_afc482.js b/crates/turbopack-tests/tests/snapshot/styled_components/styled_components/output/a587c_tests_snapshot_styled_components_styled_components_input_index_afc482.js index 14e5a085d14af..048b65ec66f3e 100644 --- a/crates/turbopack-tests/tests/snapshot/styled_components/styled_components/output/a587c_tests_snapshot_styled_components_styled_components_input_index_afc482.js +++ b/crates/turbopack-tests/tests/snapshot/styled_components/styled_components/output/a587c_tests_snapshot_styled_components_styled_components_input_index_afc482.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_4a3d65.js b/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_4a3d65.js index 1a8aa2a97ec28..649d81472a10a 100644 --- a/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_4a3d65.js +++ b/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_4a3d65.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_9dcfd0.js b/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_9dcfd0.js index dc1ded83d19d7..608665af80fef 100644 --- a/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_9dcfd0.js +++ b/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_9dcfd0.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/typescript/jsconfig-baseurl/output/79fb1_turbopack-tests_tests_snapshot_typescript_jsconfig-baseurl_input_index_8f1e58.js b/crates/turbopack-tests/tests/snapshot/typescript/jsconfig-baseurl/output/79fb1_turbopack-tests_tests_snapshot_typescript_jsconfig-baseurl_input_index_8f1e58.js index 3e361371c742f..2bf5c64bb4d80 100644 --- a/crates/turbopack-tests/tests/snapshot/typescript/jsconfig-baseurl/output/79fb1_turbopack-tests_tests_snapshot_typescript_jsconfig-baseurl_input_index_8f1e58.js +++ b/crates/turbopack-tests/tests/snapshot/typescript/jsconfig-baseurl/output/79fb1_turbopack-tests_tests_snapshot_typescript_jsconfig-baseurl_input_index_8f1e58.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. 
* diff --git a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-baseurl/output/a587c_tests_snapshot_typescript_tsconfig-baseurl_input_index.ts_0aa04e._.js b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-baseurl/output/a587c_tests_snapshot_typescript_tsconfig-baseurl_input_index.ts_0aa04e._.js index 1bda43e6264e5..a29c7477f4702 100644 --- a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-baseurl/output/a587c_tests_snapshot_typescript_tsconfig-baseurl_input_index.ts_0aa04e._.js +++ b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-baseurl/output/a587c_tests_snapshot_typescript_tsconfig-baseurl_input_index.ts_0aa04e._.js @@ -467,6 +467,14 @@ function runModuleExecutionHooks(module, executeModule) { cleanupReactRefreshIntercept(); } +// noop fns to prevent refresh runtime errors when trying to access the runtime outside of the initial module execution. +globalThis.$RefreshReg$ = function () {}; +globalThis.$RefreshSig$ = function () { + return function (type) { + return type; + }; +}; + /** * Retrieves a module from the cache, or instantiate it if it is not cached. * From bef88f9c2c8519d111099292b0ebe20edd412ebf Mon Sep 17 00:00:00 2001 From: OJ Kwon <1210596+kwonoj@users.noreply.github.com> Date: Tue, 25 Apr 2023 23:42:27 -0700 Subject: [PATCH 12/24] feat(ecma-plugins): extract custom emotion transform (#4662) ### Description First step for WEB-940. Context: https://vercel.slack.com/archives/C03EWR7LGEN/p1681789689115509 Currently all of the ecma transforms are explicitly listed under EcmaInputTransform in turbopack-ecmascript. This makes enum verbose, we have to manually expand it each time adding new transform, as well as turbopack-ecmascript gets larger to contain all of the 3rd party transforms by default. PR extracts non-core transforms into a new crate, named as ecmascript-plugins then utilize EcmaInputTransform::Custom to invoke transforms instead. 
`EcmaInputTransform::Custom` is renamed to `EcmaInputTransform::Plugin` as well. Goal is extracting all of 3rd party / non-core transforms. This also reduces multiple steps to construct option value between caller (next-*) to actual transform (swcOptions). https://github.com/vercel/next.js/pull/48671 have corresponding next.js changes. --- Cargo.lock | 17 +++ Cargo.toml | 2 + crates/turbo-binding/Cargo.toml | 7 + crates/turbo-binding/src/lib.rs | 2 + crates/turbopack-cli/Cargo.toml | 3 + .../turbopack-cli/src/dev/web_entry_source.rs | 5 +- crates/turbopack-cli/src/lib.rs | 1 + .../turbopack-ecmascript-plugins/Cargo.toml | 26 ++++ crates/turbopack-ecmascript-plugins/build.rs | 5 + .../turbopack-ecmascript-plugins/src/lib.rs | 7 + .../src/transform/emotion.rs | 126 ++++++++++++++++++ .../src/transform/mod.rs | 1 + crates/turbopack-ecmascript/src/lib.rs | 4 +- .../turbopack-ecmascript/src/transform/mod.rs | 57 ++------ crates/turbopack-tests/Cargo.toml | 3 + crates/turbopack-tests/tests/snapshot.rs | 4 +- crates/turbopack/Cargo.toml | 1 + crates/turbopack/src/module_options/mod.rs | 25 ++-- .../module_options/module_options_context.rs | 37 +---- 19 files changed, 226 insertions(+), 107 deletions(-) create mode 100644 crates/turbopack-ecmascript-plugins/Cargo.toml create mode 100644 crates/turbopack-ecmascript-plugins/build.rs create mode 100644 crates/turbopack-ecmascript-plugins/src/lib.rs create mode 100644 crates/turbopack-ecmascript-plugins/src/transform/emotion.rs create mode 100644 crates/turbopack-ecmascript-plugins/src/transform/mod.rs diff --git a/Cargo.lock b/Cargo.lock index cd3cb3e804759..ee245a6fe5980 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8310,6 +8310,7 @@ dependencies = [ "turbopack-dev", "turbopack-dev-server", "turbopack-ecmascript", + "turbopack-ecmascript-plugins", "turbopack-env", "turbopack-image", "turbopack-json", @@ -8555,6 +8556,7 @@ dependencies = [ "turbopack-core", "turbopack-css", "turbopack-ecmascript", + 
"turbopack-ecmascript-plugins", "turbopack-env", "turbopack-image", "turbopack-json", @@ -8622,6 +8624,7 @@ dependencies = [ "turbopack-core", "turbopack-dev", "turbopack-dev-server", + "turbopack-ecmascript-plugins", "turbopack-env", "turbopack-node", "webbrowser", @@ -8796,6 +8799,19 @@ dependencies = [ "url", ] +[[package]] +name = "turbopack-ecmascript-plugins" +version = "0.1.0" +dependencies = [ + "anyhow", + "serde", + "swc_core", + "swc_emotion", + "turbo-tasks", + "turbo-tasks-build", + "turbopack-ecmascript", +] + [[package]] name = "turbopack-env" version = "0.1.0" @@ -8947,6 +8963,7 @@ dependencies = [ "turbopack", "turbopack-core", "turbopack-dev", + "turbopack-ecmascript-plugins", "turbopack-env", "turbopack-test-utils", ] diff --git a/Cargo.toml b/Cargo.toml index 3c5efc9294c33..9d0049101d176 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -30,6 +30,7 @@ members = [ "crates/turbopack-dev", "crates/turbopack-dev-server", "crates/turbopack-ecmascript", + "crates/turbopack-ecmascript-plugins", "crates/turbopack-env", "crates/turbopack-image", "crates/turbopack-json", @@ -134,6 +135,7 @@ turbopack-css = { path = "crates/turbopack-css" } turbopack-dev = { path = "crates/turbopack-dev" } turbopack-dev-server = { path = "crates/turbopack-dev-server" } turbopack-ecmascript = { path = "crates/turbopack-ecmascript" } +turbopack-ecmascript-plugins = { path = "crates/turbopack-ecmascript-plugins" } turbopack-env = { path = "crates/turbopack-env" } turbopack-image = { path = "crates/turbopack-image" } turbopack-json = { path = "crates/turbopack-json" } diff --git a/crates/turbo-binding/Cargo.toml b/crates/turbo-binding/Cargo.toml index 46392fe5daf9c..4fe30ed24ac77 100644 --- a/crates/turbo-binding/Cargo.toml +++ b/crates/turbo-binding/Cargo.toml @@ -108,6 +108,12 @@ __turbopack_dev_dynamic_embed_contents = [ ] __turbopack_dev_server = ["__turbopack", "turbopack-dev-server"] __turbopack_ecmascript = ["__turbopack", "turbopack-ecmascript"] +# [Note]: currently all of 
the transform features are enabled by default +__turbopack_ecmascript_plugin = [ + "__turbopack", + "turbopack-ecmascript-plugins", + "turbopack-ecmascript-plugins/transform_emotion", +] __turbopack_env = ["__turbopack", "turbopack-env"] __turbopack_image = ["__turbopack", "turbopack-image"] __turbopack_image_avif = ["turbopack-image/avif"] @@ -185,6 +191,7 @@ turbopack-css = { optional = true, workspace = true } turbopack-dev = { optional = true, workspace = true } turbopack-dev-server = { optional = true, workspace = true } turbopack-ecmascript = { optional = true, workspace = true } +turbopack-ecmascript-plugins = { optional = true, workspace = true } turbopack-env = { optional = true, workspace = true } turbopack-image = { optional = true, workspace = true } turbopack-json = { optional = true, workspace = true } diff --git a/crates/turbo-binding/src/lib.rs b/crates/turbo-binding/src/lib.rs index 37d5243047b96..e91f641b0ed0a 100644 --- a/crates/turbo-binding/src/lib.rs +++ b/crates/turbo-binding/src/lib.rs @@ -70,6 +70,8 @@ pub mod turbopack { pub use turbopack_dev_server as dev_server; #[cfg(feature = "__turbopack_ecmascript")] pub use turbopack_ecmascript as ecmascript; + #[cfg(feature = "__turbopack_ecmascript_plugin")] + pub use turbopack_ecmascript_plugins as ecmascript_plugin; #[cfg(feature = "__turbopack_env")] pub use turbopack_env as env; #[cfg(feature = "__turbopack_image")] diff --git a/crates/turbopack-cli/Cargo.toml b/crates/turbopack-cli/Cargo.toml index 3b6e82291c853..aff797e464333 100644 --- a/crates/turbopack-cli/Cargo.toml +++ b/crates/turbopack-cli/Cargo.toml @@ -57,6 +57,9 @@ turbopack-cli-utils = { workspace = true } turbopack-core = { workspace = true } turbopack-dev = { workspace = true } turbopack-dev-server = { workspace = true } +turbopack-ecmascript-plugins = { workspace = true, features = [ + "transform_emotion", +] } turbopack-env = { workspace = true } turbopack-node = { workspace = true } webbrowser = { workspace = true } diff 
--git a/crates/turbopack-cli/src/dev/web_entry_source.rs b/crates/turbopack-cli/src/dev/web_entry_source.rs index 2929d2d3e965d..4dd239591be9f 100644 --- a/crates/turbopack-cli/src/dev/web_entry_source.rs +++ b/crates/turbopack-cli/src/dev/web_entry_source.rs @@ -8,8 +8,8 @@ use turbopack::{ condition::ContextCondition, ecmascript::EcmascriptModuleAssetVc, module_options::{ - EmotionTransformConfigVc, JsxTransformOptions, ModuleOptionsContext, - ModuleOptionsContextVc, StyledComponentsTransformConfigVc, + JsxTransformOptions, ModuleOptionsContext, ModuleOptionsContextVc, + StyledComponentsTransformConfigVc, }, resolve_options_context::{ResolveOptionsContext, ResolveOptionsContextVc}, transition::TransitionsByNameVc, @@ -35,6 +35,7 @@ use turbopack_dev_server::{ html::DevHtmlAssetVc, source::{asset_graph::AssetGraphContentSourceVc, ContentSourceVc}, }; +use turbopack_ecmascript_plugins::transform::emotion::EmotionTransformConfigVc; use turbopack_node::execution_context::ExecutionContextVc; use crate::embed_js::embed_file_path; diff --git a/crates/turbopack-cli/src/lib.rs b/crates/turbopack-cli/src/lib.rs index b90cc3949bc69..93e5a63c3a38f 100644 --- a/crates/turbopack-cli/src/lib.rs +++ b/crates/turbopack-cli/src/lib.rs @@ -8,5 +8,6 @@ pub(crate) mod embed_js; pub fn register() { turbopack::register(); turbopack_dev::register(); + turbopack_ecmascript_plugins::register(); include!(concat!(env!("OUT_DIR"), "/register.rs")); } diff --git a/crates/turbopack-ecmascript-plugins/Cargo.toml b/crates/turbopack-ecmascript-plugins/Cargo.toml new file mode 100644 index 0000000000000..399d9bae5d4cb --- /dev/null +++ b/crates/turbopack-ecmascript-plugins/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "turbopack-ecmascript-plugins" +version = "0.1.0" +description = "TBD" +license = "MPL-2.0" +edition = "2021" +autobenches = false + +[lib] +bench = false + +[features] +transform_emotion = [] + +[dependencies] +anyhow = { workspace = true } +serde = { workspace = true } + 
+turbo-tasks = { workspace = true } +turbopack-ecmascript = { workspace = true } + +swc_core = { workspace = true, features = ["ecma_ast", "ecma_visit", "common"] } +swc_emotion = { workspace = true } + +[build-dependencies] +turbo-tasks-build = { workspace = true } diff --git a/crates/turbopack-ecmascript-plugins/build.rs b/crates/turbopack-ecmascript-plugins/build.rs new file mode 100644 index 0000000000000..1673efed59cce --- /dev/null +++ b/crates/turbopack-ecmascript-plugins/build.rs @@ -0,0 +1,5 @@ +use turbo_tasks_build::generate_register; + +fn main() { + generate_register(); +} diff --git a/crates/turbopack-ecmascript-plugins/src/lib.rs b/crates/turbopack-ecmascript-plugins/src/lib.rs new file mode 100644 index 0000000000000..4eeb5f3c51497 --- /dev/null +++ b/crates/turbopack-ecmascript-plugins/src/lib.rs @@ -0,0 +1,7 @@ +pub mod transform; + +pub fn register() { + turbo_tasks::register(); + turbopack_ecmascript::register(); + include!(concat!(env!("OUT_DIR"), "/register.rs")); +} diff --git a/crates/turbopack-ecmascript-plugins/src/transform/emotion.rs b/crates/turbopack-ecmascript-plugins/src/transform/emotion.rs new file mode 100644 index 0000000000000..63c1eb184759a --- /dev/null +++ b/crates/turbopack-ecmascript-plugins/src/transform/emotion.rs @@ -0,0 +1,126 @@ +#![allow(unused)] +use std::{ + hash::{Hash, Hasher}, + path::Path, +}; + +use anyhow::Result; +use serde::{Deserialize, Serialize}; +use swc_core::{ + common::util::take::Take, + ecma::{ + ast::{Module, Program}, + visit::FoldWith, + }, +}; +use turbo_tasks::trace::TraceRawVcs; +use turbopack_ecmascript::{CustomTransformer, TransformContext}; + +#[derive(Clone, PartialEq, Eq, Debug, TraceRawVcs, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum EmotionLabelKind { + DevOnly, + Always, + Never, +} + +#[turbo_tasks::value(transparent)] +pub struct OptionEmotionTransformConfig(Option); + +//[TODO]: need to support importmap, there are type mismatch between +//next.config.js 
to swc's emotion options +#[turbo_tasks::value(shared)] +#[derive(Default, Clone, Debug)] +#[serde(rename_all = "camelCase")] +pub struct EmotionTransformConfig { + pub sourcemap: Option, + pub label_format: Option, + pub auto_label: Option, +} + +#[turbo_tasks::value_impl] +impl EmotionTransformConfigVc { + #[turbo_tasks::function] + pub fn default() -> Self { + Self::cell(Default::default()) + } +} + +impl Default for EmotionTransformConfigVc { + fn default() -> Self { + Self::default() + } +} + +#[derive(Debug)] +pub struct EmotionTransformer { + #[cfg(feature = "transform_emotion")] + config: swc_emotion::EmotionOptions, +} + +#[cfg(feature = "transform_emotion")] +impl EmotionTransformer { + pub fn new(config: &EmotionTransformConfig) -> Option { + let config = swc_emotion::EmotionOptions { + // When you create a transformer structure, it is assumed that you are performing an + // emotion transform. + enabled: Some(true), + sourcemap: config.sourcemap, + label_format: config.label_format.clone(), + auto_label: if let Some(auto_label) = config.auto_label.as_ref() { + match auto_label { + EmotionLabelKind::Always => Some(true), + EmotionLabelKind::Never => Some(false), + // [TODO]: this is not correct coerece, need to be fixed + EmotionLabelKind::DevOnly => None, + } + } else { + None + }, + ..Default::default() + }; + + Some(EmotionTransformer { config }) + } +} + +#[cfg(not(feature = "transform_emotion"))] +impl EmotionTransformer { + pub fn new(_config: &EmotionTransformConfig) -> Option { + None + } +} + +impl CustomTransformer for EmotionTransformer { + fn transform(&self, program: &mut Program, ctx: &TransformContext<'_>) -> Option { + #[cfg(feature = "transform_emotion")] + { + let p = std::mem::replace(program, Program::Module(Module::dummy())); + let hash = { + #[allow(clippy::disallowed_types)] + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + p.hash(&mut hasher); + hasher.finish() + }; + *program = p.fold_with(&mut 
swc_emotion::emotion( + self.config.clone(), + Path::new(ctx.file_name_str), + hash as u32, + ctx.source_map.clone(), + ctx.comments.clone(), + )); + } + + None + } +} + +pub async fn build_emotion_transformer( + config: &Option, +) -> Result>> { + Ok(if let Some(config) = config { + EmotionTransformer::new(&*config.await?).map(Box::new) + } else { + None + }) +} diff --git a/crates/turbopack-ecmascript-plugins/src/transform/mod.rs b/crates/turbopack-ecmascript-plugins/src/transform/mod.rs new file mode 100644 index 0000000000000..3697907f45254 --- /dev/null +++ b/crates/turbopack-ecmascript-plugins/src/transform/mod.rs @@ -0,0 +1 @@ +pub mod emotion; diff --git a/crates/turbopack-ecmascript/src/lib.rs b/crates/turbopack-ecmascript/src/lib.rs index 39e887d9f2167..21bd1098af58e 100644 --- a/crates/turbopack-ecmascript/src/lib.rs +++ b/crates/turbopack-ecmascript/src/lib.rs @@ -43,8 +43,8 @@ use swc_core::{ }, }; pub use transform::{ - CustomTransform, CustomTransformVc, CustomTransformer, EcmascriptInputTransform, - EcmascriptInputTransformsVc, TransformContext, + CustomTransformer, EcmascriptInputTransform, EcmascriptInputTransformsVc, TransformContext, + TransformPlugin, TransformPluginVc, }; use turbo_tasks::{ primitives::StringVc, trace::TraceRawVcs, RawVc, ReadRef, TryJoinIterExt, Value, ValueToString, diff --git a/crates/turbopack-ecmascript/src/transform/mod.rs b/crates/turbopack-ecmascript/src/transform/mod.rs index cbb69dcbc61d3..aca81923e9077 100644 --- a/crates/turbopack-ecmascript/src/transform/mod.rs +++ b/crates/turbopack-ecmascript/src/transform/mod.rs @@ -1,13 +1,7 @@ mod server_to_client_proxy; mod util; -use std::{ - collections::hash_map::DefaultHasher, - fmt::Debug, - hash::{Hash, Hasher}, - path::{Path, PathBuf}, - sync::Arc, -}; +use std::{fmt::Debug, hash::Hash, path::PathBuf, sync::Arc}; use anyhow::Result; use swc_core::{ @@ -43,13 +37,7 @@ pub enum EcmascriptInputTransform { ClientDirective(StringVc), ServerDirective(StringVc), CommonJs, - 
Custom(CustomTransformVc), - Emotion { - #[serde(default)] - sourcemap: bool, - label_format: OptionStringVc, - auto_label: Option, - }, + Plugin(TransformPluginVc), PresetEnv(EnvironmentVc), React { #[serde(default)] @@ -93,7 +81,7 @@ pub trait CustomTransformer: Debug { fn transform(&self, program: &mut Program, ctx: &TransformContext<'_>) -> Option; } -/// A wrapper around a CustomTransformer instance, allowing it to operate with +/// A wrapper around a TransformPlugin instance, allowing it to operate with /// the turbo_task caching requirements. #[turbo_tasks::value( transparent, @@ -103,9 +91,9 @@ pub trait CustomTransformer: Debug { cell = "new" )] #[derive(Debug)] -pub struct CustomTransform(#[turbo_tasks(trace_ignore)] Box); +pub struct TransformPlugin(#[turbo_tasks(trace_ignore)] Box); -impl CustomTransformer for CustomTransform { +impl CustomTransformer for TransformPlugin { fn transform(&self, program: &mut Program, ctx: &TransformContext<'_>) -> Option { self.0.transform(program, ctx) } @@ -211,35 +199,6 @@ impl EcmascriptInputTransform { Some(comments.clone()), )); } - EcmascriptInputTransform::Emotion { - sourcemap, - label_format, - auto_label, - } => { - let options = swc_emotion::EmotionOptions { - // this should be always enabled if match arrives here: - // since moduleoptions expect to push emotion transform only if - // there are valid, enabled config values. 
- enabled: Some(true), - sourcemap: Some(*sourcemap), - label_format: label_format.await?.clone_value(), - auto_label: *auto_label, - ..Default::default() - }; - let p = std::mem::replace(program, Program::Module(Module::dummy())); - let hash = { - let mut hasher = DefaultHasher::new(); - p.hash(&mut hasher); - hasher.finish() - }; - *program = p.fold_with(&mut swc_emotion::emotion( - options, - Path::new(file_name_str), - hash as u32, - source_map.clone(), - comments.clone(), - )) - } EcmascriptInputTransform::PresetEnv(env) => { let versions = env.runtime_versions().await?; let config = swc_core::ecma::preset_env::Config { @@ -283,14 +242,14 @@ impl EcmascriptInputTransform { } let top_level_import_paths = &*top_level_import_paths.await?; - if top_level_import_paths.len() > 0 { + if !top_level_import_paths.is_empty() { options.top_level_import_paths = top_level_import_paths .iter() .map(|s| JsWord::from(s.clone())) .collect(); } let meaningless_file_names = &*meaningless_file_names.await?; - if meaningless_file_names.len() > 0 { + if !meaningless_file_names.is_empty() { options.meaningless_file_names = meaningless_file_names.clone(); } @@ -361,7 +320,7 @@ impl EcmascriptInputTransform { .emit(); } } - EcmascriptInputTransform::Custom(transform) => { + EcmascriptInputTransform::Plugin(transform) => { if let Some(output) = transform.await?.transform(program, ctx) { *program = output; } diff --git a/crates/turbopack-tests/Cargo.toml b/crates/turbopack-tests/Cargo.toml index 7c53ed40cea94..bafc54d128d1d 100644 --- a/crates/turbopack-tests/Cargo.toml +++ b/crates/turbopack-tests/Cargo.toml @@ -26,6 +26,9 @@ turbo-tasks-fs = { workspace = true } turbo-tasks-memory = { workspace = true } turbopack-core = { workspace = true, features = ["issue_path"] } turbopack-dev = { workspace = true } +turbopack-ecmascript-plugins = { workspace = true, features = [ + "transform_emotion", +] } turbopack-env = { workspace = true } turbopack-test-utils = { workspace = true } diff --git 
a/crates/turbopack-tests/tests/snapshot.rs b/crates/turbopack-tests/tests/snapshot.rs index 6fed765bc0b30..8a818f8cc733f 100644 --- a/crates/turbopack-tests/tests/snapshot.rs +++ b/crates/turbopack-tests/tests/snapshot.rs @@ -21,7 +21,7 @@ use turbopack::{ condition::ContextCondition, ecmascript::EcmascriptModuleAssetVc, module_options::{ - EmotionTransformConfig, JsxTransformOptions, JsxTransformOptionsVc, ModuleOptionsContext, + JsxTransformOptions, JsxTransformOptionsVc, ModuleOptionsContext, StyledComponentsTransformConfigVc, }, resolve_options_context::ResolveOptionsContext, @@ -43,12 +43,14 @@ use turbopack_core::{ source_asset::SourceAssetVc, }; use turbopack_dev::DevChunkingContextVc; +use turbopack_ecmascript_plugins::transform::emotion::EmotionTransformConfig; use turbopack_env::ProcessEnvAssetVc; use turbopack_test_utils::snapshot::{diff, expected, matches_expected, snapshot_issues}; fn register() { turbopack::register(); turbopack_dev::register(); + turbopack_ecmascript_plugins::register(); include!(concat!(env!("OUT_DIR"), "/register_test_snapshot.rs")); } diff --git a/crates/turbopack/Cargo.toml b/crates/turbopack/Cargo.toml index 6fefe53eea027..0b722fc777627 100644 --- a/crates/turbopack/Cargo.toml +++ b/crates/turbopack/Cargo.toml @@ -29,6 +29,7 @@ turbo-tasks-fs = { workspace = true } turbopack-core = { workspace = true } turbopack-css = { workspace = true } turbopack-ecmascript = { workspace = true } +turbopack-ecmascript-plugins = { workspace = true } turbopack-env = { workspace = true } turbopack-image = { workspace = true } turbopack-json = { workspace = true } diff --git a/crates/turbopack/src/module_options/mod.rs b/crates/turbopack/src/module_options/mod.rs index c59567f95211f..14cda637cfedd 100644 --- a/crates/turbopack/src/module_options/mod.rs +++ b/crates/turbopack/src/module_options/mod.rs @@ -15,8 +15,9 @@ use turbopack_core::{ }; use turbopack_css::{CssInputTransform, CssInputTransformsVc}; use turbopack_ecmascript::{ - 
EcmascriptInputTransform, EcmascriptInputTransformsVc, EcmascriptOptions, + EcmascriptInputTransform, EcmascriptInputTransformsVc, EcmascriptOptions, TransformPluginVc, }; +use turbopack_ecmascript_plugins::transform::emotion::build_emotion_transformer; use turbopack_mdx::MdxTransformOptions; use turbopack_node::transforms::{postcss::PostCssTransformVc, webpack::WebpackLoadersVc}; @@ -99,23 +100,13 @@ impl ModuleOptionsVc { if enable_styled_jsx { transforms.push(EcmascriptInputTransform::StyledJsx); } - if let Some(enable_emotion) = enable_emotion { - let emotion_transform = enable_emotion.await?; - transforms.push(EcmascriptInputTransform::Emotion { - sourcemap: emotion_transform.sourcemap.unwrap_or(false), - label_format: OptionStringVc::cell(emotion_transform.label_format.clone()), - auto_label: if let Some(auto_label) = emotion_transform.auto_label.as_ref() { - match auto_label { - EmotionLabelKind::Always => Some(true), - EmotionLabelKind::Never => Some(false), - // [TODO]: this is not correct coerece, need to be fixed - EmotionLabelKind::DevOnly => None, - } - } else { - None - }, - }); + + if let Some(transformer) = build_emotion_transformer(enable_emotion).await? 
{ + transforms.push(EcmascriptInputTransform::Plugin(TransformPluginVc::cell( + transformer, + ))); } + if let Some(enable_styled_components) = enable_styled_components { let styled_components_transform = &*enable_styled_components.await?; transforms.push(EcmascriptInputTransform::StyledComponents { diff --git a/crates/turbopack/src/module_options/module_options_context.rs b/crates/turbopack/src/module_options/module_options_context.rs index fbd3d9879b580..a3451616007d1 100644 --- a/crates/turbopack/src/module_options/module_options_context.rs +++ b/crates/turbopack/src/module_options/module_options_context.rs @@ -3,6 +3,7 @@ use serde::{Deserialize, Serialize}; use turbo_tasks::trace::TraceRawVcs; use turbopack_core::{environment::EnvironmentVc, resolve::options::ImportMappingVc}; use turbopack_ecmascript::EcmascriptInputTransform; +use turbopack_ecmascript_plugins::transform::emotion::EmotionTransformConfigVc; use turbopack_node::{ execution_context::ExecutionContextVc, transforms::webpack::WebpackLoaderConfigItemsVc, }; @@ -89,42 +90,6 @@ impl Default for TypescriptTransformOptionsVc { } } -#[derive(Clone, PartialEq, Eq, Debug, TraceRawVcs, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub enum EmotionLabelKind { - DevOnly, - Always, - Never, -} - -#[turbo_tasks::value(transparent)] -pub struct OptionEmotionTransformConfig(Option); - -//[TODO]: need to support importmap, there are type mismatch between -//[TODO]: next.config.js to swc's emotion options -#[turbo_tasks::value(shared)] -#[derive(Default, Clone, Debug)] -#[serde(rename_all = "camelCase")] -pub struct EmotionTransformConfig { - pub sourcemap: Option, - pub label_format: Option, - pub auto_label: Option, -} - -#[turbo_tasks::value_impl] -impl EmotionTransformConfigVc { - #[turbo_tasks::function] - pub fn default() -> Self { - Self::cell(Default::default()) - } -} - -impl Default for EmotionTransformConfigVc { - fn default() -> Self { - Self::default() - } -} - impl 
WebpackLoadersOptions { pub fn is_empty(&self) -> bool { self.extension_to_loaders.is_empty() From 037d91b0b21f05973dd2c20fb0f0f0ce4e434226 Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Wed, 26 Apr 2023 08:58:58 +0200 Subject: [PATCH 13/24] update deps (#4700) ### Description sync lockfile with next.js --- Cargo.lock | 212 +++++++++++++++++++++++++++++++---------------------- 1 file changed, 123 insertions(+), 89 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ee245a6fe5980..9985799689030 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -259,7 +259,7 @@ dependencies = [ "log", "parking", "polling", - "rustix 0.37.3", + "rustix 0.37.11", "slab", "socket2", "waker-fn", @@ -309,7 +309,7 @@ checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.8", + "syn 2.0.15", ] [[package]] @@ -369,13 +369,13 @@ checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" [[package]] name = "async-trait" -version = "0.1.67" +version = "0.1.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ea188f25f0255d8f92797797c97ebf5631fa88178beb1a46fdf5622c9a00e4" +checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" dependencies = [ "proc-macro2", "quote", - "syn 2.0.8", + "syn 2.0.15", ] [[package]] @@ -557,7 +557,7 @@ dependencies = [ "cc", "cfg-if 1.0.0", "libc", - "miniz_oxide", + "miniz_oxide 0.6.2", "object", "rustc-demangle", ] @@ -1322,9 +1322,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "core-graphics" @@ -1378,9 +1378,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.5" +version = "0.2.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +checksum = "280a9f2d8b3a38871a3c8a46fb80db65e5e5ed97da80c4d08bf27fb63e35e181" dependencies = [ "libc", ] @@ -1552,9 +1552,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf2b3e8478797446514c91ef04bafcb59faba183e621ad488df88983cc14128c" +checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -1735,7 +1735,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.8", + "syn 2.0.15", ] [[package]] @@ -1752,7 +1752,7 @@ checksum = "631569015d0d8d54e6c241733f944042623ab6df7bc3be7466874b05fcdb1c5f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.8", + "syn 2.0.15", ] [[package]] @@ -2196,6 +2196,15 @@ dependencies = [ "instant", ] +[[package]] +name = "fdeflate" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d329bdeac514ee06249dabc27877490f17f5d371ec693360768b838e19f3ae10" +dependencies = [ + "simd-adler32", +] + [[package]] name = "fern" version = "0.6.2" @@ -2207,14 +2216,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a3de6e8d11b22ff9edc6d916f890800597d60f8b2da1caf2955c274638d6412" +checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall", - "windows-sys 0.45.0", + "redox_syscall 0.2.16", + "windows-sys 0.48.0", ] [[package]] @@ -2230,7 +2239,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" dependencies = [ "crc32fast", - "miniz_oxide", + "miniz_oxide 0.6.2", ] 
[[package]] @@ -2385,9 +2394,9 @@ checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" [[package]] name = "futures" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "531ac96c6ff5fd7c62263c5e3c67a603af4fcaee2e1a0ae5565ba3a11e69e549" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" dependencies = [ "futures-channel", "futures-core", @@ -2400,9 +2409,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "164713a5a0dcc3e7b4b1ed7d3b433cabc18025386f9339346e8daf15963cf7ac" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" dependencies = [ "futures-core", "futures-sink", @@ -2410,15 +2419,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86d7a0c1aa76363dac491de0ee99faf6941128376f1cf96f07db7603b7de69dd" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" [[package]] name = "futures-executor" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1997dd9df74cdac935c76252744c1ed5794fac083242ea4fe77ef3ed60ba0f83" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" dependencies = [ "futures-core", "futures-task", @@ -2427,9 +2436,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d422fa3cbe3b40dca574ab087abb5bc98258ea57eea3fd6f1fa7162c778b91" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" [[package]] name = "futures-lite" @@ -2448,13 +2457,13 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.27" 
+version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3eb14ed937631bd8b8b8977f2c198443447a8355b6e3ca599f38c975e5a963b6" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.15", ] [[package]] @@ -2470,15 +2479,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec93083a4aecafb2a80a885c9de1f0ccae9dbd32c2bb54b0c3a65690e0b8d2f2" +checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" [[package]] name = "futures-task" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd65540d33b37b16542a0438c12e6aeead10d4ac5d05bd3f805b8f35ab592879" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" [[package]] name = "futures-timer" @@ -2488,9 +2497,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ef6b17e481503ec85211fed8f39d1970f128935ca1f814cd32ac4a6842e84ab" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ "futures-channel", "futures-core", @@ -2524,9 +2533,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", @@ -2534,9 +2543,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.8" +version = "0.2.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -2978,9 +2987,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.54" +version = "0.1.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c17cc76786e99f8d2f055c11159e7f0091c42474dcc3189fbab96072e873e6d" +checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -3076,9 +3085,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", @@ -3173,13 +3182,13 @@ dependencies = [ [[package]] name = "io-lifetimes" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09270fd4fa1111bc614ed2246c7ef56239a3063d5be0d1ec3b589c505d400aeb" +checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" dependencies = [ "hermit-abi 0.3.1", "libc", - "windows-sys 0.45.0", + "windows-sys 0.48.0", ] [[package]] @@ -3876,6 +3885,16 @@ dependencies = [ "adler", ] +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", + "simd-adler32", +] + [[package]] name = "mio" version = "0.6.23" @@ -4468,7 +4487,7 @@ checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall", + 
"redox_syscall 0.2.16", "smallvec", "windows-sys 0.45.0", ] @@ -4744,14 +4763,15 @@ dependencies = [ [[package]] name = "png" -version = "0.17.7" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d708eaf860a19b19ce538740d2b4bdeeb8337fa53f7738455e706623ad5c638" +checksum = "aaeebc51f9e7d2c150d3f3bfeb667f2aa985db5ef1e3d212847bdedb488beeaa" dependencies = [ "bitflags 1.3.2", "crc32fast", + "fdeflate", "flate2", - "miniz_oxide", + "miniz_oxide 0.7.1", ] [[package]] @@ -4922,9 +4942,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.53" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba466839c78239c09faf015484e5cc04860f88242cff4d03eb038f04b4699b73" +checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" dependencies = [ "unicode-ident", ] @@ -5221,6 +5241,15 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "redox_users" version = "0.4.3" @@ -5228,7 +5257,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ "getrandom", - "redox_syscall", + "redox_syscall 0.2.16", "thiserror", ] @@ -5246,9 +5275,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.7.2" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cce168fea28d3e05f158bda4576cf0c844d5045bc2cc3620fa0292ed5bb5814c" +checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" dependencies = [ "aho-corasick", "memchr", @@ -5481,9 +5510,9 @@ checksum = 
"cb626abdbed5e93f031baae60d72032f56bc964e11ac2ff65f2ba3ed98d6d3e1" [[package]] name = "rustc-demangle" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" +checksum = "d4a36c42d1873f9a77c53bde094f9664d9891bc604a45b4798fd2c389ed12e5b" [[package]] name = "rustc-hash" @@ -5544,16 +5573,16 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.3" +version = "0.37.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b24138615de35e32031d041a09032ef3487a616d901ca4db224e7d557efae2" +checksum = "85597d61f83914ddeba6a47b3b8ffe7365107221c2e557ed94426489fefb5f77" dependencies = [ "bitflags 1.3.2", "errno 0.3.0", "io-lifetimes", "libc", "linux-raw-sys 0.3.0", - "windows-sys 0.45.0", + "windows-sys 0.48.0", ] [[package]] @@ -5720,9 +5749,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.158" +version = "1.0.160" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "771d4d9c4163ee138805e12c710dd365e4f44be8be0503cb1bb9eb989425d9c9" +checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" dependencies = [ "serde_derive", ] @@ -5759,20 +5788,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.158" +version = "1.0.160" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e801c1712f48475582b7696ac71e0ca34ebb30e09338425384269d9717c62cad" +checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" dependencies = [ "proc-macro2", "quote", - "syn 2.0.8", + "syn 2.0.15", ] [[package]] name = "serde_json" -version = "1.0.94" +version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea" +checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" dependencies = [ "indexmap", 
"itoa", @@ -6011,6 +6040,12 @@ dependencies = [ "libc", ] +[[package]] +name = "simd-adler32" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "238abfbb77c1915110ad968465608b68e869e0772622c9656714e73e5a1a522f" + [[package]] name = "simd_helpers" version = "0.1.0" @@ -7450,9 +7485,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.8" +version = "2.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc02725fd69ab9f26eab07fad303e2497fad6fb9eba4f96c4d1687bdf704ad9" +checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" dependencies = [ "proc-macro2", "quote", @@ -7536,15 +7571,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.4.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95" +checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" dependencies = [ "cfg-if 1.0.0", "fastrand", - "redox_syscall", - "rustix 0.36.11", - "windows-sys 0.42.0", + "redox_syscall 0.3.5", + "rustix 0.37.11", + "windows-sys 0.45.0", ] [[package]] @@ -7582,7 +7617,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237" dependencies = [ - "rustix 0.37.3", + "rustix 0.37.11", "windows-sys 0.48.0", ] @@ -7715,7 +7750,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.8", + "syn 2.0.15", ] [[package]] @@ -7861,14 +7896,13 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.26.0" +version = "1.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03201d01c3c27a29c8a5cee5b55a93ddae1ccf6f08f65365c2c918f8c1b76f64" +checksum = 
"d0de47a4eecbe11f498978a9b29d792f0d2692d1dd003650c24c76510e3bc001" dependencies = [ "autocfg", "bytes", "libc", - "memchr", "mio 0.8.6", "num_cpus", "parking_lot", @@ -7892,13 +7926,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "1.8.2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" +checksum = "61a573bdc87985e9d6ddeed1b3d864e8a302c847e40d647746df2f1de209d1ce" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.15", ] [[package]] @@ -9109,7 +9143,7 @@ version = "1.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ - "cfg-if 1.0.0", + "cfg-if 0.1.10", "rand 0.8.5", "static_assertions", ] @@ -10078,11 +10112,11 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.46.0" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets 0.42.2", + "windows-targets 0.48.0", ] [[package]] From 61ce05808c646d209ad926328f6f6b6388c69c97 Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Wed, 26 Apr 2023 16:08:14 +0200 Subject: [PATCH 14/24] make library code less verbose in stack traces (#4706) ### Description avoid highlighting library code in stack traces and show no source frame for them --- crates/turbopack-node/src/source_map/mod.rs | 39 +++++++++++++++++---- 1 file changed, 33 insertions(+), 6 deletions(-) diff --git a/crates/turbopack-node/src/source_map/mod.rs b/crates/turbopack-node/src/source_map/mod.rs index 6de40731d3092..f7d64bc69a815 100644 --- a/crates/turbopack-node/src/source_map/mod.rs +++ 
b/crates/turbopack-node/src/source_map/mod.rs @@ -119,6 +119,21 @@ fn write_resolved( )) )?; } + Ok(ResolvedSourceMapping::MappedLibrary { + frame, + project_path, + }) => { + // There is a mapping to a file in the project directory, but to library code + write!( + writable, + "{PADDING}{}", + formatting_mode.lowlight(&format_args!( + "at {} [{}]", + frame.with_path(&project_path.path), + original_frame.with_name(None) + )) + )?; + } Ok(ResolvedSourceMapping::MappedProject { frame, project_path, @@ -175,6 +190,10 @@ enum ResolvedSourceMapping { project_path: FileSystemPathReadRef, lines: FileLinesContentReadRef, }, + MappedLibrary { + frame: StackFrame<'static>, + project_path: FileSystemPathReadRef, + }, } async fn resolve_source_mapping( @@ -212,6 +231,7 @@ async fn resolve_source_mapping( .await?; match &*trace { TraceResult::Found(frame) => { + let lib_code = frame.file.contains("/node_modules/"); if let Some(project_path) = frame.file.strip_prefix(concatcp!( "/", SOURCE_MAP_ROOT_NAME, @@ -220,12 +240,19 @@ async fn resolve_source_mapping( "]/" )) { let fs_path = project_dir.join(project_path); - let lines = fs_path.read().lines().await?; - return Ok(ResolvedSourceMapping::MappedProject { - frame: frame.clone(), - project_path: fs_path.await?, - lines, - }); + if lib_code { + return Ok(ResolvedSourceMapping::MappedLibrary { + frame: frame.clone(), + project_path: fs_path.await?, + }); + } else { + let lines = fs_path.read().lines().await?; + return Ok(ResolvedSourceMapping::MappedProject { + frame: frame.clone(), + project_path: fs_path.await?, + lines, + }); + } } Ok(ResolvedSourceMapping::Mapped { frame: frame.clone(), From c0565d4731edf24e7e19bb01d118fc1c4dcf4c57 Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Wed, 26 Apr 2023 16:27:19 +0200 Subject: [PATCH 15/24] improve error handling in update stream (#4705) ### Description Handle fatal errors in update stream, by sending an not found update with issues. 
--------- Co-authored-by: Alex Kirszenberg --- crates/turbopack-dev-server/src/http.rs | 39 +++-- .../src/source/resolve.rs | 15 +- .../turbopack-dev-server/src/update/server.rs | 14 +- .../turbopack-dev-server/src/update/stream.rs | 136 ++++++++++++++---- 4 files changed, 136 insertions(+), 68 deletions(-) diff --git a/crates/turbopack-dev-server/src/http.rs b/crates/turbopack-dev-server/src/http.rs index 681b43196d154..5635b564995ad 100644 --- a/crates/turbopack-dev-server/src/http.rs +++ b/crates/turbopack-dev-server/src/http.rs @@ -42,30 +42,27 @@ enum GetFromSourceResult { async fn get_from_source( source: ContentSourceVc, request: TransientInstance, - issue_repoter: IssueReporterVc, ) -> Result { - Ok( - match &*resolve_source_request(source, request, issue_repoter).await? { - ResolveSourceRequestResult::Static(static_content_vc, header_overwrites) => { - let static_content = static_content_vc.await?; - if let AssetContent::File(file) = &*static_content.content.content().await? { - GetFromSourceResult::Static { - content: file.await?, - status_code: static_content.status_code, - headers: static_content.headers.await?, - header_overwrites: header_overwrites.await?, - } - } else { - GetFromSourceResult::NotFound + Ok(match &*resolve_source_request(source, request).await? { + ResolveSourceRequestResult::Static(static_content_vc, header_overwrites) => { + let static_content = static_content_vc.await?; + if let AssetContent::File(file) = &*static_content.content.content().await? { + GetFromSourceResult::Static { + content: file.await?, + status_code: static_content.status_code, + headers: static_content.headers.await?, + header_overwrites: header_overwrites.await?, } + } else { + GetFromSourceResult::NotFound } - ResolveSourceRequestResult::HttpProxy(proxy) => { - GetFromSourceResult::HttpProxy(proxy.await?) 
- } - ResolveSourceRequestResult::NotFound => GetFromSourceResult::NotFound, } - .cell(), - ) + ResolveSourceRequestResult::HttpProxy(proxy) => { + GetFromSourceResult::HttpProxy(proxy.await?) + } + ResolveSourceRequestResult::NotFound => GetFromSourceResult::NotFound, + } + .cell()) } /// Processes an HTTP request within a given content source and returns the @@ -77,7 +74,7 @@ pub async fn process_request_with_content_source( ) -> Result> { let original_path = request.uri().path().to_string(); let request = http_request_to_source_request(request).await?; - let result = get_from_source(source, TransientInstance::new(request), issue_reporter); + let result = get_from_source(source, TransientInstance::new(request)); handle_issues(result, &original_path, "get_from_source", issue_reporter).await?; match &*result.strongly_consistent().await? { GetFromSourceResult::Static { diff --git a/crates/turbopack-dev-server/src/source/resolve.rs b/crates/turbopack-dev-server/src/source/resolve.rs index 6f5eab09fa77e..10e46ec084db0 100644 --- a/crates/turbopack-dev-server/src/source/resolve.rs +++ b/crates/turbopack-dev-server/src/source/resolve.rs @@ -6,7 +6,6 @@ use std::{ use anyhow::{bail, Result}; use hyper::Uri; use turbo_tasks::{TransientInstance, Value}; -use turbopack_core::issue::IssueReporterVc; use super::{ headers::{HeaderValue, Headers}, @@ -15,10 +14,7 @@ use super::{ ContentSourceContent, ContentSourceDataVary, ContentSourceResult, ContentSourceVc, HeaderListVc, ProxyResultVc, StaticContentVc, }; -use crate::{ - handle_issues, - source::{ContentSource, ContentSourceData, GetContentSourceContent}, -}; +use crate::source::{ContentSource, ContentSourceData, GetContentSourceContent}; /// The result of [`resolve_source_request`]. 
Similar to a /// `ContentSourceContent`, but without the `Rewrite` variant as this is taken @@ -36,7 +32,6 @@ pub enum ResolveSourceRequestResult { pub async fn resolve_source_request( source: ContentSourceVc, request: TransientInstance, - issue_reporter: IssueReporterVc, ) -> Result { let mut data = ContentSourceData::default(); let mut current_source = source; @@ -47,14 +42,6 @@ pub async fn resolve_source_request( let mut response_header_overwrites = Vec::new(); loop { let result = current_source.get(¤t_asset_path, Value::new(data)); - handle_issues( - result, - &original_path, - "get content from source", - issue_reporter, - ) - .await?; - match &*result.strongly_consistent().await? { ContentSourceResult::NotFound => break Ok(ResolveSourceRequestResult::NotFound.cell()), ContentSourceResult::NeedData(needed) => { diff --git a/crates/turbopack-dev-server/src/update/server.rs b/crates/turbopack-dev-server/src/update/server.rs index 1055466c4c6a3..0bce2239988e8 100644 --- a/crates/turbopack-dev-server/src/update/server.rs +++ b/crates/turbopack-dev-server/src/update/server.rs @@ -67,12 +67,11 @@ impl UpdateServer

{ let source = source_provider.get_source(); resolve_source_request( source, - TransientInstance::new(request), - self.issue_reporter + TransientInstance::new(request) ) } }; - match UpdateStream::new(TransientInstance::new(Box::new(get_content))).await { + match UpdateStream::new(resource.to_string(), TransientInstance::new(Box::new(get_content))).await { Ok(stream) => { streams.insert(resource, stream); } @@ -94,7 +93,14 @@ impl UpdateServer

{ } } Some((resource, update)) = streams.next() => { - Self::send_update(&mut client, &mut streams, resource, &update).await?; + match update { + Ok(update) => { + Self::send_update(&mut client, &mut streams, resource, &update).await?; + } + Err(err) => { + eprintln!("Failed to get update for {resource}: {}", PrettyPrintError(&err)); + } + } } else => break } diff --git a/crates/turbopack-dev-server/src/update/stream.rs b/crates/turbopack-dev-server/src/update/stream.rs index fdfd7bbf41e10..9e62ea35f6e6d 100644 --- a/crates/turbopack-dev-server/src/update/stream.rs +++ b/crates/turbopack-dev-server/src/update/stream.rs @@ -1,12 +1,20 @@ use std::pin::Pin; -use anyhow::{bail, Result}; +use anyhow::Result; use futures::{prelude::*, Stream}; use tokio::sync::mpsc::Sender; use tokio_stream::wrappers::ReceiverStream; -use turbo_tasks::{CollectiblesSource, IntoTraitRef, State, TraitRef, TransientInstance}; +use turbo_tasks::{ + primitives::StringVc, CollectiblesSource, IntoTraitRef, State, TraitRef, TransientInstance, +}; +use turbo_tasks_fs::{FileSystem, FileSystemPathVc}; use turbopack_core::{ - issue::{IssueVc, PlainIssueReadRef}, + error::PrettyPrintError, + issue::{ + Issue, IssueSeverity, IssueSeverityVc, IssueVc, OptionIssueProcessingPathItemsVc, + PlainIssueReadRef, + }, + server_fs::ServerFileSystemVc, version::{ NotFoundVersionVc, PartialUpdate, TotalUpdate, Update, UpdateReadRef, VersionVc, VersionedContent, @@ -38,12 +46,43 @@ fn extend_issues(issues: &mut Vec, new_issues: Vec, ) -> Result { let content = get_content(); + let mut plain_issues = peek_issues(content).await?; + + let content_value = match content.await { + Ok(content) => content, + Err(e) => { + plain_issues.push( + FatalStreamIssue { + resource: resource.to_string(), + description: StringVc::cell(format!("{}", PrettyPrintError(&e))), + } + .cell() + .as_issue() + .into_plain(OptionIssueProcessingPathItemsVc::none()) + .await?, + ); + + let update = Update::Total(TotalUpdate { + to: 
NotFoundVersionVc::new() + .as_version() + .into_trait_ref() + .await?, + }) + .cell(); + return Ok(UpdateStreamItem::Found { + update: update.await?, + issues: plain_issues, + } + .cell()); + } + }; - match *content.await? { + match *content_value { ResolveSourceRequestResult::Static(static_content_vc, _) => { let static_content = static_content_vc.await?; @@ -56,8 +95,7 @@ async fn get_update_stream_item( let from = from.get(); let update = resolved_content.update(from); - let mut plain_issues = peek_issues(update).await?; - extend_issues(&mut plain_issues, peek_issues(content).await?); + extend_issues(&mut plain_issues, peek_issues(update).await?); let update = update.await?; @@ -74,7 +112,7 @@ async fn get_update_stream_item( return Ok(UpdateStreamItem::NotFound.cell()); } - let plain_issues = peek_issues(proxy_result).await?; + extend_issues(&mut plain_issues, peek_issues(proxy_result).await?); let from = from.get(); if let Some(from) = ProxyResultVc::resolve_from(from).await? { @@ -98,8 +136,6 @@ async fn get_update_stream_item( .cell()) } _ => { - let plain_issues = peek_issues(content).await?; - let update = if plain_issues.is_empty() { // Client requested a non-existing asset // It might be removed in meantime, reload client @@ -127,19 +163,17 @@ async fn get_update_stream_item( #[turbo_tasks::function] async fn compute_update_stream( + resource: &str, from: VersionStateVc, get_content: TransientInstance, - sender: TransientInstance>, -) -> Result<()> { - let item = get_update_stream_item(from, get_content) + sender: TransientInstance>>, +) { + let item = get_update_stream_item(resource, from, get_content) .strongly_consistent() - .await?; - - if sender.send(item).await.is_err() { - bail!("channel closed"); - } + .await; - Ok(()) + // Send update. Ignore channel closed error. 
+ let _ = sender.send(item).await; } #[turbo_tasks::value] @@ -172,10 +206,15 @@ impl VersionStateVc { } } -pub(super) struct UpdateStream(Pin + Send + Sync>>); +pub(super) struct UpdateStream( + Pin> + Send + Sync>>, +); impl UpdateStream { - pub async fn new(get_content: TransientInstance) -> Result { + pub async fn new( + resource: String, + get_content: TransientInstance, + ) -> Result { let (sx, rx) = tokio::sync::mpsc::channel(32); let content = get_content(); @@ -190,13 +229,18 @@ impl UpdateStream { }; let version_state = VersionStateVc::new(version.into_trait_ref().await?).await?; - compute_update_stream(version_state, get_content, TransientInstance::new(sx)); + compute_update_stream( + &resource, + version_state, + get_content, + TransientInstance::new(sx), + ); let mut last_had_issues = false; let stream = ReceiverStream::new(rx).filter_map(move |item| { let (has_issues, issues_changed) = - if let UpdateStreamItem::Found { issues, .. } = &*item { + if let Some(UpdateStreamItem::Found { issues, .. }) = item.as_deref().ok() { let has_issues = !issues.is_empty(); let issues_changed = has_issues != last_had_issues; last_had_issues = has_issues; @@ -206,12 +250,8 @@ impl UpdateStream { }; async move { - match &*item { - UpdateStreamItem::NotFound => { - // Propagate not found updates so we can drop this update stream. - Some(item) - } - UpdateStreamItem::Found { update, .. } => { + match item.as_deref() { + Ok(UpdateStreamItem::Found { update, .. }) => { match &**update { Update::Partial(PartialUpdate { to, .. 
}) | Update::Total(TotalUpdate { to }) => { @@ -232,6 +272,10 @@ impl UpdateStream { } } } + _ => { + // Propagate other updates + Some(item) + } } } }); @@ -241,7 +285,7 @@ impl UpdateStream { } impl Stream for UpdateStream { - type Item = UpdateStreamItemReadRef; + type Item = Result; fn poll_next( self: Pin<&mut Self>, @@ -260,3 +304,37 @@ pub enum UpdateStreamItem { issues: Vec, }, } + +#[turbo_tasks::value(serialization = "none")] +struct FatalStreamIssue { + description: StringVc, + resource: String, +} + +#[turbo_tasks::value_impl] +impl Issue for FatalStreamIssue { + #[turbo_tasks::function] + fn severity(&self) -> IssueSeverityVc { + IssueSeverity::Fatal.into() + } + + #[turbo_tasks::function] + fn context(&self) -> FileSystemPathVc { + ServerFileSystemVc::new().root().join(&self.resource) + } + + #[turbo_tasks::function] + fn category(&self) -> StringVc { + StringVc::cell("websocket".to_string()) + } + + #[turbo_tasks::function] + fn title(&self) -> StringVc { + StringVc::cell("Fatal error while getting content to stream".to_string()) + } + + #[turbo_tasks::function] + fn description(&self) -> StringVc { + self.description + } +} From 34a0740eb60c40771f8699a09c343d9064a89dfd Mon Sep 17 00:00:00 2001 From: Caleb Webber Date: Wed, 26 Apr 2023 12:14:13 -0400 Subject: [PATCH 16/24] remove box_syntax (#4667) ### Description Box syntax removed in https://github.com/rust-lang/rust/pull/108471 Removed all `#![features(box_syntax)]` and ran `cargo fix --broken-code` to replace all previous box_patterns uses of `box` with `Box::new()`. ### Testing Instructions No testing needed. 
--- crates/turbo-tasks-fs/src/lib.rs | 12 +- crates/turbo-tasks-memory/src/lib.rs | 1 - crates/turbo-tasks-memory/src/task.rs | 34 +++--- .../turbo-tasks-memory/src/task/meta_state.rs | 8 +- crates/turbo-tasks-testing/src/lib.rs | 2 - crates/turbo-tasks/src/lib.rs | 1 - crates/turbopack-css/src/lib.rs | 1 - crates/turbopack-css/src/parse.rs | 4 +- crates/turbopack-css/src/references/import.rs | 10 +- crates/turbopack-css/src/references/mod.rs | 4 +- crates/turbopack-css/src/references/url.rs | 4 +- .../src/analyzer/builtin.rs | 29 +++-- .../src/analyzer/graph.rs | 44 +++---- .../src/analyzer/imports.rs | 6 +- .../src/analyzer/linker.rs | 2 +- .../turbopack-ecmascript/src/analyzer/mod.rs | 20 ++-- .../src/analyzer/well_known.rs | 110 +++++++++++------- crates/turbopack-ecmascript/src/code_gen.rs | 12 +- crates/turbopack-ecmascript/src/lib.rs | 1 - crates/turbopack-ecmascript/src/parse.rs | 4 +- .../turbopack-ecmascript/src/path_visitor.rs | 4 +- .../src/references/amd.rs | 6 +- .../src/references/cjs.rs | 6 +- .../src/references/esm/base.rs | 4 +- .../src/references/esm/binding.rs | 8 +- .../src/references/esm/dynamic.rs | 2 +- .../src/references/esm/export.rs | 12 +- .../src/references/mod.rs | 49 +++++--- .../src/references/require_context.rs | 2 +- .../src/transform/server_to_client_proxy.rs | 16 +-- .../src/tree_shake/graph.rs | 21 ++-- .../src/webpack/references.rs | 4 +- crates/turbopack/src/lib.rs | 1 - 33 files changed, 237 insertions(+), 207 deletions(-) diff --git a/crates/turbo-tasks-fs/src/lib.rs b/crates/turbo-tasks-fs/src/lib.rs index 2d26969b33fb4..0d2174eda7532 100644 --- a/crates/turbo-tasks-fs/src/lib.rs +++ b/crates/turbo-tasks-fs/src/lib.rs @@ -3,7 +3,6 @@ #![feature(min_specialization)] #![feature(iter_advance_by)] #![feature(io_error_more)] -#![feature(box_syntax)] #![feature(round_char_boundary)] pub mod attach; @@ -1683,9 +1682,9 @@ impl FileContent { let de = &mut serde_json::Deserializer::from_reader(file.read()); match 
serde_path_to_error::deserialize(de) { Ok(data) => FileJsonContent::Content(data), - Err(e) => FileJsonContent::Unparseable( - box UnparseableJson::from_serde_path_to_error(e), - ), + Err(e) => FileJsonContent::Unparseable(Box::new( + UnparseableJson::from_serde_path_to_error(e), + )), } } FileContent::NotFound => FileJsonContent::NotFound, @@ -1709,9 +1708,8 @@ impl FileContent { "text content doesn't contain any json data", ), }, - Err(e) => FileJsonContent::Unparseable(box UnparseableJson::from_jsonc_error( - e, - string.as_ref(), + Err(e) => FileJsonContent::Unparseable(Box::new( + UnparseableJson::from_jsonc_error(e, string.as_ref()), )), }, Err(_) => FileJsonContent::unparseable("binary is not valid utf-8 text"), diff --git a/crates/turbo-tasks-memory/src/lib.rs b/crates/turbo-tasks-memory/src/lib.rs index dd16d3c64b243..0a94f25b4a226 100644 --- a/crates/turbo-tasks-memory/src/lib.rs +++ b/crates/turbo-tasks-memory/src/lib.rs @@ -1,6 +1,5 @@ #![feature(hash_drain_filter)] #![feature(option_get_or_insert_default)] -#![feature(box_syntax)] #![feature(type_alias_impl_trait)] #![feature(lint_reasons)] #![feature(box_patterns)] diff --git a/crates/turbo-tasks-memory/src/task.rs b/crates/turbo-tasks-memory/src/task.rs index c3a2e383bf2aa..98f6486c6013b 100644 --- a/crates/turbo-tasks-memory/src/task.rs +++ b/crates/turbo-tasks-memory/src/task.rs @@ -528,10 +528,10 @@ impl Task { Self { id, ty, - state: RwLock::new(TaskMetaState::Full(box TaskState::new( + state: RwLock::new(TaskMetaState::Full(Box::new(TaskState::new( description, stats_type, - ))), + )))), } } @@ -546,10 +546,8 @@ impl Task { Self { id, ty, - state: RwLock::new(TaskMetaState::Full(box TaskState::new_scheduled_in_scope( - description, - scope, - stats_type, + state: RwLock::new(TaskMetaState::Full(Box::new( + TaskState::new_scheduled_in_scope(description, scope, stats_type), ))), } } @@ -565,10 +563,8 @@ impl Task { Self { id, ty, - state: RwLock::new(TaskMetaState::Full(box 
TaskState::new_scheduled_in_scope( - description, - scope, - stats_type, + state: RwLock::new(TaskMetaState::Full(Box::new( + TaskState::new_scheduled_in_scope(description, scope, stats_type), ))), } } @@ -579,18 +575,18 @@ impl Task { trait_type_id: TraitTypeId, stats_type: StatsType, ) -> Self { - let ty = TaskType::ReadScopeCollectibles(box ReadScopeCollectiblesTaskType { + let ty = TaskType::ReadScopeCollectibles(Box::new(ReadScopeCollectiblesTaskType { scope: target_scope, trait_type: trait_type_id, - }); + })); let description = Self::get_event_description_static(id, &ty); Self { id, ty, - state: RwLock::new(TaskMetaState::Full(box TaskState::new( + state: RwLock::new(TaskMetaState::Full(Box::new(TaskState::new( description, stats_type, - ))), + )))), } } @@ -601,19 +597,19 @@ impl Task { trait_type_id: TraitTypeId, stats_type: StatsType, ) -> Self { - let ty = TaskType::ReadTaskCollectibles(box ReadTaskCollectiblesTaskType { + let ty = TaskType::ReadTaskCollectibles(Box::new(ReadTaskCollectiblesTaskType { task: target_task, trait_type: trait_type_id, - }); + })); let description = Self::get_event_description_static(id, &ty); Self { id, ty, - state: RwLock::new(TaskMetaState::Full(box TaskState::new_root_scoped( + state: RwLock::new(TaskMetaState::Full(Box::new(TaskState::new_root_scoped( description, scope, stats_type, - ))), + )))), } } @@ -2787,7 +2783,7 @@ impl Task { if unset { *state = TaskMetaState::Unloaded(UnloadedTaskState { stats_type }); } else { - *state = TaskMetaState::Partial(box PartialTaskState { scopes, stats_type }); + *state = TaskMetaState::Partial(Box::new(PartialTaskState { scopes, stats_type })); } drop(state); diff --git a/crates/turbo-tasks-memory/src/task/meta_state.rs b/crates/turbo-tasks-memory/src/task/meta_state.rs index e7886c1f9fd48..548bcc5441b49 100644 --- a/crates/turbo-tasks-memory/src/task/meta_state.rs +++ b/crates/turbo-tasks-memory/src/task/meta_state.rs @@ -187,7 +187,8 @@ impl<'a> TaskMetaStateWriteGuard<'a> { ) 
.into_partial() .unwrap(); - *guard = TaskMetaState::Full(box partial.into_full(task.get_event_description())); + *guard = + TaskMetaState::Full(Box::new(partial.into_full(task.get_event_description()))); } TaskMetaState::Unloaded(_) => { let unloaded = replace( @@ -199,7 +200,8 @@ impl<'a> TaskMetaStateWriteGuard<'a> { ) .into_unloaded() .unwrap(); - *guard = TaskMetaState::Full(box unloaded.into_full(task.get_event_description())); + *guard = + TaskMetaState::Full(Box::new(unloaded.into_full(task.get_event_description()))); } } WriteGuard::new(guard, TaskMetaState::as_full, TaskMetaState::as_full_mut) @@ -228,7 +230,7 @@ impl<'a> TaskMetaStateWriteGuard<'a> { ) .into_unloaded() .unwrap(); - *guard = TaskMetaState::Partial(box unloaded.into_partial()); + *guard = TaskMetaState::Partial(Box::new(unloaded.into_partial())); TaskMetaStateWriteGuard::Partial(WriteGuard::new( guard, TaskMetaState::as_partial, diff --git a/crates/turbo-tasks-testing/src/lib.rs b/crates/turbo-tasks-testing/src/lib.rs index 54680d55fe937..a4dd220d1b39c 100644 --- a/crates/turbo-tasks-testing/src/lib.rs +++ b/crates/turbo-tasks-testing/src/lib.rs @@ -1,7 +1,5 @@ //! Testing utilities and macros for turbo-tasks and applications based on it. 
-#![feature(box_syntax)] - mod macros; pub mod retry; diff --git a/crates/turbo-tasks/src/lib.rs b/crates/turbo-tasks/src/lib.rs index 177042679218c..f9d8b657293d9 100644 --- a/crates/turbo-tasks/src/lib.rs +++ b/crates/turbo-tasks/src/lib.rs @@ -28,7 +28,6 @@ #![feature(hash_drain_filter)] #![deny(unsafe_op_in_unsafe_fn)] #![feature(result_flattening)] -#![feature(box_syntax)] #![feature(error_generic_member_access)] #![feature(provide_any)] #![feature(new_uninit)] diff --git a/crates/turbopack-css/src/lib.rs b/crates/turbopack-css/src/lib.rs index 229661bdc789f..7c18bc012cbb6 100644 --- a/crates/turbopack-css/src/lib.rs +++ b/crates/turbopack-css/src/lib.rs @@ -1,6 +1,5 @@ #![feature(min_specialization)] #![feature(box_patterns)] -#![feature(box_syntax)] #![feature(iter_intersperse)] #![feature(int_roundings)] diff --git a/crates/turbopack-css/src/parse.rs b/crates/turbopack-css/src/parse.rs index fc31976d031c6..e50fae880a1ec 100644 --- a/crates/turbopack-css/src/parse.rs +++ b/crates/turbopack-css/src/parse.rs @@ -161,11 +161,11 @@ async fn parse_content( let handler = Handler::with_emitter( true, false, - box IssueEmitter { + Box::new(IssueEmitter { source, source_map: source_map.clone(), title: Some("Parsing css source code failed".to_string()), - }, + }), ); let fm = source_map.new_source_file(FileName::Custom(ident_str.to_string()), string); diff --git a/crates/turbopack-css/src/references/import.rs b/crates/turbopack-css/src/references/import.rs index 2d5d675f3aed3..7e1bb9d18ad08 100644 --- a/crates/turbopack-css/src/references/import.rs +++ b/crates/turbopack-css/src/references/import.rs @@ -110,7 +110,7 @@ impl ImportAttributes { } // something random that's never gonna be in real css - let mut rule = Rule::ListOfComponentValues(box ListOfComponentValues { + let mut rule = Rule::ListOfComponentValues(Box::new(ListOfComponentValues { span: DUMMY_SP, children: vec![ComponentValue::PreservedToken(Box::new(token( Token::String { @@ -118,23 +118,23 @@ impl 
ImportAttributes { raw: r#""""__turbopack_placeholder__""""#.into(), }, )))], - }); + })); fn at_rule(name: &str, prelude: AtRulePrelude, inner_rule: Rule) -> Rule { - Rule::AtRule(box AtRule { + Rule::AtRule(Box::new(AtRule { span: DUMMY_SP, name: AtRuleName::Ident(Ident { span: DUMMY_SP, value: name.into(), raw: None, }), - prelude: Some(box prelude), + prelude: Some(Box::new(prelude)), block: Some(SimpleBlock { span: DUMMY_SP, name: token(Token::LBrace), value: vec![ComponentValue::from(inner_rule)], }), - }) + })) } if let Some(media) = &self.media { diff --git a/crates/turbopack-css/src/references/mod.rs b/crates/turbopack-css/src/references/mod.rs index ad846ee20d50f..110817577619b 100644 --- a/crates/turbopack-css/src/references/mod.rs +++ b/crates/turbopack-css/src/references/mod.rs @@ -58,11 +58,11 @@ pub async fn analyze_css_stylesheet( let handler = Handler::with_emitter( true, false, - box IssueEmitter { + Box::new(IssueEmitter { source, source_map: source_map.clone(), title: None, - }, + }), ); let globals = Globals::new(); HANDLER.set(&handler, || { diff --git a/crates/turbopack-css/src/references/url.rs b/crates/turbopack-css/src/references/url.rs index 2adb9f6ff33d2..185fe24f63e6d 100644 --- a/crates/turbopack-css/src/references/url.rs +++ b/crates/turbopack-css/src/references/url.rs @@ -125,11 +125,11 @@ impl CodeGenerateable for UrlAssetReference { visitors.push( create_visitor!((&this.path.await?), visit_mut_url(u: &mut Url) { - u.value = Some(box UrlValue::Str(Str { + u.value = Some(Box::new(UrlValue::Str(Str { span: DUMMY_SP, value: relative_path.as_str().into(), raw: None, - })) + }))) }), ); } diff --git a/crates/turbopack-ecmascript/src/analyzer/builtin.rs b/crates/turbopack-ecmascript/src/analyzer/builtin.rs index 1298fe4986112..c5eaf7e7c577f 100644 --- a/crates/turbopack-ecmascript/src/analyzer/builtin.rs +++ b/crates/turbopack-ecmascript/src/analyzer/builtin.rs @@ -74,7 +74,7 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { *value 
= JsValue::alternatives( take(alts) .into_iter() - .map(|alt| JsValue::member(box alt, prop.clone())) + .map(|alt| JsValue::member(Box::new(alt), prop.clone())) .collect(), ); true @@ -87,7 +87,7 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { } => { fn items_to_alternatives(items: &mut Vec, prop: &mut JsValue) -> JsValue { items.push(JsValue::unknown( - JsValue::member(box JsValue::array(Vec::new()), box take(prop)), + JsValue::member(Box::new(JsValue::array(Vec::new())), Box::new(take(prop))), "unknown array prototype methods or values", )); JsValue::alternatives(take(items)) @@ -105,7 +105,7 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { true } else { *value = JsValue::unknown( - JsValue::member(box take(obj), box take(prop)), + JsValue::member(Box::new(take(obj)), Box::new(take(prop))), "invalid index", ); true @@ -127,7 +127,7 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { *value = JsValue::alternatives( take(alts) .into_iter() - .map(|alt| JsValue::member(box obj.clone(), box alt)) + .map(|alt| JsValue::member(Box::new(obj.clone()), Box::new(alt))) .collect(), ); true @@ -160,7 +160,7 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { ObjectPart::Spread(_) => { values.push(JsValue::unknown( JsValue::member( - box JsValue::object(vec![take(part)]), + Box::new(JsValue::object(vec![take(part)])), prop.clone(), ), "spreaded object", @@ -170,7 +170,10 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { } if include_unknown { values.push(JsValue::unknown( - JsValue::member(box JsValue::object(Vec::new()), box take(prop)), + JsValue::member( + Box::new(JsValue::object(Vec::new())), + Box::new(take(prop)), + ), "unknown object prototype methods or values", )); } @@ -262,7 +265,7 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { *value = JsValue::alternatives( take(alts) .into_iter() - .map(|alt| JsValue::member(box obj.clone(), box alt)) + .map(|alt| JsValue::member(Box::new(obj.clone()), Box::new(alt))) .collect(), ); 
true @@ -336,7 +339,7 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { .enumerate() .map(|(i, item)| { JsValue::call( - box func.clone(), + Box::new(func.clone()), vec![ item, JsValue::Constant(ConstantValue::Num( @@ -361,7 +364,11 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { take(alts) .into_iter() .map(|alt| { - JsValue::member_call(box alt, box prop.clone(), args.clone()) + JsValue::member_call( + Box::new(alt), + Box::new(prop.clone()), + args.clone(), + ) }) .collect(), ); @@ -372,7 +379,7 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { // without special handling, we convert it into a normal call like // `(obj.prop)(arg1, arg2, ...)` *value = JsValue::call( - box JsValue::member(box take(obj), box take(prop)), + Box::new(JsValue::member(Box::new(take(obj)), Box::new(take(prop)))), take(args), ); true @@ -383,7 +390,7 @@ pub fn replace_builtin(value: &mut JsValue) -> bool { *value = JsValue::alternatives( take(alts) .into_iter() - .map(|alt| JsValue::call(box alt, args.clone())) + .map(|alt| JsValue::call(Box::new(alt), args.clone())) .collect(), ); true diff --git a/crates/turbopack-ecmascript/src/analyzer/graph.rs b/crates/turbopack-ecmascript/src/analyzer/graph.rs index c7ae776c7ab06..d40b27255b899 100644 --- a/crates/turbopack-ecmascript/src/analyzer/graph.rs +++ b/crates/turbopack-ecmascript/src/analyzer/graph.rs @@ -343,7 +343,7 @@ impl EvalContext { }) => { let arg = self.eval(arg); - JsValue::logical_not(box arg) + JsValue::logical_not(Box::new(arg)) } Expr::Bin(BinExpr { @@ -474,7 +474,7 @@ impl EvalContext { .. 
}) => { let obj = self.eval(obj); - JsValue::member(box obj, box prop.sym.clone().into()) + JsValue::member(Box::new(obj), Box::new(prop.sym.clone().into())) } Expr::Member(MemberExpr { @@ -484,7 +484,7 @@ impl EvalContext { }) => { let obj = self.eval(obj); let prop = self.eval(&computed.expr); - JsValue::member(box obj, box prop) + JsValue::member(Box::new(obj), Box::new(prop)) } Expr::Call(CallExpr { @@ -499,8 +499,8 @@ impl EvalContext { let args = args.iter().map(|arg| self.eval(&arg.expr)).collect(); if let Expr::Member(MemberExpr { obj, prop, .. }) = unparen(callee) { - let obj = box self.eval(obj); - let prop = box match prop { + let obj = Box::new(self.eval(obj)); + let prop = Box::new(match prop { // TODO avoid clone MemberProp::Ident(i) => i.sym.clone().into(), MemberProp::PrivateName(_) => { @@ -509,10 +509,10 @@ impl EvalContext { ); } MemberProp::Computed(ComputedPropName { expr, .. }) => self.eval(expr), - }; + }); JsValue::member_call(obj, prop, args) } else { - let callee = box self.eval(callee); + let callee = Box::new(self.eval(callee)); JsValue::call(callee, args) } @@ -529,7 +529,7 @@ impl EvalContext { } let args = args.iter().map(|arg| self.eval(&arg.expr)).collect(); - let callee = box JsValue::FreeVar(js_word!("import")); + let callee = Box::new(JsValue::FreeVar(js_word!("import"))); JsValue::call(callee, args) } @@ -928,9 +928,9 @@ impl Analyzer<'_> { let values = self.cur_fn_return_values.take().unwrap(); match values.len() { - 0 => box JsValue::FreeVar(js_word!("undefined")), - 1 => box values.into_iter().next().unwrap(), - _ => box JsValue::alternatives(values), + 0 => Box::new(JsValue::FreeVar(js_word!("undefined"))), + 1 => Box::new(values.into_iter().next().unwrap()), + _ => Box::new(JsValue::alternatives(values)), } } } @@ -1302,7 +1302,7 @@ impl VisitAstPath for Analyzer<'_> { expr.visit_children_with_path(self, ast_path); let return_value = self.eval_context.eval(inner_expr); - let fn_val = JsValue::function(self.cur_fn_ident, box 
return_value); + let fn_val = JsValue::function(self.cur_fn_ident, Box::new(return_value)); self.cur_fn_ident = old_ident; fn_val } @@ -1407,9 +1407,9 @@ impl VisitAstPath for Analyzer<'_> { ast_path.with(AstParentNodeRef::Pat(pat, PatField::Array), |ast_path| { for (idx, elem) in arr.elems.iter().enumerate() { self.current_value = Some(JsValue::member( - box value.clone(), - box JsValue::Constant(ConstantValue::Num(ConstantNumber( - idx as f64, + Box::new(value.clone()), + Box::new(JsValue::Constant(ConstantValue::Num( + ConstantNumber(idx as f64), ))), )); ast_path.with( @@ -1609,7 +1609,7 @@ impl<'a> Analyzer<'a> { } else { self.add_effect(Effect::Conditional { condition, - kind: box cond_kind, + kind: Box::new(cond_kind), ast_path: as_parent_path_with(ast_path, ast_kind), span, in_try: is_in_try(ast_path), @@ -1645,8 +1645,8 @@ impl<'a> Analyzer<'a> { }, ); self.current_value = Some(JsValue::member( - box current_value.clone(), - box key_value, + Box::new(current_value.clone()), + Box::new(key_value), )); ast_path.with( AstParentNodeRef::KeyValuePatProp( @@ -1681,15 +1681,15 @@ impl<'a> Analyzer<'a> { let value = self.eval_context.eval(value); JsValue::alternatives(vec![ JsValue::member( - box current_value.clone(), - box key_value, + Box::new(current_value.clone()), + Box::new(key_value), ), value, ]) } else { JsValue::member( - box current_value.clone(), - box key_value, + Box::new(current_value.clone()), + Box::new(key_value), ) }, ); diff --git a/crates/turbopack-ecmascript/src/analyzer/imports.rs b/crates/turbopack-ecmascript/src/analyzer/imports.rs index f310d21dd8bb7..5d67cc1aece1a 100644 --- a/crates/turbopack-ecmascript/src/analyzer/imports.rs +++ b/crates/turbopack-ecmascript/src/analyzer/imports.rs @@ -124,11 +124,11 @@ impl ImportMap { if let Some((i, i_sym)) = self.imports.get(id) { let r = &self.references[*i]; return Some(JsValue::member( - box JsValue::Module(ModuleValue { + Box::new(JsValue::Module(ModuleValue { module: r.module_path.clone(), 
annotations: r.annotations.clone(), - }), - box i_sym.clone().into(), + })), + Box::new(i_sym.clone().into()), )); } if let Some(i) = self.namespace_imports.get(id) { diff --git a/crates/turbopack-ecmascript/src/analyzer/linker.rs b/crates/turbopack-ecmascript/src/analyzer/linker.rs index b95ea50d70284..6347a970eb909 100644 --- a/crates/turbopack-ecmascript/src/analyzer/linker.rs +++ b/crates/turbopack-ecmascript/src/analyzer/linker.rs @@ -147,7 +147,7 @@ where } total_nodes += 1; done.push(JsValue::unknown( - JsValue::call(box JsValue::function(func_ident, return_value), args), + JsValue::call(Box::new(JsValue::function(func_ident, return_value)), args), "recursive function call", )); } diff --git a/crates/turbopack-ecmascript/src/analyzer/mod.rs b/crates/turbopack-ecmascript/src/analyzer/mod.rs index 07c717a7b21be..d7637db31d583 100644 --- a/crates/turbopack-ecmascript/src/analyzer/mod.rs +++ b/crates/turbopack-ecmascript/src/analyzer/mod.rs @@ -722,36 +722,36 @@ impl JsValue { pub fn equal(a: JsValue, b: JsValue) -> Self { Self::Binary( 1 + a.total_nodes() + b.total_nodes(), - box a, + Box::new(a), BinaryOperator::Equal, - box b, + Box::new(b), ) } pub fn not_equal(a: JsValue, b: JsValue) -> Self { Self::Binary( 1 + a.total_nodes() + b.total_nodes(), - box a, + Box::new(a), BinaryOperator::NotEqual, - box b, + Box::new(b), ) } pub fn strict_equal(a: JsValue, b: JsValue) -> Self { Self::Binary( 1 + a.total_nodes() + b.total_nodes(), - box a, + Box::new(a), BinaryOperator::StrictEqual, - box b, + Box::new(b), ) } pub fn strict_not_equal(a: JsValue, b: JsValue) -> Self { Self::Binary( 1 + a.total_nodes() + b.total_nodes(), - box a, + Box::new(a), BinaryOperator::StrictNotEqual, - box b, + Box::new(b), ) } @@ -3363,7 +3363,7 @@ mod tests { let new_args = handle_args(args, &mut queue, &var_graph, i).await; resolved.push(( format!("{parent} -> {i} call"), - JsValue::call(box func, new_args), + JsValue::call(Box::new(func), new_args), )); } Effect::FreeVar { var, .. 
} => { @@ -3377,7 +3377,7 @@ mod tests { let new_args = handle_args(args, &mut queue, &var_graph, i).await; resolved.push(( format!("{parent} -> {i} member call"), - JsValue::member_call(box obj, box prop, new_args), + JsValue::member_call(Box::new(obj), Box::new(prop), new_args), )); } _ => {} diff --git a/crates/turbopack-ecmascript/src/analyzer/well_known.rs b/crates/turbopack-ecmascript/src/analyzer/well_known.rs index 915c877e6fcd2..cdb8006f01812 100644 --- a/crates/turbopack-ecmascript/src/analyzer/well_known.rs +++ b/crates/turbopack-ecmascript/src/analyzer/well_known.rs @@ -57,7 +57,7 @@ pub async fn well_known_function_call( WellKnownFunctionKind::PathDirname => path_dirname(args), WellKnownFunctionKind::PathResolve(cwd) => path_resolve(*cwd, args), WellKnownFunctionKind::Import => JsValue::unknown( - JsValue::call(box JsValue::WellKnownFunction(kind), args), + JsValue::call(Box::new(JsValue::WellKnownFunction(kind)), args), "import() is not supported", ), WellKnownFunctionKind::Require => require(args), @@ -90,7 +90,7 @@ pub async fn well_known_function_call( cwd.clone().into() } else { JsValue::unknown( - JsValue::call(box JsValue::WellKnownFunction(kind), args), + JsValue::call(Box::new(JsValue::WellKnownFunction(kind)), args), "process.cwd is not specified in the environment", ) } @@ -111,7 +111,7 @@ pub async fn well_known_function_call( } _ => JsValue::unknown( - JsValue::call(box JsValue::WellKnownFunction(kind), args), + JsValue::call(Box::new(JsValue::WellKnownFunction(kind)), args), "unsupported function", ), }) @@ -138,7 +138,9 @@ pub fn object_assign(args: Vec) -> JsValue { } else { JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::ObjectAssign), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::ObjectAssign, + )), vec![], ), "empty arguments for Object.assign", @@ -147,7 +149,9 @@ pub fn object_assign(args: Vec) -> JsValue { } else { JsValue::unknown( JsValue::call( - box 
JsValue::WellKnownFunction(WellKnownFunctionKind::ObjectAssign), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::ObjectAssign, + )), args, ), "only const object assign is supported", @@ -302,7 +306,9 @@ pub fn path_dirname(mut args: Vec) -> JsValue { } JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::PathDirname), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::PathDirname, + )), args, ), "path.dirname with unsupported arguments", @@ -319,7 +325,7 @@ pub fn require(args: Vec) -> JsValue { } else { JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::Require), + Box::new(JsValue::WellKnownFunction(WellKnownFunctionKind::Require)), args, ), "only constant argument is supported", @@ -328,7 +334,7 @@ pub fn require(args: Vec) -> JsValue { } else { JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::Require), + Box::new(JsValue::WellKnownFunction(WellKnownFunctionKind::Require)), args, ), "only a single argument is supported", @@ -344,7 +350,9 @@ pub async fn require_context_require( if args.is_empty() { return Ok(JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::RequireContextRequire(val)), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::RequireContextRequire(val), + )), args, ), "require.context(...).require() requires an argument specifying the module path", @@ -354,7 +362,7 @@ pub async fn require_context_require( let Some(s) = args[0].as_str() else { return Ok(JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::RequireContextRequire(val)), + Box::new(JsValue::WellKnownFunction(WellKnownFunctionKind::RequireContextRequire(val))), args, ), "require.context(...).require() only accepts a single, constant string argument", @@ -365,7 +373,7 @@ pub async fn require_context_require( let Some(m) = map.get(s) else { return 
Ok(JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::RequireContextRequire(val)), + Box::new(JsValue::WellKnownFunction(WellKnownFunctionKind::RequireContextRequire(val))), args, ), "require.context(...).require() can only be called with an argument that's in the context", @@ -389,8 +397,8 @@ pub async fn require_context_require_keys( } else { JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::RequireContextRequireKeys( - val, + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::RequireContextRequireKeys(val), )), args, ), @@ -407,9 +415,9 @@ pub async fn require_context_require_resolve( if args.len() != 1 { return Ok(JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction( + Box::new(JsValue::WellKnownFunction( WellKnownFunctionKind::RequireContextRequireResolve(val), - ), + )), args, ), "require.context(...).resolve() only accepts a single, constant string argument", @@ -419,9 +427,9 @@ pub async fn require_context_require_resolve( let Some(s) = args[0].as_str() else { return Ok(JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction( + Box::new(JsValue::WellKnownFunction( WellKnownFunctionKind::RequireContextRequireResolve(val), - ), + )), args, ), "require.context(...).resolve() only accepts a single, constant string argument", @@ -432,9 +440,9 @@ pub async fn require_context_require_resolve( let Some(m) = map.get(s) else { return Ok(JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction( + Box::new(JsValue::WellKnownFunction( WellKnownFunctionKind::RequireContextRequireResolve(val), - ), + )), args, ), "require.context(...).resolve() can only be called with an argument that's in the context", @@ -452,7 +460,9 @@ pub fn path_to_file_url(args: Vec) -> JsValue { .unwrap_or_else(|_| { JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::PathToFileUrl), + Box::new(JsValue::WellKnownFunction( + 
WellKnownFunctionKind::PathToFileUrl, + )), args, ), "url not parseable: path is relative or has an invalid prefix", @@ -461,7 +471,9 @@ pub fn path_to_file_url(args: Vec) -> JsValue { } else { JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::PathToFileUrl), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::PathToFileUrl, + )), args, ), "only constant argument is supported", @@ -470,7 +482,9 @@ pub fn path_to_file_url(args: Vec) -> JsValue { } else { JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::PathToFileUrl), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::PathToFileUrl, + )), args, ), "only a single argument is supported", @@ -504,7 +518,7 @@ pub fn well_known_function_member(kind: WellKnownFunctionKind, prop: JsValue) -> #[allow(unreachable_patterns)] (kind, _) => { return ( - JsValue::member(box JsValue::WellKnownFunction(kind), box prop), + JsValue::member(Box::new(JsValue::WellKnownFunction(kind)), Box::new(prop)), false, ) } @@ -541,7 +555,7 @@ pub async fn well_known_object_member( #[allow(unreachable_patterns)] _ => { return Ok(( - JsValue::member(box JsValue::WellKnownObject(kind), box prop), + JsValue::member(Box::new(JsValue::WellKnownObject(kind)), Box::new(prop)), false, )) } @@ -554,8 +568,8 @@ fn global_object(prop: JsValue) -> JsValue { Some("assign") => JsValue::WellKnownFunction(WellKnownFunctionKind::ObjectAssign), _ => JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::GlobalObject), - box prop, + Box::new(JsValue::WellKnownObject(WellKnownObjectKind::GlobalObject)), + Box::new(prop), ), "unsupported property on global Object", ), @@ -568,15 +582,17 @@ pub fn path_module_member(kind: WellKnownObjectKind, prop: JsValue) -> JsValue { (.., Some("dirname")) => JsValue::WellKnownFunction(WellKnownFunctionKind::PathDirname), (.., Some("resolve")) => { // cwd is added while resolving in refernces.rs - 
JsValue::WellKnownFunction(WellKnownFunctionKind::PathResolve(box JsValue::from(""))) + JsValue::WellKnownFunction(WellKnownFunctionKind::PathResolve(Box::new(JsValue::from( + "", + )))) } (WellKnownObjectKind::PathModule, Some("default")) => { JsValue::WellKnownObject(WellKnownObjectKind::PathModuleDefault) } _ => JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::PathModule), - box prop, + Box::new(JsValue::WellKnownObject(WellKnownObjectKind::PathModule)), + Box::new(prop), ), "unsupported property on Node.js path module", ), @@ -606,8 +622,8 @@ pub fn fs_module_member(kind: WellKnownObjectKind, prop: JsValue) -> JsValue { } JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::FsModule), - box prop, + Box::new(JsValue::WellKnownObject(WellKnownObjectKind::FsModule)), + Box::new(prop), ), "unsupported property on Node.js fs module", ) @@ -623,8 +639,8 @@ pub fn url_module_member(kind: WellKnownObjectKind, prop: JsValue) -> JsValue { } _ => JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::UrlModule), - box prop, + Box::new(JsValue::WellKnownObject(WellKnownObjectKind::UrlModule)), + Box::new(prop), ), "unsupported property on Node.js url module", ), @@ -645,8 +661,8 @@ pub fn child_process_module_member(kind: WellKnownObjectKind, prop: JsValue) -> } _ => JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::ChildProcess), - box prop, + Box::new(JsValue::WellKnownObject(WellKnownObjectKind::ChildProcess)), + Box::new(prop), ), "unsupported property on Node.js child_process module", ), @@ -663,8 +679,8 @@ fn os_module_member(kind: WellKnownObjectKind, prop: JsValue) -> JsValue { } _ => JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::OsModule), - box prop, + Box::new(JsValue::WellKnownObject(WellKnownObjectKind::OsModule)), + Box::new(prop), ), "unsupported property on Node.js os module", 
), @@ -694,8 +710,8 @@ async fn node_process_member( Some("env") => JsValue::WellKnownObject(WellKnownObjectKind::NodeProcessEnv), _ => JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::NodeProcess), - box prop, + Box::new(JsValue::WellKnownObject(WellKnownObjectKind::NodeProcess)), + Box::new(prop), ), "unsupported property on Node.js process object", ), @@ -707,8 +723,8 @@ fn node_pre_gyp(prop: JsValue) -> JsValue { Some("find") => JsValue::WellKnownFunction(WellKnownFunctionKind::NodePreGypFind), _ => JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::NodePreGyp), - box prop, + Box::new(JsValue::WellKnownObject(WellKnownObjectKind::NodePreGyp)), + Box::new(prop), ), "unsupported property on @mapbox/node-pre-gyp module", ), @@ -720,8 +736,10 @@ fn express(prop: JsValue) -> JsValue { Some("set") => JsValue::WellKnownFunction(WellKnownFunctionKind::NodeExpressSet), _ => JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::NodeExpressApp), - box prop, + Box::new(JsValue::WellKnownObject( + WellKnownObjectKind::NodeExpressApp, + )), + Box::new(prop), ), "unsupported property on require('express')() object", ), @@ -735,8 +753,10 @@ fn protobuf_loader(prop: JsValue) -> JsValue { } _ => JsValue::unknown( JsValue::member( - box JsValue::WellKnownObject(WellKnownObjectKind::NodeProtobufLoader), - box prop, + Box::new(JsValue::WellKnownObject( + WellKnownObjectKind::NodeProtobufLoader, + )), + Box::new(prop), ), "unsupported property on require('@grpc/proto-loader') object", ), diff --git a/crates/turbopack-ecmascript/src/code_gen.rs b/crates/turbopack-ecmascript/src/code_gen.rs index a35d5fbb9139f..943261825958a 100644 --- a/crates/turbopack-ecmascript/src/code_gen.rs +++ b/crates/turbopack-ecmascript/src/code_gen.rs @@ -87,7 +87,7 @@ macro_rules! 
create_visitor { for Box> { fn create<'a>(&'a self) -> Box { - box &**self + Box::new(&**self) } } @@ -101,9 +101,9 @@ macro_rules! create_visitor { ( $ast_path, - box box Visitor { + Box::new(Box::new(Visitor { $name: move |$arg: &mut swc_core::ecma::ast::$ty| $b, - } as Box, + })) as Box, ) }}; (visit_mut_program($arg:ident: &mut Program) $b:block) => {{ @@ -115,7 +115,7 @@ macro_rules! create_visitor { for Box> { fn create<'a>(&'a self) -> Box { - box &**self + Box::new(&**self) } } @@ -129,9 +129,9 @@ macro_rules! create_visitor { ( Vec::new(), - box box Visitor { + Box::new(Box::new(Visitor { visit_mut_program: move |$arg: &mut swc_core::ecma::ast::Program| $b, - } as Box, + })) as Box, ) }}; } diff --git a/crates/turbopack-ecmascript/src/lib.rs b/crates/turbopack-ecmascript/src/lib.rs index 21bd1098af58e..03e656557123c 100644 --- a/crates/turbopack-ecmascript/src/lib.rs +++ b/crates/turbopack-ecmascript/src/lib.rs @@ -1,4 +1,3 @@ -#![feature(box_syntax)] #![feature(box_patterns)] #![feature(min_specialization)] #![feature(iter_intersperse)] diff --git a/crates/turbopack-ecmascript/src/parse.rs b/crates/turbopack-ecmascript/src/parse.rs index ddb45aed55da9..84dadad3e4c8f 100644 --- a/crates/turbopack-ecmascript/src/parse.rs +++ b/crates/turbopack-ecmascript/src/parse.rs @@ -196,11 +196,11 @@ async fn parse_content( let handler = Handler::with_emitter( true, false, - box IssueEmitter { + Box::new(IssueEmitter { source, source_map: source_map.clone(), title: Some("Parsing ecmascript source code failed".to_string()), - }, + }), ); let globals = Arc::new(Globals::new()); let globals_ref = &globals; diff --git a/crates/turbopack-ecmascript/src/path_visitor.rs b/crates/turbopack-ecmascript/src/path_visitor.rs index 895029cbc5092..4a64cd3b81af2 100644 --- a/crates/turbopack-ecmascript/src/path_visitor.rs +++ b/crates/turbopack-ecmascript/src/path_visitor.rs @@ -190,7 +190,7 @@ mod tests { impl VisitorFactory for Box> { fn create<'a>(&'a self) -> Box { - box &**self + 
Box::new(&**self) } } @@ -202,7 +202,7 @@ mod tests { } fn replacer(from: &'static str, to: &'static str) -> impl VisitorFactory { - box StrReplacer { from, to } + Box::new(StrReplacer { from, to }) } fn to_js(m: &Module, cm: &Arc) -> String { diff --git a/crates/turbopack-ecmascript/src/references/amd.rs b/crates/turbopack-ecmascript/src/references/amd.rs index ad6a6ad3ced97..4b58932cda138 100644 --- a/crates/turbopack-ecmascript/src/references/amd.rs +++ b/crates/turbopack-ecmascript/src/references/amd.rs @@ -251,7 +251,7 @@ fn transform_amd_factory( let f = private_ident!("f"); let call_f = Expr::Call(CallExpr { args: deps, - callee: Callee::Expr(box Expr::Ident(f.clone())), + callee: Callee::Expr(Box::new(Expr::Ident(f.clone()))), span: DUMMY_SP, type_args: None, }); @@ -274,12 +274,12 @@ fn transform_amd_factory( "r => r !== undefined && __turbopack_export_value__(r)" )); args.push(ExprOrSpread { - expr: box Expr::Call(CallExpr { + expr: Box::new(Expr::Call(CallExpr { args: deps, callee: Callee::Expr(factory), span: DUMMY_SP, type_args: None, - }), + })), spread: None, }); } diff --git a/crates/turbopack-ecmascript/src/references/cjs.rs b/crates/turbopack-ecmascript/src/references/cjs.rs index 27d31950653c8..aea9912a61155 100644 --- a/crates/turbopack-ecmascript/src/references/cjs.rs +++ b/crates/turbopack-ecmascript/src/references/cjs.rs @@ -175,20 +175,20 @@ impl CodeGenerateable for CjsRequireAssetReference { visitors.push( create_visitor!(exact path, visit_mut_call_expr(call_expr: &mut CallExpr) { call_expr.callee = Callee::Expr( - box Expr::Ident(Ident::new( + Box::new(Expr::Ident(Ident::new( if pm.is_internal_import() { "__turbopack_require__" } else { "__turbopack_external_require__" }.into(), DUMMY_SP - )) + ))) ); let old_args = std::mem::take(&mut call_expr.args); let expr = match old_args.into_iter().next() { Some(ExprOrSpread { expr, spread: None }) => pm.apply(*expr), _ => pm.create(), }; - call_expr.args.push(ExprOrSpread { spread: None, expr: 
box expr }); + call_expr.args.push(ExprOrSpread { spread: None, expr: Box::new(expr) }); }), ); } diff --git a/crates/turbopack-ecmascript/src/references/esm/base.rs b/crates/turbopack-ecmascript/src/references/esm/base.rs index c85740ee8b2a9..15a14d950d862 100644 --- a/crates/turbopack-ecmascript/src/references/esm/base.rs +++ b/crates/turbopack-ecmascript/src/references/esm/base.rs @@ -288,7 +288,7 @@ pub(crate) fn insert_hoisted_stmt(program: &mut Program, stmt: Stmt) { body.insert( 0, ModuleItem::Stmt(Stmt::Expr(ExprStmt { - expr: box Expr::Lit(Lit::Str((*ESM_HOISTING_LOCATION).into())), + expr: Box::new(Expr::Lit(Lit::Str((*ESM_HOISTING_LOCATION).into()))), span: DUMMY_SP, })), ); @@ -313,7 +313,7 @@ pub(crate) fn insert_hoisted_stmt(program: &mut Program, stmt: Stmt) { body.insert( 0, Stmt::Expr(ExprStmt { - expr: box Expr::Lit(Lit::Str((*ESM_HOISTING_LOCATION).into())), + expr: Box::new(Expr::Lit(Lit::Str((*ESM_HOISTING_LOCATION).into()))), span: DUMMY_SP, }), ); diff --git a/crates/turbopack-ecmascript/src/references/esm/binding.rs b/crates/turbopack-ecmascript/src/references/esm/binding.rs index 971768b8e1b47..fb1962fc46e6d 100644 --- a/crates/turbopack-ecmascript/src/references/esm/binding.rs +++ b/crates/turbopack-ecmascript/src/references/esm/binding.rs @@ -58,14 +58,14 @@ impl CodeGenerateable for EsmBinding { if let Some(export) = export { Expr::Member(MemberExpr { span: DUMMY_SP, - obj: box Expr::Ident(Ident::new(imported_module.into(), DUMMY_SP)), + obj: Box::new(Expr::Ident(Ident::new(imported_module.into(), DUMMY_SP))), prop: MemberProp::Computed(ComputedPropName { span: DUMMY_SP, - expr: box Expr::Lit(Lit::Str(Str { + expr: Box::new(Expr::Lit(Lit::Str(Str { span: DUMMY_SP, value: export.into(), raw: None, - })), + }))), }), }) } else { @@ -99,7 +99,7 @@ impl CodeGenerateable for EsmBinding { if let Prop::Shorthand(ident) = prop { // TODO: Merge with the above condition when https://rust-lang.github.io/rfcs/2497-if-let-chains.html lands. 
if let Some(imported_ident) = imported_module.as_deref() { - *prop = Prop::KeyValue(KeyValueProp { key: PropName::Ident(ident.clone()), value: box make_expr(imported_ident, this.export.as_deref())}); + *prop = Prop::KeyValue(KeyValueProp { key: PropName::Ident(ident.clone()), value: Box::new(make_expr(imported_ident, this.export.as_deref()))}); } } }), diff --git a/crates/turbopack-ecmascript/src/references/esm/dynamic.rs b/crates/turbopack-ecmascript/src/references/esm/dynamic.rs index 995cd4715f2a2..f1587dfd339ee 100644 --- a/crates/turbopack-ecmascript/src/references/esm/dynamic.rs +++ b/crates/turbopack-ecmascript/src/references/esm/dynamic.rs @@ -179,7 +179,7 @@ impl CodeGenerateableWithAvailabilityInfo for EsmAsyncAssetReference { ]; } else { call_expr.args = vec![ - ExprOrSpread { spread: None, expr: box expr } + ExprOrSpread { spread: None, expr: Box::new(expr) } ] } }) diff --git a/crates/turbopack-ecmascript/src/references/esm/export.rs b/crates/turbopack-ecmascript/src/references/esm/export.rs index 4057b2011c463..97b0b132ab82d 100644 --- a/crates/turbopack-ecmascript/src/references/esm/export.rs +++ b/crates/turbopack-ecmascript/src/references/esm/export.rs @@ -191,14 +191,14 @@ impl CodeGenerateable for EsmExports { "(() => $expr)" as Expr, expr: Expr = Expr::Member(MemberExpr { span: DUMMY_SP, - obj: box Expr::Ident(Ident::new(ident.into(), DUMMY_SP)), + obj: Box::new(Expr::Ident(Ident::new(ident.into(), DUMMY_SP))), prop: MemberProp::Computed(ComputedPropName { span: DUMMY_SP, - expr: box Expr::Lit(Lit::Str(Str { + expr: Box::new(Expr::Lit(Lit::Str(Str { span: DUMMY_SP, value: (name as &str).into(), raw: None, - })) + }))) }) }) ) @@ -215,14 +215,14 @@ impl CodeGenerateable for EsmExports { } }; if let Some(expr) = expr { - props.push(PropOrSpread::Prop(box Prop::KeyValue(KeyValueProp { + props.push(PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp { key: PropName::Str(Str { span: DUMMY_SP, value: exported.as_ref().into(), raw: None, }), - 
value: box expr, - }))); + value: Box::new(expr), + })))); } } let getters = Expr::Object(ObjectLit { diff --git a/crates/turbopack-ecmascript/src/references/mod.rs b/crates/turbopack-ecmascript/src/references/mod.rs index 3876334efc95f..656d918630b64 100644 --- a/crates/turbopack-ecmascript/src/references/mod.rs +++ b/crates/turbopack-ecmascript/src/references/mod.rs @@ -382,11 +382,11 @@ pub(crate) async fn analyze_ecmascript_module( let handler = Handler::with_emitter( true, false, - box IssueEmitter { + Box::new(IssueEmitter { source, source_map: source_map.clone(), title: None, - }, + }), ); let var_graph = HANDLER.set(&handler, || { GLOBALS.set(globals, || create_graph(program, eval_context)) @@ -801,11 +801,11 @@ pub(crate) async fn analyze_ecmascript_module( let linked_func_call = state .link_value( JsValue::call( - box JsValue::WellKnownFunction( - WellKnownFunctionKind::PathResolve( - box parent_path.path.as_str().into(), - ), - ), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::PathResolve(Box::new( + parent_path.path.as_str().into(), + )), + )), args.clone(), ), in_try, @@ -832,7 +832,9 @@ pub(crate) async fn analyze_ecmascript_module( let linked_func_call = state .link_value( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::PathJoin), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::PathJoin, + )), args.clone(), ), in_try, @@ -860,8 +862,10 @@ pub(crate) async fn analyze_ecmascript_module( let mut show_dynamic_warning = false; let pat = js_value_to_pattern(&args[0]); if pat.is_match("node") && args.len() >= 2 { - let first_arg = - JsValue::member(box args[1].clone(), box 0_f64.into()); + let first_arg = JsValue::member( + Box::new(args[1].clone()), + Box::new(0_f64.into()), + ); let first_arg = state.link_value(first_arg, in_try).await?; let pat = js_value_to_pattern(&first_arg); if !pat.has_constant_parts() { @@ -1062,9 +1066,9 @@ pub(crate) async fn analyze_ecmascript_module( let linked_func_call = 
state .link_value( JsValue::call( - box JsValue::WellKnownFunction( + Box::new(JsValue::WellKnownFunction( WellKnownFunctionKind::PathJoin, - ), + )), vec![ JsValue::FreeVar( "__dirname".into(), @@ -1125,9 +1129,9 @@ pub(crate) async fn analyze_ecmascript_module( let linked_func_call = state .link_value( JsValue::call( - box JsValue::WellKnownFunction( + Box::new(JsValue::WellKnownFunction( WellKnownFunctionKind::PathJoin, - ), + )), vec![ JsValue::FreeVar("__dirname".into()), p.into(), @@ -1565,7 +1569,10 @@ pub(crate) async fn analyze_ecmascript_module( } let func = analysis_state - .link_value(JsValue::member(box obj.clone(), box prop), in_try) + .link_value( + JsValue::member(Box::new(obj.clone()), Box::new(prop)), + in_try, + ) .await?; handle_call( @@ -2029,7 +2036,9 @@ async fn require_resolve_visitor( match values.len() { 0 => JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::RequireResolve), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::RequireResolve, + )), args, ), "unresolveable request", @@ -2040,7 +2049,9 @@ async fn require_resolve_visitor( } else { JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::RequireResolve), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::RequireResolve, + )), args, ), "only a single argument is supported", @@ -2058,7 +2069,9 @@ async fn require_context_visitor( Err(err) => { return Ok(JsValue::unknown( JsValue::call( - box JsValue::WellKnownFunction(WellKnownFunctionKind::RequireContext), + Box::new(JsValue::WellKnownFunction( + WellKnownFunctionKind::RequireContext, + )), args, ), PrettyPrintError(&err).to_string(), diff --git a/crates/turbopack-ecmascript/src/references/require_context.rs b/crates/turbopack-ecmascript/src/references/require_context.rs index 663dba9f32731..17d4d50bf8a09 100644 --- a/crates/turbopack-ecmascript/src/references/require_context.rs +++ 
b/crates/turbopack-ecmascript/src/references/require_context.rs @@ -464,7 +464,7 @@ impl EcmascriptChunkItem for RequireContextChunkItem { context_map .props - .push(PropOrSpread::Prop(box Prop::KeyValue(prop))); + .push(PropOrSpread::Prop(Box::new(Prop::KeyValue(prop)))); } let expr = quote_expr!( diff --git a/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs b/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs index c10ee34c081b4..1bb65d4993f6a 100644 --- a/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs +++ b/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs @@ -18,11 +18,11 @@ pub fn create_proxy_module(transition_name: &str, target_import: &str) -> Progra Program::Module(Module { body: vec![ ModuleItem::Stmt(Stmt::Expr(ExprStmt { - expr: box Expr::Lit(Lit::Str(Str { + expr: Box::new(Expr::Lit(Lit::Str(Str { value: format!("TURBOPACK {{ transition: {transition_name} }}").into(), raw: None, span: DUMMY_SP, - })), + }))), span: DUMMY_SP, })), ModuleItem::ModuleDecl(ModuleDecl::Import(ImportDecl { @@ -30,15 +30,15 @@ pub fn create_proxy_module(transition_name: &str, target_import: &str) -> Progra local: ident.clone(), span: DUMMY_SP, })], - src: box target_import.into(), + src: Box::new(target_import.into()), type_only: false, - asserts: Some(box ObjectLit { + asserts: Some(Box::new(ObjectLit { span: DUMMY_SP, - props: vec![PropOrSpread::Prop(box Prop::KeyValue(KeyValueProp { + props: vec![PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp { key: PropName::Ident(Ident::new(TURBOPACK_HELPER.into(), DUMMY_SP)), - value: box Expr::Lit(true.into()), - }))], - }), + value: Box::new(Expr::Lit(true.into())), + })))], + })), span: DUMMY_SP, })), ModuleItem::Stmt(quote!( diff --git a/crates/turbopack-ecmascript/src/tree_shake/graph.rs b/crates/turbopack-ecmascript/src/tree_shake/graph.rs index 3c59280c546ff..7750f21415a8b 100644 --- a/crates/turbopack-ecmascript/src/tree_shake/graph.rs +++ 
b/crates/turbopack-ecmascript/src/tree_shake/graph.rs @@ -296,9 +296,9 @@ impl DepGraph { .push(ModuleItem::ModuleDecl(ModuleDecl::Import(ImportDecl { span: DUMMY_SP, specifiers, - src: box uri_of_module.clone().into(), + src: Box::new(uri_of_module.clone().into()), type_only: false, - asserts: Some(box create_turbopack_chunk_id_assert(dep)), + asserts: Some(Box::new(create_turbopack_chunk_id_assert(dep))), }))); } @@ -312,10 +312,11 @@ impl DepGraph { // Emit `export { foo }` for var in data.write_vars.iter() { if required_vars.remove(var) { - let assertion_prop = PropOrSpread::Prop(box Prop::KeyValue(KeyValueProp { - key: quote_ident!("__turbopack_var__").into(), - value: box true.into(), - })); + let assertion_prop = + PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp { + key: quote_ident!("__turbopack_var__").into(), + value: Box::new(true.into()), + }))); chunk .body @@ -332,10 +333,10 @@ impl DepGraph { )], src: None, type_only: false, - asserts: Some(box ObjectLit { + asserts: Some(Box::new(ObjectLit { span: DUMMY_SP, props: vec![assertion_prop], - }), + })), }, ))); } @@ -841,10 +842,10 @@ const ASSERT_CHUNK_KEY: &str = "__turbopack_chunk__"; fn create_turbopack_chunk_id_assert(dep: u32) -> ObjectLit { ObjectLit { span: DUMMY_SP, - props: vec![PropOrSpread::Prop(box Prop::KeyValue(KeyValueProp { + props: vec![PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp { key: PropName::Ident(Ident::new(ASSERT_CHUNK_KEY.into(), DUMMY_SP)), value: (dep as f64).into(), - }))], + })))], } } diff --git a/crates/turbopack-ecmascript/src/webpack/references.rs b/crates/turbopack-ecmascript/src/webpack/references.rs index 19ae2cb3d3069..81e30f8630ce9 100644 --- a/crates/turbopack-ecmascript/src/webpack/references.rs +++ b/crates/turbopack-ecmascript/src/webpack/references.rs @@ -46,11 +46,11 @@ pub async fn module_references( let handler = Handler::with_emitter( true, false, - box IssueEmitter { + Box::new(IssueEmitter { source, source_map: source_map.clone(), 
title: Some("Parsing webpack bundle failed".to_string()), - }, + }), ); HANDLER.set(&handler, || { program.visit_with(&mut visitor); diff --git a/crates/turbopack/src/lib.rs b/crates/turbopack/src/lib.rs index 3cc0f0189ec13..9d8697bea6640 100644 --- a/crates/turbopack/src/lib.rs +++ b/crates/turbopack/src/lib.rs @@ -1,5 +1,4 @@ #![feature(box_patterns)] -#![feature(box_syntax)] #![feature(trivial_bounds)] #![feature(min_specialization)] #![feature(map_try_insert)] From 1c8d3aa9621f171a177db0eb068f3027a756ce1a Mon Sep 17 00:00:00 2001 From: Chris Olszewski Date: Wed, 26 Apr 2023 11:21:34 -0700 Subject: [PATCH 17/24] chore: move lockfile ffi boundary (#4629) ### Description In order to avoid Go holding onto memory that's been allocated by Rust we parse the lockfile each time we need to make a lockfile call. This worked fine for the npm implementation as we just needed parse the JSON and it was usable, but Berry requires a lot more work/allocations that makes this strategy unfeasible. A quick sketch of the changes in this PR: - Moves the traversal of the lockfile to be the final step of building the package graph - We now calculate all of the closures of all workspaces at once - Rust FFI now takes the package manager string to select the lockfile implementation. This is a quick prefactor to prepare for hooking up the berry lockfile Reviewer notes: - Each commit can be reviewed on it's own, the first commit ended up getting reverted down the stack so it can be skipped. ### Testing Instructions Existing unit tests/integration tests. 
Manual verification of correct pruning behavior for the npm and pnpm monorepos created by `create-turbo@latest` --- cli/internal/context/context.go | 109 ++--- cli/internal/context/context_test.go | 37 ++ cli/internal/ffi/bindings.h | 2 +- cli/internal/ffi/ffi.go | 45 +- cli/internal/ffi/proto/messages.pb.go | 511 +++++++++++++++----- cli/internal/fs/package_json.go | 20 + cli/internal/fs/package_json_test.go | 10 + cli/internal/lockfile/lockfile.go | 68 ++- cli/internal/lockfile/lockfile_test.go | 45 ++ cli/internal/lockfile/npm_lockfile.go | 21 - cli/internal/lockfile/npm_lockfile_test.go | 74 +++ crates/turborepo-ffi/messages.proto | 28 +- crates/turborepo-ffi/src/lib.rs | 2 +- crates/turborepo-ffi/src/lockfile.rs | 83 +++- crates/turborepo-lockfiles/src/berry/mod.rs | 3 +- crates/turborepo-lockfiles/src/lib.rs | 4 +- 16 files changed, 811 insertions(+), 251 deletions(-) create mode 100644 cli/internal/lockfile/npm_lockfile_test.go diff --git a/cli/internal/context/context.go b/cli/internal/context/context.go index 2376d2d9eb32a..7e556a758facc 100644 --- a/cli/internal/context/context.go +++ b/cli/internal/context/context.go @@ -17,7 +17,6 @@ import ( "github.com/vercel/turbo/cli/internal/workspace" "github.com/Masterminds/semver" - mapset "github.com/deckarep/golang-set" "github.com/pyr-sh/dag" "golang.org/x/sync/errgroup" ) @@ -179,12 +178,6 @@ func BuildPackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *f } c.PackageManager = packageManager - if lockfile, err := c.PackageManager.ReadLockfile(repoRoot, rootPackageJSON); err != nil { - warnings.append(err) - } else { - c.Lockfile = lockfile - } - if err := c.resolveWorkspaceRootDeps(rootPackageJSON, &warnings); err != nil { // TODO(Gaspar) was this the intended return error? 
return nil, fmt.Errorf("could not resolve workspaces: %w", err) @@ -232,6 +225,10 @@ func BuildPackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *f } c.WorkspaceInfos.PackageJSONs[util.RootPkgName] = rootPackageJSON + if err := c.populateExternalDeps(repoRoot, rootPackageJSON, &warnings); err != nil { + return nil, err + } + return c, warnings.errorOrNil() } @@ -247,33 +244,6 @@ func (c *Context) resolveWorkspaceRootDeps(rootPackageJSON *fs.PackageJSON, warn for dep, version := range pkg.Dependencies { pkg.UnresolvedExternalDeps[dep] = version } - if c.Lockfile != nil { - depSet, err := lockfile.TransitiveClosure( - pkg.Dir.ToUnixPath(), - pkg.UnresolvedExternalDeps, - c.Lockfile, - ) - if err != nil { - warnings.append(err) - // Return early to skip using results of incomplete dep graph resolution - return nil - } - pkg.TransitiveDeps = make([]lockfile.Package, 0, depSet.Cardinality()) - for _, v := range depSet.ToSlice() { - dep := v.(lockfile.Package) - pkg.TransitiveDeps = append(pkg.TransitiveDeps, dep) - } - sort.Sort(lockfile.ByKey(pkg.TransitiveDeps)) - hashOfExternalDeps, err := fs.HashObject(pkg.TransitiveDeps) - if err != nil { - return err - } - pkg.ExternalDepsHash = hashOfExternalDeps - } else { - pkg.TransitiveDeps = []lockfile.Package{} - pkg.ExternalDepsHash = "" - } - return nil } @@ -326,37 +296,18 @@ func (c *Context) populateWorkspaceGraphForPackageJSON(pkg *fs.PackageJSON, root } } - externalDeps, err := lockfile.TransitiveClosure( - pkg.Dir.ToUnixPath(), - pkg.UnresolvedExternalDeps, - c.Lockfile, - ) - if err != nil { - warnings.append(err) - // reset external deps to original state - externalDeps = mapset.NewSet() - } - // when there are no internal dependencies, we need to still add these leafs to the graph if internalDepsSet.Len() == 0 { c.WorkspaceGraph.Connect(dag.BasicEdge(pkg.Name, core.ROOT_NODE_NAME)) } - pkg.TransitiveDeps = make([]lockfile.Package, 0, externalDeps.Cardinality()) - for _, dependency := range 
externalDeps.ToSlice() { - dependency := dependency.(lockfile.Package) - pkg.TransitiveDeps = append(pkg.TransitiveDeps, dependency) - } + pkg.InternalDeps = make([]string, 0, internalDepsSet.Len()) for _, v := range internalDepsSet.List() { pkg.InternalDeps = append(pkg.InternalDeps, fmt.Sprintf("%v", v)) } + sort.Strings(pkg.InternalDeps) - sort.Sort(lockfile.ByKey(pkg.TransitiveDeps)) - hashOfExternalDeps, err := fs.HashObject(pkg.TransitiveDeps) - if err != nil { - return err - } - pkg.ExternalDepsHash = hashOfExternalDeps + return nil } @@ -387,6 +338,39 @@ func (c *Context) parsePackageJSON(repoRoot turbopath.AbsoluteSystemPath, pkgJSO return nil } +func (c *Context) externalWorkspaceDeps() map[turbopath.AnchoredUnixPath]map[string]string { + workspaces := make(map[turbopath.AnchoredUnixPath]map[string]string, len(c.WorkspaceInfos.PackageJSONs)) + for _, pkg := range c.WorkspaceInfos.PackageJSONs { + workspaces[pkg.Dir.ToUnixPath()] = pkg.UnresolvedExternalDeps + } + return workspaces +} + +func (c *Context) populateExternalDeps(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON, warnings *Warnings) error { + if lockFile, err := c.PackageManager.ReadLockfile(repoRoot, rootPackageJSON); err != nil { + warnings.append(err) + rootPackageJSON.TransitiveDeps = nil + rootPackageJSON.ExternalDepsHash = "" + } else { + c.Lockfile = lockFile + if closures, err := lockfile.AllTransitiveClosures(c.externalWorkspaceDeps(), c.Lockfile); err != nil { + warnings.append(err) + } else { + for _, pkg := range c.WorkspaceInfos.PackageJSONs { + if closure, ok := closures[pkg.Dir.ToUnixPath()]; ok { + if err := pkg.SetExternalDeps(closure); err != nil { + return err + } + } else { + return fmt.Errorf("Unable to calculate closure for workspace %s", pkg.Dir.ToString()) + } + } + } + } + + return nil +} + // InternalDependencies finds all dependencies required by the slice of starting // packages, as well as the starting packages themselves. 
func (c *Context) InternalDependencies(start []string) ([]string, error) { @@ -424,13 +408,14 @@ func (c *Context) ChangedPackages(previousLockfile lockfile.Lockfile) ([]string, return nil, fmt.Errorf("Cannot detect changed packages without previous and current lockfile") } + closures, err := lockfile.AllTransitiveClosures(c.externalWorkspaceDeps(), previousLockfile) + if err != nil { + return nil, err + } + didPackageChange := func(pkgName string, pkg *fs.PackageJSON) bool { - previousDeps, err := lockfile.TransitiveClosure( - pkg.Dir.ToUnixPath(), - pkg.UnresolvedExternalDeps, - previousLockfile, - ) - if err != nil || previousDeps.Cardinality() != len(pkg.TransitiveDeps) { + previousDeps, ok := closures[pkg.Dir.ToUnixPath()] + if !ok || previousDeps.Cardinality() != len(pkg.TransitiveDeps) { return true } diff --git a/cli/internal/context/context_test.go b/cli/internal/context/context_test.go index 692c0a8b694a5..7e04590416c25 100644 --- a/cli/internal/context/context_test.go +++ b/cli/internal/context/context_test.go @@ -1,14 +1,20 @@ package context import ( + "errors" "os" "path/filepath" "regexp" + "sync" "testing" testifyAssert "github.com/stretchr/testify/assert" "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/packagemanager" "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/workspace" + "gotest.tools/v3/assert" ) func Test_isWorkspaceReference(t *testing.T) { @@ -144,6 +150,37 @@ func TestBuildPackageGraph_DuplicateNames(t *testing.T) { testifyAssert.Regexp(t, regexp.MustCompile("^Failed to add workspace \"same-name\".+$"), actualErr) } +func Test_populateExternalDeps_NoTransitiveDepsWithoutLockfile(t *testing.T) { + path := getTestDir(t, "dupe-workspace-names") + pkgJSON := &fs.PackageJSON{ + Name: "dupe-workspace-names", + PackageManager: "pnpm@7.15.0", + } + + pm, err := packagemanager.GetPackageManager(path, pkgJSON) + 
assert.NilError(t, err) + pm.UnmarshalLockfile = func(rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { + return nil, errors.New("bad lockfile") + } + context := Context{ + WorkspaceInfos: workspace.Catalog{ + PackageJSONs: map[string]*fs.PackageJSON{ + "a": {}, + }, + }, + WorkspaceNames: []string{}, + PackageManager: pm, + mutex: sync.Mutex{}, + } + var warnings Warnings + err = context.populateExternalDeps(path, pkgJSON, &warnings) + assert.NilError(t, err) + + assert.DeepEqual(t, pkgJSON.ExternalDepsHash, "") + assert.DeepEqual(t, context.WorkspaceInfos.PackageJSONs["a"].ExternalDepsHash, "") + assert.Assert(t, warnings.errorOrNil() != nil) +} + // This is duplicated from fs.turbo_json_test.go. // I wasn't able to pull it into a helper file/package because // it requires the `fs` package and it would cause cyclical dependencies diff --git a/cli/internal/ffi/bindings.h b/cli/internal/ffi/bindings.h index c2bbcea5d5f06..53f9bd0d86f2f 100644 --- a/cli/internal/ffi/bindings.h +++ b/cli/internal/ffi/bindings.h @@ -16,6 +16,6 @@ struct Buffer changed_files(struct Buffer buffer); struct Buffer previous_content(struct Buffer buffer); -struct Buffer npm_transitive_closure(struct Buffer buf); +struct Buffer transitive_closure(struct Buffer buf); struct Buffer npm_subgraph(struct Buffer buf); diff --git a/cli/internal/ffi/ffi.go b/cli/internal/ffi/ffi.go index d767488509414..82646b19a0836 100644 --- a/cli/internal/ffi/ffi.go +++ b/cli/internal/ffi/ffi.go @@ -18,6 +18,7 @@ import "C" import ( "errors" + "fmt" "reflect" "unsafe" @@ -167,23 +168,28 @@ func PreviousContent(gitRoot, fromCommit, filePath string) ([]byte, error) { return []byte(content), nil } -// NpmTransitiveDeps returns the transitive external deps of a given package based on the deps and specifiers given -func NpmTransitiveDeps(content []byte, pkgDir string, unresolvedDeps map[string]string) ([]*ffi_proto.LockfilePackage, error) { - return transitiveDeps(npmTransitiveDeps, 
content, pkgDir, unresolvedDeps) -} - -func npmTransitiveDeps(buf C.Buffer) C.Buffer { - return C.npm_transitive_closure(buf) -} - -func transitiveDeps(cFunc func(C.Buffer) C.Buffer, content []byte, pkgDir string, unresolvedDeps map[string]string) ([]*ffi_proto.LockfilePackage, error) { +// TransitiveDeps returns the transitive external deps for all provided workspaces +func TransitiveDeps(content []byte, packageManager string, workspaces map[string]map[string]string) (map[string]*ffi_proto.LockfilePackageList, error) { + flatWorkspaces := make(map[string]*ffi_proto.PackageDependencyList) + for workspace, deps := range workspaces { + packageDependencyList := make([]*ffi_proto.PackageDependency, len(deps)) + i := 0 + for name, version := range deps { + packageDependencyList[i] = &ffi_proto.PackageDependency{ + Name: name, + Range: version, + } + i++ + } + flatWorkspaces[workspace] = &ffi_proto.PackageDependencyList{List: packageDependencyList} + } req := ffi_proto.TransitiveDepsRequest{ Contents: content, - WorkspaceDir: pkgDir, - UnresolvedDeps: unresolvedDeps, + PackageManager: toPackageManager(packageManager), + Workspaces: flatWorkspaces, } reqBuf := Marshal(&req) - resBuf := cFunc(reqBuf) + resBuf := C.transitive_closure(reqBuf) reqBuf.Free() resp := ffi_proto.TransitiveDepsResponse{} @@ -195,8 +201,17 @@ func transitiveDeps(cFunc func(C.Buffer) C.Buffer, content []byte, pkgDir string return nil, errors.New(err) } - list := resp.GetPackages() - return list.GetList(), nil + dependencies := resp.GetDependencies() + return dependencies.GetDependencies(), nil +} + +func toPackageManager(packageManager string) ffi_proto.PackageManager { + switch packageManager { + case "npm": + return ffi_proto.PackageManager_NPM + default: + panic(fmt.Sprintf("Invalid package manager string: %s", packageManager)) + } } // NpmSubgraph returns the contents of a npm lockfile subgraph diff --git a/cli/internal/ffi/proto/messages.pb.go b/cli/internal/ffi/proto/messages.pb.go index 
666f32a164416..68a155a44eca1 100644 --- a/cli/internal/ffi/proto/messages.pb.go +++ b/cli/internal/ffi/proto/messages.pb.go @@ -20,6 +20,49 @@ const ( _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) +type PackageManager int32 + +const ( + PackageManager_NPM PackageManager = 0 +) + +// Enum value maps for PackageManager. +var ( + PackageManager_name = map[int32]string{ + 0: "NPM", + } + PackageManager_value = map[string]int32{ + "NPM": 0, + } +) + +func (x PackageManager) Enum() *PackageManager { + p := new(PackageManager) + *p = x + return p +} + +func (x PackageManager) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (PackageManager) Descriptor() protoreflect.EnumDescriptor { + return file_turborepo_ffi_messages_proto_enumTypes[0].Descriptor() +} + +func (PackageManager) Type() protoreflect.EnumType { + return &file_turborepo_ffi_messages_proto_enumTypes[0] +} + +func (x PackageManager) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use PackageManager.Descriptor instead. 
+func (PackageManager) EnumDescriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{0} +} + type TurboDataDirResp struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -606,20 +649,169 @@ func (*PreviousContentResp_Content) isPreviousContentResp_Response() {} func (*PreviousContentResp_Error) isPreviousContentResp_Response() {} +type PackageDependency struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Range string `protobuf:"bytes,2,opt,name=range,proto3" json:"range,omitempty"` +} + +func (x *PackageDependency) Reset() { + *x = PackageDependency{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PackageDependency) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PackageDependency) ProtoMessage() {} + +func (x *PackageDependency) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PackageDependency.ProtoReflect.Descriptor instead. 
+func (*PackageDependency) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{9} +} + +func (x *PackageDependency) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *PackageDependency) GetRange() string { + if x != nil { + return x.Range + } + return "" +} + +type PackageDependencyList struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + List []*PackageDependency `protobuf:"bytes,1,rep,name=list,proto3" json:"list,omitempty"` +} + +func (x *PackageDependencyList) Reset() { + *x = PackageDependencyList{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PackageDependencyList) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PackageDependencyList) ProtoMessage() {} + +func (x *PackageDependencyList) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PackageDependencyList.ProtoReflect.Descriptor instead. 
+func (*PackageDependencyList) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{10} +} + +func (x *PackageDependencyList) GetList() []*PackageDependency { + if x != nil { + return x.List + } + return nil +} + +type WorkspaceDependencies struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Dependencies map[string]*LockfilePackageList `protobuf:"bytes,1,rep,name=dependencies,proto3" json:"dependencies,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *WorkspaceDependencies) Reset() { + *x = WorkspaceDependencies{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *WorkspaceDependencies) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*WorkspaceDependencies) ProtoMessage() {} + +func (x *WorkspaceDependencies) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use WorkspaceDependencies.ProtoReflect.Descriptor instead. 
+func (*WorkspaceDependencies) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{11} +} + +func (x *WorkspaceDependencies) GetDependencies() map[string]*LockfilePackageList { + if x != nil { + return x.Dependencies + } + return nil +} + type TransitiveDepsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` - WorkspaceDir string `protobuf:"bytes,2,opt,name=workspace_dir,json=workspaceDir,proto3" json:"workspace_dir,omitempty"` - UnresolvedDeps map[string]string `protobuf:"bytes,3,rep,name=unresolved_deps,json=unresolvedDeps,proto3" json:"unresolved_deps,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` + PackageManager PackageManager `protobuf:"varint,2,opt,name=package_manager,json=packageManager,proto3,enum=PackageManager" json:"package_manager,omitempty"` + Workspaces map[string]*PackageDependencyList `protobuf:"bytes,3,rep,name=workspaces,proto3" json:"workspaces,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *TransitiveDepsRequest) Reset() { *x = TransitiveDepsRequest{} if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[9] + mi := &file_turborepo_ffi_messages_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -632,7 +824,7 @@ func (x *TransitiveDepsRequest) String() string { func (*TransitiveDepsRequest) ProtoMessage() {} func (x *TransitiveDepsRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[9] + mi := &file_turborepo_ffi_messages_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -645,7 +837,7 @@ func (x *TransitiveDepsRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use TransitiveDepsRequest.ProtoReflect.Descriptor instead. func (*TransitiveDepsRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{9} + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{12} } func (x *TransitiveDepsRequest) GetContents() []byte { @@ -655,16 +847,16 @@ func (x *TransitiveDepsRequest) GetContents() []byte { return nil } -func (x *TransitiveDepsRequest) GetWorkspaceDir() string { +func (x *TransitiveDepsRequest) GetPackageManager() PackageManager { if x != nil { - return x.WorkspaceDir + return x.PackageManager } - return "" + return PackageManager_NPM } -func (x *TransitiveDepsRequest) GetUnresolvedDeps() map[string]string { +func (x *TransitiveDepsRequest) GetWorkspaces() map[string]*PackageDependencyList { if x != nil { - return x.UnresolvedDeps + return x.Workspaces } return nil } @@ -675,7 +867,7 @@ type TransitiveDepsResponse struct { unknownFields protoimpl.UnknownFields // Types that are assignable to Response: - // *TransitiveDepsResponse_Packages + // *TransitiveDepsResponse_Dependencies // *TransitiveDepsResponse_Error Response isTransitiveDepsResponse_Response `protobuf_oneof:"response"` } @@ -683,7 +875,7 @@ type TransitiveDepsResponse struct { func (x *TransitiveDepsResponse) Reset() { *x = TransitiveDepsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[10] + mi := &file_turborepo_ffi_messages_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -696,7 +888,7 @@ func (x *TransitiveDepsResponse) String() string { func (*TransitiveDepsResponse) ProtoMessage() {} func (x *TransitiveDepsResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[10] + mi := 
&file_turborepo_ffi_messages_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -709,7 +901,7 @@ func (x *TransitiveDepsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use TransitiveDepsResponse.ProtoReflect.Descriptor instead. func (*TransitiveDepsResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{10} + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{13} } func (m *TransitiveDepsResponse) GetResponse() isTransitiveDepsResponse_Response { @@ -719,9 +911,9 @@ func (m *TransitiveDepsResponse) GetResponse() isTransitiveDepsResponse_Response return nil } -func (x *TransitiveDepsResponse) GetPackages() *LockfilePackageList { - if x, ok := x.GetResponse().(*TransitiveDepsResponse_Packages); ok { - return x.Packages +func (x *TransitiveDepsResponse) GetDependencies() *WorkspaceDependencies { + if x, ok := x.GetResponse().(*TransitiveDepsResponse_Dependencies); ok { + return x.Dependencies } return nil } @@ -737,15 +929,15 @@ type isTransitiveDepsResponse_Response interface { isTransitiveDepsResponse_Response() } -type TransitiveDepsResponse_Packages struct { - Packages *LockfilePackageList `protobuf:"bytes,1,opt,name=packages,proto3,oneof"` +type TransitiveDepsResponse_Dependencies struct { + Dependencies *WorkspaceDependencies `protobuf:"bytes,1,opt,name=dependencies,proto3,oneof"` } type TransitiveDepsResponse_Error struct { Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` } -func (*TransitiveDepsResponse_Packages) isTransitiveDepsResponse_Response() {} +func (*TransitiveDepsResponse_Dependencies) isTransitiveDepsResponse_Response() {} func (*TransitiveDepsResponse_Error) isTransitiveDepsResponse_Response() {} @@ -762,7 +954,7 @@ type LockfilePackage struct { func (x *LockfilePackage) Reset() { *x = LockfilePackage{} if protoimpl.UnsafeEnabled { - mi := 
&file_turborepo_ffi_messages_proto_msgTypes[11] + mi := &file_turborepo_ffi_messages_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -775,7 +967,7 @@ func (x *LockfilePackage) String() string { func (*LockfilePackage) ProtoMessage() {} func (x *LockfilePackage) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[11] + mi := &file_turborepo_ffi_messages_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -788,7 +980,7 @@ func (x *LockfilePackage) ProtoReflect() protoreflect.Message { // Deprecated: Use LockfilePackage.ProtoReflect.Descriptor instead. func (*LockfilePackage) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{11} + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{14} } func (x *LockfilePackage) GetKey() string { @@ -823,7 +1015,7 @@ type LockfilePackageList struct { func (x *LockfilePackageList) Reset() { *x = LockfilePackageList{} if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[12] + mi := &file_turborepo_ffi_messages_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -836,7 +1028,7 @@ func (x *LockfilePackageList) String() string { func (*LockfilePackageList) ProtoMessage() {} func (x *LockfilePackageList) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[12] + mi := &file_turborepo_ffi_messages_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -849,7 +1041,7 @@ func (x *LockfilePackageList) ProtoReflect() protoreflect.Message { // Deprecated: Use LockfilePackageList.ProtoReflect.Descriptor instead. 
func (*LockfilePackageList) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{12} + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{15} } func (x *LockfilePackageList) GetList() []*LockfilePackage { @@ -864,15 +1056,16 @@ type SubgraphRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` - Workspaces []string `protobuf:"bytes,2,rep,name=workspaces,proto3" json:"workspaces,omitempty"` - Packages []string `protobuf:"bytes,3,rep,name=packages,proto3" json:"packages,omitempty"` + Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` + PackageManager string `protobuf:"bytes,2,opt,name=package_manager,json=packageManager,proto3" json:"package_manager,omitempty"` + Workspaces []string `protobuf:"bytes,3,rep,name=workspaces,proto3" json:"workspaces,omitempty"` + Packages []string `protobuf:"bytes,4,rep,name=packages,proto3" json:"packages,omitempty"` } func (x *SubgraphRequest) Reset() { *x = SubgraphRequest{} if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[13] + mi := &file_turborepo_ffi_messages_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -885,7 +1078,7 @@ func (x *SubgraphRequest) String() string { func (*SubgraphRequest) ProtoMessage() {} func (x *SubgraphRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[13] + mi := &file_turborepo_ffi_messages_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -898,7 +1091,7 @@ func (x *SubgraphRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use SubgraphRequest.ProtoReflect.Descriptor instead. 
func (*SubgraphRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{13} + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{16} } func (x *SubgraphRequest) GetContents() []byte { @@ -908,6 +1101,13 @@ func (x *SubgraphRequest) GetContents() []byte { return nil } +func (x *SubgraphRequest) GetPackageManager() string { + if x != nil { + return x.PackageManager + } + return "" +} + func (x *SubgraphRequest) GetWorkspaces() []string { if x != nil { return x.Workspaces @@ -936,7 +1136,7 @@ type SubgraphResponse struct { func (x *SubgraphResponse) Reset() { *x = SubgraphResponse{} if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[14] + mi := &file_turborepo_ffi_messages_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -949,7 +1149,7 @@ func (x *SubgraphResponse) String() string { func (*SubgraphResponse) ProtoMessage() {} func (x *SubgraphResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[14] + mi := &file_turborepo_ffi_messages_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -962,7 +1162,7 @@ func (x *SubgraphResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use SubgraphResponse.ProtoReflect.Descriptor instead. 
func (*SubgraphResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{14} + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{17} } func (m *SubgraphResponse) GetResponse() isSubgraphResponse_Response { @@ -1058,51 +1258,78 @@ var file_turborepo_ffi_messages_proto_rawDesc = []byte{ 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0xf0, 0x01, 0x0a, 0x15, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, - 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, - 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x64, 0x69, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x44, 0x69, 0x72, 0x12, 0x53, - 0x0a, 0x0f, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, 0x64, 0x65, 0x70, - 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, - 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, - 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x44, 0x65, 0x70, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x52, 0x0e, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x44, - 0x65, 0x70, 0x73, 0x1a, 0x41, 0x0a, 0x13, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, - 0x64, 0x44, 0x65, 0x70, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 
0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x70, 0x0a, 0x16, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, - 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x32, 0x0a, 0x08, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, - 0x6b, 0x61, 0x67, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x08, 0x70, 0x61, 0x63, 0x6b, - 0x61, 0x67, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, - 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x53, 0x0a, 0x0f, 0x4c, 0x6f, 0x63, 0x6b, - 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, - 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a, - 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x6f, 0x75, 0x6e, 0x64, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x22, 0x3b, 0x0a, - 0x13, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, - 0x4c, 0x69, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, - 0x6b, 0x61, 0x67, 0x65, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x22, 0x69, 0x0a, 0x0f, 0x53, 0x75, - 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, - 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x08, 0x63, 0x6f, 0x6e, 0x74, 
0x65, 0x6e, 0x74, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x77, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x63, - 0x6b, 0x61, 0x67, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x63, - 0x6b, 0x61, 0x67, 0x65, 0x73, 0x22, 0x54, 0x0a, 0x10, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, - 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1c, 0x0a, 0x08, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, - 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x0b, 0x5a, 0x09, 0x66, - 0x66, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x6e, 0x73, 0x65, 0x22, 0x3d, 0x0a, 0x11, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x44, 0x65, + 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x61, 0x6e, + 0x67, 0x65, 0x22, 0x3f, 0x0a, 0x15, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x44, 0x65, 0x70, + 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x04, 0x6c, + 0x69, 0x73, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x50, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x52, 0x04, 0x6c, + 0x69, 0x73, 0x74, 0x22, 0xbc, 0x01, 0x0a, 0x15, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 
0x65, 0x73, 0x12, 0x4c, 0x0a, + 0x0c, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x44, + 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x2e, 0x44, 0x65, 0x70, 0x65, + 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0c, 0x64, + 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x1a, 0x55, 0x0a, 0x11, 0x44, + 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x14, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0x8c, 0x02, 0x0a, 0x15, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, + 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x38, 0x0a, 0x0f, 0x70, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x0f, 0x2e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, + 0x65, 0x72, 0x52, 0x0e, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, + 0x65, 0x72, 0x12, 0x46, 0x0a, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, + 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x57, + 0x6f, 
0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x1a, 0x55, 0x0a, 0x0f, 0x57, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, + 0x2e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, + 0x63, 0x79, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, + 0x01, 0x22, 0x7a, 0x0a, 0x16, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, + 0x65, 0x70, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3c, 0x0a, 0x0c, 0x64, + 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x16, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x44, 0x65, 0x70, + 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0c, 0x64, 0x65, 0x70, + 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x53, 0x0a, + 0x0f, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, + 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x66, 0x6f, 0x75, + 0x6e, 0x64, 0x22, 0x3b, 0x0a, 0x13, 0x4c, 
0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, + 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x6c, 0x69, 0x73, + 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, + 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x22, + 0x92, 0x01, 0x0a, 0x0f, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, + 0x27, 0x0a, 0x0f, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x6e, 0x61, 0x67, + 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, + 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x12, 0x1e, 0x0a, 0x0a, 0x77, 0x6f, 0x72, 0x6b, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x73, 0x22, 0x54, 0x0a, 0x10, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1c, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, + 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x19, 0x0a, 0x0e, 0x50, 0x61, + 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x12, 0x07, 0x0a, 0x03, + 0x4e, 0x50, 0x4d, 0x10, 0x00, 0x42, 0x0b, 0x5a, 0x09, 0x66, 0x66, 0x69, 0x2f, 
0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1117,36 +1344,47 @@ func file_turborepo_ffi_messages_proto_rawDescGZIP() []byte { return file_turborepo_ffi_messages_proto_rawDescData } -var file_turborepo_ffi_messages_proto_msgTypes = make([]protoimpl.MessageInfo, 16) +var file_turborepo_ffi_messages_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_turborepo_ffi_messages_proto_msgTypes = make([]protoimpl.MessageInfo, 20) var file_turborepo_ffi_messages_proto_goTypes = []interface{}{ - (*TurboDataDirResp)(nil), // 0: TurboDataDirResp - (*GlobReq)(nil), // 1: GlobReq - (*GlobResp)(nil), // 2: GlobResp - (*GlobRespList)(nil), // 3: GlobRespList - (*ChangedFilesReq)(nil), // 4: ChangedFilesReq - (*ChangedFilesResp)(nil), // 5: ChangedFilesResp - (*ChangedFilesList)(nil), // 6: ChangedFilesList - (*PreviousContentReq)(nil), // 7: PreviousContentReq - (*PreviousContentResp)(nil), // 8: PreviousContentResp - (*TransitiveDepsRequest)(nil), // 9: TransitiveDepsRequest - (*TransitiveDepsResponse)(nil), // 10: TransitiveDepsResponse - (*LockfilePackage)(nil), // 11: LockfilePackage - (*LockfilePackageList)(nil), // 12: LockfilePackageList - (*SubgraphRequest)(nil), // 13: SubgraphRequest - (*SubgraphResponse)(nil), // 14: SubgraphResponse - nil, // 15: TransitiveDepsRequest.UnresolvedDepsEntry + (PackageManager)(0), // 0: PackageManager + (*TurboDataDirResp)(nil), // 1: TurboDataDirResp + (*GlobReq)(nil), // 2: GlobReq + (*GlobResp)(nil), // 3: GlobResp + (*GlobRespList)(nil), // 4: GlobRespList + (*ChangedFilesReq)(nil), // 5: ChangedFilesReq + (*ChangedFilesResp)(nil), // 6: ChangedFilesResp + (*ChangedFilesList)(nil), // 7: ChangedFilesList + (*PreviousContentReq)(nil), // 8: PreviousContentReq + (*PreviousContentResp)(nil), // 9: PreviousContentResp + (*PackageDependency)(nil), // 10: PackageDependency + (*PackageDependencyList)(nil), // 11: PackageDependencyList + (*WorkspaceDependencies)(nil), // 12: 
WorkspaceDependencies + (*TransitiveDepsRequest)(nil), // 13: TransitiveDepsRequest + (*TransitiveDepsResponse)(nil), // 14: TransitiveDepsResponse + (*LockfilePackage)(nil), // 15: LockfilePackage + (*LockfilePackageList)(nil), // 16: LockfilePackageList + (*SubgraphRequest)(nil), // 17: SubgraphRequest + (*SubgraphResponse)(nil), // 18: SubgraphResponse + nil, // 19: WorkspaceDependencies.DependenciesEntry + nil, // 20: TransitiveDepsRequest.WorkspacesEntry } var file_turborepo_ffi_messages_proto_depIdxs = []int32{ - 3, // 0: GlobResp.files:type_name -> GlobRespList - 6, // 1: ChangedFilesResp.files:type_name -> ChangedFilesList - 15, // 2: TransitiveDepsRequest.unresolved_deps:type_name -> TransitiveDepsRequest.UnresolvedDepsEntry - 12, // 3: TransitiveDepsResponse.packages:type_name -> LockfilePackageList - 11, // 4: LockfilePackageList.list:type_name -> LockfilePackage - 5, // [5:5] is the sub-list for method output_type - 5, // [5:5] is the sub-list for method input_type - 5, // [5:5] is the sub-list for extension type_name - 5, // [5:5] is the sub-list for extension extendee - 0, // [0:5] is the sub-list for field type_name + 4, // 0: GlobResp.files:type_name -> GlobRespList + 7, // 1: ChangedFilesResp.files:type_name -> ChangedFilesList + 10, // 2: PackageDependencyList.list:type_name -> PackageDependency + 19, // 3: WorkspaceDependencies.dependencies:type_name -> WorkspaceDependencies.DependenciesEntry + 0, // 4: TransitiveDepsRequest.package_manager:type_name -> PackageManager + 20, // 5: TransitiveDepsRequest.workspaces:type_name -> TransitiveDepsRequest.WorkspacesEntry + 12, // 6: TransitiveDepsResponse.dependencies:type_name -> WorkspaceDependencies + 15, // 7: LockfilePackageList.list:type_name -> LockfilePackage + 16, // 8: WorkspaceDependencies.DependenciesEntry.value:type_name -> LockfilePackageList + 11, // 9: TransitiveDepsRequest.WorkspacesEntry.value:type_name -> PackageDependencyList + 10, // [10:10] is the sub-list for method output_type + 
10, // [10:10] is the sub-list for method input_type + 10, // [10:10] is the sub-list for extension type_name + 10, // [10:10] is the sub-list for extension extendee + 0, // [0:10] is the sub-list for field type_name } func init() { file_turborepo_ffi_messages_proto_init() } @@ -1264,7 +1502,7 @@ func file_turborepo_ffi_messages_proto_init() { } } file_turborepo_ffi_messages_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransitiveDepsRequest); i { + switch v := v.(*PackageDependency); i { case 0: return &v.state case 1: @@ -1276,7 +1514,7 @@ func file_turborepo_ffi_messages_proto_init() { } } file_turborepo_ffi_messages_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransitiveDepsResponse); i { + switch v := v.(*PackageDependencyList); i { case 0: return &v.state case 1: @@ -1288,7 +1526,7 @@ func file_turborepo_ffi_messages_proto_init() { } } file_turborepo_ffi_messages_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LockfilePackage); i { + switch v := v.(*WorkspaceDependencies); i { case 0: return &v.state case 1: @@ -1300,7 +1538,7 @@ func file_turborepo_ffi_messages_proto_init() { } } file_turborepo_ffi_messages_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LockfilePackageList); i { + switch v := v.(*TransitiveDepsRequest); i { case 0: return &v.state case 1: @@ -1312,7 +1550,7 @@ func file_turborepo_ffi_messages_proto_init() { } } file_turborepo_ffi_messages_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SubgraphRequest); i { + switch v := v.(*TransitiveDepsResponse); i { case 0: return &v.state case 1: @@ -1324,6 +1562,42 @@ func file_turborepo_ffi_messages_proto_init() { } } file_turborepo_ffi_messages_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LockfilePackage); i { + case 0: + return &v.state + case 1: + 
return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LockfilePackageList); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SubgraphRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SubgraphResponse); i { case 0: return &v.state @@ -1349,11 +1623,11 @@ func file_turborepo_ffi_messages_proto_init() { (*PreviousContentResp_Content)(nil), (*PreviousContentResp_Error)(nil), } - file_turborepo_ffi_messages_proto_msgTypes[10].OneofWrappers = []interface{}{ - (*TransitiveDepsResponse_Packages)(nil), + file_turborepo_ffi_messages_proto_msgTypes[13].OneofWrappers = []interface{}{ + (*TransitiveDepsResponse_Dependencies)(nil), (*TransitiveDepsResponse_Error)(nil), } - file_turborepo_ffi_messages_proto_msgTypes[14].OneofWrappers = []interface{}{ + file_turborepo_ffi_messages_proto_msgTypes[17].OneofWrappers = []interface{}{ (*SubgraphResponse_Contents)(nil), (*SubgraphResponse_Error)(nil), } @@ -1362,13 +1636,14 @@ func file_turborepo_ffi_messages_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_turborepo_ffi_messages_proto_rawDesc, - NumEnums: 0, - NumMessages: 16, + NumEnums: 1, + NumMessages: 20, NumExtensions: 0, NumServices: 0, }, GoTypes: file_turborepo_ffi_messages_proto_goTypes, DependencyIndexes: file_turborepo_ffi_messages_proto_depIdxs, + EnumInfos: file_turborepo_ffi_messages_proto_enumTypes, MessageInfos: file_turborepo_ffi_messages_proto_msgTypes, 
}.Build() File_turborepo_ffi_messages_proto = out.File diff --git a/cli/internal/fs/package_json.go b/cli/internal/fs/package_json.go index 883f7a4ac7df0..a8efe3ed5220a 100644 --- a/cli/internal/fs/package_json.go +++ b/cli/internal/fs/package_json.go @@ -3,8 +3,10 @@ package fs import ( "bytes" "encoding/json" + "sort" "sync" + mapset "github.com/deckarep/golang-set" "github.com/vercel/turbo/cli/internal/lockfile" "github.com/vercel/turbo/cli/internal/turbopath" ) @@ -120,6 +122,24 @@ func MarshalPackageJSON(pkgJSON *PackageJSON) ([]byte, error) { return b.Bytes(), nil } +// SetExternalDeps sets TransitiveDeps and populates ExternalDepsHash +func (p *PackageJSON) SetExternalDeps(externalDeps mapset.Set) error { + p.Mu.Lock() + defer p.Mu.Unlock() + p.TransitiveDeps = make([]lockfile.Package, 0, externalDeps.Cardinality()) + for _, dependency := range externalDeps.ToSlice() { + dependency := dependency.(lockfile.Package) + p.TransitiveDeps = append(p.TransitiveDeps, dependency) + } + sort.Sort(lockfile.ByKey(p.TransitiveDeps)) + hashOfExternalDeps, err := HashObject(p.TransitiveDeps) + if err != nil { + return err + } + p.ExternalDepsHash = hashOfExternalDeps + return nil +} + func isEmpty(value interface{}) bool { if value == nil { return true diff --git a/cli/internal/fs/package_json_test.go b/cli/internal/fs/package_json_test.go index 3c1662008aa6f..246e3cbfd7be2 100644 --- a/cli/internal/fs/package_json_test.go +++ b/cli/internal/fs/package_json_test.go @@ -3,6 +3,7 @@ package fs import ( "testing" + mapset "github.com/deckarep/golang-set" "gotest.tools/v3/assert" ) @@ -157,6 +158,15 @@ func Test_MarshalPackageJSON(t *testing.T) { } } +func Test_SetExternalDepsWithEmptySet(t *testing.T) { + pkg := &PackageJSON{} + err := pkg.SetExternalDeps(mapset.NewSet()) + assert.NilError(t, err) + assert.Assert(t, pkg.TransitiveDeps != nil) + assert.Equal(t, len(pkg.TransitiveDeps), 0) + assert.DeepEqual(t, pkg.ExternalDepsHash, "ccab0b28617f1f56") +} + // Asserts that the 
data section of two PackageJSON structs are equal func assertPackageJSONEqual(t *testing.T, x *PackageJSON, y *PackageJSON) { t.Helper() diff --git a/cli/internal/lockfile/lockfile.go b/cli/internal/lockfile/lockfile.go index bb36eda93f419..b24deee88f818 100644 --- a/cli/internal/lockfile/lockfile.go +++ b/cli/internal/lockfile/lockfile.go @@ -8,6 +8,7 @@ import ( "sort" mapset "github.com/deckarep/golang-set" + "github.com/vercel/turbo/cli/internal/ffi" "github.com/vercel/turbo/cli/internal/turbopath" "golang.org/x/sync/errgroup" ) @@ -61,17 +62,45 @@ func (p ByKey) Less(i, j int) bool { var _ (sort.Interface) = (*ByKey)(nil) -// TransitiveClosure the set of all lockfile keys that pkg depends on -func TransitiveClosure( - workspaceDir turbopath.AnchoredUnixPath, - unresolvedDeps map[string]string, +type closureMsg struct { + workspace turbopath.AnchoredUnixPath + closure mapset.Set +} + +// AllTransitiveClosures computes closures for all workspaces +func AllTransitiveClosures( + workspaces map[turbopath.AnchoredUnixPath]map[string]string, lockFile Lockfile, -) (mapset.Set, error) { +) (map[turbopath.AnchoredUnixPath]mapset.Set, error) { if lf, ok := lockFile.(*NpmLockfile); ok { // We special case as Rust implementations have their own dep crawl - return npmTransitiveDeps(lf, workspaceDir, unresolvedDeps) + return rustTransitiveDeps(lf.contents, "npm", workspaces) } - return transitiveClosure(workspaceDir, unresolvedDeps, lockFile) + + g := new(errgroup.Group) + c := make(chan closureMsg, len(workspaces)) + closures := make(map[turbopath.AnchoredUnixPath]mapset.Set, len(workspaces)) + for workspace, deps := range workspaces { + workspace := workspace + deps := deps + g.Go(func() error { + closure, err := transitiveClosure(workspace, deps, lockFile) + if err != nil { + return err + } + c <- closureMsg{workspace: workspace, closure: closure} + return nil + }) + } + err := g.Wait() + close(c) + if err != nil { + return nil, err + } + for msg := range c { + 
closures[msg.workspace] = msg.closure + } + return closures, nil } func transitiveClosure( @@ -133,3 +162,28 @@ func transitiveClosureHelper( }) } } + +func rustTransitiveDeps(content []byte, packageManager string, workspaces map[turbopath.AnchoredUnixPath]map[string]string) (map[turbopath.AnchoredUnixPath]mapset.Set, error) { + processedWorkspaces := make(map[string]map[string]string, len(workspaces)) + for workspacePath, workspace := range workspaces { + processedWorkspaces[workspacePath.ToString()] = workspace + } + workspaceDeps, err := ffi.TransitiveDeps(content, packageManager, processedWorkspaces) + if err != nil { + return nil, err + } + resolvedWorkspaces := make(map[turbopath.AnchoredUnixPath]mapset.Set, len(workspaceDeps)) + for workspace, dependencies := range workspaceDeps { + depsSet := mapset.NewSet() + for _, pkg := range dependencies.GetList() { + depsSet.Add(Package{ + Found: pkg.Found, + Key: pkg.Key, + Version: pkg.Version, + }) + } + workspacePath := turbopath.AnchoredUnixPath(workspace) + resolvedWorkspaces[workspacePath] = depsSet + } + return resolvedWorkspaces, nil +} diff --git a/cli/internal/lockfile/lockfile_test.go b/cli/internal/lockfile/lockfile_test.go index 7c666cc4f3000..1ffedc58b1a2d 100644 --- a/cli/internal/lockfile/lockfile_test.go +++ b/cli/internal/lockfile/lockfile_test.go @@ -1,9 +1,12 @@ package lockfile import ( + "io" + "reflect" "sort" "testing" + "github.com/vercel/turbo/cli/internal/turbopath" "gotest.tools/v3/assert" ) @@ -23,3 +26,45 @@ func Test_ByKeySortIsStable(t *testing.T) { assert.DeepEqual(t, packagesA, packagesB) } + +type mockLockfile struct{} + +func (m *mockLockfile) ResolvePackage(_ turbopath.AnchoredUnixPath, _ string, _ string) (Package, error) { + panic("unimplemented") +} + +func (m *mockLockfile) AllDependencies(_ string) (map[string]string, bool) { + panic("unimplemented") +} + +func (m *mockLockfile) Subgraph(_ []turbopath.AnchoredSystemPath, _ []string) (Lockfile, error) { + 
panic("unimplemented") +} + +func (m *mockLockfile) Encode(_ io.Writer) error { + panic("unimplemented") +} + +func (m *mockLockfile) Patches() []turbopath.AnchoredUnixPath { + panic("unimplemented") +} + +func (m *mockLockfile) GlobalChange(_ Lockfile) bool { + panic("unimplemented") +} + +var _ (Lockfile) = (*mockLockfile)(nil) + +func Test_AllTransitiveClosureReturnsEmptySets(t *testing.T) { + closures, err := AllTransitiveClosures(map[turbopath.AnchoredUnixPath]map[string]string{ + turbopath.AnchoredUnixPath("."): {}, + turbopath.AnchoredUnixPath("a"): {}, + turbopath.AnchoredUnixPath("b"): {}, + }, &mockLockfile{}) + assert.NilError(t, err) + assert.Assert(t, len(closures) == 3) + for _, closure := range closures { + assert.Assert(t, closure != nil && !reflect.ValueOf(closure).IsNil()) + assert.Equal(t, closure.Cardinality(), 0) + } +} diff --git a/cli/internal/lockfile/npm_lockfile.go b/cli/internal/lockfile/npm_lockfile.go index 67cd32af63897..dceb560f4c782 100644 --- a/cli/internal/lockfile/npm_lockfile.go +++ b/cli/internal/lockfile/npm_lockfile.go @@ -4,7 +4,6 @@ import ( "encoding/json" "io" - mapset "github.com/deckarep/golang-set" "github.com/vercel/turbo/cli/internal/ffi" "github.com/vercel/turbo/cli/internal/turbopath" ) @@ -85,23 +84,3 @@ var _ (Lockfile) = (*NpmLockfile)(nil) func DecodeNpmLockfile(contents []byte) (Lockfile, error) { return &NpmLockfile{contents: contents}, nil } - -func npmTransitiveDeps(lockfile *NpmLockfile, workspacePath turbopath.AnchoredUnixPath, unresolvedDeps map[string]string) (mapset.Set, error) { - pkgDir := workspacePath.ToString() - - packages, err := ffi.NpmTransitiveDeps(lockfile.contents, pkgDir, unresolvedDeps) - if err != nil { - return nil, err - } - - deps := make([]interface{}, len(packages)) - for i, pkg := range packages { - deps[i] = Package{ - Found: pkg.Found, - Key: pkg.Key, - Version: pkg.Version, - } - } - - return mapset.NewSetFromSlice(deps), nil -} diff --git 
a/cli/internal/lockfile/npm_lockfile_test.go b/cli/internal/lockfile/npm_lockfile_test.go new file mode 100644 index 0000000000000..5dfd327b08f7b --- /dev/null +++ b/cli/internal/lockfile/npm_lockfile_test.go @@ -0,0 +1,74 @@ +package lockfile + +import ( + "os" + "testing" + + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +func getRustFixture(t *testing.T, fixture string) []byte { + defaultCwd, err := os.Getwd() + assert.NilError(t, err, "getRustFixture") + cwd := turbopath.AbsoluteSystemPath(defaultCwd) + lockfilePath := cwd.UntypedJoin("../../../crates/turborepo-lockfiles/fixtures", fixture) + if !lockfilePath.FileExists() { + t.Errorf("unable to find 'turborepo-lockfiles/fixtures/%s'", fixture) + } + bytes, err := os.ReadFile(lockfilePath.ToString()) + assert.NilError(t, err, "unable to read fixture") + return bytes +} + +func getNpmFixture(t *testing.T, fixture string) Lockfile { + bytes := getRustFixture(t, fixture) + lf, err := DecodeNpmLockfile(bytes) + assert.NilError(t, err) + return lf +} + +func TestAllDependenciesNpm(t *testing.T) { + lf := getNpmFixture(t, "npm-lock.json") + closures, err := AllTransitiveClosures(map[turbopath.AnchoredUnixPath]map[string]string{ + turbopath.AnchoredUnixPath(""): { + "turbo": "latest", + "prettier": "latest", + }, + turbopath.AnchoredUnixPath("apps/web"): { + "lodash": "^4.17.21", + "next": "12.3.0", + }, + }, lf) + assert.NilError(t, err) + assert.Equal(t, len(closures), 2) + rootClosure := closures[turbopath.AnchoredUnixPath("")] + webClosure := closures[turbopath.AnchoredUnixPath("apps/web")] + + assert.Assert(t, rootClosure.Contains(Package{ + Key: "node_modules/turbo", + Version: "1.5.5", + Found: true, + })) + assert.Assert(t, rootClosure.Contains(Package{ + Key: "node_modules/turbo-darwin-64", + Version: "1.5.5", + Found: true, + })) + + assert.Assert(t, webClosure.Contains(Package{ + Key: "apps/web/node_modules/lodash", + Version: "4.17.21", + Found: true, + })) + 
assert.Assert(t, webClosure.Contains(Package{ + Key: "node_modules/next", + Version: "12.3.0", + Found: true, + })) + assert.Assert(t, webClosure.Contains(Package{ + Key: "node_modules/postcss", + Version: "8.4.14", + Found: true, + })) +} diff --git a/crates/turborepo-ffi/messages.proto b/crates/turborepo-ffi/messages.proto index 8749d3b9b527d..31bd10d514d1b 100644 --- a/crates/turborepo-ffi/messages.proto +++ b/crates/turborepo-ffi/messages.proto @@ -54,15 +54,32 @@ message PreviousContentResp { } } +enum PackageManager { + NPM = 0; +} + +message PackageDependency { + string name = 1; + string range = 2; +} + +message PackageDependencyList { + repeated PackageDependency list = 1; +} + +message WorkspaceDependencies { + map dependencies = 1; +} + message TransitiveDepsRequest { bytes contents = 1; - string workspace_dir = 2; - map unresolved_deps = 3; + PackageManager package_manager = 2; + map workspaces = 3; } message TransitiveDepsResponse { oneof response { - LockfilePackageList packages = 1; + WorkspaceDependencies dependencies = 1; string error = 2; } } @@ -79,8 +96,9 @@ message LockfilePackageList { message SubgraphRequest { bytes contents = 1; - repeated string workspaces = 2; - repeated string packages = 3; + string package_manager = 2; + repeated string workspaces = 3; + repeated string packages = 4; } message SubgraphResponse { diff --git a/crates/turborepo-ffi/src/lib.rs b/crates/turborepo-ffi/src/lib.rs index a44fca4761693..720853594463b 100644 --- a/crates/turborepo-ffi/src/lib.rs +++ b/crates/turborepo-ffi/src/lib.rs @@ -6,7 +6,7 @@ mod lockfile; use std::{mem::ManuallyDrop, path::PathBuf}; -pub use lockfile::{npm_subgraph, npm_transitive_closure}; +pub use lockfile::{npm_subgraph, transitive_closure}; mod proto { include!(concat!(env!("OUT_DIR"), "/_.rs")); diff --git a/crates/turborepo-ffi/src/lockfile.rs b/crates/turborepo-ffi/src/lockfile.rs index b02afa34734da..326b6cd11d399 100644 --- a/crates/turborepo-ffi/src/lockfile.rs +++ 
b/crates/turborepo-ffi/src/lockfile.rs @@ -1,8 +1,11 @@ -use thiserror::Error; -use turborepo_lockfiles::{ - npm_subgraph as real_npm_subgraph, transitive_closure, NpmLockfile, Package, +use std::{ + collections::{HashMap, HashSet}, + fmt, }; +use thiserror::Error; +use turborepo_lockfiles::{self, npm_subgraph as real_npm_subgraph, NpmLockfile, Package}; + use super::{proto, Buffer}; impl From for proto::LockfilePackage { @@ -19,16 +22,16 @@ impl From for proto::LockfilePackage { #[derive(Debug, Error)] enum Error { #[error("error performing lockfile operation")] - LockfileError(#[from] turborepo_lockfiles::Error), + Lockfile(#[from] turborepo_lockfiles::Error), #[error("error decoding protobuf")] - ProtobufError(#[from] prost::DecodeError), + Protobuf(#[from] prost::DecodeError), } #[no_mangle] -pub extern "C" fn npm_transitive_closure(buf: Buffer) -> Buffer { +pub extern "C" fn transitive_closure(buf: Buffer) -> Buffer { use proto::transitive_deps_response::Response; - let response = match npm_transitive_closure_inner(buf) { - Ok(list) => Response::Packages(list), + let response = match transitive_closure_inner(buf) { + Ok(list) => Response::Dependencies(list), Err(err) => Response::Error(err.to_string()), }; proto::TransitiveDepsResponse { @@ -37,17 +40,34 @@ pub extern "C" fn npm_transitive_closure(buf: Buffer) -> Buffer { .into() } -fn npm_transitive_closure_inner(buf: Buffer) -> Result { +fn transitive_closure_inner(buf: Buffer) -> Result { let request: proto::TransitiveDepsRequest = buf.into_proto()?; - let lockfile = NpmLockfile::load(request.contents.as_slice())?; - let transitive_deps = - transitive_closure(&lockfile, request.workspace_dir, request.unresolved_deps)?; - let list: Vec<_> = transitive_deps - .into_iter() - .map(proto::LockfilePackage::from) - .collect(); + match request.package_manager() { + proto::PackageManager::Npm => npm_transitive_closure_inner(request), + } +} - Ok(proto::LockfilePackageList { list }) +fn npm_transitive_closure_inner( 
+ request: proto::TransitiveDepsRequest, +) -> Result { + let proto::TransitiveDepsRequest { + contents, + workspaces, + .. + } = request; + let lockfile = NpmLockfile::load(contents.as_slice())?; + let dependencies = workspaces + .into_iter() + .map(|(workspace_dir, dependencies)| { + let closure = turborepo_lockfiles::transitive_closure( + &lockfile, + &workspace_dir, + dependencies.into(), + )?; + Ok((workspace_dir, proto::LockfilePackageList::from(closure))) + }) + .collect::, Error>>()?; + Ok(proto::WorkspaceDependencies { dependencies }) } #[no_mangle] @@ -67,3 +87,32 @@ fn npm_subgraph_inner(buf: Buffer) -> Result, Error> { let contents = real_npm_subgraph(&request.contents, &request.workspaces, &request.packages)?; Ok(contents) } + +impl From for HashMap { + fn from(other: proto::PackageDependencyList) -> Self { + other + .list + .into_iter() + .map(|proto::PackageDependency { name, range }| (name, range)) + .collect() + } +} + +impl From> for proto::LockfilePackageList { + fn from(value: HashSet) -> Self { + proto::LockfilePackageList { + list: value + .into_iter() + .map(proto::LockfilePackage::from) + .collect(), + } + } +} + +impl fmt::Display for proto::PackageManager { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + proto::PackageManager::Npm => "npm", + }) + } +} diff --git a/crates/turborepo-lockfiles/src/berry/mod.rs b/crates/turborepo-lockfiles/src/berry/mod.rs index e8569313d1410..c778f0a5d02a3 100644 --- a/crates/turborepo-lockfiles/src/berry/mod.rs +++ b/crates/turborepo-lockfiles/src/berry/mod.rs @@ -753,8 +753,7 @@ mod test { .map(|(k, v)| (k.to_string(), v.to_string())) .collect(); - let closure = - transitive_closure(&lockfile, "packages/ui".to_string(), unresolved_deps).unwrap(); + let closure = transitive_closure(&lockfile, "packages/ui", unresolved_deps).unwrap(); assert!(closure.contains(&Package { key: "ajv@npm:8.11.2".into(), diff --git a/crates/turborepo-lockfiles/src/lib.rs 
b/crates/turborepo-lockfiles/src/lib.rs index 34d9f161e559d..74ccc86f5f939 100644 --- a/crates/turborepo-lockfiles/src/lib.rs +++ b/crates/turborepo-lockfiles/src/lib.rs @@ -36,13 +36,13 @@ pub trait Lockfile { // this should get replaced by petgraph in the future :) pub fn transitive_closure( lockfile: &L, - workspace_path: String, + workspace_path: &str, unresolved_deps: HashMap, ) -> Result, Error> { let mut transitive_deps = HashSet::new(); transitive_closure_helper( lockfile, - &workspace_path, + workspace_path, unresolved_deps, &mut transitive_deps, )?; From a83c911add656cf5522052d95222e6c131e62a53 Mon Sep 17 00:00:00 2001 From: Mehul Kar Date: Wed, 26 Apr 2023 13:57:39 -0700 Subject: [PATCH 18/24] Invoke prysk with the directory name to ensure all tests run (#4715) Running `.cram_env/bin/prysk tests/**/*.t` runs 98 tests. But the same thing through a package.json script `pnpm test` runs 74 tests. I'm not sure why and which ones are being excluded. But for an immediate fix, this PR removes the globbing so we run all tests. 
--- turborepo-tests/integration/package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/turborepo-tests/integration/package.json b/turborepo-tests/integration/package.json index c9acb28f651de..c4e38418d1c01 100644 --- a/turborepo-tests/integration/package.json +++ b/turborepo-tests/integration/package.json @@ -4,9 +4,9 @@ "test:setup_python": "python3 -m venv .cram_env", "test:setup_pip": "pnpm test:setup_python && .cram_env/bin/python3 -m pip install --quiet --upgrade pip", "test:setup_prysk": "pnpm test:setup_pip && .cram_env/bin/pip install prysk", - "test": "pnpm test:setup_prysk && .cram_env/bin/prysk --shell=`which bash` tests/**/*.t", - "test:interactive": ".cram_env/bin/prysk -i --shell=`which bash` tests/**/*.t", - "test:parallel": ".cram_env/bin/pytest -n auto tests/**/*.t --prysk-shell=`which bash`", + "test": "pnpm test:setup_prysk && .cram_env/bin/prysk --shell=`which bash` tests", + "test:interactive": ".cram_env/bin/prysk -i --shell=`which bash` tests", + "test:parallel": ".cram_env/bin/pytest -n auto tests --prysk-shell=`which bash`", "pretest:parallel": ".cram_env/bin/pip3 install --quiet pytest \"prysk[pytest-plugin]\" pytest-xdist" }, "dependencies": { From f60b401659ec3f6c008d9879ab709b96f587cef6 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Wed, 26 Apr 2023 14:01:58 -0700 Subject: [PATCH 19/24] Fix errors-only and help output integration tests (#4713) --- turborepo-tests/integration/tests/errors-only.t | 6 ++++-- turborepo-tests/integration/tests/one_script_error.t | 1 + turborepo-tests/integration/tests/turbo_help.t | 2 ++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/turborepo-tests/integration/tests/errors-only.t b/turborepo-tests/integration/tests/errors-only.t index 8ef5173437d61..81eab3ef3c531 100644 --- a/turborepo-tests/integration/tests/errors-only.t +++ b/turborepo-tests/integration/tests/errors-only.t @@ -38,7 +38,7 @@ Setup \xe2\x80\xa2 Packages in scope: app-a (esc) \xe2\x80\xa2 
Running builderror in 1 packages (esc) \xe2\x80\xa2 Remote caching disabled (esc) - app-a:builderror: ERROR: command finished with error: command .* npm run builderror exited \(1\) (re) + app-a:builderror: cache miss, executing e66674cd52f90ec9 app-a:builderror: app-a:builderror: > builderror app-a:builderror: > echo "error builderror app-a!" && exit 1 @@ -48,6 +48,7 @@ Setup app-a:builderror: npm ERR! Error: command failed app-a:builderror: npm ERR! in workspace: app-a app-a:builderror: npm ERR! at location: .* (re) + app-a:builderror: ERROR: command finished with error: command .* npm run builderror exited \(1\) (re) command .* npm run builderror exited \(1\) (re) Tasks: 0 successful, 1 total @@ -66,7 +67,7 @@ Setup \xe2\x80\xa2 Packages in scope: app-a (esc) \xe2\x80\xa2 Running builderror2 in 1 packages (esc) \xe2\x80\xa2 Remote caching disabled (esc) - app-a:builderror2: ERROR: command finished with error: command .* npm run builderror2 exited \(1\) (re) + app-a:builderror2: cache miss, executing de9a4f19fc30bada app-a:builderror2: app-a:builderror2: > builderror2 app-a:builderror2: > echo "error builderror2 app-a!" && exit 1 @@ -76,6 +77,7 @@ Setup app-a:builderror2: npm ERR! Error: command failed app-a:builderror2: npm ERR! in workspace: app-a app-a:builderror2: npm ERR! 
at location: .* (re) + app-a:builderror2: ERROR: command finished with error: command .* npm run builderror2 exited \(1\) (re) command .* npm run builderror2 exited \(1\) (re) Tasks: 0 successful, 1 total diff --git a/turborepo-tests/integration/tests/one_script_error.t b/turborepo-tests/integration/tests/one_script_error.t index d0b170d60c8c2..6f33fe16c5a37 100644 --- a/turborepo-tests/integration/tests/one_script_error.t +++ b/turborepo-tests/integration/tests/one_script_error.t @@ -90,6 +90,7 @@ Make sure error code isn't swallowed with continue my-app:okay2: > echo 'working' my-app:okay2: my-app:okay2: working + command \((.*)/apps/my-app\) npm run error exited \(1\) (re) Tasks: 2 successful, 3 total Cached: 1 cached, 3 total diff --git a/turborepo-tests/integration/tests/turbo_help.t b/turborepo-tests/integration/tests/turbo_help.t index 5b1de8e70def5..297ecf483a760 100644 --- a/turborepo-tests/integration/tests/turbo_help.t +++ b/turborepo-tests/integration/tests/turbo_help.t @@ -137,6 +137,7 @@ Test help flag for link command --no-gitignore Do not create or modify .gitignore (default false) --version --skip-infer Skip any attempts to infer which version of Turbo the project is configured to use + --target Specify what should be linked (default "remote cache") [default: remote-cache] [possible values: remote-cache, spaces] --no-update-notifier Disable the turbo update notification --api Override the endpoint for API calls --color Force color usage in the terminal @@ -163,6 +164,7 @@ Test help flag for unlink command Usage: turbo unlink [OPTIONS] Options: + --target Specify what should be unlinked (default "remote cache") [default: remote-cache] [possible values: remote-cache, spaces] --version --skip-infer Skip any attempts to infer which version of Turbo the project is configured to use --no-update-notifier Disable the turbo update notification From b7e03af0161ac5c3c8bdd88fd5e3030d2cd06d56 Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Wed, 26 Apr 2023 
23:10:45 +0200 Subject: [PATCH 20/24] chunk hash need to include availability root (#4714) ### Description fixes a case where multiple assets write to the same file --- crates/turbopack-ecmascript/src/chunk/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/turbopack-ecmascript/src/chunk/mod.rs b/crates/turbopack-ecmascript/src/chunk/mod.rs index 3a18e7c3289a1..ee14257bb887f 100644 --- a/crates/turbopack-ecmascript/src/chunk/mod.rs +++ b/crates/turbopack-ecmascript/src/chunk/mod.rs @@ -339,7 +339,7 @@ impl Asset for EcmascriptChunk { let need_root = if let [(_, main_entry)] = &assets[..] { main_entry.resolve().await? != ident.resolve().await? } else { - false + true }; if need_root { let availability_root_key = StringVc::cell("current_availability_root".to_string()); From 2b0654e0e19da62547f968fb95bb2d0fe982ca1c Mon Sep 17 00:00:00 2001 From: Alex Kirszenberg Date: Wed, 26 Apr 2023 23:12:25 +0200 Subject: [PATCH 21/24] Allow the dev server socket to be reused immediately (#4709) ### Description This avoids running into "Address already in use (os error 48)" when restarting the dev server in quick succession. --------- Co-authored-by: Tobias Koppers --- crates/turbopack-dev-server/src/lib.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/turbopack-dev-server/src/lib.rs b/crates/turbopack-dev-server/src/lib.rs index f12ef6a0c2be6..d14d78a40bf86 100644 --- a/crates/turbopack-dev-server/src/lib.rs +++ b/crates/turbopack-dev-server/src/lib.rs @@ -107,6 +107,12 @@ impl DevServer { // real TCP listener, see if it bound, and get its bound address. let socket = Socket::new(Domain::for_address(addr), Type::STREAM, Some(Protocol::TCP)) .context("unable to create socket")?; + // Allow the socket to be reused immediately after closing. This ensures that + // the dev server can be restarted on the same address without a buffer time for + // the OS to release the socket. 
+ // https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse + #[cfg(not(windows))] + let _ = socket.set_reuse_address(true); if matches!(addr, SocketAddr::V6(_)) { // When possible bind to v4 and v6, otherwise ignore the error let _ = socket.set_only_v6(false); From 317c49afd53516b770ba43a91bd3f84dfd23767c Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Wed, 26 Apr 2023 23:22:20 +0200 Subject: [PATCH 22/24] errors lead to consistent exit code in issue detail (#4716) ### Description issue snapshots are flaky due to alternating issue detail with exist code or not. It's a race condition between process exit after sending error and process killing after receiving error. --- crates/turbopack-node/js/src/ipc/index.ts | 3 ++- crates/turbopack-node/src/render/issue.rs | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/crates/turbopack-node/js/src/ipc/index.ts b/crates/turbopack-node/js/src/ipc/index.ts index 53abf1344bc79..d13dd6190e1f4 100644 --- a/crates/turbopack-node/js/src/ipc/index.ts +++ b/crates/turbopack-node/js/src/ipc/index.ts @@ -134,8 +134,9 @@ function createIpc( }); } catch (err) { // ignore and exit anyway + process.exit(1); } - process.exit(1); + process.exit(0); }, }; } diff --git a/crates/turbopack-node/src/render/issue.rs b/crates/turbopack-node/src/render/issue.rs index 569566c28de5d..6e234755df9f2 100644 --- a/crates/turbopack-node/src/render/issue.rs +++ b/crates/turbopack-node/src/render/issue.rs @@ -38,7 +38,9 @@ impl Issue for RenderingIssue { let mut details = vec![]; if let Some(status) = self.status { - details.push(format!("Node.js exit code: {status}")); + if status != 0 { + details.push(format!("Node.js exit code: {status}")); + } } Ok(StringVc::cell(details.join("\n"))) From d636cd4d83dadee777e03449ef1df2b2f927dcef Mon Sep 17 00:00:00 2001 From: Nicholas Yang Date: Wed, 26 Apr 2023 18:19:50 -0400 Subject: [PATCH 23/24] fix(turborepo): Turbostate deserialization (#4712) ### 
Description Accidentally left the key as `remote_config` on the Go side instead of `api_client_config` ### Testing Instructions Tested locally with ``.cram_env/bin/prysk --shell=`which bash` tests/api-client-config.t`` --- cli/internal/turbostate/turbostate.go | 2 +- crates/turborepo-lib/src/execution_state.rs | 4 ++-- .../integration/tests/api-client-config.t | 18 +++++++++--------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/cli/internal/turbostate/turbostate.go b/cli/internal/turbostate/turbostate.go index 3f34df7a45bd8..ef49b6c649a69 100644 --- a/cli/internal/turbostate/turbostate.go +++ b/cli/internal/turbostate/turbostate.go @@ -89,7 +89,7 @@ type ParsedArgsFromRust struct { // ExecutionState is the entire state of a turbo execution that is passed from the Rust shim. type ExecutionState struct { - APIClientConfig APIClientConfig `json:"remote_config"` + APIClientConfig APIClientConfig `json:"api_client_config"` CLIArgs ParsedArgsFromRust `json:"cli_args"` } diff --git a/crates/turborepo-lib/src/execution_state.rs b/crates/turborepo-lib/src/execution_state.rs index 9f6558e8f8c3d..68ebc84f25118 100644 --- a/crates/turborepo-lib/src/execution_state.rs +++ b/crates/turborepo-lib/src/execution_state.rs @@ -29,7 +29,7 @@ impl<'a> TryFrom<&'a CommandBase> for ExecutionState<'a> { let client_config = base.client_config()?; let args = base.args(); - let remote_config = APIClientConfig { + let api_client_config = APIClientConfig { token: user_config.token(), team_id: repo_config.team_id(), team_slug: repo_config.team_slug(), @@ -39,7 +39,7 @@ impl<'a> TryFrom<&'a CommandBase> for ExecutionState<'a> { }; Ok(ExecutionState { - api_client_config: remote_config, + api_client_config, cli_args: base.args(), }) } diff --git a/turborepo-tests/integration/tests/api-client-config.t b/turborepo-tests/integration/tests/api-client-config.t index a53706b9e5202..40a079fe0796a 100644 --- a/turborepo-tests/integration/tests/api-client-config.t +++ 
b/turborepo-tests/integration/tests/api-client-config.t @@ -3,7 +3,7 @@ Setup $ . ${TESTDIR}/_helpers/setup_monorepo.sh $(pwd) Run test run - $ ${TURBO} run build --__test-run | jq .remote_config + $ ${TURBO} run build --__test-run | jq .api_client_config { "token": null, "team_id": null, @@ -14,29 +14,29 @@ Run test run } Run test run with api overloaded - $ ${TURBO} run build --__test-run --api http://localhost:8000 | jq .remote_config.api_url - null + $ ${TURBO} run build --__test-run --api http://localhost:8000 | jq .api_client_config.api_url + "http://localhost:8000" Run test run with token overloaded - $ ${TURBO} run build --__test-run --token 1234567890 | jq .remote_config.token + $ ${TURBO} run build --__test-run --token 1234567890 | jq .api_client_config.token "1234567890" Run test run with token overloaded from both TURBO_TOKEN and VERCEL_ARTIFACTS_TOKEN - $ TURBO_TOKEN=turbo VERCEL_ARTIFACTS_TOKEN=vercel ${TURBO} run build --__test-run | jq .remote_config.token + $ TURBO_TOKEN=turbo VERCEL_ARTIFACTS_TOKEN=vercel ${TURBO} run build --__test-run | jq .api_client_config.token "vercel" Run test run with team overloaded - $ ${TURBO} run build --__test-run --team vercel | jq .remote_config.team_slug + $ ${TURBO} run build --__test-run --team vercel | jq .api_client_config.team_slug "vercel" Run test run with team overloaded from both env and flag (flag should take precedence) - $ TURBO_TEAM=vercel ${TURBO} run build --__test-run --team turbo | jq .remote_config.team_slug + $ TURBO_TEAM=vercel ${TURBO} run build --__test-run --team turbo | jq .api_client_config.team_slug "turbo" Run test run with remote cache timeout env variable set - $ TURBO_REMOTE_CACHE_TIMEOUT=123 ${TURBO} run build --__test-run | jq .remote_config.timeout + $ TURBO_REMOTE_CACHE_TIMEOUT=123 ${TURBO} run build --__test-run | jq .api_client_config.timeout 123 Run test run with remote cache timeout from both env and flag (flag should take precedence) - $ TURBO_REMOTE_CACHE_TIMEOUT=123 ${TURBO} 
run build --__test-run --remote-cache-timeout 456 | jq .remote_config.timeout + $ TURBO_REMOTE_CACHE_TIMEOUT=123 ${TURBO} run build --__test-run --remote-cache-timeout 456 | jq .api_client_config.timeout 456 From c463f845c8384aea9fca5eaf6b9f4378858670dc Mon Sep 17 00:00:00 2001 From: Mehul Kar Date: Wed, 26 Apr 2023 15:21:08 -0700 Subject: [PATCH 24/24] Trigger turborepo tests in CI correctly (#4717) - integration tests when integration tests change or any turborepo src changes - e2e tests when e2e tests or any turborepo src changes - change ids so they're more clear about which file changes matter when --- .github/workflows/test.yml | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 021ddc68c1142..f86eb3ae0a67c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -116,8 +116,8 @@ jobs: crates/turbopack-bench/** !*.md - - name: Turborepo related changes - id: turborepo + - name: Turborepo Rust related changes + id: turborepo_rust uses: technote-space/get-diff-action@v6 with: PATTERNS: | @@ -138,23 +138,28 @@ jobs: !**.md !**.mdx - - name: Go related changes - id: go + - name: Turborepo Go related changes + id: turborepo_go uses: technote-space/get-diff-action@v6 with: PATTERNS: | cli/** - - name: Go E2E related changes - id: go_e2e + - name: Turborepo integration tests changes + id: turborepo_integration uses: technote-space/get-diff-action@v6 with: PATTERNS: | - cli/** - crates/turborepo* - crates/turborepo*/** - crates/turbo-updater - Cargo.lock + turborepo-tests/integration/** + turborepo-tests/helpers/** + + - name: Turborepo e2e tests changes + id: turborepo_e2e + uses: technote-space/get-diff-action@v6 + with: + PATTERNS: | + turborepo-tests/e2e/** + turborepo-tests/helpers/** - name: Examples related changes id: examples @@ -178,10 +183,11 @@ jobs: # We only test workspace dependency changes on main, not on PRs to speed 
up CI cargo_on_main: ${{ steps.ci.outputs.diff != '' || (steps.cargo.outputs.diff != '' && github.event_name == 'push' && github.ref == 'refs/heads/main') }} turbopack: ${{ steps.ci.outputs.diff != '' || steps.turbopack.outputs.diff != '' }} - turborepo: ${{ steps.ci.outputs.diff != '' || steps.turborepo.outputs.diff != '' }} + turborepo_rust: ${{ steps.ci.outputs.diff != '' || steps.turborepo_rust.outputs.diff != '' }} turbopack_bench: ${{ steps.ci.outputs.diff != '' || steps.turbopack_bench.outputs.diff != '' }} - go: ${{ steps.ci.outputs.diff != '' || steps.go.outputs.diff != '' }} - go_e2e: ${{ steps.ci.outputs.diff != '' || steps.go.outputs.diff != '' || steps.go_e2e.outputs.diff != '' }} + go: ${{ steps.ci.outputs.diff != '' || steps.turborepo_go.outputs.diff != '' }} + go_e2e: ${{ steps.ci.outputs.diff != '' || steps.turborepo_go.outputs.diff != '' || steps.turborepo_rust.outputs.diff != '' || steps.turborepo_e2e.outputs.diff != '' }} + go_integration: ${{ steps.ci.outputs.diff != '' || steps.turborepo_go.outputs.diff != '' || steps.turborepo_rust.outputs.diff != '' || steps.turborepo_integration.outputs.diff != '' }} examples: ${{ steps.ci.outputs.diff != '' || steps.examples.outputs.diff != '' }} format: ${{ steps.ci.outputs.diff != '' || steps.format.outputs.diff != '' }} push: ${{ steps.ci.outputs.diff != '' || github.event_name == 'push' }} @@ -248,7 +254,7 @@ jobs: go_integration: name: Go Integration Tests needs: determine_jobs - if: needs.determine_jobs.outputs.go_e2e == 'true' + if: needs.determine_jobs.outputs.go_integration == 'true' timeout-minutes: 30 runs-on: ${{ matrix.os.runner }} strategy: @@ -581,7 +587,7 @@ jobs: turborepo_rust_test: needs: [determine_jobs, rust_prepare] # We test dependency changes only on main - if: needs.determine_jobs.outputs.turborepo == 'true' || needs.determine_jobs.outputs.cargo_on_main == 'true' + if: needs.determine_jobs.outputs.turborepo_rust == 'true' || needs.determine_jobs.outputs.cargo_on_main == 'true' 
strategy: fail-fast: false matrix: