diff --git a/tools/setup-envtest/README.md b/tools/setup-envtest/README.md
index 0482dd3162..c03a434037 100644
--- a/tools/setup-envtest/README.md
+++ b/tools/setup-envtest/README.md
@@ -52,11 +52,6 @@ setup-envtest sideload 1.16.2 < downloaded-envtest.tar.gz
 # To download from a custom index use the following:
 setup-envtest use --index https://custom.com/envtest-releases.yaml
 
-# To download from the kubebuilder-tools GCS bucket: (default behavior before v0.18)
-# Note: This is a Google-owned bucket and it might be shutdown at any time
-# see: https://github.com/kubernetes/k8s.io/issues/2647#event-12439345373
-# Note: This flag will also be removed soon.
-setup-envtest use --use-deprecated-gcs
 ```
 
 ## Where does it put all those binaries?
@@ -107,8 +102,7 @@ Then, you have a few options for managing your binaries:
 
   `--use-env` makes the command unconditionally use the value of
   KUBEBUILDER_ASSETS as long as it contains the required binaries, and
-  `-i` indicates that we only ever want to work with installed binaries
-  (no reaching out the remote GCS storage).
+  `-i` indicates that we only ever want to work with installed binaries.
 
   As noted above, you can use `ENVTEST_INSTALLED_ONLY=true` to switch `-i`
   on by default, and you can use `ENVTEST_USE_ENV=true` to switch
@@ -123,25 +117,3 @@ Then, you have a few options for managing your binaries:
 - If you want to talk to some internal source via HTTP, you can simply set `--index`
   The index must contain references to envtest binary archives in the same format as:
   https://raw.githubusercontent.com/kubernetes-sigs/controller-tools/master/envtest-releases.yaml
-
-- If you want to talk to some internal source in a GCS "style", you can use the
-  `--remote-bucket` and `--remote-server` options together with `--use-deprecated-gcs`.
-  Note: This is deprecated and will be removed soon. The former sets which
-  GCS bucket to download from, and the latter sets the host to talk to as
-  if it were a GCS endpoint. Theoretically, you could use the latter
-  version to run an internal "mirror" -- the tool expects
-
-  - `HOST/storage/v1/b/BUCKET/o` to return JSON like
-
-    ```json
-    {"items": [
-      {"name": "kubebuilder-tools-X.Y.Z-os-arch.tar.gz", "md5Hash": ""},
-      {"name": "kubebuilder-tools-X.Y.Z-os-arch.tar.gz", "md5Hash": ""}
-    ]}
-    ```
-
-  - `HOST/storage/v1/b/BUCKET/o/TARBALL_NAME` to return JSON like
-    `{"name": "kubebuilder-tools-X.Y.Z-os-arch.tar.gz", "md5Hash": ""}`
-
-  - `HOST/storage/v1/b/BUCKET/o/TARBALL_NAME?alt=media` to return the
-    actual file contents
diff --git a/tools/setup-envtest/env/env.go b/tools/setup-envtest/env/env.go
index 24857916d7..6168739eb6 100644
--- a/tools/setup-envtest/env/env.go
+++ b/tools/setup-envtest/env/env.go
@@ -42,10 +42,6 @@ type Env struct {
 	// contact remote services & re-download.
 	ForceDownload bool
 
-	// UseDeprecatedGCS signals if the GCS client is used.
-	// Note: This will be removed together with remote.GCSClient.
-	UseDeprecatedGCS bool
-
 	// Client is our remote client for contacting remote services.
 	Client remote.Client
 
@@ -291,7 +287,7 @@ func (e *Env) Fetch(ctx context.Context) {
 		}
 	})
 
-	archiveOut, err := e.FS.TempFile("", "*-"+e.Platform.ArchiveName(e.UseDeprecatedGCS, *e.Version.AsConcrete()))
+	archiveOut, err := e.FS.TempFile("", "*-"+e.Platform.ArchiveName(*e.Version.AsConcrete()))
 	if err != nil {
 		ExitCause(2, err, "unable to open file to write downloaded archive to")
 	}
diff --git a/tools/setup-envtest/main.go b/tools/setup-envtest/main.go
index 7e2761a4f6..3121e206fd 100644
--- a/tools/setup-envtest/main.go
+++ b/tools/setup-envtest/main.go
@@ -50,16 +50,7 @@ var (
 	binDir = flag.String("bin-dir", "",
 		"directory to store binary assets (default: $OS_SPECIFIC_DATA_DIR/envtest-binaries)")
 
-	useDeprecatedGCS = flag.Bool("use-deprecated-gcs", false, "use GCS to fetch envtest binaries. Note: This is deprecated and will be removed soon. For more details see: https://github.com/kubernetes-sigs/controller-runtime/pull/2811")
-
-	// These flags are only used with --use-deprecated-gcs.
-	remoteBucket = flag.String("remote-bucket", "kubebuilder-tools", "remote GCS bucket to download from (only used with --use-deprecated-gcs)")
-	remoteServer = flag.String("remote-server", "storage.googleapis.com",
-		"remote server to query from. You can override this if you want to run "+
-			"an internal storage server instead, or for testing. (only used with --use-deprecated-gcs)")
-
-	// This flag is only used if --use-deprecated-gcs is not set or false (default).
-	index = flag.String("index", remote.DefaultIndexURL, "index to discover envtest binaries (only used if --use-deprecated-gcs is not set, or set to false)")
+	index = flag.String("index", remote.DefaultIndexURL, "index to discover envtest binaries")
 )
 
 // TODO(directxman12): handle interrupts?
@@ -88,29 +79,18 @@ func setupEnv(globalLog logr.Logger, version string) *envp.Env {
 	}
 	log.V(1).Info("using binaries directory", "dir", *binDir)
 
-	var client remote.Client
-	if useDeprecatedGCS != nil && *useDeprecatedGCS {
-		client = &remote.GCSClient{ //nolint:staticcheck // deprecation accepted for now
-			Log:    globalLog.WithName("storage-client"),
-			Bucket: *remoteBucket,
-			Server: *remoteServer,
-		}
-		log.V(1).Info("using deprecated GCS client", "bucket", *remoteBucket, "server", *remoteServer)
-	} else {
-		client = &remote.HTTPClient{
-			Log:      globalLog.WithName("storage-client"),
-			IndexURL: *index,
-		}
-		log.V(1).Info("using HTTP client", "index", *index)
+	client := &remote.HTTPClient{
+		Log:      globalLog.WithName("storage-client"),
+		IndexURL: *index,
 	}
+	log.V(1).Info("using HTTP client", "index", *index)
 
 	env := &envp.Env{
-		Log:              globalLog,
-		UseDeprecatedGCS: useDeprecatedGCS != nil && *useDeprecatedGCS,
-		Client:           client,
-		VerifySum:        *verify,
-		ForceDownload:    *force,
-		NoDownload:       *installedOnly,
+		Log:           globalLog,
+		Client:        client,
+		VerifySum:     *verify,
+		ForceDownload: *force,
+		NoDownload:    *installedOnly,
 		Platform: versions.PlatformItem{
 			Platform: versions.Platform{
 				OS:   *targetOS,
@@ -189,7 +169,7 @@ Commands:
 
 	use:
 		get information for the requested version, downloading it if necessary and allowed.
-		Needs a concrete platform (no wildcards), but wilcard versions are supported.
+		Needs a concrete platform (no wildcards), but wildcard versions are supported.
 
 	list:
 		list installed *and* available versions matching the given version & platform.
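With the GCS path gone, every download in setup-envtest goes through the HTTP index client configured above. For illustration only (the mirror URL below is made up and `logr.Discard()` is just a stand-in logger), the same client can be driven directly against a custom index:

```go
package main

import (
	"context"
	"fmt"

	"github.com/go-logr/logr"
	"sigs.k8s.io/controller-runtime/tools/setup-envtest/remote"
)

func main() {
	// Point the client at a custom index, e.g. an internal mirror of
	// envtest-releases.yaml (hypothetical URL).
	client := &remote.HTTPClient{
		Log:      logr.Discard(),
		IndexURL: "https://internal.example.com/envtest-releases.yaml",
	}

	// List the version/platform sets described by the index.
	sets, err := client.ListVersions(context.Background())
	if err != nil {
		panic(err)
	}
	for _, set := range sets {
		fmt.Println(set.Version, set.Platforms)
	}
}
```

The index itself is the same YAML document format served at `remote.DefaultIndexURL` (the controller-tools `envtest-releases.yaml` linked from the README): going by the `remote.Index`/`remote.Archive` fields exercised in the tests below, it maps release versions to archive names, each carrying a download link and a SHA-512 hex hash; exact key names are best taken from the published file rather than inferred from this sketch.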
diff --git a/tools/setup-envtest/remote/gcs_client.go b/tools/setup-envtest/remote/gcs_client.go deleted file mode 100644 index 85f321d5c5..0000000000 --- a/tools/setup-envtest/remote/gcs_client.go +++ /dev/null @@ -1,202 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// Copyright 2021 The Kubernetes Authors - -package remote - -import ( - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "net/url" - "path" - "sort" - - "github.com/go-logr/logr" - "sigs.k8s.io/controller-runtime/tools/setup-envtest/versions" -) - -// objectList is the parts we need of the GCS "list-objects-in-bucket" endpoint. -type objectList struct { - Items []bucketObject `json:"items"` - NextPageToken string `json:"nextPageToken"` -} - -// bucketObject is the parts we need of the GCS object metadata. -type bucketObject struct { - Name string `json:"name"` - Hash string `json:"md5Hash"` -} - -var _ Client = &GCSClient{} - -// GCSClient is a basic client for fetching versions of the envtest binary archives -// from GCS. -// -// Deprecated: This client is deprecated and will be removed soon. -// The kubebuilder GCS bucket that we use with this client might be shutdown at any time, -// see: https://github.com/kubernetes/k8s.io/issues/2647. -type GCSClient struct { - // Bucket is the bucket to fetch from. - Bucket string - - // Server is the GCS-like storage server - Server string - - // Log allows us to log. - Log logr.Logger - - // Insecure uses http for testing - Insecure bool -} - -func (c *GCSClient) scheme() string { - if c.Insecure { - return "http" - } - return "https" -} - -// ListVersions lists all available tools versions in the given bucket, along -// with supported os/arch combos and the corresponding hash. -// -// The results are sorted with newer versions first. 
-func (c *GCSClient) ListVersions(ctx context.Context) ([]versions.Set, error) { - loc := &url.URL{ - Scheme: c.scheme(), - Host: c.Server, - Path: path.Join("/storage/v1/b/", c.Bucket, "o"), - } - query := make(url.Values) - - knownVersions := map[versions.Concrete][]versions.PlatformItem{} - for cont := true; cont; { - c.Log.V(1).Info("listing bucket to get versions", "bucket", c.Bucket) - - loc.RawQuery = query.Encode() - req, err := http.NewRequestWithContext(ctx, "GET", loc.String(), nil) - if err != nil { - return nil, fmt.Errorf("unable to construct request to list bucket items: %w", err) - } - - resp, err := http.DefaultClient.Do(req) - if err != nil { - return nil, fmt.Errorf("unable to perform request to list bucket items: %w", err) - } - - err = func() error { - defer resp.Body.Close() - if resp.StatusCode != 200 { - return fmt.Errorf("unable list bucket items -- got status %q from GCS", resp.Status) - } - - var list objectList - if err := json.NewDecoder(resp.Body).Decode(&list); err != nil { - return fmt.Errorf("unable unmarshal bucket items list: %w", err) - } - - // continue listing if needed - cont = list.NextPageToken != "" - query.Set("pageToken", list.NextPageToken) - - for _, item := range list.Items { - ver, details := versions.ExtractWithPlatform(versions.ArchiveRE, item.Name) - if ver == nil { - c.Log.V(1).Info("skipping bucket object -- does not appear to be a versioned tools object", "name", item.Name) - continue - } - c.Log.V(1).Info("found version", "version", ver, "platform", details) - knownVersions[*ver] = append(knownVersions[*ver], versions.PlatformItem{ - Platform: details, - Hash: &versions.Hash{ - Type: versions.MD5HashType, - Encoding: versions.Base64HashEncoding, - Value: item.Hash, - }, - }) - } - - return nil - }() - if err != nil { - return nil, err - } - } - - res := make([]versions.Set, 0, len(knownVersions)) - for ver, details := range knownVersions { - res = append(res, versions.Set{Version: ver, Platforms: details}) - } - // sort in inverse order so that the newest one is first - sort.Slice(res, func(i, j int) bool { - first, second := res[i].Version, res[j].Version - return first.NewerThan(second) - }) - - return res, nil -} - -// GetVersion downloads the given concrete version for the given concrete platform, writing it to the out. -func (c *GCSClient) GetVersion(ctx context.Context, version versions.Concrete, platform versions.PlatformItem, out io.Writer) error { - itemName := platform.ArchiveName(true, version) - loc := &url.URL{ - Scheme: c.scheme(), - Host: c.Server, - Path: path.Join("/storage/v1/b/", c.Bucket, "o", itemName), - RawQuery: "alt=media", - } - - req, err := http.NewRequestWithContext(ctx, "GET", loc.String(), nil) - if err != nil { - return fmt.Errorf("unable to construct request to fetch %s: %w", itemName, err) - } - resp, err := http.DefaultClient.Do(req) - if err != nil { - return fmt.Errorf("unable to fetch %s (%s): %w", itemName, req.URL, err) - } - defer resp.Body.Close() - - if resp.StatusCode != 200 { - return fmt.Errorf("unable fetch %s (%s) -- got status %q from GCS", itemName, req.URL, resp.Status) - } - - return readBody(resp, out, itemName, platform) -} - -// FetchSum fetches the checksum for the given concrete version & platform into -// the given platform item. 
-func (c *GCSClient) FetchSum(ctx context.Context, ver versions.Concrete, pl *versions.PlatformItem) error { - itemName := pl.ArchiveName(true, ver) - loc := &url.URL{ - Scheme: c.scheme(), - Host: c.Server, - Path: path.Join("/storage/v1/b/", c.Bucket, "o", itemName), - } - - req, err := http.NewRequestWithContext(ctx, "GET", loc.String(), nil) - if err != nil { - return fmt.Errorf("unable to construct request to fetch metadata for %s: %w", itemName, err) - } - resp, err := http.DefaultClient.Do(req) - if err != nil { - return fmt.Errorf("unable to fetch metadata for %s: %w", itemName, err) - } - defer resp.Body.Close() - - if resp.StatusCode != 200 { - return fmt.Errorf("unable fetch metadata for %s -- got status %q from GCS", itemName, resp.Status) - } - - var item bucketObject - if err := json.NewDecoder(resp.Body).Decode(&item); err != nil { - return fmt.Errorf("unable to unmarshal metadata for %s: %w", itemName, err) - } - - pl.Hash = &versions.Hash{ - Type: versions.MD5HashType, - Encoding: versions.Base64HashEncoding, - Value: item.Hash, - } - return nil -} diff --git a/tools/setup-envtest/store/store.go b/tools/setup-envtest/store/store.go index 6001eb2a4e..2ee0b64dec 100644 --- a/tools/setup-envtest/store/store.go +++ b/tools/setup-envtest/store/store.go @@ -38,7 +38,7 @@ func (i Item) String() string { } // Filter is a version spec & platform selector (i.e. platform -// potentially with wilcards) to filter store items. +// potentially with wildcards) to filter store items. type Filter struct { Version versions.Spec Platform versions.Platform diff --git a/tools/setup-envtest/store/store_test.go b/tools/setup-envtest/store/store_test.go index f0d83a1f79..b128be5933 100644 --- a/tools/setup-envtest/store/store_test.go +++ b/tools/setup-envtest/store/store_test.go @@ -125,36 +125,6 @@ var _ = Describe("Store", func() { }) }) - Describe("adding items (GCS archives)", func() { - archiveName := "kubebuilder-tools-1.16.3-linux-amd64.tar.gz" - - It("should support .tar.gz input", func() { - Expect(st.Add(logCtx(), newItem, makeFakeArchive(archiveName, "kubebuilder/bin/"))).To(Succeed()) - Expect(st.Has(newItem)).To(BeTrue(), "should have the item after adding it") - }) - - It("should extract binaries from the given archive to a directly to the item's directory, regardless of path", func() { - Expect(st.Add(logCtx(), newItem, makeFakeArchive(archiveName, "kubebuilder/bin/"))).To(Succeed()) - - dirName := newItem.Platform.BaseName(newItem.Version) - Expect(afero.ReadFile(st.Root, filepath.Join("k8s", dirName, "some-file"))).To(HavePrefix(archiveName + "some-file")) - Expect(afero.ReadFile(st.Root, filepath.Join("k8s", dirName, "other-file"))).To(HavePrefix(archiveName + "other-file")) - }) - - It("should clean up any existing item directory before creating the new one", func() { - item := localVersions[0] - Expect(st.Add(logCtx(), item, makeFakeArchive(archiveName, "kubebuilder/bin/"))).To(Succeed()) - Expect(st.Root.Stat(filepath.Join("k8s", item.Platform.BaseName(item.Version)))).NotTo(BeNil(), "new files should exist") - }) - It("should clean up if it errors before finishing", func() { - item := localVersions[0] - Expect(st.Add(logCtx(), item, new(bytes.Buffer))).NotTo(Succeed(), "should fail to extract") - _, err := st.Root.Stat(filepath.Join("k8s", item.Platform.BaseName(item.Version))) - Expect(err).To(HaveOccurred(), "the binaries dir for the item should be gone") - - }) - }) - Describe("adding items (controller-tools archives)", func() { archiveName := 
"envtest-v1.16.3-linux-amd64.tar.gz" diff --git a/tools/setup-envtest/versions/misc_test.go b/tools/setup-envtest/versions/misc_test.go index dcb87be8b2..a609f4dc60 100644 --- a/tools/setup-envtest/versions/misc_test.go +++ b/tools/setup-envtest/versions/misc_test.go @@ -95,8 +95,7 @@ var _ = Describe("Platform", func() { Specify("knows how to produce an archive name", func() { plat := Platform{OS: "linux", Arch: "amd64"} ver := Concrete{Major: 1, Minor: 16, Patch: 3} - Expect(plat.ArchiveName(true, ver)).To(Equal("kubebuilder-tools-1.16.3-linux-amd64.tar.gz")) - Expect(plat.ArchiveName(false, ver)).To(Equal("envtest-v1.16.3-linux-amd64.tar.gz")) + Expect(plat.ArchiveName(ver)).To(Equal("envtest-v1.16.3-linux-amd64.tar.gz")) }) Describe("parsing", func() { @@ -111,17 +110,6 @@ var _ = Describe("Platform", func() { Expect(ver).To(BeNil()) }) }) - Context("for archive names (GCS)", func() { - It("should accept strings of the form kubebuilder-tools-x.y.z-os-arch.tar.gz", func() { - ver, plat := ExtractWithPlatform(ArchiveRE, "kubebuilder-tools-1.16.3-linux-amd64.tar.gz") - Expect(ver).To(Equal(&Concrete{Major: 1, Minor: 16, Patch: 3})) - Expect(plat).To(Equal(Platform{OS: "linux", Arch: "amd64"})) - }) - It("should reject nonsense strings", func() { - ver, _ := ExtractWithPlatform(ArchiveRE, "kubebuilder-tools-1.16.3-linux-amd64.tar.sum") - Expect(ver).To(BeNil()) - }) - }) Context("for archive names (controller-tools)", func() { It("should accept strings of the form envtest-vx.y.z-os-arch.tar.gz", func() { ver, plat := ExtractWithPlatform(ArchiveRE, "envtest-v1.16.3-linux-amd64.tar.gz") diff --git a/tools/setup-envtest/versions/parse.go b/tools/setup-envtest/versions/parse.go index 21d38bb345..cd25710b2b 100644 --- a/tools/setup-envtest/versions/parse.go +++ b/tools/setup-envtest/versions/parse.go @@ -107,7 +107,7 @@ func PatchSelectorFromMatch(match []string, re *regexp.Regexp) PatchSelector { panic("invalid input passed as patch selector (invalid state)") } - // patch is optional, means wilcard if left off + // patch is optional, means wildcard if left off patch := AnyPoint if patchRaw := match[re.SubexpIndex("patch")]; patchRaw != "" { patch = PointVersionFromValidString(patchRaw) diff --git a/tools/setup-envtest/versions/platform.go b/tools/setup-envtest/versions/platform.go index 8b32ccd5bc..1cfbd05c06 100644 --- a/tools/setup-envtest/versions/platform.go +++ b/tools/setup-envtest/versions/platform.go @@ -37,11 +37,7 @@ func (p Platform) BaseName(ver Concrete) string { } // ArchiveName returns the full archive name for this version and platform. -// useGCS is deprecated and will be removed when the remote.GCSClient is removed. -func (p Platform) ArchiveName(useGCS bool, ver Concrete) string { - if useGCS { - return "kubebuilder-tools-" + p.BaseName(ver) + ".tar.gz" - } +func (p Platform) ArchiveName(ver Concrete) string { return "envtest-v" + p.BaseName(ver) + ".tar.gz" } @@ -56,11 +52,11 @@ type PlatformItem struct { // Hash of an archive with envtest binaries. type Hash struct { // Type of the hash. - // GCS uses MD5HashType, controller-tools uses SHA512HashType. + // controller-tools uses SHA512HashType. Type HashType // Encoding of the hash value. - // GCS uses Base64HashEncoding, controller-tools uses HexHashEncoding. + // controller-tools uses HexHashEncoding. Encoding HashEncoding // Value of the hash. @@ -122,7 +118,6 @@ var ( // VersionPlatformRE matches concrete version-platform strings. 
VersionPlatformRE = regexp.MustCompile(`^` + versionPlatformREBase + `$`) // ArchiveRE matches concrete version-platform.tar.gz strings. - // The archives published to GCS by kubebuilder use the "kubebuilder-tools-" prefix (e.g. "kubebuilder-tools-1.30.0-darwin-amd64.tar.gz"). // The archives published to GitHub releases by controller-tools use the "envtest-v" prefix (e.g. "envtest-v1.30.0-darwin-amd64.tar.gz"). - ArchiveRE = regexp.MustCompile(`^(kubebuilder-tools-|envtest-v)` + versionPlatformREBase + `\.tar\.gz$`) + ArchiveRE = regexp.MustCompile(`^envtest-v` + versionPlatformREBase + `\.tar\.gz$`) ) diff --git a/tools/setup-envtest/versions/version.go b/tools/setup-envtest/versions/version.go index 582ed7794e..945a95006f 100644 --- a/tools/setup-envtest/versions/version.go +++ b/tools/setup-envtest/versions/version.go @@ -72,7 +72,7 @@ func (s PatchSelector) AsConcrete() *Concrete { return &Concrete{ Major: s.Major, Minor: s.Minor, - Patch: int(s.Patch), // safe to cast, we've just checked wilcards above + Patch: int(s.Patch), // safe to cast, we've just checked wildcards above } } diff --git a/tools/setup-envtest/workflows/workflows_test.go b/tools/setup-envtest/workflows/workflows_test.go index 8c4007a415..27d4ec6770 100644 --- a/tools/setup-envtest/workflows/workflows_test.go +++ b/tools/setup-envtest/workflows/workflows_test.go @@ -48,288 +48,302 @@ const ( testStorePath = ".teststore" ) -const ( - gcsMode = "GCS" - httpMode = "HTTP" -) - -var _ = Describe("GCS Client", func() { - WorkflowTest(gcsMode) -}) +var _ = Describe("Workflows", func() { + var ( + env *envp.Env + out *bytes.Buffer + server *ghttp.Server + remoteHTTPItems itemsHTTP + ) + BeforeEach(func() { + out = new(bytes.Buffer) + baseFs := afero.Afero{Fs: afero.NewMemMapFs()} + + server = ghttp.NewServer() + + client := &remote.HTTPClient{ + Log: testLog.WithName("http-client"), + IndexURL: fmt.Sprintf("http://%s/%s", server.Addr(), "envtest-releases.yaml"), + } + + env = &envp.Env{ + Log: testLog, + VerifySum: true, // on by default + FS: baseFs, + Store: &store.Store{Root: afero.NewBasePathFs(baseFs, testStorePath)}, + Out: out, + Platform: versions.PlatformItem{ // default + Platform: versions.Platform{ + OS: "linux", + Arch: "amd64", + }, + }, + Client: client, + } -var _ = Describe("HTTP Client", func() { - WorkflowTest(httpMode) -}) + fakeStore(env.FS, testStorePath) + remoteHTTPItems = remoteVersionsHTTP + }) + JustBeforeEach(func() { + handleRemoteVersionsHTTP(server, remoteHTTPItems) + }) + AfterEach(func() { + server.Close() + server = nil + }) -func WorkflowTest(testMode string) { - Describe("Workflows", func() { - var ( - env *envp.Env - out *bytes.Buffer - server *ghttp.Server - remoteGCSItems []item - remoteHTTPItems itemsHTTP - ) + Describe("use", func() { + var flow wf.Use BeforeEach(func() { - out = new(bytes.Buffer) - baseFs := afero.Afero{Fs: afero.NewMemMapFs()} - - server = ghttp.NewServer() - - var client remote.Client - switch testMode { - case gcsMode: - client = &remote.GCSClient{ //nolint:staticcheck // deprecation accepted for now - Log: testLog.WithName("gcs-client"), - Bucket: "kubebuilder-tools-test", // test custom bucket functionality too - Server: server.Addr(), - Insecure: true, // no https in httptest :-( - } - case httpMode: - client = &remote.HTTPClient{ - Log: testLog.WithName("http-client"), - IndexURL: fmt.Sprintf("http://%s/%s", server.Addr(), "envtest-releases.yaml"), - } + // some defaults for most tests + env.Version = versions.Spec{ + Selector: ver(1, 16, 0), } - - env = 
&envp.Env{ - Log: testLog, - VerifySum: true, // on by default - FS: baseFs, - Store: &store.Store{Root: afero.NewBasePathFs(baseFs, testStorePath)}, - Out: out, - Platform: versions.PlatformItem{ // default - Platform: versions.Platform{ - OS: "linux", - Arch: "amd64", - }, - }, - Client: client, - } - - fakeStore(env.FS, testStorePath) - remoteGCSItems = remoteVersionsGCS - remoteHTTPItems = remoteVersionsHTTP - }) - JustBeforeEach(func() { - switch testMode { - case gcsMode: - handleRemoteVersionsGCS(server, remoteGCSItems) - case httpMode: - handleRemoteVersionsHTTP(server, remoteHTTPItems) + flow = wf.Use{ + PrintFormat: envp.PrintPath, } }) - AfterEach(func() { - server.Close() - server = nil + + It("should initialize the store if it doesn't exist", func() { + Expect(env.FS.RemoveAll(testStorePath)).To(Succeed()) + // need to set this to a valid remote version cause our store is now empty + env.Version = versions.Spec{Selector: ver(1, 16, 4)} + flow.Do(env) + Expect(env.FS.Stat(testStorePath)).NotTo(BeNil()) }) - Describe("use", func() { - var flow wf.Use + Context("when use env is set", func() { BeforeEach(func() { - // some defaults for most tests - env.Version = versions.Spec{ - Selector: ver(1, 16, 0), - } - flow = wf.Use{ - PrintFormat: envp.PrintPath, - } + flow.UseEnv = true }) - - It("should initialize the store if it doesn't exist", func() { - Expect(env.FS.RemoveAll(testStorePath)).To(Succeed()) - // need to set this to a valid remote version cause our store is now empty - env.Version = versions.Spec{Selector: ver(1, 16, 4)} + It("should fall back to normal behavior when the env is not set", func() { flow.Do(env) - Expect(env.FS.Stat(testStorePath)).NotTo(BeNil()) + Expect(out.String()).To(HaveSuffix("/1.16.0-linux-amd64"), "should fall back to a local version") }) - - Context("when use env is set", func() { - BeforeEach(func() { - flow.UseEnv = true - }) - It("should fall back to normal behavior when the env is not set", func() { - flow.Do(env) - Expect(out.String()).To(HaveSuffix("/1.16.0-linux-amd64"), "should fall back to a local version") - }) - It("should fall back to normal behavior if binaries are missing", func() { - flow.AssetsPath = ".teststore/missing-binaries" - flow.Do(env) - Expect(out.String()).To(HaveSuffix("/1.16.0-linux-amd64"), "should fall back to a local version") - }) - It("should use the value of the env if it contains the right binaries", func() { - flow.AssetsPath = ".teststore/good-version" - flow.Do(env) - Expect(out.String()).To(Equal(flow.AssetsPath)) - }) - It("should not try and check the version of the binaries", func() { - flow.AssetsPath = ".teststore/wrong-version" - flow.Do(env) - Expect(out.String()).To(Equal(flow.AssetsPath)) - }) - It("should not need to contact the network", func() { - server.Close() - flow.AssetsPath = ".teststore/good-version" - flow.Do(env) - // expect to not get a panic -- if we do, it'll cause the test to fail - }) + It("should fall back to normal behavior if binaries are missing", func() { + flow.AssetsPath = ".teststore/missing-binaries" + flow.Do(env) + Expect(out.String()).To(HaveSuffix("/1.16.0-linux-amd64"), "should fall back to a local version") }) - - Context("when downloads are disabled", func() { - BeforeEach(func() { - env.NoDownload = true - server.Close() - }) - - // It("should not contact the network") is a gimme here, because we - // call server.Close() above. 
- - It("should error if no matches are found locally", func() { - defer shouldHaveError() - env.Version.Selector = versions.Concrete{Major: 9001} - flow.Do(env) - }) - It("should settle for the latest local match if latest is requested", func() { - env.Version = versions.Spec{ - CheckLatest: true, - Selector: versions.PatchSelector{ - Major: 1, - Minor: 16, - Patch: versions.AnyPoint, - }, - } - - flow.Do(env) - - // latest on "server" is 1.16.4, shouldn't use that - Expect(out.String()).To(HaveSuffix("/1.16.1-linux-amd64"), "should use the latest local version") - }) + It("should use the value of the env if it contains the right binaries", func() { + flow.AssetsPath = ".teststore/good-version" + flow.Do(env) + Expect(out.String()).To(Equal(flow.AssetsPath)) + }) + It("should not try and check the version of the binaries", func() { + flow.AssetsPath = ".teststore/wrong-version" + flow.Do(env) + Expect(out.String()).To(Equal(flow.AssetsPath)) }) + It("should not need to contact the network", func() { + server.Close() + flow.AssetsPath = ".teststore/good-version" + flow.Do(env) + // expect to not get a panic -- if we do, it'll cause the test to fail + }) + }) - Context("if latest is requested", func() { - It("should contact the network to see if there's anything newer", func() { - env.Version = versions.Spec{ - CheckLatest: true, - Selector: versions.PatchSelector{ - Major: 1, Minor: 16, Patch: versions.AnyPoint, - }, - } - flow.Do(env) - Expect(out.String()).To(HaveSuffix("/1.16.4-linux-amd64"), "should use the latest remote version") - }) - It("should still use the latest local if the network doesn't have anything newer", func() { - env.Version = versions.Spec{ - CheckLatest: true, - Selector: versions.PatchSelector{ - Major: 1, Minor: 14, Patch: versions.AnyPoint, - }, - } + Context("when downloads are disabled", func() { + BeforeEach(func() { + env.NoDownload = true + server.Close() + }) - flow.Do(env) + // It("should not contact the network") is a gimme here, because we + // call server.Close() above. 
- // latest on the server is 1.14.1, latest local is 1.14.26 - Expect(out.String()).To(HaveSuffix("/1.14.26-linux-amd64"), "should use the latest local version") - }) + It("should error if no matches are found locally", func() { + defer shouldHaveError() + env.Version.Selector = versions.Concrete{Major: 9001} + flow.Do(env) }) - - It("should check local for a match first", func() { - server.Close() // confirm no network + It("should settle for the latest local match if latest is requested", func() { env.Version = versions.Spec{ - Selector: versions.TildeSelector{Concrete: ver(1, 16, 0)}, + CheckLatest: true, + Selector: versions.PatchSelector{ + Major: 1, + Minor: 16, + Patch: versions.AnyPoint, + }, } + flow.Do(env) - // latest on the server is 1.16.4, latest local is 1.16.1 + + // latest on "server" is 1.16.4, shouldn't use that Expect(out.String()).To(HaveSuffix("/1.16.1-linux-amd64"), "should use the latest local version") }) + }) - It("should fall back to the network if no local matches are found", func() { + Context("if latest is requested", func() { + It("should contact the network to see if there's anything newer", func() { env.Version = versions.Spec{ - Selector: versions.TildeSelector{Concrete: ver(1, 19, 0)}, + CheckLatest: true, + Selector: versions.PatchSelector{ + Major: 1, Minor: 16, Patch: versions.AnyPoint, + }, } flow.Do(env) - Expect(out.String()).To(HaveSuffix("/1.19.2-linux-amd64"), "should have a remote version") + Expect(out.String()).To(HaveSuffix("/1.16.4-linux-amd64"), "should use the latest remote version") }) - - It("should error out if no matches can be found anywhere", func() { - defer shouldHaveError() + It("should still use the latest local if the network doesn't have anything newer", func() { env.Version = versions.Spec{ - Selector: versions.TildeSelector{Concrete: ver(0, 0, 1)}, + CheckLatest: true, + Selector: versions.PatchSelector{ + Major: 1, Minor: 14, Patch: versions.AnyPoint, + }, } + flow.Do(env) + + // latest on the server is 1.14.1, latest local is 1.14.26 + Expect(out.String()).To(HaveSuffix("/1.14.26-linux-amd64"), "should use the latest local version") }) + }) - It("should skip local versions matches with non-matching platforms", func() { - env.NoDownload = true // so we get an error - defer shouldHaveError() - env.Version = versions.Spec{ - // has non-matching local versions - Selector: ver(1, 13, 0), - } + It("should check local for a match first", func() { + server.Close() // confirm no network + env.Version = versions.Spec{ + Selector: versions.TildeSelector{Concrete: ver(1, 16, 0)}, + } + flow.Do(env) + // latest on the server is 1.16.4, latest local is 1.16.1 + Expect(out.String()).To(HaveSuffix("/1.16.1-linux-amd64"), "should use the latest local version") + }) - flow.Do(env) - }) + It("should fall back to the network if no local matches are found", func() { + env.Version = versions.Spec{ + Selector: versions.TildeSelector{Concrete: ver(1, 19, 0)}, + } + flow.Do(env) + Expect(out.String()).To(HaveSuffix("/1.19.2-linux-amd64"), "should have a remote version") + }) + + It("should error out if no matches can be found anywhere", func() { + defer shouldHaveError() + env.Version = versions.Spec{ + Selector: versions.TildeSelector{Concrete: ver(0, 0, 1)}, + } + flow.Do(env) + }) + + It("should skip local versions matches with non-matching platforms", func() { + env.NoDownload = true // so we get an error + defer shouldHaveError() + env.Version = versions.Spec{ + // has non-matching local versions + Selector: ver(1, 13, 0), + } + + flow.Do(env) 
+ }) + + It("should skip remote version matches with non-matching platforms", func() { + defer shouldHaveError() + env.Version = versions.Spec{ + // has a non-matching remote version + Selector: versions.TildeSelector{Concrete: ver(1, 11, 1)}, + } + flow.Do(env) + }) + + Describe("verifying the checksum", func() { + BeforeEach(func() { + // Recreate remoteHTTPItems to not impact others tests. + remoteHTTPItems = makeContentsHTTP(remoteNamesHTTP) + remoteHTTPItems.index.Releases["v86.75.309"] = map[string]remote.Archive{ + "envtest-v86.75.309-linux-amd64.tar.gz": { + SelfLink: "not used in this test", + Hash: "nottherightone!", + }, + } + // need a valid tar.gz file to not error from that + remoteHTTPItems.contents["envtest-v86.75.309-linux-amd64.tar.gz"] = remoteHTTPItems.contents["envtest-v1.10-darwin-amd64.tar.gz"] - It("should skip remote version matches with non-matching platforms", func() { - defer shouldHaveError() env.Version = versions.Spec{ - // has a non-matching remote version - Selector: versions.TildeSelector{Concrete: ver(1, 11, 1)}, + Selector: ver(86, 75, 309), } + }) + Specify("when enabled, should fail if the downloaded hash doesn't match", func() { + defer shouldHaveError() flow.Do(env) }) - - Describe("verifying the checksum", func() { - BeforeEach(func() { - remoteGCSItems = append(remoteGCSItems, item{ - meta: bucketObject{ - Name: "kubebuilder-tools-86.75.309-linux-amd64.tar.gz", - Hash: "nottherightone!", - }, - contents: remoteGCSItems[0].contents, // need a valid tar.gz file to not error from that - }) - // Recreate remoteHTTPItems to not impact others tests. - remoteHTTPItems = makeContentsHTTP(remoteNamesHTTP) - remoteHTTPItems.index.Releases["v86.75.309"] = map[string]remote.Archive{ - "envtest-v86.75.309-linux-amd64.tar.gz": { - SelfLink: "not used in this test", - Hash: "nottherightone!", - }, - } - // need a valid tar.gz file to not error from that - remoteHTTPItems.contents["envtest-v86.75.309-linux-amd64.tar.gz"] = remoteHTTPItems.contents["envtest-v1.10-darwin-amd64.tar.gz"] - - env.Version = versions.Spec{ - Selector: ver(86, 75, 309), - } - }) - Specify("when enabled, should fail if the downloaded hash doesn't match", func() { - defer shouldHaveError() - flow.Do(env) - }) - Specify("when disabled, shouldn't check the checksum at all", func() { - env.VerifySum = false - flow.Do(env) - }) + Specify("when disabled, shouldn't check the checksum at all", func() { + env.VerifySum = false + flow.Do(env) }) }) + }) - Describe("list", func() { - // split by fields so we're not matching on whitespace - listFields := func() [][]string { - resLines := strings.Split(strings.TrimSpace(out.String()), "\n") - resFields := make([][]string, len(resLines)) - for i, line := range resLines { - resFields[i] = strings.Fields(line) - } - return resFields + Describe("list", func() { + // split by fields so we're not matching on whitespace + listFields := func() [][]string { + resLines := strings.Split(strings.TrimSpace(out.String()), "\n") + resFields := make([][]string, len(resLines)) + for i, line := range resLines { + resFields[i] = strings.Fields(line) } + return resFields + } - Context("when downloads are disabled", func() { + Context("when downloads are disabled", func() { + BeforeEach(func() { + server.Close() // ensure no network + env.NoDownload = true + }) + It("should include local contents sorted by version", func() { + env.Version = versions.AnyVersion + env.Platform.Platform = versions.Platform{OS: "*", Arch: "*"} + wf.List{}.Do(env) + + 
Expect(listFields()).To(Equal([][]string{ + {"(installed)", "v1.17.9", "linux/amd64"}, + {"(installed)", "v1.16.2", "ifonlysingularitywasstillathing/amd64"}, + {"(installed)", "v1.16.2", "linux/yourimagination"}, + {"(installed)", "v1.16.1", "linux/amd64"}, + {"(installed)", "v1.16.0", "linux/amd64"}, + {"(installed)", "v1.14.26", "hyperwarp/pixiedust"}, + {"(installed)", "v1.14.26", "linux/amd64"}, + })) + }) + It("should skip non-matching local contents", func() { + env.Version.Selector = versions.PatchSelector{ + Major: 1, Minor: 16, Patch: versions.AnyPoint, + } + env.Platform.Arch = "*" + wf.List{}.Do(env) + + Expect(listFields()).To(Equal([][]string{ + {"(installed)", "v1.16.2", "linux/yourimagination"}, + {"(installed)", "v1.16.1", "linux/amd64"}, + {"(installed)", "v1.16.0", "linux/amd64"}, + })) + }) + }) + Context("when downloads are enabled", func() { + Context("when sorting", func() { BeforeEach(func() { - server.Close() // ensure no network - env.NoDownload = true + // Recreate remoteHTTPItems to not impact others tests. + remoteHTTPItems = makeContentsHTTP(remoteNamesHTTP) + // Also only keep the first 7 items. + // Get the first 7 archive names + var archiveNames []string + for _, release := range remoteHTTPItems.index.Releases { + for archiveName := range release { + archiveNames = append(archiveNames, archiveName) + } + } + sort.Strings(archiveNames) + archiveNamesSet := sets.Set[string]{}.Insert(archiveNames[:7]...) + // Delete all other archives + for _, release := range remoteHTTPItems.index.Releases { + for archiveName := range release { + if !archiveNamesSet.Has(archiveName) { + delete(release, archiveName) + } + } + } }) - It("should include local contents sorted by version", func() { + It("should sort local & remote by version", func() { env.Version = versions.AnyVersion env.Platform.Platform = versions.Platform{OS: "*", Arch: "*"} wf.List{}.Do(env) @@ -342,160 +356,91 @@ func WorkflowTest(testMode string) { {"(installed)", "v1.16.0", "linux/amd64"}, {"(installed)", "v1.14.26", "hyperwarp/pixiedust"}, {"(installed)", "v1.14.26", "linux/amd64"}, - })) - }) - It("should skip non-matching local contents", func() { - env.Version.Selector = versions.PatchSelector{ - Major: 1, Minor: 16, Patch: versions.AnyPoint, - } - env.Platform.Arch = "*" - wf.List{}.Do(env) - - Expect(listFields()).To(Equal([][]string{ - {"(installed)", "v1.16.2", "linux/yourimagination"}, - {"(installed)", "v1.16.1", "linux/amd64"}, - {"(installed)", "v1.16.0", "linux/amd64"}, + {"(available)", "v1.11.1", "potato/cherrypie"}, + {"(available)", "v1.11.0", "darwin/amd64"}, + {"(available)", "v1.11.0", "linux/amd64"}, + {"(available)", "v1.10.1", "darwin/amd64"}, + {"(available)", "v1.10.1", "linux/amd64"}, })) }) }) - Context("when downloads are enabled", func() { - Context("when sorting", func() { - BeforeEach(func() { - // shorten the list a bit for expediency - remoteGCSItems = remoteGCSItems[:7] - - // Recreate remoteHTTPItems to not impact others tests. - remoteHTTPItems = makeContentsHTTP(remoteNamesHTTP) - // Also only keep the first 7 items. - // Get the first 7 archive names - var archiveNames []string - for _, release := range remoteHTTPItems.index.Releases { - for archiveName := range release { - archiveNames = append(archiveNames, archiveName) - } - } - sort.Strings(archiveNames) - archiveNamesSet := sets.Set[string]{}.Insert(archiveNames[:7]...) 
- // Delete all other archives - for _, release := range remoteHTTPItems.index.Releases { - for archiveName := range release { - if !archiveNamesSet.Has(archiveName) { - delete(release, archiveName) - } - } - } - }) - It("should sort local & remote by version", func() { - env.Version = versions.AnyVersion - env.Platform.Platform = versions.Platform{OS: "*", Arch: "*"} - wf.List{}.Do(env) - - Expect(listFields()).To(Equal([][]string{ - {"(installed)", "v1.17.9", "linux/amd64"}, - {"(installed)", "v1.16.2", "ifonlysingularitywasstillathing/amd64"}, - {"(installed)", "v1.16.2", "linux/yourimagination"}, - {"(installed)", "v1.16.1", "linux/amd64"}, - {"(installed)", "v1.16.0", "linux/amd64"}, - {"(installed)", "v1.14.26", "hyperwarp/pixiedust"}, - {"(installed)", "v1.14.26", "linux/amd64"}, - {"(available)", "v1.11.1", "potato/cherrypie"}, - {"(available)", "v1.11.0", "darwin/amd64"}, - {"(available)", "v1.11.0", "linux/amd64"}, - {"(available)", "v1.10.1", "darwin/amd64"}, - {"(available)", "v1.10.1", "linux/amd64"}, - })) - }) - }) - It("should skip non-matching remote contents", func() { - env.Version.Selector = versions.PatchSelector{ - Major: 1, Minor: 16, Patch: versions.AnyPoint, - } - env.Platform.Arch = "*" - wf.List{}.Do(env) - - Expect(listFields()).To(Equal([][]string{ - {"(installed)", "v1.16.2", "linux/yourimagination"}, - {"(installed)", "v1.16.1", "linux/amd64"}, - {"(installed)", "v1.16.0", "linux/amd64"}, - {"(available)", "v1.16.4", "linux/amd64"}, - })) - }) + It("should skip non-matching remote contents", func() { + env.Version.Selector = versions.PatchSelector{ + Major: 1, Minor: 16, Patch: versions.AnyPoint, + } + env.Platform.Arch = "*" + wf.List{}.Do(env) + + Expect(listFields()).To(Equal([][]string{ + {"(installed)", "v1.16.2", "linux/yourimagination"}, + {"(installed)", "v1.16.1", "linux/amd64"}, + {"(installed)", "v1.16.0", "linux/amd64"}, + {"(available)", "v1.16.4", "linux/amd64"}, + })) }) }) + }) - Describe("cleanup", func() { - BeforeEach(func() { - server.Close() // ensure no network - flow := wf.Cleanup{} - env.Version = versions.AnyVersion - env.Platform.Arch = "*" - flow.Do(env) - }) + Describe("cleanup", func() { + BeforeEach(func() { + server.Close() // ensure no network + flow := wf.Cleanup{} + env.Version = versions.AnyVersion + env.Platform.Arch = "*" + flow.Do(env) + }) - It("should remove matching versions from the store & keep non-matching ones", func() { - entries, err := env.FS.ReadDir(".teststore/k8s") - Expect(err).NotTo(HaveOccurred(), "should be able to read the store") - Expect(entries).To(ConsistOf( - WithTransform(fs.FileInfo.Name, Equal("1.16.2-ifonlysingularitywasstillathing-amd64")), - WithTransform(fs.FileInfo.Name, Equal("1.14.26-hyperwarp-pixiedust")), - )) - }) + It("should remove matching versions from the store & keep non-matching ones", func() { + entries, err := env.FS.ReadDir(".teststore/k8s") + Expect(err).NotTo(HaveOccurred(), "should be able to read the store") + Expect(entries).To(ConsistOf( + WithTransform(fs.FileInfo.Name, Equal("1.16.2-ifonlysingularitywasstillathing-amd64")), + WithTransform(fs.FileInfo.Name, Equal("1.14.26-hyperwarp-pixiedust")), + )) }) + }) - Describe("sideload", func() { - var ( - flow wf.Sideload - ) + Describe("sideload", func() { + var ( + flow wf.Sideload + ) - var expectedPrefix string - if testMode == gcsMode { - // remote version fake contents are prefixed by the - // name for easier debugging, so we can use that here - expectedPrefix = remoteVersionsGCS[0].meta.Name - } - if testMode == 
httpMode { - // hard coding to one of the archives in remoteVersionsHTTP as we can't pick the "first" of a map. - expectedPrefix = "envtest-v1.10-darwin-amd64.tar.gz" - } + // hard coding to one of the archives in remoteVersionsHTTP as we can't pick the "first" of a map. + expectedPrefix := "envtest-v1.10-darwin-amd64.tar.gz" - BeforeEach(func() { - server.Close() // ensure no network - var content []byte - if testMode == gcsMode { - content = remoteVersionsGCS[0].contents - } - if testMode == httpMode { - content = remoteVersionsHTTP.contents[expectedPrefix] - } - flow.Input = bytes.NewReader(content) - flow.PrintFormat = envp.PrintPath - }) - It("should initialize the store if it doesn't exist", func() { - env.Version.Selector = ver(1, 10, 0) - Expect(env.FS.RemoveAll(testStorePath)).To(Succeed()) - flow.Do(env) - Expect(env.FS.Stat(testStorePath)).NotTo(BeNil()) - }) - It("should fail if a non-concrete version is given", func() { - defer shouldHaveError() - env.Version = versions.LatestVersion - flow.Do(env) - }) - It("should fail if a non-concrete platform is given", func() { - defer shouldHaveError() - env.Version.Selector = ver(1, 10, 0) - env.Platform.Arch = "*" - flow.Do(env) - }) - It("should load the given gizipped tarball into our store as the given version", func() { - env.Version.Selector = ver(1, 10, 0) - flow.Do(env) - baseName := env.Platform.BaseName(*env.Version.AsConcrete()) - expectedPath := filepath.Join(".teststore/k8s", baseName, "some-file") - outContents, err := env.FS.ReadFile(expectedPath) - Expect(err).NotTo(HaveOccurred(), "should be able to load the unzipped file") - Expect(string(outContents)).To(HavePrefix(expectedPrefix), "should have the debugging prefix") - }) + BeforeEach(func() { + server.Close() // ensure no network + + content := remoteVersionsHTTP.contents[expectedPrefix] + + flow.Input = bytes.NewReader(content) + flow.PrintFormat = envp.PrintPath + }) + It("should initialize the store if it doesn't exist", func() { + env.Version.Selector = ver(1, 10, 0) + Expect(env.FS.RemoveAll(testStorePath)).To(Succeed()) + flow.Do(env) + Expect(env.FS.Stat(testStorePath)).NotTo(BeNil()) + }) + It("should fail if a non-concrete version is given", func() { + defer shouldHaveError() + env.Version = versions.LatestVersion + flow.Do(env) + }) + It("should fail if a non-concrete platform is given", func() { + defer shouldHaveError() + env.Version.Selector = ver(1, 10, 0) + env.Platform.Arch = "*" + flow.Do(env) + }) + It("should load the given gizipped tarball into our store as the given version", func() { + env.Version.Selector = ver(1, 10, 0) + flow.Do(env) + baseName := env.Platform.BaseName(*env.Version.AsConcrete()) + expectedPath := filepath.Join(".teststore/k8s", baseName, "some-file") + outContents, err := env.FS.ReadFile(expectedPath) + Expect(err).NotTo(HaveOccurred(), "should be able to load the unzipped file") + Expect(string(outContents)).To(HavePrefix(expectedPrefix), "should have the debugging prefix") }) }) -} +}) diff --git a/tools/setup-envtest/workflows/workflows_testutils_test.go b/tools/setup-envtest/workflows/workflows_testutils_test.go index e796e5d16c..6bf6db38c3 100644 --- a/tools/setup-envtest/workflows/workflows_testutils_test.go +++ b/tools/setup-envtest/workflows/workflows_testutils_test.go @@ -7,10 +7,8 @@ import ( "archive/tar" "bytes" "compress/gzip" - "crypto/md5" //nolint:gosec "crypto/rand" "crypto/sha512" - "encoding/base64" "encoding/hex" "fmt" "net/http" @@ -27,45 +25,6 @@ import ( ) var ( - remoteNamesGCS = []string{ - 
"kubebuilder-tools-1.10-darwin-amd64.tar.gz", - "kubebuilder-tools-1.10-linux-amd64.tar.gz", - "kubebuilder-tools-1.10.1-darwin-amd64.tar.gz", - "kubebuilder-tools-1.10.1-linux-amd64.tar.gz", - "kubebuilder-tools-1.11.0-darwin-amd64.tar.gz", - "kubebuilder-tools-1.11.0-linux-amd64.tar.gz", - "kubebuilder-tools-1.11.1-potato-cherrypie.tar.gz", - "kubebuilder-tools-1.12.3-darwin-amd64.tar.gz", - "kubebuilder-tools-1.12.3-linux-amd64.tar.gz", - "kubebuilder-tools-1.13.1-darwin-amd64.tar.gz", - "kubebuilder-tools-1.13.1-linux-amd64.tar.gz", - "kubebuilder-tools-1.14.1-darwin-amd64.tar.gz", - "kubebuilder-tools-1.14.1-linux-amd64.tar.gz", - "kubebuilder-tools-1.15.5-darwin-amd64.tar.gz", - "kubebuilder-tools-1.15.5-linux-amd64.tar.gz", - "kubebuilder-tools-1.16.4-darwin-amd64.tar.gz", - "kubebuilder-tools-1.16.4-linux-amd64.tar.gz", - "kubebuilder-tools-1.17.9-darwin-amd64.tar.gz", - "kubebuilder-tools-1.17.9-linux-amd64.tar.gz", - "kubebuilder-tools-1.19.0-darwin-amd64.tar.gz", - "kubebuilder-tools-1.19.0-linux-amd64.tar.gz", - "kubebuilder-tools-1.19.2-darwin-amd64.tar.gz", - "kubebuilder-tools-1.19.2-linux-amd64.tar.gz", - "kubebuilder-tools-1.19.2-linux-arm64.tar.gz", - "kubebuilder-tools-1.19.2-linux-ppc64le.tar.gz", - "kubebuilder-tools-1.20.2-darwin-amd64.tar.gz", - "kubebuilder-tools-1.20.2-linux-amd64.tar.gz", - "kubebuilder-tools-1.20.2-linux-arm64.tar.gz", - "kubebuilder-tools-1.20.2-linux-ppc64le.tar.gz", - "kubebuilder-tools-1.9-darwin-amd64.tar.gz", - "kubebuilder-tools-1.9-linux-amd64.tar.gz", - "kubebuilder-tools-v1.19.2-darwin-amd64.tar.gz", - "kubebuilder-tools-v1.19.2-linux-amd64.tar.gz", - "kubebuilder-tools-v1.19.2-linux-arm64.tar.gz", - "kubebuilder-tools-v1.19.2-linux-ppc64le.tar.gz", - } - remoteVersionsGCS = makeContentsGCS(remoteNamesGCS) - remoteNamesHTTP = remote.Index{ Releases: map[string]remote.Release{ "v1.10.0": map[string]remote.Archive{ @@ -149,85 +108,6 @@ var ( } ) -type item struct { - meta bucketObject - contents []byte -} - -// objectList is the parts we need of the GCS "list-objects-in-bucket" endpoint. -type objectList struct { - Items []bucketObject `json:"items"` -} - -// bucketObject is the parts we need of the GCS object metadata. 
-type bucketObject struct { - Name string `json:"name"` - Hash string `json:"md5Hash"` -} - -func makeContentsGCS(names []string) []item { - res := make([]item, len(names)) - for i, name := range names { - var chunk [1024 * 48]byte // 1.5 times our chunk read size in GetVersion - copy(chunk[:], name) - if _, err := rand.Read(chunk[len(name):]); err != nil { - panic(err) - } - res[i] = verWithGCS(name, chunk[:]) - } - return res -} - -func verWithGCS(name string, contents []byte) item { - out := new(bytes.Buffer) - gzipWriter := gzip.NewWriter(out) - tarWriter := tar.NewWriter(gzipWriter) - err := tarWriter.WriteHeader(&tar.Header{ - Name: "kubebuilder/bin/some-file", - Size: int64(len(contents)), - Mode: 0777, // so we can check that we fix this later - }) - if err != nil { - panic(err) - } - _, err = tarWriter.Write(contents) - if err != nil { - panic(err) - } - tarWriter.Close() - gzipWriter.Close() - res := item{ - meta: bucketObject{Name: name}, - contents: out.Bytes(), - } - hash := md5.Sum(res.contents) //nolint:gosec - res.meta.Hash = base64.StdEncoding.EncodeToString(hash[:]) - return res -} - -func handleRemoteVersionsGCS(server *ghttp.Server, versions []item) { - list := objectList{Items: make([]bucketObject, len(versions))} - for i, ver := range versions { - ver := ver // copy to avoid capturing the iteration variable - list.Items[i] = ver.meta - server.RouteToHandler("GET", "/storage/v1/b/kubebuilder-tools-test/o/"+ver.meta.Name, func(resp http.ResponseWriter, req *http.Request) { - if req.URL.Query().Get("alt") == "media" { - resp.WriteHeader(http.StatusOK) - Expect(resp.Write(ver.contents)).To(Equal(len(ver.contents))) - } else { - ghttp.RespondWithJSONEncoded( - http.StatusOK, - ver.meta, - )(resp, req) - } - }) - } - server.RouteToHandler("GET", "/storage/v1/b/kubebuilder-tools-test/o", ghttp.RespondWithJSONEncoded( - http.StatusOK, - list, - )) -} - type itemsHTTP struct { index remote.Index contents map[string][]byte