feat: add patches to bcr entries
kormide committed Sep 25, 2023
1 parent 8717f83 commit 262c9e1
Showing 5 changed files with 246 additions and 13 deletions.
19 changes: 19 additions & 0 deletions README.md
@@ -28,6 +28,25 @@ You can work around this by setting a [fixed releaser](./templates/README.md#opt

You can publish BCR entries for multiple modules that exist in your git repository by configuring [`moduleRoots`](./templates/README.md#optional-configyml).

## Including patches

Include patches in the BCR entry by adding them under `.bcr/patches` in your ruleset repository. All patches must have the `.patch` extension and be in the `-p1` format.
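
For reference, `-p1` means each path in the patch carries one leading directory component (conventionally `a/` and `b/`) that is stripped when the patch is applied. A minimal sketch of such a patch, using a hypothetical module name and version bump:

```diff
--- a/MODULE.bazel
+++ b/MODULE.bazel
@@ -1,5 +1,5 @@
 module(
     name = "rules_foo",
-    version = "0.0.0",
+    version = "1.2.3",
     compatibility_level = 1,
 )
```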

For example, a patch in `.bcr/patches/remove_dev_deps.patch` will be included in the entry's pull request and will be referenced in the
corresponding `source.json` file:

```json
{
...
"patch_strip": 0,
"patches": {
"remove_dev_deps.patch": "sha256-DXvBJbXZWf3hITOIjeJbgER6UOXIB6ogpgullT+oP4k="
}
}
```

To patch a submodule, add the patch to a patches folder under the submodule's path, `.bcr/[sub/module]/patches`, where `sub/module` is the path to the submodule's WORKSPACE folder relative to the repository root (see the example layout below).
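
As an illustration, a hypothetical ruleset with a root module and a second module rooted at `sub/module` might lay out its patches like this (`fix_deps.patch` is a made-up file name):

```
.bcr/
├── patches/                  # patches for the root module
│   └── remove_dev_deps.patch
└── sub/
    └── module/
        └── patches/          # patches for the module at sub/module
            └── fix_deps.patch
```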

## Reporting issues

Create an issue in this repository for support.
153 changes: 146 additions & 7 deletions src/domain/create-entry.spec.ts
@@ -1,7 +1,8 @@
import { beforeEach, describe, expect, jest, test } from "@jest/globals";
import { mocked, Mocked } from "jest-mock";
import { Mocked, mocked } from "jest-mock";
import { randomUUID } from "node:crypto";
import fs from "node:fs";
import fs, { PathLike } from "node:fs";
import os from "node:os";
import path from "node:path";
import { GitClient } from "../infrastructure/git";
import { GitHubClient } from "../infrastructure/github";
@@ -54,6 +55,12 @@ beforeEach(() => {
]);
}) as any);

mocked(fs.readdirSync).mockImplementation(((p: PathLike, options: any) => {
return Object.keys(mockedFileReads)
.filter((f) => path.dirname(f) === p)
.map((f) => path.basename(f));
}) as any);

mocked(fs.existsSync).mockImplementation(((path: string) => {
if (path in mockedFileReads) {
return true;
@@ -70,12 +77,13 @@ beforeEach(() => {
extractModuleFile: jest.fn(async () => {
return new ModuleFile(EXTRACTED_MODULE_PATH);
}),
diskPath: path.join(os.tmpdir(), "archive.tar.gz"),
} as unknown as ReleaseArchive;
});

mockGitClient = mocked(new GitClient());
mockGithubClient = mocked(new GitHubClient());
mocked(computeIntegrityHash).mockReturnValue(randomUUID());
mocked(computeIntegrityHash).mockReturnValue(`sha256-${randomUUID()}`);
Repository.gitClient = mockGitClient;
createEntryService = new CreateEntryService(mockGitClient, mockGithubClient);
});
@@ -536,7 +544,7 @@ describe("createEntryFiles", () => {
).toEqual(hash);
});

test("sets the patch_strip to 0 when a release version patch is added", async () => {
test("sets the patch_strip to 1 when a release version patch is added", async () => {
mockRulesetFiles({
extractedModuleName: "rules_bar",
extractedModuleVersion: "1.2.3",
@@ -555,10 +563,69 @@
(call[0] as string).includes("source.json")
);
const writtenSourceContent = JSON.parse(writeSourceCall[1] as string);
expect(writtenSourceContent.patch_strip).toEqual(0);
expect(writtenSourceContent.patch_strip).toEqual(1);
});

test("adds a patch entry for each patch in the patches folder", async () => {
mockRulesetFiles({
extractedModuleName: "rules_bar",
extractedModuleVersion: "1.2.3",
patches: {
"patch1.patch": randomUUID(),
"patch2.patch": randomUUID(),
},
});

const tag = "v1.2.3";
const rulesetRepo = await RulesetRepository.create("repo", "owner", tag);
const bcrRepo = CANONICAL_BCR;

const hash1 = `sha256-${randomUUID()}`;
const hash2 = `sha256-${randomUUID()}`;

mocked(computeIntegrityHash).mockReturnValueOnce(
`sha256-${randomUUID()}`
); // release archive
mocked(computeIntegrityHash).mockReturnValueOnce(hash1);
mocked(computeIntegrityHash).mockReturnValueOnce(hash2);

await createEntryService.createEntryFiles(rulesetRepo, bcrRepo, tag, ".");

const writeSourceCall = mocked(fs.writeFileSync).mock.calls.find((call) =>
(call[0] as string).includes("source.json")
);
const writtenSourceContent = JSON.parse(writeSourceCall[1] as string);
expect(writtenSourceContent.patches["patch1.patch"]).toEqual(hash1);
expect(writtenSourceContent.patches["patch2.patch"]).toEqual(hash2);
});
});

test("sets the patch_strip to 1 when a patch is added", async () => {
mockRulesetFiles({
extractedModuleName: "rules_bar",
extractedModuleVersion: "1.2.3",
patches: {
"patch.patch": randomUUID(),
},
});

const tag = "v1.2.3";
const rulesetRepo = await RulesetRepository.create("repo", "owner", tag);
const bcrRepo = CANONICAL_BCR;

const hash = `sha256-${randomUUID()}`;
mocked(computeIntegrityHash).mockReturnValueOnce(`sha256-${randomUUID()}`); // release archive
mocked(computeIntegrityHash).mockReturnValueOnce(hash);

await createEntryService.createEntryFiles(rulesetRepo, bcrRepo, tag, ".");

const writeSourceCall = mocked(fs.writeFileSync).mock.calls.find((call) =>
(call[0] as string).includes("source.json")
);
const writtenSourceContent = JSON.parse(writeSourceCall[1] as string);
expect(writtenSourceContent.patch_strip).toEqual(1);
});

describe("patches", () => {
test("creates a patch file when the release version does not match the archived version", async () => {
mockRulesetFiles({
@@ -590,8 +657,8 @@
const writtenPatchContent = writePatchCall[1] as string;
expect(
writtenPatchContent.includes(`\
--- MODULE.bazel
+++ MODULE.bazel
--- a/MODULE.bazel
+++ b/MODULE.bazel
@@ -1,6 +1,6 @@
module(
name = "rules_bar",
@@ -602,6 +669,70 @@
).toEqual(true);
});
});

test("includes patches in the patches folder", async () => {
mockRulesetFiles({
extractedModuleName: "rules_bar",
extractedModuleVersion: "1.2.3",
patches: {
"my_patch.patch": randomUUID(),
},
});

const tag = "v1.2.3";
const rulesetRepo = await RulesetRepository.create("repo", "owner", tag);
const bcrRepo = CANONICAL_BCR;

await createEntryService.createEntryFiles(rulesetRepo, bcrRepo, tag, ".");

const expectedPatchPath = path.join(
bcrRepo.diskPath,
"modules",
"rules_bar",
"1.2.3",
"patches",
"my_patch.patch"
);
expect(fs.copyFileSync).toHaveBeenCalledWith(
path.join(rulesetRepo.patchesPath("."), "my_patch.patch"),
expectedPatchPath
);
});

test("includes patches in a different module root", async () => {
mockRulesetFiles({
extractedModuleName: "rules_bar",
extractedModuleVersion: "1.2.3",
patches: {
"submodule.patch": randomUUID(),
},
moduleRoot: "submodule",
});

const tag = "v1.2.3";
const rulesetRepo = await RulesetRepository.create("repo", "owner", tag);
const bcrRepo = CANONICAL_BCR;

await createEntryService.createEntryFiles(
rulesetRepo,
bcrRepo,
tag,
"submodule"
);

const expectedPatchPath = path.join(
bcrRepo.diskPath,
"modules",
"rules_bar",
"1.2.3",
"patches",
"submodule.patch"
);
expect(fs.copyFileSync).toHaveBeenCalledWith(
path.join(rulesetRepo.patchesPath("submodule"), "submodule.patch"),
expectedPatchPath
);
});
});

describe("commitEntryToNewBranch", () => {
@@ -829,6 +960,7 @@ function mockRulesetFiles(
sourceUrl?: string;
sourceStripPrefix?: string;
moduleRoot?: string;
patches?: { [path: string]: string };
} = {}
) {
mockGitClient.checkout.mockImplementation(
@@ -860,6 +992,13 @@
yankedVersions: options.metadataYankedVersions,
homepage: options.metadataHomepage,
});
if (options.patches) {
for (const patch of Object.keys(options.patches)) {
mockedFileReads[
path.join(templatesDir, moduleRoot, "patches", patch)
] = options.patches[patch];
}
}
}
);
}
46 changes: 41 additions & 5 deletions src/domain/create-entry.ts
@@ -1,4 +1,4 @@
import { createPatch } from "diff";
import { createTwoFilesPatch } from "diff";
import { randomBytes } from "node:crypto";
import fs from "node:fs";
import path from "node:path";
@@ -80,6 +80,13 @@ export class CreateEntryService {

fs.mkdirSync(bcrVersionEntryPath);

this.addPatches(
rulesetRepo,
sourceTemplate,
bcrVersionEntryPath,
moduleRoot
);

this.patchModuleVersionIfMismatch(
moduleFile,
version,
@@ -140,6 +147,32 @@
await this.gitClient.push(bcr.diskPath, "authed-fork", branch);
}

private addPatches(
rulesetRepo: RulesetRepository,
sourceTemplate: SourceTemplate,
bcrVersionEntryPath: string,
moduleRoot: string
): void {
const patchesPath = rulesetRepo.patchesPath(moduleRoot);
const patches = fs
.readdirSync(patchesPath)
.filter((f) => f.endsWith(".patch"));

if (
patches.length &&
!fs.existsSync(path.join(bcrVersionEntryPath, "patches"))
) {
fs.mkdirSync(path.join(bcrVersionEntryPath, "patches"));
}

for (const patch of patches) {
const patchDest = path.join(bcrVersionEntryPath, "patches", patch);
fs.copyFileSync(path.join(patchesPath, patch), patchDest);
sourceTemplate.addPatch(patch, computeIntegrityHash(patchDest), 1);
}
}

// The version in the archived MODULE.bazel should match the release version.
// If it doesn't, add a patch to set the correct version. This is useful when a release
// archive is just an archive of the source, and the source MODULE.bazel is kept unstamped
@@ -159,21 +192,24 @@
moduleFile.stampVersion(version);
const stampedContent = moduleFile.content;

const patch = createPatch(
"MODULE.bazel",
const patch = createTwoFilesPatch(
"a/MODULE.bazel",
"b/MODULE.bazel",
existingContent,
stampedContent
);

const patchesDir = path.join(bcrVersionEntryPath, "patches");
fs.mkdirSync(path.join(bcrVersionEntryPath, "patches"));
if (!fs.existsSync(path.join(bcrVersionEntryPath, "patches"))) {
fs.mkdirSync(path.join(bcrVersionEntryPath, "patches"));
}
const patchFilePath = path.join(patchesDir, patchFileName);
fs.writeFileSync(patchFilePath, patch);

sourceTemplate.addPatch(
patchFileName,
computeIntegrityHash(patchFilePath),
0
1
);
}
}
32 changes: 31 additions & 1 deletion src/domain/ruleset-repository.spec.ts
@@ -1,5 +1,5 @@
import { beforeEach, describe, expect, jest, test } from "@jest/globals";
import { mocked, Mocked } from "jest-mock";
import { Mocked, mocked } from "jest-mock";
import fs from "node:fs";
import path from "node:path";
import { GitClient } from "../infrastructure/git";
@@ -352,6 +352,36 @@ describe("sourceTemplatePath", () => {
});
});

describe("patchesPath", () => {
test("gets path to the patches folder", async () => {
mockRulesetFiles();
const rulesetRepo = await RulesetRepository.create("foo", "bar", "main");

expect(rulesetRepo.patchesPath(".")).toEqual(
path.join(
rulesetRepo.diskPath,
RulesetRepository.BCR_TEMPLATE_DIR,
"patches"
)
);
});

test("gets path to the patches in a different module root", async () => {
mockRulesetFiles();
const rulesetRepo = await RulesetRepository.create("foo", "bar", "main");

expect(rulesetRepo.patchesPath("sub/dir")).toEqual(
path.join(
rulesetRepo.diskPath,
RulesetRepository.BCR_TEMPLATE_DIR,
"sub",
"dir",
"patches"
)
);
});
});

function mockRulesetFiles(
options: {
skipMetadataFile?: boolean;
9 changes: 9 additions & 0 deletions src/domain/ruleset-repository.ts
@@ -187,6 +187,15 @@ export class RulesetRepository extends Repository {
);
}

public patchesPath(moduleRoot: string): string {
return path.resolve(
this.diskPath,
RulesetRepository.BCR_TEMPLATE_DIR,
moduleRoot,
"patches"
);
}

public sourceTemplate(moduleRoot: string): SourceTemplate {
return this._sourceTemplate[moduleRoot];
}