Skip to content

Commit

Permalink
Use FileSystem instance for crawl previous state instead of raw `FileData`
Browse files Browse the repository at this point in the history

Summary:
- Add a `getDifference` method to `FileSystem` to return  a `{changedFiles: FileData, removedFiles: Set<string>}` given a `FileData` representing the complete *current* set of files in the system, treating the `FileSystem` instance as base state.
 - Pass a `FileSystem` to crawler backends so that they may use `getDifference` instead of directly comparing with the raw deserialised `files` map.

This consolidates some diffing logic previously repeated in crawler backends, and allows us to make the data structure for `files` an implementation detail of `FileSystem` - in particular, we want to avoid maintaining both a flat map and a prefix tree in `TreeFS`.

Changelog: Internal

Reviewed By: huntie

Differential Revision: D46515321

fbshipit-source-id: 1cbbaca4a83e92af42ee4c52b77ad13de3655eec
  • Loading branch information
robhogan authored and facebook-github-bot committed Jun 24, 2023
1 parent 42854be commit d376529
Show file tree
Hide file tree
Showing 10 changed files with 132 additions and 108 deletions.
3 changes: 1 addition & 2 deletions packages/metro-file-map/src/__tests__/index-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,7 @@ jest.mock('../crawlers/watchman', () =>
]);
}
} else {
const fileData = previousState.files.get(relativeFilePath);
if (fileData) {
if (previousState.fileSystem.exists(relativeFilePath)) {
removedFiles.add(relativeFilePath);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
* @oncall react_native
*/

import TreeFS from '../../lib/TreeFS';
import nodeCrawl from '../node';
import watchmanCrawl from '../watchman';
import {execSync} from 'child_process';
Expand Down Expand Up @@ -109,7 +110,10 @@ describe.each(Object.keys(CRAWLERS))(
invariant(crawl, 'crawl should not be null within maybeTest');
const result = await crawl({
previousState: {
files: new Map([['removed.js', ['', 123, 234, 0, '', null, 0]]]),
fileSystem: new TreeFS({
rootDir: FIXTURES_DIR,
files: new Map([['removed.js', ['', 123, 234, 0, '', null, 0]]]),
}),
clocks: new Map(),
},
includeSymlinks,
Expand Down
39 changes: 14 additions & 25 deletions packages/metro-file-map/src/crawlers/__tests__/node-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
*/

import {AbortController} from 'node-abort-controller';
import TreeFS from '../../lib/TreeFS';

jest.useRealTimers();

Expand Down Expand Up @@ -124,6 +125,8 @@ const createMap = obj =>
new Map(Object.keys(obj).map(key => [normalize(key), obj[key]]));

const rootDir = '/project';
const emptyFS = new TreeFS({rootDir, files: new Map()});
const getFS = (files: FileData) => new TreeFS({rootDir, files});
let mockResponse;
let mockSpawnExit;
let nodeCrawl;
Expand Down Expand Up @@ -154,9 +157,7 @@ describe('node crawler', () => {
].join('\n');

const {changedFiles, removedFiles} = await nodeCrawl({
previousState: {
files: new Map(),
},
previousState: {fileSystem: emptyFS},
extensions: ['js', 'json'],
ignore: pearMatcher,
rootDir,
Expand Down Expand Up @@ -205,7 +206,7 @@ describe('node crawler', () => {
});

const {changedFiles, removedFiles} = await nodeCrawl({
previousState: {files},
previousState: {fileSystem: getFS(files)},
extensions: ['js'],
ignore: pearMatcher,
rootDir,
Expand Down Expand Up @@ -234,7 +235,7 @@ describe('node crawler', () => {
});

const {changedFiles, removedFiles} = await nodeCrawl({
previousState: {files},
previousState: {fileSystem: getFS(files)},
extensions: ['js'],
ignore: pearMatcher,
rootDir,
Expand All @@ -258,9 +259,7 @@ describe('node crawler', () => {
nodeCrawl = require('../node');

const {changedFiles, removedFiles} = await nodeCrawl({
previousState: {
files: new Map(),
},
previousState: {fileSystem: emptyFS},
extensions: ['js'],
ignore: pearMatcher,
rootDir,
Expand Down Expand Up @@ -289,9 +288,7 @@ describe('node crawler', () => {
nodeCrawl = require('../node');

const {changedFiles, removedFiles} = await nodeCrawl({
previousState: {
files: new Map(),
},
previousState: {fileSystem: emptyFS},
extensions: ['js'],
ignore: pearMatcher,
rootDir,
Expand All @@ -311,9 +308,8 @@ describe('node crawler', () => {
childProcess = require('child_process');
nodeCrawl = require('../node');

const files = new Map();
const {changedFiles, removedFiles} = await nodeCrawl({
previousState: {files},
previousState: {fileSystem: emptyFS},
extensions: ['js'],
forceNodeFilesystemAPI: true,
ignore: pearMatcher,
Expand All @@ -334,9 +330,8 @@ describe('node crawler', () => {
it('completes with empty roots', async () => {
nodeCrawl = require('../node');

const files = new Map();
const {changedFiles, removedFiles} = await nodeCrawl({
previousState: {files},
previousState: {fileSystem: emptyFS},
extensions: ['js'],
forceNodeFilesystemAPI: true,
ignore: pearMatcher,
Expand All @@ -351,9 +346,8 @@ describe('node crawler', () => {
it('completes with fs.readdir throwing an error', async () => {
nodeCrawl = require('../node');

const files = new Map();
const {changedFiles, removedFiles} = await nodeCrawl({
previousState: {files},
previousState: {fileSystem: emptyFS},
extensions: ['js'],
forceNodeFilesystemAPI: true,
ignore: pearMatcher,
Expand All @@ -369,9 +363,8 @@ describe('node crawler', () => {
nodeCrawl = require('../node');
const fs = require('graceful-fs');

const files = new Map();
const {changedFiles, removedFiles} = await nodeCrawl({
previousState: {files},
previousState: {fileSystem: emptyFS},
extensions: ['js'],
forceNodeFilesystemAPI: true,
ignore: pearMatcher,
Expand All @@ -398,9 +391,7 @@ describe('node crawler', () => {
await expect(
nodeCrawl({
abortSignal: abortSignalWithReason(err),
previousState: {
files: new Map(),
},
previousState: {fileSystem: emptyFS},
extensions: ['js', 'json'],
ignore: pearMatcher,
rootDir,
Expand Down Expand Up @@ -431,9 +422,7 @@ describe('node crawler', () => {
nodeCrawl({
perfLogger: fakePerfLogger,
abortSignal: abortController.signal,
previousState: {
files: new Map(),
},
previousState: {fileSystem: emptyFS},
extensions: ['js', 'json'],
ignore: pearMatcher,
rootDir,
Expand Down
14 changes: 8 additions & 6 deletions packages/metro-file-map/src/crawlers/__tests__/watchman-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
*/

import {AbortController} from 'node-abort-controller';
import TreeFS from '../../lib/TreeFS';

const path = require('path');

Expand Down Expand Up @@ -45,6 +46,7 @@ let watchman;
let watchmanCrawl;
let mockResponse;
let mockFiles;
const getFS = files => new TreeFS({files, rootDir: ROOT_MOCK});

const ROOT_MOCK = path.sep === '/' ? '/root-mock' : 'M:\\root-mock';
const FRUITS_RELATIVE = 'fruits';
Expand Down Expand Up @@ -129,7 +131,7 @@ describe('watchman watch', () => {
const {changedFiles, clocks, removedFiles} = await watchmanCrawl({
previousState: {
clocks: new Map(),
files: new Map(),
fileSystem: getFS(new Map()),
},
extensions: ['js', 'json'],
ignore: pearMatcher,
Expand Down Expand Up @@ -204,7 +206,7 @@ describe('watchman watch', () => {
clocks: createMap({
'': 'c:fake-clock:1',
}),
files: mockFiles,
fileSystem: getFS(mockFiles),
},
extensions: ['js', 'json'],
ignore: pearMatcher,
Expand Down Expand Up @@ -272,7 +274,7 @@ describe('watchman watch', () => {
clocks: createMap({
'': 'c:fake-clock:1',
}),
files: mockFiles,
fileSystem: getFS(mockFiles),
},
extensions: ['js', 'json'],
ignore: pearMatcher,
Expand Down Expand Up @@ -352,7 +354,7 @@ describe('watchman watch', () => {
[FRUITS_RELATIVE]: 'c:fake-clock:1',
[VEGETABLES_RELATIVE]: 'c:fake-clock:2',
}),
files: mockFiles,
fileSystem: getFS(mockFiles),
},
extensions: ['js', 'json'],
ignore: pearMatcher,
Expand Down Expand Up @@ -407,7 +409,7 @@ describe('watchman watch', () => {
const {changedFiles, clocks, removedFiles} = await watchmanCrawl({
previousState: {
clocks: new Map(),
files: new Map(),
fileSystem: getFS(new Map()),
},
extensions: ['js', 'json'],
ignore: pearMatcher,
Expand Down Expand Up @@ -470,7 +472,7 @@ describe('watchman watch', () => {
computeSha1: true,
previousState: {
clocks: new Map(),
files: new Map(),
fileSystem: getFS(new Map()),
},
extensions: ['js', 'json'],
rootDir: ROOT_MOCK,
Expand Down
21 changes: 2 additions & 19 deletions packages/metro-file-map/src/crawlers/node/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
* @oncall react_native
*/

import type {Path, FileMetaData} from '../../flow-types';
import type {
CanonicalPath,
CrawlerOptions,
Expand All @@ -18,7 +17,6 @@ import type {
} from '../../flow-types';

import hasNativeFindSupport from './hasNativeFindSupport';
import H from '../../constants';
import * as fastPath from '../../lib/fast_path';
import {spawn} from 'child_process';
import * as fs from 'graceful-fs';
Expand Down Expand Up @@ -194,19 +192,7 @@ module.exports = async function nodeCrawl(options: CrawlerOptions): Promise<{

return new Promise((resolve, reject) => {
const callback = (fileData: FileData) => {
const changedFiles = new Map<Path, FileMetaData>();
const removedFiles = new Set(previousState.files.keys());
for (const [normalPath, fileMetaData] of fileData) {
const existingFile = previousState.files.get(normalPath);
removedFiles.delete(normalPath);
if (
existingFile == null ||
existingFile[H.MTIME] !== fileMetaData[H.MTIME]
) {
// See ../constants.js; SHA-1 will always be null and fulfilled later.
changedFiles.set(normalPath, fileMetaData);
}
}
const difference = previousState.fileSystem.getDifference(fileData);

perfLogger?.point('nodeCrawl_end');

Expand All @@ -216,10 +202,7 @@ module.exports = async function nodeCrawl(options: CrawlerOptions): Promise<{
} catch (e) {
reject(e);
}
resolve({
changedFiles,
removedFiles,
});
resolve(difference);
};

if (useNativeFind) {
Expand Down
Loading

0 comments on commit d376529

Please sign in to comment.