Merge branch 'main' into patch-2
jwbth authored May 14, 2024
2 parents 47efdbe + c0ac54b commit ab88f4d
Showing 27 changed files with 752 additions and 661 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/native-wsl.yml
@@ -2,6 +2,10 @@ name: Native and WSL

on: [push, pull_request]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
  build:
    runs-on: ${{ matrix.os }}
4 changes: 4 additions & 0 deletions .github/workflows/node-4+.yml
@@ -2,6 +2,10 @@ name: 'Tests: node.js'

on: [pull_request, push]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

permissions:
  contents: read

4 changes: 4 additions & 0 deletions .github/workflows/packages.yml
@@ -2,6 +2,10 @@ name: 'Tests: packages'

on: [pull_request, push]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

permissions:
  contents: read

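The `concurrency` block added to all three workflows above groups runs by workflow name and git ref; `cancel-in-progress` then cancels a superseded run in the same group, but only for `pull_request` events, so repeated pushes to a PR branch stop stacking up redundant CI runs while pushes to long-lived branches still run to completion.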
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -18,6 +18,7 @@ This change log adheres to standards from [Keep a CHANGELOG](https://keepachange
- [Tests] appveyor -> GHA (run tests on Windows in both pwsh and WSL + Ubuntu) ([#2987], thanks [@joeyguerra])
- [actions] migrate OSX tests to GHA ([ljharb#37], thanks [@aks-])
- [Refactor] `exportMapBuilder`: avoid hoisting ([#2989], thanks [@soryy708])
- [Refactor] `ExportMap`: extract "builder" logic to separate files ([#2991], thanks [@soryy708])

## [2.29.1] - 2023-12-14

@@ -1114,6 +1115,7 @@ for info on changes for earlier releases.

[`memo-parser`]: ./memo-parser/README.md

[#2991]: https://github.com/import-js/eslint-plugin-import/pull/2991
[#2989]: https://github.com/import-js/eslint-plugin-import/pull/2989
[#2987]: https://github.com/import-js/eslint-plugin-import/pull/2987
[#2985]: https://github.com/import-js/eslint-plugin-import/pull/2985
1 change: 1 addition & 0 deletions package.json
@@ -82,6 +82,7 @@
"eslint-plugin-eslint-plugin": "^2.3.0",
"eslint-plugin-import": "2.x",
"eslint-plugin-json": "^2.1.2",
"find-babel-config": "=1.2.0",
"fs-copy-file-sync": "^1.1.1",
"glob": "^7.2.3",
"in-publish": "^2.0.1",
206 changes: 206 additions & 0 deletions src/exportMap/builder.js
@@ -0,0 +1,206 @@
import fs from 'fs';

import doctrine from 'doctrine';

import debug from 'debug';

import parse from 'eslint-module-utils/parse';
import visit from 'eslint-module-utils/visit';
import resolve from 'eslint-module-utils/resolve';
import isIgnored, { hasValidExtension } from 'eslint-module-utils/ignore';

import { hashObject } from 'eslint-module-utils/hash';
import * as unambiguous from 'eslint-module-utils/unambiguous';

import ExportMap from '.';
import childContext from './childContext';
import { isEsModuleInterop } from './typescript';
import { RemotePath } from './remotePath';
import ImportExportVisitorBuilder from './visitor';

const log = debug('eslint-plugin-import:ExportMap');

const exportCache = new Map();

/**
 * The creation of this closure is isolated from other scopes
 * to avoid over-retention of unrelated variables, which has
 * caused memory leaks. See #1266.
 */
function thunkFor(p, context) {
  // eslint-disable-next-line no-use-before-define
  return () => ExportMapBuilder.for(childContext(p, context));
}

export default class ExportMapBuilder {
  static get(source, context) {
    const path = resolve(source, context);
    if (path == null) { return null; }

    return ExportMapBuilder.for(childContext(path, context));
  }

  static for(context) {
    const { path } = context;

    const cacheKey = context.cacheKey || hashObject(context).digest('hex');
    let exportMap = exportCache.get(cacheKey);

    // return cached ignore
    if (exportMap === null) { return null; }

    const stats = fs.statSync(path);
    if (exportMap != null) {
      // date equality check
      if (exportMap.mtime - stats.mtime === 0) {
        return exportMap;
      }
      // future: check content equality?
    }

    // check valid extensions first
    if (!hasValidExtension(path, context)) {
      exportCache.set(cacheKey, null);
      return null;
    }

    // check for and cache ignore
    if (isIgnored(path, context)) {
      log('ignored path due to ignore settings:', path);
      exportCache.set(cacheKey, null);
      return null;
    }

    const content = fs.readFileSync(path, { encoding: 'utf8' });

    // check for and cache unambiguous modules
    if (!unambiguous.test(content)) {
      log('ignored path due to unambiguous regex:', path);
      exportCache.set(cacheKey, null);
      return null;
    }

    log('cache miss', cacheKey, 'for path', path);
    exportMap = ExportMapBuilder.parse(path, content, context);

    // ambiguous modules return null
    if (exportMap == null) {
      log('ignored path due to ambiguous parse:', path);
      exportCache.set(cacheKey, null);
      return null;
    }

    exportMap.mtime = stats.mtime;

    exportCache.set(cacheKey, exportMap);
    return exportMap;
  }

  static parse(path, content, context) {
    const exportMap = new ExportMap(path);
    const isEsModuleInteropTrue = isEsModuleInterop(context);

    let ast;
    let visitorKeys;
    try {
      const result = parse(path, content, context);
      ast = result.ast;
      visitorKeys = result.visitorKeys;
    } catch (err) {
      exportMap.errors.push(err);
      return exportMap; // can't continue
    }

    exportMap.visitorKeys = visitorKeys;

    let hasDynamicImports = false;

    const remotePathResolver = new RemotePath(path, context);

    function processDynamicImport(source) {
      hasDynamicImports = true;
      if (source.type !== 'Literal') {
        return null;
      }
      const p = remotePathResolver.resolve(source.value);
      if (p == null) {
        return null;
      }
      const importedSpecifiers = new Set();
      importedSpecifiers.add('ImportNamespaceSpecifier');
      const getter = thunkFor(p, context);
      exportMap.imports.set(p, {
        getter,
        declarations: new Set([{
          source: {
            // capturing actual node reference holds full AST in memory!
            value: source.value,
            loc: source.loc,
          },
          importedSpecifiers,
          dynamic: true,
        }]),
      });
    }

    visit(ast, visitorKeys, {
      ImportExpression(node) {
        processDynamicImport(node.source);
      },
      CallExpression(node) {
        if (node.callee.type === 'Import') {
          processDynamicImport(node.arguments[0]);
        }
      },
    });

    const unambiguouslyESM = unambiguous.isModule(ast);
    if (!unambiguouslyESM && !hasDynamicImports) { return null; }

    // attempt to collect module doc
    if (ast.comments) {
      ast.comments.some((c) => {
        if (c.type !== 'Block') { return false; }
        try {
          const doc = doctrine.parse(c.value, { unwrap: true });
          if (doc.tags.some((t) => t.title === 'module')) {
            exportMap.doc = doc;
            return true;
          }
        } catch (err) { /* ignore */ }
        return false;
      });
    }

    const visitorBuilder = new ImportExportVisitorBuilder(
      path,
      context,
      exportMap,
      ExportMapBuilder,
      content,
      ast,
      isEsModuleInteropTrue,
      thunkFor,
    );
    ast.body.forEach(function (astNode) {
      const visitor = visitorBuilder.build(astNode);

      if (visitor[astNode.type]) {
        visitor[astNode.type].call(visitorBuilder);
      }
    });

    if (
      isEsModuleInteropTrue // esModuleInterop is on in tsconfig
      && exportMap.namespace.size > 0 // anything is exported
      && !exportMap.namespace.has('default') // and default isn't added already
    ) {
      exportMap.namespace.set('default', {}); // add default export
    }

    if (unambiguouslyESM) {
      exportMap.parseGoal = 'Module';
    }
    return exportMap;
  }
}
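For context, this is roughly how a rule would consume the new builder. It is a minimal sketch of the call pattern implied by the code above; the rule shape, relative import path, and reported message are assumptions for illustration, not part of this commit.

// Hypothetical rule using the builder; names and paths are illustrative only.
import ExportMapBuilder from '../exportMap/builder';

module.exports = {
  meta: { type: 'problem', schema: [] },
  create(context) {
    return {
      ImportDeclaration(node) {
        // Resolves the request, consults the mtime-keyed exportCache, and parses on a miss.
        const imports = ExportMapBuilder.get(node.source.value, context);
        if (imports == null) { return; } // unresolved, ignored, or ambiguous module
        if (imports.errors.length) { return; } // parse errors: nothing usable to inspect
        // imports.namespace now maps exported names to metadata for this module.
        if (!imports.namespace.has('default')) {
          context.report(node, 'No default export found in imported module.');
        }
      },
    };
  },
};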
60 changes: 60 additions & 0 deletions src/exportMap/captureDependency.js
@@ -0,0 +1,60 @@
export function captureDependency(
  { source },
  isOnlyImportingTypes,
  remotePathResolver,
  exportMap,
  context,
  thunkFor,
  importedSpecifiers = new Set(),
) {
  if (source == null) { return null; }

  const p = remotePathResolver.resolve(source.value);
  if (p == null) { return null; }

  const declarationMetadata = {
    // capturing actual node reference holds full AST in memory!
    source: { value: source.value, loc: source.loc },
    isOnlyImportingTypes,
    importedSpecifiers,
  };

  const existing = exportMap.imports.get(p);
  if (existing != null) {
    existing.declarations.add(declarationMetadata);
    return existing.getter;
  }

  const getter = thunkFor(p, context);
  exportMap.imports.set(p, { getter, declarations: new Set([declarationMetadata]) });
  return getter;
}

const supportedImportTypes = new Set(['ImportDefaultSpecifier', 'ImportNamespaceSpecifier']);

export function captureDependencyWithSpecifiers(
  n,
  remotePathResolver,
  exportMap,
  context,
  thunkFor,
) {
  // import type { Foo } (TS and Flow); import typeof { Foo } (Flow)
  const declarationIsType = n.importKind === 'type' || n.importKind === 'typeof';
  // import './foo' or import {} from './foo' (both 0 specifiers) is a side effect and
  // shouldn't be considered to be just importing types
  let specifiersOnlyImportingTypes = n.specifiers.length > 0;
  const importedSpecifiers = new Set();
  n.specifiers.forEach((specifier) => {
    if (specifier.type === 'ImportSpecifier') {
      importedSpecifiers.add(specifier.imported.name || specifier.imported.value);
    } else if (supportedImportTypes.has(specifier.type)) {
      importedSpecifiers.add(specifier.type);
    }

    // import { type Foo } (Flow); import { typeof Foo } (Flow)
    specifiersOnlyImportingTypes = specifiersOnlyImportingTypes
      && (specifier.importKind === 'type' || specifier.importKind === 'typeof');
  });
  captureDependency(n, declarationIsType || specifiersOnlyImportingTypes, remotePathResolver, exportMap, context, thunkFor, importedSpecifiers);
}
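A short illustration may help connect these helpers to the builder above; the import statements and the resolved path in the comments below are invented for the example, not taken from the diff.

// Illustrative only — not part of the commit. Given a file containing:
//
//   import { Foo } from './foo';            // value import
//   import type { Bar } from './bar';       // TS/Flow type-only import
//   import './setup';                       // bare side-effect import
//
// captureDependencyWithSpecifiers records, per resolved path, an entry like:
//
//   exportMap.imports.get('/abs/path/foo.js') === {
//     getter,                               // lazy thunk from thunkFor
//     declarations: Set of {
//       source: { value: './foo', loc },    // no AST node retained
//       isOnlyImportingTypes: false,        // true for the './bar' entry
//       importedSpecifiers: Set { 'Foo' },
//     },
//   }
//
// The bare './setup' import has zero specifiers, so specifiersOnlyImportingTypes
// starts out false and the dependency is still captured as a plain (non-type) import.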
32 changes: 32 additions & 0 deletions src/exportMap/childContext.js
@@ -0,0 +1,32 @@
import { hashObject } from 'eslint-module-utils/hash';

let parserOptionsHash = '';
let prevParserOptions = '';
let settingsHash = '';
let prevSettings = '';

/**
 * don't hold full context object in memory, just grab what we need.
 * also calculate a cacheKey, where parts of the cacheKey hash are memoized
 */
export default function childContext(path, context) {
  const { settings, parserOptions, parserPath } = context;

  if (JSON.stringify(settings) !== prevSettings) {
    settingsHash = hashObject({ settings }).digest('hex');
    prevSettings = JSON.stringify(settings);
  }

  if (JSON.stringify(parserOptions) !== prevParserOptions) {
    parserOptionsHash = hashObject({ parserOptions }).digest('hex');
    prevParserOptions = JSON.stringify(parserOptions);
  }

  return {
    cacheKey: String(parserPath) + parserOptionsHash + settingsHash + String(path),
    settings,
    parserOptions,
    parserPath,
    path,
  };
}
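A small usage sketch of the memoization above; the context object is hand-built for illustration and nothing here is taken from the repository's tests.

// Hypothetical usage; settings/parserOptions values are invented for the example.
import childContext from './childContext';

const context = {
  settings: { 'import/resolver': 'node' },
  parserOptions: { ecmaVersion: 2022, sourceType: 'module' },
  parserPath: 'espree',
};

const a = childContext('/project/src/a.js', context);
const b = childContext('/project/src/b.js', context);

// The second call sees identical settings and parserOptions, so it reuses the
// memoized hashes instead of re-hashing; the cacheKeys differ only by path.
console.log(a.cacheKey === b.cacheKey); // false — different paths
console.log(a.settings === context.settings); // true — objects are referenced, not copied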