diff --git a/cmd/runtime_options.go b/cmd/runtime_options.go index d63766615ff9..61e67cfdf0c6 100644 --- a/cmd/runtime_options.go +++ b/cmd/runtime_options.go @@ -23,10 +23,11 @@ func runtimeOptionFlagSet(includeSysEnv bool) *pflag.FlagSet { flags.SortFlags = false flags.Bool("include-system-env-vars", includeSysEnv, "pass the real system environment variables to the runtime") flags.String("compatibility-mode", "extended", - `JavaScript compiler compatibility mode, "extended" or "base" + `JavaScript compiler compatibility mode, "extended" or "base" or "experimental_enhanced" base: pure goja - Golang JS VM supporting ES5.1+ extended: base + Babel with parts of ES2015 preset slower to compile in case the script uses syntax unsupported by base +experimental_enhanced: esbuild-based transpiling for TypeScript and ES6+ support `) flags.StringP("type", "t", "", "override test type, \"js\" or \"archive\"") flags.StringArrayP("env", "e", nil, "add/override environment variable with `VAR=value`") diff --git a/cmd/runtime_options_test.go b/cmd/runtime_options_test.go index 66faab4a22cb..7ca20d5d9408 100644 --- a/cmd/runtime_options_test.go +++ b/cmd/runtime_options_test.go @@ -109,6 +109,7 @@ func TestRuntimeOptions(t *testing.T) { defaultCompatMode = null.NewString("extended", false) baseCompatMode = null.NewString("base", true) extendedCompatMode = null.NewString("extended", true) + enhancedCompatMode = null.NewString("experimental_enhanced", true) defaultTracesOutput = null.NewString("none", false) ) @@ -143,6 +144,16 @@ func TestRuntimeOptions(t *testing.T) { TracesOutput: defaultTracesOutput, }, }, + "disabled sys env by default with experimental_enhanced compat mode": { + useSysEnv: false, + systemEnv: map[string]string{"test1": "val1", "K6_COMPATIBILITY_MODE": "experimental_enhanced"}, + expRTOpts: lib.RuntimeOptions{ + IncludeSystemEnvVars: null.NewBool(false, false), + CompatibilityMode: enhancedCompatMode, + Env: map[string]string{}, + TracesOutput: 
defaultTracesOutput, + }, + }, "disabled sys env by cli 1": { useSysEnv: true, systemEnv: map[string]string{"test1": "val1", "K6_COMPATIBILITY_MODE": "base"}, diff --git a/examples/enhanced/abort.ts b/examples/enhanced/abort.ts new file mode 100644 index 000000000000..e391e52c0544 --- /dev/null +++ b/examples/enhanced/abort.ts @@ -0,0 +1,5 @@ +import exec from "k6/execution"; + +export default function () { + exec.test.abort("failed"); +} diff --git a/examples/enhanced/script.ts b/examples/enhanced/script.ts new file mode 100644 index 000000000000..520a07171666 --- /dev/null +++ b/examples/enhanced/script.ts @@ -0,0 +1,6 @@ +import { User, newUser } from "./user.ts"; + +export default () => { + const user: User = newUser("John"); + console.log(user); +}; diff --git a/examples/enhanced/user.ts b/examples/enhanced/user.ts new file mode 100644 index 000000000000..0e539be259f4 --- /dev/null +++ b/examples/enhanced/user.ts @@ -0,0 +1,20 @@ +interface User { + name: string; + id: number; +} + +class UserAccount implements User { + name: string; + id: number; + + constructor(name: string) { + this.name = name; + this.id = Math.floor(Math.random() * Number.MAX_SAFE_INTEGER); + } +} + +function newUser(name: string): User { + return new UserAccount(name); +} + +export { User, newUser }; diff --git a/go.mod b/go.mod index a892da67faa4..2f6bc07f28c0 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/Soontao/goHttpDigestClient v0.0.0-20170320082612-6d28bb1415c5 github.com/andybalholm/brotli v1.1.0 github.com/dop251/goja v0.0.0-20240220182346-e401ed450204 + github.com/evanw/esbuild v0.21.2 github.com/fatih/color v1.16.0 github.com/go-sourcemap/sourcemap v2.1.4+incompatible github.com/golang/protobuf v1.5.4 diff --git a/go.sum b/go.sum index 1f1b8a2a0ac1..263a203a68fe 100644 --- a/go.sum +++ b/go.sum @@ -57,6 +57,8 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF github.com/envoyproxy/go-control-plane 
v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/evanw/esbuild v0.21.2 h1:CLplcGi794CfHLVmUbvVfTMKkykm+nyIHU8SU60KUTA= +github.com/evanw/esbuild v0.21.2/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= diff --git a/js/bundle_test.go b/js/bundle_test.go index 670a63027e3f..6bdc191443af 100644 --- a/js/bundle_test.go +++ b/js/bundle_test.go @@ -159,7 +159,7 @@ func TestNewBundle(t *testing.T) { }{ { "InvalidCompat", "es1", `export default function() {};`, - `invalid compatibility mode "es1". Use: "extended", "base"`, + `invalid compatibility mode "es1". 
Use: "extended", "base", "experimental_enhanced"`, }, // ES2015 modules are not supported { diff --git a/js/compiler/compiler.go b/js/compiler/compiler.go index 564dbe34f723..be336fff57e4 100644 --- a/js/compiler/compiler.go +++ b/js/compiler/compiler.go @@ -239,19 +239,33 @@ func (c *Compiler) compileImpl( c.logger.WithError(state.srcMapError).Warnf("Couldn't load source map for %s", filename) ast, err = parser.ParseFile(nil, filename, code, 0, parser.WithDisableSourceMaps) } - if err != nil { - if compatibilityMode == lib.CompatibilityModeExtended { - code, state.srcMap, err = c.Transform(src, filename, state.srcMap) - if err != nil { - return nil, code, err - } - // the compatibility mode "decreases" here as we shouldn't transform twice - return c.compileImpl(code, filename, wrap, lib.CompatibilityModeBase, state.srcMap) + + if err == nil { + pgm, err := goja.CompileAST(ast, c.Options.Strict) + return pgm, code, err + } + + if compatibilityMode == lib.CompatibilityModeExtended { + code, state.srcMap, err = c.Transform(src, filename, state.srcMap) + if err != nil { + return nil, code, err + } + // the compatibility mode "decreases" here as we shouldn't transform twice + return c.compileImpl(code, filename, wrap, lib.CompatibilityModeBase, state.srcMap) + } + + if compatibilityMode == lib.CompatibilityModeExperimentalEnhanced { + code, state.srcMap, err = esbuildTransform(src, filename) + if err != nil { + return nil, code, err + } + if c.Options.SourceMapLoader != nil { + // This hack is required for the source map to work + code += "\n//# sourceMappingURL=" + sourceMapURLFromBabel } - return nil, code, err + return c.compileImpl(code, filename, wrap, lib.CompatibilityModeBase, state.srcMap) } - pgm, err := goja.CompileAST(ast, c.Options.Strict) - return pgm, code, err + return nil, code, err } type babel struct { diff --git a/js/compiler/enhanced.go b/js/compiler/enhanced.go new file mode 100644 index 000000000000..228686a44243 --- /dev/null +++ 
b/js/compiler/enhanced.go @@ -0,0 +1,55 @@ +package compiler + +import ( + "path/filepath" + + "github.com/dop251/goja/file" + "github.com/dop251/goja/parser" + "github.com/evanw/esbuild/pkg/api" +) + +func esbuildTransform(src, filename string) (code string, srcMap []byte, err error) { + opts := api.TransformOptions{ + Sourcefile: filename, + Loader: api.LoaderJS, + Target: api.ESNext, + Format: api.FormatCommonJS, + Sourcemap: api.SourceMapExternal, + SourcesContent: api.SourcesContentInclude, + LegalComments: api.LegalCommentsNone, + Platform: api.PlatformNeutral, + LogLevel: api.LogLevelSilent, + Charset: api.CharsetUTF8, + } + + if filepath.Ext(filename) == ".ts" { + opts.Loader = api.LoaderTS + } + + result := api.Transform(src, opts) + + if hasError, err := esbuildCheckError(&result); hasError { + return "", nil, err + } + + return string(result.Code), result.Map, nil +} + +func esbuildCheckError(result *api.TransformResult) (bool, error) { + if len(result.Errors) == 0 { + return false, nil + } + + msg := result.Errors[0] + err := &parser.Error{Message: msg.Text} + + if msg.Location != nil { + err.Position = file.Position{ + Filename: msg.Location.File, + Line: msg.Location.Line, + Column: msg.Location.Column, + } + } + + return true, err +} diff --git a/js/compiler/enhanced_test.go b/js/compiler/enhanced_test.go new file mode 100644 index 000000000000..c272861977d0 --- /dev/null +++ b/js/compiler/enhanced_test.go @@ -0,0 +1,97 @@ +package compiler + +import ( + "errors" + "testing" + + "github.com/dop251/goja" + "github.com/dop251/goja/parser" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.k6.io/k6/lib" + "go.k6.io/k6/lib/testutils" +) + +func Test_esbuildTransform_js(t *testing.T) { + t.Parallel() + + code, srcMap, err := esbuildTransform(`export default function(name) { return "Hello, " + name }`, "script.js") + + require.NoError(t, err) + require.NotNil(t, srcMap) + require.NotEmpty(t, code) +} + +func 
Test_esbuildTransform_ts(t *testing.T) { + t.Parallel() + + script := `export function hello(name:string) : string { return "Hello, " + name}` + + code, srcMap, err := esbuildTransform(script, "script.ts") + + require.NoError(t, err) + require.NotNil(t, srcMap) + require.NotEmpty(t, code) +} + +func Test_esbuildTransform_error(t *testing.T) { + t.Parallel() + + script := `export function hello(name:string) : string { return "Hello, " + name}` + + _, _, err := esbuildTransform(script, "script.js") + + require.Error(t, err) + + var perr *parser.Error + + require.True(t, errors.As(err, &perr)) + require.NotNil(t, perr.Position) + require.Equal(t, "script.js", perr.Position.Filename) + require.Equal(t, 1, perr.Position.Line) + require.Equal(t, 26, perr.Position.Column) + require.Equal(t, "Expected \")\" but found \":\"", perr.Message) +} + +func TestCompile_experimental_enhanced(t *testing.T) { + t.Parallel() + + t.Run("experimental_enhanced Invalid", func(t *testing.T) { + t.Parallel() + c := New(testutils.NewLogger(t)) + src := `1+(function() { return 2; )()` + c.Options.CompatibilityMode = lib.CompatibilityModeExperimentalEnhanced + _, _, err := c.Compile(src, "script.js", false) + assert.IsType(t, &parser.Error{}, err) + assert.Contains(t, err.Error(), `script.js: Line 1:26 Unexpected ")"`) + }) + t.Run("experimental_enhanced", func(t *testing.T) { + t.Parallel() + c := New(testutils.NewLogger(t)) + c.Options.CompatibilityMode = lib.CompatibilityModeExperimentalEnhanced + pgm, code, err := c.Compile(`import "something"`, "script.js", true) + require.NoError(t, err) + assert.Equal(t, `var import_something = require("something"); +`, code) + rt := goja.New() + var requireCalled bool + require.NoError(t, rt.Set("require", func(s string) { + assert.Equal(t, "something", s) + requireCalled = true + })) + _, err = rt.RunProgram(pgm) + require.NoError(t, err) + require.True(t, requireCalled) + }) + t.Run("experimental_enhanced sourcemap", func(t *testing.T) { + 
t.Parallel() + c := New(testutils.NewLogger(t)) + c.Options.CompatibilityMode = lib.CompatibilityModeExperimentalEnhanced + c.Options.SourceMapLoader = func(_ string) ([]byte, error) { return nil, nil } + _, code, err := c.Compile(`import "something"`, "script.js", true) + require.NoError(t, err) + assert.Equal(t, `var import_something = require("something"); + +//# sourceMappingURL=k6://internal-should-not-leak/file.map`, code) + }) +} diff --git a/js/tc39/README.md b/js/tc39/README.md index a8f0d30d3dd6..cc1dbc21979c 100644 --- a/js/tc39/README.md +++ b/js/tc39/README.md @@ -1,19 +1,17 @@ # Introduction to a k6's TC39 testing -The point of this module is to test k6 goja+babel combo against the tc39 test suite. +The point of this module is to test k6 goja+babel and k6 goja+esbuild combo against the tc39 test suite. Ways to use it: 1. run ./checkout.sh to checkout the last commit sha of [test262](https://github.com/tc39/test262) that was tested with this module 2. Run `go test &> out.log` -If there are failures there will be a JSON with what failed. -The full list of failing tests, and the error, is in `breaking_test_errors.json`. All errors list there with the corresponding error will *not* be counted as errors - this is what the test expects, those specific errors. +The full list of failing tests, and the error, is in `breaking_test_errors-*.json`. All errors list there with the corresponding error will *not* be counted as errors - this is what the test expects, those specific errors. Due to changes to goja it is not uncommon for the error to change, or there to be now a new error on previously passing test, or (hopefully) a test that was not passing but now is. -In all of those cases `breaking_test_errors.json` needs to be updated. Currently, the output is the *difference* in errors, so we need to "null" the file. To that we set it to an empty JSON `echo '{}' > breaking_test_errors.json`. 
-Run the test with output to a file `go test &> breaking_test_errors.json`. And then edit `out.log` so only the JSON is left. I personally search for `FAIL` and that should be the first thing just *after* the JSON, delete till the end of file. This is easiest done with sed(or vim) as in `sed -i '/FAIL/,$d' breaking_test_errors.json`. +In all of those cases `breaking_test_errors-*.json` needs to be updated. Run the test with `-update` flag to update: `go test -update` -NOTE: some text editors/IDEs will try to parse files ending in `json` as JSON, which given the size of `breaking_test_errors.json` might be a problem when it's not actually a JSON (before the edit). So it might be a better idea to name it something different if editing by hand and fix it later. +NOTE: some text editors/IDEs will try to parse files ending in `json` as JSON, which given the size of `breaking_test_errors-*.json` might be a problem when it's not actually a JSON (before the edit). So it might be a better idea to name it something different if editing by hand and fix it later. 
This is a modified version of [the code in the goja repo](https://github.com/dop251/goja/blob/master/tc39_test.go) diff --git a/js/tc39/breaking_test_errors-experimental_enhanced.json b/js/tc39/breaking_test_errors-experimental_enhanced.json new file mode 100644 index 000000000000..7cab7aff1174 --- /dev/null +++ b/js/tc39/breaking_test_errors-experimental_enhanced.json @@ -0,0 +1,162 @@ +{ + "test/annexB/built-ins/Function/createdynfn-html-close-comment-body.js-strict:true": "test/annexB/built-ins/Function/createdynfn-html-close-comment-body.js: SyntaxError: SyntaxError: : Line 4:3 Unexpected token > (and 3 more errors) ", + "test/annexB/built-ins/Function/createdynfn-html-close-comment-params.js-strict:true": "test/annexB/built-ins/Function/createdynfn-html-close-comment-params.js: SyntaxError: SyntaxError: : Line 2:1 Unexpected token -- (and 5 more errors) ", + "test/annexB/built-ins/Function/createdynfn-html-open-comment-body.js-strict:true": "test/annexB/built-ins/Function/createdynfn-html-open-comment-body.js: SyntaxError: SyntaxError: : Line 3:1 Unexpected token < (and 2 more errors) ", + "test/annexB/built-ins/Function/createdynfn-html-open-comment-params.js-strict:true": "test/annexB/built-ins/Function/createdynfn-html-open-comment-params.js: SyntaxError: SyntaxError: : Line 1:21 Unexpected token < (and 5 more errors) ", + "test/annexB/built-ins/Function/createdynfn-no-line-terminator-html-close-comment-body.js-strict:true": "test/annexB/built-ins/Function/createdynfn-no-line-terminator-html-close-comment-body.js: SyntaxError: SyntaxError: : Line 3:3 Unexpected token > (and 3 more errors) ", + "test/annexB/built-ins/RegExp/RegExp-control-escape-russian-letter.js-strict:true": "test/annexB/built-ins/RegExp/RegExp-control-escape-russian-letter.js: Test262Error: invalid \\c escape matched c rather than \\c when followed by А Expected SameValue(«cА», «null») to be true ", + 
"test/annexB/built-ins/RegExp/RegExp-invalid-control-escape-character-class.js-strict:true": "test/annexB/built-ins/RegExp/RegExp-invalid-control-escape-character-class.js: Test262Error: Character \\ missing from character class [\\c\u0000] ", + "test/annexB/built-ins/RegExp/RegExp-leading-escape-BMP.js-strict:true": "test/annexB/built-ins/RegExp/RegExp-leading-escape-BMP.js: Test262Error: Code unit: d800 Expected SameValue(«\\\\\\ud800», «\\�») to be true ", + "test/annexB/built-ins/RegExp/RegExp-trailing-escape-BMP.js-strict:true": "test/annexB/built-ins/RegExp/RegExp-trailing-escape-BMP.js: Test262Error: Code unit: d800 Expected SameValue(«a\\\\\\ud800», «a\\�») to be true ", + "test/annexB/built-ins/RegExp/prototype/compile/pattern-string-invalid-u.js-strict:true": "test/annexB/built-ins/RegExp/prototype/compile/pattern-string-invalid-u.js: Test262Error: invalid pattern: { Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/AsyncArrowFunction/is-a-constructor.js-strict:true": "test/built-ins/AsyncArrowFunction/is-a-constructor.js: SyntaxError: Async generators are not supported yet ", + "test/built-ins/AsyncFunction/AsyncFunction-is-subclass.js-strict:true": "test/built-ins/AsyncFunction/AsyncFunction-is-subclass.js: Test262Error: Prototype of constructor is Function Expected SameValue(«function () { [native code] }», «function Function() { [native code] }») to be true ", + "test/built-ins/AsyncFunction/is-a-constructor.js-strict:true": "test/built-ins/AsyncFunction/is-a-constructor.js: SyntaxError: Async generators are not supported yet ", + "test/built-ins/Date/UTC/fp-evaluation-order.js-strict:true": "test/built-ins/Date/UTC/fp-evaluation-order.js: Test262Error: order of operations / precision in MakeTime Expected SameValue(«29256», «29312») to be true ", + "test/built-ins/Function/internals/Construct/base-ctor-revoked-proxy.js-strict:true": "test/built-ins/Function/internals/Construct/base-ctor-revoked-proxy.js: 
Test262Error: Expected a TypeError to be thrown but no exception was thrown at all ", + "test/built-ins/GeneratorFunction/is-a-constructor.js-strict:true": "test/built-ins/GeneratorFunction/is-a-constructor.js: SyntaxError: Async generators are not supported yet ", + "test/built-ins/Object/seal/seal-bigint64array.js-strict:true": "test/built-ins/Object/seal/seal-bigint64array.js: ReferenceError: BigInt64Array is not defined ", + "test/built-ins/Object/seal/seal-biguint64array.js-strict:true": "test/built-ins/Object/seal/seal-biguint64array.js: ReferenceError: BigUint64Array is not defined ", + "test/built-ins/RegExp/quantifier-integer-limit.js-strict:true": "test/built-ins/RegExp/quantifier-integer-limit.js: SyntaxError: Invalid regular expression (re2): b{9007199254740991} (error parsing regexp: invalid repeat count: `{9007199254740991}`) ", + "test/built-ins/RegExp/unicode_restricted_brackets.js-strict:true": "test/built-ins/RegExp/unicode_restricted_brackets.js: Test262Error: RegExp(\"]\", \"u\"): Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/RegExp/unicode_restricted_character_class_escape.js-strict:true": "test/built-ins/RegExp/unicode_restricted_character_class_escape.js: Test262Error: RegExp(\"[\\d-a]\", \"u\"): Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/RegExp/unicode_restricted_identity_escape.js-strict:true": "test/built-ins/RegExp/unicode_restricted_identity_escape.js: Test262Error: Invalid IdentityEscape in AtomEscape: '\\\u0000' Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/RegExp/unicode_restricted_identity_escape_alpha.js-strict:true": "test/built-ins/RegExp/unicode_restricted_identity_escape_alpha.js: Test262Error: IdentityEscape in AtomEscape: 'A' Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/RegExp/unicode_restricted_identity_escape_c.js-strict:true": 
"test/built-ins/RegExp/unicode_restricted_identity_escape_c.js: Test262Error: Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/RegExp/unicode_restricted_identity_escape_u.js-strict:true": "test/built-ins/RegExp/unicode_restricted_identity_escape_u.js: Test262Error: RegExp(\"\\u\", \"u\"): Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/RegExp/unicode_restricted_identity_escape_x.js-strict:true": "test/built-ins/RegExp/unicode_restricted_identity_escape_x.js: Test262Error: RegExp(\"\\x\", \"u\"): Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/RegExp/unicode_restricted_incomplete_quantifier.js-strict:true": "test/built-ins/RegExp/unicode_restricted_incomplete_quantifier.js: Test262Error: RegExp(\"a{\", \"u\"): Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/RegExp/unicode_restricted_octal_escape.js-strict:true": "test/built-ins/RegExp/unicode_restricted_octal_escape.js: Test262Error: RegExp(\"\\1\", \"u\"): Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/RegExp/unicode_restricted_quantifiable_assertion.js-strict:true": "test/built-ins/RegExp/unicode_restricted_quantifiable_assertion.js: Test262Error: RegExp(\"(?=.)*\", \"u\"): Expected a SyntaxError to be thrown but no exception was thrown at all ", + "test/built-ins/TypedArrayConstructors/BigUint64Array/is-a-constructor.js-strict:true": "test/built-ins/TypedArrayConstructors/BigUint64Array/is-a-constructor.js: ReferenceError: BigUint64Array is not defined ", + "test/language/comments/hashbang/function-constructor.js-strict:true": "test/language/comments/hashbang/function-constructor.js: SyntaxError: Async generators are not supported yet ", + "test/language/comments/hashbang/module.js-strict:false": "test/language/comments/hashbang/module.js: 
file://TestTC39/test262/test/language/comments/hashbang/module.js: Line 1:28 Unexpected token ILLEGAL (and 2 more errors)", + "test/language/destructuring/binding/syntax/destructuring-array-parameters-function-arguments-length.js-strict:true": "test/language/destructuring/binding/syntax/destructuring-array-parameters-function-arguments-length.js: SyntaxError: Async generators are not supported yet ", + "test/language/destructuring/binding/syntax/destructuring-object-parameters-function-arguments-length.js-strict:true": "test/language/destructuring/binding/syntax/destructuring-object-parameters-function-arguments-length.js: SyntaxError: Async generators are not supported yet ", + "test/language/expressions/assignment/fn-name-lhs-cover.js-strict:true": "test/language/expressions/assignment/fn-name-lhs-cover.js: Test262Error: descriptor value should be ", + "test/language/expressions/class/class-name-ident-await-escaped-module.js-strict:true": "test/language/expressions/class/class-name-ident-await-escaped-module.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/expressions/class/class-name-ident-await-module.js-strict:true": "test/language/expressions/class/class-name-ident-await-module.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/expressions/class/elements/class-name-static-initializer-default-export.js-strict:true": "test/language/expressions/class/elements/class-name-static-initializer-default-export.js: Test262Error: Expected SameValue(«class_name_static_initializer_default_export_default», «default») to be true ", + "test/language/expressions/class/elements/private-getter-is-not-a-own-property.js-strict:true": "test/language/expressions/class/elements/private-getter-is-not-a-own-property.js: TypeError: Object has no member '__lookupGetter__' ", + "test/language/expressions/class/elements/private-setter-is-not-a-own-property.js-strict:true": 
"test/language/expressions/class/elements/private-setter-is-not-a-own-property.js: TypeError: Object has no member '__lookupSetter__' ", + "test/language/expressions/optional-chaining/iteration-statement-for-await-of.js-strict:true": "test/language/expressions/optional-chaining/iteration-statement-for-await-of.js: test/language/expressions/optional-chaining/iteration-statement-for-await-of.js: Line 19:7 Unexpected token await (and 9 more errors)", + "test/language/expressions/optional-chaining/member-expression.js-strict:true": "test/language/expressions/optional-chaining/member-expression.js: SyntaxError: Async generators are not supported yet ", + "test/language/literals/regexp/S7.8.5_A1.1_T2.js-strict:true": "test/language/literals/regexp/S7.8.5_A1.1_T2.js: Test262Error: Code unit: d800 Expected SameValue(«\\ud800», «�») to be true ", + "test/language/literals/regexp/S7.8.5_A1.4_T2.js-strict:true": "test/language/literals/regexp/S7.8.5_A1.4_T2.js: Test262Error: Code unit: d800 Expected SameValue(«\\\\\\ud800», «\\�») to be true ", + "test/language/literals/regexp/S7.8.5_A2.1_T2.js-strict:true": "test/language/literals/regexp/S7.8.5_A2.1_T2.js: Test262Error: Code unit: d800 Expected SameValue(«nnnn\\ud800», «nnnn�») to be true ", + "test/language/literals/regexp/S7.8.5_A2.4_T2.js-strict:true": "test/language/literals/regexp/S7.8.5_A2.4_T2.js: Test262Error: Code unit: d800 Expected SameValue(«a\\\\\\ud800», «a\\�») to be true ", + "test/language/literals/regexp/invalid-optional-lookbehind.js-strict:true": "test/language/literals/regexp/invalid-optional-lookbehind.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/invalid-optional-negative-lookbehind.js-strict:true": "test/language/literals/regexp/invalid-optional-negative-lookbehind.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/invalid-range-lookbehind.js-strict:true": 
"test/language/literals/regexp/invalid-range-lookbehind.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/invalid-range-negative-lookbehind.js-strict:true": "test/language/literals/regexp/invalid-range-negative-lookbehind.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-case-mapping.js-strict:true": "test/language/literals/regexp/u-case-mapping.js: Test262Error: Case mapping is not applied in the absence of the `u` flag Expected SameValue(«true», «false») to be true ", + "test/language/literals/regexp/u-invalid-class-escape.js-strict:true": "test/language/literals/regexp/u-invalid-class-escape.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-extended-pattern-char.js-strict:true": "test/language/literals/regexp/u-invalid-extended-pattern-char.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-identity-escape.js-strict:true": "test/language/literals/regexp/u-invalid-identity-escape.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-legacy-octal-escape.js-strict:true": "test/language/literals/regexp/u-invalid-legacy-octal-escape.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-non-empty-class-ranges-no-dash-a.js-strict:true": "test/language/literals/regexp/u-invalid-non-empty-class-ranges-no-dash-a.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-oob-decimal-escape.js-strict:true": "test/language/literals/regexp/u-invalid-oob-decimal-escape.js: error is not an object (Test262: This statement should not be evaluated.)", + 
"test/language/literals/regexp/u-invalid-optional-lookahead.js-strict:true": "test/language/literals/regexp/u-invalid-optional-lookahead.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-optional-lookbehind.js-strict:true": "test/language/literals/regexp/u-invalid-optional-lookbehind.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-optional-negative-lookahead.js-strict:true": "test/language/literals/regexp/u-invalid-optional-negative-lookahead.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-optional-negative-lookbehind.js-strict:true": "test/language/literals/regexp/u-invalid-optional-negative-lookbehind.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-range-lookahead.js-strict:true": "test/language/literals/regexp/u-invalid-range-lookahead.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-range-lookbehind.js-strict:true": "test/language/literals/regexp/u-invalid-range-lookbehind.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-range-negative-lookahead.js-strict:true": "test/language/literals/regexp/u-invalid-range-negative-lookahead.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-invalid-range-negative-lookbehind.js-strict:true": "test/language/literals/regexp/u-invalid-range-negative-lookbehind.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/regexp/u-unicode-esc-non-hex.js-strict:true": "test/language/literals/regexp/u-unicode-esc-non-hex.js: error is not an object (Test262: This statement should not be 
evaluated.)", + "test/language/literals/string/S7.8.4_A4.3_T1.js-strict:true": "test/language/literals/string/S7.8.4_A4.3_T1.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/literals/string/S7.8.4_A4.3_T2.js-strict:true": "test/language/literals/string/S7.8.4_A4.3_T2.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/module-code/comment-multi-line-html-close.js-strict:true": "test/language/module-code/comment-multi-line-html-close.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/module-code/comment-single-line-html-close.js-strict:true": "test/language/module-code/comment-single-line-html-close.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/module-code/comment-single-line-html-open.js-strict:true": "test/language/module-code/comment-single-line-html-open.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/module-code/early-dup-top-function.js-strict:true": "test/language/module-code/early-dup-top-function.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/module-code/early-new-target.js-strict:true": "test/language/module-code/early-new-target.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/module-code/eval-export-dflt-cls-anon.js-strict:true": "test/language/module-code/eval-export-dflt-cls-anon.js: Test262Error: correct name is assigned Expected SameValue(«eval_export_dflt_cls_anon_default», «default») to be true ", + "test/language/module-code/eval-export-dflt-expr-cls-anon.js-strict:true": "test/language/module-code/eval-export-dflt-expr-cls-anon.js: Test262Error: correct name is assigned Expected SameValue(«eval_export_dflt_expr_cls_anon_default», «default») to be true ", + 
"test/language/module-code/eval-export-dflt-expr-fn-anon.js-strict:true": "test/language/module-code/eval-export-dflt-expr-fn-anon.js: Test262Error: correct name is assigned Expected SameValue(«eval_export_dflt_expr_fn_anon_default», «default») to be true ", + "test/language/module-code/eval-export-dflt-expr-gen-anon.js-strict:true": "test/language/module-code/eval-export-dflt-expr-gen-anon.js: Test262Error: correct name is assigned Expected SameValue(«eval_export_dflt_expr_gen_anon_default», «default») to be true ", + "test/language/module-code/eval-self-once.js-strict:true": "test/language/module-code/eval-self-once.js: Test262Error: global property initially unset Expected SameValue(«262», «undefined») to be true ", + "test/language/module-code/eval-this.js-strict:true": "test/language/module-code/eval-this.js: Test262Error: Expected SameValue(«[object Object]», «undefined») to be true ", + "test/language/module-code/export-default-asyncgenerator-declaration-binding.js-strict:true": "test/language/module-code/export-default-asyncgenerator-declaration-binding.js: SyntaxError: Async generators are not supported yet ", + "test/language/module-code/export-expname-binding-index.js-strict:true": "test/language/module-code/export-expname-binding-index.js: Test262Error: Expected a TypeError to be thrown but no exception was thrown at all ", + "test/language/module-code/export-expname-from-star.js-strict:true": "test/language/module-code/export-expname-from-star.js: Test262Error: Expected SameValue(«undefined», «function Mercury() {\n}») to be true ", + "test/language/module-code/instn-iee-bndng-cls.js-strict:true": "test/language/module-code/instn-iee-bndng-cls.js: Test262Error: binding is created but not initialized Expected a ReferenceError but got a TypeError ", + "test/language/module-code/instn-iee-bndng-const.js-strict:true": "test/language/module-code/instn-iee-bndng-const.js: Test262Error: binding is created but not initialized Expected a ReferenceError but got 
a TypeError ", + "test/language/module-code/instn-iee-bndng-fun.js-strict:true": "test/language/module-code/instn-iee-bndng-fun.js: TypeError: Cannot read property 'A' of undefined ", + "test/language/module-code/instn-iee-bndng-gen.js-strict:true": "test/language/module-code/instn-iee-bndng-gen.js: TypeError: Cannot read property 'A' of undefined ", + "test/language/module-code/instn-iee-bndng-let.js-strict:true": "test/language/module-code/instn-iee-bndng-let.js: Test262Error: binding is created but not initialized Expected a ReferenceError but got a TypeError ", + "test/language/module-code/instn-iee-bndng-var.js-strict:true": "test/language/module-code/instn-iee-bndng-var.js: TypeError: Cannot read property 'A' of undefined ", + "test/language/module-code/instn-iee-err-ambiguous-as.js-strict:true": "test/language/module-code/instn-iee-err-ambiguous-as.js: Expected error: ", + "test/language/module-code/instn-iee-err-ambiguous.js-strict:true": "test/language/module-code/instn-iee-err-ambiguous.js: Expected error: ", + "test/language/module-code/instn-iee-err-circular-as.js-strict:true": "test/language/module-code/instn-iee-err-circular-as.js: Expected error: ", + "test/language/module-code/instn-iee-err-circular.js-strict:true": "test/language/module-code/instn-iee-err-circular.js: Expected error: ", + "test/language/module-code/instn-iee-err-dflt-thru-star-as.js-strict:true": "test/language/module-code/instn-iee-err-dflt-thru-star-as.js: Expected error: ", + "test/language/module-code/instn-iee-err-dflt-thru-star.js-strict:true": "test/language/module-code/instn-iee-err-dflt-thru-star.js: Expected error: ", + "test/language/module-code/instn-iee-err-not-found-as.js-strict:true": "test/language/module-code/instn-iee-err-not-found-as.js: Expected error: ", + "test/language/module-code/instn-iee-err-not-found.js-strict:true": "test/language/module-code/instn-iee-err-not-found.js: Expected error: ", + 
"test/language/module-code/instn-named-bndng-cls.js-strict:true": "test/language/module-code/instn-named-bndng-cls.js: Test262Error: binding is created but not initialized Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-bndng-const.js-strict:true": "test/language/module-code/instn-named-bndng-const.js: Test262Error: binding is created but not initialized Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-bndng-dflt-cls.js-strict:true": "test/language/module-code/instn-named-bndng-dflt-cls.js: Test262Error: Binding is created but not initialized. Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-bndng-dflt-expr.js-strict:true": "test/language/module-code/instn-named-bndng-dflt-expr.js: Test262Error: binding is created but not initialized Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-bndng-dflt-fun-anon.js-strict:true": "test/language/module-code/instn-named-bndng-dflt-fun-anon.js: Test262Error: correct name is assigned Expected SameValue(«instn_named_bndng_dflt_fun_anon_default», «default») to be true ", + "test/language/module-code/instn-named-bndng-dflt-gen-anon.js-strict:true": "test/language/module-code/instn-named-bndng-dflt-gen-anon.js: Test262Error: correct name is assigned Expected SameValue(«instn_named_bndng_dflt_gen_anon_default», «default») to be true ", + "test/language/module-code/instn-named-bndng-dflt-named.js-strict:true": "test/language/module-code/instn-named-bndng-dflt-named.js: Test262Error: Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-bndng-dflt-star.js-strict:true": "test/language/module-code/instn-named-bndng-dflt-star.js: Test262Error: Expected a ReferenceError to be thrown but no exception was 
thrown at all ", + "test/language/module-code/instn-named-bndng-fun.js-strict:true": "test/language/module-code/instn-named-bndng-fun.js: Test262Error: binding rejects assignment Expected a TypeError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-bndng-gen.js-strict:true": "test/language/module-code/instn-named-bndng-gen.js: Test262Error: binding rejects assignment Expected a TypeError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-bndng-let.js-strict:true": "test/language/module-code/instn-named-bndng-let.js: Test262Error: binding is created but not initialized Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-bndng-trlng-comma.js-strict:true": "test/language/module-code/instn-named-bndng-trlng-comma.js: Test262Error: binding rejects assignment Expected a TypeError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-bndng-var.js-strict:true": "test/language/module-code/instn-named-bndng-var.js: Test262Error: binding rejects assignment Expected a TypeError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-named-err-ambiguous-as.js-strict:true": "test/language/module-code/instn-named-err-ambiguous-as.js: Expected error: ", + "test/language/module-code/instn-named-err-ambiguous.js-strict:true": "test/language/module-code/instn-named-err-ambiguous.js: Expected error: ", + "test/language/module-code/instn-named-err-dflt-thru-star-as.js-strict:true": "test/language/module-code/instn-named-err-dflt-thru-star-as.js: Expected error: ", + "test/language/module-code/instn-named-err-dflt-thru-star-dflt.js-strict:true": "test/language/module-code/instn-named-err-dflt-thru-star-dflt.js: Expected error: ", + "test/language/module-code/instn-named-err-not-found-as.js-strict:true": "test/language/module-code/instn-named-err-not-found-as.js: Expected 
error: ", + "test/language/module-code/instn-named-err-not-found-dflt.js-strict:true": "test/language/module-code/instn-named-err-not-found-dflt.js: Expected error: ", + "test/language/module-code/instn-named-err-not-found.js-strict:true": "test/language/module-code/instn-named-err-not-found.js: Expected error: ", + "test/language/module-code/instn-named-iee-cycle.js-strict:true": "test/language/module-code/instn-named-iee-cycle.js: TypeError: Cannot read property 'b' of undefined ", + "test/language/module-code/instn-resolve-empty-export.js-strict:true": "test/language/module-code/instn-resolve-empty-export.js: unexpected error type (GoError), expected (SyntaxError)", + "test/language/module-code/instn-resolve-empty-import.js-strict:true": "test/language/module-code/instn-resolve-empty-import.js: unexpected error type (GoError), expected (SyntaxError)", + "test/language/module-code/instn-resolve-err-syntax-1.js-strict:true": "test/language/module-code/instn-resolve-err-syntax-1.js: unexpected error type (GoError), expected (SyntaxError)", + "test/language/module-code/instn-resolve-err-syntax-2.js-strict:true": "test/language/module-code/instn-resolve-err-syntax-2.js: unexpected error type (GoError), expected (SyntaxError)", + "test/language/module-code/instn-resolve-order-depth.js-strict:true": "test/language/module-code/instn-resolve-order-depth.js: unexpected error type (GoError), expected (SyntaxError)", + "test/language/module-code/instn-resolve-order-src.js-strict:true": "test/language/module-code/instn-resolve-order-src.js: unexpected error type (GoError), expected (SyntaxError)", + "test/language/module-code/instn-star-ambiguous.js-strict:true": "test/language/module-code/instn-star-ambiguous.js: Test262Error: Ambiguous export is not present Expected SameValue(«true», «false») to be true ", + "test/language/module-code/instn-star-binding.js-strict:true": "test/language/module-code/instn-star-binding.js: Test262Error: binding rejects assignment Expected a 
TypeError to be thrown but no exception was thrown at all ", + "test/language/module-code/instn-star-equality.js-strict:true": "test/language/module-code/instn-star-equality.js: Test262Error: Local namespace objects from consecutive declarations Expected SameValue(«[object Object]», «[object Object]») to be true ", + "test/language/module-code/instn-star-err-not-found.js-strict:true": "test/language/module-code/instn-star-err-not-found.js: Expected error: ", + "test/language/module-code/instn-star-iee-cycle.js-strict:true": "test/language/module-code/instn-star-iee-cycle.js: TypeError: Cannot read property 'b' of undefined ", + "test/language/module-code/namespace/Symbol.toStringTag.js-strict:true": "test/language/module-code/namespace/Symbol.toStringTag.js: Test262Error: Expected SameValue(«undefined», «Module») to be true ", + "test/language/module-code/namespace/internals/define-own-property.js-strict:true": "test/language/module-code/namespace/internals/define-own-property.js: Test262Error: Reflect.defineProperty: local2 Expected SameValue(«true», «false») to be true ", + "test/language/module-code/namespace/internals/delete-exported-init.js-strict:true": "test/language/module-code/namespace/internals/delete-exported-init.js: Test262Error: delete: local1 Expected a TypeError to be thrown but no exception was thrown at all ", + "test/language/module-code/namespace/internals/delete-exported-uninit.js-strict:true": "test/language/module-code/namespace/internals/delete-exported-uninit.js: Test262Error: delete: local1 Expected a TypeError to be thrown but no exception was thrown at all ", + "test/language/module-code/namespace/internals/delete-non-exported.js-strict:true": "test/language/module-code/namespace/internals/delete-non-exported.js: Test262Error: delete: default ", + "test/language/module-code/namespace/internals/enumerate-binding-uninit.js-strict:true": "test/language/module-code/namespace/internals/enumerate-binding-uninit.js: Test262Error: Expected a 
ReferenceError but got a Test262Error ", + "test/language/module-code/namespace/internals/get-own-property-str-found-init.js-strict:true": "test/language/module-code/namespace/internals/get-own-property-str-found-init.js: Test262Error: Expected SameValue(«undefined», «201») to be true ", + "test/language/module-code/namespace/internals/get-own-property-str-found-uninit.js-strict:true": "test/language/module-code/namespace/internals/get-own-property-str-found-uninit.js: Test262Error: hasOwnProperty: local1 Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/namespace/internals/get-own-property-sym.js-strict:true": "test/language/module-code/namespace/internals/get-own-property-sym.js: Test262Error: Expected SameValue(«false», «true») to be true ", + "test/language/module-code/namespace/internals/get-prototype-of.js-strict:true": "test/language/module-code/namespace/internals/get-prototype-of.js: Test262Error: Expected SameValue(«true», «false») to be true ", + "test/language/module-code/namespace/internals/get-str-found-uninit.js-strict:true": "test/language/module-code/namespace/internals/get-str-found-uninit.js: Test262Error: Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/namespace/internals/get-str-not-found.js-strict:true": "test/language/module-code/namespace/internals/get-str-not-found.js: Test262Error: key: __proto__ Expected SameValue(«[object Object]», «undefined») to be true ", + "test/language/module-code/namespace/internals/get-sym-found.js-strict:true": "test/language/module-code/namespace/internals/get-sym-found.js: Test262Error: Expected SameValue(«undefined», «string») to be true ", + "test/language/module-code/namespace/internals/has-property-str-not-found.js-strict:true": "test/language/module-code/namespace/internals/has-property-str-not-found.js: Test262Error: in: __proto__ Expected SameValue(«true», «false») to be true ", + 
"test/language/module-code/namespace/internals/has-property-sym-found.js-strict:true": "test/language/module-code/namespace/internals/has-property-sym-found.js: Test262Error: in: Symbol.toStringTag ", + "test/language/module-code/namespace/internals/is-extensible.js-strict:true": "test/language/module-code/namespace/internals/is-extensible.js: Test262Error: Expected SameValue(«true», «false») to be true ", + "test/language/module-code/namespace/internals/object-hasOwnProperty-binding-uninit.js-strict:true": "test/language/module-code/namespace/internals/object-hasOwnProperty-binding-uninit.js: Test262Error: Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/namespace/internals/object-keys-binding-uninit.js-strict:true": "test/language/module-code/namespace/internals/object-keys-binding-uninit.js: Test262Error: Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/namespace/internals/object-propertyIsEnumerable-binding-uninit.js-strict:true": "test/language/module-code/namespace/internals/object-propertyIsEnumerable-binding-uninit.js: Test262Error: Expected a ReferenceError to be thrown but no exception was thrown at all ", + "test/language/module-code/namespace/internals/own-property-keys-binding-types.js-strict:true": "test/language/module-code/namespace/internals/own-property-keys-binding-types.js: Test262Error: Expected SameValue(«8», «10») to be true ", + "test/language/module-code/namespace/internals/own-property-keys-sort.js-strict:true": "test/language/module-code/namespace/internals/own-property-keys-sort.js: Test262Error: Expected SameValue(«17», «16») to be true ", + "test/language/module-code/namespace/internals/set-prototype-of.js-strict:true": "test/language/module-code/namespace/internals/set-prototype-of.js: Test262Error: Expected a TypeError to be thrown but no exception was thrown at all ", + 
"test/language/module-code/namespace/internals/set.js-strict:true": "test/language/module-code/namespace/internals/set.js: Test262Error: AssignmentExpression: local1 Expected a TypeError to be thrown but no exception was thrown at all ", + "test/language/module-code/parse-err-hoist-lex-fun.js-strict:true": "test/language/module-code/parse-err-hoist-lex-fun.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/module-code/parse-err-return.js-strict:true": "test/language/module-code/parse-err-return.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/reserved-words/await-module.js-strict:true": "test/language/reserved-words/await-module.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/statements/class/class-name-ident-await-escaped-module.js-strict:true": "test/language/statements/class/class-name-ident-await-escaped-module.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/statements/class/class-name-ident-await-module.js-strict:true": "test/language/statements/class/class-name-ident-await-module.js: error is not an object (Test262: This statement should not be evaluated.)", + "test/language/statements/class/elements/private-getter-is-not-a-own-property.js-strict:true": "test/language/statements/class/elements/private-getter-is-not-a-own-property.js: TypeError: Object has no member '__lookupGetter__' ", + "test/language/statements/class/elements/private-setter-is-not-a-own-property.js-strict:true": "test/language/statements/class/elements/private-setter-is-not-a-own-property.js: TypeError: Object has no member '__lookupSetter__' ", + "test/language/statements/labeled/value-await-module-escaped.js-strict:true": "test/language/statements/labeled/value-await-module-escaped.js: error is not an object (Test262: This statement should not be evaluated.)", + 
"test/language/statements/labeled/value-await-module.js-strict:true": "test/language/statements/labeled/value-await-module.js: error is not an object (Test262: This statement should not be evaluated.)" +} diff --git a/js/tc39/breaking_test_errors.json b/js/tc39/breaking_test_errors-extended.json similarity index 96% rename from js/tc39/breaking_test_errors.json rename to js/tc39/breaking_test_errors-extended.json index 97d4fd0a8309..7652b0256200 100644 --- a/js/tc39/breaking_test_errors.json +++ b/js/tc39/breaking_test_errors-extended.json @@ -29,10 +29,10 @@ "test/built-ins/RegExp/unicode_restricted_octal_escape.js-strict:true": "test/built-ins/RegExp/unicode_restricted_octal_escape.js: Test262Error: RegExp(\"\\1\", \"u\"): Expected a SyntaxError to be thrown but no exception was thrown at all ", "test/built-ins/RegExp/unicode_restricted_quantifiable_assertion.js-strict:true": "test/built-ins/RegExp/unicode_restricted_quantifiable_assertion.js: Test262Error: RegExp(\"(?=.)*\", \"u\"): Expected a SyntaxError to be thrown but no exception was thrown at all ", "test/built-ins/TypedArrayConstructors/BigUint64Array/is-a-constructor.js-strict:true": "test/built-ins/TypedArrayConstructors/BigUint64Array/is-a-constructor.js: ReferenceError: BigUint64Array is not defined ", - "test/language/comments/hashbang/function-constructor.js-strict:true": "test/language/comments/hashbang/function-constructor.js: SyntaxError: test/language/comments/hashbang/function-constructor.js: Unexpected token (17:47)\n 15 | const AsyncFunction = (async function (){}).constructor;\n 16 | const GeneratorFunction = (function *(){}).constructor;\n> 17 | const AsyncGeneratorFunction = (async function *(){}).constructor;\n | ^\n 18 | for (const ctor of [\n 19 | Function,\n 20 | AsyncFunction, ", + "test/language/comments/hashbang/function-constructor.js-strict:true": "test/language/comments/hashbang/function-constructor.js: SyntaxError: Async generators are not supported yet ", 
"test/language/comments/hashbang/module.js-strict:false": "test/language/comments/hashbang/module.js: file://TestTC39/test262/test/language/comments/hashbang/module.js: Line 1:28 Unexpected token ILLEGAL (and 2 more errors)", - "test/language/destructuring/binding/syntax/destructuring-array-parameters-function-arguments-length.js-strict:true": "test/language/destructuring/binding/syntax/destructuring-array-parameters-function-arguments-length.js: SyntaxError: test/language/destructuring/binding/syntax/destructuring-array-parameters-function-arguments-length.js: Unexpected token (35:33)\n 33 | assert.sameValue((async ([a,b]) => {}).length, 1);\n 34 | assert.sameValue((async function([a,b]) {}).length, 1);\n> 35 | assert.sameValue((async function * ([a,b]) {}).length, 1);\n | ^\n 36 | \n 37 | ", - "test/language/destructuring/binding/syntax/destructuring-object-parameters-function-arguments-length.js-strict:true": "test/language/destructuring/binding/syntax/destructuring-object-parameters-function-arguments-length.js: SyntaxError: test/language/destructuring/binding/syntax/destructuring-object-parameters-function-arguments-length.js: Unexpected token (35:33)\n 33 | assert.sameValue((async ({a,b}) => {}).length, 1);\n 34 | assert.sameValue((async function({a,b}) {}).length, 1);\n> 35 | assert.sameValue((async function * ({a,b}) {}).length, 1);\n | ^\n 36 | \n 37 | ", + "test/language/destructuring/binding/syntax/destructuring-array-parameters-function-arguments-length.js-strict:true": "test/language/destructuring/binding/syntax/destructuring-array-parameters-function-arguments-length.js: SyntaxError: Async generators are not supported yet ", + "test/language/destructuring/binding/syntax/destructuring-object-parameters-function-arguments-length.js-strict:true": "test/language/destructuring/binding/syntax/destructuring-object-parameters-function-arguments-length.js: SyntaxError: Async generators are not supported yet ", 
"test/language/export/escaped-as-export-specifier.js-strict:true": "test/language/export/escaped-as-export-specifier.js: error is not an object (Test262: This statement should not be evaluated.)", "test/language/export/escaped-from.js-strict:true": "test/language/export/escaped-from.js: error is not an object (Test262: This statement should not be evaluated.)", "test/language/expressions/assignment/fn-name-lhs-cover.js-strict:true": "test/language/expressions/assignment/fn-name-lhs-cover.js: Test262Error: descriptor value should be ", @@ -47,7 +47,7 @@ "test/language/expressions/class/elements/private-setter-is-not-a-own-property.js-strict:true": "test/language/expressions/class/elements/private-setter-is-not-a-own-property.js: TypeError: Object has no member '__lookupSetter__' ", "test/language/expressions/object/cpn-obj-lit-computed-property-name-from-integer-separators.js-strict:true": "test/language/expressions/object/cpn-obj-lit-computed-property-name-from-integer-separators.js: SyntaxError: test/language/expressions/object/cpn-obj-lit-computed-property-name-from-integer-separators.js: Identifier directly after number (29:4)\n 27 | \n 28 | let o = {\n> 29 | [1_2_3_4_5_6_7_8]: 1_2_3_4_5_6_7_8\n | ^\n 30 | };\n 31 | \n 32 | assert.sameValue( ", "test/language/expressions/optional-chaining/iteration-statement-for-await-of.js-strict:true": "test/language/expressions/optional-chaining/iteration-statement-for-await-of.js: SyntaxError: test/language/expressions/optional-chaining/iteration-statement-for-await-of.js: Unexpected token, expected ( (31:6)\n 29 | async function checkAssertions() {\n 30 | let count = 0;\n> 31 | for await (const num of obj?.iterable) {\n | ^\n 32 | count += num;\n 33 | }\n 34 | assert.sameValue(3, count); ", - "test/language/expressions/optional-chaining/member-expression.js-strict:true": "test/language/expressions/optional-chaining/member-expression.js: SyntaxError: test/language/expressions/optional-chaining/member-expression.js: 
Unexpected token (18:23)\n 16 | // IdentifierReference\n 17 | const a = {b: 22};\n> 18 | assert.sameValue(22, a?.b);\n | ^\n 19 | // this\n 20 | function fn () {\n 21 | return this?.a ", + "test/language/expressions/optional-chaining/member-expression.js-strict:true": "test/language/expressions/optional-chaining/member-expression.js: SyntaxError: Async generators are not supported yet ", "test/language/import/dup-bound-names.js-strict:true": "test/language/import/dup-bound-names.js: unexpected error type (TypeError), expected (SyntaxError)", "test/language/import/escaped-as-import-specifier.js-strict:true": "test/language/import/escaped-as-import-specifier.js: error is not an object (Test262: This statement should not be evaluated.)", "test/language/import/escaped-as-namespace-import.js-strict:true": "test/language/import/escaped-as-namespace-import.js: error is not an object (Test262: This statement should not be evaluated.)", diff --git a/js/tc39/tc39_test.go b/js/tc39/tc39_test.go index ec2730d17505..56ab0419886f 100644 --- a/js/tc39/tc39_test.go +++ b/js/tc39/tc39_test.go @@ -7,12 +7,14 @@ import ( "bytes" "encoding/json" "errors" + "flag" "fmt" "io" "io/fs" "net/url" "os" "path" + "path/filepath" "runtime" "sort" "strings" @@ -230,6 +232,8 @@ var ( "test/language/identifiers/start-unicode-14.", "test/language/identifiers/part-unicode-14.", } + + update = flag.Bool("update", false, "update breaking_test_errors-*.json files") //nolint:gochecknoglobals ) //nolint:unused,structcheck @@ -259,6 +263,8 @@ type tc39TestCtx struct { errorsLock sync.Mutex errors map[string]string + + compatibilityMode lib.CompatibilityMode } type TC39MetaNegative struct { @@ -559,21 +565,27 @@ func (ctx *tc39TestCtx) runTC39File(name string, t testing.TB) { } } +func breakingTestErrorsFilename(compatibilityMode lib.CompatibilityMode) string { + return fmt.Sprintf("./breaking_test_errors-%s.json", compatibilityMode) +} + func (ctx *tc39TestCtx) init() { ctx.prgCache = 
make(map[string]*goja.Program) ctx.errors = make(map[string]string) - b, err := os.ReadFile("./breaking_test_errors.json") - if err != nil { - panic(err) - } - b = bytes.TrimSpace(b) - if len(b) > 0 { - ctx.expectedErrors = make(map[string]string, 1000) - err = json.Unmarshal(b, &ctx.expectedErrors) + if !*update { + b, err := os.ReadFile(breakingTestErrorsFilename(ctx.compatibilityMode)) if err != nil { panic(err) } + b = bytes.TrimSpace(b) + if len(b) > 0 { + ctx.expectedErrors = make(map[string]string, 1000) + err = json.Unmarshal(b, &ctx.expectedErrors) + if err != nil { + panic(err) + } + } } } @@ -598,7 +610,7 @@ func (ctx *tc39TestCtx) compile(base, name string) (*goja.Program, error) { str := string(b) comp := ctx.compilerPool.Get() defer ctx.compilerPool.Put(comp) - comp.Options = compiler.Options{Strict: false, CompatibilityMode: lib.CompatibilityModeExtended} + comp.Options = compiler.Options{Strict: false, CompatibilityMode: ctx.compatibilityMode} prg, _, err = comp.Compile(str, name, true) if err != nil { return nil, err @@ -644,10 +656,8 @@ func (ctx *tc39TestCtx) runTC39Script(name, src string, includes []string, vm *g p, _, err = comp.Compile(src, name, true) origErr = err if err != nil && !expectsError { - src, _, err = comp.Transform(src, name, nil) - if err == nil { - p, _, err = comp.Compile(src, name, true) - } + comp.Options.CompatibilityMode = ctx.compatibilityMode + p, _, err = comp.Compile(src, name, true) } if err != nil { @@ -687,7 +697,7 @@ func (ctx *tc39TestCtx) runTC39Module(name, src string, includes []string, vm *g comp := ctx.compilerPool.Get() defer ctx.compilerPool.Put(comp) - comp.Options = compiler.Options{Strict: false, CompatibilityMode: lib.CompatibilityModeExtended} + comp.Options = compiler.Options{Strict: false, CompatibilityMode: ctx.compatibilityMode} mr := modules.NewModuleResolver(nil, func(specifier *url.URL, _ string) ([]byte, error) { @@ -754,13 +764,21 @@ func TestTC39(t *testing.T) { t.Skip() } + runTestTC39(t, 
lib.CompatibilityModeExtended) + runTestTC39(t, lib.CompatibilityModeExperimentalEnhanced) +} + +func runTestTC39(t *testing.T, compatibilityMode lib.CompatibilityMode) { + t.Helper() + if _, err := os.Stat(tc39BASE); err != nil { t.Skipf("If you want to run tc39 tests, you need to run the 'checkout.sh` script in the directory to get https://github.com/tc39/test262 at the correct last tested commit (%v)", err) } ctx := &tc39TestCtx{ - base: tc39BASE, - compilerPool: compiler.NewPool(testutils.NewLogger(t), runtime.GOMAXPROCS(0)), + base: tc39BASE, + compilerPool: compiler.NewPool(testutils.NewLogger(t), runtime.GOMAXPROCS(0)), + compatibilityMode: compatibilityMode, } ctx.init() // ctx.enableBench = true @@ -787,13 +805,23 @@ func TestTC39(t *testing.T) { fmt.Printf("%s\t%d\n", item.name, item.duration/time.Millisecond) } } - if len(ctx.errors) > 0 { - enc := json.NewEncoder(os.Stdout) + if len(ctx.errors) > 0 && *update { + filename := breakingTestErrorsFilename(ctx.compatibilityMode) + file, err := os.Create(filepath.Clean(filename)) + if err != nil { + t.Logf("Error while creating %s: %s", filename, err) + } + + enc := json.NewEncoder(file) enc.SetIndent("", " ") enc.SetEscapeHTML(false) - err := enc.Encode(ctx.errors) + err = enc.Encode(ctx.errors) if err != nil { t.Logf("Error while json encoding errors: %s", err) } + err = file.Close() + if err != nil { + t.Logf("Error while closing %s: %s", filename, err) + } } } diff --git a/lib/compatibility_mode_gen.go b/lib/compatibility_mode_gen.go index 75b62e6de4ba..d9f527c40fe5 100644 --- a/lib/compatibility_mode_gen.go +++ b/lib/compatibility_mode_gen.go @@ -1,15 +1,14 @@ // Code generated by "enumer -type=CompatibilityMode -transform=snake -trimprefix CompatibilityMode -output compatibility_mode_gen.go"; DO NOT EDIT. 
-// package lib import ( "fmt" ) -const _CompatibilityModeName = "extendedbase" +const _CompatibilityModeName = "extendedbaseexperimental_enhanced" -var _CompatibilityModeIndex = [...]uint8{0, 8, 12} +var _CompatibilityModeIndex = [...]uint8{0, 8, 12, 33} func (i CompatibilityMode) String() string { i -= 1 @@ -19,11 +18,12 @@ func (i CompatibilityMode) String() string { return _CompatibilityModeName[_CompatibilityModeIndex[i]:_CompatibilityModeIndex[i+1]] } -var _CompatibilityModeValues = []CompatibilityMode{1, 2} +var _CompatibilityModeValues = []CompatibilityMode{1, 2, 3} var _CompatibilityModeNameToValueMap = map[string]CompatibilityMode{ - _CompatibilityModeName[0:8]: 1, - _CompatibilityModeName[8:12]: 2, + _CompatibilityModeName[0:8]: 1, + _CompatibilityModeName[8:12]: 2, + _CompatibilityModeName[12:33]: 3, } // CompatibilityModeString retrieves an enum value from the enum constants string name. diff --git a/lib/runtime_options.go b/lib/runtime_options.go index 56dff7d789ce..d7f95b0bebd1 100644 --- a/lib/runtime_options.go +++ b/lib/runtime_options.go @@ -17,6 +17,8 @@ const ( CompatibilityModeExtended CompatibilityMode = iota + 1 // CompatibilityModeBase is standard goja ES5.1+ CompatibilityModeBase + // CompatibilityModeExperimentalEnhanced achieves TypeScript and ES6+ compatibility with esbuild + CompatibilityModeExperimentalEnhanced ) // RuntimeOptions are settings passed onto the goja JS runtime diff --git a/vendor/github.com/evanw/esbuild/LICENSE.md b/vendor/github.com/evanw/esbuild/LICENSE.md new file mode 100644 index 000000000000..2027e8dcf378 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Evan Wallace + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/evanw/esbuild/internal/api_helpers/use_timer.go b/vendor/github.com/evanw/esbuild/internal/api_helpers/use_timer.go new file mode 100644 index 000000000000..3b36fe25a30c --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/api_helpers/use_timer.go @@ -0,0 +1,7 @@ +package api_helpers + +// This flag is set by the CLI to activate the timer. It's put here instead of +// by the timer to discourage code from checking this flag. Only the code that +// creates the root timer should check this flag. Other code should check that +// the timer is not null to detect if the timer is being used or not. +var UseTimer bool diff --git a/vendor/github.com/evanw/esbuild/internal/ast/ast.go b/vendor/github.com/evanw/esbuild/internal/ast/ast.go new file mode 100644 index 000000000000..67d2e5b8a540 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/ast/ast.go @@ -0,0 +1,812 @@ +package ast + +// This file contains data structures that are used with the AST packages for +// both JavaScript and CSS. This helps the bundler treat both AST formats in +// a somewhat format-agnostic manner. 
+ +import ( + "sort" + + "github.com/evanw/esbuild/internal/helpers" + "github.com/evanw/esbuild/internal/logger" +) + +type ImportKind uint8 + +const ( + // An entry point provided by the user + ImportEntryPoint ImportKind = iota + + // An ES6 import or re-export statement + ImportStmt + + // A call to "require()" + ImportRequire + + // An "import()" expression with a string argument + ImportDynamic + + // A call to "require.resolve()" + ImportRequireResolve + + // A CSS "@import" rule + ImportAt + + // A CSS "composes" declaration + ImportComposesFrom + + // A CSS "url(...)" token + ImportURL +) + +func (kind ImportKind) StringForMetafile() string { + switch kind { + case ImportStmt: + return "import-statement" + case ImportRequire: + return "require-call" + case ImportDynamic: + return "dynamic-import" + case ImportRequireResolve: + return "require-resolve" + case ImportAt: + return "import-rule" + case ImportComposesFrom: + return "composes-from" + case ImportURL: + return "url-token" + case ImportEntryPoint: + return "entry-point" + default: + panic("Internal error") + } +} + +func (kind ImportKind) IsFromCSS() bool { + switch kind { + case ImportAt, ImportComposesFrom, ImportURL: + return true + } + return false +} + +func (kind ImportKind) MustResolveToCSS() bool { + switch kind { + case ImportAt, ImportComposesFrom: + return true + } + return false +} + +type ImportRecordFlags uint16 + +const ( + // Sometimes the parser creates an import record and decides it isn't needed. + // For example, TypeScript code may have import statements that later turn + // out to be type-only imports after analyzing the whole file. + IsUnused ImportRecordFlags = 1 << iota + + // If this is true, the import contains syntax like "* as ns". This is used + // to determine whether modules that have no exports need to be wrapped in a + // CommonJS wrapper or not. 
+ ContainsImportStar + + // If this is true, the import contains an import for the alias "default", + // either via the "import x from" or "import {default as x} from" syntax. + ContainsDefaultAlias + + // If this is true, the import contains an import for the alias "__esModule", + // via the "import {__esModule} from" syntax. + ContainsESModuleAlias + + // If true, this "export * from 'path'" statement is evaluated at run-time by + // calling the "__reExport()" helper function + CallsRunTimeReExportFn + + // Tell the printer to wrap this call to "require()" in "__toESM(...)" + WrapWithToESM + + // Tell the printer to wrap this ESM exports object in "__toCJS(...)" + WrapWithToCJS + + // Tell the printer to use the runtime "__require()" instead of "require()" + CallRuntimeRequire + + // True for the following cases: + // + // try { require('x') } catch { handle } + // try { await import('x') } catch { handle } + // try { require.resolve('x') } catch { handle } + // import('x').catch(handle) + // import('x').then(_, handle) + // + // In these cases we shouldn't generate an error if the path could not be + // resolved. + HandlesImportErrors + + // If true, this was originally written as a bare "import 'file'" statement + WasOriginallyBareImport + + // If true, this import can be removed if it's unused + IsExternalWithoutSideEffects + + // If true, "assert { type: 'json' }" was present + AssertTypeJSON + + // If true, do not generate "external": true in the metafile + ShouldNotBeExternalInMetafile + + // CSS "@import" of an empty file should be removed + WasLoadedWithEmptyLoader + + // Unique keys are randomly-generated strings that are used to replace paths + // in the source code after it's printed. These must not ever be split apart. 
+ ContainsUniqueKey +) + +func (flags ImportRecordFlags) Has(flag ImportRecordFlags) bool { + return (flags & flag) != 0 +} + +type ImportRecord struct { + AssertOrWith *ImportAssertOrWith + GlobPattern *GlobPattern + Path logger.Path + Range logger.Range + + // If the "HandlesImportErrors" flag is present, then this is the location + // of the error handler. This is used for error reporting. + ErrorHandlerLoc logger.Loc + + // The resolved source index for an internal import (within the bundle) or + // invalid for an external import (not included in the bundle) + SourceIndex Index32 + + // Files imported via the "copy" loader use this instead of "SourceIndex" + // because they are sort of like external imports, and are not bundled. + CopySourceIndex Index32 + + Flags ImportRecordFlags + Kind ImportKind +} + +type AssertOrWithKeyword uint8 + +const ( + AssertKeyword AssertOrWithKeyword = iota + WithKeyword +) + +func (kw AssertOrWithKeyword) String() string { + if kw == AssertKeyword { + return "assert" + } + return "with" +} + +type ImportAssertOrWith struct { + Entries []AssertOrWithEntry + KeywordLoc logger.Loc + InnerOpenBraceLoc logger.Loc + InnerCloseBraceLoc logger.Loc + OuterOpenBraceLoc logger.Loc + OuterCloseBraceLoc logger.Loc + Keyword AssertOrWithKeyword +} + +type AssertOrWithEntry struct { + Key []uint16 // An identifier or a string + Value []uint16 // Always a string + KeyLoc logger.Loc + ValueLoc logger.Loc + PreferQuotedKey bool +} + +func FindAssertOrWithEntry(assertions []AssertOrWithEntry, name string) *AssertOrWithEntry { + for _, assertion := range assertions { + if helpers.UTF16EqualsString(assertion.Key, name) { + return &assertion + } + } + return nil +} + +type GlobPattern struct { + Parts []helpers.GlobPart + ExportAlias string + Kind ImportKind +} + +// This stores a 32-bit index where the zero value is an invalid index. 
This is +// a better alternative to storing the index as a pointer since that has the +// same properties but takes up more space and costs an extra pointer traversal. +type Index32 struct { + flippedBits uint32 +} + +func MakeIndex32(index uint32) Index32 { + return Index32{flippedBits: ^index} +} + +func (i Index32) IsValid() bool { + return i.flippedBits != 0 +} + +func (i Index32) GetIndex() uint32 { + return ^i.flippedBits +} + +type SymbolKind uint8 + +const ( + // An unbound symbol is one that isn't declared in the file it's referenced + // in. For example, using "window" without declaring it will be unbound. + SymbolUnbound SymbolKind = iota + + // This has special merging behavior. You're allowed to re-declare these + // symbols more than once in the same scope. These symbols are also hoisted + // out of the scope they are declared in to the closest containing function + // or module scope. These are the symbols with this kind: + // + // - Function arguments + // - Function statements + // - Variables declared using "var" + // + SymbolHoisted + SymbolHoistedFunction + + // There's a weird special case where catch variables declared using a simple + // identifier (i.e. not a binding pattern) block hoisted variables instead of + // becoming an error: + // + // var e = 0; + // try { throw 1 } catch (e) { + // print(e) // 1 + // var e = 2 + // print(e) // 2 + // } + // print(e) // 0 (since the hoisting stops at the catch block boundary) + // + // However, other forms are still a syntax error: + // + // try {} catch (e) { let e } + // try {} catch ({e}) { var e } + // + // This symbol is for handling this weird special case. 
+ SymbolCatchIdentifier + + // Generator and async functions are not hoisted, but still have special + // properties such as being able to overwrite previous functions with the + // same name + SymbolGeneratorOrAsyncFunction + + // This is the special "arguments" variable inside functions + SymbolArguments + + // Classes can merge with TypeScript namespaces. + SymbolClass + + // Class names are not allowed to be referenced by computed property keys + SymbolClassInComputedPropertyKey + + // A class-private identifier (i.e. "#foo"). + SymbolPrivateField + SymbolPrivateMethod + SymbolPrivateGet + SymbolPrivateSet + SymbolPrivateGetSetPair + SymbolPrivateStaticField + SymbolPrivateStaticMethod + SymbolPrivateStaticGet + SymbolPrivateStaticSet + SymbolPrivateStaticGetSetPair + + // Labels are in their own namespace + SymbolLabel + + // TypeScript enums can merge with TypeScript namespaces and other TypeScript + // enums. + SymbolTSEnum + + // TypeScript namespaces can merge with classes, functions, TypeScript enums, + // and other TypeScript namespaces. + SymbolTSNamespace + + // In TypeScript, imports are allowed to silently collide with symbols within + // the module. Presumably this is because the imports may be type-only. 
+ SymbolImport + + // Assigning to a "const" symbol will throw a TypeError at runtime + SymbolConst + + // Injected symbols can be overridden by provided defines + SymbolInjected + + // Properties can optionally be renamed to shorter names + SymbolMangledProp + + // CSS identifiers that are never renamed + SymbolGlobalCSS + + // CSS identifiers that are renamed to be unique to the file they are in + SymbolLocalCSS + + // This annotates all other symbols that don't have special behavior + SymbolOther +) + +func (kind SymbolKind) IsPrivate() bool { + return kind >= SymbolPrivateField && kind <= SymbolPrivateStaticGetSetPair +} + +func (kind SymbolKind) IsHoisted() bool { + return kind == SymbolHoisted || kind == SymbolHoistedFunction +} + +func (kind SymbolKind) IsHoistedOrFunction() bool { + return kind.IsHoisted() || kind == SymbolGeneratorOrAsyncFunction +} + +func (kind SymbolKind) IsFunction() bool { + return kind == SymbolHoistedFunction || kind == SymbolGeneratorOrAsyncFunction +} + +func (kind SymbolKind) IsUnboundOrInjected() bool { + return kind == SymbolUnbound || kind == SymbolInjected +} + +var InvalidRef Ref = Ref{^uint32(0), ^uint32(0)} + +// Files are parsed in parallel for speed. We want to allow each parser to +// generate symbol IDs that won't conflict with each other. We also want to be +// able to quickly merge symbol tables from all files into one giant symbol +// table. +// +// We can accomplish both goals by giving each symbol ID two parts: a source +// index that is unique to the parser goroutine, and an inner index that +// increments as the parser generates new symbol IDs. Then a symbol map can +// be an array of arrays indexed first by source index, then by inner index. +// The maps can be merged quickly by creating a single outer array containing +// all inner arrays from all parsed files. 
+type Ref struct { + SourceIndex uint32 + InnerIndex uint32 +} + +type LocRef struct { + Loc logger.Loc + Ref Ref +} + +type ImportItemStatus uint8 + +const ( + ImportItemNone ImportItemStatus = iota + + // The linker doesn't report import/export mismatch errors + ImportItemGenerated + + // The printer will replace this import with "undefined" + ImportItemMissing +) + +type SymbolFlags uint16 + +const ( + // Certain symbols must not be renamed or minified. For example, the + // "arguments" variable is declared by the runtime for every function. + // Renaming can also break any identifier used inside a "with" statement. + MustNotBeRenamed SymbolFlags = 1 << iota + + // In React's version of JSX, lower-case names are strings while upper-case + // names are identifiers. If we are preserving JSX syntax (i.e. not + // transforming it), then we need to be careful to name the identifiers + // something with a capital letter so further JSX processing doesn't treat + // them as strings instead. + MustStartWithCapitalLetterForJSX + + // If true, this symbol is the target of a "__name" helper function call. + // This call is special because it deliberately doesn't count as a use + // of the symbol (otherwise keeping names would disable tree shaking) + // so "UseCountEstimate" is not incremented. This flag helps us know to + // avoid optimizing this symbol when "UseCountEstimate" is 1 in this case. + DidKeepName + + // Sometimes we lower private symbols even if they are supported. For example, + // consider the following TypeScript code: + // + // class Foo { + // #foo = 123 + // bar = this.#foo + // } + // + // If "useDefineForClassFields: false" is set in "tsconfig.json", then "bar" + // must use assignment semantics instead of define semantics. 
We can compile + // that to this code: + // + // class Foo { + // constructor() { + // this.#foo = 123; + // this.bar = this.#foo; + // } + // #foo; + // } + // + // However, we can't do the same for static fields: + // + // class Foo { + // static #foo = 123 + // static bar = this.#foo + // } + // + // Compiling these static fields to something like this would be invalid: + // + // class Foo { + // static #foo; + // } + // Foo.#foo = 123; + // Foo.bar = Foo.#foo; + // + // Thus "#foo" must be lowered even though it's supported. Another case is + // when we're converting top-level class declarations to class expressions + // to avoid the TDZ and the class shadowing symbol is referenced within the + // class body: + // + // class Foo { + // static #foo = Foo + // } + // + // This cannot be converted into something like this: + // + // var Foo = class { + // static #foo; + // }; + // Foo.#foo = Foo; + // + PrivateSymbolMustBeLowered + + // This is used to remove the all but the last function re-declaration if a + // function is re-declared multiple times like this: + // + // function foo() { console.log(1) } + // function foo() { console.log(2) } + // + RemoveOverwrittenFunctionDeclaration + + // This flag is to avoid warning about this symbol more than once. It only + // applies to the "module" and "exports" unbound symbols. + DidWarnAboutCommonJSInESM + + // If this is present, the symbol could potentially be overwritten. This means + // it's not safe to make assumptions about this symbol from the initializer. + CouldPotentiallyBeMutated + + // This flags all symbols that were exported from the module using the ES6 + // "export" keyword, either directly on the declaration or using "export {}". + WasExported + + // This means the symbol is a normal function that has no body statements. + IsEmptyFunction + + // This means the symbol is a normal function that takes a single argument + // and returns that argument. 
+ IsIdentityFunction + + // If true, calls to this symbol can be unwrapped (i.e. removed except for + // argument side effects) if the result is unused. + CallCanBeUnwrappedIfUnused +) + +func (flags SymbolFlags) Has(flag SymbolFlags) bool { + return (flags & flag) != 0 +} + +// Note: the order of values in this struct matters to reduce struct size. +type Symbol struct { + // This is used for symbols that represent items in the import clause of an + // ES6 import statement. These should always be referenced by EImportIdentifier + // instead of an EIdentifier. When this is present, the expression should + // be printed as a property access off the namespace instead of as a bare + // identifier. + // + // For correctness, this must be stored on the symbol instead of indirectly + // associated with the Ref for the symbol somehow. In ES6 "flat bundling" + // mode, re-exported symbols are collapsed using MergeSymbols() and renamed + // symbols from other files that end up at this symbol must be able to tell + // if it has a namespace alias. + NamespaceAlias *NamespaceAlias + + // This is the name that came from the parser. Printed names may be renamed + // during minification or to avoid name collisions. Do not use the original + // name during printing. + OriginalName string + + // Used by the parser for single pass parsing. Symbols that have been merged + // form a linked-list where the last link is the symbol to use. This link is + // an invalid ref if it's the last link. If this isn't invalid, you need to + // FollowSymbols to get the real one. + Link Ref + + // An estimate of the number of uses of this symbol. This is used to detect + // whether a symbol is used or not. For example, TypeScript imports that are + // unused must be removed because they are probably type-only imports. This + // is an estimate and may not be completely accurate due to oversights in the + // code. But it should always be non-zero when the symbol is used. 
+ UseCountEstimate uint32 + + // This is for generating cross-chunk imports and exports for code splitting. + ChunkIndex Index32 + + // This is used for minification. Symbols that are declared in sibling scopes + // can share a name. A good heuristic (from Google Closure Compiler) is to + // assign names to symbols from sibling scopes in declaration order. That way + // local variable names are reused in each global function like this, which + // improves gzip compression: + // + // function x(a, b) { ... } + // function y(a, b, c) { ... } + // + // The parser fills this in for symbols inside nested scopes. There are three + // slot namespaces: regular symbols, label symbols, and private symbols. + NestedScopeSlot Index32 + + // Boolean values should all be flags instead to save space + Flags SymbolFlags + + Kind SymbolKind + + // We automatically generate import items for property accesses off of + // namespace imports. This lets us remove the expensive namespace imports + // while bundling in many cases, replacing them with a cheap import item + // instead: + // + // import * as ns from 'path' + // ns.foo() + // + // That can often be replaced by this, which avoids needing the namespace: + // + // import {foo} from 'path' + // foo() + // + // However, if the import is actually missing then we don't want to report a + // compile-time error like we do for real import items. This status lets us + // avoid this. We also need to be able to replace such import items with + // undefined, which this status is also used for. 
+ ImportItemStatus ImportItemStatus +} + +// You should call "MergeSymbols" instead of calling this directly +func (newSymbol *Symbol) MergeContentsWith(oldSymbol *Symbol) { + newSymbol.UseCountEstimate += oldSymbol.UseCountEstimate + if oldSymbol.Flags.Has(MustNotBeRenamed) && !newSymbol.Flags.Has(MustNotBeRenamed) { + newSymbol.OriginalName = oldSymbol.OriginalName + newSymbol.Flags |= MustNotBeRenamed + } + if oldSymbol.Flags.Has(MustStartWithCapitalLetterForJSX) { + newSymbol.Flags |= MustStartWithCapitalLetterForJSX + } +} + +type SlotNamespace uint8 + +const ( + SlotDefault SlotNamespace = iota + SlotLabel + SlotPrivateName + SlotMangledProp + SlotMustNotBeRenamed +) + +func (s *Symbol) SlotNamespace() SlotNamespace { + if s.Kind == SymbolUnbound || s.Flags.Has(MustNotBeRenamed) { + return SlotMustNotBeRenamed + } + if s.Kind.IsPrivate() { + return SlotPrivateName + } + if s.Kind == SymbolLabel { + return SlotLabel + } + if s.Kind == SymbolMangledProp { + return SlotMangledProp + } + return SlotDefault +} + +type SlotCounts [4]uint32 + +func (a *SlotCounts) UnionMax(b SlotCounts) { + for i := range *a { + ai := &(*a)[i] + bi := b[i] + if *ai < bi { + *ai = bi + } + } +} + +type NamespaceAlias struct { + Alias string + NamespaceRef Ref +} + +type SymbolMap struct { + // This could be represented as a "map[Ref]Symbol" but a two-level array was + // more efficient in profiles. This appears to be because it doesn't involve + // a hash. This representation also makes it trivial to quickly merge symbol + // maps from multiple files together. Each file only generates symbols in a + // single inner array, so you can join the maps together by just make a + // single outer array containing all of the inner arrays. See the comment on + // "Ref" for more detail. 
+ SymbolsForSource [][]Symbol +} + +func NewSymbolMap(sourceCount int) SymbolMap { + return SymbolMap{make([][]Symbol, sourceCount)} +} + +func (sm SymbolMap) Get(ref Ref) *Symbol { + return &sm.SymbolsForSource[ref.SourceIndex][ref.InnerIndex] +} + +// Returns the canonical ref that represents the ref for the provided symbol. +// This may not be the provided ref if the symbol has been merged with another +// symbol. +func FollowSymbols(symbols SymbolMap, ref Ref) Ref { + symbol := symbols.Get(ref) + if symbol.Link == InvalidRef { + return ref + } + + link := FollowSymbols(symbols, symbol.Link) + + // Only write if needed to avoid concurrent map update hazards + if symbol.Link != link { + symbol.Link = link + } + + return link +} + +// Use this before calling "FollowSymbols" from separate threads to avoid +// concurrent map update hazards. In Go, mutating a map is not threadsafe +// but reading from a map is. Calling "FollowAllSymbols" first ensures that +// all mutation is done up front. +func FollowAllSymbols(symbols SymbolMap) { + for sourceIndex, inner := range symbols.SymbolsForSource { + for symbolIndex := range inner { + FollowSymbols(symbols, Ref{uint32(sourceIndex), uint32(symbolIndex)}) + } + } +} + +// Makes "old" point to "new" by joining the linked lists for the two symbols +// together. That way "FollowSymbols" on both "old" and "new" will result in +// the same ref. 
+func MergeSymbols(symbols SymbolMap, old Ref, new Ref) Ref { + if old == new { + return new + } + + oldSymbol := symbols.Get(old) + if oldSymbol.Link != InvalidRef { + oldSymbol.Link = MergeSymbols(symbols, oldSymbol.Link, new) + return oldSymbol.Link + } + + newSymbol := symbols.Get(new) + if newSymbol.Link != InvalidRef { + newSymbol.Link = MergeSymbols(symbols, old, newSymbol.Link) + return newSymbol.Link + } + + oldSymbol.Link = new + newSymbol.MergeContentsWith(oldSymbol) + return new +} + +// This is a histogram of character frequencies for minification +type CharFreq [64]int32 + +func (freq *CharFreq) Scan(text string, delta int32) { + if delta == 0 { + return + } + + // This matches the order in "DefaultNameMinifier" + for i, n := 0, len(text); i < n; i++ { + c := text[i] + switch { + case c >= 'a' && c <= 'z': + (*freq)[c-'a'] += delta + case c >= 'A' && c <= 'Z': + (*freq)[c-('A'-26)] += delta + case c >= '0' && c <= '9': + (*freq)[c+(52-'0')] += delta + case c == '_': + (*freq)[62] += delta + case c == '$': + (*freq)[63] += delta + } + } +} + +func (freq *CharFreq) Include(other *CharFreq) { + for i := 0; i < 64; i++ { + (*freq)[i] += (*other)[i] + } +} + +type NameMinifier struct { + head string + tail string +} + +var DefaultNameMinifierJS = NameMinifier{ + head: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_$", + tail: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_$", +} + +var DefaultNameMinifierCSS = NameMinifier{ + head: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_", + tail: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_", +} + +type charAndCount struct { + char string + count int32 + index byte +} + +// This type is just so we can use Go's native sort function +type charAndCountArray []charAndCount + +func (a charAndCountArray) Len() int { return len(a) } +func (a charAndCountArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] } + +func (a charAndCountArray) Less(i int, j int) bool { + ai := 
a[i] + aj := a[j] + return ai.count > aj.count || (ai.count == aj.count && ai.index < aj.index) +} + +func (source NameMinifier) ShuffleByCharFreq(freq CharFreq) NameMinifier { + // Sort the histogram in descending order by count + array := make(charAndCountArray, 64) + for i := 0; i < len(source.tail); i++ { + array[i] = charAndCount{ + char: source.tail[i : i+1], + index: byte(i), + count: freq[i], + } + } + sort.Sort(array) + + // Compute the identifier start and identifier continue sequences + minifier := NameMinifier{} + for _, item := range array { + if item.char < "0" || item.char > "9" { + minifier.head += item.char + } + minifier.tail += item.char + } + return minifier +} + +func (minifier NameMinifier) NumberToMinifiedName(i int) string { + n_head := len(minifier.head) + n_tail := len(minifier.tail) + + j := i % n_head + name := minifier.head[j : j+1] + i = i / n_head + + for i > 0 { + i-- + j := i % n_tail + name += minifier.tail[j : j+1] + i = i / n_tail + } + + return name +} diff --git a/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go b/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go new file mode 100644 index 000000000000..e267f1ee5d14 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go @@ -0,0 +1,3310 @@ +package bundler + +// The bundler is the core of the "build" and "transform" API calls. Each +// operation has two phases. The first phase scans the module graph, and is +// represented by the "ScanBundle" function. The second phase generates the +// output files from the module graph, and is implemented by the "Compile" +// function. 
+ +import ( + "bytes" + "encoding/base32" + "encoding/base64" + "fmt" + "math/rand" + "net/http" + "sort" + "strings" + "sync" + "syscall" + "time" + "unicode" + "unicode/utf8" + + "github.com/evanw/esbuild/internal/ast" + "github.com/evanw/esbuild/internal/cache" + "github.com/evanw/esbuild/internal/compat" + "github.com/evanw/esbuild/internal/config" + "github.com/evanw/esbuild/internal/css_parser" + "github.com/evanw/esbuild/internal/fs" + "github.com/evanw/esbuild/internal/graph" + "github.com/evanw/esbuild/internal/helpers" + "github.com/evanw/esbuild/internal/js_ast" + "github.com/evanw/esbuild/internal/js_lexer" + "github.com/evanw/esbuild/internal/js_parser" + "github.com/evanw/esbuild/internal/logger" + "github.com/evanw/esbuild/internal/resolver" + "github.com/evanw/esbuild/internal/runtime" + "github.com/evanw/esbuild/internal/sourcemap" + "github.com/evanw/esbuild/internal/xxhash" +) + +type scannerFile struct { + // If "AbsMetadataFile" is present, this will be filled out with information + // about this file in JSON format. This is a partial JSON file that will be + // fully assembled later. + jsonMetadataChunk string + + pluginData interface{} + inputFile graph.InputFile +} + +// This is data related to source maps. It's computed in parallel with linking +// and must be ready by the time printing happens. This is beneficial because +// it is somewhat expensive to produce. +type DataForSourceMap struct { + // This data is for the printer. It maps from byte offsets in the file (which + // are stored at every AST node) to UTF-16 column offsets (required by source + // maps). + LineOffsetTables []sourcemap.LineOffsetTable + + // This contains the quoted contents of the original source file. It's what + // needs to be embedded in the "sourcesContent" array in the final source + // map. Quoting is precomputed because it's somewhat expensive. 
+ QuotedContents [][]byte +} + +type Bundle struct { + // The unique key prefix is a random string that is unique to every bundling + // operation. It is used as a prefix for the unique keys assigned to every + // chunk during linking. These unique keys are used to identify each chunk + // before the final output paths have been computed. + uniqueKeyPrefix string + + fs fs.FS + res *resolver.Resolver + files []scannerFile + entryPoints []graph.EntryPoint + options config.Options +} + +type parseArgs struct { + fs fs.FS + log logger.Log + res *resolver.Resolver + caches *cache.CacheSet + prettyPath string + importSource *logger.Source + importWith *ast.ImportAssertOrWith + sideEffects graph.SideEffects + pluginData interface{} + results chan parseResult + inject chan config.InjectedFile + uniqueKeyPrefix string + keyPath logger.Path + options config.Options + importPathRange logger.Range + sourceIndex uint32 + skipResolve bool +} + +type parseResult struct { + resolveResults []*resolver.ResolveResult + globResolveResults map[uint32]globResolveResult + file scannerFile + tlaCheck tlaCheck + ok bool +} + +type globResolveResult struct { + resolveResults map[string]resolver.ResolveResult + absPath string + prettyPath string + exportAlias string +} + +type tlaCheck struct { + parent ast.Index32 + depth uint32 + importRecordIndex uint32 +} + +func parseFile(args parseArgs) { + source := logger.Source{ + Index: args.sourceIndex, + KeyPath: args.keyPath, + PrettyPath: args.prettyPath, + IdentifierName: js_ast.GenerateNonUniqueNameFromPath(args.keyPath.Text), + } + + var loader config.Loader + var absResolveDir string + var pluginName string + var pluginData interface{} + + if stdin := args.options.Stdin; stdin != nil { + // Special-case stdin + source.Contents = stdin.Contents + loader = stdin.Loader + if loader == config.LoaderNone { + loader = config.LoaderJS + } + absResolveDir = args.options.Stdin.AbsResolveDir + } else { + result, ok := runOnLoadPlugins( + 
args.options.Plugins, + args.fs, + &args.caches.FSCache, + args.log, + &source, + args.importSource, + args.importPathRange, + args.importWith, + args.pluginData, + args.options.WatchMode, + ) + if !ok { + if args.inject != nil { + args.inject <- config.InjectedFile{ + Source: source, + } + } + args.results <- parseResult{} + return + } + loader = result.loader + absResolveDir = result.absResolveDir + pluginName = result.pluginName + pluginData = result.pluginData + } + + _, base, ext := logger.PlatformIndependentPathDirBaseExt(source.KeyPath.Text) + + // The special "default" loader determines the loader from the file path + if loader == config.LoaderDefault { + loader = loaderFromFileExtension(args.options.ExtensionToLoader, base+ext) + } + + if loader == config.LoaderEmpty { + source.Contents = "" + } + + result := parseResult{ + file: scannerFile{ + inputFile: graph.InputFile{ + Source: source, + Loader: loader, + SideEffects: args.sideEffects, + }, + pluginData: pluginData, + }, + } + + defer func() { + r := recover() + if r != nil { + args.log.AddErrorWithNotes(nil, logger.Range{}, + fmt.Sprintf("panic: %v (while parsing %q)", r, source.PrettyPath), + []logger.MsgData{{Text: helpers.PrettyPrintedStack()}}) + args.results <- result + } + }() + + switch loader { + case config.LoaderJS, config.LoaderEmpty: + ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options)) + if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderJSX: + args.options.JSX.Parse = true + ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options)) + if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST + } + 
result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderTS, config.LoaderTSNoAmbiguousLessThan: + args.options.TS.Parse = true + args.options.TS.NoAmbiguousLessThan = loader == config.LoaderTSNoAmbiguousLessThan + ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options)) + if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderTSX: + args.options.TS.Parse = true + args.options.JSX.Parse = true + ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options)) + if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderCSS, config.LoaderGlobalCSS, config.LoaderLocalCSS: + ast := args.caches.CSSCache.Parse(args.log, source, css_parser.OptionsFromConfig(loader, &args.options)) + result.file.inputFile.Repr = &graph.CSSRepr{AST: ast} + result.ok = true + + case config.LoaderJSON, config.LoaderWithTypeJSON: + expr, ok := args.caches.JSONCache.Parse(args.log, source, js_parser.JSONOptions{ + UnsupportedJSFeatures: args.options.UnsupportedJSFeatures, + }) + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + if loader == config.LoaderWithTypeJSON { + // The exports kind defaults to "none", in which case the linker picks + // either ESM or CommonJS depending on the situation. Dynamic imports + // causes the linker to pick CommonJS which uses "require()" and then + // converts the return value to ESM, which adds extra properties that + // aren't supposed to be there when "{ with: { type: 'json' } }" is + // present. 
So if there's an import attribute, we force the type to + // be ESM to avoid this. + ast.ExportsKind = js_ast.ExportsESM + } + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderText: + encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents)) + expr := js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(source.Contents)}} + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + ast.URLForCSS = "data:text/plain;base64," + encoded + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + case config.LoaderBase64: + mimeType := guessMimeType(ext, source.Contents) + encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents)) + expr := js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(encoded)}} + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + ast.URLForCSS = "data:" + mimeType + ";base64," + encoded + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + case config.LoaderBinary: + encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents)) + expr := js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(encoded)}} + helper := "__toBinary" + if args.options.Platform == config.PlatformNode { + helper = "__toBinaryNode" + } + ast := 
js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, helper) + ast.URLForCSS = "data:application/octet-stream;base64," + encoded + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + case config.LoaderDataURL: + mimeType := guessMimeType(ext, source.Contents) + url := helpers.EncodeStringAsShortestDataURL(mimeType, source.Contents) + expr := js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(url)}} + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + ast.URLForCSS = url + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + case config.LoaderFile: + uniqueKey := fmt.Sprintf("%sA%08d", args.uniqueKeyPrefix, args.sourceIndex) + uniqueKeyPath := uniqueKey + source.KeyPath.IgnoredSuffix + expr := js_ast.Expr{Data: &js_ast.EString{ + Value: helpers.StringToUTF16(uniqueKeyPath), + ContainsUniqueKey: true, + }} + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + ast.URLForCSS = uniqueKeyPath + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + // Mark that this file is from the "file" loader + result.file.inputFile.UniqueKeyForAdditionalFile = uniqueKey + + case config.LoaderCopy: + uniqueKey := fmt.Sprintf("%sA%08d", args.uniqueKeyPrefix, args.sourceIndex) + uniqueKeyPath 
:= uniqueKey + source.KeyPath.IgnoredSuffix + result.file.inputFile.Repr = &graph.CopyRepr{ + URLForCode: uniqueKeyPath, + } + result.ok = true + + // Mark that this file is from the "copy" loader + result.file.inputFile.UniqueKeyForAdditionalFile = uniqueKey + + default: + var message string + if source.KeyPath.Namespace == "file" && ext != "" { + message = fmt.Sprintf("No loader is configured for %q files: %s", ext, source.PrettyPath) + } else { + message = fmt.Sprintf("Do not know how to load path: %s", source.PrettyPath) + } + tracker := logger.MakeLineColumnTracker(args.importSource) + args.log.AddError(&tracker, args.importPathRange, message) + } + + // Only continue now if parsing was successful + if result.ok { + // Run the resolver on the parse thread so it's not run on the main thread. + // That way the main thread isn't blocked if the resolver takes a while. + if recordsPtr := result.file.inputFile.Repr.ImportRecords(); args.options.Mode == config.ModeBundle && !args.skipResolve && recordsPtr != nil { + // Clone the import records because they will be mutated later + records := append([]ast.ImportRecord{}, *recordsPtr...) 
+ *recordsPtr = records + result.resolveResults = make([]*resolver.ResolveResult, len(records)) + + if len(records) > 0 { + type cacheEntry struct { + resolveResult *resolver.ResolveResult + debug resolver.DebugMeta + didLogError bool + } + + type cacheKey struct { + kind ast.ImportKind + path string + attrs logger.ImportAttributes + } + resolverCache := make(map[cacheKey]cacheEntry) + tracker := logger.MakeLineColumnTracker(&source) + + for importRecordIndex := range records { + // Don't try to resolve imports that are already resolved + record := &records[importRecordIndex] + if record.SourceIndex.IsValid() { + continue + } + + // Special-case glob pattern imports + if record.GlobPattern != nil { + prettyPath := helpers.GlobPatternToString(record.GlobPattern.Parts) + switch record.GlobPattern.Kind { + case ast.ImportRequire: + prettyPath = fmt.Sprintf("require(%q)", prettyPath) + case ast.ImportDynamic: + prettyPath = fmt.Sprintf("import(%q)", prettyPath) + } + if results, msg := args.res.ResolveGlob(absResolveDir, record.GlobPattern.Parts, record.GlobPattern.Kind, prettyPath); results != nil { + if msg != nil { + args.log.AddID(msg.ID, msg.Kind, &tracker, record.Range, msg.Data.Text) + } + if result.globResolveResults == nil { + result.globResolveResults = make(map[uint32]globResolveResult) + } + result.globResolveResults[uint32(importRecordIndex)] = globResolveResult{ + resolveResults: results, + absPath: args.fs.Join(absResolveDir, "(glob)"), + prettyPath: fmt.Sprintf("%s in %s", prettyPath, result.file.inputFile.Source.PrettyPath), + exportAlias: record.GlobPattern.ExportAlias, + } + } else { + args.log.AddError(&tracker, record.Range, fmt.Sprintf("Could not resolve %s", prettyPath)) + } + continue + } + + // Ignore records that the parser has discarded. This is used to remove + // type-only imports in TypeScript files. 
+ if record.Flags.Has(ast.IsUnused) { + continue + } + + // Encode the import attributes + var attrs logger.ImportAttributes + if record.AssertOrWith != nil && record.AssertOrWith.Keyword == ast.WithKeyword { + data := make(map[string]string, len(record.AssertOrWith.Entries)) + for _, entry := range record.AssertOrWith.Entries { + data[helpers.UTF16ToString(entry.Key)] = helpers.UTF16ToString(entry.Value) + } + attrs = logger.EncodeImportAttributes(data) + } + + // Cache the path in case it's imported multiple times in this file + cacheKey := cacheKey{ + kind: record.Kind, + path: record.Path.Text, + attrs: attrs, + } + entry, ok := resolverCache[cacheKey] + if ok { + result.resolveResults[importRecordIndex] = entry.resolveResult + } else { + // Run the resolver and log an error if the path couldn't be resolved + resolveResult, didLogError, debug := RunOnResolvePlugins( + args.options.Plugins, + args.res, + args.log, + args.fs, + &args.caches.FSCache, + &source, + record.Range, + source.KeyPath, + record.Path.Text, + record.Kind, + absResolveDir, + pluginData, + ) + if resolveResult != nil { + resolveResult.PathPair.Primary.ImportAttributes = attrs + if resolveResult.PathPair.HasSecondary() { + resolveResult.PathPair.Secondary.ImportAttributes = attrs + } + } + entry = cacheEntry{ + resolveResult: resolveResult, + debug: debug, + didLogError: didLogError, + } + resolverCache[cacheKey] = entry + + // All "require.resolve()" imports should be external because we don't + // want to waste effort traversing into them + if record.Kind == ast.ImportRequireResolve { + if resolveResult != nil && resolveResult.PathPair.IsExternal { + // Allow path substitution as long as the result is external + result.resolveResults[importRecordIndex] = resolveResult + } else if !record.Flags.Has(ast.HandlesImportErrors) { + args.log.AddID(logger.MsgID_Bundler_RequireResolveNotExternal, logger.Warning, &tracker, record.Range, + fmt.Sprintf("%q should be marked as external for use with 
\"require.resolve\"", record.Path.Text)) + } + continue + } + } + + // Check whether we should log an error every time the result is nil, + // even if it's from the cache. Do this because the error may not + // have been logged for nil entries if the previous instances had + // the "HandlesImportErrors" flag. + if entry.resolveResult == nil { + // Failed imports inside a try/catch are silently turned into + // external imports instead of causing errors. This matches a common + // code pattern for conditionally importing a module with a graceful + // fallback. + if !entry.didLogError && !record.Flags.Has(ast.HandlesImportErrors) { + // Report an error + text, suggestion, notes := ResolveFailureErrorTextSuggestionNotes(args.res, record.Path.Text, record.Kind, + pluginName, args.fs, absResolveDir, args.options.Platform, source.PrettyPath, entry.debug.ModifiedImportPath) + entry.debug.LogErrorMsg(args.log, &source, record.Range, text, suggestion, notes) + + // Only report this error once per unique import path in the file + entry.didLogError = true + resolverCache[cacheKey] = entry + } else if !entry.didLogError && record.Flags.Has(ast.HandlesImportErrors) { + // Report a debug message about why there was no error + args.log.AddIDWithNotes(logger.MsgID_Bundler_IgnoredDynamicImport, logger.Debug, &tracker, record.Range, + fmt.Sprintf("Importing %q was allowed even though it could not be resolved because dynamic import failures appear to be handled here:", + record.Path.Text), []logger.MsgData{tracker.MsgData(js_lexer.RangeOfIdentifier(source, record.ErrorHandlerLoc), + "The handler for dynamic import failures is here:")}) + } + continue + } + + result.resolveResults[importRecordIndex] = entry.resolveResult + } + } + } + + // Attempt to parse the source map if present + if loader.CanHaveSourceMap() && args.options.SourceMap != config.SourceMapNone { + var sourceMapComment logger.Span + switch repr := result.file.inputFile.Repr.(type) { + case *graph.JSRepr: + 
sourceMapComment = repr.AST.SourceMapComment + case *graph.CSSRepr: + sourceMapComment = repr.AST.SourceMapComment + } + + if sourceMapComment.Text != "" { + tracker := logger.MakeLineColumnTracker(&source) + + if path, contents := extractSourceMapFromComment(args.log, args.fs, &args.caches.FSCache, + &source, &tracker, sourceMapComment, absResolveDir); contents != nil { + prettyPath := resolver.PrettyPath(args.fs, path) + log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug, args.log.Overrides) + + sourceMap := js_parser.ParseSourceMap(log, logger.Source{ + KeyPath: path, + PrettyPath: prettyPath, + Contents: *contents, + }) + + if msgs := log.Done(); len(msgs) > 0 { + var text string + if path.Namespace == "file" { + text = fmt.Sprintf("The source map %q was referenced by the file %q here:", prettyPath, args.prettyPath) + } else { + text = fmt.Sprintf("This source map came from the file %q here:", args.prettyPath) + } + note := tracker.MsgData(sourceMapComment.Range, text) + for _, msg := range msgs { + msg.Notes = append(msg.Notes, note) + args.log.AddMsg(msg) + } + } + + // If "sourcesContent" entries aren't present, try filling them in + // using the file system. This includes both generating the entire + // "sourcesContent" array if it's absent as well as filling in + // individual null entries in the array if the array is present. 
+ if sourceMap != nil && !args.options.ExcludeSourcesContent { + // Make sure "sourcesContent" is big enough + if len(sourceMap.SourcesContent) < len(sourceMap.Sources) { + slice := make([]sourcemap.SourceContent, len(sourceMap.Sources)) + copy(slice, sourceMap.SourcesContent) + sourceMap.SourcesContent = slice + } + + // Attempt to fill in null entries using the file system + for i, source := range sourceMap.Sources { + if sourceMap.SourcesContent[i].Value == nil { + var absPath string + if args.fs.IsAbs(source) { + absPath = source + } else if path.Namespace == "file" { + absPath = args.fs.Join(args.fs.Dir(path.Text), source) + } else { + continue + } + if contents, err, _ := args.caches.FSCache.ReadFile(args.fs, absPath); err == nil { + sourceMap.SourcesContent[i].Value = helpers.StringToUTF16(contents) + } + } + } + } + + result.file.inputFile.InputSourceMap = sourceMap + } + } + } + } + + // Note: We must always send on the "inject" channel before we send on the + // "results" channel to avoid deadlock + if args.inject != nil { + var exports []config.InjectableExport + + if repr, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok { + aliases := make([]string, 0, len(repr.AST.NamedExports)) + for alias := range repr.AST.NamedExports { + aliases = append(aliases, alias) + } + sort.Strings(aliases) // Sort for determinism + exports = make([]config.InjectableExport, len(aliases)) + for i, alias := range aliases { + exports[i] = config.InjectableExport{ + Alias: alias, + Loc: repr.AST.NamedExports[alias].AliasLoc, + } + } + } + + // Once we send on the "inject" channel, the main thread may mutate the + // "options" object to populate the "InjectedFiles" field. So we must + // only send on the "inject" channel after we're done using the "options" + // object so we don't introduce a data race. 
// ResolveFailureErrorTextSuggestionNotes formats the error text, an optional
// quick-fix suggestion, and explanatory notes for an import path that could
// not be resolved. The hints cover the failure modes handled below: package
// paths that could be marked external, package paths that were probably meant
// to be relative paths, node built-in modules used without the node platform,
// and plugins that resolved a path without setting a resolve directory.
func ResolveFailureErrorTextSuggestionNotes(
	res *resolver.Resolver,
	path string,
	kind ast.ImportKind,
	pluginName string,
	fs fs.FS,
	absResolveDir string,
	platform config.Platform,
	originatingFilePath string,
	modifiedImportPath string,
) (text string, suggestion string, notes []logger.MsgData) {
	// If an alias rewrote the import path, report the rewritten path as the
	// failure and explain the remapping in a note.
	if modifiedImportPath != "" {
		text = fmt.Sprintf("Could not resolve %q (originally %q)", modifiedImportPath, path)
		notes = append(notes, logger.MsgData{Text: fmt.Sprintf(
			"The path %q was remapped to %q using the alias feature, which then couldn't be resolved. "+
				"Keep in mind that import path aliases are resolved in the current working directory.",
			path, modifiedImportPath)})
		path = modifiedImportPath
	} else {
		text = fmt.Sprintf("Could not resolve %q", path)
	}
	hint := ""

	if resolver.IsPackagePath(path) && !fs.IsAbs(path) {
		hint = fmt.Sprintf("You can mark the path %q as external to exclude it from the bundle, which will remove this error and leave the unresolved path in the bundle.", path)
		if kind == ast.ImportRequire {
			hint += " You can also surround this \"require\" call with a try/catch block to handle this failure at run-time instead of bundle-time."
		} else if kind == ast.ImportDynamic {
			hint += " You can also add \".catch()\" here to handle this failure at run-time instead of bundle-time."
		}
		// Probe whether prefixing "./" would have resolved; if so, suggest the
		// relative form instead of the external-path hint.
		if pluginName == "" && !fs.IsAbs(path) {
			if query, _ := res.ProbeResolvePackageAsRelative(absResolveDir, path, kind); query != nil {
				hint = fmt.Sprintf("Use the relative path %q to reference the file %q. "+
					"Without the leading \"./\", the path %q is being interpreted as a package path instead.",
					"./"+path, resolver.PrettyPath(fs, query.PathPair.Primary), path)
				suggestion = string(helpers.QuoteForJSON("./"+path, false))
			}
		}
	}

	// Imports of node built-ins (with or without the "node:" prefix) on a
	// non-node platform get a platform-switch hint instead.
	if platform != config.PlatformNode {
		pkg := strings.TrimPrefix(path, "node:")
		if resolver.BuiltInNodeModules[pkg] {
			var how string
			switch logger.API {
			case logger.CLIAPI:
				how = "--platform=node"
			case logger.JSAPI:
				how = "platform: 'node'"
			case logger.GoAPI:
				how = "Platform: api.PlatformNode"
			}
			hint = fmt.Sprintf("The package %q wasn't found on the file system but is built into node. "+
				"Are you trying to bundle for node? You can use %q to do that, which will remove this error.", path, how)
		}
	}

	if absResolveDir == "" && pluginName != "" {
		where := ""
		if originatingFilePath != "" {
			where = fmt.Sprintf(" for the file %q", originatingFilePath)
		}
		hint = fmt.Sprintf("The plugin %q didn't set a resolve directory%s, "+
			"so esbuild did not search for %q on the file system.", pluginName, where, path)
	}

	if hint != "" {
		if modifiedImportPath != "" {
			// Add a newline if there's already a paragraph of text
			notes = append(notes, logger.MsgData{})

			// Don't add a suggestion if the path was rewritten using an alias
			suggestion = ""
		}
		notes = append(notes, logger.MsgData{Text: hint})
	}
	return
}
// isASCIIOnly reports whether text consists entirely of printable ASCII
// characters (0x20 through 0x7E inclusive). Control characters and any
// non-ASCII content cause a false result. Scanning bytes is equivalent to
// scanning runes here because every byte of a multi-byte UTF-8 sequence is
// >= 0x80 and therefore outside the accepted range.
func isASCIIOnly(text string) bool {
	for i := 0; i < len(text); i++ {
		if b := text[i]; b < 0x20 || b > 0x7E {
			return false
		}
	}
	return true
}
// guessMimeType returns a MIME type for the given file extension, falling
// back to content sniffing via http.DetectContentType when the extension is
// unknown. The "; " separator is collapsed so the result embeds compactly
// into data URLs.
func guessMimeType(extension string, contents string) string {
	mimeType := helpers.MimeTypeByExtension(extension)
	if mimeType == "" {
		mimeType = http.DetectContentType([]byte(contents))
	}

	// Turn "text/plain; charset=utf-8" into "text/plain;charset=utf-8"
	return strings.ReplaceAll(mimeType, "; ", ";")
}

// extractSourceMapFromComment loads the source map referenced by a
// "//# sourceMappingURL=..." comment. It supports inline data URLs and,
// when an absolute resolve directory is available, file paths relative to
// that directory. On failure it logs (warning or debug, depending on the
// cause) and returns a zero path with nil contents.
func extractSourceMapFromComment(
	log logger.Log,
	fs fs.FS,
	fsCache *cache.FSCache,
	source *logger.Source,
	tracker *logger.LineColumnTracker,
	comment logger.Span,
	absResolveDir string,
) (logger.Path, *string) {
	// Support data URLs
	if parsed, ok := resolver.ParseDataURL(comment.Text); ok {
		if contents, err := parsed.DecodeData(); err == nil {
			// The path is the importing file itself with a marker suffix so the
			// inline map is attributed to that file.
			return logger.Path{Text: source.PrettyPath, IgnoredSuffix: "#sourceMappingURL"}, &contents
		} else {
			log.AddID(logger.MsgID_SourceMap_UnsupportedSourceMapComment, logger.Warning, tracker, comment.Range,
				fmt.Sprintf("Unsupported source map comment: %s", err.Error()))
			return logger.Path{}, nil
		}
	}

	// Relative path in a file with an absolute path
	if absResolveDir != "" {
		absPath := fs.Join(absResolveDir, comment.Text)
		path := logger.Path{Text: absPath, Namespace: "file"}
		contents, err, originalError := fsCache.ReadFile(fs, absPath)
		if log.Level <= logger.LevelDebug && originalError != nil {
			log.AddID(logger.MsgID_None, logger.Debug, tracker, comment.Range, fmt.Sprintf("Failed to read file %q: %s", resolver.PrettyPath(fs, path), originalError.Error()))
		}
		if err != nil {
			kind := logger.Warning
			if err == syscall.ENOENT {
				// Don't report a warning because this is likely unactionable
				kind = logger.Debug
			}
			log.AddID(logger.MsgID_SourceMap_MissingSourceMap, kind, tracker, comment.Range,
				fmt.Sprintf("Cannot read file %q: %s", resolver.PrettyPath(fs, path), err.Error()))
			return logger.Path{}, nil
		}
		return path, &contents
	}

	// Anything else is unsupported
	return logger.Path{}, nil
}

// sanitizeLocation normalizes a plugin-provided message location in place:
// an empty namespace defaults to "file", and a non-empty file path is
// rewritten to its pretty (printable) form.
func sanitizeLocation(fs fs.FS, loc *logger.MsgLocation) {
	if loc != nil {
		if loc.Namespace == "" {
			loc.Namespace = "file"
		}
		if loc.File != "" {
			loc.File = resolver.PrettyPath(fs, logger.Path{Text: loc.File, Namespace: loc.Namespace})
		}
	}
}
// logPluginMessages forwards the messages and/or thrown error produced by a
// plugin callback to the main log, attributing them to the plugin, sanitizing
// their locations, and filling in a location from the import site when the
// plugin supplied none. It reports whether at least one error-level message
// was logged, which callers use to abort the current operation.
func logPluginMessages(
	fs fs.FS,
	log logger.Log,
	name string,
	msgs []logger.Msg,
	thrown error,
	importSource *logger.Source,
	importPathRange logger.Range,
) bool {
	didLogError := false
	tracker := logger.MakeLineColumnTracker(importSource)

	// Report errors and warnings generated by the plugin
	for _, msg := range msgs {
		if msg.PluginName == "" {
			msg.PluginName = name
		}
		if msg.Kind == logger.Error {
			didLogError = true
		}

		// Sanitize the locations
		for _, note := range msg.Notes {
			sanitizeLocation(fs, note.Location)
		}
		if msg.Data.Location == nil {
			// No location from the plugin: point at the import that triggered it.
			msg.Data.Location = tracker.MsgLocationOrNil(importPathRange)
		} else {
			sanitizeLocation(fs, msg.Data.Location)
			if importSource != nil {
				if msg.Data.Location.File == "" {
					msg.Data.Location.File = importSource.PrettyPath
				}
				// Attach a note linking the message back to the triggering import.
				msg.Notes = append(msg.Notes, tracker.MsgData(importPathRange,
					fmt.Sprintf("The plugin %q was triggered by this import", name)))
			}
		}

		log.AddMsg(msg)
	}

	// Report errors thrown by the plugin itself
	if thrown != nil {
		didLogError = true
		text := thrown.Error()
		log.AddMsg(logger.Msg{
			PluginName: name,
			Kind:       logger.Error,
			Data: logger.MsgData{
				Text:       text,
				Location:   tracker.MsgLocationOrNil(importPathRange),
				UserDetail: thrown,
			},
		})
	}

	return didLogError
}
// RunOnResolvePlugins gives each registered "onResolve" plugin callback a
// chance to resolve the import path, in registration order, and falls back to
// the default resolver when no plugin claims the path. The middle boolean
// result is true when an error has already been logged, in which case the
// caller should stop without logging another one.
func RunOnResolvePlugins(
	plugins []config.Plugin,
	res *resolver.Resolver,
	log logger.Log,
	fs fs.FS,
	fsCache *cache.FSCache,
	importSource *logger.Source,
	importPathRange logger.Range,
	importer logger.Path,
	path string,
	kind ast.ImportKind,
	absResolveDir string,
	pluginData interface{},
) (*resolver.ResolveResult, bool, resolver.DebugMeta) {
	resolverArgs := config.OnResolveArgs{
		Path:       path,
		ResolveDir: absResolveDir,
		Kind:       kind,
		PluginData: pluginData,
		Importer:   importer,
	}
	applyPath := logger.Path{
		Text:      path,
		Namespace: importer.Namespace,
	}
	tracker := logger.MakeLineColumnTracker(importSource)

	// Apply resolver plugins in order until one succeeds
	for _, plugin := range plugins {
		for _, onResolve := range plugin.OnResolve {
			if !config.PluginAppliesToPath(applyPath, onResolve.Filter, onResolve.Namespace) {
				continue
			}

			result := onResolve.Callback(resolverArgs)
			pluginName := result.PluginName
			if pluginName == "" {
				pluginName = plugin.Name
			}
			didLogError := logPluginMessages(fs, log, pluginName, result.Msgs, result.ThrownError, importSource, importPathRange)

			// Plugins can also provide additional file system paths to watch
			for _, file := range result.AbsWatchFiles {
				fsCache.ReadFile(fs, file)
			}
			for _, dir := range result.AbsWatchDirs {
				if entries, err, _ := fs.ReadDirectory(dir); err == nil {
					entries.SortedKeys()
				}
			}

			// Stop now if there was an error
			if didLogError {
				return nil, true, resolver.DebugMeta{}
			}

			// The "file" namespace is the default for non-external paths, but not
			// for external paths. External paths must explicitly specify the "file"
			// namespace.
			nsFromPlugin := result.Path.Namespace
			if result.Path.Namespace == "" && !result.External {
				result.Path.Namespace = "file"
			}

			// Otherwise, continue on to the next resolver if this loader didn't succeed
			if result.Path.Text == "" {
				if result.External {
					result.Path = logger.Path{Text: path}
				} else {
					continue
				}
			}

			// Paths in the file namespace must be absolute paths
			if result.Path.Namespace == "file" && !fs.IsAbs(result.Path.Text) {
				if nsFromPlugin == "file" {
					log.AddError(&tracker, importPathRange,
						fmt.Sprintf("Plugin %q returned a path in the \"file\" namespace that is not an absolute path: %s", pluginName, result.Path.Text))
				} else {
					log.AddError(&tracker, importPathRange,
						fmt.Sprintf("Plugin %q returned a non-absolute path: %s (set a namespace if this is not a file path)", pluginName, result.Path.Text))
				}
				return nil, true, resolver.DebugMeta{}
			}

			var sideEffectsData *resolver.SideEffectsData
			if result.IsSideEffectFree {
				sideEffectsData = &resolver.SideEffectsData{
					PluginName: pluginName,
				}
			}

			return &resolver.ResolveResult{
				PathPair:               resolver.PathPair{Primary: result.Path, IsExternal: result.External},
				PluginData:             result.PluginData,
				PrimarySideEffectsData: sideEffectsData,
			}, false, resolver.DebugMeta{}
		}
	}

	// Resolve relative to the resolve directory by default. All paths in the
	// "file" namespace automatically have a resolve directory. Loader plugins
	// can also configure a custom resolve directory for files in other namespaces.
	result, debug := res.Resolve(absResolveDir, path, kind)

	// Warn when the case used for importing differs from the actual file name
	if result != nil && result.DifferentCase != nil && !helpers.IsInsideNodeModules(absResolveDir) {
		diffCase := *result.DifferentCase
		log.AddID(logger.MsgID_Bundler_DifferentPathCase, logger.Warning, &tracker, importPathRange, fmt.Sprintf(
			"Use %q instead of %q to avoid issues with case-sensitive file systems",
			resolver.PrettyPath(fs, logger.Path{Text: fs.Join(diffCase.Dir, diffCase.Actual), Namespace: "file"}),
			resolver.PrettyPath(fs, logger.Path{Text: fs.Join(diffCase.Dir, diffCase.Query), Namespace: "file"}),
		))
	}

	return result, false, debug
}

// loaderPluginResult is the outcome of running the "onLoad" plugins (or the
// built-in file/data-URL loading) for a single path: which loader to use,
// the resolve directory for its imports, and the plugin that produced it.
type loaderPluginResult struct {
	pluginData    interface{}
	absResolveDir string
	pluginName    string
	loader        config.Loader
}
// runOnLoadPlugins loads the contents for a path by trying each "onLoad"
// plugin callback in registration order, then falling back to built-in
// handling: import-attribute validation, disabled-module stubbing, reading
// "file" namespace paths from disk, and decoding "dataurl" paths. It mutates
// source.Contents on success. The boolean result is false only when an error
// was logged; an unloadable-but-non-error path returns LoaderNone with true.
func runOnLoadPlugins(
	plugins []config.Plugin,
	fs fs.FS,
	fsCache *cache.FSCache,
	log logger.Log,
	source *logger.Source,
	importSource *logger.Source,
	importPathRange logger.Range,
	importWith *ast.ImportAssertOrWith,
	pluginData interface{},
	isWatchMode bool,
) (loaderPluginResult, bool) {
	loaderArgs := config.OnLoadArgs{
		Path:       source.KeyPath,
		PluginData: pluginData,
	}
	tracker := logger.MakeLineColumnTracker(importSource)

	// Apply loader plugins in order until one succeeds
	for _, plugin := range plugins {
		for _, onLoad := range plugin.OnLoad {
			if !config.PluginAppliesToPath(source.KeyPath, onLoad.Filter, onLoad.Namespace) {
				continue
			}

			result := onLoad.Callback(loaderArgs)
			pluginName := result.PluginName
			if pluginName == "" {
				pluginName = plugin.Name
			}
			didLogError := logPluginMessages(fs, log, pluginName, result.Msgs, result.ThrownError, importSource, importPathRange)

			// Plugins can also provide additional file system paths to watch
			for _, file := range result.AbsWatchFiles {
				fsCache.ReadFile(fs, file)
			}
			for _, dir := range result.AbsWatchDirs {
				if entries, err, _ := fs.ReadDirectory(dir); err == nil {
					entries.SortedKeys()
				}
			}

			// Stop now if there was an error
			if didLogError {
				if isWatchMode && source.KeyPath.Namespace == "file" {
					fsCache.ReadFile(fs, source.KeyPath.Text) // Read the file for watch mode tracking
				}
				return loaderPluginResult{}, false
			}

			// Otherwise, continue on to the next loader if this loader didn't succeed
			if result.Contents == nil {
				continue
			}

			source.Contents = *result.Contents
			loader := result.Loader
			if loader == config.LoaderNone {
				loader = config.LoaderJS
			}
			if result.AbsResolveDir == "" && source.KeyPath.Namespace == "file" {
				result.AbsResolveDir = fs.Dir(source.KeyPath.Text)
			}
			if isWatchMode && source.KeyPath.Namespace == "file" {
				fsCache.ReadFile(fs, source.KeyPath.Text) // Read the file for watch mode tracking
			}
			return loaderPluginResult{
				loader:        loader,
				absResolveDir: result.AbsResolveDir,
				pluginName:    pluginName,
				pluginData:    result.PluginData,
			}, true
		}
	}

	// Reject unsupported import attributes. Only "type: json" is accepted;
	// any other attribute or type value is an error pointed at the attribute.
	loader := config.LoaderDefault
	for _, attr := range source.KeyPath.ImportAttributes.Decode() {
		if attr.Key == "type" {
			if attr.Value == "json" {
				loader = config.LoaderWithTypeJSON
			} else {
				r := importPathRange
				if importWith != nil {
					r = js_lexer.RangeOfImportAssertOrWith(*importSource, *ast.FindAssertOrWithEntry(importWith.Entries, attr.Key), js_lexer.ValueRange)
				}
				log.AddError(&tracker, r, fmt.Sprintf("Importing with a type attribute of %q is not supported", attr.Value))
				return loaderPluginResult{}, false
			}
		} else {
			r := importPathRange
			if importWith != nil {
				r = js_lexer.RangeOfImportAssertOrWith(*importSource, *ast.FindAssertOrWithEntry(importWith.Entries, attr.Key), js_lexer.KeyRange)
			}
			log.AddError(&tracker, r, fmt.Sprintf("Importing with the %q attribute is not supported", attr.Key))
			return loaderPluginResult{}, false
		}
	}

	// Force disabled modules to be empty
	if source.KeyPath.IsDisabled() {
		return loaderPluginResult{loader: config.LoaderEmpty}, true
	}

	// Read normal modules from disk
	if source.KeyPath.Namespace == "file" {
		if contents, err, originalError := fsCache.ReadFile(fs, source.KeyPath.Text); err == nil {
			source.Contents = contents
			return loaderPluginResult{
				loader:        loader,
				absResolveDir: fs.Dir(source.KeyPath.Text),
			}, true
		} else {
			if log.Level <= logger.LevelDebug && originalError != nil {
				log.AddID(logger.MsgID_None, logger.Debug, nil, logger.Range{}, fmt.Sprintf("Failed to read file %q: %s", source.KeyPath.Text, originalError.Error()))
			}
			if err == syscall.ENOENT {
				log.AddError(&tracker, importPathRange,
					fmt.Sprintf("Could not read from file: %s", source.KeyPath.Text))
				return loaderPluginResult{}, false
			} else {
				log.AddError(&tracker, importPathRange,
					fmt.Sprintf("Cannot read file %q: %s", resolver.PrettyPath(fs, source.KeyPath), err.Error()))
				return loaderPluginResult{}, false
			}
		}
	}

	// Native support for data URLs. This is supported natively by node:
	// https://nodejs.org/docs/latest/api/esm.html#esm_data_imports
	if source.KeyPath.Namespace == "dataurl" {
		if parsed, ok := resolver.ParseDataURL(source.KeyPath.Text); ok {
			if contents, err := parsed.DecodeData(); err != nil {
				log.AddError(&tracker, importPathRange,
					fmt.Sprintf("Could not load data URL: %s", err.Error()))
				return loaderPluginResult{loader: config.LoaderNone}, true
			} else {
				source.Contents = contents
				if loader != config.LoaderDefault {
					return loaderPluginResult{loader: loader}, true
				}
				// No explicit loader: pick one from the data URL's MIME type.
				if mimeType := parsed.DecodeMIMEType(); mimeType != resolver.MIMETypeUnsupported {
					switch mimeType {
					case resolver.MIMETypeTextCSS:
						return loaderPluginResult{loader: config.LoaderCSS}, true
					case resolver.MIMETypeTextJavaScript:
						return loaderPluginResult{loader: config.LoaderJS}, true
					case resolver.MIMETypeApplicationJSON:
						return loaderPluginResult{loader: config.LoaderJSON}, true
					}
				}
			}
		}
	}

	// Otherwise, fail to load the path
	return loaderPluginResult{loader: config.LoaderNone}, true
}
This is supported natively by node: + // https://nodejs.org/docs/latest/api/esm.html#esm_data_imports + if source.KeyPath.Namespace == "dataurl" { + if parsed, ok := resolver.ParseDataURL(source.KeyPath.Text); ok { + if contents, err := parsed.DecodeData(); err != nil { + log.AddError(&tracker, importPathRange, + fmt.Sprintf("Could not load data URL: %s", err.Error())) + return loaderPluginResult{loader: config.LoaderNone}, true + } else { + source.Contents = contents + if loader != config.LoaderDefault { + return loaderPluginResult{loader: loader}, true + } + if mimeType := parsed.DecodeMIMEType(); mimeType != resolver.MIMETypeUnsupported { + switch mimeType { + case resolver.MIMETypeTextCSS: + return loaderPluginResult{loader: config.LoaderCSS}, true + case resolver.MIMETypeTextJavaScript: + return loaderPluginResult{loader: config.LoaderJS}, true + case resolver.MIMETypeApplicationJSON: + return loaderPluginResult{loader: config.LoaderJSON}, true + } + } + } + } + } + + // Otherwise, fail to load the path + return loaderPluginResult{loader: config.LoaderNone}, true +} + +func loaderFromFileExtension(extensionToLoader map[string]config.Loader, base string) config.Loader { + // Pick the loader with the longest matching extension. So if there's an + // extension for ".css" and for ".module.css", we want to match the one for + // ".module.css" before the one for ".css". + if i := strings.IndexByte(base, '.'); i != -1 { + for { + if loader, ok := extensionToLoader[base[i:]]; ok { + return loader + } + base = base[i+1:] + i = strings.IndexByte(base, '.') + if i == -1 { + break + } + } + } else { + // If there's no extension, explicitly check for an extensionless loader + if loader, ok := extensionToLoader[""]; ok { + return loader + } + } + return config.LoaderNone +} + +// Identify the path by its lowercase absolute path name with Windows-specific +// slashes substituted for standard slashes. 
This should hopefully avoid path +// issues on Windows where multiple different paths can refer to the same +// underlying file. +func canonicalFileSystemPathForWindows(absPath string) string { + return strings.ReplaceAll(strings.ToLower(absPath), "\\", "/") +} + +func HashForFileName(hashBytes []byte) string { + return base32.StdEncoding.EncodeToString(hashBytes)[:8] +} + +type scanner struct { + log logger.Log + fs fs.FS + res *resolver.Resolver + caches *cache.CacheSet + timer *helpers.Timer + uniqueKeyPrefix string + + // These are not guarded by a mutex because it's only ever modified by a single + // thread. Note that not all results in the "results" array are necessarily + // valid. Make sure to check the "ok" flag before using them. + results []parseResult + visited map[logger.Path]visitedFile + resultChannel chan parseResult + + options config.Options + + // Also not guarded by a mutex for the same reason + remaining int +} + +type visitedFile struct { + sourceIndex uint32 +} + +type EntryPoint struct { + InputPath string + OutputPath string + InputPathInFileNamespace bool +} + +func generateUniqueKeyPrefix() (string, error) { + var data [12]byte + rand.Seed(time.Now().UnixNano()) + if _, err := rand.Read(data[:]); err != nil { + return "", err + } + + // This is 16 bytes and shouldn't generate escape characters when put into strings + return base64.URLEncoding.EncodeToString(data[:]), nil +} + +// This creates a bundle by scanning over the whole module graph starting from +// the entry points until all modules are reached. Each module has some number +// of import paths which are resolved to module identifiers (i.e. "onResolve" +// in the plugin API). Each unique module identifier is loaded once (i.e. +// "onLoad" in the plugin API). 
// This creates a bundle by scanning over the whole module graph starting from
// the entry points until all modules are reached. Each module has some number
// of import paths which are resolved to module identifiers (i.e. "onResolve"
// in the plugin API). Each unique module identifier is loaded once (i.e.
// "onLoad" in the plugin API).
//
// The scan proceeds in ordered phases — runtime parse, "onStart" callbacks,
// injected-file preprocessing, entry-point resolution, dependency scanning,
// and post-processing — with a cancellation check between each phase.
func ScanBundle(
	call config.APICall,
	log logger.Log,
	fs fs.FS,
	caches *cache.CacheSet,
	entryPoints []EntryPoint,
	options config.Options,
	timer *helpers.Timer,
) Bundle {
	timer.Begin("Scan phase")
	defer timer.End("Scan phase")

	applyOptionDefaults(&options)

	// Run "onStart" plugins in parallel. IMPORTANT: We always need to run all
	// "onStart" callbacks even when the build is cancelled, because plugins may
	// rely on invariants that are started in "onStart" and ended in "onEnd".
	// This works because "onEnd" callbacks are always run as well.
	timer.Begin("On-start callbacks")
	onStartWaitGroup := sync.WaitGroup{}
	for _, plugin := range options.Plugins {
		for _, onStart := range plugin.OnStart {
			onStartWaitGroup.Add(1)
			// Pass plugin/onStart as arguments so each goroutine captures its own
			// values rather than the loop variables.
			go func(plugin config.Plugin, onStart config.OnStart) {
				result := onStart.Callback()
				logPluginMessages(fs, log, plugin.Name, result.Msgs, result.ThrownError, nil, logger.Range{})
				onStartWaitGroup.Done()
			}(plugin, onStart)
		}
	}

	// Each bundling operation gets a separate unique key
	uniqueKeyPrefix, err := generateUniqueKeyPrefix()
	if err != nil {
		log.AddError(nil, logger.Range{}, fmt.Sprintf("Failed to read from randomness source: %s", err.Error()))
	}

	// This may mutate "options" by the "tsconfig.json" override settings
	res := resolver.NewResolver(call, fs, log, caches, &options)

	s := scanner{
		log:             log,
		fs:              fs,
		res:             res,
		caches:          caches,
		options:         options,
		timer:           timer,
		results:         make([]parseResult, 0, caches.SourceIndexCache.LenHint()),
		visited:         make(map[logger.Path]visitedFile),
		resultChannel:   make(chan parseResult),
		uniqueKeyPrefix: uniqueKeyPrefix,
	}

	// Always start by parsing the runtime file
	s.results = append(s.results, parseResult{})
	s.remaining++
	go func() {
		source, ast, ok := globalRuntimeCache.parseRuntime(&options)
		s.resultChannel <- parseResult{
			file: scannerFile{
				inputFile: graph.InputFile{
					Source: source,
					Repr: &graph.JSRepr{
						AST: ast,
					},
					OmitFromSourceMapsAndMetafile: true,
				},
			},
			ok: ok,
		}
	}()

	// Wait for all "onStart" plugins here before continuing. People sometimes run
	// setup code in "onStart" that "onLoad" expects to be able to use without
	// "onLoad" needing to block on the completion of their "onStart" callback.
	//
	// We want to enable this:
	//
	//   let plugin = {
	//     name: 'example',
	//     setup(build) {
	//       let started = false
	//       build.onStart(() => started = true)
	//       build.onLoad({ filter: /.*/ }, () => {
	//         assert(started === true)
	//       })
	//     },
	//   }
	//
	// without people having to write something like this:
	//
	//   let plugin = {
	//     name: 'example',
	//     setup(build) {
	//       let started = {}
	//       started.promise = new Promise(resolve => {
	//         started.resolve = resolve
	//       })
	//       build.onStart(() => {
	//         started.resolve(true)
	//       })
	//       build.onLoad({ filter: /.*/ }, async () => {
	//         assert(await started.promise === true)
	//       })
	//     },
	//   }
	//
	onStartWaitGroup.Wait()
	timer.End("On-start callbacks")

	// We can check the cancel flag now that all "onStart" callbacks are done
	if options.CancelFlag.DidCancel() {
		return Bundle{options: options}
	}

	s.preprocessInjectedFiles()

	if options.CancelFlag.DidCancel() {
		return Bundle{options: options}
	}

	entryPointMeta := s.addEntryPoints(entryPoints)

	if options.CancelFlag.DidCancel() {
		return Bundle{options: options}
	}

	s.scanAllDependencies()

	if options.CancelFlag.DidCancel() {
		return Bundle{options: options}
	}

	files := s.processScannedFiles(entryPointMeta)

	if options.CancelFlag.DidCancel() {
		return Bundle{options: options}
	}

	return Bundle{
		fs:              fs,
		res:             s.res,
		files:           files,
		entryPoints:     entryPointMeta,
		uniqueKeyPrefix: uniqueKeyPrefix,
		options:         s.options,
	}
}

// inputKind classifies how a file entered the scan: discovered through an
// import, given as an explicit entry point, or piped in via stdin.
type inputKind uint8

const (
	inputKindNormal inputKind = iota
	inputKindEntryPoint
	inputKindStdin
)
the source index of the resulting file +func (s *scanner) maybeParseFile( + resolveResult resolver.ResolveResult, + prettyPath string, + importSource *logger.Source, + importPathRange logger.Range, + importWith *ast.ImportAssertOrWith, + kind inputKind, + inject chan config.InjectedFile, +) uint32 { + path := resolveResult.PathPair.Primary + visitedKey := path + if visitedKey.Namespace == "file" { + visitedKey.Text = canonicalFileSystemPathForWindows(visitedKey.Text) + } + + // Only parse a given file path once + visited, ok := s.visited[visitedKey] + if ok { + if inject != nil { + inject <- config.InjectedFile{} + } + return visited.sourceIndex + } + + visited = visitedFile{ + sourceIndex: s.allocateSourceIndex(visitedKey, cache.SourceIndexNormal), + } + s.visited[visitedKey] = visited + s.remaining++ + optionsClone := s.options + if kind != inputKindStdin { + optionsClone.Stdin = nil + } + + // Allow certain properties to be overridden by "tsconfig.json" + resolveResult.TSConfigJSX.ApplyTo(&optionsClone.JSX) + if resolveResult.TSConfig != nil { + optionsClone.TS.Config = *resolveResult.TSConfig + } + if resolveResult.TSAlwaysStrict != nil { + optionsClone.TSAlwaysStrict = resolveResult.TSAlwaysStrict + } + + // Set the module type preference using node's module type rules + if strings.HasSuffix(path.Text, ".mjs") { + optionsClone.ModuleTypeData.Type = js_ast.ModuleESM_MJS + } else if strings.HasSuffix(path.Text, ".mts") { + optionsClone.ModuleTypeData.Type = js_ast.ModuleESM_MTS + } else if strings.HasSuffix(path.Text, ".cjs") { + optionsClone.ModuleTypeData.Type = js_ast.ModuleCommonJS_CJS + } else if strings.HasSuffix(path.Text, ".cts") { + optionsClone.ModuleTypeData.Type = js_ast.ModuleCommonJS_CTS + } else if strings.HasSuffix(path.Text, ".js") || strings.HasSuffix(path.Text, ".jsx") || + strings.HasSuffix(path.Text, ".ts") || strings.HasSuffix(path.Text, ".tsx") { + optionsClone.ModuleTypeData = resolveResult.ModuleTypeData + } else { + // The "type" 
setting in "package.json" only applies to ".js" files + optionsClone.ModuleTypeData.Type = js_ast.ModuleUnknown + } + + // Enable bundling for injected files so we always do tree shaking. We + // never want to include unnecessary code from injected files since they + // are essentially bundled. However, if we do this we should skip the + // resolving step when we're not bundling. It'd be strange to get + // resolution errors when the top-level bundling controls are disabled. + skipResolve := false + if inject != nil && optionsClone.Mode != config.ModeBundle { + optionsClone.Mode = config.ModeBundle + skipResolve = true + } + + // Special-case pretty-printed paths for data URLs + if path.Namespace == "dataurl" { + if _, ok := resolver.ParseDataURL(path.Text); ok { + prettyPath = path.Text + if len(prettyPath) > 65 { + prettyPath = prettyPath[:65] + } + prettyPath = strings.ReplaceAll(prettyPath, "\n", "\\n") + if len(prettyPath) > 64 { + prettyPath = prettyPath[:64] + "..." + } + prettyPath = fmt.Sprintf("<%s>", prettyPath) + } + } + + var sideEffects graph.SideEffects + if resolveResult.PrimarySideEffectsData != nil { + sideEffects.Kind = graph.NoSideEffects_PackageJSON + sideEffects.Data = resolveResult.PrimarySideEffectsData + } + + go parseFile(parseArgs{ + fs: s.fs, + log: s.log, + res: s.res, + caches: s.caches, + keyPath: path, + prettyPath: prettyPath, + sourceIndex: visited.sourceIndex, + importSource: importSource, + sideEffects: sideEffects, + importPathRange: importPathRange, + importWith: importWith, + pluginData: resolveResult.PluginData, + options: optionsClone, + results: s.resultChannel, + inject: inject, + skipResolve: skipResolve, + uniqueKeyPrefix: s.uniqueKeyPrefix, + }) + + return visited.sourceIndex +} + +func (s *scanner) allocateSourceIndex(path logger.Path, kind cache.SourceIndexKind) uint32 { + // Allocate a source index using the shared source index cache so that + // subsequent builds reuse the same source index and therefore use the + 
// cached parse results for increased speed. + sourceIndex := s.caches.SourceIndexCache.Get(path, kind) + + // Grow the results array to fit this source index + if newLen := int(sourceIndex) + 1; len(s.results) < newLen { + // Reallocate to a bigger array + if cap(s.results) < newLen { + s.results = append(make([]parseResult, 0, 2*newLen), s.results...) + } + + // Grow in place + s.results = s.results[:newLen] + } + + return sourceIndex +} + +func (s *scanner) allocateGlobSourceIndex(parentSourceIndex uint32, globIndex uint32) uint32 { + // Allocate a source index using the shared source index cache so that + // subsequent builds reuse the same source index and therefore use the + // cached parse results for increased speed. + sourceIndex := s.caches.SourceIndexCache.GetGlob(parentSourceIndex, globIndex) + + // Grow the results array to fit this source index + if newLen := int(sourceIndex) + 1; len(s.results) < newLen { + // Reallocate to a bigger array + if cap(s.results) < newLen { + s.results = append(make([]parseResult, 0, 2*newLen), s.results...) + } + + // Grow in place + s.results = s.results[:newLen] + } + + return sourceIndex +} + +func (s *scanner) preprocessInjectedFiles() { + s.timer.Begin("Preprocess injected files") + defer s.timer.End("Preprocess injected files") + + injectedFiles := make([]config.InjectedFile, 0, len(s.options.InjectedDefines)+len(s.options.InjectPaths)) + + // These are virtual paths that are generated for compound "--define" values. + // They are special-cased and are not available for plugins to intercept. 
+ for _, define := range s.options.InjectedDefines {
+ // These should be unique by construction so no need to check for collisions
+ visitedKey := logger.Path{Text: fmt.Sprintf("<define:%s>", define.Name)}
+ sourceIndex := s.allocateSourceIndex(visitedKey, cache.SourceIndexNormal)
+ s.visited[visitedKey] = visitedFile{sourceIndex: sourceIndex}
+ source := logger.Source{
+ Index: sourceIndex,
+ KeyPath: visitedKey,
+ PrettyPath: resolver.PrettyPath(s.fs, visitedKey),
+ IdentifierName: js_ast.EnsureValidIdentifier(visitedKey.Text),
+ }
+
+ // The first "len(InjectedDefine)" injected files intentionally line up
+ // with the injected defines by index. The index will be used to import
+ // references to them in the parser.
+ injectedFiles = append(injectedFiles, config.InjectedFile{
+ Source: source,
+ DefineName: define.Name,
+ })
+
+ // Generate the file inline here since it has already been parsed
+ expr := js_ast.Expr{Data: define.Data}
+ ast := js_parser.LazyExportAST(s.log, source, js_parser.OptionsFromConfig(&s.options), expr, "")
+ result := parseResult{
+ ok: true,
+ file: scannerFile{
+ inputFile: graph.InputFile{
+ Source: source,
+ Repr: &graph.JSRepr{AST: ast},
+ Loader: config.LoaderJSON,
+ SideEffects: graph.SideEffects{
+ Kind: graph.NoSideEffects_PureData,
+ },
+ },
+ },
+ }
+
+ // Append to the channel on a goroutine in case it blocks due to capacity
+ s.remaining++
+ go func() { s.resultChannel <- result }()
+ }
+
+ // Add user-specified injected files. Run resolver plugins on these files
+ // so plugins can alter where they resolve to. These are run in parallel in
+ // case any of these plugins block.
+ injectResolveResults := make([]*resolver.ResolveResult, len(s.options.InjectPaths)) + injectAbsResolveDir := s.fs.Cwd() + injectResolveWaitGroup := sync.WaitGroup{} + injectResolveWaitGroup.Add(len(s.options.InjectPaths)) + for i, importPath := range s.options.InjectPaths { + go func(i int, importPath string) { + var importer logger.Path + + // Add a leading "./" if it's missing, similar to entry points + absPath := importPath + if !s.fs.IsAbs(absPath) { + absPath = s.fs.Join(injectAbsResolveDir, absPath) + } + dir := s.fs.Dir(absPath) + base := s.fs.Base(absPath) + if entries, err, originalError := s.fs.ReadDirectory(dir); err == nil { + if entry, _ := entries.Get(base); entry != nil && entry.Kind(s.fs) == fs.FileEntry { + importer.Namespace = "file" + if !s.fs.IsAbs(importPath) && resolver.IsPackagePath(importPath) { + importPath = "./" + importPath + } + } + } else if s.log.Level <= logger.LevelDebug && originalError != nil { + s.log.AddID(logger.MsgID_None, logger.Debug, nil, logger.Range{}, fmt.Sprintf("Failed to read directory %q: %s", absPath, originalError.Error())) + } + + // Run the resolver and log an error if the path couldn't be resolved + resolveResult, didLogError, debug := RunOnResolvePlugins( + s.options.Plugins, + s.res, + s.log, + s.fs, + &s.caches.FSCache, + nil, + logger.Range{}, + importer, + importPath, + ast.ImportEntryPoint, + injectAbsResolveDir, + nil, + ) + if resolveResult != nil { + if resolveResult.PathPair.IsExternal { + s.log.AddError(nil, logger.Range{}, fmt.Sprintf("The injected path %q cannot be marked as external", importPath)) + } else { + injectResolveResults[i] = resolveResult + } + } else if !didLogError { + debug.LogErrorMsg(s.log, nil, logger.Range{}, fmt.Sprintf("Could not resolve %q", importPath), "", nil) + } + injectResolveWaitGroup.Done() + }(i, importPath) + } + injectResolveWaitGroup.Wait() + + if s.options.CancelFlag.DidCancel() { + return + } + + // Parse all entry points that were resolved successfully + 
results := make([]config.InjectedFile, len(s.options.InjectPaths)) + j := 0 + var injectWaitGroup sync.WaitGroup + for _, resolveResult := range injectResolveResults { + if resolveResult != nil { + channel := make(chan config.InjectedFile, 1) + s.maybeParseFile(*resolveResult, resolver.PrettyPath(s.fs, resolveResult.PathPair.Primary), nil, logger.Range{}, nil, inputKindNormal, channel) + injectWaitGroup.Add(1) + + // Wait for the results in parallel. The results slice is large enough so + // it is not reallocated during the computations. + go func(i int) { + results[i] = <-channel + injectWaitGroup.Done() + }(j) + j++ + } + } + injectWaitGroup.Wait() + injectedFiles = append(injectedFiles, results[:j]...) + + // It's safe to mutate the options object to add the injected files here + // because there aren't any concurrent "parseFile" goroutines at this point. + // The only ones that were created by this point are the ones we created + // above, and we've already waited for all of them to finish using the + // "options" object. 
+ s.options.InjectedFiles = injectedFiles
+}
+
+func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint {
+ s.timer.Begin("Add entry points")
+ defer s.timer.End("Add entry points")
+
+ // Reserve a slot for each entry point
+ entryMetas := make([]graph.EntryPoint, 0, len(entryPoints)+1)
+
+ // Treat stdin as an extra entry point
+ if stdin := s.options.Stdin; stdin != nil {
+ stdinPath := logger.Path{Text: "<stdin>"}
+ if stdin.SourceFile != "" {
+ if stdin.AbsResolveDir == "" {
+ stdinPath = logger.Path{Text: stdin.SourceFile}
+ } else if s.fs.IsAbs(stdin.SourceFile) {
+ stdinPath = logger.Path{Text: stdin.SourceFile, Namespace: "file"}
+ } else {
+ stdinPath = logger.Path{Text: s.fs.Join(stdin.AbsResolveDir, stdin.SourceFile), Namespace: "file"}
+ }
+ }
+ resolveResult := resolver.ResolveResult{PathPair: resolver.PathPair{Primary: stdinPath}}
+ sourceIndex := s.maybeParseFile(resolveResult, resolver.PrettyPath(s.fs, stdinPath), nil, logger.Range{}, nil, inputKindStdin, nil)
+ entryMetas = append(entryMetas, graph.EntryPoint{
+ OutputPath: "stdin",
+ SourceIndex: sourceIndex,
+ })
+ }
+
+ if s.options.CancelFlag.DidCancel() {
+ return nil
+ }
+
+ // Check each entry point ahead of time to see if it's a real file
+ entryPointAbsResolveDir := s.fs.Cwd()
+ for i := range entryPoints {
+ entryPoint := &entryPoints[i]
+ absPath := entryPoint.InputPath
+ if strings.ContainsRune(absPath, '*') {
+ continue // Ignore glob patterns
+ }
+ if !s.fs.IsAbs(absPath) {
+ absPath = s.fs.Join(entryPointAbsResolveDir, absPath)
+ }
+ dir := s.fs.Dir(absPath)
+ base := s.fs.Base(absPath)
+ if entries, err, originalError := s.fs.ReadDirectory(dir); err == nil {
+ if entry, _ := entries.Get(base); entry != nil && entry.Kind(s.fs) == fs.FileEntry {
+ entryPoint.InputPathInFileNamespace = true
+
+ // Entry point paths without a leading "./" are interpreted as package
+ // paths.
This happens because they go through general path resolution + // like all other import paths so that plugins can run on them. Requiring + // a leading "./" for a relative path simplifies writing plugins because + // entry points aren't a special case. + // + // However, requiring a leading "./" also breaks backward compatibility + // and makes working with the CLI more difficult. So attempt to insert + // "./" automatically when needed. We don't want to unconditionally insert + // a leading "./" because the path may not be a file system path. For + // example, it may be a URL. So only insert a leading "./" when the path + // is an exact match for an existing file. + if !s.fs.IsAbs(entryPoint.InputPath) && resolver.IsPackagePath(entryPoint.InputPath) { + entryPoint.InputPath = "./" + entryPoint.InputPath + } + } + } else if s.log.Level <= logger.LevelDebug && originalError != nil { + s.log.AddID(logger.MsgID_None, logger.Debug, nil, logger.Range{}, fmt.Sprintf("Failed to read directory %q: %s", absPath, originalError.Error())) + } + } + + if s.options.CancelFlag.DidCancel() { + return nil + } + + // Add any remaining entry points. Run resolver plugins on these entry points + // so plugins can alter where they resolve to. These are run in parallel in + // case any of these plugins block. 
+ type entryPointInfo struct { + results []resolver.ResolveResult + isGlob bool + } + entryPointInfos := make([]entryPointInfo, len(entryPoints)) + entryPointWaitGroup := sync.WaitGroup{} + entryPointWaitGroup.Add(len(entryPoints)) + for i, entryPoint := range entryPoints { + go func(i int, entryPoint EntryPoint) { + var importer logger.Path + if entryPoint.InputPathInFileNamespace { + importer.Namespace = "file" + } + + // Special-case glob patterns here + if strings.ContainsRune(entryPoint.InputPath, '*') { + if pattern := helpers.ParseGlobPattern(entryPoint.InputPath); len(pattern) > 1 { + prettyPattern := fmt.Sprintf("%q", entryPoint.InputPath) + if results, msg := s.res.ResolveGlob(entryPointAbsResolveDir, pattern, ast.ImportEntryPoint, prettyPattern); results != nil { + keys := make([]string, 0, len(results)) + for key := range results { + keys = append(keys, key) + } + sort.Strings(keys) + info := entryPointInfo{isGlob: true} + for _, key := range keys { + info.results = append(info.results, results[key]) + } + entryPointInfos[i] = info + if msg != nil { + s.log.AddID(msg.ID, msg.Kind, nil, logger.Range{}, msg.Data.Text) + } + } else { + s.log.AddError(nil, logger.Range{}, fmt.Sprintf("Could not resolve %q", entryPoint.InputPath)) + } + entryPointWaitGroup.Done() + return + } + } + + // Run the resolver and log an error if the path couldn't be resolved + resolveResult, didLogError, debug := RunOnResolvePlugins( + s.options.Plugins, + s.res, + s.log, + s.fs, + &s.caches.FSCache, + nil, + logger.Range{}, + importer, + entryPoint.InputPath, + ast.ImportEntryPoint, + entryPointAbsResolveDir, + nil, + ) + if resolveResult != nil { + if resolveResult.PathPair.IsExternal { + s.log.AddError(nil, logger.Range{}, fmt.Sprintf("The entry point %q cannot be marked as external", entryPoint.InputPath)) + } else { + entryPointInfos[i] = entryPointInfo{results: []resolver.ResolveResult{*resolveResult}} + } + } else if !didLogError { + var notes []logger.MsgData + if 
!s.fs.IsAbs(entryPoint.InputPath) { + if query, _ := s.res.ProbeResolvePackageAsRelative(entryPointAbsResolveDir, entryPoint.InputPath, ast.ImportEntryPoint); query != nil { + notes = append(notes, logger.MsgData{ + Text: fmt.Sprintf("Use the relative path %q to reference the file %q. "+ + "Without the leading \"./\", the path %q is being interpreted as a package path instead.", + "./"+entryPoint.InputPath, resolver.PrettyPath(s.fs, query.PathPair.Primary), entryPoint.InputPath), + }) + } + } + debug.LogErrorMsg(s.log, nil, logger.Range{}, fmt.Sprintf("Could not resolve %q", entryPoint.InputPath), "", notes) + } + entryPointWaitGroup.Done() + }(i, entryPoint) + } + entryPointWaitGroup.Wait() + + if s.options.CancelFlag.DidCancel() { + return nil + } + + // Parse all entry points that were resolved successfully + for i, info := range entryPointInfos { + if info.results == nil { + continue + } + + for _, resolveResult := range info.results { + prettyPath := resolver.PrettyPath(s.fs, resolveResult.PathPair.Primary) + sourceIndex := s.maybeParseFile(resolveResult, prettyPath, nil, logger.Range{}, nil, inputKindEntryPoint, nil) + outputPath := entryPoints[i].OutputPath + outputPathWasAutoGenerated := false + + // If the output path is missing, automatically generate one from the input path + if outputPath == "" { + if info.isGlob { + outputPath = prettyPath + } else { + outputPath = entryPoints[i].InputPath + } + windowsVolumeLabel := "" + + // The ":" character is invalid in file paths on Windows except when + // it's used as a volume separator. Special-case that here so volume + // labels don't break on Windows. 
+ if s.fs.IsAbs(outputPath) && len(outputPath) >= 3 && outputPath[1] == ':' { + if c := outputPath[0]; (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') { + if c := outputPath[2]; c == '/' || c == '\\' { + windowsVolumeLabel = outputPath[:3] + outputPath = outputPath[3:] + } + } + } + + // For cross-platform robustness, do not allow characters in the output + // path that are invalid on Windows. This is especially relevant when + // the input path is something other than a file path, such as a URL. + outputPath = sanitizeFilePathForVirtualModulePath(outputPath) + if windowsVolumeLabel != "" { + outputPath = windowsVolumeLabel + outputPath + } + outputPathWasAutoGenerated = true + } + + entryMetas = append(entryMetas, graph.EntryPoint{ + OutputPath: outputPath, + SourceIndex: sourceIndex, + OutputPathWasAutoGenerated: outputPathWasAutoGenerated, + }) + } + } + + // Turn all automatically-generated output paths into absolute paths + for i := range entryMetas { + entryPoint := &entryMetas[i] + if entryPoint.OutputPathWasAutoGenerated && !s.fs.IsAbs(entryPoint.OutputPath) { + entryPoint.OutputPath = s.fs.Join(entryPointAbsResolveDir, entryPoint.OutputPath) + } + } + + // Automatically compute "outbase" if it wasn't provided + if s.options.AbsOutputBase == "" { + s.options.AbsOutputBase = lowestCommonAncestorDirectory(s.fs, entryMetas) + if s.options.AbsOutputBase == "" { + s.options.AbsOutputBase = entryPointAbsResolveDir + } + } + + // Turn all output paths back into relative paths, but this time relative to + // the "outbase" value we computed above + for i := range entryMetas { + entryPoint := &entryMetas[i] + if s.fs.IsAbs(entryPoint.OutputPath) { + if !entryPoint.OutputPathWasAutoGenerated { + // If an explicit absolute output path was specified, use the path + // relative to the "outdir" directory + if relPath, ok := s.fs.Rel(s.options.AbsOutputDir, entryPoint.OutputPath); ok { + entryPoint.OutputPath = relPath + } + } else { + // Otherwise if the absolute output 
path was derived from the input + // path, use the path relative to the "outbase" directory + if relPath, ok := s.fs.Rel(s.options.AbsOutputBase, entryPoint.OutputPath); ok { + entryPoint.OutputPath = relPath + } + + // Strip the file extension from the output path if there is one so the + // "out extension" setting is used instead + if last := strings.LastIndexAny(entryPoint.OutputPath, "/.\\"); last != -1 && entryPoint.OutputPath[last] == '.' { + entryPoint.OutputPath = entryPoint.OutputPath[:last] + } + } + } + } + + return entryMetas +} + +func lowestCommonAncestorDirectory(fs fs.FS, entryPoints []graph.EntryPoint) string { + // Ignore any explicitly-specified output paths + absPaths := make([]string, 0, len(entryPoints)) + for _, entryPoint := range entryPoints { + if entryPoint.OutputPathWasAutoGenerated { + absPaths = append(absPaths, entryPoint.OutputPath) + } + } + + if len(absPaths) == 0 { + return "" + } + + lowestAbsDir := fs.Dir(absPaths[0]) + + for _, absPath := range absPaths[1:] { + absDir := fs.Dir(absPath) + lastSlash := 0 + a := 0 + b := 0 + + for { + runeA, widthA := utf8.DecodeRuneInString(absDir[a:]) + runeB, widthB := utf8.DecodeRuneInString(lowestAbsDir[b:]) + boundaryA := widthA == 0 || runeA == '/' || runeA == '\\' + boundaryB := widthB == 0 || runeB == '/' || runeB == '\\' + + if boundaryA && boundaryB { + if widthA == 0 || widthB == 0 { + // Truncate to the smaller path if one path is a prefix of the other + lowestAbsDir = absDir[:a] + break + } else { + // Track the longest common directory so far + lastSlash = a + } + } else if boundaryA != boundaryB || unicode.ToLower(runeA) != unicode.ToLower(runeB) { + // If we're at the top-level directory, then keep the slash + if lastSlash < len(absDir) && !strings.ContainsAny(absDir[:lastSlash], "\\/") { + lastSlash++ + } + + // If both paths are different at this point, stop and set the lowest so + // far to the common parent directory. 
Compare using a case-insensitive + // comparison to handle paths on Windows. + lowestAbsDir = absDir[:lastSlash] + break + } + + a += widthA + b += widthB + } + } + + return lowestAbsDir +} + +func (s *scanner) scanAllDependencies() { + s.timer.Begin("Scan all dependencies") + defer s.timer.End("Scan all dependencies") + + // Continue scanning until all dependencies have been discovered + for s.remaining > 0 { + if s.options.CancelFlag.DidCancel() { + return + } + + result := <-s.resultChannel + s.remaining-- + if !result.ok { + continue + } + + // Don't try to resolve paths if we're not bundling + if recordsPtr := result.file.inputFile.Repr.ImportRecords(); s.options.Mode == config.ModeBundle && recordsPtr != nil { + records := *recordsPtr + for importRecordIndex := range records { + record := &records[importRecordIndex] + + // This is used for error messages + var with *ast.ImportAssertOrWith + if record.AssertOrWith != nil && record.AssertOrWith.Keyword == ast.WithKeyword { + with = record.AssertOrWith + } + + // Skip this import record if the previous resolver call failed + resolveResult := result.resolveResults[importRecordIndex] + if resolveResult == nil { + if globResults := result.globResolveResults[uint32(importRecordIndex)]; globResults.resolveResults != nil { + sourceIndex := s.allocateGlobSourceIndex(result.file.inputFile.Source.Index, uint32(importRecordIndex)) + record.SourceIndex = ast.MakeIndex32(sourceIndex) + s.results[sourceIndex] = s.generateResultForGlobResolve(sourceIndex, globResults.absPath, + &result.file.inputFile.Source, record.Range, with, record.GlobPattern.Kind, globResults, record.AssertOrWith) + } + continue + } + + path := resolveResult.PathPair.Primary + if !resolveResult.PathPair.IsExternal { + // Handle a path within the bundle + sourceIndex := s.maybeParseFile(*resolveResult, resolver.PrettyPath(s.fs, path), + &result.file.inputFile.Source, record.Range, with, inputKindNormal, nil) + record.SourceIndex = 
ast.MakeIndex32(sourceIndex) + } else { + // Allow this import statement to be removed if something marked it as "sideEffects: false" + if resolveResult.PrimarySideEffectsData != nil { + record.Flags |= ast.IsExternalWithoutSideEffects + } + + // If the path to the external module is relative to the source + // file, rewrite the path to be relative to the working directory + if path.Namespace == "file" { + if relPath, ok := s.fs.Rel(s.options.AbsOutputDir, path.Text); ok { + // Prevent issues with path separators being different on Windows + relPath = strings.ReplaceAll(relPath, "\\", "/") + if resolver.IsPackagePath(relPath) { + relPath = "./" + relPath + } + record.Path.Text = relPath + } else { + record.Path = path + } + } else { + record.Path = path + } + } + } + } + + s.results[result.file.inputFile.Source.Index] = result + } +} + +func (s *scanner) generateResultForGlobResolve( + sourceIndex uint32, + fakeSourcePath string, + importSource *logger.Source, + importRange logger.Range, + importWith *ast.ImportAssertOrWith, + kind ast.ImportKind, + result globResolveResult, + assertions *ast.ImportAssertOrWith, +) parseResult { + keys := make([]string, 0, len(result.resolveResults)) + for key := range result.resolveResults { + keys = append(keys, key) + } + sort.Strings(keys) + + object := js_ast.EObject{Properties: make([]js_ast.Property, 0, len(result.resolveResults))} + importRecords := make([]ast.ImportRecord, 0, len(result.resolveResults)) + resolveResults := make([]*resolver.ResolveResult, 0, len(result.resolveResults)) + + for _, key := range keys { + resolveResult := result.resolveResults[key] + var value js_ast.Expr + + importRecordIndex := uint32(len(importRecords)) + var sourceIndex ast.Index32 + + if !resolveResult.PathPair.IsExternal { + sourceIndex = ast.MakeIndex32(s.maybeParseFile( + resolveResult, + resolver.PrettyPath(s.fs, resolveResult.PathPair.Primary), + importSource, + importRange, + importWith, + inputKindNormal, + nil, + )) + } + + path := 
resolveResult.PathPair.Primary + + // If the path to the external module is relative to the source + // file, rewrite the path to be relative to the working directory + if path.Namespace == "file" { + if relPath, ok := s.fs.Rel(s.options.AbsOutputDir, path.Text); ok { + // Prevent issues with path separators being different on Windows + relPath = strings.ReplaceAll(relPath, "\\", "/") + if resolver.IsPackagePath(relPath) { + relPath = "./" + relPath + } + path.Text = relPath + } + } + + resolveResults = append(resolveResults, &resolveResult) + importRecords = append(importRecords, ast.ImportRecord{ + Path: path, + SourceIndex: sourceIndex, + AssertOrWith: assertions, + Kind: kind, + }) + + switch kind { + case ast.ImportDynamic: + value.Data = &js_ast.EImportString{ImportRecordIndex: importRecordIndex} + case ast.ImportRequire: + value.Data = &js_ast.ERequireString{ImportRecordIndex: importRecordIndex} + default: + panic("Internal error") + } + + object.Properties = append(object.Properties, js_ast.Property{ + Key: js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(key)}}, + ValueOrNil: js_ast.Expr{Data: &js_ast.EArrow{ + Body: js_ast.FnBody{Block: js_ast.SBlock{Stmts: []js_ast.Stmt{{Data: &js_ast.SReturn{ValueOrNil: value}}}}}, + PreferExpr: true, + }}, + }) + } + + source := logger.Source{ + KeyPath: logger.Path{Text: fakeSourcePath, Namespace: "file"}, + PrettyPath: result.prettyPath, + Index: sourceIndex, + } + ast := js_parser.GlobResolveAST(s.log, source, importRecords, &object, result.exportAlias) + + // Fill out "nil" for any additional imports (i.e. 
from the runtime) + for len(resolveResults) < len(ast.ImportRecords) { + resolveResults = append(resolveResults, nil) + } + + return parseResult{ + resolveResults: resolveResults, + file: scannerFile{ + inputFile: graph.InputFile{ + Source: source, + Repr: &graph.JSRepr{ + AST: ast, + }, + OmitFromSourceMapsAndMetafile: true, + }, + }, + ok: true, + } +} + +func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scannerFile { + s.timer.Begin("Process scanned files") + defer s.timer.End("Process scanned files") + + // Build a set of entry point source indices for quick lookup + entryPointSourceIndexToMetaIndex := make(map[uint32]uint32, len(entryPointMeta)) + for i, meta := range entryPointMeta { + entryPointSourceIndexToMetaIndex[meta.SourceIndex] = uint32(i) + } + + // Check for pretty-printed path collisions + importAttributeNameCollisions := make(map[string][]uint32) + for sourceIndex := range s.results { + if result := &s.results[sourceIndex]; result.ok { + prettyPath := result.file.inputFile.Source.PrettyPath + importAttributeNameCollisions[prettyPath] = append(importAttributeNameCollisions[prettyPath], uint32(sourceIndex)) + } + } + + // Import attributes can result in the same file being imported multiple + // times in different ways. If that happens, append the import attributes + // to the pretty-printed file names to disambiguate them. This renaming + // must happen before we construct the metafile JSON chunks below. 
+ for _, sourceIndices := range importAttributeNameCollisions { + if len(sourceIndices) == 1 { + continue + } + + for _, sourceIndex := range sourceIndices { + source := &s.results[sourceIndex].file.inputFile.Source + attrs := source.KeyPath.ImportAttributes.Decode() + if len(attrs) == 0 { + continue + } + + var sb strings.Builder + sb.WriteString(" with {") + for i, attr := range attrs { + if i > 0 { + sb.WriteByte(',') + } + sb.WriteByte(' ') + if js_ast.IsIdentifier(attr.Key) { + sb.WriteString(attr.Key) + } else { + sb.Write(helpers.QuoteSingle(attr.Key, false)) + } + sb.WriteString(": ") + sb.Write(helpers.QuoteSingle(attr.Value, false)) + } + sb.WriteString(" }") + source.PrettyPath += sb.String() + } + } + + // Now that all files have been scanned, process the final file import records + for sourceIndex, result := range s.results { + if !result.ok { + continue + } + + sb := strings.Builder{} + isFirstImport := true + + // Begin the metadata chunk + if s.options.NeedsMetafile { + sb.Write(helpers.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly)) + sb.WriteString(fmt.Sprintf(": {\n \"bytes\": %d,\n \"imports\": [", len(result.file.inputFile.Source.Contents))) + } + + // Don't try to resolve paths if we're not bundling + if recordsPtr := result.file.inputFile.Repr.ImportRecords(); s.options.Mode == config.ModeBundle && recordsPtr != nil { + records := *recordsPtr + tracker := logger.MakeLineColumnTracker(&result.file.inputFile.Source) + + for importRecordIndex := range records { + record := &records[importRecordIndex] + + // Save the import attributes to the metafile + var metafileWith string + if s.options.NeedsMetafile { + if with := record.AssertOrWith; with != nil && with.Keyword == ast.WithKeyword && len(with.Entries) > 0 { + data := strings.Builder{} + data.WriteString(",\n \"with\": {") + for i, entry := range with.Entries { + if i > 0 { + data.WriteByte(',') + } + data.WriteString("\n ") + 
data.Write(helpers.QuoteForJSON(helpers.UTF16ToString(entry.Key), s.options.ASCIIOnly)) + data.WriteString(": ") + data.Write(helpers.QuoteForJSON(helpers.UTF16ToString(entry.Value), s.options.ASCIIOnly)) + } + data.WriteString("\n }") + metafileWith = data.String() + } + } + + // Skip this import record if the previous resolver call failed + resolveResult := result.resolveResults[importRecordIndex] + if resolveResult == nil || !record.SourceIndex.IsValid() { + if s.options.NeedsMetafile { + if isFirstImport { + isFirstImport = false + sb.WriteString("\n ") + } else { + sb.WriteString(",\n ") + } + sb.WriteString(fmt.Sprintf("{\n \"path\": %s,\n \"kind\": %s,\n \"external\": true%s\n }", + helpers.QuoteForJSON(record.Path.Text, s.options.ASCIIOnly), + helpers.QuoteForJSON(record.Kind.StringForMetafile(), s.options.ASCIIOnly), + metafileWith)) + } + continue + } + + // Now that all files have been scanned, look for packages that are imported + // both with "import" and "require". Rewrite any imports that reference the + // "module" package.json field to the "main" package.json field instead. + // + // This attempts to automatically avoid the "dual package hazard" where a + // package has both a CommonJS module version and an ECMAScript module + // version and exports a non-object in CommonJS (often a function). If we + // pick the "module" field and the package is imported with "require" then + // code expecting a function will crash. 
+ if resolveResult.PathPair.HasSecondary() { + secondaryKey := resolveResult.PathPair.Secondary + if secondaryKey.Namespace == "file" { + secondaryKey.Text = canonicalFileSystemPathForWindows(secondaryKey.Text) + } + if secondaryVisited, ok := s.visited[secondaryKey]; ok { + record.SourceIndex = ast.MakeIndex32(secondaryVisited.sourceIndex) + } + } + + // Generate metadata about each import + otherResult := &s.results[record.SourceIndex.GetIndex()] + otherFile := &otherResult.file + if s.options.NeedsMetafile { + if isFirstImport { + isFirstImport = false + sb.WriteString("\n ") + } else { + sb.WriteString(",\n ") + } + sb.WriteString(fmt.Sprintf("{\n \"path\": %s,\n \"kind\": %s,\n \"original\": %s%s\n }", + helpers.QuoteForJSON(otherFile.inputFile.Source.PrettyPath, s.options.ASCIIOnly), + helpers.QuoteForJSON(record.Kind.StringForMetafile(), s.options.ASCIIOnly), + helpers.QuoteForJSON(record.Path.Text, s.options.ASCIIOnly), + metafileWith)) + } + + // Validate that imports with "assert { type: 'json' }" were imported + // with the JSON loader. This is done to match the behavior of these + // import assertions in a real JavaScript runtime. In addition, we also + // allow the copy loader since this is sort of like marking the path + // as external (the import assertions are kept and the real JavaScript + // runtime evaluates them, not us). 
+ if record.Flags.Has(ast.AssertTypeJSON) && otherResult.ok && otherFile.inputFile.Loader != config.LoaderJSON && otherFile.inputFile.Loader != config.LoaderCopy { + s.log.AddErrorWithNotes(&tracker, record.Range, + fmt.Sprintf("The file %q was loaded with the %q loader", otherFile.inputFile.Source.PrettyPath, config.LoaderToString[otherFile.inputFile.Loader]), + []logger.MsgData{ + tracker.MsgData(js_lexer.RangeOfImportAssertOrWith(result.file.inputFile.Source, + *ast.FindAssertOrWithEntry(record.AssertOrWith.Entries, "type"), js_lexer.KeyAndValueRange), + "This import assertion requires the loader to be \"json\" instead:"), + {Text: "You need to either reconfigure esbuild to ensure that the loader for this file is \"json\" or you need to remove this import assertion."}}) + } + + switch record.Kind { + case ast.ImportComposesFrom: + // Using a JavaScript file with CSS "composes" is not allowed + if _, ok := otherFile.inputFile.Repr.(*graph.JSRepr); ok && otherFile.inputFile.Loader != config.LoaderEmpty { + s.log.AddErrorWithNotes(&tracker, record.Range, + fmt.Sprintf("Cannot use \"composes\" with %q", otherFile.inputFile.Source.PrettyPath), + []logger.MsgData{{Text: fmt.Sprintf( + "You can only use \"composes\" with CSS files and %q is not a CSS file (it was loaded with the %q loader).", + otherFile.inputFile.Source.PrettyPath, config.LoaderToString[otherFile.inputFile.Loader])}}) + } + + case ast.ImportAt: + // Using a JavaScript file with CSS "@import" is not allowed + if _, ok := otherFile.inputFile.Repr.(*graph.JSRepr); ok && otherFile.inputFile.Loader != config.LoaderEmpty { + s.log.AddErrorWithNotes(&tracker, record.Range, + fmt.Sprintf("Cannot import %q into a CSS file", otherFile.inputFile.Source.PrettyPath), + []logger.MsgData{{Text: fmt.Sprintf( + "An \"@import\" rule can only be used to import another CSS file and %q is not a CSS file (it was loaded with the %q loader).", + otherFile.inputFile.Source.PrettyPath, 
config.LoaderToString[otherFile.inputFile.Loader])}}) + } + + case ast.ImportURL: + // Using a JavaScript or CSS file with CSS "url()" is not allowed + switch otherRepr := otherFile.inputFile.Repr.(type) { + case *graph.CSSRepr: + s.log.AddErrorWithNotes(&tracker, record.Range, + fmt.Sprintf("Cannot use %q as a URL", otherFile.inputFile.Source.PrettyPath), + []logger.MsgData{{Text: fmt.Sprintf( + "You can't use a \"url()\" token to reference a CSS file, and %q is a CSS file (it was loaded with the %q loader).", + otherFile.inputFile.Source.PrettyPath, config.LoaderToString[otherFile.inputFile.Loader])}}) + + case *graph.JSRepr: + if otherRepr.AST.URLForCSS == "" && otherFile.inputFile.Loader != config.LoaderEmpty { + s.log.AddErrorWithNotes(&tracker, record.Range, + fmt.Sprintf("Cannot use %q as a URL", otherFile.inputFile.Source.PrettyPath), + []logger.MsgData{{Text: fmt.Sprintf( + "You can't use a \"url()\" token to reference the file %q because it was loaded with the %q loader, which doesn't provide a URL to embed in the resulting CSS.", + otherFile.inputFile.Source.PrettyPath, config.LoaderToString[otherFile.inputFile.Loader])}}) + } + } + } + + // If the imported file uses the "copy" loader, then move it from + // "SourceIndex" to "CopySourceIndex" so we don't end up bundling it. + if _, ok := otherFile.inputFile.Repr.(*graph.CopyRepr); ok { + record.CopySourceIndex = record.SourceIndex + record.SourceIndex = ast.Index32{} + continue + } + + // If an import from a JavaScript file targets a CSS file, generate a + // JavaScript stub to ensure that JavaScript files only ever import + // other JavaScript files. 
+ if _, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok { + if css, ok := otherFile.inputFile.Repr.(*graph.CSSRepr); ok { + if s.options.WriteToStdout { + s.log.AddError(&tracker, record.Range, + fmt.Sprintf("Cannot import %q into a JavaScript file without an output path configured", otherFile.inputFile.Source.PrettyPath)) + } else if !css.JSSourceIndex.IsValid() { + stubKey := otherFile.inputFile.Source.KeyPath + if stubKey.Namespace == "file" { + stubKey.Text = canonicalFileSystemPathForWindows(stubKey.Text) + } + sourceIndex := s.allocateSourceIndex(stubKey, cache.SourceIndexJSStubForCSS) + source := otherFile.inputFile.Source + source.Index = sourceIndex + s.results[sourceIndex] = parseResult{ + file: scannerFile{ + inputFile: graph.InputFile{ + Source: source, + Loader: otherFile.inputFile.Loader, + Repr: &graph.JSRepr{ + // Note: The actual export object will be filled in by the linker + AST: js_parser.LazyExportAST(s.log, source, + js_parser.OptionsFromConfig(&s.options), js_ast.Expr{Data: js_ast.ENullShared}, ""), + CSSSourceIndex: ast.MakeIndex32(record.SourceIndex.GetIndex()), + }, + }, + }, + ok: true, + } + css.JSSourceIndex = ast.MakeIndex32(sourceIndex) + } + record.SourceIndex = css.JSSourceIndex + if !css.JSSourceIndex.IsValid() { + continue + } + } + } + + // Warn about this import if it's a bare import statement without any + // imported names (i.e. a side-effect-only import) and the module has + // been marked as having no side effects. + // + // Except don't do this if this file is inside "node_modules" since + // it's a bug in the package and the user won't be able to do anything + // about it. Note that this can result in esbuild silently generating + // broken code. If this actually happens for people, it's probably worth + // re-enabling the warning about code inside "node_modules". 
+ if record.Flags.Has(ast.WasOriginallyBareImport) && !s.options.IgnoreDCEAnnotations && + !helpers.IsInsideNodeModules(result.file.inputFile.Source.KeyPath.Text) { + if otherModule := &s.results[record.SourceIndex.GetIndex()].file.inputFile; otherModule.SideEffects.Kind != graph.HasSideEffects && + // Do not warn if this is from a plugin, since removing the import + // would cause the plugin to not run, and running a plugin is a side + // effect. + otherModule.SideEffects.Kind != graph.NoSideEffects_PureData_FromPlugin && + + // Do not warn if this has no side effects because the parsed AST + // is empty. This is the case for ".d.ts" files, for example. + otherModule.SideEffects.Kind != graph.NoSideEffects_EmptyAST { + + var notes []logger.MsgData + var by string + if data := otherModule.SideEffects.Data; data != nil { + if data.PluginName != "" { + by = fmt.Sprintf(" by plugin %q", data.PluginName) + } else { + var text string + if data.IsSideEffectsArrayInJSON { + text = "It was excluded from the \"sideEffects\" array in the enclosing \"package.json\" file:" + } else { + text = "\"sideEffects\" is false in the enclosing \"package.json\" file:" + } + tracker := logger.MakeLineColumnTracker(data.Source) + notes = append(notes, tracker.MsgData(data.Range, text)) + } + } + s.log.AddIDWithNotes(logger.MsgID_Bundler_IgnoredBareImport, logger.Warning, &tracker, record.Range, + fmt.Sprintf("Ignoring this import because %q was marked as having no side effects%s", + otherModule.Source.PrettyPath, by), notes) + } + } + } + } + + // End the metadata chunk + if s.options.NeedsMetafile { + if !isFirstImport { + sb.WriteString("\n ") + } + if repr, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok && + (repr.AST.ExportsKind == js_ast.ExportsCommonJS || repr.AST.ExportsKind == js_ast.ExportsESM) { + format := "cjs" + if repr.AST.ExportsKind == js_ast.ExportsESM { + format = "esm" + } + sb.WriteString(fmt.Sprintf("],\n \"format\": %q", format)) + } else { + 
sb.WriteString("]") + } + if attrs := result.file.inputFile.Source.KeyPath.ImportAttributes.Decode(); len(attrs) > 0 { + sb.WriteString(",\n \"with\": {") + for i, attr := range attrs { + if i > 0 { + sb.WriteByte(',') + } + sb.WriteString(fmt.Sprintf("\n %s: %s", + helpers.QuoteForJSON(attr.Key, s.options.ASCIIOnly), + helpers.QuoteForJSON(attr.Value, s.options.ASCIIOnly), + )) + } + sb.WriteString("\n }") + } + sb.WriteString("\n }") + } + + result.file.jsonMetadataChunk = sb.String() + + // If this file is from the "file" or "copy" loaders, generate an additional file + if result.file.inputFile.UniqueKeyForAdditionalFile != "" { + bytes := []byte(result.file.inputFile.Source.Contents) + template := s.options.AssetPathTemplate + + // Use the entry path template instead of the asset path template if this + // file is an entry point and uses the "copy" loader. With the "file" loader + // the JS stub is the entry point, but with the "copy" loader the file is + // the entry point itself. + customFilePath := "" + useOutputFile := false + if result.file.inputFile.Loader == config.LoaderCopy { + if metaIndex, ok := entryPointSourceIndexToMetaIndex[uint32(sourceIndex)]; ok { + template = s.options.EntryPathTemplate + customFilePath = entryPointMeta[metaIndex].OutputPath + useOutputFile = s.options.AbsOutputFile != "" + } + } + + // Add a hash to the file name to prevent multiple files with the same name + // but different contents from colliding + var hash string + if config.HasPlaceholder(template, config.HashPlaceholder) { + h := xxhash.New() + h.Write(bytes) + hash = HashForFileName(h.Sum(nil)) + } + + // This should use similar logic to how the linker computes output paths + var dir, base, ext string + if useOutputFile { + // If the output path was configured explicitly, use it verbatim + dir = "/" + base = s.fs.Base(s.options.AbsOutputFile) + ext = s.fs.Ext(base) + base = base[:len(base)-len(ext)] + } else { + // Otherwise, derive the output path from the input path 
+ // Generate the input for the template + _, _, originalExt := logger.PlatformIndependentPathDirBaseExt(result.file.inputFile.Source.KeyPath.Text) + dir, base = PathRelativeToOutbase( + &result.file.inputFile, + &s.options, + s.fs, + /* avoidIndex */ false, + customFilePath, + ) + ext = originalExt + } + + // Apply the path template + templateExt := strings.TrimPrefix(ext, ".") + relPath := config.TemplateToString(config.SubstituteTemplate(template, config.PathPlaceholders{ + Dir: &dir, + Name: &base, + Hash: &hash, + Ext: &templateExt, + })) + ext + + // Optionally add metadata about the file + var jsonMetadataChunk string + if s.options.NeedsMetafile { + inputs := fmt.Sprintf("{\n %s: {\n \"bytesInOutput\": %d\n }\n }", + helpers.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly), + len(bytes), + ) + jsonMetadataChunk = fmt.Sprintf( + "{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": %s,\n \"bytes\": %d\n }", + inputs, + len(bytes), + ) + } + + // Generate the additional file to copy into the output directory + result.file.inputFile.AdditionalFiles = []graph.OutputFile{{ + AbsPath: s.fs.Join(s.options.AbsOutputDir, relPath), + Contents: bytes, + JSONMetadataChunk: jsonMetadataChunk, + }} + } + + s.results[sourceIndex] = result + } + + // The linker operates on an array of files, so construct that now. This + // can't be constructed earlier because we generate new parse results for + // JavaScript stub files for CSS imports above. 
+ files := make([]scannerFile, len(s.results)) + for sourceIndex := range s.results { + if result := &s.results[sourceIndex]; result.ok { + s.validateTLA(uint32(sourceIndex)) + files[sourceIndex] = result.file + } + } + + return files +} + +func (s *scanner) validateTLA(sourceIndex uint32) tlaCheck { + result := &s.results[sourceIndex] + + if result.ok && result.tlaCheck.depth == 0 { + if repr, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok { + result.tlaCheck.depth = 1 + if repr.AST.LiveTopLevelAwaitKeyword.Len > 0 { + result.tlaCheck.parent = ast.MakeIndex32(sourceIndex) + } + + for importRecordIndex, record := range repr.AST.ImportRecords { + if record.SourceIndex.IsValid() && (record.Kind == ast.ImportRequire || record.Kind == ast.ImportStmt) { + parent := s.validateTLA(record.SourceIndex.GetIndex()) + if !parent.parent.IsValid() { + continue + } + + // Follow any import chains + if record.Kind == ast.ImportStmt && (!result.tlaCheck.parent.IsValid() || parent.depth < result.tlaCheck.depth) { + result.tlaCheck.depth = parent.depth + 1 + result.tlaCheck.parent = record.SourceIndex + result.tlaCheck.importRecordIndex = uint32(importRecordIndex) + continue + } + + // Require of a top-level await chain is forbidden + if record.Kind == ast.ImportRequire { + var notes []logger.MsgData + var tlaPrettyPath string + otherSourceIndex := record.SourceIndex.GetIndex() + + // Build up a chain of relevant notes for all of the imports + for { + parentResult := &s.results[otherSourceIndex] + parentRepr := parentResult.file.inputFile.Repr.(*graph.JSRepr) + + if parentRepr.AST.LiveTopLevelAwaitKeyword.Len > 0 { + tlaPrettyPath = parentResult.file.inputFile.Source.PrettyPath + tracker := logger.MakeLineColumnTracker(&parentResult.file.inputFile.Source) + notes = append(notes, tracker.MsgData(parentRepr.AST.LiveTopLevelAwaitKeyword, + fmt.Sprintf("The top-level await in %q is here:", tlaPrettyPath))) + break + } + + if !parentResult.tlaCheck.parent.IsValid() { + notes = 
append(notes, logger.MsgData{Text: "unexpected invalid index"}) + break + } + + otherSourceIndex = parentResult.tlaCheck.parent.GetIndex() + + tracker := logger.MakeLineColumnTracker(&parentResult.file.inputFile.Source) + notes = append(notes, tracker.MsgData( + parentRepr.AST.ImportRecords[parentResult.tlaCheck.importRecordIndex].Range, + fmt.Sprintf("The file %q imports the file %q here:", + parentResult.file.inputFile.Source.PrettyPath, s.results[otherSourceIndex].file.inputFile.Source.PrettyPath))) + } + + var text string + importedPrettyPath := s.results[record.SourceIndex.GetIndex()].file.inputFile.Source.PrettyPath + + if importedPrettyPath == tlaPrettyPath { + text = fmt.Sprintf("This require call is not allowed because the imported file %q contains a top-level await", + importedPrettyPath) + } else { + text = fmt.Sprintf("This require call is not allowed because the transitive dependency %q contains a top-level await", + tlaPrettyPath) + } + + tracker := logger.MakeLineColumnTracker(&result.file.inputFile.Source) + s.log.AddErrorWithNotes(&tracker, record.Range, text, notes) + } + } + } + + // Make sure that if we wrap this module in a closure, the closure is also + // async. This happens when you call "import()" on this module and code + // splitting is off. 
+ if result.tlaCheck.parent.IsValid() { + repr.Meta.IsAsyncOrHasAsyncDependency = true + } + } + } + + return result.tlaCheck +} + +func DefaultExtensionToLoaderMap() map[string]config.Loader { + return map[string]config.Loader{ + "": config.LoaderJS, // This represents files without an extension + ".js": config.LoaderJS, + ".mjs": config.LoaderJS, + ".cjs": config.LoaderJS, + ".jsx": config.LoaderJSX, + ".ts": config.LoaderTS, + ".cts": config.LoaderTSNoAmbiguousLessThan, + ".mts": config.LoaderTSNoAmbiguousLessThan, + ".tsx": config.LoaderTSX, + ".css": config.LoaderCSS, + ".module.css": config.LoaderLocalCSS, + ".json": config.LoaderJSON, + ".txt": config.LoaderText, + } +} + +func applyOptionDefaults(options *config.Options) { + if options.ExtensionToLoader == nil { + options.ExtensionToLoader = DefaultExtensionToLoaderMap() + } + if options.OutputExtensionJS == "" { + options.OutputExtensionJS = ".js" + } + if options.OutputExtensionCSS == "" { + options.OutputExtensionCSS = ".css" + } + + // Configure default path templates + if len(options.EntryPathTemplate) == 0 { + options.EntryPathTemplate = []config.PathTemplate{ + {Data: "./", Placeholder: config.DirPlaceholder}, + {Data: "/", Placeholder: config.NamePlaceholder}, + } + } + if len(options.ChunkPathTemplate) == 0 { + options.ChunkPathTemplate = []config.PathTemplate{ + {Data: "./", Placeholder: config.NamePlaceholder}, + {Data: "-", Placeholder: config.HashPlaceholder}, + } + } + if len(options.AssetPathTemplate) == 0 { + options.AssetPathTemplate = []config.PathTemplate{ + {Data: "./", Placeholder: config.NamePlaceholder}, + {Data: "-", Placeholder: config.HashPlaceholder}, + } + } + + options.ProfilerNames = !options.MinifyIdentifiers + + // Automatically fix invalid configurations of unsupported features + fixInvalidUnsupportedJSFeatureOverrides(options, compat.AsyncAwait, compat.AsyncGenerator|compat.ForAwait|compat.TopLevelAwait) + fixInvalidUnsupportedJSFeatureOverrides(options, compat.Generator, 
compat.AsyncGenerator) + fixInvalidUnsupportedJSFeatureOverrides(options, compat.ObjectAccessors, compat.ClassPrivateAccessor|compat.ClassPrivateStaticAccessor) + fixInvalidUnsupportedJSFeatureOverrides(options, compat.ClassField, compat.ClassPrivateField) + fixInvalidUnsupportedJSFeatureOverrides(options, compat.ClassStaticField, compat.ClassPrivateStaticField) + fixInvalidUnsupportedJSFeatureOverrides(options, compat.Class, + compat.ClassField|compat.ClassPrivateAccessor|compat.ClassPrivateBrandCheck|compat.ClassPrivateField| + compat.ClassPrivateMethod|compat.ClassPrivateStaticAccessor|compat.ClassPrivateStaticField| + compat.ClassPrivateStaticMethod|compat.ClassStaticBlocks|compat.ClassStaticField) + + // If we're not building for the browser, automatically disable support for + // inline and tags if there aren't currently any overrides + if options.Platform != config.PlatformBrowser { + if !options.UnsupportedJSFeatureOverridesMask.Has(compat.InlineScript) { + options.UnsupportedJSFeatures |= compat.InlineScript + } + if !options.UnsupportedCSSFeatureOverridesMask.Has(compat.InlineStyle) { + options.UnsupportedCSSFeatures |= compat.InlineStyle + } + } +} + +func fixInvalidUnsupportedJSFeatureOverrides(options *config.Options, implies compat.JSFeature, implied compat.JSFeature) { + // If this feature is unsupported, that implies that the other features must also be unsupported + if options.UnsupportedJSFeatureOverrides.Has(implies) { + options.UnsupportedJSFeatures |= implied + options.UnsupportedJSFeatureOverrides |= implied + options.UnsupportedJSFeatureOverridesMask |= implied + } +} + +type Linker func( + options *config.Options, + timer *helpers.Timer, + log logger.Log, + fs fs.FS, + res *resolver.Resolver, + inputFiles []graph.InputFile, + entryPoints []graph.EntryPoint, + uniqueKeyPrefix string, + reachableFiles []uint32, + dataForSourceMaps func() []DataForSourceMap, +) []graph.OutputFile + +func (b *Bundle) Compile(log logger.Log, timer *helpers.Timer, 
mangleCache map[string]interface{}, link Linker) ([]graph.OutputFile, string) { + timer.Begin("Compile phase") + defer timer.End("Compile phase") + + if b.options.CancelFlag.DidCancel() { + return nil, "" + } + + options := b.options + + // In most cases we don't need synchronized access to the mangle cache + cssUsedLocalNames := make(map[string]bool) + options.ExclusiveMangleCacheUpdate = func(cb func( + mangleCache map[string]interface{}, + cssUsedLocalNames map[string]bool, + )) { + cb(mangleCache, cssUsedLocalNames) + } + + files := make([]graph.InputFile, len(b.files)) + for i, file := range b.files { + files[i] = file.inputFile + } + + // Get the base path from the options or choose the lowest common ancestor of all entry points + allReachableFiles := findReachableFiles(files, b.entryPoints) + + // Compute source map data in parallel with linking + timer.Begin("Spawn source map tasks") + dataForSourceMaps := b.computeDataForSourceMapsInParallel(&options, allReachableFiles) + timer.End("Spawn source map tasks") + + var resultGroups [][]graph.OutputFile + if options.CodeSplitting || len(b.entryPoints) == 1 { + // If code splitting is enabled or if there's only one entry point, link all entry points together + resultGroups = [][]graph.OutputFile{link(&options, timer, log, b.fs, b.res, + files, b.entryPoints, b.uniqueKeyPrefix, allReachableFiles, dataForSourceMaps)} + } else { + // Otherwise, link each entry point with the runtime file separately + waitGroup := sync.WaitGroup{} + resultGroups = make([][]graph.OutputFile, len(b.entryPoints)) + serializer := helpers.MakeSerializer(len(b.entryPoints)) + for i, entryPoint := range b.entryPoints { + waitGroup.Add(1) + go func(i int, entryPoint graph.EntryPoint) { + entryPoints := []graph.EntryPoint{entryPoint} + forked := timer.Fork() + + // Each goroutine needs a separate options object + optionsClone := options + optionsClone.ExclusiveMangleCacheUpdate = func(cb func( + mangleCache map[string]interface{}, + 
cssUsedLocalNames map[string]bool, + )) { + // Serialize all accesses to the mangle cache in entry point order for determinism + serializer.Enter(i) + defer serializer.Leave(i) + cb(mangleCache, cssUsedLocalNames) + } + + resultGroups[i] = link(&optionsClone, forked, log, b.fs, b.res, files, entryPoints, + b.uniqueKeyPrefix, findReachableFiles(files, entryPoints), dataForSourceMaps) + timer.Join(forked) + waitGroup.Done() + }(i, entryPoint) + } + waitGroup.Wait() + } + + // Join the results in entry point order for determinism + var outputFiles []graph.OutputFile + for _, group := range resultGroups { + outputFiles = append(outputFiles, group...) + } + + // Also generate the metadata file if necessary + var metafileJSON string + if options.NeedsMetafile { + timer.Begin("Generate metadata JSON") + metafileJSON = b.generateMetadataJSON(outputFiles, allReachableFiles, options.ASCIIOnly) + timer.End("Generate metadata JSON") + } + + if !options.WriteToStdout { + // Make sure an output file never overwrites an input file + if !options.AllowOverwrite { + sourceAbsPaths := make(map[string]uint32) + for _, sourceIndex := range allReachableFiles { + keyPath := b.files[sourceIndex].inputFile.Source.KeyPath + if keyPath.Namespace == "file" { + absPathKey := canonicalFileSystemPathForWindows(keyPath.Text) + sourceAbsPaths[absPathKey] = sourceIndex + } + } + for _, outputFile := range outputFiles { + absPathKey := canonicalFileSystemPathForWindows(outputFile.AbsPath) + if sourceIndex, ok := sourceAbsPaths[absPathKey]; ok { + hint := "" + switch logger.API { + case logger.CLIAPI: + hint = " (use \"--allow-overwrite\" to allow this)" + case logger.JSAPI: + hint = " (use \"allowOverwrite: true\" to allow this)" + case logger.GoAPI: + hint = " (use \"AllowOverwrite: true\" to allow this)" + } + log.AddError(nil, logger.Range{}, + fmt.Sprintf("Refusing to overwrite input file %q%s", + b.files[sourceIndex].inputFile.Source.PrettyPath, hint)) + } + } + } + + // Make sure an output 
file never overwrites another output file. This + // is almost certainly unintentional and would otherwise happen silently. + // + // Make an exception for files that have identical contents. In that case + // the duplicate is just silently filtered out. This can happen with the + // "file" loader, for example. + outputFileMap := make(map[string][]byte) + end := 0 + for _, outputFile := range outputFiles { + absPathKey := canonicalFileSystemPathForWindows(outputFile.AbsPath) + contents, ok := outputFileMap[absPathKey] + + // If this isn't a duplicate, keep the output file + if !ok { + outputFileMap[absPathKey] = outputFile.Contents + outputFiles[end] = outputFile + end++ + continue + } + + // If the names and contents are both the same, only keep the first one + if bytes.Equal(contents, outputFile.Contents) { + continue + } + + // Otherwise, generate an error + outputPath := outputFile.AbsPath + if relPath, ok := b.fs.Rel(b.fs.Cwd(), outputPath); ok { + outputPath = relPath + } + log.AddError(nil, logger.Range{}, "Two output files share the same path but have different contents: "+outputPath) + } + outputFiles = outputFiles[:end] + } + + return outputFiles, metafileJSON +} + +// Find all files reachable from all entry points. This order should be +// deterministic given that the entry point order is deterministic, since the +// returned order is the postorder of the graph traversal and import record +// order within a given file is deterministic. 
+func findReachableFiles(files []graph.InputFile, entryPoints []graph.EntryPoint) []uint32 { + visited := make(map[uint32]bool) + var order []uint32 + var visit func(uint32) + + // Include this file and all files it imports + visit = func(sourceIndex uint32) { + if !visited[sourceIndex] { + visited[sourceIndex] = true + file := &files[sourceIndex] + if repr, ok := file.Repr.(*graph.JSRepr); ok && repr.CSSSourceIndex.IsValid() { + visit(repr.CSSSourceIndex.GetIndex()) + } + if recordsPtr := file.Repr.ImportRecords(); recordsPtr != nil { + for _, record := range *recordsPtr { + if record.SourceIndex.IsValid() { + visit(record.SourceIndex.GetIndex()) + } else if record.CopySourceIndex.IsValid() { + visit(record.CopySourceIndex.GetIndex()) + } + } + } + + // Each file must come after its dependencies + order = append(order, sourceIndex) + } + } + + // The runtime is always included in case it's needed + visit(runtime.SourceIndex) + + // Include all files reachable from any entry point + for _, entryPoint := range entryPoints { + visit(entryPoint.SourceIndex) + } + + return order +} + +// This is done in parallel with linking because linking is a mostly serial +// phase and there are extra resources for parallelism. This could also be done +// during parsing but that would slow down parsing and delay the start of the +// linking phase, which then delays the whole bundling process. +// +// However, doing this during parsing would allow it to be cached along with +// the parsed ASTs which would then speed up incremental builds. In the future +// it could be good to optionally have this be computed during the parsing +// phase when incremental builds are active but otherwise still have it be +// computed during linking for optimal speed during non-incremental builds. 
+func (b *Bundle) computeDataForSourceMapsInParallel(options *config.Options, reachableFiles []uint32) func() []DataForSourceMap { + if options.SourceMap == config.SourceMapNone { + return func() []DataForSourceMap { + return nil + } + } + + var waitGroup sync.WaitGroup + results := make([]DataForSourceMap, len(b.files)) + + for _, sourceIndex := range reachableFiles { + if f := &b.files[sourceIndex]; f.inputFile.Loader.CanHaveSourceMap() { + var approximateLineCount int32 + switch repr := f.inputFile.Repr.(type) { + case *graph.JSRepr: + approximateLineCount = repr.AST.ApproximateLineCount + case *graph.CSSRepr: + approximateLineCount = repr.AST.ApproximateLineCount + } + waitGroup.Add(1) + go func(sourceIndex uint32, f *scannerFile, approximateLineCount int32) { + result := &results[sourceIndex] + result.LineOffsetTables = sourcemap.GenerateLineOffsetTables(f.inputFile.Source.Contents, approximateLineCount) + sm := f.inputFile.InputSourceMap + if !options.ExcludeSourcesContent { + if sm == nil { + // Simple case: no nested source map + result.QuotedContents = [][]byte{helpers.QuoteForJSON(f.inputFile.Source.Contents, options.ASCIIOnly)} + } else { + // Complex case: nested source map + result.QuotedContents = make([][]byte, len(sm.Sources)) + nullContents := []byte("null") + for i := range sm.Sources { + // Missing contents become a "null" literal + quotedContents := nullContents + if i < len(sm.SourcesContent) { + if value := sm.SourcesContent[i]; value.Quoted != "" && (!options.ASCIIOnly || !isASCIIOnly(value.Quoted)) { + // Just use the value directly from the input file + quotedContents = []byte(value.Quoted) + } else if value.Value != nil { + // Re-quote non-ASCII values if output is ASCII-only. + // Also quote values that haven't been quoted yet + // (happens when the entire "sourcesContent" array is + // absent and the source has been found on the file + // system using the "sources" array). 
+ quotedContents = helpers.QuoteForJSON(helpers.UTF16ToString(value.Value), options.ASCIIOnly) + } + } + result.QuotedContents[i] = quotedContents + } + } + } + waitGroup.Done() + }(sourceIndex, f, approximateLineCount) + } + } + + return func() []DataForSourceMap { + waitGroup.Wait() + return results + } +} + +func (b *Bundle) generateMetadataJSON(results []graph.OutputFile, allReachableFiles []uint32, asciiOnly bool) string { + sb := strings.Builder{} + sb.WriteString("{\n \"inputs\": {") + + // Write inputs + isFirst := true + for _, sourceIndex := range allReachableFiles { + if b.files[sourceIndex].inputFile.OmitFromSourceMapsAndMetafile { + continue + } + if file := &b.files[sourceIndex]; len(file.jsonMetadataChunk) > 0 { + if isFirst { + isFirst = false + sb.WriteString("\n ") + } else { + sb.WriteString(",\n ") + } + sb.WriteString(file.jsonMetadataChunk) + } + } + + sb.WriteString("\n },\n \"outputs\": {") + + // Write outputs + isFirst = true + paths := make(map[string]bool) + for _, result := range results { + if len(result.JSONMetadataChunk) > 0 { + path := resolver.PrettyPath(b.fs, logger.Path{Text: result.AbsPath, Namespace: "file"}) + if paths[path] { + // Don't write out the same path twice (can happen with the "file" loader) + continue + } + if isFirst { + isFirst = false + sb.WriteString("\n ") + } else { + sb.WriteString(",\n ") + } + paths[path] = true + sb.WriteString(fmt.Sprintf("%s: ", helpers.QuoteForJSON(path, asciiOnly))) + sb.WriteString(result.JSONMetadataChunk) + } + } + + sb.WriteString("\n }\n}\n") + return sb.String() +} + +type runtimeCacheKey struct { + unsupportedJSFeatures compat.JSFeature + minifySyntax bool + minifyIdentifiers bool +} + +type runtimeCache struct { + astMap map[runtimeCacheKey]js_ast.AST + astMutex sync.Mutex +} + +var globalRuntimeCache runtimeCache + +func (cache *runtimeCache) parseRuntime(options *config.Options) (source logger.Source, runtimeAST js_ast.AST, ok bool) { + key := runtimeCacheKey{ + // All 
configuration options that the runtime code depends on must go here + unsupportedJSFeatures: options.UnsupportedJSFeatures, + minifySyntax: options.MinifySyntax, + minifyIdentifiers: options.MinifyIdentifiers, + } + + // Determine which source to use + source = runtime.Source(key.unsupportedJSFeatures) + + // Cache hit? + (func() { + cache.astMutex.Lock() + defer cache.astMutex.Unlock() + if cache.astMap != nil { + runtimeAST, ok = cache.astMap[key] + } + })() + if ok { + return + } + + // Cache miss + log := logger.NewDeferLog(logger.DeferLogAll, nil) + runtimeAST, ok = js_parser.Parse(log, source, js_parser.OptionsFromConfig(&config.Options{ + // These configuration options must only depend on the key + UnsupportedJSFeatures: key.unsupportedJSFeatures, + MinifySyntax: key.minifySyntax, + MinifyIdentifiers: key.minifyIdentifiers, + + // Always do tree shaking for the runtime because we never want to + // include unnecessary runtime code + TreeShaking: true, + })) + if log.HasErrors() { + msgs := "Internal error: failed to parse runtime:\n" + for _, msg := range log.Done() { + msgs += msg.String(logger.OutputOptions{IncludeSource: true}, logger.TerminalInfo{}) + } + panic(msgs[:len(msgs)-1]) + } + + // Cache for next time + if ok { + cache.astMutex.Lock() + defer cache.astMutex.Unlock() + if cache.astMap == nil { + cache.astMap = make(map[runtimeCacheKey]js_ast.AST) + } + cache.astMap[key] = runtimeAST + } + return +} + +// Returns the path of this file relative to "outbase", which is then ready to +// be joined with the absolute output directory path. The directory and name +// components are returned separately for convenience. 
+func PathRelativeToOutbase( + inputFile *graph.InputFile, + options *config.Options, + fs fs.FS, + avoidIndex bool, + customFilePath string, +) (relDir string, baseName string) { + relDir = "/" + absPath := inputFile.Source.KeyPath.Text + + if customFilePath != "" { + // Use the configured output path if present + absPath = customFilePath + if !fs.IsAbs(absPath) { + absPath = fs.Join(options.AbsOutputBase, absPath) + } + } else if inputFile.Source.KeyPath.Namespace != "file" { + // Come up with a path for virtual paths (i.e. non-file-system paths) + dir, base, _ := logger.PlatformIndependentPathDirBaseExt(absPath) + if avoidIndex && base == "index" { + _, base, _ = logger.PlatformIndependentPathDirBaseExt(dir) + } + baseName = sanitizeFilePathForVirtualModulePath(base) + return + } else { + // Heuristic: If the file is named something like "index.js", then use + // the name of the parent directory instead. This helps avoid the + // situation where many chunks are named "index" because of people + // dynamically-importing npm packages that make use of node's implicit + // "index" file name feature. + if avoidIndex { + base := fs.Base(absPath) + base = base[:len(base)-len(fs.Ext(base))] + if base == "index" { + absPath = fs.Dir(absPath) + } + } + } + + // Try to get a relative path to the base directory + relPath, ok := fs.Rel(options.AbsOutputBase, absPath) + if !ok { + // This can fail in some situations such as on different drives on + // Windows. In that case we just use the file name. + baseName = fs.Base(absPath) + } else { + // Now we finally have a relative path + relDir = fs.Dir(relPath) + "/" + baseName = fs.Base(relPath) + + // Use platform-independent slashes + relDir = strings.ReplaceAll(relDir, "\\", "/") + + // Replace leading "../" so we don't try to write outside of the output + // directory. 
This normally can't happen because "AbsOutputBase" is + // automatically computed to contain all entry point files, but it can + // happen if someone sets it manually via the "outbase" API option. + // + // Note that we can't just strip any leading "../" because that could + // cause two separate entry point paths to collide. For example, there + // could be both "src/index.js" and "../src/index.js" as entry points. + dotDotCount := 0 + for strings.HasPrefix(relDir[dotDotCount*3:], "../") { + dotDotCount++ + } + if dotDotCount > 0 { + // The use of "_.._" here is somewhat arbitrary but it is unlikely to + // collide with a folder named by a human and it works on Windows + // (Windows doesn't like names that end with a "."). And not starting + // with a "." means that it will not be hidden on Unix. + relDir = strings.Repeat("_.._/", dotDotCount) + relDir[dotDotCount*3:] + } + for strings.HasSuffix(relDir, "/") { + relDir = relDir[:len(relDir)-1] + } + relDir = "/" + relDir + if strings.HasSuffix(relDir, "/.") { + relDir = relDir[:len(relDir)-1] + } + } + + // Strip the file extension if the output path is an input file + if customFilePath == "" { + ext := fs.Ext(baseName) + baseName = baseName[:len(baseName)-len(ext)] + } + return +} + +func sanitizeFilePathForVirtualModulePath(path string) string { + // Convert it to a safe file path. 
See: https://stackoverflow.com/a/31976060 + sb := strings.Builder{} + needsGap := false + for _, c := range path { + switch c { + case 0: + // These characters are forbidden on Unix and Windows + + case '<', '>', ':', '"', '|', '?', '*': + // These characters are forbidden on Windows + + default: + if c < 0x20 { + // These characters are forbidden on Windows + break + } + + // Turn runs of invalid characters into a '_' + if needsGap { + sb.WriteByte('_') + needsGap = false + } + + sb.WriteRune(c) + continue + } + + if sb.Len() > 0 { + needsGap = true + } + } + + // Make sure the name isn't empty + if sb.Len() == 0 { + return "_" + } + + // Note: An extension will be added to this base name, so there is no need to + // avoid forbidden file names such as ".." since ".js" is a valid file name. + return sb.String() +} diff --git a/vendor/github.com/evanw/esbuild/internal/cache/cache.go b/vendor/github.com/evanw/esbuild/internal/cache/cache.go new file mode 100644 index 000000000000..8b1dd8c401e5 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/cache/cache.go @@ -0,0 +1,115 @@ +package cache + +import ( + "sync" + + "github.com/evanw/esbuild/internal/logger" + "github.com/evanw/esbuild/internal/runtime" +) + +// This is a cache of the parsed contents of a set of files. The idea is to be +// able to reuse the results of parsing between builds and make subsequent +// builds faster by avoiding redundant parsing work. This only works if: +// +// - The AST information in the cache must be considered immutable. There is +// no way to enforce this in Go, but please be disciplined about this. The +// ASTs are shared in between builds. Any information that must be mutated +// in the AST during a build must be done on a shallow clone of the data if +// the mutation happens after parsing (i.e. a clone that clones everything +// that will be mutated and shares only the parts that won't be mutated). 
+// +// - The information in the cache must not depend at all on the contents of +// any file other than the file being cached. Invalidating an entry in the +// cache does not also invalidate any entries that depend on that file, so +// caching information that depends on other files can result in incorrect +// results due to reusing stale data. For example, do not "bake in" some +// value imported from another file. +// +// - Cached ASTs must only be reused if the parsing options are identical +// between builds. For example, it would be bad if the AST parser depended +// on options inherited from a nearby "package.json" file but those options +// were not part of the cache key. Then the cached AST could incorrectly be +// reused even if the contents of that "package.json" file have changed. +type CacheSet struct { + FSCache FSCache + CSSCache CSSCache + JSONCache JSONCache + JSCache JSCache + SourceIndexCache SourceIndexCache +} + +func MakeCacheSet() *CacheSet { + return &CacheSet{ + SourceIndexCache: SourceIndexCache{ + globEntries: make(map[uint64]uint32), + entries: make(map[sourceIndexKey]uint32), + nextSourceIndex: runtime.SourceIndex + 1, + }, + FSCache: FSCache{ + entries: make(map[string]*fsEntry), + }, + CSSCache: CSSCache{ + entries: make(map[logger.Path]*cssCacheEntry), + }, + JSONCache: JSONCache{ + entries: make(map[logger.Path]*jsonCacheEntry), + }, + JSCache: JSCache{ + entries: make(map[logger.Path]*jsCacheEntry), + }, + } +} + +type SourceIndexCache struct { + globEntries map[uint64]uint32 + entries map[sourceIndexKey]uint32 + mutex sync.Mutex + nextSourceIndex uint32 +} + +type SourceIndexKind uint8 + +const ( + SourceIndexNormal SourceIndexKind = iota + SourceIndexJSStubForCSS +) + +type sourceIndexKey struct { + path logger.Path + kind SourceIndexKind +} + +func (c *SourceIndexCache) LenHint() uint32 { + c.mutex.Lock() + defer c.mutex.Unlock() + + // Add some extra room at the end for a new file or two without reallocating + const 
someExtraRoom = 16 + return c.nextSourceIndex + someExtraRoom +} + +func (c *SourceIndexCache) Get(path logger.Path, kind SourceIndexKind) uint32 { + key := sourceIndexKey{path: path, kind: kind} + c.mutex.Lock() + defer c.mutex.Unlock() + if sourceIndex, ok := c.entries[key]; ok { + return sourceIndex + } + sourceIndex := c.nextSourceIndex + c.nextSourceIndex++ + c.entries[key] = sourceIndex + return sourceIndex +} + +func (c *SourceIndexCache) GetGlob(parentSourceIndex uint32, globIndex uint32) uint32 { + key := (uint64(parentSourceIndex) << 32) | uint64(globIndex) + c.mutex.Lock() + defer c.mutex.Unlock() + if sourceIndex, ok := c.globEntries[key]; ok { + return sourceIndex + } + sourceIndex := c.nextSourceIndex + c.nextSourceIndex++ + c.globEntries[key] = sourceIndex + return sourceIndex +} diff --git a/vendor/github.com/evanw/esbuild/internal/cache/cache_ast.go b/vendor/github.com/evanw/esbuild/internal/cache/cache_ast.go new file mode 100644 index 000000000000..c976f8951648 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/cache/cache_ast.go @@ -0,0 +1,190 @@ +package cache + +import ( + "sync" + + "github.com/evanw/esbuild/internal/css_ast" + "github.com/evanw/esbuild/internal/css_parser" + "github.com/evanw/esbuild/internal/js_ast" + "github.com/evanw/esbuild/internal/js_parser" + "github.com/evanw/esbuild/internal/logger" +) + +// This cache intends to avoid unnecessarily re-parsing files in subsequent +// builds. For a given path, parsing can be avoided if the contents of the file +// and the options for the parser are the same as last time. Even if the +// contents of the file are the same, the options for the parser may have +// changed if they depend on some other file ("package.json" for example). +// +// This cache checks if the file contents have changed even though we have +// the ability to detect if a file has changed on the file system by reading +// its metadata. 
First of all, if the file contents are cached then they should +// be the same pointer, which makes the comparison trivial. Also we want to +// cache the AST for plugins in the common case that the plugin output stays +// the same. + +//////////////////////////////////////////////////////////////////////////////// +// CSS + +type CSSCache struct { + entries map[logger.Path]*cssCacheEntry + mutex sync.Mutex +} + +type cssCacheEntry struct { + source logger.Source + msgs []logger.Msg + ast css_ast.AST + options css_parser.Options +} + +func (c *CSSCache) Parse(log logger.Log, source logger.Source, options css_parser.Options) css_ast.AST { + // Check the cache + entry := func() *cssCacheEntry { + c.mutex.Lock() + defer c.mutex.Unlock() + return c.entries[source.KeyPath] + }() + + // Cache hit + if entry != nil && entry.source == source && entry.options.Equal(&options) { + for _, msg := range entry.msgs { + log.AddMsg(msg) + } + return entry.ast + } + + // Cache miss + tempLog := logger.NewDeferLog(logger.DeferLogAll, log.Overrides) + ast := css_parser.Parse(tempLog, source, options) + msgs := tempLog.Done() + for _, msg := range msgs { + log.AddMsg(msg) + } + + // Create the cache entry + entry = &cssCacheEntry{ + source: source, + options: options, + ast: ast, + msgs: msgs, + } + + // Save for next time + c.mutex.Lock() + defer c.mutex.Unlock() + c.entries[source.KeyPath] = entry + return ast +} + +//////////////////////////////////////////////////////////////////////////////// +// JSON + +type JSONCache struct { + entries map[logger.Path]*jsonCacheEntry + mutex sync.Mutex +} + +type jsonCacheEntry struct { + expr js_ast.Expr + msgs []logger.Msg + source logger.Source + options js_parser.JSONOptions + ok bool +} + +func (c *JSONCache) Parse(log logger.Log, source logger.Source, options js_parser.JSONOptions) (js_ast.Expr, bool) { + // Check the cache + entry := func() *jsonCacheEntry { + c.mutex.Lock() + defer c.mutex.Unlock() + return c.entries[source.KeyPath] + }() 
+ + // Cache hit + if entry != nil && entry.source == source && entry.options == options { + for _, msg := range entry.msgs { + log.AddMsg(msg) + } + return entry.expr, entry.ok + } + + // Cache miss + tempLog := logger.NewDeferLog(logger.DeferLogAll, log.Overrides) + expr, ok := js_parser.ParseJSON(tempLog, source, options) + msgs := tempLog.Done() + for _, msg := range msgs { + log.AddMsg(msg) + } + + // Create the cache entry + entry = &jsonCacheEntry{ + source: source, + options: options, + expr: expr, + ok: ok, + msgs: msgs, + } + + // Save for next time + c.mutex.Lock() + defer c.mutex.Unlock() + c.entries[source.KeyPath] = entry + return expr, ok +} + +//////////////////////////////////////////////////////////////////////////////// +// JS + +type JSCache struct { + entries map[logger.Path]*jsCacheEntry + mutex sync.Mutex +} + +type jsCacheEntry struct { + source logger.Source + msgs []logger.Msg + options js_parser.Options + ast js_ast.AST + ok bool +} + +func (c *JSCache) Parse(log logger.Log, source logger.Source, options js_parser.Options) (js_ast.AST, bool) { + // Check the cache + entry := func() *jsCacheEntry { + c.mutex.Lock() + defer c.mutex.Unlock() + return c.entries[source.KeyPath] + }() + + // Cache hit + if entry != nil && entry.source == source && entry.options.Equal(&options) { + for _, msg := range entry.msgs { + log.AddMsg(msg) + } + return entry.ast, entry.ok + } + + // Cache miss + tempLog := logger.NewDeferLog(logger.DeferLogAll, log.Overrides) + ast, ok := js_parser.Parse(tempLog, source, options) + msgs := tempLog.Done() + for _, msg := range msgs { + log.AddMsg(msg) + } + + // Create the cache entry + entry = &jsCacheEntry{ + source: source, + options: options, + ast: ast, + ok: ok, + msgs: msgs, + } + + // Save for next time + c.mutex.Lock() + defer c.mutex.Unlock() + c.entries[source.KeyPath] = entry + return ast, ok +} diff --git a/vendor/github.com/evanw/esbuild/internal/cache/cache_fs.go 
b/vendor/github.com/evanw/esbuild/internal/cache/cache_fs.go new file mode 100644 index 000000000000..ab4d08ef0cc8 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/cache/cache_fs.go @@ -0,0 +1,52 @@ +package cache + +import ( + "sync" + + "github.com/evanw/esbuild/internal/fs" +) + +// This cache uses information from the "stat" syscall to try to avoid re- +// reading files from the file system during subsequent builds if the file +// hasn't changed. The assumption is reading the file metadata is faster than +// reading the file contents. + +type FSCache struct { + entries map[string]*fsEntry + mutex sync.Mutex +} + +type fsEntry struct { + contents string + modKey fs.ModKey + isModKeyUsable bool +} + +func (c *FSCache) ReadFile(fs fs.FS, path string) (contents string, canonicalError error, originalError error) { + entry := func() *fsEntry { + c.mutex.Lock() + defer c.mutex.Unlock() + return c.entries[path] + }() + + // If the file's modification key hasn't changed since it was cached, assume + // the contents of the file are also the same and skip reading the file. 
+ modKey, modKeyErr := fs.ModKey(path) + if entry != nil && entry.isModKeyUsable && modKeyErr == nil && entry.modKey == modKey { + return entry.contents, nil, nil + } + + contents, err, originalError := fs.ReadFile(path) + if err != nil { + return "", err, originalError + } + + c.mutex.Lock() + defer c.mutex.Unlock() + c.entries[path] = &fsEntry{ + contents: contents, + modKey: modKey, + isModKeyUsable: modKeyErr == nil, + } + return contents, nil, nil +} diff --git a/vendor/github.com/evanw/esbuild/internal/compat/compat.go b/vendor/github.com/evanw/esbuild/internal/compat/compat.go new file mode 100644 index 000000000000..bd2d0ffd1ff1 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/compat/compat.go @@ -0,0 +1,92 @@ +package compat + +import ( + "strconv" + "strings" + + "github.com/evanw/esbuild/internal/ast" +) + +type v struct { + major uint16 + minor uint8 + patch uint8 +} + +type Semver struct { + // "1.2.3-alpha" => { Parts: {1, 2, 3}, PreRelease: "-alpha" } + Parts []int + PreRelease string +} + +func (v Semver) String() string { + b := strings.Builder{} + for _, part := range v.Parts { + if b.Len() > 0 { + b.WriteRune('.') + } + b.WriteString(strconv.Itoa(part)) + } + b.WriteString(v.PreRelease) + return b.String() +} + +// Returns <0 if "a < b" +// Returns 0 if "a == b" +// Returns >0 if "a > b" +func compareVersions(a v, b Semver) int { + diff := int(a.major) + if len(b.Parts) > 0 { + diff -= b.Parts[0] + } + if diff == 0 { + diff = int(a.minor) + if len(b.Parts) > 1 { + diff -= b.Parts[1] + } + } + if diff == 0 { + diff = int(a.patch) + if len(b.Parts) > 2 { + diff -= b.Parts[2] + } + } + if diff == 0 && len(b.PreRelease) != 0 { + return 1 // "1.0.0" > "1.0.0-alpha" + } + return diff +} + +// The start is inclusive and the end is exclusive +type versionRange struct { + start v + end v // Use 0.0.0 for "no end" +} + +func isVersionSupported(ranges []versionRange, version Semver) bool { + for _, r := range ranges { + if 
compareVersions(r.start, version) <= 0 && (r.end == (v{}) || compareVersions(r.end, version) > 0) { + return true + } + } + return false +} + +func SymbolFeature(kind ast.SymbolKind) JSFeature { + switch kind { + case ast.SymbolPrivateField: + return ClassPrivateField + case ast.SymbolPrivateMethod: + return ClassPrivateMethod + case ast.SymbolPrivateGet, ast.SymbolPrivateSet, ast.SymbolPrivateGetSetPair: + return ClassPrivateAccessor + case ast.SymbolPrivateStaticField: + return ClassPrivateStaticField + case ast.SymbolPrivateStaticMethod: + return ClassPrivateStaticMethod + case ast.SymbolPrivateStaticGet, ast.SymbolPrivateStaticSet, ast.SymbolPrivateStaticGetSetPair: + return ClassPrivateStaticAccessor + default: + return 0 + } +} diff --git a/vendor/github.com/evanw/esbuild/internal/compat/css_table.go b/vendor/github.com/evanw/esbuild/internal/compat/css_table.go new file mode 100644 index 000000000000..5ceb52efae22 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/compat/css_table.go @@ -0,0 +1,361 @@ +// This file was automatically generated by "css_table.ts" + +package compat + +import ( + "github.com/evanw/esbuild/internal/css_ast" +) + +type CSSFeature uint16 + +const ( + ColorFunctions CSSFeature = 1 << iota + GradientDoublePosition + GradientInterpolation + GradientMidpoints + HWB + HexRGBA + InlineStyle + InsetProperty + IsPseudoClass + Modern_RGB_HSL + Nesting + RebeccaPurple +) + +var StringToCSSFeature = map[string]CSSFeature{ + "color-functions": ColorFunctions, + "gradient-double-position": GradientDoublePosition, + "gradient-interpolation": GradientInterpolation, + "gradient-midpoints": GradientMidpoints, + "hwb": HWB, + "hex-rgba": HexRGBA, + "inline-style": InlineStyle, + "inset-property": InsetProperty, + "is-pseudo-class": IsPseudoClass, + "modern-rgb-hsl": Modern_RGB_HSL, + "nesting": Nesting, + "rebecca-purple": RebeccaPurple, +} + +func (features CSSFeature) Has(feature CSSFeature) bool { + return (features & feature) != 0 +} + 
+func (features CSSFeature) ApplyOverrides(overrides CSSFeature, mask CSSFeature) CSSFeature { + return (features & ^mask) | (overrides & mask) +} + +var cssTable = map[CSSFeature]map[Engine][]versionRange{ + ColorFunctions: { + Chrome: {{start: v{111, 0, 0}}}, + Edge: {{start: v{111, 0, 0}}}, + Firefox: {{start: v{113, 0, 0}}}, + IOS: {{start: v{15, 4, 0}}}, + Opera: {{start: v{97, 0, 0}}}, + Safari: {{start: v{15, 4, 0}}}, + }, + GradientDoublePosition: { + Chrome: {{start: v{72, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + Firefox: {{start: v{83, 0, 0}}}, + IOS: {{start: v{12, 2, 0}}}, + Opera: {{start: v{60, 0, 0}}}, + Safari: {{start: v{12, 1, 0}}}, + }, + GradientInterpolation: { + Chrome: {{start: v{111, 0, 0}}}, + Edge: {{start: v{111, 0, 0}}}, + IOS: {{start: v{16, 2, 0}}}, + Opera: {{start: v{97, 0, 0}}}, + Safari: {{start: v{16, 2, 0}}}, + }, + GradientMidpoints: { + Chrome: {{start: v{40, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + Firefox: {{start: v{36, 0, 0}}}, + IOS: {{start: v{7, 0, 0}}}, + Opera: {{start: v{27, 0, 0}}}, + Safari: {{start: v{7, 0, 0}}}, + }, + HWB: { + Chrome: {{start: v{101, 0, 0}}}, + Edge: {{start: v{101, 0, 0}}}, + Firefox: {{start: v{96, 0, 0}}}, + IOS: {{start: v{15, 0, 0}}}, + Opera: {{start: v{87, 0, 0}}}, + Safari: {{start: v{15, 0, 0}}}, + }, + HexRGBA: { + Chrome: {{start: v{62, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + Firefox: {{start: v{49, 0, 0}}}, + IOS: {{start: v{9, 3, 0}}}, + Opera: {{start: v{49, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + InlineStyle: {}, + InsetProperty: { + Chrome: {{start: v{87, 0, 0}}}, + Edge: {{start: v{87, 0, 0}}}, + Firefox: {{start: v{66, 0, 0}}}, + IOS: {{start: v{14, 5, 0}}}, + Opera: {{start: v{73, 0, 0}}}, + Safari: {{start: v{14, 1, 0}}}, + }, + IsPseudoClass: { + Chrome: {{start: v{88, 0, 0}}}, + Edge: {{start: v{88, 0, 0}}}, + Firefox: {{start: v{78, 0, 0}}}, + IOS: {{start: v{14, 0, 0}}}, + Opera: {{start: v{75, 0, 0}}}, + Safari: {{start: v{14, 0, 0}}}, + }, + 
Modern_RGB_HSL: { + Chrome: {{start: v{66, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + Firefox: {{start: v{52, 0, 0}}}, + IOS: {{start: v{12, 2, 0}}}, + Opera: {{start: v{53, 0, 0}}}, + Safari: {{start: v{12, 1, 0}}}, + }, + Nesting: { + Chrome: {{start: v{120, 0, 0}}}, + Edge: {{start: v{120, 0, 0}}}, + Firefox: {{start: v{117, 0, 0}}}, + IOS: {{start: v{17, 2, 0}}}, + Opera: {{start: v{106, 0, 0}}}, + Safari: {{start: v{17, 2, 0}}}, + }, + RebeccaPurple: { + Chrome: {{start: v{38, 0, 0}}}, + Edge: {{start: v{12, 0, 0}}}, + Firefox: {{start: v{33, 0, 0}}}, + IE: {{start: v{11, 0, 0}}}, + IOS: {{start: v{8, 0, 0}}}, + Opera: {{start: v{25, 0, 0}}}, + Safari: {{start: v{9, 0, 0}}}, + }, +} + +// Return all features that are not available in at least one environment +func UnsupportedCSSFeatures(constraints map[Engine]Semver) (unsupported CSSFeature) { + for feature, engines := range cssTable { + if feature == InlineStyle { + continue // This is purely user-specified + } + for engine, version := range constraints { + if !engine.IsBrowser() { + // Specifying "--target=es2020" shouldn't affect CSS + continue + } + if versionRanges, ok := engines[engine]; !ok || !isVersionSupported(versionRanges, version) { + unsupported |= feature + } + } + } + return +} + +type CSSPrefix uint8 + +const ( + KhtmlPrefix CSSPrefix = 1 << iota + MozPrefix + MsPrefix + OPrefix + WebkitPrefix + + NoPrefix CSSPrefix = 0 +) + +type prefixData struct { + // Note: In some cases, earlier versions did not require a prefix but later + // ones do. This is the case for Microsoft Edge for example, which switched + // the underlying browser engine from a custom one to the one from Chrome. + // However, we assume that users specifying a browser version for CSS mean + // "works in this version or newer", so we still add a prefix when a target + // is an old Edge version. 
+ engine Engine + withoutPrefix v + prefix CSSPrefix +} + +var cssPrefixTable = map[css_ast.D][]prefixData{ + css_ast.DAppearance: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{84, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{84, 0, 0}}, + {engine: Firefox, prefix: MozPrefix, withoutPrefix: v{80, 0, 0}}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{73, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + }, + css_ast.DBackdropFilter: { + {engine: IOS, prefix: WebkitPrefix}, + {engine: Safari, prefix: WebkitPrefix}, + }, + css_ast.DBackgroundClip: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: Edge, prefix: MsPrefix, withoutPrefix: v{15, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{14, 0, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{14, 0, 0}}, + }, + css_ast.DBoxDecorationBreak: { + {engine: Chrome, prefix: WebkitPrefix}, + {engine: Edge, prefix: WebkitPrefix}, + {engine: IOS, prefix: WebkitPrefix}, + {engine: Opera, prefix: WebkitPrefix}, + {engine: Safari, prefix: WebkitPrefix}, + }, + css_ast.DClipPath: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{55, 0, 0}}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{13, 0, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{42, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{13, 1, 0}}, + }, + css_ast.DFontKerning: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{33, 0, 0}}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{12, 0, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{20, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{9, 1, 0}}, + }, + css_ast.DHyphens: { + {engine: 
Edge, prefix: MsPrefix, withoutPrefix: v{79, 0, 0}}, + {engine: Firefox, prefix: MozPrefix, withoutPrefix: v{43, 0, 0}}, + {engine: IE, prefix: MsPrefix}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{17, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{17, 0, 0}}, + }, + css_ast.DInitialLetter: { + {engine: IOS, prefix: WebkitPrefix}, + {engine: Safari, prefix: WebkitPrefix}, + }, + css_ast.DMaskComposite: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + }, + css_ast.DMaskImage: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + {engine: Opera, prefix: WebkitPrefix}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + }, + css_ast.DMaskOrigin: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + }, + css_ast.DMaskPosition: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + }, + css_ast.DMaskRepeat: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix}, + {engine: IOS, prefix: 
WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + }, + css_ast.DMaskSize: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + }, + css_ast.DPosition: { + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{13, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{13, 0, 0}}, + }, + css_ast.DPrintColorAdjust: { + {engine: Chrome, prefix: WebkitPrefix}, + {engine: Edge, prefix: WebkitPrefix}, + {engine: Opera, prefix: WebkitPrefix}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, + }, + css_ast.DTabSize: { + {engine: Firefox, prefix: MozPrefix, withoutPrefix: v{91, 0, 0}}, + {engine: Opera, prefix: OPrefix, withoutPrefix: v{15, 0, 0}}, + }, + css_ast.DTextDecorationColor: { + {engine: Firefox, prefix: MozPrefix, withoutPrefix: v{36, 0, 0}}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{12, 2, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{12, 1, 0}}, + }, + css_ast.DTextDecorationLine: { + {engine: Firefox, prefix: MozPrefix, withoutPrefix: v{36, 0, 0}}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{12, 2, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{12, 1, 0}}, + }, + css_ast.DTextDecorationSkip: { + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{12, 2, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{12, 1, 0}}, + }, + css_ast.DTextEmphasisColor: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{85, 0, 0}}, + }, + 
css_ast.DTextEmphasisPosition: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{85, 0, 0}}, + }, + css_ast.DTextEmphasisStyle: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{85, 0, 0}}, + }, + css_ast.DTextOrientation: { + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{14, 0, 0}}, + }, + css_ast.DTextSizeAdjust: { + {engine: Edge, prefix: MsPrefix, withoutPrefix: v{79, 0, 0}}, + {engine: IOS, prefix: WebkitPrefix}, + }, + css_ast.DUserSelect: { + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{54, 0, 0}}, + {engine: Edge, prefix: MsPrefix, withoutPrefix: v{79, 0, 0}}, + {engine: Firefox, prefix: MozPrefix, withoutPrefix: v{69, 0, 0}}, + {engine: IE, prefix: MsPrefix}, + {engine: IOS, prefix: WebkitPrefix}, + {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{41, 0, 0}}, + {engine: Safari, prefix: KhtmlPrefix, withoutPrefix: v{3, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix}, + }, +} + +func CSSPrefixData(constraints map[Engine]Semver) (entries map[css_ast.D]CSSPrefix) { + for property, items := range cssPrefixTable { + prefixes := NoPrefix + for engine, version := range constraints { + if !engine.IsBrowser() { + // Specifying "--target=es2020" shouldn't affect CSS + continue + } + for _, item := range items { + if item.engine == engine && (item.withoutPrefix == v{} || compareVersions(item.withoutPrefix, version) > 0) { + prefixes |= item.prefix + } + } + } + if prefixes != NoPrefix { + if entries == nil { + entries = make(map[css_ast.D]CSSPrefix) + } + entries[property] = prefixes + } + } + return +} diff --git a/vendor/github.com/evanw/esbuild/internal/compat/js_table.go b/vendor/github.com/evanw/esbuild/internal/compat/js_table.go new file mode 
100644 index 000000000000..808eca3c69b5 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/compat/js_table.go @@ -0,0 +1,903 @@ +// This file was automatically generated by "js_table.ts" + +package compat + +type Engine uint8 + +const ( + Chrome Engine = iota + Deno + Edge + ES + Firefox + Hermes + IE + IOS + Node + Opera + Rhino + Safari +) + +func (e Engine) String() string { + switch e { + case Chrome: + return "chrome" + case Deno: + return "deno" + case Edge: + return "edge" + case ES: + return "es" + case Firefox: + return "firefox" + case Hermes: + return "hermes" + case IE: + return "ie" + case IOS: + return "ios" + case Node: + return "node" + case Opera: + return "opera" + case Rhino: + return "rhino" + case Safari: + return "safari" + } + return "" +} + +func (e Engine) IsBrowser() bool { + switch e { + case Chrome, Edge, Firefox, IE, IOS, Opera, Safari: + return true + } + return false +} + +type JSFeature uint64 + +const ( + ArbitraryModuleNamespaceNames JSFeature = 1 << iota + ArraySpread + Arrow + AsyncAwait + AsyncGenerator + Bigint + Class + ClassField + ClassPrivateAccessor + ClassPrivateBrandCheck + ClassPrivateField + ClassPrivateMethod + ClassPrivateStaticAccessor + ClassPrivateStaticField + ClassPrivateStaticMethod + ClassStaticBlocks + ClassStaticField + ConstAndLet + Decorators + DefaultArgument + Destructuring + DynamicImport + ExponentOperator + ExportStarAs + ForAwait + ForOf + FunctionNameConfigurable + FunctionOrClassPropertyAccess + Generator + Hashbang + ImportAssertions + ImportAttributes + ImportMeta + InlineScript + LogicalAssignment + NestedRestBinding + NewTarget + NodeColonPrefixImport + NodeColonPrefixRequire + NullishCoalescing + ObjectAccessors + ObjectExtensions + ObjectRestSpread + OptionalCatchBinding + OptionalChain + RegexpDotAllFlag + RegexpLookbehindAssertions + RegexpMatchIndices + RegexpNamedCaptureGroups + RegexpSetNotation + RegexpStickyAndUnicodeFlags + RegexpUnicodePropertyEscapes + RestArgument + 
TemplateLiteral + TopLevelAwait + TypeofExoticObjectIsObject + UnicodeEscapes + Using +) + +var StringToJSFeature = map[string]JSFeature{ + "arbitrary-module-namespace-names": ArbitraryModuleNamespaceNames, + "array-spread": ArraySpread, + "arrow": Arrow, + "async-await": AsyncAwait, + "async-generator": AsyncGenerator, + "bigint": Bigint, + "class": Class, + "class-field": ClassField, + "class-private-accessor": ClassPrivateAccessor, + "class-private-brand-check": ClassPrivateBrandCheck, + "class-private-field": ClassPrivateField, + "class-private-method": ClassPrivateMethod, + "class-private-static-accessor": ClassPrivateStaticAccessor, + "class-private-static-field": ClassPrivateStaticField, + "class-private-static-method": ClassPrivateStaticMethod, + "class-static-blocks": ClassStaticBlocks, + "class-static-field": ClassStaticField, + "const-and-let": ConstAndLet, + "decorators": Decorators, + "default-argument": DefaultArgument, + "destructuring": Destructuring, + "dynamic-import": DynamicImport, + "exponent-operator": ExponentOperator, + "export-star-as": ExportStarAs, + "for-await": ForAwait, + "for-of": ForOf, + "function-name-configurable": FunctionNameConfigurable, + "function-or-class-property-access": FunctionOrClassPropertyAccess, + "generator": Generator, + "hashbang": Hashbang, + "import-assertions": ImportAssertions, + "import-attributes": ImportAttributes, + "import-meta": ImportMeta, + "inline-script": InlineScript, + "logical-assignment": LogicalAssignment, + "nested-rest-binding": NestedRestBinding, + "new-target": NewTarget, + "node-colon-prefix-import": NodeColonPrefixImport, + "node-colon-prefix-require": NodeColonPrefixRequire, + "nullish-coalescing": NullishCoalescing, + "object-accessors": ObjectAccessors, + "object-extensions": ObjectExtensions, + "object-rest-spread": ObjectRestSpread, + "optional-catch-binding": OptionalCatchBinding, + "optional-chain": OptionalChain, + "regexp-dot-all-flag": RegexpDotAllFlag, + 
"regexp-lookbehind-assertions": RegexpLookbehindAssertions, + "regexp-match-indices": RegexpMatchIndices, + "regexp-named-capture-groups": RegexpNamedCaptureGroups, + "regexp-set-notation": RegexpSetNotation, + "regexp-sticky-and-unicode-flags": RegexpStickyAndUnicodeFlags, + "regexp-unicode-property-escapes": RegexpUnicodePropertyEscapes, + "rest-argument": RestArgument, + "template-literal": TemplateLiteral, + "top-level-await": TopLevelAwait, + "typeof-exotic-object-is-object": TypeofExoticObjectIsObject, + "unicode-escapes": UnicodeEscapes, + "using": Using, +} + +func (features JSFeature) Has(feature JSFeature) bool { + return (features & feature) != 0 +} + +func (features JSFeature) ApplyOverrides(overrides JSFeature, mask JSFeature) JSFeature { + return (features & ^mask) | (overrides & mask) +} + +var jsTable = map[JSFeature]map[Engine][]versionRange{ + ArbitraryModuleNamespaceNames: { + Chrome: {{start: v{90, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{87, 0, 0}}}, + IOS: {{start: v{14, 5, 0}}}, + Node: {{start: v{16, 0, 0}}}, + Safari: {{start: v{14, 1, 0}}}, + }, + ArraySpread: { + // Note: The latest version of "IE" failed 15 tests including: spread syntax for iterable objects: spreading non-iterables is a runtime error + // Note: The latest version of "Rhino" failed 15 tests including: spread syntax for iterable objects: spreading non-iterables is a runtime error + Chrome: {{start: v{46, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{13, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{36, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{5, 0, 0}}}, + Opera: {{start: v{33, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + Arrow: { + // Note: The latest version of "Hermes" failed 3 tests including: arrow functions: lexical "super" binding in constructors + // Note: The latest version of "IE" failed 13 tests including: arrow functions: "this" unchanged by call or 
apply + // Note: The latest version of "Rhino" failed 3 tests including: arrow functions: lexical "new.target" binding + Chrome: {{start: v{49, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{13, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{45, 0, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{6, 0, 0}}}, + Opera: {{start: v{36, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + AsyncAwait: { + // Note: The latest version of "Hermes" failed 4 tests including: async functions: async arrow functions + // Note: The latest version of "IE" failed 16 tests including: async functions: async arrow functions + // Note: The latest version of "Rhino" failed 16 tests including: async functions: async arrow functions + Chrome: {{start: v{55, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{15, 0, 0}}}, + ES: {{start: v{2017, 0, 0}}}, + Firefox: {{start: v{52, 0, 0}}}, + IOS: {{start: v{11, 0, 0}}}, + Node: {{start: v{7, 6, 0}}}, + Opera: {{start: v{42, 0, 0}}}, + Safari: {{start: v{11, 0, 0}}}, + }, + AsyncGenerator: { + // Note: The latest version of "Hermes" failed this test: Asynchronous Iterators: async generators + // Note: The latest version of "IE" failed this test: Asynchronous Iterators: async generators + // Note: The latest version of "Rhino" failed this test: Asynchronous Iterators: async generators + Chrome: {{start: v{63, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2018, 0, 0}}}, + Firefox: {{start: v{57, 0, 0}}}, + IOS: {{start: v{12, 0, 0}}}, + Node: {{start: v{10, 0, 0}}}, + Opera: {{start: v{50, 0, 0}}}, + Safari: {{start: v{12, 0, 0}}}, + }, + Bigint: { + // Note: The latest version of "IE" failed this test: BigInt: basic functionality + Chrome: {{start: v{67, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2020, 0, 0}}}, + Firefox: {{start: v{68, 0, 0}}}, + Hermes: {{start: v{0, 12, 0}}}, + IOS: {{start: v{14, 0, 0}}}, + Node: 
{{start: v{10, 4, 0}}}, + Opera: {{start: v{54, 0, 0}}}, + Rhino: {{start: v{1, 7, 14}}}, + Safari: {{start: v{14, 0, 0}}}, + }, + Class: { + // Note: The latest version of "Hermes" failed 24 tests including: class: accessor properties + // Note: The latest version of "IE" failed 24 tests including: class: accessor properties + // Note: The latest version of "Rhino" failed 24 tests including: class: accessor properties + Chrome: {{start: v{49, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{13, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{45, 0, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{6, 0, 0}}}, + Opera: {{start: v{36, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + ClassField: { + // Note: The latest version of "Hermes" failed 2 tests including: instance class fields: computed instance class fields + // Note: The latest version of "IE" failed 2 tests including: instance class fields: computed instance class fields + // Note: The latest version of "Rhino" failed 2 tests including: instance class fields: computed instance class fields + Chrome: {{start: v{73, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{69, 0, 0}}}, + IOS: {{start: v{14, 0, 0}}}, + Node: {{start: v{12, 0, 0}}}, + Opera: {{start: v{60, 0, 0}}}, + Safari: {{start: v{14, 0, 0}}}, + }, + ClassPrivateAccessor: { + // Note: The latest version of "Hermes" failed this test: private class methods: private accessor properties + // Note: The latest version of "IE" failed this test: private class methods: private accessor properties + // Note: The latest version of "Rhino" failed this test: private class methods: private accessor properties + Chrome: {{start: v{84, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{84, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{90, 0, 0}}}, + IOS: {{start: v{15, 0, 0}}}, + Node: {{start: v{14, 6, 0}}}, + Opera: {{start: 
v{70, 0, 0}}}, + Safari: {{start: v{15, 0, 0}}}, + }, + ClassPrivateBrandCheck: { + // Note: The latest version of "Hermes" failed this test: Ergonomic brand checks for private fields + // Note: The latest version of "IE" failed this test: Ergonomic brand checks for private fields + // Note: The latest version of "Rhino" failed this test: Ergonomic brand checks for private fields + Chrome: {{start: v{91, 0, 0}}}, + Deno: {{start: v{1, 9, 0}}}, + Edge: {{start: v{91, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{90, 0, 0}}}, + IOS: {{start: v{15, 0, 0}}}, + Node: {{start: v{16, 4, 0}}}, + Opera: {{start: v{77, 0, 0}}}, + Safari: {{start: v{15, 0, 0}}}, + }, + ClassPrivateField: { + // Note: The latest version of "Hermes" failed 4 tests including: instance class fields: optional deep private instance class fields access + // Note: The latest version of "IE" failed 4 tests including: instance class fields: optional deep private instance class fields access + // Note: The latest version of "Rhino" failed 4 tests including: instance class fields: optional deep private instance class fields access + Chrome: {{start: v{84, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{84, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{90, 0, 0}}}, + IOS: {{start: v{14, 5, 0}}}, + Node: {{start: v{14, 6, 0}}}, + Opera: {{start: v{70, 0, 0}}}, + Safari: {{start: v{14, 1, 0}}}, + }, + ClassPrivateMethod: { + // Note: The latest version of "Hermes" failed this test: private class methods: private instance methods + // Note: The latest version of "IE" failed this test: private class methods: private instance methods + // Note: The latest version of "Rhino" failed this test: private class methods: private instance methods + Chrome: {{start: v{84, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{84, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{90, 0, 0}}}, + IOS: {{start: v{15, 0, 0}}}, + Node: {{start: v{14, 6, 0}}}, + 
Opera: {{start: v{70, 0, 0}}}, + Safari: {{start: v{15, 0, 0}}}, + }, + ClassPrivateStaticAccessor: { + // Note: The latest version of "Hermes" failed this test: private class methods: private static accessor properties + // Note: The latest version of "IE" failed this test: private class methods: private static accessor properties + // Note: The latest version of "Rhino" failed this test: private class methods: private static accessor properties + Chrome: {{start: v{84, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{84, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{90, 0, 0}}}, + IOS: {{start: v{15, 0, 0}}}, + Node: {{start: v{14, 6, 0}}}, + Opera: {{start: v{70, 0, 0}}}, + Safari: {{start: v{15, 0, 0}}}, + }, + ClassPrivateStaticField: { + // Note: The latest version of "Hermes" failed this test: static class fields: private static class fields + // Note: The latest version of "IE" failed this test: static class fields: private static class fields + // Note: The latest version of "Rhino" failed this test: static class fields: private static class fields + Chrome: {{start: v{74, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{90, 0, 0}}}, + IOS: {{start: v{14, 5, 0}}}, + Node: {{start: v{12, 0, 0}}}, + Opera: {{start: v{62, 0, 0}}}, + Safari: {{start: v{14, 1, 0}}}, + }, + ClassPrivateStaticMethod: { + // Note: The latest version of "Hermes" failed this test: private class methods: private static methods + // Note: The latest version of "IE" failed this test: private class methods: private static methods + // Note: The latest version of "Rhino" failed this test: private class methods: private static methods + Chrome: {{start: v{84, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{84, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{90, 0, 0}}}, + IOS: {{start: v{15, 0, 0}}}, + Node: {{start: v{14, 6, 0}}}, + Opera: {{start: v{70, 0, 
0}}}, + Safari: {{start: v{15, 0, 0}}}, + }, + ClassStaticBlocks: { + Chrome: {{start: v{91, 0, 0}}}, + Deno: {{start: v{1, 14, 0}}}, + Edge: {{start: v{94, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{93, 0, 0}}}, + IOS: {{start: v{16, 4, 0}}}, + Node: {{start: v{16, 11, 0}}}, + Opera: {{start: v{80, 0, 0}}}, + Safari: {{start: v{16, 4, 0}}}, + }, + ClassStaticField: { + // Note: The latest version of "Hermes" failed 2 tests including: static class fields: computed static class fields + // Note: The latest version of "IE" failed 2 tests including: static class fields: computed static class fields + // Note: The latest version of "Rhino" failed 2 tests including: static class fields: computed static class fields + Chrome: {{start: v{73, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{75, 0, 0}}}, + IOS: {{start: v{14, 5, 0}}}, + Node: {{start: v{12, 0, 0}}}, + Opera: {{start: v{60, 0, 0}}}, + Safari: {{start: v{14, 1, 0}}}, + }, + ConstAndLet: { + // Note: The latest version of "Hermes" failed 20 tests including: const: for loop statement scope + // Note: The latest version of "IE" failed 6 tests including: const: for-in loop iteration scope + // Note: The latest version of "Rhino" failed 22 tests including: const: cannot be in statements + Chrome: {{start: v{49, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{14, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{51, 0, 0}}}, + IOS: {{start: v{11, 0, 0}}}, + Node: {{start: v{6, 0, 0}}}, + Opera: {{start: v{36, 0, 0}}}, + Safari: {{start: v{11, 0, 0}}}, + }, + Decorators: {}, + DefaultArgument: { + // Note: The latest version of "Hermes" failed 2 tests including: default function parameters: separate scope + // Note: The latest version of "IE" failed 7 tests including: default function parameters: arguments object interaction + // Note: The latest version of "Rhino" failed 7 tests including: 
default function parameters: arguments object interaction + Chrome: {{start: v{49, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{14, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{53, 0, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{6, 0, 0}}}, + Opera: {{start: v{36, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + Destructuring: { + // Note: The latest version of "Hermes" failed 3 tests including: destructuring, declarations: defaults, let temporal dead zone + // Note: The latest version of "IE" failed 71 tests including: destructuring, assignment: chained iterable destructuring + // Note: The latest version of "Rhino" failed 33 tests including: destructuring, assignment: computed properties + Chrome: {{start: v{51, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{18, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{53, 0, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{6, 5, 0}}}, + Opera: {{start: v{38, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + DynamicImport: { + Chrome: {{start: v{63, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{67, 0, 0}}}, + IOS: {{start: v{11, 0, 0}}}, + Node: {{start: v{12, 20, 0}, end: v{13, 0, 0}}, {start: v{13, 2, 0}}}, + Opera: {{start: v{50, 0, 0}}}, + Safari: {{start: v{11, 1, 0}}}, + }, + ExponentOperator: { + // Note: The latest version of "IE" failed 3 tests including: exponentiation (**) operator: assignment + Chrome: {{start: v{52, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{14, 0, 0}}}, + ES: {{start: v{2016, 0, 0}}}, + Firefox: {{start: v{52, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{10, 3, 0}}}, + Node: {{start: v{7, 0, 0}}}, + Opera: {{start: v{39, 0, 0}}}, + Rhino: {{start: v{1, 7, 14}}}, + Safari: {{start: v{10, 1, 0}}}, + }, + ExportStarAs: { + Chrome: {{start: v{72, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2020, 
0, 0}}}, + Firefox: {{start: v{80, 0, 0}}}, + IOS: {{start: v{14, 5, 0}}}, + Node: {{start: v{13, 2, 0}}}, + Opera: {{start: v{60, 0, 0}}}, + Safari: {{start: v{14, 1, 0}}}, + }, + ForAwait: { + // Note: The latest version of "Hermes" failed this test: Asynchronous Iterators: for-await-of loops + // Note: The latest version of "IE" failed this test: Asynchronous Iterators: for-await-of loops + // Note: The latest version of "Rhino" failed this test: Asynchronous Iterators: for-await-of loops + Chrome: {{start: v{63, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2018, 0, 0}}}, + Firefox: {{start: v{57, 0, 0}}}, + IOS: {{start: v{12, 0, 0}}}, + Node: {{start: v{10, 0, 0}}}, + Opera: {{start: v{50, 0, 0}}}, + Safari: {{start: v{12, 0, 0}}}, + }, + ForOf: { + // Note: The latest version of "IE" failed 9 tests including: for..of loops: iterator closing, break + // Note: The latest version of "Rhino" failed 4 tests including: for..of loops: iterator closing, break + Chrome: {{start: v{51, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{15, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{53, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{6, 5, 0}}}, + Opera: {{start: v{38, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + FunctionNameConfigurable: { + // Note: The latest version of "IE" failed this test: function "name" property: isn't writable, is configurable + // Note: The latest version of "Rhino" failed this test: function "name" property: isn't writable, is configurable + Chrome: {{start: v{43, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{12, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{38, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{4, 0, 0}}}, + Opera: {{start: v{30, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + FunctionOrClassPropertyAccess: { + Chrome: {{start: v{0, 
0, 0}}}, + Deno: {{start: v{0, 0, 0}}}, + Edge: {{start: v{0, 0, 0}}}, + ES: {{start: v{0, 0, 0}}}, + Firefox: {{start: v{0, 0, 0}}}, + Hermes: {{start: v{0, 0, 0}}}, + IE: {{start: v{0, 0, 0}}}, + IOS: {{start: v{0, 0, 0}}}, + Node: {{start: v{0, 0, 0}}}, + Opera: {{start: v{0, 0, 0}}}, + Rhino: {{start: v{0, 0, 0}}}, + Safari: {{start: v{16, 3, 0}}}, + }, + Generator: { + // Note: The latest version of "Hermes" failed 3 tests including: generators: computed shorthand generators, classes + // Note: The latest version of "IE" failed 27 tests including: generators: %GeneratorPrototype% + // Note: The latest version of "Rhino" failed 15 tests including: generators: %GeneratorPrototype% + Chrome: {{start: v{50, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{13, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{53, 0, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{6, 0, 0}}}, + Opera: {{start: v{37, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + Hashbang: { + // Note: The latest version of "IE" failed this test: Hashbang Grammar + // Note: The latest version of "Rhino" failed this test: Hashbang Grammar + Chrome: {{start: v{74, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + Firefox: {{start: v{67, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{13, 4, 0}}}, + Node: {{start: v{12, 5, 0}}}, + Opera: {{start: v{62, 0, 0}}}, + Safari: {{start: v{13, 1, 0}}}, + }, + ImportAssertions: { + Chrome: {{start: v{91, 0, 0}}}, + Deno: {{start: v{1, 17, 0}}}, + Edge: {{start: v{91, 0, 0}}}, + Node: {{start: v{16, 14, 0}}}, + }, + ImportAttributes: { + Chrome: {{start: v{123, 0, 0}}}, + Deno: {{start: v{1, 37, 0}}}, + Edge: {{start: v{123, 0, 0}}}, + IOS: {{start: v{17, 2, 0}}}, + Node: {{start: v{20, 10, 0}}}, + Opera: {{start: v{109, 0, 0}}}, + Safari: {{start: v{17, 2, 0}}}, + }, + ImportMeta: { + Chrome: {{start: v{64, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: 
v{2020, 0, 0}}}, + Firefox: {{start: v{62, 0, 0}}}, + IOS: {{start: v{12, 0, 0}}}, + Node: {{start: v{10, 4, 0}}}, + Opera: {{start: v{51, 0, 0}}}, + Safari: {{start: v{11, 1, 0}}}, + }, + InlineScript: {}, + LogicalAssignment: { + // Note: The latest version of "IE" failed 9 tests including: Logical Assignment: &&= basic support + // Note: The latest version of "Rhino" failed 9 tests including: Logical Assignment: &&= basic support + Chrome: {{start: v{85, 0, 0}}}, + Deno: {{start: v{1, 2, 0}}}, + Edge: {{start: v{85, 0, 0}}}, + ES: {{start: v{2021, 0, 0}}}, + Firefox: {{start: v{79, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{14, 0, 0}}}, + Node: {{start: v{15, 0, 0}}}, + Opera: {{start: v{71, 0, 0}}}, + Safari: {{start: v{14, 0, 0}}}, + }, + NestedRestBinding: { + // Note: The latest version of "IE" failed 2 tests including: nested rest destructuring, declarations + // Note: The latest version of "Rhino" failed 2 tests including: nested rest destructuring, declarations + Chrome: {{start: v{49, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{14, 0, 0}}}, + ES: {{start: v{2016, 0, 0}}}, + Firefox: {{start: v{47, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{10, 3, 0}}}, + Node: {{start: v{6, 0, 0}}}, + Opera: {{start: v{36, 0, 0}}}, + Safari: {{start: v{10, 1, 0}}}, + }, + NewTarget: { + // Note: The latest version of "IE" failed 2 tests including: new.target: assignment is an early error + // Note: The latest version of "Rhino" failed 2 tests including: new.target: assignment is an early error + Chrome: {{start: v{46, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{14, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{41, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{5, 0, 0}}}, + Opera: {{start: v{33, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + NodeColonPrefixImport: { + Node: {{start: v{12, 20, 0}, end: v{13, 0, 0}}, {start: v{14, 13, 1}}}, + }, 
+ NodeColonPrefixRequire: { + Node: {{start: v{14, 18, 0}, end: v{15, 0, 0}}, {start: v{16, 0, 0}}}, + }, + NullishCoalescing: { + // Note: The latest version of "IE" failed this test: nullish coalescing operator (??) + // Note: The latest version of "Rhino" failed this test: nullish coalescing operator (??) + Chrome: {{start: v{80, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{80, 0, 0}}}, + ES: {{start: v{2020, 0, 0}}}, + Firefox: {{start: v{72, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{13, 4, 0}}}, + Node: {{start: v{14, 0, 0}}}, + Opera: {{start: v{67, 0, 0}}}, + Safari: {{start: v{13, 1, 0}}}, + }, + ObjectAccessors: { + Chrome: {{start: v{5, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{12, 0, 0}}}, + ES: {{start: v{5, 0, 0}}}, + Firefox: {{start: v{2, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IE: {{start: v{9, 0, 0}}}, + IOS: {{start: v{6, 0, 0}}}, + Node: {{start: v{0, 4, 0}}}, + Opera: {{start: v{10, 10, 0}}}, + Rhino: {{start: v{1, 7, 13}}}, + Safari: {{start: v{3, 1, 0}}}, + }, + ObjectExtensions: { + // Note: The latest version of "IE" failed 6 tests including: object literal extensions: computed accessors + // Note: The latest version of "Rhino" failed 3 tests including: object literal extensions: computed accessors + Chrome: {{start: v{44, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{12, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{34, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{4, 0, 0}}}, + Opera: {{start: v{31, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + ObjectRestSpread: { + // Note: The latest version of "IE" failed 2 tests including: object rest/spread properties: object rest properties + // Note: The latest version of "Rhino" failed 2 tests including: object rest/spread properties: object rest properties + Chrome: {{start: v{60, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: 
v{2018, 0, 0}}}, + Firefox: {{start: v{55, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{11, 3, 0}}}, + Node: {{start: v{8, 3, 0}}}, + Opera: {{start: v{47, 0, 0}}}, + Safari: {{start: v{11, 1, 0}}}, + }, + OptionalCatchBinding: { + // Note: The latest version of "IE" failed 3 tests including: optional catch binding: await + // Note: The latest version of "Rhino" failed 3 tests including: optional catch binding: await + Chrome: {{start: v{66, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2019, 0, 0}}}, + Firefox: {{start: v{58, 0, 0}}}, + Hermes: {{start: v{0, 12, 0}}}, + IOS: {{start: v{11, 3, 0}}}, + Node: {{start: v{10, 0, 0}}}, + Opera: {{start: v{53, 0, 0}}}, + Safari: {{start: v{11, 1, 0}}}, + }, + OptionalChain: { + // Note: The latest version of "IE" failed 5 tests including: optional chaining operator (?.): optional bracket access + // Note: The latest version of "Rhino" failed 5 tests including: optional chaining operator (?.): optional bracket access + Chrome: {{start: v{91, 0, 0}}}, + Deno: {{start: v{1, 9, 0}}}, + Edge: {{start: v{91, 0, 0}}}, + ES: {{start: v{2020, 0, 0}}}, + Firefox: {{start: v{74, 0, 0}}}, + Hermes: {{start: v{0, 12, 0}}}, + IOS: {{start: v{13, 4, 0}}}, + Node: {{start: v{16, 1, 0}}}, + Opera: {{start: v{77, 0, 0}}}, + Safari: {{start: v{13, 1, 0}}}, + }, + RegexpDotAllFlag: { + // Note: The latest version of "IE" failed this test: s (dotAll) flag for regular expressions + // Note: The latest version of "Rhino" failed this test: s (dotAll) flag for regular expressions + Chrome: {{start: v{62, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2018, 0, 0}}}, + Firefox: {{start: v{78, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{11, 3, 0}}}, + Node: {{start: v{8, 10, 0}}}, + Opera: {{start: v{49, 0, 0}}}, + Safari: {{start: v{11, 1, 0}}}, + }, + RegexpLookbehindAssertions: { + // Note: The latest version of "IE" failed this 
test: RegExp Lookbehind Assertions + // Note: The latest version of "Rhino" failed this test: RegExp Lookbehind Assertions + Chrome: {{start: v{62, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2018, 0, 0}}}, + Firefox: {{start: v{78, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{16, 4, 0}}}, + Node: {{start: v{8, 10, 0}}}, + Opera: {{start: v{49, 0, 0}}}, + Safari: {{start: v{16, 4, 0}}}, + }, + RegexpMatchIndices: { + Chrome: {{start: v{90, 0, 0}}}, + Deno: {{start: v{1, 8, 0}}}, + Edge: {{start: v{90, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{88, 0, 0}}}, + IOS: {{start: v{15, 0, 0}}}, + Node: {{start: v{16, 0, 0}}}, + Opera: {{start: v{76, 0, 0}}}, + Safari: {{start: v{15, 0, 0}}}, + }, + RegexpNamedCaptureGroups: { + // Note: The latest version of "Hermes" failed this test: RegExp named capture groups + // Note: The latest version of "IE" failed this test: RegExp named capture groups + // Note: The latest version of "Rhino" failed this test: RegExp named capture groups + Chrome: {{start: v{64, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2018, 0, 0}}}, + Firefox: {{start: v{78, 0, 0}}}, + IOS: {{start: v{11, 3, 0}}}, + Node: {{start: v{10, 0, 0}}}, + Opera: {{start: v{51, 0, 0}}}, + Safari: {{start: v{11, 1, 0}}}, + }, + RegexpSetNotation: {}, + RegexpStickyAndUnicodeFlags: { + // Note: The latest version of "IE" failed 6 tests including: RegExp "y" and "u" flags: "u" flag + // Note: The latest version of "Rhino" failed 6 tests including: RegExp "y" and "u" flags: "u" flag + Chrome: {{start: v{50, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{13, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{46, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{12, 0, 0}}}, + Node: {{start: v{6, 0, 0}}}, + Opera: {{start: v{37, 0, 0}}}, + Safari: {{start: v{12, 0, 0}}}, + }, + RegexpUnicodePropertyEscapes: { + // 
Note: The latest version of "Chrome" failed this test: RegExp Unicode Property Escapes: Unicode 15.1 + // Note: The latest version of "Firefox" failed this test: RegExp Unicode Property Escapes: Unicode 15.1 + // Note: The latest version of "Hermes" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11 + // Note: The latest version of "IE" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11 + // Note: The latest version of "IOS" failed this test: RegExp Unicode Property Escapes: Unicode 15.1 + // Note: The latest version of "Rhino" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11 + // Note: The latest version of "Safari" failed this test: RegExp Unicode Property Escapes: Unicode 15.1 + ES: {{start: v{2018, 0, 0}}}, + Node: {{start: v{18, 20, 0}, end: v{19, 0, 0}}, {start: v{20, 12, 0}, end: v{21, 0, 0}}, {start: v{21, 3, 0}}}, + }, + RestArgument: { + // Note: The latest version of "Hermes" failed this test: rest parameters: function 'length' property + // Note: The latest version of "IE" failed 5 tests including: rest parameters: arguments object interaction + // Note: The latest version of "Rhino" failed 5 tests including: rest parameters: arguments object interaction + Chrome: {{start: v{47, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{12, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{43, 0, 0}}}, + IOS: {{start: v{10, 0, 0}}}, + Node: {{start: v{6, 0, 0}}}, + Opera: {{start: v{34, 0, 0}}}, + Safari: {{start: v{10, 0, 0}}}, + }, + TemplateLiteral: { + // Note: The latest version of "Hermes" failed this test: template literals: TemplateStrings call site caching + // Note: The latest version of "IE" failed 7 tests including: template literals: TemplateStrings call site caching + // Note: The latest version of "Rhino" failed 2 tests including: template literals: basic functionality + Chrome: {{start: v{41, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{13, 0, 0}}}, + 
ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{34, 0, 0}}}, + IOS: {{start: v{13, 0, 0}}}, + Node: {{start: v{10, 0, 0}}}, + Opera: {{start: v{28, 0, 0}}}, + Safari: {{start: v{13, 0, 0}}}, + }, + TopLevelAwait: { + Chrome: {{start: v{89, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{89, 0, 0}}}, + ES: {{start: v{2022, 0, 0}}}, + Firefox: {{start: v{89, 0, 0}}}, + IOS: {{start: v{15, 0, 0}}}, + Node: {{start: v{14, 8, 0}}}, + Opera: {{start: v{75, 0, 0}}}, + Safari: {{start: v{15, 0, 0}}}, + }, + TypeofExoticObjectIsObject: { + Chrome: {{start: v{0, 0, 0}}}, + Deno: {{start: v{0, 0, 0}}}, + Edge: {{start: v{0, 0, 0}}}, + ES: {{start: v{2020, 0, 0}}}, + Firefox: {{start: v{0, 0, 0}}}, + Hermes: {{start: v{0, 0, 0}}}, + IOS: {{start: v{0, 0, 0}}}, + Node: {{start: v{0, 0, 0}}}, + Opera: {{start: v{0, 0, 0}}}, + Rhino: {{start: v{0, 0, 0}}}, + Safari: {{start: v{0, 0, 0}}}, + }, + UnicodeEscapes: { + // Note: The latest version of "IE" failed 2 tests including: Unicode code point escapes: in identifiers + // Note: The latest version of "Rhino" failed 4 tests including: Unicode code point escapes: in identifiers + Chrome: {{start: v{44, 0, 0}}}, + Deno: {{start: v{1, 0, 0}}}, + Edge: {{start: v{12, 0, 0}}}, + ES: {{start: v{2015, 0, 0}}}, + Firefox: {{start: v{53, 0, 0}}}, + Hermes: {{start: v{0, 7, 0}}}, + IOS: {{start: v{9, 0, 0}}}, + Node: {{start: v{4, 0, 0}}}, + Opera: {{start: v{31, 0, 0}}}, + Safari: {{start: v{9, 0, 0}}}, + }, + Using: {}, +} + +// Return all features that are not available in at least one environment +func UnsupportedJSFeatures(constraints map[Engine]Semver) (unsupported JSFeature) { + for feature, engines := range jsTable { + if feature == InlineScript { + continue // This is purely user-specified + } + for engine, version := range constraints { + if versionRanges, ok := engines[engine]; !ok || !isVersionSupported(versionRanges, version) { + unsupported |= feature + } + } + } + return +} diff --git 
a/vendor/github.com/evanw/esbuild/internal/config/config.go b/vendor/github.com/evanw/esbuild/internal/config/config.go new file mode 100644 index 000000000000..3dd2e553100d --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/config/config.go @@ -0,0 +1,841 @@ +package config + +import ( + "fmt" + "regexp" + "strings" + "sync" + "sync/atomic" + + "github.com/evanw/esbuild/internal/ast" + "github.com/evanw/esbuild/internal/compat" + "github.com/evanw/esbuild/internal/css_ast" + "github.com/evanw/esbuild/internal/js_ast" + "github.com/evanw/esbuild/internal/logger" +) + +type JSXOptions struct { + Factory DefineExpr + Fragment DefineExpr + Parse bool + Preserve bool + AutomaticRuntime bool + ImportSource string + Development bool + SideEffects bool +} + +type TSJSX uint8 + +const ( + TSJSXNone TSJSX = iota + TSJSXPreserve + TSJSXReactNative + TSJSXReact + TSJSXReactJSX + TSJSXReactJSXDev +) + +type TSOptions struct { + Config TSConfig + Parse bool + NoAmbiguousLessThan bool +} + +type TSConfigJSX struct { + // If not empty, these should override the default values + JSXFactory []string // Default if empty: "React.createElement" + JSXFragmentFactory []string // Default if empty: "React.Fragment" + JSXImportSource *string // Default if empty: "react" + JSX TSJSX +} + +// This is used for "extends" in "tsconfig.json" +func (derived *TSConfigJSX) ApplyExtendedConfig(base TSConfigJSX) { + if base.JSXFactory != nil { + derived.JSXFactory = base.JSXFactory + } + if base.JSXFragmentFactory != nil { + derived.JSXFragmentFactory = base.JSXFragmentFactory + } + if base.JSXImportSource != nil { + derived.JSXImportSource = base.JSXImportSource + } + if base.JSX != TSJSXNone { + derived.JSX = base.JSX + } +} + +func (tsConfig *TSConfigJSX) ApplyTo(jsxOptions *JSXOptions) { + switch tsConfig.JSX { + case TSJSXPreserve, TSJSXReactNative: + // Deliberately don't set "Preserve = true" here. 
Some tools from Vercel + // apparently automatically set "jsx": "preserve" in "tsconfig.json" and + // people are then confused when esbuild preserves their JSX. Ignoring this + // value means you now have to explicitly pass "--jsx=preserve" to esbuild + // to get this behavior. + + case TSJSXReact: + jsxOptions.AutomaticRuntime = false + jsxOptions.Development = false + + case TSJSXReactJSX: + jsxOptions.AutomaticRuntime = true + // Deliberately don't set "Development = false" here. People want to be + // able to have "react-jsx" in their "tsconfig.json" file and then swap + // that to "react-jsxdev" by passing "--jsx-dev" to esbuild. + + case TSJSXReactJSXDev: + jsxOptions.AutomaticRuntime = true + jsxOptions.Development = true + } + + if len(tsConfig.JSXFactory) > 0 { + jsxOptions.Factory = DefineExpr{Parts: tsConfig.JSXFactory} + } + + if len(tsConfig.JSXFragmentFactory) > 0 { + jsxOptions.Fragment = DefineExpr{Parts: tsConfig.JSXFragmentFactory} + } + + if tsConfig.JSXImportSource != nil { + jsxOptions.ImportSource = *tsConfig.JSXImportSource + } +} + +// Note: This can currently only contain primitive values. It's compared +// for equality using a structural equality comparison by the JS parser. 
+type TSConfig struct { + ExperimentalDecorators MaybeBool + ImportsNotUsedAsValues TSImportsNotUsedAsValues + PreserveValueImports MaybeBool + Target TSTarget + UseDefineForClassFields MaybeBool + VerbatimModuleSyntax MaybeBool +} + +// This is used for "extends" in "tsconfig.json" +func (derived *TSConfig) ApplyExtendedConfig(base TSConfig) { + if base.ExperimentalDecorators != Unspecified { + derived.ExperimentalDecorators = base.ExperimentalDecorators + } + if base.ImportsNotUsedAsValues != TSImportsNotUsedAsValues_None { + derived.ImportsNotUsedAsValues = base.ImportsNotUsedAsValues + } + if base.PreserveValueImports != Unspecified { + derived.PreserveValueImports = base.PreserveValueImports + } + if base.Target != TSTargetUnspecified { + derived.Target = base.Target + } + if base.UseDefineForClassFields != Unspecified { + derived.UseDefineForClassFields = base.UseDefineForClassFields + } + if base.VerbatimModuleSyntax != Unspecified { + derived.VerbatimModuleSyntax = base.VerbatimModuleSyntax + } +} + +func (cfg *TSConfig) UnusedImportFlags() (flags TSUnusedImportFlags) { + if cfg.VerbatimModuleSyntax == True { + return TSUnusedImport_KeepStmt | TSUnusedImport_KeepValues + } + if cfg.PreserveValueImports == True { + flags |= TSUnusedImport_KeepValues + } + if cfg.ImportsNotUsedAsValues == TSImportsNotUsedAsValues_Preserve || cfg.ImportsNotUsedAsValues == TSImportsNotUsedAsValues_Error { + flags |= TSUnusedImport_KeepStmt + } + return +} + +type Platform uint8 + +const ( + PlatformBrowser Platform = iota + PlatformNode + PlatformNeutral +) + +type SourceMap uint8 + +const ( + SourceMapNone SourceMap = iota + SourceMapInline + SourceMapLinkedWithComment + SourceMapExternalWithoutComment + SourceMapInlineAndExternal +) + +type LegalComments uint8 + +const ( + LegalCommentsInline LegalComments = iota + LegalCommentsNone + LegalCommentsEndOfFile + LegalCommentsLinkedWithComment + LegalCommentsExternalWithoutComment +) + +func (lc LegalComments) HasExternalFile() 
bool { + return lc == LegalCommentsLinkedWithComment || lc == LegalCommentsExternalWithoutComment +} + +type Loader uint8 + +const ( + LoaderNone Loader = iota + LoaderBase64 + LoaderBinary + LoaderCopy + LoaderCSS + LoaderDataURL + LoaderDefault + LoaderEmpty + LoaderFile + LoaderGlobalCSS + LoaderJS + LoaderJSON + LoaderWithTypeJSON // Has a "with { type: 'json' }" attribute + LoaderJSX + LoaderLocalCSS + LoaderText + LoaderTS + LoaderTSNoAmbiguousLessThan // Used with ".mts" and ".cts" + LoaderTSX +) + +var LoaderToString = []string{ + "none", + "base64", + "binary", + "copy", + "css", + "dataurl", + "default", + "empty", + "file", + "global-css", + "js", + "json", + "json", + "jsx", + "local-css", + "text", + "ts", + "ts", + "tsx", +} + +func (loader Loader) IsTypeScript() bool { + switch loader { + case LoaderTS, LoaderTSNoAmbiguousLessThan, LoaderTSX: + return true + } + return false +} + +func (loader Loader) IsCSS() bool { + switch loader { + case + LoaderCSS, LoaderGlobalCSS, LoaderLocalCSS: + return true + } + return false +} + +func (loader Loader) CanHaveSourceMap() bool { + switch loader { + case + LoaderJS, LoaderJSX, + LoaderTS, LoaderTSNoAmbiguousLessThan, LoaderTSX, + LoaderCSS, LoaderGlobalCSS, LoaderLocalCSS, + LoaderJSON, LoaderWithTypeJSON, LoaderText: + return true + } + return false +} + +type Format uint8 + +const ( + // This is used when not bundling. It means to preserve whatever form the + // import or export was originally in. ES6 syntax stays ES6 syntax and + // CommonJS syntax stays CommonJS syntax. + FormatPreserve Format = iota + + // IIFE stands for immediately-invoked function expression. That looks like + // this: + // + // (() => { + // ... bundled code ... + // })(); + // + // If the optional GlobalName is configured, then we'll write out this: + // + // let globalName = (() => { + // ... bundled code ... + // return exports; + // })(); + // + FormatIIFE + + // The CommonJS format looks like this: + // + // ... bundled code ... 
+ // module.exports = exports; + // + FormatCommonJS + + // The ES module format looks like this: + // + // ... bundled code ... + // export {...}; + // + FormatESModule +) + +func (f Format) KeepESMImportExportSyntax() bool { + return f == FormatPreserve || f == FormatESModule +} + +func (f Format) String() string { + switch f { + case FormatIIFE: + return "iife" + case FormatCommonJS: + return "cjs" + case FormatESModule: + return "esm" + } + return "" +} + +type StdinInfo struct { + Contents string + SourceFile string + AbsResolveDir string + Loader Loader +} + +type WildcardPattern struct { + Prefix string + Suffix string +} + +type ExternalMatchers struct { + Exact map[string]bool + Patterns []WildcardPattern +} + +func (matchers ExternalMatchers) HasMatchers() bool { + return len(matchers.Exact) > 0 || len(matchers.Patterns) > 0 +} + +type ExternalSettings struct { + PreResolve ExternalMatchers + PostResolve ExternalMatchers +} + +type APICall uint8 + +const ( + BuildCall APICall = iota + TransformCall +) + +type Mode uint8 + +const ( + ModePassThrough Mode = iota + ModeConvertFormat + ModeBundle +) + +type MaybeBool uint8 + +const ( + Unspecified MaybeBool = iota + True + False +) + +type CancelFlag struct { + uint32 +} + +func (flag *CancelFlag) Cancel() { + atomic.StoreUint32(&flag.uint32, 1) +} + +// This checks for nil in one place so we don't have to do that everywhere +func (flag *CancelFlag) DidCancel() bool { + return flag != nil && atomic.LoadUint32(&flag.uint32) != 0 +} + +type Options struct { + ModuleTypeData js_ast.ModuleTypeData + Defines *ProcessedDefines + TSAlwaysStrict *TSAlwaysStrict + MangleProps *regexp.Regexp + ReserveProps *regexp.Regexp + CancelFlag *CancelFlag + + // When mangling property names, call this function with a callback and do + // the property name mangling inside the callback. The callback takes an + // argument which is the mangle cache map to mutate. 
These callbacks are + // serialized so mutating the map does not require extra synchronization. + // + // This is a callback for determinism reasons. We may be building multiple + // entry points in parallel that are supposed to share a single cache. We + // don't want the order that each entry point mangles properties in to cause + // the output to change, so we serialize the property mangling over all entry + // points in entry point order. However, we still want to link everything in + // parallel so only property mangling is serialized, which is implemented by + // this function blocking until the previous entry point's property mangling + // has finished. + ExclusiveMangleCacheUpdate func(cb func( + mangleCache map[string]interface{}, + cssUsedLocalNames map[string]bool, + )) + + // This is the original information that was used to generate the + // unsupported feature sets above. It's used for error messages. + OriginalTargetEnv string + + DropLabels []string + ExtensionOrder []string + MainFields []string + Conditions []string + AbsNodePaths []string // The "NODE_PATH" variable from Node.js + ExternalSettings ExternalSettings + ExternalPackages bool + PackageAliases map[string]string + + AbsOutputFile string + AbsOutputDir string + AbsOutputBase string + OutputExtensionJS string + OutputExtensionCSS string + GlobalName []string + TSConfigPath string + TSConfigRaw string + ExtensionToLoader map[string]Loader + + PublicPath string + InjectPaths []string + InjectedDefines []InjectedDefine + InjectedFiles []InjectedFile + + JSBanner string + JSFooter string + CSSBanner string + CSSFooter string + + EntryPathTemplate []PathTemplate + ChunkPathTemplate []PathTemplate + AssetPathTemplate []PathTemplate + + Plugins []Plugin + SourceRoot string + Stdin *StdinInfo + JSX JSXOptions + LineLimit int + + CSSPrefixData map[css_ast.D]compat.CSSPrefix + UnsupportedJSFeatures compat.JSFeature + UnsupportedCSSFeatures compat.CSSFeature + + UnsupportedJSFeatureOverrides 
compat.JSFeature + UnsupportedJSFeatureOverridesMask compat.JSFeature + UnsupportedCSSFeatureOverrides compat.CSSFeature + UnsupportedCSSFeatureOverridesMask compat.CSSFeature + + TS TSOptions + Mode Mode + PreserveSymlinks bool + MinifyWhitespace bool + MinifyIdentifiers bool + MinifySyntax bool + ProfilerNames bool + CodeSplitting bool + WatchMode bool + AllowOverwrite bool + LegalComments LegalComments + + // If true, make sure to generate a single file that can be written to stdout + WriteToStdout bool + + OmitRuntimeForTests bool + OmitJSXRuntimeForTests bool + ASCIIOnly bool + KeepNames bool + IgnoreDCEAnnotations bool + TreeShaking bool + DropDebugger bool + MangleQuoted bool + Platform Platform + OutputFormat Format + NeedsMetafile bool + SourceMap SourceMap + ExcludeSourcesContent bool +} + +type TSImportsNotUsedAsValues uint8 + +const ( + TSImportsNotUsedAsValues_None TSImportsNotUsedAsValues = iota + TSImportsNotUsedAsValues_Remove + TSImportsNotUsedAsValues_Preserve + TSImportsNotUsedAsValues_Error +) + +// These flags represent the following separate "tsconfig.json" settings: +// +// - importsNotUsedAsValues +// - preserveValueImports +// - verbatimModuleSyntax +// +// TypeScript prefers for people to use "verbatimModuleSyntax" and has +// deprecated the other two settings, but we must still support them. +// All settings are combined into these two behavioral flags for us. 
+type TSUnusedImportFlags uint8 + +// With !TSUnusedImport_KeepStmt && !TSUnusedImport_KeepValues: +// +// "import 'foo'" => "import 'foo'" +// "import * as unused from 'foo'" => "" +// "import { unused } from 'foo'" => "" +// "import { type unused } from 'foo'" => "" +// +// With TSUnusedImport_KeepStmt && !TSUnusedImport_KeepValues: +// +// "import 'foo'" => "import 'foo'" +// "import * as unused from 'foo'" => "import 'foo'" +// "import { unused } from 'foo'" => "import 'foo'" +// "import { type unused } from 'foo'" => "import 'foo'" +// +// With !TSUnusedImport_KeepStmt && TSUnusedImport_KeepValues: +// +// "import 'foo'" => "import 'foo'" +// "import * as unused from 'foo'" => "import * as unused from 'foo'" +// "import { unused } from 'foo'" => "import { unused } from 'foo'" +// "import { type unused } from 'foo'" => "" +// +// With TSUnusedImport_KeepStmt && TSUnusedImport_KeepValues: +// +// "import 'foo'" => "import 'foo'" +// "import * as unused from 'foo'" => "import * as unused from 'foo'" +// "import { unused } from 'foo'" => "import { unused } from 'foo'" +// "import { type unused } from 'foo'" => "import {} from 'foo'" +const ( + TSUnusedImport_KeepStmt TSUnusedImportFlags = 1 << iota // "importsNotUsedAsValues" != "remove" + TSUnusedImport_KeepValues // "preserveValueImports" == true +) + +type TSTarget uint8 + +const ( + TSTargetUnspecified TSTarget = iota + TSTargetBelowES2022 // "useDefineForClassFields" defaults to false + TSTargetAtOrAboveES2022 // "useDefineForClassFields" defaults to true +) + +type TSAlwaysStrict struct { + // This information is only used for error messages + Name string + Source logger.Source + Range logger.Range + + // This information can affect code transformation + Value bool +} + +type PathPlaceholder uint8 + +const ( + NoPlaceholder PathPlaceholder = iota + + // The relative path from the original parent directory to the configured + // "outbase" directory, or to the lowest common ancestor directory + DirPlaceholder 
+ + // The original name of the file, or the manual chunk name, or the name of + // the type of output file ("entry" or "chunk" or "asset") + NamePlaceholder + + // A hash of the contents of this file, and the contents and output paths of + // all dependencies (except for their hash placeholders) + HashPlaceholder + + // The original extension of the file, or the name of the output file + // (e.g. "css", "svg", "png") + ExtPlaceholder +) + +type PathTemplate struct { + Data string + Placeholder PathPlaceholder +} + +type PathPlaceholders struct { + Dir *string + Name *string + Hash *string + Ext *string +} + +func (placeholders PathPlaceholders) Get(placeholder PathPlaceholder) *string { + switch placeholder { + case DirPlaceholder: + return placeholders.Dir + case NamePlaceholder: + return placeholders.Name + case HashPlaceholder: + return placeholders.Hash + case ExtPlaceholder: + return placeholders.Ext + } + return nil +} + +func TemplateToString(template []PathTemplate) string { + if len(template) == 1 && template[0].Placeholder == NoPlaceholder { + // Avoid allocations in this case + return template[0].Data + } + sb := strings.Builder{} + for _, part := range template { + sb.WriteString(part.Data) + switch part.Placeholder { + case DirPlaceholder: + sb.WriteString("[dir]") + case NamePlaceholder: + sb.WriteString("[name]") + case HashPlaceholder: + sb.WriteString("[hash]") + case ExtPlaceholder: + sb.WriteString("[ext]") + } + } + return sb.String() +} + +func HasPlaceholder(template []PathTemplate, placeholder PathPlaceholder) bool { + for _, part := range template { + if part.Placeholder == placeholder { + return true + } + } + return false +} + +func SubstituteTemplate(template []PathTemplate, placeholders PathPlaceholders) []PathTemplate { + // Don't allocate if no substitution is possible and the template is already minimal + shouldSubstitute := false + for i, part := range template { + if placeholders.Get(part.Placeholder) != nil || (part.Placeholder == 
NoPlaceholder && i+1 < len(template)) { + shouldSubstitute = true + break + } + } + if !shouldSubstitute { + return template + } + + // Otherwise, substitute and merge as appropriate + result := make([]PathTemplate, 0, len(template)) + for _, part := range template { + if sub := placeholders.Get(part.Placeholder); sub != nil { + part.Data += *sub + part.Placeholder = NoPlaceholder + } + if last := len(result) - 1; last >= 0 && result[last].Placeholder == NoPlaceholder { + last := &result[last] + last.Data += part.Data + last.Placeholder = part.Placeholder + } else { + result = append(result, part) + } + } + return result +} + +func ShouldCallRuntimeRequire(mode Mode, outputFormat Format) bool { + return mode == ModeBundle && outputFormat != FormatCommonJS +} + +type InjectedDefine struct { + Data js_ast.E + Name string + Source logger.Source +} + +type InjectedFile struct { + Exports []InjectableExport + DefineName string // For injected files generated when you "--define" a non-literal + Source logger.Source + IsCopyLoader bool // If you set the loader to "copy" (see https://github.com/evanw/esbuild/issues/3041) +} + +type InjectableExport struct { + Alias string + Loc logger.Loc +} + +var filterMutex sync.Mutex +var filterCache map[string]*regexp.Regexp + +func compileFilter(filter string) (result *regexp.Regexp) { + if filter == "" { + // Must provide a filter + return nil + } + ok := false + + // Cache hit? 
+ (func() { + filterMutex.Lock() + defer filterMutex.Unlock() + if filterCache != nil { + result, ok = filterCache[filter] + } + })() + if ok { + return + } + + // Cache miss + result, err := regexp.Compile(filter) + if err != nil { + return nil + } + + // Cache for next time + filterMutex.Lock() + defer filterMutex.Unlock() + if filterCache == nil { + filterCache = make(map[string]*regexp.Regexp) + } + filterCache[filter] = result + return +} + +func CompileFilterForPlugin(pluginName string, kind string, filter string) (*regexp.Regexp, error) { + if filter == "" { + return nil, fmt.Errorf("[%s] %q is missing a filter", pluginName, kind) + } + + result := compileFilter(filter) + if result == nil { + return nil, fmt.Errorf("[%s] %q filter is not a valid Go regular expression: %q", pluginName, kind, filter) + } + + return result, nil +} + +func PluginAppliesToPath(path logger.Path, filter *regexp.Regexp, namespace string) bool { + return (namespace == "" || path.Namespace == namespace) && filter.MatchString(path.Text) +} + +//////////////////////////////////////////////////////////////////////////////// +// Plugin API + +type Plugin struct { + Name string + OnStart []OnStart + OnResolve []OnResolve + OnLoad []OnLoad +} + +type OnStart struct { + Callback func() OnStartResult + Name string +} + +type OnStartResult struct { + ThrownError error + Msgs []logger.Msg +} + +type OnResolve struct { + Filter *regexp.Regexp + Callback func(OnResolveArgs) OnResolveResult + Name string + Namespace string +} + +type OnResolveArgs struct { + Path string + ResolveDir string + PluginData interface{} + Importer logger.Path + Kind ast.ImportKind +} + +type OnResolveResult struct { + PluginName string + + Msgs []logger.Msg + ThrownError error + + AbsWatchFiles []string + AbsWatchDirs []string + + PluginData interface{} + Path logger.Path + External bool + IsSideEffectFree bool +} + +type OnLoad struct { + Filter *regexp.Regexp + Callback func(OnLoadArgs) OnLoadResult + Name string + 
Namespace string +} + +type OnLoadArgs struct { + PluginData interface{} + Path logger.Path +} + +type OnLoadResult struct { + PluginName string + + Contents *string + AbsResolveDir string + PluginData interface{} + + Msgs []logger.Msg + ThrownError error + + AbsWatchFiles []string + AbsWatchDirs []string + + Loader Loader +} + +func PrettyPrintTargetEnvironment(originalTargetEnv string, unsupportedJSFeatureOverridesMask compat.JSFeature) (where string) { + where = "the configured target environment" + overrides := "" + if unsupportedJSFeatureOverridesMask != 0 { + count := 0 + mask := unsupportedJSFeatureOverridesMask + for mask != 0 { + if (mask & 1) != 0 { + count++ + } + mask >>= 1 + } + s := "s" + if count == 1 { + s = "" + } + overrides = fmt.Sprintf(" + %d override%s", count, s) + } + if originalTargetEnv != "" { + where = fmt.Sprintf("%s (%s%s)", where, originalTargetEnv, overrides) + } + return +} diff --git a/vendor/github.com/evanw/esbuild/internal/config/globals.go b/vendor/github.com/evanw/esbuild/internal/config/globals.go new file mode 100644 index 000000000000..4a77c0267c08 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/config/globals.go @@ -0,0 +1,1014 @@ +package config + +import ( + "math" + "strings" + "sync" + + "github.com/evanw/esbuild/internal/ast" + "github.com/evanw/esbuild/internal/helpers" + "github.com/evanw/esbuild/internal/js_ast" +) + +var processedGlobalsMutex sync.Mutex +var processedGlobals *ProcessedDefines + +// If something is in this list, then a direct identifier expression or property +// access chain matching this will be assumed to have no side effects and will +// be removed. +// +// This also means code is allowed to be reordered past things in this list. For +// example, if "console.log" is in this list, permitting reordering allows for +// "if (a) console.log(b); else console.log(c)" to be reordered and transformed +// into "console.log(a ? b : c)". 
Notice that "a" and "console.log" are in a +// different order, which can only happen if evaluating the "console.log" +// property access can be assumed to not change the value of "a". +// +// Note that membership in this list says nothing about whether calling any of +// these functions has any side effects. It only says something about +// referencing these function without calling them. +var knownGlobals = [][]string{ + // These global identifiers should exist in all JavaScript environments. This + // deliberately omits "NaN", "Infinity", and "undefined" because these are + // treated as automatically-inlined constants instead of identifiers. + {"Array"}, + {"Boolean"}, + {"Function"}, + {"Math"}, + {"Number"}, + {"Object"}, + {"RegExp"}, + {"String"}, + + // Object: Static methods + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object#Static_methods + {"Object", "assign"}, + {"Object", "create"}, + {"Object", "defineProperties"}, + {"Object", "defineProperty"}, + {"Object", "entries"}, + {"Object", "freeze"}, + {"Object", "fromEntries"}, + {"Object", "getOwnPropertyDescriptor"}, + {"Object", "getOwnPropertyDescriptors"}, + {"Object", "getOwnPropertyNames"}, + {"Object", "getOwnPropertySymbols"}, + {"Object", "getPrototypeOf"}, + {"Object", "is"}, + {"Object", "isExtensible"}, + {"Object", "isFrozen"}, + {"Object", "isSealed"}, + {"Object", "keys"}, + {"Object", "preventExtensions"}, + {"Object", "seal"}, + {"Object", "setPrototypeOf"}, + {"Object", "values"}, + + // Object: Instance methods + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object#Instance_methods + {"Object", "prototype", "__defineGetter__"}, + {"Object", "prototype", "__defineSetter__"}, + {"Object", "prototype", "__lookupGetter__"}, + {"Object", "prototype", "__lookupSetter__"}, + {"Object", "prototype", "hasOwnProperty"}, + {"Object", "prototype", "isPrototypeOf"}, + {"Object", "prototype", "propertyIsEnumerable"}, + 
{"Object", "prototype", "toLocaleString"}, + {"Object", "prototype", "toString"}, + {"Object", "prototype", "unwatch"}, + {"Object", "prototype", "valueOf"}, + {"Object", "prototype", "watch"}, + + // Symbol: Static properties + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol#static_properties + {"Symbol", "asyncDispose"}, + {"Symbol", "asyncIterator"}, + {"Symbol", "dispose"}, + {"Symbol", "hasInstance"}, + {"Symbol", "isConcatSpreadable"}, + {"Symbol", "iterator"}, + {"Symbol", "match"}, + {"Symbol", "matchAll"}, + {"Symbol", "replace"}, + {"Symbol", "search"}, + {"Symbol", "species"}, + {"Symbol", "split"}, + {"Symbol", "toPrimitive"}, + {"Symbol", "toStringTag"}, + {"Symbol", "unscopables"}, + + // Math: Static properties + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math#Static_properties + {"Math", "E"}, + {"Math", "LN10"}, + {"Math", "LN2"}, + {"Math", "LOG10E"}, + {"Math", "LOG2E"}, + {"Math", "PI"}, + {"Math", "SQRT1_2"}, + {"Math", "SQRT2"}, + + // Math: Static methods + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math#Static_methods + {"Math", "abs"}, + {"Math", "acos"}, + {"Math", "acosh"}, + {"Math", "asin"}, + {"Math", "asinh"}, + {"Math", "atan"}, + {"Math", "atan2"}, + {"Math", "atanh"}, + {"Math", "cbrt"}, + {"Math", "ceil"}, + {"Math", "clz32"}, + {"Math", "cos"}, + {"Math", "cosh"}, + {"Math", "exp"}, + {"Math", "expm1"}, + {"Math", "floor"}, + {"Math", "fround"}, + {"Math", "hypot"}, + {"Math", "imul"}, + {"Math", "log"}, + {"Math", "log10"}, + {"Math", "log1p"}, + {"Math", "log2"}, + {"Math", "max"}, + {"Math", "min"}, + {"Math", "pow"}, + {"Math", "random"}, + {"Math", "round"}, + {"Math", "sign"}, + {"Math", "sin"}, + {"Math", "sinh"}, + {"Math", "sqrt"}, + {"Math", "tan"}, + {"Math", "tanh"}, + {"Math", "trunc"}, + + // Reflect: Static methods + // 
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect#static_methods + {"Reflect", "apply"}, + {"Reflect", "construct"}, + {"Reflect", "defineProperty"}, + {"Reflect", "deleteProperty"}, + {"Reflect", "get"}, + {"Reflect", "getOwnPropertyDescriptor"}, + {"Reflect", "getPrototypeOf"}, + {"Reflect", "has"}, + {"Reflect", "isExtensible"}, + {"Reflect", "ownKeys"}, + {"Reflect", "preventExtensions"}, + {"Reflect", "set"}, + {"Reflect", "setPrototypeOf"}, + + // JSON: Static Methods + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON#static_methods + {"JSON", "parse"}, + {"JSON", "stringify"}, + + // Other globals present in both the browser and node (except "eval" because + // it has special behavior) + {"AbortController"}, + {"AbortSignal"}, + {"AggregateError"}, + {"ArrayBuffer"}, + {"BigInt"}, + {"DataView"}, + {"Date"}, + {"Error"}, + {"EvalError"}, + {"Event"}, + {"EventTarget"}, + {"Float32Array"}, + {"Float64Array"}, + {"Int16Array"}, + {"Int32Array"}, + {"Int8Array"}, + {"Intl"}, + {"JSON"}, + {"Map"}, + {"MessageChannel"}, + {"MessageEvent"}, + {"MessagePort"}, + {"Promise"}, + {"Proxy"}, + {"RangeError"}, + {"ReferenceError"}, + {"Reflect"}, + {"Set"}, + {"Symbol"}, + {"SyntaxError"}, + {"TextDecoder"}, + {"TextEncoder"}, + {"TypeError"}, + {"URIError"}, + {"URL"}, + {"URLSearchParams"}, + {"Uint16Array"}, + {"Uint32Array"}, + {"Uint8Array"}, + {"Uint8ClampedArray"}, + {"WeakMap"}, + {"WeakSet"}, + {"WebAssembly"}, + {"clearInterval"}, + {"clearTimeout"}, + {"console"}, + {"decodeURI"}, + {"decodeURIComponent"}, + {"encodeURI"}, + {"encodeURIComponent"}, + {"escape"}, + {"globalThis"}, + {"isFinite"}, + {"isNaN"}, + {"parseFloat"}, + {"parseInt"}, + {"queueMicrotask"}, + {"setInterval"}, + {"setTimeout"}, + {"unescape"}, + + // Console method references are assumed to have no side effects + // https://developer.mozilla.org/en-US/docs/Web/API/console + {"console", "assert"}, + 
{"console", "clear"}, + {"console", "count"}, + {"console", "countReset"}, + {"console", "debug"}, + {"console", "dir"}, + {"console", "dirxml"}, + {"console", "error"}, + {"console", "group"}, + {"console", "groupCollapsed"}, + {"console", "groupEnd"}, + {"console", "info"}, + {"console", "log"}, + {"console", "table"}, + {"console", "time"}, + {"console", "timeEnd"}, + {"console", "timeLog"}, + {"console", "trace"}, + {"console", "warn"}, + + // CSSOM APIs + {"CSSAnimation"}, + {"CSSFontFaceRule"}, + {"CSSImportRule"}, + {"CSSKeyframeRule"}, + {"CSSKeyframesRule"}, + {"CSSMediaRule"}, + {"CSSNamespaceRule"}, + {"CSSPageRule"}, + {"CSSRule"}, + {"CSSRuleList"}, + {"CSSStyleDeclaration"}, + {"CSSStyleRule"}, + {"CSSStyleSheet"}, + {"CSSSupportsRule"}, + {"CSSTransition"}, + + // SVG DOM + {"SVGAElement"}, + {"SVGAngle"}, + {"SVGAnimateElement"}, + {"SVGAnimateMotionElement"}, + {"SVGAnimateTransformElement"}, + {"SVGAnimatedAngle"}, + {"SVGAnimatedBoolean"}, + {"SVGAnimatedEnumeration"}, + {"SVGAnimatedInteger"}, + {"SVGAnimatedLength"}, + {"SVGAnimatedLengthList"}, + {"SVGAnimatedNumber"}, + {"SVGAnimatedNumberList"}, + {"SVGAnimatedPreserveAspectRatio"}, + {"SVGAnimatedRect"}, + {"SVGAnimatedString"}, + {"SVGAnimatedTransformList"}, + {"SVGAnimationElement"}, + {"SVGCircleElement"}, + {"SVGClipPathElement"}, + {"SVGComponentTransferFunctionElement"}, + {"SVGDefsElement"}, + {"SVGDescElement"}, + {"SVGElement"}, + {"SVGEllipseElement"}, + {"SVGFEBlendElement"}, + {"SVGFEColorMatrixElement"}, + {"SVGFEComponentTransferElement"}, + {"SVGFECompositeElement"}, + {"SVGFEConvolveMatrixElement"}, + {"SVGFEDiffuseLightingElement"}, + {"SVGFEDisplacementMapElement"}, + {"SVGFEDistantLightElement"}, + {"SVGFEDropShadowElement"}, + {"SVGFEFloodElement"}, + {"SVGFEFuncAElement"}, + {"SVGFEFuncBElement"}, + {"SVGFEFuncGElement"}, + {"SVGFEFuncRElement"}, + {"SVGFEGaussianBlurElement"}, + {"SVGFEImageElement"}, + {"SVGFEMergeElement"}, + {"SVGFEMergeNodeElement"}, + 
{"SVGFEMorphologyElement"}, + {"SVGFEOffsetElement"}, + {"SVGFEPointLightElement"}, + {"SVGFESpecularLightingElement"}, + {"SVGFESpotLightElement"}, + {"SVGFETileElement"}, + {"SVGFETurbulenceElement"}, + {"SVGFilterElement"}, + {"SVGForeignObjectElement"}, + {"SVGGElement"}, + {"SVGGeometryElement"}, + {"SVGGradientElement"}, + {"SVGGraphicsElement"}, + {"SVGImageElement"}, + {"SVGLength"}, + {"SVGLengthList"}, + {"SVGLineElement"}, + {"SVGLinearGradientElement"}, + {"SVGMPathElement"}, + {"SVGMarkerElement"}, + {"SVGMaskElement"}, + {"SVGMatrix"}, + {"SVGMetadataElement"}, + {"SVGNumber"}, + {"SVGNumberList"}, + {"SVGPathElement"}, + {"SVGPatternElement"}, + {"SVGPoint"}, + {"SVGPointList"}, + {"SVGPolygonElement"}, + {"SVGPolylineElement"}, + {"SVGPreserveAspectRatio"}, + {"SVGRadialGradientElement"}, + {"SVGRect"}, + {"SVGRectElement"}, + {"SVGSVGElement"}, + {"SVGScriptElement"}, + {"SVGSetElement"}, + {"SVGStopElement"}, + {"SVGStringList"}, + {"SVGStyleElement"}, + {"SVGSwitchElement"}, + {"SVGSymbolElement"}, + {"SVGTSpanElement"}, + {"SVGTextContentElement"}, + {"SVGTextElement"}, + {"SVGTextPathElement"}, + {"SVGTextPositioningElement"}, + {"SVGTitleElement"}, + {"SVGTransform"}, + {"SVGTransformList"}, + {"SVGUnitTypes"}, + {"SVGUseElement"}, + {"SVGViewElement"}, + + // Other browser APIs + // + // This list contains all globals present in modern versions of Chrome, Safari, + // and Firefox except for the following properties, since they have a side effect + // of triggering layout (https://gist.github.com/paulirish/5d52fb081b3570c81e3a): + // + // - scrollX + // - scrollY + // - innerWidth + // - innerHeight + // - pageXOffset + // - pageYOffset + // + // The following globals have also been removed since they sometimes throw an + // exception when accessed, which is a side effect (for more information see + // https://stackoverflow.com/a/33047477): + // + // - localStorage + // - sessionStorage + // + {"AnalyserNode"}, + {"Animation"}, + 
{"AnimationEffect"}, + {"AnimationEvent"}, + {"AnimationPlaybackEvent"}, + {"AnimationTimeline"}, + {"Attr"}, + {"Audio"}, + {"AudioBuffer"}, + {"AudioBufferSourceNode"}, + {"AudioDestinationNode"}, + {"AudioListener"}, + {"AudioNode"}, + {"AudioParam"}, + {"AudioProcessingEvent"}, + {"AudioScheduledSourceNode"}, + {"BarProp"}, + {"BeforeUnloadEvent"}, + {"BiquadFilterNode"}, + {"Blob"}, + {"BlobEvent"}, + {"ByteLengthQueuingStrategy"}, + {"CDATASection"}, + {"CSS"}, + {"CanvasGradient"}, + {"CanvasPattern"}, + {"CanvasRenderingContext2D"}, + {"ChannelMergerNode"}, + {"ChannelSplitterNode"}, + {"CharacterData"}, + {"ClipboardEvent"}, + {"CloseEvent"}, + {"Comment"}, + {"CompositionEvent"}, + {"ConvolverNode"}, + {"CountQueuingStrategy"}, + {"Crypto"}, + {"CustomElementRegistry"}, + {"CustomEvent"}, + {"DOMException"}, + {"DOMImplementation"}, + {"DOMMatrix"}, + {"DOMMatrixReadOnly"}, + {"DOMParser"}, + {"DOMPoint"}, + {"DOMPointReadOnly"}, + {"DOMQuad"}, + {"DOMRect"}, + {"DOMRectList"}, + {"DOMRectReadOnly"}, + {"DOMStringList"}, + {"DOMStringMap"}, + {"DOMTokenList"}, + {"DataTransfer"}, + {"DataTransferItem"}, + {"DataTransferItemList"}, + {"DelayNode"}, + {"Document"}, + {"DocumentFragment"}, + {"DocumentTimeline"}, + {"DocumentType"}, + {"DragEvent"}, + {"DynamicsCompressorNode"}, + {"Element"}, + {"ErrorEvent"}, + {"EventSource"}, + {"File"}, + {"FileList"}, + {"FileReader"}, + {"FocusEvent"}, + {"FontFace"}, + {"FormData"}, + {"GainNode"}, + {"Gamepad"}, + {"GamepadButton"}, + {"GamepadEvent"}, + {"Geolocation"}, + {"GeolocationPositionError"}, + {"HTMLAllCollection"}, + {"HTMLAnchorElement"}, + {"HTMLAreaElement"}, + {"HTMLAudioElement"}, + {"HTMLBRElement"}, + {"HTMLBaseElement"}, + {"HTMLBodyElement"}, + {"HTMLButtonElement"}, + {"HTMLCanvasElement"}, + {"HTMLCollection"}, + {"HTMLDListElement"}, + {"HTMLDataElement"}, + {"HTMLDataListElement"}, + {"HTMLDetailsElement"}, + {"HTMLDirectoryElement"}, + {"HTMLDivElement"}, + {"HTMLDocument"}, + 
{"HTMLElement"}, + {"HTMLEmbedElement"}, + {"HTMLFieldSetElement"}, + {"HTMLFontElement"}, + {"HTMLFormControlsCollection"}, + {"HTMLFormElement"}, + {"HTMLFrameElement"}, + {"HTMLFrameSetElement"}, + {"HTMLHRElement"}, + {"HTMLHeadElement"}, + {"HTMLHeadingElement"}, + {"HTMLHtmlElement"}, + {"HTMLIFrameElement"}, + {"HTMLImageElement"}, + {"HTMLInputElement"}, + {"HTMLLIElement"}, + {"HTMLLabelElement"}, + {"HTMLLegendElement"}, + {"HTMLLinkElement"}, + {"HTMLMapElement"}, + {"HTMLMarqueeElement"}, + {"HTMLMediaElement"}, + {"HTMLMenuElement"}, + {"HTMLMetaElement"}, + {"HTMLMeterElement"}, + {"HTMLModElement"}, + {"HTMLOListElement"}, + {"HTMLObjectElement"}, + {"HTMLOptGroupElement"}, + {"HTMLOptionElement"}, + {"HTMLOptionsCollection"}, + {"HTMLOutputElement"}, + {"HTMLParagraphElement"}, + {"HTMLParamElement"}, + {"HTMLPictureElement"}, + {"HTMLPreElement"}, + {"HTMLProgressElement"}, + {"HTMLQuoteElement"}, + {"HTMLScriptElement"}, + {"HTMLSelectElement"}, + {"HTMLSlotElement"}, + {"HTMLSourceElement"}, + {"HTMLSpanElement"}, + {"HTMLStyleElement"}, + {"HTMLTableCaptionElement"}, + {"HTMLTableCellElement"}, + {"HTMLTableColElement"}, + {"HTMLTableElement"}, + {"HTMLTableRowElement"}, + {"HTMLTableSectionElement"}, + {"HTMLTemplateElement"}, + {"HTMLTextAreaElement"}, + {"HTMLTimeElement"}, + {"HTMLTitleElement"}, + {"HTMLTrackElement"}, + {"HTMLUListElement"}, + {"HTMLUnknownElement"}, + {"HTMLVideoElement"}, + {"HashChangeEvent"}, + {"Headers"}, + {"History"}, + {"IDBCursor"}, + {"IDBCursorWithValue"}, + {"IDBDatabase"}, + {"IDBFactory"}, + {"IDBIndex"}, + {"IDBKeyRange"}, + {"IDBObjectStore"}, + {"IDBOpenDBRequest"}, + {"IDBRequest"}, + {"IDBTransaction"}, + {"IDBVersionChangeEvent"}, + {"Image"}, + {"ImageData"}, + {"InputEvent"}, + {"IntersectionObserver"}, + {"IntersectionObserverEntry"}, + {"KeyboardEvent"}, + {"KeyframeEffect"}, + {"Location"}, + {"MediaCapabilities"}, + {"MediaElementAudioSourceNode"}, + {"MediaEncryptedEvent"}, + {"MediaError"}, + 
{"MediaList"}, + {"MediaQueryList"}, + {"MediaQueryListEvent"}, + {"MediaRecorder"}, + {"MediaSource"}, + {"MediaStream"}, + {"MediaStreamAudioDestinationNode"}, + {"MediaStreamAudioSourceNode"}, + {"MediaStreamTrack"}, + {"MediaStreamTrackEvent"}, + {"MimeType"}, + {"MimeTypeArray"}, + {"MouseEvent"}, + {"MutationEvent"}, + {"MutationObserver"}, + {"MutationRecord"}, + {"NamedNodeMap"}, + {"Navigator"}, + {"Node"}, + {"NodeFilter"}, + {"NodeIterator"}, + {"NodeList"}, + {"Notification"}, + {"OfflineAudioCompletionEvent"}, + {"Option"}, + {"OscillatorNode"}, + {"PageTransitionEvent"}, + {"Path2D"}, + {"Performance"}, + {"PerformanceEntry"}, + {"PerformanceMark"}, + {"PerformanceMeasure"}, + {"PerformanceNavigation"}, + {"PerformanceObserver"}, + {"PerformanceObserverEntryList"}, + {"PerformanceResourceTiming"}, + {"PerformanceTiming"}, + {"PeriodicWave"}, + {"Plugin"}, + {"PluginArray"}, + {"PointerEvent"}, + {"PopStateEvent"}, + {"ProcessingInstruction"}, + {"ProgressEvent"}, + {"PromiseRejectionEvent"}, + {"RTCCertificate"}, + {"RTCDTMFSender"}, + {"RTCDTMFToneChangeEvent"}, + {"RTCDataChannel"}, + {"RTCDataChannelEvent"}, + {"RTCIceCandidate"}, + {"RTCPeerConnection"}, + {"RTCPeerConnectionIceEvent"}, + {"RTCRtpReceiver"}, + {"RTCRtpSender"}, + {"RTCRtpTransceiver"}, + {"RTCSessionDescription"}, + {"RTCStatsReport"}, + {"RTCTrackEvent"}, + {"RadioNodeList"}, + {"Range"}, + {"ReadableStream"}, + {"Request"}, + {"ResizeObserver"}, + {"ResizeObserverEntry"}, + {"Response"}, + {"Screen"}, + {"ScriptProcessorNode"}, + {"SecurityPolicyViolationEvent"}, + {"Selection"}, + {"ShadowRoot"}, + {"SourceBuffer"}, + {"SourceBufferList"}, + {"SpeechSynthesisEvent"}, + {"SpeechSynthesisUtterance"}, + {"StaticRange"}, + {"Storage"}, + {"StorageEvent"}, + {"StyleSheet"}, + {"StyleSheetList"}, + {"Text"}, + {"TextMetrics"}, + {"TextTrack"}, + {"TextTrackCue"}, + {"TextTrackCueList"}, + {"TextTrackList"}, + {"TimeRanges"}, + {"TrackEvent"}, + {"TransitionEvent"}, + {"TreeWalker"}, 
+ {"UIEvent"}, + {"VTTCue"}, + {"ValidityState"}, + {"VisualViewport"}, + {"WaveShaperNode"}, + {"WebGLActiveInfo"}, + {"WebGLBuffer"}, + {"WebGLContextEvent"}, + {"WebGLFramebuffer"}, + {"WebGLProgram"}, + {"WebGLQuery"}, + {"WebGLRenderbuffer"}, + {"WebGLRenderingContext"}, + {"WebGLSampler"}, + {"WebGLShader"}, + {"WebGLShaderPrecisionFormat"}, + {"WebGLSync"}, + {"WebGLTexture"}, + {"WebGLUniformLocation"}, + {"WebKitCSSMatrix"}, + {"WebSocket"}, + {"WheelEvent"}, + {"Window"}, + {"Worker"}, + {"XMLDocument"}, + {"XMLHttpRequest"}, + {"XMLHttpRequestEventTarget"}, + {"XMLHttpRequestUpload"}, + {"XMLSerializer"}, + {"XPathEvaluator"}, + {"XPathExpression"}, + {"XPathResult"}, + {"XSLTProcessor"}, + {"alert"}, + {"atob"}, + {"blur"}, + {"btoa"}, + {"cancelAnimationFrame"}, + {"captureEvents"}, + {"close"}, + {"closed"}, + {"confirm"}, + {"customElements"}, + {"devicePixelRatio"}, + {"document"}, + {"event"}, + {"fetch"}, + {"find"}, + {"focus"}, + {"frameElement"}, + {"frames"}, + {"getComputedStyle"}, + {"getSelection"}, + {"history"}, + {"indexedDB"}, + {"isSecureContext"}, + {"length"}, + {"location"}, + {"locationbar"}, + {"matchMedia"}, + {"menubar"}, + {"moveBy"}, + {"moveTo"}, + {"name"}, + {"navigator"}, + {"onabort"}, + {"onafterprint"}, + {"onanimationend"}, + {"onanimationiteration"}, + {"onanimationstart"}, + {"onbeforeprint"}, + {"onbeforeunload"}, + {"onblur"}, + {"oncanplay"}, + {"oncanplaythrough"}, + {"onchange"}, + {"onclick"}, + {"oncontextmenu"}, + {"oncuechange"}, + {"ondblclick"}, + {"ondrag"}, + {"ondragend"}, + {"ondragenter"}, + {"ondragleave"}, + {"ondragover"}, + {"ondragstart"}, + {"ondrop"}, + {"ondurationchange"}, + {"onemptied"}, + {"onended"}, + {"onerror"}, + {"onfocus"}, + {"ongotpointercapture"}, + {"onhashchange"}, + {"oninput"}, + {"oninvalid"}, + {"onkeydown"}, + {"onkeypress"}, + {"onkeyup"}, + {"onlanguagechange"}, + {"onload"}, + {"onloadeddata"}, + {"onloadedmetadata"}, + {"onloadstart"}, + {"onlostpointercapture"}, + 
{"onmessage"}, + {"onmousedown"}, + {"onmouseenter"}, + {"onmouseleave"}, + {"onmousemove"}, + {"onmouseout"}, + {"onmouseover"}, + {"onmouseup"}, + {"onoffline"}, + {"ononline"}, + {"onpagehide"}, + {"onpageshow"}, + {"onpause"}, + {"onplay"}, + {"onplaying"}, + {"onpointercancel"}, + {"onpointerdown"}, + {"onpointerenter"}, + {"onpointerleave"}, + {"onpointermove"}, + {"onpointerout"}, + {"onpointerover"}, + {"onpointerup"}, + {"onpopstate"}, + {"onprogress"}, + {"onratechange"}, + {"onrejectionhandled"}, + {"onreset"}, + {"onresize"}, + {"onscroll"}, + {"onseeked"}, + {"onseeking"}, + {"onselect"}, + {"onstalled"}, + {"onstorage"}, + {"onsubmit"}, + {"onsuspend"}, + {"ontimeupdate"}, + {"ontoggle"}, + {"ontransitioncancel"}, + {"ontransitionend"}, + {"ontransitionrun"}, + {"ontransitionstart"}, + {"onunhandledrejection"}, + {"onunload"}, + {"onvolumechange"}, + {"onwaiting"}, + {"onwebkitanimationend"}, + {"onwebkitanimationiteration"}, + {"onwebkitanimationstart"}, + {"onwebkittransitionend"}, + {"onwheel"}, + {"open"}, + {"opener"}, + {"origin"}, + {"outerHeight"}, + {"outerWidth"}, + {"parent"}, + {"performance"}, + {"personalbar"}, + {"postMessage"}, + {"print"}, + {"prompt"}, + {"releaseEvents"}, + {"requestAnimationFrame"}, + {"resizeBy"}, + {"resizeTo"}, + {"screen"}, + {"screenLeft"}, + {"screenTop"}, + {"screenX"}, + {"screenY"}, + {"scroll"}, + {"scrollBy"}, + {"scrollTo"}, + {"scrollbars"}, + {"self"}, + {"speechSynthesis"}, + {"status"}, + {"statusbar"}, + {"stop"}, + {"toolbar"}, + {"top"}, + {"webkitURL"}, + {"window"}, +} + +// We currently only support compile-time replacement with certain expressions: +// +// - Primitive literals +// - Identifiers +// - "Entity names" which are identifiers followed by property accesses +// +// We don't support arbitrary expressions because arbitrary expressions may +// require the full AST. For example, there could be "import()" or "require()" +// expressions that need an import record. 
We also need to re-generate some +// nodes such as identifiers within the injected context so that they can +// bind to symbols in that context. Other expressions such as "this" may +// also be contextual. +type DefineExpr struct { + Constant js_ast.E + Parts []string + InjectedDefineIndex ast.Index32 +} + +type DefineData struct { + DefineExpr *DefineExpr + Flags DefineFlags +} + +type DefineFlags uint8 + +const ( + // True if accessing this value is known to not have any side effects. For + // example, a bare reference to "Object.create" can be removed because it + // does not have any observable side effects. + CanBeRemovedIfUnused DefineFlags = 1 << iota + + // True if a call to this value is known to not have any side effects. For + // example, a bare call to "Object()" can be removed because it does not + // have any observable side effects. + CallCanBeUnwrappedIfUnused + + // If true, the user has indicated that every direct calls to a property on + // this object and all of that call's arguments are to be removed from the + // output, even when the arguments have side effects. This is used to + // implement the "--drop:console" flag. + MethodCallsMustBeReplacedWithUndefined + + // Symbol values are known to not have side effects when used as property + // names in class declarations and object literals. + IsSymbolInstance +) + +func (flags DefineFlags) Has(flag DefineFlags) bool { + return (flags & flag) != 0 +} + +func mergeDefineData(old DefineData, new DefineData) DefineData { + new.Flags |= old.Flags + return new +} + +type DotDefine struct { + Data DefineData + Parts []string +} + +type ProcessedDefines struct { + IdentifierDefines map[string]DefineData + DotDefines map[string][]DotDefine +} + +// This transformation is expensive, so we only want to do it once. Make sure +// to only call processDefines() once per compilation. 
Unfortunately Golang +// doesn't have an efficient way to copy a map and the overhead of copying +// all of the properties into a new map once for every new parser noticeably +// slows down our benchmarks. +func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines { + // Optimization: reuse known globals if there are no user-specified defines + hasUserDefines := len(userDefines) != 0 + if !hasUserDefines { + processedGlobalsMutex.Lock() + if processedGlobals != nil { + defer processedGlobalsMutex.Unlock() + return *processedGlobals + } + processedGlobalsMutex.Unlock() + } + + result := ProcessedDefines{ + IdentifierDefines: make(map[string]DefineData), + DotDefines: make(map[string][]DotDefine), + } + + // Mark these property accesses as free of side effects. That means they can + // be removed if their result is unused. We can't just remove all unused + // property accesses since property accesses can have side effects. For + // example, the property access "a.b.c" has the side effect of throwing an + // exception if "a.b" is undefined. + for _, parts := range knownGlobals { + tail := parts[len(parts)-1] + if len(parts) == 1 { + result.IdentifierDefines[tail] = DefineData{Flags: CanBeRemovedIfUnused} + } else { + flags := CanBeRemovedIfUnused + + // All properties on the "Symbol" global are currently symbol instances + // (i.e. "typeof Symbol.iterator === 'symbol'"). This is used to avoid + // treating properties with these names as having side effects. 
+ if parts[0] == "Symbol" { + flags |= IsSymbolInstance + } + + result.DotDefines[tail] = append(result.DotDefines[tail], DotDefine{Parts: parts, Data: DefineData{Flags: flags}}) + } + } + + // Swap in certain literal values because those can be constant folded + result.IdentifierDefines["undefined"] = DefineData{ + DefineExpr: &DefineExpr{Constant: js_ast.EUndefinedShared}, + } + result.IdentifierDefines["NaN"] = DefineData{ + DefineExpr: &DefineExpr{Constant: &js_ast.ENumber{Value: math.NaN()}}, + } + result.IdentifierDefines["Infinity"] = DefineData{ + DefineExpr: &DefineExpr{Constant: &js_ast.ENumber{Value: math.Inf(1)}}, + } + + // Then copy the user-specified defines in afterwards, which will overwrite + // any known globals above. + for key, data := range userDefines { + parts := strings.Split(key, ".") + + // Identifier defines are special-cased + if len(parts) == 1 { + result.IdentifierDefines[key] = mergeDefineData(result.IdentifierDefines[key], data) + continue + } + + tail := parts[len(parts)-1] + dotDefines := result.DotDefines[tail] + found := false + + // Try to merge with existing dot defines first + for i, define := range dotDefines { + if helpers.StringArraysEqual(parts, define.Parts) { + define := &dotDefines[i] + define.Data = mergeDefineData(define.Data, data) + found = true + break + } + } + + if !found { + dotDefines = append(dotDefines, DotDefine{Parts: parts, Data: data}) + } + result.DotDefines[tail] = dotDefines + } + + // Potentially cache the result for next time + if !hasUserDefines { + processedGlobalsMutex.Lock() + defer processedGlobalsMutex.Unlock() + if processedGlobals == nil { + processedGlobals = &result + } + } + return result +} diff --git a/vendor/github.com/evanw/esbuild/internal/css_ast/css_ast.go b/vendor/github.com/evanw/esbuild/internal/css_ast/css_ast.go new file mode 100644 index 000000000000..b7a173162100 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/css_ast/css_ast.go @@ -0,0 +1,1205 @@ +package 
css_ast + +import ( + "strconv" + "strings" + + "github.com/evanw/esbuild/internal/ast" + "github.com/evanw/esbuild/internal/css_lexer" + "github.com/evanw/esbuild/internal/helpers" + "github.com/evanw/esbuild/internal/logger" +) + +// CSS syntax comes in two layers: a minimal syntax that generally accepts +// anything that looks vaguely like CSS, and a large set of built-in rules +// (the things browsers actually interpret). That way CSS parsers can read +// unknown rules and skip over them without having to stop due to errors. +// +// This AST format is mostly just the minimal syntax. It parses unknown rules +// into a tree with enough information that it can write them back out again. +// There are some additional layers of syntax including selectors and @-rules +// which allow for better pretty-printing and minification. +// +// Most of the AST just references ranges of the original file by keeping the +// original "Token" values around from the lexer. This is a memory-efficient +// representation that helps provide good parsing and printing performance. + +type AST struct { + Symbols []ast.Symbol + CharFreq *ast.CharFreq + ImportRecords []ast.ImportRecord + Rules []Rule + SourceMapComment logger.Span + ApproximateLineCount int32 + LocalSymbols []ast.LocRef + LocalScope map[string]ast.LocRef + GlobalScope map[string]ast.LocRef + Composes map[ast.Ref]*Composes + + // These contain all layer names in the file. It can be used to replace the + // layer-related side effects of importing this file. They are split into two + // groups (those before and after "@import" rules) so that the linker can put + // them in the right places. + LayersPreImport [][]string + LayersPostImport [][]string +} + +type Composes struct { + // Note that each of these can be either local or global. 
Local examples: + // + // .foo { composes: bar } + // .bar { color: red } + // + // Global examples: + // + // .foo { composes: bar from global } + // .foo :global { composes: bar } + // .foo { :global { composes: bar } } + // :global .bar { color: red } + // + Names []ast.LocRef + + // Each of these is local in another file. For example: + // + // .foo { composes: bar from "bar.css" } + // .foo { composes: bar from url(bar.css) } + // + ImportedNames []ImportedComposesName + + // This tracks what CSS properties each class uses so that we can warn when + // "composes" is used incorrectly to compose two classes from separate files + // that declare the same CSS properties. + Properties map[string]logger.Loc +} + +type ImportedComposesName struct { + Alias string + AliasLoc logger.Loc + ImportRecordIndex uint32 +} + +// We create a lot of tokens, so make sure this layout is memory-efficient. +// The layout here isn't optimal because it biases for convenience (e.g. +// "string" could be shorter) but at least the ordering of fields was +// deliberately chosen to minimize size. +type Token struct { + // Contains the child tokens for component values that are simple blocks. + // These are either "(", "{", "[", or function tokens. The closing token is + // implicit and is not stored. + Children *[]Token // 8 bytes + + // This is the raw contents of the token most of the time. However, it + // contains the decoded string contents for "TString" tokens. + Text string // 16 bytes + + // The source location at the start of the token + Loc logger.Loc // 4 bytes + + // URL tokens have an associated import record at the top-level of the AST. + // This index points to that import record. + // + // Symbol tokens have an associated symbol. This index is the "InnerIndex" + // of the "Ref" for this symbol. The "SourceIndex" for the "Ref" is just + // the source index of the file for this AST. 
+ PayloadIndex uint32 // 4 bytes + + // The division between the number and the unit for "TDimension" tokens. + UnitOffset uint16 // 2 bytes + + // This will never be "TWhitespace" because whitespace isn't stored as a + // token directly. Instead it is stored in "HasWhitespaceAfter" on the + // previous token. This is to make it easier to pattern-match against + // tokens when handling CSS rules, since whitespace almost always doesn't + // matter. That way you can pattern match against e.g. "rgb(r, g, b)" and + // not have to handle all possible combinations of embedded whitespace + // tokens. + // + // There is one exception to this: when in verbatim whitespace mode and + // the token list is non-empty and is only whitespace tokens. In that case + // a single whitespace token is emitted. This is because otherwise there + // would be no tokens to attach the whitespace before/after flags to. + Kind css_lexer.T // 1 byte + + // These flags indicate the presence of a "TWhitespace" token before or after + // this token. There should be whitespace printed between two tokens if either + // token indicates that there should be whitespace. Note that whitespace may + // be altered by processing in certain situations (e.g. minification). 
+ Whitespace WhitespaceFlags // 1 byte +} + +type WhitespaceFlags uint8 + +const ( + WhitespaceBefore WhitespaceFlags = 1 << iota + WhitespaceAfter +) + +// This is necessary when comparing tokens between two different files +type CrossFileEqualityCheck struct { + ImportRecordsA []ast.ImportRecord + ImportRecordsB []ast.ImportRecord + Symbols ast.SymbolMap + SourceIndexA uint32 + SourceIndexB uint32 +} + +func (check *CrossFileEqualityCheck) RefsAreEquivalent(a ast.Ref, b ast.Ref) bool { + if a == b { + return true + } + if check == nil || check.Symbols.SymbolsForSource == nil { + return false + } + a = ast.FollowSymbols(check.Symbols, a) + b = ast.FollowSymbols(check.Symbols, b) + if a == b { + return true + } + symbolA := check.Symbols.Get(a) + symbolB := check.Symbols.Get(b) + return symbolA.Kind == ast.SymbolGlobalCSS && symbolB.Kind == ast.SymbolGlobalCSS && symbolA.OriginalName == symbolB.OriginalName +} + +func (a Token) Equal(b Token, check *CrossFileEqualityCheck) bool { + if a.Kind == b.Kind && a.Text == b.Text && a.Whitespace == b.Whitespace { + // URLs should be compared based on the text of the associated import record + // (which is what will actually be printed) instead of the original text + if a.Kind == css_lexer.TURL { + if check == nil { + // If both tokens are in the same file, just compare the index + if a.PayloadIndex != b.PayloadIndex { + return false + } + } else { + // If the tokens come from separate files, compare the import records + // themselves instead of comparing the indices. This can happen when + // the linker runs a "DuplicateRuleRemover" during bundling. This + // doesn't compare the source indices because at this point during + // linking, paths inside the bundle (e.g. due to the "copy" loader) + // should have already been converted into text (e.g. the "unique key" + // string). 
+ if check.ImportRecordsA[a.PayloadIndex].Path.Text != + check.ImportRecordsB[b.PayloadIndex].Path.Text { + return false + } + } + } + + // Symbols should be compared based on the symbol reference instead of the + // original text + if a.Kind == css_lexer.TSymbol { + if check == nil { + // If both tokens are in the same file, just compare the index + if a.PayloadIndex != b.PayloadIndex { + return false + } + } else { + // If the tokens come from separate files, compare the symbols themselves + refA := ast.Ref{SourceIndex: check.SourceIndexA, InnerIndex: a.PayloadIndex} + refB := ast.Ref{SourceIndex: check.SourceIndexB, InnerIndex: b.PayloadIndex} + if !check.RefsAreEquivalent(refA, refB) { + return false + } + } + } + + if a.Children == nil && b.Children == nil { + return true + } + + if a.Children != nil && b.Children != nil && TokensEqual(*a.Children, *b.Children, check) { + return true + } + } + + return false +} + +func TokensEqual(a []Token, b []Token, check *CrossFileEqualityCheck) bool { + if len(a) != len(b) { + return false + } + for i, ai := range a { + if !ai.Equal(b[i], check) { + return false + } + } + return true +} + +func HashTokens(hash uint32, tokens []Token) uint32 { + hash = helpers.HashCombine(hash, uint32(len(tokens))) + + for _, t := range tokens { + hash = helpers.HashCombine(hash, uint32(t.Kind)) + if t.Kind != css_lexer.TURL { + hash = helpers.HashCombineString(hash, t.Text) + } + if t.Children != nil { + hash = HashTokens(hash, *t.Children) + } + } + + return hash +} + +func (a Token) EqualIgnoringWhitespace(b Token) bool { + if a.Kind == b.Kind && a.Text == b.Text && a.PayloadIndex == b.PayloadIndex { + if a.Children == nil && b.Children == nil { + return true + } + + if a.Children != nil && b.Children != nil && TokensEqualIgnoringWhitespace(*a.Children, *b.Children) { + return true + } + } + + return false +} + +func TokensEqualIgnoringWhitespace(a []Token, b []Token) bool { + if len(a) != len(b) { + return false + } + for i, c := range 
a { + if !c.EqualIgnoringWhitespace(b[i]) { + return false + } + } + return true +} + +func TokensAreCommaSeparated(tokens []Token) bool { + if n := len(tokens); (n & 1) != 0 { + for i := 1; i < n; i += 2 { + if tokens[i].Kind != css_lexer.TComma { + return false + } + } + return true + } + return false +} + +type PercentageFlags uint8 + +const ( + AllowPercentageBelow0 PercentageFlags = 1 << iota + AllowPercentageAbove100 + AllowAnyPercentage = AllowPercentageBelow0 | AllowPercentageAbove100 +) + +func (t Token) NumberOrFractionForPercentage(percentReferenceRange float64, flags PercentageFlags) (float64, bool) { + switch t.Kind { + case css_lexer.TNumber: + if f, err := strconv.ParseFloat(t.Text, 64); err == nil { + return f, true + } + + case css_lexer.TPercentage: + if f, err := strconv.ParseFloat(t.PercentageValue(), 64); err == nil { + if (flags&AllowPercentageBelow0) == 0 && f < 0 { + return 0, true + } + if (flags&AllowPercentageAbove100) == 0 && f > 100 { + return percentReferenceRange, true + } + return f / 100 * percentReferenceRange, true + } + } + + return 0, false +} + +func (t Token) ClampedFractionForPercentage() (float64, bool) { + if t.Kind == css_lexer.TPercentage { + if f, err := strconv.ParseFloat(t.PercentageValue(), 64); err == nil { + if f < 0 { + return 0, true + } + if f > 100 { + return 1, true + } + return f / 100, true + } + } + + return 0, false +} + +// https://drafts.csswg.org/css-values-3/#lengths +// For zero lengths the unit identifier is optional +// (i.e. can be syntactically represented as the 0). 
+func (t *Token) TurnLengthIntoNumberIfZero() bool { + if t.Kind == css_lexer.TDimension && t.DimensionValue() == "0" { + t.Kind = css_lexer.TNumber + t.Text = "0" + return true + } + return false +} + +func (t *Token) TurnLengthOrPercentageIntoNumberIfZero() bool { + if t.Kind == css_lexer.TPercentage && t.PercentageValue() == "0" { + t.Kind = css_lexer.TNumber + t.Text = "0" + return true + } + return t.TurnLengthIntoNumberIfZero() +} + +func (t Token) PercentageValue() string { + return t.Text[:len(t.Text)-1] +} + +func (t Token) DimensionValue() string { + return t.Text[:t.UnitOffset] +} + +func (t Token) DimensionUnit() string { + return t.Text[t.UnitOffset:] +} + +func (t Token) DimensionUnitIsSafeLength() bool { + switch strings.ToLower(t.DimensionUnit()) { + // These units can be reasonably expected to be supported everywhere. + // Information used: https://developer.mozilla.org/en-US/docs/Web/CSS/length + case "cm", "em", "in", "mm", "pc", "pt", "px": + return true + } + return false +} + +func (t Token) IsZero() bool { + return t.Kind == css_lexer.TNumber && t.Text == "0" +} + +func (t Token) IsOne() bool { + return t.Kind == css_lexer.TNumber && t.Text == "1" +} + +func (t Token) IsAngle() bool { + if t.Kind == css_lexer.TDimension { + unit := strings.ToLower(t.DimensionUnit()) + return unit == "deg" || unit == "grad" || unit == "rad" || unit == "turn" + } + return false +} + +func CloneTokensWithoutImportRecords(tokensIn []Token) (tokensOut []Token) { + for _, t := range tokensIn { + if t.Children != nil { + children := CloneTokensWithoutImportRecords(*t.Children) + t.Children = &children + } + tokensOut = append(tokensOut, t) + } + return +} + +func CloneTokensWithImportRecords( + tokensIn []Token, importRecordsIn []ast.ImportRecord, + tokensOut []Token, importRecordsOut []ast.ImportRecord, +) ([]Token, []ast.ImportRecord) { + // Preallocate the output array if we can + if tokensOut == nil { + tokensOut = make([]Token, 0, len(tokensIn)) + } + + for _, 
t := range tokensIn { + // Clear the source mapping if this token is being used in another file + t.Loc.Start = 0 + + // If this is a URL token, also clone the import record + if t.Kind == css_lexer.TURL { + importRecordIndex := uint32(len(importRecordsOut)) + importRecordsOut = append(importRecordsOut, importRecordsIn[t.PayloadIndex]) + t.PayloadIndex = importRecordIndex + } + + // Also search for URL tokens in this token's children + if t.Children != nil { + var children []Token + children, importRecordsOut = CloneTokensWithImportRecords(*t.Children, importRecordsIn, children, importRecordsOut) + t.Children = &children + } + + tokensOut = append(tokensOut, t) + } + + return tokensOut, importRecordsOut +} + +type Rule struct { + Data R + Loc logger.Loc +} + +type R interface { + Equal(rule R, check *CrossFileEqualityCheck) bool + Hash() (uint32, bool) +} + +func RulesEqual(a []Rule, b []Rule, check *CrossFileEqualityCheck) bool { + if len(a) != len(b) { + return false + } + for i, ai := range a { + if !ai.Data.Equal(b[i].Data, check) { + return false + } + } + return true +} + +func HashRules(hash uint32, rules []Rule) uint32 { + hash = helpers.HashCombine(hash, uint32(len(rules))) + for _, child := range rules { + if childHash, ok := child.Data.Hash(); ok { + hash = helpers.HashCombine(hash, childHash) + } else { + hash = helpers.HashCombine(hash, 0) + } + } + return hash +} + +type RAtCharset struct { + Encoding string +} + +func (a *RAtCharset) Equal(rule R, check *CrossFileEqualityCheck) bool { + b, ok := rule.(*RAtCharset) + return ok && a.Encoding == b.Encoding +} + +func (r *RAtCharset) Hash() (uint32, bool) { + hash := uint32(1) + hash = helpers.HashCombineString(hash, r.Encoding) + return hash, true +} + +type ImportConditions struct { + // The syntax for "@import" has been extended with optional conditions that + // behave as if the imported file was wrapped in a "@layer", "@supports", + // and/or "@media" rule. 
The possible syntax combinations are as follows: + // + // @import url(...); + // @import url(...) layer; + // @import url(...) layer(layer-name); + // @import url(...) layer(layer-name) supports(supports-condition); + // @import url(...) layer(layer-name) supports(supports-condition) list-of-media-queries; + // @import url(...) layer(layer-name) list-of-media-queries; + // @import url(...) supports(supports-condition); + // @import url(...) supports(supports-condition) list-of-media-queries; + // @import url(...) list-of-media-queries; + // + // From: https://developer.mozilla.org/en-US/docs/Web/CSS/@import#syntax + Media []Token + + // These two fields will only ever have zero or one tokens. However, they are + // implemented as arrays for convenience because most of esbuild's helper + // functions that operate on tokens take arrays instead of individual tokens. + Layers []Token + Supports []Token +} + +func (c *ImportConditions) CloneWithImportRecords(importRecordsIn []ast.ImportRecord, importRecordsOut []ast.ImportRecord) (ImportConditions, []ast.ImportRecord) { + result := ImportConditions{} + result.Layers, importRecordsOut = CloneTokensWithImportRecords(c.Layers, importRecordsIn, nil, importRecordsOut) + result.Supports, importRecordsOut = CloneTokensWithImportRecords(c.Supports, importRecordsIn, nil, importRecordsOut) + result.Media, importRecordsOut = CloneTokensWithImportRecords(c.Media, importRecordsIn, nil, importRecordsOut) + return result, importRecordsOut +} + +type RAtImport struct { + ImportConditions *ImportConditions + ImportRecordIndex uint32 +} + +func (*RAtImport) Equal(rule R, check *CrossFileEqualityCheck) bool { + return false +} + +func (r *RAtImport) Hash() (uint32, bool) { + return 0, false +} + +type RAtKeyframes struct { + AtToken string + Name ast.LocRef + Blocks []KeyframeBlock + CloseBraceLoc logger.Loc +} + +type KeyframeBlock struct { + Selectors []string + Rules []Rule + Loc logger.Loc + CloseBraceLoc logger.Loc +} + +func (a 
*RAtKeyframes) Equal(rule R, check *CrossFileEqualityCheck) bool { + if b, ok := rule.(*RAtKeyframes); ok && strings.EqualFold(a.AtToken, b.AtToken) && check.RefsAreEquivalent(a.Name.Ref, b.Name.Ref) && len(a.Blocks) == len(b.Blocks) { + for i, ai := range a.Blocks { + bi := b.Blocks[i] + if len(ai.Selectors) != len(bi.Selectors) { + return false + } + for j, aj := range ai.Selectors { + if aj != bi.Selectors[j] { + return false + } + } + if !RulesEqual(ai.Rules, bi.Rules, check) { + return false + } + } + return true + } + return false +} + +func (r *RAtKeyframes) Hash() (uint32, bool) { + hash := uint32(2) + hash = helpers.HashCombineString(hash, r.AtToken) + hash = helpers.HashCombine(hash, uint32(len(r.Blocks))) + for _, block := range r.Blocks { + hash = helpers.HashCombine(hash, uint32(len(block.Selectors))) + for _, sel := range block.Selectors { + hash = helpers.HashCombineString(hash, sel) + } + hash = HashRules(hash, block.Rules) + } + return hash, true +} + +type RKnownAt struct { + AtToken string + Prelude []Token + Rules []Rule + CloseBraceLoc logger.Loc +} + +func (a *RKnownAt) Equal(rule R, check *CrossFileEqualityCheck) bool { + b, ok := rule.(*RKnownAt) + return ok && strings.EqualFold(a.AtToken, b.AtToken) && TokensEqual(a.Prelude, b.Prelude, check) && RulesEqual(a.Rules, b.Rules, check) +} + +func (r *RKnownAt) Hash() (uint32, bool) { + hash := uint32(3) + hash = helpers.HashCombineString(hash, r.AtToken) + hash = HashTokens(hash, r.Prelude) + hash = HashRules(hash, r.Rules) + return hash, true +} + +type RUnknownAt struct { + AtToken string + Prelude []Token + Block []Token +} + +func (a *RUnknownAt) Equal(rule R, check *CrossFileEqualityCheck) bool { + b, ok := rule.(*RUnknownAt) + return ok && strings.EqualFold(a.AtToken, b.AtToken) && TokensEqual(a.Prelude, b.Prelude, check) && TokensEqual(a.Block, b.Block, check) +} + +func (r *RUnknownAt) Hash() (uint32, bool) { + hash := uint32(4) + hash = helpers.HashCombineString(hash, r.AtToken) + hash 
= HashTokens(hash, r.Prelude) + hash = HashTokens(hash, r.Block) + return hash, true +} + +type RSelector struct { + Selectors []ComplexSelector + Rules []Rule + CloseBraceLoc logger.Loc +} + +func (a *RSelector) Equal(rule R, check *CrossFileEqualityCheck) bool { + b, ok := rule.(*RSelector) + return ok && ComplexSelectorsEqual(a.Selectors, b.Selectors, check) && RulesEqual(a.Rules, b.Rules, check) +} + +func (r *RSelector) Hash() (uint32, bool) { + hash := uint32(5) + hash = helpers.HashCombine(hash, uint32(len(r.Selectors))) + hash = HashComplexSelectors(hash, r.Selectors) + hash = HashRules(hash, r.Rules) + return hash, true +} + +type RQualified struct { + Prelude []Token + Rules []Rule + CloseBraceLoc logger.Loc +} + +func (a *RQualified) Equal(rule R, check *CrossFileEqualityCheck) bool { + b, ok := rule.(*RQualified) + return ok && TokensEqual(a.Prelude, b.Prelude, check) && RulesEqual(a.Rules, b.Rules, check) +} + +func (r *RQualified) Hash() (uint32, bool) { + hash := uint32(6) + hash = HashTokens(hash, r.Prelude) + hash = HashRules(hash, r.Rules) + return hash, true +} + +type RDeclaration struct { + KeyText string + Value []Token + KeyRange logger.Range + Key D // Compare using this instead of "Key" for speed + Important bool +} + +func (a *RDeclaration) Equal(rule R, check *CrossFileEqualityCheck) bool { + b, ok := rule.(*RDeclaration) + return ok && a.KeyText == b.KeyText && TokensEqual(a.Value, b.Value, check) && a.Important == b.Important +} + +func (r *RDeclaration) Hash() (uint32, bool) { + var hash uint32 + if r.Key == DUnknown { + if r.Important { + hash = uint32(7) + } else { + hash = uint32(8) + } + hash = helpers.HashCombineString(hash, r.KeyText) + } else { + if r.Important { + hash = uint32(9) + } else { + hash = uint32(10) + } + hash = helpers.HashCombine(hash, uint32(r.Key)) + } + hash = HashTokens(hash, r.Value) + return hash, true +} + +type RBadDeclaration struct { + Tokens []Token +} + +func (a *RBadDeclaration) Equal(rule R, check 
*CrossFileEqualityCheck) bool { + b, ok := rule.(*RBadDeclaration) + return ok && TokensEqual(a.Tokens, b.Tokens, check) +} + +func (r *RBadDeclaration) Hash() (uint32, bool) { + hash := uint32(11) + hash = HashTokens(hash, r.Tokens) + return hash, true +} + +type RComment struct { + Text string +} + +func (a *RComment) Equal(rule R, check *CrossFileEqualityCheck) bool { + b, ok := rule.(*RComment) + return ok && a.Text == b.Text +} + +func (r *RComment) Hash() (uint32, bool) { + hash := uint32(12) + hash = helpers.HashCombineString(hash, r.Text) + return hash, true +} + +type RAtLayer struct { + Names [][]string + Rules []Rule + CloseBraceLoc logger.Loc +} + +func (a *RAtLayer) Equal(rule R, check *CrossFileEqualityCheck) bool { + if b, ok := rule.(*RAtLayer); ok && len(a.Names) == len(b.Names) && len(a.Rules) == len(b.Rules) { + for i, ai := range a.Names { + bi := b.Names[i] + if len(ai) != len(bi) { + return false + } + for j, aj := range ai { + if aj != bi[j] { + return false + } + } + } + if !RulesEqual(a.Rules, b.Rules, check) { + return false + } + } + return false +} + +func (r *RAtLayer) Hash() (uint32, bool) { + hash := uint32(13) + hash = helpers.HashCombine(hash, uint32(len(r.Names))) + for _, parts := range r.Names { + hash = helpers.HashCombine(hash, uint32(len(parts))) + for _, part := range parts { + hash = helpers.HashCombineString(hash, part) + } + } + hash = HashRules(hash, r.Rules) + return hash, true +} + +type ComplexSelector struct { + Selectors []CompoundSelector +} + +func ComplexSelectorsEqual(a []ComplexSelector, b []ComplexSelector, check *CrossFileEqualityCheck) bool { + if len(a) != len(b) { + return false + } + for i, ai := range a { + if !ai.Equal(b[i], check) { + return false + } + } + return true +} + +func HashComplexSelectors(hash uint32, selectors []ComplexSelector) uint32 { + for _, complex := range selectors { + hash = helpers.HashCombine(hash, uint32(len(complex.Selectors))) + for _, sel := range complex.Selectors { + if 
sel.TypeSelector != nil { + hash = helpers.HashCombineString(hash, sel.TypeSelector.Name.Text) + } else { + hash = helpers.HashCombine(hash, 0) + } + hash = helpers.HashCombine(hash, uint32(len(sel.SubclassSelectors))) + for _, ss := range sel.SubclassSelectors { + hash = helpers.HashCombine(hash, ss.Data.Hash()) + } + hash = helpers.HashCombine(hash, uint32(sel.Combinator.Byte)) + } + } + return hash +} + +func (s ComplexSelector) CloneWithoutLeadingCombinator() ComplexSelector { + clone := ComplexSelector{Selectors: make([]CompoundSelector, len(s.Selectors))} + for i, sel := range s.Selectors { + if i == 0 { + sel.Combinator = Combinator{} + } + clone.Selectors[i] = sel.Clone() + } + return clone +} + +func (sel ComplexSelector) IsRelative() bool { + if sel.Selectors[0].Combinator.Byte == 0 { + for _, inner := range sel.Selectors { + if inner.HasNestingSelector() { + return false + } + for _, ss := range inner.SubclassSelectors { + if pseudo, ok := ss.Data.(*SSPseudoClassWithSelectorList); ok { + for _, nested := range pseudo.Selectors { + if !nested.IsRelative() { + return false + } + } + } + } + } + } + return true +} + +func tokensContainAmpersandRecursive(tokens []Token) bool { + for _, t := range tokens { + if t.Kind == css_lexer.TDelimAmpersand { + return true + } + if children := t.Children; children != nil && tokensContainAmpersandRecursive(*children) { + return true + } + } + return false +} + +func (sel ComplexSelector) UsesPseudoElement() bool { + for _, sel := range sel.Selectors { + for _, ss := range sel.SubclassSelectors { + if class, ok := ss.Data.(*SSPseudoClass); ok { + if class.IsElement { + return true + } + + // https://www.w3.org/TR/selectors-4/#single-colon-pseudos + // The four Level 2 pseudo-elements (::before, ::after, ::first-line, + // and ::first-letter) may, for legacy reasons, be represented using + // the grammar, with only a single ":" + // character at their start. 
+ switch class.Name { + case "before", "after", "first-line", "first-letter": + return true + } + } + } + } + return false +} + +func (a ComplexSelector) Equal(b ComplexSelector, check *CrossFileEqualityCheck) bool { + if len(a.Selectors) != len(b.Selectors) { + return false + } + + for i, ai := range a.Selectors { + bi := b.Selectors[i] + if ai.HasNestingSelector() != bi.HasNestingSelector() || ai.Combinator.Byte != bi.Combinator.Byte { + return false + } + + if ats, bts := ai.TypeSelector, bi.TypeSelector; (ats == nil) != (bts == nil) { + return false + } else if ats != nil && bts != nil && !ats.Equal(*bts) { + return false + } + + if len(ai.SubclassSelectors) != len(bi.SubclassSelectors) { + return false + } + for j, aj := range ai.SubclassSelectors { + if !aj.Data.Equal(bi.SubclassSelectors[j].Data, check) { + return false + } + } + } + + return true +} + +type Combinator struct { + Loc logger.Loc + Byte uint8 // Optional, may be 0 for no combinator +} + +type CompoundSelector struct { + TypeSelector *NamespacedName + SubclassSelectors []SubclassSelector + NestingSelectorLoc ast.Index32 // "&" + Combinator Combinator // Optional, may be 0 + + // If this is true, this is a "&" that was generated by a bare ":local" or ":global" + WasEmptyFromLocalOrGlobal bool +} + +func (sel *CompoundSelector) HasNestingSelector() bool { + return sel.NestingSelectorLoc.IsValid() +} + +func (sel CompoundSelector) IsSingleAmpersand() bool { + return sel.HasNestingSelector() && sel.Combinator.Byte == 0 && sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0 +} + +func (sel CompoundSelector) IsInvalidBecauseEmpty() bool { + return !sel.HasNestingSelector() && sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0 +} + +func (sel CompoundSelector) Range() (r logger.Range) { + if sel.Combinator.Byte != 0 { + r = logger.Range{Loc: sel.Combinator.Loc, Len: 1} + } + if sel.TypeSelector != nil { + r.ExpandBy(sel.TypeSelector.Range()) + } + if sel.NestingSelectorLoc.IsValid() { + 
r.ExpandBy(logger.Range{Loc: logger.Loc{Start: int32(sel.NestingSelectorLoc.GetIndex())}, Len: 1}) + } + if len(sel.SubclassSelectors) > 0 { + for _, ss := range sel.SubclassSelectors { + r.ExpandBy(ss.Range) + } + } + return +} + +func (sel CompoundSelector) Clone() CompoundSelector { + clone := sel + + if sel.TypeSelector != nil { + t := sel.TypeSelector.Clone() + clone.TypeSelector = &t + } + + if sel.SubclassSelectors != nil { + selectors := make([]SubclassSelector, len(sel.SubclassSelectors)) + for i, ss := range sel.SubclassSelectors { + ss.Data = ss.Data.Clone() + selectors[i] = ss + } + clone.SubclassSelectors = selectors + } + + return clone +} + +type NameToken struct { + Text string + Range logger.Range + Kind css_lexer.T +} + +func (a NameToken) Equal(b NameToken) bool { + return a.Text == b.Text && a.Kind == b.Kind +} + +type NamespacedName struct { + // If present, this is an identifier or "*" and is followed by a "|" character + NamespacePrefix *NameToken + + // This is an identifier or "*" + Name NameToken +} + +func (n NamespacedName) Range() logger.Range { + if n.NamespacePrefix != nil { + loc := n.NamespacePrefix.Range.Loc + return logger.Range{Loc: loc, Len: n.Name.Range.End() - loc.Start} + } + return n.Name.Range +} + +func (n NamespacedName) Clone() NamespacedName { + clone := n + if n.NamespacePrefix != nil { + prefix := *n.NamespacePrefix + clone.NamespacePrefix = &prefix + } + return clone +} + +func (a NamespacedName) Equal(b NamespacedName) bool { + return a.Name.Equal(b.Name) && (a.NamespacePrefix == nil) == (b.NamespacePrefix == nil) && + (a.NamespacePrefix == nil || b.NamespacePrefix == nil || a.NamespacePrefix.Equal(b.Name)) +} + +type SubclassSelector struct { + Data SS + Range logger.Range +} + +type SS interface { + Equal(ss SS, check *CrossFileEqualityCheck) bool + Hash() uint32 + Clone() SS +} + +type SSHash struct { + Name ast.LocRef +} + +func (a *SSHash) Equal(ss SS, check *CrossFileEqualityCheck) bool { + b, ok := 
ss.(*SSHash) + return ok && check.RefsAreEquivalent(a.Name.Ref, b.Name.Ref) +} + +func (ss *SSHash) Hash() uint32 { + hash := uint32(1) + return hash +} + +func (ss *SSHash) Clone() SS { + clone := *ss + return &clone +} + +type SSClass struct { + Name ast.LocRef +} + +func (a *SSClass) Equal(ss SS, check *CrossFileEqualityCheck) bool { + b, ok := ss.(*SSClass) + return ok && check.RefsAreEquivalent(a.Name.Ref, b.Name.Ref) +} + +func (ss *SSClass) Hash() uint32 { + hash := uint32(2) + return hash +} + +func (ss *SSClass) Clone() SS { + clone := *ss + return &clone +} + +type SSAttribute struct { + MatcherOp string // Either "" or one of: "=" "~=" "|=" "^=" "$=" "*=" + MatcherValue string + NamespacedName NamespacedName + MatcherModifier byte // Either 0 or one of: 'i' 'I' 's' 'S' +} + +func (a *SSAttribute) Equal(ss SS, check *CrossFileEqualityCheck) bool { + b, ok := ss.(*SSAttribute) + return ok && a.NamespacedName.Equal(b.NamespacedName) && a.MatcherOp == b.MatcherOp && + a.MatcherValue == b.MatcherValue && a.MatcherModifier == b.MatcherModifier +} + +func (ss *SSAttribute) Hash() uint32 { + hash := uint32(3) + hash = helpers.HashCombineString(hash, ss.NamespacedName.Name.Text) + hash = helpers.HashCombineString(hash, ss.MatcherOp) + hash = helpers.HashCombineString(hash, ss.MatcherValue) + return hash +} + +func (ss *SSAttribute) Clone() SS { + clone := *ss + clone.NamespacedName = ss.NamespacedName.Clone() + return &clone +} + +type SSPseudoClass struct { + Name string + Args []Token + IsElement bool // If true, this is prefixed by "::" instead of ":" +} + +func (a *SSPseudoClass) Equal(ss SS, check *CrossFileEqualityCheck) bool { + b, ok := ss.(*SSPseudoClass) + return ok && a.Name == b.Name && TokensEqual(a.Args, b.Args, check) && a.IsElement == b.IsElement +} + +func (ss *SSPseudoClass) Hash() uint32 { + hash := uint32(4) + hash = helpers.HashCombineString(hash, ss.Name) + hash = HashTokens(hash, ss.Args) + return hash +} + +func (ss *SSPseudoClass) Clone() 
SS { + clone := *ss + if ss.Args != nil { + ss.Args = CloneTokensWithoutImportRecords(ss.Args) + } + return &clone +} + +type PseudoClassKind uint8 + +const ( + PseudoClassGlobal PseudoClassKind = iota + PseudoClassHas + PseudoClassIs + PseudoClassLocal + PseudoClassNot + PseudoClassNthChild + PseudoClassNthLastChild + PseudoClassNthLastOfType + PseudoClassNthOfType + PseudoClassWhere +) + +func (kind PseudoClassKind) HasNthIndex() bool { + return kind >= PseudoClassNthChild && kind <= PseudoClassNthOfType +} + +func (kind PseudoClassKind) String() string { + switch kind { + case PseudoClassGlobal: + return "global" + case PseudoClassHas: + return "has" + case PseudoClassIs: + return "is" + case PseudoClassLocal: + return "local" + case PseudoClassNot: + return "not" + case PseudoClassNthChild: + return "nth-child" + case PseudoClassNthLastChild: + return "nth-last-child" + case PseudoClassNthLastOfType: + return "nth-last-of-type" + case PseudoClassNthOfType: + return "nth-of-type" + case PseudoClassWhere: + return "where" + default: + panic("Internal error") + } +} + +// This is the "An+B" syntax +type NthIndex struct { + A string + B string // May be "even" or "odd" +} + +func (index *NthIndex) Minify() { + // "even" => "2n" + if index.B == "even" { + index.A = "2" + index.B = "" + return + } + + // "2n+1" => "odd" + if index.A == "2" && index.B == "1" { + index.A = "" + index.B = "odd" + return + } + + // "0n+1" => "1" + if index.A == "0" { + index.A = "" + if index.B == "" { + // "0n" => "0" + index.B = "0" + } + return + } + + // "1n+0" => "1n" + if index.B == "0" && index.A != "" { + index.B = "" + } +} + +// See https://drafts.csswg.org/selectors/#grouping +type SSPseudoClassWithSelectorList struct { + Selectors []ComplexSelector + Index NthIndex + Kind PseudoClassKind +} + +func (a *SSPseudoClassWithSelectorList) Equal(ss SS, check *CrossFileEqualityCheck) bool { + b, ok := ss.(*SSPseudoClassWithSelectorList) + return ok && a.Kind == b.Kind && a.Index == 
b.Index && ComplexSelectorsEqual(a.Selectors, b.Selectors, check) +} + +func (ss *SSPseudoClassWithSelectorList) Hash() uint32 { + hash := uint32(5) + hash = helpers.HashCombine(hash, uint32(ss.Kind)) + hash = helpers.HashCombineString(hash, ss.Index.A) + hash = helpers.HashCombineString(hash, ss.Index.B) + hash = HashComplexSelectors(hash, ss.Selectors) + return hash +} + +func (ss *SSPseudoClassWithSelectorList) Clone() SS { + clone := *ss + clone.Selectors = make([]ComplexSelector, len(ss.Selectors)) + for i, sel := range ss.Selectors { + clone.Selectors[i] = sel.CloneWithoutLeadingCombinator() + } + return &clone +} diff --git a/vendor/github.com/evanw/esbuild/internal/css_ast/css_decl_table.go b/vendor/github.com/evanw/esbuild/internal/css_ast/css_decl_table.go new file mode 100644 index 000000000000..231eac47231d --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/css_ast/css_decl_table.go @@ -0,0 +1,698 @@ +package css_ast + +import ( + "strings" + "sync" + + "github.com/evanw/esbuild/internal/helpers" +) + +type D uint16 + +const ( + DUnknown D = iota + DAlignContent + DAlignItems + DAlignSelf + DAlignmentBaseline + DAll + DAnimation + DAnimationDelay + DAnimationDirection + DAnimationDuration + DAnimationFillMode + DAnimationIterationCount + DAnimationName + DAnimationPlayState + DAnimationTimingFunction + DAppearance + DBackdropFilter + DBackfaceVisibility + DBackground + DBackgroundAttachment + DBackgroundClip + DBackgroundColor + DBackgroundImage + DBackgroundOrigin + DBackgroundPosition + DBackgroundPositionX + DBackgroundPositionY + DBackgroundRepeat + DBackgroundSize + DBaselineShift + DBlockSize + DBorder + DBorderBlockEnd + DBorderBlockEndColor + DBorderBlockEndStyle + DBorderBlockEndWidth + DBorderBlockStart + DBorderBlockStartColor + DBorderBlockStartStyle + DBorderBlockStartWidth + DBorderBottom + DBorderBottomColor + DBorderBottomLeftRadius + DBorderBottomRightRadius + DBorderBottomStyle + DBorderBottomWidth + DBorderCollapse + 
DBorderColor + DBorderImage + DBorderImageOutset + DBorderImageRepeat + DBorderImageSlice + DBorderImageSource + DBorderImageWidth + DBorderInlineEnd + DBorderInlineEndColor + DBorderInlineEndStyle + DBorderInlineEndWidth + DBorderInlineStart + DBorderInlineStartColor + DBorderInlineStartStyle + DBorderInlineStartWidth + DBorderLeft + DBorderLeftColor + DBorderLeftStyle + DBorderLeftWidth + DBorderRadius + DBorderRight + DBorderRightColor + DBorderRightStyle + DBorderRightWidth + DBorderSpacing + DBorderStyle + DBorderTop + DBorderTopColor + DBorderTopLeftRadius + DBorderTopRightRadius + DBorderTopStyle + DBorderTopWidth + DBorderWidth + DBottom + DBoxDecorationBreak + DBoxShadow + DBoxSizing + DBreakAfter + DBreakBefore + DBreakInside + DCaptionSide + DCaretColor + DClear + DClip + DClipPath + DClipRule + DColor + DColorInterpolation + DColorInterpolationFilters + DColumnCount + DColumnFill + DColumnGap + DColumnRule + DColumnRuleColor + DColumnRuleStyle + DColumnRuleWidth + DColumnSpan + DColumnWidth + DColumns + DComposes + DContainer + DContainerName + DContainerType + DContent + DCounterIncrement + DCounterReset + DCssFloat + DCssText + DCursor + DDirection + DDisplay + DDominantBaseline + DEmptyCells + DFill + DFillOpacity + DFillRule + DFilter + DFlex + DFlexBasis + DFlexDirection + DFlexFlow + DFlexGrow + DFlexShrink + DFlexWrap + DFloat + DFloodColor + DFloodOpacity + DFont + DFontFamily + DFontFeatureSettings + DFontKerning + DFontSize + DFontSizeAdjust + DFontStretch + DFontStyle + DFontSynthesis + DFontVariant + DFontVariantCaps + DFontVariantEastAsian + DFontVariantLigatures + DFontVariantNumeric + DFontVariantPosition + DFontWeight + DGap + DGlyphOrientationVertical + DGrid + DGridArea + DGridAutoColumns + DGridAutoFlow + DGridAutoRows + DGridColumn + DGridColumnEnd + DGridColumnGap + DGridColumnStart + DGridGap + DGridRow + DGridRowEnd + DGridRowGap + DGridRowStart + DGridTemplate + DGridTemplateAreas + DGridTemplateColumns + DGridTemplateRows + 
DHeight + DHyphens + DImageOrientation + DImageRendering + DInitialLetter + DInlineSize + DInset + DJustifyContent + DJustifyItems + DJustifySelf + DLeft + DLetterSpacing + DLightingColor + DLineBreak + DLineHeight + DListStyle + DListStyleImage + DListStylePosition + DListStyleType + DMargin + DMarginBlockEnd + DMarginBlockStart + DMarginBottom + DMarginInlineEnd + DMarginInlineStart + DMarginLeft + DMarginRight + DMarginTop + DMarker + DMarkerEnd + DMarkerMid + DMarkerStart + DMask + DMaskComposite + DMaskImage + DMaskOrigin + DMaskPosition + DMaskRepeat + DMaskSize + DMaskType + DMaxBlockSize + DMaxHeight + DMaxInlineSize + DMaxWidth + DMinBlockSize + DMinHeight + DMinInlineSize + DMinWidth + DObjectFit + DObjectPosition + DOpacity + DOrder + DOrphans + DOutline + DOutlineColor + DOutlineOffset + DOutlineStyle + DOutlineWidth + DOverflow + DOverflowAnchor + DOverflowWrap + DOverflowX + DOverflowY + DOverscrollBehavior + DOverscrollBehaviorBlock + DOverscrollBehaviorInline + DOverscrollBehaviorX + DOverscrollBehaviorY + DPadding + DPaddingBlockEnd + DPaddingBlockStart + DPaddingBottom + DPaddingInlineEnd + DPaddingInlineStart + DPaddingLeft + DPaddingRight + DPaddingTop + DPageBreakAfter + DPageBreakBefore + DPageBreakInside + DPaintOrder + DPerspective + DPerspectiveOrigin + DPlaceContent + DPlaceItems + DPlaceSelf + DPointerEvents + DPosition + DPrintColorAdjust + DQuotes + DResize + DRight + DRotate + DRowGap + DRubyAlign + DRubyPosition + DScale + DScrollBehavior + DShapeRendering + DStopColor + DStopOpacity + DStroke + DStrokeDasharray + DStrokeDashoffset + DStrokeLinecap + DStrokeLinejoin + DStrokeMiterlimit + DStrokeOpacity + DStrokeWidth + DTabSize + DTableLayout + DTextAlign + DTextAlignLast + DTextAnchor + DTextCombineUpright + DTextDecoration + DTextDecorationColor + DTextDecorationLine + DTextDecorationSkip + DTextDecorationStyle + DTextEmphasis + DTextEmphasisColor + DTextEmphasisPosition + DTextEmphasisStyle + DTextIndent + DTextJustify + 
DTextOrientation + DTextOverflow + DTextRendering + DTextShadow + DTextSizeAdjust + DTextTransform + DTextUnderlinePosition + DTop + DTouchAction + DTransform + DTransformBox + DTransformOrigin + DTransformStyle + DTransition + DTransitionDelay + DTransitionDuration + DTransitionProperty + DTransitionTimingFunction + DTranslate + DUnicodeBidi + DUserSelect + DVerticalAlign + DVisibility + DWhiteSpace + DWidows + DWidth + DWillChange + DWordBreak + DWordSpacing + DWordWrap + DWritingMode + DZIndex + DZoom +) + +var KnownDeclarations = map[string]D{ + "align-content": DAlignContent, + "align-items": DAlignItems, + "align-self": DAlignSelf, + "alignment-baseline": DAlignmentBaseline, + "all": DAll, + "animation": DAnimation, + "animation-delay": DAnimationDelay, + "animation-direction": DAnimationDirection, + "animation-duration": DAnimationDuration, + "animation-fill-mode": DAnimationFillMode, + "animation-iteration-count": DAnimationIterationCount, + "animation-name": DAnimationName, + "animation-play-state": DAnimationPlayState, + "animation-timing-function": DAnimationTimingFunction, + "appearance": DAppearance, + "backdrop-filter": DBackdropFilter, + "backface-visibility": DBackfaceVisibility, + "background": DBackground, + "background-attachment": DBackgroundAttachment, + "background-clip": DBackgroundClip, + "background-color": DBackgroundColor, + "background-image": DBackgroundImage, + "background-origin": DBackgroundOrigin, + "background-position": DBackgroundPosition, + "background-position-x": DBackgroundPositionX, + "background-position-y": DBackgroundPositionY, + "background-repeat": DBackgroundRepeat, + "background-size": DBackgroundSize, + "baseline-shift": DBaselineShift, + "block-size": DBlockSize, + "border": DBorder, + "border-block-end": DBorderBlockEnd, + "border-block-end-color": DBorderBlockEndColor, + "border-block-end-style": DBorderBlockEndStyle, + "border-block-end-width": DBorderBlockEndWidth, + "border-block-start": DBorderBlockStart, + 
"border-block-start-color": DBorderBlockStartColor, + "border-block-start-style": DBorderBlockStartStyle, + "border-block-start-width": DBorderBlockStartWidth, + "border-bottom": DBorderBottom, + "border-bottom-color": DBorderBottomColor, + "border-bottom-left-radius": DBorderBottomLeftRadius, + "border-bottom-right-radius": DBorderBottomRightRadius, + "border-bottom-style": DBorderBottomStyle, + "border-bottom-width": DBorderBottomWidth, + "border-collapse": DBorderCollapse, + "border-color": DBorderColor, + "border-image": DBorderImage, + "border-image-outset": DBorderImageOutset, + "border-image-repeat": DBorderImageRepeat, + "border-image-slice": DBorderImageSlice, + "border-image-source": DBorderImageSource, + "border-image-width": DBorderImageWidth, + "border-inline-end": DBorderInlineEnd, + "border-inline-end-color": DBorderInlineEndColor, + "border-inline-end-style": DBorderInlineEndStyle, + "border-inline-end-width": DBorderInlineEndWidth, + "border-inline-start": DBorderInlineStart, + "border-inline-start-color": DBorderInlineStartColor, + "border-inline-start-style": DBorderInlineStartStyle, + "border-inline-start-width": DBorderInlineStartWidth, + "border-left": DBorderLeft, + "border-left-color": DBorderLeftColor, + "border-left-style": DBorderLeftStyle, + "border-left-width": DBorderLeftWidth, + "border-radius": DBorderRadius, + "border-right": DBorderRight, + "border-right-color": DBorderRightColor, + "border-right-style": DBorderRightStyle, + "border-right-width": DBorderRightWidth, + "border-spacing": DBorderSpacing, + "border-style": DBorderStyle, + "border-top": DBorderTop, + "border-top-color": DBorderTopColor, + "border-top-left-radius": DBorderTopLeftRadius, + "border-top-right-radius": DBorderTopRightRadius, + "border-top-style": DBorderTopStyle, + "border-top-width": DBorderTopWidth, + "border-width": DBorderWidth, + "bottom": DBottom, + "box-decoration-break": DBoxDecorationBreak, + "box-shadow": DBoxShadow, + "box-sizing": DBoxSizing, + 
"break-after": DBreakAfter, + "break-before": DBreakBefore, + "break-inside": DBreakInside, + "caption-side": DCaptionSide, + "caret-color": DCaretColor, + "clear": DClear, + "clip": DClip, + "clip-path": DClipPath, + "clip-rule": DClipRule, + "color": DColor, + "color-interpolation": DColorInterpolation, + "color-interpolation-filters": DColorInterpolationFilters, + "column-count": DColumnCount, + "column-fill": DColumnFill, + "column-gap": DColumnGap, + "column-rule": DColumnRule, + "column-rule-color": DColumnRuleColor, + "column-rule-style": DColumnRuleStyle, + "column-rule-width": DColumnRuleWidth, + "column-span": DColumnSpan, + "column-width": DColumnWidth, + "columns": DColumns, + "composes": DComposes, + "container": DContainer, + "container-name": DContainerName, + "container-type": DContainerType, + "content": DContent, + "counter-increment": DCounterIncrement, + "counter-reset": DCounterReset, + "css-float": DCssFloat, + "css-text": DCssText, + "cursor": DCursor, + "direction": DDirection, + "display": DDisplay, + "dominant-baseline": DDominantBaseline, + "empty-cells": DEmptyCells, + "fill": DFill, + "fill-opacity": DFillOpacity, + "fill-rule": DFillRule, + "filter": DFilter, + "flex": DFlex, + "flex-basis": DFlexBasis, + "flex-direction": DFlexDirection, + "flex-flow": DFlexFlow, + "flex-grow": DFlexGrow, + "flex-shrink": DFlexShrink, + "flex-wrap": DFlexWrap, + "float": DFloat, + "flood-color": DFloodColor, + "flood-opacity": DFloodOpacity, + "font": DFont, + "font-family": DFontFamily, + "font-feature-settings": DFontFeatureSettings, + "font-kerning": DFontKerning, + "font-size": DFontSize, + "font-size-adjust": DFontSizeAdjust, + "font-stretch": DFontStretch, + "font-style": DFontStyle, + "font-synthesis": DFontSynthesis, + "font-variant": DFontVariant, + "font-variant-caps": DFontVariantCaps, + "font-variant-east-asian": DFontVariantEastAsian, + "font-variant-ligatures": DFontVariantLigatures, + "font-variant-numeric": DFontVariantNumeric, + 
"font-variant-position": DFontVariantPosition, + "font-weight": DFontWeight, + "gap": DGap, + "glyph-orientation-vertical": DGlyphOrientationVertical, + "grid": DGrid, + "grid-area": DGridArea, + "grid-auto-columns": DGridAutoColumns, + "grid-auto-flow": DGridAutoFlow, + "grid-auto-rows": DGridAutoRows, + "grid-column": DGridColumn, + "grid-column-end": DGridColumnEnd, + "grid-column-gap": DGridColumnGap, + "grid-column-start": DGridColumnStart, + "grid-gap": DGridGap, + "grid-row": DGridRow, + "grid-row-end": DGridRowEnd, + "grid-row-gap": DGridRowGap, + "grid-row-start": DGridRowStart, + "grid-template": DGridTemplate, + "grid-template-areas": DGridTemplateAreas, + "grid-template-columns": DGridTemplateColumns, + "grid-template-rows": DGridTemplateRows, + "height": DHeight, + "hyphens": DHyphens, + "image-orientation": DImageOrientation, + "image-rendering": DImageRendering, + "initial-letter": DInitialLetter, + "inline-size": DInlineSize, + "inset": DInset, + "justify-content": DJustifyContent, + "justify-items": DJustifyItems, + "justify-self": DJustifySelf, + "left": DLeft, + "letter-spacing": DLetterSpacing, + "lighting-color": DLightingColor, + "line-break": DLineBreak, + "line-height": DLineHeight, + "list-style": DListStyle, + "list-style-image": DListStyleImage, + "list-style-position": DListStylePosition, + "list-style-type": DListStyleType, + "margin": DMargin, + "margin-block-end": DMarginBlockEnd, + "margin-block-start": DMarginBlockStart, + "margin-bottom": DMarginBottom, + "margin-inline-end": DMarginInlineEnd, + "margin-inline-start": DMarginInlineStart, + "margin-left": DMarginLeft, + "margin-right": DMarginRight, + "margin-top": DMarginTop, + "marker": DMarker, + "marker-end": DMarkerEnd, + "marker-mid": DMarkerMid, + "marker-start": DMarkerStart, + "mask": DMask, + "mask-composite": DMaskComposite, + "mask-image": DMaskImage, + "mask-origin": DMaskOrigin, + "mask-position": DMaskPosition, + "mask-repeat": DMaskRepeat, + "mask-size": DMaskSize, + 
"mask-type": DMaskType, + "max-block-size": DMaxBlockSize, + "max-height": DMaxHeight, + "max-inline-size": DMaxInlineSize, + "max-width": DMaxWidth, + "min-block-size": DMinBlockSize, + "min-height": DMinHeight, + "min-inline-size": DMinInlineSize, + "min-width": DMinWidth, + "object-fit": DObjectFit, + "object-position": DObjectPosition, + "opacity": DOpacity, + "order": DOrder, + "orphans": DOrphans, + "outline": DOutline, + "outline-color": DOutlineColor, + "outline-offset": DOutlineOffset, + "outline-style": DOutlineStyle, + "outline-width": DOutlineWidth, + "overflow": DOverflow, + "overflow-anchor": DOverflowAnchor, + "overflow-wrap": DOverflowWrap, + "overflow-x": DOverflowX, + "overflow-y": DOverflowY, + "overscroll-behavior": DOverscrollBehavior, + "overscroll-behavior-block": DOverscrollBehaviorBlock, + "overscroll-behavior-inline": DOverscrollBehaviorInline, + "overscroll-behavior-x": DOverscrollBehaviorX, + "overscroll-behavior-y": DOverscrollBehaviorY, + "padding": DPadding, + "padding-block-end": DPaddingBlockEnd, + "padding-block-start": DPaddingBlockStart, + "padding-bottom": DPaddingBottom, + "padding-inline-end": DPaddingInlineEnd, + "padding-inline-start": DPaddingInlineStart, + "padding-left": DPaddingLeft, + "padding-right": DPaddingRight, + "padding-top": DPaddingTop, + "page-break-after": DPageBreakAfter, + "page-break-before": DPageBreakBefore, + "page-break-inside": DPageBreakInside, + "paint-order": DPaintOrder, + "perspective": DPerspective, + "perspective-origin": DPerspectiveOrigin, + "place-content": DPlaceContent, + "place-items": DPlaceItems, + "place-self": DPlaceSelf, + "pointer-events": DPointerEvents, + "position": DPosition, + "print-color-adjust": DPrintColorAdjust, + "quotes": DQuotes, + "resize": DResize, + "right": DRight, + "rotate": DRotate, + "row-gap": DRowGap, + "ruby-align": DRubyAlign, + "ruby-position": DRubyPosition, + "scale": DScale, + "scroll-behavior": DScrollBehavior, + "shape-rendering": DShapeRendering, + 
"stop-color": DStopColor, + "stop-opacity": DStopOpacity, + "stroke": DStroke, + "stroke-dasharray": DStrokeDasharray, + "stroke-dashoffset": DStrokeDashoffset, + "stroke-linecap": DStrokeLinecap, + "stroke-linejoin": DStrokeLinejoin, + "stroke-miterlimit": DStrokeMiterlimit, + "stroke-opacity": DStrokeOpacity, + "stroke-width": DStrokeWidth, + "tab-size": DTabSize, + "table-layout": DTableLayout, + "text-align": DTextAlign, + "text-align-last": DTextAlignLast, + "text-anchor": DTextAnchor, + "text-combine-upright": DTextCombineUpright, + "text-decoration": DTextDecoration, + "text-decoration-color": DTextDecorationColor, + "text-decoration-line": DTextDecorationLine, + "text-decoration-skip": DTextDecorationSkip, + "text-decoration-style": DTextDecorationStyle, + "text-emphasis": DTextEmphasis, + "text-emphasis-color": DTextEmphasisColor, + "text-emphasis-position": DTextEmphasisPosition, + "text-emphasis-style": DTextEmphasisStyle, + "text-indent": DTextIndent, + "text-justify": DTextJustify, + "text-orientation": DTextOrientation, + "text-overflow": DTextOverflow, + "text-rendering": DTextRendering, + "text-shadow": DTextShadow, + "text-size-adjust": DTextSizeAdjust, + "text-transform": DTextTransform, + "text-underline-position": DTextUnderlinePosition, + "top": DTop, + "touch-action": DTouchAction, + "transform": DTransform, + "transform-box": DTransformBox, + "transform-origin": DTransformOrigin, + "transform-style": DTransformStyle, + "transition": DTransition, + "transition-delay": DTransitionDelay, + "transition-duration": DTransitionDuration, + "transition-property": DTransitionProperty, + "transition-timing-function": DTransitionTimingFunction, + "translate": DTranslate, + "unicode-bidi": DUnicodeBidi, + "user-select": DUserSelect, + "vertical-align": DVerticalAlign, + "visibility": DVisibility, + "white-space": DWhiteSpace, + "widows": DWidows, + "width": DWidth, + "will-change": DWillChange, + "word-break": DWordBreak, + "word-spacing": DWordSpacing, + 
"word-wrap": DWordWrap, + "writing-mode": DWritingMode, + "z-index": DZIndex, + "zoom": DZoom, +} + +var typoDetector *helpers.TypoDetector +var typoDetectorMutex sync.Mutex + +func MaybeCorrectDeclarationTypo(text string) (string, bool) { + // Ignore CSS variables, which should not be corrected to CSS properties + if strings.HasPrefix(text, "--") { + return "", false + } + + typoDetectorMutex.Lock() + defer typoDetectorMutex.Unlock() + + // Lazily-initialize the typo detector for speed when it's not needed + if typoDetector == nil { + valid := make([]string, 0, len(KnownDeclarations)) + for key := range KnownDeclarations { + valid = append(valid, key) + } + detector := helpers.MakeTypoDetector(valid) + typoDetector = &detector + } + + return typoDetector.MaybeCorrectTypo(text) +} diff --git a/vendor/github.com/evanw/esbuild/internal/css_lexer/css_lexer.go b/vendor/github.com/evanw/esbuild/internal/css_lexer/css_lexer.go new file mode 100644 index 000000000000..9e8c0883abc5 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/css_lexer/css_lexer.go @@ -0,0 +1,1081 @@ +package css_lexer + +import ( + "strings" + "unicode/utf8" + + "github.com/evanw/esbuild/internal/logger" +) + +// The lexer converts a source file to a stream of tokens. Unlike esbuild's +// JavaScript lexer, this CSS lexer runs to completion before the CSS parser +// begins, resulting in a single array of all tokens in the file. 
+ +type T uint8 + +const eof = -1 + +const ( + TEndOfFile T = iota + + TAtKeyword + TUnterminatedString + TBadURL + TCDC // "-->" + TCDO // "\"", + "\"' + lexer.step() + switch lexer.codePoint { + case '=': + lexer.step() + lexer.Token = TMinusEquals + case '-': + lexer.step() + + // Handle legacy HTML-style comments + if lexer.codePoint == '>' && lexer.HasNewlineBefore { + lexer.step() + lexer.LegacyHTMLCommentRange = lexer.Range() + lexer.log.AddID(logger.MsgID_JS_HTMLCommentInJS, logger.Warning, &lexer.tracker, lexer.Range(), + "Treating \"-->\" as the start of a legacy HTML single-line comment") + singleLineHTMLCloseComment: + for { + switch lexer.codePoint { + case '\r', '\n', '\u2028', '\u2029': + break singleLineHTMLCloseComment + + case -1: // This indicates the end of the file + break singleLineHTMLCloseComment + } + lexer.step() + } + continue + } + + lexer.Token = TMinusMinus + default: + lexer.Token = TMinus + if lexer.json == JSON && lexer.codePoint != '.' && (lexer.codePoint < '0' || lexer.codePoint > '9') { + lexer.Unexpected() + } + } + + case '*': + // '*' or '*=' or '**' or '**=' + lexer.step() + switch lexer.codePoint { + case '=': + lexer.step() + lexer.Token = TAsteriskEquals + + case '*': + lexer.step() + switch lexer.codePoint { + case '=': + lexer.step() + lexer.Token = TAsteriskAsteriskEquals + + default: + lexer.Token = TAsteriskAsterisk + } + + default: + lexer.Token = TAsterisk + } + + case '/': + // '/' or '/=' or '//' or '/* ... 
*/' + lexer.step() + if lexer.forGlobalName { + lexer.Token = TSlash + break + } + switch lexer.codePoint { + case '=': + lexer.step() + lexer.Token = TSlashEquals + + case '/': + singleLineComment: + for { + lexer.step() + switch lexer.codePoint { + case '\r', '\n', '\u2028', '\u2029': + break singleLineComment + + case -1: // This indicates the end of the file + break singleLineComment + } + } + if lexer.json == JSON { + lexer.addRangeError(lexer.Range(), "JSON does not support comments") + } + lexer.scanCommentText() + continue + + case '*': + lexer.step() + startRange := lexer.Range() + multiLineComment: + for { + switch lexer.codePoint { + case '*': + lexer.step() + if lexer.codePoint == '/' { + lexer.step() + break multiLineComment + } + + case '\r', '\n', '\u2028', '\u2029': + lexer.step() + lexer.HasNewlineBefore = true + + case -1: // This indicates the end of the file + lexer.start = lexer.end + lexer.AddRangeErrorWithNotes(logger.Range{Loc: lexer.Loc()}, "Expected \"*/\" to terminate multi-line comment", + []logger.MsgData{lexer.tracker.MsgData(startRange, "The multi-line comment starts here:")}) + panic(LexerPanic{}) + + default: + lexer.step() + } + } + if lexer.json == JSON { + lexer.addRangeError(lexer.Range(), "JSON does not support comments") + } + lexer.scanCommentText() + continue + + default: + lexer.Token = TSlash + } + + case '=': + // '=' or '=>' or '==' or '===' + lexer.step() + switch lexer.codePoint { + case '>': + lexer.step() + lexer.Token = TEqualsGreaterThan + case '=': + lexer.step() + switch lexer.codePoint { + case '=': + lexer.step() + lexer.Token = TEqualsEqualsEquals + default: + lexer.Token = TEqualsEquals + } + default: + lexer.Token = TEquals + } + + case '<': + // '<' or '<<' or '<=' or '<<=' or '