diff --git a/go.mod b/go.mod
index 3f755af..a2d3259 100644
--- a/go.mod
+++ b/go.mod
@@ -6,6 +6,7 @@ require (
 	github.com/evanw/esbuild v0.14.5
 	github.com/goyek/goyek v0.6.0
 	github.com/jaschaephraim/lrserver v0.0.0-20171129202958-50d19f603f71
+	github.com/otiai10/copy v1.7.0
 	github.com/radovskyb/watcher v1.0.7
 )
diff --git a/go.sum b/go.sum
index 74854b2..5cc5c00 100644
--- a/go.sum
+++ b/go.sum
@@ -10,6 +10,13 @@ github.com/jaschaephraim/lrserver v0.0.0-20171129202958-50d19f603f71 h1:24NdJ5N6
 github.com/jaschaephraim/lrserver v0.0.0-20171129202958-50d19f603f71/go.mod h1:ozZLfjiLmXytkIUh200wMeuoQJ4ww06wN+KZtFP6j3g=
 github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
 github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
+github.com/otiai10/copy v1.7.0 h1:hVoPiN+t+7d2nzzwMiDHPSOogsWAStewq3TwU05+clE=
+github.com/otiai10/copy v1.7.0/go.mod h1:rmRl6QPdJj6EiUqXQ/4Nn2lLXoNQjFCQbbNrxgc/t3U=
+github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
+github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
+github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
+github.com/otiai10/mint v1.3.3 h1:7JgpsBaN0uMkyju4tbYHu0mnM55hNKVYLsXmwr15NQI=
+github.com/otiai10/mint v1.3.3/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
 github.com/radovskyb/watcher v1.0.7 h1:AYePLih6dpmS32vlHfhCeli8127LzkIgwJGcwwe8tUE=
 github.com/radovskyb/watcher v1.0.7/go.mod h1:78okwvY5wPdzcb1UYnip1pvrZNIVEIh/Cm+ZuvsUYIg=
 github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=
diff --git a/main.go b/main.go
index 130d482..658b5bd 100644
--- a/main.go
+++ b/main.go
@@ -5,12 +5,14 @@ import (
 	"fmt"
 	"os"
 	"os/signal"
+	"path/filepath"
 	"syscall"
 	"time"
 
 	"github.com/evanw/esbuild/pkg/api"
 	"github.com/goyek/goyek"
 	"github.com/jaschaephraim/lrserver"
+	"github.com/otiai10/copy"
 	"github.com/radovskyb/watcher"
 )
@@ -21,6 +23,10 @@ type options struct {
 	Watch struct {
 		Path string
 	}
+	Copy []struct {
+		Src  string
+		Dest string
+	}
 }
 
 func main() {
@@ -63,6 +69,7 @@
 		select {
 		case event := <-w.Event:
 			fmt.Printf("File %s changed\n", event.Name())
+			cp(opts)
 			build(opts)
 		case err := <-w.Error:
 			fmt.Println(err.Error())
@@ -105,12 +112,50 @@
 	flow.Main()
 }
 
+// cp runs every configured copy operation. A source may be a glob pattern;
+// when the destination is an existing directory, each matched file is copied
+// into it under its base name.
+func cp(opts options) {
+	if len(opts.Copy) == 0 {
+		fmt.Println("Nothing to copy")
+		return
+	}
+	for _, op := range opts.Copy {
+		paths, err := filepath.Glob(op.Src)
+		if err != nil {
+			fmt.Printf("Invalid glob pattern: %s\n", op.Src)
+			continue
+		}
+
+		destIsDir := isDir(op.Dest)
+		for _, p := range paths {
+			d := op.Dest
+
+			if destIsDir && isFile(p) {
+				d = filepath.Join(d, filepath.Base(p))
+			}
+			fmt.Printf("Copying %s to %s\n", p, d)
+			err = copy.Copy(p, d)
+
+			if err != nil {
+				fmt.Printf("Failed to copy %s: %v\n", p, err)
+				continue
+			}
+		}
+	}
+}
+
+func isFile(path string) bool {
+	stat, err := os.Stat(path)
+	return err == nil && !stat.IsDir()
+}
+
+func isDir(path string) bool {
+	stat, err := os.Stat(path)
+	return err == nil && stat.IsDir()
+}
+
 func build(opts options) {
 	result := api.Build(opts.ESBuild)
-	if len(result.Errors) > 0 {
-		os.Exit(1)
-	} else {
+	if len(result.Errors) == 0 {
 		triggerReload <- struct{}{}
 	}
 }
diff --git a/sample.gowebbuild.json b/sample.gowebbuild.json
new file mode 100644
index 0000000..b671b94
--- /dev/null
+++ b/sample.gowebbuild.json
@@ -0,0 +1,41 @@
+{
+  "Watch": {
+    "Path": "./frontend/src"
+  },
+  "Copy": [
+    {
+      "Src": "./frontend/index.html",
+      "Dest": "./api/frontend-dist"
+    },
+    {
+      "Src": "./frontend/src/audio",
+      "Dest": "./api/frontend-dist/audio"
+    },
+    {
+      "Src": "./frontend/src/icon-*.png",
+      "Dest": "./api/frontend-dist"
+    },
+    {
+      "Src": "./frontend/src/manifest.webmanifest",
+      "Dest": "./api/frontend-dist"
+    },
+    {
+      "Src": "./frontend/src/workbox-config.js",
+      "Dest": "./api/frontend-dist"
+    }
+  ],
+  "ESBuild": {
+    "EntryPoints": [
+      "./frontend/src/the-app.js",
+      "./frontend/src/serviceworker.js"
+    ],
+    "Outdir": "./api/frontend-dist",
+    "Sourcemap": 1,
+    "Format": 3,
+    "Splitting": true,
+    "Platform": 0,
+    "Bundle": true,
+    "Write": true,
+    "LogLevel": 3
+  }
+}
\ No newline at end of file
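The numeric values in the "ESBuild" block are Go enum constants: the config file unmarshals straight into api.BuildOptions, so e.g. "Format": 3 selects ESM output (which "Splitting" requires). A minimal standalone sketch of that decoding, assuming the 0.14-era pkg/api constant ordering (worth verifying against the vendored copy); it is not part of the patch:

package main

import (
	"encoding/json"
	"fmt"
	"os"

	"github.com/evanw/esbuild/pkg/api"
)

// options mirrors the struct in main.go; the "ESBuild" key maps directly
// onto api.BuildOptions, so JSON numbers become iota-based enum values.
type options struct {
	ESBuild api.BuildOptions
}

func main() {
	data, err := os.ReadFile("sample.gowebbuild.json")
	if err != nil {
		panic(err)
	}
	var opts options
	if err := json.Unmarshal(data, &opts); err != nil {
		panic(err)
	}
	// Assumed mappings for the sample values: Sourcemap 1 = api.SourceMapInline,
	// Format 3 = api.FormatESModule, Platform 0 = api.PlatformBrowser,
	// LogLevel 3 = api.LogLevelInfo.
	fmt.Println(opts.ESBuild.Format == api.FormatESModule) // true
}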
"Src": "./frontend/index.html", + "Dest": "./api/frontend-dist" + }, + { + "Src": "./frontend/src/audio", + "Dest": "./api/frontend-dist/audio" + }, + { + "Src": "./frontend/src/icon-*.png", + "Dest": "./api/frontend-dist" + }, + { + "Src": "./frontend/src/manifest.webmanifest", + "Dest": "./api/frontend-dist" + }, + { + "Src": "./frontend/src/workbox-config.js", + "Dest": "./api/frontend-dist" + } + ], + "ESBuild": { + "EntryPoints": [ + "./frontend/src/the-app.js", + "./frontend/src/serviceworker.js" + ], + "Outdir": "./api/frontend-dist", + "Sourcemap": 1, + "Format": 3, + "Splitting": true, + "Platform": 0, + "Bundle": true, + "Write": true, + "LogLevel": 3 + } +} \ No newline at end of file diff --git a/vendor/github.com/evanw/esbuild/LICENSE.md b/vendor/github.com/evanw/esbuild/LICENSE.md new file mode 100644 index 0000000..2027e8d --- /dev/null +++ b/vendor/github.com/evanw/esbuild/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Evan Wallace + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/evanw/esbuild/internal/api_helpers/use_timer.go b/vendor/github.com/evanw/esbuild/internal/api_helpers/use_timer.go new file mode 100644 index 0000000..3b36fe2 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/api_helpers/use_timer.go @@ -0,0 +1,7 @@ +package api_helpers + +// This flag is set by the CLI to activate the timer. It's put here instead of +// by the timer to discourage code from checking this flag. Only the code that +// creates the root timer should check this flag. Other code should check that +// the timer is not null to detect if the timer is being used or not. +var UseTimer bool diff --git a/vendor/github.com/evanw/esbuild/internal/ast/ast.go b/vendor/github.com/evanw/esbuild/internal/ast/ast.go new file mode 100644 index 0000000..f909468 --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/ast/ast.go @@ -0,0 +1,141 @@ +package ast + +import "github.com/evanw/esbuild/internal/logger" + +// This file contains data structures that are used with the AST packages for +// both JavaScript and CSS. This helps the bundler treat both AST formats in +// a somewhat format-agnostic manner. 
diff --git a/vendor/github.com/evanw/esbuild/internal/ast/ast.go b/vendor/github.com/evanw/esbuild/internal/ast/ast.go
new file mode 100644
index 0000000..f909468
--- /dev/null
+++ b/vendor/github.com/evanw/esbuild/internal/ast/ast.go
@@ -0,0 +1,141 @@
+package ast
+
+import "github.com/evanw/esbuild/internal/logger"
+
+// This file contains data structures that are used with the AST packages for
+// both JavaScript and CSS. This helps the bundler treat both AST formats in
+// a somewhat format-agnostic manner.
+
+type ImportKind uint8
+
+const (
+	// An entry point provided by the user
+	ImportEntryPoint ImportKind = iota
+
+	// An ES6 import or re-export statement
+	ImportStmt
+
+	// A call to "require()"
+	ImportRequire
+
+	// An "import()" expression with a string argument
+	ImportDynamic
+
+	// A call to "require.resolve()"
+	ImportRequireResolve
+
+	// A CSS "@import" rule
+	ImportAt
+
+	// A CSS "@import" rule with import conditions
+	ImportAtConditional
+
+	// A CSS "url(...)" token
+	ImportURL
+)
+
+func (kind ImportKind) StringForMetafile() string {
+	switch kind {
+	case ImportStmt:
+		return "import-statement"
+	case ImportRequire:
+		return "require-call"
+	case ImportDynamic:
+		return "dynamic-import"
+	case ImportRequireResolve:
+		return "require-resolve"
+	case ImportAt, ImportAtConditional:
+		return "import-rule"
+	case ImportURL:
+		return "url-token"
+	case ImportEntryPoint:
+		return "entry-point"
+	default:
+		panic("Internal error")
+	}
+}
+
+func (kind ImportKind) IsFromCSS() bool {
+	return kind == ImportAt || kind == ImportURL
+}
+
+type ImportRecord struct {
+	Range      logger.Range
+	Path       logger.Path
+	Assertions *[]AssertEntry
+
+	// The resolved source index for an internal import (within the bundle) or
+	// nil for an external import (not included in the bundle)
+	SourceIndex Index32
+
+	// Sometimes the parser creates an import record and decides it isn't needed.
+	// For example, TypeScript code may have import statements that later turn
+	// out to be type-only imports after analyzing the whole file.
+	IsUnused bool
+
+	// If this is true, the import contains syntax like "* as ns". This is used
+	// to determine whether modules that have no exports need to be wrapped in a
+	// CommonJS wrapper or not.
+	ContainsImportStar bool
+
+	// If this is true, the import contains an import for the alias "default",
+	// either via the "import x from" or "import {default as x} from" syntax.
+	ContainsDefaultAlias bool
+
+	// If true, this "export * from 'path'" statement is evaluated at run-time by
+	// calling the "__reExport()" helper function
+	CallsRunTimeReExportFn bool
+
+	// Tell the printer to wrap this call to "require()" in "__toESM(...)"
+	WrapWithToESM bool
+
+	// Tell the printer to wrap this ESM exports object in "__toCJS(...)"
+	WrapWithToCJS bool
+
+	// Tell the printer to use the runtime "__require()" instead of "require()"
+	CallRuntimeRequire bool
+
+	// True for the following cases:
+	//
+	//   try { require('x') } catch { handle }
+	//   try { await import('x') } catch { handle }
+	//   try { require.resolve('x') } catch { handle }
+	//   import('x').catch(handle)
+	//   import('x').then(_, handle)
+	//
+	// In these cases we shouldn't generate an error if the path could not be
+	// resolved.
+	HandlesImportErrors bool
+
+	// If true, this was originally written as a bare "import 'file'" statement
+	WasOriginallyBareImport bool
+
+	Kind ImportKind
+}
+
+type AssertEntry struct {
+	Key             []uint16 // An identifier or a string
+	Value           []uint16 // Always a string
+	KeyLoc          logger.Loc
+	ValueLoc        logger.Loc
+	PreferQuotedKey bool
+}
+
+// This stores a 32-bit index where the zero value is an invalid index. This is
+// a better alternative to storing the index as a pointer since that has the
+// same properties but takes up more space and costs an extra pointer traversal.
+type Index32 struct {
+	flippedBits uint32
+}
+
+func MakeIndex32(index uint32) Index32 {
+	return Index32{flippedBits: ^index}
+}
+
+func (i Index32) IsValid() bool {
+	return i.flippedBits != 0
+}
+
+func (i Index32) GetIndex() uint32 {
+	return ^i.flippedBits
+}
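The flipped-bits encoding above is easy to misread: storing ^index makes the Go zero value (flippedBits == 0) mean "no index", while index 0 itself stays representable. A small standalone round-trip sketch, separate from the patch:

package main

import "fmt"

// Index32 stores the bitwise complement of an index so the zero value is
// reserved as "invalid" without spending a pointer or an extra bool.
type Index32 struct{ flippedBits uint32 }

func MakeIndex32(index uint32) Index32 { return Index32{flippedBits: ^index} }
func (i Index32) IsValid() bool        { return i.flippedBits != 0 }
func (i Index32) GetIndex() uint32     { return ^i.flippedBits }

func main() {
	var unset Index32
	fmt.Println(unset.IsValid()) // false: the zero value means "no index"

	idx := MakeIndex32(0)
	fmt.Println(idx.IsValid(), idx.GetIndex()) // true 0: index 0 is still usable
}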
diff --git a/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go b/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go
new file mode 100644
index 0000000..b42a2e1
--- /dev/null
+++ b/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go
@@ -0,0 +1,2373 @@
+package bundler
+
+import (
+	"bytes"
+	"encoding/base32"
+	"encoding/base64"
+	"fmt"
+	"math/rand"
+	"net/http"
+	"sort"
+	"strings"
+	"sync"
+	"syscall"
+	"time"
+	"unicode"
+	"unicode/utf8"
+
+	"github.com/evanw/esbuild/internal/ast"
+	"github.com/evanw/esbuild/internal/cache"
+	"github.com/evanw/esbuild/internal/compat"
+	"github.com/evanw/esbuild/internal/config"
+	"github.com/evanw/esbuild/internal/css_parser"
+	"github.com/evanw/esbuild/internal/fs"
+	"github.com/evanw/esbuild/internal/graph"
+	"github.com/evanw/esbuild/internal/helpers"
+	"github.com/evanw/esbuild/internal/js_ast"
+	"github.com/evanw/esbuild/internal/js_lexer"
+	"github.com/evanw/esbuild/internal/js_parser"
+	"github.com/evanw/esbuild/internal/js_printer"
+	"github.com/evanw/esbuild/internal/logger"
+	"github.com/evanw/esbuild/internal/resolver"
+	"github.com/evanw/esbuild/internal/runtime"
+	"github.com/evanw/esbuild/internal/sourcemap"
+	"github.com/evanw/esbuild/internal/xxhash"
+)
+
+type scannerFile struct {
+	inputFile  graph.InputFile
+	pluginData interface{}
+
+	// If "AbsMetadataFile" is present, this will be filled out with information
+	// about this file in JSON format. This is a partial JSON file that will be
+	// fully assembled later.
+	jsonMetadataChunk string
+}
+
+// This is data related to source maps. It's computed in parallel with linking
+// and must be ready by the time printing happens. This is beneficial because
+// it is somewhat expensive to produce.
+type dataForSourceMap struct {
+	// This data is for the printer. It maps from byte offsets in the file (which
+	// are stored at every AST node) to UTF-16 column offsets (required by source
+	// maps).
+	lineOffsetTables []sourcemap.LineOffsetTable
+
+	// This contains the quoted contents of the original source file. It's what
+	// needs to be embedded in the "sourcesContent" array in the final source
+	// map. Quoting is precomputed because it's somewhat expensive.
+	quotedContents [][]byte
+}
+
+type Bundle struct {
+	fs          fs.FS
+	res         resolver.Resolver
+	files       []scannerFile
+	entryPoints []graph.EntryPoint
+
+	// The unique key prefix is a random string that is unique to every bundling
+	// operation. It is used as a prefix for the unique keys assigned to every
+	// chunk during linking. These unique keys are used to identify each chunk
+	// before the final output paths have been computed.
+ uniqueKeyPrefix string +} + +type parseArgs struct { + fs fs.FS + log logger.Log + res resolver.Resolver + caches *cache.CacheSet + keyPath logger.Path + prettyPath string + sourceIndex uint32 + importSource *logger.Source + sideEffects graph.SideEffects + importPathRange logger.Range + pluginData interface{} + options config.Options + results chan parseResult + inject chan config.InjectedFile + skipResolve bool + uniqueKeyPrefix string +} + +type parseResult struct { + file scannerFile + resolveResults []*resolver.ResolveResult + tlaCheck tlaCheck + ok bool +} + +type tlaCheck struct { + parent ast.Index32 + depth uint32 + importRecordIndex uint32 +} + +func parseFile(args parseArgs) { + source := logger.Source{ + Index: args.sourceIndex, + KeyPath: args.keyPath, + PrettyPath: args.prettyPath, + IdentifierName: js_ast.GenerateNonUniqueNameFromPath(args.keyPath.Text), + } + + var loader config.Loader + var absResolveDir string + var pluginName string + var pluginData interface{} + + if stdin := args.options.Stdin; stdin != nil { + // Special-case stdin + source.Contents = stdin.Contents + loader = stdin.Loader + if loader == config.LoaderNone { + loader = config.LoaderJS + } + absResolveDir = args.options.Stdin.AbsResolveDir + } else { + result, ok := runOnLoadPlugins( + args.options.Plugins, + args.res, + args.fs, + &args.caches.FSCache, + args.log, + &source, + args.importSource, + args.importPathRange, + args.pluginData, + args.options.WatchMode, + ) + if !ok { + if args.inject != nil { + args.inject <- config.InjectedFile{ + Source: source, + } + } + args.results <- parseResult{} + return + } + loader = result.loader + absResolveDir = result.absResolveDir + pluginName = result.pluginName + pluginData = result.pluginData + } + + _, base, ext := logger.PlatformIndependentPathDirBaseExt(source.KeyPath.Text) + + // The special "default" loader determines the loader from the file path + if loader == config.LoaderDefault { + loader = loaderFromFileExtension(args.options.ExtensionToLoader, base+ext) + } + + result := parseResult{ + file: scannerFile{ + inputFile: graph.InputFile{ + Source: source, + Loader: loader, + SideEffects: args.sideEffects, + }, + pluginData: pluginData, + }, + } + + defer func() { + r := recover() + if r != nil { + args.log.AddWithNotes(logger.Error, nil, logger.Range{}, + fmt.Sprintf("panic: %v (while parsing %q)", r, source.PrettyPath), + []logger.MsgData{{Text: helpers.PrettyPrintedStack()}}) + args.results <- result + } + }() + + switch loader { + case config.LoaderJS: + ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options)) + if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderJSX: + args.options.JSX.Parse = true + ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options)) + if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderTS, config.LoaderTSNoAmbiguousLessThan: + args.options.TS.Parse = true + args.options.TS.NoAmbiguousLessThan = loader == config.LoaderTSNoAmbiguousLessThan + ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options)) + if 
len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderTSX: + args.options.TS.Parse = true + args.options.JSX.Parse = true + ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options)) + if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderCSS: + ast := args.caches.CSSCache.Parse(args.log, source, css_parser.Options{ + MangleSyntax: args.options.MangleSyntax, + RemoveWhitespace: args.options.RemoveWhitespace, + UnsupportedCSSFeatures: args.options.UnsupportedCSSFeatures, + }) + result.file.inputFile.Repr = &graph.CSSRepr{AST: ast} + result.ok = true + + case config.LoaderJSON: + expr, ok := args.caches.JSONCache.Parse(args.log, source, js_parser.JSONOptions{}) + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = ok + + case config.LoaderText: + encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents)) + expr := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16(source.Contents)}} + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + ast.URLForCSS = "data:text/plain;base64," + encoded + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + case config.LoaderBase64: + mimeType := guessMimeType(ext, source.Contents) + encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents)) + expr := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16(encoded)}} + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + ast.URLForCSS = "data:" + mimeType + ";base64," + encoded + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + case config.LoaderBinary: + encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents)) + expr := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16(encoded)}} + helper := "__toBinary" + if args.options.Platform == config.PlatformNode { + helper = "__toBinaryNode" + } + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, helper) + ast.URLForCSS = "data:application/octet-stream;base64," + encoded + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + case config.LoaderDataURL: + mimeType := 
guessMimeType(ext, source.Contents) + encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents)) + url := fmt.Sprintf("data:%s;base64,%s", mimeType, encoded) + expr := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16(url)}} + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + ast.URLForCSS = url + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + case config.LoaderFile: + uniqueKey := fmt.Sprintf("%sA%08d", args.uniqueKeyPrefix, args.sourceIndex) + uniqueKeyPath := uniqueKey + source.KeyPath.IgnoredSuffix + expr := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16(uniqueKeyPath)}} + ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "") + ast.URLForCSS = uniqueKeyPath + if pluginName != "" { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin + } else { + result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData + } + result.file.inputFile.Repr = &graph.JSRepr{AST: ast} + result.ok = true + + // Mark that this file is from the "file" loader + result.file.inputFile.UniqueKeyForFileLoader = uniqueKey + + default: + var message string + if source.KeyPath.Namespace == "file" && ext != "" { + message = fmt.Sprintf("No loader is configured for %q files: %s", ext, source.PrettyPath) + } else { + message = fmt.Sprintf("Do not know how to load path: %s", source.PrettyPath) + } + tracker := logger.MakeLineColumnTracker(args.importSource) + args.log.Add(logger.Error, &tracker, args.importPathRange, message) + } + + // This must come before we send on the "results" channel to avoid deadlock + if args.inject != nil { + var exports []config.InjectableExport + if repr, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok { + aliases := make([]string, 0, len(repr.AST.NamedExports)) + for alias := range repr.AST.NamedExports { + aliases = append(aliases, alias) + } + sort.Strings(aliases) // Sort for determinism + exports = make([]config.InjectableExport, len(aliases)) + for i, alias := range aliases { + exports[i] = config.InjectableExport{ + Alias: alias, + Loc: repr.AST.NamedExports[alias].AliasLoc, + } + } + } + args.inject <- config.InjectedFile{ + Source: source, + Exports: exports, + } + } + + // Stop now if parsing failed + if !result.ok { + args.results <- result + return + } + + // Run the resolver on the parse thread so it's not run on the main thread. + // That way the main thread isn't blocked if the resolver takes a while. + if args.options.Mode == config.ModeBundle && !args.skipResolve { + // Clone the import records because they will be mutated later + recordsPtr := result.file.inputFile.Repr.ImportRecords() + records := append([]ast.ImportRecord{}, *recordsPtr...) + *recordsPtr = records + result.resolveResults = make([]*resolver.ResolveResult, len(records)) + + if len(records) > 0 { + resolverCache := make(map[ast.ImportKind]map[string]*resolver.ResolveResult) + tracker := logger.MakeLineColumnTracker(&source) + + for importRecordIndex := range records { + // Don't try to resolve imports that are already resolved + record := &records[importRecordIndex] + if record.SourceIndex.IsValid() { + continue + } + + // Ignore records that the parser has discarded. 
This is used to remove + // type-only imports in TypeScript files. + if record.IsUnused { + continue + } + + // Cache the path in case it's imported multiple times in this file + cache, ok := resolverCache[record.Kind] + if !ok { + cache = make(map[string]*resolver.ResolveResult) + resolverCache[record.Kind] = cache + } + if resolveResult, ok := cache[record.Path.Text]; ok { + result.resolveResults[importRecordIndex] = resolveResult + continue + } + + // Run the resolver and log an error if the path couldn't be resolved + resolveResult, didLogError, debug := runOnResolvePlugins( + args.options.Plugins, + args.res, + args.log, + args.fs, + &args.caches.FSCache, + &source, + record.Range, + source.KeyPath.Namespace, + record.Path.Text, + record.Kind, + absResolveDir, + pluginData, + ) + cache[record.Path.Text] = resolveResult + + // All "require.resolve()" imports should be external because we don't + // want to waste effort traversing into them + if record.Kind == ast.ImportRequireResolve { + if resolveResult != nil && resolveResult.IsExternal { + // Allow path substitution as long as the result is external + result.resolveResults[importRecordIndex] = resolveResult + } else if !record.HandlesImportErrors { + args.log.Add(logger.Warning, &tracker, record.Range, + fmt.Sprintf("%q should be marked as external for use with \"require.resolve\"", record.Path.Text)) + } + continue + } + + if resolveResult == nil { + // Failed imports inside a try/catch are silently turned into + // external imports instead of causing errors. This matches a common + // code pattern for conditionally importing a module with a graceful + // fallback. + if !didLogError && !record.HandlesImportErrors { + hint := "" + if resolver.IsPackagePath(record.Path.Text) { + hint = fmt.Sprintf("You can mark the path %q as external to exclude it from the bundle, which will remove this error.", record.Path.Text) + if record.Kind == ast.ImportRequire { + hint += " You can also surround this \"require\" call with a try/catch block to handle this failure at run-time instead of bundle-time." + } else if record.Kind == ast.ImportDynamic { + hint += " You can also add \".catch()\" here to handle this failure at run-time instead of bundle-time." + } + if pluginName == "" && !args.fs.IsAbs(record.Path.Text) { + if query := args.res.ProbeResolvePackageAsRelative(absResolveDir, record.Path.Text, record.Kind); query != nil { + hint = fmt.Sprintf("Use the relative path %q to reference the file %q. "+ + "Without the leading \"./\", the path %q is being interpreted as a package path instead.", + "./"+record.Path.Text, args.res.PrettyPath(query.PathPair.Primary), record.Path.Text) + } + } + } + if args.options.Platform != config.PlatformNode { + if _, ok := resolver.BuiltInNodeModules[record.Path.Text]; ok { + var how string + switch logger.API { + case logger.CLIAPI: + how = "--platform=node" + case logger.JSAPI: + how = "platform: 'node'" + case logger.GoAPI: + how = "Platform: api.PlatformNode" + } + hint = fmt.Sprintf("The package %q wasn't found on the file system but is built into node. "+ + "Are you trying to bundle for node? 
You can use %q to do that, which will remove this error.", record.Path.Text, how) + } + } + if absResolveDir == "" && pluginName != "" { + hint = fmt.Sprintf("The plugin %q didn't set a resolve directory for the file %q, "+ + "so esbuild did not search for %q on the file system.", pluginName, source.PrettyPath, record.Path.Text) + } + var notes []logger.MsgData + if hint != "" { + notes = append(notes, logger.MsgData{Text: hint}) + } + debug.LogErrorMsg(args.log, &source, record.Range, fmt.Sprintf("Could not resolve %q", record.Path.Text), notes) + } else if args.log.Level <= logger.LevelDebug && !didLogError && record.HandlesImportErrors { + args.log.Add(logger.Debug, &tracker, record.Range, + fmt.Sprintf("Importing %q was allowed even though it could not be resolved because dynamic import failures appear to be handled here:", + record.Path.Text)) + } + continue + } + + result.resolveResults[importRecordIndex] = resolveResult + } + } + } + + // Attempt to parse the source map if present + if loader.CanHaveSourceMap() && args.options.SourceMap != config.SourceMapNone { + var sourceMapComment logger.Span + switch repr := result.file.inputFile.Repr.(type) { + case *graph.JSRepr: + sourceMapComment = repr.AST.SourceMapComment + case *graph.CSSRepr: + sourceMapComment = repr.AST.SourceMapComment + } + if sourceMapComment.Text != "" { + if path, contents := extractSourceMapFromComment(args.log, args.fs, &args.caches.FSCache, + args.res, &source, sourceMapComment, absResolveDir); contents != nil { + result.file.inputFile.InputSourceMap = js_parser.ParseSourceMap(args.log, logger.Source{ + KeyPath: path, + PrettyPath: args.res.PrettyPath(path), + Contents: *contents, + }) + } + } + } + + args.results <- result +} + +func joinWithPublicPath(publicPath string, relPath string) string { + if strings.HasPrefix(relPath, "./") { + relPath = relPath[2:] + + // Strip any amount of further no-op slashes (i.e. ".///././/x/y" => "x/y") + for { + if strings.HasPrefix(relPath, "/") { + relPath = relPath[1:] + } else if strings.HasPrefix(relPath, "./") { + relPath = relPath[2:] + } else { + break + } + } + } + + // Use a relative path if there is no public path + if publicPath == "" { + publicPath = "." 
+ } + + // Join with a slash + slash := "/" + if strings.HasSuffix(publicPath, "/") { + slash = "" + } + return fmt.Sprintf("%s%s%s", publicPath, slash, relPath) +} + +func isASCIIOnly(text string) bool { + for _, c := range text { + if c < 0x20 || c > 0x7E { + return false + } + } + return true +} + +func guessMimeType(extension string, contents string) string { + mimeType := helpers.MimeTypeByExtension(extension) + if mimeType == "" { + mimeType = http.DetectContentType([]byte(contents)) + } + + // Turn "text/plain; charset=utf-8" into "text/plain;charset=utf-8" + return strings.ReplaceAll(mimeType, "; ", ";") +} + +func extractSourceMapFromComment( + log logger.Log, + fs fs.FS, + fsCache *cache.FSCache, + res resolver.Resolver, + source *logger.Source, + comment logger.Span, + absResolveDir string, +) (logger.Path, *string) { + tracker := logger.MakeLineColumnTracker(source) + + // Support data URLs + if parsed, ok := resolver.ParseDataURL(comment.Text); ok { + if contents, err := parsed.DecodeData(); err == nil { + return logger.Path{Text: source.PrettyPath, IgnoredSuffix: "#sourceMappingURL"}, &contents + } else { + log.Add(logger.Warning, &tracker, comment.Range, fmt.Sprintf("Unsupported source map comment: %s", err.Error())) + return logger.Path{}, nil + } + } + + // Relative path in a file with an absolute path + if absResolveDir != "" { + absPath := fs.Join(absResolveDir, comment.Text) + path := logger.Path{Text: absPath, Namespace: "file"} + contents, err, originalError := fsCache.ReadFile(fs, absPath) + if log.Level <= logger.LevelDebug && originalError != nil { + log.Add(logger.Debug, &tracker, comment.Range, fmt.Sprintf("Failed to read file %q: %s", res.PrettyPath(path), originalError.Error())) + } + if err != nil { + if err == syscall.ENOENT { + // Don't report a warning because this is likely unactionable + return logger.Path{}, nil + } + log.Add(logger.Warning, &tracker, comment.Range, fmt.Sprintf("Cannot read file %q: %s", res.PrettyPath(path), err.Error())) + return logger.Path{}, nil + } + return path, &contents + } + + // Anything else is unsupported + return logger.Path{}, nil +} + +func sanitizeLocation(res resolver.Resolver, loc *logger.MsgLocation) { + if loc != nil { + if loc.Namespace == "" { + loc.Namespace = "file" + } + if loc.File != "" { + loc.File = res.PrettyPath(logger.Path{Text: loc.File, Namespace: loc.Namespace}) + } + } +} + +func logPluginMessages( + res resolver.Resolver, + log logger.Log, + name string, + msgs []logger.Msg, + thrown error, + importSource *logger.Source, + importPathRange logger.Range, +) bool { + didLogError := false + tracker := logger.MakeLineColumnTracker(importSource) + + // Report errors and warnings generated by the plugin + for _, msg := range msgs { + if msg.PluginName == "" { + msg.PluginName = name + } + if msg.Kind == logger.Error { + didLogError = true + } + + // Sanitize the locations + for _, note := range msg.Notes { + sanitizeLocation(res, note.Location) + } + if msg.Data.Location == nil { + msg.Data.Location = tracker.MsgLocationOrNil(importPathRange) + } else { + sanitizeLocation(res, msg.Data.Location) + if msg.Data.Location.File == "" && importSource != nil { + msg.Data.Location.File = importSource.PrettyPath + } + if importSource != nil { + msg.Notes = append(msg.Notes, tracker.MsgData(importPathRange, + fmt.Sprintf("The plugin %q was triggered by this import", name))) + } + } + + log.AddMsg(msg) + } + + // Report errors thrown by the plugin itself + if thrown != nil { + didLogError = true + text := 
thrown.Error() + log.AddMsg(logger.Msg{ + PluginName: name, + Kind: logger.Error, + Data: logger.MsgData{ + Text: text, + Location: tracker.MsgLocationOrNil(importPathRange), + UserDetail: thrown, + }, + }) + } + + return didLogError +} + +func runOnResolvePlugins( + plugins []config.Plugin, + res resolver.Resolver, + log logger.Log, + fs fs.FS, + fsCache *cache.FSCache, + importSource *logger.Source, + importPathRange logger.Range, + importNamespace string, + path string, + kind ast.ImportKind, + absResolveDir string, + pluginData interface{}, +) (*resolver.ResolveResult, bool, resolver.DebugMeta) { + resolverArgs := config.OnResolveArgs{ + Path: path, + ResolveDir: absResolveDir, + Kind: kind, + PluginData: pluginData, + } + applyPath := logger.Path{ + Text: path, + Namespace: importNamespace, + } + if importSource != nil { + resolverArgs.Importer = importSource.KeyPath + } else { + resolverArgs.Importer.Namespace = importNamespace + } + tracker := logger.MakeLineColumnTracker(importSource) + + // Apply resolver plugins in order until one succeeds + for _, plugin := range plugins { + for _, onResolve := range plugin.OnResolve { + if !config.PluginAppliesToPath(applyPath, onResolve.Filter, onResolve.Namespace) { + continue + } + + result := onResolve.Callback(resolverArgs) + pluginName := result.PluginName + if pluginName == "" { + pluginName = plugin.Name + } + didLogError := logPluginMessages(res, log, pluginName, result.Msgs, result.ThrownError, importSource, importPathRange) + + // Plugins can also provide additional file system paths to watch + for _, file := range result.AbsWatchFiles { + fsCache.ReadFile(fs, file) + } + for _, dir := range result.AbsWatchDirs { + if entries, err, _ := fs.ReadDirectory(dir); err == nil { + entries.SortedKeys() + } + } + + // Stop now if there was an error + if didLogError { + return nil, true, resolver.DebugMeta{} + } + + // The "file" namespace is the default for non-external paths, but not + // for external paths. External paths must explicitly specify the "file" + // namespace. + nsFromPlugin := result.Path.Namespace + if result.Path.Namespace == "" && !result.External { + result.Path.Namespace = "file" + } + + // Otherwise, continue on to the next resolver if this loader didn't succeed + if result.Path.Text == "" { + if result.External { + result.Path = logger.Path{Text: path} + } else { + continue + } + } + + // Paths in the file namespace must be absolute paths + if result.Path.Namespace == "file" && !fs.IsAbs(result.Path.Text) { + if nsFromPlugin == "file" { + log.Add(logger.Error, &tracker, importPathRange, + fmt.Sprintf("Plugin %q returned a path in the \"file\" namespace that is not an absolute path: %s", pluginName, result.Path.Text)) + } else { + log.Add(logger.Error, &tracker, importPathRange, + fmt.Sprintf("Plugin %q returned a non-absolute path: %s (set a namespace if this is not a file path)", pluginName, result.Path.Text)) + } + return nil, true, resolver.DebugMeta{} + } + + var sideEffectsData *resolver.SideEffectsData + if result.IsSideEffectFree { + sideEffectsData = &resolver.SideEffectsData{ + PluginName: pluginName, + } + } + + return &resolver.ResolveResult{ + PathPair: resolver.PathPair{Primary: result.Path}, + IsExternal: result.External, + PluginData: result.PluginData, + PrimarySideEffectsData: sideEffectsData, + }, false, resolver.DebugMeta{} + } + } + + // Resolve relative to the resolve directory by default. All paths in the + // "file" namespace automatically have a resolve directory. 
Loader plugins + // can also configure a custom resolve directory for files in other namespaces. + result, debug := res.Resolve(absResolveDir, path, kind) + + // Warn when the case used for importing differs from the actual file name + if result != nil && result.DifferentCase != nil && !helpers.IsInsideNodeModules(absResolveDir) { + diffCase := *result.DifferentCase + log.Add(logger.Warning, &tracker, importPathRange, fmt.Sprintf( + "Use %q instead of %q to avoid issues with case-sensitive file systems", + res.PrettyPath(logger.Path{Text: fs.Join(diffCase.Dir, diffCase.Actual), Namespace: "file"}), + res.PrettyPath(logger.Path{Text: fs.Join(diffCase.Dir, diffCase.Query), Namespace: "file"}), + )) + } + + return result, false, debug +} + +type loaderPluginResult struct { + loader config.Loader + absResolveDir string + pluginName string + pluginData interface{} +} + +func runOnLoadPlugins( + plugins []config.Plugin, + res resolver.Resolver, + fs fs.FS, + fsCache *cache.FSCache, + log logger.Log, + source *logger.Source, + importSource *logger.Source, + importPathRange logger.Range, + pluginData interface{}, + isWatchMode bool, +) (loaderPluginResult, bool) { + loaderArgs := config.OnLoadArgs{ + Path: source.KeyPath, + PluginData: pluginData, + } + tracker := logger.MakeLineColumnTracker(importSource) + + // Apply loader plugins in order until one succeeds + for _, plugin := range plugins { + for _, onLoad := range plugin.OnLoad { + if !config.PluginAppliesToPath(source.KeyPath, onLoad.Filter, onLoad.Namespace) { + continue + } + + result := onLoad.Callback(loaderArgs) + pluginName := result.PluginName + if pluginName == "" { + pluginName = plugin.Name + } + didLogError := logPluginMessages(res, log, pluginName, result.Msgs, result.ThrownError, importSource, importPathRange) + + // Plugins can also provide additional file system paths to watch + for _, file := range result.AbsWatchFiles { + fsCache.ReadFile(fs, file) + } + for _, dir := range result.AbsWatchDirs { + if entries, err, _ := fs.ReadDirectory(dir); err == nil { + entries.SortedKeys() + } + } + + // Stop now if there was an error + if didLogError { + if isWatchMode && source.KeyPath.Namespace == "file" { + fsCache.ReadFile(fs, source.KeyPath.Text) // Read the file for watch mode tracking + } + return loaderPluginResult{}, false + } + + // Otherwise, continue on to the next loader if this loader didn't succeed + if result.Contents == nil { + continue + } + + source.Contents = *result.Contents + loader := result.Loader + if loader == config.LoaderNone { + loader = config.LoaderJS + } + if result.AbsResolveDir == "" && source.KeyPath.Namespace == "file" { + result.AbsResolveDir = fs.Dir(source.KeyPath.Text) + } + if isWatchMode && source.KeyPath.Namespace == "file" { + fsCache.ReadFile(fs, source.KeyPath.Text) // Read the file for watch mode tracking + } + return loaderPluginResult{ + loader: loader, + absResolveDir: result.AbsResolveDir, + pluginName: pluginName, + pluginData: result.PluginData, + }, true + } + } + + // Force disabled modules to be empty + if source.KeyPath.IsDisabled() { + return loaderPluginResult{loader: config.LoaderJS}, true + } + + // Read normal modules from disk + if source.KeyPath.Namespace == "file" { + if contents, err, originalError := fsCache.ReadFile(fs, source.KeyPath.Text); err == nil { + source.Contents = contents + return loaderPluginResult{ + loader: config.LoaderDefault, + absResolveDir: fs.Dir(source.KeyPath.Text), + }, true + } else { + if log.Level <= logger.LevelDebug && originalError != nil { 
+ log.Add(logger.Debug, nil, logger.Range{}, fmt.Sprintf("Failed to read file %q: %s", source.KeyPath.Text, originalError.Error())) + } + if err == syscall.ENOENT { + log.Add(logger.Error, &tracker, importPathRange, + fmt.Sprintf("Could not read from file: %s", source.KeyPath.Text)) + return loaderPluginResult{}, false + } else { + log.Add(logger.Error, &tracker, importPathRange, + fmt.Sprintf("Cannot read file %q: %s", res.PrettyPath(source.KeyPath), err.Error())) + return loaderPluginResult{}, false + } + } + } + + // Native support for data URLs. This is supported natively by node: + // https://nodejs.org/docs/latest/api/esm.html#esm_data_imports + if source.KeyPath.Namespace == "dataurl" { + if parsed, ok := resolver.ParseDataURL(source.KeyPath.Text); ok { + if mimeType := parsed.DecodeMIMEType(); mimeType != resolver.MIMETypeUnsupported { + if contents, err := parsed.DecodeData(); err != nil { + log.Add(logger.Error, &tracker, importPathRange, + fmt.Sprintf("Could not load data URL: %s", err.Error())) + return loaderPluginResult{loader: config.LoaderNone}, true + } else { + source.Contents = contents + switch mimeType { + case resolver.MIMETypeTextCSS: + return loaderPluginResult{loader: config.LoaderCSS}, true + case resolver.MIMETypeTextJavaScript: + return loaderPluginResult{loader: config.LoaderJS}, true + case resolver.MIMETypeApplicationJSON: + return loaderPluginResult{loader: config.LoaderJSON}, true + } + } + } + } + } + + // Otherwise, fail to load the path + return loaderPluginResult{loader: config.LoaderNone}, true +} + +func loaderFromFileExtension(extensionToLoader map[string]config.Loader, base string) config.Loader { + // Pick the loader with the longest matching extension. So if there's an + // extension for ".css" and for ".module.css", we want to match the one for + // ".module.css" before the one for ".css". + for { + i := strings.IndexByte(base, '.') + if i == -1 { + break + } + if loader, ok := extensionToLoader[base[i:]]; ok { + return loader + } + base = base[i+1:] + } + return config.LoaderNone +} + +// Identify the path by its lowercase absolute path name with Windows-specific +// slashes substituted for standard slashes. This should hopefully avoid path +// issues on Windows where multiple different paths can refer to the same +// underlying file. +func canonicalFileSystemPathForWindows(absPath string) string { + return strings.ReplaceAll(strings.ToLower(absPath), "\\", "/") +} + +func hashForFileName(hashBytes []byte) string { + return base32.StdEncoding.EncodeToString(hashBytes)[:8] +} + +type scanner struct { + log logger.Log + fs fs.FS + res resolver.Resolver + caches *cache.CacheSet + options config.Options + timer *helpers.Timer + uniqueKeyPrefix string + + // This is not guarded by a mutex because it's only ever modified by a single + // thread. Note that not all results in the "results" array are necessarily + // valid. Make sure to check the "ok" flag before using them. 
+ results []parseResult + visited map[logger.Path]uint32 + resultChannel chan parseResult + remaining int +} + +type EntryPoint struct { + InputPath string + OutputPath string + IsFile bool +} + +func generateUniqueKeyPrefix() (string, error) { + var data [12]byte + rand.Seed(time.Now().UnixNano()) + if _, err := rand.Read(data[:]); err != nil { + return "", err + } + + // This is 16 bytes and shouldn't generate escape characters when put into strings + return base64.URLEncoding.EncodeToString(data[:]), nil +} + +func ScanBundle( + log logger.Log, + fs fs.FS, + res resolver.Resolver, + caches *cache.CacheSet, + entryPoints []EntryPoint, + options config.Options, + timer *helpers.Timer, +) Bundle { + timer.Begin("Scan phase") + defer timer.End("Scan phase") + + applyOptionDefaults(&options) + + // Run "onStart" plugins in parallel + onStartWaitGroup := sync.WaitGroup{} + for _, plugin := range options.Plugins { + for _, onStart := range plugin.OnStart { + onStartWaitGroup.Add(1) + go func(plugin config.Plugin, onStart config.OnStart) { + result := onStart.Callback() + logPluginMessages(res, log, plugin.Name, result.Msgs, result.ThrownError, nil, logger.Range{}) + onStartWaitGroup.Done() + }(plugin, onStart) + } + } + + // Each bundling operation gets a separate unique key + uniqueKeyPrefix, err := generateUniqueKeyPrefix() + if err != nil { + log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Failed to read from randomness source: %s", err.Error())) + } + + s := scanner{ + log: log, + fs: fs, + res: res, + caches: caches, + options: options, + timer: timer, + results: make([]parseResult, 0, caches.SourceIndexCache.LenHint()), + visited: make(map[logger.Path]uint32), + resultChannel: make(chan parseResult), + uniqueKeyPrefix: uniqueKeyPrefix, + } + + // Always start by parsing the runtime file + s.results = append(s.results, parseResult{}) + s.remaining++ + go func() { + source, ast, ok := globalRuntimeCache.parseRuntime(&options) + s.resultChannel <- parseResult{ + file: scannerFile{ + inputFile: graph.InputFile{ + Source: source, + Repr: &graph.JSRepr{AST: ast}, + }, + }, + ok: ok, + } + }() + + s.preprocessInjectedFiles() + entryPointMeta := s.addEntryPoints(entryPoints) + s.scanAllDependencies() + files := s.processScannedFiles() + + onStartWaitGroup.Wait() + return Bundle{ + fs: fs, + res: res, + files: files, + entryPoints: entryPointMeta, + uniqueKeyPrefix: uniqueKeyPrefix, + } +} + +type inputKind uint8 + +const ( + inputKindNormal inputKind = iota + inputKindEntryPoint + inputKindStdin +) + +// This returns the source index of the resulting file +func (s *scanner) maybeParseFile( + resolveResult resolver.ResolveResult, + prettyPath string, + importSource *logger.Source, + importPathRange logger.Range, + pluginData interface{}, + kind inputKind, + inject chan config.InjectedFile, +) uint32 { + path := resolveResult.PathPair.Primary + visitedKey := path + if visitedKey.Namespace == "file" { + visitedKey.Text = canonicalFileSystemPathForWindows(visitedKey.Text) + } + + // Only parse a given file path once + sourceIndex, ok := s.visited[visitedKey] + if ok { + return sourceIndex + } + + sourceIndex = s.allocateSourceIndex(visitedKey, cache.SourceIndexNormal) + s.visited[visitedKey] = sourceIndex + s.remaining++ + optionsClone := s.options + if kind != inputKindStdin { + optionsClone.Stdin = nil + } + + // Allow certain properties to be overridden + if len(resolveResult.JSXFactory) > 0 { + optionsClone.JSX.Factory = config.JSXExpr{Parts: resolveResult.JSXFactory} + } + if 
len(resolveResult.JSXFragment) > 0 { + optionsClone.JSX.Fragment = config.JSXExpr{Parts: resolveResult.JSXFragment} + } + if resolveResult.UseDefineForClassFieldsTS != config.Unspecified { + optionsClone.UseDefineForClassFields = resolveResult.UseDefineForClassFieldsTS + } + if resolveResult.UnusedImportsTS != config.UnusedImportsRemoveStmt { + optionsClone.UnusedImportsTS = resolveResult.UnusedImportsTS + } + optionsClone.TSTarget = resolveResult.TSTarget + + // Set the module type preference using node's module type rules + if strings.HasSuffix(path.Text, ".mjs") || strings.HasSuffix(path.Text, ".mts") { + optionsClone.ModuleType = js_ast.ModuleESM + } else if strings.HasSuffix(path.Text, ".cjs") || strings.HasSuffix(path.Text, ".cts") { + optionsClone.ModuleType = js_ast.ModuleCommonJS + } else { + optionsClone.ModuleType = resolveResult.ModuleType + } + + // Enable bundling for injected files so we always do tree shaking. We + // never want to include unnecessary code from injected files since they + // are essentially bundled. However, if we do this we should skip the + // resolving step when we're not bundling. It'd be strange to get + // resolution errors when the top-level bundling controls are disabled. + skipResolve := false + if inject != nil && optionsClone.Mode != config.ModeBundle { + optionsClone.Mode = config.ModeBundle + skipResolve = true + } + + // Special-case pretty-printed paths for data URLs + if path.Namespace == "dataurl" { + if _, ok := resolver.ParseDataURL(path.Text); ok { + prettyPath = path.Text + if len(prettyPath) > 64 { + prettyPath = prettyPath[:64] + "..." + } + prettyPath = fmt.Sprintf("<%s>", prettyPath) + } + } + + var sideEffects graph.SideEffects + if resolveResult.PrimarySideEffectsData != nil { + sideEffects.Kind = graph.NoSideEffects_PackageJSON + sideEffects.Data = resolveResult.PrimarySideEffectsData + } + + go parseFile(parseArgs{ + fs: s.fs, + log: s.log, + res: s.res, + caches: s.caches, + keyPath: path, + prettyPath: prettyPath, + sourceIndex: sourceIndex, + importSource: importSource, + sideEffects: sideEffects, + importPathRange: importPathRange, + pluginData: pluginData, + options: optionsClone, + results: s.resultChannel, + inject: inject, + skipResolve: skipResolve, + uniqueKeyPrefix: s.uniqueKeyPrefix, + }) + + return sourceIndex +} + +func (s *scanner) allocateSourceIndex(path logger.Path, kind cache.SourceIndexKind) uint32 { + // Allocate a source index using the shared source index cache so that + // subsequent builds reuse the same source index and therefore use the + // cached parse results for increased speed. + sourceIndex := s.caches.SourceIndexCache.Get(path, kind) + + // Grow the results array to fit this source index + if newLen := int(sourceIndex) + 1; len(s.results) < newLen { + // Reallocate to a bigger array + if cap(s.results) < newLen { + s.results = append(make([]parseResult, 0, 2*newLen), s.results...) + } + + // Grow in place + s.results = s.results[:newLen] + } + + return sourceIndex +} + +func (s *scanner) preprocessInjectedFiles() { + s.timer.Begin("Preprocess injected files") + defer s.timer.End("Preprocess injected files") + + injectedFiles := make([]config.InjectedFile, 0, len(s.options.InjectedDefines)+len(s.options.InjectAbsPaths)) + duplicateInjectedFiles := make(map[string]bool) + injectWaitGroup := sync.WaitGroup{} + + // These are virtual paths that are generated for compound "--define" values. + // They are special-cased and are not available for plugins to intercept. 
+	for _, define := range s.options.InjectedDefines {
+		// These should be unique by construction so no need to check for collisions
+		visitedKey := logger.Path{Text: fmt.Sprintf("<define:%s>", define.Name)}
+		sourceIndex := s.allocateSourceIndex(visitedKey, cache.SourceIndexNormal)
+		s.visited[visitedKey] = sourceIndex
+		source := logger.Source{
+			Index:          sourceIndex,
+			KeyPath:        visitedKey,
+			PrettyPath:     s.res.PrettyPath(visitedKey),
+			IdentifierName: js_ast.EnsureValidIdentifier(visitedKey.Text),
+		}
+
+		// The first "len(InjectedDefine)" injected files intentionally line up
+		// with the injected defines by index. The index will be used to import
+		// references to them in the parser.
+		injectedFiles = append(injectedFiles, config.InjectedFile{
+			Source:     source,
+			DefineName: define.Name,
+		})
+
+		// Generate the file inline here since it has already been parsed
+		expr := js_ast.Expr{Data: define.Data}
+		ast := js_parser.LazyExportAST(s.log, source, js_parser.OptionsFromConfig(&s.options), expr, "")
+		result := parseResult{
+			ok: true,
+			file: scannerFile{
+				inputFile: graph.InputFile{
+					Source: source,
+					Repr:   &graph.JSRepr{AST: ast},
+					Loader: config.LoaderJSON,
+					SideEffects: graph.SideEffects{
+						Kind: graph.NoSideEffects_PureData,
+					},
+				},
+			},
+		}
+
+		// Append to the channel on a goroutine in case it blocks due to capacity
+		s.remaining++
+		go func() { s.resultChannel <- result }()
+	}
+
+	results := make([]config.InjectedFile, len(s.options.InjectAbsPaths))
+	j := 0
+	for _, absPath := range s.options.InjectAbsPaths {
+		prettyPath := s.res.PrettyPath(logger.Path{Text: absPath, Namespace: "file"})
+		absPathKey := canonicalFileSystemPathForWindows(absPath)
+
+		if duplicateInjectedFiles[absPathKey] {
+			s.log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Duplicate injected file %q", prettyPath))
+			continue
+		}
+
+		duplicateInjectedFiles[absPathKey] = true
+		resolveResult := s.res.ResolveAbs(absPath)
+
+		if resolveResult == nil {
+			s.log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Could not resolve %q", prettyPath))
+			continue
+		}
+
+		channel := make(chan config.InjectedFile)
+		s.maybeParseFile(*resolveResult, prettyPath, nil, logger.Range{}, nil, inputKindNormal, channel)
+
+		// Wait for the results in parallel. The results slice is large enough so
+		// it is not reallocated during the computations.
+		injectWaitGroup.Add(1)
+		go func(i int) {
+			results[i] = <-channel
+			injectWaitGroup.Done()
+		}(j)
+		j++
+	}
+
+	injectWaitGroup.Wait()
+	injectedFiles = append(injectedFiles, results[:j]...)
+
+	s.options.InjectedFiles = injectedFiles
+}
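For context on the "<define:NAME>" virtual paths generated above: compound --define values (object and array literals) cannot be substituted as plain text, so esbuild parses each one once and injects it as a virtual module that the substituted references import. A standalone sketch via the public API, separate from the patch; the entry point name and define value are illustrative only:

package main

import "github.com/evanw/esbuild/pkg/api"

func main() {
	// A compound value like this object literal becomes an injected
	// "<define:DEBUG>" virtual file internally; simple literals and
	// identifiers are substituted inline instead.
	api.Build(api.BuildOptions{
		EntryPoints: []string{"app.js"},
		Bundle:      true,
		Write:       true,
		Outdir:      "out",
		Define:      map[string]string{"DEBUG": `{"verbose": true}`},
	})
}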
+
+func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint {
+	s.timer.Begin("Add entry points")
+	defer s.timer.End("Add entry points")
+
+	// Reserve a slot for each entry point
+	entryMetas := make([]graph.EntryPoint, 0, len(entryPoints)+1)
+
+	// Treat stdin as an extra entry point
+	if stdin := s.options.Stdin; stdin != nil {
+		stdinPath := logger.Path{Text: "<stdin>"}
+		if stdin.SourceFile != "" {
+			if stdin.AbsResolveDir == "" {
+				stdinPath = logger.Path{Text: stdin.SourceFile}
+			} else if s.fs.IsAbs(stdin.SourceFile) {
+				stdinPath = logger.Path{Text: stdin.SourceFile, Namespace: "file"}
+			} else {
+				stdinPath = logger.Path{Text: s.fs.Join(stdin.AbsResolveDir, stdin.SourceFile), Namespace: "file"}
+			}
+		}
+		resolveResult := resolver.ResolveResult{PathPair: resolver.PathPair{Primary: stdinPath}}
+		sourceIndex := s.maybeParseFile(resolveResult, s.res.PrettyPath(stdinPath), nil, logger.Range{}, nil, inputKindStdin, nil)
+		entryMetas = append(entryMetas, graph.EntryPoint{
+			OutputPath:  "stdin",
+			SourceIndex: sourceIndex,
+		})
+	}
+
+	// Check each entry point ahead of time to see if it's a real file
+	entryPointAbsResolveDir := s.fs.Cwd()
+	for i := range entryPoints {
+		entryPoint := &entryPoints[i]
+		absPath := entryPoint.InputPath
+		if !s.fs.IsAbs(absPath) {
+			absPath = s.fs.Join(entryPointAbsResolveDir, absPath)
+		}
+		dir := s.fs.Dir(absPath)
+		base := s.fs.Base(absPath)
+		if entries, err, originalError := s.fs.ReadDirectory(dir); err == nil {
+			if entry, _ := entries.Get(base); entry != nil && entry.Kind(s.fs) == fs.FileEntry {
+				entryPoint.IsFile = true
+
+				// Entry point paths without a leading "./" are interpreted as package
+				// paths. This happens because they go through general path resolution
+				// like all other import paths so that plugins can run on them. Requiring
+				// a leading "./" for a relative path simplifies writing plugins because
+				// entry points aren't a special case.
+				//
+				// However, requiring a leading "./" also breaks backward compatibility
+				// and makes working with the CLI more difficult. So attempt to insert
+				// "./" automatically when needed. We don't want to unconditionally insert
+				// a leading "./" because the path may not be a file system path. For
+				// example, it may be a URL. So only insert a leading "./" when the path
+				// is an exact match for an existing file.
+				if !s.fs.IsAbs(entryPoint.InputPath) && resolver.IsPackagePath(entryPoint.InputPath) {
+					entryPoint.InputPath = "./" + entryPoint.InputPath
+				}
+			}
+		} else if s.log.Level <= logger.LevelDebug && originalError != nil {
+			s.log.Add(logger.Debug, nil, logger.Range{}, fmt.Sprintf("Failed to read directory %q: %s", absPath, originalError.Error()))
+		}
+	}
+
+	// Add any remaining entry points. Run resolver plugins on these entry points
+	// so plugins can alter where they resolve to. These are run in parallel in
+	// case any of these plugins block.
+ entryPointResolveResults := make([]*resolver.ResolveResult, len(entryPoints)) + entryPointWaitGroup := sync.WaitGroup{} + entryPointWaitGroup.Add(len(entryPoints)) + for i, entryPoint := range entryPoints { + go func(i int, entryPoint EntryPoint) { + namespace := "" + if entryPoint.IsFile { + namespace = "file" + } + + // Run the resolver and log an error if the path couldn't be resolved + resolveResult, didLogError, debug := runOnResolvePlugins( + s.options.Plugins, + s.res, + s.log, + s.fs, + &s.caches.FSCache, + nil, + logger.Range{}, + namespace, + entryPoint.InputPath, + ast.ImportEntryPoint, + entryPointAbsResolveDir, + nil, + ) + if resolveResult != nil { + if resolveResult.IsExternal { + s.log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("The entry point %q cannot be marked as external", entryPoint.InputPath)) + } else { + entryPointResolveResults[i] = resolveResult + } + } else if !didLogError { + var notes []logger.MsgData + if !s.fs.IsAbs(entryPoint.InputPath) { + if strings.ContainsRune(entryPoint.InputPath, '*') { + notes = append(notes, logger.MsgData{ + Text: "It looks like you are trying to use glob syntax (i.e. \"*\") with esbuild. " + + "This syntax is typically handled by your shell, and isn't handled by esbuild itself. " + + "You must expand glob syntax first before passing your paths to esbuild.", + }) + } else if query := s.res.ProbeResolvePackageAsRelative(entryPointAbsResolveDir, entryPoint.InputPath, ast.ImportEntryPoint); query != nil { + notes = append(notes, logger.MsgData{ + Text: fmt.Sprintf("Use the relative path %q to reference the file %q. "+ + "Without the leading \"./\", the path %q is being interpreted as a package path instead.", + "./"+entryPoint.InputPath, s.res.PrettyPath(query.PathPair.Primary), entryPoint.InputPath), + }) + } + } + debug.LogErrorMsg(s.log, nil, logger.Range{}, fmt.Sprintf("Could not resolve %q", entryPoint.InputPath), notes) + } + entryPointWaitGroup.Done() + }(i, entryPoint) + } + entryPointWaitGroup.Wait() + + // Parse all entry points that were resolved successfully + for i, resolveResult := range entryPointResolveResults { + if resolveResult != nil { + prettyPath := s.res.PrettyPath(resolveResult.PathPair.Primary) + sourceIndex := s.maybeParseFile(*resolveResult, prettyPath, nil, logger.Range{}, resolveResult.PluginData, inputKindEntryPoint, nil) + outputPath := entryPoints[i].OutputPath + outputPathWasAutoGenerated := false + + // If the output path is missing, automatically generate one from the input path + if outputPath == "" { + outputPath = entryPoints[i].InputPath + windowsVolumeLabel := "" + + // The ":" character is invalid in file paths on Windows except when + // it's used as a volume separator. Special-case that here so volume + // labels don't break on Windows. + if s.fs.IsAbs(outputPath) && len(outputPath) >= 3 && outputPath[1] == ':' { + if c := outputPath[0]; (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') { + if c := outputPath[2]; c == '/' || c == '\\' { + windowsVolumeLabel = outputPath[:3] + outputPath = outputPath[3:] + } + } + } + + // For cross-platform robustness, do not allow characters in the output + // path that are invalid on Windows. This is especially relevant when + // the input path is something other than a file path, such as a URL. 
+ outputPath = sanitizeFilePathForVirtualModulePath(outputPath) + if windowsVolumeLabel != "" { + outputPath = windowsVolumeLabel + outputPath + } + outputPathWasAutoGenerated = true + + // Strip the file extension from the output path if there is one so the + // "out extension" setting is used instead + if last := strings.LastIndexAny(outputPath, "/.\\"); last != -1 && outputPath[last] == '.' { + outputPath = outputPath[:last] + } + } + + entryMetas = append(entryMetas, graph.EntryPoint{ + OutputPath: outputPath, + SourceIndex: sourceIndex, + OutputPathWasAutoGenerated: outputPathWasAutoGenerated, + }) + } + } + + // Turn all automatically-generated output paths into absolute paths + for i := range entryMetas { + entryPoint := &entryMetas[i] + if entryPoint.OutputPathWasAutoGenerated && !s.fs.IsAbs(entryPoint.OutputPath) { + entryPoint.OutputPath = s.fs.Join(entryPointAbsResolveDir, entryPoint.OutputPath) + } + } + + // Automatically compute "outbase" if it wasn't provided + if s.options.AbsOutputBase == "" { + s.options.AbsOutputBase = lowestCommonAncestorDirectory(s.fs, entryMetas) + if s.options.AbsOutputBase == "" { + s.options.AbsOutputBase = entryPointAbsResolveDir + } + } + + // Turn all output paths back into relative paths, but this time relative to + // the "outbase" value we computed above + for i := range entryMetas { + entryPoint := &entryMetas[i] + if s.fs.IsAbs(entryPoint.OutputPath) { + if !entryPoint.OutputPathWasAutoGenerated { + // If an explicit absolute output path was specified, use the path + // relative to the "outdir" directory + if relPath, ok := s.fs.Rel(s.options.AbsOutputDir, entryPoint.OutputPath); ok { + entryPoint.OutputPath = relPath + } + } else { + // Otherwise if the absolute output path was derived from the input + // path, use the path relative to the "outbase" directory + if relPath, ok := s.fs.Rel(s.options.AbsOutputBase, entryPoint.OutputPath); ok { + entryPoint.OutputPath = relPath + } + } + } + } + + return entryMetas +} + +func lowestCommonAncestorDirectory(fs fs.FS, entryPoints []graph.EntryPoint) string { + // Ignore any explicitly-specified output paths + absPaths := make([]string, 0, len(entryPoints)) + for _, entryPoint := range entryPoints { + if entryPoint.OutputPathWasAutoGenerated { + absPaths = append(absPaths, entryPoint.OutputPath) + } + } + + if len(absPaths) == 0 { + return "" + } + + lowestAbsDir := fs.Dir(absPaths[0]) + + for _, absPath := range absPaths[1:] { + absDir := fs.Dir(absPath) + lastSlash := 0 + a := 0 + b := 0 + + for { + runeA, widthA := utf8.DecodeRuneInString(absDir[a:]) + runeB, widthB := utf8.DecodeRuneInString(lowestAbsDir[b:]) + boundaryA := widthA == 0 || runeA == '/' || runeA == '\\' + boundaryB := widthB == 0 || runeB == '/' || runeB == '\\' + + if boundaryA && boundaryB { + if widthA == 0 || widthB == 0 { + // Truncate to the smaller path if one path is a prefix of the other + lowestAbsDir = absDir[:a] + break + } else { + // Track the longest common directory so far + lastSlash = a + } + } else if boundaryA != boundaryB || unicode.ToLower(runeA) != unicode.ToLower(runeB) { + // If both paths are different at this point, stop and set the lowest so + // far to the common parent directory. Compare using a case-insensitive + // comparison to handle paths on Windows. 
+ lowestAbsDir = absDir[:lastSlash] + break + } + + a += widthA + b += widthB + } + } + + return lowestAbsDir +} + +func (s *scanner) scanAllDependencies() { + s.timer.Begin("Scan all dependencies") + defer s.timer.End("Scan all dependencies") + + // Continue scanning until all dependencies have been discovered + for s.remaining > 0 { + result := <-s.resultChannel + s.remaining-- + if !result.ok { + continue + } + + // Don't try to resolve paths if we're not bundling + if s.options.Mode == config.ModeBundle { + records := *result.file.inputFile.Repr.ImportRecords() + for importRecordIndex := range records { + record := &records[importRecordIndex] + + // Skip this import record if the previous resolver call failed + resolveResult := result.resolveResults[importRecordIndex] + if resolveResult == nil { + continue + } + + path := resolveResult.PathPair.Primary + if !resolveResult.IsExternal { + // Handle a path within the bundle + sourceIndex := s.maybeParseFile(*resolveResult, s.res.PrettyPath(path), + &result.file.inputFile.Source, record.Range, resolveResult.PluginData, inputKindNormal, nil) + record.SourceIndex = ast.MakeIndex32(sourceIndex) + } else { + // If the path to the external module is relative to the source + // file, rewrite the path to be relative to the working directory + if path.Namespace == "file" { + if relPath, ok := s.fs.Rel(s.options.AbsOutputDir, path.Text); ok { + // Prevent issues with path separators being different on Windows + relPath = strings.ReplaceAll(relPath, "\\", "/") + if resolver.IsPackagePath(relPath) { + relPath = "./" + relPath + } + record.Path.Text = relPath + } else { + record.Path = path + } + } else { + record.Path = path + } + } + } + } + + s.results[result.file.inputFile.Source.Index] = result + } +} + +func (s *scanner) processScannedFiles() []scannerFile { + s.timer.Begin("Process scanned files") + defer s.timer.End("Process scanned files") + + // Now that all files have been scanned, process the final file import records + for i, result := range s.results { + if !result.ok { + continue + } + + sb := strings.Builder{} + isFirstImport := true + + // Begin the metadata chunk + if s.options.NeedsMetafile { + sb.Write(js_printer.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly)) + sb.WriteString(fmt.Sprintf(": {\n \"bytes\": %d,\n \"imports\": [", len(result.file.inputFile.Source.Contents))) + } + + // Don't try to resolve paths if we're not bundling + if s.options.Mode == config.ModeBundle { + records := *result.file.inputFile.Repr.ImportRecords() + tracker := logger.MakeLineColumnTracker(&result.file.inputFile.Source) + + for importRecordIndex := range records { + record := &records[importRecordIndex] + + // Skip this import record if the previous resolver call failed + resolveResult := result.resolveResults[importRecordIndex] + if resolveResult == nil || !record.SourceIndex.IsValid() { + continue + } + + // Now that all files have been scanned, look for packages that are imported + // both with "import" and "require". Rewrite any imports that reference the + // "module" package.json field to the "main" package.json field instead. + // + // This attempts to automatically avoid the "dual package hazard" where a + // package has both a CommonJS module version and an ECMAScript module + // version and exports a non-object in CommonJS (often a function). If we + // pick the "module" field and the package is imported with "require" then + // code expecting a function will crash. 
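The rewrite guarded by `HasSecondary()` just below can be illustrated with a toy version of the same bookkeeping. This sketch uses made-up paths and indexes; `pathPair` loosely mirrors `resolver.PathPair` and the `visited` map mirrors `s.visited`:

```go
// Toy illustration of the dual-package-hazard redirect; names and indexes
// are invented for the example.
package main

import "fmt"

type pathPair struct {
	primary   string
	secondary string // empty when the package has no dual entry points
}

func main() {
	// Both entry points of the same package were resolved during the scan:
	visited := map[string]uint32{
		"node_modules/pkg/esm/index.js": 7, // "module" field
		"node_modules/pkg/cjs/index.js": 3, // "main" field, pulled in via require()
	}

	record := struct{ sourceIndex uint32 }{sourceIndex: 7}
	pair := pathPair{
		primary:   "node_modules/pkg/esm/index.js",
		secondary: "node_modules/pkg/cjs/index.js",
	}

	// Mirror of the rewrite below: if the secondary ("main") path was also
	// visited, point the import at it so "import" and "require" agree on
	// a single copy of the package.
	if pair.secondary != "" {
		if idx, ok := visited[pair.secondary]; ok {
			record.sourceIndex = idx
		}
	}
	fmt.Println(record.sourceIndex) // 3
}
```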
+ if resolveResult.PathPair.HasSecondary() { + secondaryKey := resolveResult.PathPair.Secondary + if secondaryKey.Namespace == "file" { + secondaryKey.Text = canonicalFileSystemPathForWindows(secondaryKey.Text) + } + if secondarySourceIndex, ok := s.visited[secondaryKey]; ok { + record.SourceIndex = ast.MakeIndex32(secondarySourceIndex) + } + } + + // Generate metadata about each import + if s.options.NeedsMetafile { + if isFirstImport { + isFirstImport = false + sb.WriteString("\n ") + } else { + sb.WriteString(",\n ") + } + sb.WriteString(fmt.Sprintf("{\n \"path\": %s,\n \"kind\": %s\n }", + js_printer.QuoteForJSON(s.results[record.SourceIndex.GetIndex()].file.inputFile.Source.PrettyPath, s.options.ASCIIOnly), + js_printer.QuoteForJSON(record.Kind.StringForMetafile(), s.options.ASCIIOnly))) + } + + switch record.Kind { + case ast.ImportAt, ast.ImportAtConditional: + // Using a JavaScript file with CSS "@import" is not allowed + otherFile := &s.results[record.SourceIndex.GetIndex()].file + if _, ok := otherFile.inputFile.Repr.(*graph.JSRepr); ok { + s.log.Add(logger.Error, &tracker, record.Range, + fmt.Sprintf("Cannot import %q into a CSS file", otherFile.inputFile.Source.PrettyPath)) + } else if record.Kind == ast.ImportAtConditional { + s.log.Add(logger.Error, &tracker, record.Range, + "Bundling with conditional \"@import\" rules is not currently supported") + } + + case ast.ImportURL: + // Using a JavaScript or CSS file with CSS "url()" is not allowed + otherFile := &s.results[record.SourceIndex.GetIndex()].file + switch otherRepr := otherFile.inputFile.Repr.(type) { + case *graph.CSSRepr: + s.log.Add(logger.Error, &tracker, record.Range, + fmt.Sprintf("Cannot use %q as a URL", otherFile.inputFile.Source.PrettyPath)) + + case *graph.JSRepr: + if otherRepr.AST.URLForCSS == "" { + s.log.Add(logger.Error, &tracker, record.Range, + fmt.Sprintf("Cannot use %q as a URL", otherFile.inputFile.Source.PrettyPath)) + } + } + } + + // If an import from a JavaScript file targets a CSS file, generate a + // JavaScript stub to ensure that JavaScript files only ever import + // other JavaScript files. + if _, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok { + otherFile := &s.results[record.SourceIndex.GetIndex()].file + if css, ok := otherFile.inputFile.Repr.(*graph.CSSRepr); ok { + if s.options.WriteToStdout { + s.log.Add(logger.Error, &tracker, record.Range, + fmt.Sprintf("Cannot import %q into a JavaScript file without an output path configured", otherFile.inputFile.Source.PrettyPath)) + } else if !css.JSSourceIndex.IsValid() { + stubKey := otherFile.inputFile.Source.KeyPath + if stubKey.Namespace == "file" { + stubKey.Text = canonicalFileSystemPathForWindows(stubKey.Text) + } + sourceIndex := s.allocateSourceIndex(stubKey, cache.SourceIndexJSStubForCSS) + source := logger.Source{ + Index: sourceIndex, + PrettyPath: otherFile.inputFile.Source.PrettyPath, + } + s.results[sourceIndex] = parseResult{ + file: scannerFile{ + inputFile: graph.InputFile{ + Source: source, + Repr: &graph.JSRepr{ + AST: js_parser.LazyExportAST(s.log, source, + js_parser.OptionsFromConfig(&s.options), js_ast.Expr{Data: &js_ast.EObject{}}, ""), + CSSSourceIndex: ast.MakeIndex32(record.SourceIndex.GetIndex()), + }, + }, + }, + ok: true, + } + css.JSSourceIndex = ast.MakeIndex32(sourceIndex) + } + record.SourceIndex = css.JSSourceIndex + if !css.JSSourceIndex.IsValid() { + continue + } + } + } + + // Warn about this import if it's a bare import statement without any + // imported names (i.e. 
a side-effect-only import) and the module has + // been marked as having no side effects. + // + // Except don't do this if this file is inside "node_modules" since + // it's a bug in the package and the user won't be able to do anything + // about it. Note that this can result in esbuild silently generating + // broken code. If this actually happens for people, it's probably worth + // re-enabling the warning about code inside "node_modules". + if record.WasOriginallyBareImport && !s.options.IgnoreDCEAnnotations && + !helpers.IsInsideNodeModules(result.file.inputFile.Source.KeyPath.Text) { + if otherModule := &s.results[record.SourceIndex.GetIndex()].file.inputFile; otherModule.SideEffects.Kind != graph.HasSideEffects && + // Do not warn if this is from a plugin, since removing the import + // would cause the plugin to not run, and running a plugin is a side + // effect. + otherModule.SideEffects.Kind != graph.NoSideEffects_PureData_FromPlugin && + + // Do not warn if this has no side effects because the parsed AST + // is empty. This is the case for ".d.ts" files, for example. + otherModule.SideEffects.Kind != graph.NoSideEffects_EmptyAST { + + var notes []logger.MsgData + var by string + if data := otherModule.SideEffects.Data; data != nil { + if data.PluginName != "" { + by = fmt.Sprintf(" by plugin %q", data.PluginName) + } else { + var text string + if data.IsSideEffectsArrayInJSON { + text = "It was excluded from the \"sideEffects\" array in the enclosing \"package.json\" file" + } else { + text = "\"sideEffects\" is false in the enclosing \"package.json\" file" + } + tracker := logger.MakeLineColumnTracker(data.Source) + notes = append(notes, tracker.MsgData(data.Range, text)) + } + } + s.log.AddWithNotes(logger.Warning, &tracker, record.Range, + fmt.Sprintf("Ignoring this import because %q was marked as having no side effects%s", + otherModule.Source.PrettyPath, by), notes) + } + } + } + } + + // End the metadata chunk + if s.options.NeedsMetafile { + if !isFirstImport { + sb.WriteString("\n ") + } + sb.WriteString("]\n }") + } + + result.file.jsonMetadataChunk = sb.String() + + // If this file is from the "file" loader, generate an additional file + if result.file.inputFile.UniqueKeyForFileLoader != "" { + bytes := []byte(result.file.inputFile.Source.Contents) + + // Add a hash to the file name to prevent multiple files with the same name + // but different contents from colliding + var hash string + if config.HasPlaceholder(s.options.AssetPathTemplate, config.HashPlaceholder) { + h := xxhash.New() + h.Write(bytes) + hash = hashForFileName(h.Sum(nil)) + } + + // Generate the input for the template + _, _, originalExt := logger.PlatformIndependentPathDirBaseExt(result.file.inputFile.Source.KeyPath.Text) + dir, base := pathRelativeToOutbase( + &result.file.inputFile, + &s.options, + s.fs, + /* avoidIndex */ false, + /* customFilePath */ "", + ) + + // Apply the asset path template + templateExt := strings.TrimPrefix(originalExt, ".") + relPath := config.TemplateToString(config.SubstituteTemplate(s.options.AssetPathTemplate, config.PathPlaceholders{ + Dir: &dir, + Name: &base, + Hash: &hash, + Ext: &templateExt, + })) + originalExt + + // Optionally add metadata about the file + var jsonMetadataChunk string + if s.options.NeedsMetafile { + inputs := fmt.Sprintf("{\n %s: {\n \"bytesInOutput\": %d\n }\n }", + js_printer.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly), + len(bytes), + ) + jsonMetadataChunk = fmt.Sprintf( + "{\n \"imports\": [],\n \"exports\": 
[],\n \"inputs\": %s,\n \"bytes\": %d\n }", + inputs, + len(bytes), + ) + } + + // Generate the additional file to copy into the output directory + result.file.inputFile.AdditionalFiles = []graph.OutputFile{{ + AbsPath: s.fs.Join(s.options.AbsOutputDir, relPath), + Contents: bytes, + JSONMetadataChunk: jsonMetadataChunk, + }} + } + + s.results[i] = result + } + + // The linker operates on an array of files, so construct that now. This + // can't be constructed earlier because we generate new parse results for + // JavaScript stub files for CSS imports above. + files := make([]scannerFile, len(s.results)) + for sourceIndex := range s.results { + if result := &s.results[sourceIndex]; result.ok { + s.validateTLA(uint32(sourceIndex)) + files[sourceIndex] = result.file + } + } + return files +} + +func (s *scanner) validateTLA(sourceIndex uint32) tlaCheck { + result := &s.results[sourceIndex] + + if result.ok && result.tlaCheck.depth == 0 { + if repr, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok { + result.tlaCheck.depth = 1 + if repr.AST.TopLevelAwaitKeyword.Len > 0 { + result.tlaCheck.parent = ast.MakeIndex32(sourceIndex) + } + + for importRecordIndex, record := range repr.AST.ImportRecords { + if record.SourceIndex.IsValid() && (record.Kind == ast.ImportRequire || record.Kind == ast.ImportStmt) { + parent := s.validateTLA(record.SourceIndex.GetIndex()) + if !parent.parent.IsValid() { + continue + } + + // Follow any import chains + if record.Kind == ast.ImportStmt && (!result.tlaCheck.parent.IsValid() || parent.depth < result.tlaCheck.depth) { + result.tlaCheck.depth = parent.depth + 1 + result.tlaCheck.parent = record.SourceIndex + result.tlaCheck.importRecordIndex = uint32(importRecordIndex) + continue + } + + // Require of a top-level await chain is forbidden + if record.Kind == ast.ImportRequire { + var notes []logger.MsgData + var tlaPrettyPath string + otherSourceIndex := record.SourceIndex.GetIndex() + + // Build up a chain of relevant notes for all of the imports + for { + parentResult := &s.results[otherSourceIndex] + parentRepr := parentResult.file.inputFile.Repr.(*graph.JSRepr) + + if parentRepr.AST.TopLevelAwaitKeyword.Len > 0 { + tlaPrettyPath = parentResult.file.inputFile.Source.PrettyPath + tracker := logger.MakeLineColumnTracker(&parentResult.file.inputFile.Source) + notes = append(notes, tracker.MsgData(parentRepr.AST.TopLevelAwaitKeyword, + fmt.Sprintf("The top-level await in %q is here:", tlaPrettyPath))) + break + } + + if !parentResult.tlaCheck.parent.IsValid() { + notes = append(notes, logger.MsgData{Text: "unexpected invalid index"}) + break + } + + otherSourceIndex = parentResult.tlaCheck.parent.GetIndex() + + tracker := logger.MakeLineColumnTracker(&parentResult.file.inputFile.Source) + notes = append(notes, tracker.MsgData( + parentRepr.AST.ImportRecords[parent.importRecordIndex].Range, + fmt.Sprintf("The file %q imports the file %q here:", + parentResult.file.inputFile.Source.PrettyPath, s.results[otherSourceIndex].file.inputFile.Source.PrettyPath))) + } + + var text string + importedPrettyPath := s.results[record.SourceIndex.GetIndex()].file.inputFile.Source.PrettyPath + + if importedPrettyPath == tlaPrettyPath { + text = fmt.Sprintf("This require call is not allowed because the imported file %q contains a top-level await", + importedPrettyPath) + } else { + text = fmt.Sprintf("This require call is not allowed because the transitive dependency %q contains a top-level await", + tlaPrettyPath) + } + + tracker := 
logger.MakeLineColumnTracker(&result.file.inputFile.Source) + s.log.AddWithNotes(logger.Error, &tracker, record.Range, text, notes) + } + } + } + + // Make sure that if we wrap this module in a closure, the closure is also + // async. This happens when you call "import()" on this module and code + // splitting is off. + if result.tlaCheck.parent.IsValid() { + repr.Meta.IsAsyncOrHasAsyncDependency = true + } + } + } + + return result.tlaCheck +} + +func DefaultExtensionToLoaderMap() map[string]config.Loader { + return map[string]config.Loader{ + ".js": config.LoaderJS, + ".mjs": config.LoaderJS, + ".cjs": config.LoaderJS, + ".jsx": config.LoaderJSX, + ".ts": config.LoaderTS, + ".cts": config.LoaderTSNoAmbiguousLessThan, + ".mts": config.LoaderTSNoAmbiguousLessThan, + ".tsx": config.LoaderTSX, + ".css": config.LoaderCSS, + ".json": config.LoaderJSON, + ".txt": config.LoaderText, + } +} + +func applyOptionDefaults(options *config.Options) { + if options.ExtensionToLoader == nil { + options.ExtensionToLoader = DefaultExtensionToLoaderMap() + } + if options.OutputExtensionJS == "" { + options.OutputExtensionJS = ".js" + } + if options.OutputExtensionCSS == "" { + options.OutputExtensionCSS = ".css" + } + + // Configure default path templates + if len(options.EntryPathTemplate) == 0 { + options.EntryPathTemplate = []config.PathTemplate{ + {Data: "./", Placeholder: config.DirPlaceholder}, + {Data: "/", Placeholder: config.NamePlaceholder}, + } + } + if len(options.ChunkPathTemplate) == 0 { + options.ChunkPathTemplate = []config.PathTemplate{ + {Data: "./", Placeholder: config.NamePlaceholder}, + {Data: "-", Placeholder: config.HashPlaceholder}, + } + } + if len(options.AssetPathTemplate) == 0 { + options.AssetPathTemplate = []config.PathTemplate{ + {Data: "./", Placeholder: config.NamePlaceholder}, + {Data: "-", Placeholder: config.HashPlaceholder}, + } + } + + options.ProfilerNames = !options.MinifyIdentifiers +} + +func (b *Bundle) Compile(log logger.Log, options config.Options, timer *helpers.Timer) ([]graph.OutputFile, string) { + timer.Begin("Compile phase") + defer timer.End("Compile phase") + + applyOptionDefaults(&options) + + // The format can't be "preserve" while bundling + if options.Mode == config.ModeBundle && options.OutputFormat == config.FormatPreserve { + options.OutputFormat = config.FormatESModule + } + + files := make([]graph.InputFile, len(b.files)) + for i, file := range b.files { + files[i] = file.inputFile + } + + // Get the base path from the options or choose the lowest common ancestor of all entry points + allReachableFiles := findReachableFiles(files, b.entryPoints) + + // Compute source map data in parallel with linking + timer.Begin("Spawn source map tasks") + dataForSourceMaps := b.computeDataForSourceMapsInParallel(&options, allReachableFiles) + timer.End("Spawn source map tasks") + + var resultGroups [][]graph.OutputFile + if options.CodeSplitting || len(b.entryPoints) == 1 { + // If code splitting is enabled or if there's only one entry point, link all entry points together + resultGroups = [][]graph.OutputFile{link( + &options, timer, log, b.fs, b.res, files, b.entryPoints, b.uniqueKeyPrefix, allReachableFiles, dataForSourceMaps)} + } else { + // Otherwise, link each entry point with the runtime file separately + waitGroup := sync.WaitGroup{} + resultGroups = make([][]graph.OutputFile, len(b.entryPoints)) + for i, entryPoint := range b.entryPoints { + waitGroup.Add(1) + go func(i int, entryPoint graph.EntryPoint) { + entryPoints := 
[]graph.EntryPoint{entryPoint} + forked := timer.Fork() + reachableFiles := findReachableFiles(files, entryPoints) + resultGroups[i] = link( + &options, forked, log, b.fs, b.res, files, entryPoints, b.uniqueKeyPrefix, reachableFiles, dataForSourceMaps) + timer.Join(forked) + waitGroup.Done() + }(i, entryPoint) + } + waitGroup.Wait() + } + + // Join the results in entry point order for determinism + var outputFiles []graph.OutputFile + for _, group := range resultGroups { + outputFiles = append(outputFiles, group...) + } + + // Also generate the metadata file if necessary + var metafileJSON string + if options.NeedsMetafile { + timer.Begin("Generate metadata JSON") + metafileJSON = b.generateMetadataJSON(outputFiles, allReachableFiles, options.ASCIIOnly) + timer.End("Generate metadata JSON") + } + + if !options.WriteToStdout { + // Make sure an output file never overwrites an input file + if !options.AllowOverwrite { + sourceAbsPaths := make(map[string]uint32) + for _, sourceIndex := range allReachableFiles { + keyPath := b.files[sourceIndex].inputFile.Source.KeyPath + if keyPath.Namespace == "file" { + absPathKey := canonicalFileSystemPathForWindows(keyPath.Text) + sourceAbsPaths[absPathKey] = sourceIndex + } + } + for _, outputFile := range outputFiles { + absPathKey := canonicalFileSystemPathForWindows(outputFile.AbsPath) + if sourceIndex, ok := sourceAbsPaths[absPathKey]; ok { + hint := "" + switch logger.API { + case logger.CLIAPI: + hint = " (use \"--allow-overwrite\" to allow this)" + case logger.JSAPI: + hint = " (use \"allowOverwrite: true\" to allow this)" + case logger.GoAPI: + hint = " (use \"AllowOverwrite: true\" to allow this)" + } + log.Add(logger.Error, nil, logger.Range{}, + fmt.Sprintf("Refusing to overwrite input file %q%s", + b.files[sourceIndex].inputFile.Source.PrettyPath, hint)) + } + } + } + + // Make sure an output file never overwrites another output file. This + // is almost certainly unintentional and would otherwise happen silently. + // + // Make an exception for files that have identical contents. In that case + // the duplicate is just silently filtered out. This can happen with the + // "file" loader, for example. + outputFileMap := make(map[string][]byte) + end := 0 + for _, outputFile := range outputFiles { + absPathKey := canonicalFileSystemPathForWindows(outputFile.AbsPath) + contents, ok := outputFileMap[absPathKey] + + // If this isn't a duplicate, keep the output file + if !ok { + outputFileMap[absPathKey] = outputFile.Contents + outputFiles[end] = outputFile + end++ + continue + } + + // If the names and contents are both the same, only keep the first one + if bytes.Equal(contents, outputFile.Contents) { + continue + } + + // Otherwise, generate an error + outputPath := outputFile.AbsPath + if relPath, ok := b.fs.Rel(b.fs.Cwd(), outputPath); ok { + outputPath = relPath + } + log.Add(logger.Error, nil, logger.Range{}, "Two output files share the same path but have different contents: "+outputPath) + } + outputFiles = outputFiles[:end] + } + + return outputFiles, metafileJSON +} + +// Find all files reachable from all entry points. This order should be +// deterministic given that the entry point order is deterministic, since the +// returned order is the postorder of the graph traversal and import record +// order within a given file is deterministic. 
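Since the traversal declared below is the backbone of link-time determinism, a toy version may help: replacing `graph.InputFile` with a plain adjacency map makes the postorder property easy to see. This is an illustrative sketch, not the vendored function:

```go
// Standalone sketch of the reachability traversal over a toy import graph.
package main

import "fmt"

func findReachable(imports map[uint32][]uint32, entryPoints []uint32) []uint32 {
	visited := make(map[uint32]bool)
	var order []uint32
	var visit func(uint32)
	visit = func(i uint32) {
		if visited[i] {
			return
		}
		visited[i] = true
		for _, dep := range imports[i] {
			visit(dep) // dependencies first: the result is a postorder
		}
		order = append(order, i)
	}
	for _, ep := range entryPoints {
		visit(ep)
	}
	return order
}

func main() {
	imports := map[uint32][]uint32{0: {1, 2}, 1: {2}}
	fmt.Println(findReachable(imports, []uint32{0})) // [2 1 0]
}
```

Appending each file after its recursive calls is what makes the order a postorder, which in turn keeps the output deterministic given deterministic entry-point and import-record order.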
+func findReachableFiles(files []graph.InputFile, entryPoints []graph.EntryPoint) []uint32 { + visited := make(map[uint32]bool) + var order []uint32 + var visit func(uint32) + + // Include this file and all files it imports + visit = func(sourceIndex uint32) { + if !visited[sourceIndex] { + visited[sourceIndex] = true + file := &files[sourceIndex] + if repr, ok := file.Repr.(*graph.JSRepr); ok && repr.CSSSourceIndex.IsValid() { + visit(repr.CSSSourceIndex.GetIndex()) + } + for _, record := range *file.Repr.ImportRecords() { + if record.SourceIndex.IsValid() { + visit(record.SourceIndex.GetIndex()) + } + } + + // Each file must come after its dependencies + order = append(order, sourceIndex) + } + } + + // The runtime is always included in case it's needed + visit(runtime.SourceIndex) + + // Include all files reachable from any entry point + for _, entryPoint := range entryPoints { + visit(entryPoint.SourceIndex) + } + + return order +} + +// This is done in parallel with linking because linking is a mostly serial +// phase and there are extra resources for parallelism. This could also be done +// during parsing but that would slow down parsing and delay the start of the +// linking phase, which then delays the whole bundling process. +// +// However, doing this during parsing would allow it to be cached along with +// the parsed ASTs which would then speed up incremental builds. In the future +// it could be good to optionally have this be computed during the parsing +// phase when incremental builds are active but otherwise still have it be +// computed during linking for optimal speed during non-incremental builds. +func (b *Bundle) computeDataForSourceMapsInParallel(options *config.Options, reachableFiles []uint32) func() []dataForSourceMap { + if options.SourceMap == config.SourceMapNone { + return func() []dataForSourceMap { + return nil + } + } + + var waitGroup sync.WaitGroup + results := make([]dataForSourceMap, len(b.files)) + + for _, sourceIndex := range reachableFiles { + if f := &b.files[sourceIndex]; f.inputFile.Loader.CanHaveSourceMap() { + var approximateLineCount int32 + switch repr := f.inputFile.Repr.(type) { + case *graph.JSRepr: + approximateLineCount = repr.AST.ApproximateLineCount + case *graph.CSSRepr: + approximateLineCount = repr.AST.ApproximateLineCount + } + waitGroup.Add(1) + go func(sourceIndex uint32, f *scannerFile, approximateLineCount int32) { + result := &results[sourceIndex] + result.lineOffsetTables = sourcemap.GenerateLineOffsetTables(f.inputFile.Source.Contents, approximateLineCount) + sm := f.inputFile.InputSourceMap + if !options.ExcludeSourcesContent { + if sm == nil { + // Simple case: no nested source map + result.quotedContents = [][]byte{js_printer.QuoteForJSON(f.inputFile.Source.Contents, options.ASCIIOnly)} + } else { + // Complex case: nested source map + result.quotedContents = make([][]byte, len(sm.Sources)) + nullContents := []byte("null") + for i := range sm.Sources { + // Missing contents become a "null" literal + quotedContents := nullContents + if i < len(sm.SourcesContent) { + if value := sm.SourcesContent[i]; value.Quoted != "" { + if options.ASCIIOnly && !isASCIIOnly(value.Quoted) { + // Re-quote non-ASCII values if output is ASCII-only + quotedContents = js_printer.QuoteForJSON(js_lexer.UTF16ToString(value.Value), options.ASCIIOnly) + } else { + // Otherwise just use the value directly from the input file + quotedContents = []byte(value.Quoted) + } + } + } + result.quotedContents[i] = quotedContents + } + } + } + waitGroup.Done() 
+ }(sourceIndex, f, approximateLineCount) + } + } + + return func() []dataForSourceMap { + waitGroup.Wait() + return results + } +} + +func (b *Bundle) generateMetadataJSON(results []graph.OutputFile, allReachableFiles []uint32, asciiOnly bool) string { + sb := strings.Builder{} + sb.WriteString("{\n \"inputs\": {") + + // Write inputs + isFirst := true + for _, sourceIndex := range allReachableFiles { + if sourceIndex == runtime.SourceIndex { + continue + } + if file := &b.files[sourceIndex]; len(file.jsonMetadataChunk) > 0 { + if isFirst { + isFirst = false + sb.WriteString("\n ") + } else { + sb.WriteString(",\n ") + } + sb.WriteString(file.jsonMetadataChunk) + } + } + + sb.WriteString("\n },\n \"outputs\": {") + + // Write outputs + isFirst = true + paths := make(map[string]bool) + for _, result := range results { + if len(result.JSONMetadataChunk) > 0 { + path := b.res.PrettyPath(logger.Path{Text: result.AbsPath, Namespace: "file"}) + if paths[path] { + // Don't write out the same path twice (can happen with the "file" loader) + continue + } + if isFirst { + isFirst = false + sb.WriteString("\n ") + } else { + sb.WriteString(",\n ") + } + paths[path] = true + sb.WriteString(fmt.Sprintf("%s: ", js_printer.QuoteForJSON(path, asciiOnly))) + sb.WriteString(result.JSONMetadataChunk) + } + } + + sb.WriteString("\n }\n}\n") + return sb.String() +} + +type runtimeCacheKey struct { + MangleSyntax bool + MinifyIdentifiers bool + ES6 bool +} + +type runtimeCache struct { + astMutex sync.Mutex + astMap map[runtimeCacheKey]js_ast.AST +} + +var globalRuntimeCache runtimeCache + +func (cache *runtimeCache) parseRuntime(options *config.Options) (source logger.Source, runtimeAST js_ast.AST, ok bool) { + key := runtimeCacheKey{ + // All configuration options that the runtime code depends on must go here + MangleSyntax: options.MangleSyntax, + MinifyIdentifiers: options.MinifyIdentifiers, + ES6: runtime.CanUseES6(options.UnsupportedJSFeatures), + } + + // Determine which source to use + if key.ES6 { + source = runtime.ES6Source + } else { + source = runtime.ES5Source + } + + // Cache hit? 
+ (func() { + cache.astMutex.Lock() + defer cache.astMutex.Unlock() + if cache.astMap != nil { + runtimeAST, ok = cache.astMap[key] + } + })() + if ok { + return + } + + // Cache miss + var constraint int + if key.ES6 { + constraint = 2015 + } else { + constraint = 5 + } + log := logger.NewDeferLog(logger.DeferLogAll) + runtimeAST, ok = js_parser.Parse(log, source, js_parser.OptionsFromConfig(&config.Options{ + // These configuration options must only depend on the key + MangleSyntax: key.MangleSyntax, + MinifyIdentifiers: key.MinifyIdentifiers, + UnsupportedJSFeatures: compat.UnsupportedJSFeatures( + map[compat.Engine][]int{compat.ES: {constraint}}), + + // Always do tree shaking for the runtime because we never want to + // include unnecessary runtime code + TreeShaking: true, + })) + if log.HasErrors() { + msgs := "Internal error: failed to parse runtime:\n" + for _, msg := range log.Done() { + msgs += msg.String(logger.OutputOptions{}, logger.TerminalInfo{}) + } + panic(msgs[:len(msgs)-1]) + } + + // Cache for next time + if ok { + cache.astMutex.Lock() + defer cache.astMutex.Unlock() + if cache.astMap == nil { + cache.astMap = make(map[runtimeCacheKey]js_ast.AST) + } + cache.astMap[key] = runtimeAST + } + return +} diff --git a/vendor/github.com/evanw/esbuild/internal/bundler/debug.go b/vendor/github.com/evanw/esbuild/internal/bundler/debug.go new file mode 100644 index 0000000..731a34f --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/bundler/debug.go @@ -0,0 +1,132 @@ +package bundler + +import ( + "fmt" + "strings" + + "github.com/evanw/esbuild/internal/ast" + "github.com/evanw/esbuild/internal/graph" + "github.com/evanw/esbuild/internal/js_ast" + "github.com/evanw/esbuild/internal/js_printer" +) + +// Set this to true and then load the resulting metafile in "graph-debugger.html" +// to debug graph information. +// +// This is deliberately not exposed in the final binary. It is *very* internal +// and only exists to help debug esbuild itself. Make sure this is always set +// back to false before committing. 
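Back in bundler.go, `parseRuntime` above caches the parsed runtime AST with a check-parse-recheck pattern around a mutex, keyed by exactly the option fields the runtime code depends on. A generic miniature of that pattern, with a `string` standing in for the parsed `js_ast.AST` and illustrative names throughout:

```go
// Generic miniature of the runtime-AST cache's locking pattern.
package main

import (
	"fmt"
	"sync"
)

type cacheKey struct{ minify, es6 bool }

type astCache struct {
	mu sync.Mutex
	m  map[cacheKey]string // stands in for map[runtimeCacheKey]js_ast.AST
}

func (c *astCache) get(key cacheKey, parse func() string) string {
	// Cache hit?
	c.mu.Lock()
	if v, ok := c.m[key]; ok {
		c.mu.Unlock()
		return v
	}
	c.mu.Unlock()

	// Parse outside the lock so concurrent misses don't serialize; a rare
	// duplicate parse is acceptable because the result is identical.
	v := parse()

	// Cache for next time.
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.m == nil {
		c.m = make(map[cacheKey]string)
	}
	c.m[key] = v
	return v
}

func main() {
	var c astCache
	fmt.Println(c.get(cacheKey{minify: true, es6: true}, func() string { return "parsed runtime AST" }))
}
```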
+const debugVerboseMetafile = false + +func (c *linkerContext) generateExtraDataForFileJS(sourceIndex uint32) string { + if !debugVerboseMetafile { + return "" + } + + file := &c.graph.Files[sourceIndex] + repr := file.InputFile.Repr.(*graph.JSRepr) + sb := strings.Builder{} + + quoteSym := func(ref js_ast.Ref) string { + name := fmt.Sprintf("%d:%d [%s]", ref.SourceIndex, ref.InnerIndex, c.graph.Symbols.Get(ref).OriginalName) + return string(js_printer.QuoteForJSON(name, c.options.ASCIIOnly)) + } + + sb.WriteString(`,"parts":[`) + for partIndex, part := range repr.AST.Parts { + if partIndex > 0 { + sb.WriteByte(',') + } + var isFirst bool + code := "" + + sb.WriteString(fmt.Sprintf(`{"isLive":%v`, part.IsLive)) + sb.WriteString(fmt.Sprintf(`,"canBeRemovedIfUnused":%v`, part.CanBeRemovedIfUnused)) + + if partIndex == int(js_ast.NSExportPartIndex) { + sb.WriteString(`,"nsExportPartIndex":true`) + } else if ast.MakeIndex32(uint32(partIndex)) == repr.Meta.WrapperPartIndex { + sb.WriteString(`,"wrapperPartIndex":true`) + } else if len(part.Stmts) > 0 { + start := part.Stmts[0].Loc.Start + end := len(file.InputFile.Source.Contents) + if partIndex+1 < len(repr.AST.Parts) { + if nextStmts := repr.AST.Parts[partIndex+1].Stmts; len(nextStmts) > 0 { + if nextStart := nextStmts[0].Loc.Start; nextStart >= start { + end = int(nextStart) + } + } + } + code = file.InputFile.Source.Contents[start:end] + } + + // importRecords + sb.WriteString(`,"importRecords":[`) + isFirst = true + for _, importRecordIndex := range part.ImportRecordIndices { + record := repr.AST.ImportRecords[importRecordIndex] + if !record.SourceIndex.IsValid() { + continue + } + if isFirst { + isFirst = false + } else { + sb.WriteByte(',') + } + path := c.graph.Files[record.SourceIndex.GetIndex()].InputFile.Source.PrettyPath + sb.WriteString(fmt.Sprintf(`{"source":%s}`, js_printer.QuoteForJSON(path, c.options.ASCIIOnly))) + } + sb.WriteByte(']') + + // declaredSymbols + sb.WriteString(`,"declaredSymbols":[`) + isFirst = true + for _, declSym := range part.DeclaredSymbols { + if !declSym.IsTopLevel { + continue + } + if isFirst { + isFirst = false + } else { + sb.WriteByte(',') + } + sb.WriteString(fmt.Sprintf(`{"name":%s}`, quoteSym(declSym.Ref))) + } + sb.WriteByte(']') + + // symbolUses + sb.WriteString(`,"symbolUses":[`) + isFirst = true + for ref, uses := range part.SymbolUses { + if isFirst { + isFirst = false + } else { + sb.WriteByte(',') + } + sb.WriteString(fmt.Sprintf(`{"name":%s,"countEstimate":%d}`, quoteSym(ref), uses.CountEstimate)) + } + sb.WriteByte(']') + + // dependencies + sb.WriteString(`,"dependencies":[`) + for i, dep := range part.Dependencies { + if i > 0 { + sb.WriteByte(',') + } + sb.WriteString(fmt.Sprintf(`{"source":%s,"partIndex":%d}`, + js_printer.QuoteForJSON(c.graph.Files[dep.SourceIndex].InputFile.Source.PrettyPath, c.options.ASCIIOnly), + dep.PartIndex, + )) + } + sb.WriteByte(']') + + // code + sb.WriteString(`,"code":`) + sb.Write(js_printer.QuoteForJSON(strings.TrimRight(code, "\n"), c.options.ASCIIOnly)) + + sb.WriteByte('}') + } + sb.WriteString(`]`) + + return sb.String() +} diff --git a/vendor/github.com/evanw/esbuild/internal/bundler/linker.go b/vendor/github.com/evanw/esbuild/internal/bundler/linker.go new file mode 100644 index 0000000..31b102f --- /dev/null +++ b/vendor/github.com/evanw/esbuild/internal/bundler/linker.go @@ -0,0 +1,5581 @@ +package bundler + +import ( + "bytes" + "encoding/base64" + "encoding/binary" + "fmt" + "hash" + "path" + "sort" + "strings" + "sync" + + 
"github.com/evanw/esbuild/internal/ast" + "github.com/evanw/esbuild/internal/compat" + "github.com/evanw/esbuild/internal/config" + "github.com/evanw/esbuild/internal/css_ast" + "github.com/evanw/esbuild/internal/css_printer" + "github.com/evanw/esbuild/internal/fs" + "github.com/evanw/esbuild/internal/graph" + "github.com/evanw/esbuild/internal/helpers" + "github.com/evanw/esbuild/internal/js_ast" + "github.com/evanw/esbuild/internal/js_lexer" + "github.com/evanw/esbuild/internal/js_printer" + "github.com/evanw/esbuild/internal/logger" + "github.com/evanw/esbuild/internal/renamer" + "github.com/evanw/esbuild/internal/resolver" + "github.com/evanw/esbuild/internal/runtime" + "github.com/evanw/esbuild/internal/sourcemap" + "github.com/evanw/esbuild/internal/xxhash" +) + +type linkerContext struct { + options *config.Options + timer *helpers.Timer + log logger.Log + fs fs.FS + res resolver.Resolver + graph graph.LinkerGraph + + // This helps avoid an infinite loop when matching imports to exports + cycleDetector []importTracker + + // We may need to refer to the CommonJS "module" symbol for exports + unboundModuleRef js_ast.Ref + + // We may need to refer to the "__esm" and/or "__commonJS" runtime symbols + cjsRuntimeRef js_ast.Ref + esmRuntimeRef js_ast.Ref + + // This represents the parallel computation of source map related data. + // Calling this will block until the computation is done. The resulting value + // is shared between threads and must be treated as immutable. + dataForSourceMaps func() []dataForSourceMap + + // This is passed to us from the bundling phase + uniqueKeyPrefix string + uniqueKeyPrefixBytes []byte // This is just "uniqueKeyPrefix" in byte form +} + +type partRange struct { + sourceIndex uint32 + partIndexBegin uint32 + partIndexEnd uint32 +} + +type chunkInfo struct { + // This is a random string and is used to represent the output path of this + // chunk before the final output path has been computed. + uniqueKey string + + filesWithPartsInChunk map[uint32]bool + entryBits helpers.BitSet + + // This information is only useful if "isEntryPoint" is true + isEntryPoint bool + sourceIndex uint32 // An index into "c.sources" + entryPointBit uint // An index into "c.graph.EntryPoints" + + // For code splitting + crossChunkImports []chunkImport + + // This is the representation-specific information + chunkRepr chunkRepr + + // This is the final path of this chunk relative to the output directory, but + // without the substitution of the final hash (since it hasn't been computed). + finalTemplate []config.PathTemplate + + // This is the final path of this chunk relative to the output directory. It + // is the substitution of the final hash into "finalTemplate". + finalRelPath string + + // If non-empty, this chunk needs to generate an external legal comments file. + externalLegalComments []byte + + // When this chunk is initially generated in isolation, the output pieces + // will contain slices of the output with the unique keys of other chunks + // omitted. + intermediateOutput intermediateOutput + + // This contains the hash for just this chunk without including information + // from the hashes of other chunks. Later on in the linking process, the + // final hash for this chunk will be constructed by merging the isolated + // hashes of all transitive dependencies of this chunk. This is separated + // into two phases like this to handle cycles in the chunk import graph. 
+ waitForIsolatedHash func() []byte + + // Other fields relating to the output file for this chunk + jsonMetadataChunkCallback func(finalOutputSize int) helpers.Joiner + outputSourceMap sourcemap.SourceMapPieces + isExecutable bool +} + +type chunkImport struct { + chunkIndex uint32 + importKind ast.ImportKind +} + +type outputPieceIndexKind uint8 + +const ( + outputPieceNone outputPieceIndexKind = iota + outputPieceAssetIndex + outputPieceChunkIndex +) + +// This is a chunk of source code followed by a reference to another chunk. For +// example, the file "@import 'CHUNK0001'; body { color: black; }" would be +// represented by two pieces, one with the data "@import '" and another with the +// data "'; body { color: black; }". The first would have the chunk index 1 and +// the second would have an invalid chunk index. +type outputPiece struct { + data []byte + + // Note: The "kind" may be "outputPieceNone" in which case there is one piece + // with data and no chunk index. For example, the chunk may not contain any + // imports. + index uint32 + kind outputPieceIndexKind +} + +type intermediateOutput struct { + // If the chunk doesn't have any references to other chunks, then "pieces" is + // nil and "joiner" contains the contents of the chunk. This is more efficient + // because it avoids doing a join operation twice. + joiner helpers.Joiner + + // Otherwise, "pieces" contains the contents of the chunk and "joiner" should + // not be used. Another joiner will have to be constructed later when merging + // the pieces together. + pieces []outputPiece +} + +type chunkRepr interface{ isChunk() } + +func (*chunkReprJS) isChunk() {} +func (*chunkReprCSS) isChunk() {} + +type chunkReprJS struct { + filesInChunkInOrder []uint32 + partsInChunkInOrder []partRange + + // For code splitting + crossChunkPrefixStmts []js_ast.Stmt + crossChunkSuffixStmts []js_ast.Stmt + exportsToOtherChunks map[js_ast.Ref]string + importsFromOtherChunks map[uint32]crossChunkImportItemArray +} + +type chunkReprCSS struct { + externalImportsInOrder []externalImportCSS + filesInChunkInOrder []uint32 +} + +type externalImportCSS struct { + path logger.Path + conditions []css_ast.Token + conditionImportRecords []ast.ImportRecord +} + +// Returns a log where "log.HasErrors()" only returns true if any errors have +// been logged since this call. This is useful when there have already been +// errors logged by other linkers that share the same log. 
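The function below decorates the shared logger so that "has errors" becomes local to one link operation. A toy, runnable miniature of the same decoration pattern first, using a plain struct of function fields instead of `logger.Log`:

```go
// Miniature of the error-tracking logger decoration; toyLog is invented.
package main

import (
	"fmt"
	"sync"
)

type toyLog struct {
	addMsg    func(kind, text string)
	hasErrors func() bool
}

func wrapped(l toyLog) toyLog {
	var mu sync.Mutex
	var hasErrors bool
	inner := l.addMsg
	l.addMsg = func(kind, text string) {
		if kind == "error" {
			mu.Lock()
			hasErrors = true
			mu.Unlock()
		}
		inner(kind, text) // still forward everything to the shared log
	}
	l.hasErrors = func() bool {
		mu.Lock()
		defer mu.Unlock()
		return hasErrors
	}
	return l
}

func main() {
	base := toyLog{
		addMsg:    func(kind, text string) { fmt.Println(kind + ": " + text) },
		hasErrors: func() bool { return true }, // the shared log already saw errors
	}
	l := wrapped(base)
	fmt.Println(l.hasErrors()) // false: error state is local to this wrapper
	l.addMsg("error", "boom")
	fmt.Println(l.hasErrors()) // true
}
```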
+func wrappedLog(log logger.Log) logger.Log { + var mutex sync.Mutex + var hasErrors bool + addMsg := log.AddMsg + + log.AddMsg = func(msg logger.Msg) { + if msg.Kind == logger.Error { + mutex.Lock() + defer mutex.Unlock() + hasErrors = true + } + addMsg(msg) + } + + log.HasErrors = func() bool { + mutex.Lock() + defer mutex.Unlock() + return hasErrors + } + + return log +} + +func link( + options *config.Options, + timer *helpers.Timer, + log logger.Log, + fs fs.FS, + res resolver.Resolver, + inputFiles []graph.InputFile, + entryPoints []graph.EntryPoint, + uniqueKeyPrefix string, + reachableFiles []uint32, + dataForSourceMaps func() []dataForSourceMap, +) []graph.OutputFile { + timer.Begin("Link") + defer timer.End("Link") + + log = wrappedLog(log) + + timer.Begin("Clone linker graph") + c := linkerContext{ + options: options, + timer: timer, + log: log, + fs: fs, + res: res, + dataForSourceMaps: dataForSourceMaps, + uniqueKeyPrefix: uniqueKeyPrefix, + uniqueKeyPrefixBytes: []byte(uniqueKeyPrefix), + graph: graph.CloneLinkerGraph( + inputFiles, + reachableFiles, + entryPoints, + options.CodeSplitting, + ), + } + timer.End("Clone linker graph") + + // Use a smaller version of these functions if we don't need profiler names + runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr) + if c.options.ProfilerNames { + c.cjsRuntimeRef = runtimeRepr.AST.NamedExports["__commonJS"].Ref + c.esmRuntimeRef = runtimeRepr.AST.NamedExports["__esm"].Ref + } else { + c.cjsRuntimeRef = runtimeRepr.AST.NamedExports["__commonJSMin"].Ref + c.esmRuntimeRef = runtimeRepr.AST.NamedExports["__esmMin"].Ref + } + + for _, entryPoint := range entryPoints { + if repr, ok := c.graph.Files[entryPoint.SourceIndex].InputFile.Repr.(*graph.JSRepr); ok { + // Loaders default to CommonJS when they are the entry point and the output + // format is not ESM-compatible since that avoids generating the ESM-to-CJS + // machinery. + if repr.AST.HasLazyExport && (c.options.Mode == config.ModePassThrough || + (c.options.Mode == config.ModeConvertFormat && !c.options.OutputFormat.KeepES6ImportExportSyntax())) { + repr.AST.ExportsKind = js_ast.ExportsCommonJS + } + + // Entry points with ES6 exports must generate an exports object when + // targeting non-ES6 formats. Note that the IIFE format only needs this + // when the global name is present, since that's the only way the exports + // can actually be observed externally. 
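The condition implemented just below reduces to a small decision table, sketched here with a hypothetical helper (`needsExportsObject` is not an esbuild name, and string format tags stand in for `config.Format` values):

```go
// Toy decision mirroring the entry-point exports check below.
package main

import "fmt"

// ES6 exports can only be observed externally for CommonJS output, or for
// IIFE output when a global name is configured.
func needsExportsObject(hasESMExports bool, format, globalName string) bool {
	return hasESMExports && (format == "cjs" || (format == "iife" && globalName != ""))
}

func main() {
	fmt.Println(needsExportsObject(true, "iife", ""))      // false: nothing can observe the exports
	fmt.Println(needsExportsObject(true, "iife", "MyLib")) // true: the global name exposes them
	fmt.Println(needsExportsObject(true, "cjs", ""))       // true: module.exports exposes them
}
```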
+ if repr.AST.ExportKeyword.Len > 0 && (options.OutputFormat == config.FormatCommonJS || + (options.OutputFormat == config.FormatIIFE && len(options.GlobalName) > 0)) { + repr.AST.UsesExportsRef = true + repr.Meta.ForceIncludeExportsForEntryPoint = true + } + } + } + + // Allocate a new unbound symbol called "module" in case we need it later + if c.options.OutputFormat == config.FormatCommonJS { + c.unboundModuleRef = c.graph.GenerateNewSymbol(runtime.SourceIndex, js_ast.SymbolUnbound, "module") + } else { + c.unboundModuleRef = js_ast.InvalidRef + } + + c.scanImportsAndExports() + + // Stop now if there were errors + if c.log.HasErrors() { + return []graph.OutputFile{} + } + + c.treeShakingAndCodeSplitting() + + if c.options.Mode == config.ModePassThrough { + for _, entryPoint := range c.graph.EntryPoints() { + c.preventExportsFromBeingRenamed(entryPoint.SourceIndex) + } + } + + chunks := c.computeChunks() + c.computeCrossChunkDependencies(chunks) + + // Make sure calls to "js_ast.FollowSymbols()" in parallel goroutines after this + // won't hit concurrent map mutation hazards + js_ast.FollowAllSymbols(c.graph.Symbols) + + return c.generateChunksInParallel(chunks) +} + +// Currently the automatic chunk generation algorithm should by construction +// never generate chunks that import each other since files are allocated to +// chunks based on which entry points they are reachable from. +// +// This will change in the future when we allow manual chunk labels. But before +// we allow manual chunk labels, we'll need to rework module initialization to +// allow code splitting chunks to be lazily-initialized. +// +// Since that work hasn't been finished yet, cycles in the chunk import graph +// can cause initialization bugs. So let's forbid these cycles for now to guard +// against code splitting bugs that could cause us to generate buggy chunks. +func (c *linkerContext) enforceNoCyclicChunkImports(chunks []chunkInfo) { + var validate func(int, []int) + validate = func(chunkIndex int, path []int) { + for _, otherChunkIndex := range path { + if chunkIndex == otherChunkIndex { + c.log.Add(logger.Error, nil, logger.Range{}, "Internal error: generated chunks contain a circular import") + return + } + } + path = append(path, chunkIndex) + for _, chunkImport := range chunks[chunkIndex].crossChunkImports { + // Ignore cycles caused by dynamic "import()" expressions. These are fine + // because they don't necessarily cause initialization order issues and + // they don't indicate a bug in our chunk generation algorithm. They arise + // normally in real code (e.g. two files that import each other). + if chunkImport.importKind != ast.ImportDynamic { + validate(int(chunkImport.chunkIndex), path) + } + } + } + path := make([]int, 0, len(chunks)) + for i := range chunks { + validate(i, path) + } +} + +func (c *linkerContext) generateChunksInParallel(chunks []chunkInfo) []graph.OutputFile { + c.timer.Begin("Generate chunks") + defer c.timer.End("Generate chunks") + + // Generate each chunk on a separate goroutine + generateWaitGroup := sync.WaitGroup{} + generateWaitGroup.Add(len(chunks)) + for chunkIndex := range chunks { + switch chunks[chunkIndex].chunkRepr.(type) { + case *chunkReprJS: + go c.generateChunkJS(chunks, chunkIndex, &generateWaitGroup) + case *chunkReprCSS: + go c.generateChunkCSS(chunks, chunkIndex, &generateWaitGroup) + } + } + c.enforceNoCyclicChunkImports(chunks) + generateWaitGroup.Wait() + + // Compute the final hashes of each chunk. 
This can technically be done in + // parallel but it probably doesn't matter so much because we're not hashing + // that much data. + visited := make([]uint32, len(chunks)) + var finalBytes []byte + for chunkIndex := range chunks { + chunk := &chunks[chunkIndex] + var hashSubstitution *string + + // Only wait for the hash if necessary + if config.HasPlaceholder(chunk.finalTemplate, config.HashPlaceholder) { + // Compute the final hash using the isolated hashes of the dependencies + hash := xxhash.New() + appendIsolatedHashesForImportedChunks(hash, chunks, uint32(chunkIndex), visited, ^uint32(chunkIndex)) + finalBytes = hash.Sum(finalBytes[:0]) + finalString := hashForFileName(finalBytes) + hashSubstitution = &finalString + } + + // Render the last remaining placeholder in the template + chunk.finalRelPath = config.TemplateToString(config.SubstituteTemplate(chunk.finalTemplate, config.PathPlaceholders{ + Hash: hashSubstitution, + })) + } + + // Generate the final output files by joining file pieces together + c.timer.Begin("Generate final output files") + var resultsWaitGroup sync.WaitGroup + results := make([][]graph.OutputFile, len(chunks)) + resultsWaitGroup.Add(len(chunks)) + for chunkIndex, chunk := range chunks { + go func(chunkIndex int, chunk chunkInfo) { + var outputFiles []graph.OutputFile + + // Each file may optionally contain additional files to be copied to the + // output directory. This is used by the "file" loader. + var commentPrefix string + var commentSuffix string + switch chunkRepr := chunk.chunkRepr.(type) { + case *chunkReprJS: + for _, sourceIndex := range chunkRepr.filesInChunkInOrder { + outputFiles = append(outputFiles, c.graph.Files[sourceIndex].InputFile.AdditionalFiles...) + } + commentPrefix = "//" + + case *chunkReprCSS: + for _, sourceIndex := range chunkRepr.filesInChunkInOrder { + outputFiles = append(outputFiles, c.graph.Files[sourceIndex].InputFile.AdditionalFiles...) + } + commentPrefix = "/*" + commentSuffix = " */" + } + + // Path substitution for the chunk itself + finalRelDir := c.fs.Dir(chunk.finalRelPath) + outputContentsJoiner, outputSourceMapShifts := c.substituteFinalPaths(chunks, chunk.intermediateOutput, + func(finalRelPathForImport string) string { + return c.pathBetweenChunks(finalRelDir, finalRelPathForImport) + }) + + // Generate the optional legal comments file for this chunk + if chunk.externalLegalComments != nil { + finalRelPathForLegalComments := chunk.finalRelPath + ".LEGAL.txt" + + // Link the file to the legal comments + if c.options.LegalComments == config.LegalCommentsLinkedWithComment { + importPath := c.pathBetweenChunks(finalRelDir, finalRelPathForLegalComments) + importPath = strings.TrimPrefix(importPath, "./") + outputContentsJoiner.EnsureNewlineAtEnd() + outputContentsJoiner.AddString("/*! 
For license information please see ") + outputContentsJoiner.AddString(importPath) + outputContentsJoiner.AddString(" */\n") + } + + // Write the external legal comments file + outputFiles = append(outputFiles, graph.OutputFile{ + AbsPath: c.fs.Join(c.options.AbsOutputDir, finalRelPathForLegalComments), + Contents: chunk.externalLegalComments, + JSONMetadataChunk: fmt.Sprintf( + "{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": {},\n \"bytes\": %d\n }", len(chunk.externalLegalComments)), + }) + } + + // Generate the optional source map for this chunk + if c.options.SourceMap != config.SourceMapNone && chunk.outputSourceMap.HasContent() { + outputSourceMap := chunk.outputSourceMap.Finalize(outputSourceMapShifts) + finalRelPathForSourceMap := chunk.finalRelPath + ".map" + + // Potentially write a trailing source map comment + switch c.options.SourceMap { + case config.SourceMapLinkedWithComment: + importPath := c.pathBetweenChunks(finalRelDir, finalRelPathForSourceMap) + importPath = strings.TrimPrefix(importPath, "./") + outputContentsJoiner.EnsureNewlineAtEnd() + outputContentsJoiner.AddString(commentPrefix) + outputContentsJoiner.AddString("# sourceMappingURL=") + outputContentsJoiner.AddString(importPath) + outputContentsJoiner.AddString(commentSuffix) + outputContentsJoiner.AddString("\n") + + case config.SourceMapInline, config.SourceMapInlineAndExternal: + outputContentsJoiner.EnsureNewlineAtEnd() + outputContentsJoiner.AddString(commentPrefix) + outputContentsJoiner.AddString("# sourceMappingURL=data:application/json;base64,") + outputContentsJoiner.AddString(base64.StdEncoding.EncodeToString(outputSourceMap)) + outputContentsJoiner.AddString(commentSuffix) + outputContentsJoiner.AddString("\n") + } + + // Potentially write the external source map file + switch c.options.SourceMap { + case config.SourceMapLinkedWithComment, config.SourceMapInlineAndExternal, config.SourceMapExternalWithoutComment: + outputFiles = append(outputFiles, graph.OutputFile{ + AbsPath: c.fs.Join(c.options.AbsOutputDir, finalRelPathForSourceMap), + Contents: outputSourceMap, + JSONMetadataChunk: fmt.Sprintf( + "{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": {},\n \"bytes\": %d\n }", len(outputSourceMap)), + }) + } + } + + // Finalize the output contents + outputContents := outputContentsJoiner.Done() + + // Path substitution for the JSON metadata + var jsonMetadataChunk string + if c.options.NeedsMetafile { + jsonMetadataChunkPieces := c.breakOutputIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents)), uint32(len(chunks))) + jsonMetadataChunkBytes, _ := c.substituteFinalPaths(chunks, jsonMetadataChunkPieces, func(finalRelPathForImport string) string { + return c.res.PrettyPath(logger.Path{Text: c.fs.Join(c.options.AbsOutputDir, finalRelPathForImport), Namespace: "file"}) + }) + jsonMetadataChunk = string(jsonMetadataChunkBytes.Done()) + } + + // Generate the output file for this chunk + outputFiles = append(outputFiles, graph.OutputFile{ + AbsPath: c.fs.Join(c.options.AbsOutputDir, chunk.finalRelPath), + Contents: outputContents, + JSONMetadataChunk: jsonMetadataChunk, + IsExecutable: chunk.isExecutable, + }) + + results[chunkIndex] = outputFiles + resultsWaitGroup.Done() + }(chunkIndex, chunk) + } + resultsWaitGroup.Wait() + c.timer.End("Generate final output files") + + // Merge the output files from the different goroutines together in order + outputFilesLen := 0 + for _, result := range results { + outputFilesLen += len(result) + } + outputFiles := make([]graph.OutputFile, 0, 
outputFilesLen) + for _, result := range results { + outputFiles = append(outputFiles, result...) + } + return outputFiles +} + +// Given a set of output pieces (i.e. a buffer already divided into the spans +// between import paths), substitute the final import paths in and then join +// everything into a single byte buffer. +func (c *linkerContext) substituteFinalPaths( + chunks []chunkInfo, + intermediateOutput intermediateOutput, + modifyPath func(string) string, +) (j helpers.Joiner, shifts []sourcemap.SourceMapShift) { + // Optimization: If there can be no substitutions, just reuse the initial + // joiner that was used when generating the intermediate chunk output + // instead of creating another one and copying the whole file into it. + if intermediateOutput.pieces == nil { + return intermediateOutput.joiner, []sourcemap.SourceMapShift{{}} + } + + var shift sourcemap.SourceMapShift + shifts = make([]sourcemap.SourceMapShift, 0, len(intermediateOutput.pieces)) + shifts = append(shifts, shift) + + for _, piece := range intermediateOutput.pieces { + var dataOffset sourcemap.LineColumnOffset + j.AddBytes(piece.data) + dataOffset.AdvanceBytes(piece.data) + shift.Before.Add(dataOffset) + shift.After.Add(dataOffset) + + switch piece.kind { + case outputPieceAssetIndex: + file := c.graph.Files[piece.index] + if len(file.InputFile.AdditionalFiles) != 1 { + panic("Internal error") + } + relPath, _ := c.fs.Rel(c.options.AbsOutputDir, file.InputFile.AdditionalFiles[0].AbsPath) + + // Make sure to always use forward slashes, even on Windows + relPath = strings.ReplaceAll(relPath, "\\", "/") + + importPath := modifyPath(relPath) + j.AddString(importPath) + shift.Before.AdvanceString(file.InputFile.UniqueKeyForFileLoader) + shift.After.AdvanceString(importPath) + shifts = append(shifts, shift) + + case outputPieceChunkIndex: + chunk := chunks[piece.index] + importPath := modifyPath(chunk.finalRelPath) + j.AddString(importPath) + shift.Before.AdvanceString(chunk.uniqueKey) + shift.After.AdvanceString(importPath) + shifts = append(shifts, shift) + } + } + + return +} + +func (c *linkerContext) pathBetweenChunks(fromRelDir string, toRelPath string) string { + // Join with the public path if it has been configured + if c.options.PublicPath != "" { + return joinWithPublicPath(c.options.PublicPath, toRelPath) + } + + // Otherwise, return a relative path + relPath, ok := c.fs.Rel(fromRelDir, toRelPath) + if !ok { + c.log.Add(logger.Error, nil, logger.Range{}, + fmt.Sprintf("Cannot traverse from directory %q to chunk %q", fromRelDir, toRelPath)) + return "" + } + + // Make sure to always use forward slashes, even on Windows + relPath = strings.ReplaceAll(relPath, "\\", "/") + + // Make sure the relative path doesn't start with a name, since that could + // be interpreted as a package path instead of a relative path + if !strings.HasPrefix(relPath, "./") && !strings.HasPrefix(relPath, "../") { + relPath = "./" + relPath + } + + return relPath +} + +// Returns the path of this file relative to "outbase", which is then ready to +// be joined with the absolute output directory path. The directory and name +// components are returned separately for convenience. 
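A sketch of the chunk-to-chunk import path logic above (`pathBetweenChunks`), using `path/filepath` in place of esbuild's `fs` abstraction; the public-path join here is simplified relative to the real `joinWithPublicPath`:

```go
// Simplified sketch of computing an import path between two chunks.
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func pathBetweenChunks(publicPath, fromRelDir, toRelPath string) string {
	// Join with the public path if one has been configured.
	if publicPath != "" {
		return strings.TrimSuffix(publicPath, "/") + "/" + toRelPath
	}
	rel, err := filepath.Rel(fromRelDir, toRelPath)
	if err != nil {
		return ""
	}
	rel = strings.ReplaceAll(rel, "\\", "/") // forward slashes, even on Windows
	if !strings.HasPrefix(rel, "./") && !strings.HasPrefix(rel, "../") {
		rel = "./" + rel // avoid being parsed as a package path
	}
	return rel
}

func main() {
	fmt.Println(pathBetweenChunks("", "pages/home", "chunks/chunk-ABCD1234.js"))
	// ../../chunks/chunk-ABCD1234.js
	fmt.Println(pathBetweenChunks("https://cdn.example.com/assets", "pages/home", "chunks/chunk-ABCD1234.js"))
	// https://cdn.example.com/assets/chunks/chunk-ABCD1234.js
}
```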
+func pathRelativeToOutbase( + inputFile *graph.InputFile, + options *config.Options, + fs fs.FS, + avoidIndex bool, + customFilePath string, +) (relDir string, baseName string) { + relDir = "/" + absPath := inputFile.Source.KeyPath.Text + + if customFilePath != "" { + // Use the configured output path if present + absPath = customFilePath + if !fs.IsAbs(absPath) { + absPath = fs.Join(options.AbsOutputBase, absPath) + } + } else if inputFile.Source.KeyPath.Namespace != "file" { + // Come up with a path for virtual paths (i.e. non-file-system paths) + dir, base, _ := logger.PlatformIndependentPathDirBaseExt(absPath) + if avoidIndex && base == "index" { + _, base, _ = logger.PlatformIndependentPathDirBaseExt(dir) + } + baseName = sanitizeFilePathForVirtualModulePath(base) + return + } else { + // Heuristic: If the file is named something like "index.js", then use + // the name of the parent directory instead. This helps avoid the + // situation where many chunks are named "index" because of people + // dynamically-importing npm packages that make use of node's implicit + // "index" file name feature. + if avoidIndex { + base := fs.Base(absPath) + base = base[:len(base)-len(fs.Ext(base))] + if base == "index" { + absPath = fs.Dir(absPath) + } + } + } + + // Try to get a relative path to the base directory + relPath, ok := fs.Rel(options.AbsOutputBase, absPath) + if !ok { + // This can fail in some situations such as on different drives on + // Windows. In that case we just use the file name. + baseName = fs.Base(absPath) + } else { + // Now we finally have a relative path + relDir = fs.Dir(relPath) + "/" + baseName = fs.Base(relPath) + + // Use platform-independent slashes + relDir = strings.ReplaceAll(relDir, "\\", "/") + + // Replace leading "../" so we don't try to write outside of the output + // directory. This normally can't happen because "AbsOutputBase" is + // automatically computed to contain all entry point files, but it can + // happen if someone sets it manually via the "outbase" API option. + // + // Note that we can't just strip any leading "../" because that could + // cause two separate entry point paths to collide. For example, there + // could be both "src/index.js" and "../src/index.js" as entry points. + dotDotCount := 0 + for strings.HasPrefix(relDir[dotDotCount*3:], "../") { + dotDotCount++ + } + if dotDotCount > 0 { + // The use of "_.._" here is somewhat arbitrary but it is unlikely to + // collide with a folder named by a human and it works on Windows + // (Windows doesn't like names that end with a "."). And not starting + // with a "." means that it will not be hidden on Unix. 
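+			// For illustration (hypothetical path): an input file whose path
+			// relative to the outbase is "../../lib/entry.js" has a relDir of
+			// "../../lib/" and a dotDotCount of 2, so the prefix is rewritten
+			// to "_.._/_.._/lib/" by the line below.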
+ relDir = strings.Repeat("_.._/", dotDotCount) + relDir[dotDotCount*3:] + } + for strings.HasSuffix(relDir, "/") { + relDir = relDir[:len(relDir)-1] + } + relDir = "/" + relDir + if strings.HasSuffix(relDir, "/.") { + relDir = relDir[:len(relDir)-1] + } + } + + // Strip the file extension if the output path is an input file + if customFilePath == "" { + ext := fs.Ext(baseName) + baseName = baseName[:len(baseName)-len(ext)] + } + return +} + +func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) { + c.timer.Begin("Compute cross-chunk dependencies") + defer c.timer.End("Compute cross-chunk dependencies") + + jsChunks := 0 + for _, chunk := range chunks { + if _, ok := chunk.chunkRepr.(*chunkReprJS); ok { + jsChunks++ + } + } + if jsChunks < 2 { + // No need to compute cross-chunk dependencies if there can't be any + return + } + + type chunkMeta struct { + imports map[js_ast.Ref]bool + exports map[js_ast.Ref]bool + dynamicImports map[int]bool + } + + chunkMetas := make([]chunkMeta, len(chunks)) + + // For each chunk, see what symbols it uses from other chunks. Do this in + // parallel because it's the most expensive part of this function. + waitGroup := sync.WaitGroup{} + waitGroup.Add(len(chunks)) + for chunkIndex, chunk := range chunks { + go func(chunkIndex int, chunk chunkInfo) { + chunkMeta := &chunkMetas[chunkIndex] + imports := make(map[js_ast.Ref]bool) + chunkMeta.imports = imports + chunkMeta.exports = make(map[js_ast.Ref]bool) + + // Go over each file in this chunk + for sourceIndex := range chunk.filesWithPartsInChunk { + // Go over each part in this file that's marked for inclusion in this chunk + switch repr := c.graph.Files[sourceIndex].InputFile.Repr.(type) { + case *graph.JSRepr: + for partIndex, partMeta := range repr.AST.Parts { + if !partMeta.IsLive { + continue + } + part := &repr.AST.Parts[partIndex] + + // Rewrite external dynamic imports to point to the chunk for that entry point + for _, importRecordIndex := range part.ImportRecordIndices { + record := &repr.AST.ImportRecords[importRecordIndex] + if record.SourceIndex.IsValid() && c.isExternalDynamicImport(record, sourceIndex) { + otherChunkIndex := c.graph.Files[record.SourceIndex.GetIndex()].EntryPointChunkIndex + record.Path.Text = chunks[otherChunkIndex].uniqueKey + record.SourceIndex = ast.Index32{} + + // Track this cross-chunk dynamic import so we make sure to + // include its hash when we're calculating the hashes of all + // dependencies of this chunk. + if int(otherChunkIndex) != chunkIndex { + if chunkMeta.dynamicImports == nil { + chunkMeta.dynamicImports = make(map[int]bool) + } + chunkMeta.dynamicImports[int(otherChunkIndex)] = true + } + } + } + + // Remember what chunk each top-level symbol is declared in. Symbols + // with multiple declarations such as repeated "var" statements with + // the same name should already be marked as all being in a single + // chunk. In that case this will overwrite the same value below which + // is fine. + for _, declared := range part.DeclaredSymbols { + if declared.IsTopLevel { + c.graph.Symbols.Get(declared.Ref).ChunkIndex = ast.MakeIndex32(uint32(chunkIndex)) + } + } + + // Record each symbol used in this part. This will later be matched up + // with our map of which chunk a given symbol is declared in to + // determine if the symbol needs to be imported from another chunk. 
+ for ref := range part.SymbolUses { + symbol := c.graph.Symbols.Get(ref) + + // Ignore unbound symbols, which don't have declarations + if symbol.Kind == js_ast.SymbolUnbound { + continue + } + + // Ignore symbols that are going to be replaced by undefined + if symbol.ImportItemStatus == js_ast.ImportItemMissing { + continue + } + + // If this is imported from another file, follow the import + // reference and reference the symbol in that file instead + if importData, ok := repr.Meta.ImportsToBind[ref]; ok { + ref = importData.Ref + symbol = c.graph.Symbols.Get(ref) + } else if repr.Meta.Wrap == graph.WrapCJS && ref != repr.AST.WrapperRef { + // The only internal symbol that wrapped CommonJS files export + // is the wrapper itself. + continue + } + + // If this is an ES6 import from a CommonJS file, it will become a + // property access off the namespace symbol instead of a bare + // identifier. In that case we want to pull in the namespace symbol + // instead. The namespace symbol stores the result of "require()". + if symbol.NamespaceAlias != nil { + ref = symbol.NamespaceAlias.NamespaceRef + } + + // We must record this relationship even for symbols that are not + // imports. Due to code splitting, the definition of a symbol may + // be moved to a separate chunk than the use of a symbol even if + // the definition and use of that symbol are originally from the + // same source file. + imports[ref] = true + } + } + } + } + + // Include the exports if this is an entry point chunk + if chunk.isEntryPoint { + if repr, ok := c.graph.Files[chunk.sourceIndex].InputFile.Repr.(*graph.JSRepr); ok { + if repr.Meta.Wrap != graph.WrapCJS { + for _, alias := range repr.Meta.SortedAndFilteredExportAliases { + export := repr.Meta.ResolvedExports[alias] + targetRef := export.Ref + + // If this is an import, then target what the import points to + if importData, ok := c.graph.Files[export.SourceIndex].InputFile.Repr.(*graph.JSRepr).Meta.ImportsToBind[targetRef]; ok { + targetRef = importData.Ref + } + + // If this is an ES6 import from a CommonJS file, it will become a + // property access off the namespace symbol instead of a bare + // identifier. In that case we want to pull in the namespace symbol + // instead. The namespace symbol stores the result of "require()". 
+ if symbol := c.graph.Symbols.Get(targetRef); symbol.NamespaceAlias != nil { + targetRef = symbol.NamespaceAlias.NamespaceRef + } + + imports[targetRef] = true + } + } + + // Ensure "exports" is included if the current output format needs it + if repr.Meta.ForceIncludeExportsForEntryPoint { + imports[repr.AST.ExportsRef] = true + } + + // Include the wrapper if present + if repr.Meta.Wrap != graph.WrapNone { + imports[repr.AST.WrapperRef] = true + } + } + } + + waitGroup.Done() + }(chunkIndex, chunk) + } + waitGroup.Wait() + + // Mark imported symbols as exported in the chunk from which they are declared + for chunkIndex := range chunks { + chunk := &chunks[chunkIndex] + chunkRepr, ok := chunk.chunkRepr.(*chunkReprJS) + if !ok { + continue + } + chunkMeta := chunkMetas[chunkIndex] + + // Find all uses in this chunk of symbols from other chunks + chunkRepr.importsFromOtherChunks = make(map[uint32]crossChunkImportItemArray) + for importRef := range chunkMeta.imports { + // Ignore uses that aren't top-level symbols + if otherChunkIndex := c.graph.Symbols.Get(importRef).ChunkIndex; otherChunkIndex.IsValid() { + if otherChunkIndex := otherChunkIndex.GetIndex(); otherChunkIndex != uint32(chunkIndex) { + chunkRepr.importsFromOtherChunks[otherChunkIndex] = + append(chunkRepr.importsFromOtherChunks[otherChunkIndex], crossChunkImportItem{ref: importRef}) + chunkMetas[otherChunkIndex].exports[importRef] = true + } + } + } + + // If this is an entry point, make sure we import all chunks belonging to + // this entry point, even if there are no imports. We need to make sure + // these chunks are evaluated for their side effects too. + if chunk.isEntryPoint { + for otherChunkIndex, otherChunk := range chunks { + if _, ok := otherChunk.chunkRepr.(*chunkReprJS); ok && chunkIndex != otherChunkIndex && otherChunk.entryBits.HasBit(chunk.entryPointBit) { + imports := chunkRepr.importsFromOtherChunks[uint32(otherChunkIndex)] + chunkRepr.importsFromOtherChunks[uint32(otherChunkIndex)] = imports + } + } + } + + // Make sure we also track dynamic cross-chunk imports. These need to be + // tracked so we count them as dependencies of this chunk for the purpose + // of hash calculation. + if chunkMeta.dynamicImports != nil { + sortedDynamicImports := make([]int, 0, len(chunkMeta.dynamicImports)) + for chunkIndex := range chunkMeta.dynamicImports { + sortedDynamicImports = append(sortedDynamicImports, chunkIndex) + } + sort.Ints(sortedDynamicImports) + for _, chunkIndex := range sortedDynamicImports { + chunk.crossChunkImports = append(chunk.crossChunkImports, chunkImport{ + importKind: ast.ImportDynamic, + chunkIndex: uint32(chunkIndex), + }) + } + } + } + + // Generate cross-chunk exports. These must be computed before cross-chunk + // imports because of export alias renaming, which must consider all export + // aliases simultaneously to avoid collisions. 
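+	// For illustration (hypothetical chunk names and aliases): if a shared
+	// chunk declares a top-level symbol "foo" that an entry chunk uses, the
+	// shared chunk gets a suffix statement like "export { foo as a }" (with
+	// identifier minification; otherwise the alias is derived from "foo")
+	// and the entry chunk gets a prefix statement like
+	// "import { a as foo } from './chunk.ABC123.js'". Aliases must be unique
+	// across all exports of a chunk, which is why they are assigned in one pass.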
+ for chunkIndex := range chunks { + chunk := &chunks[chunkIndex] + chunkRepr, ok := chunk.chunkRepr.(*chunkReprJS) + if !ok { + continue + } + + chunkRepr.exportsToOtherChunks = make(map[js_ast.Ref]string) + switch c.options.OutputFormat { + case config.FormatESModule: + r := renamer.ExportRenamer{} + var items []js_ast.ClauseItem + for _, export := range c.sortedCrossChunkExportItems(chunkMetas[chunkIndex].exports) { + var alias string + if c.options.MinifyIdentifiers { + alias = r.NextMinifiedName() + } else { + alias = r.NextRenamedName(c.graph.Symbols.Get(export.Ref).OriginalName) + } + items = append(items, js_ast.ClauseItem{Name: js_ast.LocRef{Ref: export.Ref}, Alias: alias}) + chunkRepr.exportsToOtherChunks[export.Ref] = alias + } + if len(items) > 0 { + chunkRepr.crossChunkSuffixStmts = []js_ast.Stmt{{Data: &js_ast.SExportClause{ + Items: items, + }}} + } + + default: + panic("Internal error") + } + } + + // Generate cross-chunk imports. These must be computed after cross-chunk + // exports because the export aliases must already be finalized so they can + // be embedded in the generated import statements. + for chunkIndex := range chunks { + chunk := &chunks[chunkIndex] + chunkRepr, ok := chunk.chunkRepr.(*chunkReprJS) + if !ok { + continue + } + + var crossChunkPrefixStmts []js_ast.Stmt + + for _, crossChunkImport := range c.sortedCrossChunkImports(chunks, chunkRepr.importsFromOtherChunks) { + switch c.options.OutputFormat { + case config.FormatESModule: + var items []js_ast.ClauseItem + for _, item := range crossChunkImport.sortedImportItems { + items = append(items, js_ast.ClauseItem{Name: js_ast.LocRef{Ref: item.ref}, Alias: item.exportAlias}) + } + importRecordIndex := uint32(len(chunk.crossChunkImports)) + chunk.crossChunkImports = append(chunk.crossChunkImports, chunkImport{ + importKind: ast.ImportStmt, + chunkIndex: crossChunkImport.chunkIndex, + }) + if len(items) > 0 { + // "import {a, b} from './chunk.js'" + crossChunkPrefixStmts = append(crossChunkPrefixStmts, js_ast.Stmt{Data: &js_ast.SImport{ + Items: &items, + ImportRecordIndex: importRecordIndex, + }}) + } else { + // "import './chunk.js'" + crossChunkPrefixStmts = append(crossChunkPrefixStmts, js_ast.Stmt{Data: &js_ast.SImport{ + ImportRecordIndex: importRecordIndex, + }}) + } + + default: + panic("Internal error") + } + } + + chunkRepr.crossChunkPrefixStmts = crossChunkPrefixStmts + } +} + +type crossChunkImport struct { + chunkIndex uint32 + sortedImportItems crossChunkImportItemArray +} + +// This type is just so we can use Go's native sort function +type crossChunkImportArray []crossChunkImport + +func (a crossChunkImportArray) Len() int { return len(a) } +func (a crossChunkImportArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] } + +func (a crossChunkImportArray) Less(i int, j int) bool { + return a[i].chunkIndex < a[j].chunkIndex +} + +// Sort cross-chunk imports by chunk name for determinism +func (c *linkerContext) sortedCrossChunkImports(chunks []chunkInfo, importsFromOtherChunks map[uint32]crossChunkImportItemArray) crossChunkImportArray { + result := make(crossChunkImportArray, 0, len(importsFromOtherChunks)) + + for otherChunkIndex, importItems := range importsFromOtherChunks { + // Sort imports from a single chunk by alias for determinism + otherChunk := &chunks[otherChunkIndex] + exportsToOtherChunks := otherChunk.chunkRepr.(*chunkReprJS).exportsToOtherChunks + for i, item := range importItems { + importItems[i].exportAlias = exportsToOtherChunks[item.ref] + } + sort.Sort(importItems) + result = 
append(result, crossChunkImport{ + chunkIndex: otherChunkIndex, + sortedImportItems: importItems, + }) + } + + sort.Sort(result) + return result +} + +type crossChunkImportItem struct { + ref js_ast.Ref + exportAlias string +} + +// This type is just so we can use Go's native sort function +type crossChunkImportItemArray []crossChunkImportItem + +func (a crossChunkImportItemArray) Len() int { return len(a) } +func (a crossChunkImportItemArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] } + +func (a crossChunkImportItemArray) Less(i int, j int) bool { + return a[i].exportAlias < a[j].exportAlias +} + +// The sort order here is arbitrary but needs to be consistent between builds. +// The InnerIndex should be stable because the parser for a single file is +// single-threaded and deterministically assigns out InnerIndex values +// sequentially. But the SourceIndex should be unstable because the main thread +// assigns out source index values sequentially to newly-discovered dependencies +// in a multi-threaded producer/consumer relationship. So instead we use the +// index of the source in the DFS order over all entry points for stability. +type stableRef struct { + StableSourceIndex uint32 + Ref js_ast.Ref +} + +// This type is just so we can use Go's native sort function +type stableRefArray []stableRef + +func (a stableRefArray) Len() int { return len(a) } +func (a stableRefArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] } +func (a stableRefArray) Less(i int, j int) bool { + ai, aj := a[i], a[j] + return ai.StableSourceIndex < aj.StableSourceIndex || + (ai.StableSourceIndex == aj.StableSourceIndex && ai.Ref.InnerIndex < aj.Ref.InnerIndex) +} + +// Sort cross-chunk exports by chunk name for determinism +func (c *linkerContext) sortedCrossChunkExportItems(exportRefs map[js_ast.Ref]bool) stableRefArray { + result := make(stableRefArray, 0, len(exportRefs)) + for ref := range exportRefs { + result = append(result, stableRef{ + StableSourceIndex: c.graph.StableSourceIndices[ref.SourceIndex], + Ref: ref, + }) + } + sort.Sort(result) + return result +} + +func (c *linkerContext) scanImportsAndExports() { + c.timer.Begin("Scan imports and exports") + defer c.timer.End("Scan imports and exports") + + // Step 1: Figure out what modules must be CommonJS + c.timer.Begin("Step 1") + for _, sourceIndex := range c.graph.ReachableFiles { + file := &c.graph.Files[sourceIndex] + switch repr := file.InputFile.Repr.(type) { + case *graph.CSSRepr: + // Inline URLs for non-CSS files into the CSS file + var additionalFiles []graph.OutputFile + for importRecordIndex := range repr.AST.ImportRecords { + if record := &repr.AST.ImportRecords[importRecordIndex]; record.SourceIndex.IsValid() { + otherFile := &c.graph.Files[record.SourceIndex.GetIndex()] + if otherRepr, ok := otherFile.InputFile.Repr.(*graph.JSRepr); ok { + record.Path.Text = otherRepr.AST.URLForCSS + record.Path.Namespace = "" + record.SourceIndex = ast.Index32{} + + // Copy the additional files to the output directory + additionalFiles = append(additionalFiles, otherFile.InputFile.AdditionalFiles...) 
+ } + } + } + file.InputFile.AdditionalFiles = additionalFiles + + case *graph.JSRepr: + for importRecordIndex := range repr.AST.ImportRecords { + record := &repr.AST.ImportRecords[importRecordIndex] + if !record.SourceIndex.IsValid() { + continue + } + + otherFile := &c.graph.Files[record.SourceIndex.GetIndex()] + otherRepr := otherFile.InputFile.Repr.(*graph.JSRepr) + + switch record.Kind { + case ast.ImportStmt: + // Importing using ES6 syntax from a file without any ES6 syntax + // causes that module to be considered CommonJS-style, even if it + // doesn't have any CommonJS exports. + // + // That means the ES6 imports will become undefined instead of + // causing errors. This is for compatibility with older CommonJS- + // style bundlers. + // + // We emit a warning in this case but try to avoid turning the module + // into a CommonJS module if possible. This is possible with named + // imports (the module stays an ECMAScript module but the imports are + // rewritten with undefined) but is not possible with star or default + // imports: + // + // import * as ns from './empty-file' + // import defVal from './empty-file' + // console.log(ns, defVal) + // + // In that case the module *is* considered a CommonJS module because + // the namespace object must be created. + if (record.ContainsImportStar || record.ContainsDefaultAlias) && otherRepr.AST.ExportsKind == js_ast.ExportsNone && !otherRepr.AST.HasLazyExport { + otherRepr.Meta.Wrap = graph.WrapCJS + otherRepr.AST.ExportsKind = js_ast.ExportsCommonJS + } + + case ast.ImportRequire: + // Files that are imported with require() must be CommonJS modules + if otherRepr.AST.ExportsKind == js_ast.ExportsESM { + otherRepr.Meta.Wrap = graph.WrapESM + } else { + otherRepr.Meta.Wrap = graph.WrapCJS + otherRepr.AST.ExportsKind = js_ast.ExportsCommonJS + } + + case ast.ImportDynamic: + if !c.options.CodeSplitting { + // If we're not splitting, then import() is just a require() that + // returns a promise, so the imported file must be a CommonJS module + if otherRepr.AST.ExportsKind == js_ast.ExportsESM { + otherRepr.Meta.Wrap = graph.WrapESM + } else { + otherRepr.Meta.Wrap = graph.WrapCJS + otherRepr.AST.ExportsKind = js_ast.ExportsCommonJS + } + } + } + } + + // If the output format doesn't have an implicit CommonJS wrapper, any file + // that uses CommonJS features will need to be wrapped, even though the + // resulting wrapper won't be invoked by other files. An exception is made + // for entry point files in CommonJS format (or when in pass-through mode). + if repr.AST.ExportsKind == js_ast.ExportsCommonJS && (!file.IsEntryPoint() || + c.options.OutputFormat == config.FormatIIFE || c.options.OutputFormat == config.FormatESModule) { + repr.Meta.Wrap = graph.WrapCJS + } + } + } + c.timer.End("Step 1") + + // Step 2: Propagate dynamic export status for export star statements that + // are re-exports from a module whose exports are not statically analyzable. + // In this case the export star must be evaluated at run time instead of at + // bundle time. 
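+	// For illustration (hypothetical input files):
+	//
+	//   // a.js
+	//   export * from './b.cjs' // a CommonJS file
+	//
+	// The exports of "b.cjs" cannot be enumerated at bundle time, so "a.js"
+	// is marked as having a dynamic export fallback and the star is handled
+	// with a run-time "__reExport" call instead of static bindings.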
+ c.timer.Begin("Step 2") + for _, sourceIndex := range c.graph.ReachableFiles { + repr, ok := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) + if !ok { + continue + } + + if repr.Meta.Wrap != graph.WrapNone { + c.recursivelyWrapDependencies(sourceIndex) + } + + if len(repr.AST.ExportStarImportRecords) > 0 { + visited := make(map[uint32]bool) + c.hasDynamicExportsDueToExportStar(sourceIndex, visited) + } + + // Even if the output file is CommonJS-like, we may still need to wrap + // CommonJS-style files. Any file that imports a CommonJS-style file will + // cause that file to need to be wrapped. This is because the import + // method, whatever it is, will need to invoke the wrapper. Note that + // this can include entry points (e.g. an entry point that imports a file + // that imports that entry point). + for _, record := range repr.AST.ImportRecords { + if record.SourceIndex.IsValid() { + otherRepr := c.graph.Files[record.SourceIndex.GetIndex()].InputFile.Repr.(*graph.JSRepr) + if otherRepr.AST.ExportsKind == js_ast.ExportsCommonJS { + c.recursivelyWrapDependencies(record.SourceIndex.GetIndex()) + } + } + } + } + c.timer.End("Step 2") + + // Step 3: Resolve "export * from" statements. This must be done after we + // discover all modules that can have dynamic exports because export stars + // are ignored for those modules. + c.timer.Begin("Step 3") + exportStarStack := make([]uint32, 0, 32) + for _, sourceIndex := range c.graph.ReachableFiles { + repr, ok := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) + if !ok { + continue + } + + // Expression-style loaders defer code generation until linking. Code + // generation is done here because at this point we know that the + // "ExportsKind" field has its final value and will not be changed. + if repr.AST.HasLazyExport { + c.generateCodeForLazyExport(sourceIndex) + } + + // Propagate exports for export star statements + if len(repr.AST.ExportStarImportRecords) > 0 { + c.addExportsForExportStar(repr.Meta.ResolvedExports, sourceIndex, exportStarStack) + } + + // Also add a special export so import stars can bind to it. This must be + // done in this step because it must come after CommonJS module discovery + // but before matching imports with exports. + repr.Meta.ResolvedExportStar = &graph.ExportData{ + Ref: repr.AST.ExportsRef, + SourceIndex: sourceIndex, + } + } + c.timer.End("Step 3") + + // Step 4: Match imports with exports. This must be done after we process all + // export stars because imports can bind to export star re-exports. + c.timer.Begin("Step 4") + for _, sourceIndex := range c.graph.ReachableFiles { + file := &c.graph.Files[sourceIndex] + repr, ok := file.InputFile.Repr.(*graph.JSRepr) + if !ok { + continue + } + + if len(repr.AST.NamedImports) > 0 { + c.matchImportsWithExportsForFile(uint32(sourceIndex)) + } + + // If we're exporting as CommonJS and this file was originally CommonJS, + // then we'll be using the actual CommonJS "exports" and/or "module" + // symbols. In that case make sure to mark them as such so they don't + // get minified. 
+ if file.IsEntryPoint() && repr.AST.ExportsKind == js_ast.ExportsCommonJS && repr.Meta.Wrap == graph.WrapNone && + (c.options.OutputFormat == config.FormatPreserve || c.options.OutputFormat == config.FormatCommonJS) { + exportsRef := js_ast.FollowSymbols(c.graph.Symbols, repr.AST.ExportsRef) + moduleRef := js_ast.FollowSymbols(c.graph.Symbols, repr.AST.ModuleRef) + c.graph.Symbols.Get(exportsRef).Kind = js_ast.SymbolUnbound + c.graph.Symbols.Get(moduleRef).Kind = js_ast.SymbolUnbound + } else if repr.Meta.ForceIncludeExportsForEntryPoint || repr.AST.ExportsKind != js_ast.ExportsCommonJS { + repr.Meta.NeedsExportsVariable = true + } + + // Create the wrapper part for wrapped files. This is needed by a later step. + c.createWrapperForFile(uint32(sourceIndex)) + } + c.timer.End("Step 4") + + // Step 5: Create namespace exports for every file. This is always necessary + // for CommonJS files, and is also necessary for other files if they are + // imported using an import star statement. + c.timer.Begin("Step 5") + waitGroup := sync.WaitGroup{} + for _, sourceIndex := range c.graph.ReachableFiles { + repr, ok := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) + if !ok { + continue + } + + // This is the slowest step and is also parallelizable, so do this in parallel. + waitGroup.Add(1) + go func(sourceIndex uint32, repr *graph.JSRepr) { + // Now that all exports have been resolved, sort and filter them to create + // something we can iterate over later. + aliases := make([]string, 0, len(repr.Meta.ResolvedExports)) + nextAlias: + for alias, export := range repr.Meta.ResolvedExports { + // Re-exporting multiple symbols with the same name causes an ambiguous + // export. These names cannot be used and should not end up in generated code. + otherRepr := c.graph.Files[export.SourceIndex].InputFile.Repr.(*graph.JSRepr) + if len(export.PotentiallyAmbiguousExportStarRefs) > 0 { + mainRef := export.Ref + if imported, ok := otherRepr.Meta.ImportsToBind[export.Ref]; ok { + mainRef = imported.Ref + } + for _, ambiguousExport := range export.PotentiallyAmbiguousExportStarRefs { + ambiguousRepr := c.graph.Files[ambiguousExport.SourceIndex].InputFile.Repr.(*graph.JSRepr) + ambiguousRef := ambiguousExport.Ref + if imported, ok := ambiguousRepr.Meta.ImportsToBind[ambiguousExport.Ref]; ok { + ambiguousRef = imported.Ref + } + if mainRef != ambiguousRef { + continue nextAlias + } + } + } + + // Ignore re-exported imports in TypeScript files that failed to be + // resolved. These are probably just type-only imports so the best thing to + // do is to silently omit them from the export list. + if otherRepr.Meta.IsProbablyTypeScriptType[export.Ref] { + continue + } + + aliases = append(aliases, alias) + } + sort.Strings(aliases) + repr.Meta.SortedAndFilteredExportAliases = aliases + + // Export creation uses "sortedAndFilteredExportAliases" so this must + // come second after we fill in that array + c.createExportsForFile(uint32(sourceIndex)) + + waitGroup.Done() + }(sourceIndex, repr) + } + waitGroup.Wait() + c.timer.End("Step 5") + + // Step 6: Bind imports to exports. This adds non-local dependencies on the + // parts that declare the export to all parts that use the import. Also + // generate wrapper parts for wrapped files. 
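+	// For illustration (hypothetical input files): given "export let x = 1"
+	// in a.js and "import { x } from './a.js'" in b.js, the part of b.js
+	// that uses "x" gains a dependency on the part of a.js that declares it,
+	// and the two symbols are later merged so they rename together.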
+ c.timer.Begin("Step 6") + for _, sourceIndex := range c.graph.ReachableFiles { + file := &c.graph.Files[sourceIndex] + repr, ok := file.InputFile.Repr.(*graph.JSRepr) + if !ok { + continue + } + + // Pre-generate symbols for re-exports CommonJS symbols in case they + // are necessary later. This is done now because the symbols map cannot be + // mutated later due to parallelism. + if file.IsEntryPoint() && c.options.OutputFormat == config.FormatESModule { + copies := make([]js_ast.Ref, len(repr.Meta.SortedAndFilteredExportAliases)) + for i, alias := range repr.Meta.SortedAndFilteredExportAliases { + copies[i] = c.graph.GenerateNewSymbol(sourceIndex, js_ast.SymbolOther, "export_"+alias) + } + repr.Meta.CJSExportCopies = copies + } + + // Use "init_*" for ESM wrappers instead of "require_*" + if repr.Meta.Wrap == graph.WrapESM { + c.graph.Symbols.Get(repr.AST.WrapperRef).OriginalName = "init_" + file.InputFile.Source.IdentifierName + } + + // If this isn't CommonJS, then rename the unused "exports" and "module" + // variables to avoid them causing the identically-named variables in + // actual CommonJS files from being renamed. This is purely about + // aesthetics and is not about correctness. This is done here because by + // this point, we know the CommonJS status will not change further. + if repr.Meta.Wrap != graph.WrapCJS && repr.AST.ExportsKind != js_ast.ExportsCommonJS { + name := file.InputFile.Source.IdentifierName + c.graph.Symbols.Get(repr.AST.ExportsRef).OriginalName = name + "_exports" + c.graph.Symbols.Get(repr.AST.ModuleRef).OriginalName = name + "_module" + } + + // Include the "__export" symbol from the runtime if it was used in the + // previous step. The previous step can't do this because it's running in + // parallel and can't safely mutate the "importsToBind" map of another file. + if repr.Meta.NeedsExportSymbolFromRuntime { + runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr) + exportRef := runtimeRepr.AST.ModuleScope.Members["__export"].Ref + c.graph.GenerateSymbolImportAndUse(sourceIndex, js_ast.NSExportPartIndex, exportRef, 1, runtime.SourceIndex) + } + + for importRef, importData := range repr.Meta.ImportsToBind { + resolvedRepr := c.graph.Files[importData.SourceIndex].InputFile.Repr.(*graph.JSRepr) + partsDeclaringSymbol := resolvedRepr.TopLevelSymbolToParts(importData.Ref) + + for _, partIndex := range repr.AST.NamedImports[importRef].LocalPartsWithUses { + part := &repr.AST.Parts[partIndex] + + // Depend on the file containing the imported symbol + for _, resolvedPartIndex := range partsDeclaringSymbol { + part.Dependencies = append(part.Dependencies, js_ast.Dependency{ + SourceIndex: importData.SourceIndex, + PartIndex: resolvedPartIndex, + }) + } + + // Also depend on any files that re-exported this symbol in between the + // file containing the import and the file containing the imported symbol + part.Dependencies = append(part.Dependencies, importData.ReExports...) 
+ } + + // Merge these symbols so they will share the same name + js_ast.MergeSymbols(c.graph.Symbols, importRef, importData.Ref) + } + + // If this is an entry point, depend on all exports so they are included + if file.IsEntryPoint() { + var dependencies []js_ast.Dependency + + for _, alias := range repr.Meta.SortedAndFilteredExportAliases { + export := repr.Meta.ResolvedExports[alias] + targetSourceIndex := export.SourceIndex + targetRef := export.Ref + + // If this is an import, then target what the import points to + targetRepr := c.graph.Files[targetSourceIndex].InputFile.Repr.(*graph.JSRepr) + if importData, ok := targetRepr.Meta.ImportsToBind[targetRef]; ok { + targetSourceIndex = importData.SourceIndex + targetRef = importData.Ref + targetRepr = c.graph.Files[targetSourceIndex].InputFile.Repr.(*graph.JSRepr) + dependencies = append(dependencies, importData.ReExports...) + } + + // Pull in all declarations of this symbol + for _, partIndex := range targetRepr.TopLevelSymbolToParts(targetRef) { + dependencies = append(dependencies, js_ast.Dependency{ + SourceIndex: targetSourceIndex, + PartIndex: partIndex, + }) + } + } + + // Ensure "exports" is included if the current output format needs it + if repr.Meta.ForceIncludeExportsForEntryPoint { + dependencies = append(dependencies, js_ast.Dependency{ + SourceIndex: sourceIndex, + PartIndex: js_ast.NSExportPartIndex, + }) + } + + // Include the wrapper if present + if repr.Meta.Wrap != graph.WrapNone { + dependencies = append(dependencies, js_ast.Dependency{ + SourceIndex: sourceIndex, + PartIndex: repr.Meta.WrapperPartIndex.GetIndex(), + }) + } + + // Represent these constraints with a dummy part + entryPointPartIndex := c.graph.AddPartToFile(sourceIndex, js_ast.Part{ + Dependencies: dependencies, + CanBeRemovedIfUnused: false, + }) + repr.Meta.EntryPointPartIndex = ast.MakeIndex32(entryPointPartIndex) + + // Pull in the "__toCommonJS" symbol if we need it due to being an entry point + if repr.Meta.ForceIncludeExportsForEntryPoint { + c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, entryPointPartIndex, "__toCommonJS", 1) + } + } + + // Encode import-specific constraints in the dependency graph + for partIndex, part := range repr.AST.Parts { + toESMUses := uint32(0) + toCommonJSUses := uint32(0) + runtimeRequireUses := uint32(0) + + // Imports of wrapped files must depend on the wrapper + for _, importRecordIndex := range part.ImportRecordIndices { + record := &repr.AST.ImportRecords[importRecordIndex] + + // Don't follow external imports (this includes import() expressions) + if !record.SourceIndex.IsValid() || c.isExternalDynamicImport(record, sourceIndex) { + // This is an external import. Check if it will be a "require()" call. + if record.Kind == ast.ImportRequire || !c.options.OutputFormat.KeepES6ImportExportSyntax() || + (record.Kind == ast.ImportDynamic && c.options.UnsupportedJSFeatures.Has(compat.DynamicImport)) { + // We should use "__require" instead of "require" if we're not + // generating a CommonJS output file, since it won't exist otherwise + if config.ShouldCallRuntimeRequire(c.options.Mode, c.options.OutputFormat) { + record.CallRuntimeRequire = true + runtimeRequireUses++ + } + + // It needs the "__toESM" wrapper if it wasn't originally a + // CommonJS import (i.e. it wasn't a "require()" call). 
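+					// For illustration (hypothetical import): when targeting
+					// CommonJS output, "import fs from 'fs'" becomes roughly
+					// "var import_fs = __toESM(require('fs'))" so that default
+					// and namespace imports behave like they would against a
+					// real ES module.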
+ if record.Kind != ast.ImportRequire { + record.WrapWithToESM = true + toESMUses++ + } + } + continue + } + + otherSourceIndex := record.SourceIndex.GetIndex() + otherRepr := c.graph.Files[otherSourceIndex].InputFile.Repr.(*graph.JSRepr) + + if otherRepr.Meta.Wrap != graph.WrapNone { + // Depend on the automatically-generated require wrapper symbol + wrapperRef := otherRepr.AST.WrapperRef + c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), wrapperRef, 1, otherSourceIndex) + + // This is an ES6 import of a CommonJS module, so it needs the + // "__toESM" wrapper as long as it's not a bare "require()" + if record.Kind != ast.ImportRequire && otherRepr.AST.ExportsKind == js_ast.ExportsCommonJS { + record.WrapWithToESM = true + toESMUses++ + } + + // If this is an ESM wrapper, also depend on the exports object + // since the final code will contain an inline reference to it. + // This must be done for "require()" and "import()" expressions + // but does not need to be done for "import" statements since + // those just cause us to reference the exports directly. + if otherRepr.Meta.Wrap == graph.WrapESM && record.Kind != ast.ImportStmt { + c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), otherRepr.AST.ExportsRef, 1, otherSourceIndex) + + // If this is a "require()" call, then we should add the + // "__esModule" marker to behave as if the module was converted + // from ESM to CommonJS. This is done via a wrapper instead of + // by modifying the exports object itself because the same ES + // module may be simultaneously imported and required, and the + // importing code should not see "__esModule" while the requiring + // code should see "__esModule". This is an extremely complex + // and subtle set of bundler interop issues. See for example + // https://github.com/evanw/esbuild/issues/1591. + if record.Kind == ast.ImportRequire { + record.WrapWithToCJS = true + toCommonJSUses++ + } + } + } else if record.Kind == ast.ImportStmt && otherRepr.AST.ExportsKind == js_ast.ExportsESMWithDynamicFallback { + // This is an import of a module that has a dynamic export fallback + // object. In that case we need to depend on that object in case + // something ends up needing to use it later. This could potentially + // be omitted in some cases with more advanced analysis if this + // dynamic export fallback object doesn't end up being needed. + c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), otherRepr.AST.ExportsRef, 1, otherSourceIndex) + } + } + + // If there's an ES6 import of a non-ES6 module, then we're going to need the + // "__toESM" symbol from the runtime to wrap the result of "require()" + c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, uint32(partIndex), "__toESM", toESMUses) + + // If there's a CommonJS require of an ES6 module, then we're going to need the + // "__toCommonJS" symbol from the runtime to wrap the exports object + c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, uint32(partIndex), "__toCommonJS", toCommonJSUses) + + // If there are unbundled calls to "require()" and we're not generating + // code for node, then substitute a "__require" wrapper for "require". 
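+			// For illustration (hypothetical call): in ESM output there is no
+			// ambient "require", so a leftover "require('x')" is emitted as
+			// "__require('x')", a runtime helper that defers to an ambient
+			// "require" if one exists and otherwise throws an informative error.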
+			c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, uint32(partIndex), "__require", runtimeRequireUses)
+
+			// If there's an ES6 export star statement of a non-ES6 module, then we're
+			// going to need the "__reExport" symbol from the runtime
+			reExportUses := uint32(0)
+			for _, importRecordIndex := range repr.AST.ExportStarImportRecords {
+				record := &repr.AST.ImportRecords[importRecordIndex]
+
+				// Is this export star evaluated at run time?
+				happensAtRunTime := !record.SourceIndex.IsValid() && (!file.IsEntryPoint() || !c.options.OutputFormat.KeepES6ImportExportSyntax())
+				if record.SourceIndex.IsValid() {
+					otherSourceIndex := record.SourceIndex.GetIndex()
+					otherRepr := c.graph.Files[otherSourceIndex].InputFile.Repr.(*graph.JSRepr)
+					if otherSourceIndex != sourceIndex && otherRepr.AST.ExportsKind.IsDynamic() {
+						happensAtRunTime = true
+					}
+					if otherRepr.AST.ExportsKind == js_ast.ExportsESMWithDynamicFallback {
+						// This looks like "__reExport(exports_a, exports_b)". Make sure to
+						// pull in the "exports_b" symbol into this export star. This matters
+						// in code splitting situations where the "exports_b" symbol might live
+						// in a different chunk than this export star.
+						c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), otherRepr.AST.ExportsRef, 1, otherSourceIndex)
+					}
+				}
+				if happensAtRunTime {
+					// Depend on this file's "exports" object for the first argument to "__reExport"
+					c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), repr.AST.ExportsRef, 1, sourceIndex)
+					record.CallsRunTimeReExportFn = true
+					repr.AST.UsesExportsRef = true
+					reExportUses++
+				}
+			}
+			c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, uint32(partIndex), "__reExport", reExportUses)
+		}
+	}
+	c.timer.End("Step 6")
+}
+
+func (c *linkerContext) generateCodeForLazyExport(sourceIndex uint32) {
+	file := &c.graph.Files[sourceIndex]
+	repr := file.InputFile.Repr.(*graph.JSRepr)
+
+	// Grab the lazy expression
+	if len(repr.AST.Parts) < 2 {
+		panic("Internal error")
+	}
+	part := &repr.AST.Parts[1]
+	if len(part.Stmts) != 1 {
+		panic("Internal error")
+	}
+	lazy, ok := part.Stmts[0].Data.(*js_ast.SLazyExport)
+	if !ok {
+		panic("Internal error")
+	}
+
+	// Use "module.exports = value" for CommonJS-style modules
+	if repr.AST.ExportsKind == js_ast.ExportsCommonJS {
+		part.Stmts = []js_ast.Stmt{js_ast.AssignStmt(
+			js_ast.Expr{Loc: lazy.Value.Loc, Data: &js_ast.EDot{
+				Target:  js_ast.Expr{Loc: lazy.Value.Loc, Data: &js_ast.EIdentifier{Ref: repr.AST.ModuleRef}},
+				Name:    "exports",
+				NameLoc: lazy.Value.Loc,
+			}},
+			lazy.Value,
+		)}
+		c.graph.GenerateSymbolImportAndUse(sourceIndex, 0, repr.AST.ModuleRef, 1, sourceIndex)
+		return
+	}
+
+	// Otherwise, generate ES6 export statements. These are added as additional
+	// parts so they can be tree shaken individually.
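+	// For illustration (hypothetical JSON input): importing a "data.json"
+	// containing {"a": 1, "b": 2} generates roughly
+	//
+	//   export var a = 1
+	//   export var b = 2
+	//   export default { a, b }
+	//
+	// so an "import { a } from './data.json'" can tree-shake away "b" and
+	// the default object entirely.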
+ part.Stmts = nil + + type prevExport struct { + ref js_ast.Ref + partIndex uint32 + } + + generateExport := func(name string, alias string, value js_ast.Expr) prevExport { + // Generate a new symbol + ref := c.graph.GenerateNewSymbol(sourceIndex, js_ast.SymbolOther, name) + + // Generate an ES6 export + var stmt js_ast.Stmt + if alias == "default" { + stmt = js_ast.Stmt{Loc: value.Loc, Data: &js_ast.SExportDefault{ + DefaultName: js_ast.LocRef{Loc: value.Loc, Ref: ref}, + Value: js_ast.Stmt{Loc: value.Loc, Data: &js_ast.SExpr{Value: value}}, + }} + } else { + stmt = js_ast.Stmt{Loc: value.Loc, Data: &js_ast.SLocal{ + IsExport: true, + Decls: []js_ast.Decl{{ + Binding: js_ast.Binding{Loc: value.Loc, Data: &js_ast.BIdentifier{Ref: ref}}, + ValueOrNil: value, + }}, + }} + } + + // Link the export into the graph for tree shaking + partIndex := c.graph.AddPartToFile(sourceIndex, js_ast.Part{ + Stmts: []js_ast.Stmt{stmt}, + DeclaredSymbols: []js_ast.DeclaredSymbol{{Ref: ref, IsTopLevel: true}}, + CanBeRemovedIfUnused: true, + }) + c.graph.GenerateSymbolImportAndUse(sourceIndex, partIndex, repr.AST.ModuleRef, 1, sourceIndex) + repr.Meta.ResolvedExports[alias] = graph.ExportData{Ref: ref, SourceIndex: sourceIndex} + return prevExport{ref: ref, partIndex: partIndex} + } + + // Unwrap JSON objects into separate top-level variables + var prevExports []js_ast.Ref + jsonValue := lazy.Value + if object, ok := jsonValue.Data.(*js_ast.EObject); ok { + clone := *object + clone.Properties = append(make([]js_ast.Property, 0, len(clone.Properties)), clone.Properties...) + for i, property := range clone.Properties { + if str, ok := property.Key.Data.(*js_ast.EString); ok && + (!file.IsEntryPoint() || js_lexer.IsIdentifierUTF16(str.Value) || + !c.options.UnsupportedJSFeatures.Has(compat.ArbitraryModuleNamespaceNames)) { + name := js_lexer.UTF16ToString(str.Value) + exportRef := generateExport(name, name, property.ValueOrNil).ref + prevExports = append(prevExports, exportRef) + clone.Properties[i].ValueOrNil = js_ast.Expr{Loc: property.Key.Loc, Data: &js_ast.EIdentifier{Ref: exportRef}} + } + } + jsonValue.Data = &clone + } + + // Generate the default export + finalExportPartIndex := generateExport(file.InputFile.Source.IdentifierName+"_default", "default", jsonValue).partIndex + + // The default export depends on all of the previous exports + for _, exportRef := range prevExports { + c.graph.GenerateSymbolImportAndUse(sourceIndex, finalExportPartIndex, exportRef, 1, sourceIndex) + } +} + +func (c *linkerContext) createExportsForFile(sourceIndex uint32) { + //////////////////////////////////////////////////////////////////////////////// + // WARNING: This method is run in parallel over all files. Do not mutate data + // for other files within this method or you will create a data race. + //////////////////////////////////////////////////////////////////////////////// + + file := &c.graph.Files[sourceIndex] + repr := file.InputFile.Repr.(*graph.JSRepr) + + // Generate a getter per export + properties := []js_ast.Property{} + nsExportDependencies := []js_ast.Dependency{} + nsExportSymbolUses := make(map[js_ast.Ref]js_ast.SymbolUse) + for _, alias := range repr.Meta.SortedAndFilteredExportAliases { + export := repr.Meta.ResolvedExports[alias] + + // If this is an export of an import, reference the symbol that the import + // was eventually resolved to. We need to do this because imports have + // already been resolved by this point, so we can't generate a new import + // and have that be resolved later. 
+ if importData, ok := c.graph.Files[export.SourceIndex].InputFile.Repr.(*graph.JSRepr).Meta.ImportsToBind[export.Ref]; ok { + export.Ref = importData.Ref + export.SourceIndex = importData.SourceIndex + nsExportDependencies = append(nsExportDependencies, importData.ReExports...) + } + + // Exports of imports need EImportIdentifier in case they need to be re- + // written to a property access later on + var value js_ast.Expr + if c.graph.Symbols.Get(export.Ref).NamespaceAlias != nil { + value = js_ast.Expr{Data: &js_ast.EImportIdentifier{Ref: export.Ref}} + } else { + value = js_ast.Expr{Data: &js_ast.EIdentifier{Ref: export.Ref}} + } + + // Add a getter property + var getter js_ast.Expr + body := js_ast.FnBody{Stmts: []js_ast.Stmt{{Loc: value.Loc, Data: &js_ast.SReturn{ValueOrNil: value}}}} + if c.options.UnsupportedJSFeatures.Has(compat.Arrow) { + getter = js_ast.Expr{Data: &js_ast.EFunction{Fn: js_ast.Fn{Body: body}}} + } else { + getter = js_ast.Expr{Data: &js_ast.EArrow{PreferExpr: true, Body: body}} + } + properties = append(properties, js_ast.Property{ + Key: js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16(alias)}}, + ValueOrNil: getter, + }) + nsExportSymbolUses[export.Ref] = js_ast.SymbolUse{CountEstimate: 1} + + // Make sure the part that declares the export is included + for _, partIndex := range c.graph.Files[export.SourceIndex].InputFile.Repr.(*graph.JSRepr).TopLevelSymbolToParts(export.Ref) { + // Use a non-local dependency since this is likely from a different + // file if it came in through an export star + nsExportDependencies = append(nsExportDependencies, js_ast.Dependency{ + SourceIndex: export.SourceIndex, + PartIndex: partIndex, + }) + } + } + + declaredSymbols := []js_ast.DeclaredSymbol{} + var nsExportStmts []js_ast.Stmt + + // Prefix this part with "var exports = {}" if this isn't a CommonJS entry point + if repr.Meta.NeedsExportsVariable { + nsExportStmts = append(nsExportStmts, js_ast.Stmt{Data: &js_ast.SLocal{Decls: []js_ast.Decl{{ + Binding: js_ast.Binding{Data: &js_ast.BIdentifier{Ref: repr.AST.ExportsRef}}, + ValueOrNil: js_ast.Expr{Data: &js_ast.EObject{}}, + }}}}) + declaredSymbols = append(declaredSymbols, js_ast.DeclaredSymbol{ + Ref: repr.AST.ExportsRef, + IsTopLevel: true, + }) + } + + // "__export(exports, { foo: () => foo })" + exportRef := js_ast.InvalidRef + if len(properties) > 0 { + runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr) + exportRef = runtimeRepr.AST.ModuleScope.Members["__export"].Ref + nsExportStmts = append(nsExportStmts, js_ast.Stmt{Data: &js_ast.SExpr{Value: js_ast.Expr{Data: &js_ast.ECall{ + Target: js_ast.Expr{Data: &js_ast.EIdentifier{Ref: exportRef}}, + Args: []js_ast.Expr{ + {Data: &js_ast.EIdentifier{Ref: repr.AST.ExportsRef}}, + {Data: &js_ast.EObject{ + Properties: properties, + }}, + }, + }}}}) + + // Make sure this file depends on the "__export" symbol + for _, partIndex := range runtimeRepr.TopLevelSymbolToParts(exportRef) { + nsExportDependencies = append(nsExportDependencies, js_ast.Dependency{ + SourceIndex: runtime.SourceIndex, + PartIndex: partIndex, + }) + } + + // Make sure the CommonJS closure, if there is one, includes "exports" + repr.AST.UsesExportsRef = true + } + + // No need to generate a part if it'll be empty + if len(nsExportStmts) > 0 { + // Initialize the part that was allocated for us earlier. The information + // here will be used after this during tree shaking. 
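+		// For illustration (hypothetical file): for a file "foo.js" with
+		// "export let bar", the part initialized below looks roughly like
+		//
+		//   var foo_exports = {};
+		//   __export(foo_exports, { bar: () => bar });
+		//
+		// and is only kept when something needs the namespace object, such as
+		// an "import * as" of this file or CommonJS interop.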
+		repr.AST.Parts[js_ast.NSExportPartIndex] = js_ast.Part{
+			Stmts:           nsExportStmts,
+			SymbolUses:      nsExportSymbolUses,
+			Dependencies:    nsExportDependencies,
+			DeclaredSymbols: declaredSymbols,
+
+			// This can be removed if nothing uses it
+			CanBeRemovedIfUnused: true,
+
+			// Make sure this is trimmed if unused even if tree shaking is disabled
+			ForceTreeShaking: true,
+		}
+
+		// Pull in the "__export" symbol if it was used
+		if exportRef != js_ast.InvalidRef {
+			repr.Meta.NeedsExportSymbolFromRuntime = true
+		}
+	}
+}
+
+func (c *linkerContext) createWrapperForFile(sourceIndex uint32) {
+	repr := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr)
+
+	switch repr.Meta.Wrap {
+	// If this is a CommonJS file, we're going to need to generate a wrapper
+	// for the CommonJS closure. That will end up looking something like this:
+	//
+	//   var require_foo = __commonJS((exports, module) => {
+	//     ...
+	//   });
+	//
+	// However, that generation is special-cased for various reasons and is
+	// done later on. Still, we're going to need to ensure that this file
+	// both depends on the "__commonJS" symbol and declares the "require_foo"
+	// symbol. Instead of special-casing this during the reachability analysis
+	// below, we just append a dummy part to the end of the file with these
+	// dependencies and let the general-purpose reachability analysis take care
+	// of it.
+	case graph.WrapCJS:
+		runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr)
+		commonJSParts := runtimeRepr.TopLevelSymbolToParts(c.cjsRuntimeRef)
+
+		// Generate the dummy part
+		dependencies := make([]js_ast.Dependency, len(commonJSParts))
+		for i, partIndex := range commonJSParts {
+			dependencies[i] = js_ast.Dependency{
+				SourceIndex: runtime.SourceIndex,
+				PartIndex:   partIndex,
+			}
+		}
+		partIndex := c.graph.AddPartToFile(sourceIndex, js_ast.Part{
+			SymbolUses: map[js_ast.Ref]js_ast.SymbolUse{
+				repr.AST.WrapperRef: {CountEstimate: 1},
+			},
+			DeclaredSymbols: []js_ast.DeclaredSymbol{
+				{Ref: repr.AST.ExportsRef, IsTopLevel: true},
+				{Ref: repr.AST.ModuleRef, IsTopLevel: true},
+				{Ref: repr.AST.WrapperRef, IsTopLevel: true},
+			},
+			Dependencies: dependencies,
+		})
+		repr.Meta.WrapperPartIndex = ast.MakeIndex32(partIndex)
+		c.graph.GenerateSymbolImportAndUse(sourceIndex, partIndex, c.cjsRuntimeRef, 1, runtime.SourceIndex)
+
+	// If this is a lazily-initialized ESM file, we're going to need to
+	// generate a wrapper for the ESM closure. That will end up looking
+	// something like this:
+	//
+	//   var init_foo = __esm(() => {
+	//     ...
+	//   });
+	//
+	// This depends on the "__esm" symbol and declares the "init_foo" symbol
+	// for similar reasons to the CommonJS closure above.
+ case graph.WrapESM: + runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr) + esmParts := runtimeRepr.TopLevelSymbolToParts(c.esmRuntimeRef) + + // Generate the dummy part + dependencies := make([]js_ast.Dependency, len(esmParts)) + for i, partIndex := range esmParts { + dependencies[i] = js_ast.Dependency{ + SourceIndex: runtime.SourceIndex, + PartIndex: partIndex, + } + } + partIndex := c.graph.AddPartToFile(sourceIndex, js_ast.Part{ + SymbolUses: map[js_ast.Ref]js_ast.SymbolUse{ + repr.AST.WrapperRef: {CountEstimate: 1}, + }, + DeclaredSymbols: []js_ast.DeclaredSymbol{ + {Ref: repr.AST.WrapperRef, IsTopLevel: true}, + }, + Dependencies: dependencies, + }) + repr.Meta.WrapperPartIndex = ast.MakeIndex32(partIndex) + c.graph.GenerateSymbolImportAndUse(sourceIndex, partIndex, c.esmRuntimeRef, 1, runtime.SourceIndex) + } +} + +func (c *linkerContext) matchImportsWithExportsForFile(sourceIndex uint32) { + file := &c.graph.Files[sourceIndex] + repr := file.InputFile.Repr.(*graph.JSRepr) + + // Sort imports for determinism. Otherwise our unit tests will randomly + // fail sometimes when error messages are reordered. + sortedImportRefs := make([]int, 0, len(repr.AST.NamedImports)) + for ref := range repr.AST.NamedImports { + sortedImportRefs = append(sortedImportRefs, int(ref.InnerIndex)) + } + sort.Ints(sortedImportRefs) + + // Pair imports with their matching exports + for _, innerIndex := range sortedImportRefs { + // Re-use memory for the cycle detector + c.cycleDetector = c.cycleDetector[:0] + + importRef := js_ast.Ref{SourceIndex: sourceIndex, InnerIndex: uint32(innerIndex)} + result, reExports := c.matchImportWithExport(importTracker{sourceIndex: sourceIndex, importRef: importRef}, nil) + switch result.kind { + case matchImportIgnore: + + case matchImportNormal: + repr.Meta.ImportsToBind[importRef] = graph.ImportData{ + ReExports: reExports, + SourceIndex: result.sourceIndex, + Ref: result.ref, + } + + case matchImportNamespace: + c.graph.Symbols.Get(importRef).NamespaceAlias = &js_ast.NamespaceAlias{ + NamespaceRef: result.namespaceRef, + Alias: result.alias, + } + + case matchImportNormalAndNamespace: + repr.Meta.ImportsToBind[importRef] = graph.ImportData{ + ReExports: reExports, + SourceIndex: result.sourceIndex, + Ref: result.ref, + } + + c.graph.Symbols.Get(importRef).NamespaceAlias = &js_ast.NamespaceAlias{ + NamespaceRef: result.namespaceRef, + Alias: result.alias, + } + + case matchImportCycle: + namedImport := repr.AST.NamedImports[importRef] + c.log.Add(logger.Error, file.LineColumnTracker(), js_lexer.RangeOfIdentifier(file.InputFile.Source, namedImport.AliasLoc), + fmt.Sprintf("Detected cycle while resolving import %q", namedImport.Alias)) + + case matchImportProbablyTypeScriptType: + repr.Meta.IsProbablyTypeScriptType[importRef] = true + + case matchImportAmbiguous: + namedImport := repr.AST.NamedImports[importRef] + r := js_lexer.RangeOfIdentifier(file.InputFile.Source, namedImport.AliasLoc) + var notes []logger.MsgData + + // Provide the locations of both ambiguous exports if possible + if result.nameLoc.Start != 0 && result.otherNameLoc.Start != 0 { + a := c.graph.Files[result.sourceIndex] + b := c.graph.Files[result.otherSourceIndex] + ra := js_lexer.RangeOfIdentifier(a.InputFile.Source, result.nameLoc) + rb := js_lexer.RangeOfIdentifier(b.InputFile.Source, result.otherNameLoc) + notes = []logger.MsgData{ + a.LineColumnTracker().MsgData(ra, "One matching export is here:"), + b.LineColumnTracker().MsgData(rb, "Another matching export is 
here:"), + } + } + + symbol := c.graph.Symbols.Get(importRef) + if symbol.ImportItemStatus == js_ast.ImportItemGenerated { + // This is a warning instead of an error because although it appears + // to be a named import, it's actually an automatically-generated + // named import that was originally a property access on an import + // star namespace object. Normally this property access would just + // resolve to undefined at run-time instead of failing at binding- + // time, so we emit a warning and rewrite the value to the literal + // "undefined" instead of emitting an error. + symbol.ImportItemStatus = js_ast.ImportItemMissing + msg := fmt.Sprintf("Import %q will always be undefined because there are multiple matching exports", namedImport.Alias) + c.log.AddWithNotes(logger.Warning, file.LineColumnTracker(), r, msg, notes) + } else { + msg := fmt.Sprintf("Ambiguous import %q has multiple matching exports", namedImport.Alias) + c.log.AddWithNotes(logger.Error, file.LineColumnTracker(), r, msg, notes) + } + } + } +} + +type matchImportKind uint8 + +const ( + // The import is either external or undefined + matchImportIgnore matchImportKind = iota + + // "sourceIndex" and "ref" are in use + matchImportNormal + + // "namespaceRef" and "alias" are in use + matchImportNamespace + + // Both "matchImportNormal" and "matchImportNamespace" + matchImportNormalAndNamespace + + // The import could not be evaluated due to a cycle + matchImportCycle + + // The import is missing but came from a TypeScript file + matchImportProbablyTypeScriptType + + // The import resolved to multiple symbols via "export * from" + matchImportAmbiguous +) + +type matchImportResult struct { + kind matchImportKind + namespaceRef js_ast.Ref + alias string + sourceIndex uint32 + nameLoc logger.Loc // Optional, goes with sourceIndex, ignore if zero + otherSourceIndex uint32 + otherNameLoc logger.Loc // Optional, goes with otherSourceIndex, ignore if zero + ref js_ast.Ref +} + +func (c *linkerContext) matchImportWithExport( + tracker importTracker, reExportsIn []js_ast.Dependency, +) (result matchImportResult, reExports []js_ast.Dependency) { + var ambiguousResults []matchImportResult + reExports = reExportsIn + +loop: + for { + // Make sure we avoid infinite loops trying to resolve cycles: + // + // // foo.js + // export {a as b} from './foo.js' + // export {b as c} from './foo.js' + // export {c as a} from './foo.js' + // + // This uses a O(n^2) array scan instead of a O(n) map because the vast + // majority of cases have one or two elements and Go arrays are cheap to + // reuse without allocating. + for _, previousTracker := range c.cycleDetector { + if tracker == previousTracker { + result = matchImportResult{kind: matchImportCycle} + break loop + } + } + c.cycleDetector = append(c.cycleDetector, tracker) + + // Resolve the import by one step + nextTracker, status, potentiallyAmbiguousExportStarRefs := c.advanceImportTracker(tracker) + switch status { + case importCommonJS, importCommonJSWithoutExports, importExternal, importDisabled: + if status == importExternal && c.options.OutputFormat.KeepES6ImportExportSyntax() { + // Imports from external modules should not be converted to CommonJS + // if the output format preserves the original ES6 import statements + break + } + + // If it's a CommonJS or external file, rewrite the import to a + // property access. Don't do this if the namespace reference is invalid + // though. This is the case for star imports, where the import is the + // namespace. 
+ trackerFile := &c.graph.Files[tracker.sourceIndex] + namedImport := trackerFile.InputFile.Repr.(*graph.JSRepr).AST.NamedImports[tracker.importRef] + if namedImport.NamespaceRef != js_ast.InvalidRef { + if result.kind == matchImportNormal { + result.kind = matchImportNormalAndNamespace + result.namespaceRef = namedImport.NamespaceRef + result.alias = namedImport.Alias + } else { + result = matchImportResult{ + kind: matchImportNamespace, + namespaceRef: namedImport.NamespaceRef, + alias: namedImport.Alias, + } + } + } + + // Warn about importing from a file that is known to not have any exports + if status == importCommonJSWithoutExports { + symbol := c.graph.Symbols.Get(tracker.importRef) + symbol.ImportItemStatus = js_ast.ImportItemMissing + c.log.Add(logger.Warning, + trackerFile.LineColumnTracker(), + js_lexer.RangeOfIdentifier(trackerFile.InputFile.Source, namedImport.AliasLoc), + fmt.Sprintf("Import %q will always be undefined because the file %q has no exports", + namedImport.Alias, c.graph.Files[nextTracker.sourceIndex].InputFile.Source.PrettyPath)) + } + + case importDynamicFallback: + // If it's a file with dynamic export fallback, rewrite the import to a property access + trackerFile := &c.graph.Files[tracker.sourceIndex] + namedImport := trackerFile.InputFile.Repr.(*graph.JSRepr).AST.NamedImports[tracker.importRef] + if result.kind == matchImportNormal { + result.kind = matchImportNormalAndNamespace + result.namespaceRef = nextTracker.importRef + result.alias = namedImport.Alias + } else { + result = matchImportResult{ + kind: matchImportNamespace, + namespaceRef: nextTracker.importRef, + alias: namedImport.Alias, + } + } + + case importNoMatch: + symbol := c.graph.Symbols.Get(tracker.importRef) + trackerFile := &c.graph.Files[tracker.sourceIndex] + namedImport := trackerFile.InputFile.Repr.(*graph.JSRepr).AST.NamedImports[tracker.importRef] + r := js_lexer.RangeOfIdentifier(trackerFile.InputFile.Source, namedImport.AliasLoc) + + // Report mismatched imports and exports + if symbol.ImportItemStatus == js_ast.ImportItemGenerated { + // This is a warning instead of an error because although it appears + // to be a named import, it's actually an automatically-generated + // named import that was originally a property access on an import + // star namespace object. Normally this property access would just + // resolve to undefined at run-time instead of failing at binding- + // time, so we emit a warning and rewrite the value to the literal + // "undefined" instead of emitting an error. + symbol.ImportItemStatus = js_ast.ImportItemMissing + c.log.Add(logger.Warning, trackerFile.LineColumnTracker(), r, fmt.Sprintf( + "Import %q will always be undefined because there is no matching export in %q", + namedImport.Alias, c.graph.Files[nextTracker.sourceIndex].InputFile.Source.PrettyPath)) + } else { + c.log.Add(logger.Error, trackerFile.LineColumnTracker(), r, fmt.Sprintf("No matching export in %q for import %q", + c.graph.Files[nextTracker.sourceIndex].InputFile.Source.PrettyPath, namedImport.Alias)) + } + + case importProbablyTypeScriptType: + // Omit this import from any namespace export code we generate for + // import star statements (i.e. "import * as ns from 'path'") + result = matchImportResult{kind: matchImportProbablyTypeScriptType} + + case importFound: + // If there are multiple ambiguous results due to use of "export * from" + // statements, trace them all to see if they point to different things. 
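+			// For illustration (hypothetical input files):
+			//
+			//   // a.js: export * from './b.js'; export * from './c.js'
+			//   // b.js: export let x = 1
+			//   // c.js: export let x = 2
+			//
+			// An "import { x } from './a.js'" matches two different symbols,
+			// so it is reported as ambiguous below unless all trackers resolve
+			// to the same underlying ref.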
+ for _, ambiguousTracker := range potentiallyAmbiguousExportStarRefs { + // If this is a re-export of another import, follow the import + if _, ok := c.graph.Files[ambiguousTracker.SourceIndex].InputFile.Repr.(*graph.JSRepr).AST.NamedImports[ambiguousTracker.Ref]; ok { + // Save and restore the cycle detector to avoid mixing information + oldCycleDetector := c.cycleDetector + ambiguousResult, newReExportFiles := c.matchImportWithExport(importTracker{ + sourceIndex: ambiguousTracker.SourceIndex, + importRef: ambiguousTracker.Ref, + }, reExports) + c.cycleDetector = oldCycleDetector + ambiguousResults = append(ambiguousResults, ambiguousResult) + reExports = newReExportFiles + } else { + ambiguousResults = append(ambiguousResults, matchImportResult{ + kind: matchImportNormal, + sourceIndex: ambiguousTracker.SourceIndex, + ref: ambiguousTracker.Ref, + nameLoc: ambiguousTracker.NameLoc, + }) + } + } + + // Defer the actual binding of this import until after we generate + // namespace export code for all files. This has to be done for all + // import-to-export matches, not just the initial import to the final + // export, since all imports and re-exports must be merged together + // for correctness. + result = matchImportResult{ + kind: matchImportNormal, + sourceIndex: nextTracker.sourceIndex, + ref: nextTracker.importRef, + nameLoc: nextTracker.nameLoc, + } + + // Depend on the statement(s) that declared this import symbol in the + // original file + for _, resolvedPartIndex := range c.graph.Files[tracker.sourceIndex].InputFile.Repr.(*graph.JSRepr).TopLevelSymbolToParts(tracker.importRef) { + reExports = append(reExports, js_ast.Dependency{ + SourceIndex: tracker.sourceIndex, + PartIndex: resolvedPartIndex, + }) + } + + // If this is a re-export of another import, continue for another + // iteration of the loop to resolve that import as well + if _, ok := c.graph.Files[nextTracker.sourceIndex].InputFile.Repr.(*graph.JSRepr).AST.NamedImports[nextTracker.importRef]; ok { + tracker = nextTracker + continue + } + + default: + panic("Internal error") + } + + // Stop now if we didn't explicitly "continue" above + break + } + + // If there is a potential ambiguity, all results must be the same + for _, ambiguousResult := range ambiguousResults { + if ambiguousResult != result { + if result.kind == matchImportNormal && ambiguousResult.kind == matchImportNormal && + result.nameLoc.Start != 0 && ambiguousResult.nameLoc.Start != 0 { + return matchImportResult{ + kind: matchImportAmbiguous, + sourceIndex: result.sourceIndex, + nameLoc: result.nameLoc, + otherSourceIndex: ambiguousResult.sourceIndex, + otherNameLoc: ambiguousResult.nameLoc, + }, nil + } + return matchImportResult{kind: matchImportAmbiguous}, nil + } + } + + return +} + +func (c *linkerContext) recursivelyWrapDependencies(sourceIndex uint32) { + repr := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) + if repr.Meta.DidWrapDependencies { + return + } + repr.Meta.DidWrapDependencies = true + + // Never wrap the runtime file since it always comes first + if sourceIndex == runtime.SourceIndex { + return + } + + // This module must be wrapped + if repr.Meta.Wrap == graph.WrapNone { + if repr.AST.ExportsKind == js_ast.ExportsCommonJS { + repr.Meta.Wrap = graph.WrapCJS + } else { + repr.Meta.Wrap = graph.WrapESM + } + } + + // All dependencies must also be wrapped + for _, record := range repr.AST.ImportRecords { + if record.SourceIndex.IsValid() { + c.recursivelyWrapDependencies(record.SourceIndex.GetIndex()) + } + } +} + +func (c 
*linkerContext) hasDynamicExportsDueToExportStar(sourceIndex uint32, visited map[uint32]bool) bool { + // Terminate the traversal now if this file already has dynamic exports + repr := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) + if repr.AST.ExportsKind == js_ast.ExportsCommonJS || repr.AST.ExportsKind == js_ast.ExportsESMWithDynamicFallback { + return true + } + + // Avoid infinite loops due to cycles in the export star graph + if visited[sourceIndex] { + return false + } + visited[sourceIndex] = true + + // Scan over the export star graph + for _, importRecordIndex := range repr.AST.ExportStarImportRecords { + record := &repr.AST.ImportRecords[importRecordIndex] + + // This file has dynamic exports if the exported imports are from a file + // that either has dynamic exports directly or transitively by itself + // having an export star from a file with dynamic exports. + if (!record.SourceIndex.IsValid() && (!c.graph.Files[sourceIndex].IsEntryPoint() || !c.options.OutputFormat.KeepES6ImportExportSyntax())) || + (record.SourceIndex.IsValid() && record.SourceIndex.GetIndex() != sourceIndex && c.hasDynamicExportsDueToExportStar(record.SourceIndex.GetIndex(), visited)) { + repr.AST.ExportsKind = js_ast.ExportsESMWithDynamicFallback + return true + } + } + + return false +} + +func (c *linkerContext) addExportsForExportStar( + resolvedExports map[string]graph.ExportData, + sourceIndex uint32, + sourceIndexStack []uint32, +) { + // Avoid infinite loops due to cycles in the export star graph + for _, prevSourceIndex := range sourceIndexStack { + if prevSourceIndex == sourceIndex { + return + } + } + sourceIndexStack = append(sourceIndexStack, sourceIndex) + repr := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) + + for _, importRecordIndex := range repr.AST.ExportStarImportRecords { + record := &repr.AST.ImportRecords[importRecordIndex] + if !record.SourceIndex.IsValid() { + // This will be resolved at run time instead + continue + } + otherSourceIndex := record.SourceIndex.GetIndex() + + // Export stars from a CommonJS module don't work because they can't be + // statically discovered. Just silently ignore them in this case. + // + // We could attempt to check whether the imported file still has ES6 + // exports even though it still uses CommonJS features. However, when + // doing this we'd also have to rewrite any imports of these export star + // re-exports as property accesses off of a generated require() call. 
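+		// Illustrative sketch (hypothetical files):
+		//
+		//   // cjs.js
+		//   exports.foo = 1
+		//
+		//   // reexport.js
+		//   export * from './cjs.js'
+		//
+		// The exports of "cjs.js" are only knowable at run time, so "foo" is
+		// not added to the statically resolved exports of "reexport.js" below.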
+ otherRepr := c.graph.Files[otherSourceIndex].InputFile.Repr.(*graph.JSRepr) + if otherRepr.AST.ExportsKind == js_ast.ExportsCommonJS { + // All exports will be resolved at run time instead + continue + } + + // Accumulate this file's exports + nextExport: + for alias, name := range otherRepr.AST.NamedExports { + // ES6 export star statements ignore exports named "default" + if alias == "default" { + continue + } + + // This export star is shadowed if any file in the stack has a matching real named export + for _, prevSourceIndex := range sourceIndexStack { + prevRepr := c.graph.Files[prevSourceIndex].InputFile.Repr.(*graph.JSRepr) + if _, ok := prevRepr.AST.NamedExports[alias]; ok { + continue nextExport + } + } + + if existing, ok := resolvedExports[alias]; !ok { + // Initialize the re-export + resolvedExports[alias] = graph.ExportData{ + Ref: name.Ref, + SourceIndex: otherSourceIndex, + NameLoc: name.AliasLoc, + } + + // Make sure the symbol is marked as imported so that code splitting + // imports it correctly if it ends up being shared with another chunk + repr.Meta.ImportsToBind[name.Ref] = graph.ImportData{ + Ref: name.Ref, + SourceIndex: otherSourceIndex, + } + } else if existing.SourceIndex != otherSourceIndex { + // Two different re-exports colliding makes it potentially ambiguous + existing.PotentiallyAmbiguousExportStarRefs = + append(existing.PotentiallyAmbiguousExportStarRefs, graph.ImportData{ + SourceIndex: otherSourceIndex, + Ref: name.Ref, + NameLoc: name.AliasLoc, + }) + resolvedExports[alias] = existing + } + } + + // Search further through this file's export stars + c.addExportsForExportStar(resolvedExports, otherSourceIndex, sourceIndexStack) + } +} + +type importTracker struct { + sourceIndex uint32 + nameLoc logger.Loc // Optional, goes with sourceIndex, ignore if zero + importRef js_ast.Ref +} + +type importStatus uint8 + +const ( + // The imported file has no matching export + importNoMatch importStatus = iota + + // The imported file has a matching export + importFound + + // The imported file is CommonJS and has unknown exports + importCommonJS + + // The import is missing but there is a dynamic fallback object + importDynamicFallback + + // The import was treated as a CommonJS import but the file is known to have no exports + importCommonJSWithoutExports + + // The imported file was disabled by mapping it to false in the "browser" + // field of package.json + importDisabled + + // The imported file is external and has unknown exports + importExternal + + // This is a missing re-export in a TypeScript file, so it's probably a type + importProbablyTypeScriptType +) + +func (c *linkerContext) advanceImportTracker(tracker importTracker) (importTracker, importStatus, []graph.ImportData) { + file := &c.graph.Files[tracker.sourceIndex] + repr := file.InputFile.Repr.(*graph.JSRepr) + namedImport := repr.AST.NamedImports[tracker.importRef] + + // Is this an external file? + record := &repr.AST.ImportRecords[namedImport.ImportRecordIndex] + if !record.SourceIndex.IsValid() { + return importTracker{}, importExternal, nil + } + + // Is this a disabled file? + otherSourceIndex := record.SourceIndex.GetIndex() + if c.graph.Files[otherSourceIndex].InputFile.Source.KeyPath.IsDisabled() { + return importTracker{sourceIndex: otherSourceIndex, importRef: js_ast.InvalidRef}, importDisabled, nil + } + + // Is this a named import of a file without any exports? 
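+	// Illustrative sketch (hypothetical files): a named import from a file
+	// with neither ESM exports nor any use of "exports"/"module"
+	//
+	//   // effects.js
+	//   console.log('side effects only')
+	//
+	//   // app.js
+	//   import {foo} from './effects.js'
+	//
+	// takes the "importCommonJSWithoutExports" path below, which warns that
+	// "foo" will always be undefined.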
+ otherRepr := c.graph.Files[otherSourceIndex].InputFile.Repr.(*graph.JSRepr) + if !namedImport.AliasIsStar && !otherRepr.AST.HasLazyExport && + // CommonJS exports + otherRepr.AST.ExportKeyword.Len == 0 && namedImport.Alias != "default" && + // ESM exports + !otherRepr.AST.UsesExportsRef && !otherRepr.AST.UsesModuleRef { + // Just warn about it and replace the import with "undefined" + return importTracker{sourceIndex: otherSourceIndex, importRef: js_ast.InvalidRef}, importCommonJSWithoutExports, nil + } + + // Is this a CommonJS file? + if otherRepr.AST.ExportsKind == js_ast.ExportsCommonJS { + return importTracker{sourceIndex: otherSourceIndex, importRef: js_ast.InvalidRef}, importCommonJS, nil + } + + // Match this import star with an export star from the imported file + if matchingExport := otherRepr.Meta.ResolvedExportStar; namedImport.AliasIsStar && matchingExport != nil { + // Check to see if this is a re-export of another import + return importTracker{ + sourceIndex: matchingExport.SourceIndex, + importRef: matchingExport.Ref, + nameLoc: matchingExport.NameLoc, + }, importFound, matchingExport.PotentiallyAmbiguousExportStarRefs + } + + // Match this import up with an export from the imported file + if matchingExport, ok := otherRepr.Meta.ResolvedExports[namedImport.Alias]; ok { + // Check to see if this is a re-export of another import + return importTracker{ + sourceIndex: matchingExport.SourceIndex, + importRef: matchingExport.Ref, + nameLoc: matchingExport.NameLoc, + }, importFound, matchingExport.PotentiallyAmbiguousExportStarRefs + } + + // Is this a file with dynamic exports? + if otherRepr.AST.ExportsKind == js_ast.ExportsESMWithDynamicFallback { + return importTracker{sourceIndex: otherSourceIndex, importRef: otherRepr.AST.ExportsRef}, importDynamicFallback, nil + } + + // Missing re-exports in TypeScript files are indistinguishable from types + if file.InputFile.Loader.IsTypeScript() && namedImport.IsExported { + return importTracker{}, importProbablyTypeScriptType, nil + } + + return importTracker{sourceIndex: otherSourceIndex}, importNoMatch, nil +} + +func (c *linkerContext) treeShakingAndCodeSplitting() { + // Tree shaking: Each entry point marks all files reachable from itself + c.timer.Begin("Tree shaking") + for _, entryPoint := range c.graph.EntryPoints() { + c.markFileLiveForTreeShaking(entryPoint.SourceIndex) + } + c.timer.End("Tree shaking") + + // Code splitting: Determine which entry points can reach which files. This + // has to happen after tree shaking because there is an implicit dependency + // between live parts within the same file. All liveness has to be computed + // first before determining which entry points can reach which files. 
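+	// Illustrative sketch (hypothetical entry points): if "a.js" and "b.js"
+	// are both entry points and both reach a live file "shared.js", the entry
+	// bits recorded for "shared.js" will contain both entry points, which is
+	// what later lets chunk assignment place it in a chunk shared by both.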
+ c.timer.Begin("Code splitting") + for i, entryPoint := range c.graph.EntryPoints() { + c.markFileReachableForCodeSplitting(entryPoint.SourceIndex, uint(i), 0) + } + c.timer.End("Code splitting") +} + +func (c *linkerContext) markFileReachableForCodeSplitting(sourceIndex uint32, entryPointBit uint, distanceFromEntryPoint uint32) { + file := &c.graph.Files[sourceIndex] + if !file.IsLive { + return + } + traverseAgain := false + + // Track the minimum distance to an entry point + if distanceFromEntryPoint < file.DistanceFromEntryPoint { + file.DistanceFromEntryPoint = distanceFromEntryPoint + traverseAgain = true + } + distanceFromEntryPoint++ + + // Don't mark this file more than once + if file.EntryBits.HasBit(entryPointBit) && !traverseAgain { + return + } + file.EntryBits.SetBit(entryPointBit) + + switch repr := file.InputFile.Repr.(type) { + case *graph.JSRepr: + // If the JavaScript stub for a CSS file is included, also include the CSS file + if repr.CSSSourceIndex.IsValid() { + c.markFileReachableForCodeSplitting(repr.CSSSourceIndex.GetIndex(), entryPointBit, distanceFromEntryPoint) + } + + // Traverse into all imported files + for _, record := range repr.AST.ImportRecords { + if record.SourceIndex.IsValid() && !c.isExternalDynamicImport(&record, sourceIndex) { + c.markFileReachableForCodeSplitting(record.SourceIndex.GetIndex(), entryPointBit, distanceFromEntryPoint) + } + } + + // Traverse into all dependencies of all parts in this file + for _, part := range repr.AST.Parts { + for _, dependency := range part.Dependencies { + if dependency.SourceIndex != sourceIndex { + c.markFileReachableForCodeSplitting(dependency.SourceIndex, entryPointBit, distanceFromEntryPoint) + } + } + } + + case *graph.CSSRepr: + // Traverse into all dependencies + for _, record := range repr.AST.ImportRecords { + if record.SourceIndex.IsValid() { + c.markFileReachableForCodeSplitting(record.SourceIndex.GetIndex(), entryPointBit, distanceFromEntryPoint) + } + } + } +} + +func (c *linkerContext) markFileLiveForTreeShaking(sourceIndex uint32) { + file := &c.graph.Files[sourceIndex] + + // Don't mark this file more than once + if file.IsLive { + return + } + file.IsLive = true + + switch repr := file.InputFile.Repr.(type) { + case *graph.JSRepr: + // If the JavaScript stub for a CSS file is included, also include the CSS file + if repr.CSSSourceIndex.IsValid() { + c.markFileLiveForTreeShaking(repr.CSSSourceIndex.GetIndex()) + } + + for partIndex, part := range repr.AST.Parts { + canBeRemovedIfUnused := part.CanBeRemovedIfUnused + + // Also include any statement-level imports + for _, importRecordIndex := range part.ImportRecordIndices { + record := &repr.AST.ImportRecords[importRecordIndex] + if record.Kind != ast.ImportStmt { + continue + } + + if record.SourceIndex.IsValid() { + otherSourceIndex := record.SourceIndex.GetIndex() + + // Don't include this module for its side effects if it can be + // considered to have no side effects + if otherFile := &c.graph.Files[otherSourceIndex]; otherFile.InputFile.SideEffects.Kind != graph.HasSideEffects && !c.options.IgnoreDCEAnnotations { + continue + } + + // Otherwise, include this module for its side effects + c.markFileLiveForTreeShaking(otherSourceIndex) + } + + // If we get here then the import was included for its side effects, so + // we must also keep this part + canBeRemovedIfUnused = false + } + + // Include all parts in this file with side effects, or just include + // everything if tree-shaking is disabled. 
Note that we still want to + // perform tree-shaking on the runtime even if tree-shaking is disabled. + if !canBeRemovedIfUnused || (!part.ForceTreeShaking && !c.options.TreeShaking && file.IsEntryPoint()) { + c.markPartLiveForTreeShaking(sourceIndex, uint32(partIndex)) + } + } + + case *graph.CSSRepr: + // Include all "@import" rules + for _, record := range repr.AST.ImportRecords { + if record.SourceIndex.IsValid() { + c.markFileLiveForTreeShaking(record.SourceIndex.GetIndex()) + } + } + } +} + +func (c *linkerContext) isExternalDynamicImport(record *ast.ImportRecord, sourceIndex uint32) bool { + return record.Kind == ast.ImportDynamic && c.graph.Files[record.SourceIndex.GetIndex()].IsEntryPoint() && record.SourceIndex.GetIndex() != sourceIndex +} + +func (c *linkerContext) markPartLiveForTreeShaking(sourceIndex uint32, partIndex uint32) { + file := &c.graph.Files[sourceIndex] + repr := file.InputFile.Repr.(*graph.JSRepr) + part := &repr.AST.Parts[partIndex] + + // Don't mark this part more than once + if part.IsLive { + return + } + part.IsLive = true + + // Include the file containing this part + c.markFileLiveForTreeShaking(sourceIndex) + + // Also include any dependencies + for _, dep := range part.Dependencies { + c.markPartLiveForTreeShaking(dep.SourceIndex, dep.PartIndex) + } +} + +func sanitizeFilePathForVirtualModulePath(path string) string { + // Convert it to a safe file path. See: https://stackoverflow.com/a/31976060 + sb := strings.Builder{} + needsGap := false + for _, c := range path { + switch c { + case 0: + // These characters are forbidden on Unix and Windows + + case '<', '>', ':', '"', '|', '?', '*': + // These characters are forbidden on Windows + + default: + if c < 0x20 { + // These characters are forbidden on Windows + break + } + + // Turn runs of invalid characters into a '_' + if needsGap { + sb.WriteByte('_') + needsGap = false + } + + sb.WriteRune(c) + continue + } + + if sb.Len() > 0 { + needsGap = true + } + } + + // Make sure the name isn't empty + if sb.Len() == 0 { + return "_" + } + + // Note: An extension will be added to this base name, so there is no need to + // avoid forbidden file names such as ".." since ".js" is a valid file name. + return sb.String() +} + +// JavaScript modules are traversed in depth-first postorder. This is the +// order that JavaScript modules were evaluated in before the top-level await +// feature was introduced. +// +// A +// / \ +// B C +// \ / +// D +// +// If A imports B and then C, B imports D, and C imports D, then the JavaScript +// traversal order is D B C A. +// +// This function may deviate from ESM import order for dynamic imports (both +// "require()" and "import()"). This is because the import order is impossible +// to determine since the imports happen at run-time instead of compile-time. +// In this case we just pick an arbitrary but consistent order. +func (c *linkerContext) findImportedCSSFilesInJSOrder(entryPoint uint32) (order []uint32) { + visited := make(map[uint32]bool) + var visit func(uint32, ast.Index32) + + // Include this file and all files it imports + visit = func(sourceIndex uint32, importerIndex ast.Index32) { + if visited[sourceIndex] { + return + } + visited[sourceIndex] = true + file := &c.graph.Files[sourceIndex] + repr := file.InputFile.Repr.(*graph.JSRepr) + + // Iterate over each part in the file in order + for _, part := range repr.AST.Parts { + // Ignore dead code that has been removed from the bundle. 
Any code + // that's reachable from the entry point, even through lazy dynamic + // imports, could end up being activated by the bundle and needs its + // CSS to be included. This may change if/when code splitting is + // supported for CSS. + if !part.IsLive { + continue + } + + // Traverse any files imported by this part. Note that CommonJS calls + // to "require()" count as imports too, sort of as if the part has an + // ESM "import" statement in it. This may seem weird because ESM imports + // are a compile-time concept while CommonJS imports are a run-time + // concept. But we don't want to manipulate