package bundler import ( "bytes" "encoding/base64" "encoding/binary" "fmt" "hash" "path" "sort" "strings" "sync" "github.com/evanw/esbuild/internal/ast" "github.com/evanw/esbuild/internal/compat" "github.com/evanw/esbuild/internal/config" "github.com/evanw/esbuild/internal/css_ast" "github.com/evanw/esbuild/internal/css_printer" "github.com/evanw/esbuild/internal/fs" "github.com/evanw/esbuild/internal/graph" "github.com/evanw/esbuild/internal/helpers" "github.com/evanw/esbuild/internal/js_ast" "github.com/evanw/esbuild/internal/js_lexer" "github.com/evanw/esbuild/internal/js_printer" "github.com/evanw/esbuild/internal/logger" "github.com/evanw/esbuild/internal/renamer" "github.com/evanw/esbuild/internal/resolver" "github.com/evanw/esbuild/internal/runtime" "github.com/evanw/esbuild/internal/sourcemap" "github.com/evanw/esbuild/internal/xxhash" ) type linkerContext struct { options *config.Options timer *helpers.Timer log logger.Log fs fs.FS res resolver.Resolver graph graph.LinkerGraph // This helps avoid an infinite loop when matching imports to exports cycleDetector []importTracker // We may need to refer to the CommonJS "module" symbol for exports unboundModuleRef js_ast.Ref // We may need to refer to the "__esm" and/or "__commonJS" runtime symbols cjsRuntimeRef js_ast.Ref esmRuntimeRef js_ast.Ref // This represents the parallel computation of source map related data. // Calling this will block until the computation is done. The resulting value // is shared between threads and must be treated as immutable. dataForSourceMaps func() []dataForSourceMap // This is passed to us from the bundling phase uniqueKeyPrefix string uniqueKeyPrefixBytes []byte // This is just "uniqueKeyPrefix" in byte form } type partRange struct { sourceIndex uint32 partIndexBegin uint32 partIndexEnd uint32 } type chunkInfo struct { // This is a random string and is used to represent the output path of this // chunk before the final output path has been computed. uniqueKey string filesWithPartsInChunk map[uint32]bool entryBits helpers.BitSet // This information is only useful if "isEntryPoint" is true isEntryPoint bool sourceIndex uint32 // An index into "c.sources" entryPointBit uint // An index into "c.graph.EntryPoints" // For code splitting crossChunkImports []chunkImport // This is the representation-specific information chunkRepr chunkRepr // This is the final path of this chunk relative to the output directory, but // without the substitution of the final hash (since it hasn't been computed). finalTemplate []config.PathTemplate // This is the final path of this chunk relative to the output directory. It // is the substitution of the final hash into "finalTemplate". finalRelPath string // If non-empty, this chunk needs to generate an external legal comments file. externalLegalComments []byte // When this chunk is initially generated in isolation, the output pieces // will contain slices of the output with the unique keys of other chunks // omitted. intermediateOutput intermediateOutput // This contains the hash for just this chunk without including information // from the hashes of other chunks. Later on in the linking process, the // final hash for this chunk will be constructed by merging the isolated // hashes of all transitive dependencies of this chunk. This is separated // into two phases like this to handle cycles in the chunk import graph. 
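	//
	// An illustrative sketch of the two phases (not the exact implementation):
	//
	//   isolated(chunk) = hash of this chunk's own contents, computed in parallel
	//   final(chunk)    = hash over isolated(chunk) and isolated(dep) for every
	//                     chunk reachable through "crossChunkImports"
	//
	// "waitForIsolatedHash" blocks until the first phase has finished for this
	// chunk.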
waitForIsolatedHash func() []byte // Other fields relating to the output file for this chunk jsonMetadataChunkCallback func(finalOutputSize int) helpers.Joiner outputSourceMap sourcemap.SourceMapPieces isExecutable bool } type chunkImport struct { chunkIndex uint32 importKind ast.ImportKind } type outputPieceIndexKind uint8 const ( outputPieceNone outputPieceIndexKind = iota outputPieceAssetIndex outputPieceChunkIndex ) // This is a chunk of source code followed by a reference to another chunk. For // example, the file "@import 'CHUNK0001'; body { color: black; }" would be // represented by two pieces, one with the data "@import '" and another with the // data "'; body { color: black; }". The first would have the chunk index 1 and // the second would have an invalid chunk index. type outputPiece struct { data []byte // Note: The "kind" may be "outputPieceNone" in which case there is one piece // with data and no chunk index. For example, the chunk may not contain any // imports. index uint32 kind outputPieceIndexKind } type intermediateOutput struct { // If the chunk doesn't have any references to other chunks, then "pieces" is // nil and "joiner" contains the contents of the chunk. This is more efficient // because it avoids doing a join operation twice. joiner helpers.Joiner // Otherwise, "pieces" contains the contents of the chunk and "joiner" should // not be used. Another joiner will have to be constructed later when merging // the pieces together. pieces []outputPiece } type chunkRepr interface{ isChunk() } func (*chunkReprJS) isChunk() {} func (*chunkReprCSS) isChunk() {} type chunkReprJS struct { filesInChunkInOrder []uint32 partsInChunkInOrder []partRange // For code splitting crossChunkPrefixStmts []js_ast.Stmt crossChunkSuffixStmts []js_ast.Stmt exportsToOtherChunks map[js_ast.Ref]string importsFromOtherChunks map[uint32]crossChunkImportItemArray } type chunkReprCSS struct { externalImportsInOrder []externalImportCSS filesInChunkInOrder []uint32 } type externalImportCSS struct { path logger.Path conditions []css_ast.Token conditionImportRecords []ast.ImportRecord } // Returns a log where "log.HasErrors()" only returns true if any errors have // been logged since this call. This is useful when there have already been // errors logged by other linkers that share the same log. 
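//
// An illustrative use (hypothetical call sites, not code from this package):
//
//   log = wrappedLog(log)
//   ...run one linking pass...
//   if log.HasErrors() {
//     // only errors logged through this wrapper count here
//   }
//
// Every message is still forwarded to the original log's "AddMsg" callback.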
func wrappedLog(log logger.Log) logger.Log { var mutex sync.Mutex var hasErrors bool addMsg := log.AddMsg log.AddMsg = func(msg logger.Msg) { if msg.Kind == logger.Error { mutex.Lock() defer mutex.Unlock() hasErrors = true } addMsg(msg) } log.HasErrors = func() bool { mutex.Lock() defer mutex.Unlock() return hasErrors } return log } func link( options *config.Options, timer *helpers.Timer, log logger.Log, fs fs.FS, res resolver.Resolver, inputFiles []graph.InputFile, entryPoints []graph.EntryPoint, uniqueKeyPrefix string, reachableFiles []uint32, dataForSourceMaps func() []dataForSourceMap, ) []graph.OutputFile { timer.Begin("Link") defer timer.End("Link") log = wrappedLog(log) timer.Begin("Clone linker graph") c := linkerContext{ options: options, timer: timer, log: log, fs: fs, res: res, dataForSourceMaps: dataForSourceMaps, uniqueKeyPrefix: uniqueKeyPrefix, uniqueKeyPrefixBytes: []byte(uniqueKeyPrefix), graph: graph.CloneLinkerGraph( inputFiles, reachableFiles, entryPoints, options.CodeSplitting, ), } timer.End("Clone linker graph") // Use a smaller version of these functions if we don't need profiler names runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr) if c.options.ProfilerNames { c.cjsRuntimeRef = runtimeRepr.AST.NamedExports["__commonJS"].Ref c.esmRuntimeRef = runtimeRepr.AST.NamedExports["__esm"].Ref } else { c.cjsRuntimeRef = runtimeRepr.AST.NamedExports["__commonJSMin"].Ref c.esmRuntimeRef = runtimeRepr.AST.NamedExports["__esmMin"].Ref } for _, entryPoint := range entryPoints { if repr, ok := c.graph.Files[entryPoint.SourceIndex].InputFile.Repr.(*graph.JSRepr); ok { // Loaders default to CommonJS when they are the entry point and the output // format is not ESM-compatible since that avoids generating the ESM-to-CJS // machinery. if repr.AST.HasLazyExport && (c.options.Mode == config.ModePassThrough || (c.options.Mode == config.ModeConvertFormat && !c.options.OutputFormat.KeepES6ImportExportSyntax())) { repr.AST.ExportsKind = js_ast.ExportsCommonJS } // Entry points with ES6 exports must generate an exports object when // targeting non-ES6 formats. Note that the IIFE format only needs this // when the global name is present, since that's the only way the exports // can actually be observed externally. 
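			//
			// An illustrative sketch for a hypothetical entry point bundled with
			// "--format=iife --global-name=lib" (not the exact generated output):
			//
			//   var lib = (() => {
			//     var entry_exports = {};
			//     ...
			//     return entry_exports;
			//   })();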
if repr.AST.ExportKeyword.Len > 0 && (options.OutputFormat == config.FormatCommonJS || (options.OutputFormat == config.FormatIIFE && len(options.GlobalName) > 0)) { repr.AST.UsesExportsRef = true repr.Meta.ForceIncludeExportsForEntryPoint = true } } } // Allocate a new unbound symbol called "module" in case we need it later if c.options.OutputFormat == config.FormatCommonJS { c.unboundModuleRef = c.graph.GenerateNewSymbol(runtime.SourceIndex, js_ast.SymbolUnbound, "module") } else { c.unboundModuleRef = js_ast.InvalidRef } c.scanImportsAndExports() // Stop now if there were errors if c.log.HasErrors() { return []graph.OutputFile{} } c.treeShakingAndCodeSplitting() if c.options.Mode == config.ModePassThrough { for _, entryPoint := range c.graph.EntryPoints() { c.preventExportsFromBeingRenamed(entryPoint.SourceIndex) } } chunks := c.computeChunks() c.computeCrossChunkDependencies(chunks) // Make sure calls to "js_ast.FollowSymbols()" in parallel goroutines after this // won't hit concurrent map mutation hazards js_ast.FollowAllSymbols(c.graph.Symbols) return c.generateChunksInParallel(chunks) } // Currently the automatic chunk generation algorithm should by construction // never generate chunks that import each other since files are allocated to // chunks based on which entry points they are reachable from. // // This will change in the future when we allow manual chunk labels. But before // we allow manual chunk labels, we'll need to rework module initialization to // allow code splitting chunks to be lazily-initialized. // // Since that work hasn't been finished yet, cycles in the chunk import graph // can cause initialization bugs. So let's forbid these cycles for now to guard // against code splitting bugs that could cause us to generate buggy chunks. func (c *linkerContext) enforceNoCyclicChunkImports(chunks []chunkInfo) { var validate func(int, []int) validate = func(chunkIndex int, path []int) { for _, otherChunkIndex := range path { if chunkIndex == otherChunkIndex { c.log.Add(logger.Error, nil, logger.Range{}, "Internal error: generated chunks contain a circular import") return } } path = append(path, chunkIndex) for _, chunkImport := range chunks[chunkIndex].crossChunkImports { // Ignore cycles caused by dynamic "import()" expressions. These are fine // because they don't necessarily cause initialization order issues and // they don't indicate a bug in our chunk generation algorithm. They arise // normally in real code (e.g. two files that import each other). if chunkImport.importKind != ast.ImportDynamic { validate(int(chunkImport.chunkIndex), path) } } } path := make([]int, 0, len(chunks)) for i := range chunks { validate(i, path) } } func (c *linkerContext) generateChunksInParallel(chunks []chunkInfo) []graph.OutputFile { c.timer.Begin("Generate chunks") defer c.timer.End("Generate chunks") // Generate each chunk on a separate goroutine generateWaitGroup := sync.WaitGroup{} generateWaitGroup.Add(len(chunks)) for chunkIndex := range chunks { switch chunks[chunkIndex].chunkRepr.(type) { case *chunkReprJS: go c.generateChunkJS(chunks, chunkIndex, &generateWaitGroup) case *chunkReprCSS: go c.generateChunkCSS(chunks, chunkIndex, &generateWaitGroup) } } c.enforceNoCyclicChunkImports(chunks) generateWaitGroup.Wait() // Compute the final hashes of each chunk. This can technically be done in // parallel but it probably doesn't matter so much because we're not hashing // that much data. 
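	// For example (illustrative): a chunk path template such as
	// "chunk-[hash].js" still needs its "[hash]" placeholder filled in here,
	// while a template without "[hash]" can skip the hash computation entirely.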
visited := make([]uint32, len(chunks)) var finalBytes []byte for chunkIndex := range chunks { chunk := &chunks[chunkIndex] var hashSubstitution *string // Only wait for the hash if necessary if config.HasPlaceholder(chunk.finalTemplate, config.HashPlaceholder) { // Compute the final hash using the isolated hashes of the dependencies hash := xxhash.New() appendIsolatedHashesForImportedChunks(hash, chunks, uint32(chunkIndex), visited, ^uint32(chunkIndex)) finalBytes = hash.Sum(finalBytes[:0]) finalString := hashForFileName(finalBytes) hashSubstitution = &finalString } // Render the last remaining placeholder in the template chunk.finalRelPath = config.TemplateToString(config.SubstituteTemplate(chunk.finalTemplate, config.PathPlaceholders{ Hash: hashSubstitution, })) } // Generate the final output files by joining file pieces together c.timer.Begin("Generate final output files") var resultsWaitGroup sync.WaitGroup results := make([][]graph.OutputFile, len(chunks)) resultsWaitGroup.Add(len(chunks)) for chunkIndex, chunk := range chunks { go func(chunkIndex int, chunk chunkInfo) { var outputFiles []graph.OutputFile // Each file may optionally contain additional files to be copied to the // output directory. This is used by the "file" loader. var commentPrefix string var commentSuffix string switch chunkRepr := chunk.chunkRepr.(type) { case *chunkReprJS: for _, sourceIndex := range chunkRepr.filesInChunkInOrder { outputFiles = append(outputFiles, c.graph.Files[sourceIndex].InputFile.AdditionalFiles...) } commentPrefix = "//" case *chunkReprCSS: for _, sourceIndex := range chunkRepr.filesInChunkInOrder { outputFiles = append(outputFiles, c.graph.Files[sourceIndex].InputFile.AdditionalFiles...) } commentPrefix = "/*" commentSuffix = " */" } // Path substitution for the chunk itself finalRelDir := c.fs.Dir(chunk.finalRelPath) outputContentsJoiner, outputSourceMapShifts := c.substituteFinalPaths(chunks, chunk.intermediateOutput, func(finalRelPathForImport string) string { return c.pathBetweenChunks(finalRelDir, finalRelPathForImport) }) // Generate the optional legal comments file for this chunk if chunk.externalLegalComments != nil { finalRelPathForLegalComments := chunk.finalRelPath + ".LEGAL.txt" // Link the file to the legal comments if c.options.LegalComments == config.LegalCommentsLinkedWithComment { importPath := c.pathBetweenChunks(finalRelDir, finalRelPathForLegalComments) importPath = strings.TrimPrefix(importPath, "./") outputContentsJoiner.EnsureNewlineAtEnd() outputContentsJoiner.AddString("/*! 
For license information please see ") outputContentsJoiner.AddString(importPath) outputContentsJoiner.AddString(" */\n") } // Write the external legal comments file outputFiles = append(outputFiles, graph.OutputFile{ AbsPath: c.fs.Join(c.options.AbsOutputDir, finalRelPathForLegalComments), Contents: chunk.externalLegalComments, JSONMetadataChunk: fmt.Sprintf( "{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": {},\n \"bytes\": %d\n }", len(chunk.externalLegalComments)), }) } // Generate the optional source map for this chunk if c.options.SourceMap != config.SourceMapNone && chunk.outputSourceMap.HasContent() { outputSourceMap := chunk.outputSourceMap.Finalize(outputSourceMapShifts) finalRelPathForSourceMap := chunk.finalRelPath + ".map" // Potentially write a trailing source map comment switch c.options.SourceMap { case config.SourceMapLinkedWithComment: importPath := c.pathBetweenChunks(finalRelDir, finalRelPathForSourceMap) importPath = strings.TrimPrefix(importPath, "./") outputContentsJoiner.EnsureNewlineAtEnd() outputContentsJoiner.AddString(commentPrefix) outputContentsJoiner.AddString("# sourceMappingURL=") outputContentsJoiner.AddString(importPath) outputContentsJoiner.AddString(commentSuffix) outputContentsJoiner.AddString("\n") case config.SourceMapInline, config.SourceMapInlineAndExternal: outputContentsJoiner.EnsureNewlineAtEnd() outputContentsJoiner.AddString(commentPrefix) outputContentsJoiner.AddString("# sourceMappingURL=data:application/json;base64,") outputContentsJoiner.AddString(base64.StdEncoding.EncodeToString(outputSourceMap)) outputContentsJoiner.AddString(commentSuffix) outputContentsJoiner.AddString("\n") } // Potentially write the external source map file switch c.options.SourceMap { case config.SourceMapLinkedWithComment, config.SourceMapInlineAndExternal, config.SourceMapExternalWithoutComment: outputFiles = append(outputFiles, graph.OutputFile{ AbsPath: c.fs.Join(c.options.AbsOutputDir, finalRelPathForSourceMap), Contents: outputSourceMap, JSONMetadataChunk: fmt.Sprintf( "{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": {},\n \"bytes\": %d\n }", len(outputSourceMap)), }) } } // Finalize the output contents outputContents := outputContentsJoiner.Done() // Path substitution for the JSON metadata var jsonMetadataChunk string if c.options.NeedsMetafile { jsonMetadataChunkPieces := c.breakOutputIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents)), uint32(len(chunks))) jsonMetadataChunkBytes, _ := c.substituteFinalPaths(chunks, jsonMetadataChunkPieces, func(finalRelPathForImport string) string { return c.res.PrettyPath(logger.Path{Text: c.fs.Join(c.options.AbsOutputDir, finalRelPathForImport), Namespace: "file"}) }) jsonMetadataChunk = string(jsonMetadataChunkBytes.Done()) } // Generate the output file for this chunk outputFiles = append(outputFiles, graph.OutputFile{ AbsPath: c.fs.Join(c.options.AbsOutputDir, chunk.finalRelPath), Contents: outputContents, JSONMetadataChunk: jsonMetadataChunk, IsExecutable: chunk.isExecutable, }) results[chunkIndex] = outputFiles resultsWaitGroup.Done() }(chunkIndex, chunk) } resultsWaitGroup.Wait() c.timer.End("Generate final output files") // Merge the output files from the different goroutines together in order outputFilesLen := 0 for _, result := range results { outputFilesLen += len(result) } outputFiles := make([]graph.OutputFile, 0, outputFilesLen) for _, result := range results { outputFiles = append(outputFiles, result...) } return outputFiles } // Given a set of output pieces (i.e. 
a buffer already divided into the spans // between import paths), substitute the final import paths in and then join // everything into a single byte buffer. func (c *linkerContext) substituteFinalPaths( chunks []chunkInfo, intermediateOutput intermediateOutput, modifyPath func(string) string, ) (j helpers.Joiner, shifts []sourcemap.SourceMapShift) { // Optimization: If there can be no substitutions, just reuse the initial // joiner that was used when generating the intermediate chunk output // instead of creating another one and copying the whole file into it. if intermediateOutput.pieces == nil { return intermediateOutput.joiner, []sourcemap.SourceMapShift{{}} } var shift sourcemap.SourceMapShift shifts = make([]sourcemap.SourceMapShift, 0, len(intermediateOutput.pieces)) shifts = append(shifts, shift) for _, piece := range intermediateOutput.pieces { var dataOffset sourcemap.LineColumnOffset j.AddBytes(piece.data) dataOffset.AdvanceBytes(piece.data) shift.Before.Add(dataOffset) shift.After.Add(dataOffset) switch piece.kind { case outputPieceAssetIndex: file := c.graph.Files[piece.index] if len(file.InputFile.AdditionalFiles) != 1 { panic("Internal error") } relPath, _ := c.fs.Rel(c.options.AbsOutputDir, file.InputFile.AdditionalFiles[0].AbsPath) // Make sure to always use forward slashes, even on Windows relPath = strings.ReplaceAll(relPath, "\\", "/") importPath := modifyPath(relPath) j.AddString(importPath) shift.Before.AdvanceString(file.InputFile.UniqueKeyForFileLoader) shift.After.AdvanceString(importPath) shifts = append(shifts, shift) case outputPieceChunkIndex: chunk := chunks[piece.index] importPath := modifyPath(chunk.finalRelPath) j.AddString(importPath) shift.Before.AdvanceString(chunk.uniqueKey) shift.After.AdvanceString(importPath) shifts = append(shifts, shift) } } return } func (c *linkerContext) pathBetweenChunks(fromRelDir string, toRelPath string) string { // Join with the public path if it has been configured if c.options.PublicPath != "" { return joinWithPublicPath(c.options.PublicPath, toRelPath) } // Otherwise, return a relative path relPath, ok := c.fs.Rel(fromRelDir, toRelPath) if !ok { c.log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Cannot traverse from directory %q to chunk %q", fromRelDir, toRelPath)) return "" } // Make sure to always use forward slashes, even on Windows relPath = strings.ReplaceAll(relPath, "\\", "/") // Make sure the relative path doesn't start with a name, since that could // be interpreted as a package path instead of a relative path if !strings.HasPrefix(relPath, "./") && !strings.HasPrefix(relPath, "../") { relPath = "./" + relPath } return relPath } // Returns the path of this file relative to "outbase", which is then ready to // be joined with the absolute output directory path. The directory and name // components are returned separately for convenience. func pathRelativeToOutbase( inputFile *graph.InputFile, options *config.Options, fs fs.FS, avoidIndex bool, customFilePath string, ) (relDir string, baseName string) { relDir = "/" absPath := inputFile.Source.KeyPath.Text if customFilePath != "" { // Use the configured output path if present absPath = customFilePath if !fs.IsAbs(absPath) { absPath = fs.Join(options.AbsOutputBase, absPath) } } else if inputFile.Source.KeyPath.Namespace != "file" { // Come up with a path for virtual paths (i.e. 
non-file-system paths) dir, base, _ := logger.PlatformIndependentPathDirBaseExt(absPath) if avoidIndex && base == "index" { _, base, _ = logger.PlatformIndependentPathDirBaseExt(dir) } baseName = sanitizeFilePathForVirtualModulePath(base) return } else { // Heuristic: If the file is named something like "index.js", then use // the name of the parent directory instead. This helps avoid the // situation where many chunks are named "index" because of people // dynamically-importing npm packages that make use of node's implicit // "index" file name feature. if avoidIndex { base := fs.Base(absPath) base = base[:len(base)-len(fs.Ext(base))] if base == "index" { absPath = fs.Dir(absPath) } } } // Try to get a relative path to the base directory relPath, ok := fs.Rel(options.AbsOutputBase, absPath) if !ok { // This can fail in some situations such as on different drives on // Windows. In that case we just use the file name. baseName = fs.Base(absPath) } else { // Now we finally have a relative path relDir = fs.Dir(relPath) + "/" baseName = fs.Base(relPath) // Use platform-independent slashes relDir = strings.ReplaceAll(relDir, "\\", "/") // Replace leading "../" so we don't try to write outside of the output // directory. This normally can't happen because "AbsOutputBase" is // automatically computed to contain all entry point files, but it can // happen if someone sets it manually via the "outbase" API option. // // Note that we can't just strip any leading "../" because that could // cause two separate entry point paths to collide. For example, there // could be both "src/index.js" and "../src/index.js" as entry points. dotDotCount := 0 for strings.HasPrefix(relDir[dotDotCount*3:], "../") { dotDotCount++ } if dotDotCount > 0 { // The use of "_.._" here is somewhat arbitrary but it is unlikely to // collide with a folder named by a human and it works on Windows // (Windows doesn't like names that end with a "."). And not starting // with a "." means that it will not be hidden on Unix. relDir = strings.Repeat("_.._/", dotDotCount) + relDir[dotDotCount*3:] } for strings.HasSuffix(relDir, "/") { relDir = relDir[:len(relDir)-1] } relDir = "/" + relDir if strings.HasSuffix(relDir, "/.") { relDir = relDir[:len(relDir)-1] } } // Strip the file extension if the output path is an input file if customFilePath == "" { ext := fs.Ext(baseName) baseName = baseName[:len(baseName)-len(ext)] } return } func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) { c.timer.Begin("Compute cross-chunk dependencies") defer c.timer.End("Compute cross-chunk dependencies") jsChunks := 0 for _, chunk := range chunks { if _, ok := chunk.chunkRepr.(*chunkReprJS); ok { jsChunks++ } } if jsChunks < 2 { // No need to compute cross-chunk dependencies if there can't be any return } type chunkMeta struct { imports map[js_ast.Ref]bool exports map[js_ast.Ref]bool dynamicImports map[int]bool } chunkMetas := make([]chunkMeta, len(chunks)) // For each chunk, see what symbols it uses from other chunks. Do this in // parallel because it's the most expensive part of this function. 
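	// Each "chunkMeta" entry ends up holding roughly this information
	// (informal description):
	//
	//   imports:        top-level symbols this chunk uses
	//   exports:        top-level symbols other chunks need from this chunk
	//   dynamicImports: chunks reached from here via "import()" expressions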
waitGroup := sync.WaitGroup{} waitGroup.Add(len(chunks)) for chunkIndex, chunk := range chunks { go func(chunkIndex int, chunk chunkInfo) { chunkMeta := &chunkMetas[chunkIndex] imports := make(map[js_ast.Ref]bool) chunkMeta.imports = imports chunkMeta.exports = make(map[js_ast.Ref]bool) // Go over each file in this chunk for sourceIndex := range chunk.filesWithPartsInChunk { // Go over each part in this file that's marked for inclusion in this chunk switch repr := c.graph.Files[sourceIndex].InputFile.Repr.(type) { case *graph.JSRepr: for partIndex, partMeta := range repr.AST.Parts { if !partMeta.IsLive { continue } part := &repr.AST.Parts[partIndex] // Rewrite external dynamic imports to point to the chunk for that entry point for _, importRecordIndex := range part.ImportRecordIndices { record := &repr.AST.ImportRecords[importRecordIndex] if record.SourceIndex.IsValid() && c.isExternalDynamicImport(record, sourceIndex) { otherChunkIndex := c.graph.Files[record.SourceIndex.GetIndex()].EntryPointChunkIndex record.Path.Text = chunks[otherChunkIndex].uniqueKey record.SourceIndex = ast.Index32{} // Track this cross-chunk dynamic import so we make sure to // include its hash when we're calculating the hashes of all // dependencies of this chunk. if int(otherChunkIndex) != chunkIndex { if chunkMeta.dynamicImports == nil { chunkMeta.dynamicImports = make(map[int]bool) } chunkMeta.dynamicImports[int(otherChunkIndex)] = true } } } // Remember what chunk each top-level symbol is declared in. Symbols // with multiple declarations such as repeated "var" statements with // the same name should already be marked as all being in a single // chunk. In that case this will overwrite the same value below which // is fine. for _, declared := range part.DeclaredSymbols { if declared.IsTopLevel { c.graph.Symbols.Get(declared.Ref).ChunkIndex = ast.MakeIndex32(uint32(chunkIndex)) } } // Record each symbol used in this part. This will later be matched up // with our map of which chunk a given symbol is declared in to // determine if the symbol needs to be imported from another chunk. for ref := range part.SymbolUses { symbol := c.graph.Symbols.Get(ref) // Ignore unbound symbols, which don't have declarations if symbol.Kind == js_ast.SymbolUnbound { continue } // Ignore symbols that are going to be replaced by undefined if symbol.ImportItemStatus == js_ast.ImportItemMissing { continue } // If this is imported from another file, follow the import // reference and reference the symbol in that file instead if importData, ok := repr.Meta.ImportsToBind[ref]; ok { ref = importData.Ref symbol = c.graph.Symbols.Get(ref) } else if repr.Meta.Wrap == graph.WrapCJS && ref != repr.AST.WrapperRef { // The only internal symbol that wrapped CommonJS files export // is the wrapper itself. continue } // If this is an ES6 import from a CommonJS file, it will become a // property access off the namespace symbol instead of a bare // identifier. In that case we want to pull in the namespace symbol // instead. The namespace symbol stores the result of "require()". if symbol.NamespaceAlias != nil { ref = symbol.NamespaceAlias.NamespaceRef } // We must record this relationship even for symbols that are not // imports. Due to code splitting, the definition of a symbol may // be moved to a separate chunk than the use of a symbol even if // the definition and use of that symbol are originally from the // same source file. 
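							// For example (illustrative): a helper defined in a file shared by
							// two entry points may be assigned to a shared chunk while this
							// part, which uses it, stays in an entry point's chunk.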
imports[ref] = true } } } } // Include the exports if this is an entry point chunk if chunk.isEntryPoint { if repr, ok := c.graph.Files[chunk.sourceIndex].InputFile.Repr.(*graph.JSRepr); ok { if repr.Meta.Wrap != graph.WrapCJS { for _, alias := range repr.Meta.SortedAndFilteredExportAliases { export := repr.Meta.ResolvedExports[alias] targetRef := export.Ref // If this is an import, then target what the import points to if importData, ok := c.graph.Files[export.SourceIndex].InputFile.Repr.(*graph.JSRepr).Meta.ImportsToBind[targetRef]; ok { targetRef = importData.Ref } // If this is an ES6 import from a CommonJS file, it will become a // property access off the namespace symbol instead of a bare // identifier. In that case we want to pull in the namespace symbol // instead. The namespace symbol stores the result of "require()". if symbol := c.graph.Symbols.Get(targetRef); symbol.NamespaceAlias != nil { targetRef = symbol.NamespaceAlias.NamespaceRef } imports[targetRef] = true } } // Ensure "exports" is included if the current output format needs it if repr.Meta.ForceIncludeExportsForEntryPoint { imports[repr.AST.ExportsRef] = true } // Include the wrapper if present if repr.Meta.Wrap != graph.WrapNone { imports[repr.AST.WrapperRef] = true } } } waitGroup.Done() }(chunkIndex, chunk) } waitGroup.Wait() // Mark imported symbols as exported in the chunk from which they are declared for chunkIndex := range chunks { chunk := &chunks[chunkIndex] chunkRepr, ok := chunk.chunkRepr.(*chunkReprJS) if !ok { continue } chunkMeta := chunkMetas[chunkIndex] // Find all uses in this chunk of symbols from other chunks chunkRepr.importsFromOtherChunks = make(map[uint32]crossChunkImportItemArray) for importRef := range chunkMeta.imports { // Ignore uses that aren't top-level symbols if otherChunkIndex := c.graph.Symbols.Get(importRef).ChunkIndex; otherChunkIndex.IsValid() { if otherChunkIndex := otherChunkIndex.GetIndex(); otherChunkIndex != uint32(chunkIndex) { chunkRepr.importsFromOtherChunks[otherChunkIndex] = append(chunkRepr.importsFromOtherChunks[otherChunkIndex], crossChunkImportItem{ref: importRef}) chunkMetas[otherChunkIndex].exports[importRef] = true } } } // If this is an entry point, make sure we import all chunks belonging to // this entry point, even if there are no imports. We need to make sure // these chunks are evaluated for their side effects too. if chunk.isEntryPoint { for otherChunkIndex, otherChunk := range chunks { if _, ok := otherChunk.chunkRepr.(*chunkReprJS); ok && chunkIndex != otherChunkIndex && otherChunk.entryBits.HasBit(chunk.entryPointBit) { imports := chunkRepr.importsFromOtherChunks[uint32(otherChunkIndex)] chunkRepr.importsFromOtherChunks[uint32(otherChunkIndex)] = imports } } } // Make sure we also track dynamic cross-chunk imports. These need to be // tracked so we count them as dependencies of this chunk for the purpose // of hash calculation. if chunkMeta.dynamicImports != nil { sortedDynamicImports := make([]int, 0, len(chunkMeta.dynamicImports)) for chunkIndex := range chunkMeta.dynamicImports { sortedDynamicImports = append(sortedDynamicImports, chunkIndex) } sort.Ints(sortedDynamicImports) for _, chunkIndex := range sortedDynamicImports { chunk.crossChunkImports = append(chunk.crossChunkImports, chunkImport{ importKind: ast.ImportDynamic, chunkIndex: uint32(chunkIndex), }) } } } // Generate cross-chunk exports. 
These must be computed before cross-chunk // imports because of export alias renaming, which must consider all export // aliases simultaneously to avoid collisions. for chunkIndex := range chunks { chunk := &chunks[chunkIndex] chunkRepr, ok := chunk.chunkRepr.(*chunkReprJS) if !ok { continue } chunkRepr.exportsToOtherChunks = make(map[js_ast.Ref]string) switch c.options.OutputFormat { case config.FormatESModule: r := renamer.ExportRenamer{} var items []js_ast.ClauseItem for _, export := range c.sortedCrossChunkExportItems(chunkMetas[chunkIndex].exports) { var alias string if c.options.MinifyIdentifiers { alias = r.NextMinifiedName() } else { alias = r.NextRenamedName(c.graph.Symbols.Get(export.Ref).OriginalName) } items = append(items, js_ast.ClauseItem{Name: js_ast.LocRef{Ref: export.Ref}, Alias: alias}) chunkRepr.exportsToOtherChunks[export.Ref] = alias } if len(items) > 0 { chunkRepr.crossChunkSuffixStmts = []js_ast.Stmt{{Data: &js_ast.SExportClause{ Items: items, }}} } default: panic("Internal error") } } // Generate cross-chunk imports. These must be computed after cross-chunk // exports because the export aliases must already be finalized so they can // be embedded in the generated import statements. for chunkIndex := range chunks { chunk := &chunks[chunkIndex] chunkRepr, ok := chunk.chunkRepr.(*chunkReprJS) if !ok { continue } var crossChunkPrefixStmts []js_ast.Stmt for _, crossChunkImport := range c.sortedCrossChunkImports(chunks, chunkRepr.importsFromOtherChunks) { switch c.options.OutputFormat { case config.FormatESModule: var items []js_ast.ClauseItem for _, item := range crossChunkImport.sortedImportItems { items = append(items, js_ast.ClauseItem{Name: js_ast.LocRef{Ref: item.ref}, Alias: item.exportAlias}) } importRecordIndex := uint32(len(chunk.crossChunkImports)) chunk.crossChunkImports = append(chunk.crossChunkImports, chunkImport{ importKind: ast.ImportStmt, chunkIndex: crossChunkImport.chunkIndex, }) if len(items) > 0 { // "import {a, b} from './chunk.js'" crossChunkPrefixStmts = append(crossChunkPrefixStmts, js_ast.Stmt{Data: &js_ast.SImport{ Items: &items, ImportRecordIndex: importRecordIndex, }}) } else { // "import './chunk.js'" crossChunkPrefixStmts = append(crossChunkPrefixStmts, js_ast.Stmt{Data: &js_ast.SImport{ ImportRecordIndex: importRecordIndex, }}) } default: panic("Internal error") } } chunkRepr.crossChunkPrefixStmts = crossChunkPrefixStmts } } type crossChunkImport struct { chunkIndex uint32 sortedImportItems crossChunkImportItemArray } // This type is just so we can use Go's native sort function type crossChunkImportArray []crossChunkImport func (a crossChunkImportArray) Len() int { return len(a) } func (a crossChunkImportArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] } func (a crossChunkImportArray) Less(i int, j int) bool { return a[i].chunkIndex < a[j].chunkIndex } // Sort cross-chunk imports by chunk name for determinism func (c *linkerContext) sortedCrossChunkImports(chunks []chunkInfo, importsFromOtherChunks map[uint32]crossChunkImportItemArray) crossChunkImportArray { result := make(crossChunkImportArray, 0, len(importsFromOtherChunks)) for otherChunkIndex, importItems := range importsFromOtherChunks { // Sort imports from a single chunk by alias for determinism otherChunk := &chunks[otherChunkIndex] exportsToOtherChunks := otherChunk.chunkRepr.(*chunkReprJS).exportsToOtherChunks for i, item := range importItems { importItems[i].exportAlias = exportsToOtherChunks[item.ref] } sort.Sort(importItems) result = append(result, crossChunkImport{ 
chunkIndex: otherChunkIndex, sortedImportItems: importItems, }) } sort.Sort(result) return result } type crossChunkImportItem struct { ref js_ast.Ref exportAlias string } // This type is just so we can use Go's native sort function type crossChunkImportItemArray []crossChunkImportItem func (a crossChunkImportItemArray) Len() int { return len(a) } func (a crossChunkImportItemArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] } func (a crossChunkImportItemArray) Less(i int, j int) bool { return a[i].exportAlias < a[j].exportAlias } // The sort order here is arbitrary but needs to be consistent between builds. // The InnerIndex should be stable because the parser for a single file is // single-threaded and deterministically assigns out InnerIndex values // sequentially. But the SourceIndex should be unstable because the main thread // assigns out source index values sequentially to newly-discovered dependencies // in a multi-threaded producer/consumer relationship. So instead we use the // index of the source in the DFS order over all entry points for stability. type stableRef struct { StableSourceIndex uint32 Ref js_ast.Ref } // This type is just so we can use Go's native sort function type stableRefArray []stableRef func (a stableRefArray) Len() int { return len(a) } func (a stableRefArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] } func (a stableRefArray) Less(i int, j int) bool { ai, aj := a[i], a[j] return ai.StableSourceIndex < aj.StableSourceIndex || (ai.StableSourceIndex == aj.StableSourceIndex && ai.Ref.InnerIndex < aj.Ref.InnerIndex) } // Sort cross-chunk exports by chunk name for determinism func (c *linkerContext) sortedCrossChunkExportItems(exportRefs map[js_ast.Ref]bool) stableRefArray { result := make(stableRefArray, 0, len(exportRefs)) for ref := range exportRefs { result = append(result, stableRef{ StableSourceIndex: c.graph.StableSourceIndices[ref.SourceIndex], Ref: ref, }) } sort.Sort(result) return result } func (c *linkerContext) scanImportsAndExports() { c.timer.Begin("Scan imports and exports") defer c.timer.End("Scan imports and exports") // Step 1: Figure out what modules must be CommonJS c.timer.Begin("Step 1") for _, sourceIndex := range c.graph.ReachableFiles { file := &c.graph.Files[sourceIndex] switch repr := file.InputFile.Repr.(type) { case *graph.CSSRepr: // Inline URLs for non-CSS files into the CSS file var additionalFiles []graph.OutputFile for importRecordIndex := range repr.AST.ImportRecords { if record := &repr.AST.ImportRecords[importRecordIndex]; record.SourceIndex.IsValid() { otherFile := &c.graph.Files[record.SourceIndex.GetIndex()] if otherRepr, ok := otherFile.InputFile.Repr.(*graph.JSRepr); ok { record.Path.Text = otherRepr.AST.URLForCSS record.Path.Namespace = "" record.SourceIndex = ast.Index32{} // Copy the additional files to the output directory additionalFiles = append(additionalFiles, otherFile.InputFile.AdditionalFiles...) } } } file.InputFile.AdditionalFiles = additionalFiles case *graph.JSRepr: for importRecordIndex := range repr.AST.ImportRecords { record := &repr.AST.ImportRecords[importRecordIndex] if !record.SourceIndex.IsValid() { continue } otherFile := &c.graph.Files[record.SourceIndex.GetIndex()] otherRepr := otherFile.InputFile.Repr.(*graph.JSRepr) switch record.Kind { case ast.ImportStmt: // Importing using ES6 syntax from a file without any ES6 syntax // causes that module to be considered CommonJS-style, even if it // doesn't have any CommonJS exports. 
// // That means the ES6 imports will become undefined instead of // causing errors. This is for compatibility with older CommonJS- // style bundlers. // // We emit a warning in this case but try to avoid turning the module // into a CommonJS module if possible. This is possible with named // imports (the module stays an ECMAScript module but the imports are // rewritten with undefined) but is not possible with star or default // imports: // // import * as ns from './empty-file' // import defVal from './empty-file' // console.log(ns, defVal) // // In that case the module *is* considered a CommonJS module because // the namespace object must be created. if (record.ContainsImportStar || record.ContainsDefaultAlias) && otherRepr.AST.ExportsKind == js_ast.ExportsNone && !otherRepr.AST.HasLazyExport { otherRepr.Meta.Wrap = graph.WrapCJS otherRepr.AST.ExportsKind = js_ast.ExportsCommonJS } case ast.ImportRequire: // Files that are imported with require() must be CommonJS modules if otherRepr.AST.ExportsKind == js_ast.ExportsESM { otherRepr.Meta.Wrap = graph.WrapESM } else { otherRepr.Meta.Wrap = graph.WrapCJS otherRepr.AST.ExportsKind = js_ast.ExportsCommonJS } case ast.ImportDynamic: if !c.options.CodeSplitting { // If we're not splitting, then import() is just a require() that // returns a promise, so the imported file must be a CommonJS module if otherRepr.AST.ExportsKind == js_ast.ExportsESM { otherRepr.Meta.Wrap = graph.WrapESM } else { otherRepr.Meta.Wrap = graph.WrapCJS otherRepr.AST.ExportsKind = js_ast.ExportsCommonJS } } } } // If the output format doesn't have an implicit CommonJS wrapper, any file // that uses CommonJS features will need to be wrapped, even though the // resulting wrapper won't be invoked by other files. An exception is made // for entry point files in CommonJS format (or when in pass-through mode). if repr.AST.ExportsKind == js_ast.ExportsCommonJS && (!file.IsEntryPoint() || c.options.OutputFormat == config.FormatIIFE || c.options.OutputFormat == config.FormatESModule) { repr.Meta.Wrap = graph.WrapCJS } } } c.timer.End("Step 1") // Step 2: Propagate dynamic export status for export star statements that // are re-exports from a module whose exports are not statically analyzable. // In this case the export star must be evaluated at run time instead of at // bundle time. c.timer.Begin("Step 2") for _, sourceIndex := range c.graph.ReachableFiles { repr, ok := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) if !ok { continue } if repr.Meta.Wrap != graph.WrapNone { c.recursivelyWrapDependencies(sourceIndex) } if len(repr.AST.ExportStarImportRecords) > 0 { visited := make(map[uint32]bool) c.hasDynamicExportsDueToExportStar(sourceIndex, visited) } // Even if the output file is CommonJS-like, we may still need to wrap // CommonJS-style files. Any file that imports a CommonJS-style file will // cause that file to need to be wrapped. This is because the import // method, whatever it is, will need to invoke the wrapper. Note that // this can include entry points (e.g. an entry point that imports a file // that imports that entry point). for _, record := range repr.AST.ImportRecords { if record.SourceIndex.IsValid() { otherRepr := c.graph.Files[record.SourceIndex.GetIndex()].InputFile.Repr.(*graph.JSRepr) if otherRepr.AST.ExportsKind == js_ast.ExportsCommonJS { c.recursivelyWrapDependencies(record.SourceIndex.GetIndex()) } } } } c.timer.End("Step 2") // Step 3: Resolve "export * from" statements. 
This must be done after we // discover all modules that can have dynamic exports because export stars // are ignored for those modules. c.timer.Begin("Step 3") exportStarStack := make([]uint32, 0, 32) for _, sourceIndex := range c.graph.ReachableFiles { repr, ok := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) if !ok { continue } // Expression-style loaders defer code generation until linking. Code // generation is done here because at this point we know that the // "ExportsKind" field has its final value and will not be changed. if repr.AST.HasLazyExport { c.generateCodeForLazyExport(sourceIndex) } // Propagate exports for export star statements if len(repr.AST.ExportStarImportRecords) > 0 { c.addExportsForExportStar(repr.Meta.ResolvedExports, sourceIndex, exportStarStack) } // Also add a special export so import stars can bind to it. This must be // done in this step because it must come after CommonJS module discovery // but before matching imports with exports. repr.Meta.ResolvedExportStar = &graph.ExportData{ Ref: repr.AST.ExportsRef, SourceIndex: sourceIndex, } } c.timer.End("Step 3") // Step 4: Match imports with exports. This must be done after we process all // export stars because imports can bind to export star re-exports. c.timer.Begin("Step 4") for _, sourceIndex := range c.graph.ReachableFiles { file := &c.graph.Files[sourceIndex] repr, ok := file.InputFile.Repr.(*graph.JSRepr) if !ok { continue } if len(repr.AST.NamedImports) > 0 { c.matchImportsWithExportsForFile(uint32(sourceIndex)) } // If we're exporting as CommonJS and this file was originally CommonJS, // then we'll be using the actual CommonJS "exports" and/or "module" // symbols. In that case make sure to mark them as such so they don't // get minified. if file.IsEntryPoint() && repr.AST.ExportsKind == js_ast.ExportsCommonJS && repr.Meta.Wrap == graph.WrapNone && (c.options.OutputFormat == config.FormatPreserve || c.options.OutputFormat == config.FormatCommonJS) { exportsRef := js_ast.FollowSymbols(c.graph.Symbols, repr.AST.ExportsRef) moduleRef := js_ast.FollowSymbols(c.graph.Symbols, repr.AST.ModuleRef) c.graph.Symbols.Get(exportsRef).Kind = js_ast.SymbolUnbound c.graph.Symbols.Get(moduleRef).Kind = js_ast.SymbolUnbound } else if repr.Meta.ForceIncludeExportsForEntryPoint || repr.AST.ExportsKind != js_ast.ExportsCommonJS { repr.Meta.NeedsExportsVariable = true } // Create the wrapper part for wrapped files. This is needed by a later step. c.createWrapperForFile(uint32(sourceIndex)) } c.timer.End("Step 4") // Step 5: Create namespace exports for every file. This is always necessary // for CommonJS files, and is also necessary for other files if they are // imported using an import star statement. c.timer.Begin("Step 5") waitGroup := sync.WaitGroup{} for _, sourceIndex := range c.graph.ReachableFiles { repr, ok := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) if !ok { continue } // This is the slowest step and is also parallelizable, so do this in parallel. waitGroup.Add(1) go func(sourceIndex uint32, repr *graph.JSRepr) { // Now that all exports have been resolved, sort and filter them to create // something we can iterate over later. aliases := make([]string, 0, len(repr.Meta.ResolvedExports)) nextAlias: for alias, export := range repr.Meta.ResolvedExports { // Re-exporting multiple symbols with the same name causes an ambiguous // export. These names cannot be used and should not end up in generated code. 
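				//
				// For example (hypothetical files):
				//
				//   // entry.js, where both a.js and b.js export "x"
				//   export * from './a'
				//   export * from './b'
				//
				// Unless both "x" bindings resolve to the same underlying symbol,
				// the alias "x" is ambiguous and is skipped below.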
otherRepr := c.graph.Files[export.SourceIndex].InputFile.Repr.(*graph.JSRepr) if len(export.PotentiallyAmbiguousExportStarRefs) > 0 { mainRef := export.Ref if imported, ok := otherRepr.Meta.ImportsToBind[export.Ref]; ok { mainRef = imported.Ref } for _, ambiguousExport := range export.PotentiallyAmbiguousExportStarRefs { ambiguousRepr := c.graph.Files[ambiguousExport.SourceIndex].InputFile.Repr.(*graph.JSRepr) ambiguousRef := ambiguousExport.Ref if imported, ok := ambiguousRepr.Meta.ImportsToBind[ambiguousExport.Ref]; ok { ambiguousRef = imported.Ref } if mainRef != ambiguousRef { continue nextAlias } } } // Ignore re-exported imports in TypeScript files that failed to be // resolved. These are probably just type-only imports so the best thing to // do is to silently omit them from the export list. if otherRepr.Meta.IsProbablyTypeScriptType[export.Ref] { continue } aliases = append(aliases, alias) } sort.Strings(aliases) repr.Meta.SortedAndFilteredExportAliases = aliases // Export creation uses "sortedAndFilteredExportAliases" so this must // come second after we fill in that array c.createExportsForFile(uint32(sourceIndex)) waitGroup.Done() }(sourceIndex, repr) } waitGroup.Wait() c.timer.End("Step 5") // Step 6: Bind imports to exports. This adds non-local dependencies on the // parts that declare the export to all parts that use the import. Also // generate wrapper parts for wrapped files. c.timer.Begin("Step 6") for _, sourceIndex := range c.graph.ReachableFiles { file := &c.graph.Files[sourceIndex] repr, ok := file.InputFile.Repr.(*graph.JSRepr) if !ok { continue } // Pre-generate symbols for re-exports CommonJS symbols in case they // are necessary later. This is done now because the symbols map cannot be // mutated later due to parallelism. if file.IsEntryPoint() && c.options.OutputFormat == config.FormatESModule { copies := make([]js_ast.Ref, len(repr.Meta.SortedAndFilteredExportAliases)) for i, alias := range repr.Meta.SortedAndFilteredExportAliases { copies[i] = c.graph.GenerateNewSymbol(sourceIndex, js_ast.SymbolOther, "export_"+alias) } repr.Meta.CJSExportCopies = copies } // Use "init_*" for ESM wrappers instead of "require_*" if repr.Meta.Wrap == graph.WrapESM { c.graph.Symbols.Get(repr.AST.WrapperRef).OriginalName = "init_" + file.InputFile.Source.IdentifierName } // If this isn't CommonJS, then rename the unused "exports" and "module" // variables to avoid them causing the identically-named variables in // actual CommonJS files from being renamed. This is purely about // aesthetics and is not about correctness. This is done here because by // this point, we know the CommonJS status will not change further. if repr.Meta.Wrap != graph.WrapCJS && repr.AST.ExportsKind != js_ast.ExportsCommonJS { name := file.InputFile.Source.IdentifierName c.graph.Symbols.Get(repr.AST.ExportsRef).OriginalName = name + "_exports" c.graph.Symbols.Get(repr.AST.ModuleRef).OriginalName = name + "_module" } // Include the "__export" symbol from the runtime if it was used in the // previous step. The previous step can't do this because it's running in // parallel and can't safely mutate the "importsToBind" map of another file. 
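		// Roughly speaking, "__export" is an ordinary top-level symbol in the
		// runtime file, so this is just a normal cross-file symbol import: the
		// use recorded below ties this file's namespace export part to the
		// runtime part that declares "__export".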
if repr.Meta.NeedsExportSymbolFromRuntime { runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr) exportRef := runtimeRepr.AST.ModuleScope.Members["__export"].Ref c.graph.GenerateSymbolImportAndUse(sourceIndex, js_ast.NSExportPartIndex, exportRef, 1, runtime.SourceIndex) } for importRef, importData := range repr.Meta.ImportsToBind { resolvedRepr := c.graph.Files[importData.SourceIndex].InputFile.Repr.(*graph.JSRepr) partsDeclaringSymbol := resolvedRepr.TopLevelSymbolToParts(importData.Ref) for _, partIndex := range repr.AST.NamedImports[importRef].LocalPartsWithUses { part := &repr.AST.Parts[partIndex] // Depend on the file containing the imported symbol for _, resolvedPartIndex := range partsDeclaringSymbol { part.Dependencies = append(part.Dependencies, js_ast.Dependency{ SourceIndex: importData.SourceIndex, PartIndex: resolvedPartIndex, }) } // Also depend on any files that re-exported this symbol in between the // file containing the import and the file containing the imported symbol part.Dependencies = append(part.Dependencies, importData.ReExports...) } // Merge these symbols so they will share the same name js_ast.MergeSymbols(c.graph.Symbols, importRef, importData.Ref) } // If this is an entry point, depend on all exports so they are included if file.IsEntryPoint() { var dependencies []js_ast.Dependency for _, alias := range repr.Meta.SortedAndFilteredExportAliases { export := repr.Meta.ResolvedExports[alias] targetSourceIndex := export.SourceIndex targetRef := export.Ref // If this is an import, then target what the import points to targetRepr := c.graph.Files[targetSourceIndex].InputFile.Repr.(*graph.JSRepr) if importData, ok := targetRepr.Meta.ImportsToBind[targetRef]; ok { targetSourceIndex = importData.SourceIndex targetRef = importData.Ref targetRepr = c.graph.Files[targetSourceIndex].InputFile.Repr.(*graph.JSRepr) dependencies = append(dependencies, importData.ReExports...) 
} // Pull in all declarations of this symbol for _, partIndex := range targetRepr.TopLevelSymbolToParts(targetRef) { dependencies = append(dependencies, js_ast.Dependency{ SourceIndex: targetSourceIndex, PartIndex: partIndex, }) } } // Ensure "exports" is included if the current output format needs it if repr.Meta.ForceIncludeExportsForEntryPoint { dependencies = append(dependencies, js_ast.Dependency{ SourceIndex: sourceIndex, PartIndex: js_ast.NSExportPartIndex, }) } // Include the wrapper if present if repr.Meta.Wrap != graph.WrapNone { dependencies = append(dependencies, js_ast.Dependency{ SourceIndex: sourceIndex, PartIndex: repr.Meta.WrapperPartIndex.GetIndex(), }) } // Represent these constraints with a dummy part entryPointPartIndex := c.graph.AddPartToFile(sourceIndex, js_ast.Part{ Dependencies: dependencies, CanBeRemovedIfUnused: false, }) repr.Meta.EntryPointPartIndex = ast.MakeIndex32(entryPointPartIndex) // Pull in the "__toCommonJS" symbol if we need it due to being an entry point if repr.Meta.ForceIncludeExportsForEntryPoint { c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, entryPointPartIndex, "__toCommonJS", 1) } } // Encode import-specific constraints in the dependency graph for partIndex, part := range repr.AST.Parts { toESMUses := uint32(0) toCommonJSUses := uint32(0) runtimeRequireUses := uint32(0) // Imports of wrapped files must depend on the wrapper for _, importRecordIndex := range part.ImportRecordIndices { record := &repr.AST.ImportRecords[importRecordIndex] // Don't follow external imports (this includes import() expressions) if !record.SourceIndex.IsValid() || c.isExternalDynamicImport(record, sourceIndex) { // This is an external import. Check if it will be a "require()" call. if record.Kind == ast.ImportRequire || !c.options.OutputFormat.KeepES6ImportExportSyntax() || (record.Kind == ast.ImportDynamic && c.options.UnsupportedJSFeatures.Has(compat.DynamicImport)) { // We should use "__require" instead of "require" if we're not // generating a CommonJS output file, since it won't exist otherwise if config.ShouldCallRuntimeRequire(c.options.Mode, c.options.OutputFormat) { record.CallRuntimeRequire = true runtimeRequireUses++ } // It needs the "__toESM" wrapper if it wasn't originally a // CommonJS import (i.e. it wasn't a "require()" call). if record.Kind != ast.ImportRequire { record.WrapWithToESM = true toESMUses++ } } continue } otherSourceIndex := record.SourceIndex.GetIndex() otherRepr := c.graph.Files[otherSourceIndex].InputFile.Repr.(*graph.JSRepr) if otherRepr.Meta.Wrap != graph.WrapNone { // Depend on the automatically-generated require wrapper symbol wrapperRef := otherRepr.AST.WrapperRef c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), wrapperRef, 1, otherSourceIndex) // This is an ES6 import of a CommonJS module, so it needs the // "__toESM" wrapper as long as it's not a bare "require()" if record.Kind != ast.ImportRequire && otherRepr.AST.ExportsKind == js_ast.ExportsCommonJS { record.WrapWithToESM = true toESMUses++ } // If this is an ESM wrapper, also depend on the exports object // since the final code will contain an inline reference to it. // This must be done for "require()" and "import()" expressions // but does not need to be done for "import" statements since // those just cause us to reference the exports directly. 
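					// For a "require()" of a hypothetical ES module "esm.js", the
					// generated code ends up looking roughly like this (illustrative,
					// not the exact output):
					//
					//   const mod = (init_esm(), __toCommonJS(esm_exports));
					//
					// which is why both the wrapper and the exports object must be
					// pulled into this part.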
if otherRepr.Meta.Wrap == graph.WrapESM && record.Kind != ast.ImportStmt { c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), otherRepr.AST.ExportsRef, 1, otherSourceIndex) // If this is a "require()" call, then we should add the // "__esModule" marker to behave as if the module was converted // from ESM to CommonJS. This is done via a wrapper instead of // by modifying the exports object itself because the same ES // module may be simultaneously imported and required, and the // importing code should not see "__esModule" while the requiring // code should see "__esModule". This is an extremely complex // and subtle set of bundler interop issues. See for example // https://github.com/evanw/esbuild/issues/1591. if record.Kind == ast.ImportRequire { record.WrapWithToCJS = true toCommonJSUses++ } } } else if record.Kind == ast.ImportStmt && otherRepr.AST.ExportsKind == js_ast.ExportsESMWithDynamicFallback { // This is an import of a module that has a dynamic export fallback // object. In that case we need to depend on that object in case // something ends up needing to use it later. This could potentially // be omitted in some cases with more advanced analysis if this // dynamic export fallback object doesn't end up being needed. c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), otherRepr.AST.ExportsRef, 1, otherSourceIndex) } } // If there's an ES6 import of a non-ES6 module, then we're going to need the // "__toESM" symbol from the runtime to wrap the result of "require()" c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, uint32(partIndex), "__toESM", toESMUses) // If there's a CommonJS require of an ES6 module, then we're going to need the // "__toCommonJS" symbol from the runtime to wrap the exports object c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, uint32(partIndex), "__toCommonJS", toCommonJSUses) // If there are unbundled calls to "require()" and we're not generating // code for node, then substitute a "__require" wrapper for "require". c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, uint32(partIndex), "__require", runtimeRequireUses) // If there's an ES6 export star statement of a non-ES6 module, then we're // going to need the "__reExport" symbol from the runtime reExportUses := uint32(0) for _, importRecordIndex := range repr.AST.ExportStarImportRecords { record := &repr.AST.ImportRecords[importRecordIndex] // Is this export star evaluated at run time? happensAtRunTime := !record.SourceIndex.IsValid() && (!file.IsEntryPoint() || !c.options.OutputFormat.KeepES6ImportExportSyntax()) if record.SourceIndex.IsValid() { otherSourceIndex := record.SourceIndex.GetIndex() otherRepr := c.graph.Files[otherSourceIndex].InputFile.Repr.(*graph.JSRepr) if otherSourceIndex != sourceIndex && otherRepr.AST.ExportsKind.IsDynamic() { happensAtRunTime = true } if otherRepr.AST.ExportsKind == js_ast.ExportsESMWithDynamicFallback { // This looks like "__reExport(exports_a, exports_b)". Make sure to // pull in the "exports_b" symbol into this export star. This matters // in code splitting situations where the "export_b" symbol might live // in a different chunk than this export star. 
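						// For example (hypothetical file names): "export * from './lazy'"
						// inside "a.js" ends up as roughly "__reExport(a_exports, lazy_exports)"
						// in the output, so "lazy_exports" must be reachable from the chunk
						// containing this export star.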
c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), otherRepr.AST.ExportsRef, 1, otherSourceIndex) } } if happensAtRunTime { // Depend on this file's "exports" object for the first argument to "__reExport" c.graph.GenerateSymbolImportAndUse(sourceIndex, uint32(partIndex), repr.AST.ExportsRef, 1, sourceIndex) record.CallsRunTimeReExportFn = true repr.AST.UsesExportsRef = true reExportUses++ } } c.graph.GenerateRuntimeSymbolImportAndUse(sourceIndex, uint32(partIndex), "__reExport", reExportUses) } } c.timer.End("Step 6") } func (c *linkerContext) generateCodeForLazyExport(sourceIndex uint32) { file := &c.graph.Files[sourceIndex] repr := file.InputFile.Repr.(*graph.JSRepr) // Grab the lazy expression if len(repr.AST.Parts) < 1 { panic("Internal error") } part := &repr.AST.Parts[1] if len(part.Stmts) != 1 { panic("Internal error") } lazy, ok := part.Stmts[0].Data.(*js_ast.SLazyExport) if !ok { panic("Internal error") } // Use "module.exports = value" for CommonJS-style modules if repr.AST.ExportsKind == js_ast.ExportsCommonJS { part.Stmts = []js_ast.Stmt{js_ast.AssignStmt( js_ast.Expr{Loc: lazy.Value.Loc, Data: &js_ast.EDot{ Target: js_ast.Expr{Loc: lazy.Value.Loc, Data: &js_ast.EIdentifier{Ref: repr.AST.ModuleRef}}, Name: "exports", NameLoc: lazy.Value.Loc, }}, lazy.Value, )} c.graph.GenerateSymbolImportAndUse(sourceIndex, 0, repr.AST.ModuleRef, 1, sourceIndex) return } // Otherwise, generate ES6 export statements. These are added as additional // parts so they can be tree shaken individually. part.Stmts = nil type prevExport struct { ref js_ast.Ref partIndex uint32 } generateExport := func(name string, alias string, value js_ast.Expr) prevExport { // Generate a new symbol ref := c.graph.GenerateNewSymbol(sourceIndex, js_ast.SymbolOther, name) // Generate an ES6 export var stmt js_ast.Stmt if alias == "default" { stmt = js_ast.Stmt{Loc: value.Loc, Data: &js_ast.SExportDefault{ DefaultName: js_ast.LocRef{Loc: value.Loc, Ref: ref}, Value: js_ast.Stmt{Loc: value.Loc, Data: &js_ast.SExpr{Value: value}}, }} } else { stmt = js_ast.Stmt{Loc: value.Loc, Data: &js_ast.SLocal{ IsExport: true, Decls: []js_ast.Decl{{ Binding: js_ast.Binding{Loc: value.Loc, Data: &js_ast.BIdentifier{Ref: ref}}, ValueOrNil: value, }}, }} } // Link the export into the graph for tree shaking partIndex := c.graph.AddPartToFile(sourceIndex, js_ast.Part{ Stmts: []js_ast.Stmt{stmt}, DeclaredSymbols: []js_ast.DeclaredSymbol{{Ref: ref, IsTopLevel: true}}, CanBeRemovedIfUnused: true, }) c.graph.GenerateSymbolImportAndUse(sourceIndex, partIndex, repr.AST.ModuleRef, 1, sourceIndex) repr.Meta.ResolvedExports[alias] = graph.ExportData{Ref: ref, SourceIndex: sourceIndex} return prevExport{ref: ref, partIndex: partIndex} } // Unwrap JSON objects into separate top-level variables var prevExports []js_ast.Ref jsonValue := lazy.Value if object, ok := jsonValue.Data.(*js_ast.EObject); ok { clone := *object clone.Properties = append(make([]js_ast.Property, 0, len(clone.Properties)), clone.Properties...) 
for i, property := range clone.Properties { if str, ok := property.Key.Data.(*js_ast.EString); ok && (!file.IsEntryPoint() || js_lexer.IsIdentifierUTF16(str.Value) || !c.options.UnsupportedJSFeatures.Has(compat.ArbitraryModuleNamespaceNames)) { name := js_lexer.UTF16ToString(str.Value) exportRef := generateExport(name, name, property.ValueOrNil).ref prevExports = append(prevExports, exportRef) clone.Properties[i].ValueOrNil = js_ast.Expr{Loc: property.Key.Loc, Data: &js_ast.EIdentifier{Ref: exportRef}} } } jsonValue.Data = &clone } // Generate the default export finalExportPartIndex := generateExport(file.InputFile.Source.IdentifierName+"_default", "default", jsonValue).partIndex // The default export depends on all of the previous exports for _, exportRef := range prevExports { c.graph.GenerateSymbolImportAndUse(sourceIndex, finalExportPartIndex, exportRef, 1, sourceIndex) } } func (c *linkerContext) createExportsForFile(sourceIndex uint32) { //////////////////////////////////////////////////////////////////////////////// // WARNING: This method is run in parallel over all files. Do not mutate data // for other files within this method or you will create a data race. //////////////////////////////////////////////////////////////////////////////// file := &c.graph.Files[sourceIndex] repr := file.InputFile.Repr.(*graph.JSRepr) // Generate a getter per export properties := []js_ast.Property{} nsExportDependencies := []js_ast.Dependency{} nsExportSymbolUses := make(map[js_ast.Ref]js_ast.SymbolUse) for _, alias := range repr.Meta.SortedAndFilteredExportAliases { export := repr.Meta.ResolvedExports[alias] // If this is an export of an import, reference the symbol that the import // was eventually resolved to. We need to do this because imports have // already been resolved by this point, so we can't generate a new import // and have that be resolved later. if importData, ok := c.graph.Files[export.SourceIndex].InputFile.Repr.(*graph.JSRepr).Meta.ImportsToBind[export.Ref]; ok { export.Ref = importData.Ref export.SourceIndex = importData.SourceIndex nsExportDependencies = append(nsExportDependencies, importData.ReExports...) 
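	// For example (hypothetical files): if this file re-exports "x" via
	// "export {x} from './b.js'", the alias "x" is redirected here to point
	// straight at the symbol declared in "b.js", and the parts that the
	// re-export chain depends on come along through "ReExports".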
} // Exports of imports need EImportIdentifier in case they need to be re- // written to a property access later on var value js_ast.Expr if c.graph.Symbols.Get(export.Ref).NamespaceAlias != nil { value = js_ast.Expr{Data: &js_ast.EImportIdentifier{Ref: export.Ref}} } else { value = js_ast.Expr{Data: &js_ast.EIdentifier{Ref: export.Ref}} } // Add a getter property var getter js_ast.Expr body := js_ast.FnBody{Stmts: []js_ast.Stmt{{Loc: value.Loc, Data: &js_ast.SReturn{ValueOrNil: value}}}} if c.options.UnsupportedJSFeatures.Has(compat.Arrow) { getter = js_ast.Expr{Data: &js_ast.EFunction{Fn: js_ast.Fn{Body: body}}} } else { getter = js_ast.Expr{Data: &js_ast.EArrow{PreferExpr: true, Body: body}} } properties = append(properties, js_ast.Property{ Key: js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16(alias)}}, ValueOrNil: getter, }) nsExportSymbolUses[export.Ref] = js_ast.SymbolUse{CountEstimate: 1} // Make sure the part that declares the export is included for _, partIndex := range c.graph.Files[export.SourceIndex].InputFile.Repr.(*graph.JSRepr).TopLevelSymbolToParts(export.Ref) { // Use a non-local dependency since this is likely from a different // file if it came in through an export star nsExportDependencies = append(nsExportDependencies, js_ast.Dependency{ SourceIndex: export.SourceIndex, PartIndex: partIndex, }) } } declaredSymbols := []js_ast.DeclaredSymbol{} var nsExportStmts []js_ast.Stmt // Prefix this part with "var exports = {}" if this isn't a CommonJS entry point if repr.Meta.NeedsExportsVariable { nsExportStmts = append(nsExportStmts, js_ast.Stmt{Data: &js_ast.SLocal{Decls: []js_ast.Decl{{ Binding: js_ast.Binding{Data: &js_ast.BIdentifier{Ref: repr.AST.ExportsRef}}, ValueOrNil: js_ast.Expr{Data: &js_ast.EObject{}}, }}}}) declaredSymbols = append(declaredSymbols, js_ast.DeclaredSymbol{ Ref: repr.AST.ExportsRef, IsTopLevel: true, }) } // "__export(exports, { foo: () => foo })" exportRef := js_ast.InvalidRef if len(properties) > 0 { runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr) exportRef = runtimeRepr.AST.ModuleScope.Members["__export"].Ref nsExportStmts = append(nsExportStmts, js_ast.Stmt{Data: &js_ast.SExpr{Value: js_ast.Expr{Data: &js_ast.ECall{ Target: js_ast.Expr{Data: &js_ast.EIdentifier{Ref: exportRef}}, Args: []js_ast.Expr{ {Data: &js_ast.EIdentifier{Ref: repr.AST.ExportsRef}}, {Data: &js_ast.EObject{ Properties: properties, }}, }, }}}}) // Make sure this file depends on the "__export" symbol for _, partIndex := range runtimeRepr.TopLevelSymbolToParts(exportRef) { nsExportDependencies = append(nsExportDependencies, js_ast.Dependency{ SourceIndex: runtime.SourceIndex, PartIndex: partIndex, }) } // Make sure the CommonJS closure, if there is one, includes "exports" repr.AST.UsesExportsRef = true } // No need to generate a part if it'll be empty if len(nsExportStmts) > 0 { // Initialize the part that was allocated for us earlier. The information // here will be used after this during tree shaking. 
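	// Schematically, for a file with exports "foo" and "bar", the part being
	// initialized below prints as something close to (illustrative only):
	//
	//   var exports_a = {};
	//   __export(exports_a, {
	//     bar: () => bar,
	//     foo: () => foo
	//   });
	//
	// The getters keep live bindings to the underlying symbols, which is why
	// each export is wrapped in a closure rather than copied by value.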
		repr.AST.Parts[js_ast.NSExportPartIndex] = js_ast.Part{
			Stmts:           nsExportStmts,
			SymbolUses:      nsExportSymbolUses,
			Dependencies:    nsExportDependencies,
			DeclaredSymbols: declaredSymbols,

			// This can be removed if nothing uses it
			CanBeRemovedIfUnused: true,

			// Make sure this is trimmed if unused even if tree shaking is disabled
			ForceTreeShaking: true,
		}

		// Pull in the "__export" symbol if it was used
		if exportRef != js_ast.InvalidRef {
			repr.Meta.NeedsExportSymbolFromRuntime = true
		}
	}
}

func (c *linkerContext) createWrapperForFile(sourceIndex uint32) {
	repr := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr)

	switch repr.Meta.Wrap {
	// If this is a CommonJS file, we're going to need to generate a wrapper
	// for the CommonJS closure. That will end up looking something like this:
	//
	//   var require_foo = __commonJS((exports, module) => {
	//     ...
	//   });
	//
	// However, that generation is special-cased for various reasons and is
	// done later on. Still, we're going to need to ensure that this file
	// both depends on the "__commonJS" symbol and declares the "require_foo"
	// symbol. Instead of special-casing this during the reachability analysis
	// below, we just append a dummy part to the end of the file with these
	// dependencies and let the general-purpose reachability analysis take care
	// of it.
	case graph.WrapCJS:
		runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr)
		commonJSParts := runtimeRepr.TopLevelSymbolToParts(c.cjsRuntimeRef)

		// Generate the dummy part
		dependencies := make([]js_ast.Dependency, len(commonJSParts))
		for i, partIndex := range commonJSParts {
			dependencies[i] = js_ast.Dependency{
				SourceIndex: runtime.SourceIndex,
				PartIndex:   partIndex,
			}
		}
		partIndex := c.graph.AddPartToFile(sourceIndex, js_ast.Part{
			SymbolUses: map[js_ast.Ref]js_ast.SymbolUse{
				repr.AST.WrapperRef: {CountEstimate: 1},
			},
			DeclaredSymbols: []js_ast.DeclaredSymbol{
				{Ref: repr.AST.ExportsRef, IsTopLevel: true},
				{Ref: repr.AST.ModuleRef, IsTopLevel: true},
				{Ref: repr.AST.WrapperRef, IsTopLevel: true},
			},
			Dependencies: dependencies,
		})
		repr.Meta.WrapperPartIndex = ast.MakeIndex32(partIndex)
		c.graph.GenerateSymbolImportAndUse(sourceIndex, partIndex, c.cjsRuntimeRef, 1, runtime.SourceIndex)

	// If this is a lazily-initialized ESM file, we're going to need to
	// generate a wrapper for the ESM closure. That will end up looking
	// something like this:
	//
	//   var init_foo = __esm(() => {
	//     ...
	//   });
	//
	// This depends on the "__esm" symbol and declares the "init_foo" symbol
	// for similar reasons to the CommonJS closure above.
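	// A rough example of when this comes up (file names are hypothetical): if
	// "esm.js" is only ever loaded via "require('./esm.js')", its body is
	// deferred into the closure and the require site runs it on first use,
	// approximately:
	//
	//   var init_esm = __esm(() => { /* module body */ });
	//   // ...at the require site:
	//   init_esm();
	//
	// which defers evaluation of the ES module until it's actually required.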
case graph.WrapESM: runtimeRepr := c.graph.Files[runtime.SourceIndex].InputFile.Repr.(*graph.JSRepr) esmParts := runtimeRepr.TopLevelSymbolToParts(c.esmRuntimeRef) // Generate the dummy part dependencies := make([]js_ast.Dependency, len(esmParts)) for i, partIndex := range esmParts { dependencies[i] = js_ast.Dependency{ SourceIndex: runtime.SourceIndex, PartIndex: partIndex, } } partIndex := c.graph.AddPartToFile(sourceIndex, js_ast.Part{ SymbolUses: map[js_ast.Ref]js_ast.SymbolUse{ repr.AST.WrapperRef: {CountEstimate: 1}, }, DeclaredSymbols: []js_ast.DeclaredSymbol{ {Ref: repr.AST.WrapperRef, IsTopLevel: true}, }, Dependencies: dependencies, }) repr.Meta.WrapperPartIndex = ast.MakeIndex32(partIndex) c.graph.GenerateSymbolImportAndUse(sourceIndex, partIndex, c.esmRuntimeRef, 1, runtime.SourceIndex) } } func (c *linkerContext) matchImportsWithExportsForFile(sourceIndex uint32) { file := &c.graph.Files[sourceIndex] repr := file.InputFile.Repr.(*graph.JSRepr) // Sort imports for determinism. Otherwise our unit tests will randomly // fail sometimes when error messages are reordered. sortedImportRefs := make([]int, 0, len(repr.AST.NamedImports)) for ref := range repr.AST.NamedImports { sortedImportRefs = append(sortedImportRefs, int(ref.InnerIndex)) } sort.Ints(sortedImportRefs) // Pair imports with their matching exports for _, innerIndex := range sortedImportRefs { // Re-use memory for the cycle detector c.cycleDetector = c.cycleDetector[:0] importRef := js_ast.Ref{SourceIndex: sourceIndex, InnerIndex: uint32(innerIndex)} result, reExports := c.matchImportWithExport(importTracker{sourceIndex: sourceIndex, importRef: importRef}, nil) switch result.kind { case matchImportIgnore: case matchImportNormal: repr.Meta.ImportsToBind[importRef] = graph.ImportData{ ReExports: reExports, SourceIndex: result.sourceIndex, Ref: result.ref, } case matchImportNamespace: c.graph.Symbols.Get(importRef).NamespaceAlias = &js_ast.NamespaceAlias{ NamespaceRef: result.namespaceRef, Alias: result.alias, } case matchImportNormalAndNamespace: repr.Meta.ImportsToBind[importRef] = graph.ImportData{ ReExports: reExports, SourceIndex: result.sourceIndex, Ref: result.ref, } c.graph.Symbols.Get(importRef).NamespaceAlias = &js_ast.NamespaceAlias{ NamespaceRef: result.namespaceRef, Alias: result.alias, } case matchImportCycle: namedImport := repr.AST.NamedImports[importRef] c.log.Add(logger.Error, file.LineColumnTracker(), js_lexer.RangeOfIdentifier(file.InputFile.Source, namedImport.AliasLoc), fmt.Sprintf("Detected cycle while resolving import %q", namedImport.Alias)) case matchImportProbablyTypeScriptType: repr.Meta.IsProbablyTypeScriptType[importRef] = true case matchImportAmbiguous: namedImport := repr.AST.NamedImports[importRef] r := js_lexer.RangeOfIdentifier(file.InputFile.Source, namedImport.AliasLoc) var notes []logger.MsgData // Provide the locations of both ambiguous exports if possible if result.nameLoc.Start != 0 && result.otherNameLoc.Start != 0 { a := c.graph.Files[result.sourceIndex] b := c.graph.Files[result.otherSourceIndex] ra := js_lexer.RangeOfIdentifier(a.InputFile.Source, result.nameLoc) rb := js_lexer.RangeOfIdentifier(b.InputFile.Source, result.otherNameLoc) notes = []logger.MsgData{ a.LineColumnTracker().MsgData(ra, "One matching export is here:"), b.LineColumnTracker().MsgData(rb, "Another matching export is here:"), } } symbol := c.graph.Symbols.Get(importRef) if symbol.ImportItemStatus == js_ast.ImportItemGenerated { // This is a warning instead of an error because although it appears // to be a 
named import, it's actually an automatically-generated // named import that was originally a property access on an import // star namespace object. Normally this property access would just // resolve to undefined at run-time instead of failing at binding- // time, so we emit a warning and rewrite the value to the literal // "undefined" instead of emitting an error. symbol.ImportItemStatus = js_ast.ImportItemMissing msg := fmt.Sprintf("Import %q will always be undefined because there are multiple matching exports", namedImport.Alias) c.log.AddWithNotes(logger.Warning, file.LineColumnTracker(), r, msg, notes) } else { msg := fmt.Sprintf("Ambiguous import %q has multiple matching exports", namedImport.Alias) c.log.AddWithNotes(logger.Error, file.LineColumnTracker(), r, msg, notes) } } } } type matchImportKind uint8 const ( // The import is either external or undefined matchImportIgnore matchImportKind = iota // "sourceIndex" and "ref" are in use matchImportNormal // "namespaceRef" and "alias" are in use matchImportNamespace // Both "matchImportNormal" and "matchImportNamespace" matchImportNormalAndNamespace // The import could not be evaluated due to a cycle matchImportCycle // The import is missing but came from a TypeScript file matchImportProbablyTypeScriptType // The import resolved to multiple symbols via "export * from" matchImportAmbiguous ) type matchImportResult struct { kind matchImportKind namespaceRef js_ast.Ref alias string sourceIndex uint32 nameLoc logger.Loc // Optional, goes with sourceIndex, ignore if zero otherSourceIndex uint32 otherNameLoc logger.Loc // Optional, goes with otherSourceIndex, ignore if zero ref js_ast.Ref } func (c *linkerContext) matchImportWithExport( tracker importTracker, reExportsIn []js_ast.Dependency, ) (result matchImportResult, reExports []js_ast.Dependency) { var ambiguousResults []matchImportResult reExports = reExportsIn loop: for { // Make sure we avoid infinite loops trying to resolve cycles: // // // foo.js // export {a as b} from './foo.js' // export {b as c} from './foo.js' // export {c as a} from './foo.js' // // This uses a O(n^2) array scan instead of a O(n) map because the vast // majority of cases have one or two elements and Go arrays are cheap to // reuse without allocating. for _, previousTracker := range c.cycleDetector { if tracker == previousTracker { result = matchImportResult{kind: matchImportCycle} break loop } } c.cycleDetector = append(c.cycleDetector, tracker) // Resolve the import by one step nextTracker, status, potentiallyAmbiguousExportStarRefs := c.advanceImportTracker(tracker) switch status { case importCommonJS, importCommonJSWithoutExports, importExternal, importDisabled: if status == importExternal && c.options.OutputFormat.KeepES6ImportExportSyntax() { // Imports from external modules should not be converted to CommonJS // if the output format preserves the original ES6 import statements break } // If it's a CommonJS or external file, rewrite the import to a // property access. Don't do this if the namespace reference is invalid // though. This is the case for star imports, where the import is the // namespace. 
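	// Loose sketch of the rewrite (hypothetical names, not the exact output):
	//
	//   // Input:
	//   import {readFile} from "./impl.cjs"
	//   readFile(x)
	//
	//   // Output (approximately):
	//   var import_impl = __toESM(require_impl());
	//   (0, import_impl.readFile)(x);
	//
	// i.e. the named import becomes a property access on the CommonJS
	// namespace object instead of a direct symbol reference.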
trackerFile := &c.graph.Files[tracker.sourceIndex] namedImport := trackerFile.InputFile.Repr.(*graph.JSRepr).AST.NamedImports[tracker.importRef] if namedImport.NamespaceRef != js_ast.InvalidRef { if result.kind == matchImportNormal { result.kind = matchImportNormalAndNamespace result.namespaceRef = namedImport.NamespaceRef result.alias = namedImport.Alias } else { result = matchImportResult{ kind: matchImportNamespace, namespaceRef: namedImport.NamespaceRef, alias: namedImport.Alias, } } } // Warn about importing from a file that is known to not have any exports if status == importCommonJSWithoutExports { symbol := c.graph.Symbols.Get(tracker.importRef) symbol.ImportItemStatus = js_ast.ImportItemMissing c.log.Add(logger.Warning, trackerFile.LineColumnTracker(), js_lexer.RangeOfIdentifier(trackerFile.InputFile.Source, namedImport.AliasLoc), fmt.Sprintf("Import %q will always be undefined because the file %q has no exports", namedImport.Alias, c.graph.Files[nextTracker.sourceIndex].InputFile.Source.PrettyPath)) } case importDynamicFallback: // If it's a file with dynamic export fallback, rewrite the import to a property access trackerFile := &c.graph.Files[tracker.sourceIndex] namedImport := trackerFile.InputFile.Repr.(*graph.JSRepr).AST.NamedImports[tracker.importRef] if result.kind == matchImportNormal { result.kind = matchImportNormalAndNamespace result.namespaceRef = nextTracker.importRef result.alias = namedImport.Alias } else { result = matchImportResult{ kind: matchImportNamespace, namespaceRef: nextTracker.importRef, alias: namedImport.Alias, } } case importNoMatch: symbol := c.graph.Symbols.Get(tracker.importRef) trackerFile := &c.graph.Files[tracker.sourceIndex] namedImport := trackerFile.InputFile.Repr.(*graph.JSRepr).AST.NamedImports[tracker.importRef] r := js_lexer.RangeOfIdentifier(trackerFile.InputFile.Source, namedImport.AliasLoc) // Report mismatched imports and exports if symbol.ImportItemStatus == js_ast.ImportItemGenerated { // This is a warning instead of an error because although it appears // to be a named import, it's actually an automatically-generated // named import that was originally a property access on an import // star namespace object. Normally this property access would just // resolve to undefined at run-time instead of failing at binding- // time, so we emit a warning and rewrite the value to the literal // "undefined" instead of emitting an error. symbol.ImportItemStatus = js_ast.ImportItemMissing c.log.Add(logger.Warning, trackerFile.LineColumnTracker(), r, fmt.Sprintf( "Import %q will always be undefined because there is no matching export in %q", namedImport.Alias, c.graph.Files[nextTracker.sourceIndex].InputFile.Source.PrettyPath)) } else { c.log.Add(logger.Error, trackerFile.LineColumnTracker(), r, fmt.Sprintf("No matching export in %q for import %q", c.graph.Files[nextTracker.sourceIndex].InputFile.Source.PrettyPath, namedImport.Alias)) } case importProbablyTypeScriptType: // Omit this import from any namespace export code we generate for // import star statements (i.e. "import * as ns from 'path'") result = matchImportResult{kind: matchImportProbablyTypeScriptType} case importFound: // If there are multiple ambiguous results due to use of "export * from" // statements, trace them all to see if they point to different things. 
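	// A minimal hypothetical example of the ambiguity being traced here:
	//
	//   // a.js:    export * from './b.js'; export * from './c.js'
	//   // b.js:    export let x = 1
	//   // c.js:    export let x = 2
	//   // user.js: import {x} from './a.js'
	//
	// Both candidate "x" symbols are followed below; if they turn out to be
	// different symbols, the import is reported as ambiguous instead of
	// silently picking one of them.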
for _, ambiguousTracker := range potentiallyAmbiguousExportStarRefs { // If this is a re-export of another import, follow the import if _, ok := c.graph.Files[ambiguousTracker.SourceIndex].InputFile.Repr.(*graph.JSRepr).AST.NamedImports[ambiguousTracker.Ref]; ok { // Save and restore the cycle detector to avoid mixing information oldCycleDetector := c.cycleDetector ambiguousResult, newReExportFiles := c.matchImportWithExport(importTracker{ sourceIndex: ambiguousTracker.SourceIndex, importRef: ambiguousTracker.Ref, }, reExports) c.cycleDetector = oldCycleDetector ambiguousResults = append(ambiguousResults, ambiguousResult) reExports = newReExportFiles } else { ambiguousResults = append(ambiguousResults, matchImportResult{ kind: matchImportNormal, sourceIndex: ambiguousTracker.SourceIndex, ref: ambiguousTracker.Ref, nameLoc: ambiguousTracker.NameLoc, }) } } // Defer the actual binding of this import until after we generate // namespace export code for all files. This has to be done for all // import-to-export matches, not just the initial import to the final // export, since all imports and re-exports must be merged together // for correctness. result = matchImportResult{ kind: matchImportNormal, sourceIndex: nextTracker.sourceIndex, ref: nextTracker.importRef, nameLoc: nextTracker.nameLoc, } // Depend on the statement(s) that declared this import symbol in the // original file for _, resolvedPartIndex := range c.graph.Files[tracker.sourceIndex].InputFile.Repr.(*graph.JSRepr).TopLevelSymbolToParts(tracker.importRef) { reExports = append(reExports, js_ast.Dependency{ SourceIndex: tracker.sourceIndex, PartIndex: resolvedPartIndex, }) } // If this is a re-export of another import, continue for another // iteration of the loop to resolve that import as well if _, ok := c.graph.Files[nextTracker.sourceIndex].InputFile.Repr.(*graph.JSRepr).AST.NamedImports[nextTracker.importRef]; ok { tracker = nextTracker continue } default: panic("Internal error") } // Stop now if we didn't explicitly "continue" above break } // If there is a potential ambiguity, all results must be the same for _, ambiguousResult := range ambiguousResults { if ambiguousResult != result { if result.kind == matchImportNormal && ambiguousResult.kind == matchImportNormal && result.nameLoc.Start != 0 && ambiguousResult.nameLoc.Start != 0 { return matchImportResult{ kind: matchImportAmbiguous, sourceIndex: result.sourceIndex, nameLoc: result.nameLoc, otherSourceIndex: ambiguousResult.sourceIndex, otherNameLoc: ambiguousResult.nameLoc, }, nil } return matchImportResult{kind: matchImportAmbiguous}, nil } } return } func (c *linkerContext) recursivelyWrapDependencies(sourceIndex uint32) { repr := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) if repr.Meta.DidWrapDependencies { return } repr.Meta.DidWrapDependencies = true // Never wrap the runtime file since it always comes first if sourceIndex == runtime.SourceIndex { return } // This module must be wrapped if repr.Meta.Wrap == graph.WrapNone { if repr.AST.ExportsKind == js_ast.ExportsCommonJS { repr.Meta.Wrap = graph.WrapCJS } else { repr.Meta.Wrap = graph.WrapESM } } // All dependencies must also be wrapped for _, record := range repr.AST.ImportRecords { if record.SourceIndex.IsValid() { c.recursivelyWrapDependencies(record.SourceIndex.GetIndex()) } } } func (c *linkerContext) hasDynamicExportsDueToExportStar(sourceIndex uint32, visited map[uint32]bool) bool { // Terminate the traversal now if this file already has dynamic exports repr := 
c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) if repr.AST.ExportsKind == js_ast.ExportsCommonJS || repr.AST.ExportsKind == js_ast.ExportsESMWithDynamicFallback { return true } // Avoid infinite loops due to cycles in the export star graph if visited[sourceIndex] { return false } visited[sourceIndex] = true // Scan over the export star graph for _, importRecordIndex := range repr.AST.ExportStarImportRecords { record := &repr.AST.ImportRecords[importRecordIndex] // This file has dynamic exports if the exported imports are from a file // that either has dynamic exports directly or transitively by itself // having an export star from a file with dynamic exports. if (!record.SourceIndex.IsValid() && (!c.graph.Files[sourceIndex].IsEntryPoint() || !c.options.OutputFormat.KeepES6ImportExportSyntax())) || (record.SourceIndex.IsValid() && record.SourceIndex.GetIndex() != sourceIndex && c.hasDynamicExportsDueToExportStar(record.SourceIndex.GetIndex(), visited)) { repr.AST.ExportsKind = js_ast.ExportsESMWithDynamicFallback return true } } return false } func (c *linkerContext) addExportsForExportStar( resolvedExports map[string]graph.ExportData, sourceIndex uint32, sourceIndexStack []uint32, ) { // Avoid infinite loops due to cycles in the export star graph for _, prevSourceIndex := range sourceIndexStack { if prevSourceIndex == sourceIndex { return } } sourceIndexStack = append(sourceIndexStack, sourceIndex) repr := c.graph.Files[sourceIndex].InputFile.Repr.(*graph.JSRepr) for _, importRecordIndex := range repr.AST.ExportStarImportRecords { record := &repr.AST.ImportRecords[importRecordIndex] if !record.SourceIndex.IsValid() { // This will be resolved at run time instead continue } otherSourceIndex := record.SourceIndex.GetIndex() // Export stars from a CommonJS module don't work because they can't be // statically discovered. Just silently ignore them in this case. // // We could attempt to check whether the imported file still has ES6 // exports even though it still uses CommonJS features. However, when // doing this we'd also have to rewrite any imports of these export star // re-exports as property accesses off of a generated require() call. 
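	// For instance (hypothetical files):
	//
	//   // a.js
	//   export * from './legacy.cjs'
	//
	//   // legacy.cjs
	//   module.exports = buildExports()
	//
	// The keys of "legacy.cjs" are only known once it runs, so nothing is
	// added to "resolvedExports" for it here; that re-export is instead
	// handled at run time via "__reExport".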
otherRepr := c.graph.Files[otherSourceIndex].InputFile.Repr.(*graph.JSRepr) if otherRepr.AST.ExportsKind == js_ast.ExportsCommonJS { // All exports will be resolved at run time instead continue } // Accumulate this file's exports nextExport: for alias, name := range otherRepr.AST.NamedExports { // ES6 export star statements ignore exports named "default" if alias == "default" { continue } // This export star is shadowed if any file in the stack has a matching real named export for _, prevSourceIndex := range sourceIndexStack { prevRepr := c.graph.Files[prevSourceIndex].InputFile.Repr.(*graph.JSRepr) if _, ok := prevRepr.AST.NamedExports[alias]; ok { continue nextExport } } if existing, ok := resolvedExports[alias]; !ok { // Initialize the re-export resolvedExports[alias] = graph.ExportData{ Ref: name.Ref, SourceIndex: otherSourceIndex, NameLoc: name.AliasLoc, } // Make sure the symbol is marked as imported so that code splitting // imports it correctly if it ends up being shared with another chunk repr.Meta.ImportsToBind[name.Ref] = graph.ImportData{ Ref: name.Ref, SourceIndex: otherSourceIndex, } } else if existing.SourceIndex != otherSourceIndex { // Two different re-exports colliding makes it potentially ambiguous existing.PotentiallyAmbiguousExportStarRefs = append(existing.PotentiallyAmbiguousExportStarRefs, graph.ImportData{ SourceIndex: otherSourceIndex, Ref: name.Ref, NameLoc: name.AliasLoc, }) resolvedExports[alias] = existing } } // Search further through this file's export stars c.addExportsForExportStar(resolvedExports, otherSourceIndex, sourceIndexStack) } } type importTracker struct { sourceIndex uint32 nameLoc logger.Loc // Optional, goes with sourceIndex, ignore if zero importRef js_ast.Ref } type importStatus uint8 const ( // The imported file has no matching export importNoMatch importStatus = iota // The imported file has a matching export importFound // The imported file is CommonJS and has unknown exports importCommonJS // The import is missing but there is a dynamic fallback object importDynamicFallback // The import was treated as a CommonJS import but the file is known to have no exports importCommonJSWithoutExports // The imported file was disabled by mapping it to false in the "browser" // field of package.json importDisabled // The imported file is external and has unknown exports importExternal // This is a missing re-export in a TypeScript file, so it's probably a type importProbablyTypeScriptType ) func (c *linkerContext) advanceImportTracker(tracker importTracker) (importTracker, importStatus, []graph.ImportData) { file := &c.graph.Files[tracker.sourceIndex] repr := file.InputFile.Repr.(*graph.JSRepr) namedImport := repr.AST.NamedImports[tracker.importRef] // Is this an external file? record := &repr.AST.ImportRecords[namedImport.ImportRecordIndex] if !record.SourceIndex.IsValid() { return importTracker{}, importExternal, nil } // Is this a disabled file? otherSourceIndex := record.SourceIndex.GetIndex() if c.graph.Files[otherSourceIndex].InputFile.Source.KeyPath.IsDisabled() { return importTracker{sourceIndex: otherSourceIndex, importRef: js_ast.InvalidRef}, importDisabled, nil } // Is this a named import of a file without any exports? 
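	// The check below approximates "this file has no exports at all": no lazy
	// export (e.g. a JSON file), no ESM "export" keyword, and no use of the
	// CommonJS "exports" or "module" references. Imports of "default" are
	// excluded, presumably because a default export can still be synthesized
	// through the CommonJS interop wrapper. A hypothetical case that hits the
	// warning:
	//
	//   // empty.js            (entirely empty file)
	//   // user.js
	//   import {something} from './empty.js'   // "something" is always undefined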
otherRepr := c.graph.Files[otherSourceIndex].InputFile.Repr.(*graph.JSRepr) if !namedImport.AliasIsStar && !otherRepr.AST.HasLazyExport && // CommonJS exports otherRepr.AST.ExportKeyword.Len == 0 && namedImport.Alias != "default" && // ESM exports !otherRepr.AST.UsesExportsRef && !otherRepr.AST.UsesModuleRef { // Just warn about it and replace the import with "undefined" return importTracker{sourceIndex: otherSourceIndex, importRef: js_ast.InvalidRef}, importCommonJSWithoutExports, nil } // Is this a CommonJS file? if otherRepr.AST.ExportsKind == js_ast.ExportsCommonJS { return importTracker{sourceIndex: otherSourceIndex, importRef: js_ast.InvalidRef}, importCommonJS, nil } // Match this import star with an export star from the imported file if matchingExport := otherRepr.Meta.ResolvedExportStar; namedImport.AliasIsStar && matchingExport != nil { // Check to see if this is a re-export of another import return importTracker{ sourceIndex: matchingExport.SourceIndex, importRef: matchingExport.Ref, nameLoc: matchingExport.NameLoc, }, importFound, matchingExport.PotentiallyAmbiguousExportStarRefs } // Match this import up with an export from the imported file if matchingExport, ok := otherRepr.Meta.ResolvedExports[namedImport.Alias]; ok { // Check to see if this is a re-export of another import return importTracker{ sourceIndex: matchingExport.SourceIndex, importRef: matchingExport.Ref, nameLoc: matchingExport.NameLoc, }, importFound, matchingExport.PotentiallyAmbiguousExportStarRefs } // Is this a file with dynamic exports? if otherRepr.AST.ExportsKind == js_ast.ExportsESMWithDynamicFallback { return importTracker{sourceIndex: otherSourceIndex, importRef: otherRepr.AST.ExportsRef}, importDynamicFallback, nil } // Missing re-exports in TypeScript files are indistinguishable from types if file.InputFile.Loader.IsTypeScript() && namedImport.IsExported { return importTracker{}, importProbablyTypeScriptType, nil } return importTracker{sourceIndex: otherSourceIndex}, importNoMatch, nil } func (c *linkerContext) treeShakingAndCodeSplitting() { // Tree shaking: Each entry point marks all files reachable from itself c.timer.Begin("Tree shaking") for _, entryPoint := range c.graph.EntryPoints() { c.markFileLiveForTreeShaking(entryPoint.SourceIndex) } c.timer.End("Tree shaking") // Code splitting: Determine which entry points can reach which files. This // has to happen after tree shaking because there is an implicit dependency // between live parts within the same file. All liveness has to be computed // first before determining which entry points can reach which files. 
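	// Rough intuition for this pass (entry point names are hypothetical): each
	// live file gets one bit per entry point. A file reachable from both
	// "home.ts" and "admin.ts" ends up with both bits set, while a file
	// reachable from only one of them gets a single bit; files whose bit sets
	// match can later be grouped into the same chunk.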
c.timer.Begin("Code splitting") for i, entryPoint := range c.graph.EntryPoints() { c.markFileReachableForCodeSplitting(entryPoint.SourceIndex, uint(i), 0) } c.timer.End("Code splitting") } func (c *linkerContext) markFileReachableForCodeSplitting(sourceIndex uint32, entryPointBit uint, distanceFromEntryPoint uint32) { file := &c.graph.Files[sourceIndex] if !file.IsLive { return } traverseAgain := false // Track the minimum distance to an entry point if distanceFromEntryPoint < file.DistanceFromEntryPoint { file.DistanceFromEntryPoint = distanceFromEntryPoint traverseAgain = true } distanceFromEntryPoint++ // Don't mark this file more than once if file.EntryBits.HasBit(entryPointBit) && !traverseAgain { return } file.EntryBits.SetBit(entryPointBit) switch repr := file.InputFile.Repr.(type) { case *graph.JSRepr: // If the JavaScript stub for a CSS file is included, also include the CSS file if repr.CSSSourceIndex.IsValid() { c.markFileReachableForCodeSplitting(repr.CSSSourceIndex.GetIndex(), entryPointBit, distanceFromEntryPoint) } // Traverse into all imported files for _, record := range repr.AST.ImportRecords { if record.SourceIndex.IsValid() && !c.isExternalDynamicImport(&record, sourceIndex) { c.markFileReachableForCodeSplitting(record.SourceIndex.GetIndex(), entryPointBit, distanceFromEntryPoint) } } // Traverse into all dependencies of all parts in this file for _, part := range repr.AST.Parts { for _, dependency := range part.Dependencies { if dependency.SourceIndex != sourceIndex { c.markFileReachableForCodeSplitting(dependency.SourceIndex, entryPointBit, distanceFromEntryPoint) } } } case *graph.CSSRepr: // Traverse into all dependencies for _, record := range repr.AST.ImportRecords { if record.SourceIndex.IsValid() { c.markFileReachableForCodeSplitting(record.SourceIndex.GetIndex(), entryPointBit, distanceFromEntryPoint) } } } } func (c *linkerContext) markFileLiveForTreeShaking(sourceIndex uint32) { file := &c.graph.Files[sourceIndex] // Don't mark this file more than once if file.IsLive { return } file.IsLive = true switch repr := file.InputFile.Repr.(type) { case *graph.JSRepr: // If the JavaScript stub for a CSS file is included, also include the CSS file if repr.CSSSourceIndex.IsValid() { c.markFileLiveForTreeShaking(repr.CSSSourceIndex.GetIndex()) } for partIndex, part := range repr.AST.Parts { canBeRemovedIfUnused := part.CanBeRemovedIfUnused // Also include any statement-level imports for _, importRecordIndex := range part.ImportRecordIndices { record := &repr.AST.ImportRecords[importRecordIndex] if record.Kind != ast.ImportStmt { continue } if record.SourceIndex.IsValid() { otherSourceIndex := record.SourceIndex.GetIndex() // Don't include this module for its side effects if it can be // considered to have no side effects if otherFile := &c.graph.Files[otherSourceIndex]; otherFile.InputFile.SideEffects.Kind != graph.HasSideEffects && !c.options.IgnoreDCEAnnotations { continue } // Otherwise, include this module for its side effects c.markFileLiveForTreeShaking(otherSourceIndex) } // If we get here then the import was included for its side effects, so // we must also keep this part canBeRemovedIfUnused = false } // Include all parts in this file with side effects, or just include // everything if tree-shaking is disabled. Note that we still want to // perform tree-shaking on the runtime even if tree-shaking is disabled. 
if !canBeRemovedIfUnused || (!part.ForceTreeShaking && !c.options.TreeShaking && file.IsEntryPoint()) { c.markPartLiveForTreeShaking(sourceIndex, uint32(partIndex)) } } case *graph.CSSRepr: // Include all "@import" rules for _, record := range repr.AST.ImportRecords { if record.SourceIndex.IsValid() { c.markFileLiveForTreeShaking(record.SourceIndex.GetIndex()) } } } } func (c *linkerContext) isExternalDynamicImport(record *ast.ImportRecord, sourceIndex uint32) bool { return record.Kind == ast.ImportDynamic && c.graph.Files[record.SourceIndex.GetIndex()].IsEntryPoint() && record.SourceIndex.GetIndex() != sourceIndex } func (c *linkerContext) markPartLiveForTreeShaking(sourceIndex uint32, partIndex uint32) { file := &c.graph.Files[sourceIndex] repr := file.InputFile.Repr.(*graph.JSRepr) part := &repr.AST.Parts[partIndex] // Don't mark this part more than once if part.IsLive { return } part.IsLive = true // Include the file containing this part c.markFileLiveForTreeShaking(sourceIndex) // Also include any dependencies for _, dep := range part.Dependencies { c.markPartLiveForTreeShaking(dep.SourceIndex, dep.PartIndex) } } func sanitizeFilePathForVirtualModulePath(path string) string { // Convert it to a safe file path. See: https://stackoverflow.com/a/31976060 sb := strings.Builder{} needsGap := false for _, c := range path { switch c { case 0: // These characters are forbidden on Unix and Windows case '<', '>', ':', '"', '|', '?', '*': // These characters are forbidden on Windows default: if c < 0x20 { // These characters are forbidden on Windows break } // Turn runs of invalid characters into a '_' if needsGap { sb.WriteByte('_') needsGap = false } sb.WriteRune(c) continue } if sb.Len() > 0 { needsGap = true } } // Make sure the name isn't empty if sb.Len() == 0 { return "_" } // Note: An extension will be added to this base name, so there is no need to // avoid forbidden file names such as ".." since ".js" is a valid file name. return sb.String() } // JavaScript modules are traversed in depth-first postorder. This is the // order that JavaScript modules were evaluated in before the top-level await // feature was introduced. // // A // / \ // B C // \ / // D // // If A imports B and then C, B imports D, and C imports D, then the JavaScript // traversal order is D B C A. // // This function may deviate from ESM import order for dynamic imports (both // "require()" and "import()"). This is because the import order is impossible // to determine since the imports happen at run-time instead of compile-time. // In this case we just pick an arbitrary but consistent order. func (c *linkerContext) findImportedCSSFilesInJSOrder(entryPoint uint32) (order []uint32) { visited := make(map[uint32]bool) var visit func(uint32, ast.Index32) // Include this file and all files it imports visit = func(sourceIndex uint32, importerIndex ast.Index32) { if visited[sourceIndex] { return } visited[sourceIndex] = true file := &c.graph.Files[sourceIndex] repr := file.InputFile.Repr.(*graph.JSRepr) // Iterate over each part in the file in order for _, part := range repr.AST.Parts { // Ignore dead code that has been removed from the bundle. Any code // that's reachable from the entry point, even through lazy dynamic // imports, could end up being activated by the bundle and needs its // CSS to be included. This may change if/when code splitting is // supported for CSS. if !part.IsLive { continue } // Traverse any files imported by this part. 
Note that CommonJS calls // to "require()" count as imports too, sort of as if the part has an // ESM "import" statement in it. This may seem weird because ESM imports // are a compile-time concept while CommonJS imports are a run-time // concept. But we don't want to manipulate