1package api
2
3import (
4	"fmt"
5	"io/ioutil"
6	"math"
7	"math/rand"
8	"os"
9	"regexp"
10	"sort"
11	"strconv"
12	"strings"
13	"sync"
14	"sync/atomic"
15	"time"
16	"unicode/utf8"
17
18	"github.com/evanw/esbuild/internal/api_helpers"
19	"github.com/evanw/esbuild/internal/ast"
20	"github.com/evanw/esbuild/internal/bundler"
21	"github.com/evanw/esbuild/internal/cache"
22	"github.com/evanw/esbuild/internal/compat"
23	"github.com/evanw/esbuild/internal/config"
24	"github.com/evanw/esbuild/internal/fs"
25	"github.com/evanw/esbuild/internal/graph"
26	"github.com/evanw/esbuild/internal/helpers"
27	"github.com/evanw/esbuild/internal/js_ast"
28	"github.com/evanw/esbuild/internal/js_lexer"
29	"github.com/evanw/esbuild/internal/js_parser"
30	"github.com/evanw/esbuild/internal/logger"
31	"github.com/evanw/esbuild/internal/resolver"
32)
33
34func validatePathTemplate(template string) []config.PathTemplate {
35	if template == "" {
36		return nil
37	}
38	template = "./" + strings.ReplaceAll(template, "\\", "/")
39
40	parts := make([]config.PathTemplate, 0, 4)
41	search := 0
42
43	// Split by placeholders
44	for search < len(template) {
45		// Jump to the next "["
46		if found := strings.IndexByte(template[search:], '['); found == -1 {
47			break
48		} else {
49			search += found
50		}
51		head, tail := template[:search], template[search:]
52		placeholder := config.NoPlaceholder
53
54		// Check for a placeholder
55		switch {
56		case strings.HasPrefix(tail, "[dir]"):
57			placeholder = config.DirPlaceholder
58			search += len("[dir]")
59
60		case strings.HasPrefix(tail, "[name]"):
61			placeholder = config.NamePlaceholder
62			search += len("[name]")
63
64		case strings.HasPrefix(tail, "[hash]"):
65			placeholder = config.HashPlaceholder
66			search += len("[hash]")
67
68		case strings.HasPrefix(tail, "[ext]"):
69			placeholder = config.ExtPlaceholder
70			search += len("[ext]")
71
72		default:
73			// Skip past the "[" so we don't find it again
74			search++
75			continue
76		}
77
78		// Add a part for everything up to and including this placeholder
79		parts = append(parts, config.PathTemplate{
80			Data:        head,
81			Placeholder: placeholder,
82		})
83
84		// Reset the search after this placeholder
85		template = template[search:]
86		search = 0
87	}
88
89	// Append any remaining data as a part without a placeholder
90	if search < len(template) {
91		parts = append(parts, config.PathTemplate{
92			Data:        template,
93			Placeholder: config.NoPlaceholder,
94		})
95	}
96
97	return parts
98}
99
100func validatePlatform(value Platform) config.Platform {
101	switch value {
102	case PlatformBrowser:
103		return config.PlatformBrowser
104	case PlatformNode:
105		return config.PlatformNode
106	case PlatformNeutral:
107		return config.PlatformNeutral
108	default:
109		panic("Invalid platform")
110	}
111}
112
113func validateFormat(value Format) config.Format {
114	switch value {
115	case FormatDefault:
116		return config.FormatPreserve
117	case FormatIIFE:
118		return config.FormatIIFE
119	case FormatCommonJS:
120		return config.FormatCommonJS
121	case FormatESModule:
122		return config.FormatESModule
123	default:
124		panic("Invalid format")
125	}
126}
127
128func validateSourceMap(value SourceMap) config.SourceMap {
129	switch value {
130	case SourceMapNone:
131		return config.SourceMapNone
132	case SourceMapLinked:
133		return config.SourceMapLinkedWithComment
134	case SourceMapInline:
135		return config.SourceMapInline
136	case SourceMapExternal:
137		return config.SourceMapExternalWithoutComment
138	case SourceMapInlineAndExternal:
139		return config.SourceMapInlineAndExternal
140	default:
141		panic("Invalid source map")
142	}
143}
144
145func validateLegalComments(value LegalComments, bundle bool) config.LegalComments {
146	switch value {
147	case LegalCommentsDefault:
148		if bundle {
149			return config.LegalCommentsEndOfFile
150		} else {
151			return config.LegalCommentsInline
152		}
153	case LegalCommentsNone:
154		return config.LegalCommentsNone
155	case LegalCommentsInline:
156		return config.LegalCommentsInline
157	case LegalCommentsEndOfFile:
158		return config.LegalCommentsEndOfFile
159	case LegalCommentsLinked:
160		return config.LegalCommentsLinkedWithComment
161	case LegalCommentsExternal:
162		return config.LegalCommentsExternalWithoutComment
163	default:
164		panic("Invalid source map")
165	}
166}
167
168func validateColor(value StderrColor) logger.UseColor {
169	switch value {
170	case ColorIfTerminal:
171		return logger.ColorIfTerminal
172	case ColorNever:
173		return logger.ColorNever
174	case ColorAlways:
175		return logger.ColorAlways
176	default:
177		panic("Invalid color")
178	}
179}
180
181func validateLogLevel(value LogLevel) logger.LogLevel {
182	switch value {
183	case LogLevelVerbose:
184		return logger.LevelVerbose
185	case LogLevelDebug:
186		return logger.LevelDebug
187	case LogLevelInfo:
188		return logger.LevelInfo
189	case LogLevelWarning:
190		return logger.LevelWarning
191	case LogLevelError:
192		return logger.LevelError
193	case LogLevelSilent:
194		return logger.LevelSilent
195	default:
196		panic("Invalid log level")
197	}
198}
199
200func validateASCIIOnly(value Charset) bool {
201	switch value {
202	case CharsetDefault, CharsetASCII:
203		return true
204	case CharsetUTF8:
205		return false
206	default:
207		panic("Invalid charset")
208	}
209}
210
211func validateTreeShaking(value TreeShaking, bundle bool, format Format) bool {
212	switch value {
213	case TreeShakingDefault:
214		// If we're in an IIFE then there's no way to concatenate additional code
215		// to the end of our output so we assume tree shaking is safe. And when
216		// bundling we assume that tree shaking is safe because if you want to add
217		// code to the bundle, you should be doing that by including it in the
218		// bundle instead of concatenating it afterward, so we also assume tree
219		// shaking is safe then. Otherwise we assume tree shaking is not safe.
220		return bundle || format == FormatIIFE
221	case TreeShakingFalse:
222		return false
223	case TreeShakingTrue:
224		return true
225	default:
226		panic("Invalid tree shaking")
227	}
228}
229
230func validateLoader(value Loader) config.Loader {
231	switch value {
232	case LoaderNone:
233		return config.LoaderNone
234	case LoaderJS:
235		return config.LoaderJS
236	case LoaderJSX:
237		return config.LoaderJSX
238	case LoaderTS:
239		return config.LoaderTS
240	case LoaderTSX:
241		return config.LoaderTSX
242	case LoaderJSON:
243		return config.LoaderJSON
244	case LoaderText:
245		return config.LoaderText
246	case LoaderBase64:
247		return config.LoaderBase64
248	case LoaderDataURL:
249		return config.LoaderDataURL
250	case LoaderFile:
251		return config.LoaderFile
252	case LoaderBinary:
253		return config.LoaderBinary
254	case LoaderCSS:
255		return config.LoaderCSS
256	case LoaderDefault:
257		return config.LoaderDefault
258	default:
259		panic("Invalid loader")
260	}
261}
262
263func validateEngine(value EngineName) compat.Engine {
264	switch value {
265	case EngineChrome:
266		return compat.Chrome
267	case EngineEdge:
268		return compat.Edge
269	case EngineFirefox:
270		return compat.Firefox
271	case EngineIOS:
272		return compat.IOS
273	case EngineNode:
274		return compat.Node
275	case EngineSafari:
276		return compat.Safari
277	default:
278		panic("Invalid loader")
279	}
280}
281
// versionRegex matches engine version strings of the form "major",
// "major.minor", or "major.minor.patch" where each component is decimal
// digits. The minor and patch capture groups are optional and come back as
// empty strings when absent; see validateFeatures for how that is handled.
var versionRegex = regexp.MustCompile(`^([0-9]+)(?:\.([0-9]+))?(?:\.([0-9]+))?$`)
283
// validateFeatures converts the public "Target" and "Engines" options into
// the internal representation: a flag saying whether a target was configured,
// the sets of JS and CSS features that are unsupported under the given
// constraints, and a human-readable description of the environment (used in
// error messages). Returns the zero values when neither option was set.
func validateFeatures(log logger.Log, target Target, engines []Engine) (config.TargetFromAPI, compat.JSFeature, compat.CSSFeature, string) {
	if target == DefaultTarget && len(engines) == 0 {
		return config.TargetWasUnconfigured, 0, 0, ""
	}

	constraints := make(map[compat.Engine][]int)
	targets := make([]string, 0, 1+len(engines))
	targetFromAPI := config.TargetWasConfigured

	// The "target" option contributes a version constraint on the abstract
	// "ES" engine (ESNext means no constraint at all, just a flag)
	switch target {
	case ES5:
		constraints[compat.ES] = []int{5}
	case ES2015:
		constraints[compat.ES] = []int{2015}
	case ES2016:
		constraints[compat.ES] = []int{2016}
	case ES2017:
		constraints[compat.ES] = []int{2017}
	case ES2018:
		constraints[compat.ES] = []int{2018}
	case ES2019:
		constraints[compat.ES] = []int{2019}
	case ES2020:
		constraints[compat.ES] = []int{2020}
	case ES2021:
		constraints[compat.ES] = []int{2021}
	case ESNext:
		targetFromAPI = config.TargetWasConfiguredIncludingESNext
	case DefaultTarget:
	default:
		panic("Invalid target")
	}

	// Each engine contributes a version constraint parsed from a string of
	// the form "major(.minor(.patch)?)?"
	for _, engine := range engines {
		if match := versionRegex.FindStringSubmatch(engine.Version); match != nil {
			if major, err := strconv.Atoi(match[1]); err == nil {
				version := []int{major}
				// The minor and patch groups are optional: when absent the
				// submatch is "" and Atoi fails, so the component is simply
				// omitted from the version slice
				if minor, err := strconv.Atoi(match[2]); err == nil {
					version = append(version, minor)
				}
				if patch, err := strconv.Atoi(match[3]); err == nil {
					version = append(version, patch)
				}
				switch engine.Name {
				case EngineChrome:
					constraints[compat.Chrome] = version
				case EngineEdge:
					constraints[compat.Edge] = version
				case EngineFirefox:
					constraints[compat.Firefox] = version
				case EngineIOS:
					constraints[compat.IOS] = version
				case EngineNode:
					constraints[compat.Node] = version
				case EngineSafari:
					constraints[compat.Safari] = version
				default:
					panic("Invalid engine name")
				}
				continue
			}
		}

		// Reached on a version string the regex rejected (or with a major
		// component too large for an int)
		log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid version: %q", engine.Version))
	}

	// Render each constraint as e.g. "chrome50.1" for the environment string
	for engine, version := range constraints {
		var text string
		switch len(version) {
		case 1:
			text = fmt.Sprintf("%s%d", engine.String(), version[0])
		case 2:
			text = fmt.Sprintf("%s%d.%d", engine.String(), version[0], version[1])
		case 3:
			text = fmt.Sprintf("%s%d.%d.%d", engine.String(), version[0], version[1], version[2])
		}
		targets = append(targets, fmt.Sprintf("%q", text))
	}

	// Map iteration order is random, so sort for deterministic output
	sort.Strings(targets)
	targetEnv := strings.Join(targets, ", ")

	return targetFromAPI, compat.UnsupportedJSFeatures(constraints), compat.UnsupportedCSSFeatures(constraints), targetEnv
}
368
369func validateGlobalName(log logger.Log, text string) []string {
370	if text != "" {
371		source := logger.Source{
372			KeyPath:    logger.Path{Text: "(global path)"},
373			PrettyPath: "(global name)",
374			Contents:   text,
375		}
376
377		if result, ok := js_parser.ParseGlobalName(log, source); ok {
378			return result
379		}
380	}
381
382	return nil
383}
384
385func validateExternals(log logger.Log, fs fs.FS, paths []string) config.ExternalModules {
386	result := config.ExternalModules{
387		NodeModules: make(map[string]bool),
388		AbsPaths:    make(map[string]bool),
389	}
390	for _, path := range paths {
391		if index := strings.IndexByte(path, '*'); index != -1 {
392			if strings.ContainsRune(path[index+1:], '*') {
393				log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("External path %q cannot have more than one \"*\" wildcard", path))
394			} else {
395				result.Patterns = append(result.Patterns, config.WildcardPattern{
396					Prefix: path[:index],
397					Suffix: path[index+1:],
398				})
399			}
400		} else if resolver.IsPackagePath(path) {
401			result.NodeModules[path] = true
402		} else if absPath := validatePath(log, fs, path, "external path"); absPath != "" {
403			result.AbsPaths[absPath] = true
404		}
405	}
406	return result
407}
408
// isValidExtension reports whether ext looks like a file extension: at least
// two characters, starting with "." and not ending with one.
func isValidExtension(ext string) bool {
	return len(ext) >= 2 && strings.HasPrefix(ext, ".") && !strings.HasSuffix(ext, ".")
}
412
413func validateResolveExtensions(log logger.Log, order []string) []string {
414	if order == nil {
415		return []string{".tsx", ".ts", ".jsx", ".js", ".css", ".json"}
416	}
417	for _, ext := range order {
418		if !isValidExtension(ext) {
419			log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid file extension: %q", ext))
420		}
421	}
422	return order
423}
424
425func validateLoaders(log logger.Log, loaders map[string]Loader) map[string]config.Loader {
426	result := bundler.DefaultExtensionToLoaderMap()
427	if loaders != nil {
428		for ext, loader := range loaders {
429			if !isValidExtension(ext) {
430				log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid file extension: %q", ext))
431			}
432			result[ext] = validateLoader(loader)
433		}
434	}
435	return result
436}
437
438func validateJSXExpr(log logger.Log, text string, name string, kind js_parser.JSXExprKind) config.JSXExpr {
439	if expr, ok := js_parser.ParseJSXExpr(text, kind); ok {
440		return expr
441	}
442	log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid JSX %s: %q", name, text))
443	return config.JSXExpr{}
444}
445
// validateDefines converts the user-provided "define" and "pure" options into
// the internal representation used by the parser. Identifier values and
// primitive JSON values (null, boolean, string, number) become inline
// substitution callbacks; array and object values are extracted into
// "injected defines" that are replaced with a reference to a shared symbol.
// When targeting the browser, a default for "process.env.NODE_ENV" is also
// supplied ("production" when fully minifying, "development" otherwise)
// unless the user already defined it. Entries from "pure" are merged into the
// same define table by setting CallCanBeUnwrappedIfUnused.
func validateDefines(
	log logger.Log,
	defines map[string]string,
	pureFns []string,
	platform Platform,
	minify bool,
) (*config.ProcessedDefines, []config.InjectedDefine) {
	rawDefines := make(map[string]config.DefineData)
	var valueToInject map[string]config.InjectedDefine
	var definesToInject []string

	for key, value := range defines {
		// The key must be a dot-separated identifier list
		for _, part := range strings.Split(key, ".") {
			if !js_lexer.IsIdentifier(part) {
				if part == key {
					log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("The define key %q must be a valid identifier", key))
				} else {
					log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("The define key %q contains invalid identifier %q", key, part))
				}
				// NOTE(review): this "continue" only advances the inner part
				// loop, so an invalid key still falls through to the value
				// handling below. The logged error fails the build anyway,
				// so this appears harmless — confirm if changing.
				continue
			}
		}

		// Allow substituting for an identifier
		if js_lexer.IsIdentifier(value) {
			if _, ok := js_lexer.Keywords[value]; !ok {
				name := value // The closure must close over a variable inside the loop
				rawDefines[key] = config.DefineData{
					DefineFunc: func(args config.DefineArgs) js_ast.E {
						return &js_ast.EIdentifier{Ref: args.FindSymbol(args.Loc, name)}
					},
				}
				continue
			}
		}

		// Parse the value as JSON (errors are deferred and discarded; a
		// parse failure is reported as a single error below instead)
		source := logger.Source{Contents: value}
		expr, ok := js_parser.ParseJSON(logger.NewDeferLog(logger.DeferLogAll), source, js_parser.JSONOptions{})
		if !ok {
			log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid define value (must be valid JSON syntax or a single identifier): %s", value))
			continue
		}

		var fn config.DefineFunc
		switch e := expr.Data.(type) {
		// These values are inserted inline, and can participate in constant folding
		case *js_ast.ENull:
			fn = func(config.DefineArgs) js_ast.E { return js_ast.ENullShared }
		case *js_ast.EBoolean:
			fn = func(config.DefineArgs) js_ast.E { return &js_ast.EBoolean{Value: e.Value} }
		case *js_ast.EString:
			fn = func(config.DefineArgs) js_ast.E { return &js_ast.EString{Value: e.Value} }
		case *js_ast.ENumber:
			fn = func(config.DefineArgs) js_ast.E { return &js_ast.ENumber{Value: e.Value} }

		// These values are extracted into a shared symbol reference
		case *js_ast.EArray, *js_ast.EObject:
			definesToInject = append(definesToInject, key)
			if valueToInject == nil {
				valueToInject = make(map[string]config.InjectedDefine)
			}
			valueToInject[key] = config.InjectedDefine{Source: source, Data: e, Name: key}
			continue
		}

		rawDefines[key] = config.DefineData{DefineFunc: fn}
	}

	// Sort injected defines for determinism, since the imports will be injected
	// into every file in the order that we return them from this function
	var injectedDefines []config.InjectedDefine
	if len(definesToInject) > 0 {
		injectedDefines = make([]config.InjectedDefine, len(definesToInject))
		sort.Strings(definesToInject)
		for i, key := range definesToInject {
			index := i // Capture this for the closure below
			injectedDefines[i] = valueToInject[key]
			rawDefines[key] = config.DefineData{DefineFunc: func(args config.DefineArgs) js_ast.E {
				return &js_ast.EIdentifier{Ref: args.SymbolForDefine(index)}
			}}
		}
	}

	// If we're bundling for the browser, add a special-cased define for
	// "process.env.NODE_ENV" that is "development" when not minifying and
	// "production" when minifying. This is a convention from the React world
	// that must be handled to avoid all React code crashing instantly. This
	// is only done if it's not already defined so that you can override it if
	// necessary.
	if platform == PlatformBrowser {
		if _, process := rawDefines["process"]; !process {
			if _, processEnv := rawDefines["process.env"]; !processEnv {
				if _, processEnvNodeEnv := rawDefines["process.env.NODE_ENV"]; !processEnvNodeEnv {
					var value []uint16
					if minify {
						value = js_lexer.StringToUTF16("production")
					} else {
						value = js_lexer.StringToUTF16("development")
					}
					rawDefines["process.env.NODE_ENV"] = config.DefineData{
						DefineFunc: func(args config.DefineArgs) js_ast.E {
							return &js_ast.EString{Value: value}
						},
					}
				}
			}
		}
	}

	for _, key := range pureFns {
		// The key must be a dot-separated identifier list
		for _, part := range strings.Split(key, ".") {
			if !js_lexer.IsIdentifier(part) {
				log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid pure function: %q", key))
				// NOTE(review): same inner-loop "continue" pattern as the
				// define-key validation above; the invalid key is still
				// merged below, but the logged error fails the build.
				continue
			}
		}

		// Merge with any previously-specified defines
		define := rawDefines[key]
		define.CallCanBeUnwrappedIfUnused = true
		rawDefines[key] = define
	}

	// Processing defines is expensive. Process them once here so the same object
	// can be shared between all parsers we create using these arguments.
	processed := config.ProcessDefines(rawDefines)
	return &processed, injectedDefines
}
577
578func validatePath(log logger.Log, fs fs.FS, relPath string, pathKind string) string {
579	if relPath == "" {
580		return ""
581	}
582	absPath, ok := fs.Abs(relPath)
583	if !ok {
584		log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid %s: %s", pathKind, relPath))
585	}
586	return absPath
587}
588
589func validateOutputExtensions(log logger.Log, outExtensions map[string]string) (js string, css string) {
590	for key, value := range outExtensions {
591		if !isValidExtension(value) {
592			log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid output extension: %q", value))
593		}
594		switch key {
595		case ".js":
596			js = value
597		case ".css":
598			css = value
599		default:
600			log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid output extension: %q (valid: .css, .js)", key))
601		}
602	}
603	return
604}
605
606func validateBannerOrFooter(log logger.Log, name string, values map[string]string) (js string, css string) {
607	for key, value := range values {
608		switch key {
609		case "js":
610			js = value
611		case "css":
612			css = value
613		default:
614			log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Invalid %s file type: %q (valid: css, js)", name, key))
615		}
616	}
617	return
618}
619
620func convertLocationToPublic(loc *logger.MsgLocation) *Location {
621	if loc != nil {
622		return &Location{
623			File:       loc.File,
624			Namespace:  loc.Namespace,
625			Line:       loc.Line,
626			Column:     loc.Column,
627			Length:     loc.Length,
628			LineText:   loc.LineText,
629			Suggestion: loc.Suggestion,
630		}
631	}
632	return nil
633}
634
635func convertMessagesToPublic(kind logger.MsgKind, msgs []logger.Msg) []Message {
636	var filtered []Message
637	for _, msg := range msgs {
638		if msg.Kind == kind {
639			var notes []Note
640			for _, note := range msg.Notes {
641				notes = append(notes, Note{
642					Text:     note.Text,
643					Location: convertLocationToPublic(note.Location),
644				})
645			}
646			filtered = append(filtered, Message{
647				PluginName: msg.PluginName,
648				Text:       msg.Data.Text,
649				Location:   convertLocationToPublic(msg.Data.Location),
650				Notes:      notes,
651				Detail:     msg.Data.UserDetail,
652			})
653		}
654	}
655	return filtered
656}
657
658func convertLocationToInternal(loc *Location) *logger.MsgLocation {
659	if loc != nil {
660		namespace := loc.Namespace
661		if namespace == "" {
662			namespace = "file"
663		}
664		return &logger.MsgLocation{
665			File:       loc.File,
666			Namespace:  namespace,
667			Line:       loc.Line,
668			Column:     loc.Column,
669			Length:     loc.Length,
670			LineText:   loc.LineText,
671			Suggestion: loc.Suggestion,
672		}
673	}
674	return nil
675}
676
677func convertMessagesToInternal(msgs []logger.Msg, kind logger.MsgKind, messages []Message) []logger.Msg {
678	for _, message := range messages {
679		var notes []logger.MsgData
680		for _, note := range message.Notes {
681			notes = append(notes, logger.MsgData{
682				Text:     note.Text,
683				Location: convertLocationToInternal(note.Location),
684			})
685		}
686		msgs = append(msgs, logger.Msg{
687			PluginName: message.PluginName,
688			Kind:       kind,
689			Data: logger.MsgData{
690				Text:       message.Text,
691				Location:   convertLocationToInternal(message.Location),
692				UserDetail: message.Detail,
693			},
694			Notes: notes,
695		})
696	}
697	return msgs
698}
699
700////////////////////////////////////////////////////////////////////////////////
701// Build API
702
// internalBuildResult bundles the public build result together with the
// internal state needed to implement incremental rebuilds and watch mode.
type internalBuildResult struct {
	result    BuildResult    // The public result handed back to the API caller
	options   config.Options // The validated options, reused for rebuilds
	watchData fs.WatchData   // File system data consumed by watch mode
}
708
// buildImpl implements the public Build API for a single initial build. It
// sets up logging and the real file system, loads plugins exactly once (they
// are not re-evaluated on rebuilds), delegates the actual build to
// rebuildImpl, and prints a summary of the generated files to stderr when
// appropriate.
func buildImpl(buildOpts BuildOptions) internalBuildResult {
	start := time.Now()
	logOptions := logger.OutputOptions{
		IncludeSource: true,
		MessageLimit:  buildOpts.LogLimit,
		Color:         validateColor(buildOpts.Color),
		LogLevel:      validateLogLevel(buildOpts.LogLevel),
	}
	log := logger.NewStderrLog(logOptions)

	// Validate that the current working directory is an absolute path
	realFS, err := fs.RealFS(fs.RealFSOptions{
		AbsWorkingDir: buildOpts.AbsWorkingDir,

		// This is a long-lived file system object so do not cache calls to
		// ReadDirectory() (they are normally cached for the duration of a build
		// for performance).
		DoNotCache: true,
	})
	if err != nil {
		// Bail out early: without a valid working directory no build can run
		log.Add(logger.Error, nil, logger.Range{}, err.Error())
		return internalBuildResult{result: BuildResult{Errors: convertMessagesToPublic(logger.Error, log.Done())}}
	}

	// Do not re-evaluate plugins when rebuilding. Also make sure the working
	// directory doesn't change, since breaking that invariant would break the
	// validation that we just did above.
	caches := cache.MakeCacheSet()
	oldAbsWorkingDir := buildOpts.AbsWorkingDir
	plugins, onEndCallbacks, finalizeBuildOptions := loadPlugins(&buildOpts, realFS, log, caches)
	if buildOpts.AbsWorkingDir != oldAbsWorkingDir {
		panic("Mutating \"AbsWorkingDir\" is not allowed")
	}

	internalResult := rebuildImpl(buildOpts, caches, plugins, finalizeBuildOptions, onEndCallbacks, logOptions, log, false /* isRebuild */)

	// Print a summary of the generated files to stderr. Except don't do
	// this if the terminal is already being used for something else.
	if logOptions.LogLevel <= logger.LevelInfo && len(internalResult.result.OutputFiles) > 0 &&
		buildOpts.Watch == nil && !buildOpts.Incremental && !internalResult.options.WriteToStdout {
		printSummary(logOptions, internalResult.result.OutputFiles, start)
	}

	return internalResult
}
754
// prettyPrintByteCount renders a byte count with a binary-unit suffix for the
// build summary. Values below 1 KiB use a "b " suffix padded with a trailing
// space so sizes line up in the table; larger values get one decimal place.
func prettyPrintByteCount(n int) string {
	const (
		kb = 1 << 10
		mb = 1 << 20
		gb = 1 << 30
	)
	switch {
	case n < kb:
		return fmt.Sprintf("%db ", n)
	case n < mb:
		return fmt.Sprintf("%.1fkb", float64(n)/kb)
	case n < gb:
		return fmt.Sprintf("%.1fmb", float64(n)/mb)
	default:
		return fmt.Sprintf("%.1fgb", float64(n)/gb)
	}
}
768
769func printSummary(logOptions logger.OutputOptions, outputFiles []OutputFile, start time.Time) {
770	var table logger.SummaryTable = make([]logger.SummaryTableEntry, len(outputFiles))
771
772	if len(outputFiles) > 0 {
773		if cwd, err := os.Getwd(); err == nil {
774			if realFS, err := fs.RealFS(fs.RealFSOptions{AbsWorkingDir: cwd}); err == nil {
775				for i, file := range outputFiles {
776					path, ok := realFS.Rel(realFS.Cwd(), file.Path)
777					if !ok {
778						path = file.Path
779					}
780					base := realFS.Base(path)
781					n := len(file.Contents)
782					table[i] = logger.SummaryTableEntry{
783						Dir:         path[:len(path)-len(base)],
784						Base:        base,
785						Size:        prettyPrintByteCount(n),
786						Bytes:       n,
787						IsSourceMap: strings.HasSuffix(base, ".map"),
788					}
789				}
790			}
791		}
792	}
793
794	// Don't print the time taken by the build if we're running under Yarn 1
795	// since Yarn 1 always prints its own copy of the time taken by each command
796	for _, env := range os.Environ() {
797		if strings.HasPrefix(env, "npm_config_user_agent=") && strings.Contains(env, "yarn/1.") {
798			logger.PrintSummary(logOptions.Color, table, nil)
799			return
800		}
801	}
802
803	logger.PrintSummary(logOptions.Color, table, &start)
804}
805
806func rebuildImpl(
807	buildOpts BuildOptions,
808	caches *cache.CacheSet,
809	plugins []config.Plugin,
810	finalizeBuildOptions func(*config.Options),
811	onEndCallbacks []func(*BuildResult),
812	logOptions logger.OutputOptions,
813	log logger.Log,
814	isRebuild bool,
815) internalBuildResult {
816	// Convert and validate the buildOpts
817	realFS, err := fs.RealFS(fs.RealFSOptions{
818		AbsWorkingDir: buildOpts.AbsWorkingDir,
819		WantWatchData: buildOpts.Watch != nil,
820	})
821	if err != nil {
822		// This should already have been checked above
823		panic(err.Error())
824	}
825	targetFromAPI, jsFeatures, cssFeatures, targetEnv := validateFeatures(log, buildOpts.Target, buildOpts.Engines)
826	outJS, outCSS := validateOutputExtensions(log, buildOpts.OutExtensions)
827	bannerJS, bannerCSS := validateBannerOrFooter(log, "banner", buildOpts.Banner)
828	footerJS, footerCSS := validateBannerOrFooter(log, "footer", buildOpts.Footer)
829	minify := buildOpts.MinifyWhitespace && buildOpts.MinifyIdentifiers && buildOpts.MinifySyntax
830	defines, injectedDefines := validateDefines(log, buildOpts.Define, buildOpts.Pure, buildOpts.Platform, minify)
831	options := config.Options{
832		TargetFromAPI:          targetFromAPI,
833		UnsupportedJSFeatures:  jsFeatures,
834		UnsupportedCSSFeatures: cssFeatures,
835		OriginalTargetEnv:      targetEnv,
836		JSX: config.JSXOptions{
837			Preserve: buildOpts.JSXMode == JSXModePreserve,
838			Factory:  validateJSXExpr(log, buildOpts.JSXFactory, "factory", js_parser.JSXFactory),
839			Fragment: validateJSXExpr(log, buildOpts.JSXFragment, "fragment", js_parser.JSXFragment),
840		},
841		Defines:               defines,
842		InjectedDefines:       injectedDefines,
843		Platform:              validatePlatform(buildOpts.Platform),
844		SourceMap:             validateSourceMap(buildOpts.Sourcemap),
845		LegalComments:         validateLegalComments(buildOpts.LegalComments, buildOpts.Bundle),
846		SourceRoot:            buildOpts.SourceRoot,
847		ExcludeSourcesContent: buildOpts.SourcesContent == SourcesContentExclude,
848		MangleSyntax:          buildOpts.MinifySyntax,
849		RemoveWhitespace:      buildOpts.MinifyWhitespace,
850		MinifyIdentifiers:     buildOpts.MinifyIdentifiers,
851		AllowOverwrite:        buildOpts.AllowOverwrite,
852		ASCIIOnly:             validateASCIIOnly(buildOpts.Charset),
853		IgnoreDCEAnnotations:  buildOpts.IgnoreAnnotations,
854		TreeShaking:           validateTreeShaking(buildOpts.TreeShaking, buildOpts.Bundle, buildOpts.Format),
855		GlobalName:            validateGlobalName(log, buildOpts.GlobalName),
856		CodeSplitting:         buildOpts.Splitting,
857		OutputFormat:          validateFormat(buildOpts.Format),
858		AbsOutputFile:         validatePath(log, realFS, buildOpts.Outfile, "outfile path"),
859		AbsOutputDir:          validatePath(log, realFS, buildOpts.Outdir, "outdir path"),
860		AbsOutputBase:         validatePath(log, realFS, buildOpts.Outbase, "outbase path"),
861		NeedsMetafile:         buildOpts.Metafile,
862		EntryPathTemplate:     validatePathTemplate(buildOpts.EntryNames),
863		ChunkPathTemplate:     validatePathTemplate(buildOpts.ChunkNames),
864		AssetPathTemplate:     validatePathTemplate(buildOpts.AssetNames),
865		OutputExtensionJS:     outJS,
866		OutputExtensionCSS:    outCSS,
867		ExtensionToLoader:     validateLoaders(log, buildOpts.Loader),
868		ExtensionOrder:        validateResolveExtensions(log, buildOpts.ResolveExtensions),
869		ExternalModules:       validateExternals(log, realFS, buildOpts.External),
870		TsConfigOverride:      validatePath(log, realFS, buildOpts.Tsconfig, "tsconfig path"),
871		MainFields:            buildOpts.MainFields,
872		Conditions:            append([]string{}, buildOpts.Conditions...),
873		PublicPath:            buildOpts.PublicPath,
874		KeepNames:             buildOpts.KeepNames,
875		InjectAbsPaths:        make([]string, len(buildOpts.Inject)),
876		AbsNodePaths:          make([]string, len(buildOpts.NodePaths)),
877		JSBanner:              bannerJS,
878		JSFooter:              footerJS,
879		CSSBanner:             bannerCSS,
880		CSSFooter:             footerCSS,
881		PreserveSymlinks:      buildOpts.PreserveSymlinks,
882		WatchMode:             buildOpts.Watch != nil,
883		Plugins:               plugins,
884	}
885	if options.MainFields != nil {
886		options.MainFields = append([]string{}, options.MainFields...)
887	}
888	for i, path := range buildOpts.Inject {
889		options.InjectAbsPaths[i] = validatePath(log, realFS, path, "inject path")
890	}
891	for i, path := range buildOpts.NodePaths {
892		options.AbsNodePaths[i] = validatePath(log, realFS, path, "node path")
893	}
894	entryPoints := make([]bundler.EntryPoint, 0, len(buildOpts.EntryPoints)+len(buildOpts.EntryPointsAdvanced))
895	for _, ep := range buildOpts.EntryPoints {
896		entryPoints = append(entryPoints, bundler.EntryPoint{InputPath: ep})
897	}
898	for _, ep := range buildOpts.EntryPointsAdvanced {
899		entryPoints = append(entryPoints, bundler.EntryPoint{InputPath: ep.InputPath, OutputPath: ep.OutputPath})
900	}
901	entryPointCount := len(entryPoints)
902	if buildOpts.Stdin != nil {
903		entryPointCount++
904		options.Stdin = &config.StdinInfo{
905			Loader:        validateLoader(buildOpts.Stdin.Loader),
906			Contents:      buildOpts.Stdin.Contents,
907			SourceFile:    buildOpts.Stdin.Sourcefile,
908			AbsResolveDir: validatePath(log, realFS, buildOpts.Stdin.ResolveDir, "resolve directory path"),
909		}
910	}
911
912	if options.AbsOutputDir == "" && entryPointCount > 1 {
913		log.Add(logger.Error, nil, logger.Range{},
914			"Must use \"outdir\" when there are multiple input files")
915	} else if options.AbsOutputDir == "" && options.CodeSplitting {
916		log.Add(logger.Error, nil, logger.Range{},
917			"Must use \"outdir\" when code splitting is enabled")
918	} else if options.AbsOutputFile != "" && options.AbsOutputDir != "" {
919		log.Add(logger.Error, nil, logger.Range{}, "Cannot use both \"outfile\" and \"outdir\"")
920	} else if options.AbsOutputFile != "" {
921		// If the output file is specified, use it to derive the output directory
922		options.AbsOutputDir = realFS.Dir(options.AbsOutputFile)
923	} else if options.AbsOutputDir == "" {
924		options.WriteToStdout = true
925
926		// Forbid certain features when writing to stdout
927		if options.SourceMap != config.SourceMapNone && options.SourceMap != config.SourceMapInline {
928			log.Add(logger.Error, nil, logger.Range{}, "Cannot use an external source map without an output path")
929		}
930		if options.LegalComments.HasExternalFile() {
931			log.Add(logger.Error, nil, logger.Range{}, "Cannot use linked or external legal comments without an output path")
932		}
933		for _, loader := range options.ExtensionToLoader {
934			if loader == config.LoaderFile {
935				log.Add(logger.Error, nil, logger.Range{}, "Cannot use the \"file\" loader without an output path")
936				break
937			}
938		}
939
940		// Use the current directory as the output directory instead of an empty
941		// string because external modules with relative paths need a base directory.
942		options.AbsOutputDir = realFS.Cwd()
943	}
944
945	if !buildOpts.Bundle {
946		// Disallow bundle-only options when not bundling
947		if len(options.ExternalModules.NodeModules) > 0 || len(options.ExternalModules.AbsPaths) > 0 {
948			log.Add(logger.Error, nil, logger.Range{}, "Cannot use \"external\" without \"bundle\"")
949		}
950	} else if options.OutputFormat == config.FormatPreserve {
951		// If the format isn't specified, set the default format using the platform
952		switch options.Platform {
953		case config.PlatformBrowser:
954			options.OutputFormat = config.FormatIIFE
955		case config.PlatformNode:
956			options.OutputFormat = config.FormatCommonJS
957		case config.PlatformNeutral:
958			options.OutputFormat = config.FormatESModule
959		}
960	}
961
962	// Set the output mode using other settings
963	if buildOpts.Bundle {
964		options.Mode = config.ModeBundle
965	} else if options.OutputFormat != config.FormatPreserve {
966		options.Mode = config.ModeConvertFormat
967	}
968
969	// Code splitting is experimental and currently only enabled for ES6 modules
970	if options.CodeSplitting && options.OutputFormat != config.FormatESModule {
971		log.Add(logger.Error, nil, logger.Range{}, "Splitting currently only works with the \"esm\" format")
972	}
973
974	var outputFiles []OutputFile
975	var metafileJSON string
976	var watchData fs.WatchData
977
978	// Stop now if there were errors
979	resolver := resolver.NewResolver(realFS, log, caches, options)
980	if !log.HasErrors() {
981		var timer *helpers.Timer
982		if api_helpers.UseTimer {
983			timer = &helpers.Timer{}
984		}
985
986		// Finalize the build options, which will enable API methods that need them such as the "resolve" API
987		if finalizeBuildOptions != nil {
988			finalizeBuildOptions(&options)
989		}
990
991		// Scan over the bundle
992		bundle := bundler.ScanBundle(log, realFS, resolver, caches, entryPoints, options, timer)
993		watchData = realFS.WatchData()
994
995		// Stop now if there were errors
996		if !log.HasErrors() {
997			// Compile the bundle
998			results, metafile := bundle.Compile(log, options, timer)
999
1000			// Stop now if there were errors
1001			if !log.HasErrors() {
1002				metafileJSON = metafile
1003
1004				// Flush any deferred warnings now
1005				log.AlmostDone()
1006
1007				if buildOpts.Write {
1008					timer.Begin("Write output files")
1009					if options.WriteToStdout {
1010						// Special-case writing to stdout
1011						if len(results) != 1 {
1012							log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf(
1013								"Internal error: did not expect to generate %d files when writing to stdout", len(results)))
1014						} else if _, err := os.Stdout.Write(results[0].Contents); err != nil {
1015							log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf(
1016								"Failed to write to stdout: %s", err.Error()))
1017						}
1018					} else {
1019						// Write out files in parallel
1020						waitGroup := sync.WaitGroup{}
1021						waitGroup.Add(len(results))
1022						for _, result := range results {
1023							go func(result graph.OutputFile) {
1024								fs.BeforeFileOpen()
1025								defer fs.AfterFileClose()
1026								if err := fs.MkdirAll(realFS, realFS.Dir(result.AbsPath), 0755); err != nil {
1027									log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf(
1028										"Failed to create output directory: %s", err.Error()))
1029								} else {
1030									var mode os.FileMode = 0644
1031									if result.IsExecutable {
1032										mode = 0755
1033									}
1034									if err := ioutil.WriteFile(result.AbsPath, result.Contents, mode); err != nil {
1035										log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf(
1036											"Failed to write to output file: %s", err.Error()))
1037									}
1038								}
1039								waitGroup.Done()
1040							}(result)
1041						}
1042						waitGroup.Wait()
1043					}
1044					timer.End("Write output files")
1045				}
1046
1047				// Return the results
1048				outputFiles = make([]OutputFile, len(results))
1049				for i, result := range results {
1050					if options.WriteToStdout {
1051						result.AbsPath = "<stdout>"
1052					}
1053					outputFiles[i] = OutputFile{
1054						Path:     result.AbsPath,
1055						Contents: result.Contents,
1056					}
1057				}
1058			}
1059		}
1060
1061		timer.Log(log)
1062	}
1063
1064	// End the log now, which may print a message
1065	msgs := log.Done()
1066
1067	// Start watching, but only for the top-level build
1068	var watch *watcher
1069	var stop func()
1070	if buildOpts.Watch != nil && !isRebuild {
1071		onRebuild := buildOpts.Watch.OnRebuild
1072		watch = &watcher{
1073			data:     watchData,
1074			resolver: resolver,
1075			rebuild: func() fs.WatchData {
1076				value := rebuildImpl(buildOpts, caches, plugins, nil, onEndCallbacks, logOptions, logger.NewStderrLog(logOptions), true /* isRebuild */)
1077				if onRebuild != nil {
1078					go onRebuild(value.result)
1079				}
1080				return value.watchData
1081			},
1082		}
1083		mode := *buildOpts.Watch
1084		watch.start(buildOpts.LogLevel, buildOpts.Color, mode)
1085		stop = func() {
1086			watch.stop()
1087		}
1088	}
1089
1090	var rebuild func() BuildResult
1091	if buildOpts.Incremental {
1092		rebuild = func() BuildResult {
1093			value := rebuildImpl(buildOpts, caches, plugins, nil, onEndCallbacks, logOptions, logger.NewStderrLog(logOptions), true /* isRebuild */)
1094			if watch != nil {
1095				watch.setWatchData(value.watchData)
1096			}
1097			return value.result
1098		}
1099	}
1100
1101	result := BuildResult{
1102		Errors:      convertMessagesToPublic(logger.Error, msgs),
1103		Warnings:    convertMessagesToPublic(logger.Warning, msgs),
1104		OutputFiles: outputFiles,
1105		Metafile:    metafileJSON,
1106		Rebuild:     rebuild,
1107		Stop:        stop,
1108	}
1109
1110	for _, onEnd := range onEndCallbacks {
1111		onEnd(&result)
1112	}
1113
1114	return internalBuildResult{
1115		result:    result,
1116		options:   options,
1117		watchData: watchData,
1118	}
1119}
1120
// watcher polls the file system in a background goroutine and triggers a
// rebuild when a watched path reports a change. Polling (rather than native
// file system events) is used; see "tryToFindDirtyPath" for the scan strategy.
type watcher struct {
	mutex             sync.Mutex // Guards the fields below; "shouldStop" uses atomics instead
	data              fs.WatchData // Watched paths from the most recent build
	resolver          resolver.Resolver // Used to pretty-print changed paths in log messages
	shouldStop        int32 // Set to 1 atomically by "stop" to end the polling loop
	rebuild           func() fs.WatchData // Runs a rebuild and returns the new watch data
	recentItems       []string // Recently-dirty paths, re-checked on every interval
	itemsToScan       []string // Paths not yet checked in the current randomized sweep
	itemsPerIteration int // How many entries of "itemsToScan" to check per interval
}
1131
1132func (w *watcher) setWatchData(data fs.WatchData) {
1133	defer w.mutex.Unlock()
1134	w.mutex.Lock()
1135	w.data = data
1136	w.itemsToScan = w.itemsToScan[:0] // Reuse memory
1137
1138	// Remove any recent items that weren't a part of the latest build
1139	end := 0
1140	for _, path := range w.recentItems {
1141		if data.Paths[path] != nil {
1142			w.recentItems[end] = path
1143			end++
1144		}
1145	}
1146	w.recentItems = w.recentItems[:end]
1147}
1148
1149// The time to wait between watch intervals
1150const watchIntervalSleep = 100 * time.Millisecond
1151
1152// The maximum number of recently-edited items to check every interval
1153const maxRecentItemCount = 16
1154
1155// The minimum number of non-recent items to check every interval
1156const minItemCountPerIter = 64
1157
1158// The maximum number of intervals before a change is detected
1159const maxIntervalsBeforeUpdate = 20
1160
1161func (w *watcher) start(logLevel LogLevel, color StderrColor, mode WatchMode) {
1162	useColor := validateColor(color)
1163
1164	go func() {
1165		shouldLog := logLevel == LogLevelInfo || logLevel == LogLevelDebug
1166
1167		// Note: Do not change these log messages without a breaking version change.
1168		// People want to run regexes over esbuild's stderr stream to look for these
1169		// messages instead of using esbuild's API.
1170
1171		if shouldLog {
1172			logger.PrintTextWithColor(os.Stderr, useColor, func(colors logger.Colors) string {
1173				return fmt.Sprintf("%s[watch] build finished, watching for changes...%s\n", colors.Dim, colors.Reset)
1174			})
1175		}
1176
1177		for atomic.LoadInt32(&w.shouldStop) == 0 {
1178			// Sleep for the watch interval
1179			time.Sleep(watchIntervalSleep)
1180
1181			// Rebuild if we're dirty
1182			if absPath := w.tryToFindDirtyPath(); absPath != "" {
1183				if shouldLog {
1184					logger.PrintTextWithColor(os.Stderr, useColor, func(colors logger.Colors) string {
1185						prettyPath := w.resolver.PrettyPath(logger.Path{Text: absPath, Namespace: "file"})
1186						return fmt.Sprintf("%s[watch] build started (change: %q)%s\n", colors.Dim, prettyPath, colors.Reset)
1187					})
1188				}
1189
1190				// Run the build
1191				w.setWatchData(w.rebuild())
1192
1193				if shouldLog {
1194					logger.PrintTextWithColor(os.Stderr, useColor, func(colors logger.Colors) string {
1195						return fmt.Sprintf("%s[watch] build finished%s\n", colors.Dim, colors.Reset)
1196					})
1197				}
1198			}
1199		}
1200	}()
1201}
1202
// stop signals the polling goroutine started by "start" to exit. The flag is
// only checked once per interval, so the goroutine may keep running briefly
// (and may finish an in-progress rebuild) after this returns.
func (w *watcher) stop() {
	atomic.StoreInt32(&w.shouldStop, 1)
}
1206
1207func (w *watcher) tryToFindDirtyPath() string {
1208	defer w.mutex.Unlock()
1209	w.mutex.Lock()
1210
1211	// If we ran out of items to scan, fill the items back up in a random order
1212	if len(w.itemsToScan) == 0 {
1213		items := w.itemsToScan[:0] // Reuse memory
1214		for path := range w.data.Paths {
1215			items = append(items, path)
1216		}
1217		rand.Seed(time.Now().UnixNano())
1218		for i := int32(len(items) - 1); i > 0; i-- { // Fisher-Yates shuffle
1219			j := rand.Int31n(i + 1)
1220			items[i], items[j] = items[j], items[i]
1221		}
1222		w.itemsToScan = items
1223
1224		// Determine how many items to check every iteration, rounded up
1225		perIter := (len(items) + maxIntervalsBeforeUpdate - 1) / maxIntervalsBeforeUpdate
1226		if perIter < minItemCountPerIter {
1227			perIter = minItemCountPerIter
1228		}
1229		w.itemsPerIteration = perIter
1230	}
1231
1232	// Always check all recent items every iteration
1233	for i, path := range w.recentItems {
1234		if dirtyPath := w.data.Paths[path](); dirtyPath != "" {
1235			// Move this path to the back of the list (i.e. the "most recent" position)
1236			copy(w.recentItems[i:], w.recentItems[i+1:])
1237			w.recentItems[len(w.recentItems)-1] = path
1238			return dirtyPath
1239		}
1240	}
1241
1242	// Check a constant number of items every iteration
1243	remainingCount := len(w.itemsToScan) - w.itemsPerIteration
1244	if remainingCount < 0 {
1245		remainingCount = 0
1246	}
1247	toCheck, remaining := w.itemsToScan[remainingCount:], w.itemsToScan[:remainingCount]
1248	w.itemsToScan = remaining
1249
1250	// Check if any of the entries in this iteration have been modified
1251	for _, path := range toCheck {
1252		if dirtyPath := w.data.Paths[path](); dirtyPath != "" {
1253			// Mark this item as recent by adding it to the back of the list
1254			w.recentItems = append(w.recentItems, path)
1255			if len(w.recentItems) > maxRecentItemCount {
1256				// Remove items from the front of the list when we hit the limit
1257				copy(w.recentItems, w.recentItems[1:])
1258				w.recentItems = w.recentItems[:maxRecentItemCount]
1259			}
1260			return dirtyPath
1261		}
1262	}
1263	return ""
1264}
1265
1266////////////////////////////////////////////////////////////////////////////////
1267// Transform API
1268
// transformImpl implements the Transform API: it compiles a single in-memory
// string without touching the real file system. Internally it runs the full
// bundler pipeline over a mock file system with the input supplied as stdin,
// then unpacks the single output file (plus an optional source map) from the
// results. Errors and warnings are collected via the stderr log and returned
// in the TransformResult.
func transformImpl(input string, transformOpts TransformOptions) TransformResult {
	log := logger.NewStderrLog(logger.OutputOptions{
		IncludeSource: true,
		MessageLimit:  transformOpts.LogLimit,
		Color:         validateColor(transformOpts.Color),
		LogLevel:      validateLogLevel(transformOpts.LogLevel),
	})

	// Settings from the user come first
	unusedImportsTS := config.UnusedImportsRemoveStmt
	useDefineForClassFieldsTS := config.Unspecified
	jsx := config.JSXOptions{
		Preserve: transformOpts.JSXMode == JSXModePreserve,
		Factory:  validateJSXExpr(log, transformOpts.JSXFactory, "factory", js_parser.JSXFactory),
		Fragment: validateJSXExpr(log, transformOpts.JSXFragment, "fragment", js_parser.JSXFragment),
	}

	// Settings from "tsconfig.json" override those. The raw tsconfig contents
	// are parsed directly since there is no file system to resolve them from.
	var tsTarget *config.TSTarget
	caches := cache.MakeCacheSet()
	if transformOpts.TsconfigRaw != "" {
		source := logger.Source{
			KeyPath:    logger.Path{Text: "tsconfig.json"},
			PrettyPath: "tsconfig.json",
			Contents:   transformOpts.TsconfigRaw,
		}
		if result := resolver.ParseTSConfigJSON(log, source, &caches.JSONCache, nil); result != nil {
			if len(result.JSXFactory) > 0 {
				jsx.Factory = config.JSXExpr{Parts: result.JSXFactory}
			}
			if len(result.JSXFragmentFactory) > 0 {
				jsx.Fragment = config.JSXExpr{Parts: result.JSXFragmentFactory}
			}
			if result.UseDefineForClassFields != config.Unspecified {
				useDefineForClassFieldsTS = result.UseDefineForClassFields
			}
			unusedImportsTS = config.UnusedImportsFromTsconfigValues(
				result.PreserveImportsNotUsedAsValues,
				result.PreserveValueImports,
			)
			tsTarget = result.TSTarget
		}
	}

	// Apply default values
	if transformOpts.Sourcefile == "" {
		transformOpts.Sourcefile = "<stdin>"
	}
	if transformOpts.Loader == LoaderNone {
		transformOpts.Loader = LoaderJS
	}

	// Convert and validate the transformOpts
	targetFromAPI, jsFeatures, cssFeatures, targetEnv := validateFeatures(log, transformOpts.Target, transformOpts.Engines)
	defines, injectedDefines := validateDefines(log, transformOpts.Define, transformOpts.Pure, PlatformNeutral, false /* minify */)
	options := config.Options{
		TargetFromAPI:           targetFromAPI,
		UnsupportedJSFeatures:   jsFeatures,
		UnsupportedCSSFeatures:  cssFeatures,
		OriginalTargetEnv:       targetEnv,
		TSTarget:                tsTarget,
		JSX:                     jsx,
		Defines:                 defines,
		InjectedDefines:         injectedDefines,
		SourceMap:               validateSourceMap(transformOpts.Sourcemap),
		LegalComments:           validateLegalComments(transformOpts.LegalComments, false /* bundle */),
		SourceRoot:              transformOpts.SourceRoot,
		ExcludeSourcesContent:   transformOpts.SourcesContent == SourcesContentExclude,
		OutputFormat:            validateFormat(transformOpts.Format),
		GlobalName:              validateGlobalName(log, transformOpts.GlobalName),
		MangleSyntax:            transformOpts.MinifySyntax,
		RemoveWhitespace:        transformOpts.MinifyWhitespace,
		MinifyIdentifiers:       transformOpts.MinifyIdentifiers,
		ASCIIOnly:               validateASCIIOnly(transformOpts.Charset),
		IgnoreDCEAnnotations:    transformOpts.IgnoreAnnotations,
		TreeShaking:             validateTreeShaking(transformOpts.TreeShaking, false /* bundle */, transformOpts.Format),
		// NOTE(review): nothing is written to disk here; this synthetic name
		// presumably only influences derived output names (e.g. the source
		// map) — confirm before relying on it
		AbsOutputFile:           transformOpts.Sourcefile + "-out",
		KeepNames:               transformOpts.KeepNames,
		UseDefineForClassFields: useDefineForClassFieldsTS,
		UnusedImportsTS:         unusedImportsTS,
		Stdin: &config.StdinInfo{
			Loader:     validateLoader(transformOpts.Loader),
			Contents:   input,
			SourceFile: transformOpts.Sourcefile,
		},
	}
	// Route the banner/footer to whichever language is being emitted
	if options.Stdin.Loader == config.LoaderCSS {
		options.CSSBanner = transformOpts.Banner
		options.CSSFooter = transformOpts.Footer
	} else {
		options.JSBanner = transformOpts.Banner
		options.JSFooter = transformOpts.Footer
	}
	if options.SourceMap == config.SourceMapLinkedWithComment {
		// Linked source maps don't make sense because there's no output file name
		log.Add(logger.Error, nil, logger.Range{}, "Cannot transform with linked source maps")
	}
	if options.SourceMap != config.SourceMapNone && options.Stdin.SourceFile == "" {
		log.Add(logger.Error, nil, logger.Range{},
			"Must use \"sourcefile\" with \"sourcemap\" to set the original file name")
	}
	if options.LegalComments.HasExternalFile() {
		log.Add(logger.Error, nil, logger.Range{}, "Cannot transform with linked or external legal comments")
	}

	// Set the output mode using other settings
	if options.OutputFormat != config.FormatPreserve {
		options.Mode = config.ModeConvertFormat
	}

	var results []graph.OutputFile

	// Stop now if there were errors
	if !log.HasErrors() {
		var timer *helpers.Timer
		if api_helpers.UseTimer {
			timer = &helpers.Timer{}
		}

		// Scan over the bundle using a mock (empty, in-memory) file system
		mockFS := fs.MockFS(make(map[string]string))
		resolver := resolver.NewResolver(mockFS, log, caches, options)
		bundle := bundler.ScanBundle(log, mockFS, resolver, caches, nil, options, timer)

		// Stop now if there were errors
		if !log.HasErrors() {
			// Compile the bundle
			results, _ = bundle.Compile(log, options, timer)
		}

		timer.Log(log)
	}

	// Return the results
	var code []byte
	var sourceMap []byte

	// Unpack the JavaScript file and the source map file. With two results the
	// source map is identified by its ".map" suffix relative to the other file.
	if len(results) == 1 {
		code = results[0].Contents
	} else if len(results) == 2 {
		a, b := results[0], results[1]
		if a.AbsPath == b.AbsPath+".map" {
			sourceMap, code = a.Contents, b.Contents
		} else if a.AbsPath+".map" == b.AbsPath {
			code, sourceMap = a.Contents, b.Contents
		}
	}

	msgs := log.Done()
	return TransformResult{
		Errors:   convertMessagesToPublic(logger.Error, msgs),
		Warnings: convertMessagesToPublic(logger.Warning, msgs),
		Code:     code,
		Map:      sourceMap,
	}
}
1426
1427////////////////////////////////////////////////////////////////////////////////
1428// Plugin API
1429
// pluginImpl holds the internal state backing one user plugin. Its methods
// ("onStart", "onResolve", "onLoad") are handed to the plugin's setup
// function and translate between the public API types and the internal
// "config" package types.
type pluginImpl struct {
	log    logger.Log // Destination for plugin setup/validation errors
	fs     fs.FS // Used to validate paths returned by plugin callbacks
	plugin config.Plugin // The accumulated internal plugin definition
}
1435
1436func (impl *pluginImpl) onStart(callback func() (OnStartResult, error)) {
1437	impl.plugin.OnStart = append(impl.plugin.OnStart, config.OnStart{
1438		Name: impl.plugin.Name,
1439		Callback: func() (result config.OnStartResult) {
1440			response, err := callback()
1441
1442			if err != nil {
1443				result.ThrownError = err
1444				return
1445			}
1446
1447			// Convert log messages
1448			if len(response.Errors)+len(response.Warnings) > 0 {
1449				msgs := make(logger.SortableMsgs, 0, len(response.Errors)+len(response.Warnings))
1450				msgs = convertMessagesToInternal(msgs, logger.Error, response.Errors)
1451				msgs = convertMessagesToInternal(msgs, logger.Warning, response.Warnings)
1452				sort.Stable(msgs)
1453				result.Msgs = msgs
1454			}
1455			return
1456		},
1457	})
1458}
1459
// importKindToResolveKind maps an internal AST import kind to the public
// plugin API's ResolveKind. An unknown kind panics, which indicates a bug in
// esbuild (a new ast.ImportKind missing a case here) rather than user error.
func importKindToResolveKind(kind ast.ImportKind) ResolveKind {
	switch kind {
	case ast.ImportEntryPoint:
		return ResolveEntryPoint
	case ast.ImportStmt:
		return ResolveJSImportStatement
	case ast.ImportRequire:
		return ResolveJSRequireCall
	case ast.ImportDynamic:
		return ResolveJSDynamicImport
	case ast.ImportRequireResolve:
		return ResolveJSRequireResolve
	// Both conditional and unconditional CSS "@import" rules collapse into
	// the same public kind
	case ast.ImportAt, ast.ImportAtConditional:
		return ResolveCSSImportRule
	case ast.ImportURL:
		return ResolveCSSURLToken
	default:
		panic("Internal error")
	}
}
1480
// resolveKindToImportKind is the inverse of "importKindToResolveKind". Note
// the mapping is lossy in one direction: ResolveCSSImportRule always maps
// back to ast.ImportAt, never ast.ImportAtConditional. An unknown kind
// panics, indicating a bug in esbuild rather than user error.
func resolveKindToImportKind(kind ResolveKind) ast.ImportKind {
	switch kind {
	case ResolveEntryPoint:
		return ast.ImportEntryPoint
	case ResolveJSImportStatement:
		return ast.ImportStmt
	case ResolveJSRequireCall:
		return ast.ImportRequire
	case ResolveJSDynamicImport:
		return ast.ImportDynamic
	case ResolveJSRequireResolve:
		return ast.ImportRequireResolve
	case ResolveCSSImportRule:
		return ast.ImportAt
	case ResolveCSSURLToken:
		return ast.ImportURL
	default:
		panic("Internal error")
	}
}
1501
// onResolve registers a path-resolution callback for this plugin. The filter
// regex is compiled eagerly so an invalid filter is reported during plugin
// setup instead of at resolve time. The registered callback adapts between
// the internal "config" types and the public API types.
func (impl *pluginImpl) onResolve(options OnResolveOptions, callback func(OnResolveArgs) (OnResolveResult, error)) {
	filter, err := config.CompileFilterForPlugin(impl.plugin.Name, "OnResolve", options.Filter)
	if filter == nil {
		// A nil filter means compilation failed; report the error and skip
		// registering the callback
		impl.log.Add(logger.Error, nil, logger.Range{}, err.Error())
		return
	}

	impl.plugin.OnResolve = append(impl.plugin.OnResolve, config.OnResolve{
		Name:      impl.plugin.Name,
		Filter:    filter,
		Namespace: options.Namespace,
		Callback: func(args config.OnResolveArgs) (result config.OnResolveResult) {
			response, err := callback(OnResolveArgs{
				Path:       args.Path,
				Importer:   args.Importer.Text,
				Namespace:  args.Importer.Namespace,
				ResolveDir: args.ResolveDir,
				Kind:       importKindToResolveKind(args.Kind),
				PluginData: args.PluginData,
			})
			result.PluginName = response.PluginName
			// Note: watch files/directories are validated even when the
			// callback returned an error
			result.AbsWatchFiles = impl.validatePathsArray(response.WatchFiles, "watch file")
			result.AbsWatchDirs = impl.validatePathsArray(response.WatchDirs, "watch directory")

			// Restrict the suffix to start with "?" or "#" for now to match esbuild's behavior
			if err == nil && response.Suffix != "" && response.Suffix[0] != '?' && response.Suffix[0] != '#' {
				err = fmt.Errorf("Invalid path suffix %q returned from plugin (must start with \"?\" or \"#\")", response.Suffix)
			}

			if err != nil {
				result.ThrownError = err
				return
			}

			result.Path = logger.Path{
				Text:          response.Path,
				Namespace:     response.Namespace,
				IgnoredSuffix: response.Suffix,
			}
			result.External = response.External
			result.IsSideEffectFree = response.SideEffects == SideEffectsFalse
			result.PluginData = response.PluginData

			// Convert log messages, sorting errors before warnings stably
			if len(response.Errors)+len(response.Warnings) > 0 {
				msgs := make(logger.SortableMsgs, 0, len(response.Errors)+len(response.Warnings))
				msgs = convertMessagesToInternal(msgs, logger.Error, response.Errors)
				msgs = convertMessagesToInternal(msgs, logger.Warning, response.Warnings)
				sort.Stable(msgs)
				result.Msgs = msgs
			}
			return
		},
	})
}
1557
// onLoad registers a content-loading callback for this plugin. The filter
// regex is compiled eagerly so an invalid filter is reported during plugin
// setup. The registered callback adapts between the internal "config" types
// and the public API types.
func (impl *pluginImpl) onLoad(options OnLoadOptions, callback func(OnLoadArgs) (OnLoadResult, error)) {
	filter, err := config.CompileFilterForPlugin(impl.plugin.Name, "OnLoad", options.Filter)
	if filter == nil {
		// A nil filter means compilation failed; report the error and skip
		// registering the callback
		impl.log.Add(logger.Error, nil, logger.Range{}, err.Error())
		return
	}

	// NOTE(review): unlike "onResolve", no "Name" field is set here —
	// confirm whether config.OnLoad carries the plugin name elsewhere
	impl.plugin.OnLoad = append(impl.plugin.OnLoad, config.OnLoad{
		Filter:    filter,
		Namespace: options.Namespace,
		Callback: func(args config.OnLoadArgs) (result config.OnLoadResult) {
			response, err := callback(OnLoadArgs{
				Path:       args.Path.Text,
				Namespace:  args.Path.Namespace,
				PluginData: args.PluginData,
				Suffix:     args.Path.IgnoredSuffix,
			})
			result.PluginName = response.PluginName
			// Note: watch files/directories are validated even when the
			// callback returned an error
			result.AbsWatchFiles = impl.validatePathsArray(response.WatchFiles, "watch file")
			result.AbsWatchDirs = impl.validatePathsArray(response.WatchDirs, "watch directory")

			if err != nil {
				result.ThrownError = err
				return
			}

			result.Contents = response.Contents
			result.Loader = validateLoader(response.Loader)
			result.PluginData = response.PluginData
			pathKind := fmt.Sprintf("resolve directory path for plugin %q", impl.plugin.Name)
			if absPath := validatePath(impl.log, impl.fs, response.ResolveDir, pathKind); absPath != "" {
				result.AbsResolveDir = absPath
			}

			// Convert log messages, sorting errors before warnings stably
			if len(response.Errors)+len(response.Warnings) > 0 {
				msgs := make(logger.SortableMsgs, 0, len(response.Errors)+len(response.Warnings))
				msgs = convertMessagesToInternal(msgs, logger.Error, response.Errors)
				msgs = convertMessagesToInternal(msgs, logger.Warning, response.Warnings)
				sort.Stable(msgs)
				result.Msgs = msgs
			}
			return
		},
	})
}
1604
1605func (impl *pluginImpl) validatePathsArray(pathsIn []string, name string) (pathsOut []string) {
1606	if len(pathsIn) > 0 {
1607		pathKind := fmt.Sprintf("%s path for plugin %q", name, impl.plugin.Name)
1608		for _, relPath := range pathsIn {
1609			if absPath := validatePath(impl.log, impl.fs, relPath, pathKind); absPath != "" {
1610				pathsOut = append(pathsOut, absPath)
1611			}
1612		}
1613	}
1614	return
1615}
1616
// loadPlugins runs the setup function of every plugin in the initial options
// and returns the resulting internal plugin definitions, any registered
// "onEnd" callbacks, and a "finalizeBuildOptions" function that must be
// called once the build options are finalized (it enables the "resolve" API
// handed to plugins, which needs the finalized options for path resolution).
func loadPlugins(initialOptions *BuildOptions, fs fs.FS, log logger.Log, caches *cache.CacheSet) (
	plugins []config.Plugin,
	onEndCallbacks []func(*BuildResult),
	finalizeBuildOptions func(*config.Options),
) {
	onEnd := func(callback func(*BuildResult)) {
		onEndCallbacks = append(onEndCallbacks, callback)
	}

	// Clone the plugin array to guard against mutation during iteration
	clone := append(make([]Plugin, 0, len(initialOptions.Plugins)), initialOptions.Plugins...)

	// "optionsForResolve" stays nil until finalizeBuildOptions is called;
	// the mutex guards it because plugin "resolve" calls can be concurrent
	var resolveMutex sync.Mutex
	var optionsForResolve *config.Options

	// This is called when the build options are finalized. Only the first
	// call takes effect; later calls are ignored.
	finalizeBuildOptions = func(options *config.Options) {
		resolveMutex.Lock()
		if optionsForResolve == nil {
			optionsForResolve = options
		}
		resolveMutex.Unlock()
	}

	for i, item := range clone {
		if item.Name == "" {
			log.Add(logger.Error, nil, logger.Range{}, fmt.Sprintf("Plugin at index %d is missing a name", i))
			continue
		}

		impl := &pluginImpl{
			fs:     fs,
			log:    log,
			plugin: config.Plugin{Name: item.Name},
		}

		// "resolve" is the path-resolution helper exposed to this plugin's
		// setup function
		resolve := func(path string, options ResolveOptions) (result ResolveResult) {
			// Try to grab the resolver options
			resolveMutex.Lock()
			buildOptions := optionsForResolve
			resolveMutex.Unlock()

			// If we couldn't grab them, then this is being called before plugin setup
			// has finished. That isn't allowed because plugin setup is allowed to
			// change the initial options object, which can affect path resolution.
			if buildOptions == nil {
				return ResolveResult{Errors: []Message{{Text: "Cannot call \"resolve\" before plugin setup has completed"}}}
			}

			// Make a new resolver so it has its own log
			log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug)
			resolver := resolver.NewResolver(fs, log, caches, *buildOptions)

			// Make sure the resolve directory is an absolute path, which can fail
			absResolveDir := validatePath(log, fs, options.ResolveDir, "resolve directory")
			if log.HasErrors() {
				msgs := log.Done()
				result.Errors = convertMessagesToPublic(logger.Error, msgs)
				result.Warnings = convertMessagesToPublic(logger.Warning, msgs)
				return
			}

			// Run path resolution
			kind := resolveKindToImportKind(options.Kind)
			resolveResult, _, _ := bundler.RunOnResolvePlugins(
				plugins,
				resolver,
				log,
				fs,
				&caches.FSCache,
				nil,            // importSource
				logger.Range{}, // importPathRange
				logger.Path{Text: options.Importer, Namespace: options.Namespace},
				path,
				kind,
				absResolveDir,
				options.PluginData,
			)
			msgs := log.Done()

			// Populate the result
			result.Errors = convertMessagesToPublic(logger.Error, msgs)
			result.Warnings = convertMessagesToPublic(logger.Warning, msgs)
			if resolveResult != nil {
				result.Path = resolveResult.PathPair.Primary.Text
				result.External = resolveResult.IsExternal
				result.SideEffects = resolveResult.PrimarySideEffectsData == nil
				result.Namespace = resolveResult.PathPair.Primary.Namespace
				result.Suffix = resolveResult.PathPair.Primary.IgnoredSuffix
				result.PluginData = resolveResult.PluginData
			} else if len(result.Errors) == 0 {
				// Always fail with at least one error
				pluginName := item.Name
				if options.PluginName != "" {
					pluginName = options.PluginName
				}
				text, notes := bundler.ResolveFailureErrorTextAndNotes(resolver, path, kind, pluginName, fs, absResolveDir, buildOptions.Platform, "")
				result.Errors = append(result.Errors, convertMessagesToPublic(logger.Error, []logger.Msg{{
					Data:  logger.MsgData{Text: text},
					Notes: notes,
				}})...)
			}
			return
		}

		// Run this plugin's setup function, which registers its callbacks
		item.Setup(PluginBuild{
			InitialOptions: initialOptions,
			Resolve:        resolve,
			OnStart:        impl.onStart,
			OnEnd:          onEnd,
			OnResolve:      impl.onResolve,
			OnLoad:         impl.onLoad,
		})

		plugins = append(plugins, impl.plugin)
	}

	return
}
1736
1737////////////////////////////////////////////////////////////////////////////////
1738// FormatMessages API
1739
1740func formatMsgsImpl(msgs []Message, opts FormatMessagesOptions) []string {
1741	kind := logger.Error
1742	if opts.Kind == WarningMessage {
1743		kind = logger.Warning
1744	}
1745	logMsgs := convertMessagesToInternal(nil, kind, msgs)
1746	strings := make([]string, len(logMsgs))
1747	for i, msg := range logMsgs {
1748		strings[i] = msg.String(
1749			logger.OutputOptions{
1750				IncludeSource: true,
1751			},
1752			logger.TerminalInfo{
1753				UseColorEscapes: opts.Color,
1754				Width:           opts.TerminalWidth,
1755			},
1756		)
1757	}
1758	return strings
1759}
1760
1761////////////////////////////////////////////////////////////////////////////////
1762// AnalyzeMetafile API
1763
1764type metafileEntry struct {
1765	name       string
1766	entryPoint string
1767	entries    []metafileEntry
1768	size       int
1769}
1770
1771type metafileArray []metafileEntry
1772
1773func (a metafileArray) Len() int          { return len(a) }
1774func (a metafileArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }
1775
1776func (a metafileArray) Less(i int, j int) bool {
1777	ai := a[i]
1778	aj := a[j]
1779	return ai.size > aj.size || (ai.size == aj.size && ai.name < aj.name)
1780}
1781
1782func getObjectProperty(expr js_ast.Expr, key string) js_ast.Expr {
1783	if obj, ok := expr.Data.(*js_ast.EObject); ok {
1784		for _, prop := range obj.Properties {
1785			if js_lexer.UTF16EqualsString(prop.Key.Data.(*js_ast.EString).Value, key) {
1786				return prop.ValueOrNil
1787			}
1788		}
1789	}
1790	return js_ast.Expr{}
1791}
1792
1793func getObjectPropertyNumber(expr js_ast.Expr, key string) *js_ast.ENumber {
1794	value, _ := getObjectProperty(expr, key).Data.(*js_ast.ENumber)
1795	return value
1796}
1797
1798func getObjectPropertyString(expr js_ast.Expr, key string) *js_ast.EString {
1799	value, _ := getObjectProperty(expr, key).Data.(*js_ast.EString)
1800	return value
1801}
1802
1803func getObjectPropertyObject(expr js_ast.Expr, key string) *js_ast.EObject {
1804	value, _ := getObjectProperty(expr, key).Data.(*js_ast.EObject)
1805	return value
1806}
1807
1808func getObjectPropertyArray(expr js_ast.Expr, key string) *js_ast.EArray {
1809	value, _ := getObjectProperty(expr, key).Data.(*js_ast.EArray)
1810	return value
1811}
1812
// analyzeMetafileImpl renders a human-readable size breakdown of a JSON
// metafile string. It returns the formatted table, or "" if the metafile
// can't be parsed as JSON or has no "outputs" object. With "opts.Verbose"
// it also prints, under each input file, the chain of importers leading
// back to an entry point.
func analyzeMetafileImpl(metafile string, opts AnalyzeMetafileOptions) string {
	// Parse errors are deliberately discarded (deferred log is never printed);
	// a malformed metafile just produces an empty report
	log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug)
	source := logger.Source{Contents: metafile}

	if result, ok := js_parser.ParseJSON(log, source, js_parser.JSONOptions{}); ok {
		if outputs := getObjectPropertyObject(result, "outputs"); outputs != nil {
			var entries metafileArray
			var entryPoints []string

			// Scan over the "outputs" object
			for _, output := range outputs.Properties {
				// Skip source map outputs; only report real output files
				if key := js_lexer.UTF16ToString(output.Key.Data.(*js_ast.EString).Value); !strings.HasSuffix(key, ".map") {
					entryPointPath := ""
					if entryPoint := getObjectPropertyString(output.ValueOrNil, "entryPoint"); entryPoint != nil {
						entryPointPath = js_lexer.UTF16ToString(entryPoint.Value)
						entryPoints = append(entryPoints, entryPointPath)
					}

					if bytes := getObjectPropertyNumber(output.ValueOrNil, "bytes"); bytes != nil {
						if inputs := getObjectPropertyObject(output.ValueOrNil, "inputs"); inputs != nil {
							var children metafileArray

							// One child row per input file that contributed a
							// non-zero number of bytes to this output
							for _, input := range inputs.Properties {
								if bytesInOutput := getObjectPropertyNumber(input.ValueOrNil, "bytesInOutput"); bytesInOutput != nil && bytesInOutput.Value > 0 {
									children = append(children, metafileEntry{
										name: js_lexer.UTF16ToString(input.Key.Data.(*js_ast.EString).Value),
										size: int(bytesInOutput.Value),
									})
								}
							}

							// Largest inputs first (see metafileArray.Less)
							sort.Sort(children)

							entries = append(entries, metafileEntry{
								name:       key,
								size:       int(bytes.Value),
								entries:    children,
								entryPoint: entryPointPath,
							})
						}
					}
				}
			}

			// Largest output files first
			sort.Sort(entries)

			type importData struct {
				imports []string
			}

			// graphData records, for one input file, the importer it is
			// reached through ("parent") and its distance from an entry point
			type graphData struct {
				parent string
				depth  uint32
			}

			importsForPath := make(map[string]importData)

			// Scan over the "inputs" object
			if inputs := getObjectPropertyObject(result, "inputs"); inputs != nil {
				for _, prop := range inputs.Properties {
					if imports := getObjectPropertyArray(prop.ValueOrNil, "imports"); imports != nil {
						var data importData

						for _, item := range imports.Items {
							if path := getObjectPropertyString(item, "path"); path != nil {
								data.imports = append(data.imports, js_lexer.UTF16ToString(path.Value))
							}
						}

						importsForPath[js_lexer.UTF16ToString(prop.Key.Data.(*js_ast.EString).Value)] = data
					}
				}
			}

			// Returns a graph with links pointing from imports to importers.
			// Only computed in verbose mode since that's the only mode that
			// displays import chains.
			graphForEntryPoints := func(worklist []string) map[string]graphData {
				if !opts.Verbose {
					return nil
				}

				graph := make(map[string]graphData)

				// Entry points seed the graph at depth 0 with no parent
				for _, entryPoint := range worklist {
					graph[entryPoint] = graphData{}
				}

				// Worklist relaxation: whenever a file is reachable through a
				// shorter chain than previously recorded, update its parent
				// and depth and re-queue it so its imports get re-checked
				for len(worklist) > 0 {
					top := worklist[len(worklist)-1]
					worklist = worklist[:len(worklist)-1]
					childDepth := graph[top].depth + 1

					for _, importPath := range importsForPath[top].imports {
						imported, ok := graph[importPath]
						if !ok {
							// Unvisited files start at "infinite" depth so any
							// real path counts as an improvement
							imported.depth = math.MaxUint32
						}

						if imported.depth > childDepth {
							imported.depth = childDepth
							imported.parent = top
							graph[importPath] = imported
							worklist = append(worklist, importPath)
						}
					}
				}

				return graph
			}

			graphForAllEntryPoints := graphForEntryPoints(entryPoints)

			// tableEntry is one rendered line. The "*Len" fields hold the
			// visible widths (excluding color escape codes, which are baked
			// into the strings), so column alignment math uses them instead
			// of len() on the colored strings.
			type tableEntry struct {
				first      string
				second     string
				third      string
				firstLen   int
				secondLen  int
				thirdLen   int
				isTopLevel bool
			}

			var table []tableEntry
			var colors logger.Colors

			// With color disabled, "colors" stays the zero value so all the
			// escape-code fields are empty strings
			if opts.Color {
				colors = logger.TerminalColors
			}

			// Build up the table with an entry for each output file (other than ".map" files)
			for _, entry := range entries {
				second := prettyPrintByteCount(entry.size)
				third := "100.0%"

				table = append(table, tableEntry{
					first:      fmt.Sprintf("%s%s%s", colors.Bold, entry.name, colors.Reset),
					firstLen:   utf8.RuneCountInString(entry.name),
					second:     fmt.Sprintf("%s%s%s", colors.Bold, second, colors.Reset),
					secondLen:  len(second),
					third:      fmt.Sprintf("%s%s%s", colors.Bold, third, colors.Reset),
					thirdLen:   len(third),
					isTopLevel: true,
				})

				graph := graphForAllEntryPoints
				if entry.entryPoint != "" {
					// If there are multiple entry points and this output file is from an
					// entry point, prefer import paths for this entry point. This is less
					// confusing than showing import paths for another entry point.
					graph = graphForEntryPoints([]string{entry.entryPoint})
				}

				// Add a sub-entry for each input file in this output file
				for j, child := range entry.entries {
					// Use a corner connector for the last child, a tee otherwise
					indent := " ├ "
					if j+1 == len(entry.entries) {
						indent = " └ "
					}
					percent := 100.0 * float64(child.size) / float64(entry.size)

					first := indent + child.name
					second := prettyPrintByteCount(child.size)
					third := fmt.Sprintf("%.1f%%", percent)

					table = append(table, tableEntry{
						first:     first,
						firstLen:  utf8.RuneCountInString(first),
						second:    second,
						secondLen: len(second),
						third:     third,
						thirdLen:  len(third),
					})

					// If we're in verbose mode, also print the import chain from this file
					// up toward an entry point to show why this file is in the bundle
					if opts.Verbose {
						indent = " │ "
						if j+1 == len(entry.entries) {
							indent = "   "
						}
						data := graph[child.name]
						depth := 0

						// Follow parent links until reaching an entry point
						// (entry points were seeded with depth 0 above),
						// indenting 3 more spaces per level
						for data.depth != 0 {
							table = append(table, tableEntry{
								first: fmt.Sprintf("%s%s%s └ %s%s", indent, colors.Dim, strings.Repeat(" ", depth), data.parent, colors.Reset),
							})
							data = graph[data.parent]
							depth += 3
						}
					}
				}
			}

			maxFirstLen := 0
			maxSecondLen := 0
			maxThirdLen := 0

			// Calculate column widths
			for _, entry := range table {
				if maxFirstLen < entry.firstLen {
					maxFirstLen = entry.firstLen
				}
				if maxSecondLen < entry.secondLen {
					maxSecondLen = entry.secondLen
				}
				if maxThirdLen < entry.thirdLen {
					maxThirdLen = entry.thirdLen
				}
			}

			sb := strings.Builder{}

			// Render the columns now that we know the widths
			for _, entry := range table {
				// A blank line separates each top-level output file section
				prefix := "\n"
				if !entry.isTopLevel {
					prefix = ""
				}

				// Import paths don't have second and third columns
				if entry.second == "" && entry.third == "" {
					sb.WriteString(fmt.Sprintf("%s  %s\n",
						prefix,
						entry.first,
					))
					continue
				}

				// Move any trailing spaces in the second column into the
				// filler region so verbose mode's "─" line can cover them
				second := entry.second
				secondTrimmed := strings.TrimRight(second, " ")
				lineChar := " "
				extraSpace := 0

				if opts.Verbose {
					// Verbose mode joins the columns with a dim dashed line
					// (one extra character wide) instead of plain spaces
					lineChar = "─"
					extraSpace = 1
				}

				// Layout: name, dim filler padding both the first and second
				// columns, right-aligned size, dim filler, percentage
				sb.WriteString(fmt.Sprintf("%s  %s %s%s%s %s %s%s%s %s\n",
					prefix,
					entry.first,
					colors.Dim,
					strings.Repeat(lineChar, extraSpace+maxFirstLen-entry.firstLen+maxSecondLen-entry.secondLen),
					colors.Reset,
					secondTrimmed,
					colors.Dim,
					strings.Repeat(lineChar, extraSpace+maxThirdLen-entry.thirdLen+len(second)-len(secondTrimmed)),
					colors.Reset,
					entry.third,
				))
			}

			return sb.String()
		}
	}

	return ""
}
2071