Compare commits
19 Commits
Author | SHA1 | Date | |
---|---|---|---|
d595b08592 | |||
82c60b14b2 | |||
a3cebd2b3e | |||
4b0bfa8e4c | |||
48b15de4c5 | |||
79afdd39b8 | |||
6b14faa5b1 | |||
93338b0712 | |||
7f25c3f83c | |||
e87dfaf38b | |||
30f8be3d5d | |||
c356f34e21 | |||
734a6ce62e | |||
9e7f716be8 | |||
760decfb85 | |||
f081b97beb | |||
427e287895 | |||
8433170681 | |||
bea89e2a80 |
3
.gitignore
vendored
Normal file
3
.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
|
||||
vendor/
|
||||
tmp
|
34
.gowebbuild.yaml
Executable file
34
.gowebbuild.yaml
Executable file
@ -0,0 +1,34 @@
|
||||
- esbuild:
|
||||
entryPoints:
|
||||
- frontend/the-app.js
|
||||
outdir: ./frontend-dist
|
||||
sourcemap: 1
|
||||
format: 3
|
||||
splitting: true
|
||||
platform: 0
|
||||
bundle: true
|
||||
write: true
|
||||
logLevel: 3
|
||||
purgeBeforeBuild: false
|
||||
watch:
|
||||
paths:
|
||||
- ./frontend/src
|
||||
exclude: []
|
||||
# serve: # Uncomment and set a path to enable
|
||||
# path: ""
|
||||
# port: 8080
|
||||
copy:
|
||||
- src: ./frontend/index.html
|
||||
dest: ./frontend-dist
|
||||
# download:
|
||||
# - url: https://example.com/some-file-or-asset.js
|
||||
# dest: ./frontend/src/vendor/some-file-or-asset.js
|
||||
# replace:
|
||||
# - pattern: "*.go|*.js|*.html"
|
||||
# search: "Something"
|
||||
# replace: "This"
|
||||
# link:
|
||||
# from: ../../web/tp-elements
|
||||
# to: ./web
|
||||
# productionBuildOptions:
|
||||
# cmdPostBuild: ""
|
16
.vscode/launch.json
vendored
Normal file
16
.vscode/launch.json
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Launch Package",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}",
|
||||
"args": ["generate"]
|
||||
}
|
||||
]
|
||||
}
|
@ -1 +1,6 @@
|
||||
gowebbuild
|
||||
|
||||
# NPM Proxy
|
||||
|
||||
The npm proxy is a small npm registry server that can be used to serve packages from the local filesystem instead of the default registry.
|
||||
This allows to install packages that haven't been published yet.
|
||||
|
56
build.go
Normal file
56
build.go
Normal file
@ -0,0 +1,56 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/evanw/esbuild/pkg/api"
|
||||
"github.com/trading-peter/gowebbuild/fsutils"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func buildAction(ctx *cli.Context) error {
|
||||
cfgPath := fsutils.ResolvePath(ctx.String("c"))
|
||||
|
||||
os.Chdir(filepath.Dir(cfgPath))
|
||||
opts := readCfg(cfgPath)
|
||||
|
||||
for _, o := range opts {
|
||||
if ctx.Bool("p") {
|
||||
download(o)
|
||||
}
|
||||
purge(o)
|
||||
cp(o)
|
||||
|
||||
esBuildCfg := cfgToESBuildCfg(o)
|
||||
|
||||
if ctx.Bool("p") {
|
||||
esBuildCfg.MinifyIdentifiers = true
|
||||
esBuildCfg.MinifySyntax = true
|
||||
esBuildCfg.MinifyWhitespace = true
|
||||
esBuildCfg.Sourcemap = api.SourceMapNone
|
||||
}
|
||||
|
||||
api.Build(esBuildCfg)
|
||||
replace(o)
|
||||
|
||||
if ctx.Bool("p") && o.ProductionBuildOptions.CmdPostBuild != "" {
|
||||
defer func() {
|
||||
fmt.Printf("Executing post production build command `%s`\n", o.ProductionBuildOptions.CmdPostBuild)
|
||||
cmd := exec.Command("sh", "-c", o.ProductionBuildOptions.CmdPostBuild)
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
err := cmd.Run()
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to execute post production build command `%s`: %+v\n", o.ProductionBuildOptions.CmdPostBuild, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
202
config.go
Normal file
202
config.go
Normal file
@ -0,0 +1,202 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/evanw/esbuild/pkg/api"
|
||||
"github.com/trading-peter/gowebbuild/fsutils"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func cfgToESBuildCfg(cfg options) api.BuildOptions {
|
||||
return api.BuildOptions{
|
||||
EntryPoints: cfg.ESBuild.EntryPoints,
|
||||
Outdir: cfg.ESBuild.Outdir,
|
||||
Outfile: cfg.ESBuild.Outfile,
|
||||
Outbase: cfg.ESBuild.Outbase,
|
||||
Sourcemap: api.SourceMap(cfg.ESBuild.Sourcemap),
|
||||
Format: api.Format(cfg.ESBuild.Format),
|
||||
Splitting: cfg.ESBuild.Splitting,
|
||||
Platform: api.Platform(cfg.ESBuild.Platform),
|
||||
Bundle: cfg.ESBuild.Bundle,
|
||||
Write: cfg.ESBuild.Write,
|
||||
LogLevel: api.LogLevel(cfg.ESBuild.LogLevel),
|
||||
}
|
||||
}
|
||||
|
||||
type options struct {
|
||||
ESBuild struct {
|
||||
EntryPoints []string `yaml:"entryPoints"`
|
||||
Outdir string `yaml:"outdir"`
|
||||
Outbase string `yaml:"outbase"`
|
||||
Outfile string `yaml:"outfile"`
|
||||
Sourcemap int `yaml:"sourcemap"`
|
||||
Format int `yaml:"format"`
|
||||
Splitting bool `yaml:"splitting"`
|
||||
Platform int `yaml:"platform"`
|
||||
Bundle bool `yaml:"bundle"`
|
||||
Write bool `yaml:"write"`
|
||||
LogLevel int `yaml:"logLevel"`
|
||||
PurgeBeforeBuild bool `yaml:"purgeBeforeBuild"`
|
||||
} `yaml:"esbuild"`
|
||||
Watch struct {
|
||||
Paths []string `yaml:"paths"`
|
||||
Exclude []string `yaml:"exclude"`
|
||||
InjectLiveReload string `yaml:"injectLiveReload"`
|
||||
}
|
||||
Serve struct {
|
||||
Path string `yaml:"path"`
|
||||
Port int `yaml:"port"`
|
||||
} `yaml:"serve"`
|
||||
Copy []struct {
|
||||
Src string `yaml:"src"`
|
||||
Dest string `yaml:"dest"`
|
||||
} `yaml:"copy"`
|
||||
Download []struct {
|
||||
Url string `yaml:"url"`
|
||||
Dest string `yaml:"dest"`
|
||||
} `yaml:"download"`
|
||||
Replace []struct {
|
||||
Pattern string `yaml:"pattern"`
|
||||
Search string `yaml:"search"`
|
||||
Replace string `yaml:"replace"`
|
||||
} `yaml:"replace"`
|
||||
Link struct {
|
||||
From string `yaml:"from"`
|
||||
To string `yaml:"to"`
|
||||
} `yaml:"link"`
|
||||
ProductionBuildOptions struct {
|
||||
CmdPostBuild string `yaml:"cmdPostBuild"`
|
||||
} `yaml:"productionBuildOptions"`
|
||||
NpmProxy struct {
|
||||
Overrides []NpmProxyOverride
|
||||
} `yaml:"npm_proxy"`
|
||||
}
|
||||
|
||||
type NpmProxyOverride struct {
|
||||
Namespace string `yaml:"namespace"`
|
||||
Upstream string `yaml:"upstream"`
|
||||
PackageRoot string `yaml:"packageRoot"`
|
||||
}
|
||||
|
||||
func readCfg(cfgPath string) []options {
|
||||
if filepath.Ext(cfgPath) == ".json" {
|
||||
jsonOpts := readJsonCfg(cfgPath)
|
||||
|
||||
data, err := yaml.Marshal(jsonOpts)
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
yamlPath := strings.TrimSuffix(cfgPath, ".json") + ".yaml"
|
||||
|
||||
err = os.WriteFile(yamlPath, data, 0755)
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
cfgPath = yamlPath
|
||||
}
|
||||
|
||||
cfgContent, err := os.ReadFile(cfgPath)
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
optsSetups := []options{}
|
||||
|
||||
err = yaml.Unmarshal(cfgContent, &optsSetups)
|
||||
if err != nil {
|
||||
opt := options{}
|
||||
err = yaml.Unmarshal(cfgContent, &opt)
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
optsSetups = append(optsSetups, opt)
|
||||
}
|
||||
|
||||
// Process all paths in each options setup
|
||||
for i := range optsSetups {
|
||||
processPaths(&optsSetups[i])
|
||||
}
|
||||
|
||||
return optsSetups
|
||||
}
|
||||
|
||||
func readJsonCfg(cfgPath string) []options {
|
||||
cfgContent, err := os.ReadFile(cfgPath)
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
optsSetups := []options{}
|
||||
|
||||
err = json.Unmarshal(cfgContent, &optsSetups)
|
||||
if err != nil {
|
||||
opt := options{}
|
||||
err = json.Unmarshal(cfgContent, &opt)
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
optsSetups = append(optsSetups, opt)
|
||||
}
|
||||
|
||||
return optsSetups
|
||||
}
|
||||
|
||||
func processPaths(opts *options) {
|
||||
// ESBuild paths
|
||||
for i, entry := range opts.ESBuild.EntryPoints {
|
||||
opts.ESBuild.EntryPoints[i] = fsutils.ResolvePath(entry)
|
||||
}
|
||||
opts.ESBuild.Outdir = fsutils.ResolvePath(opts.ESBuild.Outdir)
|
||||
opts.ESBuild.Outfile = fsutils.ResolvePath(opts.ESBuild.Outfile)
|
||||
|
||||
// Watch paths
|
||||
for i, path := range opts.Watch.Paths {
|
||||
opts.Watch.Paths[i] = fsutils.ResolvePath(path)
|
||||
}
|
||||
|
||||
for i, path := range opts.Watch.Exclude {
|
||||
opts.Watch.Exclude[i] = fsutils.ResolvePath(path)
|
||||
}
|
||||
|
||||
// opts.Watch.Inject = fsutils.ResolvePath(opts.Watch.Inject)
|
||||
|
||||
// Serve path
|
||||
opts.Serve.Path = fsutils.ResolvePath(opts.Serve.Path)
|
||||
|
||||
// Copy paths
|
||||
for i := range opts.Copy {
|
||||
opts.Copy[i].Src = fsutils.ResolvePath(opts.Copy[i].Src)
|
||||
opts.Copy[i].Dest = fsutils.ResolvePath(opts.Copy[i].Dest)
|
||||
}
|
||||
|
||||
// Download paths
|
||||
for i := range opts.Download {
|
||||
opts.Download[i].Dest = fsutils.ResolvePath(opts.Download[i].Dest)
|
||||
}
|
||||
|
||||
// Link paths
|
||||
opts.Link.From = fsutils.ResolvePath(opts.Link.From)
|
||||
opts.Link.To = fsutils.ResolvePath(opts.Link.To)
|
||||
|
||||
// Npm proxy paths
|
||||
for i := range opts.NpmProxy.Overrides {
|
||||
opts.NpmProxy.Overrides[i].PackageRoot = fsutils.ResolvePath(opts.NpmProxy.Overrides[i].PackageRoot)
|
||||
}
|
||||
}
|
75
fsutils/helpers.go
Normal file
75
fsutils/helpers.go
Normal file
@ -0,0 +1,75 @@
|
||||
package fsutils
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func FindFiles(root, name string) []string {
|
||||
paths := []string{}
|
||||
|
||||
filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if !d.IsDir() && filepath.Base(path) == name && !strings.Contains(path, "node_modules") {
|
||||
paths = append(paths, path)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return paths
|
||||
}
|
||||
|
||||
func IsFile(path string) bool {
|
||||
stat, err := os.Stat(path)
|
||||
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
return false
|
||||
}
|
||||
|
||||
return !stat.IsDir()
|
||||
}
|
||||
|
||||
func IsDir(path string) bool {
|
||||
stat, err := os.Stat(path)
|
||||
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
os.MkdirAll(path, 0755)
|
||||
return true
|
||||
}
|
||||
|
||||
return err == nil && stat.IsDir()
|
||||
}
|
||||
|
||||
func ResolvePath(path string) string {
|
||||
// We assume that the user doesn't use the involved feature if the path is empty.
|
||||
if path == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
expandedPath := os.ExpandEnv(path)
|
||||
|
||||
if strings.HasPrefix(expandedPath, "~") {
|
||||
homeDir, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
fmt.Println(err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
expandedPath = filepath.Join(homeDir, expandedPath[1:])
|
||||
}
|
||||
|
||||
path, err := filepath.Abs(expandedPath)
|
||||
if err != nil {
|
||||
fmt.Println(err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
return path
|
||||
}
|
95
go.mod
95
go.mod
@ -1,18 +1,95 @@
|
||||
module github.com/trading-peter/gowebbuild
|
||||
|
||||
go 1.17
|
||||
go 1.24
|
||||
|
||||
require (
|
||||
github.com/evanw/esbuild v0.14.5
|
||||
github.com/goyek/goyek v0.6.0
|
||||
github.com/jaschaephraim/lrserver v0.0.0-20171129202958-50d19f603f71
|
||||
github.com/otiai10/copy v1.7.0
|
||||
github.com/Iilun/survey/v2 v2.5.2
|
||||
github.com/Masterminds/semver/v3 v3.3.1
|
||||
github.com/PuerkitoBio/goquery v1.10.2
|
||||
github.com/evanw/esbuild v0.25.0
|
||||
github.com/jaschaephraim/lrserver v0.0.0-20240306232639-afed386b3640
|
||||
github.com/kataras/golog v0.1.12
|
||||
github.com/kataras/iris/v12 v12.2.11
|
||||
github.com/mholt/archives v0.1.0
|
||||
github.com/otiai10/copy v1.14.1
|
||||
github.com/radovskyb/watcher v1.0.7
|
||||
github.com/tidwall/gjson v1.18.0
|
||||
github.com/urfave/cli/v2 v2.27.5
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/gorilla/websocket v1.4.2 // indirect
|
||||
github.com/smartystreets/goconvey v1.7.2 // indirect
|
||||
golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365 // indirect
|
||||
gopkg.in/fsnotify.v1 v1.4.7 // indirect
|
||||
github.com/BurntSushi/toml v1.4.0 // indirect
|
||||
github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53 // indirect
|
||||
github.com/CloudyKit/jet/v6 v6.3.1 // indirect
|
||||
github.com/Joker/jade v1.1.3 // indirect
|
||||
github.com/STARRY-S/zip v0.2.2 // indirect
|
||||
github.com/Shopify/goreferrer v0.0.0-20240724165105-aceaa0259138 // indirect
|
||||
github.com/andybalholm/brotli v1.1.1 // indirect
|
||||
github.com/andybalholm/cascadia v1.3.3 // indirect
|
||||
github.com/aymerick/douceur v0.2.0 // indirect
|
||||
github.com/bodgit/plumbing v1.3.0 // indirect
|
||||
github.com/bodgit/sevenzip v1.6.0 // indirect
|
||||
github.com/bodgit/windows v1.0.1 // indirect
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect
|
||||
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
|
||||
github.com/fatih/structs v1.1.0 // indirect
|
||||
github.com/flosch/pongo2/v4 v4.0.2 // indirect
|
||||
github.com/golang/snappy v0.0.4 // indirect
|
||||
github.com/gomarkdown/markdown v0.0.0-20250207164621-7a1f277a159e // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/gorilla/css v1.0.1 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||
github.com/iris-contrib/schema v0.0.6 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/kataras/blocks v0.0.11 // indirect
|
||||
github.com/kataras/pio v0.0.13 // indirect
|
||||
github.com/kataras/sitemap v0.0.6 // indirect
|
||||
github.com/kataras/tunnel v0.0.4 // indirect
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/pgzip v1.2.6 // indirect
|
||||
github.com/kr/text v0.2.0 // indirect
|
||||
github.com/mailgun/raymond/v2 v2.0.48 // indirect
|
||||
github.com/mailru/easyjson v0.9.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
||||
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
|
||||
github.com/nwaples/rardecode/v2 v2.1.0 // indirect
|
||||
github.com/otiai10/mint v1.6.3 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||
github.com/schollz/closestmatch v2.1.0+incompatible // indirect
|
||||
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||
github.com/sorairolake/lzip-go v0.3.5 // indirect
|
||||
github.com/tdewolff/minify/v2 v2.21.3 // indirect
|
||||
github.com/tdewolff/parse/v2 v2.7.20 // indirect
|
||||
github.com/therootcompany/xz v1.0.1 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.1 // indirect
|
||||
github.com/ulikunitz/xz v0.5.12 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
|
||||
github.com/yosssi/ace v0.0.5 // indirect
|
||||
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
|
||||
golang.design/x/clipboard v0.7.0 // indirect
|
||||
golang.org/x/crypto v0.35.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 // indirect
|
||||
golang.org/x/exp/shiny v0.0.0-20250228200357-dead58393ab7 // indirect
|
||||
golang.org/x/image v0.24.0 // indirect
|
||||
golang.org/x/mobile v0.0.0-20250218173827-cd096645fcd3 // indirect
|
||||
golang.org/x/net v0.35.0 // indirect
|
||||
golang.org/x/sync v0.11.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/term v0.29.0 // indirect
|
||||
golang.org/x/text v0.22.0 // indirect
|
||||
golang.org/x/time v0.10.0 // indirect
|
||||
google.golang.org/protobuf v1.36.5 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
)
|
||||
|
577
go.sum
577
go.sum
@ -1,34 +1,559 @@
|
||||
github.com/evanw/esbuild v0.14.5 h1:Gh/vGvDL/g++7erzQZofohZqFBzQblWfLdtYCf15zcQ=
|
||||
github.com/evanw/esbuild v0.14.5/go.mod h1:GG+zjdi59yh3ehDn4ZWfPcATxjPDUH53iU4ZJbp7dkY=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=
|
||||
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/goyek/goyek v0.6.0 h1:2YQ4V3X7q+zFF98IBWMc1WRwfzs0TQ8jrwOKY3XRQRk=
|
||||
github.com/goyek/goyek v0.6.0/go.mod h1:UGjZz3juJL2l2eMqRbxQYjG8ieyKb7WMYPv0KB0KVxA=
|
||||
github.com/jaschaephraim/lrserver v0.0.0-20171129202958-50d19f603f71 h1:24NdJ5N6gtrcoeS4JwLMeruKFmg20QdF/5UnX5S/j18=
|
||||
github.com/jaschaephraim/lrserver v0.0.0-20171129202958-50d19f603f71/go.mod h1:ozZLfjiLmXytkIUh200wMeuoQJ4ww06wN+KZtFP6j3g=
|
||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
|
||||
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
|
||||
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
|
||||
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
|
||||
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
|
||||
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
|
||||
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
|
||||
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
|
||||
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
|
||||
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
|
||||
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
|
||||
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
|
||||
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
|
||||
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
|
||||
github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53 h1:sR+/8Yb4slttB4vD+b9btVEnWgL3Q00OBTzVT8B9C0c=
|
||||
github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3IMCy2vIlbG1XG/0ggNQv0SvxCAIpPM5b1nCz56Xno=
|
||||
github.com/CloudyKit/jet/v6 v6.3.1 h1:6IAo5Cx21xrHVaR8zzXN5gJatKV/wO7Nf6bfCnCSbUw=
|
||||
github.com/CloudyKit/jet/v6 v6.3.1/go.mod h1:lf8ksdNsxZt7/yH/3n4vJQWA9RUq4wpaHtArHhGVMOw=
|
||||
github.com/Iilun/survey/v2 v2.5.2 h1:LaidRUryJVuwPMLQIhIiYcX3P1KVxtkJh1q+RMGX97s=
|
||||
github.com/Iilun/survey/v2 v2.5.2/go.mod h1:IgwKxozHo0Gf8pnof+IDSDcieRtD2jtB4nTpj1+Oz7E=
|
||||
github.com/Joker/hpp v1.0.0 h1:65+iuJYdRXv/XyN62C1uEmmOx3432rNG/rKlX6V7Kkc=
|
||||
github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY=
|
||||
github.com/Joker/jade v1.1.3 h1:Qbeh12Vq6BxURXT1qZBRHsDxeURB8ztcL6f3EXSGeHk=
|
||||
github.com/Joker/jade v1.1.3/go.mod h1:T+2WLyt7VH6Lp0TRxQrUYEs64nRc83wkMQrfeIQKduM=
|
||||
github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4=
|
||||
github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
|
||||
github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63nhn5WAunQHLTznkw5W8b1Xc0dNjp83s=
|
||||
github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w=
|
||||
github.com/PuerkitoBio/goquery v1.10.2 h1:7fh2BdHcG6VFZsK7toXBT/Bh1z5Wmy8Q9MV9HqT2AM8=
|
||||
github.com/PuerkitoBio/goquery v1.10.2/go.mod h1:0guWGjcLu9AYC7C1GHnpysHy056u9aEkUHwhdnePMCU=
|
||||
github.com/STARRY-S/zip v0.2.2 h1:8QeCbIi1Z9U5MgoDARJR1ClbBo9RD46SmVy+dl0woCk=
|
||||
github.com/STARRY-S/zip v0.2.2/go.mod h1:lqJ9JdeRipyOQJrYSOtpNAiaesFO6zVDsE8GIGFaoSk=
|
||||
github.com/Shopify/goreferrer v0.0.0-20240724165105-aceaa0259138 h1:gjbp60h8IZQbN/TpDaYJedWbbD1h1aDPEwWnYWaDaUY=
|
||||
github.com/Shopify/goreferrer v0.0.0-20240724165105-aceaa0259138/go.mod h1:NYezi6wtnJtBm5btoprXc5SvAdqH0XTXWnUup0MptAI=
|
||||
github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU=
|
||||
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
|
||||
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
|
||||
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
||||
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
|
||||
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
|
||||
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
|
||||
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
|
||||
github.com/bodgit/plumbing v1.3.0 h1:pf9Itz1JOQgn7vEOE7v7nlEfBykYqvUYioC61TwWCFU=
|
||||
github.com/bodgit/plumbing v1.3.0/go.mod h1:JOTb4XiRu5xfnmdnDJo6GmSbSbtSyufrsyZFByMtKEs=
|
||||
github.com/bodgit/sevenzip v1.6.0 h1:a4R0Wu6/P1o1pP/3VV++aEOcyeBxeO/xE2Y9NSTrr6A=
|
||||
github.com/bodgit/sevenzip v1.6.0/go.mod h1:zOBh9nJUof7tcrlqJFv1koWRrhz3LbDbUNngkuZxLMc=
|
||||
github.com/bodgit/windows v1.0.1 h1:tF7K6KOluPYygXa3Z2594zxlkbKPAOvqr97etrGNIz4=
|
||||
github.com/bodgit/windows v1.0.1/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
|
||||
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
|
||||
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6 h1:XJtiaUW6dEEqVuZiMTn1ldk455QWwEIsMIJlo5vtkx0=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/creack/pty v1.1.17 h1:QeVUsEDNrLBW4tMgZHvxy18sKtr6VI492kBhUfhDJNI=
|
||||
github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 h1:2tV76y6Q9BB+NEBasnqvs7e49aEBFI8ejC89PSnWH+4=
|
||||
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s=
|
||||
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
|
||||
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||
github.com/evanw/esbuild v0.25.0 h1:jRR9D1pfdb669VzdN4w0jwsDfrKE098nKMaDMKvMPyU=
|
||||
github.com/evanw/esbuild v0.25.0/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48=
|
||||
github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs=
|
||||
github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw=
|
||||
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
|
||||
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
|
||||
github.com/flosch/pongo2/v4 v4.0.2 h1:gv+5Pe3vaSVmiJvh/BZa82b7/00YUGm0PIyVVLop0Hw=
|
||||
github.com/flosch/pongo2/v4 v4.0.2/go.mod h1:B5ObFANs/36VwxxlgKpdchIJHMvHB562PW+BWPhwZD8=
|
||||
github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M=
|
||||
github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
|
||||
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
|
||||
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
|
||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/gomarkdown/markdown v0.0.0-20250207164621-7a1f277a159e h1:ESHlT0RVZphh4JGBz49I5R6nTdC8Qyc08vU25GQHzzQ=
|
||||
github.com/gomarkdown/markdown v0.0.0-20250207164621-7a1f277a159e/go.mod h1:JDGcbDT52eL4fju3sZ4TeHGsQwhG9nbDV21aMyhwPoA=
|
||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
|
||||
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
|
||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
|
||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
github.com/gopherjs/gopherjs v1.17.2 h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g=
|
||||
github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k=
|
||||
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
|
||||
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
|
||||
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
|
||||
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog=
|
||||
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||
github.com/imkira/go-interpol v1.1.0 h1:KIiKr0VSG2CUW1hl1jpiyuzuJeKUUpC8iM1AIE7N1Vk=
|
||||
github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA=
|
||||
github.com/iris-contrib/httpexpect/v2 v2.15.2 h1:T9THsdP1woyAqKHwjkEsbCnMefsAFvk8iJJKokcJ3Go=
|
||||
github.com/iris-contrib/httpexpect/v2 v2.15.2/go.mod h1:JLDgIqnFy5loDSUv1OA2j0mb6p/rDhiCqigP22Uq9xE=
|
||||
github.com/iris-contrib/schema v0.0.6 h1:CPSBLyx2e91H2yJzPuhGuifVRnZBBJ3pCOMbOvPZaTw=
|
||||
github.com/iris-contrib/schema v0.0.6/go.mod h1:iYszG0IOsuIsfzjymw1kMzTL8YQcCWlm65f3wX8J5iA=
|
||||
github.com/jaschaephraim/lrserver v0.0.0-20240306232639-afed386b3640 h1:qxoA9wh1IZAbMhfFSE81tn8RsB48LNd7ecH6lFpxucc=
|
||||
github.com/jaschaephraim/lrserver v0.0.0-20240306232639-afed386b3640/go.mod h1:1Dkfm1/kgjeZc+2TBUAyZ3TJeQ/HaKbj8ig+7nAHkws=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
|
||||
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/otiai10/copy v1.7.0 h1:hVoPiN+t+7d2nzzwMiDHPSOogsWAStewq3TwU05+clE=
|
||||
github.com/otiai10/copy v1.7.0/go.mod h1:rmRl6QPdJj6EiUqXQ/4Nn2lLXoNQjFCQbbNrxgc/t3U=
|
||||
github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
|
||||
github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
|
||||
github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
|
||||
github.com/otiai10/mint v1.3.3 h1:7JgpsBaN0uMkyju4tbYHu0mnM55hNKVYLsXmwr15NQI=
|
||||
github.com/otiai10/mint v1.3.3/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
|
||||
github.com/kataras/blocks v0.0.11 h1:JJdYW0AUaJKLx5kEWs/oRVCvKVXo+6CAAeaVAiJf7wE=
|
||||
github.com/kataras/blocks v0.0.11/go.mod h1:b4UySrJySEOq6drKH9U3bOpMI+dRH148mayYfS3RFb8=
|
||||
github.com/kataras/golog v0.1.12 h1:Bu7I/G4ilJlbfzjmU39O9N+2uO1pBcMK045fzZ4ytNg=
|
||||
github.com/kataras/golog v0.1.12/go.mod h1:wrGSbOiBqbQSQznleVNX4epWM8rl9SJ/rmEacl0yqy4=
|
||||
github.com/kataras/iris/v12 v12.2.11 h1:sGgo43rMPfzDft8rjVhPs6L3qDJy3TbBrMD/zGL1pzk=
|
||||
github.com/kataras/iris/v12 v12.2.11/go.mod h1:uMAeX8OqG9vqdhyrIPv8Lajo/wXTtAF43wchP9WHt2w=
|
||||
github.com/kataras/pio v0.0.13 h1:x0rXVX0fviDTXOOLOmr4MUxOabu1InVSTu5itF8CXCM=
|
||||
github.com/kataras/pio v0.0.13/go.mod h1:k3HNuSw+eJ8Pm2lA4lRhg3DiCjVgHlP8hmXApSej3oM=
|
||||
github.com/kataras/sitemap v0.0.6 h1:w71CRMMKYMJh6LR2wTgnk5hSgjVNB9KL60n5e2KHvLY=
|
||||
github.com/kataras/sitemap v0.0.6/go.mod h1:dW4dOCNs896OR1HmG+dMLdT7JjDk7mYBzoIRwuj5jA4=
|
||||
github.com/kataras/tunnel v0.0.4 h1:sCAqWuJV7nPzGrlb0os3j49lk2JhILT0rID38NHNLpA=
|
||||
github.com/kataras/tunnel v0.0.4/go.mod h1:9FkU4LaeifdMWqZu7o20ojmW4B7hdhv2CMLwfnHGpYw=
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
|
||||
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
|
||||
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/mailgun/raymond/v2 v2.0.48 h1:5dmlB680ZkFG2RN/0lvTAghrSxIESeu9/2aeDqACtjw=
|
||||
github.com/mailgun/raymond/v2 v2.0.48/go.mod h1:lsgvL50kgt1ylcFJYZiULi5fjPBkkhNfj4KA0W54Z18=
|
||||
github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4=
|
||||
github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU=
|
||||
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
|
||||
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
|
||||
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
|
||||
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
||||
github.com/mholt/archives v0.1.0 h1:FacgJyrjiuyomTuNA92X5GyRBRZjE43Y/lrzKIlF35Q=
|
||||
github.com/mholt/archives v0.1.0/go.mod h1:j/Ire/jm42GN7h90F5kzj6hf6ZFzEH66de+hmjEKu+I=
|
||||
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
|
||||
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
|
||||
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/nwaples/rardecode/v2 v2.1.0 h1:JQl9ZoBPDy+nIZGb1mx8+anfHp/LV3NE2MjMiv0ct/U=
|
||||
github.com/nwaples/rardecode/v2 v2.1.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
|
||||
github.com/otiai10/copy v1.14.1 h1:5/7E6qsUMBaH5AnQ0sSLzzTg1oTECmcCmT6lvF45Na8=
|
||||
github.com/otiai10/copy v1.14.1/go.mod h1:oQwrEDDOci3IM8dJF0d8+jnbfPDllW6vUjNc3DoZm9I=
|
||||
github.com/otiai10/mint v1.6.3 h1:87qsV/aw1F5as1eH1zS/yqHY85ANKVMgkDrf9rcxbQs=
|
||||
github.com/otiai10/mint v1.6.3/go.mod h1:MJm72SBthJjz8qhefc4z1PYEieWmy8Bku7CjcAqyUSM=
|
||||
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
|
||||
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/radovskyb/watcher v1.0.7 h1:AYePLih6dpmS32vlHfhCeli8127LzkIgwJGcwwe8tUE=
|
||||
github.com/radovskyb/watcher v1.0.7/go.mod h1:78okwvY5wPdzcb1UYnip1pvrZNIVEIh/Cm+ZuvsUYIg=
|
||||
github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=
|
||||
github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo=
|
||||
github.com/smartystreets/goconvey v1.7.2 h1:9RBaZCeXEQ3UselpuwUQHltGVXvdwm6cv1hgR6gDIPg=
|
||||
github.com/smartystreets/goconvey v1.7.2/go.mod h1:Vw0tHAZW6lzCRk3xgdin6fKYcG+G3Pg9vgXWeJpQFMM=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
|
||||
github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo=
|
||||
github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U=
|
||||
github.com/schollz/closestmatch v2.1.0+incompatible h1:Uel2GXEpJqOWBrlyI+oY9LTiyyjYS17cCYRqP13/SHk=
|
||||
github.com/schollz/closestmatch v2.1.0+incompatible/go.mod h1:RtP1ddjLong6gTkbtmuhtR2uUrrJOpYzYRvbcPAid+g=
|
||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/smarty/assertions v1.15.0 h1:cR//PqUBUiQRakZWqBiFFQ9wb8emQGDb0HeGdqGByCY=
|
||||
github.com/smarty/assertions v1.15.0/go.mod h1:yABtdzeQs6l1brC900WlRNwj6ZR55d7B+E8C6HtKdec=
|
||||
github.com/smartystreets/goconvey v1.8.1 h1:qGjIddxOk4grTu9JPOU31tVfq3cNdBlNa5sSznIX1xY=
|
||||
github.com/smartystreets/goconvey v1.8.1/go.mod h1:+/u4qLyY6x1jReYOp7GOM2FSt8aP9CzCZL03bI28W60=
|
||||
github.com/sorairolake/lzip-go v0.3.5 h1:ms5Xri9o1JBIWvOFAorYtUNik6HI3HgBTkISiqu0Cwg=
|
||||
github.com/sorairolake/lzip-go v0.3.5/go.mod h1:N0KYq5iWrMXI0ZEXKXaS9hCyOjZUQdBDEIbXfoUwbdk=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/tdewolff/minify/v2 v2.21.3 h1:KmhKNGrN/dGcvb2WDdB5yA49bo37s+hcD8RiF+lioV8=
|
||||
github.com/tdewolff/minify/v2 v2.21.3/go.mod h1:iGxHaGiONAnsYuo8CRyf8iPUcqRJVB/RhtEcTpqS7xw=
|
||||
github.com/tdewolff/parse/v2 v2.7.20 h1:Y33JmRLjyGhX5JRvYh+CO6Sk6pGMw3iO5eKGhUhx8JE=
|
||||
github.com/tdewolff/parse/v2 v2.7.20/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA=
|
||||
github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE=
|
||||
github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739 h1:IkjBCtQOOjIn03u/dMQK9g+Iw9ewps4mCl1nB8Sscbo=
|
||||
github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8=
|
||||
github.com/therootcompany/xz v1.0.1 h1:CmOtsn1CbtmyYiusbfmhmkpAAETj0wBIH6kCYaX+xzw=
|
||||
github.com/therootcompany/xz v1.0.1/go.mod h1:3K3UH1yCKgBneZYhuQUvJ9HPD19UEXEI0BWbMn8qNMY=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
|
||||
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
|
||||
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
|
||||
github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
|
||||
github.com/urfave/cli/v2 v2.27.5 h1:WoHEJLdsXr6dDWoJgMq/CboDmyY/8HMMH1fTECbih+w=
|
||||
github.com/urfave/cli/v2 v2.27.5/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ=
|
||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
|
||||
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
|
||||
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
|
||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
|
||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 h1:6fRhSjgLCkTD3JnJxvaJ4Sj+TYblw757bqYgZaOq5ZY=
|
||||
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI=
|
||||
github.com/yosssi/ace v0.0.5 h1:tUkIP/BLdKqrlrPwcmH0shwEEhTRHoGnc1wFIWmaBUA=
|
||||
github.com/yosssi/ace v0.0.5/go.mod h1:ALfIzm2vT7t5ZE7uoIZqF3TQ7SAOyupFZnkrF5id+K0=
|
||||
github.com/yudai/gojsondiff v1.0.0 h1:27cbfqXLVEJ1o8I6v3y9lg8Ydm53EKqHXAOMxEGlCOA=
|
||||
github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg=
|
||||
github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 h1:BHyfKlQyqbsFN5p3IfnEUduWvb9is428/nNb5L3U01M=
|
||||
github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM=
|
||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
|
||||
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
|
||||
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go4.org v0.0.0-20230225012048-214862532bf5 h1:nifaUDeh+rPaBCMPMQHZmvJf+QdpLFnuQPwx+LxVmtc=
|
||||
go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEbFGHD4hhz6S1ZYeaU=
|
||||
golang.design/x/clipboard v0.6.3/go.mod h1:kqBSweBP0/im4SZGGjLrppH0D400Hnfo5WbFKSNK8N4=
|
||||
golang.design/x/clipboard v0.7.0 h1:4Je8M/ys9AJumVnl8m+rZnIvstSnYj1fvzqYrU3TXvo=
|
||||
golang.design/x/clipboard v0.7.0/go.mod h1:PQIvqYO9GP29yINEfsEn5zSQKAz3UgXmZKzDA6dnq2E=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
|
||||
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4=
|
||||
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
|
||||
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
|
||||
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
||||
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 h1:aWwlzYV971S4BXRS9AmqwDLAD85ouC6X+pocatKY58c=
|
||||
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
|
||||
golang.org/x/exp/shiny v0.0.0-20250228200357-dead58393ab7 h1:VxTRg3kpOpYQ+S2PlDH9x2j/ZOQMxVsPgdYYRvkErNY=
|
||||
golang.org/x/exp/shiny v0.0.0-20250228200357-dead58393ab7/go.mod h1:ygj7T6vSGhhm/9yTpOQQNvuAUFziTH7RUiH74EoE2C8=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
|
||||
golang.org/x/image v0.24.0 h1:AN7zRgVsbvmTfNyqIbbOraYL8mSwcKncEj8ofjgzcMQ=
|
||||
golang.org/x/image v0.24.0/go.mod h1:4b/ITuLfqYq1hqZcjofwctIhi7sZh2WaCjvsBNjjya8=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
|
||||
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
|
||||
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
|
||||
golang.org/x/mobile v0.0.0-20210716004757-34ab1303b554/go.mod h1:jFTmtFYCV0MFtXBU+J5V/+5AUeVS0ON/0WkE/KSrl6E=
|
||||
golang.org/x/mobile v0.0.0-20250218173827-cd096645fcd3 h1:0V/7Y1FEaFdAzb9DkVDh4QFp4vL4yYCiJ5cjk80lZyA=
|
||||
golang.org/x/mobile v0.0.0-20250218173827-cd096645fcd3/go.mod h1:j5VYNgQ6lZYZlzHFjdgS2UeqRSZunDk+/zXVTAIA3z4=
|
||||
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
||||
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
|
||||
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190327091125-710a502c58a2/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
||||
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
|
||||
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
|
||||
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365 h1:6wSTsvPddg9gc/mVEEyk9oOAoxn+bT4Z9q1zx+4RwA4=
|
||||
golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
|
||||
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
|
||||
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.10.0 h1:3usCWA8tQn0L8+hFJQNgzpWbd89begxN66o1Ojdn5L4=
|
||||
golang.org/x/time v0.10.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
|
||||
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
|
||||
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
|
||||
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
|
||||
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b h1:QRR6H1YWRnHb4Y/HeNFCTJLFVxaq6wH4YuVdsUOr75U=
|
||||
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
|
||||
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
moul.io/http2curl/v2 v2.3.0 h1:9r3JfDzWPcbIklMOs2TnIFzDYvfAZvjeavG6EzP7jYs=
|
||||
moul.io/http2curl/v2 v2.3.0/go.mod h1:RW4hyBjTWSYDOxapodpNEtX0g5Eb16sxklBqmd2RHcE=
|
||||
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
|
||||
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
|
||||
|
346
helpers.go
Normal file
346
helpers.go
Normal file
@ -0,0 +1,346 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"github.com/evanw/esbuild/pkg/api"
|
||||
"github.com/otiai10/copy"
|
||||
"github.com/trading-peter/gowebbuild/fsutils"
|
||||
)
|
||||
|
||||
func purge(opts options) {
|
||||
if opts.ESBuild.PurgeBeforeBuild {
|
||||
if opts.ESBuild.Outdir != "" {
|
||||
fmt.Printf("Purging output folder %s\n", opts.ESBuild.Outdir)
|
||||
os.RemoveAll(opts.ESBuild.Outdir)
|
||||
}
|
||||
|
||||
if opts.ESBuild.Outfile != "" {
|
||||
fmt.Printf("Purging output file %s\n", opts.ESBuild.Outfile)
|
||||
os.Remove(opts.ESBuild.Outfile)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func download(opts options) {
|
||||
if len(opts.Download) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
for _, dl := range opts.Download {
|
||||
if !fsutils.IsDir(filepath.Dir(dl.Dest)) {
|
||||
fmt.Printf("Failed to find destination folder for downloading from %s\n", dl.Url)
|
||||
continue
|
||||
}
|
||||
|
||||
file, err := os.Create(dl.Dest)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to create file for downloading from %s: %v\n", dl.Url, err)
|
||||
continue
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
client := http.Client{
|
||||
CheckRedirect: func(r *http.Request, via []*http.Request) error {
|
||||
r.URL.Opaque = r.URL.Path
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
fmt.Printf("Downloading %s to %s\n", dl.Url, dl.Dest)
|
||||
resp, err := client.Get(dl.Url)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to download file from %s: %v\n", dl.Url, err)
|
||||
continue
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
_, err = io.Copy(file, resp.Body)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to write file downloaded from %s: %v\n", dl.Url, err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func cp(opts options) {
|
||||
if len(opts.Copy) == 0 {
|
||||
fmt.Println("Nothing to copy")
|
||||
return
|
||||
}
|
||||
|
||||
for _, op := range opts.Copy {
|
||||
paths, err := filepath.Glob(op.Src)
|
||||
if err != nil {
|
||||
fmt.Printf("Invalid glob pattern: %s\n", op.Src)
|
||||
continue
|
||||
}
|
||||
|
||||
destIsDir := fsutils.IsDir(op.Dest)
|
||||
for _, p := range paths {
|
||||
d := op.Dest
|
||||
|
||||
if destIsDir && fsutils.IsFile(p) {
|
||||
d = filepath.Join(d, filepath.Base(p))
|
||||
}
|
||||
err := copy.Copy(p, d)
|
||||
fmt.Printf("Copying %s to %s\n", p, d)
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to copy %s: %v\n", p, err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func replace(opts options) {
|
||||
if len(opts.Replace) == 0 {
|
||||
fmt.Println("Nothing to replace")
|
||||
return
|
||||
}
|
||||
for _, op := range opts.Replace {
|
||||
paths, err := filepath.Glob(op.Pattern)
|
||||
if err != nil {
|
||||
fmt.Printf("Invalid glob pattern: %s\n", op.Pattern)
|
||||
continue
|
||||
}
|
||||
|
||||
for _, p := range paths {
|
||||
if !fsutils.IsFile(p) {
|
||||
continue
|
||||
}
|
||||
|
||||
read, err := os.ReadFile(p)
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
r := op.Replace
|
||||
if strings.HasPrefix(op.Replace, "$") {
|
||||
r = os.ExpandEnv(op.Replace)
|
||||
}
|
||||
|
||||
count := strings.Count(string(read), op.Search)
|
||||
|
||||
if count > 0 {
|
||||
fmt.Printf("Replacing %d occurrences of '%s' with '%s' in %s\n", count, op.Search, r, p)
|
||||
newContents := strings.ReplaceAll(string(read), op.Search, r)
|
||||
err = os.WriteFile(p, []byte(newContents), 0)
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func injectLR(opts options) {
|
||||
if opts.Watch.InjectLiveReload == "" {
|
||||
return
|
||||
}
|
||||
|
||||
// Read the HTML file
|
||||
contents, err := os.ReadFile(opts.Watch.InjectLiveReload)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to read inject live reload script: %v\n", err)
|
||||
return
|
||||
}
|
||||
|
||||
htmlContent := string(contents)
|
||||
|
||||
// First modify CSP
|
||||
cspModified, err := updateContentPolicyTag(htmlContent)
|
||||
if err != nil {
|
||||
fmt.Println("Error modifying CSP:", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Then inject script
|
||||
finalHTML, err := injectLiveReloadScript(cspModified)
|
||||
if err != nil {
|
||||
fmt.Println("Error injecting script:", err)
|
||||
return
|
||||
}
|
||||
|
||||
err = os.WriteFile(opts.Watch.InjectLiveReload, []byte(finalHTML), 0644)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to write live reload script reference: %v\n", err)
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("Injected live reload script reference into %s\n", opts.Watch.InjectLiveReload)
|
||||
}
|
||||
|
||||
func injectLiveReloadScript(html string) (string, error) {
|
||||
// Check if script is already present
|
||||
if strings.Contains(html, "livereload.js") {
|
||||
return html, nil
|
||||
}
|
||||
|
||||
// Find the closing body tag and inject script before it
|
||||
bodyCloseRegex := regexp.MustCompile(`(?i)</body>`)
|
||||
if !bodyCloseRegex.MatchString(html) {
|
||||
return html, nil // Return unchanged if no body tag found
|
||||
}
|
||||
|
||||
scriptTag := `<script src="http://localhost:35729/livereload.js" type="text/javascript"></script>`
|
||||
newHTML := bodyCloseRegex.ReplaceAllString(html, scriptTag+"</body>")
|
||||
|
||||
return newHTML, nil
|
||||
}
|
||||
|
||||
func updateContentPolicyTag(html string) (string, error) {
|
||||
doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
|
||||
if err != nil {
|
||||
return html, err
|
||||
}
|
||||
|
||||
liveReloadHost := "localhost:35729"
|
||||
liveReloadURL := "http://" + liveReloadHost
|
||||
liveReloadWS := "ws://" + liveReloadHost
|
||||
|
||||
doc.Find("meta[http-equiv='Content-Security-Policy']").Each(func(i int, s *goquery.Selection) {
|
||||
if originalCSP, ok := s.Attr("content"); ok {
|
||||
// Split CSP into individual directives
|
||||
directives := strings.Split(originalCSP, ";")
|
||||
|
||||
// Look for script-src directive
|
||||
scriptSrcFound := false
|
||||
connectSrcFound := false
|
||||
|
||||
for i, directive := range directives {
|
||||
trimmed := strings.TrimSpace(directive)
|
||||
|
||||
// Handle script-src directive
|
||||
if strings.HasPrefix(trimmed, "script-src") {
|
||||
// If script-src already exists, append localhost if not present
|
||||
if !strings.Contains(trimmed, liveReloadURL) {
|
||||
directives[i] = trimmed + " " + liveReloadURL
|
||||
}
|
||||
scriptSrcFound = true
|
||||
}
|
||||
|
||||
// Handle connect-src directive
|
||||
if strings.HasPrefix(trimmed, "connect-src") {
|
||||
// If connect-src already exists, append WebSocket URL if not present
|
||||
if !strings.Contains(trimmed, liveReloadWS) {
|
||||
directives[i] = trimmed + " " + liveReloadWS
|
||||
}
|
||||
connectSrcFound = true
|
||||
}
|
||||
}
|
||||
|
||||
// If no script-src found, add it with 'self' as default
|
||||
if !scriptSrcFound {
|
||||
directives = append(directives, "script-src 'self' "+liveReloadURL)
|
||||
}
|
||||
|
||||
// If no connect-src found, add it with 'self' as default
|
||||
if !connectSrcFound {
|
||||
directives = append(directives, "connect-src 'self' "+liveReloadWS)
|
||||
}
|
||||
|
||||
// Join directives back together
|
||||
newCSP := strings.Join(directives, ";")
|
||||
|
||||
// Ensure we don't have trailing semicolon if original didn't
|
||||
if !strings.HasSuffix(originalCSP, ";") && strings.HasSuffix(newCSP, ";") {
|
||||
newCSP = strings.TrimSuffix(newCSP, ";")
|
||||
}
|
||||
|
||||
s.SetAttr("content", newCSP)
|
||||
}
|
||||
})
|
||||
|
||||
var buf bytes.Buffer
|
||||
err = goquery.Render(&buf, doc.Selection)
|
||||
if err != nil {
|
||||
return html, err
|
||||
}
|
||||
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
func build(opts options) {
|
||||
esBuildOpts := cfgToESBuildCfg(opts)
|
||||
result := api.Build(esBuildOpts)
|
||||
|
||||
if len(result.Errors) == 0 {
|
||||
triggerReload <- struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
func getGoModuleName(root string) (string, error) {
|
||||
modFile := filepath.Join(root, "go.mod")
|
||||
|
||||
if !fsutils.IsFile(modFile) {
|
||||
return "", fmt.Errorf("go.mod file not found")
|
||||
}
|
||||
|
||||
// Open the go.mod file
|
||||
file, err := os.Open(modFile)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("error opening go.mod: %v", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
// Create a scanner to read the file line by line
|
||||
scanner := bufio.NewScanner(file)
|
||||
|
||||
// Iterate through the lines
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
// Check if the line starts with "module "
|
||||
if strings.HasPrefix(line, "module ") {
|
||||
// Extract the module name
|
||||
moduleName := strings.TrimSpace(strings.TrimPrefix(line, "module "))
|
||||
return moduleName, nil
|
||||
}
|
||||
}
|
||||
|
||||
// Check for errors in scanning
|
||||
if err := scanner.Err(); err != nil {
|
||||
return "", fmt.Errorf("error scanning go.mod: %v", err)
|
||||
}
|
||||
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func findFreePort(from, to int) int {
|
||||
for port := from; port <= to; port++ {
|
||||
if isFreePort(port) {
|
||||
return port
|
||||
}
|
||||
port++
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
func isFreePort(port int) bool {
|
||||
addr, err := net.ResolveTCPAddr("tcp", fmt.Sprintf("localhost:%d", port))
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
l, err := net.ListenTCP("tcp", addr)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
defer l.Close()
|
||||
return true
|
||||
}
|
131
linker.go
Normal file
131
linker.go
Normal file
@ -0,0 +1,131 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/otiai10/copy"
|
||||
"github.com/radovskyb/watcher"
|
||||
"github.com/tidwall/gjson"
|
||||
"github.com/trading-peter/gowebbuild/fsutils"
|
||||
)
|
||||
|
||||
func link(from, to string) chan struct{} {
|
||||
requestBuildCh := make(chan struct{})
|
||||
|
||||
// Load package.json in destination.
|
||||
destPkg := readFileContent(filepath.Join(to, "package.json"))
|
||||
depsRaw := gjson.Get(destPkg, "dependencies").Map()
|
||||
deps := map[string]bool{}
|
||||
for k := range depsRaw {
|
||||
deps[k] = true
|
||||
}
|
||||
|
||||
packages := map[string]string{}
|
||||
packageFiles := fsutils.FindFiles(from, "package.json")
|
||||
|
||||
for i := range packageFiles {
|
||||
content := readFileContent(packageFiles[i])
|
||||
name := gjson.Get(content, "name").String()
|
||||
|
||||
if deps[name] {
|
||||
pp, err := filepath.Abs(filepath.Dir(packageFiles[i]))
|
||||
if err == nil {
|
||||
packages[name] = pp
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Printf("Found %d npm packages to monitor for changes.\n", len(packages))
|
||||
|
||||
go func() {
|
||||
w := watcher.New()
|
||||
w.SetMaxEvents(1)
|
||||
w.FilterOps(watcher.Write, watcher.Rename, watcher.Move, watcher.Create, watcher.Remove)
|
||||
w.IgnoreHiddenFiles(true)
|
||||
|
||||
if err := w.AddRecursive(from); err != nil {
|
||||
fmt.Println(err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case event := <-w.Event:
|
||||
fmt.Printf("File %s changed\n", event.Path)
|
||||
for k, v := range packages {
|
||||
if strings.HasPrefix(event.Path, v) {
|
||||
src := filepath.Dir(event.Path)
|
||||
subPath, _ := filepath.Rel(v, src)
|
||||
dest := filepath.Join(to, "node_modules", k, subPath)
|
||||
fmt.Printf("Copying %s to %s\n", src, dest)
|
||||
err := copy.Copy(src, dest, copy.Options{
|
||||
Skip: func(stat fs.FileInfo, src, dest string) (bool, error) {
|
||||
if !isExcludedPath(src, "node_modules", ".git") && (stat.IsDir() || isIncludedExt(filepath.Base(src), "*.js", "*.ts")) {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
return true, nil
|
||||
},
|
||||
Sync: true,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to copy %s: %v\n", k, err)
|
||||
}
|
||||
|
||||
requestBuildCh <- struct{}{}
|
||||
}
|
||||
}
|
||||
case err := <-w.Error:
|
||||
fmt.Println(err.Error())
|
||||
case <-w.Closed:
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
fmt.Printf("Watching packages in %s\n", from)
|
||||
|
||||
if err := w.Start(time.Millisecond * 100); err != nil {
|
||||
fmt.Println(err.Error())
|
||||
}
|
||||
}()
|
||||
|
||||
return requestBuildCh
|
||||
}
|
||||
|
||||
func isExcludedPath(srcPath string, exPaths ...string) bool {
|
||||
for _, exP := range exPaths {
|
||||
if strings.Contains(srcPath, exP) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isIncludedExt(srcPath string, extensions ...string) bool {
|
||||
for _, ext := range extensions {
|
||||
if ok, _ := filepath.Match(ext, srcPath); ok {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func readFileContent(path string) string {
|
||||
pkgData, err := os.ReadFile(path)
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
return string(pkgData)
|
||||
}
|
394
main.go
394
main.go
@ -1,131 +1,130 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"context"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/evanw/esbuild/pkg/api"
|
||||
"github.com/goyek/goyek"
|
||||
"github.com/jaschaephraim/lrserver"
|
||||
"github.com/otiai10/copy"
|
||||
"github.com/radovskyb/watcher"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
var triggerReload = make(chan struct{})
|
||||
|
||||
type options struct {
|
||||
ESBuild api.BuildOptions
|
||||
Watch struct {
|
||||
Path string
|
||||
Exclude []string
|
||||
}
|
||||
Serve struct {
|
||||
Path string
|
||||
Port int
|
||||
}
|
||||
Copy []struct {
|
||||
Src string
|
||||
Dest string
|
||||
}
|
||||
Replace []struct {
|
||||
Pattern string
|
||||
Search string
|
||||
Replace string
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
flow := &goyek.Flow{}
|
||||
opts := options{}
|
||||
|
||||
cfgPathParam := flow.RegisterStringParam(goyek.StringParam{
|
||||
cfgParam := &cli.StringFlag{
|
||||
Name: "c",
|
||||
Usage: "Path to config file config file.",
|
||||
Default: "./.gowebbuild.json",
|
||||
})
|
||||
|
||||
prodParam := flow.RegisterBoolParam(goyek.BoolParam{
|
||||
Name: "p",
|
||||
Usage: "Use production ready build settings",
|
||||
Default: false,
|
||||
})
|
||||
|
||||
buildOnly := goyek.Task{
|
||||
Name: "build",
|
||||
Usage: "",
|
||||
Params: goyek.Params{cfgPathParam, prodParam},
|
||||
Action: func(tf *goyek.TF) {
|
||||
cfgPath := cfgPathParam.Get(tf)
|
||||
cfgContent, err := os.ReadFile(cfgPath)
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
Value: "./.gowebbuild.yaml",
|
||||
Usage: "path to config file config file.",
|
||||
}
|
||||
|
||||
err = json.Unmarshal(cfgContent, &opts)
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
app := &cli.App{
|
||||
Name: "gowebbuild",
|
||||
Usage: "All in one tool to build web frontend projects.",
|
||||
Version: "7.1.1",
|
||||
Authors: []*cli.Author{{
|
||||
Name: "trading-peter (https://github.com/trading-peter)",
|
||||
}},
|
||||
UsageText: `gowebbuild [global options] command [command options] [arguments...]
|
||||
|
||||
cp(opts)
|
||||
Examples:
|
||||
|
||||
if prodParam.Get(tf) {
|
||||
opts.ESBuild.MinifyIdentifiers = true
|
||||
opts.ESBuild.MinifySyntax = true
|
||||
opts.ESBuild.MinifyWhitespace = true
|
||||
opts.ESBuild.Sourcemap = api.SourceMapNone
|
||||
}
|
||||
Watch project and rebuild if a files changes:
|
||||
$ gowebbuild
|
||||
|
||||
api.Build(opts.ESBuild)
|
||||
replace(opts)
|
||||
Use a different name or path for the config file (working directory is always the location of the config file):
|
||||
$ gowebbuild -c /path/to/gowebbuild.yaml watch
|
||||
|
||||
Production build:
|
||||
$ gowebbuild build -p
|
||||
|
||||
Manually replace a string within some files (not limited to project directory):
|
||||
$ gowebbuild replace *.go foo bar
|
||||
`,
|
||||
Commands: []*cli.Command{
|
||||
{
|
||||
Name: "template",
|
||||
Usage: "execute a template",
|
||||
Flags: []cli.Flag{
|
||||
cfgParam,
|
||||
},
|
||||
Action: tplAction,
|
||||
},
|
||||
}
|
||||
|
||||
watch := goyek.Task{
|
||||
{
|
||||
Name: "npm-proxy",
|
||||
Usage: "proxy npm packages",
|
||||
Flags: []cli.Flag{
|
||||
cfgParam,
|
||||
},
|
||||
Action: proxyAction,
|
||||
},
|
||||
|
||||
{
|
||||
Name: "build",
|
||||
Usage: "build web sources one time and exit",
|
||||
Flags: []cli.Flag{
|
||||
cfgParam,
|
||||
&cli.BoolFlag{
|
||||
Name: "p",
|
||||
Value: false,
|
||||
Usage: "use production ready build settings",
|
||||
},
|
||||
},
|
||||
Action: buildAction,
|
||||
},
|
||||
|
||||
{
|
||||
Name: "watch",
|
||||
Usage: "",
|
||||
Params: goyek.Params{cfgPathParam},
|
||||
Action: func(tf *goyek.TF) {
|
||||
cfgPath := cfgPathParam.Get(tf)
|
||||
cfgContent, err := os.ReadFile(cfgPath)
|
||||
Usage: "watch for changes and trigger the build",
|
||||
Flags: []cli.Flag{
|
||||
cfgParam,
|
||||
&cli.UintFlag{
|
||||
Name: "lr-port",
|
||||
Value: uint(lrserver.DefaultPort),
|
||||
Usage: "port for the live reload server",
|
||||
},
|
||||
},
|
||||
Action: watchAction,
|
||||
},
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
err = json.Unmarshal(cfgContent, &opts)
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
c := make(chan os.Signal, 1)
|
||||
signal.Notify(c, os.Interrupt, syscall.SIGTERM)
|
||||
|
||||
fmt.Println("Starting live reload server")
|
||||
{
|
||||
Name: "serve",
|
||||
Usage: "serve a directory with a simply http server",
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "root",
|
||||
Value: "./",
|
||||
Usage: "folder to serve",
|
||||
},
|
||||
&cli.UintFlag{
|
||||
Name: "port",
|
||||
Value: uint(8080),
|
||||
Usage: "serve directory this on port",
|
||||
},
|
||||
&cli.UintFlag{
|
||||
Name: "lr-port",
|
||||
Value: uint(lrserver.DefaultPort),
|
||||
Usage: "port for the live reload server",
|
||||
},
|
||||
},
|
||||
Action: func(ctx *cli.Context) error {
|
||||
port := ctx.Uint("port")
|
||||
root := ctx.String("root")
|
||||
lrPort := ctx.Uint("lr-port")
|
||||
|
||||
if lrPort != 0 {
|
||||
go func() {
|
||||
w := watcher.New()
|
||||
w.SetMaxEvents(1)
|
||||
w.FilterOps(watcher.Write, watcher.Rename, watcher.Move, watcher.Create, watcher.Remove)
|
||||
|
||||
if len(opts.Watch.Exclude) > 0 {
|
||||
w.Ignore(opts.Watch.Exclude...)
|
||||
}
|
||||
|
||||
if err := w.AddRecursive(opts.Watch.Path); err != nil {
|
||||
if err := w.AddRecursive(root); err != nil {
|
||||
fmt.Println(err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
@ -135,9 +134,7 @@ func main() {
|
||||
select {
|
||||
case event := <-w.Event:
|
||||
fmt.Printf("File %s changed\n", event.Name())
|
||||
cp(opts)
|
||||
build(opts)
|
||||
replace(opts)
|
||||
triggerReload <- struct{}{}
|
||||
case err := <-w.Error:
|
||||
fmt.Println(err.Error())
|
||||
case <-w.Closed:
|
||||
@ -146,19 +143,13 @@ func main() {
|
||||
}
|
||||
}()
|
||||
|
||||
fmt.Printf("Watching %d elements in %s\n", len(w.WatchedFiles()), opts.Watch.Path)
|
||||
|
||||
cp(opts)
|
||||
build(opts)
|
||||
replace(opts)
|
||||
|
||||
if err := w.Start(time.Millisecond * 100); err != nil {
|
||||
fmt.Println(err.Error())
|
||||
}
|
||||
}()
|
||||
|
||||
go func() {
|
||||
lr := lrserver.New(lrserver.DefaultName, lrserver.DefaultPort)
|
||||
lr := lrserver.New(lrserver.DefaultName, uint16(lrPort))
|
||||
|
||||
go func() {
|
||||
for {
|
||||
@ -173,136 +164,83 @@ func main() {
|
||||
panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
if opts.Serve.Path != "" {
|
||||
go func() {
|
||||
port := 8888
|
||||
if opts.Serve.Port != 0 {
|
||||
port = opts.Serve.Port
|
||||
}
|
||||
|
||||
http.Handle("/", http.FileServer(http.Dir(opts.Serve.Path)))
|
||||
|
||||
fmt.Printf("Serving contents of %s at :%d\n", opts.Serve.Path, port)
|
||||
err := http.ListenAndServe(fmt.Sprintf(":%d", port), nil)
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
<-c
|
||||
fmt.Println("\nExit")
|
||||
os.Exit(0)
|
||||
return Serve(root, port)
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
flow.DefaultTask = flow.Register(watch)
|
||||
flow.Register(buildOnly)
|
||||
flow.Main()
|
||||
}
|
||||
|
||||
func cp(opts options) {
|
||||
if len(opts.Copy) == 0 {
|
||||
fmt.Println("Nothing to copy")
|
||||
return
|
||||
}
|
||||
for _, op := range opts.Copy {
|
||||
paths, err := filepath.Glob(op.Src)
|
||||
if err != nil {
|
||||
fmt.Printf("Invalid glob pattern: %s\n", op.Src)
|
||||
continue
|
||||
}
|
||||
|
||||
destIsDir := isDir(op.Dest)
|
||||
for _, p := range paths {
|
||||
d := op.Dest
|
||||
|
||||
if destIsDir && isFile(p) {
|
||||
d = filepath.Join(d, filepath.Base(p))
|
||||
}
|
||||
err := copy.Copy(p, d)
|
||||
fmt.Printf("Copying %s to %s\n", p, d)
|
||||
{
|
||||
Name: "download",
|
||||
Usage: "execute downloads as configured",
|
||||
Flags: []cli.Flag{
|
||||
cfgParam,
|
||||
},
|
||||
Action: func(ctx *cli.Context) error {
|
||||
cfgPath, err := filepath.Abs(ctx.String("c"))
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to copy %s: %v\n", p, err)
|
||||
continue
|
||||
return err
|
||||
}
|
||||
|
||||
os.Chdir(filepath.Dir(cfgPath))
|
||||
opts := readCfg(cfgPath)
|
||||
|
||||
for i := range opts {
|
||||
download(opts[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func replace(opts options) {
|
||||
if len(opts.Replace) == 0 {
|
||||
fmt.Println("Nothing to replace")
|
||||
return
|
||||
}
|
||||
for _, op := range opts.Replace {
|
||||
paths, err := filepath.Glob(op.Pattern)
|
||||
if err != nil {
|
||||
fmt.Printf("Invalid glob pattern: %s\n", op.Pattern)
|
||||
continue
|
||||
}
|
||||
|
||||
for _, p := range paths {
|
||||
if !isFile(p) {
|
||||
continue
|
||||
}
|
||||
|
||||
read, err := ioutil.ReadFile(p)
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
r := op.Replace
|
||||
if strings.HasPrefix(op.Replace, "$") {
|
||||
r = os.ExpandEnv(op.Replace)
|
||||
}
|
||||
|
||||
count := strings.Count(string(read), op.Search)
|
||||
|
||||
if count > 0 {
|
||||
fmt.Printf("Replacing %d occurrences of '%s' with '%s' in %s\n", count, op.Search, r, p)
|
||||
newContents := strings.ReplaceAll(string(read), op.Search, r)
|
||||
err = ioutil.WriteFile(p, []byte(newContents), 0)
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("%+v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func isFile(path string) bool {
|
||||
stat, err := os.Stat(path)
|
||||
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
return false
|
||||
}
|
||||
|
||||
return !stat.IsDir()
|
||||
}
|
||||
|
||||
func isDir(path string) bool {
|
||||
stat, err := os.Stat(path)
|
||||
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
os.MkdirAll(path, 0755)
|
||||
return true
|
||||
}
|
||||
|
||||
return err == nil && stat.IsDir()
|
||||
}
|
||||
|
||||
func build(opts options) {
|
||||
result := api.Build(opts.ESBuild)
|
||||
|
||||
if len(result.Errors) == 0 {
|
||||
triggerReload <- struct{}{}
|
||||
return nil
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
Name: "replace",
|
||||
ArgsUsage: "[glob file pattern] [search] [replace]",
|
||||
Usage: "replace text in files",
|
||||
Action: func(ctx *cli.Context) error {
|
||||
files := ctx.Args().Get(0)
|
||||
searchStr := ctx.Args().Get(1)
|
||||
replaceStr := ctx.Args().Get(2)
|
||||
|
||||
if files == "" {
|
||||
return fmt.Errorf("invalid file pattern")
|
||||
}
|
||||
|
||||
if searchStr == "" {
|
||||
return fmt.Errorf("invalid search string")
|
||||
}
|
||||
|
||||
replace(options{
|
||||
Replace: []struct {
|
||||
Pattern string `yaml:"pattern"`
|
||||
Search string `yaml:"search"`
|
||||
Replace string `yaml:"replace"`
|
||||
}{
|
||||
{
|
||||
Pattern: files,
|
||||
Search: searchStr,
|
||||
Replace: replaceStr,
|
||||
},
|
||||
},
|
||||
})
|
||||
return nil
|
||||
},
|
||||
},
|
||||
},
|
||||
DefaultCommand: "watch",
|
||||
}
|
||||
|
||||
sigChan := make(chan os.Signal, 1)
|
||||
signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM)
|
||||
appCtx, cancel := context.WithCancel(context.Background())
|
||||
|
||||
go func() {
|
||||
<-sigChan
|
||||
cancel()
|
||||
fmt.Println("Received interrupt, shutting down...")
|
||||
}()
|
||||
|
||||
if err := app.RunContext(appCtx, os.Args); err != nil {
|
||||
fmt.Println(err)
|
||||
}
|
||||
}
|
||||
|
87
npmproxy.go
Normal file
87
npmproxy.go
Normal file
@ -0,0 +1,87 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/trading-peter/gowebbuild/npmproxy"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func proxyAction(ctx *cli.Context) error {
|
||||
cfgPath, err := filepath.Abs(ctx.String("c"))
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
projectDir := filepath.Dir(cfgPath)
|
||||
os.Chdir(projectDir)
|
||||
opts := readCfg(cfgPath)
|
||||
|
||||
return runProxy(ctx.Context, projectDir, opts)
|
||||
}
|
||||
|
||||
func runProxy(ctx context.Context, projectDir string, opts []options) error {
|
||||
overrides := []NpmProxyOverride{}
|
||||
|
||||
for _, o := range opts {
|
||||
overrides = append(overrides, o.NpmProxy.Overrides...)
|
||||
}
|
||||
|
||||
if len(overrides) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
fmt.Printf("Found %d npm overrides. Starting proxy server.\n", len(overrides))
|
||||
|
||||
// if fs.IsFile(filepath.Join(projectDir, ".npmrc")) {
|
||||
// return fmt.Errorf(".npmrc file already exists in project root.")
|
||||
// }
|
||||
|
||||
freePort := findFreePort(10000, 20000)
|
||||
freePortInternal := findFreePort(20001, 30000)
|
||||
|
||||
if freePort == -1 || freePortInternal == -1 {
|
||||
return fmt.Errorf("Failed to find free ports for proxy setup.")
|
||||
}
|
||||
|
||||
list := []npmproxy.Override{}
|
||||
npmrcRules := []string{
|
||||
";CREATED BY GOWEBBUILD. DO NOT EDIT.",
|
||||
";This file is used by the npm proxy server.",
|
||||
";It is used to override the default registry for specific package namespaces.",
|
||||
";This file will be removed after the proxy server is stopped.",
|
||||
}
|
||||
|
||||
for _, o := range overrides {
|
||||
list = append(list, npmproxy.Override{
|
||||
Namespace: o.Namespace,
|
||||
Upstream: o.Upstream,
|
||||
PackageRoot: o.PackageRoot,
|
||||
})
|
||||
|
||||
npmrcRules = append(npmrcRules, fmt.Sprintf("%s:registry=http://localhost:%d", o.Namespace, freePort))
|
||||
}
|
||||
|
||||
err := os.WriteFile(filepath.Join(projectDir, ".npmrc"), []byte(strings.Join(npmrcRules, "\n")), 0644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer os.Remove(filepath.Join(projectDir, ".npmrc"))
|
||||
|
||||
proxy := npmproxy.New(
|
||||
list,
|
||||
projectDir,
|
||||
npmproxy.WithPort(freePort),
|
||||
npmproxy.WithInternalPort(freePortInternal),
|
||||
)
|
||||
|
||||
proxy.Start(ctx)
|
||||
fmt.Println("Stopped npm proxy server")
|
||||
return nil
|
||||
}
|
92
npmproxy/externalProxy.go
Normal file
92
npmproxy/externalProxy.go
Normal file
@ -0,0 +1,92 @@
|
||||
package npmproxy
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httputil"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/kataras/golog"
|
||||
)
|
||||
|
||||
func (p *Proxy) externalHTTPServer(ctx context.Context) {
|
||||
mux := http.NewServeMux()
|
||||
|
||||
srv := &http.Server{
|
||||
Addr: p.externalProxyHost,
|
||||
Handler: mux,
|
||||
}
|
||||
|
||||
go func() {
|
||||
<-ctx.Done()
|
||||
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
|
||||
if err := srv.Shutdown(shutdownCtx); err != nil {
|
||||
fmt.Printf("Failed to shutdown proxy server: %v\n", err)
|
||||
}
|
||||
}()
|
||||
|
||||
mux.HandleFunc("/", p.incomingNpmRequestHandler)
|
||||
|
||||
if err := srv.ListenAndServe(); err != nil {
|
||||
if err != http.ErrServerClosed {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Receives incoming requests from the npm cli and decides based on override rules what server it should forwarded to.
|
||||
func (p *Proxy) incomingNpmRequestHandler(res http.ResponseWriter, req *http.Request) {
|
||||
golog.Infof("Incoming NPM request for %s", req.URL.Path)
|
||||
pkgPath := strings.TrimLeft(req.URL.Path, "/")
|
||||
|
||||
// If no matching override is found, we forward the request to the default registry.
|
||||
_, ok := p.matchingOverride(pkgPath)
|
||||
if !ok {
|
||||
serveReverseProxy(p.DefaultRegistry, res, req)
|
||||
return
|
||||
}
|
||||
|
||||
// Process the override by forwarding the request to the internal proxy.
|
||||
|
||||
serveReverseProxy(p.internalProxyUrl, res, req)
|
||||
|
||||
// golog.Infof("Received request for url: %v", proxyUrl)
|
||||
}
|
||||
|
||||
type ResponseWriterWrapper struct {
|
||||
http.ResponseWriter
|
||||
Body *bytes.Buffer
|
||||
}
|
||||
|
||||
func (rw *ResponseWriterWrapper) Write(b []byte) (int, error) {
|
||||
rw.Body.Write(b) // Capture the response body
|
||||
return rw.ResponseWriter.Write(b) // Send the response to the original writer
|
||||
}
|
||||
|
||||
func serveReverseProxy(target string, res http.ResponseWriter, req *http.Request) {
|
||||
// parse the OriginalUrl
|
||||
OriginalUrl, _ := url.Parse(target)
|
||||
|
||||
// create the reverse proxy
|
||||
proxy := httputil.NewSingleHostReverseProxy(OriginalUrl)
|
||||
|
||||
// Update the headers to allow for SSL redirection
|
||||
req.URL.Host = OriginalUrl.Host
|
||||
req.URL.Scheme = OriginalUrl.Scheme
|
||||
req.Header.Set("X-Forwarded-Host", req.Header.Get("Host"))
|
||||
req.Host = OriginalUrl.Host
|
||||
|
||||
wrappedRes := &ResponseWriterWrapper{ResponseWriter: res, Body: new(bytes.Buffer)}
|
||||
|
||||
// Note that ServeHttp is non blocking and uses a go routine under the hood
|
||||
proxy.ServeHTTP(wrappedRes, req)
|
||||
|
||||
// Print the captured response body
|
||||
fmt.Println("Response body:", wrappedRes.Body.String())
|
||||
}
|
67
npmproxy/internalProxy.go
Normal file
67
npmproxy/internalProxy.go
Normal file
@ -0,0 +1,67 @@
|
||||
package npmproxy
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/trading-peter/gowebbuild/fsutils"
|
||||
)
|
||||
|
||||
func (p *Proxy) internalHTTPServer(ctx context.Context) {
|
||||
mux := http.NewServeMux()
|
||||
|
||||
srv := &http.Server{
|
||||
Addr: p.internalProxyHost,
|
||||
Handler: mux,
|
||||
}
|
||||
|
||||
go func() {
|
||||
<-ctx.Done()
|
||||
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
|
||||
if err := srv.Shutdown(shutdownCtx); err != nil {
|
||||
fmt.Printf("Failed to shutdown internal server for npm proxy: %v\n", err)
|
||||
}
|
||||
}()
|
||||
|
||||
mux.HandleFunc("GET /{pkg}", func(w http.ResponseWriter, r *http.Request) {
|
||||
pkgName := r.PathValue("pkg")
|
||||
override, ok := p.matchingOverride(pkgName)
|
||||
|
||||
if !ok {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
pkg, err := p.findPackageSource(override, pkgName)
|
||||
if err != nil {
|
||||
serveReverseProxy(override.Upstream, w, r)
|
||||
return
|
||||
}
|
||||
|
||||
json.NewEncoder(w).Encode(pkg)
|
||||
})
|
||||
|
||||
mux.HandleFunc("GET /files/{file}", func(w http.ResponseWriter, r *http.Request) {
|
||||
fileName := r.PathValue("file")
|
||||
filePath := filepath.Join(p.pkgCachePath, fileName)
|
||||
|
||||
if !fsutils.IsFile(filePath) {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
http.ServeFile(w, r, filePath)
|
||||
})
|
||||
|
||||
if err := srv.ListenAndServe(); err != nil {
|
||||
if err != http.ErrServerClosed {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
}
|
95
npmproxy/proxy.go
Normal file
95
npmproxy/proxy.go
Normal file
@ -0,0 +1,95 @@
|
||||
package npmproxy
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Override struct {
|
||||
Namespace string
|
||||
Upstream string
|
||||
PackageRoot string
|
||||
}
|
||||
|
||||
type Proxy struct {
|
||||
ProjectRoot string
|
||||
Port int
|
||||
InternalPort int
|
||||
DefaultRegistry string
|
||||
Overrides []Override
|
||||
pkgCachePath string
|
||||
externalProxyHost string
|
||||
internalProxyHost string
|
||||
internalProxyUrl string
|
||||
}
|
||||
|
||||
type ProxyOption func(*Proxy)
|
||||
|
||||
func WithPort(port int) ProxyOption {
|
||||
return func(p *Proxy) {
|
||||
p.Port = port
|
||||
}
|
||||
}
|
||||
|
||||
func WithInternalPort(port int) ProxyOption {
|
||||
return func(p *Proxy) {
|
||||
p.InternalPort = port
|
||||
}
|
||||
}
|
||||
|
||||
func WithPkgCachePath(path string) ProxyOption {
|
||||
return func(p *Proxy) {
|
||||
p.pkgCachePath = path
|
||||
}
|
||||
}
|
||||
|
||||
func WithDefaultRegistry(registry string) ProxyOption {
|
||||
return func(p *Proxy) {
|
||||
p.DefaultRegistry = strings.TrimSuffix(registry, "/")
|
||||
}
|
||||
}
|
||||
|
||||
func New(overrides []Override, projectRoot string, options ...ProxyOption) *Proxy {
|
||||
p := &Proxy{
|
||||
ProjectRoot: projectRoot,
|
||||
Port: 1234,
|
||||
InternalPort: 1235,
|
||||
DefaultRegistry: "https://registry.npmjs.org",
|
||||
Overrides: overrides,
|
||||
}
|
||||
|
||||
for _, option := range options {
|
||||
option(p)
|
||||
}
|
||||
|
||||
if p.pkgCachePath == "" {
|
||||
homeDir, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
homeDir = "."
|
||||
}
|
||||
p.pkgCachePath = filepath.Join(homeDir, ".gowebbuild", "proxy", "cache")
|
||||
}
|
||||
|
||||
p.externalProxyHost = fmt.Sprintf("127.0.0.1:%d", p.Port)
|
||||
p.internalProxyHost = fmt.Sprintf("127.0.0.1:%d", p.InternalPort)
|
||||
p.internalProxyUrl = fmt.Sprintf("http://%s", p.internalProxyHost)
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
func (p *Proxy) Start(ctx context.Context) {
|
||||
go p.internalHTTPServer(ctx)
|
||||
p.externalHTTPServer(ctx)
|
||||
}
|
||||
|
||||
func (p *Proxy) matchingOverride(path string) (*Override, bool) {
|
||||
for _, o := range p.Overrides {
|
||||
if strings.HasPrefix(path, o.Namespace) {
|
||||
return &o, true
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
188
npmproxy/source.go
Normal file
188
npmproxy/source.go
Normal file
@ -0,0 +1,188 @@
|
||||
package npmproxy

import (
	"context"
	"crypto/sha512"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"regexp"
	"strings"

	"github.com/Masterminds/semver/v3"
	"github.com/kataras/golog"
	"github.com/mholt/archives"
	"github.com/trading-peter/gowebbuild/fsutils"
)

func (p *Proxy) readPackageJson(pkgPath string) (PackageJson, error) {
	pkgFile := filepath.Join(pkgPath, "package.json")

	if !fsutils.IsFile(pkgFile) {
		return PackageJson{}, fmt.Errorf("package.json not found in %s", pkgPath)
	}

	pkgData, err := os.ReadFile(pkgFile)
	if err != nil {
		return PackageJson{}, err
	}

	pkg := PackageJson{}
	err = json.Unmarshal(pkgData, &pkg)
	if err != nil {
		return PackageJson{}, err
	}

	return pkg, nil
}

func (p *Proxy) findDependencyVersionConstraint(projectPkg PackageJson, pkgName string) (*semver.Constraints, error) {
	if verStr, ok := projectPkg.Dependencies[pkgName]; ok {
		return semver.NewConstraint(verStr)
	}

	return nil, fmt.Errorf("package %s not found in project dependencies", pkgName)
}

func (p *Proxy) findPackageSource(override *Override, pkgName string) (*Package, error) {
	pkgNameParts := strings.Split(pkgName, "/")
	pkgPath := filepath.Join(override.PackageRoot, pkgNameParts[len(pkgNameParts)-1])

	// Read the project's package.json and figure out the requested semver version,
	// which will usually be a constraint to assert against (like "^1.2.3").
	projectPkg, err := p.readPackageJson(p.ProjectRoot)
	if err != nil {
		return nil, err
	}

	reqVersion, err := p.findDependencyVersionConstraint(projectPkg, pkgName)
	if err != nil {
		return nil, err
	}

	pkg, err := p.readPackageJson(pkgPath)
	if err != nil {
		return nil, err
	}

	pkgVersion, err := semver.NewVersion(pkg.Version)
	if err != nil {
		return nil, err
	}

	if !reqVersion.Check(pkgVersion) {
		golog.Infof("Version %s in package sources for %s does not meet the version constraint (%s) of the project. Forwarding request to upstream registry.", pkgVersion, pkgName, reqVersion)
		return nil, nil
	}

	pkgArchive, err := p.createPackage(pkgPath, pkg)
	if err != nil {
		return nil, err
	}

	integrity, shasum, err := p.createHashes(pkgArchive)
	if err != nil {
		return nil, err
	}

	return &Package{
		ID:   pkg.Name,
		Name: pkg.Name,
		DistTags: DistTags{
			Latest: pkg.Version,
		},
		Versions: map[string]Version{
			pkg.Version: {
				ID:           pkg.Name,
				Name:         pkg.Name,
				Version:      pkg.Version,
				Dependencies: pkg.Dependencies,
				Dist: Dist{
					Integrity: integrity,
					Shasum:    shasum,
					Tarball:   fmt.Sprintf("%s/files/%s", p.internalProxyUrl, filepath.Base(pkgArchive)),
				},
			},
		},
	}, nil
}

func (p *Proxy) createPackage(pkgPath string, pkg PackageJson) (string, error) {
	err := os.MkdirAll(p.pkgCachePath, 0755)
	if err != nil {
		return "", err
	}

	pkgArchive := filepath.Join(p.pkgCachePath, sanitizePkgName(pkg.Name, pkg.Version)+".tar")

	files, err := archives.FilesFromDisk(context.TODO(), nil, map[string]string{
		pkgPath: ".",
	})

	if err != nil {
		return "", err
	}

	filesFiltered := []archives.FileInfo{}

	// Skip paths that start with "node_modules" or ".git" (the dot is escaped
	// and the alternation grouped so the anchor applies to both).
	filterRegex := regexp.MustCompile(`^(node_modules|\.git)`)

	for _, file := range files {
		if filterRegex.MatchString(file.NameInArchive) {
			continue
		}
		filesFiltered = append(filesFiltered, file)
	}

	out, err := os.Create(pkgArchive)
	if err != nil {
		return "", err
	}
	defer out.Close()

	format := archives.CompressedArchive{
		Archival: archives.Tar{},
	}

	err = format.Archive(context.Background(), out, filesFiltered)
	if err != nil {
		return "", err
	}

	return pkgArchive, nil
}

func (p *Proxy) createHashes(pkgArchive string) (string, string, error) {
	// Open the file
	file, err := os.Open(pkgArchive)
	if err != nil {
		return "", "", err
	}
	defer file.Close()

	// Create a new SHA-512 hash
	hash := sha512.New()

	// Copy the file data into the hash
	if _, err := io.Copy(hash, file); err != nil {
		return "", "", err
	}

	// Get the hash sum
	hashSum := hash.Sum(nil)

	// Generate the integrity string (SHA-512, base64-encoded)
	integrity := "sha512-" + base64.StdEncoding.EncodeToString(hashSum)

	// Generate the shasum (hexadecimal representation)
	shasum := fmt.Sprintf("%x", hashSum)

	return integrity, shasum, nil
}

// Replace all characters of the package name and version that are not
// safe in a file name with underscores, and strip the scope marker.
func sanitizePkgName(pkgName string, version string) string {
	pkgName = strings.ReplaceAll(pkgName, "@", "")
	pkgName = strings.ReplaceAll(pkgName, "/", "_")
	version = strings.ReplaceAll(version, ".", "_")
	return fmt.Sprintf("%s_%s", pkgName, version)
}
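For reference, `integrity` and `shasum` here are two encodings of the same SHA-512 digest. A self-contained sketch of the format `createHashes` produces, using made-up input bytes instead of a tarball on disk:

```go
package main

import (
	"crypto/sha512"
	"encoding/base64"
	"fmt"
)

func main() {
	// Stand-in for the archive contents streamed via io.Copy above.
	data := []byte("example tarball bytes")
	sum := sha512.Sum512(data)

	// npm's "integrity" field is an SRI string: algorithm prefix + base64 digest.
	fmt.Println("sha512-" + base64.StdEncoding.EncodeToString(sum[:]))

	// The "shasum" field is the hex encoding of the same digest.
	fmt.Printf("%x\n", sum)
}
```

The public npm registry historically publishes `shasum` as a SHA-1 digest; this proxy derives both fields from SHA-512, mirroring the code above.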
51
npmproxy/types.go
Normal file
@ -0,0 +1,51 @@
package npmproxy

type PackageJson struct {
	Name         string            `json:"name"`
	Version      string            `json:"version"`
	Dependencies map[string]string `json:"dependencies"`
}

type Package struct {
	ID          string             `json:"_id"`
	Name        string             `json:"name"`
	Description string             `json:"description"`
	DistTags    DistTags           `json:"dist-tags"`
	Versions    map[string]Version `json:"versions"`
	Readme      string             `json:"readme"`
	Repository  Repository         `json:"repository"`
	Author      Author             `json:"author"`
	License     string             `json:"license"`
}

type DistTags struct {
	Latest string `json:"latest"`
}

type Author struct {
	Name string `json:"name"`
}

type Repository struct {
	Type string `json:"type"`
	URL  string `json:"url"`
}

type Dist struct {
	Integrity string `json:"integrity"`
	Shasum    string `json:"shasum"`
	Tarball   string `json:"tarball"`
}

type Version struct {
	ID           string            `json:"_id"`
	Name         string            `json:"name"`
	Version      string            `json:"version"`
	Description  string            `json:"description"`
	Author       Author            `json:"author"`
	License      string            `json:"license"`
	Repository   Repository        `json:"repository"`
	Dependencies map[string]string `json:"dependencies"`
	Readme       string            `json:"readme"`
	Dist         Dist              `json:"dist"`
}
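These types mirror the packument document an npm client requests for a package. A minimal sketch of building and encoding one (all values are made up; assumes it runs inside the npmproxy package with "encoding/json" and "os" imported):

```go
pkg := Package{
	ID:       "@acme/widgets",
	Name:     "@acme/widgets",
	DistTags: DistTags{Latest: "1.2.3"},
	Versions: map[string]Version{
		"1.2.3": {
			ID:      "@acme/widgets",
			Name:    "@acme/widgets",
			Version: "1.2.3",
			Dist: Dist{
				Integrity: "sha512-...",
				Shasum:    "0a1b...",
				Tarball:   "http://127.0.0.1:1235/files/acme_widgets_1_2_3.tar",
			},
		},
	},
}

// Encode what the registry endpoint would return for GET /@acme/widgets.
enc := json.NewEncoder(os.Stdout)
enc.SetIndent("", "  ")
enc.Encode(pkg)
```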
@ -1,6 +1,6 @@
 {
   "Watch": {
-    "Path": "./frontend/src",
+    "Paths": [ "./frontend/src" ],
     "Exclude": [ "./dist" ]
   },
   "Copy": [
@ -45,5 +45,8 @@
     "Bundle": true,
     "Write": true,
     "LogLevel": 3
   },
+  "ProductionBuildOptions": {
+    "CmdPostBuild": "my-build-script.sh"
+  }
 }
22
serve.go
Normal file
@ -0,0 +1,22 @@
package main

import (
	"fmt"

	"github.com/kataras/iris/v12"
)

func Serve(root string, port uint) error {
	app := iris.New()
	app.HandleDir("/", iris.Dir(root), iris.DirOptions{
		IndexName:  "/index.html",
		Compress:   false,
		ShowList:   true,
		ShowHidden: true,
		Cache: iris.DirCacheOptions{
			Enable: false,
		},
	})

	return app.Listen(fmt.Sprintf(":%d", port))
}
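A usage sketch of the helper above; the directory and port are illustrative, and the standard "log" package is assumed to be imported:

```go
// Serve the build output on http://localhost:8080 with directory listings.
if err := Serve("./frontend-dist", 8080); err != nil {
	log.Fatal(err)
}
```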
113
templates.go
Normal file
@ -0,0 +1,113 @@
package main

import (
	_ "embed"
	"fmt"
	"os"
	"path/filepath"
	"runtime"
	"text/template"

	"github.com/Iilun/survey/v2"
	"github.com/kataras/golog"
	"github.com/trading-peter/gowebbuild/fsutils"
	"github.com/urfave/cli/v2"
)

//go:embed templates/tpl.gowebbuild.yaml
var sampleConfig string

//go:embed templates/docker_image.sh
var dockerImage string

//go:embed templates/Dockerfile
var dockerFile string

//go:embed templates/.air.toml
var airToml string

//go:embed templates/.air.win.toml
var airWinToml string

var qs = []*survey.Question{
	{
		Name: "tpl",
		Prompt: &survey.Select{
			Message: "Choose a template:",
			Options: []string{".air.toml", ".gowebbuild.yaml", "docker_image.sh", "Dockerfile"},
			Default: "docker_image.sh",
		},
	},
}

func tplAction(ctx *cli.Context) error {
	cfgPath, err := filepath.Abs(ctx.String("c"))

	if err != nil {
		return err
	}

	os.Chdir(filepath.Dir(cfgPath))

	answers := struct {
		Template string `survey:"tpl"`
	}{}

	err = survey.Ask(qs, &answers)
	if err != nil {
		return err
	}

	var tpl string
	var fileName string

	switch answers.Template {
	case ".gowebbuild.yaml":
		tpl = sampleConfig
		fileName = ".gowebbuild.yaml"
	case "docker_image.sh":
		tpl = dockerImage
		fileName = "docker_image.sh"
	case "Dockerfile":
		tpl = dockerFile
		fileName = "Dockerfile"
	case ".air.toml":
		tpl = airToml
		if runtime.GOOS == "windows" {
			tpl = airWinToml
		}
		fileName = ".air.toml"
	default:
		golog.Fatal("Invalid template")
	}

	if fsutils.IsFile(fileName) {
		fmt.Printf("File \"%s\" already exists.\n", fileName)
		return nil
	}

	outFile, err := os.Create(fileName)
	if err != nil {
		return err
	}

	defer outFile.Close()

	context := map[string]string{
		"ProjectFolderName": filepath.Base(filepath.Dir(cfgPath)),
	}

	if moduleName, err := getGoModuleName(filepath.Dir(cfgPath)); err == nil {
		context["GoModuleName"] = moduleName
	}

	err = template.Must(template.New("tpl").Parse(tpl)).Execute(outFile, context)

	if err != nil {
		return err
	}

	fmt.Printf("Created \"%s\" in project root.\n", fileName)

	return nil
}
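The embedded templates below are rendered with Go's text/template against the `context` map built above. A minimal sketch of the placeholder substitution (the template string and folder name are made up; assumes "os" and "text/template" are imported):

```go
tpl := `ENTRYPOINT ["./{{.ProjectFolderName}}"]`
data := map[string]string{"ProjectFolderName": "myapp"}
template.Must(template.New("t").Parse(tpl)).Execute(os.Stdout, data)
// Prints: ENTRYPOINT ["./myapp"]
```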
47
templates/.air.toml
Normal file
@ -0,0 +1,47 @@
# Config file for [Air](https://github.com/cosmtrek/air) in TOML format

# Working directory
# . or absolute path; note that the directories listed below must be under root.
root = "."
tmp_dir = "tmp"

[build]
# Just plain old shell command. You could use `make` as well.
cmd = "go build -o ./tmp/main ./main.go"
# Binary file produced by `cmd`.
bin = "tmp/main"
# Customize binary.
full_bin = "./tmp/main run sample.json"
# Watch these filename extensions.
include_ext = ["go", "tpl", "tmpl", "html"]
# Ignore these filename extensions or directories.
exclude_dir = ["frontend-dist", "_mongo", "_db", "dist", "frontend", "web"]
# Watch these directories if specified.
include_dir = []
# Exclude files.
exclude_file = []
# This log file is placed in your tmp_dir.
log = "air.log"
# It's not necessary to trigger a build on every file change if changes are too frequent.
delay = 1000 # ms
# Stop running the old binary when build errors occur.
stop_on_error = true
# Send an interrupt signal before killing the process (Windows does not support this feature).
send_interrupt = false
# Delay after sending the interrupt signal
kill_delay = 500 # ms

[log]
# Show log time
time = false

[color]
# Customize each part's color. If no color is found, use the raw app log.
main = "magenta"
watcher = "cyan"
build = "yellow"
runner = "green"

[misc]
# Delete tmp directory on exit
clean_on_exit = true
45
templates/.air.win.toml
Executable file
@ -0,0 +1,45 @@
# Config file for [Air](https://github.com/cosmtrek/air) in TOML format

# Working directory
# . or absolute path; note that the directories listed below must be under root.
root = "."
tmp_dir = "tmp"

[build]
# Just plain old shell command. You could use `make` as well.
cmd = "go build -o ./tmp/main.exe ./main.go"
# Binary file produced by `cmd`.
bin = "tmp/main.exe"
# Watch these filename extensions.
include_ext = ["go", "tpl", "tmpl", "html"]
# Ignore these filename extensions or directories.
exclude_dir = ["frontend-dist", "_mongo", "_db", "dist", "frontend", "web"]
# Watch these directories if specified.
include_dir = ["backend"]
# Exclude files.
exclude_file = []
# This log file is placed in your tmp_dir.
log = "air.log"
# It's not necessary to trigger a build on every file change if changes are too frequent.
delay = 1000 # ms
# Stop running the old binary when build errors occur.
stop_on_error = true
# Send an interrupt signal before killing the process (Windows does not support this feature).
send_interrupt = false
# Delay after sending the interrupt signal
kill_delay = 500 # ms

[log]
# Show log time
time = false

[color]
# Customize each part's color. If no color is found, use the raw app log.
main = "magenta"
watcher = "cyan"
build = "yellow"
runner = "green"

[misc]
# Delete tmp directory on exit
clean_on_exit = true
22
templates/Dockerfile
Normal file
@ -0,0 +1,22 @@
FROM alpine

RUN apk update --no-cache && apk add --no-cache ca-certificates tzdata

# Set timezone if necessary
#ENV TZ UTC
ENV USER=gouser
ENV UID=10001

RUN adduser \
    --disabled-password \
    --gecos "" \
    --shell "/sbin/nologin" \
    --no-create-home \
    --uid "${UID}" \
    "${USER}"

ADD {{.ProjectFolderName}} /app/{{.ProjectFolderName}}
WORKDIR /app
USER gouser:gouser

ENTRYPOINT ["./{{.ProjectFolderName}}"]
19
templates/docker_image.sh
Executable file
@ -0,0 +1,19 @@
#!/bin/bash
mkdir -p _build
cd _build
mkdir -p docker_out
rm -rf sources
git clone $(git remote get-url origin) sources
cd sources
git fetch --tags
ver=$(git describe --tags $(git rev-list --tags --max-count=1))
git checkout $ver

CGO_ENABLED=0 go build -ldflags="-s -w" -o ../{{.ProjectFolderName}} .

# The image is built from the parent directory so the compiled binary is in the build context.
cd ..
docker build -f sources/Dockerfile --no-cache -t {{.GoModuleName}}:${ver} -t {{.GoModuleName}}:latest .
docker push {{.GoModuleName}}:${ver}
docker push {{.GoModuleName}}:latest
rm -rf sources {{.ProjectFolderName}}
35
templates/tpl.gowebbuild.yaml
Normal file
@ -0,0 +1,35 @@
- esbuild:
    entryPoints:
      - frontend/the-app.js
    outdir: ./frontend-dist
    sourcemap: 1
    format: 3
    splitting: true
    platform: 0
    bundle: true
    write: true
    logLevel: 3
    purgeBeforeBuild: false
  watch:
    paths:
      - ./frontend/src
    exclude: []
  injectLiveReload: ./frontend-dist/index.html
  # serve: # Uncomment and set a path to enable
  #   path: ""
  #   port: 8080
  copy:
    - src: ./frontend/index.html
      dest: ./frontend-dist
  # download:
  #   - url: https://example.com/some-file-or-asset.js
  #     dest: ./frontend/src/vendor/some-file-or-asset.js
  # replace:
  #   - pattern: "*.go|*.js|*.html"
  #     search: "Something"
  #     replace: "This"
  # link:
  #   from: ../../web/tp-elements
  #   to: ./frontend
  # productionBuildOptions:
  #   cmdPostBuild: ""
21
vendor/github.com/evanw/esbuild/LICENSE.md
generated
vendored
@ -1,21 +0,0 @@
7
vendor/github.com/evanw/esbuild/internal/api_helpers/use_timer.go
generated
vendored
@ -1,7 +0,0 @@
141
vendor/github.com/evanw/esbuild/internal/ast/ast.go
generated
vendored
@ -1,141 +0,0 @@
2373
vendor/github.com/evanw/esbuild/internal/bundler/bundler.go
generated
vendored
File diff suppressed because it is too large
132
vendor/github.com/evanw/esbuild/internal/bundler/debug.go
generated
vendored
@ -1,132 +0,0 @@
5581
vendor/github.com/evanw/esbuild/internal/bundler/linker.go
generated
vendored
File diff suppressed because it is too large
101
vendor/github.com/evanw/esbuild/internal/cache/cache.go
generated
vendored
@ -1,101 +0,0 @@
190
vendor/github.com/evanw/esbuild/internal/cache/cache_ast.go
generated
vendored
@ -1,190 +0,0 @@
52
vendor/github.com/evanw/esbuild/internal/cache/cache_fs.go
generated
vendored
@ -1,52 +0,0 @@
45
vendor/github.com/evanw/esbuild/internal/compat/compat.go
generated
vendored
@ -1,45 +0,0 @@
72
vendor/github.com/evanw/esbuild/internal/compat/css_table.go
generated
vendored
@ -1,72 +0,0 @@
465
vendor/github.com/evanw/esbuild/internal/compat/js_table.go
generated
vendored
@ -1,465 +0,0 @@
583
vendor/github.com/evanw/esbuild/internal/config/config.go
generated
vendored
583
vendor/github.com/evanw/esbuild/internal/config/config.go
generated
vendored
@ -1,583 +0,0 @@
package config

import (
  "fmt"
  "regexp"
  "strings"
  "sync"

  "github.com/evanw/esbuild/internal/ast"
  "github.com/evanw/esbuild/internal/compat"
  "github.com/evanw/esbuild/internal/js_ast"
  "github.com/evanw/esbuild/internal/logger"
)

type JSXOptions struct {
  Factory  JSXExpr
  Fragment JSXExpr
  Parse    bool
  Preserve bool
}

type JSXExpr struct {
  Parts    []string
  Constant js_ast.E
}

type TSOptions struct {
  Parse               bool
  NoAmbiguousLessThan bool
}

type Platform uint8

const (
  PlatformBrowser Platform = iota
  PlatformNode
  PlatformNeutral
)

type StrictOptions struct {
  // Loose: "class Foo { foo = 1 }" => "class Foo { constructor() { this.foo = 1; } }"
  // Strict: "class Foo { foo = 1 }" => "class Foo { constructor() { __publicField(this, 'foo', 1); } }"
  //
  // The disadvantage of strictness here is code bloat and performance. The
  // advantage is following the class field specification accurately. For
  // example, loose mode will incorrectly trigger setter methods while strict
  // mode won't.
  ClassFields bool
}

type SourceMap uint8

const (
  SourceMapNone SourceMap = iota
  SourceMapInline
  SourceMapLinkedWithComment
  SourceMapExternalWithoutComment
  SourceMapInlineAndExternal
)

type LegalComments uint8

const (
  LegalCommentsInline LegalComments = iota
  LegalCommentsNone
  LegalCommentsEndOfFile
  LegalCommentsLinkedWithComment
  LegalCommentsExternalWithoutComment
)

func (lc LegalComments) HasExternalFile() bool {
  return lc == LegalCommentsLinkedWithComment || lc == LegalCommentsExternalWithoutComment
}

type Loader int

const (
  LoaderNone Loader = iota
  LoaderJS
  LoaderJSX
  LoaderTS
  LoaderTSNoAmbiguousLessThan // Used with ".mts" and ".cts"
  LoaderTSX
  LoaderJSON
  LoaderText
  LoaderBase64
  LoaderDataURL
  LoaderFile
  LoaderBinary
  LoaderCSS
  LoaderDefault
)

func (loader Loader) IsTypeScript() bool {
  switch loader {
  case LoaderTS, LoaderTSNoAmbiguousLessThan, LoaderTSX:
    return true
  default:
    return false
  }
}

func (loader Loader) CanHaveSourceMap() bool {
  switch loader {
  case LoaderJS, LoaderJSX, LoaderTS, LoaderTSNoAmbiguousLessThan, LoaderTSX, LoaderCSS:
    return true
  default:
    return false
  }
}

type Format uint8

const (
  // This is used when not bundling. It means to preserve whatever form the
  // import or export was originally in. ES6 syntax stays ES6 syntax and
  // CommonJS syntax stays CommonJS syntax.
  FormatPreserve Format = iota

  // IIFE stands for immediately-invoked function expression. That looks like
  // this:
  //
  //   (() => {
  //     ... bundled code ...
  //   })();
  //
  // If the optional GlobalName is configured, then we'll write out this:
  //
  //   let globalName = (() => {
  //     ... bundled code ...
  //     return exports;
  //   })();
  //
  FormatIIFE

  // The CommonJS format looks like this:
  //
  //   ... bundled code ...
  //   module.exports = exports;
  //
  FormatCommonJS

  // The ES module format looks like this:
  //
  //   ... bundled code ...
  //   export {...};
  //
  FormatESModule
)

func (f Format) KeepES6ImportExportSyntax() bool {
  return f == FormatPreserve || f == FormatESModule
}

func (f Format) String() string {
  switch f {
  case FormatIIFE:
    return "iife"
  case FormatCommonJS:
    return "cjs"
  case FormatESModule:
    return "esm"
  }
  return ""
}

type StdinInfo struct {
  Loader        Loader
  Contents      string
  SourceFile    string
  AbsResolveDir string
}

type WildcardPattern struct {
  Prefix string
  Suffix string
}

type ExternalModules struct {
  NodeModules map[string]bool
  AbsPaths    map[string]bool
  Patterns    []WildcardPattern
}

type Mode uint8

const (
  ModePassThrough Mode = iota
  ModeConvertFormat
  ModeBundle
)

type MaybeBool uint8

const (
  Unspecified MaybeBool = iota
  True
  False
)

type Options struct {
  Mode              Mode
  ModuleType        js_ast.ModuleType
  PreserveSymlinks  bool
  RemoveWhitespace  bool
  MinifyIdentifiers bool
  MangleSyntax      bool
  ProfilerNames     bool
  CodeSplitting     bool
  WatchMode         bool
  AllowOverwrite    bool
  LegalComments     LegalComments

  // If true, make sure to generate a single file that can be written to stdout
  WriteToStdout bool

  OmitRuntimeForTests     bool
  UnusedImportsTS         UnusedImportsTS
  UseDefineForClassFields MaybeBool
  ASCIIOnly               bool
  KeepNames               bool
  IgnoreDCEAnnotations    bool
  TreeShaking             bool

  Defines  *ProcessedDefines
  TS       TSOptions
  JSX      JSXOptions
  Platform Platform

  TargetFromAPI          TargetFromAPI
  UnsupportedJSFeatures  compat.JSFeature
  UnsupportedCSSFeatures compat.CSSFeature
  TSTarget               *TSTarget

  // This is the original information that was used to generate the
  // unsupported feature sets above. It's used for error messages.
  OriginalTargetEnv string

  ExtensionOrder  []string
  MainFields      []string
  Conditions      []string
  AbsNodePaths    []string // The "NODE_PATH" variable from Node.js
  ExternalModules ExternalModules

  AbsOutputFile      string
  AbsOutputDir       string
  AbsOutputBase      string
  OutputExtensionJS  string
  OutputExtensionCSS string
  GlobalName         []string
  TsConfigOverride   string
  ExtensionToLoader  map[string]Loader
  OutputFormat       Format
  PublicPath         string
  InjectAbsPaths     []string
  InjectedDefines    []InjectedDefine
  InjectedFiles      []InjectedFile

  JSBanner  string
  JSFooter  string
  CSSBanner string
  CSSFooter string

  EntryPathTemplate []PathTemplate
  ChunkPathTemplate []PathTemplate
  AssetPathTemplate []PathTemplate

  Plugins []Plugin

  NeedsMetafile bool

  SourceMap             SourceMap
  SourceRoot            string
  ExcludeSourcesContent bool

  Stdin *StdinInfo
}

type TargetFromAPI uint8

const (
  // In this state, the "target" field in "tsconfig.json" is respected
  TargetWasUnconfigured TargetFromAPI = iota

  // In this state, the "target" field in "tsconfig.json" is overridden
  TargetWasConfigured

  // In this state, "useDefineForClassFields" is true unless overridden
  TargetWasConfiguredIncludingESNext
)

type UnusedImportsTS uint8

const (
  // "import { unused } from 'foo'" => "" (TypeScript's default behavior)
  UnusedImportsRemoveStmt UnusedImportsTS = iota

  // "import { unused } from 'foo'" => "import 'foo'" ("importsNotUsedAsValues" != "remove")
  UnusedImportsKeepStmtRemoveValues

  // "import { unused } from 'foo'" => "import { unused } from 'foo'" ("preserveValueImports" == true)
  UnusedImportsKeepValues
)

func UnusedImportsFromTsconfigValues(preserveImportsNotUsedAsValues bool, preserveValueImports bool) UnusedImportsTS {
  if preserveValueImports {
    return UnusedImportsKeepValues
  }
  if preserveImportsNotUsedAsValues {
    return UnusedImportsKeepStmtRemoveValues
  }
  return UnusedImportsRemoveStmt
}

type TSTarget struct {
  Source                logger.Source
  Range                 logger.Range
  Target                string
  UnsupportedJSFeatures compat.JSFeature
}

type PathPlaceholder uint8

const (
  NoPlaceholder PathPlaceholder = iota

  // The relative path from the original parent directory to the configured
  // "outbase" directory, or to the lowest common ancestor directory
  DirPlaceholder

  // The original name of the file, or the manual chunk name, or the name of
  // the type of output file ("entry" or "chunk" or "asset")
  NamePlaceholder

  // A hash of the contents of this file, and the contents and output paths of
  // all dependencies (except for their hash placeholders)
  HashPlaceholder

  // The original extension of the file, or the name of the output file
  // (e.g. "css", "svg", "png")
  ExtPlaceholder
)

type PathTemplate struct {
  Data        string
  Placeholder PathPlaceholder
}

type PathPlaceholders struct {
  Dir  *string
  Name *string
  Hash *string
  Ext  *string
}

func (placeholders PathPlaceholders) Get(placeholder PathPlaceholder) *string {
  switch placeholder {
  case DirPlaceholder:
    return placeholders.Dir
  case NamePlaceholder:
    return placeholders.Name
  case HashPlaceholder:
    return placeholders.Hash
  case ExtPlaceholder:
    return placeholders.Ext
  }
  return nil
}

func TemplateToString(template []PathTemplate) string {
  if len(template) == 1 && template[0].Placeholder == NoPlaceholder {
    // Avoid allocations in this case
    return template[0].Data
  }
  sb := strings.Builder{}
  for _, part := range template {
    sb.WriteString(part.Data)
    switch part.Placeholder {
    case DirPlaceholder:
      sb.WriteString("[dir]")
    case NamePlaceholder:
      sb.WriteString("[name]")
    case HashPlaceholder:
      sb.WriteString("[hash]")
    case ExtPlaceholder:
      sb.WriteString("[ext]")
    }
  }
  return sb.String()
}

func HasPlaceholder(template []PathTemplate, placeholder PathPlaceholder) bool {
  for _, part := range template {
    if part.Placeholder == placeholder {
      return true
    }
  }
  return false
}

func SubstituteTemplate(template []PathTemplate, placeholders PathPlaceholders) []PathTemplate {
  // Don't allocate if no substitution is possible and the template is already minimal
  shouldSubstitute := false
  for i, part := range template {
    if placeholders.Get(part.Placeholder) != nil || (part.Placeholder == NoPlaceholder && i+1 < len(template)) {
      shouldSubstitute = true
      break
    }
  }
  if !shouldSubstitute {
    return template
  }

  // Otherwise, substitute and merge as appropriate
  result := make([]PathTemplate, 0, len(template))
  for _, part := range template {
    if sub := placeholders.Get(part.Placeholder); sub != nil {
      part.Data += *sub
      part.Placeholder = NoPlaceholder
    }
    if last := len(result) - 1; last >= 0 && result[last].Placeholder == NoPlaceholder {
      last := &result[last]
      last.Data += part.Data
      last.Placeholder = part.Placeholder
    } else {
      result = append(result, part)
    }
  }
  return result
}
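A small sketch of how these templates compose, derived from the helpers above (the template and values are invented for the example):

func examplePathTemplate() {
  // "[name]-[hash]" as a parsed template: Data is the literal text that
  // precedes each placeholder.
  template := []PathTemplate{
    {Data: "", Placeholder: NamePlaceholder},
    {Data: "-", Placeholder: HashPlaceholder},
  }

  name, hash := "app", "ABC123"
  out := SubstituteTemplate(template, PathPlaceholders{Name: &name, Hash: &hash})

  // Both placeholders were substituted and merged into a single literal part.
  _ = TemplateToString(out) // "app-ABC123"
}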
func ShouldCallRuntimeRequire(mode Mode, outputFormat Format) bool {
  return mode == ModeBundle && outputFormat != FormatCommonJS
}

type InjectedDefine struct {
  Source logger.Source
  Data   js_ast.E
  Name   string
}

type InjectedFile struct {
  Source     logger.Source
  Exports    []InjectableExport
  DefineName string
}

type InjectableExport struct {
  Alias string
  Loc   logger.Loc
}

var filterMutex sync.Mutex
var filterCache map[string]*regexp.Regexp

func compileFilter(filter string) (result *regexp.Regexp) {
  if filter == "" {
    // Must provide a filter
    return nil
  }
  ok := false

  // Cache hit?
  (func() {
    filterMutex.Lock()
    defer filterMutex.Unlock()
    if filterCache != nil {
      result, ok = filterCache[filter]
    }
  })()
  if ok {
    return
  }

  // Cache miss
  result, err := regexp.Compile(filter)
  if err != nil {
    return nil
  }

  // Cache for next time
  filterMutex.Lock()
  defer filterMutex.Unlock()
  if filterCache == nil {
    filterCache = make(map[string]*regexp.Regexp)
  }
  filterCache[filter] = result
  return
}

func CompileFilterForPlugin(pluginName string, kind string, filter string) (*regexp.Regexp, error) {
  if filter == "" {
    return nil, fmt.Errorf("[%s] %q is missing a filter", pluginName, kind)
  }

  result := compileFilter(filter)
  if result == nil {
    return nil, fmt.Errorf("[%s] %q filter is not a valid Go regular expression: %q", pluginName, kind, filter)
  }

  return result, nil
}
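For illustration, a hedged sketch of the intended call pattern (the plugin name and filter are made up); repeat calls with the same pattern are served from filterCache:

func exampleFilter() {
  // Compiled once, then memoized by compileFilter on later calls.
  re, err := CompileFilterForPlugin("my-plugin", "onResolve", `\.png$`)
  if err != nil {
    // e.g. an empty filter or an invalid Go regular expression
    return
  }
  _ = re.MatchString("logo.png") // true
}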
func PluginAppliesToPath(path logger.Path, filter *regexp.Regexp, namespace string) bool {
  return (namespace == "" || path.Namespace == namespace) && filter.MatchString(path.Text)
}

////////////////////////////////////////////////////////////////////////////////
// Plugin API

type Plugin struct {
  Name      string
  OnStart   []OnStart
  OnResolve []OnResolve
  OnLoad    []OnLoad
}

type OnStart struct {
  Name     string
  Callback func() OnStartResult
}

type OnStartResult struct {
  Msgs        []logger.Msg
  ThrownError error
}

type OnResolve struct {
  Name      string
  Filter    *regexp.Regexp
  Namespace string
  Callback  func(OnResolveArgs) OnResolveResult
}

type OnResolveArgs struct {
  Path       string
  Importer   logger.Path
  ResolveDir string
  Kind       ast.ImportKind
  PluginData interface{}
}

type OnResolveResult struct {
  PluginName string

  Path             logger.Path
  External         bool
  IsSideEffectFree bool
  PluginData       interface{}

  Msgs        []logger.Msg
  ThrownError error

  AbsWatchFiles []string
  AbsWatchDirs  []string
}

type OnLoad struct {
  Name      string
  Filter    *regexp.Regexp
  Namespace string
  Callback  func(OnLoadArgs) OnLoadResult
}

type OnLoadArgs struct {
  Path       logger.Path
  PluginData interface{}
}

type OnLoadResult struct {
  PluginName string

  Contents      *string
  AbsResolveDir string
  Loader        Loader
  PluginData    interface{}

  Msgs        []logger.Msg
  ThrownError error

  AbsWatchFiles []string
  AbsWatchDirs  []string
}
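Tying the plugin types together, a minimal hypothetical plugin that marks matching imports as external (the name and filter are invented; only types defined above are used):

func exampleExternalPlugin() Plugin {
  filter, _ := CompileFilterForPlugin("mark-http-external", "onResolve", `^https?://`)
  return Plugin{
    Name: "mark-http-external",
    OnResolve: []OnResolve{{
      Name:   "mark-http-external",
      Filter: filter,
      Callback: func(args OnResolveArgs) OnResolveResult {
        // Leave the URL in the output instead of bundling it.
        return OnResolveResult{
          Path:     logger.Path{Text: args.Path},
          External: true,
        }
      },
    }},
  }
}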
969
vendor/github.com/evanw/esbuild/internal/config/globals.go
generated
vendored
@@ -1,969 +0,0 @@
package config

import (
  "math"
  "strings"
  "sync"

  "github.com/evanw/esbuild/internal/js_ast"
  "github.com/evanw/esbuild/internal/logger"
)

var processedGlobalsMutex sync.Mutex
var processedGlobals *ProcessedDefines

// If something is in this list, then a direct identifier expression or property
// access chain matching this will be assumed to have no side effects and will
// be removed.
//
// This also means code is allowed to be reordered past things in this list. For
// example, if "console.log" is in this list, permitting reordering allows for
// "if (a) console.log(b); else console.log(c)" to be reordered and transformed
// into "console.log(a ? b : c)". Notice that "a" and "console.log" are in a
// different order, which can only happen if evaluating the "console.log"
// property access can be assumed to not change the value of "a".
//
// Note that membership in this list says nothing about whether calling any of
// these functions has any side effects. It only says something about
// referencing these functions without calling them.
var knownGlobals = [][]string{
  // These global identifiers should exist in all JavaScript environments. This
  // deliberately omits "NaN", "Infinity", and "undefined" because these are
  // treated as automatically-inlined constants instead of identifiers.
  {"Array"}, {"Boolean"}, {"Function"}, {"Math"}, {"Number"}, {"Object"}, {"RegExp"}, {"String"},

  // Object: Static methods
  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object#Static_methods
  {"Object", "assign"}, {"Object", "create"}, {"Object", "defineProperties"}, {"Object", "defineProperty"},
  {"Object", "entries"}, {"Object", "freeze"}, {"Object", "fromEntries"}, {"Object", "getOwnPropertyDescriptor"},
  {"Object", "getOwnPropertyDescriptors"}, {"Object", "getOwnPropertyNames"}, {"Object", "getOwnPropertySymbols"},
  {"Object", "getPrototypeOf"}, {"Object", "is"}, {"Object", "isExtensible"}, {"Object", "isFrozen"},
  {"Object", "isSealed"}, {"Object", "keys"}, {"Object", "preventExtensions"}, {"Object", "seal"},
  {"Object", "setPrototypeOf"}, {"Object", "values"},

  // Object: Instance methods
  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object#Instance_methods
  {"Object", "prototype", "__defineGetter__"}, {"Object", "prototype", "__defineSetter__"},
  {"Object", "prototype", "__lookupGetter__"}, {"Object", "prototype", "__lookupSetter__"},
  {"Object", "prototype", "hasOwnProperty"}, {"Object", "prototype", "isPrototypeOf"},
  {"Object", "prototype", "propertyIsEnumerable"}, {"Object", "prototype", "toLocaleString"},
  {"Object", "prototype", "toString"}, {"Object", "prototype", "unwatch"},
  {"Object", "prototype", "valueOf"}, {"Object", "prototype", "watch"},

  // Math: Static properties
  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math#Static_properties
  {"Math", "E"}, {"Math", "LN10"}, {"Math", "LN2"}, {"Math", "LOG10E"}, {"Math", "LOG2E"},
  {"Math", "PI"}, {"Math", "SQRT1_2"}, {"Math", "SQRT2"},

  // Math: Static methods
  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math#Static_methods
  {"Math", "abs"}, {"Math", "acos"}, {"Math", "acosh"}, {"Math", "asin"}, {"Math", "asinh"},
  {"Math", "atan"}, {"Math", "atan2"}, {"Math", "atanh"}, {"Math", "cbrt"}, {"Math", "ceil"},
  {"Math", "clz32"}, {"Math", "cos"}, {"Math", "cosh"}, {"Math", "exp"}, {"Math", "expm1"},
  {"Math", "floor"}, {"Math", "fround"}, {"Math", "hypot"}, {"Math", "imul"}, {"Math", "log"},
  {"Math", "log10"}, {"Math", "log1p"}, {"Math", "log2"}, {"Math", "max"}, {"Math", "min"},
  {"Math", "pow"}, {"Math", "random"}, {"Math", "round"}, {"Math", "sign"}, {"Math", "sin"},
  {"Math", "sinh"}, {"Math", "sqrt"}, {"Math", "tan"}, {"Math", "tanh"}, {"Math", "trunc"},

  // Reflect: Static methods
  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect#static_methods
  {"Reflect", "apply"}, {"Reflect", "construct"}, {"Reflect", "defineProperty"}, {"Reflect", "deleteProperty"},
  {"Reflect", "get"}, {"Reflect", "getOwnPropertyDescriptor"}, {"Reflect", "getPrototypeOf"}, {"Reflect", "has"},
  {"Reflect", "isExtensible"}, {"Reflect", "ownKeys"}, {"Reflect", "preventExtensions"}, {"Reflect", "set"},
  {"Reflect", "setPrototypeOf"},

  // Other globals present in both the browser and node (except "eval" because
  // it has special behavior)
  {"AbortController"}, {"AbortSignal"}, {"AggregateError"}, {"ArrayBuffer"}, {"BigInt"}, {"DataView"},
  {"Date"}, {"Error"}, {"EvalError"}, {"Event"}, {"EventTarget"}, {"Float32Array"}, {"Float64Array"},
  {"Int16Array"}, {"Int32Array"}, {"Int8Array"}, {"Intl"}, {"JSON"}, {"Map"}, {"MessageChannel"},
  {"MessageEvent"}, {"MessagePort"}, {"Promise"}, {"Proxy"}, {"RangeError"}, {"ReferenceError"},
  {"Reflect"}, {"Set"}, {"Symbol"}, {"SyntaxError"}, {"TextDecoder"}, {"TextEncoder"}, {"TypeError"},
  {"URIError"}, {"URL"}, {"URLSearchParams"}, {"Uint16Array"}, {"Uint32Array"}, {"Uint8Array"},
  {"Uint8ClampedArray"}, {"WeakMap"}, {"WeakSet"}, {"WebAssembly"}, {"clearInterval"}, {"clearTimeout"},
  {"console"}, {"decodeURI"}, {"decodeURIComponent"}, {"encodeURI"}, {"encodeURIComponent"}, {"escape"},
  {"globalThis"}, {"isFinite"}, {"isNaN"}, {"parseFloat"}, {"parseInt"}, {"queueMicrotask"},
  {"setInterval"}, {"setTimeout"}, {"unescape"},

  // Console method references are assumed to have no side effects
  // https://developer.mozilla.org/en-US/docs/Web/API/console
  {"console", "assert"}, {"console", "clear"}, {"console", "count"}, {"console", "countReset"},
  {"console", "debug"}, {"console", "dir"}, {"console", "dirxml"}, {"console", "error"},
  {"console", "group"}, {"console", "groupCollapsed"}, {"console", "groupEnd"}, {"console", "info"},
  {"console", "log"}, {"console", "table"}, {"console", "time"}, {"console", "timeEnd"},
  {"console", "timeLog"}, {"console", "trace"}, {"console", "warn"},

  // CSSOM APIs
  {"CSSAnimation"}, {"CSSFontFaceRule"}, {"CSSImportRule"}, {"CSSKeyframeRule"}, {"CSSKeyframesRule"},
  {"CSSMediaRule"}, {"CSSNamespaceRule"}, {"CSSPageRule"}, {"CSSRule"}, {"CSSRuleList"},
  {"CSSStyleDeclaration"}, {"CSSStyleRule"}, {"CSSStyleSheet"}, {"CSSSupportsRule"}, {"CSSTransition"},

  // SVG DOM
  {"SVGAElement"}, {"SVGAngle"}, {"SVGAnimateElement"}, {"SVGAnimateMotionElement"},
  {"SVGAnimateTransformElement"}, {"SVGAnimatedAngle"}, {"SVGAnimatedBoolean"}, {"SVGAnimatedEnumeration"},
  {"SVGAnimatedInteger"}, {"SVGAnimatedLength"}, {"SVGAnimatedLengthList"}, {"SVGAnimatedNumber"},
  {"SVGAnimatedNumberList"}, {"SVGAnimatedPreserveAspectRatio"}, {"SVGAnimatedRect"}, {"SVGAnimatedString"},
  {"SVGAnimatedTransformList"}, {"SVGAnimationElement"}, {"SVGCircleElement"}, {"SVGClipPathElement"},
  {"SVGComponentTransferFunctionElement"}, {"SVGDefsElement"}, {"SVGDescElement"}, {"SVGElement"},
  {"SVGEllipseElement"}, {"SVGFEBlendElement"}, {"SVGFEColorMatrixElement"}, {"SVGFEComponentTransferElement"},
  {"SVGFECompositeElement"}, {"SVGFEConvolveMatrixElement"}, {"SVGFEDiffuseLightingElement"},
  {"SVGFEDisplacementMapElement"}, {"SVGFEDistantLightElement"}, {"SVGFEDropShadowElement"},
  {"SVGFEFloodElement"}, {"SVGFEFuncAElement"}, {"SVGFEFuncBElement"}, {"SVGFEFuncGElement"},
  {"SVGFEFuncRElement"}, {"SVGFEGaussianBlurElement"}, {"SVGFEImageElement"}, {"SVGFEMergeElement"},
  {"SVGFEMergeNodeElement"}, {"SVGFEMorphologyElement"}, {"SVGFEOffsetElement"}, {"SVGFEPointLightElement"},
  {"SVGFESpecularLightingElement"}, {"SVGFESpotLightElement"}, {"SVGFETileElement"}, {"SVGFETurbulenceElement"},
  {"SVGFilterElement"}, {"SVGForeignObjectElement"}, {"SVGGElement"}, {"SVGGeometryElement"},
  {"SVGGradientElement"}, {"SVGGraphicsElement"}, {"SVGImageElement"}, {"SVGLength"}, {"SVGLengthList"},
  {"SVGLineElement"}, {"SVGLinearGradientElement"}, {"SVGMPathElement"}, {"SVGMarkerElement"},
  {"SVGMaskElement"}, {"SVGMatrix"}, {"SVGMetadataElement"}, {"SVGNumber"}, {"SVGNumberList"},
  {"SVGPathElement"}, {"SVGPatternElement"}, {"SVGPoint"}, {"SVGPointList"}, {"SVGPolygonElement"},
  {"SVGPolylineElement"}, {"SVGPreserveAspectRatio"}, {"SVGRadialGradientElement"}, {"SVGRect"},
  {"SVGRectElement"}, {"SVGSVGElement"}, {"SVGScriptElement"}, {"SVGSetElement"}, {"SVGStopElement"},
  {"SVGStringList"}, {"SVGStyleElement"}, {"SVGSwitchElement"}, {"SVGSymbolElement"}, {"SVGTSpanElement"},
  {"SVGTextContentElement"}, {"SVGTextElement"}, {"SVGTextPathElement"}, {"SVGTextPositioningElement"},
  {"SVGTitleElement"}, {"SVGTransform"}, {"SVGTransformList"}, {"SVGUnitTypes"}, {"SVGUseElement"},
  {"SVGViewElement"},

  // Other browser APIs
  //
  // This list contains all globals present in modern versions of Chrome, Safari,
  // and Firefox except for the following properties, since they have a side effect
  // of triggering layout (https://gist.github.com/paulirish/5d52fb081b3570c81e3a):
  //
  //   - scrollX
  //   - scrollY
  //   - innerWidth
  //   - innerHeight
  //   - pageXOffset
  //   - pageYOffset
  //
  // The following globals have also been removed since they sometimes throw an
  // exception when accessed, which is a side effect (for more information see
  // https://stackoverflow.com/a/33047477):
  //
  //   - localStorage
  //   - sessionStorage
  //
  {"AnalyserNode"}, {"Animation"}, {"AnimationEffect"}, {"AnimationEvent"}, {"AnimationPlaybackEvent"},
  {"AnimationTimeline"}, {"Attr"}, {"Audio"}, {"AudioBuffer"}, {"AudioBufferSourceNode"},
  {"AudioDestinationNode"}, {"AudioListener"}, {"AudioNode"}, {"AudioParam"}, {"AudioProcessingEvent"},
  {"AudioScheduledSourceNode"}, {"BarProp"}, {"BeforeUnloadEvent"}, {"BiquadFilterNode"}, {"Blob"},
  {"BlobEvent"}, {"ByteLengthQueuingStrategy"}, {"CDATASection"}, {"CSS"}, {"CanvasGradient"},
  {"CanvasPattern"}, {"CanvasRenderingContext2D"}, {"ChannelMergerNode"}, {"ChannelSplitterNode"},
  {"CharacterData"}, {"ClipboardEvent"}, {"CloseEvent"}, {"Comment"}, {"CompositionEvent"},
  {"ConvolverNode"}, {"CountQueuingStrategy"}, {"Crypto"}, {"CustomElementRegistry"}, {"CustomEvent"},
  {"DOMException"}, {"DOMImplementation"}, {"DOMMatrix"}, {"DOMMatrixReadOnly"}, {"DOMParser"},
  {"DOMPoint"}, {"DOMPointReadOnly"}, {"DOMQuad"}, {"DOMRect"}, {"DOMRectList"}, {"DOMRectReadOnly"},
  {"DOMStringList"}, {"DOMStringMap"}, {"DOMTokenList"}, {"DataTransfer"}, {"DataTransferItem"},
  {"DataTransferItemList"}, {"DelayNode"}, {"Document"}, {"DocumentFragment"}, {"DocumentTimeline"},
  {"DocumentType"}, {"DragEvent"}, {"DynamicsCompressorNode"}, {"Element"}, {"ErrorEvent"},
  {"EventSource"}, {"File"}, {"FileList"}, {"FileReader"}, {"FocusEvent"}, {"FontFace"}, {"FormData"},
  {"GainNode"}, {"Gamepad"}, {"GamepadButton"}, {"GamepadEvent"}, {"Geolocation"},
  {"GeolocationPositionError"}, {"HTMLAllCollection"}, {"HTMLAnchorElement"}, {"HTMLAreaElement"},
  {"HTMLAudioElement"}, {"HTMLBRElement"}, {"HTMLBaseElement"}, {"HTMLBodyElement"}, {"HTMLButtonElement"},
  {"HTMLCanvasElement"}, {"HTMLCollection"}, {"HTMLDListElement"}, {"HTMLDataElement"},
  {"HTMLDataListElement"}, {"HTMLDetailsElement"}, {"HTMLDirectoryElement"}, {"HTMLDivElement"},
  {"HTMLDocument"}, {"HTMLElement"}, {"HTMLEmbedElement"}, {"HTMLFieldSetElement"}, {"HTMLFontElement"},
  {"HTMLFormControlsCollection"}, {"HTMLFormElement"}, {"HTMLFrameElement"}, {"HTMLFrameSetElement"},
  {"HTMLHRElement"}, {"HTMLHeadElement"}, {"HTMLHeadingElement"}, {"HTMLHtmlElement"},
  {"HTMLIFrameElement"}, {"HTMLImageElement"}, {"HTMLInputElement"}, {"HTMLLIElement"},
  {"HTMLLabelElement"}, {"HTMLLegendElement"}, {"HTMLLinkElement"}, {"HTMLMapElement"},
  {"HTMLMarqueeElement"}, {"HTMLMediaElement"}, {"HTMLMenuElement"}, {"HTMLMetaElement"},
  {"HTMLMeterElement"}, {"HTMLModElement"}, {"HTMLOListElement"}, {"HTMLObjectElement"},
  {"HTMLOptGroupElement"}, {"HTMLOptionElement"}, {"HTMLOptionsCollection"}, {"HTMLOutputElement"},
  {"HTMLParagraphElement"}, {"HTMLParamElement"}, {"HTMLPictureElement"}, {"HTMLPreElement"},
  {"HTMLProgressElement"}, {"HTMLQuoteElement"}, {"HTMLScriptElement"}, {"HTMLSelectElement"},
  {"HTMLSlotElement"}, {"HTMLSourceElement"}, {"HTMLSpanElement"}, {"HTMLStyleElement"},
  {"HTMLTableCaptionElement"}, {"HTMLTableCellElement"}, {"HTMLTableColElement"}, {"HTMLTableElement"},
  {"HTMLTableRowElement"}, {"HTMLTableSectionElement"}, {"HTMLTemplateElement"}, {"HTMLTextAreaElement"},
  {"HTMLTimeElement"}, {"HTMLTitleElement"}, {"HTMLTrackElement"}, {"HTMLUListElement"},
  {"HTMLUnknownElement"}, {"HTMLVideoElement"}, {"HashChangeEvent"}, {"Headers"}, {"History"},
  {"IDBCursor"}, {"IDBCursorWithValue"}, {"IDBDatabase"}, {"IDBFactory"}, {"IDBIndex"}, {"IDBKeyRange"},
  {"IDBObjectStore"}, {"IDBOpenDBRequest"}, {"IDBRequest"}, {"IDBTransaction"}, {"IDBVersionChangeEvent"},
  {"Image"}, {"ImageData"}, {"InputEvent"}, {"IntersectionObserver"}, {"IntersectionObserverEntry"},
  {"KeyboardEvent"}, {"KeyframeEffect"}, {"Location"}, {"MediaCapabilities"},
  {"MediaElementAudioSourceNode"}, {"MediaEncryptedEvent"}, {"MediaError"}, {"MediaList"},
  {"MediaQueryList"}, {"MediaQueryListEvent"}, {"MediaRecorder"}, {"MediaSource"}, {"MediaStream"},
  {"MediaStreamAudioDestinationNode"}, {"MediaStreamAudioSourceNode"}, {"MediaStreamTrack"},
  {"MediaStreamTrackEvent"}, {"MimeType"}, {"MimeTypeArray"}, {"MouseEvent"}, {"MutationEvent"},
  {"MutationObserver"}, {"MutationRecord"}, {"NamedNodeMap"}, {"Navigator"}, {"Node"}, {"NodeFilter"},
  {"NodeIterator"}, {"NodeList"}, {"Notification"}, {"OfflineAudioCompletionEvent"}, {"Option"},
  {"OscillatorNode"}, {"PageTransitionEvent"}, {"Path2D"}, {"Performance"}, {"PerformanceEntry"},
  {"PerformanceMark"}, {"PerformanceMeasure"}, {"PerformanceNavigation"}, {"PerformanceObserver"},
  {"PerformanceObserverEntryList"}, {"PerformanceResourceTiming"}, {"PerformanceTiming"}, {"PeriodicWave"},
  {"Plugin"}, {"PluginArray"}, {"PointerEvent"}, {"PopStateEvent"}, {"ProcessingInstruction"},
  {"ProgressEvent"}, {"PromiseRejectionEvent"}, {"RTCCertificate"}, {"RTCDTMFSender"},
  {"RTCDTMFToneChangeEvent"}, {"RTCDataChannel"}, {"RTCDataChannelEvent"}, {"RTCIceCandidate"},
  {"RTCPeerConnection"}, {"RTCPeerConnectionIceEvent"}, {"RTCRtpReceiver"}, {"RTCRtpSender"},
  {"RTCRtpTransceiver"}, {"RTCSessionDescription"}, {"RTCStatsReport"}, {"RTCTrackEvent"},
  {"RadioNodeList"}, {"Range"}, {"ReadableStream"}, {"Request"}, {"ResizeObserver"},
  {"ResizeObserverEntry"}, {"Response"}, {"Screen"}, {"ScriptProcessorNode"},
  {"SecurityPolicyViolationEvent"}, {"Selection"}, {"ShadowRoot"}, {"SourceBuffer"}, {"SourceBufferList"},
  {"SpeechSynthesisEvent"}, {"SpeechSynthesisUtterance"}, {"StaticRange"}, {"Storage"}, {"StorageEvent"},
  {"StyleSheet"}, {"StyleSheetList"}, {"Text"}, {"TextMetrics"}, {"TextTrack"}, {"TextTrackCue"},
  {"TextTrackCueList"}, {"TextTrackList"}, {"TimeRanges"}, {"TrackEvent"}, {"TransitionEvent"},
  {"TreeWalker"}, {"UIEvent"}, {"VTTCue"}, {"ValidityState"}, {"VisualViewport"}, {"WaveShaperNode"},
  {"WebGLActiveInfo"}, {"WebGLBuffer"}, {"WebGLContextEvent"}, {"WebGLFramebuffer"}, {"WebGLProgram"},
  {"WebGLQuery"}, {"WebGLRenderbuffer"}, {"WebGLRenderingContext"}, {"WebGLSampler"}, {"WebGLShader"},
  {"WebGLShaderPrecisionFormat"}, {"WebGLSync"}, {"WebGLTexture"}, {"WebGLUniformLocation"},
  {"WebKitCSSMatrix"}, {"WebSocket"}, {"WheelEvent"}, {"Window"}, {"Worker"}, {"XMLDocument"},
  {"XMLHttpRequest"}, {"XMLHttpRequestEventTarget"}, {"XMLHttpRequestUpload"}, {"XMLSerializer"},
  {"XPathEvaluator"}, {"XPathExpression"}, {"XPathResult"}, {"XSLTProcessor"}, {"alert"}, {"atob"},
  {"blur"}, {"btoa"}, {"cancelAnimationFrame"}, {"captureEvents"}, {"close"}, {"closed"}, {"confirm"},
  {"customElements"}, {"devicePixelRatio"}, {"document"}, {"event"}, {"fetch"}, {"find"}, {"focus"},
  {"frameElement"}, {"frames"}, {"getComputedStyle"}, {"getSelection"}, {"history"}, {"indexedDB"},
  {"isSecureContext"}, {"length"}, {"location"}, {"locationbar"}, {"matchMedia"}, {"menubar"},
  {"moveBy"}, {"moveTo"}, {"name"}, {"navigator"}, {"onabort"}, {"onafterprint"}, {"onanimationend"},
  {"onanimationiteration"}, {"onanimationstart"}, {"onbeforeprint"}, {"onbeforeunload"}, {"onblur"},
  {"oncanplay"}, {"oncanplaythrough"}, {"onchange"}, {"onclick"}, {"oncontextmenu"}, {"oncuechange"},
  {"ondblclick"}, {"ondrag"}, {"ondragend"}, {"ondragenter"}, {"ondragleave"}, {"ondragover"},
  {"ondragstart"}, {"ondrop"}, {"ondurationchange"}, {"onemptied"}, {"onended"}, {"onerror"},
  {"onfocus"}, {"ongotpointercapture"}, {"onhashchange"}, {"oninput"}, {"oninvalid"}, {"onkeydown"},
  {"onkeypress"}, {"onkeyup"}, {"onlanguagechange"}, {"onload"}, {"onloadeddata"}, {"onloadedmetadata"},
  {"onloadstart"}, {"onlostpointercapture"}, {"onmessage"}, {"onmousedown"}, {"onmouseenter"},
  {"onmouseleave"}, {"onmousemove"}, {"onmouseout"}, {"onmouseover"}, {"onmouseup"}, {"onoffline"},
  {"ononline"}, {"onpagehide"}, {"onpageshow"}, {"onpause"}, {"onplay"}, {"onplaying"},
  {"onpointercancel"}, {"onpointerdown"}, {"onpointerenter"}, {"onpointerleave"}, {"onpointermove"},
  {"onpointerout"}, {"onpointerover"}, {"onpointerup"}, {"onpopstate"}, {"onprogress"},
  {"onratechange"}, {"onrejectionhandled"}, {"onreset"}, {"onresize"}, {"onscroll"}, {"onseeked"},
  {"onseeking"}, {"onselect"}, {"onstalled"}, {"onstorage"}, {"onsubmit"}, {"onsuspend"},
  {"ontimeupdate"}, {"ontoggle"}, {"ontransitioncancel"}, {"ontransitionend"}, {"ontransitionrun"},
  {"ontransitionstart"}, {"onunhandledrejection"}, {"onunload"}, {"onvolumechange"}, {"onwaiting"},
  {"onwebkitanimationend"}, {"onwebkitanimationiteration"}, {"onwebkitanimationstart"},
  {"onwebkittransitionend"}, {"onwheel"}, {"open"}, {"opener"}, {"origin"}, {"outerHeight"},
  {"outerWidth"}, {"parent"}, {"performance"}, {"personalbar"}, {"postMessage"}, {"print"}, {"prompt"},
  {"releaseEvents"}, {"requestAnimationFrame"}, {"resizeBy"}, {"resizeTo"}, {"screen"}, {"screenLeft"},
  {"screenTop"}, {"screenX"}, {"screenY"}, {"scroll"}, {"scrollBy"}, {"scrollTo"}, {"scrollbars"},
  {"self"}, {"speechSynthesis"}, {"status"}, {"statusbar"}, {"stop"}, {"toolbar"}, {"top"},
  {"webkitURL"}, {"window"},
}

type DefineArgs struct {
  Loc             logger.Loc
  FindSymbol      func(logger.Loc, string) js_ast.Ref
  SymbolForDefine func(int) js_ast.Ref
}

type DefineFunc func(DefineArgs) js_ast.E

type DefineData struct {
  DefineFunc DefineFunc

  // True if accessing this value is known to not have any side effects. For
  // example, a bare reference to "Object.create" can be removed because it
  // does not have any observable side effects.
  CanBeRemovedIfUnused bool

  // True if a call to this value is known to not have any side effects. For
  // example, a bare call to "Object()" can be removed because it does not
  // have any observable side effects.
  CallCanBeUnwrappedIfUnused bool
}

func mergeDefineData(old DefineData, new DefineData) DefineData {
  if old.CanBeRemovedIfUnused {
    new.CanBeRemovedIfUnused = true
  }
  if old.CallCanBeUnwrappedIfUnused {
    new.CallCanBeUnwrappedIfUnused = true
  }
  return new
}

type DotDefine struct {
  Parts []string
  Data  DefineData
}

type ProcessedDefines struct {
  IdentifierDefines map[string]DefineData
  DotDefines        map[string][]DotDefine
}

// This transformation is expensive, so we only want to do it once. Make sure
// to only call processDefines() once per compilation. Unfortunately Golang
// doesn't have an efficient way to copy a map and the overhead of copying
// all of the properties into a new map once for every new parser noticeably
// slows down our benchmarks.
func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines {
  // Optimization: reuse known globals if there are no user-specified defines
  hasUserDefines := len(userDefines) != 0
  if !hasUserDefines {
    processedGlobalsMutex.Lock()
    if processedGlobals != nil {
      defer processedGlobalsMutex.Unlock()
      return *processedGlobals
    }
    processedGlobalsMutex.Unlock()
  }

  result := ProcessedDefines{
    IdentifierDefines: make(map[string]DefineData),
    DotDefines:        make(map[string][]DotDefine),
  }

  // Mark these property accesses as free of side effects. That means they can
  // be removed if their result is unused. We can't just remove all unused
  // property accesses since property accesses can have side effects. For
  // example, the property access "a.b.c" has the side effect of throwing an
  // exception if "a.b" is undefined.
  for _, parts := range knownGlobals {
    tail := parts[len(parts)-1]
    if len(parts) == 1 {
      result.IdentifierDefines[tail] = DefineData{CanBeRemovedIfUnused: true}
    } else {
      result.DotDefines[tail] = append(result.DotDefines[tail], DotDefine{Parts: parts, Data: DefineData{CanBeRemovedIfUnused: true}})
    }
  }

  // Swap in certain literal values because those can be constant folded
  result.IdentifierDefines["undefined"] = DefineData{
    DefineFunc: func(DefineArgs) js_ast.E { return js_ast.EUndefinedShared },
  }
  result.IdentifierDefines["NaN"] = DefineData{
    DefineFunc: func(DefineArgs) js_ast.E { return &js_ast.ENumber{Value: math.NaN()} },
  }
  result.IdentifierDefines["Infinity"] = DefineData{
    DefineFunc: func(DefineArgs) js_ast.E { return &js_ast.ENumber{Value: math.Inf(1)} },
  }

  // Then copy the user-specified defines in afterwards, which will overwrite
  // any known globals above.
  for key, data := range userDefines {
    parts := strings.Split(key, ".")

    // Identifier defines are special-cased
    if len(parts) == 1 {
      result.IdentifierDefines[key] = mergeDefineData(result.IdentifierDefines[key], data)
      continue
    }

    tail := parts[len(parts)-1]
    dotDefines := result.DotDefines[tail]
    found := false

    // Try to merge with existing dot defines first
    for i, define := range dotDefines {
      if arePartsEqual(parts, define.Parts) {
        define := &dotDefines[i]
        define.Data = mergeDefineData(define.Data, data)
        found = true
        break
      }
    }

    if !found {
      dotDefines = append(dotDefines, DotDefine{Parts: parts, Data: data})
    }
    result.DotDefines[tail] = dotDefines
  }

  // Potentially cache the result for next time
  if !hasUserDefines {
    processedGlobalsMutex.Lock()
    defer processedGlobalsMutex.Unlock()
    if processedGlobals == nil {
      processedGlobals = &result
    }
  }
  return result
}

func arePartsEqual(a []string, b []string) bool {
  if len(a) != len(b) {
    return false
  }
  for i := range a {
    if a[i] != b[i] {
      return false
    }
  }
  return true
}
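A short sketch of how a user define flows through this (the define key is hypothetical, and js_ast.EBoolean is assumed to be the boolean literal node from the js_ast package imported above):

func exampleDefines() {
  defines := ProcessDefines(map[string]DefineData{
    "process.env.DEBUG": {
      // Assumed boolean literal node; the zero value means "false".
      DefineFunc: func(DefineArgs) js_ast.E { return &js_ast.EBoolean{} },
    },
  })

  // Dot defines are keyed by the last part, so this entry lives under
  // "DEBUG" with Parts == ["process", "env", "DEBUG"].
  _ = defines.DotDefines["DEBUG"]
}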
646
vendor/github.com/evanw/esbuild/internal/css_ast/css_ast.go
generated
vendored
@@ -1,646 +0,0 @@
package css_ast

import (
  "strconv"

  "github.com/evanw/esbuild/internal/ast"
  "github.com/evanw/esbuild/internal/css_lexer"
  "github.com/evanw/esbuild/internal/helpers"
  "github.com/evanw/esbuild/internal/logger"
)

// CSS syntax comes in two layers: a minimal syntax that generally accepts
// anything that looks vaguely like CSS, and a large set of built-in rules
// (the things browsers actually interpret). That way CSS parsers can read
// unknown rules and skip over them without having to stop due to errors.
//
// This AST format is mostly just the minimal syntax. It parses unknown rules
// into a tree with enough information that it can write them back out again.
// There are some additional layers of syntax including selectors and @-rules
// which allow for better pretty-printing and minification.
//
// Most of the AST just references ranges of the original file by keeping the
// original "Token" values around from the lexer. This is a memory-efficient
// representation that helps provide good parsing and printing performance.

type AST struct {
  ImportRecords        []ast.ImportRecord
  Rules                []Rule
  SourceMapComment     logger.Span
  ApproximateLineCount int32
}

// We create a lot of tokens, so make sure this layout is memory-efficient.
// The layout here isn't optimal because it biases for convenience (e.g.
// "string" could be shorter) but at least the ordering of fields was
// deliberately chosen to minimize size.
type Token struct {
  // This is the raw contents of the token most of the time. However, it
  // contains the decoded string contents for "TString" tokens.
  Text string // 16 bytes

  // Contains the child tokens for component values that are simple blocks.
  // These are either "(", "{", "[", or function tokens. The closing token is
  // implicit and is not stored.
  Children *[]Token // 8 bytes

  // URL tokens have an associated import record at the top-level of the AST.
  // This index points to that import record.
  ImportRecordIndex uint32 // 4 bytes

  // The division between the number and the unit for "TDimension" tokens.
  UnitOffset uint16 // 2 bytes

  // This will never be "TWhitespace" because whitespace isn't stored as a
  // token directly. Instead it is stored in "HasWhitespaceAfter" on the
  // previous token. This is to make it easier to pattern-match against
  // tokens when handling CSS rules, since whitespace almost always doesn't
  // matter. That way you can pattern match against e.g. "rgb(r, g, b)" and
  // not have to handle all possible combinations of embedded whitespace
  // tokens.
  //
  // There is one exception to this: when in verbatim whitespace mode and
  // the token list is non-empty and is only whitespace tokens. In that case
  // a single whitespace token is emitted. This is because otherwise there
  // would be no tokens to attach the whitespace before/after flags to.
  Kind css_lexer.T // 1 byte

  // These flags indicate the presence of a "TWhitespace" token before or after
  // this token. There should be whitespace printed between two tokens if either
  // token indicates that there should be whitespace. Note that whitespace may
  // be altered by processing in certain situations (e.g. minification).
  Whitespace WhitespaceFlags // 1 byte
}

type WhitespaceFlags uint8

const (
  WhitespaceBefore WhitespaceFlags = 1 << iota
  WhitespaceAfter
)

func (a Token) Equal(b Token) bool {
  if a.Kind == b.Kind && a.Text == b.Text && a.ImportRecordIndex == b.ImportRecordIndex && a.Whitespace == b.Whitespace {
    if a.Children == nil && b.Children == nil {
      return true
    }

    if a.Children != nil && b.Children != nil && TokensEqual(*a.Children, *b.Children) {
      return true
    }
  }

  return false
}

func TokensEqual(a []Token, b []Token) bool {
  if len(a) != len(b) {
    return false
  }
  for i, c := range a {
    if !c.Equal(b[i]) {
      return false
    }
  }
  return true
}

func HashTokens(hash uint32, tokens []Token) uint32 {
  hash = helpers.HashCombine(hash, uint32(len(tokens)))

  for _, t := range tokens {
    hash = helpers.HashCombine(hash, uint32(t.Kind))
    hash = helpers.HashCombineString(hash, t.Text)
    if t.Children != nil {
      hash = HashTokens(hash, *t.Children)
    }
  }

  return hash
}

func (a Token) EqualIgnoringWhitespace(b Token) bool {
  if a.Kind == b.Kind && a.Text == b.Text && a.ImportRecordIndex == b.ImportRecordIndex {
    if a.Children == nil && b.Children == nil {
      return true
    }

    if a.Children != nil && b.Children != nil && TokensEqualIgnoringWhitespace(*a.Children, *b.Children) {
      return true
    }
  }

  return false
}

func TokensEqualIgnoringWhitespace(a []Token, b []Token) bool {
  if len(a) != len(b) {
    return false
  }
  for i, c := range a {
    if !c.EqualIgnoringWhitespace(b[i]) {
      return false
    }
  }
  return true
}

func TokensAreCommaSeparated(tokens []Token) bool {
  if n := len(tokens); (n & 1) != 0 {
    for i := 1; i < n; i += 2 {
      if tokens[i].Kind != css_lexer.TComma {
        return false
      }
    }
    return true
  }
  return false
}

func (t Token) FractionForPercentage() (float64, bool) {
  if t.Kind == css_lexer.TPercentage {
    if f, err := strconv.ParseFloat(t.PercentageValue(), 64); err == nil {
      if f < 0 {
        return 0, true
      }
      if f > 100 {
        return 1, true
      }
      return f / 100.0, true
    }
  }
  return 0, false
}

// https://drafts.csswg.org/css-values-3/#lengths
// For zero lengths the unit identifier is optional
// (i.e. can be syntactically represented as the <number> 0).
func (t *Token) TurnLengthIntoNumberIfZero() bool {
  if t.Kind == css_lexer.TDimension && t.DimensionValue() == "0" {
    t.Kind = css_lexer.TNumber
    t.Text = "0"
    return true
  }
  return false
}

func (t *Token) TurnLengthOrPercentageIntoNumberIfZero() bool {
  if t.Kind == css_lexer.TPercentage && t.PercentageValue() == "0" {
    t.Kind = css_lexer.TNumber
    t.Text = "0"
    return true
  }
  return t.TurnLengthIntoNumberIfZero()
}

func (t Token) PercentageValue() string {
  return t.Text[:len(t.Text)-1]
}

func (t Token) DimensionValue() string {
  return t.Text[:t.UnitOffset]
}

func (t Token) DimensionUnit() string {
  return t.Text[t.UnitOffset:]
}
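As a concrete illustration of the UnitOffset encoding used by the two accessors above (the token values are invented):

func exampleDimensionToken() {
  // "32px": Text holds both the number and the unit; UnitOffset marks
  // where the unit starts.
  t := Token{Kind: css_lexer.TDimension, Text: "32px", UnitOffset: 2}
  _ = t.DimensionValue() // "32"
  _ = t.DimensionUnit()  // "px"
}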
|
||||
|
||||
func (t Token) DimensionUnitIsSafeLength() bool {
|
||||
switch t.DimensionUnit() {
|
||||
// These units can be reasonably expected to be supported everywhere.
|
||||
// Information used: https://developer.mozilla.org/en-US/docs/Web/CSS/length
|
||||
case "cm", "em", "in", "mm", "pc", "pt", "px":
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (t Token) IsZero() bool {
|
||||
return t.Kind == css_lexer.TNumber && t.Text == "0"
|
||||
}
|
||||
|
||||
func (t Token) IsOne() bool {
|
||||
return t.Kind == css_lexer.TNumber && t.Text == "1"
|
||||
}
|
||||
|
||||
func (t Token) IsAngle() bool {
|
||||
if t.Kind == css_lexer.TDimension {
|
||||
unit := t.DimensionUnit()
|
||||
return unit == "deg" || unit == "grad" || unit == "rad" || unit == "turn"
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func CloneTokensWithImportRecords(
|
||||
tokensIn []Token, importRecordsIn []ast.ImportRecord,
|
||||
tokensOut []Token, importRecordsOut []ast.ImportRecord,
|
||||
) ([]Token, []ast.ImportRecord) {
|
||||
for _, t := range tokensIn {
|
||||
// If this is a URL token, also clone the import record
|
||||
if t.Kind == css_lexer.TURL {
|
||||
importRecordIndex := uint32(len(importRecordsOut))
|
||||
importRecordsOut = append(importRecordsOut, importRecordsIn[t.ImportRecordIndex])
|
||||
t.ImportRecordIndex = importRecordIndex
|
||||
}
|
||||
|
||||
// Also search for URL tokens in this token's children
|
||||
if t.Children != nil {
|
||||
var children []Token
|
||||
children, importRecordsOut = CloneTokensWithImportRecords(*t.Children, importRecordsIn, children, importRecordsOut)
|
||||
t.Children = &children
|
||||
}
|
||||
|
||||
tokensOut = append(tokensOut, t)
|
||||
}
|
||||
|
||||
return tokensOut, importRecordsOut
|
||||
}
|
||||
|
||||
type Rule struct {
|
||||
Loc logger.Loc
|
||||
Data R
|
||||
}
|
||||
|
||||
type R interface {
|
||||
Equal(rule R) bool
|
||||
Hash() (uint32, bool)
|
||||
}
|
||||
|
||||
func RulesEqual(a []Rule, b []Rule) bool {
|
||||
if len(a) != len(b) {
|
||||
return false
|
||||
}
|
||||
for i, c := range a {
|
||||
if !c.Data.Equal(b[i].Data) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func HashRules(hash uint32, rules []Rule) uint32 {
|
||||
hash = helpers.HashCombine(hash, uint32(len(rules)))
|
||||
for _, child := range rules {
|
||||
if childHash, ok := child.Data.Hash(); ok {
|
||||
hash = helpers.HashCombine(hash, childHash)
|
||||
} else {
|
||||
hash = helpers.HashCombine(hash, 0)
|
||||
}
|
||||
}
|
||||
return hash
|
||||
}
|
||||
type RAtCharset struct {
	Encoding string
}

func (a *RAtCharset) Equal(rule R) bool {
	b, ok := rule.(*RAtCharset)
	return ok && a.Encoding == b.Encoding
}

func (r *RAtCharset) Hash() (uint32, bool) {
	hash := uint32(1)
	hash = helpers.HashCombineString(hash, r.Encoding)
	return hash, true
}

type RAtImport struct {
	ImportRecordIndex uint32
	ImportConditions  []Token
}

func (*RAtImport) Equal(rule R) bool {
	return false
}

func (r *RAtImport) Hash() (uint32, bool) {
	return 0, false
}

type RAtKeyframes struct {
	AtToken string
	Name    string
	Blocks  []KeyframeBlock
}

type KeyframeBlock struct {
	Selectors []string
	Rules     []Rule
}

func (a *RAtKeyframes) Equal(rule R) bool {
	b, ok := rule.(*RAtKeyframes)
	if ok && a.AtToken == b.AtToken && a.Name == b.Name && len(a.Blocks) == len(b.Blocks) {
		for i, ai := range a.Blocks {
			bi := b.Blocks[i]
			if len(ai.Selectors) != len(bi.Selectors) {
				return false
			}
			for j, aj := range ai.Selectors {
				if aj != bi.Selectors[j] {
					return false
				}
			}
			if !RulesEqual(ai.Rules, bi.Rules) {
				return false
			}
		}
		return true
	}
	return false
}

func (r *RAtKeyframes) Hash() (uint32, bool) {
	hash := uint32(2)
	hash = helpers.HashCombineString(hash, r.AtToken)
	hash = helpers.HashCombineString(hash, r.Name)
	hash = helpers.HashCombine(hash, uint32(len(r.Blocks)))
	for _, block := range r.Blocks {
		hash = helpers.HashCombine(hash, uint32(len(block.Selectors)))
		for _, sel := range block.Selectors {
			hash = helpers.HashCombineString(hash, sel)
		}
		hash = HashRules(hash, block.Rules)
	}
	return hash, true
}

type RKnownAt struct {
	AtToken string
	Prelude []Token
	Rules   []Rule
}

func (a *RKnownAt) Equal(rule R) bool {
	b, ok := rule.(*RKnownAt)
	return ok && a.AtToken == b.AtToken && TokensEqual(a.Prelude, b.Prelude) && RulesEqual(a.Rules, b.Rules)
}

func (r *RKnownAt) Hash() (uint32, bool) {
	hash := uint32(3)
	hash = helpers.HashCombineString(hash, r.AtToken)
	hash = HashTokens(hash, r.Prelude)
	hash = HashRules(hash, r.Rules)
	return hash, true
}

type RUnknownAt struct {
	AtToken string
	Prelude []Token
	Block   []Token
}

func (a *RUnknownAt) Equal(rule R) bool {
	b, ok := rule.(*RUnknownAt)
	return ok && a.AtToken == b.AtToken && TokensEqual(a.Prelude, b.Prelude) && TokensEqual(a.Block, b.Block)
}

func (r *RUnknownAt) Hash() (uint32, bool) {
	hash := uint32(4)
	hash = helpers.HashCombineString(hash, r.AtToken)
	hash = HashTokens(hash, r.Prelude)
	hash = HashTokens(hash, r.Block)
	return hash, true
}
type RSelector struct {
	Selectors []ComplexSelector
	Rules     []Rule
}

func (a *RSelector) Equal(rule R) bool {
	b, ok := rule.(*RSelector)
	if ok && len(a.Selectors) == len(b.Selectors) {
		for i, sel := range a.Selectors {
			if !sel.Equal(b.Selectors[i]) {
				return false
			}
		}
		return RulesEqual(a.Rules, b.Rules)
	}

	return false
}

func (r *RSelector) Hash() (uint32, bool) {
	hash := uint32(5)
	hash = helpers.HashCombine(hash, uint32(len(r.Selectors)))
	for _, complex := range r.Selectors {
		hash = helpers.HashCombine(hash, uint32(len(complex.Selectors)))
		for _, sel := range complex.Selectors {
			if sel.TypeSelector != nil {
				hash = helpers.HashCombineString(hash, sel.TypeSelector.Name.Text)
			} else {
				hash = helpers.HashCombine(hash, 0)
			}
			hash = helpers.HashCombine(hash, uint32(len(sel.SubclassSelectors)))
			for _, sub := range sel.SubclassSelectors {
				hash = helpers.HashCombine(hash, sub.Hash())
			}
			hash = helpers.HashCombineString(hash, sel.Combinator)
		}
	}
	hash = HashRules(hash, r.Rules)
	return hash, true
}

type RQualified struct {
	Prelude []Token
	Rules   []Rule
}

func (a *RQualified) Equal(rule R) bool {
	b, ok := rule.(*RQualified)
	return ok && TokensEqual(a.Prelude, b.Prelude) && RulesEqual(a.Rules, b.Rules)
}

func (r *RQualified) Hash() (uint32, bool) {
	hash := uint32(6)
	hash = HashTokens(hash, r.Prelude)
	hash = HashRules(hash, r.Rules)
	return hash, true
}

type RDeclaration struct {
	KeyText   string
	Value     []Token
	KeyRange  logger.Range
	Key       D // Compare using this instead of "KeyText" for speed
	Important bool
}

func (a *RDeclaration) Equal(rule R) bool {
	b, ok := rule.(*RDeclaration)
	return ok && a.KeyText == b.KeyText && TokensEqual(a.Value, b.Value) && a.Important == b.Important
}

func (r *RDeclaration) Hash() (uint32, bool) {
	hash := uint32(7)
	hash = helpers.HashCombine(hash, uint32(r.Key))
	hash = HashTokens(hash, r.Value)
	return hash, true
}

type RBadDeclaration struct {
	Tokens []Token
}

func (a *RBadDeclaration) Equal(rule R) bool {
	b, ok := rule.(*RBadDeclaration)
	return ok && TokensEqual(a.Tokens, b.Tokens)
}

func (r *RBadDeclaration) Hash() (uint32, bool) {
	hash := uint32(8)
	hash = HashTokens(hash, r.Tokens)
	return hash, true
}

type RComment struct {
	Text string
}

func (a *RComment) Equal(rule R) bool {
	b, ok := rule.(*RComment)
	return ok && a.Text == b.Text
}

func (r *RComment) Hash() (uint32, bool) {
	hash := uint32(9)
	hash = helpers.HashCombineString(hash, r.Text)
	return hash, true
}

type ComplexSelector struct {
	Selectors []CompoundSelector
}

func (a ComplexSelector) Equal(b ComplexSelector) bool {
	if len(a.Selectors) != len(b.Selectors) {
		return false
	}

	for i, ai := range a.Selectors {
		bi := b.Selectors[i]
		if ai.HasNestPrefix != bi.HasNestPrefix || ai.Combinator != bi.Combinator {
			return false
		}

		if ats, bts := ai.TypeSelector, bi.TypeSelector; (ats == nil) != (bts == nil) {
			return false
		} else if ats != nil && bts != nil && !ats.Equal(*bts) {
			return false
		}

		if len(ai.SubclassSelectors) != len(bi.SubclassSelectors) {
			return false
		}
		for j, aj := range ai.SubclassSelectors {
			if !aj.Equal(bi.SubclassSelectors[j]) {
				return false
			}
		}
	}

	return true
}

type CompoundSelector struct {
	HasNestPrefix     bool   // "&"
	Combinator        string // Optional, may be ""
	TypeSelector      *NamespacedName
	SubclassSelectors []SS
}

type NameToken struct {
	Kind css_lexer.T
	Text string
}

type NamespacedName struct {
	// If present, this is an identifier or "*" and is followed by a "|" character
	NamespacePrefix *NameToken

	// This is an identifier or "*"
	Name NameToken
}

func (a NamespacedName) Equal(b NamespacedName) bool {
	return a.Name == b.Name && (a.NamespacePrefix == nil) == (b.NamespacePrefix == nil) &&
		(a.NamespacePrefix == nil || b.NamespacePrefix == nil || *a.NamespacePrefix == *b.NamespacePrefix)
}

type SS interface {
	Equal(ss SS) bool
	Hash() uint32
}

type SSHash struct {
	Name string
}

func (a *SSHash) Equal(ss SS) bool {
	b, ok := ss.(*SSHash)
	return ok && a.Name == b.Name
}

func (ss *SSHash) Hash() uint32 {
	hash := uint32(1)
	hash = helpers.HashCombineString(hash, ss.Name)
	return hash
}

type SSClass struct {
	Name string
}

func (a *SSClass) Equal(ss SS) bool {
	b, ok := ss.(*SSClass)
	return ok && a.Name == b.Name
}

func (ss *SSClass) Hash() uint32 {
	hash := uint32(2)
	hash = helpers.HashCombineString(hash, ss.Name)
	return hash
}

type SSAttribute struct {
	NamespacedName  NamespacedName
	MatcherOp       string // Either "" or one of: "=" "~=" "|=" "^=" "$=" "*="
	MatcherValue    string
	MatcherModifier byte // Either 0 or one of: 'i' 'I' 's' 'S'
}

func (a *SSAttribute) Equal(ss SS) bool {
	b, ok := ss.(*SSAttribute)
	return ok && a.NamespacedName.Equal(b.NamespacedName) && a.MatcherOp == b.MatcherOp &&
		a.MatcherValue == b.MatcherValue && a.MatcherModifier == b.MatcherModifier
}

func (ss *SSAttribute) Hash() uint32 {
	hash := uint32(3)
	hash = helpers.HashCombineString(hash, ss.NamespacedName.Name.Text)
	hash = helpers.HashCombineString(hash, ss.MatcherOp)
	hash = helpers.HashCombineString(hash, ss.MatcherValue)
	return hash
}

type SSPseudoClass struct {
	Name      string
	Args      []Token
	IsElement bool // If true, this is prefixed by "::" instead of ":"
}

func (a *SSPseudoClass) Equal(ss SS) bool {
	b, ok := ss.(*SSPseudoClass)
	return ok && a.Name == b.Name && TokensEqual(a.Args, b.Args) && a.IsElement == b.IsElement
}

func (ss *SSPseudoClass) Hash() uint32 {
	hash := uint32(4)
	hash = helpers.HashCombineString(hash, ss.Name)
	hash = HashTokens(hash, ss.Args)
	return hash
}
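The selector types above mirror the CSS grammar: a ComplexSelector is a chain of CompoundSelectors joined by combinators, and each compound part carries an optional type selector plus a list of subclass selectors (SS). As a rough illustration (a sketch, not taken from the upstream tests; token kinds and source ranges are elided), the selector "a.btn:hover" would be modeled like this:

// Sketch: the AST shape for the selector "a.btn:hover".
func exampleSelector() ComplexSelector {
	return ComplexSelector{
		Selectors: []CompoundSelector{{
			TypeSelector: &NamespacedName{Name: NameToken{Text: "a"}},
			SubclassSelectors: []SS{
				&SSClass{Name: "btn"},
				&SSPseudoClass{Name: "hover"},
			},
		}},
	}
}

Equality over these values is structural, field by field, which is exactly what the Equal methods above implement.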
642 vendor/github.com/evanw/esbuild/internal/css_ast/css_decl_table.go generated vendored
@@ -1,642 +0,0 @@
package css_ast

type D uint16

const (
	DUnknown D = iota
	DAlignContent
	DAlignItems
	DAlignSelf
	DAlignmentBaseline
	DAll
	DAnimation
	DAnimationDelay
	DAnimationDirection
	DAnimationDuration
	DAnimationFillMode
	DAnimationIterationCount
	DAnimationName
	DAnimationPlayState
	DAnimationTimingFunction
	DBackfaceVisibility
	DBackground
	DBackgroundAttachment
	DBackgroundClip
	DBackgroundColor
	DBackgroundImage
	DBackgroundOrigin
	DBackgroundPosition
	DBackgroundPositionX
	DBackgroundPositionY
	DBackgroundRepeat
	DBackgroundSize
	DBaselineShift
	DBlockSize
	DBorder
	DBorderBlockEnd
	DBorderBlockEndColor
	DBorderBlockEndStyle
	DBorderBlockEndWidth
	DBorderBlockStart
	DBorderBlockStartColor
	DBorderBlockStartStyle
	DBorderBlockStartWidth
	DBorderBottom
	DBorderBottomColor
	DBorderBottomLeftRadius
	DBorderBottomRightRadius
	DBorderBottomStyle
	DBorderBottomWidth
	DBorderCollapse
	DBorderColor
	DBorderImage
	DBorderImageOutset
	DBorderImageRepeat
	DBorderImageSlice
	DBorderImageSource
	DBorderImageWidth
	DBorderInlineEnd
	DBorderInlineEndColor
	DBorderInlineEndStyle
	DBorderInlineEndWidth
	DBorderInlineStart
	DBorderInlineStartColor
	DBorderInlineStartStyle
	DBorderInlineStartWidth
	DBorderLeft
	DBorderLeftColor
	DBorderLeftStyle
	DBorderLeftWidth
	DBorderRadius
	DBorderRight
	DBorderRightColor
	DBorderRightStyle
	DBorderRightWidth
	DBorderSpacing
	DBorderStyle
	DBorderTop
	DBorderTopColor
	DBorderTopLeftRadius
	DBorderTopRightRadius
	DBorderTopStyle
	DBorderTopWidth
	DBorderWidth
	DBottom
	DBoxShadow
	DBoxSizing
	DBreakAfter
	DBreakBefore
	DBreakInside
	DCaptionSide
	DCaretColor
	DClear
	DClip
	DClipPath
	DClipRule
	DColor
	DColorInterpolation
	DColorInterpolationFilters
	DColumnCount
	DColumnFill
	DColumnGap
	DColumnRule
	DColumnRuleColor
	DColumnRuleStyle
	DColumnRuleWidth
	DColumnSpan
	DColumnWidth
	DColumns
	DContent
	DCounterIncrement
	DCounterReset
	DCssFloat
	DCssText
	DCursor
	DDirection
	DDisplay
	DDominantBaseline
	DEmptyCells
	DFill
	DFillOpacity
	DFillRule
	DFilter
	DFlex
	DFlexBasis
	DFlexDirection
	DFlexFlow
	DFlexGrow
	DFlexShrink
	DFlexWrap
	DFloat
	DFloodColor
	DFloodOpacity
	DFont
	DFontFamily
	DFontFeatureSettings
	DFontKerning
	DFontSize
	DFontSizeAdjust
	DFontStretch
	DFontStyle
	DFontSynthesis
	DFontVariant
	DFontVariantCaps
	DFontVariantEastAsian
	DFontVariantLigatures
	DFontVariantNumeric
	DFontVariantPosition
	DFontWeight
	DGap
	DGlyphOrientationVertical
	DGrid
	DGridArea
	DGridAutoColumns
	DGridAutoFlow
	DGridAutoRows
	DGridColumn
	DGridColumnEnd
	DGridColumnGap
	DGridColumnStart
	DGridGap
	DGridRow
	DGridRowEnd
	DGridRowGap
	DGridRowStart
	DGridTemplate
	DGridTemplateAreas
	DGridTemplateColumns
	DGridTemplateRows
	DHeight
	DHyphens
	DImageOrientation
	DImageRendering
	DInlineSize
	DInset
	DJustifyContent
	DJustifyItems
	DJustifySelf
	DLeft
	DLetterSpacing
	DLightingColor
	DLineBreak
	DLineHeight
	DListStyle
	DListStyleImage
	DListStylePosition
	DListStyleType
	DMargin
	DMarginBlockEnd
	DMarginBlockStart
	DMarginBottom
	DMarginInlineEnd
	DMarginInlineStart
	DMarginLeft
	DMarginRight
	DMarginTop
	DMarker
	DMarkerEnd
	DMarkerMid
	DMarkerStart
	DMask
	DMaskComposite
	DMaskImage
	DMaskPosition
	DMaskRepeat
	DMaskSize
	DMaskType
	DMaxBlockSize
	DMaxHeight
	DMaxInlineSize
	DMaxWidth
	DMinBlockSize
	DMinHeight
	DMinInlineSize
	DMinWidth
	DObjectFit
	DObjectPosition
	DOpacity
	DOrder
	DOrphans
	DOutline
	DOutlineColor
	DOutlineOffset
	DOutlineStyle
	DOutlineWidth
	DOverflow
	DOverflowAnchor
	DOverflowWrap
	DOverflowX
	DOverflowY
	DOverscrollBehavior
	DOverscrollBehaviorBlock
	DOverscrollBehaviorInline
	DOverscrollBehaviorX
	DOverscrollBehaviorY
	DPadding
	DPaddingBlockEnd
	DPaddingBlockStart
	DPaddingBottom
	DPaddingInlineEnd
	DPaddingInlineStart
	DPaddingLeft
	DPaddingRight
	DPaddingTop
	DPageBreakAfter
	DPageBreakBefore
	DPageBreakInside
	DPaintOrder
	DPerspective
	DPerspectiveOrigin
	DPlaceContent
	DPlaceItems
	DPlaceSelf
	DPointerEvents
	DPosition
	DQuotes
	DResize
	DRight
	DRotate
	DRowGap
	DRubyAlign
	DRubyPosition
	DScale
	DScrollBehavior
	DShapeRendering
	DStopColor
	DStopOpacity
	DStroke
	DStrokeDasharray
	DStrokeDashoffset
	DStrokeLinecap
	DStrokeLinejoin
	DStrokeMiterlimit
	DStrokeOpacity
	DStrokeWidth
	DTabSize
	DTableLayout
	DTextAlign
	DTextAlignLast
	DTextAnchor
	DTextCombineUpright
	DTextDecoration
	DTextDecorationColor
	DTextDecorationLine
	DTextDecorationStyle
	DTextEmphasis
	DTextEmphasisColor
	DTextEmphasisPosition
	DTextEmphasisStyle
	DTextIndent
	DTextJustify
	DTextOrientation
	DTextOverflow
	DTextRendering
	DTextShadow
	DTextTransform
	DTextUnderlinePosition
	DTop
	DTouchAction
	DTransform
	DTransformBox
	DTransformOrigin
	DTransformStyle
	DTransition
	DTransitionDelay
	DTransitionDuration
	DTransitionProperty
	DTransitionTimingFunction
	DTranslate
	DUnicodeBidi
	DUserSelect
	DVerticalAlign
	DVisibility
	DWhiteSpace
	DWidows
	DWidth
	DWillChange
	DWordBreak
	DWordSpacing
	DWordWrap
	DWritingMode
	DZIndex
	DZoom
)

var KnownDeclarations = map[string]D{
	"align-content":               DAlignContent,
	"align-items":                 DAlignItems,
	"align-self":                  DAlignSelf,
	"alignment-baseline":          DAlignmentBaseline,
	"all":                         DAll,
	"animation":                   DAnimation,
	"animation-delay":             DAnimationDelay,
	"animation-direction":         DAnimationDirection,
	"animation-duration":          DAnimationDuration,
	"animation-fill-mode":         DAnimationFillMode,
	"animation-iteration-count":   DAnimationIterationCount,
	"animation-name":              DAnimationName,
	"animation-play-state":        DAnimationPlayState,
	"animation-timing-function":   DAnimationTimingFunction,
	"backface-visibility":         DBackfaceVisibility,
	"background":                  DBackground,
	"background-attachment":       DBackgroundAttachment,
	"background-clip":             DBackgroundClip,
	"background-color":            DBackgroundColor,
	"background-image":            DBackgroundImage,
	"background-origin":           DBackgroundOrigin,
	"background-position":         DBackgroundPosition,
	"background-position-x":       DBackgroundPositionX,
	"background-position-y":       DBackgroundPositionY,
	"background-repeat":           DBackgroundRepeat,
	"background-size":             DBackgroundSize,
	"baseline-shift":              DBaselineShift,
	"block-size":                  DBlockSize,
	"border":                      DBorder,
	"border-block-end":            DBorderBlockEnd,
	"border-block-end-color":      DBorderBlockEndColor,
	"border-block-end-style":      DBorderBlockEndStyle,
	"border-block-end-width":      DBorderBlockEndWidth,
	"border-block-start":          DBorderBlockStart,
	"border-block-start-color":    DBorderBlockStartColor,
	"border-block-start-style":    DBorderBlockStartStyle,
	"border-block-start-width":    DBorderBlockStartWidth,
	"border-bottom":               DBorderBottom,
	"border-bottom-color":         DBorderBottomColor,
	"border-bottom-left-radius":   DBorderBottomLeftRadius,
	"border-bottom-right-radius":  DBorderBottomRightRadius,
	"border-bottom-style":         DBorderBottomStyle,
	"border-bottom-width":         DBorderBottomWidth,
	"border-collapse":             DBorderCollapse,
	"border-color":                DBorderColor,
	"border-image":                DBorderImage,
	"border-image-outset":         DBorderImageOutset,
	"border-image-repeat":         DBorderImageRepeat,
	"border-image-slice":          DBorderImageSlice,
	"border-image-source":         DBorderImageSource,
	"border-image-width":          DBorderImageWidth,
	"border-inline-end":           DBorderInlineEnd,
	"border-inline-end-color":     DBorderInlineEndColor,
	"border-inline-end-style":     DBorderInlineEndStyle,
	"border-inline-end-width":     DBorderInlineEndWidth,
	"border-inline-start":         DBorderInlineStart,
	"border-inline-start-color":   DBorderInlineStartColor,
	"border-inline-start-style":   DBorderInlineStartStyle,
	"border-inline-start-width":   DBorderInlineStartWidth,
	"border-left":                 DBorderLeft,
	"border-left-color":           DBorderLeftColor,
	"border-left-style":           DBorderLeftStyle,
	"border-left-width":           DBorderLeftWidth,
	"border-radius":               DBorderRadius,
	"border-right":                DBorderRight,
	"border-right-color":          DBorderRightColor,
	"border-right-style":          DBorderRightStyle,
	"border-right-width":          DBorderRightWidth,
	"border-spacing":              DBorderSpacing,
	"border-style":                DBorderStyle,
	"border-top":                  DBorderTop,
	"border-top-color":            DBorderTopColor,
	"border-top-left-radius":      DBorderTopLeftRadius,
	"border-top-right-radius":     DBorderTopRightRadius,
	"border-top-style":            DBorderTopStyle,
	"border-top-width":            DBorderTopWidth,
	"border-width":                DBorderWidth,
	"bottom":                      DBottom,
	"box-shadow":                  DBoxShadow,
	"box-sizing":                  DBoxSizing,
	"break-after":                 DBreakAfter,
	"break-before":                DBreakBefore,
	"break-inside":                DBreakInside,
	"caption-side":                DCaptionSide,
	"caret-color":                 DCaretColor,
	"clear":                       DClear,
	"clip":                        DClip,
	"clip-path":                   DClipPath,
	"clip-rule":                   DClipRule,
	"color":                       DColor,
	"color-interpolation":         DColorInterpolation,
	"color-interpolation-filters": DColorInterpolationFilters,
	"column-count":                DColumnCount,
	"column-fill":                 DColumnFill,
	"column-gap":                  DColumnGap,
	"column-rule":                 DColumnRule,
	"column-rule-color":           DColumnRuleColor,
	"column-rule-style":           DColumnRuleStyle,
	"column-rule-width":           DColumnRuleWidth,
	"column-span":                 DColumnSpan,
	"column-width":                DColumnWidth,
	"columns":                     DColumns,
	"content":                     DContent,
	"counter-increment":           DCounterIncrement,
	"counter-reset":               DCounterReset,
	"css-float":                   DCssFloat,
	"css-text":                    DCssText,
	"cursor":                      DCursor,
	"direction":                   DDirection,
	"display":                     DDisplay,
	"dominant-baseline":           DDominantBaseline,
	"empty-cells":                 DEmptyCells,
	"fill":                        DFill,
	"fill-opacity":                DFillOpacity,
	"fill-rule":                   DFillRule,
	"filter":                      DFilter,
	"flex":                        DFlex,
	"flex-basis":                  DFlexBasis,
	"flex-direction":              DFlexDirection,
	"flex-flow":                   DFlexFlow,
	"flex-grow":                   DFlexGrow,
	"flex-shrink":                 DFlexShrink,
	"flex-wrap":                   DFlexWrap,
	"float":                       DFloat,
	"flood-color":                 DFloodColor,
	"flood-opacity":               DFloodOpacity,
	"font":                        DFont,
	"font-family":                 DFontFamily,
	"font-feature-settings":       DFontFeatureSettings,
	"font-kerning":                DFontKerning,
	"font-size":                   DFontSize,
	"font-size-adjust":            DFontSizeAdjust,
	"font-stretch":                DFontStretch,
	"font-style":                  DFontStyle,
	"font-synthesis":              DFontSynthesis,
	"font-variant":                DFontVariant,
	"font-variant-caps":           DFontVariantCaps,
	"font-variant-east-asian":     DFontVariantEastAsian,
	"font-variant-ligatures":      DFontVariantLigatures,
	"font-variant-numeric":        DFontVariantNumeric,
	"font-variant-position":       DFontVariantPosition,
	"font-weight":                 DFontWeight,
	"gap":                         DGap,
	"glyph-orientation-vertical":  DGlyphOrientationVertical,
	"grid":                        DGrid,
	"grid-area":                   DGridArea,
	"grid-auto-columns":           DGridAutoColumns,
	"grid-auto-flow":              DGridAutoFlow,
	"grid-auto-rows":              DGridAutoRows,
	"grid-column":                 DGridColumn,
	"grid-column-end":             DGridColumnEnd,
	"grid-column-gap":             DGridColumnGap,
	"grid-column-start":           DGridColumnStart,
	"grid-gap":                    DGridGap,
	"grid-row":                    DGridRow,
	"grid-row-end":                DGridRowEnd,
	"grid-row-gap":                DGridRowGap,
	"grid-row-start":              DGridRowStart,
	"grid-template":               DGridTemplate,
	"grid-template-areas":         DGridTemplateAreas,
	"grid-template-columns":       DGridTemplateColumns,
	"grid-template-rows":          DGridTemplateRows,
	"height":                      DHeight,
	"hyphens":                     DHyphens,
	"image-orientation":           DImageOrientation,
	"image-rendering":             DImageRendering,
	"inline-size":                 DInlineSize,
	"inset":                       DInset,
	"justify-content":             DJustifyContent,
	"justify-items":               DJustifyItems,
	"justify-self":                DJustifySelf,
	"left":                        DLeft,
	"letter-spacing":              DLetterSpacing,
	"lighting-color":              DLightingColor,
	"line-break":                  DLineBreak,
	"line-height":                 DLineHeight,
	"list-style":                  DListStyle,
	"list-style-image":            DListStyleImage,
	"list-style-position":         DListStylePosition,
	"list-style-type":             DListStyleType,
	"margin":                      DMargin,
	"margin-block-end":            DMarginBlockEnd,
	"margin-block-start":          DMarginBlockStart,
	"margin-bottom":               DMarginBottom,
	"margin-inline-end":           DMarginInlineEnd,
	"margin-inline-start":         DMarginInlineStart,
	"margin-left":                 DMarginLeft,
	"margin-right":                DMarginRight,
	"margin-top":                  DMarginTop,
	"marker":                      DMarker,
	"marker-end":                  DMarkerEnd,
	"marker-mid":                  DMarkerMid,
	"marker-start":                DMarkerStart,
	"mask":                        DMask,
	"mask-composite":              DMaskComposite,
	"mask-image":                  DMaskImage,
	"mask-position":               DMaskPosition,
	"mask-repeat":                 DMaskRepeat,
	"mask-size":                   DMaskSize,
	"mask-type":                   DMaskType,
	"max-block-size":              DMaxBlockSize,
	"max-height":                  DMaxHeight,
	"max-inline-size":             DMaxInlineSize,
	"max-width":                   DMaxWidth,
	"min-block-size":              DMinBlockSize,
	"min-height":                  DMinHeight,
	"min-inline-size":             DMinInlineSize,
	"min-width":                   DMinWidth,
	"object-fit":                  DObjectFit,
	"object-position":             DObjectPosition,
	"opacity":                     DOpacity,
	"order":                       DOrder,
	"orphans":                     DOrphans,
	"outline":                     DOutline,
	"outline-color":               DOutlineColor,
	"outline-offset":              DOutlineOffset,
	"outline-style":               DOutlineStyle,
	"outline-width":               DOutlineWidth,
	"overflow":                    DOverflow,
	"overflow-anchor":             DOverflowAnchor,
	"overflow-wrap":               DOverflowWrap,
	"overflow-x":                  DOverflowX,
	"overflow-y":                  DOverflowY,
	"overscroll-behavior":         DOverscrollBehavior,
	"overscroll-behavior-block":   DOverscrollBehaviorBlock,
	"overscroll-behavior-inline":  DOverscrollBehaviorInline,
	"overscroll-behavior-x":       DOverscrollBehaviorX,
	"overscroll-behavior-y":       DOverscrollBehaviorY,
	"padding":                     DPadding,
	"padding-block-end":           DPaddingBlockEnd,
	"padding-block-start":         DPaddingBlockStart,
	"padding-bottom":              DPaddingBottom,
	"padding-inline-end":          DPaddingInlineEnd,
	"padding-inline-start":        DPaddingInlineStart,
	"padding-left":                DPaddingLeft,
	"padding-right":               DPaddingRight,
	"padding-top":                 DPaddingTop,
	"page-break-after":            DPageBreakAfter,
	"page-break-before":           DPageBreakBefore,
	"page-break-inside":           DPageBreakInside,
	"paint-order":                 DPaintOrder,
	"perspective":                 DPerspective,
	"perspective-origin":          DPerspectiveOrigin,
	"place-content":               DPlaceContent,
	"place-items":                 DPlaceItems,
	"place-self":                  DPlaceSelf,
	"pointer-events":              DPointerEvents,
	"position":                    DPosition,
	"quotes":                      DQuotes,
	"resize":                      DResize,
	"right":                       DRight,
	"rotate":                      DRotate,
	"row-gap":                     DRowGap,
	"ruby-align":                  DRubyAlign,
	"ruby-position":               DRubyPosition,
	"scale":                       DScale,
	"scroll-behavior":             DScrollBehavior,
	"shape-rendering":             DShapeRendering,
	"stop-color":                  DStopColor,
	"stop-opacity":                DStopOpacity,
	"stroke":                      DStroke,
	"stroke-dasharray":            DStrokeDasharray,
	"stroke-dashoffset":           DStrokeDashoffset,
	"stroke-linecap":              DStrokeLinecap,
	"stroke-linejoin":             DStrokeLinejoin,
	"stroke-miterlimit":           DStrokeMiterlimit,
	"stroke-opacity":              DStrokeOpacity,
	"stroke-width":                DStrokeWidth,
	"tab-size":                    DTabSize,
	"table-layout":                DTableLayout,
	"text-align":                  DTextAlign,
	"text-align-last":             DTextAlignLast,
	"text-anchor":                 DTextAnchor,
	"text-combine-upright":        DTextCombineUpright,
	"text-decoration":             DTextDecoration,
	"text-decoration-color":       DTextDecorationColor,
	"text-decoration-line":        DTextDecorationLine,
	"text-decoration-style":       DTextDecorationStyle,
	"text-emphasis":               DTextEmphasis,
	"text-emphasis-color":         DTextEmphasisColor,
	"text-emphasis-position":      DTextEmphasisPosition,
	"text-emphasis-style":         DTextEmphasisStyle,
	"text-indent":                 DTextIndent,
	"text-justify":                DTextJustify,
	"text-orientation":            DTextOrientation,
	"text-overflow":               DTextOverflow,
	"text-rendering":              DTextRendering,
	"text-shadow":                 DTextShadow,
	"text-transform":              DTextTransform,
	"text-underline-position":     DTextUnderlinePosition,
	"top":                         DTop,
	"touch-action":                DTouchAction,
	"transform":                   DTransform,
	"transform-box":               DTransformBox,
	"transform-origin":            DTransformOrigin,
	"transform-style":             DTransformStyle,
	"transition":                  DTransition,
	"transition-delay":            DTransitionDelay,
	"transition-duration":         DTransitionDuration,
	"transition-property":         DTransitionProperty,
	"transition-timing-function":  DTransitionTimingFunction,
	"translate":                   DTranslate,
	"unicode-bidi":                DUnicodeBidi,
	"user-select":                 DUserSelect,
	"vertical-align":              DVerticalAlign,
	"visibility":                  DVisibility,
	"white-space":                 DWhiteSpace,
	"widows":                      DWidows,
	"width":                       DWidth,
	"will-change":                 DWillChange,
	"word-break":                  DWordBreak,
	"word-spacing":                DWordSpacing,
	"word-wrap":                   DWordWrap,
	"writing-mode":                DWritingMode,
	"z-index":                     DZIndex,
	"zoom":                        DZoom,
}
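This table exists so the parser can classify a property once and afterwards compare small integer codes instead of strings: RDeclaration.Key (from css_ast.go above) is filled from it, with unknown properties falling back to DUnknown. A hedged sketch of the lookup (declKey is an illustrative name, and the strings.ToLower normalization is an assumption here, not necessarily what upstream does):

// Sketch: classifying a declaration key. Assumes "strings" is imported.
func declKey(name string) D {
	if key, ok := KnownDeclarations[strings.ToLower(name)]; ok {
		return key // e.g. declKey("Margin-Top") == DMarginTop
	}
	return DUnknown
}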
969 vendor/github.com/evanw/esbuild/internal/css_lexer/css_lexer.go generated vendored
@@ -1,969 +0,0 @@
package css_lexer

import (
	"strings"
	"unicode/utf8"

	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/logger"
)

// The lexer converts a source file to a stream of tokens. Unlike esbuild's
// JavaScript lexer, this CSS lexer runs to completion before the CSS parser
// begins, resulting in a single array of all tokens in the file.

type T uint8

const eof = -1

const (
	TEndOfFile T = iota

	TAtKeyword
	TBadString
	TBadURL
	TCDC // "-->"
	TCDO // "<!--"
	TCloseBrace
	TCloseBracket
	TCloseParen
	TColon
	TComma
	TDelim
	TDelimAmpersand
	TDelimAsterisk
	TDelimBar
	TDelimCaret
	TDelimDollar
	TDelimDot
	TDelimEquals
	TDelimExclamation
	TDelimGreaterThan
	TDelimMinus
	TDelimPlus
	TDelimSlash
	TDelimTilde
	TDimension
	TFunction
	THash
	TIdent
	TNumber
	TOpenBrace
	TOpenBracket
	TOpenParen
	TPercentage
	TSemicolon
	TString
	TURL
	TWhitespace
)

var tokenToString = []string{
	"end of file",
	"@-keyword",
	"bad string token",
	"bad URL token",
	"\"-->\"",
	"\"<!--\"",
	"\"}\"",
	"\"]\"",
	"\")\"",
	"\":\"",
	"\",\"",
	"delimiter",
	"\"&\"",
	"\"*\"",
	"\"|\"",
	"\"^\"",
	"\"$\"",
	"\".\"",
	"\"=\"",
	"\"!\"",
	"\">\"",
	"\"-\"",
	"\"+\"",
	"\"/\"",
	"\"~\"",
	"dimension",
	"function token",
	"hash token",
	"identifier",
	"number",
	"\"{\"",
	"\"[\"",
	"\"(\"",
	"percentage",
	"\";\"",
	"string token",
	"URL token",
	"whitespace",
}

func (t T) String() string {
	return tokenToString[t]
}

func (t T) IsNumeric() bool {
	return t == TNumber || t == TPercentage || t == TDimension
}

// This token struct is designed to be memory-efficient. It just references a
// range in the input file instead of directly containing the substring of text
// since a range takes up less memory than a string.
type Token struct {
	Range      logger.Range // 8 bytes
	UnitOffset uint16       // 2 bytes
	Kind       T            // 1 byte
	IsID       bool         // 1 byte
}

func (token Token) DecodedText(contents string) string {
	raw := contents[token.Range.Loc.Start:token.Range.End()]

	switch token.Kind {
	case TIdent, TDimension:
		return decodeEscapesInToken(raw)

	case TAtKeyword, THash:
		return decodeEscapesInToken(raw[1:])

	case TFunction:
		return decodeEscapesInToken(raw[:len(raw)-1])

	case TString:
		return decodeEscapesInToken(raw[1 : len(raw)-1])

	case TURL:
		start := 4
		end := len(raw) - 1

		// Trim leading and trailing whitespace
		for start < end && isWhitespace(rune(raw[start])) {
			start++
		}
		for start < end && isWhitespace(rune(raw[end-1])) {
			end--
		}

		return decodeEscapesInToken(raw[start:end])
	}

	return raw
}
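Because a Token stores only a range, callers pass the original source text back in to materialize a value, and DecodedText also strips the token's own delimiters. A sketch (the function name is illustrative; the Range assumes the token spans the whole 7-byte input):

// Sketch: decoding a TString token for the source `"it\'s"`.
// DecodedText strips the quotes and resolves the escape: it's
func decodeExample() string {
	tok := Token{Kind: TString, Range: logger.Range{Len: 7}}
	return tok.DecodedText(`"it\'s"`)
}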
type lexer struct {
	log                     logger.Log
	source                  logger.Source
	tracker                 logger.LineColumnTracker
	current                 int
	codePoint               rune
	Token                   Token
	legalCommentsBefore     []Comment
	approximateNewlineCount int
	sourceMappingURL        logger.Span
}

type Comment struct {
	Text            string
	Loc             logger.Loc
	TokenIndexAfter uint32
}

type TokenizeResult struct {
	Tokens               []Token
	LegalComments        []Comment
	ApproximateLineCount int32
	SourceMapComment     logger.Span
}

func Tokenize(log logger.Log, source logger.Source) TokenizeResult {
	lexer := lexer{
		log:     log,
		source:  source,
		tracker: logger.MakeLineColumnTracker(&source),
	}
	lexer.step()

	// The U+FEFF character is usually a zero-width non-breaking space. However,
	// when it's used at the start of a text stream it is called a BOM (byte order
	// mark) instead and indicates that the text stream is UTF-8 encoded. This is
	// problematic for us because CSS does not treat U+FEFF as whitespace. Only
	// " \t\r\n\f" characters are treated as whitespace. Skip over the BOM if it
	// is present so it doesn't cause us trouble when we try to parse it.
	if lexer.codePoint == '\uFEFF' {
		lexer.step()
	}

	lexer.next()
	var tokens []Token
	var comments []Comment
	for lexer.Token.Kind != TEndOfFile {
		if lexer.legalCommentsBefore != nil {
			for _, comment := range lexer.legalCommentsBefore {
				comment.TokenIndexAfter = uint32(len(tokens))
				comments = append(comments, comment)
			}
			lexer.legalCommentsBefore = nil
		}
		tokens = append(tokens, lexer.Token)
		lexer.next()
	}
	if lexer.legalCommentsBefore != nil {
		for _, comment := range lexer.legalCommentsBefore {
			comment.TokenIndexAfter = uint32(len(tokens))
			comments = append(comments, comment)
		}
		lexer.legalCommentsBefore = nil
	}
	return TokenizeResult{
		Tokens:               tokens,
		LegalComments:        comments,
		ApproximateLineCount: int32(lexer.approximateNewlineCount) + 1,
		SourceMapComment:     lexer.sourceMappingURL,
	}
}
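Since the lexer runs to completion, consumers iterate a plain slice rather than pulling tokens on demand. A sketch of driving it (hedged: these are internal esbuild packages, so this only compiles inside the esbuild module; constructing the logger.Log is elided and the helper name tokenKinds is illustrative):

// Sketch: collecting the token kinds for a snippet of CSS.
func tokenKinds(log logger.Log, css string) []T {
	result := Tokenize(log, logger.Source{Contents: css})
	kinds := make([]T, 0, len(result.Tokens))
	for _, t := range result.Tokens {
		kinds = append(kinds, t.Kind)
	}
	return kinds
}

// tokenKinds(log, "a{color:red}") yields approximately:
// TIdent TOpenBrace TIdent TColon TIdent TCloseBrace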
func (lexer *lexer) step() {
	codePoint, width := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])

	// Use -1 to indicate the end of the file
	if width == 0 {
		codePoint = eof
	}

	// Track the approximate number of newlines in the file so we can preallocate
	// the line offset table in the printer for source maps. The line offset table
	// is the #1 highest allocation in the heap profile, so this is worth doing.
	// This count is approximate because it handles "\n" and "\r\n" (the common
	// cases) but not "\r" or "\u2028" or "\u2029". Getting this wrong is harmless
	// because it's only a preallocation. The array will just grow if it's too small.
	if codePoint == '\n' {
		lexer.approximateNewlineCount++
	}

	lexer.codePoint = codePoint
	lexer.Token.Range.Len = int32(lexer.current) - lexer.Token.Range.Loc.Start
	lexer.current += width
}

func (lexer *lexer) next() {
	// Reference: https://www.w3.org/TR/css-syntax-3/

	for {
		lexer.Token = Token{Range: logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}}}

		switch lexer.codePoint {
		case eof:
			lexer.Token.Kind = TEndOfFile

		case '/':
			lexer.step()
			switch lexer.codePoint {
			case '*':
				lexer.step()
				lexer.consumeToEndOfMultiLineComment(lexer.Token.Range)
				continue
			case '/':
				lexer.step()
				lexer.consumeToEndOfSingleLineComment()
				continue
			}
			lexer.Token.Kind = TDelimSlash

		case ' ', '\t', '\n', '\r', '\f':
			lexer.step()
			for {
				if isWhitespace(lexer.codePoint) {
					lexer.step()
				} else if lexer.codePoint == '/' && lexer.current < len(lexer.source.Contents) && lexer.source.Contents[lexer.current] == '*' {
					startRange := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 2}
					lexer.step()
					lexer.step()
					lexer.consumeToEndOfMultiLineComment(startRange)
				} else {
					break
				}
			}
			lexer.Token.Kind = TWhitespace

		case '"', '\'':
			lexer.Token.Kind = lexer.consumeString()

		case '#':
			lexer.step()
			if IsNameContinue(lexer.codePoint) || lexer.isValidEscape() {
				lexer.Token.Kind = THash
				if lexer.wouldStartIdentifier() {
					lexer.Token.IsID = true
				}
				lexer.consumeName()
			} else {
				lexer.Token.Kind = TDelim
			}

		case '(':
			lexer.step()
			lexer.Token.Kind = TOpenParen

		case ')':
			lexer.step()
			lexer.Token.Kind = TCloseParen

		case '[':
			lexer.step()
			lexer.Token.Kind = TOpenBracket

		case ']':
			lexer.step()
			lexer.Token.Kind = TCloseBracket

		case '{':
			lexer.step()
			lexer.Token.Kind = TOpenBrace

		case '}':
			lexer.step()
			lexer.Token.Kind = TCloseBrace

		case ',':
			lexer.step()
			lexer.Token.Kind = TComma

		case ':':
			lexer.step()
			lexer.Token.Kind = TColon

		case ';':
			lexer.step()
			lexer.Token.Kind = TSemicolon

		case '+':
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimPlus
			}

		case '.':
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimDot
			}

		case '-':
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else if lexer.current+2 <= len(lexer.source.Contents) && lexer.source.Contents[lexer.current:lexer.current+2] == "->" {
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.Token.Kind = TCDC
			} else if lexer.wouldStartIdentifier() {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimMinus
			}

		case '<':
			if lexer.current+3 <= len(lexer.source.Contents) && lexer.source.Contents[lexer.current:lexer.current+3] == "!--" {
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.Token.Kind = TCDO
			} else {
				lexer.step()
				lexer.Token.Kind = TDelim
			}

		case '@':
			lexer.step()
			if lexer.wouldStartIdentifier() {
				lexer.consumeName()
				lexer.Token.Kind = TAtKeyword
			} else {
				lexer.Token.Kind = TDelim
			}

		case '\\':
			if lexer.isValidEscape() {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.log.Add(logger.Error, &lexer.tracker, lexer.Token.Range, "Invalid escape")
				lexer.Token.Kind = TDelim
			}

		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			lexer.Token.Kind = lexer.consumeNumeric()

		case '>':
			lexer.step()
			lexer.Token.Kind = TDelimGreaterThan

		case '~':
			lexer.step()
			lexer.Token.Kind = TDelimTilde

		case '&':
			lexer.step()
			lexer.Token.Kind = TDelimAmpersand

		case '*':
			lexer.step()
			lexer.Token.Kind = TDelimAsterisk

		case '|':
			lexer.step()
			lexer.Token.Kind = TDelimBar

		case '!':
			lexer.step()
			lexer.Token.Kind = TDelimExclamation

		case '=':
			lexer.step()
			lexer.Token.Kind = TDelimEquals

		case '^':
			lexer.step()
			lexer.Token.Kind = TDelimCaret

		case '$':
			lexer.step()
			lexer.Token.Kind = TDelimDollar

		default:
			if IsNameStart(lexer.codePoint) {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelim
			}
		}

		return
	}
}

func (lexer *lexer) consumeToEndOfMultiLineComment(startRange logger.Range) {
	startOfSourceMappingURL := 0
	isLegalComment := false

	switch lexer.codePoint {
	case '#', '@':
		// Keep track of the contents of the "sourceMappingURL=" comment
		if strings.HasPrefix(lexer.source.Contents[lexer.current:], " sourceMappingURL=") {
			startOfSourceMappingURL = lexer.current + len(" sourceMappingURL=")
		}

	case '!':
		// Remember if this is a legal comment
		isLegalComment = true
	}

	for {
		switch lexer.codePoint {
		case '*':
			endOfSourceMappingURL := lexer.current - 1
			lexer.step()
			if lexer.codePoint == '/' {
				commentEnd := lexer.current
				lexer.step()

				// Record the source mapping URL
				if startOfSourceMappingURL != 0 {
					r := logger.Range{Loc: logger.Loc{Start: int32(startOfSourceMappingURL)}}
					text := lexer.source.Contents[startOfSourceMappingURL:endOfSourceMappingURL]
					for int(r.Len) < len(text) && !isWhitespace(rune(text[r.Len])) {
						r.Len++
					}
					lexer.sourceMappingURL = logger.Span{Text: text[:r.Len], Range: r}
				}

				// Record legal comments
				if text := lexer.source.Contents[startRange.Loc.Start:commentEnd]; isLegalComment || containsAtPreserveOrAtLicense(text) {
					text = helpers.RemoveMultiLineCommentIndent(lexer.source.Contents[:startRange.Loc.Start], text)
					lexer.legalCommentsBefore = append(lexer.legalCommentsBefore, Comment{Loc: startRange.Loc, Text: text})
				}
				return
			}

		case eof: // This indicates the end of the file
			lexer.log.AddWithNotes(logger.Error, &lexer.tracker, logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Expected \"*/\" to terminate multi-line comment",
				[]logger.MsgData{lexer.tracker.MsgData(startRange, "The multi-line comment starts here:")})
			return

		default:
			lexer.step()
		}
	}
}

func containsAtPreserveOrAtLicense(text string) bool {
	for i, c := range text {
		if c == '@' && (strings.HasPrefix(text[i+1:], "preserve") || strings.HasPrefix(text[i+1:], "license")) {
			return true
		}
	}
	return false
}

func (lexer *lexer) consumeToEndOfSingleLineComment() {
	for !isNewline(lexer.codePoint) && lexer.codePoint != eof {
		lexer.step()
	}
	lexer.log.Add(logger.Warning, &lexer.tracker, lexer.Token.Range, "Comments in CSS use \"/* ... */\" instead of \"//\"")
}

func (lexer *lexer) isValidEscape() bool {
	if lexer.codePoint != '\\' {
		return false
	}
	c, _ := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])
	return !isNewline(c)
}

func (lexer *lexer) wouldStartIdentifier() bool {
	if IsNameStart(lexer.codePoint) {
		return true
	}

	if lexer.codePoint == '-' {
		c, width := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])
		if c == utf8.RuneError && width <= 1 {
			return false // Decoding error
		}
		if IsNameStart(c) || c == '-' {
			return true
		}
		if c == '\\' {
			c2, _ := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current+width:])
			return !isNewline(c2)
		}
		return false
	}

	return lexer.isValidEscape()
}

func WouldStartIdentifierWithoutEscapes(text string) bool {
	c, width := utf8.DecodeRuneInString(text)
	if c == utf8.RuneError && width <= 1 {
		return false // Decoding error
	}
	if IsNameStart(c) {
		return true
	}

	if c == '-' {
		c2, width2 := utf8.DecodeRuneInString(text[width:])
		if c2 == utf8.RuneError && width2 <= 1 {
			return false // Decoding error
		}
		if IsNameStart(c2) || c2 == '-' {
			return true
		}
	}
	return false
}

func (lexer *lexer) wouldStartNumber() bool {
	if lexer.codePoint >= '0' && lexer.codePoint <= '9' {
		return true
	} else if lexer.codePoint == '.' {
		contents := lexer.source.Contents
		if lexer.current < len(contents) {
			c := contents[lexer.current]
			return c >= '0' && c <= '9'
		}
	} else if lexer.codePoint == '+' || lexer.codePoint == '-' {
		contents := lexer.source.Contents
		n := len(contents)
		if lexer.current < n {
			c := contents[lexer.current]
			if c >= '0' && c <= '9' {
				return true
			}
			if c == '.' && lexer.current+1 < n {
				c = contents[lexer.current+1]
				return c >= '0' && c <= '9'
			}
		}
	}
	return false
}

func (lexer *lexer) consumeName() string {
	// Common case: no escapes, identifier is a substring of the input
	for IsNameContinue(lexer.codePoint) {
		lexer.step()
	}
	raw := lexer.source.Contents[lexer.Token.Range.Loc.Start:lexer.Token.Range.End()]
	if !lexer.isValidEscape() {
		return raw
	}

	// Uncommon case: escapes, identifier is allocated
	sb := strings.Builder{}
	sb.WriteString(raw)
	sb.WriteRune(lexer.consumeEscape())
	for {
		if IsNameContinue(lexer.codePoint) {
			sb.WriteRune(lexer.codePoint)
			lexer.step()
		} else if lexer.isValidEscape() {
			sb.WriteRune(lexer.consumeEscape())
		} else {
			break
		}
	}
	return sb.String()
}

func (lexer *lexer) consumeEscape() rune {
	lexer.step() // Skip the backslash
	c := lexer.codePoint

	if hex, ok := isHex(c); ok {
		lexer.step()
		for i := 0; i < 5; i++ {
			if next, ok := isHex(lexer.codePoint); ok {
				lexer.step()
				hex = hex*16 + next
			} else {
				break
			}
		}
		if isWhitespace(lexer.codePoint) {
			lexer.step()
		}
		if hex == 0 || (hex >= 0xD800 && hex <= 0xDFFF) || hex > 0x10FFFF {
			return utf8.RuneError
		}
		return rune(hex)
	}

	if c == eof {
		return utf8.RuneError
	}

	lexer.step()
	return c
}

func (lexer *lexer) consumeIdentLike() T {
	name := lexer.consumeName()

	if lexer.codePoint == '(' {
		lexer.step()
		if len(name) == 3 {
			u, r, l := name[0], name[1], name[2]
			if (u == 'u' || u == 'U') && (r == 'r' || r == 'R') && (l == 'l' || l == 'L') {
				for isWhitespace(lexer.codePoint) {
					lexer.step()
				}
				if lexer.codePoint != '"' && lexer.codePoint != '\'' {
					return lexer.consumeURL()
				}
			}
		}
		return TFunction
	}

	return TIdent
}

func (lexer *lexer) consumeURL() T {
validURL:
	for {
		switch lexer.codePoint {
		case ')':
			lexer.step()
			return TURL

		case eof:
			loc := logger.Loc{Start: lexer.Token.Range.End()}
			lexer.log.Add(logger.Error, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token")
			return TBadURL

		case ' ', '\t', '\n', '\r', '\f':
			lexer.step()
			for isWhitespace(lexer.codePoint) {
				lexer.step()
			}
			if lexer.codePoint != ')' {
				loc := logger.Loc{Start: lexer.Token.Range.End()}
				lexer.log.Add(logger.Error, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token")
				break validURL
			}
			lexer.step()
			return TURL

		case '"', '\'', '(':
			r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
			lexer.log.Add(logger.Error, &lexer.tracker, r, "Expected \")\" to end URL token")
			break validURL

		case '\\':
			if !lexer.isValidEscape() {
				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
				lexer.log.Add(logger.Error, &lexer.tracker, r, "Invalid escape")
				break validURL
			}
			lexer.consumeEscape()

		default:
			if isNonPrintable(lexer.codePoint) {
				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
				lexer.log.Add(logger.Error, &lexer.tracker, r, "Unexpected non-printable character in URL token")
			}
			lexer.step()
		}
	}

	// Consume the remnants of a bad url
	for {
		switch lexer.codePoint {
		case ')', eof:
			lexer.step()
			return TBadURL

		case '\\':
			if lexer.isValidEscape() {
				lexer.consumeEscape()
			}
		}
		lexer.step()
	}
}

func (lexer *lexer) consumeString() T {
	quote := lexer.codePoint
	lexer.step()

	for {
		switch lexer.codePoint {
		case '\\':
			lexer.step()

			// Handle Windows CRLF
			if lexer.codePoint == '\r' {
				lexer.step()
				if lexer.codePoint == '\n' {
					lexer.step()
				}
				continue
			}

			// Otherwise, fall through to ignore the character after the backslash

		case eof:
			lexer.log.Add(logger.Error, &lexer.tracker,
				logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Unterminated string token")
			return TBadString

		case '\n', '\r', '\f':
			lexer.log.Add(logger.Error, &lexer.tracker,
				logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Unterminated string token")
			return TBadString

		case quote:
			lexer.step()
			return TString
		}
		lexer.step()
	}
}

func (lexer *lexer) consumeNumeric() T {
	// Skip over leading sign
	if lexer.codePoint == '+' || lexer.codePoint == '-' {
		lexer.step()
	}

	// Skip over leading digits
	for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
		lexer.step()
	}

	// Skip over digits after dot
	if lexer.codePoint == '.' {
		lexer.step()
		for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
			lexer.step()
		}
	}

	// Skip over exponent
	if lexer.codePoint == 'e' || lexer.codePoint == 'E' {
		contents := lexer.source.Contents

		// Look ahead before advancing to make sure this is an exponent, not a unit
		if lexer.current < len(contents) {
			c := contents[lexer.current]
			if (c == '+' || c == '-') && lexer.current+1 < len(contents) {
				c = contents[lexer.current+1]
			}

			// Only consume this if it's an exponent
			if c >= '0' && c <= '9' {
				lexer.step()
				if lexer.codePoint == '+' || lexer.codePoint == '-' {
					lexer.step()
				}
				for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
					lexer.step()
				}
			}
		}
	}

	// Determine the numeric type
	if lexer.wouldStartIdentifier() {
		lexer.Token.UnitOffset = uint16(lexer.Token.Range.Len)
		lexer.consumeName()
		return TDimension
	}
	if lexer.codePoint == '%' {
		lexer.step()
		return TPercentage
	}
	return TNumber
}
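consumeNumeric classifies the token after the fact: if an identifier immediately follows the digits the token is a dimension, and UnitOffset records where the unit begins inside the token so later passes can split number from unit without re-lexing. A sketch (the helper name is illustrative):

// Sketch: for the input "1.5e2px" the lexer emits one TDimension token
// with UnitOffset == 5, so the raw text splits like this:
func splitDimension(raw string, unitOffset uint16) (number, unit string) {
	return raw[:unitOffset], raw[unitOffset:] // "1.5e2", "px" for unitOffset == 5
}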
func IsNameStart(c rune) bool {
	return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || c >= 0x80 || c == '\x00'
}

func IsNameContinue(c rune) bool {
	return IsNameStart(c) || (c >= '0' && c <= '9') || c == '-'
}

func isNewline(c rune) bool {
	switch c {
	case '\n', '\r', '\f':
		return true
	}
	return false
}

func isWhitespace(c rune) bool {
	switch c {
	case ' ', '\t', '\n', '\r', '\f':
		return true
	}
	return false
}

func isHex(c rune) (int, bool) {
	if c >= '0' && c <= '9' {
		return int(c - '0'), true
	}
	if c >= 'a' && c <= 'f' {
		return int(c + (10 - 'a')), true
	}
	if c >= 'A' && c <= 'F' {
		return int(c + (10 - 'A')), true
	}
	return 0, false
}

func isNonPrintable(c rune) bool {
	return c <= 0x08 || c == 0x0B || (c >= 0x0E && c <= 0x1F) || c == 0x7F
}

func decodeEscapesInToken(inner string) string {
	i := 0

	for i < len(inner) {
		if c := inner[i]; c == '\\' || c == '\x00' {
			break
		}
		i++
	}

	if i == len(inner) {
		return inner
	}

	sb := strings.Builder{}
	sb.WriteString(inner[:i])
	inner = inner[i:]

	for len(inner) > 0 {
		c, width := utf8.DecodeRuneInString(inner)
		inner = inner[width:]

		if c != '\\' {
			if c == '\x00' {
				c = utf8.RuneError
			}
			sb.WriteRune(c)
			continue
		}

		if len(inner) == 0 {
			sb.WriteRune(utf8.RuneError)
			continue
		}

		c, width = utf8.DecodeRuneInString(inner)
		inner = inner[width:]
		hex, ok := isHex(c)

		if !ok {
			if c == '\n' || c == '\f' {
				continue
			}

			// Handle Windows CRLF
			if c == '\r' {
				c, width = utf8.DecodeRuneInString(inner)
				if c == '\n' {
					inner = inner[width:]
				}
				continue
			}

			// If we get here, this is not a valid escape. However, this is still
			// allowed. In this case the backslash is just ignored.
			sb.WriteRune(c)
			continue
		}

		// Parse up to five additional hex characters (so six in total)
		for i := 0; i < 5 && len(inner) > 0; i++ {
			c, width = utf8.DecodeRuneInString(inner)
			if next, ok := isHex(c); ok {
				inner = inner[width:]
				hex = hex*16 + next
			} else {
				break
			}
		}

		if len(inner) > 0 {
			c, width = utf8.DecodeRuneInString(inner)
			if isWhitespace(c) {
				inner = inner[width:]
			}
		}

		if hex == 0 || (hex >= 0xD800 && hex <= 0xDFFF) || hex > 0x10FFFF {
			sb.WriteRune(utf8.RuneError)
			continue
		}

		sb.WriteRune(rune(hex))
	}

	return sb.String()
}
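CSS hex escapes are one to six hex digits, optionally terminated by a single whitespace character that is consumed along with the escape; NUL and surrogate or out-of-range code points are replaced with U+FFFD. A few worked inputs as a sketch (the wrapper function name is illustrative):

// Sketch: representative behavior of decodeEscapesInToken.
func escapeExamples() []string {
	return []string{
		decodeEscapesInToken(`\26 B`), // "&B": U+0026, terminating space consumed
		decodeEscapesInToken(`\0`),    // "\uFFFD": NUL is not allowed
		decodeEscapesInToken(`a\"b`),  // `a"b`: non-hex escape, backslash dropped
	}
}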
256 vendor/github.com/evanw/esbuild/internal/css_parser/css_decls.go generated vendored
@@ -1,256 +0,0 @@
package css_parser

import (
	"github.com/evanw/esbuild/internal/compat"
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func (p *parser) commaToken() css_ast.Token {
	t := css_ast.Token{
		Kind: css_lexer.TComma,
		Text: ",",
	}
	if !p.options.RemoveWhitespace {
		t.Whitespace = css_ast.WhitespaceAfter
	}
	return t
}
func expandTokenQuad(tokens []css_ast.Token, allowedIdent string) (result [4]css_ast.Token, ok bool) {
	n := len(tokens)
	if n < 1 || n > 4 {
		return
	}

	// Don't do this if we encounter any unexpected tokens such as "var()"
	for i := 0; i < n; i++ {
		if t := tokens[i]; !t.Kind.IsNumeric() && (t.Kind != css_lexer.TIdent || allowedIdent == "" || t.Text != allowedIdent) {
			return
		}
	}

	result[0] = tokens[0]
	if n > 1 {
		result[1] = tokens[1]
	} else {
		result[1] = result[0]
	}
	if n > 2 {
		result[2] = tokens[2]
	} else {
		result[2] = result[0]
	}
	if n > 3 {
		result[3] = tokens[3]
	} else {
		result[3] = result[1]
	}

	ok = true
	return
}
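expandTokenQuad implements the standard CSS box shorthand: one value applies to all four sides, two values are vertical/horizontal, three are top/horizontal/bottom, and four are top/right/bottom/left, which is why the code copies result[0] and result[1] into the missing slots. A sketch (the function name is illustrative; only Kind and Text are filled in, real tokens also carry ranges):

// Sketch: expanding the shorthand "margin: 10px 20px" into four sides.
func quadExample() ([4]css_ast.Token, bool) {
	tokens := []css_ast.Token{
		{Kind: css_lexer.TDimension, Text: "10px"},
		{Kind: css_lexer.TDimension, Text: "20px"},
	}
	// Returns [top "10px", right "20px", bottom "10px", left "20px"], true
	return expandTokenQuad(tokens, "auto")
}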
func compactTokenQuad(a css_ast.Token, b css_ast.Token, c css_ast.Token, d css_ast.Token, removeWhitespace bool) []css_ast.Token {
	tokens := []css_ast.Token{a, b, c, d}
	if tokens[3].EqualIgnoringWhitespace(tokens[1]) {
		if tokens[2].EqualIgnoringWhitespace(tokens[0]) {
			if tokens[1].EqualIgnoringWhitespace(tokens[0]) {
				tokens = tokens[:1]
			} else {
				tokens = tokens[:2]
			}
		} else {
			tokens = tokens[:3]
		}
	}
	for i := range tokens {
		var whitespace css_ast.WhitespaceFlags
		if !removeWhitespace || i > 0 {
			whitespace |= css_ast.WhitespaceBefore
		}
		if i+1 < len(tokens) {
			whitespace |= css_ast.WhitespaceAfter
		}
		tokens[i].Whitespace = whitespace
	}
	return tokens
}
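compactTokenQuad is the inverse: it drops trailing sides that repeat earlier ones (left matching right, then bottom matching top, then right matching top) and rebuilds the whitespace flags so the minifier prints the shortest equivalent shorthand. A sketch (illustrative names again):

// Sketch: four sides "10px 20px 10px 20px" compact back to the
// two-token form "10px 20px", since bottom repeats top and left
// repeats right.
func compactExample() []css_ast.Token {
	d := func(s string) css_ast.Token {
		return css_ast.Token{Kind: css_lexer.TDimension, Text: s}
	}
	out := compactTokenQuad(d("10px"), d("20px"), d("10px"), d("20px"), false)
	return out // len(out) == 2
}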
func (p *parser) processDeclarations(rules []css_ast.Rule) []css_ast.Rule {
	margin := boxTracker{key: css_ast.DMargin, keyText: "margin", allowAuto: true}
	padding := boxTracker{key: css_ast.DPadding, keyText: "padding", allowAuto: false}
	inset := boxTracker{key: css_ast.DInset, keyText: "inset", allowAuto: true}
	borderRadius := borderRadiusTracker{}

	for i, rule := range rules {
		decl, ok := rule.Data.(*css_ast.RDeclaration)
		if !ok {
			continue
		}

		switch decl.Key {
		case css_ast.DBackgroundColor,
			css_ast.DBorderBlockEndColor,
			css_ast.DBorderBlockStartColor,
			css_ast.DBorderBottomColor,
			css_ast.DBorderColor,
			css_ast.DBorderInlineEndColor,
			css_ast.DBorderInlineStartColor,
			css_ast.DBorderLeftColor,
			css_ast.DBorderRightColor,
			css_ast.DBorderTopColor,
			css_ast.DCaretColor,
			css_ast.DColor,
			css_ast.DColumnRuleColor,
			css_ast.DFill,
			css_ast.DFloodColor,
			css_ast.DLightingColor,
			css_ast.DOutlineColor,
			css_ast.DStopColor,
			css_ast.DStroke,
			css_ast.DTextDecorationColor,
			css_ast.DTextEmphasisColor:

			if len(decl.Value) == 1 {
				decl.Value[0] = p.lowerColor(decl.Value[0])

				if p.options.MangleSyntax {
					t := decl.Value[0]
					if hex, ok := parseColor(t); ok {
						decl.Value[0] = p.mangleColor(t, hex)
					}
				}
			}

		case css_ast.DFont:
			if p.options.MangleSyntax {
				decl.Value = p.mangleFont(decl.Value)
			}

		case css_ast.DFontFamily:
			if p.options.MangleSyntax {
				if value, ok := p.mangleFontFamily(decl.Value); ok {
					decl.Value = value
				}
			}

		case css_ast.DFontWeight:
			if len(decl.Value) == 1 && p.options.MangleSyntax {
				decl.Value[0] = p.mangleFontWeight(decl.Value[0])
			}

		case css_ast.DTransform:
			if p.options.MangleSyntax {
				decl.Value = p.mangleTransforms(decl.Value)
			}

		case css_ast.DBoxShadow:
			if p.options.MangleSyntax {
				decl.Value = p.mangleBoxShadows(decl.Value)
			}

		// Margin
		case css_ast.DMargin:
			if p.options.MangleSyntax {
				margin.mangleSides(rules, decl, i, p.options.RemoveWhitespace)
			}
		case css_ast.DMarginTop:
			if p.options.MangleSyntax {
				margin.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxTop)
			}
		case css_ast.DMarginRight:
			if p.options.MangleSyntax {
				margin.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxRight)
			}
		case css_ast.DMarginBottom:
			if p.options.MangleSyntax {
				margin.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxBottom)
			}
		case css_ast.DMarginLeft:
			if p.options.MangleSyntax {
				margin.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxLeft)
			}

		// Padding
		case css_ast.DPadding:
			if p.options.MangleSyntax {
				padding.mangleSides(rules, decl, i, p.options.RemoveWhitespace)
			}
		case css_ast.DPaddingTop:
			if p.options.MangleSyntax {
				padding.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxTop)
			}
		case css_ast.DPaddingRight:
			if p.options.MangleSyntax {
				padding.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxRight)
			}
		case css_ast.DPaddingBottom:
			if p.options.MangleSyntax {
				padding.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxBottom)
			}
		case css_ast.DPaddingLeft:
			if p.options.MangleSyntax {
				padding.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxLeft)
			}

		// Inset
		case css_ast.DInset:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
|
||||
inset.mangleSides(rules, decl, i, p.options.RemoveWhitespace)
|
||||
}
|
||||
case css_ast.DTop:
|
||||
if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
|
||||
inset.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxTop)
|
||||
}
|
||||
case css_ast.DRight:
|
||||
if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
|
||||
inset.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxRight)
|
||||
}
|
||||
case css_ast.DBottom:
|
||||
if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
|
||||
inset.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxBottom)
|
||||
}
|
||||
case css_ast.DLeft:
|
||||
if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
|
||||
inset.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxLeft)
|
||||
}
|
||||
|
||||
// Border radius
|
||||
case css_ast.DBorderRadius:
|
||||
if p.options.MangleSyntax {
|
||||
borderRadius.mangleCorners(rules, decl, i, p.options.RemoveWhitespace)
|
||||
}
|
||||
case css_ast.DBorderTopLeftRadius:
|
||||
if p.options.MangleSyntax {
|
||||
borderRadius.mangleCorner(rules, decl, i, p.options.RemoveWhitespace, borderRadiusTopLeft)
|
||||
}
|
||||
case css_ast.DBorderTopRightRadius:
|
||||
if p.options.MangleSyntax {
|
||||
borderRadius.mangleCorner(rules, decl, i, p.options.RemoveWhitespace, borderRadiusTopRight)
|
||||
}
|
||||
case css_ast.DBorderBottomRightRadius:
|
||||
if p.options.MangleSyntax {
|
||||
borderRadius.mangleCorner(rules, decl, i, p.options.RemoveWhitespace, borderRadiusBottomRight)
|
||||
}
|
||||
case css_ast.DBorderBottomLeftRadius:
|
||||
if p.options.MangleSyntax {
|
||||
borderRadius.mangleCorner(rules, decl, i, p.options.RemoveWhitespace, borderRadiusBottomLeft)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Compact removed rules
|
||||
if p.options.MangleSyntax {
|
||||
end := 0
|
||||
for _, rule := range rules {
|
||||
if rule.Data != nil {
|
||||
rules[end] = rule
|
||||
end++
|
||||
}
|
||||
}
|
||||
rules = rules[:end]
|
||||
}
|
||||
|
||||
return rules
|
||||
}
|
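expandTokenQuad and compactTokenQuad implement the CSS box shorthand rules: one to four values expand to [top, right, bottom, left], and trailing values that merely repeat their counterpart are dropped again. A minimal sketch of the same idea on plain strings (hypothetical helpers for illustration, not part of esbuild):

package main

import "fmt"

// expandQuad mirrors expandTokenQuad on plain strings: right defaults to
// top, bottom defaults to top, and left defaults to right.
func expandQuad(v []string) [4]string {
	q := [4]string{v[0], v[0], v[0], v[0]}
	if len(v) > 1 {
		q[1], q[3] = v[1], v[1]
	}
	if len(v) > 2 {
		q[2] = v[2]
	}
	if len(v) > 3 {
		q[3] = v[3]
	}
	return q
}

// compactQuad mirrors compactTokenQuad: drop trailing values that repeat
// their counterpart (left==right, then bottom==top, then right==top).
func compactQuad(q [4]string) []string {
	out := q[:]
	if q[3] == q[1] {
		if q[2] == q[0] {
			if q[1] == q[0] {
				out = out[:1]
			} else {
				out = out[:2]
			}
		} else {
			out = out[:3]
		}
	}
	return out
}

func main() {
	q := expandQuad([]string{"1px", "2px"})
	fmt.Println(q)              // [1px 2px 1px 2px]
	fmt.Println(compactQuad(q)) // [1px 2px]
}
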
213 vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_border_radius.go (generated, vendored)
@ -1,213 +0,0 @@
package css_parser

import (
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

const (
	borderRadiusTopLeft = iota
	borderRadiusTopRight
	borderRadiusBottomRight
	borderRadiusBottomLeft
)

type borderRadiusCorner struct {
	firstToken    css_ast.Token
	secondToken   css_ast.Token
	unitSafety    unitSafetyTracker
	ruleIndex     uint32 // The index of the originating rule in the rules array
	wasSingleRule bool   // True if the originating rule was just for this side
}

type borderRadiusTracker struct {
	corners   [4]borderRadiusCorner
	important bool // True if all active rules were flagged as "!important"
}

func (borderRadius *borderRadiusTracker) updateCorner(rules []css_ast.Rule, corner int, new borderRadiusCorner) {
	if old := borderRadius.corners[corner]; old.firstToken.Kind != css_lexer.TEndOfFile &&
		(!new.wasSingleRule || old.wasSingleRule) &&
		old.unitSafety.status == unitSafe && new.unitSafety.status == unitSafe {
		rules[old.ruleIndex] = css_ast.Rule{}
	}
	borderRadius.corners[corner] = new
}

func (borderRadius *borderRadiusTracker) mangleCorners(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool) {
	// Reset if we see a change in the "!important" flag
	if borderRadius.important != decl.Important {
		borderRadius.corners = [4]borderRadiusCorner{}
		borderRadius.important = decl.Important
	}

	tokens := decl.Value
	beforeSplit := len(tokens)
	afterSplit := len(tokens)

	// Search for the single slash if present
	for i, t := range tokens {
		if t.Kind == css_lexer.TDelimSlash {
			if beforeSplit == len(tokens) {
				beforeSplit = i
				afterSplit = i + 1
			} else {
				// Multiple slashes are an error
				borderRadius.corners = [4]borderRadiusCorner{}
				return
			}
		}
	}

	// Use a single tracker for the whole rule
	unitSafety := unitSafetyTracker{}
	for _, t := range tokens[:beforeSplit] {
		unitSafety.includeUnitOf(t)
	}
	for _, t := range tokens[afterSplit:] {
		unitSafety.includeUnitOf(t)
	}

	firstRadii, firstRadiiOk := expandTokenQuad(tokens[:beforeSplit], "")
	lastRadii, lastRadiiOk := expandTokenQuad(tokens[afterSplit:], "")

	// Stop now if the pattern wasn't matched
	if !firstRadiiOk || (beforeSplit < afterSplit && !lastRadiiOk) {
		borderRadius.corners = [4]borderRadiusCorner{}
		return
	}

	// Handle the first radii
	for corner, t := range firstRadii {
		if unitSafety.status == unitSafe {
			t.TurnLengthIntoNumberIfZero()
		}
		borderRadius.updateCorner(rules, corner, borderRadiusCorner{
			firstToken:  t,
			secondToken: t,
			unitSafety:  unitSafety,
			ruleIndex:   uint32(index),
		})
	}

	// Handle the last radii
	if lastRadiiOk {
		for corner, t := range lastRadii {
			if unitSafety.status == unitSafe {
				t.TurnLengthIntoNumberIfZero()
			}
			borderRadius.corners[corner].secondToken = t
		}
	}

	// Success
	borderRadius.compactRules(rules, decl.KeyRange, removeWhitespace)
}

func (borderRadius *borderRadiusTracker) mangleCorner(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool, corner int) {
	// Reset if we see a change in the "!important" flag
	if borderRadius.important != decl.Important {
		borderRadius.corners = [4]borderRadiusCorner{}
		borderRadius.important = decl.Important
	}

	if tokens := decl.Value; (len(tokens) == 1 && tokens[0].Kind.IsNumeric()) ||
		(len(tokens) == 2 && tokens[0].Kind.IsNumeric() && tokens[1].Kind.IsNumeric()) {
		firstToken := tokens[0]
		secondToken := firstToken
		if len(tokens) == 2 {
			secondToken = tokens[1]
		}

		// Check to see if these units are safe to use in every browser
		unitSafety := unitSafetyTracker{}
		unitSafety.includeUnitOf(firstToken)
		unitSafety.includeUnitOf(secondToken)

		// Only collapse "0unit" into "0" if the unit is safe
		if unitSafety.status == unitSafe && firstToken.TurnLengthIntoNumberIfZero() {
			tokens[0] = firstToken
		}
		if len(tokens) == 2 {
			if unitSafety.status == unitSafe && secondToken.TurnLengthIntoNumberIfZero() {
				tokens[1] = secondToken
			}

			// If both tokens are equal, merge them into one
			if firstToken.EqualIgnoringWhitespace(secondToken) {
				tokens[0].Whitespace &= ^css_ast.WhitespaceAfter
				decl.Value = tokens[:1]
			}
		}

		borderRadius.updateCorner(rules, corner, borderRadiusCorner{
			firstToken:    firstToken,
			secondToken:   secondToken,
			unitSafety:    unitSafety,
			ruleIndex:     uint32(index),
			wasSingleRule: true,
		})
		borderRadius.compactRules(rules, decl.KeyRange, removeWhitespace)
	} else {
		borderRadius.corners = [4]borderRadiusCorner{}
	}
}

func (borderRadius *borderRadiusTracker) compactRules(rules []css_ast.Rule, keyRange logger.Range, removeWhitespace bool) {
	// All tokens must be present
	if eof := css_lexer.TEndOfFile; borderRadius.corners[0].firstToken.Kind == eof || borderRadius.corners[1].firstToken.Kind == eof ||
		borderRadius.corners[2].firstToken.Kind == eof || borderRadius.corners[3].firstToken.Kind == eof {
		return
	}

	// All tokens must have the same unit
	for _, side := range borderRadius.corners[1:] {
		if !side.unitSafety.isSafeWith(borderRadius.corners[0].unitSafety) {
			return
		}
	}

	// Generate the most minimal representation
	tokens := compactTokenQuad(
		borderRadius.corners[0].firstToken,
		borderRadius.corners[1].firstToken,
		borderRadius.corners[2].firstToken,
		borderRadius.corners[3].firstToken,
		removeWhitespace,
	)
	secondTokens := compactTokenQuad(
		borderRadius.corners[0].secondToken,
		borderRadius.corners[1].secondToken,
		borderRadius.corners[2].secondToken,
		borderRadius.corners[3].secondToken,
		removeWhitespace,
	)
	if !css_ast.TokensEqualIgnoringWhitespace(tokens, secondTokens) {
		var whitespace css_ast.WhitespaceFlags
		if !removeWhitespace {
			whitespace = css_ast.WhitespaceBefore | css_ast.WhitespaceAfter
		}
		tokens = append(tokens, css_ast.Token{
			Kind:       css_lexer.TDelimSlash,
			Text:       "/",
			Whitespace: whitespace,
		})
		tokens = append(tokens, secondTokens...)
	}

	// Remove all of the existing declarations
	rules[borderRadius.corners[0].ruleIndex] = css_ast.Rule{}
	rules[borderRadius.corners[1].ruleIndex] = css_ast.Rule{}
	rules[borderRadius.corners[2].ruleIndex] = css_ast.Rule{}
	rules[borderRadius.corners[3].ruleIndex] = css_ast.Rule{}

	// Insert the combined declaration where the last rule was
	rules[borderRadius.corners[3].ruleIndex].Data = &css_ast.RDeclaration{
		Key:       css_ast.DBorderRadius,
		KeyText:   "border-radius",
		Value:     tokens,
		KeyRange:  keyRange,
		Important: borderRadius.important,
	}
}
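The effect of this tracker is easiest to see through esbuild's public Go API (pkg/api). A sketch; the printed output is what the code above implies and may differ slightly between esbuild versions:

package main

import (
	"fmt"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	// Four per-corner declarations collapse into one "border-radius" rule,
	// in the corner order top-left, top-right, bottom-right, bottom-left.
	css := `div {
		border-top-left-radius: 1px;
		border-top-right-radius: 2px;
		border-bottom-right-radius: 3px;
		border-bottom-left-radius: 4px;
	}`
	result := api.Transform(css, api.TransformOptions{
		Loader:           api.LoaderCSS,
		MinifyWhitespace: true,
		MinifySyntax:     true,
	})
	fmt.Println(string(result.Code)) // div{border-radius:1px 2px 3px 4px}
}
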
198 vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_box.go (generated, vendored)
@ -1,198 +0,0 @@
package css_parser

import (
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

const (
	boxTop = iota
	boxRight
	boxBottom
	boxLeft
)

type boxSide struct {
	token         css_ast.Token
	unitSafety    unitSafetyTracker
	ruleIndex     uint32 // The index of the originating rule in the rules array
	wasSingleRule bool   // True if the originating rule was just for this side
}

type boxTracker struct {
	key       css_ast.D
	keyText   string
	allowAuto bool // If true, allow the "auto" keyword

	sides     [4]boxSide
	important bool // True if all active rules were flagged as "!important"
}

type unitSafetyStatus uint8

const (
	unitSafe         unitSafetyStatus = iota // "margin: 0 1px 2cm 3%;"
	unitUnsafeSingle                         // "margin: 0 1vw 2vw 3vw;"
	unitUnsafeMixed                          // "margin: 0 1vw 2vh 3ch;"
)

// We can only compact rules together if they have the same unit safety level.
// We want to avoid a situation where the browser treats some of the original
// rules as valid and others as invalid.
//
//   Safe:
//     top: 1px; left: 0; bottom: 1px; right: 0;
//     top: 1Q; left: 2Q; bottom: 3Q; right: 4Q;
//
//   Unsafe:
//     top: 1vh; left: 2vw; bottom: 3vh; right: 4vw;
//     top: 1Q; left: 2Q; bottom: 3Q; right: 0;
//     inset: 1Q 0 0 0; top: 0;
//
type unitSafetyTracker struct {
	status unitSafetyStatus
	unit   string
}

func (a unitSafetyTracker) isSafeWith(b unitSafetyTracker) bool {
	return a.status == b.status && a.status != unitUnsafeMixed && (a.status != unitUnsafeSingle || a.unit == b.unit)
}

func (t *unitSafetyTracker) includeUnitOf(token css_ast.Token) {
	switch token.Kind {
	case css_lexer.TNumber:
		if token.Text == "0" {
			return
		}

	case css_lexer.TPercentage:
		return

	case css_lexer.TDimension:
		if token.DimensionUnitIsSafeLength() {
			return
		} else if unit := token.DimensionUnit(); t.status == unitSafe {
			t.status = unitUnsafeSingle
			t.unit = unit
			return
		} else if t.status == unitUnsafeSingle && t.unit == unit {
			return
		}
	}

	t.status = unitUnsafeMixed
}

func (box *boxTracker) updateSide(rules []css_ast.Rule, side int, new boxSide) {
	if old := box.sides[side]; old.token.Kind != css_lexer.TEndOfFile &&
		(!new.wasSingleRule || old.wasSingleRule) &&
		old.unitSafety.status == unitSafe && new.unitSafety.status == unitSafe {
		rules[old.ruleIndex] = css_ast.Rule{}
	}
	box.sides[side] = new
}

func (box *boxTracker) mangleSides(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool) {
	// Reset if we see a change in the "!important" flag
	if box.important != decl.Important {
		box.sides = [4]boxSide{}
		box.important = decl.Important
	}

	allowedIdent := ""
	if box.allowAuto {
		allowedIdent = "auto"
	}
	if quad, ok := expandTokenQuad(decl.Value, allowedIdent); ok {
		// Use a single tracker for the whole rule
		unitSafety := unitSafetyTracker{}
		for _, t := range quad {
			if !box.allowAuto || t.Kind.IsNumeric() {
				unitSafety.includeUnitOf(t)
			}
		}
		for side, t := range quad {
			if unitSafety.status == unitSafe {
				t.TurnLengthIntoNumberIfZero()
			}
			box.updateSide(rules, side, boxSide{
				token:      t,
				ruleIndex:  uint32(index),
				unitSafety: unitSafety,
			})
		}
		box.compactRules(rules, decl.KeyRange, removeWhitespace)
	} else {
		box.sides = [4]boxSide{}
	}
}

func (box *boxTracker) mangleSide(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool, side int) {
	// Reset if we see a change in the "!important" flag
	if box.important != decl.Important {
		box.sides = [4]boxSide{}
		box.important = decl.Important
	}

	if tokens := decl.Value; len(tokens) == 1 {
		if t := tokens[0]; t.Kind.IsNumeric() || (t.Kind == css_lexer.TIdent && box.allowAuto && t.Text == "auto") {
			unitSafety := unitSafetyTracker{}
			if !box.allowAuto || t.Kind.IsNumeric() {
				unitSafety.includeUnitOf(t)
			}
			if unitSafety.status == unitSafe && t.TurnLengthIntoNumberIfZero() {
				tokens[0] = t
			}
			box.updateSide(rules, side, boxSide{
				token:         t,
				ruleIndex:     uint32(index),
				wasSingleRule: true,
				unitSafety:    unitSafety,
			})
			box.compactRules(rules, decl.KeyRange, removeWhitespace)
			return
		}
	}

	box.sides = [4]boxSide{}
}

func (box *boxTracker) compactRules(rules []css_ast.Rule, keyRange logger.Range, removeWhitespace bool) {
	// All tokens must be present
	if eof := css_lexer.TEndOfFile; box.sides[0].token.Kind == eof || box.sides[1].token.Kind == eof ||
		box.sides[2].token.Kind == eof || box.sides[3].token.Kind == eof {
		return
	}

	// All tokens must have the same unit
	for _, side := range box.sides[1:] {
		if !side.unitSafety.isSafeWith(box.sides[0].unitSafety) {
			return
		}
	}

	// Generate the most minimal representation
	tokens := compactTokenQuad(
		box.sides[0].token,
		box.sides[1].token,
		box.sides[2].token,
		box.sides[3].token,
		removeWhitespace,
	)

	// Remove all of the existing declarations
	rules[box.sides[0].ruleIndex] = css_ast.Rule{}
	rules[box.sides[1].ruleIndex] = css_ast.Rule{}
	rules[box.sides[2].ruleIndex] = css_ast.Rule{}
	rules[box.sides[3].ruleIndex] = css_ast.Rule{}

	// Insert the combined declaration where the last rule was
	rules[box.sides[3].ruleIndex].Data = &css_ast.RDeclaration{
		Key:       box.key,
		KeyText:   box.keyText,
		Value:     tokens,
		KeyRange:  keyRange,
		Important: box.important,
	}
}
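The unit-safety rules above are the interesting part: shorthand merging is only applied when every side would survive or fail together in an old browser. A standalone sketch that loosely mirrors the classification (hypothetical helper, with the safe-unit allowlist trimmed to a few units for brevity; the real code asks DimensionUnitIsSafeLength):

package main

import (
	"fmt"
	"strings"
)

type safety int

const (
	safe         safety = iota // e.g. "0 1px 2cm 3%"
	unsafeSingle               // one repeated non-universal unit, e.g. "1vw 2vw"
	unsafeMixed                // several non-universal units, e.g. "1vw 2vh"
)

// classify runs includeUnitOf-style logic over a whole rule. Allowlisted
// units never block merging; anything else is only mergeable when the
// entire rule sticks to that single unit.
func classify(values []string) safety {
	safeUnits := map[string]bool{"": true, "px": true, "cm": true, "%": true}
	status, unit := safe, ""
	for _, v := range values {
		u := strings.TrimLeft(v, "0123456789.")
		switch {
		case safeUnits[u]:
			// universally supported, never blocks merging
		case status == safe:
			status, unit = unsafeSingle, u
		case status == unsafeSingle && unit == u:
			// still one single repeated unit
		default:
			return unsafeMixed
		}
	}
	return status
}

func main() {
	fmt.Println(classify([]string{"0", "1px", "2cm", "3%"})) // 0 (safe)
	fmt.Println(classify([]string{"1vw", "2vw", "3vw"}))     // 1 (single unit)
	fmt.Println(classify([]string{"1vw", "2vh", "3ch"}))     // 2 (mixed)
}
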
103 vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_box_shadow.go (generated, vendored)
@ -1,103 +0,0 @@
package css_parser

import (
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func (p *parser) mangleBoxShadow(tokens []css_ast.Token) []css_ast.Token {
	insetCount := 0
	colorCount := 0
	numbersBegin := 0
	numbersCount := 0
	numbersDone := false
	foundUnexpectedToken := false

	for i, t := range tokens {
		if t.Kind == css_lexer.TNumber || t.Kind == css_lexer.TDimension {
			if numbersDone {
				// Track if we found a non-number in between two numbers
				foundUnexpectedToken = true
			}
			if t.TurnLengthIntoNumberIfZero() {
				// "0px" => "0"
				tokens[i] = t
			}
			if numbersCount == 0 {
				// Track the index of the first number
				numbersBegin = i
			}
			numbersCount++
		} else {
			if numbersCount != 0 {
				// Track when we find a non-number after a number
				numbersDone = true
			}
			if hex, ok := parseColor(t); ok {
				colorCount++
				tokens[i] = p.mangleColor(t, hex)
			} else if t.Kind == css_lexer.TIdent && t.Text == "inset" {
				insetCount++
			} else {
				// Track if we found a token other than a number, a color, or "inset"
				foundUnexpectedToken = true
			}
		}
	}

	// If everything looks like a valid rule, trim trailing zeros off the numbers.
	// There are three valid configurations of numbers:
	//
	//   offset-x | offset-y
	//   offset-x | offset-y | blur-radius
	//   offset-x | offset-y | blur-radius | spread-radius
	//
	// If omitted, blur-radius and spread-radius are implied to be zero.
	if insetCount <= 1 && colorCount <= 1 && numbersCount > 2 && numbersCount <= 4 && !foundUnexpectedToken {
		numbersEnd := numbersBegin + numbersCount
		for numbersCount > 2 && tokens[numbersBegin+numbersCount-1].IsZero() {
			numbersCount--
		}
		tokens = append(tokens[:numbersBegin+numbersCount], tokens[numbersEnd:]...)
	}

	// Set the whitespace flags
	for i := range tokens {
		var whitespace css_ast.WhitespaceFlags
		if i > 0 || !p.options.RemoveWhitespace {
			whitespace |= css_ast.WhitespaceBefore
		}
		if i+1 < len(tokens) {
			whitespace |= css_ast.WhitespaceAfter
		}
		tokens[i].Whitespace = whitespace
	}
	return tokens
}

func (p *parser) mangleBoxShadows(tokens []css_ast.Token) []css_ast.Token {
	n := len(tokens)
	end := 0
	i := 0

	for i < n {
		// Find the comma or the end of the token list
		comma := i
		for comma < n && tokens[comma].Kind != css_lexer.TComma {
			comma++
		}

		// Mangle this individual shadow
		end += copy(tokens[end:], p.mangleBoxShadow(tokens[i:comma]))

		// Skip over the comma
		if comma < n {
			tokens[end] = tokens[comma]
			end++
			comma++
		}
		i = comma
	}

	return tokens[:end]
}
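The trailing-zero trimming is easy to observe from the public API. A sketch; the exact minified output is an assumption based on the code above:

package main

import (
	"fmt"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	// Per shadow: "0px" becomes "0", trailing zero blur/spread radii are
	// trimmed, and colors are mangled, without touching the comma structure.
	r := api.Transform(
		"a { box-shadow: 0px 4px 0px 0px #ff0000, inset 1px 2px 0 0 #000000 }",
		api.TransformOptions{Loader: api.LoaderCSS, MinifyWhitespace: true, MinifySyntax: true},
	)
	fmt.Println(string(r.Code)) // a{box-shadow:0 4px red,inset 1px 2px #000}
}
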
669 vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_color.go (generated, vendored)
@ -1,669 +0,0 @@
package css_parser

import (
	"fmt"
	"math"
	"strconv"
	"strings"

	"github.com/evanw/esbuild/internal/compat"
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

// These names are shorter than their hex codes
var shortColorName = map[uint32]string{
	0x000080ff: "navy",
	0x008000ff: "green",
	0x008080ff: "teal",
	0x4b0082ff: "indigo",
	0x800000ff: "maroon",
	0x800080ff: "purple",
	0x808000ff: "olive",
	0x808080ff: "gray",
	0xa0522dff: "sienna",
	0xa52a2aff: "brown",
	0xc0c0c0ff: "silver",
	0xcd853fff: "peru",
	0xd2b48cff: "tan",
	0xda70d6ff: "orchid",
	0xdda0ddff: "plum",
	0xee82eeff: "violet",
	0xf0e68cff: "khaki",
	0xf0ffffff: "azure",
	0xf5deb3ff: "wheat",
	0xf5f5dcff: "beige",
	0xfa8072ff: "salmon",
	0xfaf0e6ff: "linen",
	0xff0000ff: "red",
	0xff6347ff: "tomato",
	0xff7f50ff: "coral",
	0xffa500ff: "orange",
	0xffc0cbff: "pink",
	0xffd700ff: "gold",
	0xffe4c4ff: "bisque",
	0xfffafaff: "snow",
	0xfffff0ff: "ivory",
}

var colorNameToHex = map[string]uint32{
	"black": 0x000000ff,
	"silver": 0xc0c0c0ff,
	"gray": 0x808080ff,
	"white": 0xffffffff,
	"maroon": 0x800000ff,
	"red": 0xff0000ff,
	"purple": 0x800080ff,
	"fuchsia": 0xff00ffff,
	"green": 0x008000ff,
	"lime": 0x00ff00ff,
	"olive": 0x808000ff,
	"yellow": 0xffff00ff,
	"navy": 0x000080ff,
	"blue": 0x0000ffff,
	"teal": 0x008080ff,
	"aqua": 0x00ffffff,
	"orange": 0xffa500ff,
	"aliceblue": 0xf0f8ffff,
	"antiquewhite": 0xfaebd7ff,
	"aquamarine": 0x7fffd4ff,
	"azure": 0xf0ffffff,
	"beige": 0xf5f5dcff,
	"bisque": 0xffe4c4ff,
	"blanchedalmond": 0xffebcdff,
	"blueviolet": 0x8a2be2ff,
	"brown": 0xa52a2aff,
	"burlywood": 0xdeb887ff,
	"cadetblue": 0x5f9ea0ff,
	"chartreuse": 0x7fff00ff,
	"chocolate": 0xd2691eff,
	"coral": 0xff7f50ff,
	"cornflowerblue": 0x6495edff,
	"cornsilk": 0xfff8dcff,
	"crimson": 0xdc143cff,
	"cyan": 0x00ffffff,
	"darkblue": 0x00008bff,
	"darkcyan": 0x008b8bff,
	"darkgoldenrod": 0xb8860bff,
	"darkgray": 0xa9a9a9ff,
	"darkgreen": 0x006400ff,
	"darkgrey": 0xa9a9a9ff,
	"darkkhaki": 0xbdb76bff,
	"darkmagenta": 0x8b008bff,
	"darkolivegreen": 0x556b2fff,
	"darkorange": 0xff8c00ff,
	"darkorchid": 0x9932ccff,
	"darkred": 0x8b0000ff,
	"darksalmon": 0xe9967aff,
	"darkseagreen": 0x8fbc8fff,
	"darkslateblue": 0x483d8bff,
	"darkslategray": 0x2f4f4fff,
	"darkslategrey": 0x2f4f4fff,
	"darkturquoise": 0x00ced1ff,
	"darkviolet": 0x9400d3ff,
	"deeppink": 0xff1493ff,
	"deepskyblue": 0x00bfffff,
	"dimgray": 0x696969ff,
	"dimgrey": 0x696969ff,
	"dodgerblue": 0x1e90ffff,
	"firebrick": 0xb22222ff,
	"floralwhite": 0xfffaf0ff,
	"forestgreen": 0x228b22ff,
	"gainsboro": 0xdcdcdcff,
	"ghostwhite": 0xf8f8ffff,
	"gold": 0xffd700ff,
	"goldenrod": 0xdaa520ff,
	"greenyellow": 0xadff2fff,
	"grey": 0x808080ff,
	"honeydew": 0xf0fff0ff,
	"hotpink": 0xff69b4ff,
	"indianred": 0xcd5c5cff,
	"indigo": 0x4b0082ff,
	"ivory": 0xfffff0ff,
	"khaki": 0xf0e68cff,
	"lavender": 0xe6e6faff,
	"lavenderblush": 0xfff0f5ff,
	"lawngreen": 0x7cfc00ff,
	"lemonchiffon": 0xfffacdff,
	"lightblue": 0xadd8e6ff,
	"lightcoral": 0xf08080ff,
	"lightcyan": 0xe0ffffff,
	"lightgoldenrodyellow": 0xfafad2ff,
	"lightgray": 0xd3d3d3ff,
	"lightgreen": 0x90ee90ff,
	"lightgrey": 0xd3d3d3ff,
	"lightpink": 0xffb6c1ff,
	"lightsalmon": 0xffa07aff,
	"lightseagreen": 0x20b2aaff,
	"lightskyblue": 0x87cefaff,
	"lightslategray": 0x778899ff,
	"lightslategrey": 0x778899ff,
	"lightsteelblue": 0xb0c4deff,
	"lightyellow": 0xffffe0ff,
	"limegreen": 0x32cd32ff,
	"linen": 0xfaf0e6ff,
	"magenta": 0xff00ffff,
	"mediumaquamarine": 0x66cdaaff,
	"mediumblue": 0x0000cdff,
	"mediumorchid": 0xba55d3ff,
	"mediumpurple": 0x9370dbff,
	"mediumseagreen": 0x3cb371ff,
	"mediumslateblue": 0x7b68eeff,
	"mediumspringgreen": 0x00fa9aff,
	"mediumturquoise": 0x48d1ccff,
	"mediumvioletred": 0xc71585ff,
	"midnightblue": 0x191970ff,
	"mintcream": 0xf5fffaff,
	"mistyrose": 0xffe4e1ff,
	"moccasin": 0xffe4b5ff,
	"navajowhite": 0xffdeadff,
	"oldlace": 0xfdf5e6ff,
	"olivedrab": 0x6b8e23ff,
	"orangered": 0xff4500ff,
	"orchid": 0xda70d6ff,
	"palegoldenrod": 0xeee8aaff,
	"palegreen": 0x98fb98ff,
	"paleturquoise": 0xafeeeeff,
	"palevioletred": 0xdb7093ff,
	"papayawhip": 0xffefd5ff,
	"peachpuff": 0xffdab9ff,
	"peru": 0xcd853fff,
	"pink": 0xffc0cbff,
	"plum": 0xdda0ddff,
	"powderblue": 0xb0e0e6ff,
	"rosybrown": 0xbc8f8fff,
	"royalblue": 0x4169e1ff,
	"saddlebrown": 0x8b4513ff,
	"salmon": 0xfa8072ff,
	"sandybrown": 0xf4a460ff,
	"seagreen": 0x2e8b57ff,
	"seashell": 0xfff5eeff,
	"sienna": 0xa0522dff,
	"skyblue": 0x87ceebff,
	"slateblue": 0x6a5acdff,
	"slategray": 0x708090ff,
	"slategrey": 0x708090ff,
	"snow": 0xfffafaff,
	"springgreen": 0x00ff7fff,
	"steelblue": 0x4682b4ff,
	"tan": 0xd2b48cff,
	"thistle": 0xd8bfd8ff,
	"tomato": 0xff6347ff,
	"turquoise": 0x40e0d0ff,
	"violet": 0xee82eeff,
	"wheat": 0xf5deb3ff,
	"whitesmoke": 0xf5f5f5ff,
	"yellowgreen": 0x9acd32ff,
	"rebeccapurple": 0x663399ff,
}

func parseHex(text string) (uint32, bool) {
	hex := uint32(0)
	for _, c := range text {
		hex <<= 4
		switch {
		case c >= '0' && c <= '9':
			hex |= uint32(c) - '0'
		case c >= 'a' && c <= 'f':
			hex |= uint32(c) - ('a' - 10)
		case c >= 'A' && c <= 'F':
			hex |= uint32(c) - ('A' - 10)
		default:
			return 0, false
		}
	}
	return hex, true
}

// 0xAABBCCDD => 0xABCD
func compactHex(v uint32) uint32 {
	return ((v & 0x0FF00000) >> 12) | ((v & 0x00000FF0) >> 4)
}

// 0xABCD => 0xAABBCCDD
func expandHex(v uint32) uint32 {
	return ((v & 0xF000) << 16) | ((v & 0xFF00) << 12) | ((v & 0x0FF0) << 8) | ((v & 0x00FF) << 4) | (v & 0x000F)
}

func hexR(v uint32) int { return int(v >> 24) }
func hexG(v uint32) int { return int((v >> 16) & 255) }
func hexB(v uint32) int { return int((v >> 8) & 255) }
func hexA(v uint32) int { return int(v & 255) }

func floatToStringForColor(a float64) string {
	text := fmt.Sprintf("%.03f", a)
	for text[len(text)-1] == '0' {
		text = text[:len(text)-1]
	}
	if text[len(text)-1] == '.' {
		text = text[:len(text)-1]
	}
	return text
}

func degreesForAngle(token css_ast.Token) (float64, bool) {
	switch token.Kind {
	case css_lexer.TNumber:
		if value, err := strconv.ParseFloat(token.Text, 64); err == nil {
			return value, true
		}

	case css_lexer.TDimension:
		if value, err := strconv.ParseFloat(token.DimensionValue(), 64); err == nil {
			switch token.DimensionUnit() {
			case "deg":
				return value, true
			case "grad":
				return value * (360.0 / 400.0), true
			case "rad":
				return value * (180.0 / math.Pi), true
			case "turn":
				return value * 360.0, true
			}
		}
	}
	return 0, false
}

func lowerAlphaPercentageToNumber(token css_ast.Token) css_ast.Token {
	if token.Kind == css_lexer.TPercentage {
		if value, err := strconv.ParseFloat(token.Text[:len(token.Text)-1], 64); err == nil {
			token.Kind = css_lexer.TNumber
			token.Text = floatToStringForColor(value / 100.0)
		}
	}
	return token
}

// Convert newer color syntax to older color syntax for older browsers
func (p *parser) lowerColor(token css_ast.Token) css_ast.Token {
	text := token.Text

	switch token.Kind {
	case css_lexer.THash:
		if p.options.UnsupportedCSSFeatures.Has(compat.HexRGBA) {
			switch len(text) {
			case 4:
				// "#1234" => "rgba(1, 2, 3, 0.004)"
				if hex, ok := parseHex(text); ok {
					hex = expandHex(hex)
					token.Kind = css_lexer.TFunction
					token.Text = "rgba"
					commaToken := p.commaToken()
					token.Children = &[]css_ast.Token{
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexR(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexG(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexB(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: floatToStringForColor(float64(hexA(hex)) / 255)},
					}
				}

			case 8:
				// "#12345678" => "rgba(18, 52, 86, 0.47)"
				if hex, ok := parseHex(text); ok {
					token.Kind = css_lexer.TFunction
					token.Text = "rgba"
					commaToken := p.commaToken()
					token.Children = &[]css_ast.Token{
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexR(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexG(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexB(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: floatToStringForColor(float64(hexA(hex)) / 255)},
					}
				}
			}
		}

	case css_lexer.TIdent:
		if text == "rebeccapurple" && p.options.UnsupportedCSSFeatures.Has(compat.RebeccaPurple) {
			token.Kind = css_lexer.THash
			token.Text = "663399"
		}

	case css_lexer.TFunction:
		switch text {
		case "rgb", "rgba", "hsl", "hsla":
			if p.options.UnsupportedCSSFeatures.Has(compat.Modern_RGB_HSL) {
				args := *token.Children
				removeAlpha := false
				addAlpha := false

				// "hsl(1deg, 2%, 3%)" => "hsl(1, 2%, 3%)"
				if (text == "hsl" || text == "hsla") && len(args) > 0 {
					if degrees, ok := degreesForAngle(args[0]); ok {
						args[0].Kind = css_lexer.TNumber
						args[0].Text = floatToStringForColor(degrees)
					}
				}

				// These check for "IsNumeric" to reject "var()" since a single "var()"
				// can substitute for multiple tokens and that messes up pattern matching
				switch len(args) {
				case 3:
					// "rgba(1 2 3)" => "rgb(1, 2, 3)"
					// "hsla(1 2% 3%)" => "hsl(1, 2%, 3%)"
					if args[0].Kind.IsNumeric() && args[1].Kind.IsNumeric() && args[2].Kind.IsNumeric() {
						removeAlpha = true
						args[0].Whitespace = 0
						args[1].Whitespace = 0
						commaToken := p.commaToken()
						token.Children = &[]css_ast.Token{
							args[0], commaToken,
							args[1], commaToken,
							args[2],
						}
					}

				case 5:
					// "rgba(1, 2, 3)" => "rgb(1, 2, 3)"
					// "hsla(1, 2%, 3%)" => "hsl(1, 2%, 3%)"
					if args[0].Kind.IsNumeric() && args[1].Kind == css_lexer.TComma &&
						args[2].Kind.IsNumeric() && args[3].Kind == css_lexer.TComma &&
						args[4].Kind.IsNumeric() {
						removeAlpha = true
						break
					}

					// "rgb(1 2 3 / 4%)" => "rgba(1, 2, 3, 0.04)"
					// "hsl(1 2% 3% / 4%)" => "hsla(1, 2%, 3%, 0.04)"
					if args[0].Kind.IsNumeric() && args[1].Kind.IsNumeric() && args[2].Kind.IsNumeric() &&
						args[3].Kind == css_lexer.TDelimSlash && args[4].Kind.IsNumeric() {
						addAlpha = true
						args[0].Whitespace = 0
						args[1].Whitespace = 0
						args[2].Whitespace = 0
						commaToken := p.commaToken()
						token.Children = &[]css_ast.Token{
							args[0], commaToken,
							args[1], commaToken,
							args[2], commaToken,
							lowerAlphaPercentageToNumber(args[4]),
						}
					}

				case 7:
					// "rgb(1%, 2%, 3%, 4%)" => "rgba(1%, 2%, 3%, 0.04)"
					// "hsl(1, 2%, 3%, 4%)" => "hsla(1, 2%, 3%, 0.04)"
					if args[0].Kind.IsNumeric() && args[1].Kind == css_lexer.TComma &&
						args[2].Kind.IsNumeric() && args[3].Kind == css_lexer.TComma &&
						args[4].Kind.IsNumeric() && args[5].Kind == css_lexer.TComma &&
						args[6].Kind.IsNumeric() {
						addAlpha = true
						args[6] = lowerAlphaPercentageToNumber(args[6])
					}
				}

				if removeAlpha {
					if text == "rgba" {
						token.Text = "rgb"
					} else if text == "hsla" {
						token.Text = "hsl"
					}
				} else if addAlpha {
					if text == "rgb" {
						token.Text = "rgba"
					} else if text == "hsl" {
						token.Text = "hsla"
					}
				}
			}
		}
	}

	return token
}

func parseColor(token css_ast.Token) (uint32, bool) {
	text := token.Text

	switch token.Kind {
	case css_lexer.TIdent:
		if hex, ok := colorNameToHex[strings.ToLower(text)]; ok {
			return hex, true
		}

	case css_lexer.THash:
		switch len(text) {
		case 3:
			// "#123"
			if hex, ok := parseHex(text); ok {
				return (expandHex(hex) << 8) | 0xFF, true
			}

		case 4:
			// "#1234"
			if hex, ok := parseHex(text); ok {
				return expandHex(hex), true
			}

		case 6:
			// "#112233"
			if hex, ok := parseHex(text); ok {
				return (hex << 8) | 0xFF, true
			}

		case 8:
			// "#11223344"
			if hex, ok := parseHex(text); ok {
				return hex, true
			}
		}

	case css_lexer.TFunction:
		switch text {
		case "rgb", "rgba":
			args := *token.Children
			var r, g, b, a css_ast.Token

			switch len(args) {
			case 3:
				// "rgb(1 2 3)"
				r, g, b = args[0], args[1], args[2]

			case 5:
				// "rgba(1, 2, 3)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma {
					r, g, b = args[0], args[2], args[4]
					break
				}

				// "rgb(1 2 3 / 4%)"
				if args[3].Kind == css_lexer.TDelimSlash {
					r, g, b, a = args[0], args[1], args[2], args[4]
				}

			case 7:
				// "rgb(1%, 2%, 3%, 4%)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma && args[5].Kind == css_lexer.TComma {
					r, g, b, a = args[0], args[2], args[4], args[6]
				}
			}

			if r, ok := parseColorByte(r, 1); ok {
				if g, ok := parseColorByte(g, 1); ok {
					if b, ok := parseColorByte(b, 1); ok {
						if a, ok := parseAlphaByte(a); ok {
							return uint32((r << 24) | (g << 16) | (b << 8) | a), true
						}
					}
				}
			}

		case "hsl", "hsla":
			args := *token.Children
			var h, s, l, a css_ast.Token

			switch len(args) {
			case 3:
				// "hsl(1 2 3)"
				h, s, l = args[0], args[1], args[2]

			case 5:
				// "hsla(1, 2, 3)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma {
					h, s, l = args[0], args[2], args[4]
					break
				}

				// "hsl(1 2 3 / 4%)"
				if args[3].Kind == css_lexer.TDelimSlash {
					h, s, l, a = args[0], args[1], args[2], args[4]
				}

			case 7:
				// "hsl(1%, 2%, 3%, 4%)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma && args[5].Kind == css_lexer.TComma {
					h, s, l, a = args[0], args[2], args[4], args[6]
				}
			}

			// Convert from HSL to RGB. The algorithm is from the section
			// "Converting HSL colors to sRGB colors" in the specification.
			if h, ok := degreesForAngle(h); ok {
				if s, ok := s.FractionForPercentage(); ok {
					if l, ok := l.FractionForPercentage(); ok {
						if a, ok := parseAlphaByte(a); ok {
							h /= 360.0
							var t2 float64
							if l <= 0.5 {
								t2 = l * (s + 1)
							} else {
								t2 = l + s - (l * s)
							}
							t1 := l*2 - t2
							r := hueToRgb(t1, t2, h+1.0/3.0)
							g := hueToRgb(t1, t2, h)
							b := hueToRgb(t1, t2, h-1.0/3.0)
							return uint32((r << 24) | (g << 16) | (b << 8) | a), true
						}
					}
				}
			}
		}
	}

	return 0, false
}

func hueToRgb(t1 float64, t2 float64, hue float64) uint32 {
	hue -= math.Floor(hue)
	hue *= 6.0
	var f float64
	if hue < 1 {
		f = (t2-t1)*hue + t1
	} else if hue < 3 {
		f = t2
	} else if hue < 4 {
		f = (t2-t1)*(4-hue) + t1
	} else {
		f = t1
	}
	i := int(math.Round(f * 255))
	if i < 0 {
		i = 0
	} else if i > 255 {
		i = 255
	}
	return uint32(i)
}

func parseAlphaByte(token css_ast.Token) (uint32, bool) {
	if token.Kind == css_lexer.T(0) {
		return 255, true
	}
	return parseColorByte(token, 255)
}

func parseColorByte(token css_ast.Token, scale float64) (uint32, bool) {
	var i int
	var ok bool

	switch token.Kind {
	case css_lexer.TNumber:
		if f, err := strconv.ParseFloat(token.Text, 64); err == nil {
			i = int(math.Round(f * scale))
			ok = true
		}

	case css_lexer.TPercentage:
		if f, err := strconv.ParseFloat(token.PercentageValue(), 64); err == nil {
			i = int(math.Round(f * (255.0 / 100.0)))
			ok = true
		}
	}

	if i < 0 {
		i = 0
	} else if i > 255 {
		i = 255
	}
	return uint32(i), ok
}

func (p *parser) mangleColor(token css_ast.Token, hex uint32) css_ast.Token {
	// Note: Do NOT remove color information from fully transparent colors.
	// Safari behaves differently than other browsers for color interpolation:
	// https://css-tricks.com/thing-know-gradients-transparent-black/

	if hexA(hex) == 255 {
		token.Children = nil
		if name, ok := shortColorName[hex]; ok {
			token.Kind = css_lexer.TIdent
			token.Text = name
		} else {
			token.Kind = css_lexer.THash
			hex >>= 8
			compact := compactHex(hex)
			if hex == expandHex(compact) {
				token.Text = fmt.Sprintf("%03x", compact)
			} else {
				token.Text = fmt.Sprintf("%06x", hex)
			}
		}
	} else if !p.options.UnsupportedCSSFeatures.Has(compat.HexRGBA) {
		token.Children = nil
		token.Kind = css_lexer.THash
		compact := compactHex(hex)
		if hex == expandHex(compact) {
			token.Text = fmt.Sprintf("%04x", compact)
		} else {
			token.Text = fmt.Sprintf("%08x", hex)
		}
	} else {
		token.Kind = css_lexer.TFunction
		token.Text = "rgba"
		commaToken := p.commaToken()
		index := hexA(hex) * 4
		alpha := alphaFractionTable[index : index+4]
		if space := strings.IndexByte(alpha, ' '); space != -1 {
			alpha = alpha[:space]
		}
		token.Children = &[]css_ast.Token{
			{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexR(hex))}, commaToken,
			{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexG(hex))}, commaToken,
			{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexB(hex))}, commaToken,
			{Kind: css_lexer.TNumber, Text: alpha},
		}
	}

	return token
}

// Every four characters in this table is the fraction for that index
const alphaFractionTable string = "" +
	"0   .004.008.01 .016.02 .024.027.03 .035.04 .043.047.05 .055.06 " +
	".063.067.07 .075.08 .082.086.09 .094.098.1  .106.11 .114.118.12 " +
	".125.13 .133.137.14 .145.15 .153.157.16 .165.17 .173.176.18 .184" +
	".19 .192.196.2  .204.208.21 .216.22 .224.227.23 .235.24 .243.247" +
	".25 .255.26 .263.267.27 .275.28 .282.286.29 .294.298.3  .306.31 " +
	".314.318.32 .325.33 .333.337.34 .345.35 .353.357.36 .365.37 .373" +
	".376.38 .384.39 .392.396.4  .404.408.41 .416.42 .424.427.43 .435" +
	".44 .443.447.45 .455.46 .463.467.47 .475.48 .482.486.49 .494.498" +
	".5  .506.51 .514.518.52 .525.53 .533.537.54 .545.55 .553.557.56 " +
	".565.57 .573.576.58 .584.59 .592.596.6  .604.608.61 .616.62 .624" +
	".627.63 .635.64 .643.647.65 .655.66 .663.667.67 .675.68 .682.686" +
	".69 .694.698.7  .706.71 .714.718.72 .725.73 .733.737.74 .745.75 " +
	".753.757.76 .765.77 .773.776.78 .784.79 .792.796.8  .804.808.81 " +
	".816.82 .824.827.83 .835.84 .843.847.85 .855.86 .863.867.87 .875" +
	".88 .882.886.89 .894.898.9  .906.91 .914.918.92 .925.93 .933.937" +
	".94 .945.95 .953.957.96 .965.97 .973.976.98 .984.99 .992.9961   "
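The compactHex/expandHex pair is the whole "#aabbcc can become #abc" test: a hex color may use the short form exactly when compacting and re-expanding it is lossless. A standalone sketch of that round trip (same bit-twiddling as above; the main function is purely for illustration):

package main

import "fmt"

// 0xAABBCCDD => 0xABCD: keep one nibble per channel.
func compactHex(v uint32) uint32 {
	return ((v & 0x0FF00000) >> 12) | ((v & 0x00000FF0) >> 4)
}

// 0xABCD => 0xAABBCCDD: duplicate each nibble back out.
func expandHex(v uint32) uint32 {
	return ((v & 0xF000) << 16) | ((v & 0xFF00) << 12) | ((v & 0x0FF0) << 8) | ((v & 0x00FF) << 4) | (v & 0x000F)
}

func main() {
	for _, rgba := range []uint32{0xaabbccff, 0xaabbcdff} {
		compact := compactHex(rgba)
		if rgba == expandHex(compact) {
			fmt.Printf("#%08x => #%04x\n", rgba, compact) // #aabbccff => #abcf
		} else {
			fmt.Printf("#%08x stays long\n", rgba) // #aabbcdff stays long
		}
	}
}
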
135 vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_font.go (generated, vendored)
@ -1,135 +0,0 @@
package css_parser

import (
	"strconv"
	"strings"

	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

// Specification: https://drafts.csswg.org/css-fonts/#font-prop
// [ <font-style> || <font-variant-css2> || <font-weight> || <font-stretch-css3> ]? <font-size> [ / <line-height> ]? <font-family>
func (p *parser) mangleFont(tokens []css_ast.Token) []css_ast.Token {
	var result []css_ast.Token

	// Scan up to the font size
	pos := 0
	for ; pos < len(tokens); pos++ {
		token := tokens[pos]
		if isFontSize(token) {
			break
		}

		switch token.Kind {
		case css_lexer.TIdent:
			switch strings.ToLower(token.Text) {
			case "normal":
				// "All subproperties of the font property are first reset to their initial values"
				// This implies that "normal" doesn't do anything. Also all of the optional values
				// contain "normal" as an option and they are unordered so it's impossible to say
				// what property "normal" corresponds to. Just drop these tokens to save space.
				continue

			// <font-style>
			case "italic":
			case "oblique":
				if pos+1 < len(tokens) && tokens[pos+1].IsAngle() {
					result = append(result, token, tokens[pos+1])
					pos++
					continue
				}

			// <font-variant-css2>
			case "small-caps":

			// <font-weight>
			case "bold", "bolder", "lighter":
				result = append(result, p.mangleFontWeight(token))
				continue

			// <font-stretch-css3>
			case "ultra-condensed", "extra-condensed", "condensed", "semi-condensed",
				"semi-expanded", "expanded", "extra-expanded", "ultra-expanded":

			default:
				// All other tokens are unrecognized, so we bail if we hit one
				return tokens
			}
			result = append(result, token)

		case css_lexer.TNumber:
			// "Only values greater than or equal to 1, and less than or equal to
			// 1000, are valid, and all other values are invalid."
			if value, err := strconv.ParseFloat(token.Text, 64); err != nil || value < 1 || value > 1000 {
				return tokens
			}
			result = append(result, token)

		default:
			// All other tokens are unrecognized, so we bail if we hit one
			return tokens
		}
	}

	// <font-size>
	if pos == len(tokens) {
		return tokens
	}
	result = append(result, tokens[pos])
	pos++

	// / <line-height>
	if pos < len(tokens) && tokens[pos].Kind == css_lexer.TDelimSlash {
		if pos+1 == len(tokens) {
			return tokens
		}
		result = append(result, tokens[pos], tokens[pos+1])
		pos += 2

		// Remove the whitespace around the "/" character
		if p.options.RemoveWhitespace {
			result[len(result)-3].Whitespace &= ^css_ast.WhitespaceAfter
			result[len(result)-2].Whitespace = 0
			result[len(result)-1].Whitespace &= ^css_ast.WhitespaceBefore
		}
	}

	// <font-family>
	if family, ok := p.mangleFontFamily(tokens[pos:]); ok {
		return append(result, family...)
	}
	return tokens
}

var fontSizeKeywords = map[string]bool{
	// <absolute-size>: https://drafts.csswg.org/css-fonts/#valdef-font-size-absolute-size
	"xx-small": true,
	"x-small": true,
	"small": true,
	"medium": true,
	"large": true,
	"x-large": true,
	"xx-large": true,
	"xxx-large": true,

	// <relative-size>: https://drafts.csswg.org/css-fonts/#valdef-font-size-relative-size
	"larger": true,
	"smaller": true,
}

// Specification: https://drafts.csswg.org/css-fonts/#font-size-prop
func isFontSize(token css_ast.Token) bool {
	// <length-percentage>
	if token.Kind == css_lexer.TDimension || token.Kind == css_lexer.TPercentage {
		return true
	}

	// <absolute-size> or <relative-size>
	if token.Kind == css_lexer.TIdent {
		_, ok := fontSizeKeywords[strings.ToLower(token.Text)]
		return ok
	}

	return false
}
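A sketch of the font shorthand mangling through the public API; the exact output here is an inference from the code above (including the font-family rewrite of the next file) and may vary by esbuild version:

package main

import (
	"fmt"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	// "normal" never changes the shorthand's meaning and is dropped, "bold"
	// becomes "700", and the whitespace around "/" is removed.
	r := api.Transform(
		`a { font: normal bold 16px / 1.5 "Helvetica Neue", sans-serif }`,
		api.TransformOptions{Loader: api.LoaderCSS, MinifyWhitespace: true, MinifySyntax: true},
	)
	fmt.Println(string(r.Code)) // a{font:700 16px/1.5 Helvetica Neue,sans-serif}
}
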
142 vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_font_family.go (generated, vendored)
@ -1,142 +0,0 @@
package css_parser

import (
	"strings"

	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

// Specification: https://drafts.csswg.org/css-values-4/#common-keywords
var wideKeywords = map[string]bool{
	"initial": true,
	"inherit": true,
	"unset": true,
}

// Specification: https://drafts.csswg.org/css-fonts/#generic-font-families
var genericFamilyNames = map[string]bool{
	"serif": true,
	"sans-serif": true,
	"cursive": true,
	"fantasy": true,
	"monospace": true,
	"system-ui": true,
	"emoji": true,
	"math": true,
	"fangsong": true,
	"ui-serif": true,
	"ui-sans-serif": true,
	"ui-monospace": true,
	"ui-rounded": true,
}

// Specification: https://drafts.csswg.org/css-fonts/#font-family-prop
func (p *parser) mangleFontFamily(tokens []css_ast.Token) ([]css_ast.Token, bool) {
	result, rest, ok := p.mangleFamilyNameOrGenericName(nil, tokens)
	if !ok {
		return nil, false
	}

	for len(rest) > 0 && rest[0].Kind == css_lexer.TComma {
		result, rest, ok = p.mangleFamilyNameOrGenericName(append(result, rest[0]), rest[1:])
		if !ok {
			return nil, false
		}
	}

	if len(rest) > 0 {
		return nil, false
	}

	return result, true
}

func (p *parser) mangleFamilyNameOrGenericName(result []css_ast.Token, tokens []css_ast.Token) ([]css_ast.Token, []css_ast.Token, bool) {
	if len(tokens) > 0 {
		t := tokens[0]

		// Handle <generic-family>
		if t.Kind == css_lexer.TIdent && genericFamilyNames[t.Text] {
			return append(result, t), tokens[1:], true
		}

		// Handle <family-name>
		if t.Kind == css_lexer.TString {
			// "If a sequence of identifiers is given as a <family-name>, the computed
			// value is the name converted to a string by joining all the identifiers
			// in the sequence by single spaces."
			//
			// More information: https://mathiasbynens.be/notes/unquoted-font-family
			names := strings.Split(t.Text, " ")
			for _, name := range names {
				if !isValidCustomIdent(name, genericFamilyNames) {
					return append(result, t), tokens[1:], true
				}
			}
			for i, name := range names {
				var whitespace css_ast.WhitespaceFlags
				if i != 0 || !p.options.RemoveWhitespace {
					whitespace = css_ast.WhitespaceBefore
				}
				result = append(result, css_ast.Token{
					Kind:       css_lexer.TIdent,
					Text:       name,
					Whitespace: whitespace,
				})
			}
			return result, tokens[1:], true
		}

		// "Font family names other than generic families must either be given
		// quoted as <string>s, or unquoted as a sequence of one or more
		// <custom-ident>."
		if t.Kind == css_lexer.TIdent {
			for {
				if !isValidCustomIdent(t.Text, genericFamilyNames) {
					return nil, nil, false
				}
				result = append(result, t)
				tokens = tokens[1:]
				if len(tokens) == 0 || tokens[0].Kind != css_lexer.TIdent {
					break
				}
				t = tokens[0]
			}
			return result, tokens, true
		}
	}

	// Anything other than the cases listed above causes us to bail
	return nil, nil, false
}

// Specification: https://drafts.csswg.org/css-values-4/#custom-idents
func isValidCustomIdent(text string, predefinedKeywords map[string]bool) bool {
	loweredText := strings.ToLower(text)

	if predefinedKeywords[loweredText] {
		return false
	}
	if wideKeywords[loweredText] {
		return false
	}
	if loweredText == "default" {
		return false
	}
	if loweredText == "" {
		return false
	}

	// validate if it contains characters which needs to be escaped
	if !css_lexer.WouldStartIdentifierWithoutEscapes(text) {
		return false
	}
	for _, c := range text {
		if !css_lexer.IsNameContinue(c) {
			return false
		}
	}

	return true
}
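A quick way to see the custom-ident rules in practice; the expected output is what the rules above imply, shown through the public API:

package main

import (
	"fmt"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	minify := func(css string) string {
		r := api.Transform(css, api.TransformOptions{
			Loader:           api.LoaderCSS,
			MinifyWhitespace: true,
			MinifySyntax:     true,
		})
		return string(r.Code)
	}

	// "Roboto Mono" splits into two valid custom idents, so the quotes go.
	fmt.Println(minify(`a { font-family: "Roboto Mono", monospace }`))
	// a{font-family:Roboto Mono,monospace}

	// "inherit" is a CSS-wide keyword: unquoting it would change the
	// meaning of the declaration, so the string stays quoted.
	fmt.Println(minify(`a { font-family: "inherit", serif }`))
	// a{font-family:"inherit",serif}
}
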
25 vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_font_weight.go (generated, vendored)
@ -1,25 +0,0 @@
package css_parser

import (
	"strings"

	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func (p *parser) mangleFontWeight(token css_ast.Token) css_ast.Token {
	if token.Kind != css_lexer.TIdent {
		return token
	}

	switch strings.ToLower(token.Text) {
	case "normal":
		token.Text = "400"
		token.Kind = css_lexer.TNumber
	case "bold":
		token.Text = "700"
		token.Kind = css_lexer.TNumber
	}

	return token
}
391 vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_transform.go (generated, vendored)
@ -1,391 +0,0 @@
package css_parser

import (
	"strings"

	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func turnPercentIntoNumberIfShorter(t *css_ast.Token) {
	if t.Kind == css_lexer.TPercentage {
		if shifted, ok := shiftDot(t.PercentageValue(), -2); ok && len(shifted) < len(t.Text) {
			t.Kind = css_lexer.TNumber
			t.Text = shifted
		}
	}
}

// https://www.w3.org/TR/css-transforms-1/#two-d-transform-functions
// https://drafts.csswg.org/css-transforms-2/#transform-functions
func (p *parser) mangleTransforms(tokens []css_ast.Token) []css_ast.Token {
	for i := range tokens {
		if token := &tokens[i]; token.Kind == css_lexer.TFunction {
			if args := *token.Children; css_ast.TokensAreCommaSeparated(args) {
				n := len(args)

				switch strings.ToLower(token.Text) {
				////////////////////////////////////////////////////////////////////////////////
				// 2D transforms

				case "matrix":
					// specifies a 2D transformation in the form of a transformation
					// matrix of the six values a, b, c, d, e, f.
					if n == 11 {
						// | a c 0 e |
						// | b d 0 f |
						// | 0 0 1 0 |
						// | 0 0 0 1 |
						a, b, c, d, e, f := args[0], args[2], args[4], args[6], args[8], args[10]
						if b.IsZero() && c.IsZero() && e.IsZero() && f.IsZero() {
							// | a 0 0 0 |
							// | 0 d 0 0 |
							// | 0 0 1 0 |
							// | 0 0 0 1 |
							if a.EqualIgnoringWhitespace(d) {
								// "matrix(a, 0, 0, a, 0, 0)" => "scale(a)"
								token.Text = "scale"
								*token.Children = args[:1]
							} else if d.IsOne() {
								// "matrix(a, 0, 0, 1, 0, 0)" => "scaleX(a)"
								token.Text = "scaleX"
								*token.Children = args[:1]
							} else if a.IsOne() {
								// "matrix(1, 0, 0, d, 0, 0)" => "scaleY(d)"
								token.Text = "scaleY"
								*token.Children = args[6:7]
							} else {
								// "matrix(a, 0, 0, d, 0, 0)" => "scale(a, d)"
								token.Text = "scale"
								*token.Children = append(args[:2], d)
							}

							// Note: A "matrix" cannot be directly converted into a "translate"
							// because "translate" requires units while "matrix" requires no
							// units. I'm not sure exactly what the semantics are so I'm not
							// sure if you can just add "px" or not. Even if that did work,
							// you still couldn't substitute values containing "var()" since
							// units would still not be substituted in that case.
						}
					}

				case "translate":
					// specifies a 2D translation by the vector [tx, ty], where tx is the
					// first translation-value parameter and ty is the optional second
					// translation-value parameter. If <ty> is not provided, ty has zero
					// as a value.
					if n == 1 {
						args[0].TurnLengthOrPercentageIntoNumberIfZero()
					} else if n == 3 {
						tx, ty := &args[0], &args[2]
						tx.TurnLengthOrPercentageIntoNumberIfZero()
						ty.TurnLengthOrPercentageIntoNumberIfZero()
						if ty.IsZero() {
							// "translate(tx, 0)" => "translate(tx)"
							*token.Children = args[:1]
						} else if tx.IsZero() {
							// "translate(0, ty)" => "translateY(ty)"
							token.Text = "translateY"
							*token.Children = args[2:]
						}
					}

				case "translatex":
					// specifies a translation by the given amount in the X direction.
					if n == 1 {
						// "translateX(tx)" => "translate(tx)"
						token.Text = "translate"
						args[0].TurnLengthOrPercentageIntoNumberIfZero()
					}

				case "translatey":
					// specifies a translation by the given amount in the Y direction.
					if n == 1 {
						args[0].TurnLengthOrPercentageIntoNumberIfZero()
					}

				case "scale":
					// specifies a 2D scale operation by the [sx,sy] scaling vector
					// described by the 2 parameters. If the second parameter is not
					// provided, it takes a value equal to the first. For example,
					// scale(1, 1) would leave an element unchanged, while scale(2, 2)
					// would cause it to appear twice as long in both the X and Y axes,
					// or four times its typical geometric size.
					if n == 1 {
						turnPercentIntoNumberIfShorter(&args[0])
					} else if n == 3 {
						sx, sy := &args[0], &args[2]
						turnPercentIntoNumberIfShorter(sx)
						turnPercentIntoNumberIfShorter(sy)
						if sx.EqualIgnoringWhitespace(*sy) {
							// "scale(s, s)" => "scale(s)"
							*token.Children = args[:1]
						} else if sy.IsOne() {
							// "scale(s, 1)" => "scaleX(s)"
							token.Text = "scaleX"
							*token.Children = args[:1]
						} else if sx.IsOne() {
							// "scale(1, s)" => "scaleY(s)"
							token.Text = "scaleY"
							*token.Children = args[2:]
						}
					}

				case "scalex":
					// specifies a 2D scale operation using the [sx,1] scaling vector,
					// where sx is given as the parameter.
					if n == 1 {
						turnPercentIntoNumberIfShorter(&args[0])
|
||||
}
|
||||
|
||||
case "scaley":
|
||||
// specifies a 2D scale operation using the [1,sy] scaling vector,
|
||||
// where sy is given as the parameter.
|
||||
if n == 1 {
|
||||
turnPercentIntoNumberIfShorter(&args[0])
|
||||
}
|
||||
|
||||
case "rotate":
|
||||
// specifies a 2D rotation by the angle specified in the parameter
|
||||
// about the origin of the element, as defined by the
|
||||
// transform-origin property. For example, rotate(90deg) would
|
||||
// cause elements to appear rotated one-quarter of a turn in the
|
||||
// clockwise direction.
|
||||
if n == 1 {
|
||||
args[0].TurnLengthIntoNumberIfZero()
|
||||
}
|
||||
|
||||
case "skew":
|
||||
// specifies a 2D skew by [ax,ay] for X and Y. If the second
|
||||
// parameter is not provided, it has a zero value.
|
||||
if n == 1 {
|
||||
args[0].TurnLengthIntoNumberIfZero()
|
||||
} else if n == 3 {
|
||||
ax, ay := &args[0], &args[2]
|
||||
ax.TurnLengthIntoNumberIfZero()
|
||||
ay.TurnLengthIntoNumberIfZero()
|
||||
if ay.IsZero() {
|
||||
// "skew(ax, 0)" => "skew(ax)"
|
||||
*token.Children = args[:1]
|
||||
}
|
||||
}
|
||||
|
||||
case "skewx":
|
||||
// specifies a 2D skew transformation along the X axis by the given
|
||||
// angle.
|
||||
if n == 1 {
|
||||
// "skewX(ax)" => "skew(ax)"
|
||||
token.Text = "skew"
|
||||
args[0].TurnLengthIntoNumberIfZero()
|
||||
}
|
||||
|
||||
case "skewy":
|
||||
// specifies a 2D skew transformation along the Y axis by the given
|
||||
// angle.
|
||||
if n == 1 {
|
||||
args[0].TurnLengthIntoNumberIfZero()
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// 3D transforms
|
||||
|
||||
case "matrix3d":
|
||||
// specifies a 3D transformation as a 4x4 homogeneous matrix of 16
|
||||
// values in column-major order.
|
||||
if n == 31 {
|
||||
// | m0 m4 m8 m12 |
|
||||
// | m1 m5 m9 m13 |
|
||||
// | m2 m6 m10 m14 |
|
||||
// | m3 m7 m11 m15 |
|
||||
mask := uint32(0)
|
||||
for i := 0; i < 16; i++ {
|
||||
if arg := args[i*2]; arg.IsZero() {
|
||||
mask |= 1 << i
|
||||
} else if arg.IsOne() {
|
||||
mask |= (1 << 16) << i
|
||||
}
|
||||
}
|
||||
const onlyScale = 0b1000_0000_0000_0000_0111_1011_1101_1110
|
||||
const only2D = 0b1000_0100_0000_0000_0100_1011_1100_1100
|
||||
if (mask & onlyScale) == onlyScale {
|
||||
// | m0 0 0 0 |
|
||||
// | 0 m5 0 0 |
|
||||
// | 0 0 m10 0 |
|
||||
// | 0 0 0 1 |
|
||||
sx, sy, sz := args[0], args[10], args[20]
|
||||
if sx.EqualIgnoringWhitespace(sy) && sz.IsOne() {
|
||||
token.Text = "scale"
|
||||
*token.Children = args[:1]
|
||||
} else if sy.IsOne() && sz.IsOne() {
|
||||
token.Text = "scaleX"
|
||||
*token.Children = args[:1]
|
||||
} else if sx.IsOne() && sz.IsOne() {
|
||||
token.Text = "scaleY"
|
||||
*token.Children = args[10:11]
|
||||
} else if sx.IsOne() && sy.IsOne() {
|
||||
token.Text = "scaleZ"
|
||||
*token.Children = args[20:21]
|
||||
} else if sz.IsOne() {
|
||||
token.Text = "scale"
|
||||
*token.Children = append(args[0:2], args[10])
|
||||
} else {
|
||||
token.Text = "scale3d"
|
||||
*token.Children = append(append(args[0:2], args[10:12]...), args[20])
|
||||
}
|
||||
} else if (mask & only2D) == only2D {
|
||||
// | m0 m4 0 m12 |
|
||||
// | m1 m5 0 m13 |
|
||||
// | 0 0 1 0 |
|
||||
// | 0 0 0 1 |
|
||||
token.Text = "matrix"
|
||||
*token.Children = append(append(args[0:4], args[8:12]...), args[24:27]...)
|
||||
}
|
||||
|
||||
// Note: A "matrix3d" cannot be directly converted into a "translate3d"
|
||||
// because "translate3d" requires units while "matrix3d" requires no
|
||||
// units. I'm not sure exactly what the semantics are so I'm not
|
||||
// sure if you can just add "px" or not. Even if that did work,
|
||||
// you still couldn't substitute values containing "var()" since
|
||||
// units would still not be substituted in that case.
|
||||
}
|
||||
|
||||
case "translate3d":
|
||||
// specifies a 3D translation by the vector [tx,ty,tz], with tx,
|
||||
// ty and tz being the first, second and third translation-value
|
||||
// parameters respectively.
|
||||
if n == 5 {
|
||||
tx, ty, tz := &args[0], &args[2], &args[4]
|
||||
tx.TurnLengthOrPercentageIntoNumberIfZero()
|
||||
ty.TurnLengthOrPercentageIntoNumberIfZero()
|
||||
tz.TurnLengthIntoNumberIfZero()
|
||||
if ty.IsZero() && tz.IsZero() {
|
||||
// "translate3d(tx, 0, 0)" => "translate(tx)"
|
||||
token.Text = "translate"
|
||||
*token.Children = args[:1]
|
||||
} else if tx.IsZero() && tz.IsZero() {
|
||||
// "translate3d(0, ty, 0)" => "translateY(ty)"
|
||||
token.Text = "translateY"
|
||||
*token.Children = args[2:3]
|
||||
} else if tx.IsZero() && ty.IsZero() {
|
||||
// "translate3d(0, 0, tz)" => "translateZ(tz)"
|
||||
token.Text = "translateZ"
|
||||
*token.Children = args[4:]
|
||||
} else if tz.IsZero() {
|
||||
// "translate3d(tx, ty, 0)" => "translate(tx, ty)"
|
||||
token.Text = "translate"
|
||||
*token.Children = args[:3]
|
||||
}
|
||||
}
|
||||
|
||||
case "translatez":
|
||||
// specifies a 3D translation by the vector [0,0,tz] with the given
|
||||
// amount in the Z direction.
|
||||
if n == 1 {
|
||||
args[0].TurnLengthIntoNumberIfZero()
|
||||
}
|
||||
|
||||
case "scale3d":
|
||||
// specifies a 3D scale operation by the [sx,sy,sz] scaling vector
|
||||
// described by the 3 parameters.
|
||||
if n == 5 {
|
||||
sx, sy, sz := &args[0], &args[2], &args[4]
|
||||
turnPercentIntoNumberIfShorter(sx)
|
||||
turnPercentIntoNumberIfShorter(sy)
|
||||
turnPercentIntoNumberIfShorter(sz)
|
||||
if sx.EqualIgnoringWhitespace(*sy) && sz.IsOne() {
|
||||
// "scale3d(s, s, 1)" => "scale(s)"
|
||||
token.Text = "scale"
|
||||
*token.Children = args[:1]
|
||||
} else if sy.IsOne() && sz.IsOne() {
|
||||
// "scale3d(sx, 1, 1)" => "scaleX(sx)"
|
||||
token.Text = "scaleX"
|
||||
*token.Children = args[:1]
|
||||
} else if sx.IsOne() && sz.IsOne() {
|
||||
// "scale3d(1, sy, 1)" => "scaleY(sy)"
|
||||
token.Text = "scaleY"
|
||||
*token.Children = args[2:3]
|
||||
} else if sx.IsOne() && sy.IsOne() {
|
||||
// "scale3d(1, 1, sz)" => "scaleZ(sz)"
|
||||
token.Text = "scaleZ"
|
||||
*token.Children = args[4:]
|
||||
} else if sz.IsOne() {
|
||||
// "scale3d(sx, sy, 1)" => "scale(sx, sy)"
|
||||
token.Text = "scale"
|
||||
*token.Children = args[:3]
|
||||
}
|
||||
}
|
||||
|
||||
case "scalez":
|
||||
// specifies a 3D scale operation using the [1,1,sz] scaling vector,
|
||||
// where sz is given as the parameter.
|
||||
if n == 1 {
|
||||
turnPercentIntoNumberIfShorter(&args[0])
|
||||
}
|
||||
|
||||
case "rotate3d":
|
||||
// specifies a 3D rotation by the angle specified in last parameter
|
||||
// about the [x,y,z] direction vector described by the first three
|
||||
// parameters. A direction vector that cannot be normalized, such as
|
||||
// [0,0,0], will cause the rotation to not be applied.
|
||||
if n == 7 {
|
||||
x, y, z, angle := &args[0], &args[2], &args[4], &args[6]
|
||||
angle.TurnLengthIntoNumberIfZero()
|
||||
if x.IsOne() && y.IsZero() && z.IsZero() {
|
||||
// "rotate3d(1, 0, 0, angle)" => "rotateX(angle)"
|
||||
token.Text = "rotateX"
|
||||
*token.Children = args[6:]
|
||||
} else if x.IsZero() && y.IsOne() && z.IsZero() {
|
||||
// "rotate3d(0, 1, 0, angle)" => "rotateY(angle)"
|
||||
token.Text = "rotateY"
|
||||
*token.Children = args[6:]
|
||||
} else if x.IsZero() && y.IsZero() && z.IsOne() {
|
||||
// "rotate3d(0, 0, 1, angle)" => "rotate(angle)"
|
||||
token.Text = "rotate"
|
||||
*token.Children = args[6:]
|
||||
}
|
||||
}
|
||||
|
||||
case "rotatex":
|
||||
// same as rotate3d(1, 0, 0, <angle>).
|
||||
if n == 1 {
|
||||
args[0].TurnLengthIntoNumberIfZero()
|
||||
}
|
||||
|
||||
case "rotatey":
|
||||
// same as rotate3d(0, 1, 0, <angle>).
|
||||
if n == 1 {
|
||||
args[0].TurnLengthIntoNumberIfZero()
|
||||
}
|
||||
|
||||
case "rotatez":
|
||||
// same as rotate3d(0, 0, 1, <angle>), which is a 3d transform
|
||||
// equivalent to the 2d transform rotate(<angle>).
|
||||
if n == 1 {
|
||||
// "rotateZ(angle)" => "rotate(angle)"
|
||||
token.Text = "rotate"
|
||||
args[0].TurnLengthIntoNumberIfZero()
|
||||
}
|
||||
|
||||
case "perspective":
|
||||
// specifies a perspective projection matrix. This matrix scales
|
||||
// points in X and Y based on their Z value, scaling points with
|
||||
// positive Z values away from the origin, and those with negative Z
|
||||
// values towards the origin. Points on the z=0 plane are unchanged.
|
||||
// The parameter represents the distance of the z=0 plane from the
|
||||
// viewer.
|
||||
if n == 1 {
|
||||
args[0].TurnLengthIntoNumberIfZero()
|
||||
}
|
||||
}
|
||||
|
||||
// Trim whitespace at the ends
|
||||
if args := *token.Children; len(args) > 0 {
|
||||
args[0].Whitespace &= ^css_ast.WhitespaceBefore
|
||||
args[len(args)-1].Whitespace &= ^css_ast.WhitespaceAfter
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return tokens
|
||||
}
|
1361
vendor/github.com/evanw/esbuild/internal/css_parser/css_parser.go
generated
vendored
1361
vendor/github.com/evanw/esbuild/internal/css_parser/css_parser.go
generated
vendored
File diff suppressed because it is too large
Load Diff
342
vendor/github.com/evanw/esbuild/internal/css_parser/css_parser_selector.go
generated
vendored
342
vendor/github.com/evanw/esbuild/internal/css_parser/css_parser_selector.go
generated
vendored
@ -1,342 +0,0 @@
|
||||
package css_parser
|
||||
|
||||
import (
|
||||
"github.com/evanw/esbuild/internal/css_ast"
|
||||
"github.com/evanw/esbuild/internal/css_lexer"
|
||||
)
|
||||
|
||||
func (p *parser) parseSelectorList() (list []css_ast.ComplexSelector, ok bool) {
|
||||
// Parse the first selector
|
||||
p.eat(css_lexer.TWhitespace)
|
||||
sel, good := p.parseComplexSelector()
|
||||
if !good {
|
||||
return
|
||||
}
|
||||
list = append(list, sel)
|
||||
|
||||
// Parse the remaining selectors
|
||||
for {
|
||||
p.eat(css_lexer.TWhitespace)
|
||||
if !p.eat(css_lexer.TComma) {
|
||||
break
|
||||
}
|
||||
p.eat(css_lexer.TWhitespace)
|
||||
sel, good := p.parseComplexSelector()
|
||||
if !good {
|
||||
return
|
||||
}
|
||||
list = append(list, sel)
|
||||
}
|
||||
|
||||
ok = true
|
||||
return
|
||||
}
|
||||
|
||||
func (p *parser) parseComplexSelector() (result css_ast.ComplexSelector, ok bool) {
|
||||
// Parent
|
||||
sel, good := p.parseCompoundSelector()
|
||||
if !good {
|
||||
return
|
||||
}
|
||||
result.Selectors = append(result.Selectors, sel)
|
||||
|
||||
for {
|
||||
p.eat(css_lexer.TWhitespace)
|
||||
if p.peek(css_lexer.TEndOfFile) || p.peek(css_lexer.TComma) || p.peek(css_lexer.TOpenBrace) {
|
||||
break
|
||||
}
|
||||
|
||||
// Optional combinator
|
||||
combinator := p.parseCombinator()
|
||||
if combinator != "" {
|
||||
p.eat(css_lexer.TWhitespace)
|
||||
}
|
||||
|
||||
// Child
|
||||
sel, good := p.parseCompoundSelector()
|
||||
if !good {
|
||||
return
|
||||
}
|
||||
sel.Combinator = combinator
|
||||
result.Selectors = append(result.Selectors, sel)
|
||||
}
|
||||
|
||||
ok = true
|
||||
return
|
||||
}
|
||||
|
||||
func (p *parser) nameToken() css_ast.NameToken {
|
||||
return css_ast.NameToken{
|
||||
Kind: p.current().Kind,
|
||||
Text: p.decoded(),
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) parseCompoundSelector() (sel css_ast.CompoundSelector, ok bool) {
|
||||
// This is an extension: https://drafts.csswg.org/css-nesting-1/
|
||||
if p.eat(css_lexer.TDelimAmpersand) {
|
||||
sel.HasNestPrefix = true
|
||||
}
|
||||
|
||||
// Parse the type selector
|
||||
switch p.current().Kind {
|
||||
case css_lexer.TDelimBar, css_lexer.TIdent, css_lexer.TDelimAsterisk:
|
||||
nsName := css_ast.NamespacedName{}
|
||||
if !p.peek(css_lexer.TDelimBar) {
|
||||
nsName.Name = p.nameToken()
|
||||
p.advance()
|
||||
} else {
|
||||
// Hack: Create an empty "identifier" to represent this
|
||||
nsName.Name.Kind = css_lexer.TIdent
|
||||
}
|
||||
if p.eat(css_lexer.TDelimBar) {
|
||||
if !p.peek(css_lexer.TIdent) && !p.peek(css_lexer.TDelimAsterisk) {
|
||||
p.expect(css_lexer.TIdent)
|
||||
return
|
||||
}
|
||||
prefix := nsName.Name
|
||||
nsName.NamespacePrefix = &prefix
|
||||
nsName.Name = p.nameToken()
|
||||
p.advance()
|
||||
}
|
||||
sel.TypeSelector = &nsName
|
||||
}
|
||||
|
||||
// Parse the subclass selectors
|
||||
subclassSelectors:
|
||||
for {
|
||||
switch p.current().Kind {
|
||||
case css_lexer.THash:
|
||||
if !p.current().IsID {
|
||||
break subclassSelectors
|
||||
}
|
||||
name := p.decoded()
|
||||
sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSHash{Name: name})
|
||||
p.advance()
|
||||
|
||||
case css_lexer.TDelimDot:
|
||||
p.advance()
|
||||
name := p.decoded()
|
||||
sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSClass{Name: name})
|
||||
p.expect(css_lexer.TIdent)
|
||||
|
||||
case css_lexer.TOpenBracket:
|
||||
p.advance()
|
||||
attr, good := p.parseAttributeSelector()
|
||||
if !good {
|
||||
return
|
||||
}
|
||||
sel.SubclassSelectors = append(sel.SubclassSelectors, &attr)
|
||||
|
||||
case css_lexer.TColon:
|
||||
if p.next().Kind == css_lexer.TColon {
|
||||
// Special-case the start of the pseudo-element selector section
|
||||
for p.current().Kind == css_lexer.TColon {
|
||||
isElement := p.next().Kind == css_lexer.TColon
|
||||
if isElement {
|
||||
p.advance()
|
||||
}
|
||||
pseudo := p.parsePseudoClassSelector()
|
||||
|
||||
// https://www.w3.org/TR/selectors-4/#single-colon-pseudos
|
||||
// The four Level 2 pseudo-elements (::before, ::after, ::first-line,
|
||||
// and ::first-letter) may, for legacy reasons, be represented using
|
||||
// the <pseudo-class-selector> grammar, with only a single ":"
|
||||
// character at their start.
|
||||
if p.options.MangleSyntax && isElement && len(pseudo.Args) == 0 {
|
||||
switch pseudo.Name {
|
||||
case "before", "after", "first-line", "first-letter":
|
||||
isElement = false
|
||||
}
|
||||
}
|
||||
|
||||
pseudo.IsElement = isElement
|
||||
sel.SubclassSelectors = append(sel.SubclassSelectors, &pseudo)
|
||||
}
|
||||
break subclassSelectors
|
||||
}
|
||||
pseudo := p.parsePseudoClassSelector()
|
||||
sel.SubclassSelectors = append(sel.SubclassSelectors, &pseudo)
|
||||
|
||||
default:
|
||||
break subclassSelectors
|
||||
}
|
||||
}
|
||||
|
||||
// The compound selector must be non-empty
|
||||
if !sel.HasNestPrefix && sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0 {
|
||||
p.unexpected()
|
||||
return
|
||||
}
|
||||
|
||||
ok = true
|
||||
return
|
||||
}
|
||||
|
||||
func (p *parser) parseAttributeSelector() (attr css_ast.SSAttribute, ok bool) {
|
||||
// Parse the namespaced name
|
||||
switch p.current().Kind {
|
||||
case css_lexer.TDelimBar, css_lexer.TDelimAsterisk:
|
||||
// "[|x]"
|
||||
// "[*|x]"
|
||||
if p.peek(css_lexer.TDelimAsterisk) {
|
||||
prefix := p.nameToken()
|
||||
p.advance()
|
||||
attr.NamespacedName.NamespacePrefix = &prefix
|
||||
} else {
|
||||
// "[|attr]" is equivalent to "[attr]". From the specification:
|
||||
// "In keeping with the Namespaces in the XML recommendation, default
|
||||
// namespaces do not apply to attributes, therefore attribute selectors
|
||||
// without a namespace component apply only to attributes that have no
|
||||
// namespace (equivalent to |attr)."
|
||||
}
|
||||
if !p.expect(css_lexer.TDelimBar) {
|
||||
return
|
||||
}
|
||||
attr.NamespacedName.Name = p.nameToken()
|
||||
if !p.expect(css_lexer.TIdent) {
|
||||
return
|
||||
}
|
||||
|
||||
default:
|
||||
// "[x]"
|
||||
// "[x|y]"
|
||||
attr.NamespacedName.Name = p.nameToken()
|
||||
if !p.expect(css_lexer.TIdent) {
|
||||
return
|
||||
}
|
||||
if p.next().Kind != css_lexer.TDelimEquals && p.eat(css_lexer.TDelimBar) {
|
||||
prefix := attr.NamespacedName.Name
|
||||
attr.NamespacedName.NamespacePrefix = &prefix
|
||||
attr.NamespacedName.Name = p.nameToken()
|
||||
if !p.expect(css_lexer.TIdent) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the optional matcher operator
|
||||
p.eat(css_lexer.TWhitespace)
|
||||
if p.eat(css_lexer.TDelimEquals) {
|
||||
attr.MatcherOp = "="
|
||||
} else {
|
||||
switch p.current().Kind {
|
||||
case css_lexer.TDelimTilde:
|
||||
attr.MatcherOp = "~="
|
||||
case css_lexer.TDelimBar:
|
||||
attr.MatcherOp = "|="
|
||||
case css_lexer.TDelimCaret:
|
||||
attr.MatcherOp = "^="
|
||||
case css_lexer.TDelimDollar:
|
||||
attr.MatcherOp = "$="
|
||||
case css_lexer.TDelimAsterisk:
|
||||
attr.MatcherOp = "*="
|
||||
}
|
||||
if attr.MatcherOp != "" {
|
||||
p.advance()
|
||||
p.expect(css_lexer.TDelimEquals)
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the optional matcher value
|
||||
if attr.MatcherOp != "" {
|
||||
p.eat(css_lexer.TWhitespace)
|
||||
if !p.peek(css_lexer.TString) && !p.peek(css_lexer.TIdent) {
|
||||
p.unexpected()
|
||||
}
|
||||
attr.MatcherValue = p.decoded()
|
||||
p.advance()
|
||||
p.eat(css_lexer.TWhitespace)
|
||||
if p.peek(css_lexer.TIdent) {
|
||||
if modifier := p.decoded(); len(modifier) == 1 {
|
||||
if c := modifier[0]; c == 'i' || c == 'I' || c == 's' || c == 'S' {
|
||||
attr.MatcherModifier = c
|
||||
p.advance()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
p.expect(css_lexer.TCloseBracket)
|
||||
ok = true
|
||||
return
|
||||
}
|
||||
|
||||
func (p *parser) parsePseudoClassSelector() css_ast.SSPseudoClass {
|
||||
p.advance()
|
||||
|
||||
if p.peek(css_lexer.TFunction) {
|
||||
text := p.decoded()
|
||||
p.advance()
|
||||
args := p.convertTokens(p.parseAnyValue())
|
||||
p.expect(css_lexer.TCloseParen)
|
||||
return css_ast.SSPseudoClass{Name: text, Args: args}
|
||||
}
|
||||
|
||||
name := p.decoded()
|
||||
sel := css_ast.SSPseudoClass{}
|
||||
if p.expect(css_lexer.TIdent) {
|
||||
sel.Name = name
|
||||
}
|
||||
return sel
|
||||
}
|
||||
|
||||
func (p *parser) parseAnyValue() []css_lexer.Token {
|
||||
// Reference: https://drafts.csswg.org/css-syntax-3/#typedef-declaration-value
|
||||
|
||||
p.stack = p.stack[:0] // Reuse allocated memory
|
||||
start := p.index
|
||||
|
||||
loop:
|
||||
for {
|
||||
switch p.current().Kind {
|
||||
case css_lexer.TCloseParen, css_lexer.TCloseBracket, css_lexer.TCloseBrace:
|
||||
last := len(p.stack) - 1
|
||||
if last < 0 || !p.peek(p.stack[last]) {
|
||||
break loop
|
||||
}
|
||||
p.stack = p.stack[:last]
|
||||
|
||||
case css_lexer.TSemicolon, css_lexer.TDelimExclamation:
|
||||
if len(p.stack) == 0 {
|
||||
break loop
|
||||
}
|
||||
|
||||
case css_lexer.TOpenParen, css_lexer.TFunction:
|
||||
p.stack = append(p.stack, css_lexer.TCloseParen)
|
||||
|
||||
case css_lexer.TOpenBracket:
|
||||
p.stack = append(p.stack, css_lexer.TCloseBracket)
|
||||
|
||||
case css_lexer.TOpenBrace:
|
||||
p.stack = append(p.stack, css_lexer.TCloseBrace)
|
||||
}
|
||||
|
||||
p.advance()
|
||||
}
|
||||
|
||||
tokens := p.tokens[start:p.index]
|
||||
if len(tokens) == 0 {
|
||||
p.unexpected()
|
||||
}
|
||||
return tokens
|
||||
}
|
||||
|
||||
func (p *parser) parseCombinator() string {
|
||||
switch p.current().Kind {
|
||||
case css_lexer.TDelimGreaterThan:
|
||||
p.advance()
|
||||
return ">"
|
||||
|
||||
case css_lexer.TDelimPlus:
|
||||
p.advance()
|
||||
return "+"
|
||||
|
||||
case css_lexer.TDelimTilde:
|
||||
p.advance()
|
||||
return "~"
|
||||
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
575
vendor/github.com/evanw/esbuild/internal/css_parser/css_reduce_calc.go
generated
vendored
575
vendor/github.com/evanw/esbuild/internal/css_parser/css_reduce_calc.go
generated
vendored
@ -1,575 +0,0 @@
|
||||
package css_parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/evanw/esbuild/internal/css_ast"
|
||||
"github.com/evanw/esbuild/internal/css_lexer"
|
||||
)
|
||||
|
||||
func (p *parser) tryToReduceCalcExpression(token css_ast.Token) css_ast.Token {
|
||||
if term := tryToParseCalcTerm(*token.Children); term != nil {
|
||||
whitespace := css_ast.WhitespaceBefore | css_ast.WhitespaceAfter
|
||||
if p.options.RemoveWhitespace {
|
||||
whitespace = 0
|
||||
}
|
||||
term = term.partiallySimplify()
|
||||
if result, ok := term.convertToToken(whitespace); ok {
|
||||
if result.Kind == css_lexer.TOpenParen {
|
||||
result.Kind = css_lexer.TFunction
|
||||
result.Text = "calc"
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
||||
return token
|
||||
}
|
||||
|
||||
// See: https://www.w3.org/TR/css-values-4/#calc-internal
|
||||
type calcTerm interface {
|
||||
convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool)
|
||||
partiallySimplify() calcTerm
|
||||
}
|
||||
|
||||
type calcSum struct {
|
||||
terms []calcTerm
|
||||
}
|
||||
|
||||
type calcProduct struct {
|
||||
terms []calcTerm
|
||||
}
|
||||
|
||||
type calcNegate struct {
|
||||
term calcTerm
|
||||
}
|
||||
|
||||
type calcInvert struct {
|
||||
term calcTerm
|
||||
}
|
||||
|
||||
type calcNumeric struct {
|
||||
number float64
|
||||
unit string
|
||||
}
|
||||
|
||||
type calcValue struct {
|
||||
token css_ast.Token
|
||||
isInvalidPlusOrMinus bool
|
||||
}
|
||||
|
||||
func floatToStringForCalc(a float64) (string, bool) {
|
||||
// Handle non-finite cases
|
||||
if math.IsNaN(a) || math.IsInf(a, 0) {
|
||||
return "", false
|
||||
}
|
||||
|
||||
// Print the number as a string
|
||||
text := fmt.Sprintf("%.05f", a)
|
||||
for text[len(text)-1] == '0' {
|
||||
text = text[:len(text)-1]
|
||||
}
|
||||
if text[len(text)-1] == '.' {
|
||||
text = text[:len(text)-1]
|
||||
}
|
||||
if strings.HasPrefix(text, "0.") {
|
||||
text = text[1:]
|
||||
} else if strings.HasPrefix(text, "-0.") {
|
||||
text = "-" + text[2:]
|
||||
}
|
||||
|
||||
// Bail if the number is not exactly represented
|
||||
if number, err := strconv.ParseFloat(text, 64); err != nil || number != a {
|
||||
return "", false
|
||||
}
|
||||
|
||||
return text, true
|
||||
}
|
||||
|
||||
func (c *calcSum) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
|
||||
tokens := make([]css_ast.Token, 0, len(c.terms)*2)
|
||||
|
||||
// ALGORITHM DEVIATION: Avoid parenthesizing product nodes inside sum nodes
|
||||
if product, ok := c.terms[0].(*calcProduct); ok {
|
||||
token, ok := product.convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
tokens = append(tokens, *token.Children...)
|
||||
} else {
|
||||
token, ok := c.terms[0].convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
tokens = append(tokens, token)
|
||||
}
|
||||
|
||||
for _, term := range c.terms[1:] {
|
||||
// If child is a Negate node, append " - " to s, then serialize the Negate’s child and append the result to s.
|
||||
if negate, ok := term.(*calcNegate); ok {
|
||||
token, ok := negate.term.convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
tokens = append(tokens, css_ast.Token{
|
||||
Kind: css_lexer.TDelimMinus,
|
||||
Text: "-",
|
||||
Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter,
|
||||
}, token)
|
||||
continue
|
||||
}
|
||||
|
||||
// If child is a negative numeric value, append " - " to s, then serialize the negation of child as normal and append the result to s.
|
||||
if numeric, ok := term.(*calcNumeric); ok && numeric.number < 0 {
|
||||
clone := *numeric
|
||||
clone.number = -clone.number
|
||||
token, ok := clone.convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
tokens = append(tokens, css_ast.Token{
|
||||
Kind: css_lexer.TDelimMinus,
|
||||
Text: "-",
|
||||
Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter,
|
||||
}, token)
|
||||
continue
|
||||
}
|
||||
|
||||
// Otherwise, append " + " to s, then serialize child and append the result to s.
|
||||
tokens = append(tokens, css_ast.Token{
|
||||
Kind: css_lexer.TDelimPlus,
|
||||
Text: "+",
|
||||
Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter,
|
||||
})
|
||||
|
||||
// ALGORITHM DEVIATION: Avoid parenthesizing product nodes inside sum nodes
|
||||
if product, ok := term.(*calcProduct); ok {
|
||||
token, ok := product.convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
tokens = append(tokens, *token.Children...)
|
||||
} else {
|
||||
token, ok := term.convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
tokens = append(tokens, token)
|
||||
}
|
||||
}
|
||||
|
||||
return css_ast.Token{
|
||||
Kind: css_lexer.TOpenParen,
|
||||
Text: "(",
|
||||
Children: &tokens,
|
||||
}, true
|
||||
}
|
||||
|
||||
func (c *calcProduct) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
|
||||
tokens := make([]css_ast.Token, 0, len(c.terms)*2)
|
||||
token, ok := c.terms[0].convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
tokens = append(tokens, token)
|
||||
|
||||
for _, term := range c.terms[1:] {
|
||||
// If child is an Invert node, append " / " to s, then serialize the Invert’s child and append the result to s.
|
||||
if invert, ok := term.(*calcInvert); ok {
|
||||
token, ok := invert.term.convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
tokens = append(tokens, css_ast.Token{
|
||||
Kind: css_lexer.TDelimSlash,
|
||||
Text: "/",
|
||||
Whitespace: whitespace,
|
||||
}, token)
|
||||
continue
|
||||
}
|
||||
|
||||
// Otherwise, append " * " to s, then serialize child and append the result to s.
|
||||
token, ok := term.convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
tokens = append(tokens, css_ast.Token{
|
||||
Kind: css_lexer.TDelimAsterisk,
|
||||
Text: "*",
|
||||
Whitespace: whitespace,
|
||||
}, token)
|
||||
}
|
||||
|
||||
return css_ast.Token{
|
||||
Kind: css_lexer.TOpenParen,
|
||||
Text: "(",
|
||||
Children: &tokens,
|
||||
}, true
|
||||
}
|
||||
|
||||
func (c *calcNegate) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
|
||||
token, ok := c.term.convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
return css_ast.Token{
|
||||
Kind: css_lexer.TOpenParen,
|
||||
Text: "(",
|
||||
Children: &[]css_ast.Token{
|
||||
{Kind: css_lexer.TNumber, Text: "-1"},
|
||||
{Kind: css_lexer.TDelimSlash, Text: "*", Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter},
|
||||
token,
|
||||
},
|
||||
}, true
|
||||
}
|
||||
|
||||
func (c *calcInvert) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
|
||||
token, ok := c.term.convertToToken(whitespace)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
return css_ast.Token{
|
||||
Kind: css_lexer.TOpenParen,
|
||||
Text: "(",
|
||||
Children: &[]css_ast.Token{
|
||||
{Kind: css_lexer.TNumber, Text: "1"},
|
||||
{Kind: css_lexer.TDelimSlash, Text: "/", Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter},
|
||||
token,
|
||||
},
|
||||
}, true
|
||||
}
|
||||
|
||||
func (c *calcNumeric) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
||||
text, ok := floatToStringForCalc(c.number)
|
||||
if !ok {
|
||||
return css_ast.Token{}, false
|
||||
}
|
||||
if c.unit == "" {
|
||||
return css_ast.Token{
|
||||
Kind: css_lexer.TNumber,
|
||||
Text: text,
|
||||
}, true
|
||||
} else if c.unit == "%" {
|
||||
return css_ast.Token{
|
||||
Kind: css_lexer.TPercentage,
|
||||
Text: text + "%",
|
||||
}, true
|
||||
} else {
|
||||
return css_ast.Token{
|
||||
Kind: css_lexer.TDimension,
|
||||
Text: text + c.unit,
|
||||
UnitOffset: uint16(len(text)),
|
||||
}, true
|
||||
}
|
||||
}
|
||||
|
||||
func (c *calcValue) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
||||
t := c.token
|
||||
t.Whitespace = 0
|
||||
return t, true
|
||||
}
|
||||
|
||||
func (c *calcSum) partiallySimplify() calcTerm {
|
||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
|
||||
|
||||
// For each of root’s children that are Sum nodes, replace them with their children.
|
||||
terms := make([]calcTerm, 0, len(c.terms))
|
||||
for _, term := range c.terms {
|
||||
term = term.partiallySimplify()
|
||||
if sum, ok := term.(*calcSum); ok {
|
||||
terms = append(terms, sum.terms...)
|
||||
} else {
|
||||
terms = append(terms, term)
|
||||
}
|
||||
}
|
||||
|
||||
// For each set of root’s children that are numeric values with identical units, remove
|
||||
// those children and replace them with a single numeric value containing the sum of the
|
||||
// removed nodes, and with the same unit. (E.g. combine numbers, combine percentages,
|
||||
// combine px values, etc.)
|
||||
for i := 0; i < len(terms); i++ {
|
||||
term := terms[i]
|
||||
if numeric, ok := term.(*calcNumeric); ok {
|
||||
end := i + 1
|
||||
for j := end; j < len(terms); j++ {
|
||||
term2 := terms[j]
|
||||
if numeric2, ok := term2.(*calcNumeric); ok && numeric2.unit == numeric.unit {
|
||||
numeric.number += numeric2.number
|
||||
} else {
|
||||
terms[end] = term2
|
||||
end++
|
||||
}
|
||||
}
|
||||
terms = terms[:end]
|
||||
}
|
||||
}
|
||||
|
||||
// If root has only a single child at this point, return the child.
|
||||
if len(terms) == 1 {
|
||||
return terms[0]
|
||||
}
|
||||
|
||||
// Otherwise, return root.
|
||||
c.terms = terms
|
||||
return c
|
||||
}
|
||||
|
||||
func (c *calcProduct) partiallySimplify() calcTerm {
|
||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
|
||||
|
||||
// For each of root’s children that are Product nodes, replace them with their children.
|
||||
terms := make([]calcTerm, 0, len(c.terms))
|
||||
for _, term := range c.terms {
|
||||
term = term.partiallySimplify()
|
||||
if product, ok := term.(*calcProduct); ok {
|
||||
terms = append(terms, product.terms...)
|
||||
} else {
|
||||
terms = append(terms, term)
|
||||
}
|
||||
}
|
||||
|
||||
// If root has multiple children that are numbers (not percentages or dimensions), remove
|
||||
// them and replace them with a single number containing the product of the removed nodes.
|
||||
for i, term := range terms {
|
||||
if numeric, ok := term.(*calcNumeric); ok && numeric.unit == "" {
|
||||
end := i + 1
|
||||
for j := end; j < len(terms); j++ {
|
||||
term2 := terms[j]
|
||||
if numeric2, ok := term2.(*calcNumeric); ok && numeric2.unit == "" {
|
||||
numeric.number *= numeric2.number
|
||||
} else {
|
||||
terms[end] = term2
|
||||
end++
|
||||
}
|
||||
}
|
||||
terms = terms[:end]
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If root contains only numeric values and/or Invert nodes containing numeric values,
|
||||
// and multiplying the types of all the children (noting that the type of an Invert
|
||||
// node is the inverse of its child’s type) results in a type that matches any of the
|
||||
// types that a math function can resolve to, return the result of multiplying all the
|
||||
// values of the children (noting that the value of an Invert node is the reciprocal
|
||||
// of its child’s value), expressed in the result’s canonical unit.
|
||||
if len(terms) == 2 {
|
||||
// Right now, only handle the case of two numbers, one of which has no unit
|
||||
if first, ok := terms[0].(*calcNumeric); ok {
|
||||
if second, ok := terms[1].(*calcNumeric); ok {
|
||||
if first.unit == "" {
|
||||
second.number *= first.number
|
||||
return second
|
||||
}
|
||||
if second.unit == "" {
|
||||
first.number *= second.number
|
||||
return first
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ALGORITHM DEVIATION: Divide instead of multiply if the reciprocal is shorter
|
||||
for i := 1; i < len(terms); i++ {
|
||||
if numeric, ok := terms[i].(*calcNumeric); ok {
|
||||
reciprocal := 1 / numeric.number
|
||||
if multiply, ok := floatToStringForCalc(numeric.number); ok {
|
||||
if divide, ok := floatToStringForCalc(reciprocal); ok && len(divide) < len(multiply) {
|
||||
numeric.number = reciprocal
|
||||
terms[i] = &calcInvert{term: numeric}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If root has only a single child at this point, return the child.
|
||||
if len(terms) == 1 {
|
||||
return terms[0]
|
||||
}
|
||||
|
||||
// Otherwise, return root.
|
||||
c.terms = terms
|
||||
return c
|
||||
}
|
||||
|
||||
func (c *calcNegate) partiallySimplify() calcTerm {
|
||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
|
||||
|
||||
c.term = c.term.partiallySimplify()
|
||||
|
||||
// If root’s child is a numeric value, return an equivalent numeric value, but with the value negated (0 - value).
|
||||
if numeric, ok := c.term.(*calcNumeric); ok {
|
||||
numeric.number = -numeric.number
|
||||
return numeric
|
||||
}
|
||||
|
||||
// If root’s child is a Negate node, return the child’s child.
|
||||
if negate, ok := c.term.(*calcNegate); ok {
|
||||
return negate.term
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
func (c *calcInvert) partiallySimplify() calcTerm {
|
||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
|
||||
|
||||
c.term = c.term.partiallySimplify()
|
||||
|
||||
// If root’s child is a number (not a percentage or dimension) return the reciprocal of the child’s value.
|
||||
if numeric, ok := c.term.(*calcNumeric); ok && numeric.unit == "" {
|
||||
numeric.number = 1 / numeric.number
|
||||
return numeric
|
||||
}
|
||||
|
||||
// If root’s child is an Invert node, return the child’s child.
|
||||
if invert, ok := c.term.(*calcInvert); ok {
|
||||
return invert.term
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
func (c *calcNumeric) partiallySimplify() calcTerm {
|
||||
return c
|
||||
}
|
||||
|
||||
func (c *calcValue) partiallySimplify() calcTerm {
|
||||
return c
|
||||
}
|
||||
|
||||
func tryToParseCalcTerm(tokens []css_ast.Token) calcTerm {
|
||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-internal
|
||||
terms := make([]calcTerm, len(tokens))
|
||||
|
||||
for i, token := range tokens {
|
||||
var term calcTerm
|
||||
if token.Kind == css_lexer.TFunction && token.Text == "var" {
|
||||
// Using "var()" should bail because it can expand to any number of tokens
|
||||
return nil
|
||||
} else if token.Kind == css_lexer.TOpenParen || (token.Kind == css_lexer.TFunction && token.Text == "calc") {
|
||||
term = tryToParseCalcTerm(*token.Children)
|
||||
if term == nil {
|
||||
return nil
|
||||
}
|
||||
} else if token.Kind == css_lexer.TNumber {
|
||||
if number, err := strconv.ParseFloat(token.Text, 64); err == nil {
|
||||
term = &calcNumeric{number: number}
|
||||
} else {
|
||||
term = &calcValue{token: token}
|
||||
}
|
||||
} else if token.Kind == css_lexer.TPercentage {
|
||||
if number, err := strconv.ParseFloat(token.PercentageValue(), 64); err == nil {
|
||||
term = &calcNumeric{number: number, unit: "%"}
|
||||
} else {
|
||||
term = &calcValue{token: token}
|
||||
}
|
||||
} else if token.Kind == css_lexer.TDimension {
|
||||
if number, err := strconv.ParseFloat(token.DimensionValue(), 64); err == nil {
|
||||
term = &calcNumeric{number: number, unit: token.DimensionUnit()}
|
||||
} else {
|
||||
term = &calcValue{token: token}
|
||||
}
|
||||
} else if token.Kind == css_lexer.TIdent && strings.EqualFold(token.Text, "Infinity") {
|
||||
term = &calcNumeric{number: math.Inf(1)}
|
||||
} else if token.Kind == css_lexer.TIdent && strings.EqualFold(token.Text, "-Infinity") {
|
||||
term = &calcNumeric{number: math.Inf(-1)}
|
||||
} else if token.Kind == css_lexer.TIdent && strings.EqualFold(token.Text, "NaN") {
|
||||
term = &calcNumeric{number: math.NaN()}
|
||||
} else {
|
||||
term = &calcValue{
|
||||
token: token,
|
||||
|
||||
// From the specification: "In addition, whitespace is required on both sides of the
|
||||
// + and - operators. (The * and / operators can be used without white space around them.)"
|
||||
isInvalidPlusOrMinus: i > 0 && i+1 < len(tokens) &&
|
||||
(token.Kind == css_lexer.TDelimPlus || token.Kind == css_lexer.TDelimMinus) &&
|
||||
(((token.Whitespace&css_ast.WhitespaceBefore) == 0 && (tokens[i-1].Whitespace&css_ast.WhitespaceAfter) == 0) ||
|
||||
(token.Whitespace&css_ast.WhitespaceAfter) == 0 && (tokens[i+1].Whitespace&css_ast.WhitespaceBefore) == 0),
|
||||
}
|
||||
}
|
||||
terms[i] = term
|
||||
}
|
||||
|
||||
// Collect children into Product and Invert nodes
|
||||
first := 1
|
||||
for first+1 < len(terms) {
|
||||
// If this is a "*" or "/" operator
|
||||
if value, ok := terms[first].(*calcValue); ok && (value.token.Kind == css_lexer.TDelimAsterisk || value.token.Kind == css_lexer.TDelimSlash) {
|
||||
// Scan over the run
|
||||
last := first
|
||||
for last+3 < len(terms) {
|
||||
if value, ok := terms[last+2].(*calcValue); ok && (value.token.Kind == css_lexer.TDelimAsterisk || value.token.Kind == css_lexer.TDelimSlash) {
|
||||
last += 2
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Generate a node for the run
|
||||
product := calcProduct{terms: make([]calcTerm, (last-first)/2+2)}
|
||||
for i := range product.terms {
|
||||
term := terms[first+i*2-1]
|
||||
if i > 0 && terms[first+i*2-2].(*calcValue).token.Kind == css_lexer.TDelimSlash {
|
||||
term = &calcInvert{term: term}
|
||||
}
|
||||
product.terms[i] = term
|
||||
}
|
||||
|
||||
// Replace the run with a single node
|
||||
terms[first-1] = &product
|
||||
terms = append(terms[:first], terms[last+2:]...)
|
||||
continue
|
||||
}
|
||||
|
||||
first++
|
||||
}
|
||||
|
||||
// Collect children into Sum and Negate nodes
|
||||
first = 1
|
||||
for first+1 < len(terms) {
|
||||
// If this is a "+" or "-" operator
|
||||
if value, ok := terms[first].(*calcValue); ok && !value.isInvalidPlusOrMinus &&
|
||||
(value.token.Kind == css_lexer.TDelimPlus || value.token.Kind == css_lexer.TDelimMinus) {
|
||||
// Scan over the run
|
||||
last := first
|
||||
for last+3 < len(terms) {
|
||||
if value, ok := terms[last+2].(*calcValue); ok && !value.isInvalidPlusOrMinus &&
|
||||
(value.token.Kind == css_lexer.TDelimPlus || value.token.Kind == css_lexer.TDelimMinus) {
|
||||
last += 2
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Generate a node for the run
|
||||
sum := calcSum{terms: make([]calcTerm, (last-first)/2+2)}
|
||||
for i := range sum.terms {
|
||||
term := terms[first+i*2-1]
|
||||
if i > 0 && terms[first+i*2-2].(*calcValue).token.Kind == css_lexer.TDelimMinus {
|
||||
term = &calcNegate{term: term}
|
||||
}
|
||||
sum.terms[i] = term
|
||||
}
|
||||
|
||||
// Replace the run with a single node
|
||||
terms[first-1] = &sum
|
||||
terms = append(terms[:first], terms[last+2:]...)
|
||||
continue
|
||||
}
|
||||
|
||||
first++
|
||||
}
|
||||
|
||||
// This only succeeds if everything reduces to a single term
|
||||
if len(terms) == 1 {
|
||||
return terms[0]
|
||||
}
|
||||
return nil
|
||||
}
|
725
vendor/github.com/evanw/esbuild/internal/css_printer/css_printer.go
generated
vendored
725
vendor/github.com/evanw/esbuild/internal/css_printer/css_printer.go
generated
vendored
@ -1,725 +0,0 @@
|
||||
package css_printer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/evanw/esbuild/internal/ast"
|
||||
"github.com/evanw/esbuild/internal/config"
|
||||
"github.com/evanw/esbuild/internal/css_ast"
|
||||
"github.com/evanw/esbuild/internal/css_lexer"
|
||||
"github.com/evanw/esbuild/internal/helpers"
|
||||
"github.com/evanw/esbuild/internal/sourcemap"
|
||||
)
|
||||
|
||||
const quoteForURL byte = 0
|
||||
|
||||
type printer struct {
|
||||
options Options
|
||||
importRecords []ast.ImportRecord
|
||||
css []byte
|
||||
extractedLegalComments map[string]bool
|
||||
builder sourcemap.ChunkBuilder
|
||||
}
|
||||
|
||||
type Options struct {
|
||||
RemoveWhitespace bool
|
||||
ASCIIOnly bool
|
||||
AddSourceMappings bool
|
||||
LegalComments config.LegalComments
|
||||
|
||||
// If we're writing out a source map, this table of line start indices lets
|
||||
// us do binary search on to figure out what line a given AST node came from
|
||||
LineOffsetTables []sourcemap.LineOffsetTable
|
||||
|
||||
// This will be present if the input file had a source map. In that case we
|
||||
// want to map all the way back to the original input file(s).
|
||||
InputSourceMap *sourcemap.SourceMap
|
||||
}
|
||||
|
||||
type PrintResult struct {
|
||||
CSS []byte
|
||||
ExtractedLegalComments map[string]bool
|
||||
SourceMapChunk sourcemap.Chunk
|
||||
}
|
||||
|
||||
func Print(tree css_ast.AST, options Options) PrintResult {
|
||||
p := printer{
|
||||
options: options,
|
||||
importRecords: tree.ImportRecords,
|
||||
builder: sourcemap.MakeChunkBuilder(options.InputSourceMap, options.LineOffsetTables),
|
||||
}
|
||||
for _, rule := range tree.Rules {
|
||||
p.printRule(rule, 0, false)
|
||||
}
|
||||
return PrintResult{
|
||||
CSS: p.css,
|
||||
ExtractedLegalComments: p.extractedLegalComments,
|
||||
SourceMapChunk: p.builder.GenerateChunk(p.css),
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) printRule(rule css_ast.Rule, indent int32, omitTrailingSemicolon bool) {
|
||||
if r, ok := rule.Data.(*css_ast.RComment); ok {
|
||||
switch p.options.LegalComments {
|
||||
case config.LegalCommentsNone:
|
||||
return
|
||||
|
||||
case config.LegalCommentsEndOfFile,
|
||||
config.LegalCommentsLinkedWithComment,
|
||||
config.LegalCommentsExternalWithoutComment:
|
||||
if p.extractedLegalComments == nil {
|
||||
p.extractedLegalComments = make(map[string]bool)
|
||||
}
|
||||
p.extractedLegalComments[r.Text] = true
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if p.options.AddSourceMappings {
|
||||
p.builder.AddSourceMapping(rule.Loc, p.css)
|
||||
}
|
||||
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.printIndent(indent)
|
||||
}
|
||||
|
||||
switch r := rule.Data.(type) {
|
||||
case *css_ast.RAtCharset:
|
||||
// It's not valid to remove the space in between these two tokens
|
||||
p.print("@charset ")
|
||||
|
||||
// It's not valid to print the string with single quotes
|
||||
p.printQuotedWithQuote(r.Encoding, '"')
|
||||
p.print(";")
|
||||
|
||||
case *css_ast.RAtImport:
|
||||
if p.options.RemoveWhitespace {
|
||||
p.print("@import")
|
||||
} else {
|
||||
p.print("@import ")
|
||||
}
|
||||
p.printQuoted(p.importRecords[r.ImportRecordIndex].Path.Text)
|
||||
p.printTokens(r.ImportConditions, printTokensOpts{})
|
||||
p.print(";")
|
||||
|
||||
case *css_ast.RAtKeyframes:
|
||||
p.print("@")
|
||||
p.printIdent(r.AtToken, identNormal, mayNeedWhitespaceAfter)
|
||||
p.print(" ")
|
||||
if r.Name == "" {
|
||||
p.print("\"\"")
|
||||
} else {
|
||||
p.printIdent(r.Name, identNormal, canDiscardWhitespaceAfter)
|
||||
}
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.print(" ")
|
||||
}
|
||||
if p.options.RemoveWhitespace {
|
||||
p.print("{")
|
||||
} else {
|
||||
p.print("{\n")
|
||||
}
|
||||
indent++
|
||||
for _, block := range r.Blocks {
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.printIndent(indent)
|
||||
}
|
||||
for i, sel := range block.Selectors {
|
||||
if i > 0 {
|
||||
if p.options.RemoveWhitespace {
|
||||
p.print(",")
|
||||
} else {
|
||||
p.print(", ")
|
||||
}
|
||||
}
|
||||
p.print(sel)
|
||||
}
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.print(" ")
|
||||
}
|
||||
p.printRuleBlock(block.Rules, indent)
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.print("\n")
|
||||
}
|
||||
}
|
||||
indent--
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.printIndent(indent)
|
||||
}
|
||||
p.print("}")
|
||||
|
||||
case *css_ast.RKnownAt:
|
||||
p.print("@")
|
||||
whitespace := mayNeedWhitespaceAfter
|
||||
if len(r.Prelude) == 0 {
|
||||
whitespace = canDiscardWhitespaceAfter
|
||||
}
|
||||
p.printIdent(r.AtToken, identNormal, whitespace)
|
||||
if !p.options.RemoveWhitespace || len(r.Prelude) > 0 {
|
||||
p.print(" ")
|
||||
}
|
||||
p.printTokens(r.Prelude, printTokensOpts{})
|
||||
if !p.options.RemoveWhitespace && len(r.Prelude) > 0 {
|
||||
p.print(" ")
|
||||
}
|
||||
p.printRuleBlock(r.Rules, indent)
|
||||
|
||||
case *css_ast.RUnknownAt:
|
||||
p.print("@")
|
||||
whitespace := mayNeedWhitespaceAfter
|
||||
if len(r.Prelude) == 0 {
|
||||
whitespace = canDiscardWhitespaceAfter
|
||||
}
|
||||
p.printIdent(r.AtToken, identNormal, whitespace)
|
||||
if (!p.options.RemoveWhitespace && r.Block != nil) || len(r.Prelude) > 0 {
|
||||
p.print(" ")
|
||||
}
|
||||
p.printTokens(r.Prelude, printTokensOpts{})
|
||||
if !p.options.RemoveWhitespace && r.Block != nil && len(r.Prelude) > 0 {
|
||||
p.print(" ")
|
||||
}
|
||||
if r.Block == nil {
|
||||
p.print(";")
|
||||
} else {
|
||||
p.printTokens(r.Block, printTokensOpts{})
|
||||
}
|
||||
|
||||
case *css_ast.RSelector:
|
||||
p.printComplexSelectors(r.Selectors, indent)
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.print(" ")
|
||||
}
|
||||
p.printRuleBlock(r.Rules, indent)
|
||||
|
||||
case *css_ast.RQualified:
|
||||
hasWhitespaceAfter := p.printTokens(r.Prelude, printTokensOpts{})
|
||||
if !hasWhitespaceAfter && !p.options.RemoveWhitespace {
|
||||
p.print(" ")
|
||||
}
|
||||
p.printRuleBlock(r.Rules, indent)
|
||||
|
||||
case *css_ast.RDeclaration:
|
||||
p.printIdent(r.KeyText, identNormal, canDiscardWhitespaceAfter)
|
||||
p.print(":")
|
||||
hasWhitespaceAfter := p.printTokens(r.Value, printTokensOpts{
|
||||
indent: indent,
|
||||
isDeclaration: true,
|
||||
})
|
||||
if r.Important {
|
||||
if !hasWhitespaceAfter && !p.options.RemoveWhitespace && len(r.Value) > 0 {
|
||||
p.print(" ")
|
||||
}
|
||||
p.print("!important")
|
||||
}
|
||||
if !omitTrailingSemicolon {
|
||||
p.print(";")
|
||||
}
|
||||
|
||||
case *css_ast.RBadDeclaration:
|
||||
p.printTokens(r.Tokens, printTokensOpts{})
|
||||
if !omitTrailingSemicolon {
|
||||
p.print(";")
|
||||
}
|
||||
|
||||
case *css_ast.RComment:
|
||||
p.printIndentedComment(indent, r.Text)
|
||||
|
||||
default:
|
||||
panic("Internal error")
|
||||
}
|
||||
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.print("\n")
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) printIndentedComment(indent int32, text string) {
|
||||
// Avoid generating a comment containing the character sequence "</style"
|
||||
text = helpers.EscapeClosingTag(text, "/style")
|
||||
|
||||
// Re-indent multi-line comments
|
||||
for {
|
||||
newline := strings.IndexByte(text, '\n')
|
||||
if newline == -1 {
|
||||
break
|
||||
}
|
||||
p.print(text[:newline+1])
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.printIndent(indent)
|
||||
}
|
||||
text = text[newline+1:]
|
||||
}
|
||||
p.print(text)
|
||||
}
|
||||
|
||||
func (p *printer) printRuleBlock(rules []css_ast.Rule, indent int32) {
|
||||
if p.options.RemoveWhitespace {
|
||||
p.print("{")
|
||||
} else {
|
||||
p.print("{\n")
|
||||
}
|
||||
|
||||
for i, decl := range rules {
|
||||
omitTrailingSemicolon := p.options.RemoveWhitespace && i+1 == len(rules)
|
||||
p.printRule(decl, indent+1, omitTrailingSemicolon)
|
||||
}
|
||||
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.printIndent(indent)
|
||||
}
|
||||
p.print("}")
|
||||
}
|
||||
|
||||
func (p *printer) printComplexSelectors(selectors []css_ast.ComplexSelector, indent int32) {
|
||||
for i, complex := range selectors {
|
||||
if i > 0 {
|
||||
if p.options.RemoveWhitespace {
|
||||
p.print(",")
|
||||
} else {
|
||||
p.print(",\n")
|
||||
p.printIndent(indent)
|
||||
}
|
||||
}
|
||||
|
||||
for j, compound := range complex.Selectors {
|
||||
p.printCompoundSelector(compound, j == 0, j+1 == len(complex.Selectors))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) printCompoundSelector(sel css_ast.CompoundSelector, isFirst bool, isLast bool) {
|
||||
if !isFirst && sel.Combinator == "" {
|
||||
// A space is required in between compound selectors if there is no
|
||||
// combinator in the middle. It's fine to convert "a + b" into "a+b"
|
||||
// but not to convert "a b" into "ab".
|
||||
p.print(" ")
|
||||
}
|
||||
|
||||
if sel.HasNestPrefix {
|
||||
p.print("&")
|
||||
}
|
||||
|
||||
if sel.Combinator != "" {
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.print(" ")
|
||||
}
|
||||
p.print(sel.Combinator)
|
||||
if !p.options.RemoveWhitespace {
|
||||
p.print(" ")
|
||||
}
|
||||
}
|
||||
|
||||
if sel.TypeSelector != nil {
|
||||
whitespace := mayNeedWhitespaceAfter
|
||||
if len(sel.SubclassSelectors) > 0 {
|
||||
// There is no chance of whitespace before a subclass selector or pseudo
|
||||
// class selector
|
||||
whitespace = canDiscardWhitespaceAfter
|
||||
}
|
||||
p.printNamespacedName(*sel.TypeSelector, whitespace)
|
||||
}
|
||||
|
||||
for i, sub := range sel.SubclassSelectors {
|
||||
whitespace := mayNeedWhitespaceAfter
|
||||
|
||||
// There is no chance of whitespace between subclass selectors
|
||||
if i+1 < len(sel.SubclassSelectors) {
|
||||
whitespace = canDiscardWhitespaceAfter
|
||||
}
|
||||
|
||||
switch s := sub.(type) {
|
||||
case *css_ast.SSHash:
|
||||
p.print("#")
|
||||
|
||||
// This deliberately does not use identHash. From the specification:
|
||||
// "In <id-selector>, the <hash-token>'s value must be an identifier."
|
||||
p.printIdent(s.Name, identNormal, whitespace)
|
||||
|
||||
case *css_ast.SSClass:
|
||||
p.print(".")
|
||||
p.printIdent(s.Name, identNormal, whitespace)
|
||||
|
||||
case *css_ast.SSAttribute:
|
||||
p.print("[")
|
||||
p.printNamespacedName(s.NamespacedName, canDiscardWhitespaceAfter)
|
||||
if s.MatcherOp != "" {
|
||||
p.print(s.MatcherOp)
|
||||
printAsIdent := false
|
||||
|
||||
// Print the value as an identifier if it's possible
|
||||
if css_lexer.WouldStartIdentifierWithoutEscapes(s.MatcherValue) {
|
||||
printAsIdent = true
|
||||
for _, c := range s.MatcherValue {
|
||||
if !css_lexer.IsNameContinue(c) {
|
||||
printAsIdent = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if printAsIdent {
|
||||
p.printIdent(s.MatcherValue, identNormal, canDiscardWhitespaceAfter)
|
||||
} else {
|
||||
p.printQuoted(s.MatcherValue)
|
||||
}
|
||||
}
|
||||
if s.MatcherModifier != 0 {
|
||||
p.print(" ")
|
||||
p.print(string(rune(s.MatcherModifier)))
|
||||
}
|
||||
p.print("]")
|
||||
|
||||
case *css_ast.SSPseudoClass:
|
||||
p.printPseudoClassSelector(*s, whitespace)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) printNamespacedName(nsName css_ast.NamespacedName, whitespace trailingWhitespace) {
|
||||
if nsName.NamespacePrefix != nil {
|
||||
switch nsName.NamespacePrefix.Kind {
|
||||
case css_lexer.TIdent:
|
||||
p.printIdent(nsName.NamespacePrefix.Text, identNormal, canDiscardWhitespaceAfter)
|
||||
case css_lexer.TDelimAsterisk:
|
||||
p.print("*")
|
||||
default:
|
||||
panic("Internal error")
|
||||
}
|
||||
|
||||
p.print("|")
|
||||
}
|
||||
|
||||
switch nsName.Name.Kind {
|
||||
case css_lexer.TIdent:
|
||||
p.printIdent(nsName.Name.Text, identNormal, whitespace)
|
||||
case css_lexer.TDelimAsterisk:
|
||||
p.print("*")
|
||||
case css_lexer.TDelimAmpersand:
|
||||
p.print("&")
|
||||
default:
|
||||
panic("Internal error")
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) printPseudoClassSelector(pseudo css_ast.SSPseudoClass, whitespace trailingWhitespace) {
|
||||
if pseudo.IsElement {
|
||||
p.print("::")
|
||||
} else {
|
||||
p.print(":")
|
||||
}
|
||||
|
||||
if len(pseudo.Args) > 0 {
|
||||
p.printIdent(pseudo.Name, identNormal, canDiscardWhitespaceAfter)
|
||||
p.print("(")
|
||||
p.printTokens(pseudo.Args, printTokensOpts{})
|
||||
p.print(")")
|
||||
} else {
|
||||
p.printIdent(pseudo.Name, identNormal, whitespace)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) print(text string) {
|
||||
p.css = append(p.css, text...)
|
||||
}
|
||||
|
||||
func bestQuoteCharForString(text string, forURL bool) byte {
|
||||
forURLCost := 0
|
||||
singleCost := 2
|
||||
doubleCost := 2
|
||||
|
||||
for _, c := range text {
|
||||
switch c {
|
||||
case '\'':
|
||||
forURLCost++
|
||||
singleCost++
|
||||
|
||||
case '"':
|
||||
forURLCost++
|
||||
doubleCost++
|
||||
|
||||
case '(', ')', ' ', '\t':
|
||||
forURLCost++
|
||||
|
||||
case '\\', '\n', '\r', '\f':
|
||||
forURLCost++
|
||||
singleCost++
|
||||
doubleCost++
|
||||
}
|
||||
}
|
||||
|
||||
// Quotes can sometimes be omitted for URL tokens
|
||||
if forURL && forURLCost < singleCost && forURLCost < doubleCost {
|
||||
return quoteForURL
|
||||
}
|
||||
|
||||
// Prefer double quotes to single quotes if there is no cost difference
|
||||
if singleCost < doubleCost {
|
||||
return '\''
|
||||
}
|
||||
|
||||
return '"'
|
||||
}
|
||||
|
||||
func (p *printer) printQuoted(text string) {
|
||||
p.printQuotedWithQuote(text, bestQuoteCharForString(text, false))
|
||||
}
|
||||
|
||||
type escapeKind uint8
|
||||
|
||||
const (
|
||||
escapeNone escapeKind = iota
|
||||
escapeBackslash
|
||||
escapeHex
|
||||
)
|
||||
|
||||
func (p *printer) printWithEscape(c rune, escape escapeKind, remainingText string, mayNeedWhitespaceAfter bool) {
|
||||
var temp [utf8.UTFMax]byte
|
||||
|
||||
if escape == escapeBackslash && ((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')) {
|
||||
// Hexadecimal characters cannot use a plain backslash escape
|
||||
escape = escapeHex
|
||||
}
|
||||
|
||||
switch escape {
|
||||
case escapeNone:
|
||||
width := utf8.EncodeRune(temp[:], c)
|
||||
p.css = append(p.css, temp[:width]...)
|
||||
|
||||
case escapeBackslash:
|
||||
p.css = append(p.css, '\\')
|
||||
width := utf8.EncodeRune(temp[:], c)
|
||||
p.css = append(p.css, temp[:width]...)
|
||||
|
||||
case escapeHex:
|
||||
text := fmt.Sprintf("\\%x", c)
|
||||
p.css = append(p.css, text...)
|
||||
|
||||
// Make sure the next character is not interpreted as part of the escape sequence
|
||||
if len(text) < 1+6 {
|
||||
if next := utf8.RuneLen(c); next < len(remainingText) {
|
||||
c = rune(remainingText[next])
|
||||
if c == ' ' || c == '\t' || (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F') {
|
||||
p.css = append(p.css, ' ')
|
||||
}
|
||||
} else if mayNeedWhitespaceAfter {
|
||||
// If the last character is a hexadecimal escape, print a space afterwards
|
||||
// for the escape sequence to consume. That way we're sure it won't
|
||||
// accidentally consume a semantically significant space afterward.
|
||||
p.css = append(p.css, ' ')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) printQuotedWithQuote(text string, quote byte) {
|
||||
if quote != quoteForURL {
|
||||
p.css = append(p.css, quote)
|
||||
}
|
||||
|
||||
for i, c := range text {
|
||||
escape := escapeNone
|
||||
|
||||
switch c {
|
||||
case '\x00', '\r', '\n', '\f':
|
||||
// Use a hexadecimal escape for characters that would be invalid escapes
|
||||
escape = escapeHex
|
||||
|
||||
case '\\', rune(quote):
|
||||
escape = escapeBackslash
|
||||
|
||||
case '(', ')', ' ', '\t', '"', '\'':
|
||||
// These characters must be escaped in URL tokens
|
||||
if quote == quoteForURL {
|
||||
escape = escapeBackslash
|
||||
}
|
||||
|
||||
case '/':
|
||||
// Avoid generating the sequence "</style" in CSS code
|
||||
if i >= 1 && text[i-1] == '<' && i+6 <= len(text) && strings.EqualFold(text[i+1:i+6], "style") {
|
||||
escape = escapeBackslash
|
||||
}
|
||||
|
||||
default:
|
||||
if (p.options.ASCIIOnly && c >= 0x80) || c == '\uFEFF' {
|
||||
escape = escapeHex
|
||||
}
|
||||
}
|
||||
|
||||
p.printWithEscape(c, escape, text[i:], false)
|
||||
}
|
||||
|
||||
if quote != quoteForURL {
|
||||
p.css = append(p.css, quote)
|
||||
}
|
||||
}
|
||||
|
||||
type identMode uint8
|
||||
|
||||
const (
|
||||
identNormal identMode = iota
|
||||
identHash
|
||||
identDimensionUnit
|
||||
)
|
||||
|
||||
type trailingWhitespace uint8
|
||||
|
||||
const (
|
||||
mayNeedWhitespaceAfter trailingWhitespace = iota
|
||||
canDiscardWhitespaceAfter
|
||||
)
|
||||
|
||||
func (p *printer) printIdent(text string, mode identMode, whitespace trailingWhitespace) {
|
||||
for i, c := range text {
|
||||
escape := escapeNone
|
||||
|
||||
if p.options.ASCIIOnly && c >= 0x80 {
|
||||
escape = escapeHex
|
||||
} else if c == '\r' || c == '\n' || c == '\f' || c == '\uFEFF' {
|
||||
// Use a hexadecimal escape for characters that would be invalid escapes
|
||||
escape = escapeHex
|
||||
} else {
|
||||
// Escape non-identifier characters
|
||||
if !css_lexer.IsNameContinue(c) {
|
||||
escape = escapeBackslash
|
||||
}
|
||||
|
||||
// Special escape behavior for the first character
|
||||
if i == 0 {
|
||||
switch mode {
|
||||
case identNormal:
|
||||
if !css_lexer.WouldStartIdentifierWithoutEscapes(text) {
|
||||
escape = escapeBackslash
|
||||
}
|
||||
|
||||
case identDimensionUnit:
|
||||
if !css_lexer.WouldStartIdentifierWithoutEscapes(text) {
|
||||
escape = escapeBackslash
|
||||
} else if c >= '0' && c <= '9' {
|
||||
// Unit: "2x"
|
||||
escape = escapeHex
|
||||
} else if c == 'e' || c == 'E' {
|
||||
if len(text) >= 2 && text[1] >= '0' && text[1] <= '9' {
|
||||
// Unit: "e2x"
|
||||
escape = escapeBackslash
|
||||
} else if len(text) >= 3 && text[1] == '-' && text[2] >= '0' && text[2] <= '9' {
|
||||
// Unit: "e-2x"
|
||||
escape = escapeBackslash
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If the last character is a hexadecimal escape, print a space afterwards
|
||||
// for the escape sequence to consume. That way we're sure it won't
|
||||
// accidentally consume a semantically significant space afterward.
|
||||
mayNeedWhitespaceAfter := whitespace == mayNeedWhitespaceAfter && escape != escapeNone && i+utf8.RuneLen(c) == len(text)
|
||||
p.printWithEscape(c, escape, text[i:], mayNeedWhitespaceAfter)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *printer) printIndent(indent int32) {
|
||||
for i, n := 0, int(indent); i < n; i++ {
|
||||
p.css = append(p.css, " "...)
|
||||
}
|
||||
}
|
||||
|
||||
type printTokensOpts struct {
|
||||
indent int32
|
||||
isDeclaration bool
|
||||
}
|
||||
|
||||
func (p *printer) printTokens(tokens []css_ast.Token, opts printTokensOpts) bool {
|
||||
hasWhitespaceAfter := len(tokens) > 0 && (tokens[0].Whitespace&css_ast.WhitespaceBefore) != 0
|
||||
|
||||
// Pretty-print long comma-separated declarations of 3 or more items
|
||||
isMultiLineValue := false
|
||||
if !p.options.RemoveWhitespace && opts.isDeclaration {
|
||||
commaCount := 0
|
||||
for _, t := range tokens {
|
||||
if t.Kind == css_lexer.TComma {
|
||||
commaCount++
|
||||
}
|
||||
}
|
||||
isMultiLineValue = commaCount >= 2
|
||||
}
|
||||
|
||||
for i, t := range tokens {
|
||||
if t.Kind == css_lexer.TWhitespace {
|
||||
hasWhitespaceAfter = true
|
||||
continue
|
||||
}
|
||||
if hasWhitespaceAfter {
|
||||
if isMultiLineValue && (i == 0 || tokens[i-1].Kind == css_lexer.TComma) {
|
||||
p.print("\n")
|
||||
p.printIndent(opts.indent + 1)
|
||||
} else {
|
||||
p.print(" ")
|
||||
}
|
||||
}
|
||||
hasWhitespaceAfter = (t.Whitespace&css_ast.WhitespaceAfter) != 0 ||
|
||||
(i+1 < len(tokens) && (tokens[i+1].Whitespace&css_ast.WhitespaceBefore) != 0)
|
||||
|
||||
whitespace := mayNeedWhitespaceAfter
|
||||
if !hasWhitespaceAfter {
|
||||
whitespace = canDiscardWhitespaceAfter
|
||||
}
|
||||
|
||||
switch t.Kind {
|
||||
case css_lexer.TIdent:
|
||||
p.printIdent(t.Text, identNormal, whitespace)
|
||||
|
||||
case css_lexer.TFunction:
|
||||
p.printIdent(t.Text, identNormal, whitespace)
|
||||
p.print("(")
|
||||
|
||||
case css_lexer.TDimension:
|
||||
p.print(t.DimensionValue())
|
||||
p.printIdent(t.DimensionUnit(), identDimensionUnit, whitespace)
|
||||
|
||||
case css_lexer.TAtKeyword:
|
||||
p.print("@")
|
||||
p.printIdent(t.Text, identNormal, whitespace)
|
||||
|
||||
case css_lexer.THash:
|
||||
p.print("#")
|
||||
p.printIdent(t.Text, identHash, whitespace)
|
||||
|
||||
case css_lexer.TString:
|
||||
p.printQuoted(t.Text)
|
||||
|
||||
case css_lexer.TURL:
|
||||
text := p.importRecords[t.ImportRecordIndex].Path.Text
|
||||
p.print("url(")
|
||||
p.printQuotedWithQuote(text, bestQuoteCharForString(text, true))
|
||||
p.print(")")
|
||||
|
||||
default:
|
||||
p.print(t.Text)
|
||||
}
|
||||
|
||||
if t.Children != nil {
|
||||
p.printTokens(*t.Children, printTokensOpts{})
|
||||
|
||||
switch t.Kind {
|
||||
case css_lexer.TFunction:
|
||||
p.print(")")
|
||||
|
||||
case css_lexer.TOpenParen:
|
||||
p.print(")")
|
||||
|
||||
case css_lexer.TOpenBrace:
|
||||
p.print("}")
|
||||
|
||||
case css_lexer.TOpenBracket:
|
||||
p.print("]")
|
||||
}
|
||||
}
|
||||
}
|
||||
if hasWhitespaceAfter {
|
||||
p.print(" ")
|
||||
}
|
||||
return hasWhitespaceAfter
|
||||
}
|
649
vendor/github.com/evanw/esbuild/internal/fs/filepath.go
generated
vendored
649
vendor/github.com/evanw/esbuild/internal/fs/filepath.go
generated
vendored
@ -1,649 +0,0 @@
|
||||
// Code in this file has been forked from the "filepath" module in the Go
|
||||
// source code to work around bugs with the WebAssembly build target. More
|
||||
// information about why here: https://github.com/golang/go/issues/43768.
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// Copyright (c) 2009 The Go Authors. All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package fs
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"strings"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
type goFilepath struct {
|
||||
cwd string
|
||||
isWindows bool
|
||||
pathSeparator byte
|
||||
}
|
||||
|
||||
func isSlash(c uint8) bool {
|
||||
return c == '\\' || c == '/'
|
||||
}
|
||||
|
||||
// reservedNames lists reserved Windows names. Search for PRN in
|
||||
// https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file
|
||||
// for details.
|
||||
var reservedNames = []string{
|
||||
"CON", "PRN", "AUX", "NUL",
|
||||
"COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
|
||||
"LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
|
||||
}
|
||||
|
||||
// isReservedName returns true, if path is Windows reserved name.
|
||||
// See reservedNames for the full list.
|
||||
func isReservedName(path string) bool {
|
||||
if len(path) == 0 {
|
||||
return false
|
||||
}
|
||||
for _, reserved := range reservedNames {
|
||||
if strings.EqualFold(path, reserved) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsAbs reports whether the path is absolute.
|
||||
func (fp goFilepath) isAbs(path string) bool {
|
||||
if !fp.isWindows {
|
||||
return strings.HasPrefix(path, "/")
|
||||
}
|
||||
if isReservedName(path) {
|
||||
return true
|
||||
}
|
||||
l := fp.volumeNameLen(path)
|
||||
if l == 0 {
|
||||
return false
|
||||
}
|
||||
path = path[l:]
|
||||
if path == "" {
|
||||
return false
|
||||
}
|
||||
return isSlash(path[0])
|
||||
}
|
||||
|
||||
// Abs returns an absolute representation of path.
|
||||
// If the path is not absolute it will be joined with the current
|
||||
// working directory to turn it into an absolute path. The absolute
|
||||
// path name for a given file is not guaranteed to be unique.
|
||||
// Abs calls Clean on the result.
|
||||
func (fp goFilepath) abs(path string) (string, error) {
|
||||
if fp.isAbs(path) {
|
||||
return fp.clean(path), nil
|
||||
}
|
||||
return fp.join([]string{fp.cwd, path}), nil
|
||||
}
|
||||
|
||||
// IsPathSeparator reports whether c is a directory separator character.
|
||||
func (fp goFilepath) isPathSeparator(c uint8) bool {
|
||||
return c == '/' || (fp.isWindows && c == '\\')
|
||||
}
|
||||
|
||||
// volumeNameLen returns length of the leading volume name on Windows.
|
||||
// It returns 0 elsewhere.
|
||||
func (fp goFilepath) volumeNameLen(path string) int {
|
||||
if !fp.isWindows {
|
||||
return 0
|
||||
}
|
||||
if len(path) < 2 {
|
||||
return 0
|
||||
}
|
||||
// with drive letter
|
||||
c := path[0]
|
||||
if path[1] == ':' && ('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z') {
|
||||
return 2
|
||||
}
|
||||
// is it UNC? https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
|
||||
if l := len(path); l >= 5 && isSlash(path[0]) && isSlash(path[1]) &&
|
||||
!isSlash(path[2]) && path[2] != '.' {
|
||||
// first, leading `\\` and next shouldn't be `\`. its server name.
|
||||
for n := 3; n < l-1; n++ {
|
||||
// second, next '\' shouldn't be repeated.
|
||||
if isSlash(path[n]) {
|
||||
n++
|
||||
// third, following something characters. its share name.
|
||||
if !isSlash(path[n]) {
|
||||
if path[n] == '.' {
|
||||
break
|
||||
}
|
||||
for ; n < l; n++ {
|
||||
if isSlash(path[n]) {
|
||||
break
|
||||
}
|
||||
}
|
||||
return n
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// EvalSymlinks returns the path name after the evaluation of any symbolic
|
||||
// links.
|
||||
// If path is relative the result will be relative to the current directory,
|
||||
// unless one of the components is an absolute symbolic link.
|
||||
// EvalSymlinks calls Clean on the result.
|
||||
func (fp goFilepath) evalSymlinks(path string) (string, error) {
|
||||
volLen := fp.volumeNameLen(path)
|
||||
pathSeparator := string(fp.pathSeparator)
|
||||
|
||||
if volLen < len(path) && fp.isPathSeparator(path[volLen]) {
|
||||
volLen++
|
||||
}
|
||||
vol := path[:volLen]
|
||||
dest := vol
|
||||
linksWalked := 0
|
||||
for start, end := volLen, volLen; start < len(path); start = end {
|
||||
for start < len(path) && fp.isPathSeparator(path[start]) {
|
||||
start++
|
||||
}
|
||||
end = start
|
||||
for end < len(path) && !fp.isPathSeparator(path[end]) {
|
||||
end++
|
||||
}
|
||||
|
||||
// On Windows, "." can be a symlink.
|
||||
// We look it up, and use the value if it is absolute.
|
||||
// If not, we just return ".".
|
||||
isWindowsDot := fp.isWindows && path[fp.volumeNameLen(path):] == "."
|
||||
|
||||
// The next path component is in path[start:end].
|
||||
if end == start {
|
||||
// No more path components.
|
||||
break
|
||||
} else if path[start:end] == "." && !isWindowsDot {
|
||||
// Ignore path component ".".
|
||||
continue
|
||||
} else if path[start:end] == ".." {
|
||||
// Back up to previous component if possible.
|
||||
// Note that volLen includes any leading slash.
|
||||
|
||||
// Set r to the index of the last slash in dest,
|
||||
// after the volume.
|
||||
var r int
|
||||
for r = len(dest) - 1; r >= volLen; r-- {
|
||||
if fp.isPathSeparator(dest[r]) {
|
||||
break
|
||||
}
|
||||
}
|
||||
if r < volLen || dest[r+1:] == ".." {
|
||||
// Either path has no slashes
|
||||
// (it's empty or just "C:")
|
||||
// or it ends in a ".." we had to keep.
|
||||
// Either way, keep this "..".
|
||||
if len(dest) > volLen {
|
||||
dest += pathSeparator
|
||||
}
|
||||
dest += ".."
|
||||
} else {
|
||||
// Discard everything since the last slash.
|
||||
dest = dest[:r]
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Ordinary path component. Add it to result.
|
||||
|
||||
if len(dest) > fp.volumeNameLen(dest) && !fp.isPathSeparator(dest[len(dest)-1]) {
|
||||
dest += pathSeparator
|
||||
}
|
||||
|
||||
dest += path[start:end]
|
||||
|
||||
// Resolve symlink.
|
||||
|
||||
fi, err := os.Lstat(dest)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if fi.Mode()&os.ModeSymlink == 0 {
|
||||
if !fi.Mode().IsDir() && end < len(path) {
|
||||
return "", syscall.ENOTDIR
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Found symlink.
|
||||
|
||||
linksWalked++
|
||||
if linksWalked > 255 {
|
||||
return "", errors.New("EvalSymlinks: too many links")
|
||||
}
|
||||
|
||||
link, err := os.Readlink(dest)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if isWindowsDot && !fp.isAbs(link) {
|
||||
// On Windows, if "." is a relative symlink,
|
||||
// just return ".".
|
||||
break
|
||||
}
|
||||
|
||||
path = link + path[end:]
|
||||
|
||||
v := fp.volumeNameLen(link)
|
||||
if v > 0 {
|
||||
// Symlink to drive name is an absolute path.
|
||||
if v < len(link) && fp.isPathSeparator(link[v]) {
|
||||
v++
|
||||
}
|
||||
vol = link[:v]
|
||||
dest = vol
|
||||
end = len(vol)
|
||||
} else if len(link) > 0 && fp.isPathSeparator(link[0]) {
|
||||
// Symlink to absolute path.
|
||||
dest = link[:1]
|
||||
end = 1
|
||||
} else {
|
||||
// Symlink to relative path; replace last
|
||||
// path component in dest.
|
||||
var r int
|
||||
for r = len(dest) - 1; r >= volLen; r-- {
|
||||
if fp.isPathSeparator(dest[r]) {
|
||||
break
|
||||
}
|
||||
}
|
||||
if r < volLen {
|
||||
dest = vol
|
||||
} else {
|
||||
dest = dest[:r]
|
||||
}
|
||||
end = 0
|
||||
}
|
||||
}
|
||||
return fp.clean(dest), nil
|
||||
}
|
||||
|
||||
// A lazybuf is a lazily constructed path buffer.
|
||||
// It supports append, reading previously appended bytes,
|
||||
// and retrieving the final string. It does not allocate a buffer
|
||||
// to hold the output until that output diverges from s.
|
||||
type lazybuf struct {
|
||||
path string
|
||||
buf []byte
|
||||
w int
|
||||
volAndPath string
|
||||
volLen int
|
||||
}
|
||||
|
||||
func (b *lazybuf) index(i int) byte {
|
||||
if b.buf != nil {
|
||||
return b.buf[i]
|
||||
}
|
||||
return b.path[i]
|
||||
}
|
||||
|
||||
func (b *lazybuf) append(c byte) {
|
||||
if b.buf == nil {
|
||||
if b.w < len(b.path) && b.path[b.w] == c {
|
||||
b.w++
|
||||
return
|
||||
}
|
||||
b.buf = make([]byte, len(b.path))
|
||||
copy(b.buf, b.path[:b.w])
|
||||
}
|
||||
b.buf[b.w] = c
|
||||
b.w++
|
||||
}
|
||||
|
||||
func (b *lazybuf) string() string {
|
||||
if b.buf == nil {
|
||||
return b.volAndPath[:b.volLen+b.w]
|
||||
}
|
||||
return b.volAndPath[:b.volLen] + string(b.buf[:b.w])
|
||||
}
|
||||
|
||||
// FromSlash returns the result of replacing each slash ('/') character
|
||||
// in path with a separator character. Multiple slashes are replaced
|
||||
// by multiple separators.
|
||||
func (fp goFilepath) fromSlash(path string) string {
|
||||
if !fp.isWindows {
|
||||
return path
|
||||
}
|
||||
return strings.ReplaceAll(path, "/", "\\")
|
||||
}
|
||||
|
||||
// Clean returns the shortest path name equivalent to path
|
||||
// by purely lexical processing. It applies the following rules
|
||||
// iteratively until no further processing can be done:
|
||||
//
|
||||
// 1. Replace multiple Separator elements with a single one.
|
||||
// 2. Eliminate each . path name element (the current directory).
|
||||
// 3. Eliminate each inner .. path name element (the parent directory)
|
||||
// along with the non-.. element that precedes it.
|
||||
// 4. Eliminate .. elements that begin a rooted path:
|
||||
// that is, replace "/.." by "/" at the beginning of a path,
|
||||
// assuming Separator is '/'.
|
||||
//
|
||||
// The returned path ends in a slash only if it represents a root directory,
|
||||
// such as "/" on Unix or `C:\` on Windows.
|
||||
//
|
||||
// Finally, any occurrences of slash are replaced by Separator.
|
||||
//
|
||||
// If the result of this process is an empty string, Clean
|
||||
// returns the string ".".
|
||||
//
|
||||
// See also Rob Pike, ``Lexical File Names in Plan 9 or
|
||||
// Getting Dot-Dot Right,''
|
||||
// https://9p.io/sys/doc/lexnames.html
|
||||
func (fp goFilepath) clean(path string) string {
|
||||
originalPath := path
|
||||
volLen := fp.volumeNameLen(path)
|
||||
path = path[volLen:]
|
||||
if path == "" {
|
||||
if volLen > 1 && originalPath[1] != ':' {
|
||||
// should be UNC
|
||||
return fp.fromSlash(originalPath)
|
||||
}
|
||||
return originalPath + "."
|
||||
}
|
||||
rooted := fp.isPathSeparator(path[0])
|
||||
|
||||
// Invariants:
|
||||
// reading from path; r is index of next byte to process.
|
||||
// writing to buf; w is index of next byte to write.
|
||||
// dotdot is index in buf where .. must stop, either because
|
||||
// it is the leading slash or it is a leading ../../.. prefix.
|
||||
n := len(path)
|
||||
out := lazybuf{path: path, volAndPath: originalPath, volLen: volLen}
|
||||
r, dotdot := 0, 0
|
||||
if rooted {
|
||||
out.append(fp.pathSeparator)
|
||||
r, dotdot = 1, 1
|
||||
}
|
||||
|
||||
for r < n {
|
||||
switch {
|
||||
case fp.isPathSeparator(path[r]):
|
||||
// empty path element
|
||||
r++
|
||||
case path[r] == '.' && (r+1 == n || fp.isPathSeparator(path[r+1])):
|
||||
// . element
|
||||
r++
|
||||
case path[r] == '.' && path[r+1] == '.' && (r+2 == n || fp.isPathSeparator(path[r+2])):
|
||||
// .. element: remove to last separator
|
||||
r += 2
|
||||
switch {
|
||||
case out.w > dotdot:
|
||||
// can backtrack
|
||||
out.w--
|
||||
for out.w > dotdot && !fp.isPathSeparator(out.index(out.w)) {
|
||||
out.w--
|
||||
}
|
||||
case !rooted:
|
||||
// cannot backtrack, but not rooted, so append .. element.
|
||||
if out.w > 0 {
|
||||
out.append(fp.pathSeparator)
|
||||
}
|
||||
out.append('.')
|
||||
out.append('.')
|
||||
dotdot = out.w
|
||||
}
|
||||
default:
|
||||
// real path element.
|
||||
// add slash if needed
|
||||
if rooted && out.w != 1 || !rooted && out.w != 0 {
|
||||
out.append(fp.pathSeparator)
|
||||
}
|
||||
// copy element
|
||||
for ; r < n && !fp.isPathSeparator(path[r]); r++ {
|
||||
out.append(path[r])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Turn empty string into "."
|
||||
if out.w == 0 {
|
||||
out.append('.')
|
||||
}
|
||||
|
||||
return fp.fromSlash(out.string())
|
||||
}
|
||||
|
||||
// VolumeName returns leading volume name.
|
||||
// Given "C:\foo\bar" it returns "C:" on Windows.
|
||||
// Given "\\host\share\foo" it returns "\\host\share".
|
||||
// On other platforms it returns "".
|
||||
func (fp goFilepath) volumeName(path string) string {
|
||||
return path[:fp.volumeNameLen(path)]
|
||||
}
|
||||
|
||||
// Base returns the last element of path.
|
||||
// Trailing path separators are removed before extracting the last element.
|
||||
// If the path is empty, Base returns ".".
|
||||
// If the path consists entirely of separators, Base returns a single separator.
|
||||
func (fp goFilepath) base(path string) string {
|
||||
if path == "" {
|
||||
return "."
|
||||
}
|
||||
// Strip trailing slashes.
|
||||
for len(path) > 0 && fp.isPathSeparator(path[len(path)-1]) {
|
||||
path = path[0 : len(path)-1]
|
||||
}
|
||||
// Throw away volume name
|
||||
path = path[len(fp.volumeName(path)):]
|
||||
// Find the last element
|
||||
i := len(path) - 1
|
||||
for i >= 0 && !fp.isPathSeparator(path[i]) {
|
||||
i--
|
||||
}
|
||||
if i >= 0 {
|
||||
path = path[i+1:]
|
||||
}
|
||||
// If empty now, it had only slashes.
|
||||
if path == "" {
|
||||
return string(fp.pathSeparator)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
// Dir returns all but the last element of path, typically the path's directory.
|
||||
// After dropping the final element, Dir calls Clean on the path and trailing
|
||||
// slashes are removed.
|
||||
// If the path is empty, Dir returns ".".
|
||||
// If the path consists entirely of separators, Dir returns a single separator.
|
||||
// The returned path does not end in a separator unless it is the root directory.
|
||||
func (fp goFilepath) dir(path string) string {
|
||||
vol := fp.volumeName(path)
|
||||
i := len(path) - 1
|
||||
for i >= len(vol) && !fp.isPathSeparator(path[i]) {
|
||||
i--
|
||||
}
|
||||
dir := fp.clean(path[len(vol) : i+1])
|
||||
if dir == "." && len(vol) > 2 {
|
||||
// must be UNC
|
||||
return vol
|
||||
}
|
||||
return vol + dir
|
||||
}
|
||||
|
||||
// Ext returns the file name extension used by path.
|
||||
// The extension is the suffix beginning at the final dot
|
||||
// in the final element of path; it is empty if there is
|
||||
// no dot.
|
||||
func (fp goFilepath) ext(path string) string {
|
||||
for i := len(path) - 1; i >= 0 && !fp.isPathSeparator(path[i]); i-- {
|
||||
if path[i] == '.' {
|
||||
return path[i:]
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// Join joins any number of path elements into a single path,
|
||||
// separating them with an OS specific Separator. Empty elements
|
||||
// are ignored. The result is Cleaned. However, if the argument
|
||||
// list is empty or all its elements are empty, Join returns
|
||||
// an empty string.
|
||||
// On Windows, the result will only be a UNC path if the first
|
||||
// non-empty element is a UNC path.
|
||||
func (fp goFilepath) join(elem []string) string {
|
||||
for i, e := range elem {
|
||||
if e != "" {
|
||||
if fp.isWindows {
|
||||
return fp.joinNonEmpty(elem[i:])
|
||||
}
|
||||
return fp.clean(strings.Join(elem[i:], string(fp.pathSeparator)))
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// joinNonEmpty is like join, but it assumes that the first element is non-empty.
|
||||
func (fp goFilepath) joinNonEmpty(elem []string) string {
|
||||
if len(elem[0]) == 2 && elem[0][1] == ':' {
|
||||
// First element is drive letter without terminating slash.
|
||||
// Keep path relative to current directory on that drive.
|
||||
// Skip empty elements.
|
||||
i := 1
|
||||
for ; i < len(elem); i++ {
|
||||
if elem[i] != "" {
|
||||
break
|
||||
}
|
||||
}
|
||||
return fp.clean(elem[0] + strings.Join(elem[i:], string(fp.pathSeparator)))
|
||||
}
|
||||
// The following logic prevents Join from inadvertently creating a
|
||||
// UNC path on Windows. Unless the first element is a UNC path, Join
|
||||
// shouldn't create a UNC path. See golang.org/issue/9167.
|
||||
p := fp.clean(strings.Join(elem, string(fp.pathSeparator)))
|
||||
if !fp.isUNC(p) {
|
||||
return p
|
||||
}
|
||||
// p == UNC only allowed when the first element is a UNC path.
|
||||
head := fp.clean(elem[0])
|
||||
if fp.isUNC(head) {
|
||||
return p
|
||||
}
|
||||
// head + tail == UNC, but joining two non-UNC paths should not result
|
||||
// in a UNC path. Undo creation of UNC path.
|
||||
tail := fp.clean(strings.Join(elem[1:], string(fp.pathSeparator)))
|
||||
if head[len(head)-1] == fp.pathSeparator {
|
||||
return head + tail
|
||||
}
|
||||
return head + string(fp.pathSeparator) + tail
|
||||
}
|
||||
|
||||
// isUNC reports whether path is a UNC path.
|
||||
func (fp goFilepath) isUNC(path string) bool {
|
||||
return fp.volumeNameLen(path) > 2
|
||||
}
|
||||
|
||||
// Rel returns a relative path that is lexically equivalent to targpath when
|
||||
// joined to basepath with an intervening separator. That is,
|
||||
// Join(basepath, Rel(basepath, targpath)) is equivalent to targpath itself.
|
||||
// On success, the returned path will always be relative to basepath,
|
||||
// even if basepath and targpath share no elements.
|
||||
// An error is returned if targpath can't be made relative to basepath or if
|
||||
// knowing the current working directory would be necessary to compute it.
|
||||
// Rel calls Clean on the result.
|
||||
func (fp goFilepath) rel(basepath, targpath string) (string, error) {
|
||||
baseVol := fp.volumeName(basepath)
|
||||
targVol := fp.volumeName(targpath)
|
||||
base := fp.clean(basepath)
|
||||
targ := fp.clean(targpath)
|
||||
if fp.sameWord(targ, base) {
|
||||
return ".", nil
|
||||
}
|
||||
base = base[len(baseVol):]
|
||||
targ = targ[len(targVol):]
|
||||
if base == "." {
|
||||
base = ""
|
||||
}
|
||||
// Can't use IsAbs - `\a` and `a` are both relative in Windows.
|
||||
baseSlashed := len(base) > 0 && base[0] == fp.pathSeparator
|
||||
targSlashed := len(targ) > 0 && targ[0] == fp.pathSeparator
|
||||
if baseSlashed != targSlashed || !fp.sameWord(baseVol, targVol) {
|
||||
return "", errors.New("Rel: can't make " + targpath + " relative to " + basepath)
|
||||
}
|
||||
// Position base[b0:bi] and targ[t0:ti] at the first differing elements.
|
||||
bl := len(base)
|
||||
tl := len(targ)
|
||||
var b0, bi, t0, ti int
|
||||
for {
|
||||
for bi < bl && base[bi] != fp.pathSeparator {
|
||||
bi++
|
||||
}
|
||||
for ti < tl && targ[ti] != fp.pathSeparator {
|
||||
ti++
|
||||
}
|
||||
if !fp.sameWord(targ[t0:ti], base[b0:bi]) {
|
||||
break
|
||||
}
|
||||
if bi < bl {
|
||||
bi++
|
||||
}
|
||||
if ti < tl {
|
||||
ti++
|
||||
}
|
||||
b0 = bi
|
||||
t0 = ti
|
||||
}
|
||||
if base[b0:bi] == ".." {
|
||||
return "", errors.New("Rel: can't make " + targpath + " relative to " + basepath)
|
||||
}
|
||||
if b0 != bl {
|
||||
// Base elements left. Must go up before going down.
|
||||
seps := strings.Count(base[b0:bl], string(fp.pathSeparator))
|
||||
size := 2 + seps*3
|
||||
if tl != t0 {
|
||||
size += 1 + tl - t0
|
||||
}
|
||||
buf := make([]byte, size)
|
||||
n := copy(buf, "..")
|
||||
for i := 0; i < seps; i++ {
|
||||
buf[n] = fp.pathSeparator
|
||||
copy(buf[n+1:], "..")
|
||||
n += 3
|
||||
}
|
||||
if t0 != tl {
|
||||
buf[n] = fp.pathSeparator
|
||||
copy(buf[n+1:], targ[t0:])
|
||||
}
|
||||
return string(buf), nil
|
||||
}
|
||||
return targ[t0:], nil
|
||||
}
|
||||
|
||||
func (fp goFilepath) sameWord(a, b string) bool {
|
||||
if !fp.isWindows {
|
||||
return a == b
|
||||
}
|
||||
return strings.EqualFold(a, b)
|
||||
}
|
270
vendor/github.com/evanw/esbuild/internal/fs/fs.go
generated
vendored
270
vendor/github.com/evanw/esbuild/internal/fs/fs.go
generated
vendored
@ -1,270 +0,0 @@
|
||||
package fs
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
type EntryKind uint8
|
||||
|
||||
const (
|
||||
DirEntry EntryKind = 1
|
||||
FileEntry EntryKind = 2
|
||||
)
|
||||
|
||||
type Entry struct {
|
||||
symlink string
|
||||
dir string
|
||||
base string
|
||||
mutex sync.Mutex
|
||||
kind EntryKind
|
||||
needStat bool
|
||||
}
|
||||
|
||||
func (e *Entry) Kind(fs FS) EntryKind {
|
||||
e.mutex.Lock()
|
||||
defer e.mutex.Unlock()
|
||||
if e.needStat {
|
||||
e.needStat = false
|
||||
e.symlink, e.kind = fs.kind(e.dir, e.base)
|
||||
}
|
||||
return e.kind
|
||||
}
|
||||
|
||||
func (e *Entry) Symlink(fs FS) string {
|
||||
e.mutex.Lock()
|
||||
defer e.mutex.Unlock()
|
||||
if e.needStat {
|
||||
e.needStat = false
|
||||
e.symlink, e.kind = fs.kind(e.dir, e.base)
|
||||
}
|
||||
return e.symlink
|
||||
}
|
||||
|
||||
type accessedEntries struct {
|
||||
mutex sync.Mutex
|
||||
wasPresent map[string]bool
|
||||
|
||||
// If this is nil, "SortedKeys()" was not accessed. This means we should
|
||||
// check for whether this directory has changed or not by seeing if any of
|
||||
// the entries in the "wasPresent" map have changed in "present or not"
|
||||
// status, since the only access was to individual entries via "Get()".
|
||||
//
|
||||
// If this is non-nil, "SortedKeys()" was accessed. This means we should
|
||||
// check for whether this directory has changed or not by checking the
|
||||
// "allEntries" array for equality with the existing entries list, since the
|
||||
// code asked for all entries and may have used the presence or absence of
|
||||
// entries in that list.
|
||||
//
|
||||
// The goal of having these two checks is to be as narrow as possible to
|
||||
// avoid unnecessary rebuilds. If only "Get()" is called on a few entries,
|
||||
// then we won't invalidate the build if random unrelated entries are added
|
||||
// or removed. But if "SortedKeys()" is called, we need to invalidate the
|
||||
// build if anything about the set of entries in this directory is changed.
|
||||
allEntries []string
|
||||
}
|
||||
|
||||
type DirEntries struct {
|
||||
dir string
|
||||
data map[string]*Entry
|
||||
accessedEntries *accessedEntries
|
||||
}
|
||||
|
||||
func MakeEmptyDirEntries(dir string) DirEntries {
|
||||
return DirEntries{dir, make(map[string]*Entry), nil}
|
||||
}
|
||||
|
||||
type DifferentCase struct {
|
||||
Dir string
|
||||
Query string
|
||||
Actual string
|
||||
}
|
||||
|
||||
func (entries DirEntries) Get(query string) (*Entry, *DifferentCase) {
|
||||
if entries.data != nil {
|
||||
key := strings.ToLower(query)
|
||||
entry := entries.data[key]
|
||||
|
||||
// Track whether this specific entry was present or absent for watch mode
|
||||
if accessed := entries.accessedEntries; accessed != nil {
|
||||
accessed.mutex.Lock()
|
||||
accessed.wasPresent[key] = entry != nil
|
||||
accessed.mutex.Unlock()
|
||||
}
|
||||
|
||||
if entry != nil {
|
||||
if entry.base != query {
|
||||
return entry, &DifferentCase{
|
||||
Dir: entries.dir,
|
||||
Query: query,
|
||||
Actual: entry.base,
|
||||
}
|
||||
}
|
||||
return entry, nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (entries DirEntries) SortedKeys() (keys []string) {
|
||||
if entries.data != nil {
|
||||
keys = make([]string, 0, len(entries.data))
|
||||
for _, entry := range entries.data {
|
||||
keys = append(keys, entry.base)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
|
||||
// Track the exact set of all entries for watch mode
|
||||
if entries.accessedEntries != nil {
|
||||
entries.accessedEntries.mutex.Lock()
|
||||
entries.accessedEntries.allEntries = keys
|
||||
entries.accessedEntries.mutex.Unlock()
|
||||
}
|
||||
|
||||
return keys
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
type OpenedFile interface {
|
||||
Len() int
|
||||
Read(start int, end int) ([]byte, error)
|
||||
Close() error
|
||||
}
|
||||
|
||||
type InMemoryOpenedFile struct {
|
||||
Contents []byte
|
||||
}
|
||||
|
||||
func (f *InMemoryOpenedFile) Len() int {
|
||||
return len(f.Contents)
|
||||
}
|
||||
|
||||
func (f *InMemoryOpenedFile) Read(start int, end int) ([]byte, error) {
|
||||
return []byte(f.Contents[start:end]), nil
|
||||
}
|
||||
|
||||
func (f *InMemoryOpenedFile) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type FS interface {
|
||||
// The returned map is immutable and is cached across invocations. Do not
|
||||
// mutate it.
|
||||
ReadDirectory(path string) (entries DirEntries, canonicalError error, originalError error)
|
||||
ReadFile(path string) (contents string, canonicalError error, originalError error)
|
||||
OpenFile(path string) (result OpenedFile, canonicalError error, originalError error)
|
||||
|
||||
// This is a key made from the information returned by "stat". It is intended
|
||||
// to be different if the file has been edited, and to otherwise be equal if
|
||||
// the file has not been edited. It should usually work, but no guarantees.
|
||||
//
|
||||
// See https://apenwarr.ca/log/20181113 for more information about why this
|
||||
// can be broken. For example, writing to a file with mmap on WSL on Windows
|
||||
// won't change this key. Hopefully this isn't too much of an issue.
|
||||
//
|
||||
// Additional reading:
|
||||
// - https://github.com/npm/npm/pull/20027
|
||||
// - https://github.com/golang/go/commit/7dea509703eb5ad66a35628b12a678110fbb1f72
|
||||
ModKey(path string) (ModKey, error)
|
||||
|
||||
// This is part of the interface because the mock interface used for tests
|
||||
// should not depend on file system behavior (i.e. different slashes for
|
||||
// Windows) while the real interface should.
|
||||
IsAbs(path string) bool
|
||||
Abs(path string) (string, bool)
|
||||
Dir(path string) string
|
||||
Base(path string) string
|
||||
Ext(path string) string
|
||||
Join(parts ...string) string
|
||||
Cwd() string
|
||||
Rel(base string, target string) (string, bool)
|
||||
|
||||
// This is used in the implementation of "Entry"
|
||||
kind(dir string, base string) (symlink string, kind EntryKind)
|
||||
|
||||
// This is a set of all files used and all directories checked. The build
|
||||
// must be invalidated if any of these watched files change.
|
||||
WatchData() WatchData
|
||||
}
|
||||
|
||||
type WatchData struct {
|
||||
// These functions return a non-empty path as a string if the file system
|
||||
// entry has been modified. For files, the returned path is the same as the
|
||||
// file path. For directories, the returned path is either the directory
|
||||
// itself or a file in the directory that was changed.
|
||||
Paths map[string]func() string
|
||||
}
|
||||
|
||||
type ModKey struct {
|
||||
// What gets filled in here is OS-dependent
|
||||
inode uint64
|
||||
size int64
|
||||
mtime_sec int64
|
||||
mtime_nsec int64
|
||||
mode uint32
|
||||
uid uint32
|
||||
}
|
||||
|
||||
// Some file systems have a time resolution of only a few seconds. If a mtime
|
||||
// value is too new, we won't be able to tell if it has been recently modified
|
||||
// or not. So we only use mtimes for comparison if they are sufficiently old.
|
||||
// Apparently the FAT file system has a resolution of two seconds according to
|
||||
// this article: https://en.wikipedia.org/wiki/Stat_(system_call).
|
||||
const modKeySafetyGap = 3 // In seconds
|
||||
var modKeyUnusable = errors.New("The modification key is unusable")
|
||||
|
||||
// Limit the number of files open simultaneously to avoid ulimit issues
|
||||
var fileOpenLimit = make(chan bool, 32)
|
||||
|
||||
func BeforeFileOpen() {
|
||||
// This will block if the number of open files is already at the limit
|
||||
fileOpenLimit <- false
|
||||
}
|
||||
|
||||
func AfterFileClose() {
|
||||
<-fileOpenLimit
|
||||
}
|
||||
|
||||
// This is a fork of "os.MkdirAll" to work around bugs with the WebAssembly
|
||||
// build target. More information here: https://github.com/golang/go/issues/43768.
|
||||
func MkdirAll(fs FS, path string, perm os.FileMode) error {
|
||||
// Run "Join" once to run "Clean" on the path, which removes trailing slashes
|
||||
return mkdirAll(fs, fs.Join(path), perm)
|
||||
}
|
||||
|
||||
func mkdirAll(fs FS, path string, perm os.FileMode) error {
|
||||
// Fast path: if we can tell whether path is a directory or file, stop with success or error.
|
||||
if dir, err := os.Stat(path); err == nil {
|
||||
if dir.IsDir() {
|
||||
return nil
|
||||
}
|
||||
return &os.PathError{Op: "mkdir", Path: path, Err: syscall.ENOTDIR}
|
||||
}
|
||||
|
||||
// Slow path: make sure parent exists and then call Mkdir for path.
|
||||
if parent := fs.Dir(path); parent != path {
|
||||
// Create parent.
|
||||
if err := mkdirAll(fs, parent, perm); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Parent now exists; invoke Mkdir and use its result.
|
||||
if err := os.Mkdir(path, perm); err != nil {
|
||||
// Handle arguments like "foo/." by
|
||||
// double-checking that directory doesn't exist.
|
||||
dir, err1 := os.Lstat(path)
|
||||
if err1 == nil && dir.IsDir() {
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
157
vendor/github.com/evanw/esbuild/internal/fs/fs_mock.go
generated
vendored
157
vendor/github.com/evanw/esbuild/internal/fs/fs_mock.go
generated
vendored
@ -1,157 +0,0 @@
|
||||
// This is a mock implementation of the "fs" module for use with tests. It does
|
||||
// not actually read from the file system. Instead, it reads from a pre-specified
|
||||
// map of file paths to files.
|
||||
|
||||
package fs
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"path"
|
||||
"strings"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
type mockFS struct {
|
||||
dirs map[string]DirEntries
|
||||
files map[string]string
|
||||
}
|
||||
|
||||
func MockFS(input map[string]string) FS {
|
||||
dirs := make(map[string]DirEntries)
|
||||
files := make(map[string]string)
|
||||
|
||||
for k, v := range input {
|
||||
files[k] = v
|
||||
original := k
|
||||
|
||||
// Build the directory map
|
||||
for {
|
||||
kDir := path.Dir(k)
|
||||
dir, ok := dirs[kDir]
|
||||
if !ok {
|
||||
dir = DirEntries{kDir, make(map[string]*Entry), nil}
|
||||
dirs[kDir] = dir
|
||||
}
|
||||
if kDir == k {
|
||||
break
|
||||
}
|
||||
base := path.Base(k)
|
||||
if k == original {
|
||||
dir.data[strings.ToLower(base)] = &Entry{kind: FileEntry, base: base}
|
||||
} else {
|
||||
dir.data[strings.ToLower(base)] = &Entry{kind: DirEntry, base: base}
|
||||
}
|
||||
k = kDir
|
||||
}
|
||||
}
|
||||
|
||||
return &mockFS{dirs, files}
|
||||
}
|
||||
|
||||
func (fs *mockFS) ReadDirectory(path string) (DirEntries, error, error) {
|
||||
if dir, ok := fs.dirs[path]; ok {
|
||||
return dir, nil, nil
|
||||
}
|
||||
return DirEntries{}, syscall.ENOENT, syscall.ENOENT
|
||||
}
|
||||
|
||||
func (fs *mockFS) ReadFile(path string) (string, error, error) {
|
||||
if contents, ok := fs.files[path]; ok {
|
||||
return contents, nil, nil
|
||||
}
|
||||
return "", syscall.ENOENT, syscall.ENOENT
|
||||
}
|
||||
|
||||
func (fs *mockFS) OpenFile(path string) (OpenedFile, error, error) {
|
||||
if contents, ok := fs.files[path]; ok {
|
||||
return &InMemoryOpenedFile{Contents: []byte(contents)}, nil, nil
|
||||
}
|
||||
return nil, syscall.ENOENT, syscall.ENOENT
|
||||
}
|
||||
|
||||
func (fs *mockFS) ModKey(path string) (ModKey, error) {
|
||||
return ModKey{}, errors.New("This is not available during tests")
|
||||
}
|
||||
|
||||
func (*mockFS) IsAbs(p string) bool {
|
||||
return path.IsAbs(p)
|
||||
}
|
||||
|
||||
func (*mockFS) Abs(p string) (string, bool) {
|
||||
return path.Clean(path.Join("/", p)), true
|
||||
}
|
||||
|
||||
func (*mockFS) Dir(p string) string {
|
||||
return path.Dir(p)
|
||||
}
|
||||
|
||||
func (*mockFS) Base(p string) string {
|
||||
return path.Base(p)
|
||||
}
|
||||
|
||||
func (*mockFS) Ext(p string) string {
|
||||
return path.Ext(p)
|
||||
}
|
||||
|
||||
func (*mockFS) Join(parts ...string) string {
|
||||
return path.Clean(path.Join(parts...))
|
||||
}
|
||||
|
||||
func (*mockFS) Cwd() string {
|
||||
return "/"
|
||||
}
|
||||
|
||||
func splitOnSlash(path string) (string, string) {
|
||||
if slash := strings.IndexByte(path, '/'); slash != -1 {
|
||||
return path[:slash], path[slash+1:]
|
||||
}
|
||||
return path, ""
|
||||
}
|
||||
|
||||
func (*mockFS) Rel(base string, target string) (string, bool) {
|
||||
base = path.Clean(base)
|
||||
target = path.Clean(target)
|
||||
|
||||
// Base cases
|
||||
if base == "" || base == "." {
|
||||
return target, true
|
||||
}
|
||||
if base == target {
|
||||
return ".", true
|
||||
}
|
||||
|
||||
// Find the common parent directory
|
||||
for {
|
||||
bHead, bTail := splitOnSlash(base)
|
||||
tHead, tTail := splitOnSlash(target)
|
||||
if bHead != tHead {
|
||||
break
|
||||
}
|
||||
base = bTail
|
||||
target = tTail
|
||||
}
|
||||
|
||||
// Stop now if base is a subpath of target
|
||||
if base == "" {
|
||||
return target, true
|
||||
}
|
||||
|
||||
// Traverse up to the common parent
|
||||
commonParent := strings.Repeat("../", strings.Count(base, "/")+1)
|
||||
|
||||
// Stop now if target is a subpath of base
|
||||
if target == "" {
|
||||
return commonParent[:len(commonParent)-1], true
|
||||
}
|
||||
|
||||
// Otherwise, down to the parent
|
||||
return commonParent + target, true
|
||||
}
|
||||
|
||||
func (fs *mockFS) kind(dir string, base string) (symlink string, kind EntryKind) {
|
||||
panic("This should never be called")
|
||||
}
|
||||
|
||||
func (fs *mockFS) WatchData() WatchData {
|
||||
panic("This should never be called")
|
||||
}
|
529
vendor/github.com/evanw/esbuild/internal/fs/fs_real.go
generated
vendored
529
vendor/github.com/evanw/esbuild/internal/fs/fs_real.go
generated
vendored
@ -1,529 +0,0 @@
|
||||
package fs
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
type realFS struct {
|
||||
// Stores the file entries for directories we've listed before
|
||||
entriesMutex sync.Mutex
|
||||
entries map[string]entriesOrErr
|
||||
|
||||
// If true, do not use the "entries" cache
|
||||
doNotCacheEntries bool
|
||||
|
||||
// This stores data that will end up being returned by "WatchData()"
|
||||
watchMutex sync.Mutex
|
||||
watchData map[string]privateWatchData
|
||||
|
||||
// When building with WebAssembly, the Go compiler doesn't correctly handle
|
||||
// platform-specific path behavior. Hack around these bugs by compiling
|
||||
// support for both Unix and Windows paths into all executables and switch
|
||||
// between them at run-time instead.
|
||||
fp goFilepath
|
||||
}
|
||||
|
||||
type entriesOrErr struct {
|
||||
entries DirEntries
|
||||
canonicalError error
|
||||
originalError error
|
||||
}
|
||||
|
||||
type watchState uint8
|
||||
|
||||
const (
|
||||
stateNone watchState = iota
|
||||
stateDirHasAccessedEntries // Compare "accessedEntries"
|
||||
stateDirMissing // Compare directory presence
|
||||
stateFileHasModKey // Compare "modKey"
|
||||
stateFileNeedModKey // Need to transition to "stateFileHasModKey" or "stateFileUnusableModKey" before "WatchData()" returns
|
||||
stateFileMissing // Compare file presence
|
||||
stateFileUnusableModKey // Compare "fileContents"
|
||||
)
|
||||
|
||||
type privateWatchData struct {
|
||||
accessedEntries *accessedEntries
|
||||
fileContents string
|
||||
modKey ModKey
|
||||
state watchState
|
||||
}
|
||||
|
||||
type RealFSOptions struct {
|
||||
WantWatchData bool
|
||||
AbsWorkingDir string
|
||||
DoNotCache bool
|
||||
}
|
||||
|
||||
func RealFS(options RealFSOptions) (FS, error) {
|
||||
var fp goFilepath
|
||||
if CheckIfWindows() {
|
||||
fp.isWindows = true
|
||||
fp.pathSeparator = '\\'
|
||||
} else {
|
||||
fp.isWindows = false
|
||||
fp.pathSeparator = '/'
|
||||
}
|
||||
|
||||
// Come up with a default working directory if one was not specified
|
||||
fp.cwd = options.AbsWorkingDir
|
||||
if fp.cwd == "" {
|
||||
if cwd, err := os.Getwd(); err == nil {
|
||||
fp.cwd = cwd
|
||||
} else if fp.isWindows {
|
||||
fp.cwd = "C:\\"
|
||||
} else {
|
||||
fp.cwd = "/"
|
||||
}
|
||||
} else if !fp.isAbs(fp.cwd) {
|
||||
return nil, fmt.Errorf("The working directory %q is not an absolute path", fp.cwd)
|
||||
}
|
||||
|
||||
// Resolve symlinks in the current working directory. Symlinks are resolved
|
||||
// when input file paths are converted to absolute paths because we need to
|
||||
// recognize an input file as unique even if it has multiple symlinks
|
||||
// pointing to it. The build will generate relative paths from the current
|
||||
// working directory to the absolute input file paths for error messages,
|
||||
// so the current working directory should be processed the same way. Not
|
||||
// doing this causes test failures with esbuild when run from inside a
|
||||
// symlinked directory.
|
||||
//
|
||||
// This deliberately ignores errors due to e.g. infinite loops. If there is
|
||||
// an error, we will just use the original working directory and likely
|
||||
// encounter an error later anyway. And if we don't encounter an error
|
||||
// later, then the current working directory didn't even matter and the
|
||||
// error is unimportant.
|
||||
if path, err := fp.evalSymlinks(fp.cwd); err == nil {
|
||||
fp.cwd = path
|
||||
}
|
||||
|
||||
// Only allocate memory for watch data if necessary
|
||||
var watchData map[string]privateWatchData
|
||||
if options.WantWatchData {
|
||||
watchData = make(map[string]privateWatchData)
|
||||
}
|
||||
|
||||
return &realFS{
|
||||
entries: make(map[string]entriesOrErr),
|
||||
fp: fp,
|
||||
watchData: watchData,
|
||||
doNotCacheEntries: options.DoNotCache,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (fs *realFS) ReadDirectory(dir string) (entries DirEntries, canonicalError error, originalError error) {
|
||||
if !fs.doNotCacheEntries {
|
||||
// First, check the cache
|
||||
cached, ok := func() (cached entriesOrErr, ok bool) {
|
||||
fs.entriesMutex.Lock()
|
||||
defer fs.entriesMutex.Unlock()
|
||||
cached, ok = fs.entries[dir]
|
||||
return
|
||||
}()
|
||||
if ok {
|
||||
// Cache hit: stop now
|
||||
return cached.entries, cached.canonicalError, cached.originalError
|
||||
}
|
||||
}
|
||||
|
||||
// Cache miss: read the directory entries
|
||||
names, canonicalError, originalError := fs.readdir(dir)
|
||||
entries = DirEntries{dir, make(map[string]*Entry), nil}
|
||||
|
||||
// Unwrap to get the underlying error
|
||||
if pathErr, ok := canonicalError.(*os.PathError); ok {
|
||||
canonicalError = pathErr.Unwrap()
|
||||
}
|
||||
|
||||
if canonicalError == nil {
|
||||
for _, name := range names {
|
||||
// Call "stat" lazily for performance. The "@material-ui/icons" package
|
||||
// contains a directory with over 11,000 entries in it and running "stat"
|
||||
// for each entry was a big performance issue for that package.
|
||||
entries.data[strings.ToLower(name)] = &Entry{
|
||||
dir: dir,
|
||||
base: name,
|
||||
needStat: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Store data for watch mode
|
||||
if fs.watchData != nil {
|
||||
defer fs.watchMutex.Unlock()
|
||||
fs.watchMutex.Lock()
|
||||
state := stateDirHasAccessedEntries
|
||||
if canonicalError != nil {
|
||||
state = stateDirMissing
|
||||
}
|
||||
entries.accessedEntries = &accessedEntries{wasPresent: make(map[string]bool)}
|
||||
fs.watchData[dir] = privateWatchData{
|
||||
accessedEntries: entries.accessedEntries,
|
||||
state: state,
|
||||
}
|
||||
}
|
||||
|
||||
// Update the cache unconditionally. Even if the read failed, we don't want to
|
||||
// retry again later. The directory is inaccessible so trying again is wasted.
|
||||
if canonicalError != nil {
|
||||
entries.data = nil
|
||||
}
|
||||
if !fs.doNotCacheEntries {
|
||||
fs.entriesMutex.Lock()
|
||||
defer fs.entriesMutex.Unlock()
|
||||
fs.entries[dir] = entriesOrErr{
|
||||
entries: entries,
|
||||
canonicalError: canonicalError,
|
||||
originalError: originalError,
|
||||
}
|
||||
}
|
||||
return entries, canonicalError, originalError
|
||||
}
|
||||
|
||||
func (fs *realFS) ReadFile(path string) (contents string, canonicalError error, originalError error) {
|
||||
BeforeFileOpen()
|
||||
defer AfterFileClose()
|
||||
buffer, originalError := ioutil.ReadFile(path)
|
||||
canonicalError = fs.canonicalizeError(originalError)
|
||||
|
||||
// Allocate the string once
|
||||
fileContents := string(buffer)
|
||||
|
||||
// Store data for watch mode
|
||||
if fs.watchData != nil {
|
||||
defer fs.watchMutex.Unlock()
|
||||
fs.watchMutex.Lock()
|
||||
data, ok := fs.watchData[path]
|
||||
if canonicalError != nil {
|
||||
data.state = stateFileMissing
|
||||
} else if !ok {
|
||||
data.state = stateFileNeedModKey
|
||||
}
|
||||
data.fileContents = fileContents
|
||||
fs.watchData[path] = data
|
||||
}
|
||||
|
||||
return fileContents, canonicalError, originalError
|
||||
}
|
||||
|
||||
type realOpenedFile struct {
|
||||
handle *os.File
|
||||
len int
|
||||
}
|
||||
|
||||
func (f *realOpenedFile) Len() int {
|
||||
return f.len
|
||||
}
|
||||
|
||||
func (f *realOpenedFile) Read(start int, end int) ([]byte, error) {
|
||||
bytes := make([]byte, end-start)
|
||||
remaining := bytes
|
||||
|
||||
_, err := f.handle.Seek(int64(start), io.SeekStart)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for len(remaining) > 0 {
|
||||
n, err := f.handle.Read(remaining)
|
||||
if err != nil && n <= 0 {
|
||||
return nil, err
|
||||
}
|
||||
remaining = remaining[n:]
|
||||
}
|
||||
|
||||
return bytes, nil
|
||||
}
|
||||
|
||||
func (f *realOpenedFile) Close() error {
|
||||
return f.handle.Close()
|
||||
}
|
||||
|
||||
func (fs *realFS) OpenFile(path string) (OpenedFile, error, error) {
|
||||
BeforeFileOpen()
|
||||
defer AfterFileClose()
|
||||
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, fs.canonicalizeError(err), err
|
||||
}
|
||||
|
||||
info, err := f.Stat()
|
||||
if err != nil {
|
||||
f.Close()
|
||||
return nil, fs.canonicalizeError(err), err
|
||||
}
|
||||
|
||||
return &realOpenedFile{f, int(info.Size())}, nil, nil
|
||||
}
|
||||
|
||||
func (fs *realFS) ModKey(path string) (ModKey, error) {
|
||||
BeforeFileOpen()
|
||||
defer AfterFileClose()
|
||||
key, err := modKey(path)
|
||||
|
||||
// Store data for watch mode
|
||||
if fs.watchData != nil {
|
||||
defer fs.watchMutex.Unlock()
|
||||
fs.watchMutex.Lock()
|
||||
data, ok := fs.watchData[path]
|
||||
if !ok {
|
||||
if err == modKeyUnusable {
|
||||
data.state = stateFileUnusableModKey
|
||||
} else if err != nil {
|
||||
data.state = stateFileMissing
|
||||
} else {
|
||||
data.state = stateFileHasModKey
|
||||
}
|
||||
} else if data.state == stateFileNeedModKey {
|
||||
data.state = stateFileHasModKey
|
||||
}
|
||||
data.modKey = key
|
||||
fs.watchData[path] = data
|
||||
}
|
||||
|
||||
return key, err
|
||||
}
|
||||
|
||||
func (fs *realFS) IsAbs(p string) bool {
|
||||
return fs.fp.isAbs(p)
|
||||
}
|
||||
|
||||
func (fs *realFS) Abs(p string) (string, bool) {
|
||||
abs, err := fs.fp.abs(p)
|
||||
return abs, err == nil
|
||||
}
|
||||
|
||||
func (fs *realFS) Dir(p string) string {
|
||||
return fs.fp.dir(p)
|
||||
}
|
||||
|
||||
func (fs *realFS) Base(p string) string {
|
||||
return fs.fp.base(p)
|
||||
}
|
||||
|
||||
func (fs *realFS) Ext(p string) string {
|
||||
return fs.fp.ext(p)
|
||||
}
|
||||
|
||||
func (fs *realFS) Join(parts ...string) string {
|
||||
return fs.fp.clean(fs.fp.join(parts))
|
||||
}
|
||||
|
||||
func (fs *realFS) Cwd() string {
	return fs.fp.cwd
}

func (fs *realFS) Rel(base string, target string) (string, bool) {
	if rel, err := fs.fp.rel(base, target); err == nil {
		return rel, true
	}
	return "", false
}

func (fs *realFS) readdir(dirname string) (entries []string, canonicalError error, originalError error) {
	BeforeFileOpen()
	defer AfterFileClose()
	f, originalError := os.Open(dirname)
	canonicalError = fs.canonicalizeError(originalError)

	// Stop now if there was an error
	if canonicalError != nil {
		return nil, canonicalError, originalError
	}

	defer f.Close()
	entries, err := f.Readdirnames(-1)

	// Unwrap to get the underlying error
	if syscallErr, ok := err.(*os.SyscallError); ok {
		err = syscallErr.Unwrap()
	}

	// Don't convert ENOTDIR to ENOENT here. ENOTDIR is a legitimate error
	// condition for Readdirnames() on non-Windows platforms.

	return entries, canonicalError, originalError
}

func (fs *realFS) canonicalizeError(err error) error {
	// Unwrap to get the underlying error
	if pathErr, ok := err.(*os.PathError); ok {
		err = pathErr.Unwrap()
	}

	// This has been copied from golang.org/x/sys/windows
	const ERROR_INVALID_NAME syscall.Errno = 123

	// Windows is much more restrictive than Unix about file names. If a file name
	// is invalid, it will return ERROR_INVALID_NAME. Treat this as ENOENT (i.e.
	// "the file does not exist") so that the resolver continues trying to resolve
	// the path on this failure instead of aborting with an error.
	if fs.fp.isWindows && err == ERROR_INVALID_NAME {
		err = syscall.ENOENT
	}

	// Windows returns ENOTDIR here even though nothing we've done yet has asked
	// for a directory. This really means ENOENT on Windows. Return ENOENT here
	// so callers that check for ENOENT will successfully detect this file as
	// missing.
	if err == syscall.ENOTDIR {
		err = syscall.ENOENT
	}

	return err
}

func (fs *realFS) kind(dir string, base string) (symlink string, kind EntryKind) {
	entryPath := fs.fp.join([]string{dir, base})

	// Use "lstat" since we want information about symbolic links
	BeforeFileOpen()
	defer AfterFileClose()
	stat, err := os.Lstat(entryPath)
	if err != nil {
		return
	}
	mode := stat.Mode()

	// Follow symlinks now so the cache contains the translation
	if (mode & os.ModeSymlink) != 0 {
		symlink = entryPath
		linksWalked := 0
		for {
			linksWalked++
			if linksWalked > 255 {
				return // Error: too many links
			}
			link, err := os.Readlink(symlink)
			if err != nil {
				return // Skip over this entry
			}
			if !fs.fp.isAbs(link) {
				link = fs.fp.join([]string{dir, link})
			}
			symlink = fs.fp.clean(link)

			// Re-run "lstat" on the symlink target
			stat2, err2 := os.Lstat(symlink)
			if err2 != nil {
				return // Skip over this entry
			}
			mode = stat2.Mode()
			if (mode & os.ModeSymlink) == 0 {
				break
			}
			dir = fs.fp.dir(symlink)
		}
	}

	// We consider the entry either a directory or a file
	if (mode & os.ModeDir) != 0 {
		kind = DirEntry
	} else {
		kind = FileEntry
	}
	return
}

func (fs *realFS) WatchData() WatchData {
	paths := make(map[string]func() string)

	for path, data := range fs.watchData {
		// Each closure below needs its own copy of these loop variables
		path := path
		data := data

		// Each closure should return the changed path if the state has changed,
		// or an empty string if it hasn't
		if data.state == stateFileNeedModKey {
			key, err := modKey(path)
			if err == modKeyUnusable {
				data.state = stateFileUnusableModKey
			} else if err != nil {
				data.state = stateFileMissing
			} else {
				data.state = stateFileHasModKey
				data.modKey = key
			}
		}

		switch data.state {
		case stateDirMissing:
			paths[path] = func() string {
				info, err := os.Stat(path)
				if err == nil && info.IsDir() {
					return path
				}
				return ""
			}

		case stateDirHasAccessedEntries:
			paths[path] = func() string {
				names, err, _ := fs.readdir(path)
				if err != nil {
					return path
				}
				data.accessedEntries.mutex.Lock()
				defer data.accessedEntries.mutex.Unlock()
				if allEntries := data.accessedEntries.allEntries; allEntries != nil {
					// Check all entries
					if len(names) != len(allEntries) {
						return path
					}
					sort.Strings(names)
					for i, s := range names {
						if s != allEntries[i] {
							return path
						}
					}
				} else {
					// Check individual entries
					isPresent := make(map[string]bool, len(names))
					for _, name := range names {
						isPresent[strings.ToLower(name)] = true
					}
					for name, wasPresent := range data.accessedEntries.wasPresent {
						if wasPresent != isPresent[name] {
							return fs.Join(path, name)
						}
					}
				}
				return ""
			}

		case stateFileMissing:
			paths[path] = func() string {
				if info, err := os.Stat(path); err == nil && !info.IsDir() {
					return path
				}
				return ""
			}

		case stateFileHasModKey:
			paths[path] = func() string {
				if key, err := modKey(path); err != nil || key != data.modKey {
					return path
				}
				return ""
			}

		case stateFileUnusableModKey:
			paths[path] = func() string {
				if buffer, err := ioutil.ReadFile(path); err != nil || string(buffer) != data.fileContents {
					return path
				}
				return ""
			}
		}
	}

	return WatchData{
		Paths: paths,
	}
}
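Aside: WatchData above compiles each watched path into a closure that re-checks exactly one invalidation condition and returns the changed path, or an empty string if nothing changed. A minimal standalone sketch of that closure-per-path pattern, independent of esbuild's internal types (the path and the condition here are made up for illustration):

package main

import (
	"fmt"
	"os"
)

func main() {
	// Hypothetical watched path; each closure reports the path when its
	// single condition trips, mirroring the map built by WatchData.
	checks := map[string]func() string{
		"/tmp/example.txt": func() string {
			if _, err := os.Stat("/tmp/example.txt"); err != nil {
				return "/tmp/example.txt" // the file went missing
			}
			return ""
		},
	}
	for _, check := range checks {
		if changed := check(); changed != "" {
			fmt.Println("dirty:", changed)
		}
	}
}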
9 vendor/github.com/evanw/esbuild/internal/fs/iswin_other.go generated vendored
@ -1,9 +0,0 @@

//go:build (!js || !wasm) && !windows
// +build !js !wasm
// +build !windows

package fs

func CheckIfWindows() bool {
	return false
}
25 vendor/github.com/evanw/esbuild/internal/fs/iswin_wasm.go generated vendored
@ -1,25 +0,0 @@

//go:build js && wasm
// +build js,wasm

package fs

import (
	"os"
)

var checkedIfWindows bool
var cachedIfWindows bool

func CheckIfWindows() bool {
	if !checkedIfWindows {
		checkedIfWindows = true

		// Hack: Assume that we're on Windows if we're running WebAssembly and
		// the "C:\\" directory exists. This is a workaround for a bug in Go's
		// WebAssembly support: https://github.com/golang/go/issues/43768.
		_, err := os.Stat("C:\\")
		cachedIfWindows = err == nil
	}

	return cachedIfWindows
}
8 vendor/github.com/evanw/esbuild/internal/fs/iswin_windows.go generated vendored
@ -1,8 +0,0 @@

//go:build windows
// +build windows

package fs

func CheckIfWindows() bool {
	return true
}
35 vendor/github.com/evanw/esbuild/internal/fs/modkey_other.go generated vendored
@ -1,35 +0,0 @@

//go:build !darwin && !freebsd && !linux
// +build !darwin,!freebsd,!linux

package fs

import (
	"os"
	"time"
)

var zeroTime time.Time

func modKey(path string) (ModKey, error) {
	info, err := os.Stat(path)
	if err != nil {
		return ModKey{}, err
	}

	// We can't detect changes if the file system zeros out the modification time
	mtime := info.ModTime()
	if mtime == zeroTime || mtime.Unix() == 0 {
		return ModKey{}, modKeyUnusable
	}

	// Don't generate a modification key if the file is too new
	if mtime.Add(modKeySafetyGap * time.Second).After(time.Now()) {
		return ModKey{}, modKeyUnusable
	}

	return ModKey{
		size:      info.Size(),
		mtime_sec: mtime.Unix(),
		mode:      uint32(info.Mode()),
	}, nil
}
41 vendor/github.com/evanw/esbuild/internal/fs/modkey_unix.go generated vendored
@ -1,41 +0,0 @@

//go:build darwin || freebsd || linux
// +build darwin freebsd linux

package fs

import (
	"time"

	"golang.org/x/sys/unix"
)

func modKey(path string) (ModKey, error) {
	stat := unix.Stat_t{}
	if err := unix.Stat(path, &stat); err != nil {
		return ModKey{}, err
	}

	// We can't detect changes if the file system zeros out the modification time
	if stat.Mtim.Sec == 0 && stat.Mtim.Nsec == 0 {
		return ModKey{}, modKeyUnusable
	}

	// Don't generate a modification key if the file is too new
	now, err := unix.TimeToTimespec(time.Now())
	if err != nil {
		return ModKey{}, err
	}
	mtimeSec := stat.Mtim.Sec + modKeySafetyGap
	if mtimeSec > now.Sec || (mtimeSec == now.Sec && stat.Mtim.Nsec > now.Nsec) {
		return ModKey{}, modKeyUnusable
	}

	return ModKey{
		inode:      stat.Ino,
		size:       stat.Size,
		mtime_sec:  int64(stat.Mtim.Sec),
		mtime_nsec: int64(stat.Mtim.Nsec),
		mode:       uint32(stat.Mode),
		uid:        stat.Uid,
	}, nil
}
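Aside: both modKey variants above refuse to produce a key when the file's mtime is within modKeySafetyGap of the current time, because a file modified twice inside the timestamp granularity would otherwise look unchanged. A standalone sketch of that rule; the 3-second gap is an assumed value for illustration, not necessarily the constant esbuild uses:

package main

import (
	"fmt"
	"os"
	"time"
)

const modKeySafetyGap = 3 // seconds; assumed value for illustration

func mtimeUsable(path string) (bool, error) {
	info, err := os.Stat(path)
	if err != nil {
		return false, err
	}
	// A very recent mtime may still be racing with an in-progress write,
	// so treat it as unusable and fall back to comparing file contents.
	if info.ModTime().Add(modKeySafetyGap * time.Second).After(time.Now()) {
		return false, nil
	}
	return true, nil
}

func main() {
	ok, err := mtimeUsable("go.mod")
	fmt.Println(ok, err)
}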
385 vendor/github.com/evanw/esbuild/internal/graph/graph.go generated vendored
@ -1,385 +0,0 @@

package graph

// This graph represents the set of files that the linker operates on. Each
// linker has a separate one of these graphs (there is one linker when code
// splitting is on, but one linker per entry point when code splitting is off).
//
// The input data to the linker constructor must be considered immutable because
// it's shared between linker invocations and is also stored in the cache for
// incremental builds.
//
// The linker constructor makes a shallow clone of the input data and is careful
// to pre-clone ahead of time the AST fields that it may modify. The Go language
// doesn't have any type system features for immutability so this has to be
// manually enforced. Please be careful.

import (
	"sort"
	"sync"

	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/runtime"
)

type entryPointKind uint8

const (
	entryPointNone entryPointKind = iota
	entryPointUserSpecified
	entryPointDynamicImport
)

type LinkerFile struct {
	InputFile InputFile

	// This holds all entry points that can reach this file. It will be used to
	// assign the parts in this file to a chunk.
	EntryBits helpers.BitSet

	// This is lazily-allocated because it's only needed if there are warnings
	// logged, which should be relatively rare.
	lazyLineColumnTracker *logger.LineColumnTracker

	// The minimum number of links in the module graph to get from an entry point
	// to this file
	DistanceFromEntryPoint uint32

	// If "entryPointKind" is not "entryPointNone", this is the index of the
	// corresponding entry point chunk.
	EntryPointChunkIndex uint32

	// This file is an entry point if and only if this is not "entryPointNone".
	// Note that dynamically-imported files are allowed to also be specified by
	// the user as top-level entry points, so some dynamically-imported files
	// may be "entryPointUserSpecified" instead of "entryPointDynamicImport".
	entryPointKind entryPointKind

	// This is true if this file has been marked as live by the tree shaking
	// algorithm.
	IsLive bool
}

func (f *LinkerFile) IsEntryPoint() bool {
	return f.entryPointKind != entryPointNone
}

func (f *LinkerFile) IsUserSpecifiedEntryPoint() bool {
	return f.entryPointKind == entryPointUserSpecified
}

// Note: This is not guarded by a mutex. Make sure this isn't called from a
// parallel part of the code.
func (f *LinkerFile) LineColumnTracker() *logger.LineColumnTracker {
	if f.lazyLineColumnTracker == nil {
		tracker := logger.MakeLineColumnTracker(&f.InputFile.Source)
		f.lazyLineColumnTracker = &tracker
	}
	return f.lazyLineColumnTracker
}

type EntryPoint struct {
	// This may be an absolute path or a relative path. If absolute, it will
	// eventually be turned into a relative path by computing the path relative
	// to the "outbase" directory. Then this relative path will be joined onto
	// the "outdir" directory to form the final output path for this entry point.
	OutputPath string

	// This is the source index of the entry point. This file must have a valid
	// entry point kind (i.e. not "none").
	SourceIndex uint32

	// Manually specified output paths are ignored when computing the default
	// "outbase" directory, which is computed as the lowest common ancestor of
	// all automatically generated output paths.
	OutputPathWasAutoGenerated bool
}

type LinkerGraph struct {
	Files       []LinkerFile
	entryPoints []EntryPoint
	Symbols     js_ast.SymbolMap

	// We should avoid traversing all files in the bundle, because the linker
	// should be able to run a linking operation on a large bundle where only
	// a few files are needed (e.g. an incremental compilation scenario). This
	// holds all files that could possibly be reached through the entry points.
	// If you need to iterate over all files in the linking operation, iterate
	// over this array. This array is also sorted in a deterministic ordering
	// to help ensure deterministic builds (source indices are random).
	ReachableFiles []uint32

	// This maps from unstable source index to stable reachable file index. This
	// is useful as a deterministic key for sorting if you need to sort something
	// containing a source index (such as "js_ast.Ref" symbol references).
	StableSourceIndices []uint32
}

func CloneLinkerGraph(
	inputFiles []InputFile,
	reachableFiles []uint32,
	originalEntryPoints []EntryPoint,
	codeSplitting bool,
) LinkerGraph {
	entryPoints := append([]EntryPoint{}, originalEntryPoints...)
	symbols := js_ast.NewSymbolMap(len(inputFiles))
	files := make([]LinkerFile, len(inputFiles))

	// Mark all entry points so we don't add them again for import() expressions
	for _, entryPoint := range entryPoints {
		files[entryPoint.SourceIndex].entryPointKind = entryPointUserSpecified
	}

	// Clone various things since we may mutate them later. Do this in parallel
	// for a speedup (around ~2x faster for this function in the three.js
	// benchmark on a 6-core laptop).
	var dynamicImportEntryPoints []uint32
	var dynamicImportEntryPointsMutex sync.Mutex
	waitGroup := sync.WaitGroup{}
	waitGroup.Add(len(reachableFiles))
	stableSourceIndices := make([]uint32, len(inputFiles))
	for stableIndex, sourceIndex := range reachableFiles {
		// Create a way to convert source indices to a stable ordering
		stableSourceIndices[sourceIndex] = uint32(stableIndex)

		go func(sourceIndex uint32) {
			file := &files[sourceIndex]
			file.InputFile = inputFiles[sourceIndex]

			switch repr := file.InputFile.Repr.(type) {
			case *JSRepr:
				// Clone the representation
				{
					clone := *repr
					repr = &clone
					file.InputFile.Repr = repr
				}

				// Clone the symbol map
				fileSymbols := append([]js_ast.Symbol{}, repr.AST.Symbols...)
				symbols.SymbolsForSource[sourceIndex] = fileSymbols
				repr.AST.Symbols = nil

				// Clone the parts
				repr.AST.Parts = append([]js_ast.Part{}, repr.AST.Parts...)
				for i := range repr.AST.Parts {
					part := &repr.AST.Parts[i]
					clone := make(map[js_ast.Ref]js_ast.SymbolUse, len(part.SymbolUses))
					for ref, uses := range part.SymbolUses {
						clone[ref] = uses
					}
					part.SymbolUses = clone
					part.Dependencies = append([]js_ast.Dependency{}, part.Dependencies...)
				}

				// Clone the import records
				repr.AST.ImportRecords = append([]ast.ImportRecord{}, repr.AST.ImportRecords...)

				// Add dynamic imports as additional entry points if code splitting is active
				if codeSplitting {
					for importRecordIndex := range repr.AST.ImportRecords {
						if record := &repr.AST.ImportRecords[importRecordIndex]; record.SourceIndex.IsValid() && record.Kind == ast.ImportDynamic {
							dynamicImportEntryPointsMutex.Lock()
							dynamicImportEntryPoints = append(dynamicImportEntryPoints, record.SourceIndex.GetIndex())
							dynamicImportEntryPointsMutex.Unlock()
						}
					}
				}

				// Clone the import map
				namedImports := make(map[js_ast.Ref]js_ast.NamedImport, len(repr.AST.NamedImports))
				for k, v := range repr.AST.NamedImports {
					namedImports[k] = v
				}
				repr.AST.NamedImports = namedImports

				// Clone the export map
				resolvedExports := make(map[string]ExportData)
				for alias, name := range repr.AST.NamedExports {
					resolvedExports[alias] = ExportData{
						Ref:         name.Ref,
						SourceIndex: sourceIndex,
						NameLoc:     name.AliasLoc,
					}
				}

				// Clone the top-level scope so we can generate more variables
				{
					new := &js_ast.Scope{}
					*new = *repr.AST.ModuleScope
					new.Generated = append([]js_ast.Ref{}, new.Generated...)
					repr.AST.ModuleScope = new
				}

				// Also associate some default metadata with the file
				repr.Meta.ResolvedExports = resolvedExports
				repr.Meta.IsProbablyTypeScriptType = make(map[js_ast.Ref]bool)
				repr.Meta.ImportsToBind = make(map[js_ast.Ref]ImportData)

			case *CSSRepr:
				// Clone the representation
				{
					clone := *repr
					repr = &clone
					file.InputFile.Repr = repr
				}

				// Clone the import records
				repr.AST.ImportRecords = append([]ast.ImportRecord{}, repr.AST.ImportRecords...)
			}

			// All files start off as far as possible from an entry point
			file.DistanceFromEntryPoint = ^uint32(0)
			waitGroup.Done()
		}(sourceIndex)
	}
	waitGroup.Wait()

	// Process dynamic entry points after merging control flow again
	stableEntryPoints := make([]int, 0, len(dynamicImportEntryPoints))
	for _, sourceIndex := range dynamicImportEntryPoints {
		if otherFile := &files[sourceIndex]; otherFile.entryPointKind == entryPointNone {
			stableEntryPoints = append(stableEntryPoints, int(stableSourceIndices[sourceIndex]))
			otherFile.entryPointKind = entryPointDynamicImport
		}
	}

	// Make sure to add dynamic entry points in a deterministic order
	sort.Ints(stableEntryPoints)
	for _, stableIndex := range stableEntryPoints {
		entryPoints = append(entryPoints, EntryPoint{SourceIndex: reachableFiles[stableIndex]})
	}

	// Allocate the entry bit set now that the number of entry points is known
	bitCount := uint(len(entryPoints))
	for _, sourceIndex := range reachableFiles {
		files[sourceIndex].EntryBits = helpers.NewBitSet(bitCount)
	}

	return LinkerGraph{
		Symbols:             symbols,
		entryPoints:         entryPoints,
		Files:               files,
		ReachableFiles:      reachableFiles,
		StableSourceIndices: stableSourceIndices,
	}
}

// Prevent packages that depend on us from adding or removing entry points
func (g *LinkerGraph) EntryPoints() []EntryPoint {
	return g.entryPoints
}

func (g *LinkerGraph) AddPartToFile(sourceIndex uint32, part js_ast.Part) uint32 {
	// Invariant: this map is never null
	if part.SymbolUses == nil {
		part.SymbolUses = make(map[js_ast.Ref]js_ast.SymbolUse)
	}

	repr := g.Files[sourceIndex].InputFile.Repr.(*JSRepr)
	partIndex := uint32(len(repr.AST.Parts))
	repr.AST.Parts = append(repr.AST.Parts, part)

	// Invariant: the parts for all top-level symbols can be found in the file-level map
	for _, declaredSymbol := range part.DeclaredSymbols {
		if declaredSymbol.IsTopLevel {
			// Check for an existing overlay
			partIndices, ok := repr.Meta.TopLevelSymbolToPartsOverlay[declaredSymbol.Ref]

			// If missing, initialize using the original values from the parser
			if !ok {
				partIndices = append(partIndices, repr.AST.TopLevelSymbolToPartsFromParser[declaredSymbol.Ref]...)
			}

			// Add this part to the overlay
			partIndices = append(partIndices, partIndex)
			if repr.Meta.TopLevelSymbolToPartsOverlay == nil {
				repr.Meta.TopLevelSymbolToPartsOverlay = make(map[js_ast.Ref][]uint32)
			}
			repr.Meta.TopLevelSymbolToPartsOverlay[declaredSymbol.Ref] = partIndices
		}
	}

	return partIndex
}

func (g *LinkerGraph) GenerateNewSymbol(sourceIndex uint32, kind js_ast.SymbolKind, originalName string) js_ast.Ref {
	sourceSymbols := &g.Symbols.SymbolsForSource[sourceIndex]

	ref := js_ast.Ref{
		SourceIndex: sourceIndex,
		InnerIndex:  uint32(len(*sourceSymbols)),
	}

	*sourceSymbols = append(*sourceSymbols, js_ast.Symbol{
		Kind:         kind,
		OriginalName: originalName,
		Link:         js_ast.InvalidRef,
	})

	generated := &g.Files[sourceIndex].InputFile.Repr.(*JSRepr).AST.ModuleScope.Generated
	*generated = append(*generated, ref)
	return ref
}

func (g *LinkerGraph) GenerateSymbolImportAndUse(
	sourceIndex uint32,
	partIndex uint32,
	ref js_ast.Ref,
	useCount uint32,
	sourceIndexToImportFrom uint32,
) {
	if useCount == 0 {
		return
	}

	repr := g.Files[sourceIndex].InputFile.Repr.(*JSRepr)
	part := &repr.AST.Parts[partIndex]

	// Mark this symbol as used by this part
	use := part.SymbolUses[ref]
	use.CountEstimate += useCount
	part.SymbolUses[ref] = use

	// Uphold invariants about the CommonJS "exports" and "module" symbols
	if ref == repr.AST.ExportsRef {
		repr.AST.UsesExportsRef = true
	}
	if ref == repr.AST.ModuleRef {
		repr.AST.UsesModuleRef = true
	}

	// Track that this specific symbol was imported
	if sourceIndexToImportFrom != sourceIndex {
		repr.Meta.ImportsToBind[ref] = ImportData{
			SourceIndex: sourceIndexToImportFrom,
			Ref:         ref,
		}
	}

	// Pull in all parts that declare this symbol
	targetRepr := g.Files[sourceIndexToImportFrom].InputFile.Repr.(*JSRepr)
	for _, partIndex := range targetRepr.TopLevelSymbolToParts(ref) {
		part.Dependencies = append(part.Dependencies, js_ast.Dependency{
			SourceIndex: sourceIndexToImportFrom,
			PartIndex:   partIndex,
		})
	}
}

func (g *LinkerGraph) GenerateRuntimeSymbolImportAndUse(
	sourceIndex uint32,
	partIndex uint32,
	name string,
	useCount uint32,
) {
	if useCount == 0 {
		return
	}

	runtimeRepr := g.Files[runtime.SourceIndex].InputFile.Repr.(*JSRepr)
	ref := runtimeRepr.AST.NamedExports[name].Ref
	g.GenerateSymbolImportAndUse(sourceIndex, partIndex, ref, useCount, runtime.SourceIndex)
}
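Aside: CloneLinkerGraph builds StableSourceIndices so that anything keyed by a nondeterministically assigned source index can still be ordered reproducibly. A small self-contained sketch of that mapping, with made-up indices:

package main

import (
	"fmt"
	"sort"
)

func main() {
	// Source indices are assigned in nondeterministic scan order...
	reachableFiles := []uint32{7, 2, 9} // ...but this array's order is deterministic.
	stable := make([]uint32, 10)
	for stableIndex, sourceIndex := range reachableFiles {
		stable[sourceIndex] = uint32(stableIndex)
	}

	// Sorting by stable index gives a reproducible order across builds.
	refs := []uint32{9, 7, 2}
	sort.Slice(refs, func(i, j int) bool { return stable[refs[i]] < stable[refs[j]] })
	fmt.Println(refs) // [7 2 9]
}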
117 vendor/github.com/evanw/esbuild/internal/graph/input.go generated vendored
@ -1,117 +0,0 @@

package graph

// The code in this file mainly represents data that passes from the scan phase
// to the compile phase of the bundler. There is currently one exception: the
// "meta" member of the JavaScript file representation. That could have been
// stored separately but is stored together for convenience and to avoid an
// extra level of indirection. Instead it's kept in a separate type to keep
// things organized.

import (
	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/config"
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/resolver"
	"github.com/evanw/esbuild/internal/sourcemap"
)

type InputFile struct {
	Source         logger.Source
	Repr           InputFileRepr
	InputSourceMap *sourcemap.SourceMap

	// If this file ends up being used in the bundle, these are additional files
	// that must be written to the output directory. It's used by the "file"
	// loader.
	AdditionalFiles        []OutputFile
	UniqueKeyForFileLoader string

	SideEffects SideEffects
	Loader      config.Loader
}

type OutputFile struct {
	AbsPath  string
	Contents []byte

	// If "AbsMetadataFile" is present, this will be filled out with information
	// about this file in JSON format. This is a partial JSON file that will be
	// fully assembled later.
	JSONMetadataChunk string

	IsExecutable bool
}

type SideEffects struct {
	// This is optional additional information for use in error messages
	Data *resolver.SideEffectsData

	Kind SideEffectsKind
}

type SideEffectsKind uint8

const (
	// The default value conservatively considers all files to have side effects.
	HasSideEffects SideEffectsKind = iota

	// This file was listed as not having side effects by a "package.json"
	// file in one of our containing directories with a "sideEffects" field.
	NoSideEffects_PackageJSON

	// This file is considered to have no side effects because the AST was empty
	// after parsing finished. This should be the case for ".d.ts" files.
	NoSideEffects_EmptyAST

	// This file was loaded using a data-oriented loader (e.g. "text") that is
	// known to not have side effects.
	NoSideEffects_PureData

	// Same as above but it came from a plugin. We don't want to warn about
	// unused imports to these files since running the plugin is a side effect.
	// Removing the import would not call the plugin which is observable.
	NoSideEffects_PureData_FromPlugin
)

type InputFileRepr interface {
	ImportRecords() *[]ast.ImportRecord
}

type JSRepr struct {
	AST  js_ast.AST
	Meta JSReprMeta

	// If present, this is the CSS file that this JavaScript stub corresponds to.
	// A JavaScript stub is automatically generated for a CSS file when it's
	// imported from a JavaScript file.
	CSSSourceIndex ast.Index32
}

func (repr *JSRepr) ImportRecords() *[]ast.ImportRecord {
	return &repr.AST.ImportRecords
}

func (repr *JSRepr) TopLevelSymbolToParts(ref js_ast.Ref) []uint32 {
	// Overlay the mutable map from the linker
	if parts, ok := repr.Meta.TopLevelSymbolToPartsOverlay[ref]; ok {
		return parts
	}

	// Fall back to the immutable map from the parser
	return repr.AST.TopLevelSymbolToPartsFromParser[ref]
}

type CSSRepr struct {
	AST css_ast.AST

	// If present, this is the JavaScript stub corresponding to this CSS file.
	// A JavaScript stub is automatically generated for a CSS file when it's
	// imported from a JavaScript file.
	JSSourceIndex ast.Index32
}

func (repr *CSSRepr) ImportRecords() *[]ast.ImportRecord {
	return &repr.AST.ImportRecords
}
203 vendor/github.com/evanw/esbuild/internal/graph/meta.go generated vendored
@ -1,203 +0,0 @@

package graph

// The code in this file represents data that is required by the compile phase
// of the bundler but that is not required by the scan phase.

import (
	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/logger"
)

type WrapKind uint8

const (
	WrapNone WrapKind = iota

	// The module will be bundled CommonJS-style like this:
	//
	//   // foo.ts
	//   let require_foo = __commonJS((exports, module) => {
	//     exports.foo = 123;
	//   });
	//
	//   // bar.ts
	//   let foo = flag ? require_foo() : null;
	//
	WrapCJS

	// The module will be bundled ESM-style like this:
	//
	//   // foo.ts
	//   var foo, foo_exports = {};
	//   __export(foo_exports, {
	//     foo: () => foo
	//   });
	//   let init_foo = __esm(() => {
	//     foo = 123;
	//   });
	//
	//   // bar.ts
	//   let foo = flag ? (init_foo(), __toCommonJS(foo_exports)) : null;
	//
	WrapESM
)

// This contains linker-specific metadata corresponding to a "file" struct
// from the initial scan phase of the bundler. It's separated out because it's
// conceptually only used for a single linking operation and because multiple
// linking operations may be happening in parallel with different metadata for
// the same file.
type JSReprMeta struct {
	// This is only for TypeScript files. If an import symbol is in this map, it
	// means the import couldn't be found and doesn't actually exist. This is not
	// an error in TypeScript because the import is probably just a type.
	//
	// Normally we remove all unused imports for TypeScript files during parsing,
	// which automatically removes type-only imports. But there are certain re-
	// export situations where it's impossible to tell if an import is a type or
	// not:
	//
	//   import {typeOrNotTypeWhoKnows} from 'path';
	//   export {typeOrNotTypeWhoKnows};
	//
	// Really people should be using the TypeScript "isolatedModules" flag with
	// bundlers like this one that compile TypeScript files independently without
	// type checking. That causes the TypeScript type checker to emit the error
	// "Re-exporting a type when the '--isolatedModules' flag is provided requires
	// using 'export type'." But we try to be robust to such code anyway.
	IsProbablyTypeScriptType map[js_ast.Ref]bool

	// Imports are matched with exports in a separate pass from when the matched
	// exports are actually bound to the imports. Here "binding" means adding non-
	// local dependencies on the parts in the exporting file that declare the
	// exported symbol to all parts in the importing file that use the imported
	// symbol.
	//
	// This must be a separate pass because of the "probably TypeScript type"
	// check above. We can't generate the part for the export namespace until
	// we've matched imports with exports because the generated code must omit
	// type-only imports in the export namespace code. And we can't bind exports
	// to imports until the part for the export namespace is generated since that
	// part needs to participate in the binding.
	//
	// This array holds the deferred imports to bind so the pass can be split
	// into two separate passes.
	ImportsToBind map[js_ast.Ref]ImportData

	// This includes both named exports and re-exports.
	//
	// Named exports come from explicit export statements in the original file,
	// and are copied from the "NamedExports" field in the AST.
	//
	// Re-exports come from other files and are the result of resolving export
	// star statements (i.e. "export * from 'foo'").
	ResolvedExports    map[string]ExportData
	ResolvedExportStar *ExportData

	// Never iterate over "resolvedExports" directly. Instead, iterate over this
	// array. Some exports in that map aren't meant to end up in generated code.
	// This array excludes these exports and is also sorted, which avoids non-
	// determinism due to random map iteration order.
	SortedAndFilteredExportAliases []string

	// If this is an entry point, this array holds a reference to one free
	// temporary symbol for each entry in "sortedAndFilteredExportAliases".
	// These may be needed to store copies of CommonJS re-exports in ESM.
	CJSExportCopies []js_ast.Ref

	// This is merged on top of the corresponding map from the parser in the AST.
	// You should call "TopLevelSymbolToParts" to access this instead of accessing
	// it directly.
	TopLevelSymbolToPartsOverlay map[js_ast.Ref][]uint32

	// The index of the automatically-generated part used to represent the
	// CommonJS or ESM wrapper. This part is empty and is only useful for tree
	// shaking and code splitting. The wrapper can't be inserted into the part
	// because the wrapper contains other parts, which can't be represented by
	// the current part system. Only wrapped files have one of these.
	WrapperPartIndex ast.Index32

	// The index of the automatically-generated part used to handle entry point
	// specific stuff. If a certain part is needed by the entry point, it's added
	// as a dependency of this part. This is important for parts that are marked
	// as removable when unused and that are not used by anything else. Only
	// entry point files have one of these.
	EntryPointPartIndex ast.Index32

	// This is true if this file is affected by top-level await, either by having
	// a top-level await inside this file or by having an import/export statement
	// that transitively imports such a file. It is forbidden to call "require()"
	// on these files since they are evaluated asynchronously.
	IsAsyncOrHasAsyncDependency bool

	Wrap WrapKind

	// If true, we need to insert "var exports = {};". This is the case for ESM
	// files when the import namespace is captured via "import * as" and also
	// when they are the target of a "require()" call.
	NeedsExportsVariable bool

	// If true, the "__export(exports, { ... })" call will be force-included even
	// if there are no parts that reference "exports". Otherwise this call will
	// be removed due to the tree shaking pass. This is used for entry point
	// files when code related to the current output format needs to reference
	// the "exports" variable.
	ForceIncludeExportsForEntryPoint bool

	// This is set when we need to pull the "__export" symbol into the part
	// at "nsExportPartIndex". This can't be done in "createExportsForFile"
	// because of concurrent map hazards. Instead, it must be done later.
	NeedsExportSymbolFromRuntime bool

	// Wrapped files must also ensure that their dependencies are wrapped. This
	// flag is used during the traversal that enforces this invariant, and is used
	// to detect when the fixed point has been reached.
	DidWrapDependencies bool
}

type ImportData struct {
	// This is an array of intermediate statements that re-exported this symbol
	// in a chain before getting to the final symbol. This can be done either with
	// "export * from" or "export {} from". If this is done with "export * from"
	// then this may not be the result of a single chain but may instead form
	// a diamond shape if this same symbol was re-exported multiple times from
	// different files.
	ReExports []js_ast.Dependency

	NameLoc     logger.Loc // Optional, goes with sourceIndex, ignore if zero
	Ref         js_ast.Ref
	SourceIndex uint32
}

type ExportData struct {
	Ref js_ast.Ref

	// Export star resolution happens first before import resolution. That means
	// it cannot yet determine if duplicate names from export star resolution are
	// ambiguous (point to different symbols) or not (point to the same symbol).
	// This issue can happen in the following scenario:
	//
	//   // entry.js
	//   export * from './a'
	//   export * from './b'
	//
	//   // a.js
	//   export * from './c'
	//
	//   // b.js
	//   export {x} from './c'
	//
	//   // c.js
	//   export let x = 1, y = 2
	//
	// In this case "entry.js" should have two exports "x" and "y", neither of
	// which are ambiguous. To handle this case, ambiguity resolution must be
	// deferred until import resolution time. That is done using this array.
	PotentiallyAmbiguousExportStarRefs []ImportData

	// This is the file that the named export above came from. This will be
	// different from the file that contains this object if this is a re-export.
	NameLoc     logger.Loc // Optional, goes with sourceIndex, ignore if zero
	SourceIndex uint32
}
27 vendor/github.com/evanw/esbuild/internal/helpers/bitset.go generated vendored
@ -1,27 +0,0 @@

package helpers

import "bytes"

type BitSet struct {
	entries []byte
}

func NewBitSet(bitCount uint) BitSet {
	return BitSet{make([]byte, (bitCount+7)/8)}
}

func (bs BitSet) HasBit(bit uint) bool {
	return (bs.entries[bit/8] & (1 << (bit & 7))) != 0
}

func (bs BitSet) SetBit(bit uint) {
	bs.entries[bit/8] |= 1 << (bit & 7)
}

func (bs BitSet) Equals(other BitSet) bool {
	return bytes.Equal(bs.entries, other.entries)
}

func (bs BitSet) String() string {
	return string(bs.entries)
}
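A usage sketch for the BitSet above: the linker gives every file one bit per entry point, and files whose bit sets compare Equals can share a chunk. This assumes it runs inside the helpers package with fmt imported:

func ExampleBitSet() {
	a := NewBitSet(3) // three entry points → three bits per file
	b := NewBitSet(3)
	a.SetBit(0)
	a.SetBit(2)
	b.SetBit(0)
	b.SetBit(2)
	fmt.Println(a.HasBit(1), a.Equals(b))
	// Output: false true
}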
89 vendor/github.com/evanw/esbuild/internal/helpers/comment.go generated vendored
@ -1,89 +0,0 @@

package helpers

import (
	"strings"
	"unicode/utf8"
)

func RemoveMultiLineCommentIndent(prefix string, text string) string {
	// Figure out the initial indent
	indent := 0
seekBackwardToNewline:
	for len(prefix) > 0 {
		c, size := utf8.DecodeLastRuneInString(prefix)
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			break seekBackwardToNewline
		}
		prefix = prefix[:len(prefix)-size]
		indent++
	}

	// Split the comment into lines
	var lines []string
	start := 0
	for i, c := range text {
		switch c {
		case '\r', '\n':
			// Don't double-append for Windows style "\r\n" newlines
			if start <= i {
				lines = append(lines, text[start:i])
			}

			start = i + 1

			// Ignore the second part of Windows style "\r\n" newlines
			if c == '\r' && start < len(text) && text[start] == '\n' {
				start++
			}

		case '\u2028', '\u2029':
			lines = append(lines, text[start:i])
			start = i + 3
		}
	}
	lines = append(lines, text[start:])

	// Find the minimum indent over all lines after the first line
	for _, line := range lines[1:] {
		lineIndent := 0
		for _, c := range line {
			if c != ' ' && c != '\t' {
				break
			}
			lineIndent++
		}
		if indent > lineIndent {
			indent = lineIndent
		}
	}

	// Trim the indent off of all lines after the first line
	for i, line := range lines {
		if i > 0 {
			lines[i] = line[indent:]
		}
	}
	return strings.Join(lines, "\n")
}

func EscapeClosingTag(text string, slashTag string) string {
	i := strings.Index(text, "</")
	if i < 0 {
		return text
	}
	var b strings.Builder
	for {
		b.WriteString(text[:i+1])
		text = text[i+1:]
		if len(text) >= len(slashTag) && strings.EqualFold(text[:len(slashTag)], slashTag) {
			b.WriteByte('\\')
		}
		i = strings.Index(text, "</")
		if i < 0 {
			break
		}
	}
	b.WriteString(text)
	return b.String()
}
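Concretely, RemoveMultiLineCommentIndent strips the indentation implied by the text preceding the comment from every continuation line, so a comment printed at column zero keeps its shape. An example-style sketch with made-up inputs, assuming it runs in the helpers package with fmt imported:

func ExampleRemoveMultiLineCommentIndent() {
	// The comment originally appeared two columns in, so each continuation
	// line carries at least that much leading indentation.
	prefix := "  " // the text on the line before the comment body
	text := "/* first\n   second\n   third */"
	fmt.Printf("%q\n", RemoveMultiLineCommentIndent(prefix, text))
	// Output: "/* first\n second\n third */"
}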
14 vendor/github.com/evanw/esbuild/internal/helpers/hash.go generated vendored
@ -1,14 +0,0 @@

package helpers

// From: http://boost.sourceforge.net/doc/html/boost/hash_combine.html
func HashCombine(seed uint32, hash uint32) uint32 {
	return seed ^ (hash + 0x9e3779b9 + (seed << 6) + (seed >> 2))
}

func HashCombineString(seed uint32, text string) uint32 {
	seed = HashCombine(seed, uint32(len(text)))
	for _, c := range text {
		seed = HashCombine(seed, uint32(c))
	}
	return seed
}
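A usage sketch: the boost-style combiner folds each piece into a running seed, so a sequence of strings collapses to one 32-bit value. Assumes the helpers package with fmt imported; the printed value is deterministic but not reproduced here:

func ExampleHashCombineString() {
	seed := uint32(0)
	for _, part := range []string{"entry", ".js"} {
		seed = HashCombineString(seed, part)
	}
	fmt.Printf("%08x\n", seed) // one stable 32-bit hash for the whole sequence
}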
86 vendor/github.com/evanw/esbuild/internal/helpers/joiner.go generated vendored
@ -1,86 +0,0 @@

package helpers

import (
	"bytes"
	"strings"
)

// This provides an efficient way to join lots of big string and byte slices
// together. It avoids the cost of repeatedly reallocating as the buffer grows
// by measuring exactly how big the buffer should be and then allocating once.
// This is a measurable speedup.
type Joiner struct {
	lastByte byte
	strings  []joinerString
	bytes    []joinerBytes
	length   uint32
}

type joinerString struct {
	data   string
	offset uint32
}

type joinerBytes struct {
	data   []byte
	offset uint32
}

func (j *Joiner) AddString(data string) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.strings = append(j.strings, joinerString{data, j.length})
	j.length += uint32(len(data))
}

func (j *Joiner) AddBytes(data []byte) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.bytes = append(j.bytes, joinerBytes{data, j.length})
	j.length += uint32(len(data))
}

func (j *Joiner) LastByte() byte {
	return j.lastByte
}

func (j *Joiner) Length() uint32 {
	return j.length
}

func (j *Joiner) EnsureNewlineAtEnd() {
	if j.length > 0 && j.lastByte != '\n' {
		j.AddString("\n")
	}
}

func (j *Joiner) Done() []byte {
	if len(j.strings) == 0 && len(j.bytes) == 1 && j.bytes[0].offset == 0 {
		// No need to allocate if there was only a single byte array written
		return j.bytes[0].data
	}
	buffer := make([]byte, j.length)
	for _, item := range j.strings {
		copy(buffer[item.offset:], item.data)
	}
	for _, item := range j.bytes {
		copy(buffer[item.offset:], item.data)
	}
	return buffer
}

func (j *Joiner) Contains(s string, b []byte) bool {
	for _, item := range j.strings {
		if strings.Contains(item.data, s) {
			return true
		}
	}
	for _, item := range j.bytes {
		if bytes.Contains(item.data, b) {
			return true
		}
	}
	return false
}
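A usage sketch for Joiner, assuming the helpers package with fmt imported: every piece is recorded along with its offset, and Done copies everything once into an exact-size buffer:

func ExampleJoiner() {
	var j Joiner
	j.AddString("console.log(")
	j.AddBytes([]byte(`"hi"`))
	j.AddString(")")
	j.EnsureNewlineAtEnd()
	fmt.Printf("%s", j.Done()) // all pieces copied into one exact-size buffer
	// Output: console.log("hi")
}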
31 vendor/github.com/evanw/esbuild/internal/helpers/mime.go generated vendored
@ -1,31 +0,0 @@

package helpers

import "strings"

var builtinTypesLower = map[string]string{
	".css":  "text/css; charset=utf-8",
	".gif":  "image/gif",
	".htm":  "text/html; charset=utf-8",
	".html": "text/html; charset=utf-8",
	".jpeg": "image/jpeg",
	".jpg":  "image/jpeg",
	".js":   "text/javascript; charset=utf-8",
	".json": "application/json",
	".mjs":  "text/javascript; charset=utf-8",
	".pdf":  "application/pdf",
	".png":  "image/png",
	".svg":  "image/svg+xml",
	".wasm": "application/wasm",
	".webp": "image/webp",
	".xml":  "text/xml; charset=utf-8",
}

// This is used instead of Go's built-in "mime.TypeByExtension" function because
// that function is broken on Windows: https://github.com/golang/go/issues/32350.
func MimeTypeByExtension(ext string) string {
	contentType := builtinTypesLower[ext]
	if contentType == "" {
		contentType = builtinTypesLower[strings.ToLower(ext)]
	}
	return contentType
}
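A usage sketch, assuming the helpers package with fmt imported; note how the ToLower fallback handles upper-case extensions:

func ExampleMimeTypeByExtension() {
	fmt.Println(MimeTypeByExtension(".js"))
	fmt.Println(MimeTypeByExtension(".PNG")) // falls back via strings.ToLower
	// Output:
	// text/javascript; charset=utf-8
	// image/png
}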
22 vendor/github.com/evanw/esbuild/internal/helpers/path.go generated vendored
@ -1,22 +0,0 @@

package helpers

import "strings"

func IsInsideNodeModules(path string) bool {
	for {
		// This is written in a platform-independent manner because it's run on
		// user-specified paths which can be arbitrary non-file-system things. So
		// for example Windows paths may end up being used on Unix or URLs may end
		// up being used on Windows. Be consistently agnostic to which kind of
		// slash is used on all platforms.
		slash := strings.LastIndexAny(path, "/\\")
		if slash == -1 {
			return false
		}
		dir, base := path[:slash], path[slash+1:]
		if base == "node_modules" {
			return true
		}
		path = dir
	}
}
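A usage sketch, assuming the helpers package with fmt imported; the function is deliberately agnostic about slash style, as the comment above explains:

func ExampleIsInsideNodeModules() {
	fmt.Println(IsInsideNodeModules("/app/node_modules/react/index.js"))
	fmt.Println(IsInsideNodeModules(`C:\app\src\index.js`)) // Windows separators work too
	// Output:
	// true
	// false
}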
50 vendor/github.com/evanw/esbuild/internal/helpers/stack.go generated vendored
@ -1,50 +0,0 @@

package helpers

import (
	"runtime/debug"
	"strings"
)

func PrettyPrintedStack() string {
	lines := strings.Split(strings.TrimSpace(string(debug.Stack())), "\n")

	// Strip the first "goroutine" line
	if len(lines) > 0 {
		if first := lines[0]; strings.HasPrefix(first, "goroutine ") && strings.HasSuffix(first, ":") {
			lines = lines[1:]
		}
	}

	sb := strings.Builder{}

	for _, line := range lines {
		// Indented lines are source locations
		if strings.HasPrefix(line, "\t") {
			line = line[1:]
			line = strings.TrimPrefix(line, "github.com/evanw/esbuild/")
			if offset := strings.LastIndex(line, " +0x"); offset != -1 {
				line = line[:offset]
			}
			sb.WriteString(" (")
			sb.WriteString(line)
			sb.WriteString(")")
			continue
		}

		// Other lines are function calls
		if sb.Len() > 0 {
			sb.WriteByte('\n')
		}
		if strings.HasSuffix(line, ")") {
			if paren := strings.LastIndexByte(line, '('); paren != -1 {
				line = line[:paren]
			}
		}
		if slash := strings.LastIndexByte(line, '/'); slash != -1 {
			line = line[slash+1:]
		}
		sb.WriteString(line)
	}

	return sb.String()
}
94 vendor/github.com/evanw/esbuild/internal/helpers/timer.go generated vendored
@ -1,94 +0,0 @@

package helpers

import (
	"fmt"
	"strings"
	"sync"
	"time"

	"github.com/evanw/esbuild/internal/logger"
)

type Timer struct {
	mutex sync.Mutex
	data  []timerData
}

type timerData struct {
	name  string
	time  time.Time
	isEnd bool
}

func (t *Timer) Begin(name string) {
	if t != nil {
		t.data = append(t.data, timerData{
			name: name,
			time: time.Now(),
		})
	}
}

func (t *Timer) End(name string) {
	if t != nil {
		t.data = append(t.data, timerData{
			name:  name,
			time:  time.Now(),
			isEnd: true,
		})
	}
}

func (t *Timer) Fork() *Timer {
	if t != nil {
		return &Timer{}
	}
	return nil
}

func (t *Timer) Join(other *Timer) {
	if t != nil && other != nil {
		t.mutex.Lock()
		defer t.mutex.Unlock()
		t.data = append(t.data, other.data...)
	}
}

func (t *Timer) Log(log logger.Log) {
	if t == nil {
		return
	}

	type pair struct {
		timerData
		index uint32
	}

	var notes []logger.MsgData
	var stack []pair
	indent := 0

	for _, item := range t.data {
		if !item.isEnd {
			top := pair{timerData: item, index: uint32(len(notes))}
			notes = append(notes, logger.MsgData{})
			stack = append(stack, top)
			indent++
		} else {
			indent--
			last := len(stack) - 1
			top := stack[last]
			stack = stack[:last]
			if item.name != top.name {
				panic("Internal error")
			}
			notes[top.index].Text = fmt.Sprintf("%s%s: %dms",
				strings.Repeat(" ", indent),
				top.name,
				item.time.Sub(top.time).Milliseconds())
		}
	}

	log.AddWithNotes(logger.Info, nil, logger.Range{},
		"Timing information (times may not nest hierarchically due to parallelism)", notes)
}
2304 vendor/github.com/evanw/esbuild/internal/js_ast/js_ast.go generated vendored
File diff suppressed because it is too large
2961 vendor/github.com/evanw/esbuild/internal/js_lexer/js_lexer.go generated vendored
File diff suppressed because it is too large
382 vendor/github.com/evanw/esbuild/internal/js_lexer/tables.go generated vendored
@ -1,382 +0,0 @@

package js_lexer

var tokenToString = map[T]string{
	TEndOfFile:   "end of file",
	TSyntaxError: "syntax error",
	THashbang:    "hashbang comment",

	// Literals
	TNoSubstitutionTemplateLiteral: "template literal",
	TNumericLiteral:                "number",
	TStringLiteral:                 "string",
	TBigIntegerLiteral:             "bigint",

	// Pseudo-literals
	TTemplateHead:   "template literal",
	TTemplateMiddle: "template literal",
	TTemplateTail:   "template literal",

	// Punctuation
	TAmpersand:                         "\"&\"",
	TAmpersandAmpersand:                "\"&&\"",
	TAsterisk:                          "\"*\"",
	TAsteriskAsterisk:                  "\"**\"",
	TAt:                                "\"@\"",
	TBar:                               "\"|\"",
	TBarBar:                            "\"||\"",
	TCaret:                             "\"^\"",
	TCloseBrace:                        "\"}\"",
	TCloseBracket:                      "\"]\"",
	TCloseParen:                        "\")\"",
	TColon:                             "\":\"",
	TComma:                             "\",\"",
	TDot:                               "\".\"",
	TDotDotDot:                         "\"...\"",
	TEqualsEquals:                      "\"==\"",
	TEqualsEqualsEquals:                "\"===\"",
	TEqualsGreaterThan:                 "\"=>\"",
	TExclamation:                       "\"!\"",
	TExclamationEquals:                 "\"!=\"",
	TExclamationEqualsEquals:           "\"!==\"",
	TGreaterThan:                       "\">\"",
	TGreaterThanEquals:                 "\">=\"",
	TGreaterThanGreaterThan:            "\">>\"",
	TGreaterThanGreaterThanGreaterThan: "\">>>\"",
	TLessThan:                          "\"<\"",
	TLessThanEquals:                    "\"<=\"",
	TLessThanLessThan:                  "\"<<\"",
	TMinus:                             "\"-\"",
	TMinusMinus:                        "\"--\"",
	TOpenBrace:                         "\"{\"",
	TOpenBracket:                       "\"[\"",
	TOpenParen:                         "\"(\"",
	TPercent:                           "\"%\"",
	TPlus:                              "\"+\"",
	TPlusPlus:                          "\"++\"",
	TQuestion:                          "\"?\"",
	TQuestionDot:                       "\"?.\"",
	TQuestionQuestion:                  "\"??\"",
	TSemicolon:                         "\";\"",
	TSlash:                             "\"/\"",
	TTilde:                             "\"~\"",

	// Assignments
	TAmpersandAmpersandEquals:                "\"&&=\"",
	TAmpersandEquals:                         "\"&=\"",
	TAsteriskAsteriskEquals:                  "\"**=\"",
	TAsteriskEquals:                          "\"*=\"",
	TBarBarEquals:                            "\"||=\"",
	TBarEquals:                               "\"|=\"",
	TCaretEquals:                             "\"^=\"",
	TEquals:                                  "\"=\"",
	TGreaterThanGreaterThanEquals:            "\">>=\"",
	TGreaterThanGreaterThanGreaterThanEquals: "\">>>=\"",
	TLessThanLessThanEquals:                  "\"<<=\"",
	TMinusEquals:                             "\"-=\"",
	TPercentEquals:                           "\"%=\"",
	TPlusEquals:                              "\"+=\"",
	TQuestionQuestionEquals:                  "\"??=\"",
	TSlashEquals:                             "\"/=\"",

	// Class-private fields and methods
	TPrivateIdentifier: "private identifier",

	// Identifiers
	TIdentifier:     "identifier",
	TEscapedKeyword: "escaped keyword",

	// Reserved words
	TBreak:      "\"break\"",
	TCase:       "\"case\"",
	TCatch:      "\"catch\"",
	TClass:      "\"class\"",
	TConst:      "\"const\"",
	TContinue:   "\"continue\"",
	TDebugger:   "\"debugger\"",
	TDefault:    "\"default\"",
	TDelete:     "\"delete\"",
	TDo:         "\"do\"",
	TElse:       "\"else\"",
	TEnum:       "\"enum\"",
	TExport:     "\"export\"",
	TExtends:    "\"extends\"",
	TFalse:      "\"false\"",
	TFinally:    "\"finally\"",
	TFor:        "\"for\"",
	TFunction:   "\"function\"",
	TIf:         "\"if\"",
	TImport:     "\"import\"",
	TIn:         "\"in\"",
	TInstanceof: "\"instanceof\"",
	TNew:        "\"new\"",
	TNull:       "\"null\"",
	TReturn:     "\"return\"",
	TSuper:      "\"super\"",
	TSwitch:     "\"switch\"",
	TThis:       "\"this\"",
	TThrow:      "\"throw\"",
	TTrue:       "\"true\"",
	TTry:        "\"try\"",
	TTypeof:     "\"typeof\"",
	TVar:        "\"var\"",
	TVoid:       "\"void\"",
	TWhile:      "\"while\"",
	TWith:       "\"with\"",
}

// This is from https://github.com/microsoft/TypeScript/blob/master/src/compiler/transformers/jsx.ts
var jsxEntity = map[string]rune{
	"quot":     0x0022,
	"amp":      0x0026,
	"apos":     0x0027,
	"lt":       0x003C,
	"gt":       0x003E,
	"nbsp":     0x00A0,
	"iexcl":    0x00A1,
	"cent":     0x00A2,
	"pound":    0x00A3,
	"curren":   0x00A4,
	"yen":      0x00A5,
	"brvbar":   0x00A6,
	"sect":     0x00A7,
	"uml":      0x00A8,
	"copy":     0x00A9,
	"ordf":     0x00AA,
	"laquo":    0x00AB,
	"not":      0x00AC,
	"shy":      0x00AD,
	"reg":      0x00AE,
	"macr":     0x00AF,
	"deg":      0x00B0,
	"plusmn":   0x00B1,
	"sup2":     0x00B2,
	"sup3":     0x00B3,
	"acute":    0x00B4,
	"micro":    0x00B5,
	"para":     0x00B6,
	"middot":   0x00B7,
	"cedil":    0x00B8,
	"sup1":     0x00B9,
	"ordm":     0x00BA,
	"raquo":    0x00BB,
	"frac14":   0x00BC,
	"frac12":   0x00BD,
	"frac34":   0x00BE,
	"iquest":   0x00BF,
	"Agrave":   0x00C0,
	"Aacute":   0x00C1,
	"Acirc":    0x00C2,
	"Atilde":   0x00C3,
	"Auml":     0x00C4,
	"Aring":    0x00C5,
	"AElig":    0x00C6,
	"Ccedil":   0x00C7,
	"Egrave":   0x00C8,
	"Eacute":   0x00C9,
	"Ecirc":    0x00CA,
	"Euml":     0x00CB,
	"Igrave":   0x00CC,
	"Iacute":   0x00CD,
	"Icirc":    0x00CE,
	"Iuml":     0x00CF,
	"ETH":      0x00D0,
	"Ntilde":   0x00D1,
	"Ograve":   0x00D2,
	"Oacute":   0x00D3,
	"Ocirc":    0x00D4,
	"Otilde":   0x00D5,
	"Ouml":     0x00D6,
	"times":    0x00D7,
	"Oslash":   0x00D8,
	"Ugrave":   0x00D9,
	"Uacute":   0x00DA,
	"Ucirc":    0x00DB,
	"Uuml":     0x00DC,
	"Yacute":   0x00DD,
	"THORN":    0x00DE,
	"szlig":    0x00DF,
	"agrave":   0x00E0,
	"aacute":   0x00E1,
	"acirc":    0x00E2,
	"atilde":   0x00E3,
	"auml":     0x00E4,
	"aring":    0x00E5,
	"aelig":    0x00E6,
	"ccedil":   0x00E7,
	"egrave":   0x00E8,
	"eacute":   0x00E9,
	"ecirc":    0x00EA,
	"euml":     0x00EB,
	"igrave":   0x00EC,
	"iacute":   0x00ED,
	"icirc":    0x00EE,
	"iuml":     0x00EF,
	"eth":      0x00F0,
	"ntilde":   0x00F1,
	"ograve":   0x00F2,
	"oacute":   0x00F3,
	"ocirc":    0x00F4,
	"otilde":   0x00F5,
	"ouml":     0x00F6,
	"divide":   0x00F7,
	"oslash":   0x00F8,
	"ugrave":   0x00F9,
	"uacute":   0x00FA,
	"ucirc":    0x00FB,
	"uuml":     0x00FC,
	"yacute":   0x00FD,
	"thorn":    0x00FE,
	"yuml":     0x00FF,
	"OElig":    0x0152,
	"oelig":    0x0153,
	"Scaron":   0x0160,
	"scaron":   0x0161,
	"Yuml":     0x0178,
	"fnof":     0x0192,
	"circ":     0x02C6,
	"tilde":    0x02DC,
	"Alpha":    0x0391,
	"Beta":     0x0392,
	"Gamma":    0x0393,
	"Delta":    0x0394,
	"Epsilon":  0x0395,
	"Zeta":     0x0396,
	"Eta":      0x0397,
	"Theta":    0x0398,
	"Iota":     0x0399,
	"Kappa":    0x039A,
	"Lambda":   0x039B,
	"Mu":       0x039C,
	"Nu":       0x039D,
	"Xi":       0x039E,
	"Omicron":  0x039F,
	"Pi":       0x03A0,
	"Rho":      0x03A1,
	"Sigma":    0x03A3,
	"Tau":      0x03A4,
	"Upsilon":  0x03A5,
	"Phi":      0x03A6,
	"Chi":      0x03A7,
	"Psi":      0x03A8,
	"Omega":    0x03A9,
	"alpha":    0x03B1,
	"beta":     0x03B2,
	"gamma":    0x03B3,
	"delta":    0x03B4,
	"epsilon":  0x03B5,
	"zeta":     0x03B6,
	"eta":      0x03B7,
	"theta":    0x03B8,
	"iota":     0x03B9,
	"kappa":    0x03BA,
	"lambda":   0x03BB,
	"mu":       0x03BC,
	"nu":       0x03BD,
	"xi":       0x03BE,
	"omicron":  0x03BF,
	"pi":       0x03C0,
	"rho":      0x03C1,
	"sigmaf":   0x03C2,
	"sigma":    0x03C3,
	"tau":      0x03C4,
	"upsilon":  0x03C5,
	"phi":      0x03C6,
	"chi":      0x03C7,
	"psi":      0x03C8,
	"omega":    0x03C9,
	"thetasym": 0x03D1,
	"upsih":    0x03D2,
	"piv":      0x03D6,
	"ensp":     0x2002,
	"emsp":     0x2003,
	"thinsp":   0x2009,
	"zwnj":     0x200C,
	"zwj":      0x200D,
	"lrm":      0x200E,
	"rlm":      0x200F,
	"ndash":    0x2013,
	"mdash":    0x2014,
	"lsquo":    0x2018,
	"rsquo":    0x2019,
	"sbquo":    0x201A,
	"ldquo":    0x201C,
	"rdquo":    0x201D,
	"bdquo":    0x201E,
	"dagger":   0x2020,
	"Dagger":   0x2021,
	"bull":     0x2022,
	"hellip":   0x2026,
	"permil":   0x2030,
	"prime":    0x2032,
	"Prime":    0x2033,
	"lsaquo":   0x2039,
	"rsaquo":   0x203A,
	"oline":    0x203E,
	"frasl":    0x2044,
	"euro":     0x20AC,
	"image":    0x2111,
	"weierp":   0x2118,
	"real":     0x211C,
	"trade":    0x2122,
	"alefsym":  0x2135,
	"larr":     0x2190,
	"uarr":     0x2191,
	"rarr":     0x2192,
	"darr":     0x2193,
	"harr":     0x2194,
	"crarr":    0x21B5,
	"lArr":     0x21D0,
	"uArr":     0x21D1,
	"rArr":     0x21D2,
	"dArr":     0x21D3,
	"hArr":     0x21D4,
	"forall":   0x2200,
	"part":     0x2202,
	"exist":    0x2203,
	"empty":    0x2205,
	"nabla":    0x2207,
	"isin":     0x2208,
	"notin":    0x2209,
	"ni":       0x220B,
	"prod":     0x220F,
	"sum":      0x2211,
	"minus":    0x2212,
	"lowast":   0x2217,
	"radic":    0x221A,
	"prop":     0x221D,
	"infin":    0x221E,
	"ang":      0x2220,
	"and":      0x2227,
	"or":       0x2228,
	"cap":      0x2229,
	"cup":      0x222A,
	"int":      0x222B,
	"there4":   0x2234,
	"sim":      0x223C,
	"cong":     0x2245,
	"asymp":    0x2248,
	"ne":       0x2260,
	"equiv":    0x2261,
	"le":       0x2264,
	"ge":       0x2265,
	"sub":      0x2282,
	"sup":      0x2283,
	"nsub":     0x2284,
	"sube":     0x2286,
	"supe":     0x2287,
	"oplus":    0x2295,
	"otimes":   0x2297,
	"perp":     0x22A5,
	"sdot":     0x22C5,
	"lceil":    0x2308,
	"rceil":    0x2309,
	"lfloor":   0x230A,
	"rfloor":   0x230B,
	"lang":     0x2329,
	"rang":     0x232A,
	"loz":      0x25CA,
	"spades":   0x2660,
	"clubs":    0x2663,
	"hearts":   0x2665,
	"diams":    0x2666,
}
2039 vendor/github.com/evanw/esbuild/internal/js_lexer/unicode.go generated vendored
File diff suppressed because it is too large
48 vendor/github.com/evanw/esbuild/internal/js_parser/global_name_parser.go generated vendored
@@ -1,48 +0,0 @@
package js_parser

import (
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

func ParseGlobalName(log logger.Log, source logger.Source) (result []string, ok bool) {
	ok = true
	defer func() {
		r := recover()
		if _, isLexerPanic := r.(js_lexer.LexerPanic); isLexerPanic {
			ok = false
		} else if r != nil {
			panic(r)
		}
	}()

	lexer := js_lexer.NewLexerGlobalName(log, source)

	// Start off with an identifier
	result = append(result, lexer.Identifier)
	lexer.Expect(js_lexer.TIdentifier)

	// Follow with dot or index expressions
	for lexer.Token != js_lexer.TEndOfFile {
		switch lexer.Token {
		case js_lexer.TDot:
			lexer.Next()
			if !lexer.IsIdentifierOrKeyword() {
				lexer.Expect(js_lexer.TIdentifier)
			}
			result = append(result, lexer.Identifier)
			lexer.Next()

		case js_lexer.TOpenBracket:
			lexer.Next()
			result = append(result, js_lexer.UTF16ToString(lexer.StringLiteral()))
			lexer.Expect(js_lexer.TStringLiteral)
			lexer.Expect(js_lexer.TCloseBracket)

		default:
			lexer.Expect(js_lexer.TDot)
		}
	}

	return
}
15160 vendor/github.com/evanw/esbuild/internal/js_parser/js_parser.go generated vendored
File diff suppressed because it is too large
2980 vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower.go generated vendored
File diff suppressed because it is too large
187 vendor/github.com/evanw/esbuild/internal/js_parser/json_parser.go generated vendored
@@ -1,187 +0,0 @@
package js_parser

import (
	"fmt"

	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

type jsonParser struct {
	log                            logger.Log
	source                         logger.Source
	tracker                        logger.LineColumnTracker
	lexer                          js_lexer.Lexer
	options                        JSONOptions
	suppressWarningsAboutWeirdCode bool
}

func (p *jsonParser) parseMaybeTrailingComma(closeToken js_lexer.T) bool {
	commaRange := p.lexer.Range()
	p.lexer.Expect(js_lexer.TComma)

	if p.lexer.Token == closeToken {
		if !p.options.AllowTrailingCommas {
			p.log.Add(logger.Error, &p.tracker, commaRange, "JSON does not support trailing commas")
		}
		return false
	}

	return true
}

func (p *jsonParser) parseExpr() js_ast.Expr {
	loc := p.lexer.Loc()

	switch p.lexer.Token {
	case js_lexer.TFalse:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: false}}

	case js_lexer.TTrue:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: true}}

	case js_lexer.TNull:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: js_ast.ENullShared}

	case js_lexer.TStringLiteral:
		value := p.lexer.StringLiteral()
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EString{Value: value}}

	case js_lexer.TNumericLiteral:
		value := p.lexer.Number
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.ENumber{Value: value}}

	case js_lexer.TMinus:
		p.lexer.Next()
		value := p.lexer.Number
		p.lexer.Expect(js_lexer.TNumericLiteral)
		return js_ast.Expr{Loc: loc, Data: &js_ast.ENumber{Value: -value}}

	case js_lexer.TOpenBracket:
		p.lexer.Next()
		isSingleLine := !p.lexer.HasNewlineBefore
		items := []js_ast.Expr{}

		for p.lexer.Token != js_lexer.TCloseBracket {
			if len(items) > 0 {
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
				if !p.parseMaybeTrailingComma(js_lexer.TCloseBracket) {
					break
				}
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
			}

			item := p.parseExpr()
			items = append(items, item)
		}

		if p.lexer.HasNewlineBefore {
			isSingleLine = false
		}
		p.lexer.Expect(js_lexer.TCloseBracket)
		return js_ast.Expr{Loc: loc, Data: &js_ast.EArray{
			Items:        items,
			IsSingleLine: isSingleLine,
		}}

	case js_lexer.TOpenBrace:
		p.lexer.Next()
		isSingleLine := !p.lexer.HasNewlineBefore
		properties := []js_ast.Property{}
		duplicates := make(map[string]logger.Range)

		for p.lexer.Token != js_lexer.TCloseBrace {
			if len(properties) > 0 {
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
				if !p.parseMaybeTrailingComma(js_lexer.TCloseBrace) {
					break
				}
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
			}

			keyString := p.lexer.StringLiteral()
			keyRange := p.lexer.Range()
			key := js_ast.Expr{Loc: keyRange.Loc, Data: &js_ast.EString{Value: keyString}}
			p.lexer.Expect(js_lexer.TStringLiteral)

			// Warn about duplicate keys
			if !p.suppressWarningsAboutWeirdCode {
				keyText := js_lexer.UTF16ToString(keyString)
				if prevRange, ok := duplicates[keyText]; ok {
					p.log.AddWithNotes(logger.Warning, &p.tracker, keyRange, fmt.Sprintf("Duplicate key %q in object literal", keyText),
						[]logger.MsgData{p.tracker.MsgData(prevRange, fmt.Sprintf("The original key %q is here:", keyText))})
				} else {
					duplicates[keyText] = keyRange
				}
			}

			p.lexer.Expect(js_lexer.TColon)
			value := p.parseExpr()

			property := js_ast.Property{
				Kind:       js_ast.PropertyNormal,
				Key:        key,
				ValueOrNil: value,
			}
			properties = append(properties, property)
		}

		if p.lexer.HasNewlineBefore {
			isSingleLine = false
		}
		p.lexer.Expect(js_lexer.TCloseBrace)
		return js_ast.Expr{Loc: loc, Data: &js_ast.EObject{
			Properties:   properties,
			IsSingleLine: isSingleLine,
		}}

	default:
		p.lexer.Unexpected()
		return js_ast.Expr{}
	}
}

type JSONOptions struct {
	AllowComments       bool
	AllowTrailingCommas bool
}

func ParseJSON(log logger.Log, source logger.Source, options JSONOptions) (result js_ast.Expr, ok bool) {
	ok = true
	defer func() {
		r := recover()
		if _, isLexerPanic := r.(js_lexer.LexerPanic); isLexerPanic {
			ok = false
		} else if r != nil {
			panic(r)
		}
	}()

	p := &jsonParser{
		log:                            log,
		source:                         source,
		tracker:                        logger.MakeLineColumnTracker(&source),
		options:                        options,
		lexer:                          js_lexer.NewLexerJSON(log, source, options.AllowComments),
		suppressWarningsAboutWeirdCode: helpers.IsInsideNodeModules(source.KeyPath.Text),
	}

	result = p.parseExpr()
	p.lexer.Expect(js_lexer.TEndOfFile)
	return
}
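
The AllowTrailingCommas option above exists because strict JSON rejects a comma before a closing bracket or brace. A minimal standalone illustration of that strictness using Go's standard library (the sample input is illustrative, not taken from this repository):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Strict JSON, which ParseJSON enforces unless AllowTrailingCommas is
	// set: the trailing comma makes Unmarshal return an error (roughly
	// "invalid character '}' looking for beginning of object key string").
	var v interface{}
	err := json.Unmarshal([]byte(`{"a": 1,}`), &v)
	fmt.Println(err)
}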
251 vendor/github.com/evanw/esbuild/internal/js_parser/sourcemap_parser.go generated vendored
@@ -1,251 +0,0 @@
package js_parser

import (
	"fmt"
	"sort"

	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/sourcemap"
)

// Specification: https://sourcemaps.info/spec.html
func ParseSourceMap(log logger.Log, source logger.Source) *sourcemap.SourceMap {
	expr, ok := ParseJSON(log, source, JSONOptions{})
	if !ok {
		return nil
	}

	obj, ok := expr.Data.(*js_ast.EObject)
	tracker := logger.MakeLineColumnTracker(&source)
	if !ok {
		log.Add(logger.Error, &tracker, logger.Range{Loc: expr.Loc}, "Invalid source map")
		return nil
	}

	var sources []string
	var sourcesContent []sourcemap.SourceContent
	var mappingsRaw []uint16
	var mappingsStart int32
	hasVersion := false

	for _, prop := range obj.Properties {
		keyRange := source.RangeOfString(prop.Key.Loc)

		switch js_lexer.UTF16ToString(prop.Key.Data.(*js_ast.EString).Value) {
		case "sections":
			log.Add(logger.Warning, &tracker, keyRange, "Source maps with \"sections\" are not supported")
			return nil

		case "version":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.ENumber); ok && value.Value == 3 {
				hasVersion = true
			}

		case "mappings":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.EString); ok {
				mappingsRaw = value.Value
				mappingsStart = prop.ValueOrNil.Loc.Start + 1
			}

		case "sources":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.EArray); ok {
				sources = nil
				for _, item := range value.Items {
					if element, ok := item.Data.(*js_ast.EString); ok {
						sources = append(sources, js_lexer.UTF16ToString(element.Value))
					} else {
						sources = append(sources, "")
					}
				}
			}

		case "sourcesContent":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.EArray); ok {
				sourcesContent = nil
				for _, item := range value.Items {
					if element, ok := item.Data.(*js_ast.EString); ok {
						sourcesContent = append(sourcesContent, sourcemap.SourceContent{
							Value:  element.Value,
							Quoted: source.TextForRange(source.RangeOfString(item.Loc)),
						})
					} else {
						sourcesContent = append(sourcesContent, sourcemap.SourceContent{})
					}
				}
			}
		}
	}

	// Silently fail if the version was missing or incorrect
	if !hasVersion {
		return nil
	}

	// Silently fail if the source map is pointless (i.e. empty)
	if len(sources) == 0 || len(mappingsRaw) == 0 {
		return nil
	}

	var mappings mappingArray
	mappingsLen := len(mappingsRaw)
	sourcesLen := len(sources)
	generatedLine := 0
	generatedColumn := 0
	sourceIndex := 0
	originalLine := 0
	originalColumn := 0
	current := 0
	errorText := ""
	errorLen := 0
	needSort := false

	// Parse the mappings
	for current < mappingsLen {
		// Handle a line break
		if mappingsRaw[current] == ';' {
			generatedLine++
			generatedColumn = 0
			current++
			continue
		}

		// Read the generated column
		generatedColumnDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing generated column"
			errorLen = i
			break
		}
		if generatedColumnDelta < 0 {
			// This would mess up binary search
			needSort = true
		}
		generatedColumn += generatedColumnDelta
		if generatedColumn < 0 {
			errorText = fmt.Sprintf("Invalid generated column value: %d", generatedColumn)
			errorLen = i
			break
		}
		current += i

		// According to the specification, it's valid for a mapping to have 1,
		// 4, or 5 variable-length fields. Having one field means there's no
		// original location information, which is pretty useless. Just ignore
		// those entries.
		if current == mappingsLen {
			break
		}
		switch mappingsRaw[current] {
		case ',':
			current++
			continue
		case ';':
			continue
		}

		// Read the original source
		sourceIndexDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing source index"
			errorLen = i
			break
		}
		sourceIndex += sourceIndexDelta
		if sourceIndex < 0 || sourceIndex >= sourcesLen {
			errorText = fmt.Sprintf("Invalid source index value: %d", sourceIndex)
			errorLen = i
			break
		}
		current += i

		// Read the original line
		originalLineDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing original line"
			errorLen = i
			break
		}
		originalLine += originalLineDelta
		if originalLine < 0 {
			errorText = fmt.Sprintf("Invalid original line value: %d", originalLine)
			errorLen = i
			break
		}
		current += i

		// Read the original column
		originalColumnDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing original column"
			errorLen = i
			break
		}
		originalColumn += originalColumnDelta
		if originalColumn < 0 {
			errorText = fmt.Sprintf("Invalid original column value: %d", originalColumn)
			errorLen = i
			break
		}
		current += i

		// Ignore the optional name index
		if _, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:]); ok {
			current += i
		}

		// Handle the next character
		if current < mappingsLen {
			if c := mappingsRaw[current]; c == ',' {
				current++
			} else if c != ';' {
				errorText = fmt.Sprintf("Invalid character after mapping: %q",
					js_lexer.UTF16ToString(mappingsRaw[current:current+1]))
				errorLen = 1
				break
			}
		}

		mappings = append(mappings, sourcemap.Mapping{
			GeneratedLine:   int32(generatedLine),
			GeneratedColumn: int32(generatedColumn),
			SourceIndex:     int32(sourceIndex),
			OriginalLine:    int32(originalLine),
			OriginalColumn:  int32(originalColumn),
		})
	}

	if errorText != "" {
		r := logger.Range{Loc: logger.Loc{Start: mappingsStart + int32(current)}, Len: int32(errorLen)}
		log.Add(logger.Warning, &tracker, r,
			fmt.Sprintf("Bad \"mappings\" data in source map at character %d: %s", current, errorText))
		return nil
	}

	if needSort {
		// If we get here, some mappings are out of order. Lines can't be out of
		// order by construction but columns can. This is a pretty rare situation
		// because almost all source map generators always write out mappings in
		// order as they write the output instead of scrambling the order.
		sort.Stable(mappings)
	}

	return &sourcemap.SourceMap{
		Sources:        sources,
		SourcesContent: sourcesContent,
		Mappings:       mappings,
	}
}

// This type is just so we can use Go's native sort function
type mappingArray []sourcemap.Mapping

func (a mappingArray) Len() int          { return len(a) }
func (a mappingArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }

func (a mappingArray) Less(i int, j int) bool {
	ai := a[i]
	aj := a[j]
	return ai.GeneratedLine < aj.GeneratedLine || (ai.GeneratedLine == aj.GeneratedLine && ai.GeneratedColumn <= aj.GeneratedColumn)
}
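
The "mappings" fields read by DecodeVLQUTF16 above are Base64 VLQ values: 6-bit base64 digits where bit 5 is a continuation flag, the low 5 bits are data accumulated little-endian, and the lowest bit of the assembled value is the sign. A minimal self-contained sketch of that decoding (decodeVLQ and the sample segment are illustrative names, not part of the vendored code):

package main

import (
	"fmt"
	"strings"
)

const base64Chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"

// decodeVLQ reads one Base64 VLQ value from s and returns the value and the
// number of characters consumed (0 if the input is malformed or truncated).
func decodeVLQ(s string) (value int, n int) {
	shift := uint(0)
	for n < len(s) {
		digit := strings.IndexByte(base64Chars, s[n])
		if digit == -1 {
			return 0, 0
		}
		n++
		value |= (digit & 31) << shift
		if digit&32 == 0 { // continuation bit clear: this was the last digit
			if value&1 != 0 { // lowest bit holds the sign
				return -(value >> 1), n
			}
			return value >> 1, n
		}
		shift += 5
	}
	return 0, 0 // ran out of input mid-value
}

func main() {
	// "AAgBC" encodes the deltas [0, 0, 16, 1]: generated column,
	// source index, original line, original column.
	segment := "AAgBC"
	for len(segment) > 0 {
		v, n := decodeVLQ(segment)
		if n == 0 {
			break
		}
		fmt.Println(v)
		segment = segment[n:]
	}
}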
1601 vendor/github.com/evanw/esbuild/internal/js_parser/ts_parser.go generated vendored
File diff suppressed because it is too large
3212 vendor/github.com/evanw/esbuild/internal/js_printer/js_printer.go generated vendored
File diff suppressed because it is too large
1612 vendor/github.com/evanw/esbuild/internal/logger/logger.go generated vendored
File diff suppressed because it is too large
34 vendor/github.com/evanw/esbuild/internal/logger/logger_darwin.go generated vendored
@@ -1,34 +0,0 @@
//go:build darwin
// +build darwin

package logger

import (
	"os"

	"golang.org/x/sys/unix"
)

const SupportsColorEscapes = true

func GetTerminalInfo(file *os.File) (info TerminalInfo) {
	fd := file.Fd()

	// Is this file descriptor a terminal?
	if _, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA); err == nil {
		info.IsTTY = true
		info.UseColorEscapes = !hasNoColorEnvironmentVariable()

		// Get the width of the window
		if w, err := unix.IoctlGetWinsize(int(fd), unix.TIOCGWINSZ); err == nil {
			info.Width = int(w.Col)
			info.Height = int(w.Row)
		}
	}

	return
}

func writeStringWithColor(file *os.File, text string) {
	file.WriteString(text)
}
34 vendor/github.com/evanw/esbuild/internal/logger/logger_linux.go generated vendored
@@ -1,34 +0,0 @@
//go:build linux
// +build linux

package logger

import (
	"os"

	"golang.org/x/sys/unix"
)

const SupportsColorEscapes = true

func GetTerminalInfo(file *os.File) (info TerminalInfo) {
	fd := file.Fd()

	// Is this file descriptor a terminal?
	if _, err := unix.IoctlGetTermios(int(fd), unix.TCGETS); err == nil {
		info.IsTTY = true
		info.UseColorEscapes = !hasNoColorEnvironmentVariable()

		// Get the width of the window
		if w, err := unix.IoctlGetWinsize(int(fd), unix.TIOCGWINSZ); err == nil {
			info.Width = int(w.Col)
			info.Height = int(w.Row)
		}
	}

	return
}

func writeStringWithColor(file *os.File, text string) {
	file.WriteString(text)
}
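
The darwin and linux variants above answer "is this a TTY, and how wide is it?" with raw termios ioctls. A sketch of the same probe using the higher-level golang.org/x/term package; this is an alternative approach for illustration, not how the vendored code works:

package main

import (
	"fmt"
	"os"

	"golang.org/x/term"
)

func main() {
	fd := int(os.Stdout.Fd())
	if term.IsTerminal(fd) {
		// Only a real terminal has a window size to report.
		if w, h, err := term.GetSize(fd); err == nil {
			fmt.Printf("tty %dx%d\n", w, h)
		}
	} else {
		fmt.Println("not a tty")
	}
}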
16 vendor/github.com/evanw/esbuild/internal/logger/logger_other.go generated vendored
@@ -1,16 +0,0 @@
//go:build !darwin && !linux && !windows
// +build !darwin,!linux,!windows

package logger

import "os"

const SupportsColorEscapes = false

func GetTerminalInfo(*os.File) TerminalInfo {
	return TerminalInfo{}
}

func writeStringWithColor(file *os.File, text string) {
	file.WriteString(text)
}
136 vendor/github.com/evanw/esbuild/internal/logger/logger_windows.go generated vendored
@@ -1,136 +0,0 @@
//go:build windows
// +build windows

package logger

import (
	"os"
	"strings"
	"syscall"
	"unsafe"
)

const SupportsColorEscapes = true

var kernel32 = syscall.NewLazyDLL("kernel32.dll")
var getConsoleMode = kernel32.NewProc("GetConsoleMode")
var setConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
var getConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")

type consoleScreenBufferInfo struct {
	dwSizeX              int16
	dwSizeY              int16
	dwCursorPositionX    int16
	dwCursorPositionY    int16
	wAttributes          uint16
	srWindowLeft         int16
	srWindowTop          int16
	srWindowRight        int16
	srWindowBottom       int16
	dwMaximumWindowSizeX int16
	dwMaximumWindowSizeY int16
}

func GetTerminalInfo(file *os.File) TerminalInfo {
	fd := file.Fd()

	// Is this file descriptor a terminal?
	var unused uint32
	isTTY, _, _ := syscall.Syscall(getConsoleMode.Addr(), 2, fd, uintptr(unsafe.Pointer(&unused)), 0)

	// Get the width of the window
	var info consoleScreenBufferInfo
	syscall.Syscall(getConsoleScreenBufferInfo.Addr(), 2, fd, uintptr(unsafe.Pointer(&info)), 0)

	return TerminalInfo{
		IsTTY:           isTTY != 0,
		Width:           int(info.dwSizeX) - 1,
		Height:          int(info.dwSizeY) - 1,
		UseColorEscapes: !hasNoColorEnvironmentVariable(),
	}
}

const (
	FOREGROUND_BLUE uint8 = 1 << iota
	FOREGROUND_GREEN
	FOREGROUND_RED
	FOREGROUND_INTENSITY
	BACKGROUND_BLUE
	BACKGROUND_GREEN
	BACKGROUND_RED
	BACKGROUND_INTENSITY
)

var windowsEscapeSequenceMap = map[string]uint8{
	TerminalColors.Reset: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE,
	TerminalColors.Dim:   FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE,
	TerminalColors.Bold:  FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY,

	// Apparently underlines only work with the CJK locale on Windows :(
	TerminalColors.Underline: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE,

	TerminalColors.Red:   FOREGROUND_RED,
	TerminalColors.Green: FOREGROUND_GREEN,
	TerminalColors.Blue:  FOREGROUND_BLUE,

	TerminalColors.Cyan:    FOREGROUND_GREEN | FOREGROUND_BLUE,
	TerminalColors.Magenta: FOREGROUND_RED | FOREGROUND_BLUE,
	TerminalColors.Yellow:  FOREGROUND_RED | FOREGROUND_GREEN,

	TerminalColors.RedBgRed:     FOREGROUND_RED | BACKGROUND_RED,
	TerminalColors.RedBgWhite:   FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_RED,
	TerminalColors.GreenBgGreen: FOREGROUND_GREEN | BACKGROUND_GREEN,
	TerminalColors.GreenBgWhite: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_GREEN,
	TerminalColors.BlueBgBlue:   FOREGROUND_BLUE | BACKGROUND_BLUE,
	TerminalColors.BlueBgWhite:  FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_BLUE,

	TerminalColors.CyanBgCyan:       FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_GREEN | BACKGROUND_BLUE,
	TerminalColors.CyanBgBlack:      BACKGROUND_GREEN | BACKGROUND_BLUE,
	TerminalColors.MagentaBgMagenta: FOREGROUND_RED | FOREGROUND_BLUE | BACKGROUND_RED | BACKGROUND_BLUE,
	TerminalColors.MagentaBgBlack:   BACKGROUND_RED | BACKGROUND_BLUE,
	TerminalColors.YellowBgYellow:   FOREGROUND_RED | FOREGROUND_GREEN | BACKGROUND_RED | BACKGROUND_GREEN,
	TerminalColors.YellowBgBlack:    BACKGROUND_RED | BACKGROUND_GREEN,
}

func writeStringWithColor(file *os.File, text string) {
	fd := file.Fd()
	i := 0

	for i < len(text) {
		// Find the escape
		if text[i] != 033 {
			i++
			continue
		}

		// Find the 'm'
		window := text[i:]
		if len(window) > 8 {
			window = window[:8]
		}
		m := strings.IndexByte(window, 'm')
		if m == -1 {
			i++
			continue
		}
		m += i + 1

		// Find the escape sequence
		attributes, ok := windowsEscapeSequenceMap[text[i:m]]
		if !ok {
			i++
			continue
		}

		// Write out the text before the escape sequence
		file.WriteString(text[:i])

		// Apply the escape sequence
		text = text[m:]
		i = 0
		setConsoleTextAttribute.Call(fd, uintptr(attributes))
	}

	// Write out the remaining text
	file.WriteString(text)
}
624 vendor/github.com/evanw/esbuild/internal/renamer/renamer.go generated vendored
@@ -1,624 +0,0 @@
package renamer

import (
	"fmt"
	"sort"
	"strconv"
	"sync"
	"sync/atomic"

	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
)

func ComputeReservedNames(moduleScopes []*js_ast.Scope, symbols js_ast.SymbolMap) map[string]uint32 {
	names := make(map[string]uint32)

	// All keywords and strict mode reserved words are reserved names
	for k := range js_lexer.Keywords {
		names[k] = 1
	}
	for k := range js_lexer.StrictModeReservedWords {
		names[k] = 1
	}

	// All unbound symbols must be reserved names
	for _, scope := range moduleScopes {
		computeReservedNamesForScope(scope, symbols, names)
	}

	return names
}

func computeReservedNamesForScope(scope *js_ast.Scope, symbols js_ast.SymbolMap, names map[string]uint32) {
	for _, member := range scope.Members {
		symbol := symbols.Get(member.Ref)
		if symbol.Kind == js_ast.SymbolUnbound || symbol.MustNotBeRenamed {
			names[symbol.OriginalName] = 1
		}
	}
	for _, ref := range scope.Generated {
		symbol := symbols.Get(ref)
		if symbol.Kind == js_ast.SymbolUnbound || symbol.MustNotBeRenamed {
			names[symbol.OriginalName] = 1
		}
	}

	// If there's a direct "eval" somewhere inside the current scope, continue
	// traversing down the scope tree until we find it to get all reserved names
	if scope.ContainsDirectEval {
		for _, child := range scope.Children {
			if child.ContainsDirectEval {
				computeReservedNamesForScope(child, symbols, names)
			}
		}
	}
}

type Renamer interface {
	NameForSymbol(ref js_ast.Ref) string
}

////////////////////////////////////////////////////////////////////////////////
// noOpRenamer

type noOpRenamer struct {
	symbols js_ast.SymbolMap
}

func NewNoOpRenamer(symbols js_ast.SymbolMap) Renamer {
	return &noOpRenamer{
		symbols: symbols,
	}
}

func (r *noOpRenamer) NameForSymbol(ref js_ast.Ref) string {
	ref = js_ast.FollowSymbols(r.symbols, ref)
	return r.symbols.Get(ref).OriginalName
}

////////////////////////////////////////////////////////////////////////////////
// MinifyRenamer

type symbolSlot struct {
	name               string
	count              uint32
	needsCapitalForJSX uint32 // This is really a bool but needs to be atomic
}

type MinifyRenamer struct {
	symbols              js_ast.SymbolMap
	reservedNames        map[string]uint32
	slots                [3][]symbolSlot
	topLevelSymbolToSlot map[js_ast.Ref]uint32
}

func NewMinifyRenamer(symbols js_ast.SymbolMap, firstTopLevelSlots js_ast.SlotCounts, reservedNames map[string]uint32) *MinifyRenamer {
	return &MinifyRenamer{
		symbols:       symbols,
		reservedNames: reservedNames,
		slots: [3][]symbolSlot{
			make([]symbolSlot, firstTopLevelSlots[0]),
			make([]symbolSlot, firstTopLevelSlots[1]),
			make([]symbolSlot, firstTopLevelSlots[2]),
		},
		topLevelSymbolToSlot: make(map[js_ast.Ref]uint32),
	}
}

func (r *MinifyRenamer) NameForSymbol(ref js_ast.Ref) string {
	// Follow links to get to the underlying symbol
	ref = js_ast.FollowSymbols(r.symbols, ref)
	symbol := r.symbols.Get(ref)

	// Skip this symbol if the name is pinned
	ns := symbol.SlotNamespace()
	if ns == js_ast.SlotMustNotBeRenamed {
		return symbol.OriginalName
	}

	// Check if it's a nested scope symbol
	i := symbol.NestedScopeSlot

	// If it's not (i.e. it's in a top-level scope), look up the slot
	if !i.IsValid() {
		index, ok := r.topLevelSymbolToSlot[ref]
		if !ok {
			// If we get here, then we're printing a symbol that never had any
			// recorded uses. This is odd but can happen in certain scenarios.
			// For example, code in a branch with dead control flow won't mark
			// any uses but may still be printed. In that case it doesn't matter
			// what name we use since it's dead code.
			return symbol.OriginalName
		}
		i = ast.MakeIndex32(index)
	}

	return r.slots[ns][i.GetIndex()].name
}

// The sort order here is arbitrary but needs to be consistent between builds.
// The InnerIndex should be stable because the parser for a single file is
// single-threaded and deterministically assigns out InnerIndex values
// sequentially. But the SourceIndex should be unstable because the main thread
// assigns out source index values sequentially to newly-discovered dependencies
// in a multi-threaded producer/consumer relationship. So instead we use the
// index of the source in the DFS order over all entry points for stability.
type DeferredTopLevelSymbol struct {
	StableSourceIndex uint32
	Ref               js_ast.Ref
	Count             uint32
}

// This type is just so we can use Go's native sort function
type DeferredTopLevelSymbolArray []DeferredTopLevelSymbol

func (a DeferredTopLevelSymbolArray) Len() int          { return len(a) }
func (a DeferredTopLevelSymbolArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }
func (a DeferredTopLevelSymbolArray) Less(i int, j int) bool {
	ai, aj := a[i], a[j]
	if ai.StableSourceIndex < aj.StableSourceIndex {
		return true
	}
	if ai.StableSourceIndex > aj.StableSourceIndex {
		return false
	}
	if ai.Ref.InnerIndex < aj.Ref.InnerIndex {
		return true
	}
	if ai.Ref.InnerIndex > aj.Ref.InnerIndex {
		return false
	}
	return ai.Count < aj.Count
}

func (r *MinifyRenamer) AccumulateSymbolUseCounts(
	topLevelSymbols *DeferredTopLevelSymbolArray,
	symbolUses map[js_ast.Ref]js_ast.SymbolUse,
	stableSourceIndices []uint32,
) {
	// NOTE: This function is run in parallel. Make sure to avoid data races.

	for ref, use := range symbolUses {
		r.AccumulateSymbolCount(topLevelSymbols, ref, use.CountEstimate, stableSourceIndices)
	}
}

func (r *MinifyRenamer) AccumulateSymbolCount(
	topLevelSymbols *DeferredTopLevelSymbolArray,
	ref js_ast.Ref,
	count uint32,
	stableSourceIndices []uint32,
) {
	// NOTE: This function is run in parallel. Make sure to avoid data races.

	// Follow links to get to the underlying symbol
	ref = js_ast.FollowSymbols(r.symbols, ref)
	symbol := r.symbols.Get(ref)
	for symbol.NamespaceAlias != nil {
		ref = js_ast.FollowSymbols(r.symbols, symbol.NamespaceAlias.NamespaceRef)
		symbol = r.symbols.Get(ref)
	}

	// Skip this symbol if the name is pinned
	ns := symbol.SlotNamespace()
	if ns == js_ast.SlotMustNotBeRenamed {
		return
	}

	// Check if it's a nested scope symbol
	if i := symbol.NestedScopeSlot; i.IsValid() {
		// If it is, accumulate the count using a parallel-safe atomic increment
		slot := &r.slots[ns][i.GetIndex()]
		atomic.AddUint32(&slot.count, count)
		if symbol.MustStartWithCapitalLetterForJSX {
			atomic.StoreUint32(&slot.needsCapitalForJSX, 1)
		}
		return
	}

	// If it's a top-level symbol, defer it to later since we have
	// to allocate slots for these in serial instead of in parallel
	*topLevelSymbols = append(*topLevelSymbols, DeferredTopLevelSymbol{
		StableSourceIndex: stableSourceIndices[ref.SourceIndex],
		Ref:               ref,
		Count:             count,
	})
}

// The parallel part of the symbol count accumulation algorithm above processes
// nested symbols and generates on an array of top-level symbols to process later.
// After the parallel part has finished, that array of top-level symbols is passed
// to this function which processes them in serial.
func (r *MinifyRenamer) AllocateTopLevelSymbolSlots(topLevelSymbols DeferredTopLevelSymbolArray) {
	for _, stable := range topLevelSymbols {
		symbol := r.symbols.Get(stable.Ref)
		slots := &r.slots[symbol.SlotNamespace()]
		if i, ok := r.topLevelSymbolToSlot[stable.Ref]; ok {
			slot := &(*slots)[i]
			slot.count += stable.Count
			if symbol.MustStartWithCapitalLetterForJSX {
				slot.needsCapitalForJSX = 1
			}
		} else {
			needsCapitalForJSX := uint32(0)
			if symbol.MustStartWithCapitalLetterForJSX {
				needsCapitalForJSX = 1
			}
			i = uint32(len(*slots))
			*slots = append(*slots, symbolSlot{
				count:              stable.Count,
				needsCapitalForJSX: needsCapitalForJSX,
			})
			r.topLevelSymbolToSlot[stable.Ref] = i
		}
	}
}

func (r *MinifyRenamer) AssignNamesByFrequency(minifier *js_ast.NameMinifier) {
	for ns, slots := range r.slots {
		// Sort symbols by count
		sorted := make(slotAndCountArray, len(slots))
		for i, item := range slots {
			sorted[i] = slotAndCount{slot: uint32(i), count: item.count}
		}
		sort.Sort(sorted)

		// Assign names to symbols
		nextName := 0
		for _, data := range sorted {
			slot := &slots[data.slot]
			name := minifier.NumberToMinifiedName(nextName)
			nextName++

			// Make sure we never generate a reserved name. We only have to worry
			// about collisions with reserved identifiers for normal symbols, and we
			// only have to worry about collisions with keywords for labels. We do
			// not have to worry about either for private names because they start
			// with a "#" character.
			switch js_ast.SlotNamespace(ns) {
			case js_ast.SlotDefault:
				for r.reservedNames[name] != 0 {
					name = minifier.NumberToMinifiedName(nextName)
					nextName++
				}

				// Make sure names of symbols used in JSX elements start with a capital letter
				if slot.needsCapitalForJSX != 0 {
					for name[0] >= 'a' && name[0] <= 'z' {
						name = minifier.NumberToMinifiedName(nextName)
						nextName++
					}
				}

			case js_ast.SlotLabel:
				for js_lexer.Keywords[name] != 0 {
					name = minifier.NumberToMinifiedName(nextName)
					nextName++
				}
			}

			// Private names must be prefixed with "#"
			if js_ast.SlotNamespace(ns) == js_ast.SlotPrivateName {
				name = "#" + name
			}

			slot.name = name
		}
	}
}

// Returns the number of nested slots
func AssignNestedScopeSlots(moduleScope *js_ast.Scope, symbols []js_ast.Symbol) (slotCounts js_ast.SlotCounts) {
	// Temporarily set the nested scope slots of top-level symbols to valid so
	// they aren't renamed in nested scopes. This prevents us from accidentally
	// assigning nested scope slots to variables declared using "var" in a nested
	// scope that are actually hoisted up to the module scope to become a top-
	// level symbol.
	validSlot := ast.MakeIndex32(1)
	for _, member := range moduleScope.Members {
		symbols[member.Ref.InnerIndex].NestedScopeSlot = validSlot
	}
	for _, ref := range moduleScope.Generated {
		symbols[ref.InnerIndex].NestedScopeSlot = validSlot
	}

	// Assign nested scope slots independently for each nested scope
	for _, child := range moduleScope.Children {
		slotCounts.UnionMax(assignNestedScopeSlotsHelper(child, symbols, js_ast.SlotCounts{}))
	}

	// Then set the nested scope slots of top-level symbols back to zero. Top-
	// level symbols are not supposed to have nested scope slots.
	for _, member := range moduleScope.Members {
		symbols[member.Ref.InnerIndex].NestedScopeSlot = ast.Index32{}
	}
	for _, ref := range moduleScope.Generated {
		symbols[ref.InnerIndex].NestedScopeSlot = ast.Index32{}
	}
	return
}

func assignNestedScopeSlotsHelper(scope *js_ast.Scope, symbols []js_ast.Symbol, slot js_ast.SlotCounts) js_ast.SlotCounts {
	// Sort member map keys for determinism
	sortedMembers := make([]int, 0, len(scope.Members))
	for _, member := range scope.Members {
		sortedMembers = append(sortedMembers, int(member.Ref.InnerIndex))
	}
	sort.Ints(sortedMembers)

	// Assign slots for this scope's symbols. Only do this if the slot is
	// not already assigned. Nested scopes have copies of symbols from parent
	// scopes and we want to use the slot from the parent scope, not child scopes.
	for _, innerIndex := range sortedMembers {
		symbol := &symbols[innerIndex]
		if ns := symbol.SlotNamespace(); ns != js_ast.SlotMustNotBeRenamed && !symbol.NestedScopeSlot.IsValid() {
			symbol.NestedScopeSlot = ast.MakeIndex32(slot[ns])
			slot[ns]++
		}
	}
	for _, ref := range scope.Generated {
		symbol := &symbols[ref.InnerIndex]
		if ns := symbol.SlotNamespace(); ns != js_ast.SlotMustNotBeRenamed && !symbol.NestedScopeSlot.IsValid() {
			symbol.NestedScopeSlot = ast.MakeIndex32(slot[ns])
			slot[ns]++
		}
	}

	// Labels are always declared in a nested scope, so we don't need to check.
	if scope.Label.Ref != js_ast.InvalidRef {
		symbol := &symbols[scope.Label.Ref.InnerIndex]
		symbol.NestedScopeSlot = ast.MakeIndex32(slot[js_ast.SlotLabel])
		slot[js_ast.SlotLabel]++
	}

	// Assign slots for the symbols of child scopes
	slotCounts := slot
	for _, child := range scope.Children {
		slotCounts.UnionMax(assignNestedScopeSlotsHelper(child, symbols, slot))
	}
	return slotCounts
}

type slotAndCount struct {
	slot  uint32
	count uint32
}

// This type is just so we can use Go's native sort function
type slotAndCountArray []slotAndCount

func (a slotAndCountArray) Len() int          { return len(a) }
func (a slotAndCountArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }
func (a slotAndCountArray) Less(i int, j int) bool {
	ai, aj := a[i], a[j]
	return ai.count > aj.count || (ai.count == aj.count && ai.slot < aj.slot)
}

////////////////////////////////////////////////////////////////////////////////
// NumberRenamer

type NumberRenamer struct {
	symbols js_ast.SymbolMap
	names   [][]string
	root    numberScope
}

func NewNumberRenamer(symbols js_ast.SymbolMap, reservedNames map[string]uint32) *NumberRenamer {
	return &NumberRenamer{
		symbols: symbols,
		names:   make([][]string, len(symbols.SymbolsForSource)),
		root:    numberScope{nameCounts: reservedNames},
	}
}

func (r *NumberRenamer) NameForSymbol(ref js_ast.Ref) string {
	ref = js_ast.FollowSymbols(r.symbols, ref)
	if inner := r.names[ref.SourceIndex]; inner != nil {
		if name := inner[ref.InnerIndex]; name != "" {
			return name
		}
	}
	return r.symbols.Get(ref).OriginalName
}

func (r *NumberRenamer) AddTopLevelSymbol(ref js_ast.Ref) {
	r.assignName(&r.root, ref)
}

func (r *NumberRenamer) assignName(scope *numberScope, ref js_ast.Ref) {
	ref = js_ast.FollowSymbols(r.symbols, ref)

	// Don't rename the same symbol more than once
	inner := r.names[ref.SourceIndex]
	if inner != nil && inner[ref.InnerIndex] != "" {
		return
	}

	// Don't rename unbound symbols, symbols marked as reserved names, labels, or private names
	symbol := r.symbols.Get(ref)
	if symbol.SlotNamespace() != js_ast.SlotDefault {
		return
	}

	// Make sure names of symbols used in JSX elements start with a capital letter
	originalName := symbol.OriginalName
	if symbol.MustStartWithCapitalLetterForJSX {
		if first := rune(originalName[0]); first >= 'a' && first <= 'z' {
			originalName = fmt.Sprintf("%c%s", first+('A'-'a'), originalName[1:])
		}
	}

	// Compute a new name
	name := scope.findUnusedName(originalName)

	// Store the new name
	if inner == nil {
		// Note: This should not be a data race even though this method is run from
		// multiple threads. The parallel part only looks at symbols defined in
		// nested scopes, and those can only ever be accessed from within the file.
		// References to those symbols should never spread across files.
		//
		// While we could avoid the data race by densely preallocating the entire
		// "names" array ahead of time, that will waste a lot more memory for
		// builds that make heavy use of code splitting and have many chunks. Doing
		// things lazily like this means we use less memory but still stay safe.
		inner = make([]string, len(r.symbols.SymbolsForSource[ref.SourceIndex]))
		r.names[ref.SourceIndex] = inner
	}
	inner[ref.InnerIndex] = name
}

func (r *NumberRenamer) assignNamesRecursive(scope *js_ast.Scope, sourceIndex uint32, parent *numberScope, sorted *[]int) {
	s := &numberScope{parent: parent, nameCounts: make(map[string]uint32)}

	// Sort member map keys for determinism, reusing a shared memory buffer
	*sorted = (*sorted)[:0]
	for _, member := range scope.Members {
		*sorted = append(*sorted, int(member.Ref.InnerIndex))
	}
	sort.Ints(*sorted)

	// Rename all symbols in this scope
	for _, innerIndex := range *sorted {
		r.assignName(s, js_ast.Ref{SourceIndex: sourceIndex, InnerIndex: uint32(innerIndex)})
	}
	for _, ref := range scope.Generated {
		r.assignName(s, ref)
	}

	// Symbols in child scopes may also have to be renamed to avoid conflicts
	for _, child := range scope.Children {
		r.assignNamesRecursive(child, sourceIndex, s, sorted)
	}
}

func (r *NumberRenamer) AssignNamesByScope(nestedScopes map[uint32][]*js_ast.Scope) {
	waitGroup := sync.WaitGroup{}
	waitGroup.Add(len(nestedScopes))

	// Rename nested scopes from separate files in parallel
	for sourceIndex, scopes := range nestedScopes {
		go func(sourceIndex uint32, scopes []*js_ast.Scope) {
			var sorted []int
			for _, scope := range scopes {
				r.assignNamesRecursive(scope, sourceIndex, &r.root, &sorted)
			}
			waitGroup.Done()
		}(sourceIndex, scopes)
	}

	waitGroup.Wait()
}

type numberScope struct {
	parent *numberScope

	// This is used as a set of used names in this scope. This also maps the name
	// to the number of times the name has experienced a collision. When a name
	// collides with an already-used name, we need to rename it. This is done by
	// incrementing a number at the end until the name is unused. We save the
	// count here so that subsequent collisions can start counting from where the
	// previous collision ended instead of having to start counting from 1.
	nameCounts map[string]uint32
}

type nameUse uint8

const (
	nameUnused nameUse = iota
	nameUsed
	nameUsedInSameScope
)

func (s *numberScope) findNameUse(name string) nameUse {
	original := s
	for {
		if _, ok := s.nameCounts[name]; ok {
			if s == original {
				return nameUsedInSameScope
			}
			return nameUsed
		}
		s = s.parent
		if s == nil {
			return nameUnused
		}
	}
}

func (s *numberScope) findUnusedName(name string) string {
	name = js_lexer.ForceValidIdentifier(name)

	if use := s.findNameUse(name); use != nameUnused {
		// If the name is already in use, generate a new name by appending a number
		tries := uint32(1)
		if use == nameUsedInSameScope {
			// To avoid O(n^2) behavior, the number must start off being the number
			// that we used last time there was a collision with this name. Otherwise
			// if there are many collisions with the same name, each name collision
			// would have to increment the counter past all previous name collisions
			// which is a O(n^2) time algorithm. Only do this if this symbol comes
			// from the same scope as the previous one since sibling scopes can reuse
			// the same name without problems.
			tries = s.nameCounts[name]
		}
		prefix := name

		// Keep incrementing the number until the name is unused
		for {
			tries++
			name = prefix + strconv.Itoa(int(tries))

			// Make sure this new name is unused
			if s.findNameUse(name) == nameUnused {
				// Store the count so we can start here next time instead of starting
				// from 1. This means we avoid O(n^2) behavior.
				if use == nameUsedInSameScope {
					s.nameCounts[prefix] = tries
				}
				break
			}
		}
	}

	// Each name starts off with a count of 1 so that the first collision with
	// "name" is called "name2"
	s.nameCounts[name] = 1
	return name
}

////////////////////////////////////////////////////////////////////////////////
// ExportRenamer

type ExportRenamer struct {
	count int
	used  map[string]uint32
}

func (r *ExportRenamer) NextRenamedName(name string) string {
	if r.used == nil {
		r.used = make(map[string]uint32)
	}
	if tries, ok := r.used[name]; ok {
		prefix := name
		for {
			tries++
			name = prefix + strconv.Itoa(int(tries))
			if _, ok := r.used[name]; !ok {
				break
			}
		}
		r.used[name] = tries
	} else {
		r.used[name] = 1
	}
	return name
}

func (r *ExportRenamer) NextMinifiedName() string {
	name := js_ast.DefaultNameMinifier.NumberToMinifiedName(r.count)
	r.count++
	return name
}
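
The collision strategy in findUnusedName and NextRenamedName above is worth isolating: remember how far the counter got for each colliding prefix so that the n-th collision costs O(1) instead of rescanning name2..nameN. A self-contained sketch of just that trick (renamePool is an illustrative name, not esbuild API):

package main

import (
	"fmt"
	"strconv"
)

// renamePool hands out unique names using the same counter trick as the
// renamers above: the first collision with "x" becomes "x2", and the pool
// remembers the last counter value per prefix so repeated collisions
// resume counting instead of restarting from 1.
type renamePool struct {
	nameCounts map[string]uint32
}

func (p *renamePool) findUnusedName(name string) string {
	if p.nameCounts == nil {
		p.nameCounts = make(map[string]uint32)
	}
	if tries, ok := p.nameCounts[name]; ok {
		prefix := name
		for {
			tries++
			name = prefix + strconv.Itoa(int(tries))
			if _, used := p.nameCounts[name]; !used {
				break
			}
		}
		p.nameCounts[prefix] = tries // resume here on the next collision
	}
	p.nameCounts[name] = 1
	return name
}

func main() {
	p := renamePool{}
	fmt.Println(p.findUnusedName("x")) // x
	fmt.Println(p.findUnusedName("x")) // x2
	fmt.Println(p.findUnusedName("x")) // x3
}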
76 vendor/github.com/evanw/esbuild/internal/resolver/dataurl.go generated vendored
@@ -1,76 +0,0 @@
package resolver

import (
	"encoding/base64"
	"fmt"
	"net/url"
	"strings"
)

type DataURL struct {
	mimeType string
	data     string
	isBase64 bool
}

func ParseDataURL(url string) (parsed DataURL, ok bool) {
	if strings.HasPrefix(url, "data:") {
		if comma := strings.IndexByte(url, ','); comma != -1 {
			parsed.mimeType = url[len("data:"):comma]
			parsed.data = url[comma+1:]
			if strings.HasSuffix(parsed.mimeType, ";base64") {
				parsed.mimeType = parsed.mimeType[:len(parsed.mimeType)-len(";base64")]
				parsed.isBase64 = true
			}
			ok = true
		}
	}
	return
}

type MIMEType uint8

const (
	MIMETypeUnsupported MIMEType = iota
	MIMETypeTextCSS
	MIMETypeTextJavaScript
	MIMETypeApplicationJSON
)

func (parsed DataURL) DecodeMIMEType() MIMEType {
	// Remove things like ";charset=utf-8"
	mimeType := parsed.mimeType
	if semicolon := strings.IndexByte(mimeType, ';'); semicolon != -1 {
		mimeType = mimeType[:semicolon]
	}

	// Hard-code a few supported types
	switch mimeType {
	case "text/css":
		return MIMETypeTextCSS
	case "text/javascript":
		return MIMETypeTextJavaScript
	case "application/json":
		return MIMETypeApplicationJSON
	default:
		return MIMETypeUnsupported
	}
}

func (parsed DataURL) DecodeData() (string, error) {
	// Try to read base64 data
	if parsed.isBase64 {
		bytes, err := base64.StdEncoding.DecodeString(parsed.data)
		if err != nil {
			return "", fmt.Errorf("could not decode base64 data: %s", err.Error())
		}
		return string(bytes), nil
	}

	// Try to read percent-escaped data
	content, err := url.PathUnescape(parsed.data)
	if err != nil {
		return "", fmt.Errorf("could not decode percent-escaped data: %s", err.Error())
	}
	return content, nil
}
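
A runnable sketch of the same "data:<mimeType>[;base64],<data>" decomposition ParseDataURL performs, using only the standard library (the sample URL is illustrative):

package main

import (
	"encoding/base64"
	"fmt"
	"strings"
)

func main() {
	url := "data:text/javascript;base64,Y29uc29sZS5sb2coMSk7"

	// Split off the scheme, then split MIME type from payload at the comma.
	rest := strings.TrimPrefix(url, "data:")
	comma := strings.IndexByte(rest, ',')
	mimeType, data := rest[:comma], rest[comma+1:]

	// A ";base64" suffix on the MIME type means the payload is base64-encoded.
	if strings.HasSuffix(mimeType, ";base64") {
		mimeType = strings.TrimSuffix(mimeType, ";base64")
		decoded, err := base64.StdEncoding.DecodeString(data)
		if err != nil {
			panic(err)
		}
		data = string(decoded)
	}

	fmt.Println(mimeType) // text/javascript
	fmt.Println(data)     // console.log(1);
}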
1225 vendor/github.com/evanw/esbuild/internal/resolver/package_json.go generated vendored
File diff suppressed because it is too large
1967 vendor/github.com/evanw/esbuild/internal/resolver/resolver.go generated vendored
File diff suppressed because it is too large
319 vendor/github.com/evanw/esbuild/internal/resolver/tsconfig_json.go generated vendored
@@ -1,319 +0,0 @@
package resolver

import (
	"fmt"
	"strings"

	"github.com/evanw/esbuild/internal/cache"
	"github.com/evanw/esbuild/internal/compat"
	"github.com/evanw/esbuild/internal/config"
	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/js_parser"
	"github.com/evanw/esbuild/internal/logger"
)

type TSConfigJSON struct {
	AbsPath string

	// The absolute path of "compilerOptions.baseUrl"
	BaseURL *string

	// This is used if "Paths" is non-nil. It's equal to "BaseURL" except if
	// "BaseURL" is missing, in which case it is as if "BaseURL" was ".". This
	// is to implement the "paths without baseUrl" feature from TypeScript 4.1.
	// More info: https://github.com/microsoft/TypeScript/issues/31869
	BaseURLForPaths string

	// The verbatim values of "compilerOptions.paths". The keys are patterns to
	// match and the values are arrays of fallback paths to search. Each key and
	// each fallback path can optionally have a single "*" wildcard character.
	// If both the key and the value have a wildcard, the substring matched by
	// the wildcard is substituted into the fallback path. The keys represent
	// module-style path names and the fallback paths are relative to the
	// "baseUrl" value in the "tsconfig.json" file.
	Paths map[string][]string

	JSXFactory                     []string
	JSXFragmentFactory             []string
	TSTarget                       *config.TSTarget
	UseDefineForClassFields        config.MaybeBool
	PreserveImportsNotUsedAsValues bool
	PreserveValueImports           bool
}

func ParseTSConfigJSON(
	log logger.Log,
	source logger.Source,
	jsonCache *cache.JSONCache,
	extends func(string, logger.Range) *TSConfigJSON,
) *TSConfigJSON {
	// Unfortunately "tsconfig.json" isn't actually JSON. It's some other
	// format that appears to be defined by the implementation details of the
	// TypeScript compiler.
	//
	// Attempt to parse it anyway by modifying the JSON parser, but just for
	// these particular files. This is likely not a completely accurate
	// emulation of what the TypeScript compiler does (e.g. string escape
	// behavior may also be different).
	json, ok := jsonCache.Parse(log, source, js_parser.JSONOptions{
		AllowComments:       true, // https://github.com/microsoft/TypeScript/issues/4987
		AllowTrailingCommas: true,
	})
	if !ok {
		return nil
	}

	var result TSConfigJSON
	result.AbsPath = source.KeyPath.Text
	tracker := logger.MakeLineColumnTracker(&source)

	// Parse "extends"
	if extends != nil {
		if valueJSON, _, ok := getProperty(json, "extends"); ok {
			if value, ok := getString(valueJSON); ok {
				if base := extends(value, source.RangeOfString(valueJSON.Loc)); base != nil {
					result = *base
				}
			}
		}
	}

	// Parse "compilerOptions"
	if compilerOptionsJSON, _, ok := getProperty(json, "compilerOptions"); ok {
		// Parse "baseUrl"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "baseUrl"); ok {
			if value, ok := getString(valueJSON); ok {
				result.BaseURL = &value
			}
		}

		// Parse "jsxFactory"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "jsxFactory"); ok {
			if value, ok := getString(valueJSON); ok {
				result.JSXFactory = parseMemberExpressionForJSX(log, &source, &tracker, valueJSON.Loc, value)
			}
		}

		// Parse "jsxFragmentFactory"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "jsxFragmentFactory"); ok {
			if value, ok := getString(valueJSON); ok {
				result.JSXFragmentFactory = parseMemberExpressionForJSX(log, &source, &tracker, valueJSON.Loc, value)
			}
		}

		// Parse "useDefineForClassFields"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "useDefineForClassFields"); ok {
			if value, ok := getBool(valueJSON); ok {
				if value {
					result.UseDefineForClassFields = config.True
				} else {
					result.UseDefineForClassFields = config.False
				}
			}
		}

		// Parse "target"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "target"); ok {
			if value, ok := getString(valueJSON); ok {
				constraints := make(map[compat.Engine][]int)
				r := source.RangeOfString(valueJSON.Loc)
				ok := true

				// See https://www.typescriptlang.org/tsconfig#target
				switch strings.ToLower(value) {
				case "es5":
					constraints[compat.ES] = []int{5}
				case "es6", "es2015":
					constraints[compat.ES] = []int{2015}
				case "es2016":
					constraints[compat.ES] = []int{2016}
				case "es2017":
					constraints[compat.ES] = []int{2017}
				case "es2018":
					constraints[compat.ES] = []int{2018}
				case "es2019":
					constraints[compat.ES] = []int{2019}
				case "es2020":
					constraints[compat.ES] = []int{2020}
				case "es2021":
					constraints[compat.ES] = []int{2021}
				case "esnext":
					// Nothing to do in this case
				default:
					ok = false
					if !helpers.IsInsideNodeModules(source.KeyPath.Text) {
						log.Add(logger.Warning, &tracker, r,
							fmt.Sprintf("Unrecognized target environment %q", value))
					}
				}

				// These feature restrictions are merged with esbuild's own restrictions
				if ok {
					result.TSTarget = &config.TSTarget{
						Source:                source,
						Range:                 r,
						Target:                value,
						UnsupportedJSFeatures: compat.UnsupportedJSFeatures(constraints),
					}
				}
			}
		}

		// Parse "importsNotUsedAsValues"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "importsNotUsedAsValues"); ok {
			if value, ok := getString(valueJSON); ok {
				switch value {
				case "preserve", "error":
					result.PreserveImportsNotUsedAsValues = true
				case "remove":
				default:
					log.Add(logger.Warning, &tracker, source.RangeOfString(valueJSON.Loc),
						fmt.Sprintf("Invalid value %q for \"importsNotUsedAsValues\"", value))
				}
			}
		}

		// Parse "preserveValueImports"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "preserveValueImports"); ok {
			if value, ok := getBool(valueJSON); ok {
				result.PreserveValueImports = value
			}
		}

		// Parse "paths"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "paths"); ok {
			if paths, ok := valueJSON.Data.(*js_ast.EObject); ok {
				hasBaseURL := result.BaseURL != nil
				if hasBaseURL {
					result.BaseURLForPaths = *result.BaseURL
				} else {
					result.BaseURLForPaths = "."
				}
				result.Paths = make(map[string][]string)
				for _, prop := range paths.Properties {
					if key, ok := getString(prop.Key); ok {
						if !isValidTSConfigPathPattern(key, log, &source, &tracker, prop.Key.Loc) {
							continue
						}

						// The "paths" field is an object which maps a pattern to an
						// array of remapping patterns to try, in priority order. See
						// the documentation for examples of how this is used:
						// https://www.typescriptlang.org/docs/handbook/module-resolution.html#path-mapping.
						//
						// One particular example:
						//
						//   {
						//     "compilerOptions": {
						//       "baseUrl": "projectRoot",
						//       "paths": {
						//         "*": [
						//           "*",
						//           "generated/*"
						//         ]
						//       }
						//     }
						//   }
						//
						// Matching "folder1/file2" should first check "projectRoot/folder1/file2"
						// and then, if that didn't work, also check "projectRoot/generated/folder1/file2".
						if array, ok := prop.ValueOrNil.Data.(*js_ast.EArray); ok {
							for _, item := range array.Items {
								if str, ok := getString(item); ok {
|
||||
if isValidTSConfigPathPattern(str, log, &source, &tracker, item.Loc) &&
|
||||
(hasBaseURL || isValidTSConfigPathNoBaseURLPattern(str, log, &source, &tracker, item.Loc)) {
|
||||
result.Paths[key] = append(result.Paths[key], str)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.Add(logger.Warning, &tracker, source.RangeOfString(prop.ValueOrNil.Loc), fmt.Sprintf(
|
||||
"Substitutions for pattern %q should be an array", key))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &result
|
||||
}
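The wildcard rule described in the "paths" comment above can be demonstrated with a small standalone sketch. The helper below is illustrative only (the real lookup lives in esbuild's resolver, not in this file); it implements the single-"*" prefix/suffix match and the substitution of the matched substring into a fallback pattern:

package main

import (
	"fmt"
	"strings"
)

// matchTSConfigPath is a hypothetical illustration of the single-"*" rule:
// a key like "app/*" matches when the import shares its prefix and suffix,
// and the matched substring is substituted into a fallback like "src/*".
func matchTSConfigPath(pattern, importPath, fallback string) (string, bool) {
	star := strings.IndexByte(pattern, '*')
	if star < 0 {
		// No wildcard: the key must match exactly
		if pattern == importPath {
			return fallback, true
		}
		return "", false
	}
	prefix, suffix := pattern[:star], pattern[star+1:]
	if len(importPath) < len(prefix)+len(suffix) ||
		!strings.HasPrefix(importPath, prefix) || !strings.HasSuffix(importPath, suffix) {
		return "", false
	}
	matched := importPath[len(prefix) : len(importPath)-len(suffix)]
	// Substitute the matched substring into the fallback's wildcard, if any
	return strings.Replace(fallback, "*", matched, 1), true
}

func main() {
	// With {"*": ["*", "generated/*"]}, "folder1/file2" tries both fallbacks
	fmt.Println(matchTSConfigPath("*", "folder1/file2", "*"))           // folder1/file2 true
	fmt.Println(matchTSConfigPath("*", "folder1/file2", "generated/*")) // generated/folder1/file2 true
}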
func parseMemberExpressionForJSX(log logger.Log, source *logger.Source, tracker *logger.LineColumnTracker, loc logger.Loc, text string) []string {
	if text == "" {
		return nil
	}
	parts := strings.Split(text, ".")
	for _, part := range parts {
		if !js_lexer.IsIdentifier(part) {
			warnRange := source.RangeOfString(loc)
			log.Add(logger.Warning, tracker, warnRange, fmt.Sprintf("Invalid JSX member expression: %q", text))
			return nil
		}
	}
	return parts
}

func isValidTSConfigPathPattern(text string, log logger.Log, source *logger.Source, tracker *logger.LineColumnTracker, loc logger.Loc) bool {
	foundAsterisk := false
	for i := 0; i < len(text); i++ {
		if text[i] == '*' {
			if foundAsterisk {
				r := source.RangeOfString(loc)
				log.Add(logger.Warning, tracker, r, fmt.Sprintf(
					"Invalid pattern %q, must have at most one \"*\" character", text))
				return false
			}
			foundAsterisk = true
		}
	}
	return true
}

func isSlash(c byte) bool {
	return c == '/' || c == '\\'
}

func isValidTSConfigPathNoBaseURLPattern(text string, log logger.Log, source *logger.Source, tracker *logger.LineColumnTracker, loc logger.Loc) bool {
	var c0 byte
	var c1 byte
	var c2 byte
	n := len(text)

	if n > 0 {
		c0 = text[0]
		if n > 1 {
			c1 = text[1]
			if n > 2 {
				c2 = text[2]
			}
		}
	}

	// Relative "." or ".."
	if c0 == '.' && (n == 1 || (n == 2 && c1 == '.')) {
		return true
	}

	// Relative "./" or "../" or ".\\" or "..\\"
	if c0 == '.' && (isSlash(c1) || (c1 == '.' && isSlash(c2))) {
		return true
	}

	// Absolute POSIX "/" or UNC "\\"
	if isSlash(c0) {
		return true
	}

	// Absolute DOS "c:/" or "c:\\"
	if ((c0 >= 'a' && c0 <= 'z') || (c0 >= 'A' && c0 <= 'Z')) && c1 == ':' && isSlash(c2) {
		return true
	}

	r := source.RangeOfString(loc)
	log.Add(logger.Warning, tracker, r, fmt.Sprintf(
		"Non-relative path %q is not allowed when \"baseUrl\" is not set (did you forget a leading \"./\"?)", text))
	return false
}
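The four accepted shapes can be exercised with a quick standalone re-implementation of the checks above (illustrative only, not the vendored function itself):

package main

import "fmt"

// validNoBaseURL mirrors isValidTSConfigPathNoBaseURLPattern above: only
// relative ("."/".."/"./x"/"../x") or absolute (POSIX, UNC, DOS) patterns
// are accepted when "baseUrl" is not set.
func validNoBaseURL(text string) bool {
	isSlash := func(c byte) bool { return c == '/' || c == '\\' }
	var c0, c1, c2 byte
	n := len(text)
	if n > 0 {
		c0 = text[0]
	}
	if n > 1 {
		c1 = text[1]
	}
	if n > 2 {
		c2 = text[2]
	}
	switch {
	case c0 == '.' && (n == 1 || (n == 2 && c1 == '.')): // "." or ".."
		return true
	case c0 == '.' && (isSlash(c1) || (c1 == '.' && isSlash(c2))): // "./x", "../x"
		return true
	case isSlash(c0): // "/x" or "\\server\x"
		return true
	case (c0|32) >= 'a' && (c0|32) <= 'z' && c1 == ':' && isSlash(c2): // "c:\x"
		return true
	}
	return false
}

func main() {
	for _, p := range []string{".", "./src/*", "/abs/*", `C:\x\*`, "pkg/*"} {
		fmt.Println(p, validNoBaseURL(p)) // only "pkg/*" is rejected
	}
}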
425 vendor/github.com/evanw/esbuild/internal/runtime/runtime.go generated vendored
@@ -1,425 +0,0 @@
// This is esbuild's runtime code. It contains helper functions that are
// automatically injected into output files to implement certain features. For
// example, the "**" operator is replaced with a call to "__pow" when targeting
// ES2015. Tree shaking automatically removes unused code from the runtime.

package runtime

import (
	"github.com/evanw/esbuild/internal/compat"
	"github.com/evanw/esbuild/internal/logger"
)

// The runtime source is always at a special index. The index is always zero
// but this constant is always used instead to improve readability and ensure
// all code that references this index can be discovered easily.
const SourceIndex = uint32(0)

func CanUseES6(unsupportedFeatures compat.JSFeature) bool {
	return !unsupportedFeatures.Has(compat.Let) && !unsupportedFeatures.Has(compat.ForOf)
}

func code(isES6 bool) string {
	// Note: These helper functions used to be named similar things to the helper
	// functions from the TypeScript compiler. However, people sometimes use these
	// two projects in combination and TypeScript's implementation of these helpers
	// causes name collisions. Some examples:
	//
	// * The "tslib" library will overwrite esbuild's helper functions if the bundled
	//   code is run in the global scope: https://github.com/evanw/esbuild/issues/1102
	//
	// * Running the TypeScript compiler on esbuild's output to convert ES6 to ES5
	//   will also overwrite esbuild's helper functions because TypeScript doesn't
	//   change the names of its helper functions to avoid name collisions:
	//   https://github.com/microsoft/TypeScript/issues/43296
	//
	// These can both be considered bugs in TypeScript. However, they are unlikely
	// to be fixed and it's simplest to just avoid using the same names to avoid
	// these bugs. Forbidden names (from "tslib"):
	//
	//   __assign
	//   __asyncDelegator
	//   __asyncGenerator
	//   __asyncValues
	//   __await
	//   __awaiter
	//   __classPrivateFieldGet
	//   __classPrivateFieldSet
	//   __createBinding
	//   __decorate
	//   __exportStar
	//   __extends
	//   __generator
	//   __importDefault
	//   __importStar
	//   __makeTemplateObject
	//   __metadata
	//   __param
	//   __read
	//   __rest
	//   __spread
	//   __spreadArray
	//   __spreadArrays
	//   __values
	//
	// Note: The "__objRest" function has a for-of loop which requires ES6, but
	// transforming destructuring to ES5 isn't even supported so it's ok.
	text := `
	var __create = Object.create
	var __freeze = Object.freeze
	var __defProp = Object.defineProperty
	var __defProps = Object.defineProperties
	var __getOwnPropDesc = Object.getOwnPropertyDescriptor // Note: can return "undefined" due to a Safari bug
	var __getOwnPropDescs = Object.getOwnPropertyDescriptors
	var __getOwnPropNames = Object.getOwnPropertyNames
	var __getOwnPropSymbols = Object.getOwnPropertySymbols
	var __getProtoOf = Object.getPrototypeOf
	var __hasOwnProp = Object.prototype.hasOwnProperty
	var __propIsEnum = Object.prototype.propertyIsEnumerable
	var __reflectGet = Reflect.get
	var __reflectSet = Reflect.set

	export var __pow = Math.pow

	var __defNormalProp = (obj, key, value) => key in obj
		? __defProp(obj, key, {enumerable: true, configurable: true, writable: true, value})
		: obj[key] = value

	export var __spreadValues = (a, b) => {
		for (var prop in b ||= {})
			if (__hasOwnProp.call(b, prop))
				__defNormalProp(a, prop, b[prop])
		if (__getOwnPropSymbols)
	`

	// Avoid "of" when not using ES6
	if isES6 {
		text += `
			for (var prop of __getOwnPropSymbols(b)) {
	`
	} else {
		text += `
			for (var props = __getOwnPropSymbols(b), i = 0, n = props.length, prop; i < n; i++) {
				prop = props[i]
	`
	}

	text += `
				if (__propIsEnum.call(b, prop))
					__defNormalProp(a, prop, b[prop])
			}
		return a
	}
	export var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b))

	// Tells importing modules that this can be considered an ES module
	var __markAsModule = target => __defProp(target, '__esModule', { value: true })

	// Update the "name" property on the function or class for "--keep-names"
	export var __name = (target, value) => __defProp(target, 'name', { value, configurable: true })

	// This fallback "require" function exists so that "typeof require" can
	// naturally be "function" even in non-CommonJS environments since esbuild
	// emulates a CommonJS environment (issue #1202). However, people want this
	// shim to fall back to "globalThis.require" even if it's defined later
	// (including property accesses such as "require.resolve") so we need to
	// use a proxy (issue #1614).
	export var __require =
		/* @__PURE__ */ (x =>
			typeof require !== 'undefined' ? require :
			typeof Proxy !== 'undefined' ? new Proxy(x, {
				get: (a, b) => (typeof require !== 'undefined' ? require : a)[b]
			}) : x
		)(function(x) {
			if (typeof require !== 'undefined') return require.apply(this, arguments)
			throw new Error('Dynamic require of "' + x + '" is not supported')
		})

	// For object rest patterns
	export var __restKey = key => typeof key === 'symbol' ? key : key + ''
	export var __objRest = (source, exclude) => {
		var target = {}
		for (var prop in source)
			if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
				target[prop] = source[prop]
		if (source != null && __getOwnPropSymbols)
	`

	// Avoid "of" when not using ES6
	if isES6 {
		text += `
			for (var prop of __getOwnPropSymbols(source)) {
	`
	} else {
		text += `
			for (var props = __getOwnPropSymbols(source), i = 0, n = props.length, prop; i < n; i++) {
				prop = props[i]
	`
	}

	text += `
				if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
					target[prop] = source[prop]
			}
		return target
	}

	// This is for lazily-initialized ESM code. This has two implementations, a
	// compact one for minified code and a verbose one that generates friendly
	// names in V8's profiler and in stack traces.
	export var __esm = (fn, res) => function __init() {
		return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res
	}
	export var __esmMin = (fn, res) => () => (fn && (res = fn(fn = 0)), res)

	// Wraps a CommonJS closure and returns a require() function. This has two
	// implementations, a compact one for minified code and a verbose one that
	// generates friendly names in V8's profiler and in stack traces.
	export var __commonJS = (cb, mod) => function __require() {
		return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = {exports: {}}).exports, mod), mod.exports
	}
	export var __commonJSMin = (cb, mod) => () => (mod || cb((mod = {exports: {}}).exports, mod), mod.exports)

	// Used to implement ESM exports both for "require()" and "import * as"
	export var __export = (target, all) => {
		for (var name in all)
			__defProp(target, name, { get: all[name], enumerable: true })
	}
	export var __reExport = (target, module, copyDefault, desc) => {
		if (module && typeof module === 'object' || typeof module === 'function')
	`

	// Avoid "let" when not using ES6
	if isES6 {
		text += `
			for (let key of __getOwnPropNames(module))
				if (!__hasOwnProp.call(target, key) && (copyDefault || key !== 'default'))
					__defProp(target, key, { get: () => module[key], enumerable: !(desc = __getOwnPropDesc(module, key)) || desc.enumerable })
	`
	} else {
		text += `
			for (var keys = __getOwnPropNames(module), i = 0, n = keys.length, key; i < n; i++) {
				key = keys[i]
				if (!__hasOwnProp.call(target, key) && (copyDefault || key !== 'default'))
					__defProp(target, key, { get: (k => module[k]).bind(null, key), enumerable: !(desc = __getOwnPropDesc(module, key)) || desc.enumerable })
			}
	`
	}

	text += `
		return target
	}

	// Converts the module from CommonJS to ESM
	export var __toESM = (module, isNodeMode) => {
		return __reExport(__markAsModule(
			__defProp(
				module != null ? __create(__getProtoOf(module)) : {},
				'default',

				// If the importer is not in node compatibility mode and this is an ESM
				// file that has been converted to a CommonJS file using a Babel-
				// compatible transform (i.e. "__esModule" has been set), then forward
				// "default" to the export named "default". Otherwise set "default" to
				// "module.exports" for node compatibility.
				!isNodeMode && module && module.__esModule
					? { get: () => module.default, enumerable: true }
					: { value: module, enumerable: true })
		), module)
	}

	// Converts the module from ESM to CommonJS
	export var __toCommonJS = /* @__PURE__ */ (cache => {
		return (module, temp) => {
			return (cache && cache.get(module)) || (
				temp = __reExport(__markAsModule({}), module, /* copyDefault */ 1),
				cache && cache.set(module, temp),
				temp)
		}
	})(typeof WeakMap !== 'undefined' ? new WeakMap : 0)

	// For TypeScript decorators
	// - kind === undefined: class
	// - kind === 1: method, parameter
	// - kind === 2: field
	export var __decorateClass = (decorators, target, key, kind) => {
		var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target
		for (var i = decorators.length - 1, decorator; i >= 0; i--)
			if (decorator = decorators[i])
				result = (kind ? decorator(target, key, result) : decorator(result)) || result
		if (kind && result)
			__defProp(target, key, result)
		return result
	}
	export var __decorateParam = (index, decorator) => (target, key) => decorator(target, key, index)

	// For class members
	export var __publicField = (obj, key, value) => {
		__defNormalProp(obj, typeof key !== 'symbol' ? key + '' : key, value)
		return value
	}
	var __accessCheck = (obj, member, msg) => {
		if (!member.has(obj)) throw TypeError('Cannot ' + msg)
	}
	export var __privateIn = (member, obj) => {
		if (Object(obj) !== obj) throw TypeError('Cannot use the "in" operator on this value')
		return member.has(obj)
	}
	export var __privateGet = (obj, member, getter) => {
		__accessCheck(obj, member, 'read from private field')
		return getter ? getter.call(obj) : member.get(obj)
	}
	export var __privateAdd = (obj, member, value) => {
		if (member.has(obj)) throw TypeError('Cannot add the same private member more than once')
		member instanceof WeakSet ? member.add(obj) : member.set(obj, value)
	}
	export var __privateSet = (obj, member, value, setter) => {
		__accessCheck(obj, member, 'write to private field')
		setter ? setter.call(obj, value) : member.set(obj, value)
		return value
	}
	export var __privateWrapper = (obj, member, setter, getter) => {
		return {
			set _(value) { __privateSet(obj, member, value, setter) },
			get _() { return __privateGet(obj, member, getter) },
		}
	}
	export var __privateMethod = (obj, member, method) => {
		__accessCheck(obj, member, 'access private method')
		return method
	}

	// For "super" property accesses
	export var __superStaticGet = (obj, member) => __reflectGet(__getProtoOf(obj), member, obj)
	export var __superStaticSet = (obj, member, value) => (__reflectSet(__getProtoOf(obj), member, value, obj), value)
	export var __superWrapper = (getter, setter, member) => {
		return {
			set _(value) { setter(member, value) },
			get _() { return getter(member) },
		}
	}
	export var __superStaticWrapper = (obj, member) => {
		return {
			set _(value) { __superStaticSet(obj, member, value) },
			get _() { return __superStaticGet(obj, member) },
		}
	}

	// For lowering tagged template literals
	export var __template = (cooked, raw) => __freeze(__defProp(cooked, 'raw', { value: __freeze(raw || cooked.slice()) }))

	// This helps for lowering async functions
	export var __async = (__this, __arguments, generator) => {
		return new Promise((resolve, reject) => {
			var fulfilled = value => {
				try {
					step(generator.next(value))
				} catch (e) {
					reject(e)
				}
			}
			var rejected = value => {
				try {
					step(generator.throw(value))
				} catch (e) {
					reject(e)
				}
			}
			var step = x => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected)
			step((generator = generator.apply(__this, __arguments)).next())
		})
	}

	// This is for the "binary" loader (custom code is ~2x faster than "atob")
	export var __toBinaryNode = base64 => new Uint8Array(Buffer.from(base64, 'base64'))
	export var __toBinary = /* @__PURE__ */ (() => {
		var table = new Uint8Array(128)
		for (var i = 0; i < 64; i++) table[i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? i - 4 : i * 4 - 205] = i
		return base64 => {
			var n = base64.length, bytes = new Uint8Array((n - (base64[n - 1] == '=') - (base64[n - 2] == '=')) * 3 / 4 | 0)
			for (var i = 0, j = 0; i < n;) {
				var c0 = table[base64.charCodeAt(i++)], c1 = table[base64.charCodeAt(i++)]
				var c2 = table[base64.charCodeAt(i++)], c3 = table[base64.charCodeAt(i++)]
				bytes[j++] = (c0 << 2) | (c1 >> 4)
				bytes[j++] = (c1 << 4) | (c2 >> 2)
				bytes[j++] = (c2 << 6) | c3
			}
			return bytes
		}
	})()
	`

	return text
}

var ES6Source = logger.Source{
	Index:          SourceIndex,
	KeyPath:        logger.Path{Text: "<runtime>"},
	PrettyPath:     "<runtime>",
	IdentifierName: "runtime",
	Contents:       code(true /* isES6 */),
}

var ES5Source = logger.Source{
	Index:          SourceIndex,
	KeyPath:        logger.Path{Text: "<runtime>"},
	PrettyPath:     "<runtime>",
	IdentifierName: "runtime",
	Contents:       code(false /* isES6 */),
}
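A standalone sketch of the CanUseES6 decision above. The bitmask shape of the feature type is an assumption here (only the Has method of compat.JSFeature appears in this file); the point is that either "let" or "for..of" being unsupported forces the ES5 runtime variant:

package main

import "fmt"

// JSFeature mirrors the bitmask style assumed for esbuild's compat.JSFeature
// (an assumption for this sketch; the real type lives in internal/compat).
type JSFeature uint32

const (
	Let JSFeature = 1 << iota
	ForOf
)

func (f JSFeature) Has(feature JSFeature) bool { return f&feature != 0 }

// canUseES6 restates CanUseES6 above: the ES6 runtime variant is only safe
// when neither "let" nor "for..of" is in the unsupported-feature set.
func canUseES6(unsupported JSFeature) bool {
	return !unsupported.Has(Let) && !unsupported.Has(ForOf)
}

func main() {
	fmt.Println(canUseES6(0))           // true  -> ES6Source
	fmt.Println(canUseES6(Let))         // false -> ES5Source
	fmt.Println(canUseES6(Let | ForOf)) // false -> ES5Source
}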
// The TypeScript decorator transform behaves similar to the official
// TypeScript compiler.
//
// One difference is that the "__decorateClass" function doesn't contain a reference
// to the non-existent "Reflect.decorate" function. This function was never
// standardized and checking for it is wasted code (as well as a potentially
// dangerous cause of unintentional behavior changes in the future).
//
// Another difference is that the "__decorateClass" function doesn't take in an
// optional property descriptor like it does in the official TypeScript
// compiler's support code. This appears to be a dead code path in the official
// support code that is only there for legacy reasons.
//
// Here are some examples of how esbuild's decorator transform works:
//
// ============================= Class decorator ==============================
//
//   // TypeScript                      // JavaScript
//   @dec                               let C = class {
//   class C {                          };
//   }                                  C = __decorateClass([
//                                        dec
//                                      ], C);
//
// ============================ Method decorator ==============================
//
//   // TypeScript                      // JavaScript
//   class C {                          class C {
//     @dec                               foo() {}
//     foo() {}                         }
//   }                                  __decorateClass([
//                                        dec
//                                      ], C.prototype, 'foo', 1);
//
// =========================== Parameter decorator ============================
//
//   // TypeScript                      // JavaScript
//   class C {                          class C {
//     foo(@dec bar) {}                   foo(bar) {}
//   }                                  }
//                                      __decorateClass([
//                                        __decorateParam(0, dec)
//                                      ], C.prototype, 'foo', 1);
//
// ============================= Field decorator ==============================
//
//   // TypeScript                      // JavaScript
//   class C {                          class C {
//     @dec                               constructor() {
//     foo = 123                            this.foo = 123
//   }                                    }
//                                      }
//                                      __decorateClass([
//                                        dec
//                                      ], C.prototype, 'foo', 2);
757 vendor/github.com/evanw/esbuild/internal/sourcemap/sourcemap.go generated vendored
@@ -1,757 +0,0 @@
package sourcemap

import (
	"bytes"
	"unicode/utf8"

	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/logger"
)

type Mapping struct {
	GeneratedLine   int32 // 0-based
	GeneratedColumn int32 // 0-based count of UTF-16 code units

	SourceIndex    int32 // 0-based
	OriginalLine   int32 // 0-based
	OriginalColumn int32 // 0-based count of UTF-16 code units
}

type SourceMap struct {
	Sources        []string
	SourcesContent []SourceContent
	Mappings       []Mapping
}

type SourceContent struct {
	// This stores both the unquoted and the quoted values. We try to use the
	// already-quoted value if possible so we don't need to re-quote it
	// unnecessarily for maximum performance.
	Quoted string

	// But sometimes we need to re-quote the value, such as when it contains
	// non-ASCII characters and we are in ASCII-only mode. In that case we quote
	// this parsed UTF-16 value.
	Value []uint16
}

func (sm *SourceMap) Find(line int32, column int32) *Mapping {
	mappings := sm.Mappings

	// Binary search
	count := len(mappings)
	index := 0
	for count > 0 {
		step := count / 2
		i := index + step
		mapping := mappings[i]
		if mapping.GeneratedLine < line || (mapping.GeneratedLine == line && mapping.GeneratedColumn <= column) {
			index = i + 1
			count -= step + 1
		} else {
			count = step
		}
	}

	// Handle search failure
	if index > 0 {
		mapping := &mappings[index-1]

		// Match the behavior of the popular "source-map" library from Mozilla
		if mapping.GeneratedLine == line {
			return mapping
		}
	}
	return nil
}

var base64 = []byte("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")

// A single base 64 digit can contain 6 bits of data. For the base 64 variable
// length quantities we use in the source map spec, the first bit is the sign,
// the next four bits are the actual value, and the 6th bit is the continuation
// bit. The continuation bit tells us whether there are more digits in this
// value following this digit.
//
//   Continuation
//   |    Sign
//   |    |
//   V    V
//   101011
//
func EncodeVLQ(value int) []byte {
	var vlq int
	if value < 0 {
		vlq = ((-value) << 1) | 1
	} else {
		vlq = value << 1
	}

	// Handle the common case up front without allocations
	if (vlq >> 5) == 0 {
		digit := vlq & 31
		return base64[digit : digit+1]
	}

	encoded := []byte{}
	for {
		digit := vlq & 31
		vlq >>= 5

		// If there are still more digits in this value, we must make sure the
		// continuation bit is marked
		if vlq != 0 {
			digit |= 32
		}

		encoded = append(encoded, base64[digit])

		if vlq == 0 {
			break
		}
	}

	return encoded
}

func DecodeVLQ(encoded []byte, start int) (int, int) {
	shift := 0
	vlq := 0

	// Scan over the input
	for {
		index := bytes.IndexByte(base64, encoded[start])
		if index < 0 {
			break
		}

		// Decode a single byte
		vlq |= (index & 31) << shift
		start++
		shift += 5

		// Stop if there's no continuation bit
		if (index & 32) == 0 {
			break
		}
	}

	// Recover the value
	value := vlq >> 1
	if (vlq & 1) != 0 {
		value = -value
	}
	return value, start
}
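A worked example of the VLQ layout above, as a standalone program. The helper restates EncodeVLQ so it can run outside the package; the printed values follow directly from the sign-bit and continuation-bit rules in the comment:

package main

import "fmt"

const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"

// encodeVLQ restates EncodeVLQ above: shift the sign into the lowest bit,
// then emit 5-bit groups low-to-high, setting bit 6 (the continuation bit)
// on every group except the last.
func encodeVLQ(value int) string {
	vlq := value << 1
	if value < 0 {
		vlq = ((-value) << 1) | 1
	}
	out := []byte{}
	for {
		digit := vlq & 31
		vlq >>= 5
		if vlq != 0 {
			digit |= 32 // continuation bit
		}
		out = append(out, alphabet[digit])
		if vlq == 0 {
			return string(out)
		}
	}
}

func main() {
	fmt.Println(encodeVLQ(0))  // "A"
	fmt.Println(encodeVLQ(1))  // "C"  (vlq = 2)
	fmt.Println(encodeVLQ(-1)) // "D"  (vlq = 3, sign bit set)
	fmt.Println(encodeVLQ(16)) // "gB" (vlq = 32 needs two digits)
}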
func DecodeVLQUTF16(encoded []uint16) (int, int, bool) {
	n := len(encoded)
	if n == 0 {
		return 0, 0, false
	}

	// Scan over the input
	current := 0
	shift := 0
	vlq := 0
	for {
		if current >= n {
			return 0, 0, false
		}
		index := bytes.IndexByte(base64, byte(encoded[current]))
		if index < 0 {
			return 0, 0, false
		}

		// Decode a single byte
		vlq |= (index & 31) << shift
		current++
		shift += 5

		// Stop if there's no continuation bit
		if (index & 32) == 0 {
			break
		}
	}

	// Recover the value
	var value = vlq >> 1
	if (vlq & 1) != 0 {
		value = -value
	}
	return value, current, true
}

type LineColumnOffset struct {
	Lines   int
	Columns int
}

func (a LineColumnOffset) ComesBefore(b LineColumnOffset) bool {
	return a.Lines < b.Lines || (a.Lines == b.Lines && a.Columns < b.Columns)
}

func (a *LineColumnOffset) Add(b LineColumnOffset) {
	if b.Lines == 0 {
		a.Columns += b.Columns
	} else {
		a.Lines += b.Lines
		a.Columns = b.Columns
	}
}

func (offset *LineColumnOffset) AdvanceBytes(bytes []byte) {
	columns := offset.Columns
	for len(bytes) > 0 {
		c, width := utf8.DecodeRune(bytes)
		bytes = bytes[width:]
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' && len(bytes) > 0 && bytes[0] == '\n' {
				columns++
				continue
			}

			offset.Lines++
			columns = 0

		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				columns++
			} else {
				columns += 2
			}
		}
	}
	offset.Columns = columns
}

func (offset *LineColumnOffset) AdvanceString(text string) {
	columns := offset.Columns
	for i, c := range text {
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' && i+1 < len(text) && text[i+1] == '\n' {
				columns++
				continue
			}

			offset.Lines++
			columns = 0

		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				columns++
			} else {
				columns += 2
			}
		}
	}
	offset.Columns = columns
}
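The UTF-16 column rule used throughout this file can be shown in isolation. A minimal sketch, assuming only the rule stated in the comments above (code points above U+FFFF occupy two UTF-16 code units, so they advance the column by 2):

package main

import "fmt"

// utf16Columns applies the column rule from AdvanceString above.
func utf16Columns(line string) int {
	columns := 0
	for _, c := range line {
		if c <= 0xFFFF {
			columns++
		} else {
			columns += 2
		}
	}
	return columns
}

func main() {
	fmt.Println(utf16Columns("abc"))  // 3
	fmt.Println(utf16Columns("a😀b")) // 4: the emoji is a surrogate pair
}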
type SourceMapPieces struct {
	Prefix   []byte
	Mappings []byte
	Suffix   []byte
}

func (pieces SourceMapPieces) HasContent() bool {
	return len(pieces.Prefix)+len(pieces.Mappings)+len(pieces.Suffix) > 0
}

type SourceMapShift struct {
	Before LineColumnOffset
	After  LineColumnOffset
}

func (pieces SourceMapPieces) Finalize(shifts []SourceMapShift) []byte {
	// An optimized path for when there are no shifts
	if len(shifts) == 1 {
		bytes := pieces.Prefix
		minCap := len(bytes) + len(pieces.Mappings) + len(pieces.Suffix)
		if cap(bytes) < minCap {
			bytes = append(make([]byte, 0, minCap), bytes...)
		}
		bytes = append(bytes, pieces.Mappings...)
		bytes = append(bytes, pieces.Suffix...)
		return bytes
	}

	startOfRun := 0
	current := 0
	generated := LineColumnOffset{}
	prevShiftColumnDelta := 0
	j := helpers.Joiner{}

	// Start the source map
	j.AddBytes(pieces.Prefix)

	// This assumes that a) all mappings are valid and b) all mappings are ordered
	// by increasing generated position. This should be the case for all mappings
	// generated by esbuild, which should be the only mappings we process here.
	for current < len(pieces.Mappings) {
		// Handle a line break
		if pieces.Mappings[current] == ';' {
			generated.Lines++
			generated.Columns = 0
			prevShiftColumnDelta = 0
			current++
			continue
		}

		potentialEndOfRun := current

		// Read the generated column
		generatedColumnDelta, next := DecodeVLQ(pieces.Mappings, current)
		generated.Columns += generatedColumnDelta
		current = next

		potentialStartOfRun := current

		// Skip over the original position information
		_, current = DecodeVLQ(pieces.Mappings, current) // The original source
		_, current = DecodeVLQ(pieces.Mappings, current) // The original line
		_, current = DecodeVLQ(pieces.Mappings, current) // The original column

		// Skip a trailing comma
		if current < len(pieces.Mappings) && pieces.Mappings[current] == ',' {
			current++
		}

		// Detect crossing shift boundaries
		didCrossBoundary := false
		for len(shifts) > 1 && shifts[1].Before.ComesBefore(generated) {
			shifts = shifts[1:]
			didCrossBoundary = true
		}
		if !didCrossBoundary {
			continue
		}

		// This shift isn't relevant if the next mapping after this shift is on a
		// following line. In that case, don't split and keep scanning instead.
		shift := shifts[0]
		if shift.After.Lines != generated.Lines {
			continue
		}

		// Add all previous mappings in a single run for efficiency. Since source
		// mappings are relative, no data needs to be modified inside this run.
		j.AddBytes(pieces.Mappings[startOfRun:potentialEndOfRun])

		// Then modify the first mapping across the shift boundary with the updated
		// generated column value. It's simplest to only support column shifts. This
		// is reasonable because import paths should not contain newlines.
		if shift.Before.Lines != shift.After.Lines {
			panic("Unexpected line change when shifting source maps")
		}
		shiftColumnDelta := shift.After.Columns - shift.Before.Columns
		j.AddBytes(EncodeVLQ(generatedColumnDelta + shiftColumnDelta - prevShiftColumnDelta))
		prevShiftColumnDelta = shiftColumnDelta

		// Finally, start the next run after the end of this generated column offset
		startOfRun = potentialStartOfRun
	}

	// Finish the source map
	j.AddBytes(pieces.Mappings[startOfRun:])
	j.AddBytes(pieces.Suffix)
	return j.Done()
}

// Coordinates in source maps are stored using relative offsets for size
// reasons. When joining together chunks of a source map that were emitted
// in parallel for different parts of a file, we need to fix up the first
// segment of each chunk to be relative to the end of the previous chunk.
type SourceMapState struct {
	// This isn't stored in the source map. It's only used by the bundler to join
	// source map chunks together correctly.
	GeneratedLine int

	// These are stored in the source map in VLQ format.
	GeneratedColumn int
	SourceIndex     int
	OriginalLine    int
	OriginalColumn  int
}

// Source map chunks are computed in parallel for speed. Each chunk is relative
// to the zero state instead of being relative to the end state of the previous
// chunk, since it's impossible to know the end state of the previous chunk in
// a parallel computation.
//
// After all chunks are computed, they are joined together in a second pass.
// This rewrites the first mapping in each chunk to be relative to the end
// state of the previous chunk.
func AppendSourceMapChunk(j *helpers.Joiner, prevEndState SourceMapState, startState SourceMapState, sourceMap []byte) {
	// Handle line breaks in between this mapping and the previous one
	if startState.GeneratedLine != 0 {
		j.AddBytes(bytes.Repeat([]byte{';'}, startState.GeneratedLine))
		prevEndState.GeneratedColumn = 0
	}

	// Skip past any leading semicolons, which indicate line breaks
	semicolons := 0
	for sourceMap[semicolons] == ';' {
		semicolons++
	}
	if semicolons > 0 {
		j.AddBytes(sourceMap[:semicolons])
		sourceMap = sourceMap[semicolons:]
		prevEndState.GeneratedColumn = 0
		startState.GeneratedColumn = 0
	}

	// Strip off the first mapping from the buffer. The first mapping should be
	// for the start of the original file (the printer always generates one for
	// the start of the file).
	generatedColumn, i := DecodeVLQ(sourceMap, 0)
	sourceIndex, i := DecodeVLQ(sourceMap, i)
	originalLine, i := DecodeVLQ(sourceMap, i)
	originalColumn, i := DecodeVLQ(sourceMap, i)
	sourceMap = sourceMap[i:]

	// Rewrite the first mapping to be relative to the end state of the previous
	// chunk. We now know what the end state is because we're in the second pass
	// where all chunks have already been generated.
	startState.SourceIndex += sourceIndex
	startState.GeneratedColumn += generatedColumn
	startState.OriginalLine += originalLine
	startState.OriginalColumn += originalColumn
	j.AddBytes(appendMappingToBuffer(nil, j.LastByte(), prevEndState, startState))

	// Then append everything after that without modification.
	j.AddBytes(sourceMap)
}

func appendMappingToBuffer(buffer []byte, lastByte byte, prevState SourceMapState, currentState SourceMapState) []byte {
	// Put commas in between mappings
	if lastByte != 0 && lastByte != ';' && lastByte != '"' {
		buffer = append(buffer, ',')
	}

	// Record the generated column (the line is recorded using ';' elsewhere)
	buffer = append(buffer, EncodeVLQ(currentState.GeneratedColumn-prevState.GeneratedColumn)...)
	prevState.GeneratedColumn = currentState.GeneratedColumn

	// Record the generated source
	buffer = append(buffer, EncodeVLQ(currentState.SourceIndex-prevState.SourceIndex)...)
	prevState.SourceIndex = currentState.SourceIndex

	// Record the original line
	buffer = append(buffer, EncodeVLQ(currentState.OriginalLine-prevState.OriginalLine)...)
	prevState.OriginalLine = currentState.OriginalLine

	// Record the original column
	buffer = append(buffer, EncodeVLQ(currentState.OriginalColumn-prevState.OriginalColumn)...)
	prevState.OriginalColumn = currentState.OriginalColumn

	return buffer
}
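To see why joining chunks requires rewriting deltas, here is a hand-worked example of the segment encoding performed by appendMappingToBuffer, with made-up positions:

package main

import "fmt"

// Each "mappings" segment stores differences from the previous mapping,
// which is why AppendSourceMapChunk above must rewrite the first segment
// of every chunk when chunks are joined.
func main() {
	// Two mappings on one generated line (hypothetical values):
	//   gen col 0 -> source 0, original line 10, col 0
	//   gen col 7 -> source 0, original line 10, col 4
	first := []int{0, 0, 10, 0} // absolute, since there is no previous state
	second := []int{7, 0, 0, 4} // deltas: +7 gen col, same source/line, +4 col
	fmt.Println(first, second)  // encoded with EncodeVLQ this would read "AAUA,OAAI"
}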
type LineOffsetTable struct {
	byteOffsetToStartOfLine int32

	// The source map specification is very loose and does not specify what
	// column numbers actually mean. The popular "source-map" library from Mozilla
	// appears to interpret them as counts of UTF-16 code units, so we generate
	// those too for compatibility.
	//
	// We keep mapping tables around to accelerate conversion from byte offsets
	// to UTF-16 code unit counts. However, this mapping takes up a lot of memory
	// and generates a lot of garbage. Since most JavaScript is ASCII and the
	// mapping for ASCII is 1:1, we avoid creating a table for ASCII-only lines
	// as an optimization.
	byteOffsetToFirstNonASCII int32
	columnsForNonASCII        []int32
}

func GenerateLineOffsetTables(contents string, approximateLineCount int32) []LineOffsetTable {
	var ColumnsForNonASCII []int32
	ByteOffsetToFirstNonASCII := int32(0)
	lineByteOffset := 0
	columnByteOffset := 0
	column := int32(0)

	// Preallocate the top-level table using the approximate line count from the lexer
	lineOffsetTables := make([]LineOffsetTable, 0, approximateLineCount)

	for i, c := range contents {
		// Mark the start of the next line
		if column == 0 {
			lineByteOffset = i
		}

		// Start the mapping if this character is non-ASCII
		if c > 0x7F && ColumnsForNonASCII == nil {
			columnByteOffset = i - lineByteOffset
			ByteOffsetToFirstNonASCII = int32(columnByteOffset)
			ColumnsForNonASCII = []int32{}
		}

		// Update the per-byte column offsets
		if ColumnsForNonASCII != nil {
			for lineBytesSoFar := i - lineByteOffset; columnByteOffset <= lineBytesSoFar; columnByteOffset++ {
				ColumnsForNonASCII = append(ColumnsForNonASCII, column)
			}
		}

		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' && i+1 < len(contents) && contents[i+1] == '\n' {
				column++
				continue
			}

			lineOffsetTables = append(lineOffsetTables, LineOffsetTable{
				byteOffsetToStartOfLine:   int32(lineByteOffset),
				byteOffsetToFirstNonASCII: ByteOffsetToFirstNonASCII,
				columnsForNonASCII:        ColumnsForNonASCII,
			})
			columnByteOffset = 0
			ByteOffsetToFirstNonASCII = 0
			ColumnsForNonASCII = nil
			column = 0

		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				column++
			} else {
				column += 2
			}
		}
	}

	// Mark the start of the next line
	if column == 0 {
		lineByteOffset = len(contents)
	}

	// Do one last update for the column at the end of the file
	if ColumnsForNonASCII != nil {
		for lineBytesSoFar := len(contents) - lineByteOffset; columnByteOffset <= lineBytesSoFar; columnByteOffset++ {
			ColumnsForNonASCII = append(ColumnsForNonASCII, column)
		}
	}

	lineOffsetTables = append(lineOffsetTables, LineOffsetTable{
		byteOffsetToStartOfLine:   int32(lineByteOffset),
		byteOffsetToFirstNonASCII: ByteOffsetToFirstNonASCII,
		columnsForNonASCII:        ColumnsForNonASCII,
	})
	return lineOffsetTables
}

type Chunk struct {
	Buffer []byte

	// This end state will be used to rewrite the start of the following source
	// map chunk so that the delta-encoded VLQ numbers are preserved.
	EndState SourceMapState

	// There probably isn't a source mapping at the end of the file (nor should
	// there be) but if we're appending another source map chunk after this one,
	// we'll need to know how many characters were in the last line we generated.
	FinalGeneratedColumn int

	ShouldIgnore bool
}

type ChunkBuilder struct {
	inputSourceMap      *SourceMap
	sourceMap           []byte
	prevLoc             logger.Loc
	prevState           SourceMapState
	lastGeneratedUpdate int
	generatedColumn     int
	hasPrevState        bool
	lineOffsetTables    []LineOffsetTable

	// This is a workaround for a bug in the popular "source-map" library:
	// https://github.com/mozilla/source-map/issues/261. The library will
	// sometimes return null when querying a source map unless every line
	// starts with a mapping at column zero.
	//
	// The workaround is to replicate the previous mapping if a line ends
	// up not starting with a mapping. This is done lazily because we want
	// to avoid replicating the previous mapping if we don't need to.
	lineStartsWithMapping     bool
	coverLinesWithoutMappings bool
}

func MakeChunkBuilder(inputSourceMap *SourceMap, lineOffsetTables []LineOffsetTable) ChunkBuilder {
	return ChunkBuilder{
		inputSourceMap:   inputSourceMap,
		prevLoc:          logger.Loc{Start: -1},
		lineOffsetTables: lineOffsetTables,

		// We automatically repeat the previous source mapping if we ever generate
		// a line that doesn't start with a mapping. This helps give files more
		// complete mapping coverage without gaps.
		//
		// However, we probably shouldn't do this if the input file has a nested
		// source map that we will be remapping through. We have no idea what state
		// that source map is in and it could be pretty scrambled.
		//
		// I've seen cases where blindly repeating the last mapping for subsequent
		// lines gives very strange and unhelpful results with source maps from
		// other tools.
		coverLinesWithoutMappings: inputSourceMap == nil,
	}
}

func (b *ChunkBuilder) AddSourceMapping(loc logger.Loc, output []byte) {
	if loc == b.prevLoc {
		return
	}
	b.prevLoc = loc

	// Binary search to find the line
	lineOffsetTables := b.lineOffsetTables
	count := len(lineOffsetTables)
	originalLine := 0
	for count > 0 {
		step := count / 2
		i := originalLine + step
		if lineOffsetTables[i].byteOffsetToStartOfLine <= loc.Start {
			originalLine = i + 1
			count = count - step - 1
		} else {
			count = step
		}
	}
	originalLine--

	// Use the line to compute the column
	line := &lineOffsetTables[originalLine]
	originalColumn := int(loc.Start - line.byteOffsetToStartOfLine)
	if line.columnsForNonASCII != nil && originalColumn >= int(line.byteOffsetToFirstNonASCII) {
		originalColumn = int(line.columnsForNonASCII[originalColumn-int(line.byteOffsetToFirstNonASCII)])
	}

	b.updateGeneratedLineAndColumn(output)

	// If this line doesn't start with a mapping and we're about to add a mapping
	// that's not at the start, insert a mapping first so the line starts with one.
	if b.coverLinesWithoutMappings && !b.lineStartsWithMapping && b.generatedColumn > 0 && b.hasPrevState {
		b.appendMappingWithoutRemapping(SourceMapState{
			GeneratedLine:   b.prevState.GeneratedLine,
			GeneratedColumn: 0,
			SourceIndex:     b.prevState.SourceIndex,
			OriginalLine:    b.prevState.OriginalLine,
			OriginalColumn:  b.prevState.OriginalColumn,
		})
	}

	b.appendMapping(SourceMapState{
		GeneratedLine:   b.prevState.GeneratedLine,
		GeneratedColumn: b.generatedColumn,
		OriginalLine:    originalLine,
		OriginalColumn:  originalColumn,
	})

	// This line now has a mapping on it, so don't insert another one
	b.lineStartsWithMapping = true
}

func (b *ChunkBuilder) GenerateChunk(output []byte) Chunk {
	b.updateGeneratedLineAndColumn(output)
	shouldIgnore := true
	for _, c := range b.sourceMap {
		if c != ';' {
			shouldIgnore = false
			break
		}
	}
	return Chunk{
		Buffer:               b.sourceMap,
		EndState:             b.prevState,
		FinalGeneratedColumn: b.generatedColumn,
		ShouldIgnore:         shouldIgnore,
	}
}

// Scan over the printed text since the last source mapping and update the
// generated line and column numbers
func (b *ChunkBuilder) updateGeneratedLineAndColumn(output []byte) {
	for i, c := range string(output[b.lastGeneratedUpdate:]) {
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' {
				newlineCheck := b.lastGeneratedUpdate + i + 1
				if newlineCheck < len(output) && output[newlineCheck] == '\n' {
					continue
				}
			}

			// If we're about to move to the next line and the previous line didn't have
			// any mappings, add a mapping at the start of the previous line.
			if b.coverLinesWithoutMappings && !b.lineStartsWithMapping && b.hasPrevState {
				b.appendMappingWithoutRemapping(SourceMapState{
					GeneratedLine:   b.prevState.GeneratedLine,
					GeneratedColumn: 0,
					SourceIndex:     b.prevState.SourceIndex,
					OriginalLine:    b.prevState.OriginalLine,
					OriginalColumn:  b.prevState.OriginalColumn,
				})
			}

			b.prevState.GeneratedLine++
			b.prevState.GeneratedColumn = 0
			b.generatedColumn = 0
			b.sourceMap = append(b.sourceMap, ';')

			// This new line doesn't have a mapping yet
			b.lineStartsWithMapping = false

		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				b.generatedColumn++
			} else {
				b.generatedColumn += 2
			}
		}
	}

	b.lastGeneratedUpdate = len(output)
}

func (b *ChunkBuilder) appendMapping(currentState SourceMapState) {
	// If the input file had a source map, map all the way back to the original
	if b.inputSourceMap != nil {
		mapping := b.inputSourceMap.Find(
			int32(currentState.OriginalLine),
			int32(currentState.OriginalColumn))

		// Some locations won't have a mapping
		if mapping == nil {
			return
		}

		currentState.SourceIndex = int(mapping.SourceIndex)
		currentState.OriginalLine = int(mapping.OriginalLine)
		currentState.OriginalColumn = int(mapping.OriginalColumn)
	}

	b.appendMappingWithoutRemapping(currentState)
}

func (b *ChunkBuilder) appendMappingWithoutRemapping(currentState SourceMapState) {
	var lastByte byte
	if len(b.sourceMap) != 0 {
		lastByte = b.sourceMap[len(b.sourceMap)-1]
	}

	b.sourceMap = appendMappingToBuffer(b.sourceMap, lastByte, b.prevState, currentState)
	b.prevState = currentState
	b.hasPrevState = true
}
22 vendor/github.com/evanw/esbuild/internal/xxhash/LICENSE.txt generated vendored
@@ -1,22 +0,0 @@
Copyright (c) 2016 Caleb Spare

MIT License

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Some files were not shown because too many files have changed in this diff.