Support watching for modified npm packages
Allow array of build configs.
parent a0d81ca2be
commit bea89e2a80
.gitignore (new file, +2)
@@ -0,0 +1,2 @@
+vendor/
go.mod (13 changes)
@@ -1,18 +1,21 @@
 module github.com/trading-peter/gowebbuild

-go 1.17
+go 1.18

 require (
-	github.com/evanw/esbuild v0.14.5
-	github.com/goyek/goyek v0.6.0
+	github.com/evanw/esbuild v0.14.50
+	github.com/goyek/goyek v0.6.3
 	github.com/jaschaephraim/lrserver v0.0.0-20171129202958-50d19f603f71
 	github.com/otiai10/copy v1.7.0
 	github.com/radovskyb/watcher v1.0.7
+	github.com/tidwall/gjson v1.14.1
 )

 require (
-	github.com/gorilla/websocket v1.4.2 // indirect
+	github.com/gorilla/websocket v1.5.0 // indirect
 	github.com/smartystreets/goconvey v1.7.2 // indirect
-	golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365 // indirect
+	github.com/tidwall/match v1.1.1 // indirect
+	github.com/tidwall/pretty v1.2.0 // indirect
+	golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect
 	gopkg.in/fsnotify.v1 v1.4.7 // indirect
 )
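The one new direct dependency is tidwall/gjson, which the new linker.go below uses to read fields out of package.json files without defining structs. A minimal sketch of that API; the JSON document here is invented for illustration:

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	// Invented package.json contents, for illustration only.
	const pkg = `{"name": "@acme/ui", "dependencies": {"lit": "^2.0.0"}}`

	// gjson.Get extracts a value by path without unmarshalling the whole document.
	fmt.Println(gjson.Get(pkg, "name").String()) // @acme/ui

	// Map() exposes a JSON object as map[string]gjson.Result.
	for dep := range gjson.Get(pkg, "dependencies").Map() {
		fmt.Println(dep) // lit
	}
}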
go.sum (23 changes)
@@ -1,11 +1,11 @@
-github.com/evanw/esbuild v0.14.5 h1:Gh/vGvDL/g++7erzQZofohZqFBzQblWfLdtYCf15zcQ=
-github.com/evanw/esbuild v0.14.5/go.mod h1:GG+zjdi59yh3ehDn4ZWfPcATxjPDUH53iU4ZJbp7dkY=
+github.com/evanw/esbuild v0.14.50 h1:h7sijkRPGB9ckpIOc6FMZ81/NMy/4g40LhsBAtPa3/I=
+github.com/evanw/esbuild v0.14.50/go.mod h1:dkwI35DCMf0iR+tJDiCEiPKZ4A+AotmmeLpPEv3dl9k=
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
-github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=
-github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
+github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
-github.com/goyek/goyek v0.6.0 h1:2YQ4V3X7q+zFF98IBWMc1WRwfzs0TQ8jrwOKY3XRQRk=
-github.com/goyek/goyek v0.6.0/go.mod h1:UGjZz3juJL2l2eMqRbxQYjG8ieyKb7WMYPv0KB0KVxA=
+github.com/goyek/goyek v0.6.3 h1:t0h3gWdlvGeSChltiyAyka9Mlcp3CEPDRssRf0XHDTM=
+github.com/goyek/goyek v0.6.3/go.mod h1:UGjZz3juJL2l2eMqRbxQYjG8ieyKb7WMYPv0KB0KVxA=
 github.com/jaschaephraim/lrserver v0.0.0-20171129202958-50d19f603f71 h1:24NdJ5N6gtrcoeS4JwLMeruKFmg20QdF/5UnX5S/j18=
 github.com/jaschaephraim/lrserver v0.0.0-20171129202958-50d19f603f71/go.mod h1:ozZLfjiLmXytkIUh200wMeuoQJ4ww06wN+KZtFP6j3g=
 github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
@@ -23,11 +23,18 @@ github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N
 github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo=
 github.com/smartystreets/goconvey v1.7.2 h1:9RBaZCeXEQ3UselpuwUQHltGVXvdwm6cv1hgR6gDIPg=
 github.com/smartystreets/goconvey v1.7.2/go.mod h1:Vw0tHAZW6lzCRk3xgdin6fKYcG+G3Pg9vgXWeJpQFMM=
+github.com/tidwall/gjson v1.14.1 h1:iymTbGkQBhveq21bEvAQ81I0LEBork8BFe1CUZXdyuo=
+github.com/tidwall/gjson v1.14.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
+github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
+github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
+github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
+github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365 h1:6wSTsvPddg9gc/mVEEyk9oOAoxn+bT4Z9q1zx+4RwA4=
-golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg=
+golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
 gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
linker.go (new file, +123)
@@ -0,0 +1,123 @@
package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/otiai10/copy"
	"github.com/radovskyb/watcher"
	"github.com/tidwall/gjson"
)

func link(from, to string) chan struct{} {
	requestBuildCh := make(chan struct{})

	// Load package.json in destination.
	destPkg := readFileContent(filepath.Join(to, "package.json"))
	depsRaw := gjson.Get(destPkg, "dependencies").Map()
	deps := map[string]bool{}
	for k := range depsRaw {
		deps[k] = true
	}

	packages := map[string]string{}
	packageFiles := findFiles(from, "package.json")

	for i := range packageFiles {
		content := readFileContent(packageFiles[i])
		name := gjson.Get(content, "name").String()

		if deps[name] {
			packages[name] = filepath.Dir(packageFiles[i])
		}
	}

	go func() {
		w := watcher.New()
		w.SetMaxEvents(1)
		w.FilterOps(watcher.Write, watcher.Rename, watcher.Move, watcher.Create, watcher.Remove)

		if err := w.AddRecursive(from); err != nil {
			fmt.Println(err.Error())
			os.Exit(1)
		}

		go func() {
			for {
				select {
				case event := <-w.Event:
					fmt.Printf("File %s changed\n", event.Path)
					for k, v := range packages {
						if strings.HasPrefix(event.Path, v) {
							src := filepath.Dir(event.Path)
							dest := filepath.Join(to, "node_modules", k)
							fmt.Printf("Copying %s to %s\n", src, dest)
							err := copy.Copy(src, dest, copy.Options{
								Skip: func(src string) (bool, error) {
									ok, _ := filepath.Match("*.js", filepath.Base(src))
									if ok && !strings.Contains(src, "node_modules") {
										return false, nil
									}

									return true, nil
								},
								Sync: true,
							})

							if err != nil {
								fmt.Printf("Failed to copy %s: %v\n", k, err)
							}

							requestBuildCh <- struct{}{}
						}
					}
				case err := <-w.Error:
					fmt.Println(err.Error())
				case <-w.Closed:
					return
				}
			}
		}()

		fmt.Printf("Watching packages in %s\n", from)

		if err := w.Start(time.Millisecond * 100); err != nil {
			fmt.Println(err.Error())
		}
	}()

	return requestBuildCh
}

func findFiles(root, name string) []string {
	paths := []string{}

	filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return nil
		}

		if !d.IsDir() && filepath.Base(path) == name && !strings.Contains(path, "node_modules") {
			paths = append(paths, path)
		}

		return nil
	})

	return paths
}

func readFileContent(path string) string {
	pkgData, err := os.ReadFile(path)

	if err != nil {
		fmt.Printf("%+v\n", err)
		os.Exit(1)
	}

	return string(pkgData)
}
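For orientation before the main.go diff: the channel returned by link() carries one value per package copy, and the caller is expected to rebuild on each value. A minimal sketch of that contract, with placeholder paths (main.go below wires it up the same way):

// Sketch only: consuming the rebuild requests emitted by link().
// Each value on the channel means a watched npm package was just
// copied into the destination's node_modules.
func linkUsageSketch() {
	reqBuildCh := link("../my-packages", "./frontend") // placeholder paths

	go func() {
		for range reqBuildCh {
			// Trigger a rebuild here; main.go calls cp, build and replace.
		}
	}()
}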
main.go (197 changes)
@@ -41,11 +41,39 @@ type options struct {
 		Search string
 		Replace string
 	}
+	Link struct {
+		From string
+		To string
+	}
+}
+
+func readCfg(cfgPath string) []options {
+	cfgContent, err := os.ReadFile(cfgPath)
+
+	if err != nil {
+		fmt.Printf("%+v\n", err)
+		os.Exit(1)
+	}
+
+	optsSetups := []options{}
+
+	err = json.Unmarshal(cfgContent, &optsSetups)
+	if err != nil {
+		opt := options{}
+		err = json.Unmarshal(cfgContent, &opt)
+		if err != nil {
+			fmt.Printf("%+v\n", err)
+			os.Exit(1)
+		}
+
+		optsSetups = append(optsSetups, opt)
+	}
+
+	return optsSetups
 }

 func main() {
 	flow := &goyek.Flow{}
-	opts := options{}
+
 	cfgPathParam := flow.RegisterStringParam(goyek.StringParam{
 		Name: "c",
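Since readCfg first tries to unmarshal an array of configs and only then falls back to a single object, both shapes stay valid. A hypothetical config in the new array form (keys match the options struct via encoding/json's case-insensitive matching; the esbuild build options and replace rules are omitted for brevity, and all paths are invented):

[
  {
    "watch": { "path": "./frontend/src", "exclude": ["node_modules"] },
    "serve": { "path": "./frontend/dist", "port": 8888 },
    "link": { "from": "../my-npm-packages", "to": "./frontend" }
  },
  {
    "watch": { "path": "./admin/src" }
  }
]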
@@ -65,30 +93,22 @@ func main() {
 		Params: goyek.Params{cfgPathParam, prodParam},
 		Action: func(tf *goyek.TF) {
 			cfgPath := cfgPathParam.Get(tf)
-			cfgContent, err := os.ReadFile(cfgPath)
-
-			if err != nil {
-				fmt.Printf("%+v\n", err)
-				os.Exit(1)
-			}
-
-			err = json.Unmarshal(cfgContent, &opts)
-			if err != nil {
-				fmt.Printf("%+v\n", err)
-				os.Exit(1)
-			}
-
-			cp(opts)
-
-			if prodParam.Get(tf) {
-				opts.ESBuild.MinifyIdentifiers = true
-				opts.ESBuild.MinifySyntax = true
-				opts.ESBuild.MinifyWhitespace = true
-				opts.ESBuild.Sourcemap = api.SourceMapNone
-			}
-
-			api.Build(opts.ESBuild)
-			replace(opts)
+			os.Chdir(filepath.Dir(cfgPath))
+			opts := readCfg(cfgPath)
+
+			for _, o := range opts {
+				cp(o)
+
+				if prodParam.Get(tf) {
+					o.ESBuild.MinifyIdentifiers = true
+					o.ESBuild.MinifySyntax = true
+					o.ESBuild.MinifyWhitespace = true
+					o.ESBuild.Sourcemap = api.SourceMapNone
+				}
+
+				api.Build(o.ESBuild)
+				replace(o)
+			}
 		},
 	}
@@ -98,66 +118,90 @@ func main() {
 		Params: goyek.Params{cfgPathParam},
 		Action: func(tf *goyek.TF) {
 			cfgPath := cfgPathParam.Get(tf)
-			cfgContent, err := os.ReadFile(cfgPath)
-
-			if err != nil {
-				fmt.Printf("%+v\n", err)
-				os.Exit(1)
-			}
-
-			err = json.Unmarshal(cfgContent, &opts)
-			if err != nil {
-				fmt.Printf("%+v\n", err)
-				os.Exit(1)
-			}
+			os.Chdir(filepath.Dir(cfgPath))
+			optsSetups := readCfg(cfgPath)

 			c := make(chan os.Signal, 1)
 			signal.Notify(c, os.Interrupt, syscall.SIGTERM)

-			fmt.Println("Starting live reload server")
-			go func() {
-				w := watcher.New()
-				w.SetMaxEvents(1)
-				w.FilterOps(watcher.Write, watcher.Rename, watcher.Move, watcher.Create, watcher.Remove)
-
-				if len(opts.Watch.Exclude) > 0 {
-					w.Ignore(opts.Watch.Exclude...)
-				}
-
-				if err := w.AddRecursive(opts.Watch.Path); err != nil {
-					fmt.Println(err.Error())
-					os.Exit(1)
-				}
-
-				go func() {
-					for {
-						select {
-						case event := <-w.Event:
-							fmt.Printf("File %s changed\n", event.Name())
-							cp(opts)
-							build(opts)
-							replace(opts)
-						case err := <-w.Error:
-							fmt.Println(err.Error())
-						case <-w.Closed:
-							return
-						}
-					}
-				}()
-
-				fmt.Printf("Watching %d elements in %s\n", len(w.WatchedFiles()), opts.Watch.Path)
-
-				cp(opts)
-				build(opts)
-				replace(opts)
-
-				if err := w.Start(time.Millisecond * 100); err != nil {
-					fmt.Println(err.Error())
-				}
-			}()
+			for i := range optsSetups {
+				opts := optsSetups[i]
+
+				go func(opts options) {
+					w := watcher.New()
+					w.SetMaxEvents(1)
+					w.FilterOps(watcher.Write, watcher.Rename, watcher.Move, watcher.Create, watcher.Remove)
+
+					if len(opts.Watch.Exclude) > 0 {
+						w.Ignore(opts.Watch.Exclude...)
+					}
+
+					if err := w.AddRecursive(opts.Watch.Path); err != nil {
+						fmt.Println(err.Error())
+						os.Exit(1)
+					}
+
+					go func() {
+						for {
+							select {
+							case event := <-w.Event:
+								fmt.Printf("File %s changed\n", event.Name())
+								cp(opts)
+								build(opts)
+								replace(opts)
+							case err := <-w.Error:
+								fmt.Println(err.Error())
+							case <-w.Closed:
+								return
+							}
+						}
+					}()
+
+					fmt.Printf("Watching %d elements in %s\n", len(w.WatchedFiles()), opts.Watch.Path)
+
+					cp(opts)
+					build(opts)
+					replace(opts)
+
+					if err := w.Start(time.Millisecond * 100); err != nil {
+						fmt.Println(err.Error())
+					}
+				}(opts)
+
+				if opts.Serve.Path != "" {
+					go func() {
+						port := 8888
+						if opts.Serve.Port != 0 {
+							port = opts.Serve.Port
+						}
+
+						http.Handle("/", http.FileServer(http.Dir(opts.Serve.Path)))
+
+						fmt.Printf("Serving contents of %s at :%d\n", opts.Serve.Path, port)
+						err := http.ListenAndServe(fmt.Sprintf(":%d", port), nil)
+
+						if err != nil {
+							fmt.Printf("%+v\n", err.Error())
+							os.Exit(1)
+						}
+					}()
+				}
+
+				if opts.Link.From != "" {
+					reqBuildCh := link(opts.Link.From, opts.Link.To)
+
+					go func() {
+						for range reqBuildCh {
+							cp(opts)
+							build(opts)
+							replace(opts)
+						}
+					}()
+				}
+			}

 			go func() {
+				fmt.Println("Starting live reload server")
 				lr := lrserver.New(lrserver.DefaultName, lrserver.DefaultPort)

 				go func() {
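One detail in the hunk above: each per-config watcher runs as go func(opts options) { ... }(opts), so the options are passed by value rather than captured. Under Go 1.18 (the version this commit moves to) a range variable is reused across iterations, so the combination of a per-iteration copy and a value parameter keeps every goroutine pinned to its own config. A minimal sketch of the pattern:

// Sketch only: the capture-safe goroutine pattern used above.
func captureSketch(optsSetups []options) {
	for i := range optsSetups {
		opts := optsSetups[i] // fresh variable for each iteration

		go func(opts options) { // value copy, unaffected by later iterations
			_ = opts.Watch.Path // each goroutine sees only its own config
		}(opts)
	}
}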
@@ -174,25 +218,6 @@ func main() {
 				}
 			}()

-			if opts.Serve.Path != "" {
-				go func() {
-					port := 8888
-					if opts.Serve.Port != 0 {
-						port = opts.Serve.Port
-					}
-
-					http.Handle("/", http.FileServer(http.Dir(opts.Serve.Path)))
-
-					fmt.Printf("Serving contents of %s at :%d\n", opts.Serve.Path, port)
-					err := http.ListenAndServe(fmt.Sprintf(":%d", port), nil)
-
-					if err != nil {
-						fmt.Printf("%+v\n", err.Error())
-						os.Exit(1)
-					}
-				}()
-			}
-
 			<-c
 			fmt.Println("\nExit")
 			os.Exit(0)
The remaining changes delete the checked-in vendor/ directory (now ignored via the new .gitignore entry). All deleted files are generated, vendored third-party code:

vendor/github.com/evanw/esbuild/LICENSE.md (-21)
vendor/github.com/evanw/esbuild/internal/api_helpers/use_timer.go (-7)
vendor/github.com/evanw/esbuild/internal/ast/ast.go (-141)
vendor/github.com/evanw/esbuild/internal/bundler/bundler.go (-2373, diff suppressed: file too large)
vendor/github.com/evanw/esbuild/internal/bundler/debug.go (-132)
vendor/github.com/evanw/esbuild/internal/bundler/linker.go (-5581, diff suppressed: file too large)
vendor/github.com/evanw/esbuild/internal/cache/cache.go (-101)
vendor/github.com/evanw/esbuild/internal/cache/cache_ast.go (-190)
vendor/github.com/evanw/esbuild/internal/cache/cache_fs.go (-52)
vendor/github.com/evanw/esbuild/internal/compat/compat.go (-45)
vendor/github.com/evanw/esbuild/internal/compat/css_table.go (-72)
vendor/github.com/evanw/esbuild/internal/compat/js_table.go (-465)
NewTarget: {
|
|
||||||
Chrome: {{start: v{46, 0, 0}}},
|
|
||||||
Edge: {{start: v{14, 0, 0}}},
|
|
||||||
ES: {{start: v{2015, 0, 0}}},
|
|
||||||
Firefox: {{start: v{41, 0, 0}}},
|
|
||||||
IOS: {{start: v{10, 0, 0}}},
|
|
||||||
Node: {{start: v{5, 0, 0}}},
|
|
||||||
Safari: {{start: v{10, 0, 0}}},
|
|
||||||
},
|
|
||||||
NodeColonPrefixImport: {
|
|
||||||
Node: {{start: v{12, 20, 0}, end: v{13, 0, 0}}, {start: v{14, 13, 1}}},
|
|
||||||
},
|
|
||||||
NodeColonPrefixRequire: {
|
|
||||||
Node: {{start: v{14, 18, 0}, end: v{15, 0, 0}}, {start: v{16, 0, 0}}},
|
|
||||||
},
|
|
||||||
NullishCoalescing: {
|
|
||||||
Chrome: {{start: v{80, 0, 0}}},
|
|
||||||
Edge: {{start: v{80, 0, 0}}},
|
|
||||||
ES: {{start: v{2020, 0, 0}}},
|
|
||||||
Firefox: {{start: v{72, 0, 0}}},
|
|
||||||
IOS: {{start: v{13, 4, 0}}},
|
|
||||||
Node: {{start: v{14, 0, 0}}},
|
|
||||||
Safari: {{start: v{13, 1, 0}}},
|
|
||||||
},
|
|
||||||
ObjectAccessors: {
|
|
||||||
Chrome: {{start: v{5, 0, 0}}},
|
|
||||||
Edge: {{start: v{12, 0, 0}}},
|
|
||||||
ES: {{start: v{5, 0, 0}}},
|
|
||||||
Firefox: {{start: v{2, 0, 0}}},
|
|
||||||
IOS: {{start: v{6, 0, 0}}},
|
|
||||||
Node: {{start: v{0, 10, 0}}},
|
|
||||||
Safari: {{start: v{3, 1, 0}}},
|
|
||||||
},
|
|
||||||
ObjectExtensions: {
|
|
||||||
Chrome: {{start: v{44, 0, 0}}},
|
|
||||||
Edge: {{start: v{12, 0, 0}}},
|
|
||||||
ES: {{start: v{2015, 0, 0}}},
|
|
||||||
Firefox: {{start: v{34, 0, 0}}},
|
|
||||||
IOS: {{start: v{10, 0, 0}}},
|
|
||||||
Node: {{start: v{4, 0, 0}}},
|
|
||||||
Safari: {{start: v{10, 0, 0}}},
|
|
||||||
},
|
|
||||||
ObjectRestSpread: {
|
|
||||||
ES: {{start: v{2018, 0, 0}}},
|
|
||||||
Firefox: {{start: v{55, 0, 0}}},
|
|
||||||
IOS: {{start: v{11, 3, 0}}},
|
|
||||||
Safari: {{start: v{11, 1, 0}}},
|
|
||||||
},
|
|
||||||
OptionalCatchBinding: {
|
|
||||||
Chrome: {{start: v{66, 0, 0}}},
|
|
||||||
Edge: {{start: v{79, 0, 0}}},
|
|
||||||
ES: {{start: v{2019, 0, 0}}},
|
|
||||||
Firefox: {{start: v{58, 0, 0}}},
|
|
||||||
IOS: {{start: v{11, 3, 0}}},
|
|
||||||
Node: {{start: v{10, 0, 0}}},
|
|
||||||
Safari: {{start: v{11, 1, 0}}},
|
|
||||||
},
|
|
||||||
OptionalChain: {
|
|
||||||
Chrome: {{start: v{91, 0, 0}}},
|
|
||||||
Edge: {{start: v{91, 0, 0}}},
|
|
||||||
ES: {{start: v{2020, 0, 0}}},
|
|
||||||
Firefox: {{start: v{74, 0, 0}}},
|
|
||||||
IOS: {{start: v{13, 4, 0}}},
|
|
||||||
Node: {{start: v{16, 9, 0}}},
|
|
||||||
Safari: {{start: v{13, 1, 0}}},
|
|
||||||
},
|
|
||||||
RestArgument: {
|
|
||||||
Chrome: {{start: v{47, 0, 0}}},
|
|
||||||
Edge: {{start: v{12, 0, 0}}},
|
|
||||||
ES: {{start: v{2015, 0, 0}}},
|
|
||||||
Firefox: {{start: v{43, 0, 0}}},
|
|
||||||
IOS: {{start: v{10, 0, 0}}},
|
|
||||||
Node: {{start: v{6, 0, 0}}},
|
|
||||||
Safari: {{start: v{10, 0, 0}}},
|
|
||||||
},
|
|
||||||
TemplateLiteral: {
|
|
||||||
Chrome: {{start: v{41, 0, 0}}},
|
|
||||||
Edge: {{start: v{13, 0, 0}}},
|
|
||||||
ES: {{start: v{2015, 0, 0}}},
|
|
||||||
Firefox: {{start: v{34, 0, 0}}},
|
|
||||||
IOS: {{start: v{9, 0, 0}}},
|
|
||||||
Node: {{start: v{4, 0, 0}}},
|
|
||||||
Safari: {{start: v{9, 0, 0}}},
|
|
||||||
},
|
|
||||||
TopLevelAwait: {
|
|
||||||
Chrome: {{start: v{89, 0, 0}}},
|
|
||||||
Edge: {{start: v{89, 0, 0}}},
|
|
||||||
Firefox: {{start: v{89, 0, 0}}},
|
|
||||||
Node: {{start: v{14, 8, 0}}},
|
|
||||||
Safari: {{start: v{15, 0, 0}}},
|
|
||||||
},
|
|
||||||
UnicodeEscapes: {
|
|
||||||
Chrome: {{start: v{44, 0, 0}}},
|
|
||||||
Edge: {{start: v{12, 0, 0}}},
|
|
||||||
ES: {{start: v{2015, 0, 0}}},
|
|
||||||
Firefox: {{start: v{53, 0, 0}}},
|
|
||||||
IOS: {{start: v{9, 0, 0}}},
|
|
||||||
Node: {{start: v{4, 0, 0}}},
|
|
||||||
Safari: {{start: v{9, 0, 0}}},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return all features that are not available in at least one environment
|
|
||||||
func UnsupportedJSFeatures(constraints map[Engine][]int) (unsupported JSFeature) {
|
|
||||||
for feature, engines := range jsTable {
|
|
||||||
for engine, version := range constraints {
|
|
||||||
if versionRanges, ok := engines[engine]; !ok || !isVersionSupported(versionRanges, version) {
|
|
||||||
unsupported |= feature
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
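
For context, the table above is what UnsupportedJSFeatures folds into a bitset of features that must be lowered. A minimal sketch of a call from inside this compat package (the engine versions below are made-up examples, not values from this commit):

	constraints := map[Engine][]int{
		Chrome: {80},
		Node:   {12, 20},
	}
	// Each feature missing from any constrained engine is OR'd into the bitset
	unsupported := UnsupportedJSFeatures(constraints)
	if unsupported&OptionalChain != 0 {
		// "?." is not safe for these targets and must be transpiled
	}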
583
vendor/github.com/evanw/esbuild/internal/config/config.go
generated
vendored
@@ -1,583 +0,0 @@
package config

import (
	"fmt"
	"regexp"
	"strings"
	"sync"

	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/compat"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/logger"
)

type JSXOptions struct {
	Factory JSXExpr
	Fragment JSXExpr
	Parse bool
	Preserve bool
}

type JSXExpr struct {
	Parts []string
	Constant js_ast.E
}

type TSOptions struct {
	Parse bool
	NoAmbiguousLessThan bool
}

type Platform uint8

const (
	PlatformBrowser Platform = iota
	PlatformNode
	PlatformNeutral
)

type StrictOptions struct {
	// Loose: "class Foo { foo = 1 }" => "class Foo { constructor() { this.foo = 1; } }"
	// Strict: "class Foo { foo = 1 }" => "class Foo { constructor() { __publicField(this, 'foo', 1); } }"
	//
	// The disadvantage of strictness here is code bloat and performance. The
	// advantage is following the class field specification accurately. For
	// example, loose mode will incorrectly trigger setter methods while strict
	// mode won't.
	ClassFields bool
}

type SourceMap uint8

const (
	SourceMapNone SourceMap = iota
	SourceMapInline
	SourceMapLinkedWithComment
	SourceMapExternalWithoutComment
	SourceMapInlineAndExternal
)

type LegalComments uint8

const (
	LegalCommentsInline LegalComments = iota
	LegalCommentsNone
	LegalCommentsEndOfFile
	LegalCommentsLinkedWithComment
	LegalCommentsExternalWithoutComment
)

func (lc LegalComments) HasExternalFile() bool {
	return lc == LegalCommentsLinkedWithComment || lc == LegalCommentsExternalWithoutComment
}

type Loader int

const (
	LoaderNone Loader = iota
	LoaderJS
	LoaderJSX
	LoaderTS
	LoaderTSNoAmbiguousLessThan // Used with ".mts" and ".cts"
	LoaderTSX
	LoaderJSON
	LoaderText
	LoaderBase64
	LoaderDataURL
	LoaderFile
	LoaderBinary
	LoaderCSS
	LoaderDefault
)

func (loader Loader) IsTypeScript() bool {
	switch loader {
	case LoaderTS, LoaderTSNoAmbiguousLessThan, LoaderTSX:
		return true
	default:
		return false
	}
}

func (loader Loader) CanHaveSourceMap() bool {
	switch loader {
	case LoaderJS, LoaderJSX, LoaderTS, LoaderTSNoAmbiguousLessThan, LoaderTSX, LoaderCSS:
		return true
	default:
		return false
	}
}

type Format uint8

const (
	// This is used when not bundling. It means to preserve whatever form the
	// import or export was originally in. ES6 syntax stays ES6 syntax and
	// CommonJS syntax stays CommonJS syntax.
	FormatPreserve Format = iota

	// IIFE stands for immediately-invoked function expression. That looks like
	// this:
	//
	//   (() => {
	//     ... bundled code ...
	//   })();
	//
	// If the optional GlobalName is configured, then we'll write out this:
	//
	//   let globalName = (() => {
	//     ... bundled code ...
	//     return exports;
	//   })();
	//
	FormatIIFE

	// The CommonJS format looks like this:
	//
	//   ... bundled code ...
	//   module.exports = exports;
	//
	FormatCommonJS

	// The ES module format looks like this:
	//
	//   ... bundled code ...
	//   export {...};
	//
	FormatESModule
)

func (f Format) KeepES6ImportExportSyntax() bool {
	return f == FormatPreserve || f == FormatESModule
}

func (f Format) String() string {
	switch f {
	case FormatIIFE:
		return "iife"
	case FormatCommonJS:
		return "cjs"
	case FormatESModule:
		return "esm"
	}
	return ""
}

type StdinInfo struct {
	Loader Loader
	Contents string
	SourceFile string
	AbsResolveDir string
}

type WildcardPattern struct {
	Prefix string
	Suffix string
}

type ExternalModules struct {
	NodeModules map[string]bool
	AbsPaths map[string]bool
	Patterns []WildcardPattern
}

type Mode uint8

const (
	ModePassThrough Mode = iota
	ModeConvertFormat
	ModeBundle
)

type MaybeBool uint8

const (
	Unspecified MaybeBool = iota
	True
	False
)

type Options struct {
	Mode Mode
	ModuleType js_ast.ModuleType
	PreserveSymlinks bool
	RemoveWhitespace bool
	MinifyIdentifiers bool
	MangleSyntax bool
	ProfilerNames bool
	CodeSplitting bool
	WatchMode bool
	AllowOverwrite bool
	LegalComments LegalComments

	// If true, make sure to generate a single file that can be written to stdout
	WriteToStdout bool

	OmitRuntimeForTests bool
	UnusedImportsTS UnusedImportsTS
	UseDefineForClassFields MaybeBool
	ASCIIOnly bool
	KeepNames bool
	IgnoreDCEAnnotations bool
	TreeShaking bool

	Defines *ProcessedDefines
	TS TSOptions
	JSX JSXOptions
	Platform Platform

	TargetFromAPI TargetFromAPI
	UnsupportedJSFeatures compat.JSFeature
	UnsupportedCSSFeatures compat.CSSFeature
	TSTarget *TSTarget

	// This is the original information that was used to generate the
	// unsupported feature sets above. It's used for error messages.
	OriginalTargetEnv string

	ExtensionOrder []string
	MainFields []string
	Conditions []string
	AbsNodePaths []string // The "NODE_PATH" variable from Node.js
	ExternalModules ExternalModules

	AbsOutputFile string
	AbsOutputDir string
	AbsOutputBase string
	OutputExtensionJS string
	OutputExtensionCSS string
	GlobalName []string
	TsConfigOverride string
	ExtensionToLoader map[string]Loader
	OutputFormat Format
	PublicPath string
	InjectAbsPaths []string
	InjectedDefines []InjectedDefine
	InjectedFiles []InjectedFile

	JSBanner string
	JSFooter string
	CSSBanner string
	CSSFooter string

	EntryPathTemplate []PathTemplate
	ChunkPathTemplate []PathTemplate
	AssetPathTemplate []PathTemplate

	Plugins []Plugin

	NeedsMetafile bool

	SourceMap SourceMap
	SourceRoot string
	ExcludeSourcesContent bool

	Stdin *StdinInfo
}

type TargetFromAPI uint8

const (
	// In this state, the "target" field in "tsconfig.json" is respected
	TargetWasUnconfigured TargetFromAPI = iota

	// In this state, the "target" field in "tsconfig.json" is overridden
	TargetWasConfigured

	// In this state, "useDefineForClassFields" is true unless overridden
	TargetWasConfiguredIncludingESNext
)

type UnusedImportsTS uint8

const (
	// "import { unused } from 'foo'" => "" (TypeScript's default behavior)
	UnusedImportsRemoveStmt UnusedImportsTS = iota

	// "import { unused } from 'foo'" => "import 'foo'" ("importsNotUsedAsValues" != "remove")
	UnusedImportsKeepStmtRemoveValues

	// "import { unused } from 'foo'" => "import { unused } from 'foo'" ("preserveValueImports" == true)
	UnusedImportsKeepValues
)

func UnusedImportsFromTsconfigValues(preserveImportsNotUsedAsValues bool, preserveValueImports bool) UnusedImportsTS {
	if preserveValueImports {
		return UnusedImportsKeepValues
	}
	if preserveImportsNotUsedAsValues {
		return UnusedImportsKeepStmtRemoveValues
	}
	return UnusedImportsRemoveStmt
}

type TSTarget struct {
	Source logger.Source
	Range logger.Range
	Target string
	UnsupportedJSFeatures compat.JSFeature
}

type PathPlaceholder uint8

const (
	NoPlaceholder PathPlaceholder = iota

	// The relative path from the original parent directory to the configured
	// "outbase" directory, or to the lowest common ancestor directory
	DirPlaceholder

	// The original name of the file, or the manual chunk name, or the name of
	// the type of output file ("entry" or "chunk" or "asset")
	NamePlaceholder

	// A hash of the contents of this file, and the contents and output paths of
	// all dependencies (except for their hash placeholders)
	HashPlaceholder

	// The original extension of the file, or the name of the output file
	// (e.g. "css", "svg", "png")
	ExtPlaceholder
)

type PathTemplate struct {
	Data string
	Placeholder PathPlaceholder
}

type PathPlaceholders struct {
	Dir *string
	Name *string
	Hash *string
	Ext *string
}

func (placeholders PathPlaceholders) Get(placeholder PathPlaceholder) *string {
	switch placeholder {
	case DirPlaceholder:
		return placeholders.Dir
	case NamePlaceholder:
		return placeholders.Name
	case HashPlaceholder:
		return placeholders.Hash
	case ExtPlaceholder:
		return placeholders.Ext
	}
	return nil
}

func TemplateToString(template []PathTemplate) string {
	if len(template) == 1 && template[0].Placeholder == NoPlaceholder {
		// Avoid allocations in this case
		return template[0].Data
	}
	sb := strings.Builder{}
	for _, part := range template {
		sb.WriteString(part.Data)
		switch part.Placeholder {
		case DirPlaceholder:
			sb.WriteString("[dir]")
		case NamePlaceholder:
			sb.WriteString("[name]")
		case HashPlaceholder:
			sb.WriteString("[hash]")
		case ExtPlaceholder:
			sb.WriteString("[ext]")
		}
	}
	return sb.String()
}

func HasPlaceholder(template []PathTemplate, placeholder PathPlaceholder) bool {
	for _, part := range template {
		if part.Placeholder == placeholder {
			return true
		}
	}
	return false
}

func SubstituteTemplate(template []PathTemplate, placeholders PathPlaceholders) []PathTemplate {
	// Don't allocate if no substitution is possible and the template is already minimal
	shouldSubstitute := false
	for i, part := range template {
		if placeholders.Get(part.Placeholder) != nil || (part.Placeholder == NoPlaceholder && i+1 < len(template)) {
			shouldSubstitute = true
			break
		}
	}
	if !shouldSubstitute {
		return template
	}

	// Otherwise, substitute and merge as appropriate
	result := make([]PathTemplate, 0, len(template))
	for _, part := range template {
		if sub := placeholders.Get(part.Placeholder); sub != nil {
			part.Data += *sub
			part.Placeholder = NoPlaceholder
		}
		if last := len(result) - 1; last >= 0 && result[last].Placeholder == NoPlaceholder {
			last := &result[last]
			last.Data += part.Data
			last.Placeholder = part.Placeholder
		} else {
			result = append(result, part)
		}
	}
	return result
}
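
A rough illustration of how the two template helpers above compose (hypothetical values, same package):

	template := []PathTemplate{
		{Data: "out/", Placeholder: NamePlaceholder},
		{Data: "-", Placeholder: HashPlaceholder},
	}
	// Renders the placeholders back to their textual form: "out/[name]-[hash]"
	_ = TemplateToString(template)

	// Fills in the known values and merges adjacent literal chunks, leaving
	// a single NoPlaceholder part whose Data is "out/app-XYZ123"
	name, hash := "app", "XYZ123"
	_ = SubstituteTemplate(template, PathPlaceholders{Name: &name, Hash: &hash})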

func ShouldCallRuntimeRequire(mode Mode, outputFormat Format) bool {
	return mode == ModeBundle && outputFormat != FormatCommonJS
}

type InjectedDefine struct {
	Source logger.Source
	Data js_ast.E
	Name string
}

type InjectedFile struct {
	Source logger.Source
	Exports []InjectableExport
	DefineName string
}

type InjectableExport struct {
	Alias string
	Loc logger.Loc
}

var filterMutex sync.Mutex
var filterCache map[string]*regexp.Regexp

func compileFilter(filter string) (result *regexp.Regexp) {
	if filter == "" {
		// Must provide a filter
		return nil
	}
	ok := false

	// Cache hit?
	(func() {
		filterMutex.Lock()
		defer filterMutex.Unlock()
		if filterCache != nil {
			result, ok = filterCache[filter]
		}
	})()
	if ok {
		return
	}

	// Cache miss
	result, err := regexp.Compile(filter)
	if err != nil {
		return nil
	}

	// Cache for next time
	filterMutex.Lock()
	defer filterMutex.Unlock()
	if filterCache == nil {
		filterCache = make(map[string]*regexp.Regexp)
	}
	filterCache[filter] = result
	return
}

func CompileFilterForPlugin(pluginName string, kind string, filter string) (*regexp.Regexp, error) {
	if filter == "" {
		return nil, fmt.Errorf("[%s] %q is missing a filter", pluginName, kind)
	}

	result := compileFilter(filter)
	if result == nil {
		return nil, fmt.Errorf("[%s] %q filter is not a valid Go regular expression: %q", pluginName, kind, filter)
	}

	return result, nil
}

func PluginAppliesToPath(path logger.Path, filter *regexp.Regexp, namespace string) bool {
	return (namespace == "" || path.Namespace == namespace) && filter.MatchString(path.Text)
}

////////////////////////////////////////////////////////////////////////////////
// Plugin API

type Plugin struct {
	Name string
	OnStart []OnStart
	OnResolve []OnResolve
	OnLoad []OnLoad
}

type OnStart struct {
	Name string
	Callback func() OnStartResult
}

type OnStartResult struct {
	Msgs []logger.Msg
	ThrownError error
}

type OnResolve struct {
	Name string
	Filter *regexp.Regexp
	Namespace string
	Callback func(OnResolveArgs) OnResolveResult
}

type OnResolveArgs struct {
	Path string
	Importer logger.Path
	ResolveDir string
	Kind ast.ImportKind
	PluginData interface{}
}

type OnResolveResult struct {
	PluginName string

	Path logger.Path
	External bool
	IsSideEffectFree bool
	PluginData interface{}

	Msgs []logger.Msg
	ThrownError error

	AbsWatchFiles []string
	AbsWatchDirs []string
}

type OnLoad struct {
	Name string
	Filter *regexp.Regexp
	Namespace string
	Callback func(OnLoadArgs) OnLoadResult
}

type OnLoadArgs struct {
	Path logger.Path
	PluginData interface{}
}

type OnLoadResult struct {
	PluginName string

	Contents *string
	AbsResolveDir string
	Loader Loader
	PluginData interface{}

	Msgs []logger.Msg
	ThrownError error

	AbsWatchFiles []string
	AbsWatchDirs []string
}
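
These internal hook types are the counterpart of esbuild's public plugin API in github.com/evanw/esbuild/pkg/api. A minimal sketch against that public API (the "env" module name and its JSON contents are illustrative only):

	var envPlugin = api.Plugin{
		Name: "env",
		Setup: func(build api.PluginBuild) {
			// Claim imports of "env" and move them into a custom namespace
			build.OnResolve(api.OnResolveOptions{Filter: `^env$`},
				func(args api.OnResolveArgs) (api.OnResolveResult, error) {
					return api.OnResolveResult{Path: args.Path, Namespace: "env-ns"}, nil
				})
			// Serve the virtual module with the JSON loader
			build.OnLoad(api.OnLoadOptions{Filter: `.*`, Namespace: "env-ns"},
				func(args api.OnLoadArgs) (api.OnLoadResult, error) {
					contents := `{"NODE_ENV":"production"}`
					return api.OnLoadResult{Contents: &contents, Loader: api.LoaderJSON}, nil
				})
		},
	}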
969
vendor/github.com/evanw/esbuild/internal/config/globals.go
generated
vendored
@@ -1,969 +0,0 @@
package config

import (
	"math"
	"strings"
	"sync"

	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/logger"
)

var processedGlobalsMutex sync.Mutex
var processedGlobals *ProcessedDefines

// If something is in this list, then a direct identifier expression or property
// access chain matching this will be assumed to have no side effects and will
// be removed.
//
// This also means code is allowed to be reordered past things in this list. For
// example, if "console.log" is in this list, permitting reordering allows for
// "if (a) console.log(b); else console.log(c)" to be reordered and transformed
// into "console.log(a ? b : c)". Notice that "a" and "console.log" are in a
// different order, which can only happen if evaluating the "console.log"
// property access can be assumed to not change the value of "a".
//
// Note that membership in this list says nothing about whether calling any of
// these functions has any side effects. It only says something about
// referencing these functions without calling them.
var knownGlobals = [][]string{
|
|
||||||
// These global identifiers should exist in all JavaScript environments. This
|
|
||||||
// deliberately omits "NaN", "Infinity", and "undefined" because these are
|
|
||||||
// treated as automatically-inlined constants instead of identifiers.
|
|
||||||
{"Array"},
|
|
||||||
{"Boolean"},
|
|
||||||
{"Function"},
|
|
||||||
{"Math"},
|
|
||||||
{"Number"},
|
|
||||||
{"Object"},
|
|
||||||
{"RegExp"},
|
|
||||||
{"String"},
|
|
||||||
|
|
||||||
// Object: Static methods
|
|
||||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object#Static_methods
|
|
||||||
{"Object", "assign"},
|
|
||||||
{"Object", "create"},
|
|
||||||
{"Object", "defineProperties"},
|
|
||||||
{"Object", "defineProperty"},
|
|
||||||
{"Object", "entries"},
|
|
||||||
{"Object", "freeze"},
|
|
||||||
{"Object", "fromEntries"},
|
|
||||||
{"Object", "getOwnPropertyDescriptor"},
|
|
||||||
{"Object", "getOwnPropertyDescriptors"},
|
|
||||||
{"Object", "getOwnPropertyNames"},
|
|
||||||
{"Object", "getOwnPropertySymbols"},
|
|
||||||
{"Object", "getPrototypeOf"},
|
|
||||||
{"Object", "is"},
|
|
||||||
{"Object", "isExtensible"},
|
|
||||||
{"Object", "isFrozen"},
|
|
||||||
{"Object", "isSealed"},
|
|
||||||
{"Object", "keys"},
|
|
||||||
{"Object", "preventExtensions"},
|
|
||||||
{"Object", "seal"},
|
|
||||||
{"Object", "setPrototypeOf"},
|
|
||||||
{"Object", "values"},
|
|
||||||
|
|
||||||
// Object: Instance methods
|
|
||||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object#Instance_methods
|
|
||||||
{"Object", "prototype", "__defineGetter__"},
|
|
||||||
{"Object", "prototype", "__defineSetter__"},
|
|
||||||
{"Object", "prototype", "__lookupGetter__"},
|
|
||||||
{"Object", "prototype", "__lookupSetter__"},
|
|
||||||
{"Object", "prototype", "hasOwnProperty"},
|
|
||||||
{"Object", "prototype", "isPrototypeOf"},
|
|
||||||
{"Object", "prototype", "propertyIsEnumerable"},
|
|
||||||
{"Object", "prototype", "toLocaleString"},
|
|
||||||
{"Object", "prototype", "toString"},
|
|
||||||
{"Object", "prototype", "unwatch"},
|
|
||||||
{"Object", "prototype", "valueOf"},
|
|
||||||
{"Object", "prototype", "watch"},
|
|
||||||
|
|
||||||
// Math: Static properties
|
|
||||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math#Static_properties
|
|
||||||
{"Math", "E"},
|
|
||||||
{"Math", "LN10"},
|
|
||||||
{"Math", "LN2"},
|
|
||||||
{"Math", "LOG10E"},
|
|
||||||
{"Math", "LOG2E"},
|
|
||||||
{"Math", "PI"},
|
|
||||||
{"Math", "SQRT1_2"},
|
|
||||||
{"Math", "SQRT2"},
|
|
||||||
|
|
||||||
// Math: Static methods
|
|
||||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math#Static_methods
|
|
||||||
{"Math", "abs"},
|
|
||||||
{"Math", "acos"},
|
|
||||||
{"Math", "acosh"},
|
|
||||||
{"Math", "asin"},
|
|
||||||
{"Math", "asinh"},
|
|
||||||
{"Math", "atan"},
|
|
||||||
{"Math", "atan2"},
|
|
||||||
{"Math", "atanh"},
|
|
||||||
{"Math", "cbrt"},
|
|
||||||
{"Math", "ceil"},
|
|
||||||
{"Math", "clz32"},
|
|
||||||
{"Math", "cos"},
|
|
||||||
{"Math", "cosh"},
|
|
||||||
{"Math", "exp"},
|
|
||||||
{"Math", "expm1"},
|
|
||||||
{"Math", "floor"},
|
|
||||||
{"Math", "fround"},
|
|
||||||
{"Math", "hypot"},
|
|
||||||
{"Math", "imul"},
|
|
||||||
{"Math", "log"},
|
|
||||||
{"Math", "log10"},
|
|
||||||
{"Math", "log1p"},
|
|
||||||
{"Math", "log2"},
|
|
||||||
{"Math", "max"},
|
|
||||||
{"Math", "min"},
|
|
||||||
{"Math", "pow"},
|
|
||||||
{"Math", "random"},
|
|
||||||
{"Math", "round"},
|
|
||||||
{"Math", "sign"},
|
|
||||||
{"Math", "sin"},
|
|
||||||
{"Math", "sinh"},
|
|
||||||
{"Math", "sqrt"},
|
|
||||||
{"Math", "tan"},
|
|
||||||
{"Math", "tanh"},
|
|
||||||
{"Math", "trunc"},
|
|
||||||
|
|
||||||
// Reflect: Static methods
|
|
||||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect#static_methods
|
|
||||||
{"Reflect", "apply"},
|
|
||||||
{"Reflect", "construct"},
|
|
||||||
{"Reflect", "defineProperty"},
|
|
||||||
{"Reflect", "deleteProperty"},
|
|
||||||
{"Reflect", "get"},
|
|
||||||
{"Reflect", "getOwnPropertyDescriptor"},
|
|
||||||
{"Reflect", "getPrototypeOf"},
|
|
||||||
{"Reflect", "has"},
|
|
||||||
{"Reflect", "isExtensible"},
|
|
||||||
{"Reflect", "ownKeys"},
|
|
||||||
{"Reflect", "preventExtensions"},
|
|
||||||
{"Reflect", "set"},
|
|
||||||
{"Reflect", "setPrototypeOf"},
|
|
||||||
|
|
||||||
// Other globals present in both the browser and node (except "eval" because
|
|
||||||
// it has special behavior)
|
|
||||||
{"AbortController"},
|
|
||||||
{"AbortSignal"},
|
|
||||||
{"AggregateError"},
|
|
||||||
{"ArrayBuffer"},
|
|
||||||
{"BigInt"},
|
|
||||||
{"DataView"},
|
|
||||||
{"Date"},
|
|
||||||
{"Error"},
|
|
||||||
{"EvalError"},
|
|
||||||
{"Event"},
|
|
||||||
{"EventTarget"},
|
|
||||||
{"Float32Array"},
|
|
||||||
{"Float64Array"},
|
|
||||||
{"Int16Array"},
|
|
||||||
{"Int32Array"},
|
|
||||||
{"Int8Array"},
|
|
||||||
{"Intl"},
|
|
||||||
{"JSON"},
|
|
||||||
{"Map"},
|
|
||||||
{"MessageChannel"},
|
|
||||||
{"MessageEvent"},
|
|
||||||
{"MessagePort"},
|
|
||||||
{"Promise"},
|
|
||||||
{"Proxy"},
|
|
||||||
{"RangeError"},
|
|
||||||
{"ReferenceError"},
|
|
||||||
{"Reflect"},
|
|
||||||
{"Set"},
|
|
||||||
{"Symbol"},
|
|
||||||
{"SyntaxError"},
|
|
||||||
{"TextDecoder"},
|
|
||||||
{"TextEncoder"},
|
|
||||||
{"TypeError"},
|
|
||||||
{"URIError"},
|
|
||||||
{"URL"},
|
|
||||||
{"URLSearchParams"},
|
|
||||||
{"Uint16Array"},
|
|
||||||
{"Uint32Array"},
|
|
||||||
{"Uint8Array"},
|
|
||||||
{"Uint8ClampedArray"},
|
|
||||||
{"WeakMap"},
|
|
||||||
{"WeakSet"},
|
|
||||||
{"WebAssembly"},
|
|
||||||
{"clearInterval"},
|
|
||||||
{"clearTimeout"},
|
|
||||||
{"console"},
|
|
||||||
{"decodeURI"},
|
|
||||||
{"decodeURIComponent"},
|
|
||||||
{"encodeURI"},
|
|
||||||
{"encodeURIComponent"},
|
|
||||||
{"escape"},
|
|
||||||
{"globalThis"},
|
|
||||||
{"isFinite"},
|
|
||||||
{"isNaN"},
|
|
||||||
{"parseFloat"},
|
|
||||||
{"parseInt"},
|
|
||||||
{"queueMicrotask"},
|
|
||||||
{"setInterval"},
|
|
||||||
{"setTimeout"},
|
|
||||||
{"unescape"},
|
|
||||||
|
|
||||||
// Console method references are assumed to have no side effects
|
|
||||||
// https://developer.mozilla.org/en-US/docs/Web/API/console
|
|
||||||
{"console", "assert"},
|
|
||||||
{"console", "clear"},
|
|
||||||
{"console", "count"},
|
|
||||||
{"console", "countReset"},
|
|
||||||
{"console", "debug"},
|
|
||||||
{"console", "dir"},
|
|
||||||
{"console", "dirxml"},
|
|
||||||
{"console", "error"},
|
|
||||||
{"console", "group"},
|
|
||||||
{"console", "groupCollapsed"},
|
|
||||||
{"console", "groupEnd"},
|
|
||||||
{"console", "info"},
|
|
||||||
{"console", "log"},
|
|
||||||
{"console", "table"},
|
|
||||||
{"console", "time"},
|
|
||||||
{"console", "timeEnd"},
|
|
||||||
{"console", "timeLog"},
|
|
||||||
{"console", "trace"},
|
|
||||||
{"console", "warn"},
|
|
||||||
|
|
||||||
// CSSOM APIs
|
|
||||||
{"CSSAnimation"},
|
|
||||||
{"CSSFontFaceRule"},
|
|
||||||
{"CSSImportRule"},
|
|
||||||
{"CSSKeyframeRule"},
|
|
||||||
{"CSSKeyframesRule"},
|
|
||||||
{"CSSMediaRule"},
|
|
||||||
{"CSSNamespaceRule"},
|
|
||||||
{"CSSPageRule"},
|
|
||||||
{"CSSRule"},
|
|
||||||
{"CSSRuleList"},
|
|
||||||
{"CSSStyleDeclaration"},
|
|
||||||
{"CSSStyleRule"},
|
|
||||||
{"CSSStyleSheet"},
|
|
||||||
{"CSSSupportsRule"},
|
|
||||||
{"CSSTransition"},
|
|
||||||
|
|
||||||
// SVG DOM
|
|
||||||
{"SVGAElement"},
|
|
||||||
{"SVGAngle"},
|
|
||||||
{"SVGAnimateElement"},
|
|
||||||
{"SVGAnimateMotionElement"},
|
|
||||||
{"SVGAnimateTransformElement"},
|
|
||||||
{"SVGAnimatedAngle"},
|
|
||||||
{"SVGAnimatedBoolean"},
|
|
||||||
{"SVGAnimatedEnumeration"},
|
|
||||||
{"SVGAnimatedInteger"},
|
|
||||||
{"SVGAnimatedLength"},
|
|
||||||
{"SVGAnimatedLengthList"},
|
|
||||||
{"SVGAnimatedNumber"},
|
|
||||||
{"SVGAnimatedNumberList"},
|
|
||||||
{"SVGAnimatedPreserveAspectRatio"},
|
|
||||||
{"SVGAnimatedRect"},
|
|
||||||
{"SVGAnimatedString"},
|
|
||||||
{"SVGAnimatedTransformList"},
|
|
||||||
{"SVGAnimationElement"},
|
|
||||||
{"SVGCircleElement"},
|
|
||||||
{"SVGClipPathElement"},
|
|
||||||
{"SVGComponentTransferFunctionElement"},
|
|
||||||
{"SVGDefsElement"},
|
|
||||||
{"SVGDescElement"},
|
|
||||||
{"SVGElement"},
|
|
||||||
{"SVGEllipseElement"},
|
|
||||||
{"SVGFEBlendElement"},
|
|
||||||
{"SVGFEColorMatrixElement"},
|
|
||||||
{"SVGFEComponentTransferElement"},
|
|
||||||
{"SVGFECompositeElement"},
|
|
||||||
{"SVGFEConvolveMatrixElement"},
|
|
||||||
{"SVGFEDiffuseLightingElement"},
|
|
||||||
{"SVGFEDisplacementMapElement"},
|
|
||||||
{"SVGFEDistantLightElement"},
|
|
||||||
{"SVGFEDropShadowElement"},
|
|
||||||
{"SVGFEFloodElement"},
|
|
||||||
{"SVGFEFuncAElement"},
|
|
||||||
{"SVGFEFuncBElement"},
|
|
||||||
{"SVGFEFuncGElement"},
|
|
||||||
{"SVGFEFuncRElement"},
|
|
||||||
{"SVGFEGaussianBlurElement"},
|
|
||||||
{"SVGFEImageElement"},
|
|
||||||
{"SVGFEMergeElement"},
|
|
||||||
{"SVGFEMergeNodeElement"},
|
|
||||||
{"SVGFEMorphologyElement"},
|
|
||||||
{"SVGFEOffsetElement"},
|
|
||||||
{"SVGFEPointLightElement"},
|
|
||||||
{"SVGFESpecularLightingElement"},
|
|
||||||
{"SVGFESpotLightElement"},
|
|
||||||
{"SVGFETileElement"},
|
|
||||||
{"SVGFETurbulenceElement"},
|
|
||||||
{"SVGFilterElement"},
|
|
||||||
{"SVGForeignObjectElement"},
|
|
||||||
{"SVGGElement"},
|
|
||||||
{"SVGGeometryElement"},
|
|
||||||
{"SVGGradientElement"},
|
|
||||||
{"SVGGraphicsElement"},
|
|
||||||
{"SVGImageElement"},
|
|
||||||
{"SVGLength"},
|
|
||||||
{"SVGLengthList"},
|
|
||||||
{"SVGLineElement"},
|
|
||||||
{"SVGLinearGradientElement"},
|
|
||||||
{"SVGMPathElement"},
|
|
||||||
{"SVGMarkerElement"},
|
|
||||||
{"SVGMaskElement"},
|
|
||||||
{"SVGMatrix"},
|
|
||||||
{"SVGMetadataElement"},
|
|
||||||
{"SVGNumber"},
|
|
||||||
{"SVGNumberList"},
|
|
||||||
{"SVGPathElement"},
|
|
||||||
{"SVGPatternElement"},
|
|
||||||
{"SVGPoint"},
|
|
||||||
{"SVGPointList"},
|
|
||||||
{"SVGPolygonElement"},
|
|
||||||
{"SVGPolylineElement"},
|
|
||||||
{"SVGPreserveAspectRatio"},
|
|
||||||
{"SVGRadialGradientElement"},
|
|
||||||
{"SVGRect"},
|
|
||||||
{"SVGRectElement"},
|
|
||||||
{"SVGSVGElement"},
|
|
||||||
{"SVGScriptElement"},
|
|
||||||
{"SVGSetElement"},
|
|
||||||
{"SVGStopElement"},
|
|
||||||
{"SVGStringList"},
|
|
||||||
{"SVGStyleElement"},
|
|
||||||
{"SVGSwitchElement"},
|
|
||||||
{"SVGSymbolElement"},
|
|
||||||
{"SVGTSpanElement"},
|
|
||||||
{"SVGTextContentElement"},
|
|
||||||
{"SVGTextElement"},
|
|
||||||
{"SVGTextPathElement"},
|
|
||||||
{"SVGTextPositioningElement"},
|
|
||||||
{"SVGTitleElement"},
|
|
||||||
{"SVGTransform"},
|
|
||||||
{"SVGTransformList"},
|
|
||||||
{"SVGUnitTypes"},
|
|
||||||
{"SVGUseElement"},
|
|
||||||
{"SVGViewElement"},
|
|
||||||
|
|
||||||
// Other browser APIs
|
|
||||||
//
|
|
||||||
// This list contains all globals present in modern versions of Chrome, Safari,
|
|
||||||
// and Firefox except for the following properties, since they have a side effect
|
|
||||||
// of triggering layout (https://gist.github.com/paulirish/5d52fb081b3570c81e3a):
|
|
||||||
//
|
|
||||||
// - scrollX
|
|
||||||
// - scrollY
|
|
||||||
// - innerWidth
|
|
||||||
// - innerHeight
|
|
||||||
// - pageXOffset
|
|
||||||
// - pageYOffset
|
|
||||||
//
|
|
||||||
// The following globals have also been removed since they sometimes throw an
|
|
||||||
// exception when accessed, which is a side effect (for more information see
|
|
||||||
// https://stackoverflow.com/a/33047477):
|
|
||||||
//
|
|
||||||
// - localStorage
|
|
||||||
// - sessionStorage
|
|
||||||
//
|
|
||||||
{"AnalyserNode"},
|
|
||||||
{"Animation"},
|
|
||||||
{"AnimationEffect"},
|
|
||||||
{"AnimationEvent"},
|
|
||||||
{"AnimationPlaybackEvent"},
|
|
||||||
{"AnimationTimeline"},
|
|
||||||
{"Attr"},
|
|
||||||
{"Audio"},
|
|
||||||
{"AudioBuffer"},
|
|
||||||
{"AudioBufferSourceNode"},
|
|
||||||
{"AudioDestinationNode"},
|
|
||||||
{"AudioListener"},
|
|
||||||
{"AudioNode"},
|
|
||||||
{"AudioParam"},
|
|
||||||
{"AudioProcessingEvent"},
|
|
||||||
{"AudioScheduledSourceNode"},
|
|
||||||
{"BarProp"},
|
|
||||||
{"BeforeUnloadEvent"},
|
|
||||||
{"BiquadFilterNode"},
|
|
||||||
{"Blob"},
|
|
||||||
{"BlobEvent"},
|
|
||||||
{"ByteLengthQueuingStrategy"},
|
|
||||||
{"CDATASection"},
|
|
||||||
{"CSS"},
|
|
||||||
{"CanvasGradient"},
|
|
||||||
{"CanvasPattern"},
|
|
||||||
{"CanvasRenderingContext2D"},
|
|
||||||
{"ChannelMergerNode"},
|
|
||||||
{"ChannelSplitterNode"},
|
|
||||||
{"CharacterData"},
|
|
||||||
{"ClipboardEvent"},
|
|
||||||
{"CloseEvent"},
|
|
||||||
{"Comment"},
|
|
||||||
{"CompositionEvent"},
|
|
||||||
{"ConvolverNode"},
|
|
||||||
{"CountQueuingStrategy"},
|
|
||||||
{"Crypto"},
|
|
||||||
{"CustomElementRegistry"},
|
|
||||||
{"CustomEvent"},
|
|
||||||
{"DOMException"},
|
|
||||||
{"DOMImplementation"},
|
|
||||||
{"DOMMatrix"},
|
|
||||||
{"DOMMatrixReadOnly"},
|
|
||||||
{"DOMParser"},
|
|
||||||
{"DOMPoint"},
|
|
||||||
{"DOMPointReadOnly"},
|
|
||||||
{"DOMQuad"},
|
|
||||||
{"DOMRect"},
|
|
||||||
{"DOMRectList"},
|
|
||||||
{"DOMRectReadOnly"},
|
|
||||||
{"DOMStringList"},
|
|
||||||
{"DOMStringMap"},
|
|
||||||
{"DOMTokenList"},
|
|
||||||
{"DataTransfer"},
|
|
||||||
{"DataTransferItem"},
|
|
||||||
{"DataTransferItemList"},
|
|
||||||
{"DelayNode"},
|
|
||||||
{"Document"},
|
|
||||||
{"DocumentFragment"},
|
|
||||||
{"DocumentTimeline"},
|
|
||||||
{"DocumentType"},
|
|
||||||
{"DragEvent"},
|
|
||||||
{"DynamicsCompressorNode"},
|
|
||||||
{"Element"},
|
|
||||||
{"ErrorEvent"},
|
|
||||||
{"EventSource"},
|
|
||||||
{"File"},
|
|
||||||
{"FileList"},
|
|
||||||
{"FileReader"},
|
|
||||||
{"FocusEvent"},
|
|
||||||
{"FontFace"},
|
|
||||||
{"FormData"},
|
|
||||||
{"GainNode"},
|
|
||||||
{"Gamepad"},
|
|
||||||
{"GamepadButton"},
|
|
||||||
{"GamepadEvent"},
|
|
||||||
{"Geolocation"},
|
|
||||||
{"GeolocationPositionError"},
|
|
||||||
{"HTMLAllCollection"},
|
|
||||||
{"HTMLAnchorElement"},
|
|
||||||
{"HTMLAreaElement"},
|
|
||||||
{"HTMLAudioElement"},
|
|
||||||
{"HTMLBRElement"},
|
|
||||||
{"HTMLBaseElement"},
|
|
||||||
{"HTMLBodyElement"},
|
|
||||||
{"HTMLButtonElement"},
|
|
||||||
{"HTMLCanvasElement"},
|
|
||||||
{"HTMLCollection"},
|
|
||||||
{"HTMLDListElement"},
|
|
||||||
{"HTMLDataElement"},
|
|
||||||
{"HTMLDataListElement"},
|
|
||||||
{"HTMLDetailsElement"},
|
|
||||||
{"HTMLDirectoryElement"},
|
|
||||||
{"HTMLDivElement"},
|
|
||||||
{"HTMLDocument"},
|
|
||||||
{"HTMLElement"},
|
|
||||||
{"HTMLEmbedElement"},
|
|
||||||
{"HTMLFieldSetElement"},
|
|
||||||
{"HTMLFontElement"},
|
|
||||||
{"HTMLFormControlsCollection"},
|
|
||||||
{"HTMLFormElement"},
|
|
||||||
{"HTMLFrameElement"},
|
|
||||||
{"HTMLFrameSetElement"},
|
|
||||||
{"HTMLHRElement"},
|
|
||||||
{"HTMLHeadElement"},
|
|
||||||
{"HTMLHeadingElement"},
|
|
||||||
{"HTMLHtmlElement"},
|
|
||||||
{"HTMLIFrameElement"},
|
|
||||||
{"HTMLImageElement"},
|
|
||||||
{"HTMLInputElement"},
|
|
||||||
{"HTMLLIElement"},
|
|
||||||
{"HTMLLabelElement"},
|
|
||||||
{"HTMLLegendElement"},
|
|
||||||
{"HTMLLinkElement"},
|
|
||||||
{"HTMLMapElement"},
|
|
||||||
{"HTMLMarqueeElement"},
|
|
||||||
{"HTMLMediaElement"},
|
|
||||||
{"HTMLMenuElement"},
|
|
||||||
{"HTMLMetaElement"},
|
|
||||||
{"HTMLMeterElement"},
|
|
||||||
{"HTMLModElement"},
|
|
||||||
{"HTMLOListElement"},
|
|
||||||
{"HTMLObjectElement"},
|
|
||||||
{"HTMLOptGroupElement"},
|
|
||||||
{"HTMLOptionElement"},
|
|
||||||
{"HTMLOptionsCollection"},
|
|
||||||
{"HTMLOutputElement"},
|
|
||||||
{"HTMLParagraphElement"},
|
|
||||||
{"HTMLParamElement"},
|
|
||||||
{"HTMLPictureElement"},
|
|
||||||
{"HTMLPreElement"},
|
|
||||||
{"HTMLProgressElement"},
|
|
||||||
{"HTMLQuoteElement"},
|
|
||||||
{"HTMLScriptElement"},
|
|
||||||
{"HTMLSelectElement"},
|
|
||||||
{"HTMLSlotElement"},
|
|
||||||
{"HTMLSourceElement"},
|
|
||||||
{"HTMLSpanElement"},
|
|
||||||
{"HTMLStyleElement"},
|
|
||||||
{"HTMLTableCaptionElement"},
|
|
||||||
{"HTMLTableCellElement"},
|
|
||||||
{"HTMLTableColElement"},
|
|
||||||
{"HTMLTableElement"},
|
|
||||||
{"HTMLTableRowElement"},
|
|
||||||
{"HTMLTableSectionElement"},
|
|
||||||
{"HTMLTemplateElement"},
|
|
||||||
{"HTMLTextAreaElement"},
|
|
||||||
{"HTMLTimeElement"},
|
|
||||||
{"HTMLTitleElement"},
|
|
||||||
{"HTMLTrackElement"},
|
|
||||||
{"HTMLUListElement"},
|
|
||||||
{"HTMLUnknownElement"},
|
|
||||||
{"HTMLVideoElement"},
|
|
||||||
{"HashChangeEvent"},
|
|
||||||
{"Headers"},
|
|
||||||
{"History"},
|
|
||||||
{"IDBCursor"},
|
|
||||||
{"IDBCursorWithValue"},
|
|
||||||
{"IDBDatabase"},
|
|
||||||
{"IDBFactory"},
|
|
||||||
{"IDBIndex"},
|
|
||||||
{"IDBKeyRange"},
|
|
||||||
{"IDBObjectStore"},
|
|
||||||
{"IDBOpenDBRequest"},
|
|
||||||
{"IDBRequest"},
|
|
||||||
{"IDBTransaction"},
|
|
||||||
{"IDBVersionChangeEvent"},
|
|
||||||
{"Image"},
|
|
||||||
{"ImageData"},
|
|
||||||
{"InputEvent"},
|
|
||||||
{"IntersectionObserver"},
|
|
||||||
{"IntersectionObserverEntry"},
|
|
||||||
{"KeyboardEvent"},
|
|
||||||
{"KeyframeEffect"},
|
|
||||||
{"Location"},
|
|
||||||
{"MediaCapabilities"},
|
|
||||||
{"MediaElementAudioSourceNode"},
|
|
||||||
{"MediaEncryptedEvent"},
|
|
||||||
{"MediaError"},
|
|
||||||
{"MediaList"},
|
|
||||||
{"MediaQueryList"},
|
|
||||||
{"MediaQueryListEvent"},
|
|
||||||
{"MediaRecorder"},
|
|
||||||
{"MediaSource"},
|
|
||||||
{"MediaStream"},
|
|
||||||
{"MediaStreamAudioDestinationNode"},
|
|
||||||
{"MediaStreamAudioSourceNode"},
|
|
||||||
{"MediaStreamTrack"},
|
|
||||||
{"MediaStreamTrackEvent"},
|
|
||||||
{"MimeType"},
|
|
||||||
{"MimeTypeArray"},
|
|
||||||
{"MouseEvent"},
|
|
||||||
{"MutationEvent"},
|
|
||||||
{"MutationObserver"},
|
|
||||||
{"MutationRecord"},
|
|
||||||
{"NamedNodeMap"},
|
|
||||||
{"Navigator"},
|
|
||||||
{"Node"},
|
|
||||||
{"NodeFilter"},
|
|
||||||
{"NodeIterator"},
|
|
||||||
{"NodeList"},
|
|
||||||
{"Notification"},
|
|
||||||
{"OfflineAudioCompletionEvent"},
|
|
||||||
{"Option"},
|
|
||||||
{"OscillatorNode"},
|
|
||||||
{"PageTransitionEvent"},
|
|
||||||
{"Path2D"},
|
|
||||||
{"Performance"},
|
|
||||||
{"PerformanceEntry"},
|
|
||||||
{"PerformanceMark"},
|
|
||||||
{"PerformanceMeasure"},
|
|
||||||
{"PerformanceNavigation"},
|
|
||||||
{"PerformanceObserver"},
|
|
||||||
{"PerformanceObserverEntryList"},
|
|
||||||
{"PerformanceResourceTiming"},
|
|
||||||
{"PerformanceTiming"},
|
|
||||||
{"PeriodicWave"},
|
|
||||||
{"Plugin"},
|
|
||||||
{"PluginArray"},
|
|
||||||
{"PointerEvent"},
|
|
||||||
{"PopStateEvent"},
|
|
||||||
{"ProcessingInstruction"},
|
|
||||||
{"ProgressEvent"},
|
|
||||||
{"PromiseRejectionEvent"},
|
|
||||||
{"RTCCertificate"},
|
|
||||||
{"RTCDTMFSender"},
|
|
||||||
{"RTCDTMFToneChangeEvent"},
|
|
||||||
{"RTCDataChannel"},
|
|
||||||
{"RTCDataChannelEvent"},
|
|
||||||
{"RTCIceCandidate"},
|
|
||||||
{"RTCPeerConnection"},
|
|
||||||
{"RTCPeerConnectionIceEvent"},
|
|
||||||
{"RTCRtpReceiver"},
|
|
||||||
{"RTCRtpSender"},
|
|
||||||
{"RTCRtpTransceiver"},
|
|
||||||
{"RTCSessionDescription"},
|
|
||||||
{"RTCStatsReport"},
|
|
||||||
{"RTCTrackEvent"},
|
|
||||||
{"RadioNodeList"},
|
|
||||||
{"Range"},
|
|
||||||
{"ReadableStream"},
|
|
||||||
{"Request"},
|
|
||||||
{"ResizeObserver"},
|
|
||||||
{"ResizeObserverEntry"},
|
|
||||||
{"Response"},
|
|
||||||
{"Screen"},
|
|
||||||
{"ScriptProcessorNode"},
|
|
||||||
{"SecurityPolicyViolationEvent"},
|
|
||||||
{"Selection"},
|
|
||||||
{"ShadowRoot"},
|
|
||||||
{"SourceBuffer"},
|
|
||||||
{"SourceBufferList"},
|
|
||||||
{"SpeechSynthesisEvent"},
|
|
||||||
{"SpeechSynthesisUtterance"},
|
|
||||||
{"StaticRange"},
|
|
||||||
{"Storage"},
|
|
||||||
{"StorageEvent"},
|
|
||||||
{"StyleSheet"},
|
|
||||||
{"StyleSheetList"},
|
|
||||||
{"Text"},
|
|
||||||
{"TextMetrics"},
|
|
||||||
{"TextTrack"},
|
|
||||||
{"TextTrackCue"},
|
|
||||||
{"TextTrackCueList"},
|
|
||||||
{"TextTrackList"},
|
|
||||||
{"TimeRanges"},
|
|
||||||
{"TrackEvent"},
|
|
||||||
{"TransitionEvent"},
|
|
||||||
{"TreeWalker"},
|
|
||||||
{"UIEvent"},
|
|
||||||
{"VTTCue"},
|
|
||||||
{"ValidityState"},
|
|
||||||
{"VisualViewport"},
|
|
||||||
{"WaveShaperNode"},
|
|
||||||
{"WebGLActiveInfo"},
|
|
||||||
{"WebGLBuffer"},
|
|
||||||
{"WebGLContextEvent"},
|
|
||||||
{"WebGLFramebuffer"},
|
|
||||||
{"WebGLProgram"},
|
|
||||||
{"WebGLQuery"},
|
|
||||||
{"WebGLRenderbuffer"},
|
|
||||||
{"WebGLRenderingContext"},
|
|
||||||
{"WebGLSampler"},
|
|
||||||
{"WebGLShader"},
|
|
||||||
{"WebGLShaderPrecisionFormat"},
|
|
||||||
{"WebGLSync"},
|
|
||||||
{"WebGLTexture"},
|
|
||||||
{"WebGLUniformLocation"},
|
|
||||||
{"WebKitCSSMatrix"},
|
|
||||||
{"WebSocket"},
|
|
||||||
{"WheelEvent"},
|
|
||||||
{"Window"},
|
|
||||||
{"Worker"},
|
|
||||||
{"XMLDocument"},
|
|
||||||
{"XMLHttpRequest"},
|
|
||||||
{"XMLHttpRequestEventTarget"},
|
|
||||||
{"XMLHttpRequestUpload"},
|
|
||||||
{"XMLSerializer"},
|
|
||||||
{"XPathEvaluator"},
|
|
||||||
{"XPathExpression"},
|
|
||||||
{"XPathResult"},
|
|
||||||
{"XSLTProcessor"},
|
|
||||||
{"alert"},
|
|
||||||
{"atob"},
|
|
||||||
{"blur"},
|
|
||||||
{"btoa"},
|
|
||||||
{"cancelAnimationFrame"},
|
|
||||||
{"captureEvents"},
|
|
||||||
{"close"},
|
|
||||||
{"closed"},
|
|
||||||
{"confirm"},
|
|
||||||
{"customElements"},
|
|
||||||
{"devicePixelRatio"},
|
|
||||||
{"document"},
|
|
||||||
{"event"},
|
|
||||||
{"fetch"},
|
|
||||||
{"find"},
|
|
||||||
{"focus"},
|
|
||||||
{"frameElement"},
|
|
||||||
{"frames"},
|
|
||||||
{"getComputedStyle"},
|
|
||||||
{"getSelection"},
|
|
||||||
{"history"},
|
|
||||||
{"indexedDB"},
|
|
||||||
{"isSecureContext"},
|
|
||||||
{"length"},
|
|
||||||
{"location"},
|
|
||||||
{"locationbar"},
|
|
||||||
{"matchMedia"},
|
|
||||||
{"menubar"},
|
|
||||||
{"moveBy"},
|
|
||||||
{"moveTo"},
|
|
||||||
{"name"},
|
|
||||||
{"navigator"},
|
|
||||||
{"onabort"},
|
|
||||||
{"onafterprint"},
|
|
||||||
{"onanimationend"},
|
|
||||||
{"onanimationiteration"},
|
|
||||||
{"onanimationstart"},
|
|
||||||
{"onbeforeprint"},
|
|
||||||
{"onbeforeunload"},
|
|
||||||
{"onblur"},
|
|
||||||
{"oncanplay"},
|
|
||||||
{"oncanplaythrough"},
|
|
||||||
{"onchange"},
|
|
||||||
{"onclick"},
|
|
||||||
{"oncontextmenu"},
|
|
||||||
{"oncuechange"},
|
|
||||||
{"ondblclick"},
|
|
||||||
{"ondrag"},
|
|
||||||
{"ondragend"},
|
|
||||||
{"ondragenter"},
|
|
||||||
{"ondragleave"},
|
|
||||||
{"ondragover"},
|
|
||||||
{"ondragstart"},
|
|
||||||
{"ondrop"},
|
|
||||||
{"ondurationchange"},
|
|
||||||
{"onemptied"},
|
|
||||||
{"onended"},
|
|
||||||
{"onerror"},
|
|
||||||
{"onfocus"},
|
|
||||||
{"ongotpointercapture"},
|
|
||||||
{"onhashchange"},
|
|
||||||
{"oninput"},
|
|
||||||
{"oninvalid"},
|
|
||||||
{"onkeydown"},
|
|
||||||
{"onkeypress"},
|
|
||||||
{"onkeyup"},
|
|
||||||
{"onlanguagechange"},
|
|
||||||
{"onload"},
|
|
||||||
{"onloadeddata"},
|
|
||||||
{"onloadedmetadata"},
|
|
||||||
{"onloadstart"},
|
|
||||||
{"onlostpointercapture"},
|
|
||||||
{"onmessage"},
|
|
||||||
{"onmousedown"},
|
|
||||||
{"onmouseenter"},
|
|
||||||
{"onmouseleave"},
|
|
||||||
{"onmousemove"},
|
|
||||||
{"onmouseout"},
|
|
||||||
{"onmouseover"},
|
|
||||||
{"onmouseup"},
|
|
||||||
{"onoffline"},
|
|
||||||
{"ononline"},
|
|
||||||
{"onpagehide"},
|
|
||||||
{"onpageshow"},
|
|
||||||
{"onpause"},
|
|
||||||
{"onplay"},
|
|
||||||
{"onplaying"},
|
|
||||||
{"onpointercancel"},
|
|
||||||
{"onpointerdown"},
|
|
||||||
{"onpointerenter"},
|
|
||||||
{"onpointerleave"},
|
|
||||||
{"onpointermove"},
|
|
||||||
{"onpointerout"},
|
|
||||||
{"onpointerover"},
|
|
||||||
{"onpointerup"},
|
|
||||||
{"onpopstate"},
|
|
||||||
{"onprogress"},
|
|
||||||
{"onratechange"},
|
|
||||||
{"onrejectionhandled"},
|
|
||||||
{"onreset"},
|
|
||||||
{"onresize"},
|
|
||||||
{"onscroll"},
|
|
||||||
{"onseeked"},
|
|
||||||
{"onseeking"},
|
|
||||||
{"onselect"},
|
|
||||||
{"onstalled"},
|
|
||||||
{"onstorage"},
|
|
||||||
{"onsubmit"},
|
|
||||||
{"onsuspend"},
|
|
||||||
{"ontimeupdate"},
|
|
||||||
{"ontoggle"},
|
|
||||||
{"ontransitioncancel"},
|
|
||||||
{"ontransitionend"},
|
|
||||||
{"ontransitionrun"},
|
|
||||||
{"ontransitionstart"},
|
|
||||||
{"onunhandledrejection"},
|
|
||||||
{"onunload"},
|
|
||||||
{"onvolumechange"},
|
|
||||||
{"onwaiting"},
|
|
||||||
{"onwebkitanimationend"},
|
|
||||||
{"onwebkitanimationiteration"},
|
|
||||||
{"onwebkitanimationstart"},
|
|
||||||
{"onwebkittransitionend"},
|
|
||||||
{"onwheel"},
|
|
||||||
{"open"},
|
|
||||||
{"opener"},
|
|
||||||
{"origin"},
|
|
||||||
{"outerHeight"},
|
|
||||||
{"outerWidth"},
|
|
||||||
{"parent"},
|
|
||||||
{"performance"},
|
|
||||||
{"personalbar"},
|
|
||||||
{"postMessage"},
|
|
||||||
{"print"},
|
|
||||||
{"prompt"},
|
|
||||||
{"releaseEvents"},
|
|
||||||
{"requestAnimationFrame"},
|
|
||||||
{"resizeBy"},
|
|
||||||
{"resizeTo"},
|
|
||||||
{"screen"},
|
|
||||||
{"screenLeft"},
|
|
||||||
{"screenTop"},
|
|
||||||
{"screenX"},
|
|
||||||
{"screenY"},
|
|
||||||
{"scroll"},
|
|
||||||
{"scrollBy"},
|
|
||||||
{"scrollTo"},
|
|
||||||
{"scrollbars"},
|
|
||||||
{"self"},
|
|
||||||
{"speechSynthesis"},
|
|
||||||
{"status"},
|
|
||||||
{"statusbar"},
|
|
||||||
{"stop"},
|
|
||||||
{"toolbar"},
|
|
||||||
{"top"},
|
|
||||||
{"webkitURL"},
|
|
||||||
{"window"},
|
|
||||||
}
|
|
||||||
|
|
||||||
type DefineArgs struct {
|
|
||||||
Loc logger.Loc
|
|
||||||
FindSymbol func(logger.Loc, string) js_ast.Ref
|
|
||||||
SymbolForDefine func(int) js_ast.Ref
|
|
||||||
}
|
|
||||||
|
|
||||||
type DefineFunc func(DefineArgs) js_ast.E
|
|
||||||
|
|
||||||
type DefineData struct {
|
|
||||||
DefineFunc DefineFunc
|
|
||||||
|
|
||||||
// True if accessing this value is known to not have any side effects. For
|
|
||||||
// example, a bare reference to "Object.create" can be removed because it
|
|
||||||
// does not have any observable side effects.
|
|
||||||
CanBeRemovedIfUnused bool
|
|
||||||
|
|
||||||
// True if a call to this value is known to not have any side effects. For
|
|
||||||
// example, a bare call to "Object()" can be removed because it does not
|
|
||||||
// have any observable side effects.
|
|
||||||
CallCanBeUnwrappedIfUnused bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func mergeDefineData(old DefineData, new DefineData) DefineData {
|
|
||||||
if old.CanBeRemovedIfUnused {
|
|
||||||
new.CanBeRemovedIfUnused = true
|
|
||||||
}
|
|
||||||
if old.CallCanBeUnwrappedIfUnused {
|
|
||||||
new.CallCanBeUnwrappedIfUnused = true
|
|
||||||
}
|
|
||||||
return new
|
|
||||||
}
type DotDefine struct {
	Parts []string
	Data  DefineData
}

type ProcessedDefines struct {
	IdentifierDefines map[string]DefineData
	DotDefines        map[string][]DotDefine
}

// This transformation is expensive, so we only want to do it once. Make sure
// to only call processDefines() once per compilation. Unfortunately Golang
// doesn't have an efficient way to copy a map and the overhead of copying
// all of the properties into a new map once for every new parser noticeably
// slows down our benchmarks.
func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines {
	// Optimization: reuse known globals if there are no user-specified defines
	hasUserDefines := len(userDefines) != 0
	if !hasUserDefines {
		processedGlobalsMutex.Lock()
		if processedGlobals != nil {
			defer processedGlobalsMutex.Unlock()
			return *processedGlobals
		}
		processedGlobalsMutex.Unlock()
	}

	result := ProcessedDefines{
		IdentifierDefines: make(map[string]DefineData),
		DotDefines:        make(map[string][]DotDefine),
	}

	// Mark these property accesses as free of side effects. That means they can
	// be removed if their result is unused. We can't just remove all unused
	// property accesses since property accesses can have side effects. For
	// example, the property access "a.b.c" has the side effect of throwing an
	// exception if "a.b" is undefined.
	for _, parts := range knownGlobals {
		tail := parts[len(parts)-1]
		if len(parts) == 1 {
			result.IdentifierDefines[tail] = DefineData{CanBeRemovedIfUnused: true}
		} else {
			result.DotDefines[tail] = append(result.DotDefines[tail], DotDefine{Parts: parts, Data: DefineData{CanBeRemovedIfUnused: true}})
		}
	}

	// Swap in certain literal values because those can be constant folded
	result.IdentifierDefines["undefined"] = DefineData{
		DefineFunc: func(DefineArgs) js_ast.E { return js_ast.EUndefinedShared },
	}
	result.IdentifierDefines["NaN"] = DefineData{
		DefineFunc: func(DefineArgs) js_ast.E { return &js_ast.ENumber{Value: math.NaN()} },
	}
	result.IdentifierDefines["Infinity"] = DefineData{
		DefineFunc: func(DefineArgs) js_ast.E { return &js_ast.ENumber{Value: math.Inf(1)} },
	}

	// Then copy the user-specified defines in afterwards, which will overwrite
	// any known globals above.
	for key, data := range userDefines {
		parts := strings.Split(key, ".")

		// Identifier defines are special-cased
		if len(parts) == 1 {
			result.IdentifierDefines[key] = mergeDefineData(result.IdentifierDefines[key], data)
			continue
		}

		tail := parts[len(parts)-1]
		dotDefines := result.DotDefines[tail]
		found := false

		// Try to merge with existing dot defines first
		for i, define := range dotDefines {
			if arePartsEqual(parts, define.Parts) {
				define := &dotDefines[i]
				define.Data = mergeDefineData(define.Data, data)
				found = true
				break
			}
		}

		if !found {
			dotDefines = append(dotDefines, DotDefine{Parts: parts, Data: data})
		}
		result.DotDefines[tail] = dotDefines
	}

	// Potentially cache the result for next time
	if !hasUserDefines {
		processedGlobalsMutex.Lock()
		defer processedGlobalsMutex.Unlock()
		if processedGlobals == nil {
			processedGlobals = &result
		}
	}
	return result
}
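// Illustrative sketch (assumed caller, not from the vendored source): a
// single-part key like "DEBUG" lands in IdentifierDefines, while a dotted
// key like "process.env.NODE_ENV" is split on "." and stored in DotDefines
// under its tail part "NODE_ENV":
//
//	defines := ProcessDefines(map[string]DefineData{
//		"DEBUG":                {CanBeRemovedIfUnused: true},
//		"process.env.NODE_ENV": {},
//	})
//	_ = defines.IdentifierDefines["DEBUG"]
//	_ = defines.DotDefines["NODE_ENV"] // Parts: ["process", "env", "NODE_ENV"]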
func arePartsEqual(a []string, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if a[i] != b[i] {
			return false
		}
	}
	return true
}
646
vendor/github.com/evanw/esbuild/internal/css_ast/css_ast.go
generated
vendored
@ -1,646 +0,0 @@
package css_ast

import (
	"strconv"

	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/css_lexer"
	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/logger"
)

// CSS syntax comes in two layers: a minimal syntax that generally accepts
// anything that looks vaguely like CSS, and a large set of built-in rules
// (the things browsers actually interpret). That way CSS parsers can read
// unknown rules and skip over them without having to stop due to errors.
//
// This AST format is mostly just the minimal syntax. It parses unknown rules
// into a tree with enough information that it can write them back out again.
// There are some additional layers of syntax including selectors and @-rules
// which allow for better pretty-printing and minification.
//
// Most of the AST just references ranges of the original file by keeping the
// original "Token" values around from the lexer. This is a memory-efficient
// representation that helps provide good parsing and printing performance.

type AST struct {
	ImportRecords        []ast.ImportRecord
	Rules                []Rule
	SourceMapComment     logger.Span
	ApproximateLineCount int32
}

// We create a lot of tokens, so make sure this layout is memory-efficient.
// The layout here isn't optimal because it biases for convenience (e.g.
// "string" could be shorter) but at least the ordering of fields was
// deliberately chosen to minimize size.
type Token struct {
	// This is the raw contents of the token most of the time. However, it
	// contains the decoded string contents for "TString" tokens.
	Text string // 16 bytes

	// Contains the child tokens for component values that are simple blocks.
	// These are either "(", "{", "[", or function tokens. The closing token is
	// implicit and is not stored.
	Children *[]Token // 8 bytes

	// URL tokens have an associated import record at the top-level of the AST.
	// This index points to that import record.
	ImportRecordIndex uint32 // 4 bytes

	// The division between the number and the unit for "TDimension" tokens.
	UnitOffset uint16 // 2 bytes

	// This will never be "TWhitespace" because whitespace isn't stored as a
	// token directly. Instead it is stored in "HasWhitespaceAfter" on the
	// previous token. This is to make it easier to pattern-match against
	// tokens when handling CSS rules, since whitespace almost always doesn't
	// matter. That way you can pattern match against e.g. "rgb(r, g, b)" and
	// not have to handle all possible combinations of embedded whitespace
	// tokens.
	//
	// There is one exception to this: when in verbatim whitespace mode and
	// the token list is non-empty and is only whitespace tokens. In that case
	// a single whitespace token is emitted. This is because otherwise there
	// would be no tokens to attach the whitespace before/after flags to.
	Kind css_lexer.T // 1 byte

	// These flags indicate the presence of a "TWhitespace" token before or after
	// this token. There should be whitespace printed between two tokens if either
	// token indicates that there should be whitespace. Note that whitespace may
	// be altered by processing in certain situations (e.g. minification).
	Whitespace WhitespaceFlags // 1 byte
}

type WhitespaceFlags uint8

const (
	WhitespaceBefore WhitespaceFlags = 1 << iota
	WhitespaceAfter
)
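// Illustrative sketch (assumption, not from the vendored source): a printer
// would emit a space between two adjacent tokens when either side requests
// it, matching the "either token indicates" rule described above:
//
//	needSpace := prev.Whitespace&WhitespaceAfter != 0 ||
//		next.Whitespace&WhitespaceBefore != 0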
func (a Token) Equal(b Token) bool {
	if a.Kind == b.Kind && a.Text == b.Text && a.ImportRecordIndex == b.ImportRecordIndex && a.Whitespace == b.Whitespace {
		if a.Children == nil && b.Children == nil {
			return true
		}

		if a.Children != nil && b.Children != nil && TokensEqual(*a.Children, *b.Children) {
			return true
		}
	}

	return false
}

func TokensEqual(a []Token, b []Token) bool {
	if len(a) != len(b) {
		return false
	}
	for i, c := range a {
		if !c.Equal(b[i]) {
			return false
		}
	}
	return true
}

func HashTokens(hash uint32, tokens []Token) uint32 {
	hash = helpers.HashCombine(hash, uint32(len(tokens)))

	for _, t := range tokens {
		hash = helpers.HashCombine(hash, uint32(t.Kind))
		hash = helpers.HashCombineString(hash, t.Text)
		if t.Children != nil {
			hash = HashTokens(hash, *t.Children)
		}
	}

	return hash
}

func (a Token) EqualIgnoringWhitespace(b Token) bool {
	if a.Kind == b.Kind && a.Text == b.Text && a.ImportRecordIndex == b.ImportRecordIndex {
		if a.Children == nil && b.Children == nil {
			return true
		}

		if a.Children != nil && b.Children != nil && TokensEqualIgnoringWhitespace(*a.Children, *b.Children) {
			return true
		}
	}

	return false
}

func TokensEqualIgnoringWhitespace(a []Token, b []Token) bool {
	if len(a) != len(b) {
		return false
	}
	for i, c := range a {
		if !c.EqualIgnoringWhitespace(b[i]) {
			return false
		}
	}
	return true
}
func TokensAreCommaSeparated(tokens []Token) bool {
	if n := len(tokens); (n & 1) != 0 {
		for i := 1; i < n; i += 2 {
			if tokens[i].Kind != css_lexer.TComma {
				return false
			}
		}
		return true
	}
	return false
}
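// Illustrative worked example (not from the vendored source): a valid
// comma-separated list has odd length with TComma at every odd index, so
// kinds [TIdent, TComma, TIdent] return true, while [TIdent, TIdent]
// (even length) and [TIdent, TIdent, TIdent] (no comma at index 1) both
// return false.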
func (t Token) FractionForPercentage() (float64, bool) {
	if t.Kind == css_lexer.TPercentage {
		if f, err := strconv.ParseFloat(t.PercentageValue(), 64); err == nil {
			if f < 0 {
				return 0, true
			}
			if f > 100 {
				return 1, true
			}
			return f / 100.0, true
		}
	}
	return 0, false
}
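// Illustrative worked example (not from the vendored source): the result is
// clamped to [0, 1], so "150%" yields (1, true), "-10%" yields (0, true),
// "50%" yields (0.5, true), and any non-percentage token yields (0, false).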
// https://drafts.csswg.org/css-values-3/#lengths
// For zero lengths the unit identifier is optional
// (i.e. can be syntactically represented as the <number> 0).
func (t *Token) TurnLengthIntoNumberIfZero() bool {
	if t.Kind == css_lexer.TDimension && t.DimensionValue() == "0" {
		t.Kind = css_lexer.TNumber
		t.Text = "0"
		return true
	}
	return false
}

func (t *Token) TurnLengthOrPercentageIntoNumberIfZero() bool {
	if t.Kind == css_lexer.TPercentage && t.PercentageValue() == "0" {
		t.Kind = css_lexer.TNumber
		t.Text = "0"
		return true
	}
	return t.TurnLengthIntoNumberIfZero()
}
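// Illustrative worked example (not from the vendored source): a TDimension
// token for "0px" is rewritten in place to the plain number "0" and the
// method returns true, while "0.5px" is left untouched and it returns false;
// the percentage variant additionally rewrites "0%" the same way.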
func (t Token) PercentageValue() string {
	return t.Text[:len(t.Text)-1]
}

func (t Token) DimensionValue() string {
	return t.Text[:t.UnitOffset]
}

func (t Token) DimensionUnit() string {
	return t.Text[t.UnitOffset:]
}
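// Illustrative worked example (not from the vendored source): for a
// TDimension token with Text "100px" and UnitOffset 3, DimensionValue()
// returns "100" and DimensionUnit() returns "px"; for a TPercentage token
// with Text "50%", PercentageValue() returns "50".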
func (t Token) DimensionUnitIsSafeLength() bool {
	switch t.DimensionUnit() {
	// These units can be reasonably expected to be supported everywhere.
	// Information used: https://developer.mozilla.org/en-US/docs/Web/CSS/length
	case "cm", "em", "in", "mm", "pc", "pt", "px":
		return true
	}
	return false
}

func (t Token) IsZero() bool {
	return t.Kind == css_lexer.TNumber && t.Text == "0"
}

func (t Token) IsOne() bool {
	return t.Kind == css_lexer.TNumber && t.Text == "1"
}

func (t Token) IsAngle() bool {
	if t.Kind == css_lexer.TDimension {
		unit := t.DimensionUnit()
		return unit == "deg" || unit == "grad" || unit == "rad" || unit == "turn"
	}
	return false
}

func CloneTokensWithImportRecords(
	tokensIn []Token, importRecordsIn []ast.ImportRecord,
	tokensOut []Token, importRecordsOut []ast.ImportRecord,
) ([]Token, []ast.ImportRecord) {
	for _, t := range tokensIn {
		// If this is a URL token, also clone the import record
		if t.Kind == css_lexer.TURL {
			importRecordIndex := uint32(len(importRecordsOut))
			importRecordsOut = append(importRecordsOut, importRecordsIn[t.ImportRecordIndex])
			t.ImportRecordIndex = importRecordIndex
		}

		// Also search for URL tokens in this token's children
		if t.Children != nil {
			var children []Token
			children, importRecordsOut = CloneTokensWithImportRecords(*t.Children, importRecordsIn, children, importRecordsOut)
			t.Children = &children
		}

		tokensOut = append(tokensOut, t)
	}

	return tokensOut, importRecordsOut
}
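// Illustrative sketch (assumption, not from the vendored source): cloning
// appends each referenced import record to the output slice and rewrites the
// URL token's index to point into that slice, so a token that referenced
// importRecordsIn[7] may reference importRecordsOut[0] after the clone:
//
//	tokens, records := CloneTokensWithImportRecords(srcTokens, srcRecords, nil, nil)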
type Rule struct {
	Loc  logger.Loc
	Data R
}

type R interface {
	Equal(rule R) bool
	Hash() (uint32, bool)
}

func RulesEqual(a []Rule, b []Rule) bool {
	if len(a) != len(b) {
		return false
	}
	for i, c := range a {
		if !c.Data.Equal(b[i].Data) {
			return false
		}
	}
	return true
}

func HashRules(hash uint32, rules []Rule) uint32 {
	hash = helpers.HashCombine(hash, uint32(len(rules)))
	for _, child := range rules {
		if childHash, ok := child.Data.Hash(); ok {
			hash = helpers.HashCombine(hash, childHash)
		} else {
			hash = helpers.HashCombine(hash, 0)
		}
	}
	return hash
}

type RAtCharset struct {
	Encoding string
}

func (a *RAtCharset) Equal(rule R) bool {
	b, ok := rule.(*RAtCharset)
	return ok && a.Encoding == b.Encoding
}

func (r *RAtCharset) Hash() (uint32, bool) {
	hash := uint32(1)
	hash = helpers.HashCombineString(hash, r.Encoding)
	return hash, true
}

type RAtImport struct {
	ImportRecordIndex uint32
	ImportConditions  []Token
}

func (*RAtImport) Equal(rule R) bool {
	return false
}

func (r *RAtImport) Hash() (uint32, bool) {
	return 0, false
}

type RAtKeyframes struct {
	AtToken string
	Name    string
	Blocks  []KeyframeBlock
}

type KeyframeBlock struct {
	Selectors []string
	Rules     []Rule
}

func (a *RAtKeyframes) Equal(rule R) bool {
	b, ok := rule.(*RAtKeyframes)
	if ok && a.AtToken == b.AtToken && a.Name == b.Name && len(a.Blocks) == len(b.Blocks) {
		for i, ai := range a.Blocks {
			bi := b.Blocks[i]
			if len(ai.Selectors) != len(bi.Selectors) {
				return false
			}
			for j, aj := range ai.Selectors {
				if aj != bi.Selectors[j] {
					return false
				}
			}
			if !RulesEqual(ai.Rules, bi.Rules) {
				return false
			}
		}
		return true
	}
	return false
}

func (r *RAtKeyframes) Hash() (uint32, bool) {
	hash := uint32(2)
	hash = helpers.HashCombineString(hash, r.AtToken)
	hash = helpers.HashCombineString(hash, r.Name)
	hash = helpers.HashCombine(hash, uint32(len(r.Blocks)))
	for _, block := range r.Blocks {
		hash = helpers.HashCombine(hash, uint32(len(block.Selectors)))
		for _, sel := range block.Selectors {
			hash = helpers.HashCombineString(hash, sel)
		}
		hash = HashRules(hash, block.Rules)
	}
	return hash, true
}

type RKnownAt struct {
	AtToken string
	Prelude []Token
	Rules   []Rule
}
func (a *RKnownAt) Equal(rule R) bool {
	b, ok := rule.(*RKnownAt)
	return ok && a.AtToken == b.AtToken && TokensEqual(a.Prelude, b.Prelude) && RulesEqual(a.Rules, b.Rules)
}
func (r *RKnownAt) Hash() (uint32, bool) {
	hash := uint32(3)
	hash = helpers.HashCombineString(hash, r.AtToken)
	hash = HashTokens(hash, r.Prelude)
	hash = HashRules(hash, r.Rules)
	return hash, true
}

type RUnknownAt struct {
	AtToken string
	Prelude []Token
	Block   []Token
}
func (a *RUnknownAt) Equal(rule R) bool {
	b, ok := rule.(*RUnknownAt)
	return ok && a.AtToken == b.AtToken && TokensEqual(a.Prelude, b.Prelude) && TokensEqual(a.Block, b.Block)
}
func (r *RUnknownAt) Hash() (uint32, bool) {
	hash := uint32(4)
	hash = helpers.HashCombineString(hash, r.AtToken)
	hash = HashTokens(hash, r.Prelude)
	hash = HashTokens(hash, r.Block)
	return hash, true
}

type RSelector struct {
	Selectors []ComplexSelector
	Rules     []Rule
}

func (a *RSelector) Equal(rule R) bool {
	b, ok := rule.(*RSelector)
	if ok && len(a.Selectors) == len(b.Selectors) {
		for i, sel := range a.Selectors {
			if !sel.Equal(b.Selectors[i]) {
				return false
			}
		}
		return RulesEqual(a.Rules, b.Rules)
	}

	return false
}

func (r *RSelector) Hash() (uint32, bool) {
	hash := uint32(5)
	hash = helpers.HashCombine(hash, uint32(len(r.Selectors)))
	for _, complex := range r.Selectors {
		hash = helpers.HashCombine(hash, uint32(len(complex.Selectors)))
		for _, sel := range complex.Selectors {
			if sel.TypeSelector != nil {
				hash = helpers.HashCombineString(hash, sel.TypeSelector.Name.Text)
			} else {
				hash = helpers.HashCombine(hash, 0)
			}
			hash = helpers.HashCombine(hash, uint32(len(sel.SubclassSelectors)))
			for _, sub := range sel.SubclassSelectors {
				hash = helpers.HashCombine(hash, sub.Hash())
			}
			hash = helpers.HashCombineString(hash, sel.Combinator)
		}
	}
	hash = HashRules(hash, r.Rules)
	return hash, true
}

type RQualified struct {
	Prelude []Token
	Rules   []Rule
}

func (a *RQualified) Equal(rule R) bool {
	b, ok := rule.(*RQualified)
	return ok && TokensEqual(a.Prelude, b.Prelude) && RulesEqual(a.Rules, b.Rules)
}

func (r *RQualified) Hash() (uint32, bool) {
	hash := uint32(6)
	hash = HashTokens(hash, r.Prelude)
	hash = HashRules(hash, r.Rules)
	return hash, true
}

type RDeclaration struct {
	KeyText   string
	Value     []Token
	KeyRange  logger.Range
	Key       D // Compare using this instead of "KeyText" for speed
	Important bool
}

func (a *RDeclaration) Equal(rule R) bool {
	b, ok := rule.(*RDeclaration)
	return ok && a.KeyText == b.KeyText && TokensEqual(a.Value, b.Value) && a.Important == b.Important
}

func (r *RDeclaration) Hash() (uint32, bool) {
	hash := uint32(7)
	hash = helpers.HashCombine(hash, uint32(r.Key))
	hash = HashTokens(hash, r.Value)
	return hash, true
}

type RBadDeclaration struct {
	Tokens []Token
}

func (a *RBadDeclaration) Equal(rule R) bool {
	b, ok := rule.(*RBadDeclaration)
	return ok && TokensEqual(a.Tokens, b.Tokens)
}

func (r *RBadDeclaration) Hash() (uint32, bool) {
	hash := uint32(8)
	hash = HashTokens(hash, r.Tokens)
	return hash, true
}

type RComment struct {
	Text string
}

func (a *RComment) Equal(rule R) bool {
	b, ok := rule.(*RComment)
	return ok && a.Text == b.Text
}

func (r *RComment) Hash() (uint32, bool) {
	hash := uint32(9)
	hash = helpers.HashCombineString(hash, r.Text)
	return hash, true
}

type ComplexSelector struct {
	Selectors []CompoundSelector
}

func (a ComplexSelector) Equal(b ComplexSelector) bool {
	if len(a.Selectors) != len(b.Selectors) {
		return false
	}

	for i, ai := range a.Selectors {
		bi := b.Selectors[i]
		if ai.HasNestPrefix != bi.HasNestPrefix || ai.Combinator != bi.Combinator {
			return false
		}

		if ats, bts := ai.TypeSelector, bi.TypeSelector; (ats == nil) != (bts == nil) {
			return false
		} else if ats != nil && bts != nil && !ats.Equal(*bts) {
			return false
		}

		if len(ai.SubclassSelectors) != len(bi.SubclassSelectors) {
			return false
		}
		for j, aj := range ai.SubclassSelectors {
			if !aj.Equal(bi.SubclassSelectors[j]) {
				return false
			}
		}
	}

	return true
}

type CompoundSelector struct {
	HasNestPrefix     bool   // "&"
	Combinator        string // Optional, may be ""
	TypeSelector      *NamespacedName
	SubclassSelectors []SS
}

type NameToken struct {
	Kind css_lexer.T
	Text string
}

type NamespacedName struct {
	// If present, this is an identifier or "*" and is followed by a "|" character
	NamespacePrefix *NameToken

	// This is an identifier or "*"
	Name NameToken
}

func (a NamespacedName) Equal(b NamespacedName) bool {
	return a.Name == b.Name && (a.NamespacePrefix == nil) == (b.NamespacePrefix == nil) &&
		(a.NamespacePrefix == nil || b.NamespacePrefix == nil || *a.NamespacePrefix == *b.NamespacePrefix)
}

type SS interface {
	Equal(ss SS) bool
	Hash() uint32
}

type SSHash struct {
	Name string
}

func (a *SSHash) Equal(ss SS) bool {
	b, ok := ss.(*SSHash)
	return ok && a.Name == b.Name
}

func (ss *SSHash) Hash() uint32 {
	hash := uint32(1)
	hash = helpers.HashCombineString(hash, ss.Name)
	return hash
}

type SSClass struct {
	Name string
}

func (a *SSClass) Equal(ss SS) bool {
	b, ok := ss.(*SSClass)
	return ok && a.Name == b.Name
}

func (ss *SSClass) Hash() uint32 {
	hash := uint32(2)
	hash = helpers.HashCombineString(hash, ss.Name)
	return hash
}

type SSAttribute struct {
	NamespacedName  NamespacedName
	MatcherOp       string // Either "" or one of: "=" "~=" "|=" "^=" "$=" "*="
	MatcherValue    string
	MatcherModifier byte // Either 0 or one of: 'i' 'I' 's' 'S'
}

func (a *SSAttribute) Equal(ss SS) bool {
	b, ok := ss.(*SSAttribute)
	return ok && a.NamespacedName.Equal(b.NamespacedName) && a.MatcherOp == b.MatcherOp &&
		a.MatcherValue == b.MatcherValue && a.MatcherModifier == b.MatcherModifier
}

func (ss *SSAttribute) Hash() uint32 {
	hash := uint32(3)
	hash = helpers.HashCombineString(hash, ss.NamespacedName.Name.Text)
	hash = helpers.HashCombineString(hash, ss.MatcherOp)
	hash = helpers.HashCombineString(hash, ss.MatcherValue)
	return hash
}

type SSPseudoClass struct {
	Name      string
	Args      []Token
	IsElement bool // If true, this is prefixed by "::" instead of ":"
}

func (a *SSPseudoClass) Equal(ss SS) bool {
	b, ok := ss.(*SSPseudoClass)
	return ok && a.Name == b.Name && TokensEqual(a.Args, b.Args) && a.IsElement == b.IsElement
}

func (ss *SSPseudoClass) Hash() uint32 {
	hash := uint32(4)
	hash = helpers.HashCombineString(hash, ss.Name)
	hash = HashTokens(hash, ss.Args)
	return hash
}
642
vendor/github.com/evanw/esbuild/internal/css_ast/css_decl_table.go
generated
vendored
@ -1,642 +0,0 @@
package css_ast

type D uint16

const (
	DUnknown D = iota
	DAlignContent
	DAlignItems
	DAlignSelf
	DAlignmentBaseline
	DAll
	DAnimation
	DAnimationDelay
	DAnimationDirection
	DAnimationDuration
	DAnimationFillMode
	DAnimationIterationCount
	DAnimationName
	DAnimationPlayState
	DAnimationTimingFunction
	DBackfaceVisibility
	DBackground
	DBackgroundAttachment
	DBackgroundClip
	DBackgroundColor
	DBackgroundImage
	DBackgroundOrigin
	DBackgroundPosition
	DBackgroundPositionX
	DBackgroundPositionY
	DBackgroundRepeat
	DBackgroundSize
	DBaselineShift
	DBlockSize
	DBorder
	DBorderBlockEnd
	DBorderBlockEndColor
	DBorderBlockEndStyle
	DBorderBlockEndWidth
	DBorderBlockStart
	DBorderBlockStartColor
	DBorderBlockStartStyle
	DBorderBlockStartWidth
	DBorderBottom
	DBorderBottomColor
	DBorderBottomLeftRadius
	DBorderBottomRightRadius
	DBorderBottomStyle
	DBorderBottomWidth
	DBorderCollapse
	DBorderColor
	DBorderImage
	DBorderImageOutset
	DBorderImageRepeat
	DBorderImageSlice
	DBorderImageSource
	DBorderImageWidth
	DBorderInlineEnd
	DBorderInlineEndColor
	DBorderInlineEndStyle
	DBorderInlineEndWidth
	DBorderInlineStart
	DBorderInlineStartColor
	DBorderInlineStartStyle
	DBorderInlineStartWidth
	DBorderLeft
	DBorderLeftColor
	DBorderLeftStyle
	DBorderLeftWidth
	DBorderRadius
	DBorderRight
	DBorderRightColor
	DBorderRightStyle
	DBorderRightWidth
	DBorderSpacing
	DBorderStyle
	DBorderTop
	DBorderTopColor
	DBorderTopLeftRadius
	DBorderTopRightRadius
	DBorderTopStyle
	DBorderTopWidth
	DBorderWidth
	DBottom
	DBoxShadow
	DBoxSizing
	DBreakAfter
	DBreakBefore
	DBreakInside
	DCaptionSide
	DCaretColor
	DClear
	DClip
	DClipPath
	DClipRule
	DColor
	DColorInterpolation
	DColorInterpolationFilters
	DColumnCount
	DColumnFill
	DColumnGap
	DColumnRule
	DColumnRuleColor
	DColumnRuleStyle
	DColumnRuleWidth
	DColumnSpan
	DColumnWidth
	DColumns
	DContent
	DCounterIncrement
	DCounterReset
	DCssFloat
	DCssText
	DCursor
	DDirection
	DDisplay
	DDominantBaseline
	DEmptyCells
	DFill
	DFillOpacity
	DFillRule
	DFilter
	DFlex
	DFlexBasis
	DFlexDirection
	DFlexFlow
	DFlexGrow
	DFlexShrink
	DFlexWrap
	DFloat
	DFloodColor
	DFloodOpacity
	DFont
	DFontFamily
	DFontFeatureSettings
	DFontKerning
	DFontSize
	DFontSizeAdjust
	DFontStretch
	DFontStyle
	DFontSynthesis
	DFontVariant
	DFontVariantCaps
	DFontVariantEastAsian
	DFontVariantLigatures
	DFontVariantNumeric
	DFontVariantPosition
	DFontWeight
	DGap
	DGlyphOrientationVertical
	DGrid
	DGridArea
	DGridAutoColumns
	DGridAutoFlow
	DGridAutoRows
	DGridColumn
	DGridColumnEnd
	DGridColumnGap
	DGridColumnStart
	DGridGap
	DGridRow
	DGridRowEnd
	DGridRowGap
	DGridRowStart
	DGridTemplate
	DGridTemplateAreas
	DGridTemplateColumns
	DGridTemplateRows
	DHeight
	DHyphens
	DImageOrientation
	DImageRendering
	DInlineSize
	DInset
	DJustifyContent
	DJustifyItems
	DJustifySelf
	DLeft
	DLetterSpacing
	DLightingColor
	DLineBreak
	DLineHeight
	DListStyle
	DListStyleImage
	DListStylePosition
	DListStyleType
	DMargin
	DMarginBlockEnd
	DMarginBlockStart
	DMarginBottom
	DMarginInlineEnd
	DMarginInlineStart
	DMarginLeft
	DMarginRight
	DMarginTop
	DMarker
	DMarkerEnd
	DMarkerMid
	DMarkerStart
	DMask
	DMaskComposite
	DMaskImage
	DMaskPosition
	DMaskRepeat
	DMaskSize
	DMaskType
	DMaxBlockSize
	DMaxHeight
	DMaxInlineSize
	DMaxWidth
	DMinBlockSize
	DMinHeight
	DMinInlineSize
	DMinWidth
	DObjectFit
	DObjectPosition
	DOpacity
	DOrder
	DOrphans
	DOutline
	DOutlineColor
	DOutlineOffset
	DOutlineStyle
	DOutlineWidth
	DOverflow
	DOverflowAnchor
	DOverflowWrap
	DOverflowX
	DOverflowY
	DOverscrollBehavior
	DOverscrollBehaviorBlock
	DOverscrollBehaviorInline
	DOverscrollBehaviorX
	DOverscrollBehaviorY
	DPadding
	DPaddingBlockEnd
	DPaddingBlockStart
	DPaddingBottom
	DPaddingInlineEnd
	DPaddingInlineStart
	DPaddingLeft
	DPaddingRight
	DPaddingTop
	DPageBreakAfter
	DPageBreakBefore
	DPageBreakInside
	DPaintOrder
	DPerspective
	DPerspectiveOrigin
	DPlaceContent
	DPlaceItems
	DPlaceSelf
	DPointerEvents
	DPosition
	DQuotes
	DResize
	DRight
	DRotate
	DRowGap
	DRubyAlign
	DRubyPosition
	DScale
	DScrollBehavior
	DShapeRendering
	DStopColor
	DStopOpacity
	DStroke
	DStrokeDasharray
	DStrokeDashoffset
	DStrokeLinecap
	DStrokeLinejoin
	DStrokeMiterlimit
	DStrokeOpacity
	DStrokeWidth
	DTabSize
	DTableLayout
	DTextAlign
	DTextAlignLast
	DTextAnchor
	DTextCombineUpright
	DTextDecoration
	DTextDecorationColor
	DTextDecorationLine
	DTextDecorationStyle
	DTextEmphasis
	DTextEmphasisColor
	DTextEmphasisPosition
	DTextEmphasisStyle
	DTextIndent
	DTextJustify
	DTextOrientation
	DTextOverflow
	DTextRendering
	DTextShadow
	DTextTransform
	DTextUnderlinePosition
	DTop
	DTouchAction
	DTransform
	DTransformBox
	DTransformOrigin
	DTransformStyle
	DTransition
	DTransitionDelay
	DTransitionDuration
	DTransitionProperty
	DTransitionTimingFunction
	DTranslate
	DUnicodeBidi
	DUserSelect
	DVerticalAlign
	DVisibility
	DWhiteSpace
	DWidows
	DWidth
	DWillChange
	DWordBreak
	DWordSpacing
	DWordWrap
	DWritingMode
	DZIndex
	DZoom
)
var KnownDeclarations = map[string]D{
	"align-content": DAlignContent,
	"align-items": DAlignItems,
	"align-self": DAlignSelf,
	"alignment-baseline": DAlignmentBaseline,
	"all": DAll,
	"animation": DAnimation,
	"animation-delay": DAnimationDelay,
	"animation-direction": DAnimationDirection,
	"animation-duration": DAnimationDuration,
	"animation-fill-mode": DAnimationFillMode,
	"animation-iteration-count": DAnimationIterationCount,
	"animation-name": DAnimationName,
	"animation-play-state": DAnimationPlayState,
	"animation-timing-function": DAnimationTimingFunction,
	"backface-visibility": DBackfaceVisibility,
	"background": DBackground,
	"background-attachment": DBackgroundAttachment,
	"background-clip": DBackgroundClip,
	"background-color": DBackgroundColor,
	"background-image": DBackgroundImage,
	"background-origin": DBackgroundOrigin,
	"background-position": DBackgroundPosition,
	"background-position-x": DBackgroundPositionX,
	"background-position-y": DBackgroundPositionY,
	"background-repeat": DBackgroundRepeat,
	"background-size": DBackgroundSize,
	"baseline-shift": DBaselineShift,
	"block-size": DBlockSize,
	"border": DBorder,
	"border-block-end": DBorderBlockEnd,
	"border-block-end-color": DBorderBlockEndColor,
	"border-block-end-style": DBorderBlockEndStyle,
	"border-block-end-width": DBorderBlockEndWidth,
	"border-block-start": DBorderBlockStart,
	"border-block-start-color": DBorderBlockStartColor,
	"border-block-start-style": DBorderBlockStartStyle,
	"border-block-start-width": DBorderBlockStartWidth,
	"border-bottom": DBorderBottom,
	"border-bottom-color": DBorderBottomColor,
	"border-bottom-left-radius": DBorderBottomLeftRadius,
	"border-bottom-right-radius": DBorderBottomRightRadius,
	"border-bottom-style": DBorderBottomStyle,
	"border-bottom-width": DBorderBottomWidth,
	"border-collapse": DBorderCollapse,
	"border-color": DBorderColor,
	"border-image": DBorderImage,
	"border-image-outset": DBorderImageOutset,
	"border-image-repeat": DBorderImageRepeat,
	"border-image-slice": DBorderImageSlice,
	"border-image-source": DBorderImageSource,
	"border-image-width": DBorderImageWidth,
	"border-inline-end": DBorderInlineEnd,
	"border-inline-end-color": DBorderInlineEndColor,
	"border-inline-end-style": DBorderInlineEndStyle,
	"border-inline-end-width": DBorderInlineEndWidth,
	"border-inline-start": DBorderInlineStart,
	"border-inline-start-color": DBorderInlineStartColor,
	"border-inline-start-style": DBorderInlineStartStyle,
	"border-inline-start-width": DBorderInlineStartWidth,
	"border-left": DBorderLeft,
	"border-left-color": DBorderLeftColor,
	"border-left-style": DBorderLeftStyle,
	"border-left-width": DBorderLeftWidth,
	"border-radius": DBorderRadius,
	"border-right": DBorderRight,
	"border-right-color": DBorderRightColor,
	"border-right-style": DBorderRightStyle,
	"border-right-width": DBorderRightWidth,
	"border-spacing": DBorderSpacing,
	"border-style": DBorderStyle,
	"border-top": DBorderTop,
	"border-top-color": DBorderTopColor,
	"border-top-left-radius": DBorderTopLeftRadius,
	"border-top-right-radius": DBorderTopRightRadius,
	"border-top-style": DBorderTopStyle,
	"border-top-width": DBorderTopWidth,
	"border-width": DBorderWidth,
	"bottom": DBottom,
	"box-shadow": DBoxShadow,
	"box-sizing": DBoxSizing,
	"break-after": DBreakAfter,
	"break-before": DBreakBefore,
	"break-inside": DBreakInside,
	"caption-side": DCaptionSide,
	"caret-color": DCaretColor,
	"clear": DClear,
	"clip": DClip,
	"clip-path": DClipPath,
	"clip-rule": DClipRule,
	"color": DColor,
	"color-interpolation": DColorInterpolation,
	"color-interpolation-filters": DColorInterpolationFilters,
	"column-count": DColumnCount,
	"column-fill": DColumnFill,
	"column-gap": DColumnGap,
	"column-rule": DColumnRule,
	"column-rule-color": DColumnRuleColor,
	"column-rule-style": DColumnRuleStyle,
	"column-rule-width": DColumnRuleWidth,
	"column-span": DColumnSpan,
	"column-width": DColumnWidth,
	"columns": DColumns,
	"content": DContent,
	"counter-increment": DCounterIncrement,
	"counter-reset": DCounterReset,
	"css-float": DCssFloat,
	"css-text": DCssText,
	"cursor": DCursor,
	"direction": DDirection,
	"display": DDisplay,
	"dominant-baseline": DDominantBaseline,
	"empty-cells": DEmptyCells,
	"fill": DFill,
	"fill-opacity": DFillOpacity,
	"fill-rule": DFillRule,
	"filter": DFilter,
	"flex": DFlex,
	"flex-basis": DFlexBasis,
	"flex-direction": DFlexDirection,
	"flex-flow": DFlexFlow,
	"flex-grow": DFlexGrow,
	"flex-shrink": DFlexShrink,
	"flex-wrap": DFlexWrap,
	"float": DFloat,
	"flood-color": DFloodColor,
	"flood-opacity": DFloodOpacity,
	"font": DFont,
	"font-family": DFontFamily,
	"font-feature-settings": DFontFeatureSettings,
	"font-kerning": DFontKerning,
	"font-size": DFontSize,
	"font-size-adjust": DFontSizeAdjust,
	"font-stretch": DFontStretch,
	"font-style": DFontStyle,
	"font-synthesis": DFontSynthesis,
	"font-variant": DFontVariant,
	"font-variant-caps": DFontVariantCaps,
	"font-variant-east-asian": DFontVariantEastAsian,
	"font-variant-ligatures": DFontVariantLigatures,
	"font-variant-numeric": DFontVariantNumeric,
	"font-variant-position": DFontVariantPosition,
	"font-weight": DFontWeight,
	"gap": DGap,
	"glyph-orientation-vertical": DGlyphOrientationVertical,
	"grid": DGrid,
	"grid-area": DGridArea,
	"grid-auto-columns": DGridAutoColumns,
	"grid-auto-flow": DGridAutoFlow,
	"grid-auto-rows": DGridAutoRows,
	"grid-column": DGridColumn,
	"grid-column-end": DGridColumnEnd,
	"grid-column-gap": DGridColumnGap,
	"grid-column-start": DGridColumnStart,
	"grid-gap": DGridGap,
	"grid-row": DGridRow,
	"grid-row-end": DGridRowEnd,
	"grid-row-gap": DGridRowGap,
	"grid-row-start": DGridRowStart,
	"grid-template": DGridTemplate,
	"grid-template-areas": DGridTemplateAreas,
	"grid-template-columns": DGridTemplateColumns,
	"grid-template-rows": DGridTemplateRows,
	"height": DHeight,
	"hyphens": DHyphens,
	"image-orientation": DImageOrientation,
	"image-rendering": DImageRendering,
	"inline-size": DInlineSize,
	"inset": DInset,
	"justify-content": DJustifyContent,
	"justify-items": DJustifyItems,
	"justify-self": DJustifySelf,
	"left": DLeft,
	"letter-spacing": DLetterSpacing,
	"lighting-color": DLightingColor,
	"line-break": DLineBreak,
	"line-height": DLineHeight,
	"list-style": DListStyle,
	"list-style-image": DListStyleImage,
	"list-style-position": DListStylePosition,
	"list-style-type": DListStyleType,
	"margin": DMargin,
	"margin-block-end": DMarginBlockEnd,
	"margin-block-start": DMarginBlockStart,
	"margin-bottom": DMarginBottom,
	"margin-inline-end": DMarginInlineEnd,
	"margin-inline-start": DMarginInlineStart,
	"margin-left": DMarginLeft,
	"margin-right": DMarginRight,
	"margin-top": DMarginTop,
	"marker": DMarker,
	"marker-end": DMarkerEnd,
	"marker-mid": DMarkerMid,
	"marker-start": DMarkerStart,
	"mask": DMask,
	"mask-composite": DMaskComposite,
	"mask-image": DMaskImage,
	"mask-position": DMaskPosition,
	"mask-repeat": DMaskRepeat,
	"mask-size": DMaskSize,
	"mask-type": DMaskType,
	"max-block-size": DMaxBlockSize,
	"max-height": DMaxHeight,
	"max-inline-size": DMaxInlineSize,
	"max-width": DMaxWidth,
	"min-block-size": DMinBlockSize,
	"min-height": DMinHeight,
	"min-inline-size": DMinInlineSize,
	"min-width": DMinWidth,
	"object-fit": DObjectFit,
	"object-position": DObjectPosition,
	"opacity": DOpacity,
	"order": DOrder,
	"orphans": DOrphans,
	"outline": DOutline,
	"outline-color": DOutlineColor,
	"outline-offset": DOutlineOffset,
	"outline-style": DOutlineStyle,
	"outline-width": DOutlineWidth,
	"overflow": DOverflow,
	"overflow-anchor": DOverflowAnchor,
	"overflow-wrap": DOverflowWrap,
	"overflow-x": DOverflowX,
	"overflow-y": DOverflowY,
	"overscroll-behavior": DOverscrollBehavior,
	"overscroll-behavior-block": DOverscrollBehaviorBlock,
	"overscroll-behavior-inline": DOverscrollBehaviorInline,
	"overscroll-behavior-x": DOverscrollBehaviorX,
	"overscroll-behavior-y": DOverscrollBehaviorY,
	"padding": DPadding,
	"padding-block-end": DPaddingBlockEnd,
	"padding-block-start": DPaddingBlockStart,
	"padding-bottom": DPaddingBottom,
	"padding-inline-end": DPaddingInlineEnd,
	"padding-inline-start": DPaddingInlineStart,
	"padding-left": DPaddingLeft,
	"padding-right": DPaddingRight,
	"padding-top": DPaddingTop,
	"page-break-after": DPageBreakAfter,
	"page-break-before": DPageBreakBefore,
	"page-break-inside": DPageBreakInside,
	"paint-order": DPaintOrder,
	"perspective": DPerspective,
	"perspective-origin": DPerspectiveOrigin,
	"place-content": DPlaceContent,
	"place-items": DPlaceItems,
	"place-self": DPlaceSelf,
	"pointer-events": DPointerEvents,
	"position": DPosition,
	"quotes": DQuotes,
	"resize": DResize,
	"right": DRight,
	"rotate": DRotate,
	"row-gap": DRowGap,
	"ruby-align": DRubyAlign,
	"ruby-position": DRubyPosition,
	"scale": DScale,
	"scroll-behavior": DScrollBehavior,
	"shape-rendering": DShapeRendering,
	"stop-color": DStopColor,
	"stop-opacity": DStopOpacity,
	"stroke": DStroke,
	"stroke-dasharray": DStrokeDasharray,
	"stroke-dashoffset": DStrokeDashoffset,
	"stroke-linecap": DStrokeLinecap,
	"stroke-linejoin": DStrokeLinejoin,
	"stroke-miterlimit": DStrokeMiterlimit,
	"stroke-opacity": DStrokeOpacity,
	"stroke-width": DStrokeWidth,
	"tab-size": DTabSize,
	"table-layout": DTableLayout,
	"text-align": DTextAlign,
	"text-align-last": DTextAlignLast,
	"text-anchor": DTextAnchor,
	"text-combine-upright": DTextCombineUpright,
	"text-decoration": DTextDecoration,
	"text-decoration-color": DTextDecorationColor,
	"text-decoration-line": DTextDecorationLine,
	"text-decoration-style": DTextDecorationStyle,
	"text-emphasis": DTextEmphasis,
	"text-emphasis-color": DTextEmphasisColor,
	"text-emphasis-position": DTextEmphasisPosition,
	"text-emphasis-style": DTextEmphasisStyle,
	"text-indent": DTextIndent,
	"text-justify": DTextJustify,
	"text-orientation": DTextOrientation,
	"text-overflow": DTextOverflow,
	"text-rendering": DTextRendering,
	"text-shadow": DTextShadow,
	"text-transform": DTextTransform,
	"text-underline-position": DTextUnderlinePosition,
	"top": DTop,
	"touch-action": DTouchAction,
	"transform": DTransform,
	"transform-box": DTransformBox,
	"transform-origin": DTransformOrigin,
	"transform-style": DTransformStyle,
	"transition": DTransition,
	"transition-delay": DTransitionDelay,
	"transition-duration": DTransitionDuration,
	"transition-property": DTransitionProperty,
	"transition-timing-function": DTransitionTimingFunction,
	"translate": DTranslate,
	"unicode-bidi": DUnicodeBidi,
	"user-select": DUserSelect,
	"vertical-align": DVerticalAlign,
	"visibility": DVisibility,
	"white-space": DWhiteSpace,
	"widows": DWidows,
	"width": DWidth,
	"will-change": DWillChange,
	"word-break": DWordBreak,
	"word-spacing": DWordSpacing,
	"word-wrap": DWordWrap,
	"writing-mode": DWritingMode,
	"z-index": DZIndex,
	"zoom": DZoom,
}
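// Illustrative sketch (assumed usage, not from the vendored source): a parser
// can resolve a lowercased property name with a single map lookup; a missing
// key yields the zero value DUnknown:
//
//	key := KnownDeclarations["margin-top"] // DMarginTop
//	if key == DUnknown {
//		// not a known declaration
//	}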
969
vendor/github.com/evanw/esbuild/internal/css_lexer/css_lexer.go
generated
vendored
@ -1,969 +0,0 @@
package css_lexer

import (
	"strings"
	"unicode/utf8"

	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/logger"
)

// The lexer converts a source file to a stream of tokens. Unlike esbuild's
// JavaScript lexer, this CSS lexer runs to completion before the CSS parser
// begins, resulting in a single array of all tokens in the file.

type T uint8

const eof = -1

const (
	TEndOfFile T = iota

	TAtKeyword
	TBadString
	TBadURL
	TCDC // "-->"
	TCDO // "<!--"
	TCloseBrace
	TCloseBracket
	TCloseParen
	TColon
	TComma
	TDelim
	TDelimAmpersand
	TDelimAsterisk
	TDelimBar
	TDelimCaret
	TDelimDollar
	TDelimDot
	TDelimEquals
	TDelimExclamation
	TDelimGreaterThan
	TDelimMinus
	TDelimPlus
	TDelimSlash
	TDelimTilde
	TDimension
	TFunction
	THash
	TIdent
	TNumber
	TOpenBrace
	TOpenBracket
	TOpenParen
	TPercentage
	TSemicolon
	TString
	TURL
	TWhitespace
)

var tokenToString = []string{
	"end of file",
	"@-keyword",
	"bad string token",
	"bad URL token",
	"\"-->\"",
	"\"<!--\"",
	"\"}\"",
	"\"]\"",
	"\")\"",
	"\":\"",
	"\",\"",
	"delimiter",
	"\"&\"",
	"\"*\"",
	"\"|\"",
	"\"^\"",
	"\"$\"",
	"\".\"",
	"\"=\"",
	"\"!\"",
	"\">\"",
	"\"-\"",
	"\"+\"",
	"\"/\"",
	"\"~\"",
	"dimension",
	"function token",
	"hash token",
	"identifier",
	"number",
	"\"{\"",
	"\"[\"",
	"\"(\"",
	"percentage",
	"\";\"",
	"string token",
	"URL token",
	"whitespace",
}

func (t T) String() string {
	return tokenToString[t]
}
func (t T) IsNumeric() bool {
	return t == TNumber || t == TPercentage || t == TDimension
}

// This token struct is designed to be memory-efficient. It just references a
// range in the input file instead of directly containing the substring of text
// since a range takes up less memory than a string.
type Token struct {
	Range      logger.Range // 8 bytes
	UnitOffset uint16       // 2 bytes
	Kind       T            // 1 byte
	IsID       bool         // 1 byte
}

func (token Token) DecodedText(contents string) string {
	raw := contents[token.Range.Loc.Start:token.Range.End()]

	switch token.Kind {
	case TIdent, TDimension:
		return decodeEscapesInToken(raw)

	case TAtKeyword, THash:
		return decodeEscapesInToken(raw[1:])

	case TFunction:
		return decodeEscapesInToken(raw[:len(raw)-1])

	case TString:
		return decodeEscapesInToken(raw[1 : len(raw)-1])

	case TURL:
		start := 4
		end := len(raw) - 1

		// Trim leading and trailing whitespace
		for start < end && isWhitespace(rune(raw[start])) {
			start++
		}
		for start < end && isWhitespace(rune(raw[end-1])) {
			end--
		}

		return decodeEscapesInToken(raw[start:end])
	}

	return raw
}
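// Illustrative worked examples (assumptions, not from the vendored source):
// DecodedText strips the surrounding syntax before decoding escapes, so a
// THash token "#main" yields "main", a TFunction token "rgb(" yields "rgb",
// a TString token "'abc'" yields "abc", and a TURL token "url( foo.png )"
// yields "foo.png" after the whitespace inside the parentheses is trimmed.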
|
|
||||||
|
|
||||||
type lexer struct {
|
|
||||||
log logger.Log
|
|
||||||
source logger.Source
|
|
||||||
tracker logger.LineColumnTracker
|
|
||||||
current int
|
|
||||||
codePoint rune
|
|
||||||
Token Token
|
|
||||||
legalCommentsBefore []Comment
|
|
||||||
approximateNewlineCount int
|
|
||||||
sourceMappingURL logger.Span
|
|
||||||
}
|
|
||||||
|
|
||||||
type Comment struct {
|
|
||||||
Text string
|
|
||||||
Loc logger.Loc
|
|
||||||
TokenIndexAfter uint32
|
|
||||||
}
|
|
||||||
|
|
||||||
type TokenizeResult struct {
|
|
||||||
Tokens []Token
|
|
||||||
LegalComments []Comment
|
|
||||||
ApproximateLineCount int32
|
|
||||||
SourceMapComment logger.Span
|
|
||||||
}
|
|
||||||
|
|
||||||
func Tokenize(log logger.Log, source logger.Source) TokenizeResult {
|
|
||||||
lexer := lexer{
|
|
||||||
log: log,
|
|
||||||
source: source,
|
|
||||||
tracker: logger.MakeLineColumnTracker(&source),
|
|
||||||
}
|
|
||||||
lexer.step()
|
|
||||||
|
|
||||||
// The U+FEFF character is usually a zero-width non-breaking space. However,
|
|
||||||
// when it's used at the start of a text stream it is called a BOM (byte order
|
|
||||||
// mark) instead and indicates that the text stream is UTF-8 encoded. This is
|
|
||||||
// problematic for us because CSS does not treat U+FEFF as whitespace. Only
|
|
||||||
// " \t\r\n\f" characters are treated as whitespace. Skip over the BOM if it
|
|
||||||
// is present so it doesn't cause us trouble when we try to parse it.
|
|
||||||
if lexer.codePoint == '\uFEFF' {
|
|
||||||
lexer.step()
|
|
||||||
}
|
|
||||||
|
|
||||||
lexer.next()
|
|
||||||
var tokens []Token
|
|
||||||
var comments []Comment
|
|
||||||
for lexer.Token.Kind != TEndOfFile {
|
|
||||||
if lexer.legalCommentsBefore != nil {
|
|
||||||
for _, comment := range lexer.legalCommentsBefore {
|
|
||||||
comment.TokenIndexAfter = uint32(len(tokens))
|
|
||||||
comments = append(comments, comment)
|
|
||||||
}
|
|
||||||
lexer.legalCommentsBefore = nil
|
|
||||||
}
|
|
||||||
tokens = append(tokens, lexer.Token)
|
|
||||||
lexer.next()
|
|
||||||
}
|
|
||||||
if lexer.legalCommentsBefore != nil {
|
|
||||||
for _, comment := range lexer.legalCommentsBefore {
|
|
||||||
comment.TokenIndexAfter = uint32(len(tokens))
|
|
||||||
comments = append(comments, comment)
|
|
||||||
}
|
|
||||||
lexer.legalCommentsBefore = nil
|
|
||||||
}
|
|
||||||
return TokenizeResult{
|
|
||||||
Tokens: tokens,
|
|
||||||
LegalComments: comments,
|
|
||||||
ApproximateLineCount: int32(lexer.approximateNewlineCount) + 1,
|
|
||||||
SourceMapComment: lexer.sourceMappingURL,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lexer *lexer) step() {
	codePoint, width := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])

	// Use -1 to indicate the end of the file
	if width == 0 {
		codePoint = eof
	}

	// Track the approximate number of newlines in the file so we can preallocate
	// the line offset table in the printer for source maps. The line offset table
	// is the #1 highest allocation in the heap profile, so this is worth doing.
	// This count is approximate because it handles "\n" and "\r\n" (the common
	// cases) but not "\r" or "\u2028" or "\u2029". Getting this wrong is harmless
	// because it's only a preallocation. The array will just grow if it's too small.
	if codePoint == '\n' {
		lexer.approximateNewlineCount++
	}

	lexer.codePoint = codePoint
	lexer.Token.Range.Len = int32(lexer.current) - lexer.Token.Range.Loc.Start
	lexer.current += width
}

func (lexer *lexer) next() {
	// Reference: https://www.w3.org/TR/css-syntax-3/

	for {
		lexer.Token = Token{Range: logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}}}

		switch lexer.codePoint {
		case eof:
			lexer.Token.Kind = TEndOfFile

		case '/':
			lexer.step()
			switch lexer.codePoint {
			case '*':
				lexer.step()
				lexer.consumeToEndOfMultiLineComment(lexer.Token.Range)
				continue
			case '/':
				lexer.step()
				lexer.consumeToEndOfSingleLineComment()
				continue
			}
			lexer.Token.Kind = TDelimSlash

		case ' ', '\t', '\n', '\r', '\f':
			lexer.step()
			for {
				if isWhitespace(lexer.codePoint) {
					lexer.step()
				} else if lexer.codePoint == '/' && lexer.current < len(lexer.source.Contents) && lexer.source.Contents[lexer.current] == '*' {
					startRange := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 2}
					lexer.step()
					lexer.step()
					lexer.consumeToEndOfMultiLineComment(startRange)
				} else {
					break
				}
			}
			lexer.Token.Kind = TWhitespace

		case '"', '\'':
			lexer.Token.Kind = lexer.consumeString()

		case '#':
			lexer.step()
			if IsNameContinue(lexer.codePoint) || lexer.isValidEscape() {
				lexer.Token.Kind = THash
				if lexer.wouldStartIdentifier() {
					lexer.Token.IsID = true
				}
				lexer.consumeName()
			} else {
				lexer.Token.Kind = TDelim
			}

		case '(':
			lexer.step()
			lexer.Token.Kind = TOpenParen

		case ')':
			lexer.step()
			lexer.Token.Kind = TCloseParen

		case '[':
			lexer.step()
			lexer.Token.Kind = TOpenBracket

		case ']':
			lexer.step()
			lexer.Token.Kind = TCloseBracket

		case '{':
			lexer.step()
			lexer.Token.Kind = TOpenBrace

		case '}':
			lexer.step()
			lexer.Token.Kind = TCloseBrace

		case ',':
			lexer.step()
			lexer.Token.Kind = TComma

		case ':':
			lexer.step()
			lexer.Token.Kind = TColon

		case ';':
			lexer.step()
			lexer.Token.Kind = TSemicolon

		case '+':
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimPlus
			}

		case '.':
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimDot
			}

		case '-':
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else if lexer.current+2 <= len(lexer.source.Contents) && lexer.source.Contents[lexer.current:lexer.current+2] == "->" {
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.Token.Kind = TCDC
			} else if lexer.wouldStartIdentifier() {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimMinus
			}

		case '<':
			if lexer.current+3 <= len(lexer.source.Contents) && lexer.source.Contents[lexer.current:lexer.current+3] == "!--" {
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.Token.Kind = TCDO
			} else {
				lexer.step()
				lexer.Token.Kind = TDelim
			}

		case '@':
			lexer.step()
			if lexer.wouldStartIdentifier() {
				lexer.consumeName()
				lexer.Token.Kind = TAtKeyword
			} else {
				lexer.Token.Kind = TDelim
			}

		case '\\':
			if lexer.isValidEscape() {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.log.Add(logger.Error, &lexer.tracker, lexer.Token.Range, "Invalid escape")
				lexer.Token.Kind = TDelim
			}

		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			lexer.Token.Kind = lexer.consumeNumeric()

		case '>':
			lexer.step()
			lexer.Token.Kind = TDelimGreaterThan

		case '~':
			lexer.step()
			lexer.Token.Kind = TDelimTilde

		case '&':
			lexer.step()
			lexer.Token.Kind = TDelimAmpersand

		case '*':
			lexer.step()
			lexer.Token.Kind = TDelimAsterisk

		case '|':
			lexer.step()
			lexer.Token.Kind = TDelimBar

		case '!':
			lexer.step()
			lexer.Token.Kind = TDelimExclamation

		case '=':
			lexer.step()
			lexer.Token.Kind = TDelimEquals

		case '^':
			lexer.step()
			lexer.Token.Kind = TDelimCaret

		case '$':
			lexer.step()
			lexer.Token.Kind = TDelimDollar

		default:
			if IsNameStart(lexer.codePoint) {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelim
			}
		}

		return
	}
}

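// Editor's illustrative note (not part of the vendored file): for an input
// like `a { color: #fff }` the dispatch above yields roughly TIdent,
// TWhitespace, TOpenBrace, TWhitespace, TIdent, TColon, TWhitespace, THash
// (with IsID set), TWhitespace, TCloseBrace, and finally TEndOfFile.
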
func (lexer *lexer) consumeToEndOfMultiLineComment(startRange logger.Range) {
	startOfSourceMappingURL := 0
	isLegalComment := false

	switch lexer.codePoint {
	case '#', '@':
		// Keep track of the contents of the "sourceMappingURL=" comment
		if strings.HasPrefix(lexer.source.Contents[lexer.current:], " sourceMappingURL=") {
			startOfSourceMappingURL = lexer.current + len(" sourceMappingURL=")
		}

	case '!':
		// Remember if this is a legal comment
		isLegalComment = true
	}

	for {
		switch lexer.codePoint {
		case '*':
			endOfSourceMappingURL := lexer.current - 1
			lexer.step()
			if lexer.codePoint == '/' {
				commentEnd := lexer.current
				lexer.step()

				// Record the source mapping URL
				if startOfSourceMappingURL != 0 {
					r := logger.Range{Loc: logger.Loc{Start: int32(startOfSourceMappingURL)}}
					text := lexer.source.Contents[startOfSourceMappingURL:endOfSourceMappingURL]
					for int(r.Len) < len(text) && !isWhitespace(rune(text[r.Len])) {
						r.Len++
					}
					lexer.sourceMappingURL = logger.Span{Text: text[:r.Len], Range: r}
				}

				// Record legal comments
				if text := lexer.source.Contents[startRange.Loc.Start:commentEnd]; isLegalComment || containsAtPreserveOrAtLicense(text) {
					text = helpers.RemoveMultiLineCommentIndent(lexer.source.Contents[:startRange.Loc.Start], text)
					lexer.legalCommentsBefore = append(lexer.legalCommentsBefore, Comment{Loc: startRange.Loc, Text: text})
				}
				return
			}

		case eof: // This indicates the end of the file
			lexer.log.AddWithNotes(logger.Error, &lexer.tracker, logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Expected \"*/\" to terminate multi-line comment",
				[]logger.MsgData{lexer.tracker.MsgData(startRange, "The multi-line comment starts here:")})
			return

		default:
			lexer.step()
		}
	}
}

func containsAtPreserveOrAtLicense(text string) bool {
	for i, c := range text {
		if c == '@' && (strings.HasPrefix(text[i+1:], "preserve") || strings.HasPrefix(text[i+1:], "license")) {
			return true
		}
	}
	return false
}

func (lexer *lexer) consumeToEndOfSingleLineComment() {
	for !isNewline(lexer.codePoint) && lexer.codePoint != eof {
		lexer.step()
	}
	lexer.log.Add(logger.Warning, &lexer.tracker, lexer.Token.Range, "Comments in CSS use \"/* ... */\" instead of \"//\"")
}

func (lexer *lexer) isValidEscape() bool {
	if lexer.codePoint != '\\' {
		return false
	}
	c, _ := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])
	return !isNewline(c)
}

func (lexer *lexer) wouldStartIdentifier() bool {
	if IsNameStart(lexer.codePoint) {
		return true
	}

	if lexer.codePoint == '-' {
		c, width := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])
		if c == utf8.RuneError && width <= 1 {
			return false // Decoding error
		}
		if IsNameStart(c) || c == '-' {
			return true
		}
		if c == '\\' {
			c2, _ := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current+width:])
			return !isNewline(c2)
		}
		return false
	}

	return lexer.isValidEscape()
}

func WouldStartIdentifierWithoutEscapes(text string) bool {
	c, width := utf8.DecodeRuneInString(text)
	if c == utf8.RuneError && width <= 1 {
		return false // Decoding error
	}
	if IsNameStart(c) {
		return true
	}

	if c == '-' {
		c2, width2 := utf8.DecodeRuneInString(text[width:])
		if c2 == utf8.RuneError && width2 <= 1 {
			return false // Decoding error
		}
		if IsNameStart(c2) || c2 == '-' {
			return true
		}
	}
	return false
}

func (lexer *lexer) wouldStartNumber() bool {
	if lexer.codePoint >= '0' && lexer.codePoint <= '9' {
		return true
	} else if lexer.codePoint == '.' {
		contents := lexer.source.Contents
		if lexer.current < len(contents) {
			c := contents[lexer.current]
			return c >= '0' && c <= '9'
		}
	} else if lexer.codePoint == '+' || lexer.codePoint == '-' {
		contents := lexer.source.Contents
		n := len(contents)
		if lexer.current < n {
			c := contents[lexer.current]
			if c >= '0' && c <= '9' {
				return true
			}
			if c == '.' && lexer.current+1 < n {
				c = contents[lexer.current+1]
				return c >= '0' && c <= '9'
			}
		}
	}
	return false
}

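// Editor's illustrative note (not part of the vendored file): the lookahead
// above means "+.5", "-3", ".7", and "42" all start numeric tokens, while a
// bare "+", "-x", or "." falls through to the delimiter/identifier cases.
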
func (lexer *lexer) consumeName() string {
	// Common case: no escapes, identifier is a substring of the input
	for IsNameContinue(lexer.codePoint) {
		lexer.step()
	}
	raw := lexer.source.Contents[lexer.Token.Range.Loc.Start:lexer.Token.Range.End()]
	if !lexer.isValidEscape() {
		return raw
	}

	// Uncommon case: escapes, identifier is allocated
	sb := strings.Builder{}
	sb.WriteString(raw)
	sb.WriteRune(lexer.consumeEscape())
	for {
		if IsNameContinue(lexer.codePoint) {
			sb.WriteRune(lexer.codePoint)
			lexer.step()
		} else if lexer.isValidEscape() {
			sb.WriteRune(lexer.consumeEscape())
		} else {
			break
		}
	}
	return sb.String()
}

func (lexer *lexer) consumeEscape() rune {
	lexer.step() // Skip the backslash
	c := lexer.codePoint

	if hex, ok := isHex(c); ok {
		lexer.step()
		for i := 0; i < 5; i++ {
			if next, ok := isHex(lexer.codePoint); ok {
				lexer.step()
				hex = hex*16 + next
			} else {
				break
			}
		}
		if isWhitespace(lexer.codePoint) {
			lexer.step()
		}
		if hex == 0 || (hex >= 0xD800 && hex <= 0xDFFF) || hex > 0x10FFFF {
			return utf8.RuneError
		}
		return rune(hex)
	}

	if c == eof {
		return utf8.RuneError
	}

	lexer.step()
	return c
}

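// Editor's illustrative note (not part of the vendored file): a hex escape is
// at most six digits and one trailing whitespace character is consumed as its
// terminator, so `\41 BC` decodes to "ABC" while `\41BC` decodes to the single
// code point U+41BC. NUL, surrogates, and out-of-range values map to U+FFFD.
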
func (lexer *lexer) consumeIdentLike() T {
	name := lexer.consumeName()

	if lexer.codePoint == '(' {
		lexer.step()
		if len(name) == 3 {
			u, r, l := name[0], name[1], name[2]
			if (u == 'u' || u == 'U') && (r == 'r' || r == 'R') && (l == 'l' || l == 'L') {
				for isWhitespace(lexer.codePoint) {
					lexer.step()
				}
				if lexer.codePoint != '"' && lexer.codePoint != '\'' {
					return lexer.consumeURL()
				}
			}
		}
		return TFunction
	}

	return TIdent
}

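// Editor's illustrative note (not part of the vendored file): per the CSS
// syntax spec, only an unquoted payload such as url(image.png) is lexed as a
// single TURL token; url("image.png") is lexed as a TFunction token followed
// by an ordinary TString argument.
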
func (lexer *lexer) consumeURL() T {
validURL:
	for {
		switch lexer.codePoint {
		case ')':
			lexer.step()
			return TURL

		case eof:
			loc := logger.Loc{Start: lexer.Token.Range.End()}
			lexer.log.Add(logger.Error, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token")
			return TBadURL

		case ' ', '\t', '\n', '\r', '\f':
			lexer.step()
			for isWhitespace(lexer.codePoint) {
				lexer.step()
			}
			if lexer.codePoint != ')' {
				loc := logger.Loc{Start: lexer.Token.Range.End()}
				lexer.log.Add(logger.Error, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token")
				break validURL
			}
			lexer.step()
			return TURL

		case '"', '\'', '(':
			r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
			lexer.log.Add(logger.Error, &lexer.tracker, r, "Expected \")\" to end URL token")
			break validURL

		case '\\':
			if !lexer.isValidEscape() {
				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
				lexer.log.Add(logger.Error, &lexer.tracker, r, "Invalid escape")
				break validURL
			}
			lexer.consumeEscape()

		default:
			if isNonPrintable(lexer.codePoint) {
				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
				lexer.log.Add(logger.Error, &lexer.tracker, r, "Unexpected non-printable character in URL token")
			}
			lexer.step()
		}
	}

	// Consume the remnants of a bad url
	for {
		switch lexer.codePoint {
		case ')', eof:
			lexer.step()
			return TBadURL

		case '\\':
			if lexer.isValidEscape() {
				lexer.consumeEscape()
			}
		}
		lexer.step()
	}
}

func (lexer *lexer) consumeString() T {
	quote := lexer.codePoint
	lexer.step()

	for {
		switch lexer.codePoint {
		case '\\':
			lexer.step()

			// Handle Windows CRLF
			if lexer.codePoint == '\r' {
				lexer.step()
				if lexer.codePoint == '\n' {
					lexer.step()
				}
				continue
			}

			// Otherwise, fall through to ignore the character after the backslash

		case eof:
			lexer.log.Add(logger.Error, &lexer.tracker,
				logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Unterminated string token")
			return TBadString

		case '\n', '\r', '\f':
			lexer.log.Add(logger.Error, &lexer.tracker,
				logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Unterminated string token")
			return TBadString

		case quote:
			lexer.step()
			return TString
		}
		lexer.step()
	}
}

func (lexer *lexer) consumeNumeric() T {
	// Skip over leading sign
	if lexer.codePoint == '+' || lexer.codePoint == '-' {
		lexer.step()
	}

	// Skip over leading digits
	for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
		lexer.step()
	}

	// Skip over digits after dot
	if lexer.codePoint == '.' {
		lexer.step()
		for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
			lexer.step()
		}
	}

	// Skip over exponent
	if lexer.codePoint == 'e' || lexer.codePoint == 'E' {
		contents := lexer.source.Contents

		// Look ahead before advancing to make sure this is an exponent, not a unit
		if lexer.current < len(contents) {
			c := contents[lexer.current]
			if (c == '+' || c == '-') && lexer.current+1 < len(contents) {
				c = contents[lexer.current+1]
			}

			// Only consume this if it's an exponent
			if c >= '0' && c <= '9' {
				lexer.step()
				if lexer.codePoint == '+' || lexer.codePoint == '-' {
					lexer.step()
				}
				for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
					lexer.step()
				}
			}
		}
	}

	// Determine the numeric type
	if lexer.wouldStartIdentifier() {
		lexer.Token.UnitOffset = uint16(lexer.Token.Range.Len)
		lexer.consumeName()
		return TDimension
	}
	if lexer.codePoint == '%' {
		lexer.step()
		return TPercentage
	}
	return TNumber
}

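// Editor's illustrative note (not part of the vendored file): "1.5e2px" lexes
// as a single TDimension token with UnitOffset marking where "px" begins, and
// "50%" lexes as TPercentage. "1.5em" is also TDimension; the exponent
// lookahead is what keeps the "e" of "em" out of the number.
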
func IsNameStart(c rune) bool {
	return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || c >= 0x80 || c == '\x00'
}

func IsNameContinue(c rune) bool {
	return IsNameStart(c) || (c >= '0' && c <= '9') || c == '-'
}

func isNewline(c rune) bool {
	switch c {
	case '\n', '\r', '\f':
		return true
	}
	return false
}

func isWhitespace(c rune) bool {
	switch c {
	case ' ', '\t', '\n', '\r', '\f':
		return true
	}
	return false
}

func isHex(c rune) (int, bool) {
	if c >= '0' && c <= '9' {
		return int(c - '0'), true
	}
	if c >= 'a' && c <= 'f' {
		return int(c + (10 - 'a')), true
	}
	if c >= 'A' && c <= 'F' {
		return int(c + (10 - 'A')), true
	}
	return 0, false
}

func isNonPrintable(c rune) bool {
	return c <= 0x08 || c == 0x0B || (c >= 0x0E && c <= 0x1F) || c == 0x7F
}

func decodeEscapesInToken(inner string) string {
	i := 0

	for i < len(inner) {
		if c := inner[i]; c == '\\' || c == '\x00' {
			break
		}
		i++
	}

	if i == len(inner) {
		return inner
	}

	sb := strings.Builder{}
	sb.WriteString(inner[:i])
	inner = inner[i:]

	for len(inner) > 0 {
		c, width := utf8.DecodeRuneInString(inner)
		inner = inner[width:]

		if c != '\\' {
			if c == '\x00' {
				c = utf8.RuneError
			}
			sb.WriteRune(c)
			continue
		}

		if len(inner) == 0 {
			sb.WriteRune(utf8.RuneError)
			continue
		}

		c, width = utf8.DecodeRuneInString(inner)
		inner = inner[width:]
		hex, ok := isHex(c)

		if !ok {
			if c == '\n' || c == '\f' {
				continue
			}

			// Handle Windows CRLF
			if c == '\r' {
				c, width = utf8.DecodeRuneInString(inner)
				if c == '\n' {
					inner = inner[width:]
				}
				continue
			}

			// If we get here, this is not a valid escape. However, this is still
			// allowed. In this case the backslash is just ignored.
			sb.WriteRune(c)
			continue
		}

		// Parse up to five additional hex characters (so six in total)
		for i := 0; i < 5 && len(inner) > 0; i++ {
			c, width = utf8.DecodeRuneInString(inner)
			if next, ok := isHex(c); ok {
				inner = inner[width:]
				hex = hex*16 + next
			} else {
				break
			}
		}

		if len(inner) > 0 {
			c, width = utf8.DecodeRuneInString(inner)
			if isWhitespace(c) {
				inner = inner[width:]
			}
		}

		if hex == 0 || (hex >= 0xD800 && hex <= 0xDFFF) || hex > 0x10FFFF {
			sb.WriteRune(utf8.RuneError)
			continue
		}

		sb.WriteRune(rune(hex))
	}

	return sb.String()
}

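// Editor's illustrative sketch (not part of the vendored file): exercising
// decodeEscapesInToken above. The space after "\41" terminates the escape and
// is itself consumed, so the result is "ABC" rather than U+41BC.
func exampleDecodeEscapesInToken() string {
	return decodeEscapesInToken(`\41 BC`) // == "ABC"
}
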
256
vendor/github.com/evanw/esbuild/internal/css_parser/css_decls.go
generated
vendored
@ -1,256 +0,0 @@
package css_parser

import (
	"github.com/evanw/esbuild/internal/compat"
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func (p *parser) commaToken() css_ast.Token {
	t := css_ast.Token{
		Kind: css_lexer.TComma,
		Text: ",",
	}
	if !p.options.RemoveWhitespace {
		t.Whitespace = css_ast.WhitespaceAfter
	}
	return t
}

func expandTokenQuad(tokens []css_ast.Token, allowedIdent string) (result [4]css_ast.Token, ok bool) {
	n := len(tokens)
	if n < 1 || n > 4 {
		return
	}

	// Don't do this if we encounter any unexpected tokens such as "var()"
	for i := 0; i < n; i++ {
		if t := tokens[i]; !t.Kind.IsNumeric() && (t.Kind != css_lexer.TIdent || allowedIdent == "" || t.Text != allowedIdent) {
			return
		}
	}

	result[0] = tokens[0]
	if n > 1 {
		result[1] = tokens[1]
	} else {
		result[1] = result[0]
	}
	if n > 2 {
		result[2] = tokens[2]
	} else {
		result[2] = result[0]
	}
	if n > 3 {
		result[3] = tokens[3]
	} else {
		result[3] = result[1]
	}

	ok = true
	return
}

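// Editor's illustrative note (not part of the vendored file): this implements
// the standard CSS shorthand fill rule, top/right/bottom/left. "margin: 1px"
// expands to [1px 1px 1px 1px], "margin: 1px 2px" to [1px 2px 1px 2px], and
// "margin: 1px 2px 3px" to [1px 2px 3px 2px]; compactTokenQuad below is the
// inverse used when re-printing.
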
func compactTokenQuad(a css_ast.Token, b css_ast.Token, c css_ast.Token, d css_ast.Token, removeWhitespace bool) []css_ast.Token {
	tokens := []css_ast.Token{a, b, c, d}
	if tokens[3].EqualIgnoringWhitespace(tokens[1]) {
		if tokens[2].EqualIgnoringWhitespace(tokens[0]) {
			if tokens[1].EqualIgnoringWhitespace(tokens[0]) {
				tokens = tokens[:1]
			} else {
				tokens = tokens[:2]
			}
		} else {
			tokens = tokens[:3]
		}
	}
	for i := range tokens {
		var whitespace css_ast.WhitespaceFlags
		if !removeWhitespace || i > 0 {
			whitespace |= css_ast.WhitespaceBefore
		}
		if i+1 < len(tokens) {
			whitespace |= css_ast.WhitespaceAfter
		}
		tokens[i].Whitespace = whitespace
	}
	return tokens
}

func (p *parser) processDeclarations(rules []css_ast.Rule) []css_ast.Rule {
	margin := boxTracker{key: css_ast.DMargin, keyText: "margin", allowAuto: true}
	padding := boxTracker{key: css_ast.DPadding, keyText: "padding", allowAuto: false}
	inset := boxTracker{key: css_ast.DInset, keyText: "inset", allowAuto: true}
	borderRadius := borderRadiusTracker{}

	for i, rule := range rules {
		decl, ok := rule.Data.(*css_ast.RDeclaration)
		if !ok {
			continue
		}

		switch decl.Key {
		case css_ast.DBackgroundColor,
			css_ast.DBorderBlockEndColor,
			css_ast.DBorderBlockStartColor,
			css_ast.DBorderBottomColor,
			css_ast.DBorderColor,
			css_ast.DBorderInlineEndColor,
			css_ast.DBorderInlineStartColor,
			css_ast.DBorderLeftColor,
			css_ast.DBorderRightColor,
			css_ast.DBorderTopColor,
			css_ast.DCaretColor,
			css_ast.DColor,
			css_ast.DColumnRuleColor,
			css_ast.DFill,
			css_ast.DFloodColor,
			css_ast.DLightingColor,
			css_ast.DOutlineColor,
			css_ast.DStopColor,
			css_ast.DStroke,
			css_ast.DTextDecorationColor,
			css_ast.DTextEmphasisColor:

			if len(decl.Value) == 1 {
				decl.Value[0] = p.lowerColor(decl.Value[0])

				if p.options.MangleSyntax {
					t := decl.Value[0]
					if hex, ok := parseColor(t); ok {
						decl.Value[0] = p.mangleColor(t, hex)
					}
				}
			}

		case css_ast.DFont:
			if p.options.MangleSyntax {
				decl.Value = p.mangleFont(decl.Value)
			}

		case css_ast.DFontFamily:
			if p.options.MangleSyntax {
				if value, ok := p.mangleFontFamily(decl.Value); ok {
					decl.Value = value
				}
			}

		case css_ast.DFontWeight:
			if len(decl.Value) == 1 && p.options.MangleSyntax {
				decl.Value[0] = p.mangleFontWeight(decl.Value[0])
			}

		case css_ast.DTransform:
			if p.options.MangleSyntax {
				decl.Value = p.mangleTransforms(decl.Value)
			}

		case css_ast.DBoxShadow:
			if p.options.MangleSyntax {
				decl.Value = p.mangleBoxShadows(decl.Value)
			}

		// Margin
		case css_ast.DMargin:
			if p.options.MangleSyntax {
				margin.mangleSides(rules, decl, i, p.options.RemoveWhitespace)
			}
		case css_ast.DMarginTop:
			if p.options.MangleSyntax {
				margin.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxTop)
			}
		case css_ast.DMarginRight:
			if p.options.MangleSyntax {
				margin.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxRight)
			}
		case css_ast.DMarginBottom:
			if p.options.MangleSyntax {
				margin.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxBottom)
			}
		case css_ast.DMarginLeft:
			if p.options.MangleSyntax {
				margin.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxLeft)
			}

		// Padding
		case css_ast.DPadding:
			if p.options.MangleSyntax {
				padding.mangleSides(rules, decl, i, p.options.RemoveWhitespace)
			}
		case css_ast.DPaddingTop:
			if p.options.MangleSyntax {
				padding.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxTop)
			}
		case css_ast.DPaddingRight:
			if p.options.MangleSyntax {
				padding.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxRight)
			}
		case css_ast.DPaddingBottom:
			if p.options.MangleSyntax {
				padding.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxBottom)
			}
		case css_ast.DPaddingLeft:
			if p.options.MangleSyntax {
				padding.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxLeft)
			}

		// Inset
		case css_ast.DInset:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
				inset.mangleSides(rules, decl, i, p.options.RemoveWhitespace)
			}
		case css_ast.DTop:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
				inset.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxTop)
			}
		case css_ast.DRight:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
				inset.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxRight)
			}
		case css_ast.DBottom:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
				inset.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxBottom)
			}
		case css_ast.DLeft:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MangleSyntax {
				inset.mangleSide(rules, decl, i, p.options.RemoveWhitespace, boxLeft)
			}

		// Border radius
		case css_ast.DBorderRadius:
			if p.options.MangleSyntax {
				borderRadius.mangleCorners(rules, decl, i, p.options.RemoveWhitespace)
			}
		case css_ast.DBorderTopLeftRadius:
			if p.options.MangleSyntax {
				borderRadius.mangleCorner(rules, decl, i, p.options.RemoveWhitespace, borderRadiusTopLeft)
			}
		case css_ast.DBorderTopRightRadius:
			if p.options.MangleSyntax {
				borderRadius.mangleCorner(rules, decl, i, p.options.RemoveWhitespace, borderRadiusTopRight)
			}
		case css_ast.DBorderBottomRightRadius:
			if p.options.MangleSyntax {
				borderRadius.mangleCorner(rules, decl, i, p.options.RemoveWhitespace, borderRadiusBottomRight)
			}
		case css_ast.DBorderBottomLeftRadius:
			if p.options.MangleSyntax {
				borderRadius.mangleCorner(rules, decl, i, p.options.RemoveWhitespace, borderRadiusBottomLeft)
			}
		}
	}

	// Compact removed rules
	if p.options.MangleSyntax {
		end := 0
		for _, rule := range rules {
			if rule.Data != nil {
				rules[end] = rule
				end++
			}
		}
		rules = rules[:end]
	}

	return rules
}
213
vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_border_radius.go
generated
vendored
@ -1,213 +0,0 @@
package css_parser

import (
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

const (
	borderRadiusTopLeft = iota
	borderRadiusTopRight
	borderRadiusBottomRight
	borderRadiusBottomLeft
)

type borderRadiusCorner struct {
	firstToken    css_ast.Token
	secondToken   css_ast.Token
	unitSafety    unitSafetyTracker
	ruleIndex     uint32 // The index of the originating rule in the rules array
	wasSingleRule bool   // True if the originating rule was just for this side
}

type borderRadiusTracker struct {
	corners   [4]borderRadiusCorner
	important bool // True if all active rules were flagged as "!important"
}

func (borderRadius *borderRadiusTracker) updateCorner(rules []css_ast.Rule, corner int, new borderRadiusCorner) {
	if old := borderRadius.corners[corner]; old.firstToken.Kind != css_lexer.TEndOfFile &&
		(!new.wasSingleRule || old.wasSingleRule) &&
		old.unitSafety.status == unitSafe && new.unitSafety.status == unitSafe {
		rules[old.ruleIndex] = css_ast.Rule{}
	}
	borderRadius.corners[corner] = new
}

func (borderRadius *borderRadiusTracker) mangleCorners(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool) {
	// Reset if we see a change in the "!important" flag
	if borderRadius.important != decl.Important {
		borderRadius.corners = [4]borderRadiusCorner{}
		borderRadius.important = decl.Important
	}

	tokens := decl.Value
	beforeSplit := len(tokens)
	afterSplit := len(tokens)

	// Search for the single slash if present
	for i, t := range tokens {
		if t.Kind == css_lexer.TDelimSlash {
			if beforeSplit == len(tokens) {
				beforeSplit = i
				afterSplit = i + 1
			} else {
				// Multiple slashes are an error
				borderRadius.corners = [4]borderRadiusCorner{}
				return
			}
		}
	}

	// Use a single tracker for the whole rule
	unitSafety := unitSafetyTracker{}
	for _, t := range tokens[:beforeSplit] {
		unitSafety.includeUnitOf(t)
	}
	for _, t := range tokens[afterSplit:] {
		unitSafety.includeUnitOf(t)
	}

	firstRadii, firstRadiiOk := expandTokenQuad(tokens[:beforeSplit], "")
	lastRadii, lastRadiiOk := expandTokenQuad(tokens[afterSplit:], "")

	// Stop now if the pattern wasn't matched
	if !firstRadiiOk || (beforeSplit < afterSplit && !lastRadiiOk) {
		borderRadius.corners = [4]borderRadiusCorner{}
		return
	}

	// Handle the first radii
	for corner, t := range firstRadii {
		if unitSafety.status == unitSafe {
			t.TurnLengthIntoNumberIfZero()
		}
		borderRadius.updateCorner(rules, corner, borderRadiusCorner{
			firstToken:  t,
			secondToken: t,
			unitSafety:  unitSafety,
			ruleIndex:   uint32(index),
		})
	}

	// Handle the last radii
	if lastRadiiOk {
		for corner, t := range lastRadii {
			if unitSafety.status == unitSafe {
				t.TurnLengthIntoNumberIfZero()
			}
			borderRadius.corners[corner].secondToken = t
		}
	}

	// Success
	borderRadius.compactRules(rules, decl.KeyRange, removeWhitespace)
}

func (borderRadius *borderRadiusTracker) mangleCorner(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool, corner int) {
	// Reset if we see a change in the "!important" flag
	if borderRadius.important != decl.Important {
		borderRadius.corners = [4]borderRadiusCorner{}
		borderRadius.important = decl.Important
	}

	if tokens := decl.Value; (len(tokens) == 1 && tokens[0].Kind.IsNumeric()) ||
		(len(tokens) == 2 && tokens[0].Kind.IsNumeric() && tokens[1].Kind.IsNumeric()) {
		firstToken := tokens[0]
		secondToken := firstToken
		if len(tokens) == 2 {
			secondToken = tokens[1]
		}

		// Check to see if these units are safe to use in every browser
		unitSafety := unitSafetyTracker{}
		unitSafety.includeUnitOf(firstToken)
		unitSafety.includeUnitOf(secondToken)

		// Only collapse "0unit" into "0" if the unit is safe
		if unitSafety.status == unitSafe && firstToken.TurnLengthIntoNumberIfZero() {
			tokens[0] = firstToken
		}
		if len(tokens) == 2 {
			if unitSafety.status == unitSafe && secondToken.TurnLengthIntoNumberIfZero() {
				tokens[1] = secondToken
			}

			// If both tokens are equal, merge them into one
			if firstToken.EqualIgnoringWhitespace(secondToken) {
				tokens[0].Whitespace &= ^css_ast.WhitespaceAfter
				decl.Value = tokens[:1]
			}
		}

		borderRadius.updateCorner(rules, corner, borderRadiusCorner{
			firstToken:    firstToken,
			secondToken:   secondToken,
			unitSafety:    unitSafety,
			ruleIndex:     uint32(index),
			wasSingleRule: true,
		})
		borderRadius.compactRules(rules, decl.KeyRange, removeWhitespace)
	} else {
		borderRadius.corners = [4]borderRadiusCorner{}
	}
}

func (borderRadius *borderRadiusTracker) compactRules(rules []css_ast.Rule, keyRange logger.Range, removeWhitespace bool) {
	// All tokens must be present
	if eof := css_lexer.TEndOfFile; borderRadius.corners[0].firstToken.Kind == eof || borderRadius.corners[1].firstToken.Kind == eof ||
		borderRadius.corners[2].firstToken.Kind == eof || borderRadius.corners[3].firstToken.Kind == eof {
		return
	}

	// All tokens must have the same unit
	for _, side := range borderRadius.corners[1:] {
		if !side.unitSafety.isSafeWith(borderRadius.corners[0].unitSafety) {
			return
		}
	}

	// Generate the most minimal representation
	tokens := compactTokenQuad(
		borderRadius.corners[0].firstToken,
		borderRadius.corners[1].firstToken,
		borderRadius.corners[2].firstToken,
		borderRadius.corners[3].firstToken,
		removeWhitespace,
	)
	secondTokens := compactTokenQuad(
		borderRadius.corners[0].secondToken,
		borderRadius.corners[1].secondToken,
		borderRadius.corners[2].secondToken,
		borderRadius.corners[3].secondToken,
		removeWhitespace,
	)
	if !css_ast.TokensEqualIgnoringWhitespace(tokens, secondTokens) {
		var whitespace css_ast.WhitespaceFlags
		if !removeWhitespace {
			whitespace = css_ast.WhitespaceBefore | css_ast.WhitespaceAfter
		}
		tokens = append(tokens, css_ast.Token{
			Kind:       css_lexer.TDelimSlash,
			Text:       "/",
			Whitespace: whitespace,
		})
		tokens = append(tokens, secondTokens...)
	}

	// Remove all of the existing declarations
	rules[borderRadius.corners[0].ruleIndex] = css_ast.Rule{}
	rules[borderRadius.corners[1].ruleIndex] = css_ast.Rule{}
	rules[borderRadius.corners[2].ruleIndex] = css_ast.Rule{}
	rules[borderRadius.corners[3].ruleIndex] = css_ast.Rule{}

	// Insert the combined declaration where the last rule was
	rules[borderRadius.corners[3].ruleIndex].Data = &css_ast.RDeclaration{
		Key:       css_ast.DBorderRadius,
		KeyText:   "border-radius",
		Value:     tokens,
		KeyRange:  keyRange,
		Important: borderRadius.important,
	}
}
198
vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_box.go
generated
vendored
@ -1,198 +0,0 @@
package css_parser

import (
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

const (
	boxTop = iota
	boxRight
	boxBottom
	boxLeft
)

type boxSide struct {
	token         css_ast.Token
	unitSafety    unitSafetyTracker
	ruleIndex     uint32 // The index of the originating rule in the rules array
	wasSingleRule bool   // True if the originating rule was just for this side
}

type boxTracker struct {
	key       css_ast.D
	keyText   string
	allowAuto bool // If true, allow the "auto" keyword

	sides     [4]boxSide
	important bool // True if all active rules were flagged as "!important"
}

type unitSafetyStatus uint8

const (
	unitSafe         unitSafetyStatus = iota // "margin: 0 1px 2cm 3%;"
	unitUnsafeSingle                         // "margin: 0 1vw 2vw 3vw;"
	unitUnsafeMixed                          // "margin: 0 1vw 2vh 3ch;"
)

// We can only compact rules together if they have the same unit safety level.
// We want to avoid a situation where the browser treats some of the original
// rules as valid and others as invalid.
//
//   Safe:
//     top: 1px; left: 0; bottom: 1px; right: 0;
//     top: 1Q; left: 2Q; bottom: 3Q; right: 4Q;
//
//   Unsafe:
//     top: 1vh; left: 2vw; bottom: 3vh; right: 4vw;
//     top: 1Q; left: 2Q; bottom: 3Q; right: 0;
//     inset: 1Q 0 0 0; top: 0;
//
type unitSafetyTracker struct {
	status unitSafetyStatus
	unit   string
}

func (a unitSafetyTracker) isSafeWith(b unitSafetyTracker) bool {
	return a.status == b.status && a.status != unitUnsafeMixed && (a.status != unitUnsafeSingle || a.unit == b.unit)
}

func (t *unitSafetyTracker) includeUnitOf(token css_ast.Token) {
	switch token.Kind {
	case css_lexer.TNumber:
		if token.Text == "0" {
			return
		}

	case css_lexer.TPercentage:
		return

	case css_lexer.TDimension:
		if token.DimensionUnitIsSafeLength() {
			return
		} else if unit := token.DimensionUnit(); t.status == unitSafe {
			t.status = unitUnsafeSingle
			t.unit = unit
			return
		} else if t.status == unitUnsafeSingle && t.unit == unit {
			return
		}
	}

	t.status = unitUnsafeMixed
}

func (box *boxTracker) updateSide(rules []css_ast.Rule, side int, new boxSide) {
	if old := box.sides[side]; old.token.Kind != css_lexer.TEndOfFile &&
		(!new.wasSingleRule || old.wasSingleRule) &&
		old.unitSafety.status == unitSafe && new.unitSafety.status == unitSafe {
		rules[old.ruleIndex] = css_ast.Rule{}
	}
	box.sides[side] = new
}

func (box *boxTracker) mangleSides(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool) {
	// Reset if we see a change in the "!important" flag
	if box.important != decl.Important {
		box.sides = [4]boxSide{}
		box.important = decl.Important
	}

	allowedIdent := ""
	if box.allowAuto {
		allowedIdent = "auto"
	}
	if quad, ok := expandTokenQuad(decl.Value, allowedIdent); ok {
		// Use a single tracker for the whole rule
		unitSafety := unitSafetyTracker{}
		for _, t := range quad {
			if !box.allowAuto || t.Kind.IsNumeric() {
				unitSafety.includeUnitOf(t)
			}
		}
		for side, t := range quad {
			if unitSafety.status == unitSafe {
				t.TurnLengthIntoNumberIfZero()
			}
			box.updateSide(rules, side, boxSide{
				token:      t,
				ruleIndex:  uint32(index),
				unitSafety: unitSafety,
			})
		}
		box.compactRules(rules, decl.KeyRange, removeWhitespace)
	} else {
		box.sides = [4]boxSide{}
	}
}

func (box *boxTracker) mangleSide(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool, side int) {
	// Reset if we see a change in the "!important" flag
	if box.important != decl.Important {
		box.sides = [4]boxSide{}
		box.important = decl.Important
	}

	if tokens := decl.Value; len(tokens) == 1 {
		if t := tokens[0]; t.Kind.IsNumeric() || (t.Kind == css_lexer.TIdent && box.allowAuto && t.Text == "auto") {
			unitSafety := unitSafetyTracker{}
			if !box.allowAuto || t.Kind.IsNumeric() {
				unitSafety.includeUnitOf(t)
			}
			if unitSafety.status == unitSafe && t.TurnLengthIntoNumberIfZero() {
				tokens[0] = t
			}
			box.updateSide(rules, side, boxSide{
				token:         t,
				ruleIndex:     uint32(index),
				wasSingleRule: true,
				unitSafety:    unitSafety,
			})
			box.compactRules(rules, decl.KeyRange, removeWhitespace)
			return
		}
	}

	box.sides = [4]boxSide{}
}

func (box *boxTracker) compactRules(rules []css_ast.Rule, keyRange logger.Range, removeWhitespace bool) {
	// All tokens must be present
	if eof := css_lexer.TEndOfFile; box.sides[0].token.Kind == eof || box.sides[1].token.Kind == eof ||
		box.sides[2].token.Kind == eof || box.sides[3].token.Kind == eof {
		return
	}

	// All tokens must have the same unit
	for _, side := range box.sides[1:] {
		if !side.unitSafety.isSafeWith(box.sides[0].unitSafety) {
			return
		}
	}

	// Generate the most minimal representation
	tokens := compactTokenQuad(
		box.sides[0].token,
		box.sides[1].token,
		box.sides[2].token,
		box.sides[3].token,
		removeWhitespace,
	)

	// Remove all of the existing declarations
	rules[box.sides[0].ruleIndex] = css_ast.Rule{}
	rules[box.sides[1].ruleIndex] = css_ast.Rule{}
	rules[box.sides[2].ruleIndex] = css_ast.Rule{}
	rules[box.sides[3].ruleIndex] = css_ast.Rule{}

	// Insert the combined declaration where the last rule was
	rules[box.sides[3].ruleIndex].Data = &css_ast.RDeclaration{
		Key:       box.key,
		KeyText:   box.keyText,
		Value:     tokens,
		KeyRange:  keyRange,
		Important: box.important,
	}
}
103
vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_box_shadow.go
generated
vendored
@ -1,103 +0,0 @@
package css_parser

import (
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func (p *parser) mangleBoxShadow(tokens []css_ast.Token) []css_ast.Token {
	insetCount := 0
	colorCount := 0
	numbersBegin := 0
	numbersCount := 0
	numbersDone := false
	foundUnexpectedToken := false

	for i, t := range tokens {
		if t.Kind == css_lexer.TNumber || t.Kind == css_lexer.TDimension {
			if numbersDone {
				// Track if we found a non-number in between two numbers
				foundUnexpectedToken = true
			}
			if t.TurnLengthIntoNumberIfZero() {
				// "0px" => "0"
				tokens[i] = t
			}
			if numbersCount == 0 {
				// Track the index of the first number
				numbersBegin = i
			}
			numbersCount++
		} else {
			if numbersCount != 0 {
				// Track when we find a non-number after a number
				numbersDone = true
			}
			if hex, ok := parseColor(t); ok {
				colorCount++
				tokens[i] = p.mangleColor(t, hex)
			} else if t.Kind == css_lexer.TIdent && t.Text == "inset" {
				insetCount++
			} else {
				// Track if we found a token other than a number, a color, or "inset"
				foundUnexpectedToken = true
			}
		}
	}

	// If everything looks like a valid rule, trim trailing zeros off the numbers.
	// There are three valid configurations of numbers:
	//
	//   offset-x | offset-y
	//   offset-x | offset-y | blur-radius
	//   offset-x | offset-y | blur-radius | spread-radius
	//
	// If omitted, blur-radius and spread-radius are implied to be zero.
	if insetCount <= 1 && colorCount <= 1 && numbersCount > 2 && numbersCount <= 4 && !foundUnexpectedToken {
		numbersEnd := numbersBegin + numbersCount
		for numbersCount > 2 && tokens[numbersBegin+numbersCount-1].IsZero() {
			numbersCount--
		}
		tokens = append(tokens[:numbersBegin+numbersCount], tokens[numbersEnd:]...)
	}

	// Set the whitespace flags
	for i := range tokens {
		var whitespace css_ast.WhitespaceFlags
		if i > 0 || !p.options.RemoveWhitespace {
			whitespace |= css_ast.WhitespaceBefore
		}
		if i+1 < len(tokens) {
			whitespace |= css_ast.WhitespaceAfter
		}
		tokens[i].Whitespace = whitespace
	}
	return tokens
}

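// Editor's illustrative note (not part of the vendored file): with mangling
// on, "box-shadow: 2px 2px 0px 0px #ff0000" becomes roughly "2px 2px red";
// the two trailing zero radii are dropped because omitted blur and spread
// default to zero, and the color token is rewritten by mangleColor.
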
func (p *parser) mangleBoxShadows(tokens []css_ast.Token) []css_ast.Token {
	n := len(tokens)
	end := 0
	i := 0

	for i < n {
		// Find the comma or the end of the token list
		comma := i
		for comma < n && tokens[comma].Kind != css_lexer.TComma {
			comma++
		}

		// Mangle this individual shadow
		end += copy(tokens[end:], p.mangleBoxShadow(tokens[i:comma]))

		// Skip over the comma
		if comma < n {
			tokens[end] = tokens[comma]
			end++
			comma++
		}
		i = comma
	}

	return tokens[:end]
}
669
vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_color.go
generated
vendored
@ -1,669 +0,0 @@
package css_parser
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"math"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/evanw/esbuild/internal/compat"
|
|
||||||
"github.com/evanw/esbuild/internal/css_ast"
|
|
||||||
"github.com/evanw/esbuild/internal/css_lexer"
|
|
||||||
)
|
|
||||||
|
|
||||||
// These names are shorter than their hex codes
|
|
||||||
var shortColorName = map[uint32]string{
|
|
||||||
0x000080ff: "navy",
|
|
||||||
0x008000ff: "green",
|
|
||||||
0x008080ff: "teal",
|
|
||||||
0x4b0082ff: "indigo",
|
|
||||||
0x800000ff: "maroon",
|
|
||||||
0x800080ff: "purple",
|
|
||||||
0x808000ff: "olive",
|
|
||||||
0x808080ff: "gray",
|
|
||||||
0xa0522dff: "sienna",
|
|
||||||
0xa52a2aff: "brown",
|
|
||||||
0xc0c0c0ff: "silver",
|
|
||||||
0xcd853fff: "peru",
|
|
||||||
0xd2b48cff: "tan",
|
|
||||||
0xda70d6ff: "orchid",
|
|
||||||
0xdda0ddff: "plum",
|
|
||||||
0xee82eeff: "violet",
|
|
||||||
0xf0e68cff: "khaki",
|
|
||||||
0xf0ffffff: "azure",
|
|
||||||
0xf5deb3ff: "wheat",
|
|
||||||
0xf5f5dcff: "beige",
|
|
||||||
0xfa8072ff: "salmon",
|
|
||||||
0xfaf0e6ff: "linen",
|
|
||||||
0xff0000ff: "red",
|
|
||||||
0xff6347ff: "tomato",
|
|
||||||
0xff7f50ff: "coral",
|
|
||||||
0xffa500ff: "orange",
|
|
||||||
0xffc0cbff: "pink",
|
|
||||||
0xffd700ff: "gold",
|
|
||||||
0xffe4c4ff: "bisque",
|
|
||||||
0xfffafaff: "snow",
|
|
||||||
0xfffff0ff: "ivory",
|
|
||||||
}
|
|
||||||
|
|
||||||
var colorNameToHex = map[string]uint32{
|
|
||||||
"black": 0x000000ff,
|
|
||||||
"silver": 0xc0c0c0ff,
|
|
||||||
"gray": 0x808080ff,
|
|
||||||
"white": 0xffffffff,
|
|
||||||
"maroon": 0x800000ff,
|
|
||||||
"red": 0xff0000ff,
|
|
||||||
"purple": 0x800080ff,
|
|
||||||
"fuchsia": 0xff00ffff,
|
|
||||||
"green": 0x008000ff,
|
|
||||||
"lime": 0x00ff00ff,
|
|
||||||
"olive": 0x808000ff,
|
|
||||||
"yellow": 0xffff00ff,
|
|
||||||
"navy": 0x000080ff,
|
|
||||||
"blue": 0x0000ffff,
|
|
||||||
"teal": 0x008080ff,
|
|
||||||
"aqua": 0x00ffffff,
|
|
||||||
"orange": 0xffa500ff,
|
|
||||||
"aliceblue": 0xf0f8ffff,
|
|
||||||
"antiquewhite": 0xfaebd7ff,
|
|
||||||
"aquamarine": 0x7fffd4ff,
|
|
||||||
"azure": 0xf0ffffff,
|
|
||||||
"beige": 0xf5f5dcff,
|
|
||||||
"bisque": 0xffe4c4ff,
|
|
||||||
"blanchedalmond": 0xffebcdff,
|
|
||||||
"blueviolet": 0x8a2be2ff,
|
|
||||||
"brown": 0xa52a2aff,
|
|
||||||
"burlywood": 0xdeb887ff,
|
|
||||||
"cadetblue": 0x5f9ea0ff,
|
|
||||||
"chartreuse": 0x7fff00ff,
|
|
||||||
"chocolate": 0xd2691eff,
|
|
||||||
"coral": 0xff7f50ff,
|
|
||||||
"cornflowerblue": 0x6495edff,
|
|
||||||
"cornsilk": 0xfff8dcff,
|
|
||||||
"crimson": 0xdc143cff,
|
|
||||||
"cyan": 0x00ffffff,
|
|
||||||
"darkblue": 0x00008bff,
|
|
||||||
"darkcyan": 0x008b8bff,
|
|
||||||
"darkgoldenrod": 0xb8860bff,
|
|
||||||
"darkgray": 0xa9a9a9ff,
|
|
||||||
"darkgreen": 0x006400ff,
|
|
||||||
"darkgrey": 0xa9a9a9ff,
|
|
||||||
"darkkhaki": 0xbdb76bff,
|
|
||||||
"darkmagenta": 0x8b008bff,
|
|
||||||
"darkolivegreen": 0x556b2fff,
|
|
||||||
"darkorange": 0xff8c00ff,
|
|
||||||
"darkorchid": 0x9932ccff,
|
|
||||||
"darkred": 0x8b0000ff,
|
|
||||||
"darksalmon": 0xe9967aff,
|
|
||||||
"darkseagreen": 0x8fbc8fff,
|
|
||||||
"darkslateblue": 0x483d8bff,
|
|
||||||
"darkslategray": 0x2f4f4fff,
|
|
||||||
"darkslategrey": 0x2f4f4fff,
|
|
||||||
"darkturquoise": 0x00ced1ff,
|
|
||||||
"darkviolet": 0x9400d3ff,
|
|
||||||
"deeppink": 0xff1493ff,
|
|
||||||
"deepskyblue": 0x00bfffff,
|
|
||||||
"dimgray": 0x696969ff,
|
|
||||||
"dimgrey": 0x696969ff,
|
|
||||||
"dodgerblue": 0x1e90ffff,
|
|
||||||
"firebrick": 0xb22222ff,
|
|
||||||
"floralwhite": 0xfffaf0ff,
|
|
||||||
"forestgreen": 0x228b22ff,
|
|
||||||
"gainsboro": 0xdcdcdcff,
|
|
||||||
"ghostwhite": 0xf8f8ffff,
|
|
||||||
"gold": 0xffd700ff,
|
|
||||||
"goldenrod": 0xdaa520ff,
|
|
||||||
"greenyellow": 0xadff2fff,
|
|
||||||
"grey": 0x808080ff,
|
|
||||||
"honeydew": 0xf0fff0ff,
|
|
||||||
"hotpink": 0xff69b4ff,
|
|
||||||
"indianred": 0xcd5c5cff,
|
|
||||||
"indigo": 0x4b0082ff,
|
|
||||||
"ivory": 0xfffff0ff,
|
|
||||||
"khaki": 0xf0e68cff,
|
|
||||||
"lavender": 0xe6e6faff,
|
|
||||||
"lavenderblush": 0xfff0f5ff,
|
|
||||||
"lawngreen": 0x7cfc00ff,
|
|
||||||
"lemonchiffon": 0xfffacdff,
|
|
||||||
"lightblue": 0xadd8e6ff,
|
|
||||||
"lightcoral": 0xf08080ff,
|
|
||||||
"lightcyan": 0xe0ffffff,
|
|
||||||
"lightgoldenrodyellow": 0xfafad2ff,
|
|
||||||
"lightgray": 0xd3d3d3ff,
|
|
||||||
"lightgreen": 0x90ee90ff,
|
|
||||||
"lightgrey": 0xd3d3d3ff,
|
|
||||||
"lightpink": 0xffb6c1ff,
|
|
||||||
"lightsalmon": 0xffa07aff,
|
|
||||||
"lightseagreen": 0x20b2aaff,
|
|
||||||
"lightskyblue": 0x87cefaff,
|
|
||||||
"lightslategray": 0x778899ff,
|
|
||||||
"lightslategrey": 0x778899ff,
|
|
||||||
"lightsteelblue": 0xb0c4deff,
|
|
||||||
"lightyellow": 0xffffe0ff,
|
|
||||||
"limegreen": 0x32cd32ff,
|
|
||||||
"linen": 0xfaf0e6ff,
|
|
||||||
"magenta": 0xff00ffff,
|
|
||||||
"mediumaquamarine": 0x66cdaaff,
|
|
||||||
"mediumblue": 0x0000cdff,
|
|
||||||
"mediumorchid": 0xba55d3ff,
|
|
||||||
"mediumpurple": 0x9370dbff,
|
|
||||||
"mediumseagreen": 0x3cb371ff,
|
|
||||||
"mediumslateblue": 0x7b68eeff,
|
|
||||||
"mediumspringgreen": 0x00fa9aff,
|
|
||||||
"mediumturquoise": 0x48d1ccff,
|
|
||||||
"mediumvioletred": 0xc71585ff,
|
|
||||||
"midnightblue": 0x191970ff,
|
|
||||||
"mintcream": 0xf5fffaff,
|
|
||||||
"mistyrose": 0xffe4e1ff,
|
|
||||||
"moccasin": 0xffe4b5ff,
|
|
||||||
"navajowhite": 0xffdeadff,
|
|
||||||
"oldlace": 0xfdf5e6ff,
|
|
||||||
"olivedrab": 0x6b8e23ff,
|
|
||||||
"orangered": 0xff4500ff,
|
|
||||||
"orchid": 0xda70d6ff,
|
|
||||||
"palegoldenrod": 0xeee8aaff,
|
|
||||||
"palegreen": 0x98fb98ff,
|
|
||||||
"paleturquoise": 0xafeeeeff,
|
|
||||||
"palevioletred": 0xdb7093ff,
|
|
||||||
"papayawhip": 0xffefd5ff,
|
|
||||||
"peachpuff": 0xffdab9ff,
|
|
||||||
"peru": 0xcd853fff,
|
|
||||||
"pink": 0xffc0cbff,
|
|
||||||
"plum": 0xdda0ddff,
|
|
||||||
"powderblue": 0xb0e0e6ff,
|
|
||||||
"rosybrown": 0xbc8f8fff,
|
|
||||||
"royalblue": 0x4169e1ff,
|
|
||||||
"saddlebrown": 0x8b4513ff,
|
|
||||||
"salmon": 0xfa8072ff,
|
|
||||||
"sandybrown": 0xf4a460ff,
|
|
||||||
"seagreen": 0x2e8b57ff,
|
|
||||||
"seashell": 0xfff5eeff,
|
|
||||||
"sienna": 0xa0522dff,
|
|
||||||
"skyblue": 0x87ceebff,
|
|
||||||
"slateblue": 0x6a5acdff,
|
|
||||||
"slategray": 0x708090ff,
|
|
||||||
"slategrey": 0x708090ff,
|
|
||||||
"snow": 0xfffafaff,
|
|
||||||
"springgreen": 0x00ff7fff,
|
|
||||||
"steelblue": 0x4682b4ff,
|
|
||||||
"tan": 0xd2b48cff,
|
|
||||||
"thistle": 0xd8bfd8ff,
|
|
||||||
"tomato": 0xff6347ff,
|
|
||||||
"turquoise": 0x40e0d0ff,
|
|
||||||
"violet": 0xee82eeff,
|
|
||||||
"wheat": 0xf5deb3ff,
|
|
||||||
"whitesmoke": 0xf5f5f5ff,
|
|
||||||
"yellowgreen": 0x9acd32ff,
|
|
||||||
"rebeccapurple": 0x663399ff,
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseHex(text string) (uint32, bool) {
|
|
||||||
hex := uint32(0)
|
|
||||||
for _, c := range text {
|
|
||||||
hex <<= 4
|
|
||||||
switch {
|
|
||||||
case c >= '0' && c <= '9':
|
|
||||||
hex |= uint32(c) - '0'
|
|
||||||
case c >= 'a' && c <= 'f':
|
|
||||||
hex |= uint32(c) - ('a' - 10)
|
|
||||||
case c >= 'A' && c <= 'F':
|
|
||||||
hex |= uint32(c) - ('A' - 10)
|
|
||||||
default:
|
|
||||||
return 0, false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return hex, true
|
|
||||||
}
|
|
||||||
|
|
||||||
// 0xAABBCCDD => 0xABCD
|
|
||||||
func compactHex(v uint32) uint32 {
|
|
||||||
return ((v & 0x0FF00000) >> 12) | ((v & 0x00000FF0) >> 4)
|
|
||||||
}
|
|
||||||
|
|
||||||
// 0xABCD => 0xAABBCCDD
|
|
||||||
func expandHex(v uint32) uint32 {
|
|
||||||
return ((v & 0xF000) << 16) | ((v & 0xFF00) << 12) | ((v & 0x0FF0) << 8) | ((v & 0x00FF) << 4) | (v & 0x000F)
|
|
||||||
}
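
// Illustrative note (editor example, not part of the original source): the
// compact form keeps one nibble per channel, so the round trip is lossless
// exactly when every byte duplicates its nibbles:
//
//	compactHex(0xAABBCCDD) == 0xABCD
//	expandHex(0xABCD) == 0xAABBCCDD
//	expandHex(compactHex(0x12345678)) == 0x22336677 // lossy for mixed nibbles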

func hexR(v uint32) int { return int(v >> 24) }
func hexG(v uint32) int { return int((v >> 16) & 255) }
func hexB(v uint32) int { return int((v >> 8) & 255) }
func hexA(v uint32) int { return int(v & 255) }

func floatToStringForColor(a float64) string {
	text := fmt.Sprintf("%.03f", a)
	for text[len(text)-1] == '0' {
		text = text[:len(text)-1]
	}
	if text[len(text)-1] == '.' {
		text = text[:len(text)-1]
	}
	return text
}
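
// Illustrative note (editor example, not part of the original source): the
// formatter prints at most three decimals and strips trailing zeros, so
// 0.5 => "0.5", 1.0 => "1", and 68.0/255 (the alpha of "#...44") => "0.267".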

func degreesForAngle(token css_ast.Token) (float64, bool) {
	switch token.Kind {
	case css_lexer.TNumber:
		if value, err := strconv.ParseFloat(token.Text, 64); err == nil {
			return value, true
		}

	case css_lexer.TDimension:
		if value, err := strconv.ParseFloat(token.DimensionValue(), 64); err == nil {
			switch token.DimensionUnit() {
			case "deg":
				return value, true
			case "grad":
				return value * (360.0 / 400.0), true
			case "rad":
				return value * (180.0 / math.Pi), true
			case "turn":
				return value * 360.0, true
			}
		}
	}
	return 0, false
}
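
// Illustrative note (editor example, not part of the original source):
// "90deg", "100grad", "(π/2)rad", and "0.25turn" all normalize to 90 degrees,
// since 100 * (360/400) == 90 and 0.25 * 360 == 90.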

func lowerAlphaPercentageToNumber(token css_ast.Token) css_ast.Token {
	if token.Kind == css_lexer.TPercentage {
		if value, err := strconv.ParseFloat(token.Text[:len(token.Text)-1], 64); err == nil {
			token.Kind = css_lexer.TNumber
			token.Text = floatToStringForColor(value / 100.0)
		}
	}
	return token
}

// Convert newer color syntax to older color syntax for older browsers
func (p *parser) lowerColor(token css_ast.Token) css_ast.Token {
	text := token.Text

	switch token.Kind {
	case css_lexer.THash:
		if p.options.UnsupportedCSSFeatures.Has(compat.HexRGBA) {
			switch len(text) {
			case 4:
				// "#1234" => "rgba(17, 34, 51, 0.267)"
				if hex, ok := parseHex(text); ok {
					hex = expandHex(hex)
					token.Kind = css_lexer.TFunction
					token.Text = "rgba"
					commaToken := p.commaToken()
					token.Children = &[]css_ast.Token{
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexR(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexG(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexB(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: floatToStringForColor(float64(hexA(hex)) / 255)},
					}
				}

			case 8:
				// "#12345678" => "rgba(18, 52, 86, 0.471)"
				if hex, ok := parseHex(text); ok {
					token.Kind = css_lexer.TFunction
					token.Text = "rgba"
					commaToken := p.commaToken()
					token.Children = &[]css_ast.Token{
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexR(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexG(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexB(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: floatToStringForColor(float64(hexA(hex)) / 255)},
					}
				}
			}
		}

	case css_lexer.TIdent:
		if text == "rebeccapurple" && p.options.UnsupportedCSSFeatures.Has(compat.RebeccaPurple) {
			token.Kind = css_lexer.THash
			token.Text = "663399"
		}

	case css_lexer.TFunction:
		switch text {
		case "rgb", "rgba", "hsl", "hsla":
			if p.options.UnsupportedCSSFeatures.Has(compat.Modern_RGB_HSL) {
				args := *token.Children
				removeAlpha := false
				addAlpha := false

				// "hsl(1deg, 2%, 3%)" => "hsl(1, 2%, 3%)"
				if (text == "hsl" || text == "hsla") && len(args) > 0 {
					if degrees, ok := degreesForAngle(args[0]); ok {
						args[0].Kind = css_lexer.TNumber
						args[0].Text = floatToStringForColor(degrees)
					}
				}

				// These check for "IsNumeric" to reject "var()" since a single "var()"
				// can substitute for multiple tokens and that messes up pattern matching
				switch len(args) {
				case 3:
					// "rgba(1 2 3)" => "rgb(1, 2, 3)"
					// "hsla(1 2% 3%)" => "hsl(1, 2%, 3%)"
					if args[0].Kind.IsNumeric() && args[1].Kind.IsNumeric() && args[2].Kind.IsNumeric() {
						removeAlpha = true
						args[0].Whitespace = 0
						args[1].Whitespace = 0
						commaToken := p.commaToken()
						token.Children = &[]css_ast.Token{
							args[0], commaToken,
							args[1], commaToken,
							args[2],
						}
					}

				case 5:
					// "rgba(1, 2, 3)" => "rgb(1, 2, 3)"
					// "hsla(1, 2%, 3%)" => "hsl(1, 2%, 3%)"
					if args[0].Kind.IsNumeric() && args[1].Kind == css_lexer.TComma &&
						args[2].Kind.IsNumeric() && args[3].Kind == css_lexer.TComma &&
						args[4].Kind.IsNumeric() {
						removeAlpha = true
						break
					}

					// "rgb(1 2 3 / 4%)" => "rgba(1, 2, 3, 0.04)"
					// "hsl(1 2% 3% / 4%)" => "hsla(1, 2%, 3%, 0.04)"
					if args[0].Kind.IsNumeric() && args[1].Kind.IsNumeric() && args[2].Kind.IsNumeric() &&
						args[3].Kind == css_lexer.TDelimSlash && args[4].Kind.IsNumeric() {
						addAlpha = true
						args[0].Whitespace = 0
						args[1].Whitespace = 0
						args[2].Whitespace = 0
						commaToken := p.commaToken()
						token.Children = &[]css_ast.Token{
							args[0], commaToken,
							args[1], commaToken,
							args[2], commaToken,
							lowerAlphaPercentageToNumber(args[4]),
						}
					}

				case 7:
					// "rgb(1%, 2%, 3%, 4%)" => "rgba(1%, 2%, 3%, 0.04)"
					// "hsl(1, 2%, 3%, 4%)" => "hsla(1, 2%, 3%, 0.04)"
					if args[0].Kind.IsNumeric() && args[1].Kind == css_lexer.TComma &&
						args[2].Kind.IsNumeric() && args[3].Kind == css_lexer.TComma &&
						args[4].Kind.IsNumeric() && args[5].Kind == css_lexer.TComma &&
						args[6].Kind.IsNumeric() {
						addAlpha = true
						args[6] = lowerAlphaPercentageToNumber(args[6])
					}
				}

				if removeAlpha {
					if text == "rgba" {
						token.Text = "rgb"
					} else if text == "hsla" {
						token.Text = "hsl"
					}
				} else if addAlpha {
					if text == "rgb" {
						token.Text = "rgba"
					} else if text == "hsl" {
						token.Text = "hsla"
					}
				}
			}
		}
	}

	return token
}
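
// Illustrative note (editor examples, not part of the original source): with
// modern syntax unsupported, "rgb(1 2 3 / 40%)" is rewritten to
// "rgba(1, 2, 3, 0.4)" and "hsla(90deg, 20%, 30%)" to "hsl(90, 20%, 30%)";
// with "#RRGGBBAA" unsupported, "#12345678" becomes "rgba(18, 52, 86, 0.471)".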

func parseColor(token css_ast.Token) (uint32, bool) {
	text := token.Text

	switch token.Kind {
	case css_lexer.TIdent:
		if hex, ok := colorNameToHex[strings.ToLower(text)]; ok {
			return hex, true
		}

	case css_lexer.THash:
		switch len(text) {
		case 3:
			// "#123"
			if hex, ok := parseHex(text); ok {
				return (expandHex(hex) << 8) | 0xFF, true
			}

		case 4:
			// "#1234"
			if hex, ok := parseHex(text); ok {
				return expandHex(hex), true
			}

		case 6:
			// "#112233"
			if hex, ok := parseHex(text); ok {
				return (hex << 8) | 0xFF, true
			}

		case 8:
			// "#11223344"
			if hex, ok := parseHex(text); ok {
				return hex, true
			}
		}

	case css_lexer.TFunction:
		switch text {
		case "rgb", "rgba":
			args := *token.Children
			var r, g, b, a css_ast.Token

			switch len(args) {
			case 3:
				// "rgb(1 2 3)"
				r, g, b = args[0], args[1], args[2]

			case 5:
				// "rgba(1, 2, 3)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma {
					r, g, b = args[0], args[2], args[4]
					break
				}

				// "rgb(1 2 3 / 4%)"
				if args[3].Kind == css_lexer.TDelimSlash {
					r, g, b, a = args[0], args[1], args[2], args[4]
				}

			case 7:
				// "rgb(1%, 2%, 3%, 4%)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma && args[5].Kind == css_lexer.TComma {
					r, g, b, a = args[0], args[2], args[4], args[6]
				}
			}

			if r, ok := parseColorByte(r, 1); ok {
				if g, ok := parseColorByte(g, 1); ok {
					if b, ok := parseColorByte(b, 1); ok {
						if a, ok := parseAlphaByte(a); ok {
							return uint32((r << 24) | (g << 16) | (b << 8) | a), true
						}
					}
				}
			}

		case "hsl", "hsla":
			args := *token.Children
			var h, s, l, a css_ast.Token

			switch len(args) {
			case 3:
				// "hsl(1 2 3)"
				h, s, l = args[0], args[1], args[2]

			case 5:
				// "hsla(1, 2, 3)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma {
					h, s, l = args[0], args[2], args[4]
					break
				}

				// "hsl(1 2 3 / 4%)"
				if args[3].Kind == css_lexer.TDelimSlash {
					h, s, l, a = args[0], args[1], args[2], args[4]
				}

			case 7:
				// "hsl(1, 2%, 3%, 4%)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma && args[5].Kind == css_lexer.TComma {
					h, s, l, a = args[0], args[2], args[4], args[6]
				}
			}

			// Convert from HSL to RGB. The algorithm is from the section
			// "Converting HSL colors to sRGB colors" in the specification.
			if h, ok := degreesForAngle(h); ok {
				if s, ok := s.FractionForPercentage(); ok {
					if l, ok := l.FractionForPercentage(); ok {
						if a, ok := parseAlphaByte(a); ok {
							h /= 360.0
							var t2 float64
							if l <= 0.5 {
								t2 = l * (s + 1)
							} else {
								t2 = l + s - (l * s)
							}
							t1 := l*2 - t2
							r := hueToRgb(t1, t2, h+1.0/3.0)
							g := hueToRgb(t1, t2, h)
							b := hueToRgb(t1, t2, h-1.0/3.0)
							return uint32((r << 24) | (g << 16) | (b << 8) | a), true
						}
					}
				}
			}
		}
	}

	return 0, false
}

func hueToRgb(t1 float64, t2 float64, hue float64) uint32 {
	hue -= math.Floor(hue)
	hue *= 6.0
	var f float64
	if hue < 1 {
		f = (t2-t1)*hue + t1
	} else if hue < 3 {
		f = t2
	} else if hue < 4 {
		f = (t2-t1)*(4-hue) + t1
	} else {
		f = t1
	}
	i := int(math.Round(f * 255))
	if i < 0 {
		i = 0
	} else if i > 255 {
		i = 255
	}
	return uint32(i)
}
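
// Illustrative note (editor example, not part of the original source): for
// hsl(120, 100%, 25%) we get s=1 and l=0.25, so t2 = 0.25*(1+1) = 0.5 and
// t1 = 2*0.25 - 0.5 = 0; hueToRgb then yields r=0, g=round(0.5*255)=128,
// and b=0, i.e. the color #008000.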

func parseAlphaByte(token css_ast.Token) (uint32, bool) {
	if token.Kind == css_lexer.T(0) {
		return 255, true
	}
	return parseColorByte(token, 255)
}

func parseColorByte(token css_ast.Token, scale float64) (uint32, bool) {
	var i int
	var ok bool

	switch token.Kind {
	case css_lexer.TNumber:
		if f, err := strconv.ParseFloat(token.Text, 64); err == nil {
			i = int(math.Round(f * scale))
			ok = true
		}

	case css_lexer.TPercentage:
		if f, err := strconv.ParseFloat(token.PercentageValue(), 64); err == nil {
			i = int(math.Round(f * (255.0 / 100.0)))
			ok = true
		}
	}

	if i < 0 {
		i = 0
	} else if i > 255 {
		i = 255
	}
	return uint32(i), ok
}

func (p *parser) mangleColor(token css_ast.Token, hex uint32) css_ast.Token {
	// Note: Do NOT remove color information from fully transparent colors.
	// Safari behaves differently than other browsers for color interpolation:
	// https://css-tricks.com/thing-know-gradients-transparent-black/

	if hexA(hex) == 255 {
		token.Children = nil
		if name, ok := shortColorName[hex]; ok {
			token.Kind = css_lexer.TIdent
			token.Text = name
		} else {
			token.Kind = css_lexer.THash
			hex >>= 8
			compact := compactHex(hex)
			if hex == expandHex(compact) {
				token.Text = fmt.Sprintf("%03x", compact)
			} else {
				token.Text = fmt.Sprintf("%06x", hex)
			}
		}
	} else if !p.options.UnsupportedCSSFeatures.Has(compat.HexRGBA) {
		token.Children = nil
		token.Kind = css_lexer.THash
		compact := compactHex(hex)
		if hex == expandHex(compact) {
			token.Text = fmt.Sprintf("%04x", compact)
		} else {
			token.Text = fmt.Sprintf("%08x", hex)
		}
	} else {
		token.Kind = css_lexer.TFunction
		token.Text = "rgba"
		commaToken := p.commaToken()
		index := hexA(hex) * 4
		alpha := alphaFractionTable[index : index+4]
		if space := strings.IndexByte(alpha, ' '); space != -1 {
			alpha = alpha[:space]
		}
		token.Children = &[]css_ast.Token{
			{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexR(hex))}, commaToken,
			{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexG(hex))}, commaToken,
			{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexB(hex))}, commaToken,
			{Kind: css_lexer.TNumber, Text: alpha},
		}
	}

	return token
}
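
// Illustrative note (editor examples, not part of the original source; assumes
// shortColorName maps 0xff0000ff to "red"): a fully opaque 0xff0000ff mangles
// to the ident "red", while 0x11223344 mangles to "#1234" when "#RRGGBBAA" is
// supported and otherwise falls back to "rgba(17, 34, 51, .267)", with the
// alpha fraction taken from alphaFractionTable below.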

// Every four characters in this table is the fraction for that index
const alphaFractionTable string = "" +
	"0   .004.008.01 .016.02 .024.027.03 .035.04 .043.047.05 .055.06 " +
	".063.067.07 .075.08 .082.086.09 .094.098.1  .106.11 .114.118.12 " +
	".125.13 .133.137.14 .145.15 .153.157.16 .165.17 .173.176.18 .184" +
	".19 .192.196.2  .204.208.21 .216.22 .224.227.23 .235.24 .243.247" +
	".25 .255.26 .263.267.27 .275.28 .282.286.29 .294.298.3  .306.31 " +
	".314.318.32 .325.33 .333.337.34 .345.35 .353.357.36 .365.37 .373" +
	".376.38 .384.39 .392.396.4  .404.408.41 .416.42 .424.427.43 .435" +
	".44 .443.447.45 .455.46 .463.467.47 .475.48 .482.486.49 .494.498" +
	".5  .506.51 .514.518.52 .525.53 .533.537.54 .545.55 .553.557.56 " +
	".565.57 .573.576.58 .584.59 .592.596.6  .604.608.61 .616.62 .624" +
	".627.63 .635.64 .643.647.65 .655.66 .663.667.67 .675.68 .682.686" +
	".69 .694.698.7  .706.71 .714.718.72 .725.73 .733.737.74 .745.75 " +
	".753.757.76 .765.77 .773.776.78 .784.79 .792.796.8  .804.808.81 " +
	".816.82 .824.827.83 .835.84 .843.847.85 .855.86 .863.867.87 .875" +
	".88 .882.886.89 .894.898.9  .906.91 .914.918.92 .925.93 .933.937" +
	".94 .945.95 .953.957.96 .965.97 .973.976.98 .984.99 .992.9961   "
135
vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_font.go
generated
vendored
@ -1,135 +0,0 @@
package css_parser

import (
	"strconv"
	"strings"

	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

// Specification: https://drafts.csswg.org/css-fonts/#font-prop
// [ <font-style> || <font-variant-css2> || <font-weight> || <font-stretch-css3> ]? <font-size> [ / <line-height> ]? <font-family>
func (p *parser) mangleFont(tokens []css_ast.Token) []css_ast.Token {
	var result []css_ast.Token

	// Scan up to the font size
	pos := 0
	for ; pos < len(tokens); pos++ {
		token := tokens[pos]
		if isFontSize(token) {
			break
		}

		switch token.Kind {
		case css_lexer.TIdent:
			switch strings.ToLower(token.Text) {
			case "normal":
				// "All subproperties of the font property are first reset to their initial values"
				// This implies that "normal" doesn't do anything. Also all of the optional values
				// contain "normal" as an option and they are unordered so it's impossible to say
				// what property "normal" corresponds to. Just drop these tokens to save space.
				continue

			// <font-style>
			case "italic":
			case "oblique":
				if pos+1 < len(tokens) && tokens[pos+1].IsAngle() {
					result = append(result, token, tokens[pos+1])
					pos++
					continue
				}

			// <font-variant-css2>
			case "small-caps":

			// <font-weight>
			case "bold", "bolder", "lighter":
				result = append(result, p.mangleFontWeight(token))
				continue

			// <font-stretch-css3>
			case "ultra-condensed", "extra-condensed", "condensed", "semi-condensed",
				"semi-expanded", "expanded", "extra-expanded", "ultra-expanded":

			default:
				// All other tokens are unrecognized, so we bail if we hit one
				return tokens
			}
			result = append(result, token)

		case css_lexer.TNumber:
			// "Only values greater than or equal to 1, and less than or equal to
			// 1000, are valid, and all other values are invalid."
			if value, err := strconv.ParseFloat(token.Text, 64); err != nil || value < 1 || value > 1000 {
				return tokens
			}
			result = append(result, token)

		default:
			// All other tokens are unrecognized, so we bail if we hit one
			return tokens
		}
	}

	// <font-size>
	if pos == len(tokens) {
		return tokens
	}
	result = append(result, tokens[pos])
	pos++

	// / <line-height>
	if pos < len(tokens) && tokens[pos].Kind == css_lexer.TDelimSlash {
		if pos+1 == len(tokens) {
			return tokens
		}
		result = append(result, tokens[pos], tokens[pos+1])
		pos += 2

		// Remove the whitespace around the "/" character
		if p.options.RemoveWhitespace {
			result[len(result)-3].Whitespace &= ^css_ast.WhitespaceAfter
			result[len(result)-2].Whitespace = 0
			result[len(result)-1].Whitespace &= ^css_ast.WhitespaceBefore
		}
	}

	// <font-family>
	if family, ok := p.mangleFontFamily(tokens[pos:]); ok {
		return append(result, family...)
	}
	return tokens
}
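
// Illustrative note (editor example, not part of the original source):
// "font: normal bold 16px/1.5 serif" mangles to "font: 700 16px/1.5 serif" —
// the redundant "normal" is dropped and the keyword weight is rewritten by
// mangleFontWeight below.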

var fontSizeKeywords = map[string]bool{
	// <absolute-size>: https://drafts.csswg.org/css-fonts/#valdef-font-size-absolute-size
	"xx-small":  true,
	"x-small":   true,
	"small":     true,
	"medium":    true,
	"large":     true,
	"x-large":   true,
	"xx-large":  true,
	"xxx-large": true,

	// <relative-size>: https://drafts.csswg.org/css-fonts/#valdef-font-size-relative-size
	"larger":  true,
	"smaller": true,
}

// Specification: https://drafts.csswg.org/css-fonts/#font-size-prop
func isFontSize(token css_ast.Token) bool {
	// <length-percentage>
	if token.Kind == css_lexer.TDimension || token.Kind == css_lexer.TPercentage {
		return true
	}

	// <absolute-size> or <relative-size>
	if token.Kind == css_lexer.TIdent {
		_, ok := fontSizeKeywords[strings.ToLower(token.Text)]
		return ok
	}

	return false
}
142
vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_font_family.go
generated
vendored
@ -1,142 +0,0 @@
package css_parser

import (
	"strings"

	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

// Specification: https://drafts.csswg.org/css-values-4/#common-keywords
var wideKeywords = map[string]bool{
	"initial": true,
	"inherit": true,
	"unset":   true,
}

// Specification: https://drafts.csswg.org/css-fonts/#generic-font-families
var genericFamilyNames = map[string]bool{
	"serif":         true,
	"sans-serif":    true,
	"cursive":       true,
	"fantasy":       true,
	"monospace":     true,
	"system-ui":     true,
	"emoji":         true,
	"math":          true,
	"fangsong":      true,
	"ui-serif":      true,
	"ui-sans-serif": true,
	"ui-monospace":  true,
	"ui-rounded":    true,
}

// Specification: https://drafts.csswg.org/css-fonts/#font-family-prop
func (p *parser) mangleFontFamily(tokens []css_ast.Token) ([]css_ast.Token, bool) {
	result, rest, ok := p.mangleFamilyNameOrGenericName(nil, tokens)
	if !ok {
		return nil, false
	}

	for len(rest) > 0 && rest[0].Kind == css_lexer.TComma {
		result, rest, ok = p.mangleFamilyNameOrGenericName(append(result, rest[0]), rest[1:])
		if !ok {
			return nil, false
		}
	}

	if len(rest) > 0 {
		return nil, false
	}

	return result, true
}

func (p *parser) mangleFamilyNameOrGenericName(result []css_ast.Token, tokens []css_ast.Token) ([]css_ast.Token, []css_ast.Token, bool) {
	if len(tokens) > 0 {
		t := tokens[0]

		// Handle <generic-family>
		if t.Kind == css_lexer.TIdent && genericFamilyNames[t.Text] {
			return append(result, t), tokens[1:], true
		}

		// Handle <family-name>
		if t.Kind == css_lexer.TString {
			// "If a sequence of identifiers is given as a <family-name>, the computed
			// value is the name converted to a string by joining all the identifiers
			// in the sequence by single spaces."
			//
			// More information: https://mathiasbynens.be/notes/unquoted-font-family
			names := strings.Split(t.Text, " ")
			for _, name := range names {
				if !isValidCustomIdent(name, genericFamilyNames) {
					return append(result, t), tokens[1:], true
				}
			}
			for i, name := range names {
				var whitespace css_ast.WhitespaceFlags
				if i != 0 || !p.options.RemoveWhitespace {
					whitespace = css_ast.WhitespaceBefore
				}
				result = append(result, css_ast.Token{
					Kind:       css_lexer.TIdent,
					Text:       name,
					Whitespace: whitespace,
				})
			}
			return result, tokens[1:], true
		}

		// "Font family names other than generic families must either be given
		// quoted as <string>s, or unquoted as a sequence of one or more
		// <custom-ident>."
		if t.Kind == css_lexer.TIdent {
			for {
				if !isValidCustomIdent(t.Text, genericFamilyNames) {
					return nil, nil, false
				}
				result = append(result, t)
				tokens = tokens[1:]
				if len(tokens) == 0 || tokens[0].Kind != css_lexer.TIdent {
					break
				}
				t = tokens[0]
			}
			return result, tokens, true
		}
	}

	// Anything other than the cases listed above causes us to bail
	return nil, nil, false
}

// Specification: https://drafts.csswg.org/css-values-4/#custom-idents
func isValidCustomIdent(text string, predefinedKeywords map[string]bool) bool {
	loweredText := strings.ToLower(text)

	if predefinedKeywords[loweredText] {
		return false
	}
	if wideKeywords[loweredText] {
		return false
	}
	if loweredText == "default" {
		return false
	}
	if loweredText == "" {
		return false
	}

	// Bail if the name contains characters that would need to be escaped
	if !css_lexer.WouldStartIdentifierWithoutEscapes(text) {
		return false
	}
	for _, c := range text {
		if !css_lexer.IsNameContinue(c) {
			return false
		}
	}

	return true
}
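
// Illustrative note (editor examples, not part of the original source):
// "Arial" and "Noto" are valid custom idents and can stay unquoted, while
// "serif" (a generic family), "inherit" (a CSS-wide keyword), "default", and
// any name that would need escaping must remain quoted.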
25
vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_font_weight.go
generated
vendored
@ -1,25 +0,0 @@
package css_parser

import (
	"strings"

	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func (p *parser) mangleFontWeight(token css_ast.Token) css_ast.Token {
	if token.Kind != css_lexer.TIdent {
		return token
	}

	switch strings.ToLower(token.Text) {
	case "normal":
		token.Text = "400"
		token.Kind = css_lexer.TNumber
	case "bold":
		token.Text = "700"
		token.Kind = css_lexer.TNumber
	}

	return token
}
391
vendor/github.com/evanw/esbuild/internal/css_parser/css_decls_transform.go
generated
vendored
@ -1,391 +0,0 @@
package css_parser

import (
	"strings"

	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func turnPercentIntoNumberIfShorter(t *css_ast.Token) {
	if t.Kind == css_lexer.TPercentage {
		if shifted, ok := shiftDot(t.PercentageValue(), -2); ok && len(shifted) < len(t.Text) {
			t.Kind = css_lexer.TNumber
			t.Text = shifted
		}
	}
}

// https://www.w3.org/TR/css-transforms-1/#two-d-transform-functions
// https://drafts.csswg.org/css-transforms-2/#transform-functions
func (p *parser) mangleTransforms(tokens []css_ast.Token) []css_ast.Token {
	for i := range tokens {
		if token := &tokens[i]; token.Kind == css_lexer.TFunction {
			if args := *token.Children; css_ast.TokensAreCommaSeparated(args) {
				n := len(args)

				switch strings.ToLower(token.Text) {
				////////////////////////////////////////////////////////////////////////////////
				// 2D transforms

				case "matrix":
					// specifies a 2D transformation in the form of a transformation
					// matrix of the six values a, b, c, d, e, f.
					if n == 11 {
						// | a c 0 e |
						// | b d 0 f |
						// | 0 0 1 0 |
						// | 0 0 0 1 |
						a, b, c, d, e, f := args[0], args[2], args[4], args[6], args[8], args[10]
						if b.IsZero() && c.IsZero() && e.IsZero() && f.IsZero() {
							// | a 0 0 0 |
							// | 0 d 0 0 |
							// | 0 0 1 0 |
							// | 0 0 0 1 |
							if a.EqualIgnoringWhitespace(d) {
								// "matrix(a, 0, 0, a, 0, 0)" => "scale(a)"
								token.Text = "scale"
								*token.Children = args[:1]
							} else if d.IsOne() {
								// "matrix(a, 0, 0, 1, 0, 0)" => "scaleX(a)"
								token.Text = "scaleX"
								*token.Children = args[:1]
							} else if a.IsOne() {
								// "matrix(1, 0, 0, d, 0, 0)" => "scaleY(d)"
								token.Text = "scaleY"
								*token.Children = args[6:7]
							} else {
								// "matrix(a, 0, 0, d, 0, 0)" => "scale(a, d)"
								token.Text = "scale"
								*token.Children = append(args[:2], d)
							}

							// Note: A "matrix" cannot be directly converted into a "translate"
							// because "translate" requires units while "matrix" requires no
							// units. I'm not sure exactly what the semantics are so I'm not
							// sure if you can just add "px" or not. Even if that did work,
							// you still couldn't substitute values containing "var()" since
							// units would still not be substituted in that case.
						}
					}

				case "translate":
					// specifies a 2D translation by the vector [tx, ty], where tx is the
					// first translation-value parameter and ty is the optional second
					// translation-value parameter. If <ty> is not provided, ty has zero
					// as a value.
					if n == 1 {
						args[0].TurnLengthOrPercentageIntoNumberIfZero()
					} else if n == 3 {
						tx, ty := &args[0], &args[2]
						tx.TurnLengthOrPercentageIntoNumberIfZero()
						ty.TurnLengthOrPercentageIntoNumberIfZero()
						if ty.IsZero() {
							// "translate(tx, 0)" => "translate(tx)"
							*token.Children = args[:1]
						} else if tx.IsZero() {
							// "translate(0, ty)" => "translateY(ty)"
							token.Text = "translateY"
							*token.Children = args[2:]
						}
					}

				case "translatex":
					// specifies a translation by the given amount in the X direction.
					if n == 1 {
						// "translateX(tx)" => "translate(tx)"
						token.Text = "translate"
						args[0].TurnLengthOrPercentageIntoNumberIfZero()
					}

				case "translatey":
					// specifies a translation by the given amount in the Y direction.
					if n == 1 {
						args[0].TurnLengthOrPercentageIntoNumberIfZero()
					}

				case "scale":
					// specifies a 2D scale operation by the [sx,sy] scaling vector
					// described by the 2 parameters. If the second parameter is not
					// provided, it takes a value equal to the first. For example,
					// scale(1, 1) would leave an element unchanged, while scale(2, 2)
					// would cause it to appear twice as long in both the X and Y axes,
					// or four times its typical geometric size.
					if n == 1 {
						turnPercentIntoNumberIfShorter(&args[0])
					} else if n == 3 {
						sx, sy := &args[0], &args[2]
						turnPercentIntoNumberIfShorter(sx)
						turnPercentIntoNumberIfShorter(sy)
						if sx.EqualIgnoringWhitespace(*sy) {
							// "scale(s, s)" => "scale(s)"
							*token.Children = args[:1]
						} else if sy.IsOne() {
							// "scale(s, 1)" => "scaleX(s)"
							token.Text = "scaleX"
							*token.Children = args[:1]
						} else if sx.IsOne() {
							// "scale(1, s)" => "scaleY(s)"
							token.Text = "scaleY"
							*token.Children = args[2:]
						}
					}

				case "scalex":
					// specifies a 2D scale operation using the [sx,1] scaling vector,
					// where sx is given as the parameter.
					if n == 1 {
						turnPercentIntoNumberIfShorter(&args[0])
					}

				case "scaley":
					// specifies a 2D scale operation using the [1,sy] scaling vector,
					// where sy is given as the parameter.
					if n == 1 {
						turnPercentIntoNumberIfShorter(&args[0])
					}

				case "rotate":
					// specifies a 2D rotation by the angle specified in the parameter
					// about the origin of the element, as defined by the
					// transform-origin property. For example, rotate(90deg) would
					// cause elements to appear rotated one-quarter of a turn in the
					// clockwise direction.
					if n == 1 {
						args[0].TurnLengthIntoNumberIfZero()
					}

				case "skew":
					// specifies a 2D skew by [ax,ay] for X and Y. If the second
					// parameter is not provided, it has a zero value.
					if n == 1 {
						args[0].TurnLengthIntoNumberIfZero()
					} else if n == 3 {
						ax, ay := &args[0], &args[2]
						ax.TurnLengthIntoNumberIfZero()
						ay.TurnLengthIntoNumberIfZero()
						if ay.IsZero() {
							// "skew(ax, 0)" => "skew(ax)"
							*token.Children = args[:1]
						}
					}

				case "skewx":
					// specifies a 2D skew transformation along the X axis by the given
					// angle.
					if n == 1 {
						// "skewX(ax)" => "skew(ax)"
						token.Text = "skew"
						args[0].TurnLengthIntoNumberIfZero()
					}

				case "skewy":
					// specifies a 2D skew transformation along the Y axis by the given
					// angle.
					if n == 1 {
						args[0].TurnLengthIntoNumberIfZero()
					}

				////////////////////////////////////////////////////////////////////////////////
				// 3D transforms

				case "matrix3d":
					// specifies a 3D transformation as a 4x4 homogeneous matrix of 16
					// values in column-major order.
					if n == 31 {
						// | m0 m4 m8  m12 |
						// | m1 m5 m9  m13 |
						// | m2 m6 m10 m14 |
						// | m3 m7 m11 m15 |
						mask := uint32(0)
						for i := 0; i < 16; i++ {
							if arg := args[i*2]; arg.IsZero() {
								mask |= 1 << i
							} else if arg.IsOne() {
								mask |= (1 << 16) << i
							}
						}
						const onlyScale = 0b1000_0000_0000_0000_0111_1011_1101_1110
						const only2D = 0b1000_0100_0000_0000_0100_1011_1100_1100
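						// The low 16 bits of each mask mark the matrix entries that must
						// be zero and the high 16 bits mark the entries that must be one:
						// "onlyScale" requires every off-diagonal entry to be zero and
						// m15 to be one, leaving the diagonal scale factors m0/m5/m10
						// free; "only2D" requires m2, m3, m6, m7, m8, m9, m11, and m14
						// to be zero and m10 and m15 to be one, so only the 2D entries
						// m0, m1, m4, m5, m12, and m13 remain free.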
						if (mask & onlyScale) == onlyScale {
							// | m0 0  0   0 |
							// | 0  m5 0   0 |
							// | 0  0  m10 0 |
							// | 0  0  0   1 |
							sx, sy, sz := args[0], args[10], args[20]
							if sx.EqualIgnoringWhitespace(sy) && sz.IsOne() {
								token.Text = "scale"
								*token.Children = args[:1]
							} else if sy.IsOne() && sz.IsOne() {
								token.Text = "scaleX"
								*token.Children = args[:1]
							} else if sx.IsOne() && sz.IsOne() {
								token.Text = "scaleY"
								*token.Children = args[10:11]
							} else if sx.IsOne() && sy.IsOne() {
								token.Text = "scaleZ"
								*token.Children = args[20:21]
							} else if sz.IsOne() {
								token.Text = "scale"
								*token.Children = append(args[0:2], args[10])
							} else {
								token.Text = "scale3d"
								*token.Children = append(append(args[0:2], args[10:12]...), args[20])
							}
						} else if (mask & only2D) == only2D {
							// | m0 m4 0 m12 |
							// | m1 m5 0 m13 |
							// | 0  0  1 0   |
							// | 0  0  0 1   |
							token.Text = "matrix"
							*token.Children = append(append(args[0:4], args[8:12]...), args[24:27]...)
						}

						// Note: A "matrix3d" cannot be directly converted into a "translate3d"
						// because "translate3d" requires units while "matrix3d" requires no
						// units. I'm not sure exactly what the semantics are so I'm not
						// sure if you can just add "px" or not. Even if that did work,
						// you still couldn't substitute values containing "var()" since
						// units would still not be substituted in that case.
					}

				case "translate3d":
					// specifies a 3D translation by the vector [tx,ty,tz], with tx,
					// ty and tz being the first, second and third translation-value
					// parameters respectively.
					if n == 5 {
						tx, ty, tz := &args[0], &args[2], &args[4]
						tx.TurnLengthOrPercentageIntoNumberIfZero()
						ty.TurnLengthOrPercentageIntoNumberIfZero()
						tz.TurnLengthIntoNumberIfZero()
						if ty.IsZero() && tz.IsZero() {
							// "translate3d(tx, 0, 0)" => "translate(tx)"
							token.Text = "translate"
							*token.Children = args[:1]
						} else if tx.IsZero() && tz.IsZero() {
							// "translate3d(0, ty, 0)" => "translateY(ty)"
							token.Text = "translateY"
							*token.Children = args[2:3]
						} else if tx.IsZero() && ty.IsZero() {
							// "translate3d(0, 0, tz)" => "translateZ(tz)"
							token.Text = "translateZ"
							*token.Children = args[4:]
						} else if tz.IsZero() {
							// "translate3d(tx, ty, 0)" => "translate(tx, ty)"
							token.Text = "translate"
							*token.Children = args[:3]
						}
					}

				case "translatez":
					// specifies a 3D translation by the vector [0,0,tz] with the given
					// amount in the Z direction.
					if n == 1 {
						args[0].TurnLengthIntoNumberIfZero()
					}

				case "scale3d":
					// specifies a 3D scale operation by the [sx,sy,sz] scaling vector
					// described by the 3 parameters.
					if n == 5 {
						sx, sy, sz := &args[0], &args[2], &args[4]
						turnPercentIntoNumberIfShorter(sx)
						turnPercentIntoNumberIfShorter(sy)
						turnPercentIntoNumberIfShorter(sz)
						if sx.EqualIgnoringWhitespace(*sy) && sz.IsOne() {
							// "scale3d(s, s, 1)" => "scale(s)"
							token.Text = "scale"
							*token.Children = args[:1]
						} else if sy.IsOne() && sz.IsOne() {
							// "scale3d(sx, 1, 1)" => "scaleX(sx)"
							token.Text = "scaleX"
							*token.Children = args[:1]
						} else if sx.IsOne() && sz.IsOne() {
							// "scale3d(1, sy, 1)" => "scaleY(sy)"
							token.Text = "scaleY"
							*token.Children = args[2:3]
						} else if sx.IsOne() && sy.IsOne() {
							// "scale3d(1, 1, sz)" => "scaleZ(sz)"
							token.Text = "scaleZ"
							*token.Children = args[4:]
						} else if sz.IsOne() {
							// "scale3d(sx, sy, 1)" => "scale(sx, sy)"
							token.Text = "scale"
							*token.Children = args[:3]
						}
					}

				case "scalez":
					// specifies a 3D scale operation using the [1,1,sz] scaling vector,
					// where sz is given as the parameter.
					if n == 1 {
						turnPercentIntoNumberIfShorter(&args[0])
					}

				case "rotate3d":
					// specifies a 3D rotation by the angle specified in last parameter
					// about the [x,y,z] direction vector described by the first three
					// parameters. A direction vector that cannot be normalized, such as
					// [0,0,0], will cause the rotation to not be applied.
					if n == 7 {
						x, y, z, angle := &args[0], &args[2], &args[4], &args[6]
						angle.TurnLengthIntoNumberIfZero()
						if x.IsOne() && y.IsZero() && z.IsZero() {
							// "rotate3d(1, 0, 0, angle)" => "rotateX(angle)"
							token.Text = "rotateX"
							*token.Children = args[6:]
						} else if x.IsZero() && y.IsOne() && z.IsZero() {
							// "rotate3d(0, 1, 0, angle)" => "rotateY(angle)"
							token.Text = "rotateY"
							*token.Children = args[6:]
						} else if x.IsZero() && y.IsZero() && z.IsOne() {
							// "rotate3d(0, 0, 1, angle)" => "rotate(angle)"
							token.Text = "rotate"
							*token.Children = args[6:]
						}
					}

				case "rotatex":
					// same as rotate3d(1, 0, 0, <angle>).
					if n == 1 {
						args[0].TurnLengthIntoNumberIfZero()
					}

				case "rotatey":
					// same as rotate3d(0, 1, 0, <angle>).
					if n == 1 {
						args[0].TurnLengthIntoNumberIfZero()
					}

				case "rotatez":
					// same as rotate3d(0, 0, 1, <angle>), which is a 3d transform
					// equivalent to the 2d transform rotate(<angle>).
					if n == 1 {
						// "rotateZ(angle)" => "rotate(angle)"
						token.Text = "rotate"
						args[0].TurnLengthIntoNumberIfZero()
					}

				case "perspective":
					// specifies a perspective projection matrix. This matrix scales
					// points in X and Y based on their Z value, scaling points with
					// positive Z values away from the origin, and those with negative Z
					// values towards the origin. Points on the z=0 plane are unchanged.
					// The parameter represents the distance of the z=0 plane from the
					// viewer.
					if n == 1 {
						args[0].TurnLengthIntoNumberIfZero()
					}
				}

				// Trim whitespace at the ends
				if args := *token.Children; len(args) > 0 {
					args[0].Whitespace &= ^css_ast.WhitespaceBefore
					args[len(args)-1].Whitespace &= ^css_ast.WhitespaceAfter
				}
			}
		}
	}

	return tokens
}
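
// Illustrative note (editor examples, not part of the original source):
// "matrix(2, 0, 0, 2, 0, 0)" mangles to "scale(2)", "translate3d(0, 0, 10px)"
// to "translateZ(10px)", and "rotate3d(0, 0, 1, 45deg)" to "rotate(45deg)".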
1361
vendor/github.com/evanw/esbuild/internal/css_parser/css_parser.go
generated
vendored
File diff suppressed because it is too large
342
vendor/github.com/evanw/esbuild/internal/css_parser/css_parser_selector.go
generated
vendored
@ -1,342 +0,0 @@
package css_parser

import (
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func (p *parser) parseSelectorList() (list []css_ast.ComplexSelector, ok bool) {
	// Parse the first selector
	p.eat(css_lexer.TWhitespace)
	sel, good := p.parseComplexSelector()
	if !good {
		return
	}
	list = append(list, sel)

	// Parse the remaining selectors
	for {
		p.eat(css_lexer.TWhitespace)
		if !p.eat(css_lexer.TComma) {
			break
		}
		p.eat(css_lexer.TWhitespace)
		sel, good := p.parseComplexSelector()
		if !good {
			return
		}
		list = append(list, sel)
	}

	ok = true
	return
}

func (p *parser) parseComplexSelector() (result css_ast.ComplexSelector, ok bool) {
	// Parent
	sel, good := p.parseCompoundSelector()
	if !good {
		return
	}
	result.Selectors = append(result.Selectors, sel)

	for {
		p.eat(css_lexer.TWhitespace)
		if p.peek(css_lexer.TEndOfFile) || p.peek(css_lexer.TComma) || p.peek(css_lexer.TOpenBrace) {
			break
		}

		// Optional combinator
		combinator := p.parseCombinator()
		if combinator != "" {
			p.eat(css_lexer.TWhitespace)
		}

		// Child
		sel, good := p.parseCompoundSelector()
		if !good {
			return
		}
		sel.Combinator = combinator
		result.Selectors = append(result.Selectors, sel)
	}

	ok = true
	return
}

func (p *parser) nameToken() css_ast.NameToken {
	return css_ast.NameToken{
		Kind: p.current().Kind,
		Text: p.decoded(),
	}
}

func (p *parser) parseCompoundSelector() (sel css_ast.CompoundSelector, ok bool) {
	// This is an extension: https://drafts.csswg.org/css-nesting-1/
	if p.eat(css_lexer.TDelimAmpersand) {
		sel.HasNestPrefix = true
	}

	// Parse the type selector
	switch p.current().Kind {
	case css_lexer.TDelimBar, css_lexer.TIdent, css_lexer.TDelimAsterisk:
		nsName := css_ast.NamespacedName{}
		if !p.peek(css_lexer.TDelimBar) {
			nsName.Name = p.nameToken()
			p.advance()
		} else {
			// Hack: Create an empty "identifier" to represent this
			nsName.Name.Kind = css_lexer.TIdent
		}
		if p.eat(css_lexer.TDelimBar) {
			if !p.peek(css_lexer.TIdent) && !p.peek(css_lexer.TDelimAsterisk) {
				p.expect(css_lexer.TIdent)
				return
			}
			prefix := nsName.Name
			nsName.NamespacePrefix = &prefix
			nsName.Name = p.nameToken()
			p.advance()
		}
		sel.TypeSelector = &nsName
	}

	// Parse the subclass selectors
subclassSelectors:
	for {
		switch p.current().Kind {
		case css_lexer.THash:
			if !p.current().IsID {
				break subclassSelectors
			}
			name := p.decoded()
			sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSHash{Name: name})
			p.advance()

		case css_lexer.TDelimDot:
			p.advance()
			name := p.decoded()
			sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSClass{Name: name})
			p.expect(css_lexer.TIdent)

		case css_lexer.TOpenBracket:
			p.advance()
			attr, good := p.parseAttributeSelector()
			if !good {
				return
			}
			sel.SubclassSelectors = append(sel.SubclassSelectors, &attr)

		case css_lexer.TColon:
			if p.next().Kind == css_lexer.TColon {
				// Special-case the start of the pseudo-element selector section
				for p.current().Kind == css_lexer.TColon {
					isElement := p.next().Kind == css_lexer.TColon
					if isElement {
						p.advance()
					}
					pseudo := p.parsePseudoClassSelector()

					// https://www.w3.org/TR/selectors-4/#single-colon-pseudos
					// The four Level 2 pseudo-elements (::before, ::after, ::first-line,
					// and ::first-letter) may, for legacy reasons, be represented using
					// the <pseudo-class-selector> grammar, with only a single ":"
					// character at their start.
					if p.options.MangleSyntax && isElement && len(pseudo.Args) == 0 {
						switch pseudo.Name {
						case "before", "after", "first-line", "first-letter":
							isElement = false
						}
					}

					pseudo.IsElement = isElement
					sel.SubclassSelectors = append(sel.SubclassSelectors, &pseudo)
				}
				break subclassSelectors
			}
			pseudo := p.parsePseudoClassSelector()
			sel.SubclassSelectors = append(sel.SubclassSelectors, &pseudo)

		default:
			break subclassSelectors
		}
	}

	// The compound selector must be non-empty
	if !sel.HasNestPrefix && sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0 {
		p.unexpected()
		return
	}

	ok = true
	return
}
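
// Illustrative note (editor example, not part of the original source): the
// compound selector "svg|circle.big#logo:hover" parses into a namespaced type
// selector (svg|circle) followed by three subclass selectors: a class (.big),
// an ID (#logo), and a pseudo-class (:hover).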

func (p *parser) parseAttributeSelector() (attr css_ast.SSAttribute, ok bool) {
	// Parse the namespaced name
	switch p.current().Kind {
	case css_lexer.TDelimBar, css_lexer.TDelimAsterisk:
		// "[|x]"
		// "[*|x]"
		if p.peek(css_lexer.TDelimAsterisk) {
			prefix := p.nameToken()
			p.advance()
			attr.NamespacedName.NamespacePrefix = &prefix
		} else {
			// "[|attr]" is equivalent to "[attr]". From the specification:
			// "In keeping with the Namespaces in the XML recommendation, default
			// namespaces do not apply to attributes, therefore attribute selectors
			// without a namespace component apply only to attributes that have no
			// namespace (equivalent to |attr)."
		}
		if !p.expect(css_lexer.TDelimBar) {
			return
		}
		attr.NamespacedName.Name = p.nameToken()
		if !p.expect(css_lexer.TIdent) {
			return
		}

	default:
		// "[x]"
		// "[x|y]"
		attr.NamespacedName.Name = p.nameToken()
		if !p.expect(css_lexer.TIdent) {
			return
		}
		if p.next().Kind != css_lexer.TDelimEquals && p.eat(css_lexer.TDelimBar) {
			prefix := attr.NamespacedName.Name
			attr.NamespacedName.NamespacePrefix = &prefix
			attr.NamespacedName.Name = p.nameToken()
			if !p.expect(css_lexer.TIdent) {
				return
			}
		}
	}

	// Parse the optional matcher operator
	p.eat(css_lexer.TWhitespace)
	if p.eat(css_lexer.TDelimEquals) {
		attr.MatcherOp = "="
	} else {
		switch p.current().Kind {
		case css_lexer.TDelimTilde:
			attr.MatcherOp = "~="
		case css_lexer.TDelimBar:
			attr.MatcherOp = "|="
		case css_lexer.TDelimCaret:
			attr.MatcherOp = "^="
		case css_lexer.TDelimDollar:
			attr.MatcherOp = "$="
		case css_lexer.TDelimAsterisk:
			attr.MatcherOp = "*="
		}
		if attr.MatcherOp != "" {
			p.advance()
			p.expect(css_lexer.TDelimEquals)
		}
	}

	// Parse the optional matcher value
	if attr.MatcherOp != "" {
		p.eat(css_lexer.TWhitespace)
		if !p.peek(css_lexer.TString) && !p.peek(css_lexer.TIdent) {
			p.unexpected()
		}
		attr.MatcherValue = p.decoded()
		p.advance()
		p.eat(css_lexer.TWhitespace)
		if p.peek(css_lexer.TIdent) {
			if modifier := p.decoded(); len(modifier) == 1 {
				if c := modifier[0]; c == 'i' || c == 'I' || c == 's' || c == 'S' {
					attr.MatcherModifier = c
					p.advance()
				}
			}
		}
	}

	p.expect(css_lexer.TCloseBracket)
	ok = true
	return
}

func (p *parser) parsePseudoClassSelector() css_ast.SSPseudoClass {
	p.advance()

	if p.peek(css_lexer.TFunction) {
		text := p.decoded()
		p.advance()
		args := p.convertTokens(p.parseAnyValue())
		p.expect(css_lexer.TCloseParen)
		return css_ast.SSPseudoClass{Name: text, Args: args}
	}

	name := p.decoded()
	sel := css_ast.SSPseudoClass{}
	if p.expect(css_lexer.TIdent) {
		sel.Name = name
	}
	return sel
}

func (p *parser) parseAnyValue() []css_lexer.Token {
	// Reference: https://drafts.csswg.org/css-syntax-3/#typedef-declaration-value

	p.stack = p.stack[:0] // Reuse allocated memory
	start := p.index

loop:
	for {
		switch p.current().Kind {
		case css_lexer.TCloseParen, css_lexer.TCloseBracket, css_lexer.TCloseBrace:
			last := len(p.stack) - 1
			if last < 0 || !p.peek(p.stack[last]) {
				break loop
			}
			p.stack = p.stack[:last]

		case css_lexer.TSemicolon, css_lexer.TDelimExclamation:
			if len(p.stack) == 0 {
				break loop
			}

		case css_lexer.TOpenParen, css_lexer.TFunction:
			p.stack = append(p.stack, css_lexer.TCloseParen)

		case css_lexer.TOpenBracket:
			p.stack = append(p.stack, css_lexer.TCloseBracket)

		case css_lexer.TOpenBrace:
			p.stack = append(p.stack, css_lexer.TCloseBrace)
		}

		p.advance()
	}

	tokens := p.tokens[start:p.index]
	if len(tokens) == 0 {
		p.unexpected()
	}
	return tokens
}

func (p *parser) parseCombinator() string {
	switch p.current().Kind {
	case css_lexer.TDelimGreaterThan:
		p.advance()
		return ">"

	case css_lexer.TDelimPlus:
		p.advance()
		return "+"

	case css_lexer.TDelimTilde:
		p.advance()
		return "~"

	default:
		return ""
	}
}
575
vendor/github.com/evanw/esbuild/internal/css_parser/css_reduce_calc.go
generated
vendored
@ -1,575 +0,0 @@
package css_parser

import (
	"fmt"
	"math"
	"strconv"
	"strings"

	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
)

func (p *parser) tryToReduceCalcExpression(token css_ast.Token) css_ast.Token {
	if term := tryToParseCalcTerm(*token.Children); term != nil {
		whitespace := css_ast.WhitespaceBefore | css_ast.WhitespaceAfter
		if p.options.RemoveWhitespace {
			whitespace = 0
		}
		term = term.partiallySimplify()
		if result, ok := term.convertToToken(whitespace); ok {
			if result.Kind == css_lexer.TOpenParen {
				result.Kind = css_lexer.TFunction
				result.Text = "calc"
			}
			return result
		}
	}
	return token
}

// See: https://www.w3.org/TR/css-values-4/#calc-internal
type calcTerm interface {
	convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool)
	partiallySimplify() calcTerm
}

type calcSum struct {
	terms []calcTerm
}

type calcProduct struct {
	terms []calcTerm
}

type calcNegate struct {
	term calcTerm
}

type calcInvert struct {
	term calcTerm
}

type calcNumeric struct {
	number float64
	unit   string
}

type calcValue struct {
	token                css_ast.Token
	isInvalidPlusOrMinus bool
}

func floatToStringForCalc(a float64) (string, bool) {
	// Handle non-finite cases
	if math.IsNaN(a) || math.IsInf(a, 0) {
		return "", false
	}

	// Print the number as a string
	text := fmt.Sprintf("%.05f", a)
	for text[len(text)-1] == '0' {
		text = text[:len(text)-1]
	}
	if text[len(text)-1] == '.' {
		text = text[:len(text)-1]
	}
	if strings.HasPrefix(text, "0.") {
		text = text[1:]
	} else if strings.HasPrefix(text, "-0.") {
		text = "-" + text[2:]
	}

	// Bail if the number is not exactly represented
	if number, err := strconv.ParseFloat(text, 64); err != nil || number != a {
		return "", false
	}

	return text, true
}
|
|
||||||
|
|
||||||
func (c *calcSum) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
|
||||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
|
|
||||||
tokens := make([]css_ast.Token, 0, len(c.terms)*2)
|
|
||||||
|
|
||||||
// ALGORITHM DEVIATION: Avoid parenthesizing product nodes inside sum nodes
|
|
||||||
if product, ok := c.terms[0].(*calcProduct); ok {
|
|
||||||
token, ok := product.convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
tokens = append(tokens, *token.Children...)
|
|
||||||
} else {
|
|
||||||
token, ok := c.terms[0].convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
tokens = append(tokens, token)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, term := range c.terms[1:] {
|
|
||||||
// If child is a Negate node, append " - " to s, then serialize the Negate’s child and append the result to s.
|
|
||||||
if negate, ok := term.(*calcNegate); ok {
|
|
||||||
token, ok := negate.term.convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
tokens = append(tokens, css_ast.Token{
|
|
||||||
Kind: css_lexer.TDelimMinus,
|
|
||||||
Text: "-",
|
|
||||||
Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter,
|
|
||||||
}, token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// If child is a negative numeric value, append " - " to s, then serialize the negation of child as normal and append the result to s.
|
|
||||||
if numeric, ok := term.(*calcNumeric); ok && numeric.number < 0 {
|
|
||||||
clone := *numeric
|
|
||||||
clone.number = -clone.number
|
|
||||||
token, ok := clone.convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
tokens = append(tokens, css_ast.Token{
|
|
||||||
Kind: css_lexer.TDelimMinus,
|
|
||||||
Text: "-",
|
|
||||||
Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter,
|
|
||||||
}, token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, append " + " to s, then serialize child and append the result to s.
|
|
||||||
tokens = append(tokens, css_ast.Token{
|
|
||||||
Kind: css_lexer.TDelimPlus,
|
|
||||||
Text: "+",
|
|
||||||
Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter,
|
|
||||||
})
|
|
||||||
|
|
||||||
// ALGORITHM DEVIATION: Avoid parenthesizing product nodes inside sum nodes
|
|
||||||
if product, ok := term.(*calcProduct); ok {
|
|
||||||
token, ok := product.convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
tokens = append(tokens, *token.Children...)
|
|
||||||
} else {
|
|
||||||
token, ok := term.convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
tokens = append(tokens, token)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return css_ast.Token{
|
|
||||||
Kind: css_lexer.TOpenParen,
|
|
||||||
Text: "(",
|
|
||||||
Children: &tokens,
|
|
||||||
}, true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcProduct) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
|
||||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
|
|
||||||
tokens := make([]css_ast.Token, 0, len(c.terms)*2)
|
|
||||||
token, ok := c.terms[0].convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
tokens = append(tokens, token)
|
|
||||||
|
|
||||||
for _, term := range c.terms[1:] {
|
|
||||||
// If child is an Invert node, append " / " to s, then serialize the Invert’s child and append the result to s.
|
|
||||||
if invert, ok := term.(*calcInvert); ok {
|
|
||||||
token, ok := invert.term.convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
tokens = append(tokens, css_ast.Token{
|
|
||||||
Kind: css_lexer.TDelimSlash,
|
|
||||||
Text: "/",
|
|
||||||
Whitespace: whitespace,
|
|
||||||
}, token)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, append " * " to s, then serialize child and append the result to s.
|
|
||||||
token, ok := term.convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
tokens = append(tokens, css_ast.Token{
|
|
||||||
Kind: css_lexer.TDelimAsterisk,
|
|
||||||
Text: "*",
|
|
||||||
Whitespace: whitespace,
|
|
||||||
}, token)
|
|
||||||
}
|
|
||||||
|
|
||||||
return css_ast.Token{
|
|
||||||
Kind: css_lexer.TOpenParen,
|
|
||||||
Text: "(",
|
|
||||||
Children: &tokens,
|
|
||||||
}, true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcNegate) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
|
||||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
|
|
||||||
token, ok := c.term.convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
return css_ast.Token{
|
|
||||||
Kind: css_lexer.TOpenParen,
|
|
||||||
Text: "(",
|
|
||||||
Children: &[]css_ast.Token{
|
|
||||||
{Kind: css_lexer.TNumber, Text: "-1"},
|
|
||||||
{Kind: css_lexer.TDelimSlash, Text: "*", Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter},
|
|
||||||
token,
|
|
||||||
},
|
|
||||||
}, true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcInvert) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
|
||||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
|
|
||||||
token, ok := c.term.convertToToken(whitespace)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
return css_ast.Token{
|
|
||||||
Kind: css_lexer.TOpenParen,
|
|
||||||
Text: "(",
|
|
||||||
Children: &[]css_ast.Token{
|
|
||||||
{Kind: css_lexer.TNumber, Text: "1"},
|
|
||||||
{Kind: css_lexer.TDelimSlash, Text: "/", Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter},
|
|
||||||
token,
|
|
||||||
},
|
|
||||||
}, true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcNumeric) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
|
||||||
text, ok := floatToStringForCalc(c.number)
|
|
||||||
if !ok {
|
|
||||||
return css_ast.Token{}, false
|
|
||||||
}
|
|
||||||
if c.unit == "" {
|
|
||||||
return css_ast.Token{
|
|
||||||
Kind: css_lexer.TNumber,
|
|
||||||
Text: text,
|
|
||||||
}, true
|
|
||||||
} else if c.unit == "%" {
|
|
||||||
return css_ast.Token{
|
|
||||||
Kind: css_lexer.TPercentage,
|
|
||||||
Text: text + "%",
|
|
||||||
}, true
|
|
||||||
} else {
|
|
||||||
return css_ast.Token{
|
|
||||||
Kind: css_lexer.TDimension,
|
|
||||||
Text: text + c.unit,
|
|
||||||
UnitOffset: uint16(len(text)),
|
|
||||||
}, true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcValue) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
|
|
||||||
t := c.token
|
|
||||||
t.Whitespace = 0
|
|
||||||
return t, true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcSum) partiallySimplify() calcTerm {
|
|
||||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
|
|
||||||
|
|
||||||
// For each of root’s children that are Sum nodes, replace them with their children.
|
|
||||||
terms := make([]calcTerm, 0, len(c.terms))
|
|
||||||
for _, term := range c.terms {
|
|
||||||
term = term.partiallySimplify()
|
|
||||||
if sum, ok := term.(*calcSum); ok {
|
|
||||||
terms = append(terms, sum.terms...)
|
|
||||||
} else {
|
|
||||||
terms = append(terms, term)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// For each set of root’s children that are numeric values with identical units, remove
|
|
||||||
// those children and replace them with a single numeric value containing the sum of the
|
|
||||||
// removed nodes, and with the same unit. (E.g. combine numbers, combine percentages,
|
|
||||||
// combine px values, etc.)
|
|
||||||
for i := 0; i < len(terms); i++ {
|
|
||||||
term := terms[i]
|
|
||||||
if numeric, ok := term.(*calcNumeric); ok {
|
|
||||||
end := i + 1
|
|
||||||
for j := end; j < len(terms); j++ {
|
|
||||||
term2 := terms[j]
|
|
||||||
if numeric2, ok := term2.(*calcNumeric); ok && numeric2.unit == numeric.unit {
|
|
||||||
numeric.number += numeric2.number
|
|
||||||
} else {
|
|
||||||
terms[end] = term2
|
|
||||||
end++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
terms = terms[:end]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If root has only a single child at this point, return the child.
|
|
||||||
if len(terms) == 1 {
|
|
||||||
return terms[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, return root.
|
|
||||||
c.terms = terms
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcProduct) partiallySimplify() calcTerm {
|
|
||||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
|
|
||||||
|
|
||||||
// For each of root’s children that are Product nodes, replace them with their children.
|
|
||||||
terms := make([]calcTerm, 0, len(c.terms))
|
|
||||||
for _, term := range c.terms {
|
|
||||||
term = term.partiallySimplify()
|
|
||||||
if product, ok := term.(*calcProduct); ok {
|
|
||||||
terms = append(terms, product.terms...)
|
|
||||||
} else {
|
|
||||||
terms = append(terms, term)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If root has multiple children that are numbers (not percentages or dimensions), remove
|
|
||||||
// them and replace them with a single number containing the product of the removed nodes.
|
|
||||||
for i, term := range terms {
|
|
||||||
if numeric, ok := term.(*calcNumeric); ok && numeric.unit == "" {
|
|
||||||
end := i + 1
|
|
||||||
for j := end; j < len(terms); j++ {
|
|
||||||
term2 := terms[j]
|
|
||||||
if numeric2, ok := term2.(*calcNumeric); ok && numeric2.unit == "" {
|
|
||||||
numeric.number *= numeric2.number
|
|
||||||
} else {
|
|
||||||
terms[end] = term2
|
|
||||||
end++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
terms = terms[:end]
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If root contains only numeric values and/or Invert nodes containing numeric values,
|
|
||||||
// and multiplying the types of all the children (noting that the type of an Invert
|
|
||||||
// node is the inverse of its child’s type) results in a type that matches any of the
|
|
||||||
// types that a math function can resolve to, return the result of multiplying all the
|
|
||||||
// values of the children (noting that the value of an Invert node is the reciprocal
|
|
||||||
// of its child’s value), expressed in the result’s canonical unit.
|
|
||||||
if len(terms) == 2 {
|
|
||||||
// Right now, only handle the case of two numbers, one of which has no unit
|
|
||||||
if first, ok := terms[0].(*calcNumeric); ok {
|
|
||||||
if second, ok := terms[1].(*calcNumeric); ok {
|
|
||||||
if first.unit == "" {
|
|
||||||
second.number *= first.number
|
|
||||||
return second
|
|
||||||
}
|
|
||||||
if second.unit == "" {
|
|
||||||
first.number *= second.number
|
|
||||||
return first
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ALGORITHM DEVIATION: Divide instead of multiply if the reciprocal is shorter
|
|
||||||
for i := 1; i < len(terms); i++ {
|
|
||||||
if numeric, ok := terms[i].(*calcNumeric); ok {
|
|
||||||
reciprocal := 1 / numeric.number
|
|
||||||
if multiply, ok := floatToStringForCalc(numeric.number); ok {
|
|
||||||
if divide, ok := floatToStringForCalc(reciprocal); ok && len(divide) < len(multiply) {
|
|
||||||
numeric.number = reciprocal
|
|
||||||
terms[i] = &calcInvert{term: numeric}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If root has only a single child at this point, return the child.
|
|
||||||
if len(terms) == 1 {
|
|
||||||
return terms[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, return root.
|
|
||||||
c.terms = terms
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcNegate) partiallySimplify() calcTerm {
|
|
||||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
|
|
||||||
|
|
||||||
c.term = c.term.partiallySimplify()
|
|
||||||
|
|
||||||
// If root’s child is a numeric value, return an equivalent numeric value, but with the value negated (0 - value).
|
|
||||||
if numeric, ok := c.term.(*calcNumeric); ok {
|
|
||||||
numeric.number = -numeric.number
|
|
||||||
return numeric
|
|
||||||
}
|
|
||||||
|
|
||||||
// If root’s child is a Negate node, return the child’s child.
|
|
||||||
if negate, ok := c.term.(*calcNegate); ok {
|
|
||||||
return negate.term
|
|
||||||
}
|
|
||||||
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcInvert) partiallySimplify() calcTerm {
|
|
||||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
|
|
||||||
|
|
||||||
c.term = c.term.partiallySimplify()
|
|
||||||
|
|
||||||
// If root’s child is a number (not a percentage or dimension) return the reciprocal of the child’s value.
|
|
||||||
if numeric, ok := c.term.(*calcNumeric); ok && numeric.unit == "" {
|
|
||||||
numeric.number = 1 / numeric.number
|
|
||||||
return numeric
|
|
||||||
}
|
|
||||||
|
|
||||||
// If root’s child is an Invert node, return the child’s child.
|
|
||||||
if invert, ok := c.term.(*calcInvert); ok {
|
|
||||||
return invert.term
|
|
||||||
}
|
|
||||||
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcNumeric) partiallySimplify() calcTerm {
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *calcValue) partiallySimplify() calcTerm {
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
|
|
||||||
func tryToParseCalcTerm(tokens []css_ast.Token) calcTerm {
|
|
||||||
// Specification: https://www.w3.org/TR/css-values-4/#calc-internal
|
|
||||||
terms := make([]calcTerm, len(tokens))
|
|
||||||
|
|
||||||
for i, token := range tokens {
|
|
||||||
var term calcTerm
|
|
||||||
if token.Kind == css_lexer.TFunction && token.Text == "var" {
|
|
||||||
// Using "var()" should bail because it can expand to any number of tokens
|
|
||||||
return nil
|
|
||||||
} else if token.Kind == css_lexer.TOpenParen || (token.Kind == css_lexer.TFunction && token.Text == "calc") {
|
|
||||||
term = tryToParseCalcTerm(*token.Children)
|
|
||||||
if term == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
} else if token.Kind == css_lexer.TNumber {
|
|
||||||
if number, err := strconv.ParseFloat(token.Text, 64); err == nil {
|
|
||||||
term = &calcNumeric{number: number}
|
|
||||||
} else {
|
|
||||||
term = &calcValue{token: token}
|
|
||||||
}
|
|
||||||
} else if token.Kind == css_lexer.TPercentage {
|
|
||||||
if number, err := strconv.ParseFloat(token.PercentageValue(), 64); err == nil {
|
|
||||||
term = &calcNumeric{number: number, unit: "%"}
|
|
||||||
} else {
|
|
||||||
term = &calcValue{token: token}
|
|
||||||
}
|
|
||||||
} else if token.Kind == css_lexer.TDimension {
|
|
||||||
if number, err := strconv.ParseFloat(token.DimensionValue(), 64); err == nil {
|
|
||||||
term = &calcNumeric{number: number, unit: token.DimensionUnit()}
|
|
||||||
} else {
|
|
||||||
term = &calcValue{token: token}
|
|
||||||
}
|
|
||||||
} else if token.Kind == css_lexer.TIdent && strings.EqualFold(token.Text, "Infinity") {
|
|
||||||
term = &calcNumeric{number: math.Inf(1)}
|
|
||||||
} else if token.Kind == css_lexer.TIdent && strings.EqualFold(token.Text, "-Infinity") {
|
|
||||||
term = &calcNumeric{number: math.Inf(-1)}
|
|
||||||
} else if token.Kind == css_lexer.TIdent && strings.EqualFold(token.Text, "NaN") {
|
|
||||||
term = &calcNumeric{number: math.NaN()}
|
|
||||||
} else {
|
|
||||||
term = &calcValue{
|
|
||||||
token: token,
|
|
||||||
|
|
||||||
// From the specification: "In addition, whitespace is required on both sides of the
|
|
||||||
// + and - operators. (The * and / operators can be used without white space around them.)"
|
|
||||||
isInvalidPlusOrMinus: i > 0 && i+1 < len(tokens) &&
|
|
||||||
(token.Kind == css_lexer.TDelimPlus || token.Kind == css_lexer.TDelimMinus) &&
|
|
||||||
(((token.Whitespace&css_ast.WhitespaceBefore) == 0 && (tokens[i-1].Whitespace&css_ast.WhitespaceAfter) == 0) ||
|
|
||||||
(token.Whitespace&css_ast.WhitespaceAfter) == 0 && (tokens[i+1].Whitespace&css_ast.WhitespaceBefore) == 0),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
terms[i] = term
|
|
||||||
}
|
|
||||||
|
|
||||||
// Collect children into Product and Invert nodes
|
|
||||||
first := 1
|
|
||||||
for first+1 < len(terms) {
|
|
||||||
// If this is a "*" or "/" operator
|
|
||||||
if value, ok := terms[first].(*calcValue); ok && (value.token.Kind == css_lexer.TDelimAsterisk || value.token.Kind == css_lexer.TDelimSlash) {
|
|
||||||
// Scan over the run
|
|
||||||
last := first
|
|
||||||
for last+3 < len(terms) {
|
|
||||||
if value, ok := terms[last+2].(*calcValue); ok && (value.token.Kind == css_lexer.TDelimAsterisk || value.token.Kind == css_lexer.TDelimSlash) {
|
|
||||||
last += 2
|
|
||||||
} else {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate a node for the run
|
|
||||||
product := calcProduct{terms: make([]calcTerm, (last-first)/2+2)}
|
|
||||||
for i := range product.terms {
|
|
||||||
term := terms[first+i*2-1]
|
|
||||||
if i > 0 && terms[first+i*2-2].(*calcValue).token.Kind == css_lexer.TDelimSlash {
|
|
||||||
term = &calcInvert{term: term}
|
|
||||||
}
|
|
||||||
product.terms[i] = term
|
|
||||||
}
|
|
||||||
|
|
||||||
// Replace the run with a single node
|
|
||||||
terms[first-1] = &product
|
|
||||||
terms = append(terms[:first], terms[last+2:]...)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
first++
|
|
||||||
}
|
|
||||||
|
|
||||||
// Collect children into Sum and Negate nodes
|
|
||||||
first = 1
|
|
||||||
for first+1 < len(terms) {
|
|
||||||
// If this is a "+" or "-" operator
|
|
||||||
if value, ok := terms[first].(*calcValue); ok && !value.isInvalidPlusOrMinus &&
|
|
||||||
(value.token.Kind == css_lexer.TDelimPlus || value.token.Kind == css_lexer.TDelimMinus) {
|
|
||||||
// Scan over the run
|
|
||||||
last := first
|
|
||||||
for last+3 < len(terms) {
|
|
||||||
if value, ok := terms[last+2].(*calcValue); ok && !value.isInvalidPlusOrMinus &&
|
|
||||||
(value.token.Kind == css_lexer.TDelimPlus || value.token.Kind == css_lexer.TDelimMinus) {
|
|
||||||
last += 2
|
|
||||||
} else {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate a node for the run
|
|
||||||
sum := calcSum{terms: make([]calcTerm, (last-first)/2+2)}
|
|
||||||
for i := range sum.terms {
|
|
||||||
term := terms[first+i*2-1]
|
|
||||||
if i > 0 && terms[first+i*2-2].(*calcValue).token.Kind == css_lexer.TDelimMinus {
|
|
||||||
term = &calcNegate{term: term}
|
|
||||||
}
|
|
||||||
sum.terms[i] = term
|
|
||||||
}
|
|
||||||
|
|
||||||
// Replace the run with a single node
|
|
||||||
terms[first-1] = &sum
|
|
||||||
terms = append(terms[:first], terms[last+2:]...)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
first++
|
|
||||||
}
|
|
||||||
|
|
||||||
// This only succeeds if everything reduces to a single term
|
|
||||||
if len(terms) == 1 {
|
|
||||||
return terms[0]
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
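The partiallySimplify methods above fold a calc() sum by merging numeric terms that share a unit, so calc(10px + 5% + 6px + 2%) can collapse to calc(16px + 7%). A minimal sketch of just that folding step, assuming a simplified num type in place of the calcTerm interface (foldSum and num are hypothetical names, not part of esbuild):

package main

import "fmt"

// num stands in for the numeric terms of a calc() sum. The real code works
// on the calcTerm interface and leaves non-numeric terms in place.
type num struct {
	value float64
	unit  string
}

// foldSum merges terms with identical units, keeping first-occurrence order,
// mirroring the sum-combining step of partiallySimplify.
func foldSum(terms []num) []num {
	var out []num
	for _, t := range terms {
		merged := false
		for i := range out {
			if out[i].unit == t.unit {
				out[i].value += t.value
				merged = true
				break
			}
		}
		if !merged {
			out = append(out, t)
		}
	}
	return out
}

func main() {
	// calc(10px + 5% + 6px + 2%) folds to calc(16px + 7%)
	fmt.Println(foldSum([]num{{10, "px"}, {5, "%"}, {6, "px"}, {2, "%"}}))
}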
725 vendor/github.com/evanw/esbuild/internal/css_printer/css_printer.go generated vendored
@@ -1,725 +0,0 @@
package css_printer

import (
	"fmt"
	"strings"
	"unicode/utf8"

	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/config"
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_lexer"
	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/sourcemap"
)

const quoteForURL byte = 0

type printer struct {
	options                Options
	importRecords          []ast.ImportRecord
	css                    []byte
	extractedLegalComments map[string]bool
	builder                sourcemap.ChunkBuilder
}

type Options struct {
	RemoveWhitespace  bool
	ASCIIOnly         bool
	AddSourceMappings bool
	LegalComments     config.LegalComments

	// If we're writing out a source map, this table of line start indices lets
	// us do binary search on to figure out what line a given AST node came from
	LineOffsetTables []sourcemap.LineOffsetTable

	// This will be present if the input file had a source map. In that case we
	// want to map all the way back to the original input file(s).
	InputSourceMap *sourcemap.SourceMap
}

type PrintResult struct {
	CSS                    []byte
	ExtractedLegalComments map[string]bool
	SourceMapChunk         sourcemap.Chunk
}

func Print(tree css_ast.AST, options Options) PrintResult {
	p := printer{
		options:       options,
		importRecords: tree.ImportRecords,
		builder:       sourcemap.MakeChunkBuilder(options.InputSourceMap, options.LineOffsetTables),
	}
	for _, rule := range tree.Rules {
		p.printRule(rule, 0, false)
	}
	return PrintResult{
		CSS:                    p.css,
		ExtractedLegalComments: p.extractedLegalComments,
		SourceMapChunk:         p.builder.GenerateChunk(p.css),
	}
}

func (p *printer) printRule(rule css_ast.Rule, indent int32, omitTrailingSemicolon bool) {
	if r, ok := rule.Data.(*css_ast.RComment); ok {
		switch p.options.LegalComments {
		case config.LegalCommentsNone:
			return

		case config.LegalCommentsEndOfFile,
			config.LegalCommentsLinkedWithComment,
			config.LegalCommentsExternalWithoutComment:
			if p.extractedLegalComments == nil {
				p.extractedLegalComments = make(map[string]bool)
			}
			p.extractedLegalComments[r.Text] = true
			return
		}
	}

	if p.options.AddSourceMappings {
		p.builder.AddSourceMapping(rule.Loc, p.css)
	}

	if !p.options.RemoveWhitespace {
		p.printIndent(indent)
	}

	switch r := rule.Data.(type) {
	case *css_ast.RAtCharset:
		// It's not valid to remove the space in between these two tokens
		p.print("@charset ")

		// It's not valid to print the string with single quotes
		p.printQuotedWithQuote(r.Encoding, '"')
		p.print(";")

	case *css_ast.RAtImport:
		if p.options.RemoveWhitespace {
			p.print("@import")
		} else {
			p.print("@import ")
		}
		p.printQuoted(p.importRecords[r.ImportRecordIndex].Path.Text)
		p.printTokens(r.ImportConditions, printTokensOpts{})
		p.print(";")

	case *css_ast.RAtKeyframes:
		p.print("@")
		p.printIdent(r.AtToken, identNormal, mayNeedWhitespaceAfter)
		p.print(" ")
		if r.Name == "" {
			p.print("\"\"")
		} else {
			p.printIdent(r.Name, identNormal, canDiscardWhitespaceAfter)
		}
		if !p.options.RemoveWhitespace {
			p.print(" ")
		}
		if p.options.RemoveWhitespace {
			p.print("{")
		} else {
			p.print("{\n")
		}
		indent++
		for _, block := range r.Blocks {
			if !p.options.RemoveWhitespace {
				p.printIndent(indent)
			}
			for i, sel := range block.Selectors {
				if i > 0 {
					if p.options.RemoveWhitespace {
						p.print(",")
					} else {
						p.print(", ")
					}
				}
				p.print(sel)
			}
			if !p.options.RemoveWhitespace {
				p.print(" ")
			}
			p.printRuleBlock(block.Rules, indent)
			if !p.options.RemoveWhitespace {
				p.print("\n")
			}
		}
		indent--
		if !p.options.RemoveWhitespace {
			p.printIndent(indent)
		}
		p.print("}")

	case *css_ast.RKnownAt:
		p.print("@")
		whitespace := mayNeedWhitespaceAfter
		if len(r.Prelude) == 0 {
			whitespace = canDiscardWhitespaceAfter
		}
		p.printIdent(r.AtToken, identNormal, whitespace)
		if !p.options.RemoveWhitespace || len(r.Prelude) > 0 {
			p.print(" ")
		}
		p.printTokens(r.Prelude, printTokensOpts{})
		if !p.options.RemoveWhitespace && len(r.Prelude) > 0 {
			p.print(" ")
		}
		p.printRuleBlock(r.Rules, indent)

	case *css_ast.RUnknownAt:
		p.print("@")
		whitespace := mayNeedWhitespaceAfter
		if len(r.Prelude) == 0 {
			whitespace = canDiscardWhitespaceAfter
		}
		p.printIdent(r.AtToken, identNormal, whitespace)
		if (!p.options.RemoveWhitespace && r.Block != nil) || len(r.Prelude) > 0 {
			p.print(" ")
		}
		p.printTokens(r.Prelude, printTokensOpts{})
		if !p.options.RemoveWhitespace && r.Block != nil && len(r.Prelude) > 0 {
			p.print(" ")
		}
		if r.Block == nil {
			p.print(";")
		} else {
			p.printTokens(r.Block, printTokensOpts{})
		}

	case *css_ast.RSelector:
		p.printComplexSelectors(r.Selectors, indent)
		if !p.options.RemoveWhitespace {
			p.print(" ")
		}
		p.printRuleBlock(r.Rules, indent)

	case *css_ast.RQualified:
		hasWhitespaceAfter := p.printTokens(r.Prelude, printTokensOpts{})
		if !hasWhitespaceAfter && !p.options.RemoveWhitespace {
			p.print(" ")
		}
		p.printRuleBlock(r.Rules, indent)

	case *css_ast.RDeclaration:
		p.printIdent(r.KeyText, identNormal, canDiscardWhitespaceAfter)
		p.print(":")
		hasWhitespaceAfter := p.printTokens(r.Value, printTokensOpts{
			indent:        indent,
			isDeclaration: true,
		})
		if r.Important {
			if !hasWhitespaceAfter && !p.options.RemoveWhitespace && len(r.Value) > 0 {
				p.print(" ")
			}
			p.print("!important")
		}
		if !omitTrailingSemicolon {
			p.print(";")
		}

	case *css_ast.RBadDeclaration:
		p.printTokens(r.Tokens, printTokensOpts{})
		if !omitTrailingSemicolon {
			p.print(";")
		}

	case *css_ast.RComment:
		p.printIndentedComment(indent, r.Text)

	default:
		panic("Internal error")
	}

	if !p.options.RemoveWhitespace {
		p.print("\n")
	}
}

func (p *printer) printIndentedComment(indent int32, text string) {
	// Avoid generating a comment containing the character sequence "</style"
	text = helpers.EscapeClosingTag(text, "/style")

	// Re-indent multi-line comments
	for {
		newline := strings.IndexByte(text, '\n')
		if newline == -1 {
			break
		}
		p.print(text[:newline+1])
		if !p.options.RemoveWhitespace {
			p.printIndent(indent)
		}
		text = text[newline+1:]
	}
	p.print(text)
}

func (p *printer) printRuleBlock(rules []css_ast.Rule, indent int32) {
	if p.options.RemoveWhitespace {
		p.print("{")
	} else {
		p.print("{\n")
	}

	for i, decl := range rules {
		omitTrailingSemicolon := p.options.RemoveWhitespace && i+1 == len(rules)
		p.printRule(decl, indent+1, omitTrailingSemicolon)
	}

	if !p.options.RemoveWhitespace {
		p.printIndent(indent)
	}
	p.print("}")
}

func (p *printer) printComplexSelectors(selectors []css_ast.ComplexSelector, indent int32) {
	for i, complex := range selectors {
		if i > 0 {
			if p.options.RemoveWhitespace {
				p.print(",")
			} else {
				p.print(",\n")
				p.printIndent(indent)
			}
		}

		for j, compound := range complex.Selectors {
			p.printCompoundSelector(compound, j == 0, j+1 == len(complex.Selectors))
		}
	}
}

func (p *printer) printCompoundSelector(sel css_ast.CompoundSelector, isFirst bool, isLast bool) {
	if !isFirst && sel.Combinator == "" {
		// A space is required in between compound selectors if there is no
		// combinator in the middle. It's fine to convert "a + b" into "a+b"
		// but not to convert "a b" into "ab".
		p.print(" ")
	}

	if sel.HasNestPrefix {
		p.print("&")
	}

	if sel.Combinator != "" {
		if !p.options.RemoveWhitespace {
			p.print(" ")
		}
		p.print(sel.Combinator)
		if !p.options.RemoveWhitespace {
			p.print(" ")
		}
	}

	if sel.TypeSelector != nil {
		whitespace := mayNeedWhitespaceAfter
		if len(sel.SubclassSelectors) > 0 {
			// There is no chance of whitespace before a subclass selector or pseudo
			// class selector
			whitespace = canDiscardWhitespaceAfter
		}
		p.printNamespacedName(*sel.TypeSelector, whitespace)
	}

	for i, sub := range sel.SubclassSelectors {
		whitespace := mayNeedWhitespaceAfter

		// There is no chance of whitespace between subclass selectors
		if i+1 < len(sel.SubclassSelectors) {
			whitespace = canDiscardWhitespaceAfter
		}

		switch s := sub.(type) {
		case *css_ast.SSHash:
			p.print("#")

			// This deliberately does not use identHash. From the specification:
			// "In <id-selector>, the <hash-token>'s value must be an identifier."
			p.printIdent(s.Name, identNormal, whitespace)

		case *css_ast.SSClass:
			p.print(".")
			p.printIdent(s.Name, identNormal, whitespace)

		case *css_ast.SSAttribute:
			p.print("[")
			p.printNamespacedName(s.NamespacedName, canDiscardWhitespaceAfter)
			if s.MatcherOp != "" {
				p.print(s.MatcherOp)
				printAsIdent := false

				// Print the value as an identifier if it's possible
				if css_lexer.WouldStartIdentifierWithoutEscapes(s.MatcherValue) {
					printAsIdent = true
					for _, c := range s.MatcherValue {
						if !css_lexer.IsNameContinue(c) {
							printAsIdent = false
							break
						}
					}
				}

				if printAsIdent {
					p.printIdent(s.MatcherValue, identNormal, canDiscardWhitespaceAfter)
				} else {
					p.printQuoted(s.MatcherValue)
				}
			}
			if s.MatcherModifier != 0 {
				p.print(" ")
				p.print(string(rune(s.MatcherModifier)))
			}
			p.print("]")

		case *css_ast.SSPseudoClass:
			p.printPseudoClassSelector(*s, whitespace)
		}
	}
}

func (p *printer) printNamespacedName(nsName css_ast.NamespacedName, whitespace trailingWhitespace) {
	if nsName.NamespacePrefix != nil {
		switch nsName.NamespacePrefix.Kind {
		case css_lexer.TIdent:
			p.printIdent(nsName.NamespacePrefix.Text, identNormal, canDiscardWhitespaceAfter)
		case css_lexer.TDelimAsterisk:
			p.print("*")
		default:
			panic("Internal error")
		}

		p.print("|")
	}

	switch nsName.Name.Kind {
	case css_lexer.TIdent:
		p.printIdent(nsName.Name.Text, identNormal, whitespace)
	case css_lexer.TDelimAsterisk:
		p.print("*")
	case css_lexer.TDelimAmpersand:
		p.print("&")
	default:
		panic("Internal error")
	}
}

func (p *printer) printPseudoClassSelector(pseudo css_ast.SSPseudoClass, whitespace trailingWhitespace) {
	if pseudo.IsElement {
		p.print("::")
	} else {
		p.print(":")
	}

	if len(pseudo.Args) > 0 {
		p.printIdent(pseudo.Name, identNormal, canDiscardWhitespaceAfter)
		p.print("(")
		p.printTokens(pseudo.Args, printTokensOpts{})
		p.print(")")
	} else {
		p.printIdent(pseudo.Name, identNormal, whitespace)
	}
}

func (p *printer) print(text string) {
	p.css = append(p.css, text...)
}

func bestQuoteCharForString(text string, forURL bool) byte {
	forURLCost := 0
	singleCost := 2
	doubleCost := 2

	for _, c := range text {
		switch c {
		case '\'':
			forURLCost++
			singleCost++

		case '"':
			forURLCost++
			doubleCost++

		case '(', ')', ' ', '\t':
			forURLCost++

		case '\\', '\n', '\r', '\f':
			forURLCost++
			singleCost++
			doubleCost++
		}
	}

	// Quotes can sometimes be omitted for URL tokens
	if forURL && forURLCost < singleCost && forURLCost < doubleCost {
		return quoteForURL
	}

	// Prefer double quotes to single quotes if there is no cost difference
	if singleCost < doubleCost {
		return '\''
	}

	return '"'
}

func (p *printer) printQuoted(text string) {
	p.printQuotedWithQuote(text, bestQuoteCharForString(text, false))
}

type escapeKind uint8

const (
	escapeNone escapeKind = iota
	escapeBackslash
	escapeHex
)

func (p *printer) printWithEscape(c rune, escape escapeKind, remainingText string, mayNeedWhitespaceAfter bool) {
	var temp [utf8.UTFMax]byte

	if escape == escapeBackslash && ((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')) {
		// Hexadecimal characters cannot use a plain backslash escape
		escape = escapeHex
	}

	switch escape {
	case escapeNone:
		width := utf8.EncodeRune(temp[:], c)
		p.css = append(p.css, temp[:width]...)

	case escapeBackslash:
		p.css = append(p.css, '\\')
		width := utf8.EncodeRune(temp[:], c)
		p.css = append(p.css, temp[:width]...)

	case escapeHex:
		text := fmt.Sprintf("\\%x", c)
		p.css = append(p.css, text...)

		// Make sure the next character is not interpreted as part of the escape sequence
		if len(text) < 1+6 {
			if next := utf8.RuneLen(c); next < len(remainingText) {
				c = rune(remainingText[next])
				if c == ' ' || c == '\t' || (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F') {
					p.css = append(p.css, ' ')
				}
			} else if mayNeedWhitespaceAfter {
				// If the last character is a hexadecimal escape, print a space afterwards
				// for the escape sequence to consume. That way we're sure it won't
				// accidentally consume a semantically significant space afterward.
				p.css = append(p.css, ' ')
			}
		}
	}
}

func (p *printer) printQuotedWithQuote(text string, quote byte) {
	if quote != quoteForURL {
		p.css = append(p.css, quote)
	}

	for i, c := range text {
		escape := escapeNone

		switch c {
		case '\x00', '\r', '\n', '\f':
			// Use a hexadecimal escape for characters that would be invalid escapes
			escape = escapeHex

		case '\\', rune(quote):
			escape = escapeBackslash

		case '(', ')', ' ', '\t', '"', '\'':
			// These characters must be escaped in URL tokens
			if quote == quoteForURL {
				escape = escapeBackslash
			}

		case '/':
			// Avoid generating the sequence "</style" in CSS code
			if i >= 1 && text[i-1] == '<' && i+6 <= len(text) && strings.EqualFold(text[i+1:i+6], "style") {
				escape = escapeBackslash
			}

		default:
			if (p.options.ASCIIOnly && c >= 0x80) || c == '\uFEFF' {
				escape = escapeHex
			}
		}

		p.printWithEscape(c, escape, text[i:], false)
	}

	if quote != quoteForURL {
		p.css = append(p.css, quote)
	}
}

type identMode uint8

const (
	identNormal identMode = iota
	identHash
	identDimensionUnit
)

type trailingWhitespace uint8

const (
	mayNeedWhitespaceAfter trailingWhitespace = iota
	canDiscardWhitespaceAfter
)

func (p *printer) printIdent(text string, mode identMode, whitespace trailingWhitespace) {
	for i, c := range text {
		escape := escapeNone

		if p.options.ASCIIOnly && c >= 0x80 {
			escape = escapeHex
		} else if c == '\r' || c == '\n' || c == '\f' || c == '\uFEFF' {
			// Use a hexadecimal escape for characters that would be invalid escapes
			escape = escapeHex
		} else {
			// Escape non-identifier characters
			if !css_lexer.IsNameContinue(c) {
				escape = escapeBackslash
			}

			// Special escape behavior for the first character
			if i == 0 {
				switch mode {
				case identNormal:
					if !css_lexer.WouldStartIdentifierWithoutEscapes(text) {
						escape = escapeBackslash
					}

				case identDimensionUnit:
					if !css_lexer.WouldStartIdentifierWithoutEscapes(text) {
						escape = escapeBackslash
					} else if c >= '0' && c <= '9' {
						// Unit: "2x"
						escape = escapeHex
					} else if c == 'e' || c == 'E' {
						if len(text) >= 2 && text[1] >= '0' && text[1] <= '9' {
							// Unit: "e2x"
							escape = escapeBackslash
						} else if len(text) >= 3 && text[1] == '-' && text[2] >= '0' && text[2] <= '9' {
							// Unit: "e-2x"
							escape = escapeBackslash
						}
					}
				}
			}
		}

		// If the last character is a hexadecimal escape, print a space afterwards
		// for the escape sequence to consume. That way we're sure it won't
		// accidentally consume a semantically significant space afterward.
		mayNeedWhitespaceAfter := whitespace == mayNeedWhitespaceAfter && escape != escapeNone && i+utf8.RuneLen(c) == len(text)
		p.printWithEscape(c, escape, text[i:], mayNeedWhitespaceAfter)
	}
}

func (p *printer) printIndent(indent int32) {
	for i, n := 0, int(indent); i < n; i++ {
		p.css = append(p.css, "  "...)
	}
}

type printTokensOpts struct {
	indent        int32
	isDeclaration bool
}

func (p *printer) printTokens(tokens []css_ast.Token, opts printTokensOpts) bool {
	hasWhitespaceAfter := len(tokens) > 0 && (tokens[0].Whitespace&css_ast.WhitespaceBefore) != 0

	// Pretty-print long comma-separated declarations of 3 or more items
	isMultiLineValue := false
	if !p.options.RemoveWhitespace && opts.isDeclaration {
		commaCount := 0
		for _, t := range tokens {
			if t.Kind == css_lexer.TComma {
				commaCount++
			}
		}
		isMultiLineValue = commaCount >= 2
	}

	for i, t := range tokens {
		if t.Kind == css_lexer.TWhitespace {
			hasWhitespaceAfter = true
			continue
		}
		if hasWhitespaceAfter {
			if isMultiLineValue && (i == 0 || tokens[i-1].Kind == css_lexer.TComma) {
				p.print("\n")
				p.printIndent(opts.indent + 1)
			} else {
				p.print(" ")
			}
		}
		hasWhitespaceAfter = (t.Whitespace&css_ast.WhitespaceAfter) != 0 ||
			(i+1 < len(tokens) && (tokens[i+1].Whitespace&css_ast.WhitespaceBefore) != 0)

		whitespace := mayNeedWhitespaceAfter
		if !hasWhitespaceAfter {
			whitespace = canDiscardWhitespaceAfter
		}

		switch t.Kind {
		case css_lexer.TIdent:
			p.printIdent(t.Text, identNormal, whitespace)

		case css_lexer.TFunction:
			p.printIdent(t.Text, identNormal, whitespace)
			p.print("(")

		case css_lexer.TDimension:
			p.print(t.DimensionValue())
			p.printIdent(t.DimensionUnit(), identDimensionUnit, whitespace)

		case css_lexer.TAtKeyword:
			p.print("@")
			p.printIdent(t.Text, identNormal, whitespace)

		case css_lexer.THash:
			p.print("#")
			p.printIdent(t.Text, identHash, whitespace)

		case css_lexer.TString:
			p.printQuoted(t.Text)

		case css_lexer.TURL:
			text := p.importRecords[t.ImportRecordIndex].Path.Text
			p.print("url(")
			p.printQuotedWithQuote(text, bestQuoteCharForString(text, true))
			p.print(")")

		default:
			p.print(t.Text)
		}

		if t.Children != nil {
			p.printTokens(*t.Children, printTokensOpts{})

			switch t.Kind {
			case css_lexer.TFunction:
				p.print(")")

			case css_lexer.TOpenParen:
				p.print(")")

			case css_lexer.TOpenBrace:
				p.print("}")

			case css_lexer.TOpenBracket:
				p.print("]")
			}
		}
	}
	if hasWhitespaceAfter {
		p.print(" ")
	}
	return hasWhitespaceAfter
}
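bestQuoteCharForString above picks a quote character by counting how many characters each quoting style would have to escape and taking the cheaper one, preferring double quotes on a tie. A minimal sketch of the same cost comparison, assuming plain string quoting only (cheaperQuote is a hypothetical name; the unquoted-URL case is omitted):

package main

import "fmt"

// cheaperQuote counts the escapes each quote style would need and returns
// the cheaper quote character, preferring double quotes on a tie.
func cheaperQuote(text string) byte {
	singleCost, doubleCost := 2, 2 // two quote characters either way
	for _, c := range text {
		switch c {
		case '\'':
			singleCost++ // would need escaping inside single quotes
		case '"':
			doubleCost++ // would need escaping inside double quotes
		case '\\', '\n', '\r', '\f':
			singleCost++ // needs escaping under either style
			doubleCost++
		}
	}
	if singleCost < doubleCost {
		return '\''
	}
	return '"'
}

func main() {
	fmt.Printf("%c\n", cheaperQuote(`it's`))     // ' is costly here, pick "
	fmt.Printf("%c\n", cheaperQuote(`say "hi"`)) // " is costly here, pick '
}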
649 vendor/github.com/evanw/esbuild/internal/fs/filepath.go generated vendored
@@ -1,649 +0,0 @@
// Code in this file has been forked from the "filepath" module in the Go
|
|
||||||
// source code to work around bugs with the WebAssembly build target. More
|
|
||||||
// information about why here: https://github.com/golang/go/issues/43768.
|
|
||||||
|
|
||||||
////////////////////////////////////////////////////////////////////////////////
|
|
||||||
|
|
||||||
// Copyright (c) 2009 The Go Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Redistribution and use in source and binary forms, with or without
|
|
||||||
// modification, are permitted provided that the following conditions are
|
|
||||||
// met:
|
|
||||||
//
|
|
||||||
// * Redistributions of source code must retain the above copyright
|
|
||||||
// notice, this list of conditions and the following disclaimer.
|
|
||||||
// * Redistributions in binary form must reproduce the above
|
|
||||||
// copyright notice, this list of conditions and the following disclaimer
|
|
||||||
// in the documentation and/or other materials provided with the
|
|
||||||
// distribution.
|
|
||||||
// * Neither the name of Google Inc. nor the names of its
|
|
||||||
// contributors may be used to endorse or promote products derived from
|
|
||||||
// this software without specific prior written permission.
|
|
||||||
//
|
|
||||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
package fs
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"os"
|
|
||||||
"strings"
|
|
||||||
"syscall"
|
|
||||||
)
|
|
||||||
|
|
||||||
type goFilepath struct {
|
|
||||||
cwd string
|
|
||||||
isWindows bool
|
|
||||||
pathSeparator byte
|
|
||||||
}
|
|
||||||
|
|
||||||
func isSlash(c uint8) bool {
|
|
||||||
return c == '\\' || c == '/'
|
|
||||||
}
|
|
||||||
|
|
||||||
// reservedNames lists reserved Windows names. Search for PRN in
|
|
||||||
// https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file
|
|
||||||
// for details.
|
|
||||||
var reservedNames = []string{
|
|
||||||
"CON", "PRN", "AUX", "NUL",
|
|
||||||
"COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
|
|
||||||
"LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
|
|
||||||
}
|
|
||||||
|
|
||||||
// isReservedName returns true, if path is Windows reserved name.
|
|
||||||
// See reservedNames for the full list.
|
|
||||||
func isReservedName(path string) bool {
|
|
||||||
if len(path) == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for _, reserved := range reservedNames {
|
|
||||||
if strings.EqualFold(path, reserved) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsAbs reports whether the path is absolute.
|
|
||||||
func (fp goFilepath) isAbs(path string) bool {
|
|
||||||
if !fp.isWindows {
|
|
||||||
return strings.HasPrefix(path, "/")
|
|
||||||
}
|
|
||||||
if isReservedName(path) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
l := fp.volumeNameLen(path)
|
|
||||||
if l == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
path = path[l:]
|
|
||||||
if path == "" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return isSlash(path[0])
|
|
||||||
}
|
|
||||||
|
|
||||||
// Abs returns an absolute representation of path.
|
|
||||||
// If the path is not absolute it will be joined with the current
|
|
||||||
// working directory to turn it into an absolute path. The absolute
|
|
||||||
// path name for a given file is not guaranteed to be unique.
|
|
||||||
// Abs calls Clean on the result.
|
|
||||||
func (fp goFilepath) abs(path string) (string, error) {
|
|
||||||
if fp.isAbs(path) {
|
|
||||||
return fp.clean(path), nil
|
|
||||||
}
|
|
||||||
return fp.join([]string{fp.cwd, path}), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsPathSeparator reports whether c is a directory separator character.
|
|
||||||
func (fp goFilepath) isPathSeparator(c uint8) bool {
|
|
||||||
return c == '/' || (fp.isWindows && c == '\\')
|
|
||||||
}
|
|
||||||
|
|
||||||
// volumeNameLen returns length of the leading volume name on Windows.
|
|
||||||
// It returns 0 elsewhere.
|
|
||||||
func (fp goFilepath) volumeNameLen(path string) int {
|
|
||||||
if !fp.isWindows {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
if len(path) < 2 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
// with drive letter
|
|
||||||
c := path[0]
|
|
||||||
if path[1] == ':' && ('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z') {
|
|
||||||
return 2
|
|
||||||
}
|
|
||||||
// is it UNC? https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
|
|
||||||
if l := len(path); l >= 5 && isSlash(path[0]) && isSlash(path[1]) &&
|
|
||||||
!isSlash(path[2]) && path[2] != '.' {
|
|
||||||
// first, leading `\\` and next shouldn't be `\`. its server name.
|
|
||||||
for n := 3; n < l-1; n++ {
|
|
||||||
// second, next '\' shouldn't be repeated.
|
|
||||||
if isSlash(path[n]) {
|
|
||||||
n++
|
|
||||||
// third, following something characters. its share name.
|
|
||||||
if !isSlash(path[n]) {
|
|
||||||
if path[n] == '.' {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
for ; n < l; n++ {
|
|
||||||
if isSlash(path[n]) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return n
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// EvalSymlinks returns the path name after the evaluation of any symbolic
|
|
||||||
// links.
|
|
||||||
// If path is relative the result will be relative to the current directory,
|
|
||||||
// unless one of the components is an absolute symbolic link.
|
|
||||||
// EvalSymlinks calls Clean on the result.
|
|
||||||
func (fp goFilepath) evalSymlinks(path string) (string, error) {
|
|
||||||
volLen := fp.volumeNameLen(path)
|
|
||||||
pathSeparator := string(fp.pathSeparator)
|
|
||||||
|
|
||||||
if volLen < len(path) && fp.isPathSeparator(path[volLen]) {
|
|
||||||
volLen++
|
|
||||||
}
|
|
||||||
vol := path[:volLen]
|
|
||||||
dest := vol
|
|
||||||
linksWalked := 0
|
|
||||||
for start, end := volLen, volLen; start < len(path); start = end {
|
|
||||||
for start < len(path) && fp.isPathSeparator(path[start]) {
|
|
||||||
start++
|
|
||||||
}
|
|
||||||
end = start
|
|
||||||
for end < len(path) && !fp.isPathSeparator(path[end]) {
|
|
||||||
end++
|
|
||||||
}
|
|
||||||
|
|
||||||
// On Windows, "." can be a symlink.
|
|
||||||
// We look it up, and use the value if it is absolute.
|
|
||||||
// If not, we just return ".".
|
|
||||||
isWindowsDot := fp.isWindows && path[fp.volumeNameLen(path):] == "."
|
|
||||||
|
|
||||||
// The next path component is in path[start:end].
|
|
||||||
if end == start {
|
|
||||||
// No more path components.
|
|
||||||
break
|
|
||||||
} else if path[start:end] == "." && !isWindowsDot {
|
|
||||||
// Ignore path component ".".
|
|
||||||
continue
|
|
||||||
} else if path[start:end] == ".." {
|
|
||||||
// Back up to previous component if possible.
|
|
||||||
// Note that volLen includes any leading slash.
|
|
||||||
|
|
||||||
// Set r to the index of the last slash in dest,
|
|
||||||
// after the volume.
|
|
||||||
var r int
|
|
||||||
for r = len(dest) - 1; r >= volLen; r-- {
|
|
||||||
if fp.isPathSeparator(dest[r]) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if r < volLen || dest[r+1:] == ".." {
|
|
||||||
// Either path has no slashes
|
|
||||||
// (it's empty or just "C:")
|
|
||||||
// or it ends in a ".." we had to keep.
|
|
||||||
// Either way, keep this "..".
|
|
||||||
if len(dest) > volLen {
|
|
||||||
dest += pathSeparator
|
|
||||||
}
|
|
||||||
dest += ".."
|
|
||||||
} else {
|
|
||||||
// Discard everything since the last slash.
|
|
||||||
dest = dest[:r]
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ordinary path component. Add it to result.
|
|
||||||
|
|
||||||
if len(dest) > fp.volumeNameLen(dest) && !fp.isPathSeparator(dest[len(dest)-1]) {
|
|
||||||
dest += pathSeparator
|
|
||||||
}
|
|
||||||
|
|
||||||
dest += path[start:end]
|
|
||||||
|
|
||||||
// Resolve symlink.
|
|
||||||
|
|
||||||
fi, err := os.Lstat(dest)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
if fi.Mode()&os.ModeSymlink == 0 {
|
|
||||||
if !fi.Mode().IsDir() && end < len(path) {
|
|
||||||
return "", syscall.ENOTDIR
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Found symlink.
|
|
||||||
|
|
||||||
linksWalked++
|
|
||||||
if linksWalked > 255 {
|
|
||||||
return "", errors.New("EvalSymlinks: too many links")
|
|
||||||
}
|
|
||||||
|
|
||||||
link, err := os.Readlink(dest)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
if isWindowsDot && !fp.isAbs(link) {
|
|
||||||
// On Windows, if "." is a relative symlink,
|
|
||||||
// just return ".".
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
path = link + path[end:]
|
|
||||||
|
|
||||||
v := fp.volumeNameLen(link)
|
|
||||||
if v > 0 {
|
|
||||||
// Symlink to drive name is an absolute path.
|
|
||||||
if v < len(link) && fp.isPathSeparator(link[v]) {
|
|
||||||
v++
|
|
||||||
}
|
|
||||||
vol = link[:v]
|
|
||||||
dest = vol
|
|
||||||
end = len(vol)
|
|
||||||
} else if len(link) > 0 && fp.isPathSeparator(link[0]) {
|
|
||||||
// Symlink to absolute path.
|
|
||||||
dest = link[:1]
|
|
||||||
end = 1
|
|
||||||
} else {
|
|
||||||
// Symlink to relative path; replace last
|
|
||||||
// path component in dest.
|
|
||||||
var r int
|
|
||||||
for r = len(dest) - 1; r >= volLen; r-- {
|
|
||||||
if fp.isPathSeparator(dest[r]) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if r < volLen {
|
|
||||||
dest = vol
|
|
||||||
} else {
|
|
||||||
dest = dest[:r]
|
|
||||||
}
|
|
||||||
end = 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return fp.clean(dest), nil
|
|
||||||
}
|
|
// A lazybuf is a lazily constructed path buffer.
// It supports append, reading previously appended bytes,
// and retrieving the final string. It does not allocate a buffer
// to hold the output until that output diverges from s.
type lazybuf struct {
	path       string
	buf        []byte
	w          int
	volAndPath string
	volLen     int
}

func (b *lazybuf) index(i int) byte {
	if b.buf != nil {
		return b.buf[i]
	}
	return b.path[i]
}

func (b *lazybuf) append(c byte) {
	if b.buf == nil {
		if b.w < len(b.path) && b.path[b.w] == c {
			b.w++
			return
		}
		b.buf = make([]byte, len(b.path))
		copy(b.buf, b.path[:b.w])
	}
	b.buf[b.w] = c
	b.w++
}

func (b *lazybuf) string() string {
	if b.buf == nil {
		return b.volAndPath[:b.volLen+b.w]
	}
	return b.volAndPath[:b.volLen] + string(b.buf[:b.w])
}
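The point of lazybuf is that cleaning an already-clean path performs no allocation: the writer just advances a cursor over the input until the output first diverges. A standalone sketch of the same copy-on-write idea, with illustrative names that are not part of the vendored code (it compiles in any package, no imports needed):

	// cowBuf writes an output that often equals a prefix of its input.
	type cowBuf struct {
		src string
		buf []byte
		w   int
	}

	func (b *cowBuf) append(c byte) {
		if b.buf == nil {
			if b.w < len(b.src) && b.src[b.w] == c {
				b.w++ // still identical to the input: no allocation
				return
			}
			b.buf = make([]byte, len(b.src))
			copy(b.buf, b.src[:b.w]) // diverged: copy the shared prefix once
		}
		b.buf[b.w] = c
		b.w++
	}

	func (b *cowBuf) string() string {
		if b.buf == nil {
			return b.src[:b.w] // common case: a substring, zero allocations
		}
		return string(b.buf[:b.w])
	}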
// FromSlash returns the result of replacing each slash ('/') character
// in path with a separator character. Multiple slashes are replaced
// by multiple separators.
func (fp goFilepath) fromSlash(path string) string {
	if !fp.isWindows {
		return path
	}
	return strings.ReplaceAll(path, "/", "\\")
}

// Clean returns the shortest path name equivalent to path
// by purely lexical processing. It applies the following rules
// iteratively until no further processing can be done:
//
//	1. Replace multiple Separator elements with a single one.
//	2. Eliminate each . path name element (the current directory).
//	3. Eliminate each inner .. path name element (the parent directory)
//	   along with the non-.. element that precedes it.
//	4. Eliminate .. elements that begin a rooted path:
//	   that is, replace "/.." by "/" at the beginning of a path,
//	   assuming Separator is '/'.
//
// The returned path ends in a slash only if it represents a root directory,
// such as "/" on Unix or `C:\` on Windows.
//
// Finally, any occurrences of slash are replaced by Separator.
//
// If the result of this process is an empty string, Clean
// returns the string ".".
//
// See also Rob Pike, ``Lexical File Names in Plan 9 or
// Getting Dot-Dot Right,''
// https://9p.io/sys/doc/lexnames.html
func (fp goFilepath) clean(path string) string {
	originalPath := path
	volLen := fp.volumeNameLen(path)
	path = path[volLen:]
	if path == "" {
		if volLen > 1 && originalPath[1] != ':' {
			// should be UNC
			return fp.fromSlash(originalPath)
		}
		return originalPath + "."
	}
	rooted := fp.isPathSeparator(path[0])

	// Invariants:
	//	reading from path; r is index of next byte to process.
	//	writing to buf; w is index of next byte to write.
	//	dotdot is index in buf where .. must stop, either because
	//		it is the leading slash or it is a leading ../../.. prefix.
	n := len(path)
	out := lazybuf{path: path, volAndPath: originalPath, volLen: volLen}
	r, dotdot := 0, 0
	if rooted {
		out.append(fp.pathSeparator)
		r, dotdot = 1, 1
	}

	for r < n {
		switch {
		case fp.isPathSeparator(path[r]):
			// empty path element
			r++
		case path[r] == '.' && (r+1 == n || fp.isPathSeparator(path[r+1])):
			// . element
			r++
		case path[r] == '.' && path[r+1] == '.' && (r+2 == n || fp.isPathSeparator(path[r+2])):
			// .. element: remove to last separator
			r += 2
			switch {
			case out.w > dotdot:
				// can backtrack
				out.w--
				for out.w > dotdot && !fp.isPathSeparator(out.index(out.w)) {
					out.w--
				}
			case !rooted:
				// cannot backtrack, but not rooted, so append .. element.
				if out.w > 0 {
					out.append(fp.pathSeparator)
				}
				out.append('.')
				out.append('.')
				dotdot = out.w
			}
		default:
			// real path element.
			// add slash if needed
			if rooted && out.w != 1 || !rooted && out.w != 0 {
				out.append(fp.pathSeparator)
			}
			// copy element
			for ; r < n && !fp.isPathSeparator(path[r]); r++ {
				out.append(path[r])
			}
		}
	}

	// Turn empty string into "."
	if out.w == 0 {
		out.append('.')
	}

	return fp.fromSlash(out.string())
}
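Each numbered rule in the comment above can be exercised through the standard-library filepath.Clean that this method reproduces; a small sketch, assuming Unix-style separators (on Windows the output would use backslashes):

	package main

	import (
		"fmt"
		"path/filepath"
	)

	func main() {
		fmt.Println(filepath.Clean("a//b"))     // rule 1: "a/b"
		fmt.Println(filepath.Clean("a/./b"))    // rule 2: "a/b"
		fmt.Println(filepath.Clean("a/c/../b")) // rule 3: "a/b"
		fmt.Println(filepath.Clean("/../a"))    // rule 4: "/a"
		fmt.Println(filepath.Clean(""))         // empty becomes "."
	}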
// VolumeName returns leading volume name.
// Given "C:\foo\bar" it returns "C:" on Windows.
// Given "\\host\share\foo" it returns "\\host\share".
// On other platforms it returns "".
func (fp goFilepath) volumeName(path string) string {
	return path[:fp.volumeNameLen(path)]
}

// Base returns the last element of path.
// Trailing path separators are removed before extracting the last element.
// If the path is empty, Base returns ".".
// If the path consists entirely of separators, Base returns a single separator.
func (fp goFilepath) base(path string) string {
	if path == "" {
		return "."
	}
	// Strip trailing slashes.
	for len(path) > 0 && fp.isPathSeparator(path[len(path)-1]) {
		path = path[0 : len(path)-1]
	}
	// Throw away volume name
	path = path[len(fp.volumeName(path)):]
	// Find the last element
	i := len(path) - 1
	for i >= 0 && !fp.isPathSeparator(path[i]) {
		i--
	}
	if i >= 0 {
		path = path[i+1:]
	}
	// If empty now, it had only slashes.
	if path == "" {
		return string(fp.pathSeparator)
	}
	return path
}

// Dir returns all but the last element of path, typically the path's directory.
// After dropping the final element, Dir calls Clean on the path and trailing
// slashes are removed.
// If the path is empty, Dir returns ".".
// If the path consists entirely of separators, Dir returns a single separator.
// The returned path does not end in a separator unless it is the root directory.
func (fp goFilepath) dir(path string) string {
	vol := fp.volumeName(path)
	i := len(path) - 1
	for i >= len(vol) && !fp.isPathSeparator(path[i]) {
		i--
	}
	dir := fp.clean(path[len(vol) : i+1])
	if dir == "." && len(vol) > 2 {
		// must be UNC
		return vol
	}
	return vol + dir
}

// Ext returns the file name extension used by path.
// The extension is the suffix beginning at the final dot
// in the final element of path; it is empty if there is
// no dot.
func (fp goFilepath) ext(path string) string {
	for i := len(path) - 1; i >= 0 && !fp.isPathSeparator(path[i]); i-- {
		if path[i] == '.' {
			return path[i:]
		}
	}
	return ""
}

// Join joins any number of path elements into a single path,
// separating them with an OS specific Separator. Empty elements
// are ignored. The result is Cleaned. However, if the argument
// list is empty or all its elements are empty, Join returns
// an empty string.
// On Windows, the result will only be a UNC path if the first
// non-empty element is a UNC path.
func (fp goFilepath) join(elem []string) string {
	for i, e := range elem {
		if e != "" {
			if fp.isWindows {
				return fp.joinNonEmpty(elem[i:])
			}
			return fp.clean(strings.Join(elem[i:], string(fp.pathSeparator)))
		}
	}
	return ""
}

// joinNonEmpty is like join, but it assumes that the first element is non-empty.
func (fp goFilepath) joinNonEmpty(elem []string) string {
	if len(elem[0]) == 2 && elem[0][1] == ':' {
		// First element is drive letter without terminating slash.
		// Keep path relative to current directory on that drive.
		// Skip empty elements.
		i := 1
		for ; i < len(elem); i++ {
			if elem[i] != "" {
				break
			}
		}
		return fp.clean(elem[0] + strings.Join(elem[i:], string(fp.pathSeparator)))
	}
	// The following logic prevents Join from inadvertently creating a
	// UNC path on Windows. Unless the first element is a UNC path, Join
	// shouldn't create a UNC path. See golang.org/issue/9167.
	p := fp.clean(strings.Join(elem, string(fp.pathSeparator)))
	if !fp.isUNC(p) {
		return p
	}
	// p == UNC only allowed when the first element is a UNC path.
	head := fp.clean(elem[0])
	if fp.isUNC(head) {
		return p
	}
	// head + tail == UNC, but joining two non-UNC paths should not result
	// in a UNC path. Undo creation of UNC path.
	tail := fp.clean(strings.Join(elem[1:], string(fp.pathSeparator)))
	if head[len(head)-1] == fp.pathSeparator {
		return head + tail
	}
	return head + string(fp.pathSeparator) + tail
}

// isUNC reports whether path is a UNC path.
func (fp goFilepath) isUNC(path string) bool {
	return fp.volumeNameLen(path) > 2
}

// Rel returns a relative path that is lexically equivalent to targpath when
// joined to basepath with an intervening separator. That is,
// Join(basepath, Rel(basepath, targpath)) is equivalent to targpath itself.
// On success, the returned path will always be relative to basepath,
// even if basepath and targpath share no elements.
// An error is returned if targpath can't be made relative to basepath or if
// knowing the current working directory would be necessary to compute it.
// Rel calls Clean on the result.
func (fp goFilepath) rel(basepath, targpath string) (string, error) {
	baseVol := fp.volumeName(basepath)
	targVol := fp.volumeName(targpath)
	base := fp.clean(basepath)
	targ := fp.clean(targpath)
	if fp.sameWord(targ, base) {
		return ".", nil
	}
	base = base[len(baseVol):]
	targ = targ[len(targVol):]
	if base == "." {
		base = ""
	}
	// Can't use IsAbs - `\a` and `a` are both relative in Windows.
	baseSlashed := len(base) > 0 && base[0] == fp.pathSeparator
	targSlashed := len(targ) > 0 && targ[0] == fp.pathSeparator
	if baseSlashed != targSlashed || !fp.sameWord(baseVol, targVol) {
		return "", errors.New("Rel: can't make " + targpath + " relative to " + basepath)
	}
	// Position base[b0:bi] and targ[t0:ti] at the first differing elements.
	bl := len(base)
	tl := len(targ)
	var b0, bi, t0, ti int
	for {
		for bi < bl && base[bi] != fp.pathSeparator {
			bi++
		}
		for ti < tl && targ[ti] != fp.pathSeparator {
			ti++
		}
		if !fp.sameWord(targ[t0:ti], base[b0:bi]) {
			break
		}
		if bi < bl {
			bi++
		}
		if ti < tl {
			ti++
		}
		b0 = bi
		t0 = ti
	}
	if base[b0:bi] == ".." {
		return "", errors.New("Rel: can't make " + targpath + " relative to " + basepath)
	}
	if b0 != bl {
		// Base elements left. Must go up before going down.
		seps := strings.Count(base[b0:bl], string(fp.pathSeparator))
		size := 2 + seps*3
		if tl != t0 {
			size += 1 + tl - t0
		}
		buf := make([]byte, size)
		n := copy(buf, "..")
		for i := 0; i < seps; i++ {
			buf[n] = fp.pathSeparator
			copy(buf[n+1:], "..")
			n += 3
		}
		if t0 != tl {
			buf[n] = fp.pathSeparator
			copy(buf[n+1:], targ[t0:])
		}
		return string(buf), nil
	}
	return targ[t0:], nil
}

func (fp goFilepath) sameWord(a, b string) bool {
	if !fp.isWindows {
		return a == b
	}
	return strings.EqualFold(a, b)
}
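rel() mirrors filepath.Rel, so its contract can be sanity-checked against the standard library. A short sketch, assuming Unix-style separators; the paths are hypothetical:

	package main

	import (
		"fmt"
		"path/filepath"
	)

	func main() {
		// The result, joined back onto the base, is lexically
		// equivalent to the target.
		r, err := filepath.Rel("/web/src", "/web/dist/app.js")
		fmt.Println(r, err) // "../dist/app.js" <nil>

		// Going up past a ".." that must be kept is an error, matching
		// the two errors.New branches in the vendored code.
		_, err = filepath.Rel("../a", "b")
		fmt.Println(err != nil) // true
	}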
270
vendor/github.com/evanw/esbuild/internal/fs/fs.go
generated
vendored
@@ -1,270 +0,0 @@
package fs

import (
	"errors"
	"os"
	"sort"
	"strings"
	"sync"
	"syscall"
)

type EntryKind uint8

const (
	DirEntry  EntryKind = 1
	FileEntry EntryKind = 2
)

type Entry struct {
	symlink  string
	dir      string
	base     string
	mutex    sync.Mutex
	kind     EntryKind
	needStat bool
}

func (e *Entry) Kind(fs FS) EntryKind {
	e.mutex.Lock()
	defer e.mutex.Unlock()
	if e.needStat {
		e.needStat = false
		e.symlink, e.kind = fs.kind(e.dir, e.base)
	}
	return e.kind
}

func (e *Entry) Symlink(fs FS) string {
	e.mutex.Lock()
	defer e.mutex.Unlock()
	if e.needStat {
		e.needStat = false
		e.symlink, e.kind = fs.kind(e.dir, e.base)
	}
	return e.symlink
}

type accessedEntries struct {
	mutex      sync.Mutex
	wasPresent map[string]bool

	// If this is nil, "SortedKeys()" was not accessed. This means we should
	// check for whether this directory has changed or not by seeing if any of
	// the entries in the "wasPresent" map have changed in "present or not"
	// status, since the only access was to individual entries via "Get()".
	//
	// If this is non-nil, "SortedKeys()" was accessed. This means we should
	// check for whether this directory has changed or not by checking the
	// "allEntries" array for equality with the existing entries list, since the
	// code asked for all entries and may have used the presence or absence of
	// entries in that list.
	//
	// The goal of having these two checks is to be as narrow as possible to
	// avoid unnecessary rebuilds. If only "Get()" is called on a few entries,
	// then we won't invalidate the build if random unrelated entries are added
	// or removed. But if "SortedKeys()" is called, we need to invalidate the
	// build if anything about the set of entries in this directory is changed.
	allEntries []string
}

type DirEntries struct {
	dir             string
	data            map[string]*Entry
	accessedEntries *accessedEntries
}

func MakeEmptyDirEntries(dir string) DirEntries {
	return DirEntries{dir, make(map[string]*Entry), nil}
}

type DifferentCase struct {
	Dir    string
	Query  string
	Actual string
}

func (entries DirEntries) Get(query string) (*Entry, *DifferentCase) {
	if entries.data != nil {
		key := strings.ToLower(query)
		entry := entries.data[key]

		// Track whether this specific entry was present or absent for watch mode
		if accessed := entries.accessedEntries; accessed != nil {
			accessed.mutex.Lock()
			accessed.wasPresent[key] = entry != nil
			accessed.mutex.Unlock()
		}

		if entry != nil {
			if entry.base != query {
				return entry, &DifferentCase{
					Dir:    entries.dir,
					Query:  query,
					Actual: entry.base,
				}
			}
			return entry, nil
		}
	}

	return nil, nil
}

func (entries DirEntries) SortedKeys() (keys []string) {
	if entries.data != nil {
		keys = make([]string, 0, len(entries.data))
		for _, entry := range entries.data {
			keys = append(keys, entry.base)
		}
		sort.Strings(keys)

		// Track the exact set of all entries for watch mode
		if entries.accessedEntries != nil {
			entries.accessedEntries.mutex.Lock()
			entries.accessedEntries.allEntries = keys
			entries.accessedEntries.mutex.Unlock()
		}

		return keys
	}

	return
}

type OpenedFile interface {
	Len() int
	Read(start int, end int) ([]byte, error)
	Close() error
}

type InMemoryOpenedFile struct {
	Contents []byte
}

func (f *InMemoryOpenedFile) Len() int {
	return len(f.Contents)
}

func (f *InMemoryOpenedFile) Read(start int, end int) ([]byte, error) {
	return []byte(f.Contents[start:end]), nil
}

func (f *InMemoryOpenedFile) Close() error {
	return nil
}

type FS interface {
	// The returned map is immutable and is cached across invocations. Do not
	// mutate it.
	ReadDirectory(path string) (entries DirEntries, canonicalError error, originalError error)
	ReadFile(path string) (contents string, canonicalError error, originalError error)
	OpenFile(path string) (result OpenedFile, canonicalError error, originalError error)

	// This is a key made from the information returned by "stat". It is intended
	// to be different if the file has been edited, and to otherwise be equal if
	// the file has not been edited. It should usually work, but no guarantees.
	//
	// See https://apenwarr.ca/log/20181113 for more information about why this
	// can be broken. For example, writing to a file with mmap on WSL on Windows
	// won't change this key. Hopefully this isn't too much of an issue.
	//
	// Additional reading:
	// - https://github.com/npm/npm/pull/20027
	// - https://github.com/golang/go/commit/7dea509703eb5ad66a35628b12a678110fbb1f72
	ModKey(path string) (ModKey, error)

	// This is part of the interface because the mock interface used for tests
	// should not depend on file system behavior (i.e. different slashes for
	// Windows) while the real interface should.
	IsAbs(path string) bool
	Abs(path string) (string, bool)
	Dir(path string) string
	Base(path string) string
	Ext(path string) string
	Join(parts ...string) string
	Cwd() string
	Rel(base string, target string) (string, bool)

	// This is used in the implementation of "Entry"
	kind(dir string, base string) (symlink string, kind EntryKind)

	// This is a set of all files used and all directories checked. The build
	// must be invalidated if any of these watched files change.
	WatchData() WatchData
}

type WatchData struct {
	// These functions return a non-empty path as a string if the file system
	// entry has been modified. For files, the returned path is the same as the
	// file path. For directories, the returned path is either the directory
	// itself or a file in the directory that was changed.
	Paths map[string]func() string
}

type ModKey struct {
	// What gets filled in here is OS-dependent
	inode      uint64
	size       int64
	mtime_sec  int64
	mtime_nsec int64
	mode       uint32
	uid        uint32
}

// Some file systems have a time resolution of only a few seconds. If a mtime
// value is too new, we won't be able to tell if it has been recently modified
// or not. So we only use mtimes for comparison if they are sufficiently old.
// Apparently the FAT file system has a resolution of two seconds according to
// this article: https://en.wikipedia.org/wiki/Stat_(system_call).
const modKeySafetyGap = 3 // In seconds
var modKeyUnusable = errors.New("The modification key is unusable")

// Limit the number of files open simultaneously to avoid ulimit issues
var fileOpenLimit = make(chan bool, 32)

func BeforeFileOpen() {
	// This will block if the number of open files is already at the limit
	fileOpenLimit <- false
}

func AfterFileClose() {
	<-fileOpenLimit
}

// This is a fork of "os.MkdirAll" to work around bugs with the WebAssembly
// build target. More information here: https://github.com/golang/go/issues/43768.
func MkdirAll(fs FS, path string, perm os.FileMode) error {
	// Run "Join" once to run "Clean" on the path, which removes trailing slashes
	return mkdirAll(fs, fs.Join(path), perm)
}

func mkdirAll(fs FS, path string, perm os.FileMode) error {
	// Fast path: if we can tell whether path is a directory or file, stop with success or error.
	if dir, err := os.Stat(path); err == nil {
		if dir.IsDir() {
			return nil
		}
		return &os.PathError{Op: "mkdir", Path: path, Err: syscall.ENOTDIR}
	}

	// Slow path: make sure parent exists and then call Mkdir for path.
	if parent := fs.Dir(path); parent != path {
		// Create parent.
		if err := mkdirAll(fs, parent, perm); err != nil {
			return err
		}
	}

	// Parent now exists; invoke Mkdir and use its result.
	if err := os.Mkdir(path, perm); err != nil {
		// Handle arguments like "foo/." by
		// double-checking that directory doesn't exist.
		dir, err1 := os.Lstat(path)
		if err1 == nil && dir.IsDir() {
			return nil
		}
		return err
	}
	return nil
}
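BeforeFileOpen and AfterFileClose above implement a counting semaphore with a buffered channel: sends block once 32 opens are in flight, and each receive frees a slot. A self-contained sketch of the same pattern under those assumptions; the file list and print are illustrative:

	package main

	import (
		"fmt"
		"os"
		"sync"
	)

	var slots = make(chan struct{}, 32)

	func readAll(paths []string) {
		var wg sync.WaitGroup
		for _, p := range paths {
			wg.Add(1)
			go func(p string) {
				defer wg.Done()
				slots <- struct{}{}        // blocks once 32 opens are in flight
				defer func() { <-slots }() // release the slot on close
				data, err := os.ReadFile(p)
				if err == nil {
					fmt.Println(p, len(data))
				}
			}(p)
		}
		wg.Wait()
	}

	func main() {
		readAll([]string{"go.mod", "go.sum"}) // hypothetical inputs
	}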
157
vendor/github.com/evanw/esbuild/internal/fs/fs_mock.go
generated
vendored
@@ -1,157 +0,0 @@
// This is a mock implementation of the "fs" module for use with tests. It does
// not actually read from the file system. Instead, it reads from a pre-specified
// map of file paths to files.

package fs

import (
	"errors"
	"path"
	"strings"
	"syscall"
)

type mockFS struct {
	dirs  map[string]DirEntries
	files map[string]string
}

func MockFS(input map[string]string) FS {
	dirs := make(map[string]DirEntries)
	files := make(map[string]string)

	for k, v := range input {
		files[k] = v
		original := k

		// Build the directory map
		for {
			kDir := path.Dir(k)
			dir, ok := dirs[kDir]
			if !ok {
				dir = DirEntries{kDir, make(map[string]*Entry), nil}
				dirs[kDir] = dir
			}
			if kDir == k {
				break
			}
			base := path.Base(k)
			if k == original {
				dir.data[strings.ToLower(base)] = &Entry{kind: FileEntry, base: base}
			} else {
				dir.data[strings.ToLower(base)] = &Entry{kind: DirEntry, base: base}
			}
			k = kDir
		}
	}

	return &mockFS{dirs, files}
}

func (fs *mockFS) ReadDirectory(path string) (DirEntries, error, error) {
	if dir, ok := fs.dirs[path]; ok {
		return dir, nil, nil
	}
	return DirEntries{}, syscall.ENOENT, syscall.ENOENT
}

func (fs *mockFS) ReadFile(path string) (string, error, error) {
	if contents, ok := fs.files[path]; ok {
		return contents, nil, nil
	}
	return "", syscall.ENOENT, syscall.ENOENT
}

func (fs *mockFS) OpenFile(path string) (OpenedFile, error, error) {
	if contents, ok := fs.files[path]; ok {
		return &InMemoryOpenedFile{Contents: []byte(contents)}, nil, nil
	}
	return nil, syscall.ENOENT, syscall.ENOENT
}

func (fs *mockFS) ModKey(path string) (ModKey, error) {
	return ModKey{}, errors.New("This is not available during tests")
}

func (*mockFS) IsAbs(p string) bool {
	return path.IsAbs(p)
}

func (*mockFS) Abs(p string) (string, bool) {
	return path.Clean(path.Join("/", p)), true
}

func (*mockFS) Dir(p string) string {
	return path.Dir(p)
}

func (*mockFS) Base(p string) string {
	return path.Base(p)
}

func (*mockFS) Ext(p string) string {
	return path.Ext(p)
}

func (*mockFS) Join(parts ...string) string {
	return path.Clean(path.Join(parts...))
}

func (*mockFS) Cwd() string {
	return "/"
}

func splitOnSlash(path string) (string, string) {
	if slash := strings.IndexByte(path, '/'); slash != -1 {
		return path[:slash], path[slash+1:]
	}
	return path, ""
}

func (*mockFS) Rel(base string, target string) (string, bool) {
	base = path.Clean(base)
	target = path.Clean(target)

	// Base cases
	if base == "" || base == "." {
		return target, true
	}
	if base == target {
		return ".", true
	}

	// Find the common parent directory
	for {
		bHead, bTail := splitOnSlash(base)
		tHead, tTail := splitOnSlash(target)
		if bHead != tHead {
			break
		}
		base = bTail
		target = tTail
	}

	// Stop now if base is a subpath of target
	if base == "" {
		return target, true
	}

	// Traverse up to the common parent
	commonParent := strings.Repeat("../", strings.Count(base, "/")+1)

	// Stop now if target is a subpath of base
	if target == "" {
		return commonParent[:len(commonParent)-1], true
	}

	// Otherwise, down to the parent
	return commonParent + target, true
}

func (fs *mockFS) kind(dir string, base string) (symlink string, kind EntryKind) {
	panic("This should never be called")
}

func (fs *mockFS) WatchData() WatchData {
	panic("This should never be called")
}
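A sketch of how a test could drive this mock; the file contents and assertions are hypothetical, and the test is assumed to live inside package fs with "testing" imported:

	func TestMockFS(t *testing.T) {
		fs := MockFS(map[string]string{
			"/src/app.js":  "import './util.js'",
			"/src/util.js": "export {}",
		})

		contents, err, _ := fs.ReadFile("/src/app.js")
		if err != nil || contents != "import './util.js'" {
			t.Fatal("unexpected contents", err)
		}

		// Intermediate directories are synthesized by MockFS
		entries, err, _ := fs.ReadDirectory("/src")
		if err != nil || len(entries.SortedKeys()) != 2 {
			t.Fatal("unexpected entries", err)
		}
	}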
529
vendor/github.com/evanw/esbuild/internal/fs/fs_real.go
generated
vendored
@@ -1,529 +0,0 @@
package fs

import (
	"fmt"
	"io"
	"io/ioutil"
	"os"
	"sort"
	"strings"
	"sync"
	"syscall"
)

type realFS struct {
	// Stores the file entries for directories we've listed before
	entriesMutex sync.Mutex
	entries      map[string]entriesOrErr

	// If true, do not use the "entries" cache
	doNotCacheEntries bool

	// This stores data that will end up being returned by "WatchData()"
	watchMutex sync.Mutex
	watchData  map[string]privateWatchData

	// When building with WebAssembly, the Go compiler doesn't correctly handle
	// platform-specific path behavior. Hack around these bugs by compiling
	// support for both Unix and Windows paths into all executables and switch
	// between them at run-time instead.
	fp goFilepath
}

type entriesOrErr struct {
	entries        DirEntries
	canonicalError error
	originalError  error
}

type watchState uint8

const (
	stateNone                  watchState = iota
	stateDirHasAccessedEntries                   // Compare "accessedEntries"
	stateDirMissing                              // Compare directory presence
	stateFileHasModKey                           // Compare "modKey"
	stateFileNeedModKey                          // Need to transition to "stateFileHasModKey" or "stateFileUnusableModKey" before "WatchData()" returns
	stateFileMissing                             // Compare file presence
	stateFileUnusableModKey                      // Compare "fileContents"
)

type privateWatchData struct {
	accessedEntries *accessedEntries
	fileContents    string
	modKey          ModKey
	state           watchState
}

type RealFSOptions struct {
	WantWatchData bool
	AbsWorkingDir string
	DoNotCache    bool
}

func RealFS(options RealFSOptions) (FS, error) {
	var fp goFilepath
	if CheckIfWindows() {
		fp.isWindows = true
		fp.pathSeparator = '\\'
	} else {
		fp.isWindows = false
		fp.pathSeparator = '/'
	}

	// Come up with a default working directory if one was not specified
	fp.cwd = options.AbsWorkingDir
	if fp.cwd == "" {
		if cwd, err := os.Getwd(); err == nil {
			fp.cwd = cwd
		} else if fp.isWindows {
			fp.cwd = "C:\\"
		} else {
			fp.cwd = "/"
		}
	} else if !fp.isAbs(fp.cwd) {
		return nil, fmt.Errorf("The working directory %q is not an absolute path", fp.cwd)
	}

	// Resolve symlinks in the current working directory. Symlinks are resolved
	// when input file paths are converted to absolute paths because we need to
	// recognize an input file as unique even if it has multiple symlinks
	// pointing to it. The build will generate relative paths from the current
	// working directory to the absolute input file paths for error messages,
	// so the current working directory should be processed the same way. Not
	// doing this causes test failures with esbuild when run from inside a
	// symlinked directory.
	//
	// This deliberately ignores errors due to e.g. infinite loops. If there is
	// an error, we will just use the original working directory and likely
	// encounter an error later anyway. And if we don't encounter an error
	// later, then the current working directory didn't even matter and the
	// error is unimportant.
	if path, err := fp.evalSymlinks(fp.cwd); err == nil {
		fp.cwd = path
	}

	// Only allocate memory for watch data if necessary
	var watchData map[string]privateWatchData
	if options.WantWatchData {
		watchData = make(map[string]privateWatchData)
	}

	return &realFS{
		entries:           make(map[string]entriesOrErr),
		fp:                fp,
		watchData:         watchData,
		doNotCacheEntries: options.DoNotCache,
	}, nil
}

func (fs *realFS) ReadDirectory(dir string) (entries DirEntries, canonicalError error, originalError error) {
	if !fs.doNotCacheEntries {
		// First, check the cache
		cached, ok := func() (cached entriesOrErr, ok bool) {
			fs.entriesMutex.Lock()
			defer fs.entriesMutex.Unlock()
			cached, ok = fs.entries[dir]
			return
		}()
		if ok {
			// Cache hit: stop now
			return cached.entries, cached.canonicalError, cached.originalError
		}
	}

	// Cache miss: read the directory entries
	names, canonicalError, originalError := fs.readdir(dir)
	entries = DirEntries{dir, make(map[string]*Entry), nil}

	// Unwrap to get the underlying error
	if pathErr, ok := canonicalError.(*os.PathError); ok {
		canonicalError = pathErr.Unwrap()
	}

	if canonicalError == nil {
		for _, name := range names {
			// Call "stat" lazily for performance. The "@material-ui/icons" package
			// contains a directory with over 11,000 entries in it and running "stat"
			// for each entry was a big performance issue for that package.
			entries.data[strings.ToLower(name)] = &Entry{
				dir:      dir,
				base:     name,
				needStat: true,
			}
		}
	}

	// Store data for watch mode
	if fs.watchData != nil {
		defer fs.watchMutex.Unlock()
		fs.watchMutex.Lock()
		state := stateDirHasAccessedEntries
		if canonicalError != nil {
			state = stateDirMissing
		}
		entries.accessedEntries = &accessedEntries{wasPresent: make(map[string]bool)}
		fs.watchData[dir] = privateWatchData{
			accessedEntries: entries.accessedEntries,
			state:           state,
		}
	}

	// Update the cache unconditionally. Even if the read failed, we don't want to
	// retry again later. The directory is inaccessible so trying again is wasted.
	if canonicalError != nil {
		entries.data = nil
	}
	if !fs.doNotCacheEntries {
		fs.entriesMutex.Lock()
		defer fs.entriesMutex.Unlock()
		fs.entries[dir] = entriesOrErr{
			entries:        entries,
			canonicalError: canonicalError,
			originalError:  originalError,
		}
	}
	return entries, canonicalError, originalError
}

func (fs *realFS) ReadFile(path string) (contents string, canonicalError error, originalError error) {
	BeforeFileOpen()
	defer AfterFileClose()
	buffer, originalError := ioutil.ReadFile(path)
	canonicalError = fs.canonicalizeError(originalError)

	// Allocate the string once
	fileContents := string(buffer)

	// Store data for watch mode
	if fs.watchData != nil {
		defer fs.watchMutex.Unlock()
		fs.watchMutex.Lock()
		data, ok := fs.watchData[path]
		if canonicalError != nil {
			data.state = stateFileMissing
		} else if !ok {
			data.state = stateFileNeedModKey
		}
		data.fileContents = fileContents
		fs.watchData[path] = data
	}

	return fileContents, canonicalError, originalError
}

type realOpenedFile struct {
	handle *os.File
	len    int
}

func (f *realOpenedFile) Len() int {
	return f.len
}

func (f *realOpenedFile) Read(start int, end int) ([]byte, error) {
	bytes := make([]byte, end-start)
	remaining := bytes

	_, err := f.handle.Seek(int64(start), io.SeekStart)
	if err != nil {
		return nil, err
	}

	for len(remaining) > 0 {
		n, err := f.handle.Read(remaining)
		if err != nil && n <= 0 {
			return nil, err
		}
		remaining = remaining[n:]
	}

	return bytes, nil
}

func (f *realOpenedFile) Close() error {
	return f.handle.Close()
}

func (fs *realFS) OpenFile(path string) (OpenedFile, error, error) {
	BeforeFileOpen()
	defer AfterFileClose()

	f, err := os.Open(path)
	if err != nil {
		return nil, fs.canonicalizeError(err), err
	}

	info, err := f.Stat()
	if err != nil {
		f.Close()
		return nil, fs.canonicalizeError(err), err
	}

	return &realOpenedFile{f, int(info.Size())}, nil, nil
}

func (fs *realFS) ModKey(path string) (ModKey, error) {
	BeforeFileOpen()
	defer AfterFileClose()
	key, err := modKey(path)

	// Store data for watch mode
	if fs.watchData != nil {
		defer fs.watchMutex.Unlock()
		fs.watchMutex.Lock()
		data, ok := fs.watchData[path]
		if !ok {
			if err == modKeyUnusable {
				data.state = stateFileUnusableModKey
			} else if err != nil {
				data.state = stateFileMissing
			} else {
				data.state = stateFileHasModKey
			}
		} else if data.state == stateFileNeedModKey {
			data.state = stateFileHasModKey
		}
		data.modKey = key
		fs.watchData[path] = data
	}

	return key, err
}

func (fs *realFS) IsAbs(p string) bool {
	return fs.fp.isAbs(p)
}

func (fs *realFS) Abs(p string) (string, bool) {
	abs, err := fs.fp.abs(p)
	return abs, err == nil
}

func (fs *realFS) Dir(p string) string {
	return fs.fp.dir(p)
}

func (fs *realFS) Base(p string) string {
	return fs.fp.base(p)
}

func (fs *realFS) Ext(p string) string {
	return fs.fp.ext(p)
}

func (fs *realFS) Join(parts ...string) string {
	return fs.fp.clean(fs.fp.join(parts))
}

func (fs *realFS) Cwd() string {
	return fs.fp.cwd
}

func (fs *realFS) Rel(base string, target string) (string, bool) {
	if rel, err := fs.fp.rel(base, target); err == nil {
		return rel, true
	}
	return "", false
}

func (fs *realFS) readdir(dirname string) (entries []string, canonicalError error, originalError error) {
	BeforeFileOpen()
	defer AfterFileClose()
	f, originalError := os.Open(dirname)
	canonicalError = fs.canonicalizeError(originalError)

	// Stop now if there was an error
	if canonicalError != nil {
		return nil, canonicalError, originalError
	}

	defer f.Close()
	entries, err := f.Readdirnames(-1)

	// Unwrap to get the underlying error
	if syscallErr, ok := err.(*os.SyscallError); ok {
		err = syscallErr.Unwrap()
	}

	// Don't convert ENOTDIR to ENOENT here. ENOTDIR is a legitimate error
	// condition for Readdirnames() on non-Windows platforms.

	return entries, canonicalError, originalError
}

func (fs *realFS) canonicalizeError(err error) error {
	// Unwrap to get the underlying error
	if pathErr, ok := err.(*os.PathError); ok {
		err = pathErr.Unwrap()
	}

	// This has been copied from golang.org/x/sys/windows
	const ERROR_INVALID_NAME syscall.Errno = 123

	// Windows is much more restrictive than Unix about file names. If a file name
	// is invalid, it will return ERROR_INVALID_NAME. Treat this as ENOENT (i.e.
	// "the file does not exist") so that the resolver continues trying to resolve
	// the path on this failure instead of aborting with an error.
	if fs.fp.isWindows && err == ERROR_INVALID_NAME {
		err = syscall.ENOENT
	}

	// Windows returns ENOTDIR here even though nothing we've done yet has asked
	// for a directory. This really means ENOENT on Windows. Return ENOENT here
	// so callers that check for ENOENT will successfully detect this file as
	// missing.
	if err == syscall.ENOTDIR {
		err = syscall.ENOENT
	}

	return err
}

func (fs *realFS) kind(dir string, base string) (symlink string, kind EntryKind) {
	entryPath := fs.fp.join([]string{dir, base})

	// Use "lstat" since we want information about symbolic links
	BeforeFileOpen()
	defer AfterFileClose()
	stat, err := os.Lstat(entryPath)
	if err != nil {
		return
	}
	mode := stat.Mode()

	// Follow symlinks now so the cache contains the translation
	if (mode & os.ModeSymlink) != 0 {
		symlink = entryPath
		linksWalked := 0
		for {
			linksWalked++
			if linksWalked > 255 {
				return // Error: too many links
			}
			link, err := os.Readlink(symlink)
			if err != nil {
				return // Skip over this entry
			}
			if !fs.fp.isAbs(link) {
				link = fs.fp.join([]string{dir, link})
			}
			symlink = fs.fp.clean(link)

			// Re-run "lstat" on the symlink target
			stat2, err2 := os.Lstat(symlink)
			if err2 != nil {
				return // Skip over this entry
			}
			mode = stat2.Mode()
			if (mode & os.ModeSymlink) == 0 {
				break
			}
			dir = fs.fp.dir(symlink)
		}
	}

	// We consider the entry either a directory or a file
	if (mode & os.ModeDir) != 0 {
		kind = DirEntry
	} else {
		kind = FileEntry
	}
	return
}

func (fs *realFS) WatchData() WatchData {
	paths := make(map[string]func() string)

	for path, data := range fs.watchData {
		// Each closure below needs its own copy of these loop variables
		path := path
		data := data

		// Each function should return a non-empty path if the state has been changed
		if data.state == stateFileNeedModKey {
			key, err := modKey(path)
			if err == modKeyUnusable {
				data.state = stateFileUnusableModKey
			} else if err != nil {
				data.state = stateFileMissing
			} else {
				data.state = stateFileHasModKey
				data.modKey = key
			}
		}

		switch data.state {
		case stateDirMissing:
			paths[path] = func() string {
				info, err := os.Stat(path)
				if err == nil && info.IsDir() {
					return path
				}
				return ""
			}

		case stateDirHasAccessedEntries:
			paths[path] = func() string {
				names, err, _ := fs.readdir(path)
				if err != nil {
					return path
				}
				data.accessedEntries.mutex.Lock()
				defer data.accessedEntries.mutex.Unlock()
				if allEntries := data.accessedEntries.allEntries; allEntries != nil {
					// Check all entries
					if len(names) != len(allEntries) {
						return path
					}
					sort.Strings(names)
					for i, s := range names {
						if s != allEntries[i] {
							return path
						}
					}
				} else {
					// Check individual entries
					isPresent := make(map[string]bool, len(names))
					for _, name := range names {
						isPresent[strings.ToLower(name)] = true
					}
					for name, wasPresent := range data.accessedEntries.wasPresent {
						if wasPresent != isPresent[name] {
							return fs.Join(path, name)
						}
					}
				}
				return ""
			}

		case stateFileMissing:
			paths[path] = func() string {
				if info, err := os.Stat(path); err == nil && !info.IsDir() {
					return path
				}
				return ""
			}

		case stateFileHasModKey:
			paths[path] = func() string {
				if key, err := modKey(path); err != nil || key != data.modKey {
					return path
				}
				return ""
			}

		case stateFileUnusableModKey:
			paths[path] = func() string {
				if buffer, err := ioutil.ReadFile(path); err != nil || string(buffer) != data.fileContents {
					return path
				}
				return ""
			}
		}
	}

	return WatchData{
		Paths: paths,
	}
}
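A sketch of how a caller might consume WatchData() to drive rebuilds: poll each check function and rebuild when any of them reports a changed path. The rebuild callback and the poll interval are hypothetical; the function assumes package fs with "time" imported:

	func watchLoop(fs FS, rebuild func()) {
		for {
			time.Sleep(250 * time.Millisecond)
			for _, check := range fs.WatchData().Paths {
				if changed := check(); changed != "" {
					// "changed" names the file or directory that differs
					rebuild()
					break
				}
			}
		}
	}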
9
vendor/github.com/evanw/esbuild/internal/fs/iswin_other.go
generated
vendored
@@ -1,9 +0,0 @@
//go:build (!js || !wasm) && !windows
// +build !js !wasm
// +build !windows

package fs

func CheckIfWindows() bool {
	return false
}
25
vendor/github.com/evanw/esbuild/internal/fs/iswin_wasm.go
generated
vendored
@@ -1,25 +0,0 @@
//go:build js && wasm
// +build js,wasm

package fs

import (
	"os"
)

var checkedIfWindows bool
var cachedIfWindows bool

func CheckIfWindows() bool {
	if !checkedIfWindows {
		checkedIfWindows = true

		// Hack: Assume that we're on Windows if we're running WebAssembly and
		// the "C:\\" directory exists. This is a workaround for a bug in Go's
		// WebAssembly support: https://github.com/golang/go/issues/43768.
		_, err := os.Stat("C:\\")
		cachedIfWindows = err == nil
	}

	return cachedIfWindows
}
8
vendor/github.com/evanw/esbuild/internal/fs/iswin_windows.go
generated
vendored
@@ -1,8 +0,0 @@
//go:build windows
// +build windows

package fs

func CheckIfWindows() bool {
	return true
}
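The three files above select a CheckIfWindows implementation per build target; only the js/wasm build has to probe at run time. A small sketch of how the answer typically feeds into path handling, mirroring the separator choice made in RealFS in fs_real.go (the helper name is illustrative):

	func pathSeparatorForHost() byte {
		if CheckIfWindows() {
			return '\\'
		}
		return '/'
	}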
35
vendor/github.com/evanw/esbuild/internal/fs/modkey_other.go
generated
vendored
@@ -1,35 +0,0 @@
//go:build !darwin && !freebsd && !linux
// +build !darwin,!freebsd,!linux

package fs

import (
	"os"
	"time"
)

var zeroTime time.Time

func modKey(path string) (ModKey, error) {
	info, err := os.Stat(path)
	if err != nil {
		return ModKey{}, err
	}

	// We can't detect changes if the file system zeros out the modification time
	mtime := info.ModTime()
	if mtime == zeroTime || mtime.Unix() == 0 {
		return ModKey{}, modKeyUnusable
	}

	// Don't generate a modification key if the file is too new
	if mtime.Add(modKeySafetyGap * time.Second).After(time.Now()) {
		return ModKey{}, modKeyUnusable
	}

	return ModKey{
		size:      info.Size(),
		mtime_sec: mtime.Unix(),
		mode:      uint32(info.Mode()),
	}, nil
}
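A sketch of the safety-gap rule used above: with modKeySafetyGap = 3, a file modified at 12:00:00 is only trusted for mtime comparison from 12:00:03 onward, which masks coarse (for example two-second FAT) timestamp resolution. The helper assumes package fs with "time" imported and is illustrative only:

	func mtimeIsTrustworthy(mtime, now time.Time) bool {
		// The mtime must be at least modKeySafetyGap seconds old
		return !mtime.Add(modKeySafetyGap * time.Second).After(now)
	}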
41
vendor/github.com/evanw/esbuild/internal/fs/modkey_unix.go
generated
vendored
@@ -1,41 +0,0 @@
//go:build darwin || freebsd || linux
// +build darwin freebsd linux

package fs

import (
	"time"

	"golang.org/x/sys/unix"
)

func modKey(path string) (ModKey, error) {
	stat := unix.Stat_t{}
	if err := unix.Stat(path, &stat); err != nil {
		return ModKey{}, err
	}

	// We can't detect changes if the file system zeros out the modification time
	if stat.Mtim.Sec == 0 && stat.Mtim.Nsec == 0 {
		return ModKey{}, modKeyUnusable
	}

	// Don't generate a modification key if the file is too new
	now, err := unix.TimeToTimespec(time.Now())
	if err != nil {
		return ModKey{}, err
	}
	mtimeSec := stat.Mtim.Sec + modKeySafetyGap
	if mtimeSec > now.Sec || (mtimeSec == now.Sec && stat.Mtim.Nsec > now.Nsec) {
		return ModKey{}, modKeyUnusable
	}

	return ModKey{
		inode:      stat.Ino,
		size:       stat.Size,
		mtime_sec:  int64(stat.Mtim.Sec),
		mtime_nsec: int64(stat.Mtim.Nsec),
		mode:       uint32(stat.Mode),
		uid:        stat.Uid,
	}, nil
}
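On both platforms the resulting ModKey is opaque and only ever compared for equality, as in the stateFileHasModKey case in fs_real.go. A minimal sketch of change detection built on that contract; the helper is hypothetical and assumes package fs:

	func fileChanged(fs FS, path string, previous ModKey) bool {
		// Any error (missing file, unusable key) is treated as a change
		key, err := fs.ModKey(path)
		return err != nil || key != previous
	}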
385
vendor/github.com/evanw/esbuild/internal/graph/graph.go
generated
vendored
@@ -1,385 +0,0 @@
package graph

// This graph represents the set of files that the linker operates on. Each
// linker has a separate one of these graphs (there is one linker when code
// splitting is on, but one linker per entry point when code splitting is off).
//
// The input data to the linker constructor must be considered immutable because
// it's shared between linker invocations and is also stored in the cache for
// incremental builds.
//
// The linker constructor makes a shallow clone of the input data and is careful
// to pre-clone ahead of time the AST fields that it may modify. The Go language
// doesn't have any type system features for immutability so this has to be
// manually enforced. Please be careful.

import (
	"sort"
	"sync"

	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/runtime"
)

type entryPointKind uint8

const (
	entryPointNone entryPointKind = iota
	entryPointUserSpecified
	entryPointDynamicImport
)

type LinkerFile struct {
	InputFile InputFile

	// This holds all entry points that can reach this file. It will be used to
	// assign the parts in this file to a chunk.
	EntryBits helpers.BitSet

	// This is lazily-allocated because it's only needed if there are warnings
	// logged, which should be relatively rare.
	lazyLineColumnTracker *logger.LineColumnTracker

	// The minimum number of links in the module graph to get from an entry point
	// to this file
	DistanceFromEntryPoint uint32

	// If "entryPointKind" is not "entryPointNone", this is the index of the
	// corresponding entry point chunk.
	EntryPointChunkIndex uint32

	// This file is an entry point if and only if this is not "entryPointNone".
	// Note that dynamically-imported files are allowed to also be specified by
	// the user as top-level entry points, so some dynamically-imported files
	// may be "entryPointUserSpecified" instead of "entryPointDynamicImport".
	entryPointKind entryPointKind

	// This is true if this file has been marked as live by the tree shaking
	// algorithm.
	IsLive bool
}

func (f *LinkerFile) IsEntryPoint() bool {
	return f.entryPointKind != entryPointNone
}

func (f *LinkerFile) IsUserSpecifiedEntryPoint() bool {
	return f.entryPointKind == entryPointUserSpecified
}

// Note: This is not guarded by a mutex. Make sure this isn't called from a
// parallel part of the code.
func (f *LinkerFile) LineColumnTracker() *logger.LineColumnTracker {
	if f.lazyLineColumnTracker == nil {
		tracker := logger.MakeLineColumnTracker(&f.InputFile.Source)
		f.lazyLineColumnTracker = &tracker
	}
	return f.lazyLineColumnTracker
}

type EntryPoint struct {
	// This may be an absolute path or a relative path. If absolute, it will
	// eventually be turned into a relative path by computing the path relative
	// to the "outbase" directory. Then this relative path will be joined onto
	// the "outdir" directory to form the final output path for this entry point.
	OutputPath string

	// This is the source index of the entry point. This file must have a valid
	// entry point kind (i.e. not "none").
	SourceIndex uint32

	// Manually specified output paths are ignored when computing the default
	// "outbase" directory, which is computed as the lowest common ancestor of
	// all automatically generated output paths.
	OutputPathWasAutoGenerated bool
}

type LinkerGraph struct {
	Files       []LinkerFile
	entryPoints []EntryPoint
	Symbols     js_ast.SymbolMap

	// We should avoid traversing all files in the bundle, because the linker
	// should be able to run a linking operation on a large bundle where only
	// a few files are needed (e.g. an incremental compilation scenario). This
	// holds all files that could possibly be reached through the entry points.
	// If you need to iterate over all files in the linking operation, iterate
	// over this array. This array is also sorted in a deterministic ordering
	// to help ensure deterministic builds (source indices are random).
	ReachableFiles []uint32

	// This maps from unstable source index to stable reachable file index. This
	// is useful as a deterministic key for sorting if you need to sort something
	// containing a source index (such as "js_ast.Ref" symbol references).
	StableSourceIndices []uint32
}

func CloneLinkerGraph(
	inputFiles []InputFile,
	reachableFiles []uint32,
	originalEntryPoints []EntryPoint,
	codeSplitting bool,
) LinkerGraph {
	entryPoints := append([]EntryPoint{}, originalEntryPoints...)
	symbols := js_ast.NewSymbolMap(len(inputFiles))
	files := make([]LinkerFile, len(inputFiles))

	// Mark all entry points so we don't add them again for import() expressions
	for _, entryPoint := range entryPoints {
		files[entryPoint.SourceIndex].entryPointKind = entryPointUserSpecified
	}

	// Clone various things since we may mutate them later. Do this in parallel
	// for a speedup (around ~2x faster for this function in the three.js
	// benchmark on a 6-core laptop).
	var dynamicImportEntryPoints []uint32
	var dynamicImportEntryPointsMutex sync.Mutex
	waitGroup := sync.WaitGroup{}
	waitGroup.Add(len(reachableFiles))
	stableSourceIndices := make([]uint32, len(inputFiles))
	for stableIndex, sourceIndex := range reachableFiles {
		// Create a way to convert source indices to a stable ordering
		stableSourceIndices[sourceIndex] = uint32(stableIndex)

		go func(sourceIndex uint32) {
			file := &files[sourceIndex]
			file.InputFile = inputFiles[sourceIndex]

			switch repr := file.InputFile.Repr.(type) {
			case *JSRepr:
				// Clone the representation
				{
					clone := *repr
					repr = &clone
					file.InputFile.Repr = repr
				}

				// Clone the symbol map
				fileSymbols := append([]js_ast.Symbol{}, repr.AST.Symbols...)
				symbols.SymbolsForSource[sourceIndex] = fileSymbols
				repr.AST.Symbols = nil

				// Clone the parts
				repr.AST.Parts = append([]js_ast.Part{}, repr.AST.Parts...)
				for i := range repr.AST.Parts {
					part := &repr.AST.Parts[i]
					clone := make(map[js_ast.Ref]js_ast.SymbolUse, len(part.SymbolUses))
					for ref, uses := range part.SymbolUses {
						clone[ref] = uses
					}
					part.SymbolUses = clone
					part.Dependencies = append([]js_ast.Dependency{}, part.Dependencies...)
				}

				// Clone the import records
				repr.AST.ImportRecords = append([]ast.ImportRecord{}, repr.AST.ImportRecords...)

				// Add dynamic imports as additional entry points if code splitting is active
||||||
if codeSplitting {
|
|
||||||
for importRecordIndex := range repr.AST.ImportRecords {
|
|
||||||
if record := &repr.AST.ImportRecords[importRecordIndex]; record.SourceIndex.IsValid() && record.Kind == ast.ImportDynamic {
|
|
||||||
dynamicImportEntryPointsMutex.Lock()
|
|
||||||
dynamicImportEntryPoints = append(dynamicImportEntryPoints, record.SourceIndex.GetIndex())
|
|
||||||
dynamicImportEntryPointsMutex.Unlock()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clone the import map
|
|
||||||
namedImports := make(map[js_ast.Ref]js_ast.NamedImport, len(repr.AST.NamedImports))
|
|
||||||
for k, v := range repr.AST.NamedImports {
|
|
||||||
namedImports[k] = v
|
|
||||||
}
|
|
||||||
repr.AST.NamedImports = namedImports
|
|
||||||
|
|
||||||
// Clone the export map
|
|
||||||
resolvedExports := make(map[string]ExportData)
|
|
||||||
for alias, name := range repr.AST.NamedExports {
|
|
||||||
resolvedExports[alias] = ExportData{
|
|
||||||
Ref: name.Ref,
|
|
||||||
SourceIndex: sourceIndex,
|
|
||||||
NameLoc: name.AliasLoc,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clone the top-level scope so we can generate more variables
|
|
||||||
{
|
|
||||||
new := &js_ast.Scope{}
|
|
||||||
*new = *repr.AST.ModuleScope
|
|
||||||
new.Generated = append([]js_ast.Ref{}, new.Generated...)
|
|
||||||
repr.AST.ModuleScope = new
|
|
||||||
}
|
|
||||||
|
|
||||||
// Also associate some default metadata with the file
|
|
||||||
repr.Meta.ResolvedExports = resolvedExports
|
|
||||||
repr.Meta.IsProbablyTypeScriptType = make(map[js_ast.Ref]bool)
|
|
||||||
repr.Meta.ImportsToBind = make(map[js_ast.Ref]ImportData)
|
|
||||||
|
|
||||||
case *CSSRepr:
|
|
||||||
// Clone the representation
|
|
||||||
{
|
|
||||||
clone := *repr
|
|
||||||
repr = &clone
|
|
||||||
file.InputFile.Repr = repr
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clone the import records
|
|
||||||
repr.AST.ImportRecords = append([]ast.ImportRecord{}, repr.AST.ImportRecords...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// All files start off as far as possible from an entry point
|
|
||||||
file.DistanceFromEntryPoint = ^uint32(0)
|
|
||||||
waitGroup.Done()
|
|
||||||
}(sourceIndex)
|
|
||||||
}
|
|
||||||
waitGroup.Wait()
|
|
||||||
|
|
||||||
// Process dynamic entry points after merging control flow again
|
|
||||||
stableEntryPoints := make([]int, 0, len(dynamicImportEntryPoints))
|
|
||||||
for _, sourceIndex := range dynamicImportEntryPoints {
|
|
||||||
if otherFile := &files[sourceIndex]; otherFile.entryPointKind == entryPointNone {
|
|
||||||
stableEntryPoints = append(stableEntryPoints, int(stableSourceIndices[sourceIndex]))
|
|
||||||
otherFile.entryPointKind = entryPointDynamicImport
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Make sure to add dynamic entry points in a deterministic order
|
|
||||||
sort.Ints(stableEntryPoints)
|
|
||||||
for _, stableIndex := range stableEntryPoints {
|
|
||||||
entryPoints = append(entryPoints, EntryPoint{SourceIndex: reachableFiles[stableIndex]})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Allocate the entry bit set now that the number of entry points is known
|
|
||||||
bitCount := uint(len(entryPoints))
|
|
||||||
for _, sourceIndex := range reachableFiles {
|
|
||||||
files[sourceIndex].EntryBits = helpers.NewBitSet(bitCount)
|
|
||||||
}
|
|
||||||
|
|
||||||
return LinkerGraph{
|
|
||||||
Symbols: symbols,
|
|
||||||
entryPoints: entryPoints,
|
|
||||||
Files: files,
|
|
||||||
ReachableFiles: reachableFiles,
|
|
||||||
StableSourceIndices: stableSourceIndices,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prevent packages that depend on us from adding or removing entry points
|
|
||||||
func (g *LinkerGraph) EntryPoints() []EntryPoint {
|
|
||||||
return g.entryPoints
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *LinkerGraph) AddPartToFile(sourceIndex uint32, part js_ast.Part) uint32 {
|
|
||||||
// Invariant: this map is never null
|
|
||||||
if part.SymbolUses == nil {
|
|
||||||
part.SymbolUses = make(map[js_ast.Ref]js_ast.SymbolUse)
|
|
||||||
}
|
|
||||||
|
|
||||||
repr := g.Files[sourceIndex].InputFile.Repr.(*JSRepr)
|
|
||||||
partIndex := uint32(len(repr.AST.Parts))
|
|
||||||
repr.AST.Parts = append(repr.AST.Parts, part)
|
|
||||||
|
|
||||||
// Invariant: the parts for all top-level symbols can be found in the file-level map
|
|
||||||
for _, declaredSymbol := range part.DeclaredSymbols {
|
|
||||||
if declaredSymbol.IsTopLevel {
|
|
||||||
// Check for an existing overlay
|
|
||||||
partIndices, ok := repr.Meta.TopLevelSymbolToPartsOverlay[declaredSymbol.Ref]
|
|
||||||
|
|
||||||
// If missing, initialize using the original values from the parser
|
|
||||||
if !ok {
|
|
||||||
partIndices = append(partIndices, repr.AST.TopLevelSymbolToPartsFromParser[declaredSymbol.Ref]...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add this part to the overlay
|
|
||||||
partIndices = append(partIndices, partIndex)
|
|
||||||
if repr.Meta.TopLevelSymbolToPartsOverlay == nil {
|
|
||||||
repr.Meta.TopLevelSymbolToPartsOverlay = make(map[js_ast.Ref][]uint32)
|
|
||||||
}
|
|
||||||
repr.Meta.TopLevelSymbolToPartsOverlay[declaredSymbol.Ref] = partIndices
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return partIndex
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *LinkerGraph) GenerateNewSymbol(sourceIndex uint32, kind js_ast.SymbolKind, originalName string) js_ast.Ref {
|
|
||||||
sourceSymbols := &g.Symbols.SymbolsForSource[sourceIndex]
|
|
||||||
|
|
||||||
ref := js_ast.Ref{
|
|
||||||
SourceIndex: sourceIndex,
|
|
||||||
InnerIndex: uint32(len(*sourceSymbols)),
|
|
||||||
}
|
|
||||||
|
|
||||||
*sourceSymbols = append(*sourceSymbols, js_ast.Symbol{
|
|
||||||
Kind: kind,
|
|
||||||
OriginalName: originalName,
|
|
||||||
Link: js_ast.InvalidRef,
|
|
||||||
})
|
|
||||||
|
|
||||||
generated := &g.Files[sourceIndex].InputFile.Repr.(*JSRepr).AST.ModuleScope.Generated
|
|
||||||
*generated = append(*generated, ref)
|
|
||||||
return ref
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *LinkerGraph) GenerateSymbolImportAndUse(
|
|
||||||
sourceIndex uint32,
|
|
||||||
partIndex uint32,
|
|
||||||
ref js_ast.Ref,
|
|
||||||
useCount uint32,
|
|
||||||
sourceIndexToImportFrom uint32,
|
|
||||||
) {
|
|
||||||
if useCount == 0 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
repr := g.Files[sourceIndex].InputFile.Repr.(*JSRepr)
|
|
||||||
part := &repr.AST.Parts[partIndex]
|
|
||||||
|
|
||||||
// Mark this symbol as used by this part
|
|
||||||
use := part.SymbolUses[ref]
|
|
||||||
use.CountEstimate += useCount
|
|
||||||
part.SymbolUses[ref] = use
|
|
||||||
|
|
||||||
// Uphold invariants about the CommonJS "exports" and "module" symbols
|
|
||||||
if ref == repr.AST.ExportsRef {
|
|
||||||
repr.AST.UsesExportsRef = true
|
|
||||||
}
|
|
||||||
if ref == repr.AST.ModuleRef {
|
|
||||||
repr.AST.UsesModuleRef = true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Track that this specific symbol was imported
|
|
||||||
if sourceIndexToImportFrom != sourceIndex {
|
|
||||||
repr.Meta.ImportsToBind[ref] = ImportData{
|
|
||||||
SourceIndex: sourceIndexToImportFrom,
|
|
||||||
Ref: ref,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Pull in all parts that declare this symbol
|
|
||||||
targetRepr := g.Files[sourceIndexToImportFrom].InputFile.Repr.(*JSRepr)
|
|
||||||
for _, partIndex := range targetRepr.TopLevelSymbolToParts(ref) {
|
|
||||||
part.Dependencies = append(part.Dependencies, js_ast.Dependency{
|
|
||||||
SourceIndex: sourceIndexToImportFrom,
|
|
||||||
PartIndex: partIndex,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *LinkerGraph) GenerateRuntimeSymbolImportAndUse(
|
|
||||||
sourceIndex uint32,
|
|
||||||
partIndex uint32,
|
|
||||||
name string,
|
|
||||||
useCount uint32,
|
|
||||||
) {
|
|
||||||
if useCount == 0 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
runtimeRepr := g.Files[runtime.SourceIndex].InputFile.Repr.(*JSRepr)
|
|
||||||
ref := runtimeRepr.AST.NamedExports[name].Ref
|
|
||||||
g.GenerateSymbolImportAndUse(sourceIndex, partIndex, ref, useCount, runtime.SourceIndex)
|
|
||||||
}
|
|
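Note: CloneLinkerGraph above fans out one goroutine per reachable file and rejoins them with a sync.WaitGroup; per-file state needs no lock because each goroutine owns exactly one slice element, while the shared dynamic-import slice is guarded by a mutex. A minimal standalone sketch of that fan-out/fan-in shape (the names below are hypothetical stand-ins, not part of this commit):

package main

import (
	"fmt"
	"sync"
)

func main() {
	// Hypothetical stand-ins for inputFiles/reachableFiles above.
	files := make([]int, 8)
	reachable := []uint32{0, 2, 3, 5, 7}

	waitGroup := sync.WaitGroup{}
	waitGroup.Add(len(reachable))
	for _, sourceIndex := range reachable {
		go func(sourceIndex uint32) {
			// Each goroutine owns exactly one element, so the per-file
			// writes need no mutex (shared slices do, like
			// dynamicImportEntryPointsMutex above).
			files[sourceIndex] = int(sourceIndex) * 10
			waitGroup.Done()
		}(sourceIndex)
	}
	waitGroup.Wait()
	fmt.Println(files)
}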
117 vendor/github.com/evanw/esbuild/internal/graph/input.go (generated, vendored)
@@ -1,117 +0,0 @@
package graph

// The code in this file mainly represents data that passes from the scan phase
// to the compile phase of the bundler. There is currently one exception: the
// "meta" member of the JavaScript file representation. That could have been
// stored separately but is stored together for convenience and to avoid an
// extra level of indirection. Instead it's kept in a separate type to keep
// things organized.

import (
	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/config"
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/resolver"
	"github.com/evanw/esbuild/internal/sourcemap"
)

type InputFile struct {
	Source         logger.Source
	Repr           InputFileRepr
	InputSourceMap *sourcemap.SourceMap

	// If this file ends up being used in the bundle, these are additional files
	// that must be written to the output directory. It's used by the "file"
	// loader.
	AdditionalFiles        []OutputFile
	UniqueKeyForFileLoader string

	SideEffects SideEffects
	Loader      config.Loader
}

type OutputFile struct {
	AbsPath  string
	Contents []byte

	// If "AbsMetadataFile" is present, this will be filled out with information
	// about this file in JSON format. This is a partial JSON file that will be
	// fully assembled later.
	JSONMetadataChunk string

	IsExecutable bool
}

type SideEffects struct {
	// This is optional additional information for use in error messages
	Data *resolver.SideEffectsData

	Kind SideEffectsKind
}

type SideEffectsKind uint8

const (
	// The default value conservatively considers all files to have side effects.
	HasSideEffects SideEffectsKind = iota

	// This file was listed as not having side effects by a "package.json"
	// file in one of our containing directories with a "sideEffects" field.
	NoSideEffects_PackageJSON

	// This file is considered to have no side effects because the AST was empty
	// after parsing finished. This should be the case for ".d.ts" files.
	NoSideEffects_EmptyAST

	// This file was loaded using a data-oriented loader (e.g. "text") that is
	// known to not have side effects.
	NoSideEffects_PureData

	// Same as above but it came from a plugin. We don't want to warn about
	// unused imports to these files since running the plugin is a side effect.
	// Removing the import would not call the plugin which is observable.
	NoSideEffects_PureData_FromPlugin
)

type InputFileRepr interface {
	ImportRecords() *[]ast.ImportRecord
}

type JSRepr struct {
	AST  js_ast.AST
	Meta JSReprMeta

	// If present, this is the CSS file that this JavaScript stub corresponds to.
	// A JavaScript stub is automatically generated for a CSS file when it's
	// imported from a JavaScript file.
	CSSSourceIndex ast.Index32
}

func (repr *JSRepr) ImportRecords() *[]ast.ImportRecord {
	return &repr.AST.ImportRecords
}

func (repr *JSRepr) TopLevelSymbolToParts(ref js_ast.Ref) []uint32 {
	// Overlay the mutable map from the linker
	if parts, ok := repr.Meta.TopLevelSymbolToPartsOverlay[ref]; ok {
		return parts
	}

	// Fall back to the immutable map from the parser
	return repr.AST.TopLevelSymbolToPartsFromParser[ref]
}

type CSSRepr struct {
	AST css_ast.AST

	// If present, this is the JavaScript stub corresponding to this CSS file.
	// A JavaScript stub is automatically generated for a CSS file when it's
	// imported from a JavaScript file.
	JSSourceIndex ast.Index32
}

func (repr *CSSRepr) ImportRecords() *[]ast.ImportRecord {
	return &repr.AST.ImportRecords
}
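Note: TopLevelSymbolToParts above layers a mutable linker-side overlay on top of the immutable parser map, so parallel linking operations never mutate shared parser output. A standalone sketch of that lookup pattern, using plain string keys as hypothetical stand-ins for js_ast.Ref:

package main

import "fmt"

// lookup prefers the mutable overlay and falls back to the immutable base,
// mirroring TopLevelSymbolToParts above.
func lookup(overlay, base map[string][]uint32, key string) []uint32 {
	if parts, ok := overlay[key]; ok {
		return parts
	}
	return base[key]
}

func main() {
	base := map[string][]uint32{"foo": {0}}
	overlay := map[string][]uint32{}

	fmt.Println(lookup(overlay, base, "foo")) // [0], from the parser

	// The linker appends a new part without mutating the shared base map.
	overlay["foo"] = append(append([]uint32{}, base["foo"]...), 7)
	fmt.Println(lookup(overlay, base, "foo")) // [0 7], from the overlay
}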
203 vendor/github.com/evanw/esbuild/internal/graph/meta.go (generated, vendored)
@@ -1,203 +0,0 @@
package graph

// The code in this file represents data that is required by the compile phase
// of the bundler but that is not required by the scan phase.

import (
	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/logger"
)

type WrapKind uint8

const (
	WrapNone WrapKind = iota

	// The module will be bundled CommonJS-style like this:
	//
	//   // foo.ts
	//   let require_foo = __commonJS((exports, module) => {
	//     exports.foo = 123;
	//   });
	//
	//   // bar.ts
	//   let foo = flag ? require_foo() : null;
	//
	WrapCJS

	// The module will be bundled ESM-style like this:
	//
	//   // foo.ts
	//   var foo, foo_exports = {};
	//   __export(foo_exports, {
	//     foo: () => foo
	//   });
	//   let init_foo = __esm(() => {
	//     foo = 123;
	//   });
	//
	//   // bar.ts
	//   let foo = flag ? (init_foo(), __toCommonJS(foo_exports)) : null;
	//
	WrapESM
)

// This contains linker-specific metadata corresponding to a "file" struct
// from the initial scan phase of the bundler. It's separated out because it's
// conceptually only used for a single linking operation and because multiple
// linking operations may be happening in parallel with different metadata for
// the same file.
type JSReprMeta struct {
	// This is only for TypeScript files. If an import symbol is in this map, it
	// means the import couldn't be found and doesn't actually exist. This is not
	// an error in TypeScript because the import is probably just a type.
	//
	// Normally we remove all unused imports for TypeScript files during parsing,
	// which automatically removes type-only imports. But there are certain re-
	// export situations where it's impossible to tell if an import is a type or
	// not:
	//
	//   import {typeOrNotTypeWhoKnows} from 'path';
	//   export {typeOrNotTypeWhoKnows};
	//
	// Really people should be using the TypeScript "isolatedModules" flag with
	// bundlers like this one that compile TypeScript files independently without
	// type checking. That causes the TypeScript type checker to emit the error
	// "Re-exporting a type when the '--isolatedModules' flag is provided requires
	// using 'export type'." But we try to be robust to such code anyway.
	IsProbablyTypeScriptType map[js_ast.Ref]bool

	// Imports are matched with exports in a separate pass from when the matched
	// exports are actually bound to the imports. Here "binding" means adding non-
	// local dependencies on the parts in the exporting file that declare the
	// exported symbol to all parts in the importing file that use the imported
	// symbol.
	//
	// This must be a separate pass because of the "probably TypeScript type"
	// check above. We can't generate the part for the export namespace until
	// we've matched imports with exports because the generated code must omit
	// type-only imports in the export namespace code. And we can't bind exports
	// to imports until the part for the export namespace is generated since that
	// part needs to participate in the binding.
	//
	// This array holds the deferred imports to bind so the pass can be split
	// into two separate passes.
	ImportsToBind map[js_ast.Ref]ImportData

	// This includes both named exports and re-exports.
	//
	// Named exports come from explicit export statements in the original file,
	// and are copied from the "NamedExports" field in the AST.
	//
	// Re-exports come from other files and are the result of resolving export
	// star statements (i.e. "export * from 'foo'").
	ResolvedExports    map[string]ExportData
	ResolvedExportStar *ExportData

	// Never iterate over "resolvedExports" directly. Instead, iterate over this
	// array. Some exports in that map aren't meant to end up in generated code.
	// This array excludes these exports and is also sorted, which avoids non-
	// determinism due to random map iteration order.
	SortedAndFilteredExportAliases []string

	// If this is an entry point, this array holds a reference to one free
	// temporary symbol for each entry in "sortedAndFilteredExportAliases".
	// These may be needed to store copies of CommonJS re-exports in ESM.
	CJSExportCopies []js_ast.Ref

	// This is merged on top of the corresponding map from the parser in the AST.
	// You should call "TopLevelSymbolToParts" to access this instead of accessing
	// it directly.
	TopLevelSymbolToPartsOverlay map[js_ast.Ref][]uint32

	// The index of the automatically-generated part used to represent the
	// CommonJS or ESM wrapper. This part is empty and is only useful for tree
	// shaking and code splitting. The wrapper can't be inserted into the part
	// because the wrapper contains other parts, which can't be represented by
	// the current part system. Only wrapped files have one of these.
	WrapperPartIndex ast.Index32

	// The index of the automatically-generated part used to handle entry point
	// specific stuff. If a certain part is needed by the entry point, it's added
	// as a dependency of this part. This is important for parts that are marked
	// as removable when unused and that are not used by anything else. Only
	// entry point files have one of these.
	EntryPointPartIndex ast.Index32

	// This is true if this file is affected by top-level await, either by having
	// a top-level await inside this file or by having an import/export statement
	// that transitively imports such a file. It is forbidden to call "require()"
	// on these files since they are evaluated asynchronously.
	IsAsyncOrHasAsyncDependency bool

	Wrap WrapKind

	// If true, we need to insert "var exports = {};". This is the case for ESM
	// files when the import namespace is captured via "import * as" and also
	// when they are the target of a "require()" call.
	NeedsExportsVariable bool

	// If true, the "__export(exports, { ... })" call will be force-included even
	// if there are no parts that reference "exports". Otherwise this call will
	// be removed due to the tree shaking pass. This is used for entry point
	// files when code related to the current output format needs to reference
	// the "exports" variable.
	ForceIncludeExportsForEntryPoint bool

	// This is set when we need to pull in the "__export" symbol into the part
	// at "nsExportPartIndex". This can't be done in "createExportsForFile"
	// because of concurrent map hazards. Instead, it must be done later.
	NeedsExportSymbolFromRuntime bool

	// Wrapped files must also ensure that their dependencies are wrapped. This
	// flag is used during the traversal that enforces this invariant, and is used
	// to detect when the fixed point has been reached.
	DidWrapDependencies bool
}

type ImportData struct {
	// This is an array of intermediate statements that re-exported this symbol
	// in a chain before getting to the final symbol. This can be done either with
	// "export * from" or "export {} from". If this is done with "export * from"
	// then this may not be the result of a single chain but may instead form
	// a diamond shape if this same symbol was re-exported multiple times from
	// different files.
	ReExports []js_ast.Dependency

	NameLoc     logger.Loc // Optional, goes with sourceIndex, ignore if zero
	Ref         js_ast.Ref
	SourceIndex uint32
}

type ExportData struct {
	Ref js_ast.Ref

	// Export star resolution happens first before import resolution. That means
	// it cannot yet determine if duplicate names from export star resolution are
	// ambiguous (point to different symbols) or not (point to the same symbol).
	// This issue can happen in the following scenario:
	//
	//   // entry.js
	//   export * from './a'
	//   export * from './b'
	//
	//   // a.js
	//   export * from './c'
	//
	//   // b.js
	//   export {x} from './c'
	//
	//   // c.js
	//   export let x = 1, y = 2
	//
	// In this case "entry.js" should have two exports "x" and "y", neither of
	// which are ambiguous. To handle this case, ambiguity resolution must be
	// deferred until import resolution time. That is done using this array.
	PotentiallyAmbiguousExportStarRefs []ImportData

	// This is the file that the named export above came from. This will be
	// different from the file that contains this object if this is a re-export.
	NameLoc     logger.Loc // Optional, goes with sourceIndex, ignore if zero
	SourceIndex uint32
}
27 vendor/github.com/evanw/esbuild/internal/helpers/bitset.go (generated, vendored)
@@ -1,27 +0,0 @@
package helpers

import "bytes"

type BitSet struct {
	entries []byte
}

func NewBitSet(bitCount uint) BitSet {
	return BitSet{make([]byte, (bitCount+7)/8)}
}

func (bs BitSet) HasBit(bit uint) bool {
	return (bs.entries[bit/8] & (1 << (bit & 7))) != 0
}

func (bs BitSet) SetBit(bit uint) {
	bs.entries[bit/8] |= 1 << (bit & 7)
}

func (bs BitSet) Equals(other BitSet) bool {
	return bytes.Equal(bs.entries, other.entries)
}

func (bs BitSet) String() string {
	return string(bs.entries)
}
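Note: this is the BitSet behind LinkerFile.EntryBits above, where one bit per entry point records reachability. A standalone sketch of the idea with hypothetical file names, using the same byte-packing math and the same "byte string as map key" trick as BitSet.String:

package main

import "fmt"

func main() {
	const entryPointCount = 2
	newBits := func() []byte { return make([]byte, (entryPointCount+7)/8) }
	setBit := func(bs []byte, bit uint) { bs[bit/8] |= 1 << (bit & 7) }

	// Hypothetical reachability: entry 0 reaches a.js and shared.js,
	// entry 1 reaches b.js and shared.js.
	fileBits := map[string][]byte{"a.js": newBits(), "b.js": newBits(), "shared.js": newBits()}
	setBit(fileBits["a.js"], 0)
	setBit(fileBits["b.js"], 1)
	setBit(fileBits["shared.js"], 0)
	setBit(fileBits["shared.js"], 1)

	// Files with identical bit sets can share a chunk; the raw byte string
	// doubles as a deterministic map key.
	chunks := map[string][]string{}
	for name, bits := range fileBits {
		chunks[string(bits)] = append(chunks[string(bits)], name)
	}
	fmt.Println(len(chunks), "chunks") // 3 chunks: {a.js}, {b.js}, {shared.js}
}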
89 vendor/github.com/evanw/esbuild/internal/helpers/comment.go (generated, vendored)
@@ -1,89 +0,0 @@
package helpers

import (
	"strings"
	"unicode/utf8"
)

func RemoveMultiLineCommentIndent(prefix string, text string) string {
	// Figure out the initial indent
	indent := 0
seekBackwardToNewline:
	for len(prefix) > 0 {
		c, size := utf8.DecodeLastRuneInString(prefix)
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			break seekBackwardToNewline
		}
		prefix = prefix[:len(prefix)-size]
		indent++
	}

	// Split the comment into lines
	var lines []string
	start := 0
	for i, c := range text {
		switch c {
		case '\r', '\n':
			// Don't double-append for Windows style "\r\n" newlines
			if start <= i {
				lines = append(lines, text[start:i])
			}

			start = i + 1

			// Ignore the second part of Windows style "\r\n" newlines
			if c == '\r' && start < len(text) && text[start] == '\n' {
				start++
			}

		case '\u2028', '\u2029':
			lines = append(lines, text[start:i])
			start = i + 3
		}
	}
	lines = append(lines, text[start:])

	// Find the minimum indent over all lines after the first line
	for _, line := range lines[1:] {
		lineIndent := 0
		for _, c := range line {
			if c != ' ' && c != '\t' {
				break
			}
			lineIndent++
		}
		if indent > lineIndent {
			indent = lineIndent
		}
	}

	// Trim the indent off of all lines after the first line
	for i, line := range lines {
		if i > 0 {
			lines[i] = line[indent:]
		}
	}
	return strings.Join(lines, "\n")
}

func EscapeClosingTag(text string, slashTag string) string {
	i := strings.Index(text, "</")
	if i < 0 {
		return text
	}
	var b strings.Builder
	for {
		b.WriteString(text[:i+1])
		text = text[i+1:]
		if len(text) >= len(slashTag) && strings.EqualFold(text[:len(slashTag)], slashTag) {
			b.WriteByte('\\')
		}
		i = strings.Index(text, "</")
		if i < 0 {
			break
		}
	}
	b.WriteString(text)
	return b.String()
}
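Note: EscapeClosingTag above inserts a backslash before a closing tag so an inlined snippet can't terminate its surrounding tag early. A usage sketch (hypothetical example test; assumes it is compiled inside the esbuild module, since internal/helpers is not importable from outside it):

package helpers_test // hypothetical; assumes compilation inside the esbuild module

import (
	"fmt"

	"github.com/evanw/esbuild/internal/helpers"
)

func ExampleEscapeClosingTag() {
	// Escaping "</script" keeps the string literal from closing the
	// surrounding <script> element when the output is inlined into HTML.
	fmt.Println(helpers.EscapeClosingTag(`console.log("</script>")`, "/script"))
	// Output: console.log("<\/script>")
}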
14 vendor/github.com/evanw/esbuild/internal/helpers/hash.go (generated, vendored)
@@ -1,14 +0,0 @@
package helpers

// From: http://boost.sourceforge.net/doc/html/boost/hash_combine.html
func HashCombine(seed uint32, hash uint32) uint32 {
	return seed ^ (hash + 0x9e3779b9 + (seed << 6) + (seed >> 2))
}

func HashCombineString(seed uint32, text string) uint32 {
	seed = HashCombine(seed, uint32(len(text)))
	for _, c := range text {
		seed = HashCombine(seed, uint32(c))
	}
	return seed
}
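Note: HashCombine folds one hash into a running seed, so a sequence of values can be reduced to a single uint32 key in which order matters. A standalone sketch (a local copy of the combiner above, shown for illustration only):

package main

import "fmt"

// Standalone copy of the boost-style combiner above.
func hashCombine(seed, hash uint32) uint32 {
	return seed ^ (hash + 0x9e3779b9 + (seed << 6) + (seed >> 2))
}

func main() {
	combine := func(seed uint32, text string) uint32 {
		seed = hashCombine(seed, uint32(len(text)))
		for _, c := range text {
			seed = hashCombine(seed, uint32(c))
		}
		return seed
	}
	// Order matters: folding "a" then "b" almost certainly differs from
	// "b" then "a", which is what makes this useful for composite keys.
	fmt.Println(combine(combine(0, "a"), "b"), combine(combine(0, "b"), "a"))
}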
86 vendor/github.com/evanw/esbuild/internal/helpers/joiner.go (generated, vendored)
@@ -1,86 +0,0 @@
package helpers

import (
	"bytes"
	"strings"
)

// This provides an efficient way to join lots of big string and byte slices
// together. It avoids the cost of repeatedly reallocating as the buffer grows
// by measuring exactly how big the buffer should be and then allocating once.
// This is a measurable speedup.
type Joiner struct {
	lastByte byte
	strings  []joinerString
	bytes    []joinerBytes
	length   uint32
}

type joinerString struct {
	data   string
	offset uint32
}

type joinerBytes struct {
	data   []byte
	offset uint32
}

func (j *Joiner) AddString(data string) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.strings = append(j.strings, joinerString{data, j.length})
	j.length += uint32(len(data))
}

func (j *Joiner) AddBytes(data []byte) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.bytes = append(j.bytes, joinerBytes{data, j.length})
	j.length += uint32(len(data))
}

func (j *Joiner) LastByte() byte {
	return j.lastByte
}

func (j *Joiner) Length() uint32 {
	return j.length
}

func (j *Joiner) EnsureNewlineAtEnd() {
	if j.length > 0 && j.lastByte != '\n' {
		j.AddString("\n")
	}
}

func (j *Joiner) Done() []byte {
	if len(j.strings) == 0 && len(j.bytes) == 1 && j.bytes[0].offset == 0 {
		// No need to allocate if there was only a single byte array written
		return j.bytes[0].data
	}
	buffer := make([]byte, j.length)
	for _, item := range j.strings {
		copy(buffer[item.offset:], item.data)
	}
	for _, item := range j.bytes {
		copy(buffer[item.offset:], item.data)
	}
	return buffer
}

func (j *Joiner) Contains(s string, b []byte) bool {
	for _, item := range j.strings {
		if strings.Contains(item.data, s) {
			return true
		}
	}
	for _, item := range j.bytes {
		if bytes.Contains(item.data, b) {
			return true
		}
	}
	return false
}
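Note: the Joiner records each piece with its precomputed offset, then Done performs one allocation and copies everything into place. A usage sketch (hypothetical example test; assumes compilation inside the esbuild module):

package helpers_test // hypothetical; assumes compilation inside the esbuild module

import (
	"fmt"

	"github.com/evanw/esbuild/internal/helpers"
)

func ExampleJoiner() {
	j := helpers.Joiner{}
	j.AddString("let x = 1;")
	j.AddBytes([]byte(" let y = 2;"))
	j.EnsureNewlineAtEnd() // appends "\n" only because the last byte isn't one
	fmt.Printf("%d bytes: %q\n", j.Length(), j.Done())
	// Output: 22 bytes: "let x = 1; let y = 2;\n"
}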
31 vendor/github.com/evanw/esbuild/internal/helpers/mime.go (generated, vendored)
@@ -1,31 +0,0 @@
package helpers

import "strings"

var builtinTypesLower = map[string]string{
	".css":  "text/css; charset=utf-8",
	".gif":  "image/gif",
	".htm":  "text/html; charset=utf-8",
	".html": "text/html; charset=utf-8",
	".jpeg": "image/jpeg",
	".jpg":  "image/jpeg",
	".js":   "text/javascript; charset=utf-8",
	".json": "application/json",
	".mjs":  "text/javascript; charset=utf-8",
	".pdf":  "application/pdf",
	".png":  "image/png",
	".svg":  "image/svg+xml",
	".wasm": "application/wasm",
	".webp": "image/webp",
	".xml":  "text/xml; charset=utf-8",
}

// This is used instead of Go's built-in "mime.TypeByExtension" function because
// that function is broken on Windows: https://github.com/golang/go/issues/32350.
func MimeTypeByExtension(ext string) string {
	contentType := builtinTypesLower[ext]
	if contentType == "" {
		contentType = builtinTypesLower[strings.ToLower(ext)]
	}
	return contentType
}
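Note: the lookup tries the extension as-is before lowercasing, so the common all-lowercase case skips the strings.ToLower allocation. A usage sketch (hypothetical example test; assumes compilation inside the esbuild module):

package helpers_test // hypothetical; assumes compilation inside the esbuild module

import (
	"fmt"

	"github.com/evanw/esbuild/internal/helpers"
)

func ExampleMimeTypeByExtension() {
	fmt.Println(helpers.MimeTypeByExtension(".css"))        // exact-case hit
	fmt.Println(helpers.MimeTypeByExtension(".PNG"))        // falls back via strings.ToLower
	fmt.Println(helpers.MimeTypeByExtension(".xyz") == "") // unknown extensions yield ""
	// Output:
	// text/css; charset=utf-8
	// image/png
	// true
}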
22 vendor/github.com/evanw/esbuild/internal/helpers/path.go (generated, vendored)
@@ -1,22 +0,0 @@
package helpers

import "strings"

func IsInsideNodeModules(path string) bool {
	for {
		// This is written in a platform-independent manner because it's run on
		// user-specified paths which can be arbitrary non-file-system things. So
		// for example Windows paths may end up being used on Unix or URLs may end
		// up being used on Windows. Be consistently agnostic to which kind of
		// slash is used on all platforms.
		slash := strings.LastIndexAny(path, "/\\")
		if slash == -1 {
			return false
		}
		dir, base := path[:slash], path[slash+1:]
		if base == "node_modules" {
			return true
		}
		path = dir
	}
}
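Note: the loop walks the path upward one component at a time, splitting on either slash style. A usage sketch with sample paths (hypothetical example test; assumes compilation inside the esbuild module):

package helpers_test // hypothetical; assumes compilation inside the esbuild module

import (
	"fmt"

	"github.com/evanw/esbuild/internal/helpers"
)

func ExampleIsInsideNodeModules() {
	// Both slash styles are recognized, matching the comment above.
	fmt.Println(helpers.IsInsideNodeModules("/a/node_modules/b/index.js"))
	fmt.Println(helpers.IsInsideNodeModules(`C:\a\node_modules\b\index.js`))
	fmt.Println(helpers.IsInsideNodeModules("/a/b/index.js"))
	// Output:
	// true
	// true
	// false
}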
50 vendor/github.com/evanw/esbuild/internal/helpers/stack.go (generated, vendored)
@@ -1,50 +0,0 @@
package helpers

import (
	"runtime/debug"
	"strings"
)

func PrettyPrintedStack() string {
	lines := strings.Split(strings.TrimSpace(string(debug.Stack())), "\n")

	// Strip the first "goroutine" line
	if len(lines) > 0 {
		if first := lines[0]; strings.HasPrefix(first, "goroutine ") && strings.HasSuffix(first, ":") {
			lines = lines[1:]
		}
	}

	sb := strings.Builder{}

	for _, line := range lines {
		// Indented lines are source locations
		if strings.HasPrefix(line, "\t") {
			line = line[1:]
			line = strings.TrimPrefix(line, "github.com/evanw/esbuild/")
			if offset := strings.LastIndex(line, " +0x"); offset != -1 {
				line = line[:offset]
			}
			sb.WriteString(" (")
			sb.WriteString(line)
			sb.WriteString(")")
			continue
		}

		// Other lines are function calls
		if sb.Len() > 0 {
			sb.WriteByte('\n')
		}
		if strings.HasSuffix(line, ")") {
			if paren := strings.LastIndexByte(line, '('); paren != -1 {
				line = line[:paren]
			}
		}
		if slash := strings.LastIndexByte(line, '/'); slash != -1 {
			line = line[slash+1:]
		}
		sb.WriteString(line)
	}

	return sb.String()
}
94 vendor/github.com/evanw/esbuild/internal/helpers/timer.go (generated, vendored)
@@ -1,94 +0,0 @@
package helpers

import (
	"fmt"
	"strings"
	"sync"
	"time"

	"github.com/evanw/esbuild/internal/logger"
)

type Timer struct {
	mutex sync.Mutex
	data  []timerData
}

type timerData struct {
	name  string
	time  time.Time
	isEnd bool
}

func (t *Timer) Begin(name string) {
	if t != nil {
		t.data = append(t.data, timerData{
			name: name,
			time: time.Now(),
		})
	}
}

func (t *Timer) End(name string) {
	if t != nil {
		t.data = append(t.data, timerData{
			name:  name,
			time:  time.Now(),
			isEnd: true,
		})
	}
}

func (t *Timer) Fork() *Timer {
	if t != nil {
		return &Timer{}
	}
	return nil
}

func (t *Timer) Join(other *Timer) {
	if t != nil && other != nil {
		t.mutex.Lock()
		defer t.mutex.Unlock()
		t.data = append(t.data, other.data...)
	}
}

func (t *Timer) Log(log logger.Log) {
	if t == nil {
		return
	}

	type pair struct {
		timerData
		index uint32
	}

	var notes []logger.MsgData
	var stack []pair
	indent := 0

	for _, item := range t.data {
		if !item.isEnd {
			top := pair{timerData: item, index: uint32(len(notes))}
			notes = append(notes, logger.MsgData{})
			stack = append(stack, top)
			indent++
		} else {
			indent--
			last := len(stack) - 1
			top := stack[last]
			stack = stack[:last]
			if item.name != top.name {
				panic("Internal error")
			}
			notes[top.index].Text = fmt.Sprintf("%s%s: %dms",
				strings.Repeat(" ", indent),
				top.name,
				item.time.Sub(top.time).Milliseconds())
		}
	}

	log.AddWithNotes(logger.Info, nil, logger.Range{},
		"Timing information (times may not nest hierarchically due to parallelism)", notes)
}
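Note: every Timer method checks for a nil receiver, so timing can be compiled in and enabled by simply passing a non-nil *Timer. A usage sketch (hypothetical helper; assumes compilation inside the esbuild module):

package helpers_test // hypothetical; assumes compilation inside the esbuild module

import (
	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/logger"
)

func useTimer(log logger.Log) {
	t := &helpers.Timer{} // a nil *Timer would make every call below a no-op
	t.Begin("Scan phase")
	t.Begin("Parse files") // Begin/End pairs must nest and match by name
	t.End("Parse files")
	t.End("Scan phase")

	other := t.Fork() // a fresh Timer for work on another goroutine
	other.Begin("Link")
	other.End("Link")
	t.Join(other) // Join is mutex-guarded, so merging across goroutines is safe

	t.Log(log) // emits the indented "name: Nms" tree as logger notes
}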
2304 vendor/github.com/evanw/esbuild/internal/js_ast/js_ast.go (generated, vendored)
File diff suppressed because it is too large
2961 vendor/github.com/evanw/esbuild/internal/js_lexer/js_lexer.go (generated, vendored)
File diff suppressed because it is too large
382 vendor/github.com/evanw/esbuild/internal/js_lexer/tables.go (generated, vendored)
@@ -1,382 +0,0 @@
package js_lexer

var tokenToString = map[T]string{
	TEndOfFile: "end of file", TSyntaxError: "syntax error", THashbang: "hashbang comment",

	// Literals
	TNoSubstitutionTemplateLiteral: "template literal", TNumericLiteral: "number",
	TStringLiteral: "string", TBigIntegerLiteral: "bigint",

	// Pseudo-literals
	TTemplateHead: "template literal", TTemplateMiddle: "template literal", TTemplateTail: "template literal",

	// Punctuation
	TAmpersand: "\"&\"", TAmpersandAmpersand: "\"&&\"", TAsterisk: "\"*\"", TAsteriskAsterisk: "\"**\"",
	TAt: "\"@\"", TBar: "\"|\"", TBarBar: "\"||\"", TCaret: "\"^\"", TCloseBrace: "\"}\"",
	TCloseBracket: "\"]\"", TCloseParen: "\")\"", TColon: "\":\"", TComma: "\",\"", TDot: "\".\"",
	TDotDotDot: "\"...\"", TEqualsEquals: "\"==\"", TEqualsEqualsEquals: "\"===\"",
	TEqualsGreaterThan: "\"=>\"", TExclamation: "\"!\"", TExclamationEquals: "\"!=\"",
	TExclamationEqualsEquals: "\"!==\"", TGreaterThan: "\">\"", TGreaterThanEquals: "\">=\"",
	TGreaterThanGreaterThan: "\">>\"", TGreaterThanGreaterThanGreaterThan: "\">>>\"",
	TLessThan: "\"<\"", TLessThanEquals: "\"<=\"", TLessThanLessThan: "\"<<\"",
	TMinus: "\"-\"", TMinusMinus: "\"--\"", TOpenBrace: "\"{\"", TOpenBracket: "\"[\"",
	TOpenParen: "\"(\"", TPercent: "\"%\"", TPlus: "\"+\"", TPlusPlus: "\"++\"",
	TQuestion: "\"?\"", TQuestionDot: "\"?.\"", TQuestionQuestion: "\"??\"",
	TSemicolon: "\";\"", TSlash: "\"/\"", TTilde: "\"~\"",

	// Assignments
	TAmpersandAmpersandEquals: "\"&&=\"", TAmpersandEquals: "\"&=\"", TAsteriskAsteriskEquals: "\"**=\"",
	TAsteriskEquals: "\"*=\"", TBarBarEquals: "\"||=\"", TBarEquals: "\"|=\"", TCaretEquals: "\"^=\"",
	TEquals: "\"=\"", TGreaterThanGreaterThanEquals: "\">>=\"", TGreaterThanGreaterThanGreaterThanEquals: "\">>>=\"",
	TLessThanLessThanEquals: "\"<<=\"", TMinusEquals: "\"-=\"", TPercentEquals: "\"%=\"",
	TPlusEquals: "\"+=\"", TQuestionQuestionEquals: "\"??=\"", TSlashEquals: "\"/=\"",

	// Class-private fields and methods
	TPrivateIdentifier: "private identifier",

	// Identifiers
	TIdentifier: "identifier", TEscapedKeyword: "escaped keyword",

	// Reserved words
	TBreak: "\"break\"", TCase: "\"case\"", TCatch: "\"catch\"", TClass: "\"class\"", TConst: "\"const\"",
	TContinue: "\"continue\"", TDebugger: "\"debugger\"", TDefault: "\"default\"", TDelete: "\"delete\"",
	TDo: "\"do\"", TElse: "\"else\"", TEnum: "\"enum\"", TExport: "\"export\"", TExtends: "\"extends\"",
	TFalse: "\"false\"", TFinally: "\"finally\"", TFor: "\"for\"", TFunction: "\"function\"", TIf: "\"if\"",
	TImport: "\"import\"", TIn: "\"in\"", TInstanceof: "\"instanceof\"", TNew: "\"new\"", TNull: "\"null\"",
	TReturn: "\"return\"", TSuper: "\"super\"", TSwitch: "\"switch\"", TThis: "\"this\"", TThrow: "\"throw\"",
	TTrue: "\"true\"", TTry: "\"try\"", TTypeof: "\"typeof\"", TVar: "\"var\"", TVoid: "\"void\"",
	TWhile: "\"while\"", TWith: "\"with\"",
}

// This is from https://github.com/microsoft/TypeScript/blob/master/src/compiler/transformers/jsx.ts
var jsxEntity = map[string]rune{
	"quot": 0x0022, "amp": 0x0026, "apos": 0x0027, "lt": 0x003C, "gt": 0x003E, "nbsp": 0x00A0,
	"iexcl": 0x00A1, "cent": 0x00A2, "pound": 0x00A3, "curren": 0x00A4, "yen": 0x00A5, "brvbar": 0x00A6,
	"sect": 0x00A7, "uml": 0x00A8, "copy": 0x00A9, "ordf": 0x00AA, "laquo": 0x00AB, "not": 0x00AC,
	"shy": 0x00AD, "reg": 0x00AE, "macr": 0x00AF, "deg": 0x00B0, "plusmn": 0x00B1, "sup2": 0x00B2,
	"sup3": 0x00B3, "acute": 0x00B4, "micro": 0x00B5, "para": 0x00B6, "middot": 0x00B7, "cedil": 0x00B8,
	"sup1": 0x00B9, "ordm": 0x00BA, "raquo": 0x00BB, "frac14": 0x00BC, "frac12": 0x00BD, "frac34": 0x00BE,
	"iquest": 0x00BF, "Agrave": 0x00C0, "Aacute": 0x00C1, "Acirc": 0x00C2, "Atilde": 0x00C3, "Auml": 0x00C4,
	"Aring": 0x00C5, "AElig": 0x00C6, "Ccedil": 0x00C7, "Egrave": 0x00C8, "Eacute": 0x00C9, "Ecirc": 0x00CA,
	"Euml": 0x00CB, "Igrave": 0x00CC, "Iacute": 0x00CD, "Icirc": 0x00CE, "Iuml": 0x00CF, "ETH": 0x00D0,
	"Ntilde": 0x00D1, "Ograve": 0x00D2, "Oacute": 0x00D3, "Ocirc": 0x00D4, "Otilde": 0x00D5, "Ouml": 0x00D6,
	"times": 0x00D7, "Oslash": 0x00D8, "Ugrave": 0x00D9, "Uacute": 0x00DA, "Ucirc": 0x00DB, "Uuml": 0x00DC,
	"Yacute": 0x00DD, "THORN": 0x00DE, "szlig": 0x00DF, "agrave": 0x00E0, "aacute": 0x00E1, "acirc": 0x00E2,
	"atilde": 0x00E3, "auml": 0x00E4, "aring": 0x00E5, "aelig": 0x00E6, "ccedil": 0x00E7, "egrave": 0x00E8,
	"eacute": 0x00E9, "ecirc": 0x00EA, "euml": 0x00EB, "igrave": 0x00EC, "iacute": 0x00ED, "icirc": 0x00EE,
	"iuml": 0x00EF, "eth": 0x00F0, "ntilde": 0x00F1, "ograve": 0x00F2, "oacute": 0x00F3, "ocirc": 0x00F4,
	"otilde": 0x00F5, "ouml": 0x00F6, "divide": 0x00F7, "oslash": 0x00F8, "ugrave": 0x00F9, "uacute": 0x00FA,
	"ucirc": 0x00FB, "uuml": 0x00FC, "yacute": 0x00FD, "thorn": 0x00FE, "yuml": 0x00FF, "OElig": 0x0152,
	"oelig": 0x0153, "Scaron": 0x0160, "scaron": 0x0161, "Yuml": 0x0178, "fnof": 0x0192, "circ": 0x02C6,
	"tilde": 0x02DC, "Alpha": 0x0391, "Beta": 0x0392, "Gamma": 0x0393, "Delta": 0x0394, "Epsilon": 0x0395,
	"Zeta": 0x0396, "Eta": 0x0397, "Theta": 0x0398, "Iota": 0x0399, "Kappa": 0x039A, "Lambda": 0x039B,
	"Mu": 0x039C, "Nu": 0x039D, "Xi": 0x039E, "Omicron": 0x039F, "Pi": 0x03A0, "Rho": 0x03A1,
	"Sigma": 0x03A3, "Tau": 0x03A4, "Upsilon": 0x03A5, "Phi": 0x03A6, "Chi": 0x03A7, "Psi": 0x03A8,
	"Omega": 0x03A9, "alpha": 0x03B1, "beta": 0x03B2, "gamma": 0x03B3, "delta": 0x03B4, "epsilon": 0x03B5,
	"zeta": 0x03B6, "eta": 0x03B7, "theta": 0x03B8, "iota": 0x03B9, "kappa": 0x03BA, "lambda": 0x03BB,
	"mu": 0x03BC, "nu": 0x03BD, "xi": 0x03BE, "omicron": 0x03BF, "pi": 0x03C0, "rho": 0x03C1,
	"sigmaf": 0x03C2, "sigma": 0x03C3, "tau": 0x03C4, "upsilon": 0x03C5, "phi": 0x03C6, "chi": 0x03C7,
	"psi": 0x03C8, "omega": 0x03C9, "thetasym": 0x03D1, "upsih": 0x03D2, "piv": 0x03D6, "ensp": 0x2002,
	"emsp": 0x2003, "thinsp": 0x2009, "zwnj": 0x200C, "zwj": 0x200D, "lrm": 0x200E, "rlm": 0x200F,
	"ndash": 0x2013, "mdash": 0x2014, "lsquo": 0x2018, "rsquo": 0x2019, "sbquo": 0x201A, "ldquo": 0x201C,
	"rdquo": 0x201D, "bdquo": 0x201E, "dagger": 0x2020, "Dagger": 0x2021, "bull": 0x2022, "hellip": 0x2026,
	"permil": 0x2030, "prime": 0x2032, "Prime": 0x2033, "lsaquo": 0x2039, "rsaquo": 0x203A, "oline": 0x203E,
	"frasl": 0x2044, "euro": 0x20AC, "image": 0x2111, "weierp": 0x2118, "real": 0x211C, "trade": 0x2122,
	"alefsym": 0x2135, "larr": 0x2190, "uarr": 0x2191, "rarr": 0x2192, "darr": 0x2193, "harr": 0x2194,
	"crarr": 0x21B5, "lArr": 0x21D0, "uArr": 0x21D1, "rArr": 0x21D2, "dArr": 0x21D3, "hArr": 0x21D4,
	"forall": 0x2200, "part": 0x2202, "exist": 0x2203, "empty": 0x2205, "nabla": 0x2207, "isin": 0x2208,
	"notin": 0x2209, "ni": 0x220B, "prod": 0x220F, "sum": 0x2211, "minus": 0x2212, "lowast": 0x2217,
	"radic": 0x221A, "prop": 0x221D, "infin": 0x221E, "ang": 0x2220, "and": 0x2227, "or": 0x2228,
	"cap": 0x2229, "cup": 0x222A, "int": 0x222B, "there4": 0x2234, "sim": 0x223C, "cong": 0x2245,
	"asymp": 0x2248, "ne": 0x2260, "equiv": 0x2261, "le": 0x2264, "ge": 0x2265, "sub": 0x2282,
	"sup": 0x2283, "nsub": 0x2284, "sube": 0x2286, "supe": 0x2287, "oplus": 0x2295, "otimes": 0x2297,
	"perp": 0x22A5, "sdot": 0x22C5, "lceil": 0x2308, "rceil": 0x2309, "lfloor": 0x230A, "rfloor": 0x230B,
	"lang": 0x2329, "rang": 0x232A, "loz": 0x25CA, "spades": 0x2660, "clubs": 0x2663, "hearts": 0x2665,
	"diams": 0x2666,
}
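Note: the lexer consults jsxEntity after stripping the leading "&" and trailing ";" from an HTML entity found in JSX text. A tiny standalone re-creation of that lookup with a hypothetical subset of the table:

package main

import (
	"fmt"
	"strings"
)

// A small subset of the jsxEntity table above, used the way a JSX lexer
// would: strip the "&" and ";" and look up the bare entity name.
var jsxEntity = map[string]rune{"amp": 0x0026, "lt": 0x003C, "gt": 0x003E, "nbsp": 0x00A0}

func decodeEntity(text string) (rune, bool) {
	if strings.HasPrefix(text, "&") && strings.HasSuffix(text, ";") {
		r, ok := jsxEntity[text[1:len(text)-1]]
		return r, ok
	}
	return 0, false
}

func main() {
	r, ok := decodeEntity("&amp;")
	fmt.Println(string(r), ok) // & true
}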
2039 vendor/github.com/evanw/esbuild/internal/js_lexer/unicode.go (generated, vendored)
File diff suppressed because it is too large
48 vendor/github.com/evanw/esbuild/internal/js_parser/global_name_parser.go (generated, vendored)
@@ -1,48 +0,0 @@
package js_parser

import (
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

func ParseGlobalName(log logger.Log, source logger.Source) (result []string, ok bool) {
	ok = true
	defer func() {
		r := recover()
		if _, isLexerPanic := r.(js_lexer.LexerPanic); isLexerPanic {
			ok = false
		} else if r != nil {
			panic(r)
		}
	}()

	lexer := js_lexer.NewLexerGlobalName(log, source)

	// Start off with an identifier
	result = append(result, lexer.Identifier)
	lexer.Expect(js_lexer.TIdentifier)

	// Follow with dot or index expressions
	for lexer.Token != js_lexer.TEndOfFile {
		switch lexer.Token {
		case js_lexer.TDot:
			lexer.Next()
			if !lexer.IsIdentifierOrKeyword() {
				lexer.Expect(js_lexer.TIdentifier)
			}
			result = append(result, lexer.Identifier)
			lexer.Next()

		case js_lexer.TOpenBracket:
			lexer.Next()
			result = append(result, js_lexer.UTF16ToString(lexer.StringLiteral()))
			lexer.Expect(js_lexer.TStringLiteral)
			lexer.Expect(js_lexer.TCloseBracket)

		default:
			lexer.Expect(js_lexer.TDot)
		}
	}

	return
}
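Note: ParseGlobalName accepts an identifier followed by "." or ["..."] accesses. A simplified standalone approximation of that grammar (illustrative only, with no error handling; the real implementation drives the esbuild lexer and validates identifiers):

package main

import (
	"fmt"
	"strings"
)

// splitGlobalName sketches how a --global-name value maps to path segments.
func splitGlobalName(input string) []string {
	var result []string
	for len(input) > 0 {
		switch input[0] {
		case '.':
			input = input[1:] // skip the dot; next segment follows
		case '[':
			end := strings.IndexByte(input, ']')
			result = append(result, strings.Trim(input[1:end], `"'`))
			input = input[end+1:]
		default:
			end := strings.IndexAny(input, ".[")
			if end == -1 {
				end = len(input)
			}
			result = append(result, input[:end])
			input = input[end:]
		}
	}
	return result
}

func main() {
	fmt.Println(splitGlobalName(`window.myLib["v2"]`)) // [window myLib v2]
}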
15160 vendor/github.com/evanw/esbuild/internal/js_parser/js_parser.go (generated, vendored)
File diff suppressed because it is too large
2980 vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower.go (generated, vendored)
File diff suppressed because it is too large
187 vendor/github.com/evanw/esbuild/internal/js_parser/json_parser.go (generated, vendored)
@@ -1,187 +0,0 @@
package js_parser

import (
	"fmt"

	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

type jsonParser struct {
	log                            logger.Log
	source                         logger.Source
	tracker                        logger.LineColumnTracker
	lexer                          js_lexer.Lexer
	options                        JSONOptions
	suppressWarningsAboutWeirdCode bool
}

func (p *jsonParser) parseMaybeTrailingComma(closeToken js_lexer.T) bool {
	commaRange := p.lexer.Range()
	p.lexer.Expect(js_lexer.TComma)

	if p.lexer.Token == closeToken {
		if !p.options.AllowTrailingCommas {
			p.log.Add(logger.Error, &p.tracker, commaRange, "JSON does not support trailing commas")
		}
		return false
	}

	return true
}

func (p *jsonParser) parseExpr() js_ast.Expr {
	loc := p.lexer.Loc()

	switch p.lexer.Token {
	case js_lexer.TFalse:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: false}}

	case js_lexer.TTrue:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: true}}

	case js_lexer.TNull:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: js_ast.ENullShared}

	case js_lexer.TStringLiteral:
		value := p.lexer.StringLiteral()
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EString{Value: value}}

	case js_lexer.TNumericLiteral:
		value := p.lexer.Number
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.ENumber{Value: value}}

	case js_lexer.TMinus:
		p.lexer.Next()
		value := p.lexer.Number
		p.lexer.Expect(js_lexer.TNumericLiteral)
		return js_ast.Expr{Loc: loc, Data: &js_ast.ENumber{Value: -value}}

	case js_lexer.TOpenBracket:
		p.lexer.Next()
		isSingleLine := !p.lexer.HasNewlineBefore
		items := []js_ast.Expr{}

		for p.lexer.Token != js_lexer.TCloseBracket {
			if len(items) > 0 {
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
				if !p.parseMaybeTrailingComma(js_lexer.TCloseBracket) {
					break
				}
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
			}

			item := p.parseExpr()
			items = append(items, item)
		}

		if p.lexer.HasNewlineBefore {
			isSingleLine = false
		}
		p.lexer.Expect(js_lexer.TCloseBracket)
		return js_ast.Expr{Loc: loc, Data: &js_ast.EArray{
			Items:        items,
			IsSingleLine: isSingleLine,
		}}

	case js_lexer.TOpenBrace:
		p.lexer.Next()
		isSingleLine := !p.lexer.HasNewlineBefore
		properties := []js_ast.Property{}
		duplicates := make(map[string]logger.Range)

		for p.lexer.Token != js_lexer.TCloseBrace {
			if len(properties) > 0 {
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
				if !p.parseMaybeTrailingComma(js_lexer.TCloseBrace) {
					break
				}
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
			}

			keyString := p.lexer.StringLiteral()
			keyRange := p.lexer.Range()
			key := js_ast.Expr{Loc: keyRange.Loc, Data: &js_ast.EString{Value: keyString}}
			p.lexer.Expect(js_lexer.TStringLiteral)

			// Warn about duplicate keys
			if !p.suppressWarningsAboutWeirdCode {
				keyText := js_lexer.UTF16ToString(keyString)
				if prevRange, ok := duplicates[keyText]; ok {
					p.log.AddWithNotes(logger.Warning, &p.tracker, keyRange, fmt.Sprintf("Duplicate key %q in object literal", keyText),
						[]logger.MsgData{p.tracker.MsgData(prevRange, fmt.Sprintf("The original key %q is here:", keyText))})
				} else {
					duplicates[keyText] = keyRange
				}
			}

			p.lexer.Expect(js_lexer.TColon)
|
|
||||||
value := p.parseExpr()
|
|
||||||
|
|
||||||
property := js_ast.Property{
|
|
||||||
Kind: js_ast.PropertyNormal,
|
|
||||||
Key: key,
|
|
||||||
ValueOrNil: value,
|
|
||||||
}
|
|
||||||
properties = append(properties, property)
|
|
||||||
}
|
|
||||||
|
|
||||||
if p.lexer.HasNewlineBefore {
|
|
||||||
isSingleLine = false
|
|
||||||
}
|
|
||||||
p.lexer.Expect(js_lexer.TCloseBrace)
|
|
||||||
return js_ast.Expr{Loc: loc, Data: &js_ast.EObject{
|
|
||||||
Properties: properties,
|
|
||||||
IsSingleLine: isSingleLine,
|
|
||||||
}}
|
|
||||||
|
|
||||||
default:
|
|
||||||
p.lexer.Unexpected()
|
|
||||||
return js_ast.Expr{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type JSONOptions struct {
|
|
||||||
AllowComments bool
|
|
||||||
AllowTrailingCommas bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func ParseJSON(log logger.Log, source logger.Source, options JSONOptions) (result js_ast.Expr, ok bool) {
|
|
||||||
ok = true
|
|
||||||
defer func() {
|
|
||||||
r := recover()
|
|
||||||
if _, isLexerPanic := r.(js_lexer.LexerPanic); isLexerPanic {
|
|
||||||
ok = false
|
|
||||||
} else if r != nil {
|
|
||||||
panic(r)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
p := &jsonParser{
|
|
||||||
log: log,
|
|
||||||
source: source,
|
|
||||||
tracker: logger.MakeLineColumnTracker(&source),
|
|
||||||
options: options,
|
|
||||||
lexer: js_lexer.NewLexerJSON(log, source, options.AllowComments),
|
|
||||||
suppressWarningsAboutWeirdCode: helpers.IsInsideNodeModules(source.KeyPath.Text),
|
|
||||||
}
|
|
||||||
|
|
||||||
result = p.parseExpr()
|
|
||||||
p.lexer.Expect(js_lexer.TEndOfFile)
|
|
||||||
return
|
|
||||||
}
|
|
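This tolerant parser exists because tsconfig.json-style files allow comments and trailing commas, which strict JSON parsers reject; the AllowComments and AllowTrailingCommas options above switch those extensions on per caller. A quick standalone check of how Go's own encoding/json behaves on a trailing comma:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var v map[string]any

	// Strict JSON parses fine.
	fmt.Println(json.Unmarshal([]byte(`{"a": 1}`), &v)) // <nil>

	// A trailing comma is a syntax error in encoding/json, which is why a
	// parser with an AllowTrailingCommas-style switch (as above) is needed
	// for tsconfig-like inputs.
	fmt.Println(json.Unmarshal([]byte(`{"a": 1,}`), &v)) // invalid character '}' ...
}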
251
vendor/github.com/evanw/esbuild/internal/js_parser/sourcemap_parser.go
generated
vendored
@@ -1,251 +0,0 @@
package js_parser

import (
	"fmt"
	"sort"

	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/sourcemap"
)

// Specification: https://sourcemaps.info/spec.html
func ParseSourceMap(log logger.Log, source logger.Source) *sourcemap.SourceMap {
	expr, ok := ParseJSON(log, source, JSONOptions{})
	if !ok {
		return nil
	}

	obj, ok := expr.Data.(*js_ast.EObject)
	tracker := logger.MakeLineColumnTracker(&source)
	if !ok {
		log.Add(logger.Error, &tracker, logger.Range{Loc: expr.Loc}, "Invalid source map")
		return nil
	}

	var sources []string
	var sourcesContent []sourcemap.SourceContent
	var mappingsRaw []uint16
	var mappingsStart int32
	hasVersion := false

	for _, prop := range obj.Properties {
		keyRange := source.RangeOfString(prop.Key.Loc)

		switch js_lexer.UTF16ToString(prop.Key.Data.(*js_ast.EString).Value) {
		case "sections":
			log.Add(logger.Warning, &tracker, keyRange, "Source maps with \"sections\" are not supported")
			return nil

		case "version":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.ENumber); ok && value.Value == 3 {
				hasVersion = true
			}

		case "mappings":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.EString); ok {
				mappingsRaw = value.Value
				mappingsStart = prop.ValueOrNil.Loc.Start + 1
			}

		case "sources":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.EArray); ok {
				sources = nil
				for _, item := range value.Items {
					if element, ok := item.Data.(*js_ast.EString); ok {
						sources = append(sources, js_lexer.UTF16ToString(element.Value))
					} else {
						sources = append(sources, "")
					}
				}
			}

		case "sourcesContent":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.EArray); ok {
				sourcesContent = nil
				for _, item := range value.Items {
					if element, ok := item.Data.(*js_ast.EString); ok {
						sourcesContent = append(sourcesContent, sourcemap.SourceContent{
							Value:  element.Value,
							Quoted: source.TextForRange(source.RangeOfString(item.Loc)),
						})
					} else {
						sourcesContent = append(sourcesContent, sourcemap.SourceContent{})
					}
				}
			}
		}
	}

	// Silently fail if the version was missing or incorrect
	if !hasVersion {
		return nil
	}

	// Silently fail if the source map is pointless (i.e. empty)
	if len(sources) == 0 || len(mappingsRaw) == 0 {
		return nil
	}

	var mappings mappingArray
	mappingsLen := len(mappingsRaw)
	sourcesLen := len(sources)
	generatedLine := 0
	generatedColumn := 0
	sourceIndex := 0
	originalLine := 0
	originalColumn := 0
	current := 0
	errorText := ""
	errorLen := 0
	needSort := false

	// Parse the mappings
	for current < mappingsLen {
		// Handle a line break
		if mappingsRaw[current] == ';' {
			generatedLine++
			generatedColumn = 0
			current++
			continue
		}

		// Read the generated column
		generatedColumnDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing generated column"
			errorLen = i
			break
		}
		if generatedColumnDelta < 0 {
			// This would mess up binary search
			needSort = true
		}
		generatedColumn += generatedColumnDelta
		if generatedColumn < 0 {
			errorText = fmt.Sprintf("Invalid generated column value: %d", generatedColumn)
			errorLen = i
			break
		}
		current += i

		// According to the specification, it's valid for a mapping to have 1,
		// 4, or 5 variable-length fields. Having one field means there's no
		// original location information, which is pretty useless. Just ignore
		// those entries.
		if current == mappingsLen {
			break
		}
		switch mappingsRaw[current] {
		case ',':
			current++
			continue
		case ';':
			continue
		}

		// Read the original source
		sourceIndexDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing source index"
			errorLen = i
			break
		}
		sourceIndex += sourceIndexDelta
		if sourceIndex < 0 || sourceIndex >= sourcesLen {
			errorText = fmt.Sprintf("Invalid source index value: %d", sourceIndex)
			errorLen = i
			break
		}
		current += i

		// Read the original line
		originalLineDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing original line"
			errorLen = i
			break
		}
		originalLine += originalLineDelta
		if originalLine < 0 {
			errorText = fmt.Sprintf("Invalid original line value: %d", originalLine)
			errorLen = i
			break
		}
		current += i

		// Read the original column
		originalColumnDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing original column"
			errorLen = i
			break
		}
		originalColumn += originalColumnDelta
		if originalColumn < 0 {
			errorText = fmt.Sprintf("Invalid original column value: %d", originalColumn)
			errorLen = i
			break
		}
		current += i

		// Ignore the optional name index
		if _, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:]); ok {
			current += i
		}

		// Handle the next character
		if current < mappingsLen {
			if c := mappingsRaw[current]; c == ',' {
				current++
			} else if c != ';' {
				errorText = fmt.Sprintf("Invalid character after mapping: %q",
					js_lexer.UTF16ToString(mappingsRaw[current:current+1]))
				errorLen = 1
				break
			}
		}

		mappings = append(mappings, sourcemap.Mapping{
			GeneratedLine:   int32(generatedLine),
			GeneratedColumn: int32(generatedColumn),
			SourceIndex:     int32(sourceIndex),
			OriginalLine:    int32(originalLine),
			OriginalColumn:  int32(originalColumn),
		})
	}

	if errorText != "" {
		r := logger.Range{Loc: logger.Loc{Start: mappingsStart + int32(current)}, Len: int32(errorLen)}
		log.Add(logger.Warning, &tracker, r,
			fmt.Sprintf("Bad \"mappings\" data in source map at character %d: %s", current, errorText))
		return nil
	}

	if needSort {
		// If we get here, some mappings are out of order. Lines can't be out of
		// order by construction but columns can. This is a pretty rare situation
		// because almost all source map generators always write out mappings in
		// order as they write the output instead of scrambling the order.
		sort.Stable(mappings)
	}

	return &sourcemap.SourceMap{
		Sources:        sources,
		SourcesContent: sourcesContent,
		Mappings:       mappings,
	}
}

// This type is just so we can use Go's native sort function
type mappingArray []sourcemap.Mapping

func (a mappingArray) Len() int          { return len(a) }
func (a mappingArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }

func (a mappingArray) Less(i int, j int) bool {
	ai := a[i]
	aj := a[j]
	return ai.GeneratedLine < aj.GeneratedLine || (ai.GeneratedLine == aj.GeneratedLine && ai.GeneratedColumn <= aj.GeneratedColumn)
}
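The "mappings" string decoded above is base64 VLQ: each base64 digit carries five value bits plus a continuation bit (0x20), and the lowest bit of the assembled number is the sign. A minimal standalone decoder over ASCII input, analogous to the DecodeVLQUTF16 calls above (illustrative, not esbuild's sourcemap package):

package main

import (
	"fmt"
	"strings"
)

const base64Chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"

// decodeVLQ reads one base64 VLQ value from s and returns the value and the
// number of bytes consumed. Bit 0x20 of each digit means "more digits
// follow"; the low bit of the finished number is the sign.
func decodeVLQ(s string) (value int, n int, ok bool) {
	shift := uint(0)
	for n < len(s) {
		digit := strings.IndexByte(base64Chars, s[n])
		if digit < 0 {
			return 0, n, false
		}
		n++
		value |= (digit & 31) << shift
		if digit&32 == 0 {
			// Last digit: unpack the sign from the lowest bit.
			if value&1 != 0 {
				return -(value >> 1), n, true
			}
			return value >> 1, n, true
		}
		shift += 5
	}
	return 0, n, false
}

func main() {
	fmt.Println(decodeVLQ("A"))  // 0 1 true
	fmt.Println(decodeVLQ("C"))  // 1 1 true
	fmt.Println(decodeVLQ("D"))  // -1 1 true
	fmt.Println(decodeVLQ("yB")) // 25 2 true (two digits)
}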
1601
vendor/github.com/evanw/esbuild/internal/js_parser/ts_parser.go
generated
vendored
File diff suppressed because it is too large
3212
vendor/github.com/evanw/esbuild/internal/js_printer/js_printer.go
generated
vendored
File diff suppressed because it is too large
1612
vendor/github.com/evanw/esbuild/internal/logger/logger.go
generated
vendored
File diff suppressed because it is too large
34
vendor/github.com/evanw/esbuild/internal/logger/logger_darwin.go
generated
vendored
@@ -1,34 +0,0 @@
//go:build darwin
// +build darwin

package logger

import (
	"os"

	"golang.org/x/sys/unix"
)

const SupportsColorEscapes = true

func GetTerminalInfo(file *os.File) (info TerminalInfo) {
	fd := file.Fd()

	// Is this file descriptor a terminal?
	if _, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA); err == nil {
		info.IsTTY = true
		info.UseColorEscapes = !hasNoColorEnvironmentVariable()

		// Get the width of the window
		if w, err := unix.IoctlGetWinsize(int(fd), unix.TIOCGWINSZ); err == nil {
			info.Width = int(w.Col)
			info.Height = int(w.Row)
		}
	}

	return
}

func writeStringWithColor(file *os.File, text string) {
	file.WriteString(text)
}
34
vendor/github.com/evanw/esbuild/internal/logger/logger_linux.go
generated
vendored
@@ -1,34 +0,0 @@
//go:build linux
// +build linux

package logger

import (
	"os"

	"golang.org/x/sys/unix"
)

const SupportsColorEscapes = true

func GetTerminalInfo(file *os.File) (info TerminalInfo) {
	fd := file.Fd()

	// Is this file descriptor a terminal?
	if _, err := unix.IoctlGetTermios(int(fd), unix.TCGETS); err == nil {
		info.IsTTY = true
		info.UseColorEscapes = !hasNoColorEnvironmentVariable()

		// Get the width of the window
		if w, err := unix.IoctlGetWinsize(int(fd), unix.TIOCGWINSZ); err == nil {
			info.Width = int(w.Col)
			info.Height = int(w.Row)
		}
	}

	return
}

func writeStringWithColor(file *os.File, text string) {
	file.WriteString(text)
}
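The darwin and linux variants differ only in the termios request constant (TIOCGETA vs TCGETS); the window-size ioctl is the same on both. A small standalone program using the same golang.org/x/sys/unix calls (Linux constants shown; per-platform build tags as above):

//go:build linux

package main

import (
	"fmt"
	"os"

	"golang.org/x/sys/unix"
)

func main() {
	fd := int(os.Stdout.Fd())

	// A TCGETS ioctl only succeeds on a real terminal, so its error result
	// doubles as an isatty check (the same trick as the vendored logger).
	if _, err := unix.IoctlGetTermios(fd, unix.TCGETS); err != nil {
		fmt.Println("stdout is not a TTY")
		return
	}

	if w, err := unix.IoctlGetWinsize(fd, unix.TIOCGWINSZ); err == nil {
		fmt.Printf("terminal size: %dx%d\n", w.Col, w.Row)
	}
}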
16
vendor/github.com/evanw/esbuild/internal/logger/logger_other.go
generated
vendored
@@ -1,16 +0,0 @@
//go:build !darwin && !linux && !windows
// +build !darwin,!linux,!windows

package logger

import "os"

const SupportsColorEscapes = false

func GetTerminalInfo(*os.File) TerminalInfo {
	return TerminalInfo{}
}

func writeStringWithColor(file *os.File, text string) {
	file.WriteString(text)
}
136
vendor/github.com/evanw/esbuild/internal/logger/logger_windows.go
generated
vendored
@@ -1,136 +0,0 @@
//go:build windows
// +build windows

package logger

import (
	"os"
	"strings"
	"syscall"
	"unsafe"
)

const SupportsColorEscapes = true

var kernel32 = syscall.NewLazyDLL("kernel32.dll")
var getConsoleMode = kernel32.NewProc("GetConsoleMode")
var setConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
var getConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")

type consoleScreenBufferInfo struct {
	dwSizeX              int16
	dwSizeY              int16
	dwCursorPositionX    int16
	dwCursorPositionY    int16
	wAttributes          uint16
	srWindowLeft         int16
	srWindowTop          int16
	srWindowRight        int16
	srWindowBottom       int16
	dwMaximumWindowSizeX int16
	dwMaximumWindowSizeY int16
}

func GetTerminalInfo(file *os.File) TerminalInfo {
	fd := file.Fd()

	// Is this file descriptor a terminal?
	var unused uint32
	isTTY, _, _ := syscall.Syscall(getConsoleMode.Addr(), 2, fd, uintptr(unsafe.Pointer(&unused)), 0)

	// Get the width of the window
	var info consoleScreenBufferInfo
	syscall.Syscall(getConsoleScreenBufferInfo.Addr(), 2, fd, uintptr(unsafe.Pointer(&info)), 0)

	return TerminalInfo{
		IsTTY:           isTTY != 0,
		Width:           int(info.dwSizeX) - 1,
		Height:          int(info.dwSizeY) - 1,
		UseColorEscapes: !hasNoColorEnvironmentVariable(),
	}
}

const (
	FOREGROUND_BLUE uint8 = 1 << iota
	FOREGROUND_GREEN
	FOREGROUND_RED
	FOREGROUND_INTENSITY
	BACKGROUND_BLUE
	BACKGROUND_GREEN
	BACKGROUND_RED
	BACKGROUND_INTENSITY
)

var windowsEscapeSequenceMap = map[string]uint8{
	TerminalColors.Reset: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE,
	TerminalColors.Dim:   FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE,
	TerminalColors.Bold:  FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY,

	// Apparently underlines only work with the CJK locale on Windows :(
	TerminalColors.Underline: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE,

	TerminalColors.Red:   FOREGROUND_RED,
	TerminalColors.Green: FOREGROUND_GREEN,
	TerminalColors.Blue:  FOREGROUND_BLUE,

	TerminalColors.Cyan:    FOREGROUND_GREEN | FOREGROUND_BLUE,
	TerminalColors.Magenta: FOREGROUND_RED | FOREGROUND_BLUE,
	TerminalColors.Yellow:  FOREGROUND_RED | FOREGROUND_GREEN,

	TerminalColors.RedBgRed:     FOREGROUND_RED | BACKGROUND_RED,
	TerminalColors.RedBgWhite:   FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_RED,
	TerminalColors.GreenBgGreen: FOREGROUND_GREEN | BACKGROUND_GREEN,
	TerminalColors.GreenBgWhite: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_GREEN,
	TerminalColors.BlueBgBlue:   FOREGROUND_BLUE | BACKGROUND_BLUE,
	TerminalColors.BlueBgWhite:  FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_BLUE,

	TerminalColors.CyanBgCyan:       FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_GREEN | BACKGROUND_BLUE,
	TerminalColors.CyanBgBlack:      BACKGROUND_GREEN | BACKGROUND_BLUE,
	TerminalColors.MagentaBgMagenta: FOREGROUND_RED | FOREGROUND_BLUE | BACKGROUND_RED | BACKGROUND_BLUE,
	TerminalColors.MagentaBgBlack:   BACKGROUND_RED | BACKGROUND_BLUE,
	TerminalColors.YellowBgYellow:   FOREGROUND_RED | FOREGROUND_GREEN | BACKGROUND_RED | BACKGROUND_GREEN,
	TerminalColors.YellowBgBlack:    BACKGROUND_RED | BACKGROUND_GREEN,
}

func writeStringWithColor(file *os.File, text string) {
	fd := file.Fd()
	i := 0

	for i < len(text) {
		// Find the escape
		if text[i] != 033 {
			i++
			continue
		}

		// Find the 'm'
		window := text[i:]
		if len(window) > 8 {
			window = window[:8]
		}
		m := strings.IndexByte(window, 'm')
		if m == -1 {
			i++
			continue
		}
		m += i + 1

		// Find the escape sequence
		attributes, ok := windowsEscapeSequenceMap[text[i:m]]
		if !ok {
			i++
			continue
		}

		// Write out the text before the escape sequence
		file.WriteString(text[:i])

		// Apply the escape sequence
		text = text[m:]
		i = 0
		setConsoleTextAttribute.Call(fd, uintptr(attributes))
	}

	// Write out the remaining text
	file.WriteString(text)
}
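writeStringWithColor above scans the output for ESC...'m' sequences and translates each known one into a SetConsoleTextAttribute call. The scanning step is easy to isolate; here is a standalone sketch that strips ANSI color sequences using the same find-ESC-then-'m'-within-a-window trick (illustrative, not esbuild's API):

package main

import (
	"fmt"
	"strings"
)

// stripANSIColors removes ESC '[' ... 'm' sequences the same way the vendored
// Windows logger finds them: locate ESC, look for the next 'm' within a small
// window, and skip everything in between.
func stripANSIColors(text string) string {
	var b strings.Builder
	i := 0
	for i < len(text) {
		if text[i] != 033 {
			b.WriteByte(text[i])
			i++
			continue
		}
		window := text[i:]
		if len(window) > 8 {
			window = window[:8]
		}
		m := strings.IndexByte(window, 'm')
		if m == -1 {
			// Not a color sequence; keep the byte.
			b.WriteByte(text[i])
			i++
			continue
		}
		i += m + 1 // skip the whole escape sequence
	}
	return b.String()
}

func main() {
	fmt.Println(stripANSIColors("\033[31merror:\033[0m something failed"))
	// Output: error: something failed
}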
624
vendor/github.com/evanw/esbuild/internal/renamer/renamer.go
generated
vendored
@@ -1,624 +0,0 @@
package renamer

import (
	"fmt"
	"sort"
	"strconv"
	"sync"
	"sync/atomic"

	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
)

func ComputeReservedNames(moduleScopes []*js_ast.Scope, symbols js_ast.SymbolMap) map[string]uint32 {
	names := make(map[string]uint32)

	// All keywords and strict mode reserved words are reserved names
	for k := range js_lexer.Keywords {
		names[k] = 1
	}
	for k := range js_lexer.StrictModeReservedWords {
		names[k] = 1
	}

	// All unbound symbols must be reserved names
	for _, scope := range moduleScopes {
		computeReservedNamesForScope(scope, symbols, names)
	}

	return names
}

func computeReservedNamesForScope(scope *js_ast.Scope, symbols js_ast.SymbolMap, names map[string]uint32) {
	for _, member := range scope.Members {
		symbol := symbols.Get(member.Ref)
		if symbol.Kind == js_ast.SymbolUnbound || symbol.MustNotBeRenamed {
			names[symbol.OriginalName] = 1
		}
	}
	for _, ref := range scope.Generated {
		symbol := symbols.Get(ref)
		if symbol.Kind == js_ast.SymbolUnbound || symbol.MustNotBeRenamed {
			names[symbol.OriginalName] = 1
		}
	}

	// If there's a direct "eval" somewhere inside the current scope, continue
	// traversing down the scope tree until we find it to get all reserved names
	if scope.ContainsDirectEval {
		for _, child := range scope.Children {
			if child.ContainsDirectEval {
				computeReservedNamesForScope(child, symbols, names)
			}
		}
	}
}

type Renamer interface {
	NameForSymbol(ref js_ast.Ref) string
}

////////////////////////////////////////////////////////////////////////////////
// noOpRenamer

type noOpRenamer struct {
	symbols js_ast.SymbolMap
}

func NewNoOpRenamer(symbols js_ast.SymbolMap) Renamer {
	return &noOpRenamer{
		symbols: symbols,
	}
}

func (r *noOpRenamer) NameForSymbol(ref js_ast.Ref) string {
	ref = js_ast.FollowSymbols(r.symbols, ref)
	return r.symbols.Get(ref).OriginalName
}

////////////////////////////////////////////////////////////////////////////////
// MinifyRenamer

type symbolSlot struct {
	name               string
	count              uint32
	needsCapitalForJSX uint32 // This is really a bool but needs to be atomic
}

type MinifyRenamer struct {
	symbols              js_ast.SymbolMap
	reservedNames        map[string]uint32
	slots                [3][]symbolSlot
	topLevelSymbolToSlot map[js_ast.Ref]uint32
}

func NewMinifyRenamer(symbols js_ast.SymbolMap, firstTopLevelSlots js_ast.SlotCounts, reservedNames map[string]uint32) *MinifyRenamer {
	return &MinifyRenamer{
		symbols:       symbols,
		reservedNames: reservedNames,
		slots: [3][]symbolSlot{
			make([]symbolSlot, firstTopLevelSlots[0]),
			make([]symbolSlot, firstTopLevelSlots[1]),
			make([]symbolSlot, firstTopLevelSlots[2]),
		},
		topLevelSymbolToSlot: make(map[js_ast.Ref]uint32),
	}
}

func (r *MinifyRenamer) NameForSymbol(ref js_ast.Ref) string {
	// Follow links to get to the underlying symbol
	ref = js_ast.FollowSymbols(r.symbols, ref)
	symbol := r.symbols.Get(ref)

	// Skip this symbol if the name is pinned
	ns := symbol.SlotNamespace()
	if ns == js_ast.SlotMustNotBeRenamed {
		return symbol.OriginalName
	}

	// Check if it's a nested scope symbol
	i := symbol.NestedScopeSlot

	// If it's not (i.e. it's in a top-level scope), look up the slot
	if !i.IsValid() {
		index, ok := r.topLevelSymbolToSlot[ref]
		if !ok {
			// If we get here, then we're printing a symbol that never had any
			// recorded uses. This is odd but can happen in certain scenarios.
			// For example, code in a branch with dead control flow won't mark
			// any uses but may still be printed. In that case it doesn't matter
			// what name we use since it's dead code.
			return symbol.OriginalName
		}
		i = ast.MakeIndex32(index)
	}

	return r.slots[ns][i.GetIndex()].name
}

// The sort order here is arbitrary but needs to be consistent between builds.
// The InnerIndex should be stable because the parser for a single file is
// single-threaded and deterministically assigns out InnerIndex values
// sequentially. But the SourceIndex should be unstable because the main thread
// assigns out source index values sequentially to newly-discovered dependencies
// in a multi-threaded producer/consumer relationship. So instead we use the
// index of the source in the DFS order over all entry points for stability.
type DeferredTopLevelSymbol struct {
	StableSourceIndex uint32
	Ref               js_ast.Ref
	Count             uint32
}

// This type is just so we can use Go's native sort function
type DeferredTopLevelSymbolArray []DeferredTopLevelSymbol

func (a DeferredTopLevelSymbolArray) Len() int          { return len(a) }
func (a DeferredTopLevelSymbolArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }
func (a DeferredTopLevelSymbolArray) Less(i int, j int) bool {
	ai, aj := a[i], a[j]
	if ai.StableSourceIndex < aj.StableSourceIndex {
		return true
	}
	if ai.StableSourceIndex > aj.StableSourceIndex {
		return false
	}
	if ai.Ref.InnerIndex < aj.Ref.InnerIndex {
		return true
	}
	if ai.Ref.InnerIndex > aj.Ref.InnerIndex {
		return false
	}
	return ai.Count < aj.Count
}

func (r *MinifyRenamer) AccumulateSymbolUseCounts(
	topLevelSymbols *DeferredTopLevelSymbolArray,
	symbolUses map[js_ast.Ref]js_ast.SymbolUse,
	stableSourceIndices []uint32,
) {
	// NOTE: This function is run in parallel. Make sure to avoid data races.

	for ref, use := range symbolUses {
		r.AccumulateSymbolCount(topLevelSymbols, ref, use.CountEstimate, stableSourceIndices)
	}
}

func (r *MinifyRenamer) AccumulateSymbolCount(
	topLevelSymbols *DeferredTopLevelSymbolArray,
	ref js_ast.Ref,
	count uint32,
	stableSourceIndices []uint32,
) {
	// NOTE: This function is run in parallel. Make sure to avoid data races.

	// Follow links to get to the underlying symbol
	ref = js_ast.FollowSymbols(r.symbols, ref)
	symbol := r.symbols.Get(ref)
	for symbol.NamespaceAlias != nil {
		ref = js_ast.FollowSymbols(r.symbols, symbol.NamespaceAlias.NamespaceRef)
		symbol = r.symbols.Get(ref)
	}

	// Skip this symbol if the name is pinned
	ns := symbol.SlotNamespace()
	if ns == js_ast.SlotMustNotBeRenamed {
		return
	}

	// Check if it's a nested scope symbol
	if i := symbol.NestedScopeSlot; i.IsValid() {
		// If it is, accumulate the count using a parallel-safe atomic increment
		slot := &r.slots[ns][i.GetIndex()]
		atomic.AddUint32(&slot.count, count)
		if symbol.MustStartWithCapitalLetterForJSX {
			atomic.StoreUint32(&slot.needsCapitalForJSX, 1)
		}
		return
	}

	// If it's a top-level symbol, defer it to later since we have
	// to allocate slots for these in serial instead of in parallel
	*topLevelSymbols = append(*topLevelSymbols, DeferredTopLevelSymbol{
		StableSourceIndex: stableSourceIndices[ref.SourceIndex],
		Ref:               ref,
		Count:             count,
	})
}

// The parallel part of the symbol count accumulation algorithm above processes
// nested symbols and generates on an array of top-level symbols to process later.
// After the parallel part has finished, that array of top-level symbols is passed
// to this function which processes them in serial.
func (r *MinifyRenamer) AllocateTopLevelSymbolSlots(topLevelSymbols DeferredTopLevelSymbolArray) {
	for _, stable := range topLevelSymbols {
		symbol := r.symbols.Get(stable.Ref)
		slots := &r.slots[symbol.SlotNamespace()]
		if i, ok := r.topLevelSymbolToSlot[stable.Ref]; ok {
			slot := &(*slots)[i]
			slot.count += stable.Count
			if symbol.MustStartWithCapitalLetterForJSX {
				slot.needsCapitalForJSX = 1
			}
		} else {
			needsCapitalForJSX := uint32(0)
			if symbol.MustStartWithCapitalLetterForJSX {
				needsCapitalForJSX = 1
			}
			i = uint32(len(*slots))
			*slots = append(*slots, symbolSlot{
				count:              stable.Count,
				needsCapitalForJSX: needsCapitalForJSX,
			})
			r.topLevelSymbolToSlot[stable.Ref] = i
		}
	}
}

func (r *MinifyRenamer) AssignNamesByFrequency(minifier *js_ast.NameMinifier) {
	for ns, slots := range r.slots {
		// Sort symbols by count
		sorted := make(slotAndCountArray, len(slots))
		for i, item := range slots {
			sorted[i] = slotAndCount{slot: uint32(i), count: item.count}
		}
		sort.Sort(sorted)

		// Assign names to symbols
		nextName := 0
		for _, data := range sorted {
			slot := &slots[data.slot]
			name := minifier.NumberToMinifiedName(nextName)
			nextName++

			// Make sure we never generate a reserved name. We only have to worry
			// about collisions with reserved identifiers for normal symbols, and we
			// only have to worry about collisions with keywords for labels. We do
			// not have to worry about either for private names because they start
			// with a "#" character.
			switch js_ast.SlotNamespace(ns) {
			case js_ast.SlotDefault:
				for r.reservedNames[name] != 0 {
					name = minifier.NumberToMinifiedName(nextName)
					nextName++
				}

				// Make sure names of symbols used in JSX elements start with a capital letter
				if slot.needsCapitalForJSX != 0 {
					for name[0] >= 'a' && name[0] <= 'z' {
						name = minifier.NumberToMinifiedName(nextName)
						nextName++
					}
				}

			case js_ast.SlotLabel:
				for js_lexer.Keywords[name] != 0 {
					name = minifier.NumberToMinifiedName(nextName)
					nextName++
				}
			}

			// Private names must be prefixed with "#"
			if js_ast.SlotNamespace(ns) == js_ast.SlotPrivateName {
				name = "#" + name
			}

			slot.name = name
		}
	}
}

// Returns the number of nested slots
func AssignNestedScopeSlots(moduleScope *js_ast.Scope, symbols []js_ast.Symbol) (slotCounts js_ast.SlotCounts) {
	// Temporarily set the nested scope slots of top-level symbols to valid so
	// they aren't renamed in nested scopes. This prevents us from accidentally
	// assigning nested scope slots to variables declared using "var" in a nested
	// scope that are actually hoisted up to the module scope to become a top-
	// level symbol.
	validSlot := ast.MakeIndex32(1)
	for _, member := range moduleScope.Members {
		symbols[member.Ref.InnerIndex].NestedScopeSlot = validSlot
	}
	for _, ref := range moduleScope.Generated {
		symbols[ref.InnerIndex].NestedScopeSlot = validSlot
	}

	// Assign nested scope slots independently for each nested scope
	for _, child := range moduleScope.Children {
		slotCounts.UnionMax(assignNestedScopeSlotsHelper(child, symbols, js_ast.SlotCounts{}))
	}

	// Then set the nested scope slots of top-level symbols back to zero. Top-
	// level symbols are not supposed to have nested scope slots.
	for _, member := range moduleScope.Members {
		symbols[member.Ref.InnerIndex].NestedScopeSlot = ast.Index32{}
	}
	for _, ref := range moduleScope.Generated {
		symbols[ref.InnerIndex].NestedScopeSlot = ast.Index32{}
	}
	return
}

func assignNestedScopeSlotsHelper(scope *js_ast.Scope, symbols []js_ast.Symbol, slot js_ast.SlotCounts) js_ast.SlotCounts {
	// Sort member map keys for determinism
	sortedMembers := make([]int, 0, len(scope.Members))
	for _, member := range scope.Members {
		sortedMembers = append(sortedMembers, int(member.Ref.InnerIndex))
	}
	sort.Ints(sortedMembers)

	// Assign slots for this scope's symbols. Only do this if the slot is
	// not already assigned. Nested scopes have copies of symbols from parent
	// scopes and we want to use the slot from the parent scope, not child scopes.
	for _, innerIndex := range sortedMembers {
		symbol := &symbols[innerIndex]
		if ns := symbol.SlotNamespace(); ns != js_ast.SlotMustNotBeRenamed && !symbol.NestedScopeSlot.IsValid() {
			symbol.NestedScopeSlot = ast.MakeIndex32(slot[ns])
			slot[ns]++
		}
	}
	for _, ref := range scope.Generated {
		symbol := &symbols[ref.InnerIndex]
		if ns := symbol.SlotNamespace(); ns != js_ast.SlotMustNotBeRenamed && !symbol.NestedScopeSlot.IsValid() {
			symbol.NestedScopeSlot = ast.MakeIndex32(slot[ns])
			slot[ns]++
		}
	}

	// Labels are always declared in a nested scope, so we don't need to check.
	if scope.Label.Ref != js_ast.InvalidRef {
		symbol := &symbols[scope.Label.Ref.InnerIndex]
		symbol.NestedScopeSlot = ast.MakeIndex32(slot[js_ast.SlotLabel])
		slot[js_ast.SlotLabel]++
	}

	// Assign slots for the symbols of child scopes
	slotCounts := slot
	for _, child := range scope.Children {
		slotCounts.UnionMax(assignNestedScopeSlotsHelper(child, symbols, slot))
	}
	return slotCounts
}

type slotAndCount struct {
	slot  uint32
	count uint32
}

// This type is just so we can use Go's native sort function
type slotAndCountArray []slotAndCount

func (a slotAndCountArray) Len() int          { return len(a) }
func (a slotAndCountArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }
func (a slotAndCountArray) Less(i int, j int) bool {
	ai, aj := a[i], a[j]
	return ai.count > aj.count || (ai.count == aj.count && ai.slot < aj.slot)
}

////////////////////////////////////////////////////////////////////////////////
// NumberRenamer

type NumberRenamer struct {
	symbols js_ast.SymbolMap
	names   [][]string
	root    numberScope
}

func NewNumberRenamer(symbols js_ast.SymbolMap, reservedNames map[string]uint32) *NumberRenamer {
	return &NumberRenamer{
		symbols: symbols,
		names:   make([][]string, len(symbols.SymbolsForSource)),
		root:    numberScope{nameCounts: reservedNames},
	}
}

func (r *NumberRenamer) NameForSymbol(ref js_ast.Ref) string {
	ref = js_ast.FollowSymbols(r.symbols, ref)
	if inner := r.names[ref.SourceIndex]; inner != nil {
		if name := inner[ref.InnerIndex]; name != "" {
			return name
		}
	}
	return r.symbols.Get(ref).OriginalName
}

func (r *NumberRenamer) AddTopLevelSymbol(ref js_ast.Ref) {
	r.assignName(&r.root, ref)
}

func (r *NumberRenamer) assignName(scope *numberScope, ref js_ast.Ref) {
	ref = js_ast.FollowSymbols(r.symbols, ref)

	// Don't rename the same symbol more than once
	inner := r.names[ref.SourceIndex]
	if inner != nil && inner[ref.InnerIndex] != "" {
		return
	}

	// Don't rename unbound symbols, symbols marked as reserved names, labels, or private names
	symbol := r.symbols.Get(ref)
	if symbol.SlotNamespace() != js_ast.SlotDefault {
		return
	}

	// Make sure names of symbols used in JSX elements start with a capital letter
	originalName := symbol.OriginalName
	if symbol.MustStartWithCapitalLetterForJSX {
		if first := rune(originalName[0]); first >= 'a' && first <= 'z' {
			originalName = fmt.Sprintf("%c%s", first+('A'-'a'), originalName[1:])
		}
	}

	// Compute a new name
	name := scope.findUnusedName(originalName)

	// Store the new name
	if inner == nil {
		// Note: This should not be a data race even though this method is run from
		// multiple threads. The parallel part only looks at symbols defined in
		// nested scopes, and those can only ever be accessed from within the file.
		// References to those symbols should never spread across files.
		//
		// While we could avoid the data race by densely preallocating the entire
		// "names" array ahead of time, that will waste a lot more memory for
		// builds that make heavy use of code splitting and have many chunks. Doing
		// things lazily like this means we use less memory but still stay safe.
		inner = make([]string, len(r.symbols.SymbolsForSource[ref.SourceIndex]))
		r.names[ref.SourceIndex] = inner
	}
	inner[ref.InnerIndex] = name
}

func (r *NumberRenamer) assignNamesRecursive(scope *js_ast.Scope, sourceIndex uint32, parent *numberScope, sorted *[]int) {
	s := &numberScope{parent: parent, nameCounts: make(map[string]uint32)}

	// Sort member map keys for determinism, reusing a shared memory buffer
	*sorted = (*sorted)[:0]
	for _, member := range scope.Members {
		*sorted = append(*sorted, int(member.Ref.InnerIndex))
	}
	sort.Ints(*sorted)

	// Rename all symbols in this scope
	for _, innerIndex := range *sorted {
		r.assignName(s, js_ast.Ref{SourceIndex: sourceIndex, InnerIndex: uint32(innerIndex)})
	}
	for _, ref := range scope.Generated {
		r.assignName(s, ref)
	}

	// Symbols in child scopes may also have to be renamed to avoid conflicts
	for _, child := range scope.Children {
		r.assignNamesRecursive(child, sourceIndex, s, sorted)
	}
}

func (r *NumberRenamer) AssignNamesByScope(nestedScopes map[uint32][]*js_ast.Scope) {
	waitGroup := sync.WaitGroup{}
	waitGroup.Add(len(nestedScopes))

	// Rename nested scopes from separate files in parallel
	for sourceIndex, scopes := range nestedScopes {
		go func(sourceIndex uint32, scopes []*js_ast.Scope) {
			var sorted []int
			for _, scope := range scopes {
				r.assignNamesRecursive(scope, sourceIndex, &r.root, &sorted)
			}
			waitGroup.Done()
		}(sourceIndex, scopes)
	}

	waitGroup.Wait()
}

type numberScope struct {
	parent *numberScope

	// This is used as a set of used names in this scope. This also maps the name
	// to the number of times the name has experienced a collision. When a name
	// collides with an already-used name, we need to rename it. This is done by
	// incrementing a number at the end until the name is unused. We save the
	// count here so that subsequent collisions can start counting from where the
	// previous collision ended instead of having to start counting from 1.
	nameCounts map[string]uint32
}

type nameUse uint8

const (
	nameUnused nameUse = iota
	nameUsed
	nameUsedInSameScope
)

func (s *numberScope) findNameUse(name string) nameUse {
	original := s
	for {
		if _, ok := s.nameCounts[name]; ok {
			if s == original {
				return nameUsedInSameScope
			}
			return nameUsed
		}
		s = s.parent
		if s == nil {
			return nameUnused
		}
	}
}

func (s *numberScope) findUnusedName(name string) string {
	name = js_lexer.ForceValidIdentifier(name)

	if use := s.findNameUse(name); use != nameUnused {
		// If the name is already in use, generate a new name by appending a number
		tries := uint32(1)
		if use == nameUsedInSameScope {
			// To avoid O(n^2) behavior, the number must start off being the number
			// that we used last time there was a collision with this name. Otherwise
			// if there are many collisions with the same name, each name collision
			// would have to increment the counter past all previous name collisions
			// which is a O(n^2) time algorithm. Only do this if this symbol comes
			// from the same scope as the previous one since sibling scopes can reuse
			// the same name without problems.
			tries = s.nameCounts[name]
		}
		prefix := name

		// Keep incrementing the number until the name is unused
		for {
			tries++
			name = prefix + strconv.Itoa(int(tries))

			// Make sure this new name is unused
			if s.findNameUse(name) == nameUnused {
				// Store the count so we can start here next time instead of starting
				// from 1. This means we avoid O(n^2) behavior.
				if use == nameUsedInSameScope {
					s.nameCounts[prefix] = tries
				}
				break
			}
		}
	}

	// Each name starts off with a count of 1 so that the first collision with
	// "name" is called "name2"
	s.nameCounts[name] = 1
	return name
}

////////////////////////////////////////////////////////////////////////////////
// ExportRenamer

type ExportRenamer struct {
	count int
	used  map[string]uint32
}

func (r *ExportRenamer) NextRenamedName(name string) string {
	if r.used == nil {
		r.used = make(map[string]uint32)
	}
	if tries, ok := r.used[name]; ok {
		prefix := name
		for {
			tries++
			name = prefix + strconv.Itoa(int(tries))
			if _, ok := r.used[name]; !ok {
				break
			}
		}
		r.used[name] = tries
	} else {
		r.used[name] = 1
	}
	return name
}

func (r *ExportRenamer) NextMinifiedName() string {
	name := js_ast.DefaultNameMinifier.NumberToMinifiedName(r.count)
	r.count++
	return name
}
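The numberScope collision handling above stores, per name, the last numeric suffix tried, so a long run of collisions on one name costs linear time overall rather than rescanning from 2 each time. A flat single-scope sketch of the same idea (illustrative, not esbuild's API):

package main

import (
	"fmt"
	"strconv"
)

// uniqueName returns name, or name2, name3, ... if taken. counts doubles as
// the set of used names and the last suffix tried per prefix, so N collisions
// on one name cost O(N) total instead of O(N^2).
func uniqueName(counts map[string]uint32, name string) string {
	if _, taken := counts[name]; taken {
		prefix := name
		tries := counts[prefix]
		for {
			tries++
			name = prefix + strconv.Itoa(int(tries))
			if _, taken := counts[name]; !taken {
				counts[prefix] = tries // resume here on the next collision
				break
			}
		}
	}
	counts[name] = 1
	return name
}

func main() {
	counts := map[string]uint32{}
	fmt.Println(uniqueName(counts, "x")) // x
	fmt.Println(uniqueName(counts, "x")) // x2
	fmt.Println(uniqueName(counts, "x")) // x3
}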
76
vendor/github.com/evanw/esbuild/internal/resolver/dataurl.go
generated
vendored
@@ -1,76 +0,0 @@
package resolver

import (
	"encoding/base64"
	"fmt"
	"net/url"
	"strings"
)

type DataURL struct {
	mimeType string
	data     string
	isBase64 bool
}

func ParseDataURL(url string) (parsed DataURL, ok bool) {
	if strings.HasPrefix(url, "data:") {
		if comma := strings.IndexByte(url, ','); comma != -1 {
			parsed.mimeType = url[len("data:"):comma]
			parsed.data = url[comma+1:]
			if strings.HasSuffix(parsed.mimeType, ";base64") {
				parsed.mimeType = parsed.mimeType[:len(parsed.mimeType)-len(";base64")]
				parsed.isBase64 = true
			}
			ok = true
		}
	}
	return
}

type MIMEType uint8

const (
	MIMETypeUnsupported MIMEType = iota
	MIMETypeTextCSS
	MIMETypeTextJavaScript
	MIMETypeApplicationJSON
)

func (parsed DataURL) DecodeMIMEType() MIMEType {
	// Remove things like ";charset=utf-8"
	mimeType := parsed.mimeType
	if semicolon := strings.IndexByte(mimeType, ';'); semicolon != -1 {
		mimeType = mimeType[:semicolon]
	}

	// Hard-code a few supported types
	switch mimeType {
	case "text/css":
		return MIMETypeTextCSS
	case "text/javascript":
		return MIMETypeTextJavaScript
	case "application/json":
		return MIMETypeApplicationJSON
	default:
		return MIMETypeUnsupported
	}
}

func (parsed DataURL) DecodeData() (string, error) {
	// Try to read base64 data
	if parsed.isBase64 {
		bytes, err := base64.StdEncoding.DecodeString(parsed.data)
		if err != nil {
			return "", fmt.Errorf("could not decode base64 data: %s", err.Error())
		}
		return string(bytes), nil
	}

	// Try to read percent-escaped data
	content, err := url.PathUnescape(parsed.data)
	if err != nil {
		return "", fmt.Errorf("could not decode percent-escaped data: %s", err.Error())
	}
	return content, nil
}
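DecodeData above handles exactly two payload encodings, ";base64" and percent-escaping. The same round trip with only the standard library, as a standalone sketch (decodeDataURL is a hypothetical helper name, not esbuild's API):

package main

import (
	"encoding/base64"
	"fmt"
	"net/url"
	"strings"
)

// decodeDataURL handles the two payload encodings a data: URL can carry,
// the same split as the vendored DecodeData: ";base64" or percent-escaping.
func decodeDataURL(u string) (string, error) {
	if !strings.HasPrefix(u, "data:") {
		return "", fmt.Errorf("not a data URL")
	}
	meta, payload, ok := strings.Cut(u[len("data:"):], ",")
	if !ok {
		return "", fmt.Errorf("missing comma")
	}
	if strings.HasSuffix(meta, ";base64") {
		b, err := base64.StdEncoding.DecodeString(payload)
		return string(b), err
	}
	return url.PathUnescape(payload)
}

func main() {
	fmt.Println(decodeDataURL("data:text/javascript;base64,Y29uc29sZS5sb2coMSk=")) // console.log(1)
	fmt.Println(decodeDataURL("data:text/javascript,console.log(%221%22)"))       // console.log("1")
}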
1225
vendor/github.com/evanw/esbuild/internal/resolver/package_json.go
generated
vendored
File diff suppressed because it is too large
1967
vendor/github.com/evanw/esbuild/internal/resolver/resolver.go
generated
vendored
File diff suppressed because it is too large
319
vendor/github.com/evanw/esbuild/internal/resolver/tsconfig_json.go
generated
vendored
@@ -1,319 +0,0 @@
package resolver

import (
	"fmt"
	"strings"

	"github.com/evanw/esbuild/internal/cache"
	"github.com/evanw/esbuild/internal/compat"
	"github.com/evanw/esbuild/internal/config"
	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/js_parser"
	"github.com/evanw/esbuild/internal/logger"
)

type TSConfigJSON struct {
	AbsPath string

	// The absolute path of "compilerOptions.baseUrl"
	BaseURL *string

	// This is used if "Paths" is non-nil. It's equal to "BaseURL" except if
	// "BaseURL" is missing, in which case it is as if "BaseURL" was ".". This
	// is to implement the "paths without baseUrl" feature from TypeScript 4.1.
	// More info: https://github.com/microsoft/TypeScript/issues/31869
	BaseURLForPaths string

	// The verbatim values of "compilerOptions.paths". The keys are patterns to
	// match and the values are arrays of fallback paths to search. Each key and
	// each fallback path can optionally have a single "*" wildcard character.
	// If both the key and the value have a wildcard, the substring matched by
	// the wildcard is substituted into the fallback path. The keys represent
	// module-style path names and the fallback paths are relative to the
	// "baseUrl" value in the "tsconfig.json" file.
	Paths map[string][]string

	JSXFactory                     []string
	JSXFragmentFactory             []string
	TSTarget                       *config.TSTarget
	UseDefineForClassFields        config.MaybeBool
	PreserveImportsNotUsedAsValues bool
	PreserveValueImports           bool
}

func ParseTSConfigJSON(
	log logger.Log,
	source logger.Source,
	jsonCache *cache.JSONCache,
	extends func(string, logger.Range) *TSConfigJSON,
) *TSConfigJSON {
	// Unfortunately "tsconfig.json" isn't actually JSON. It's some other
	// format that appears to be defined by the implementation details of the
	// TypeScript compiler.
	//
	// Attempt to parse it anyway by modifying the JSON parser, but just for
	// these particular files. This is likely not a completely accurate
	// emulation of what the TypeScript compiler does (e.g. string escape
	// behavior may also be different).
	json, ok := jsonCache.Parse(log, source, js_parser.JSONOptions{
		AllowComments:       true, // https://github.com/microsoft/TypeScript/issues/4987
		AllowTrailingCommas: true,
	})
	if !ok {
		return nil
	}

	var result TSConfigJSON
	result.AbsPath = source.KeyPath.Text
	tracker := logger.MakeLineColumnTracker(&source)

	// Parse "extends"
	if extends != nil {
		if valueJSON, _, ok := getProperty(json, "extends"); ok {
			if value, ok := getString(valueJSON); ok {
				if base := extends(value, source.RangeOfString(valueJSON.Loc)); base != nil {
					result = *base
				}
			}
		}
	}

	// Parse "compilerOptions"
	if compilerOptionsJSON, _, ok := getProperty(json, "compilerOptions"); ok {
		// Parse "baseUrl"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "baseUrl"); ok {
			if value, ok := getString(valueJSON); ok {
				result.BaseURL = &value
			}
		}

		// Parse "jsxFactory"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "jsxFactory"); ok {
			if value, ok := getString(valueJSON); ok {
				result.JSXFactory = parseMemberExpressionForJSX(log, &source, &tracker, valueJSON.Loc, value)
			}
		}

		// Parse "jsxFragmentFactory"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "jsxFragmentFactory"); ok {
			if value, ok := getString(valueJSON); ok {
				result.JSXFragmentFactory = parseMemberExpressionForJSX(log, &source, &tracker, valueJSON.Loc, value)
			}
		}

		// Parse "useDefineForClassFields"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "useDefineForClassFields"); ok {
			if value, ok := getBool(valueJSON); ok {
				if value {
					result.UseDefineForClassFields = config.True
				} else {
					result.UseDefineForClassFields = config.False
				}
			}
		}

		// Parse "target"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "target"); ok {
			if value, ok := getString(valueJSON); ok {
				constraints := make(map[compat.Engine][]int)
				r := source.RangeOfString(valueJSON.Loc)
				ok := true

				// See https://www.typescriptlang.org/tsconfig#target
				switch strings.ToLower(value) {
				case "es5":
					constraints[compat.ES] = []int{5}
				case "es6", "es2015":
					constraints[compat.ES] = []int{2015}
				case "es2016":
					constraints[compat.ES] = []int{2016}
				case "es2017":
					constraints[compat.ES] = []int{2017}
				case "es2018":
					constraints[compat.ES] = []int{2018}
				case "es2019":
					constraints[compat.ES] = []int{2019}
				case "es2020":
					constraints[compat.ES] = []int{2020}
				case "es2021":
					constraints[compat.ES] = []int{2021}
				case "esnext":
					// Nothing to do in this case
				default:
					ok = false
					if !helpers.IsInsideNodeModules(source.KeyPath.Text) {
						log.Add(logger.Warning, &tracker, r,
							fmt.Sprintf("Unrecognized target environment %q", value))
					}
				}

				// These feature restrictions are merged with esbuild's own restrictions
				if ok {
					result.TSTarget = &config.TSTarget{
						Source:                source,
						Range:                 r,
						Target:                value,
						UnsupportedJSFeatures: compat.UnsupportedJSFeatures(constraints),
					}
				}
			}
		}

		// Parse "importsNotUsedAsValues"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "importsNotUsedAsValues"); ok {
			if value, ok := getString(valueJSON); ok {
				switch value {
				case "preserve", "error":
					result.PreserveImportsNotUsedAsValues = true
				case "remove":
				default:
					log.Add(logger.Warning, &tracker, source.RangeOfString(valueJSON.Loc),
						fmt.Sprintf("Invalid value %q for \"importsNotUsedAsValues\"", value))
				}
			}
		}

		// Parse "preserveValueImports"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "preserveValueImports"); ok {
			if value, ok := getBool(valueJSON); ok {
				result.PreserveValueImports = value
			}
		}

		// Parse "paths"
		if valueJSON, _, ok := getProperty(compilerOptionsJSON, "paths"); ok {
			if paths, ok := valueJSON.Data.(*js_ast.EObject); ok {
				hasBaseURL := result.BaseURL != nil
				if hasBaseURL {
					result.BaseURLForPaths = *result.BaseURL
				} else {
					result.BaseURLForPaths = "."
				}
				result.Paths = make(map[string][]string)
				for _, prop := range paths.Properties {
					if key, ok := getString(prop.Key); ok {
						if !isValidTSConfigPathPattern(key, log, &source, &tracker, prop.Key.Loc) {
							continue
}
|
|
||||||
|
|
||||||
// The "paths" field is an object which maps a pattern to an
|
|
||||||
// array of remapping patterns to try, in priority order. See
|
|
||||||
// the documentation for examples of how this is used:
|
|
||||||
// https://www.typescriptlang.org/docs/handbook/module-resolution.html#path-mapping.
|
|
||||||
//
|
|
||||||
// One particular example:
|
|
||||||
//
|
|
||||||
// {
|
|
||||||
// "compilerOptions": {
|
|
||||||
// "baseUrl": "projectRoot",
|
|
||||||
// "paths": {
|
|
||||||
// "*": [
|
|
||||||
// "*",
|
|
||||||
// "generated/*"
|
|
||||||
// ]
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// Matching "folder1/file2" should first check "projectRoot/folder1/file2"
|
|
||||||
// and then, if that didn't work, also check "projectRoot/generated/folder1/file2".
|
|
||||||
if array, ok := prop.ValueOrNil.Data.(*js_ast.EArray); ok {
|
|
||||||
for _, item := range array.Items {
|
|
||||||
if str, ok := getString(item); ok {
|
|
||||||
if isValidTSConfigPathPattern(str, log, &source, &tracker, item.Loc) &&
|
|
||||||
(hasBaseURL || isValidTSConfigPathNoBaseURLPattern(str, log, &source, &tracker, item.Loc)) {
|
|
||||||
result.Paths[key] = append(result.Paths[key], str)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
log.Add(logger.Warning, &tracker, source.RangeOfString(prop.ValueOrNil.Loc), fmt.Sprintf(
|
|
||||||
"Substitutions for pattern %q should be an array", key))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return &result
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseMemberExpressionForJSX(log logger.Log, source *logger.Source, tracker *logger.LineColumnTracker, loc logger.Loc, text string) []string {
|
|
||||||
if text == "" {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
parts := strings.Split(text, ".")
|
|
||||||
for _, part := range parts {
|
|
||||||
if !js_lexer.IsIdentifier(part) {
|
|
||||||
warnRange := source.RangeOfString(loc)
|
|
||||||
log.Add(logger.Warning, tracker, warnRange, fmt.Sprintf("Invalid JSX member expression: %q", text))
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return parts
|
|
||||||
}
|
|
||||||
|
|
||||||
func isValidTSConfigPathPattern(text string, log logger.Log, source *logger.Source, tracker *logger.LineColumnTracker, loc logger.Loc) bool {
|
|
||||||
foundAsterisk := false
|
|
||||||
for i := 0; i < len(text); i++ {
|
|
||||||
if text[i] == '*' {
|
|
||||||
if foundAsterisk {
|
|
||||||
r := source.RangeOfString(loc)
|
|
||||||
log.Add(logger.Warning, tracker, r, fmt.Sprintf(
|
|
||||||
"Invalid pattern %q, must have at most one \"*\" character", text))
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
foundAsterisk = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func isSlash(c byte) bool {
|
|
||||||
return c == '/' || c == '\\'
|
|
||||||
}
|
|
||||||
|
|
||||||
func isValidTSConfigPathNoBaseURLPattern(text string, log logger.Log, source *logger.Source, tracker *logger.LineColumnTracker, loc logger.Loc) bool {
|
|
||||||
var c0 byte
|
|
||||||
var c1 byte
|
|
||||||
var c2 byte
|
|
||||||
n := len(text)
|
|
||||||
|
|
||||||
if n > 0 {
|
|
||||||
c0 = text[0]
|
|
||||||
if n > 1 {
|
|
||||||
c1 = text[1]
|
|
||||||
if n > 2 {
|
|
||||||
c2 = text[2]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Relative "." or ".."
|
|
||||||
if c0 == '.' && (n == 1 || (n == 2 && c1 == '.')) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Relative "./" or "../" or ".\\" or "..\\"
|
|
||||||
if c0 == '.' && (isSlash(c1) || (c1 == '.' && isSlash(c2))) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Absolute POSIX "/" or UNC "\\"
|
|
||||||
if isSlash(c0) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Absolute DOS "c:/" or "c:\\"
|
|
||||||
if ((c0 >= 'a' && c0 <= 'z') || (c0 >= 'A' && c0 <= 'Z')) && c1 == ':' && isSlash(c2) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
r := source.RangeOfString(loc)
|
|
||||||
log.Add(logger.Warning, tracker, r, fmt.Sprintf(
|
|
||||||
"Non-relative path %q is not allowed when \"baseUrl\" is not set (did you forget a leading \"./\"?)", text))
|
|
||||||
return false
|
|
||||||
}
|
|
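A minimal sketch of how a resolver could apply one parsed "paths" entry from the structure above (not part of this commit; the name matchTSConfigPath is made up for illustration, and esbuild's real matcher lives in its resolver package and additionally handles priority ordering and platform-specific paths):

package main

import (
    "fmt"
    "path"
    "strings"
)

// matchTSConfigPath tries a single key/fallback pair, each with at most one
// "*" wildcard, substituting the text matched by the key's wildcard into the
// fallback and resolving the result against "baseUrl".
func matchTSConfigPath(importPath, key, fallback, baseURLForPaths string) (string, bool) {
    star := strings.IndexByte(key, '*')
    if star < 0 {
        // No wildcard: the key must match the import path exactly
        if importPath != key {
            return "", false
        }
        return path.Join(baseURLForPaths, fallback), true
    }
    prefix, suffix := key[:star], key[star+1:]
    if len(importPath) < len(prefix)+len(suffix) ||
        !strings.HasPrefix(importPath, prefix) || !strings.HasSuffix(importPath, suffix) {
        return "", false
    }
    matched := importPath[len(prefix) : len(importPath)-len(suffix)]
    return path.Join(baseURLForPaths, strings.Replace(fallback, "*", matched, 1)), true
}

func main() {
    // With {"baseUrl": "projectRoot", "paths": {"*": ["*", "generated/*"]}}:
    fmt.Println(matchTSConfigPath("folder1/file2", "*", "*", "projectRoot"))           // projectRoot/folder1/file2 true
    fmt.Println(matchTSConfigPath("folder1/file2", "*", "generated/*", "projectRoot")) // projectRoot/generated/folder1/file2 true
}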
425	vendor/github.com/evanw/esbuild/internal/runtime/runtime.go (generated, vendored)
@@ -1,425 +0,0 @@
// This is esbuild's runtime code. It contains helper functions that are
// automatically injected into output files to implement certain features. For
// example, the "**" operator is replaced with a call to "__pow" when targeting
// ES2015. Tree shaking automatically removes unused code from the runtime.

package runtime

import (
    "github.com/evanw/esbuild/internal/compat"
    "github.com/evanw/esbuild/internal/logger"
)

// The runtime source is always at a special index. The index is always zero
// but this constant is always used instead to improve readability and ensure
// all code that references this index can be discovered easily.
const SourceIndex = uint32(0)

func CanUseES6(unsupportedFeatures compat.JSFeature) bool {
    return !unsupportedFeatures.Has(compat.Let) && !unsupportedFeatures.Has(compat.ForOf)
}

func code(isES6 bool) string {
    // Note: These helper functions used to be named similar things to the helper
    // functions from the TypeScript compiler. However, people sometimes use these
    // two projects in combination and TypeScript's implementation of these helpers
    // causes name collisions. Some examples:
    //
    // * The "tslib" library will overwrite esbuild's helper functions if the bundled
    //   code is run in the global scope: https://github.com/evanw/esbuild/issues/1102
    //
    // * Running the TypeScript compiler on esbuild's output to convert ES6 to ES5
    //   will also overwrite esbuild's helper functions because TypeScript doesn't
    //   change the names of its helper functions to avoid name collisions:
    //   https://github.com/microsoft/TypeScript/issues/43296
    //
    // These can both be considered bugs in TypeScript. However, they are unlikely
    // to be fixed and it's simplest to just avoid using the same names to avoid
    // these bugs. Forbidden names (from "tslib"):
    //
    //   __assign
    //   __asyncDelegator
    //   __asyncGenerator
    //   __asyncValues
    //   __await
    //   __awaiter
    //   __classPrivateFieldGet
    //   __classPrivateFieldSet
    //   __createBinding
    //   __decorate
    //   __exportStar
    //   __extends
    //   __generator
    //   __importDefault
    //   __importStar
    //   __makeTemplateObject
    //   __metadata
    //   __param
    //   __read
    //   __rest
    //   __spread
    //   __spreadArray
    //   __spreadArrays
    //   __values
    //
    // Note: The "__objRest" function has a for-of loop which requires ES6, but
    // transforming destructuring to ES5 isn't even supported so it's ok.
    text := `
        var __create = Object.create
        var __freeze = Object.freeze
        var __defProp = Object.defineProperty
        var __defProps = Object.defineProperties
        var __getOwnPropDesc = Object.getOwnPropertyDescriptor // Note: can return "undefined" due to a Safari bug
        var __getOwnPropDescs = Object.getOwnPropertyDescriptors
        var __getOwnPropNames = Object.getOwnPropertyNames
        var __getOwnPropSymbols = Object.getOwnPropertySymbols
        var __getProtoOf = Object.getPrototypeOf
        var __hasOwnProp = Object.prototype.hasOwnProperty
        var __propIsEnum = Object.prototype.propertyIsEnumerable
        var __reflectGet = Reflect.get
        var __reflectSet = Reflect.set

        export var __pow = Math.pow

        var __defNormalProp = (obj, key, value) => key in obj
            ? __defProp(obj, key, {enumerable: true, configurable: true, writable: true, value})
            : obj[key] = value

        export var __spreadValues = (a, b) => {
            for (var prop in b ||= {})
                if (__hasOwnProp.call(b, prop))
                    __defNormalProp(a, prop, b[prop])
            if (__getOwnPropSymbols)
    `

    // Avoid "of" when not using ES6
    if isES6 {
        text += `
                for (var prop of __getOwnPropSymbols(b)) {
        `
    } else {
        text += `
                for (var props = __getOwnPropSymbols(b), i = 0, n = props.length, prop; i < n; i++) {
                    prop = props[i]
        `
    }

    text += `
                    if (__propIsEnum.call(b, prop))
                        __defNormalProp(a, prop, b[prop])
                }
            return a
        }
        export var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b))

        // Tells importing modules that this can be considered an ES module
        var __markAsModule = target => __defProp(target, '__esModule', { value: true })

        // Update the "name" property on the function or class for "--keep-names"
        export var __name = (target, value) => __defProp(target, 'name', { value, configurable: true })

        // This fallback "require" function exists so that "typeof require" can
        // naturally be "function" even in non-CommonJS environments since esbuild
        // emulates a CommonJS environment (issue #1202). However, people want this
        // shim to fall back to "globalThis.require" even if it's defined later
        // (including property accesses such as "require.resolve") so we need to
        // use a proxy (issue #1614).
        export var __require =
            /* @__PURE__ */ (x =>
                typeof require !== 'undefined' ? require :
                typeof Proxy !== 'undefined' ? new Proxy(x, {
                    get: (a, b) => (typeof require !== 'undefined' ? require : a)[b]
                }) : x
            )(function(x) {
                if (typeof require !== 'undefined') return require.apply(this, arguments)
                throw new Error('Dynamic require of "' + x + '" is not supported')
            })

        // For object rest patterns
        export var __restKey = key => typeof key === 'symbol' ? key : key + ''
        export var __objRest = (source, exclude) => {
            var target = {}
            for (var prop in source)
                if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
                    target[prop] = source[prop]
            if (source != null && __getOwnPropSymbols)
    `

    // Avoid "of" when not using ES6
    if isES6 {
        text += `
                for (var prop of __getOwnPropSymbols(source)) {
        `
    } else {
        text += `
                for (var props = __getOwnPropSymbols(source), i = 0, n = props.length, prop; i < n; i++) {
                    prop = props[i]
        `
    }

    text += `
                    if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
                        target[prop] = source[prop]
                }
            return target
        }

        // This is for lazily-initialized ESM code. This has two implementations, a
        // compact one for minified code and a verbose one that generates friendly
        // names in V8's profiler and in stack traces.
        export var __esm = (fn, res) => function __init() {
            return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res
        }
        export var __esmMin = (fn, res) => () => (fn && (res = fn(fn = 0)), res)

        // Wraps a CommonJS closure and returns a require() function. This has two
        // implementations, a compact one for minified code and a verbose one that
        // generates friendly names in V8's profiler and in stack traces.
        export var __commonJS = (cb, mod) => function __require() {
            return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = {exports: {}}).exports, mod), mod.exports
        }
        export var __commonJSMin = (cb, mod) => () => (mod || cb((mod = {exports: {}}).exports, mod), mod.exports)

        // Used to implement ESM exports both for "require()" and "import * as"
        export var __export = (target, all) => {
            for (var name in all)
                __defProp(target, name, { get: all[name], enumerable: true })
        }
        export var __reExport = (target, module, copyDefault, desc) => {
            if (module && typeof module === 'object' || typeof module === 'function')
    `

    // Avoid "let" when not using ES6
    if isES6 {
        text += `
                for (let key of __getOwnPropNames(module))
                    if (!__hasOwnProp.call(target, key) && (copyDefault || key !== 'default'))
                        __defProp(target, key, { get: () => module[key], enumerable: !(desc = __getOwnPropDesc(module, key)) || desc.enumerable })
        `
    } else {
        text += `
                for (var keys = __getOwnPropNames(module), i = 0, n = keys.length, key; i < n; i++) {
                    key = keys[i]
                    if (!__hasOwnProp.call(target, key) && (copyDefault || key !== 'default'))
                        __defProp(target, key, { get: (k => module[k]).bind(null, key), enumerable: !(desc = __getOwnPropDesc(module, key)) || desc.enumerable })
                }
        `
    }

    text += `
            return target
        }

        // Converts the module from CommonJS to ESM
        export var __toESM = (module, isNodeMode) => {
            return __reExport(__markAsModule(
                __defProp(
                    module != null ? __create(__getProtoOf(module)) : {},
                    'default',

                    // If the importer is not in node compatibility mode and this is an ESM
                    // file that has been converted to a CommonJS file using a Babel-
                    // compatible transform (i.e. "__esModule" has been set), then forward
                    // "default" to the export named "default". Otherwise set "default" to
                    // "module.exports" for node compatibility.
                    !isNodeMode && module && module.__esModule
                        ? { get: () => module.default, enumerable: true }
                        : { value: module, enumerable: true })
            ), module)
        }

        // Converts the module from ESM to CommonJS
        export var __toCommonJS = /* @__PURE__ */ (cache => {
            return (module, temp) => {
                return (cache && cache.get(module)) || (
                    temp = __reExport(__markAsModule({}), module, /* copyDefault */ 1),
                    cache && cache.set(module, temp),
                    temp)
            }
        })(typeof WeakMap !== 'undefined' ? new WeakMap : 0)

        // For TypeScript decorators
        // - kind === undefined: class
        // - kind === 1: method, parameter
        // - kind === 2: field
        export var __decorateClass = (decorators, target, key, kind) => {
            var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target
            for (var i = decorators.length - 1, decorator; i >= 0; i--)
                if (decorator = decorators[i])
                    result = (kind ? decorator(target, key, result) : decorator(result)) || result
            if (kind && result)
                __defProp(target, key, result)
            return result
        }
        export var __decorateParam = (index, decorator) => (target, key) => decorator(target, key, index)

        // For class members
        export var __publicField = (obj, key, value) => {
            __defNormalProp(obj, typeof key !== 'symbol' ? key + '' : key, value)
            return value
        }
        var __accessCheck = (obj, member, msg) => {
            if (!member.has(obj)) throw TypeError('Cannot ' + msg)
        }
        export var __privateIn = (member, obj) => {
            if (Object(obj) !== obj) throw TypeError('Cannot use the "in" operator on this value')
            return member.has(obj)
        }
        export var __privateGet = (obj, member, getter) => {
            __accessCheck(obj, member, 'read from private field')
            return getter ? getter.call(obj) : member.get(obj)
        }
        export var __privateAdd = (obj, member, value) => {
            if (member.has(obj)) throw TypeError('Cannot add the same private member more than once')
            member instanceof WeakSet ? member.add(obj) : member.set(obj, value)
        }
        export var __privateSet = (obj, member, value, setter) => {
            __accessCheck(obj, member, 'write to private field')
            setter ? setter.call(obj, value) : member.set(obj, value)
            return value
        }
        export var __privateWrapper = (obj, member, setter, getter) => {
            return {
                set _(value) { __privateSet(obj, member, value, setter) },
                get _() { return __privateGet(obj, member, getter) },
            }
        }
        export var __privateMethod = (obj, member, method) => {
            __accessCheck(obj, member, 'access private method')
            return method
        }

        // For "super" property accesses
        export var __superStaticGet = (obj, member) => __reflectGet(__getProtoOf(obj), member, obj)
        export var __superStaticSet = (obj, member, value) => (__reflectSet(__getProtoOf(obj), member, value, obj), value)
        export var __superWrapper = (getter, setter, member) => {
            return {
                set _(value) { setter(member, value) },
                get _() { return getter(member) },
            }
        }
        export var __superStaticWrapper = (obj, member) => {
            return {
                set _(value) { __superStaticSet(obj, member, value) },
                get _() { return __superStaticGet(obj, member) },
            }
        }

        // For lowering tagged template literals
        export var __template = (cooked, raw) => __freeze(__defProp(cooked, 'raw', { value: __freeze(raw || cooked.slice()) }))

        // This helps for lowering async functions
        export var __async = (__this, __arguments, generator) => {
            return new Promise((resolve, reject) => {
                var fulfilled = value => {
                    try {
                        step(generator.next(value))
                    } catch (e) {
                        reject(e)
                    }
                }
                var rejected = value => {
                    try {
                        step(generator.throw(value))
                    } catch (e) {
                        reject(e)
                    }
                }
                var step = x => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected)
                step((generator = generator.apply(__this, __arguments)).next())
            })
        }

        // This is for the "binary" loader (custom code is ~2x faster than "atob")
        export var __toBinaryNode = base64 => new Uint8Array(Buffer.from(base64, 'base64'))
        export var __toBinary = /* @__PURE__ */ (() => {
            var table = new Uint8Array(128)
            for (var i = 0; i < 64; i++) table[i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? i - 4 : i * 4 - 205] = i
            return base64 => {
                var n = base64.length, bytes = new Uint8Array((n - (base64[n - 1] == '=') - (base64[n - 2] == '=')) * 3 / 4 | 0)
                for (var i = 0, j = 0; i < n;) {
                    var c0 = table[base64.charCodeAt(i++)], c1 = table[base64.charCodeAt(i++)]
                    var c2 = table[base64.charCodeAt(i++)], c3 = table[base64.charCodeAt(i++)]
                    bytes[j++] = (c0 << 2) | (c1 >> 4)
                    bytes[j++] = (c1 << 4) | (c2 >> 2)
                    bytes[j++] = (c2 << 6) | c3
                }
                return bytes
            }
        })()
    `

    return text
}

var ES6Source = logger.Source{
    Index:          SourceIndex,
    KeyPath:        logger.Path{Text: "<runtime>"},
    PrettyPath:     "<runtime>",
    IdentifierName: "runtime",
    Contents:       code(true /* isES6 */),
}

var ES5Source = logger.Source{
    Index:          SourceIndex,
    KeyPath:        logger.Path{Text: "<runtime>"},
    PrettyPath:     "<runtime>",
    IdentifierName: "runtime",
    Contents:       code(false /* isES6 */),
}

// The TypeScript decorator transform behaves similar to the official
// TypeScript compiler.
//
// One difference is that the "__decorateClass" function doesn't contain a reference
// to the non-existent "Reflect.decorate" function. This function was never
// standardized and checking for it is wasted code (as well as a potentially
// dangerous cause of unintentional behavior changes in the future).
//
// Another difference is that the "__decorateClass" function doesn't take in an
// optional property descriptor like it does in the official TypeScript
// compiler's support code. This appears to be a dead code path in the official
// support code that is only there for legacy reasons.
//
// Here are some examples of how esbuild's decorator transform works:
//
// ============================= Class decorator ==============================
//
//   // TypeScript            // JavaScript
//   @dec                     let C = class {
//   class C {                };
//   }                        C = __decorateClass([
//                              dec
//                            ], C);
//
// ============================ Method decorator ==============================
//
//   // TypeScript            // JavaScript
//   class C {                class C {
//     @dec                     foo() {}
//     foo() {}               }
//   }                        __decorateClass([
//                              dec
//                            ], C.prototype, 'foo', 1);
//
// =========================== Parameter decorator ============================
//
//   // TypeScript            // JavaScript
//   class C {                class C {
//     foo(@dec bar) {}         foo(bar) {}
//   }                        }
//                            __decorateClass([
//                              __decorateParam(0, dec)
//                            ], C.prototype, 'foo', 1);
//
// ============================= Field decorator ==============================
//
//   // TypeScript            // JavaScript
//   class C {                class C {
//     @dec                     constructor() {
//     foo = 123                  this.foo = 123
//   }                          }
//                            }
//                            __decorateClass([
//                              dec
//                            ], C.prototype, 'foo', 2);
757	vendor/github.com/evanw/esbuild/internal/sourcemap/sourcemap.go (generated, vendored)
@@ -1,757 +0,0 @@
package sourcemap

import (
    "bytes"
    "unicode/utf8"

    "github.com/evanw/esbuild/internal/helpers"
    "github.com/evanw/esbuild/internal/logger"
)

type Mapping struct {
    GeneratedLine   int32 // 0-based
    GeneratedColumn int32 // 0-based count of UTF-16 code units

    SourceIndex    int32 // 0-based
    OriginalLine   int32 // 0-based
    OriginalColumn int32 // 0-based count of UTF-16 code units
}

type SourceMap struct {
    Sources        []string
    SourcesContent []SourceContent
    Mappings       []Mapping
}

type SourceContent struct {
    // This stores both the unquoted and the quoted values. We try to use the
    // already-quoted value if possible so we don't need to re-quote it
    // unnecessarily for maximum performance.
    Quoted string

    // But sometimes we need to re-quote the value, such as when it contains
    // non-ASCII characters and we are in ASCII-only mode. In that case we quote
    // this parsed UTF-16 value.
    Value []uint16
}

func (sm *SourceMap) Find(line int32, column int32) *Mapping {
    mappings := sm.Mappings

    // Binary search
    count := len(mappings)
    index := 0
    for count > 0 {
        step := count / 2
        i := index + step
        mapping := mappings[i]
        if mapping.GeneratedLine < line || (mapping.GeneratedLine == line && mapping.GeneratedColumn <= column) {
            index = i + 1
            count -= step + 1
        } else {
            count = step
        }
    }

    // Handle search failure
    if index > 0 {
        mapping := &mappings[index-1]

        // Match the behavior of the popular "source-map" library from Mozilla
        if mapping.GeneratedLine == line {
            return mapping
        }
    }
    return nil
}

var base64 = []byte("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")

// A single base 64 digit can contain 6 bits of data. For the base 64 variable
// length quantities we use in the source map spec, the first bit is the sign,
// the next four bits are the actual value, and the 6th bit is the continuation
// bit. The continuation bit tells us whether there are more digits in this
// value following this digit.
//
//   Continuation
//   |    Sign
//   |    |
//   V    V
//   101011
//
func EncodeVLQ(value int) []byte {
    var vlq int
    if value < 0 {
        vlq = ((-value) << 1) | 1
    } else {
        vlq = value << 1
    }

    // Handle the common case up front without allocations
    if (vlq >> 5) == 0 {
        digit := vlq & 31
        return base64[digit : digit+1]
    }

    encoded := []byte{}
    for {
        digit := vlq & 31
        vlq >>= 5

        // If there are still more digits in this value, we must make sure the
        // continuation bit is marked
        if vlq != 0 {
            digit |= 32
        }

        encoded = append(encoded, base64[digit])

        if vlq == 0 {
            break
        }
    }

    return encoded
}

func DecodeVLQ(encoded []byte, start int) (int, int) {
    shift := 0
    vlq := 0

    // Scan over the input
    for {
        index := bytes.IndexByte(base64, encoded[start])
        if index < 0 {
            break
        }

        // Decode a single byte
        vlq |= (index & 31) << shift
        start++
        shift += 5

        // Stop if there's no continuation bit
        if (index & 32) == 0 {
            break
        }
    }

    // Recover the value
    value := vlq >> 1
    if (vlq & 1) != 0 {
        value = -value
    }
    return value, start
}
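The VLQ helpers above are easy to sanity-check. A round-trip sketch follows; it is not part of the vendored file, and assumes it sits in this package (e.g. in a sourcemap_test.go):

package sourcemap

import "testing"

// TestVLQRoundTrip spot-checks EncodeVLQ/DecodeVLQ against each other and
// against a few known digits: EncodeVLQ(0) is "A", EncodeVLQ(1) is "C",
// EncodeVLQ(-1) is "D", and EncodeVLQ(16) needs a continuation digit: "gB".
func TestVLQRoundTrip(t *testing.T) {
    for _, value := range []int{0, 1, -1, 16, -16, 1234, -1234} {
        encoded := EncodeVLQ(value)
        decoded, next := DecodeVLQ(encoded, 0)
        if decoded != value || next != len(encoded) {
            t.Fatalf("EncodeVLQ(%d) = %q, decoded back to %d", value, encoded, decoded)
        }
    }
    if got := string(EncodeVLQ(16)); got != "gB" {
        t.Fatalf("EncodeVLQ(16) = %q, want %q", got, "gB")
    }
}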
func DecodeVLQUTF16(encoded []uint16) (int, int, bool) {
    n := len(encoded)
    if n == 0 {
        return 0, 0, false
    }

    // Scan over the input
    current := 0
    shift := 0
    vlq := 0
    for {
        if current >= n {
            return 0, 0, false
        }
        index := bytes.IndexByte(base64, byte(encoded[current]))
        if index < 0 {
            return 0, 0, false
        }

        // Decode a single byte
        vlq |= (index & 31) << shift
        current++
        shift += 5

        // Stop if there's no continuation bit
        if (index & 32) == 0 {
            break
        }
    }

    // Recover the value
    var value = vlq >> 1
    if (vlq & 1) != 0 {
        value = -value
    }
    return value, current, true
}

type LineColumnOffset struct {
    Lines   int
    Columns int
}

func (a LineColumnOffset) ComesBefore(b LineColumnOffset) bool {
    return a.Lines < b.Lines || (a.Lines == b.Lines && a.Columns < b.Columns)
}

func (a *LineColumnOffset) Add(b LineColumnOffset) {
    if b.Lines == 0 {
        a.Columns += b.Columns
    } else {
        a.Lines += b.Lines
        a.Columns = b.Columns
    }
}

func (offset *LineColumnOffset) AdvanceBytes(bytes []byte) {
    columns := offset.Columns
    for len(bytes) > 0 {
        c, width := utf8.DecodeRune(bytes)
        bytes = bytes[width:]
        switch c {
        case '\r', '\n', '\u2028', '\u2029':
            // Handle Windows-specific "\r\n" newlines
            if c == '\r' && len(bytes) > 0 && bytes[0] == '\n' {
                columns++
                continue
            }

            offset.Lines++
            columns = 0

        default:
            // Mozilla's "source-map" library counts columns using UTF-16 code units
            if c <= 0xFFFF {
                columns++
            } else {
                columns += 2
            }
        }
    }
    offset.Columns = columns
}

func (offset *LineColumnOffset) AdvanceString(text string) {
    columns := offset.Columns
    for i, c := range text {
        switch c {
        case '\r', '\n', '\u2028', '\u2029':
            // Handle Windows-specific "\r\n" newlines
            if c == '\r' && i+1 < len(text) && text[i+1] == '\n' {
                columns++
                continue
            }

            offset.Lines++
            columns = 0

        default:
            // Mozilla's "source-map" library counts columns using UTF-16 code units
            if c <= 0xFFFF {
                columns++
            } else {
                columns += 2
            }
        }
    }
    offset.Columns = columns
}
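The UTF-16 column convention above is a common source of confusion, so here is a tiny illustration (again a sketch, not part of the vendored file): characters in the Basic Multilingual Plane advance the column by one code unit, astral-plane characters by two.

package sourcemap

import "fmt"

func ExampleLineColumnOffset_AdvanceString() {
    var offset LineColumnOffset
    offset.AdvanceString("aé😀") // 1 + 1 + 2 UTF-16 code units
    fmt.Println(offset.Lines, offset.Columns)

    offset.AdvanceString("\nx") // a newline resets the column count
    fmt.Println(offset.Lines, offset.Columns)
    // Output:
    // 0 4
    // 1 1
}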
type SourceMapPieces struct {
    Prefix   []byte
    Mappings []byte
    Suffix   []byte
}

func (pieces SourceMapPieces) HasContent() bool {
    return len(pieces.Prefix)+len(pieces.Mappings)+len(pieces.Suffix) > 0
}

type SourceMapShift struct {
    Before LineColumnOffset
    After  LineColumnOffset
}

func (pieces SourceMapPieces) Finalize(shifts []SourceMapShift) []byte {
    // An optimized path for when there are no shifts
    if len(shifts) == 1 {
        bytes := pieces.Prefix
        minCap := len(bytes) + len(pieces.Mappings) + len(pieces.Suffix)
        if cap(bytes) < minCap {
            bytes = append(make([]byte, 0, minCap), bytes...)
        }
        bytes = append(bytes, pieces.Mappings...)
        bytes = append(bytes, pieces.Suffix...)
        return bytes
    }

    startOfRun := 0
    current := 0
    generated := LineColumnOffset{}
    prevShiftColumnDelta := 0
    j := helpers.Joiner{}

    // Start the source map
    j.AddBytes(pieces.Prefix)

    // This assumes that a) all mappings are valid and b) all mappings are ordered
    // by increasing generated position. This should be the case for all mappings
    // generated by esbuild, which should be the only mappings we process here.
    for current < len(pieces.Mappings) {
        // Handle a line break
        if pieces.Mappings[current] == ';' {
            generated.Lines++
            generated.Columns = 0
            prevShiftColumnDelta = 0
            current++
            continue
        }

        potentialEndOfRun := current

        // Read the generated column
        generatedColumnDelta, next := DecodeVLQ(pieces.Mappings, current)
        generated.Columns += generatedColumnDelta
        current = next

        potentialStartOfRun := current

        // Skip over the original position information
        _, current = DecodeVLQ(pieces.Mappings, current) // The original source
        _, current = DecodeVLQ(pieces.Mappings, current) // The original line
        _, current = DecodeVLQ(pieces.Mappings, current) // The original column

        // Skip a trailing comma
        if current < len(pieces.Mappings) && pieces.Mappings[current] == ',' {
            current++
        }

        // Detect crossing shift boundaries
        didCrossBoundary := false
        for len(shifts) > 1 && shifts[1].Before.ComesBefore(generated) {
            shifts = shifts[1:]
            didCrossBoundary = true
        }
        if !didCrossBoundary {
            continue
        }

        // This shift isn't relevant if the next mapping after this shift is on a
        // following line. In that case, don't split and keep scanning instead.
        shift := shifts[0]
        if shift.After.Lines != generated.Lines {
            continue
        }

        // Add all previous mappings in a single run for efficiency. Since source
        // mappings are relative, no data needs to be modified inside this run.
        j.AddBytes(pieces.Mappings[startOfRun:potentialEndOfRun])

        // Then modify the first mapping across the shift boundary with the updated
        // generated column value. It's simplest to only support column shifts. This
        // is reasonable because import paths should not contain newlines.
        if shift.Before.Lines != shift.After.Lines {
            panic("Unexpected line change when shifting source maps")
        }
        shiftColumnDelta := shift.After.Columns - shift.Before.Columns
        j.AddBytes(EncodeVLQ(generatedColumnDelta + shiftColumnDelta - prevShiftColumnDelta))
        prevShiftColumnDelta = shiftColumnDelta

        // Finally, start the next run after the end of this generated column offset
        startOfRun = potentialStartOfRun
    }

    // Finish the source map
    j.AddBytes(pieces.Mappings[startOfRun:])
    j.AddBytes(pieces.Suffix)
    return j.Done()
}

// Coordinates in source maps are stored using relative offsets for size
// reasons. When joining together chunks of a source map that were emitted
// in parallel for different parts of a file, we need to fix up the first
// segment of each chunk to be relative to the end of the previous chunk.
type SourceMapState struct {
    // This isn't stored in the source map. It's only used by the bundler to join
    // source map chunks together correctly.
    GeneratedLine int

    // These are stored in the source map in VLQ format.
    GeneratedColumn int
    SourceIndex     int
    OriginalLine    int
    OriginalColumn  int
}

// Source map chunks are computed in parallel for speed. Each chunk is relative
// to the zero state instead of being relative to the end state of the previous
// chunk, since it's impossible to know the end state of the previous chunk in
// a parallel computation.
//
// After all chunks are computed, they are joined together in a second pass.
// This rewrites the first mapping in each chunk to be relative to the end
// state of the previous chunk.
func AppendSourceMapChunk(j *helpers.Joiner, prevEndState SourceMapState, startState SourceMapState, sourceMap []byte) {
    // Handle line breaks in between this mapping and the previous one
    if startState.GeneratedLine != 0 {
        j.AddBytes(bytes.Repeat([]byte{';'}, startState.GeneratedLine))
        prevEndState.GeneratedColumn = 0
    }

    // Skip past any leading semicolons, which indicate line breaks
    semicolons := 0
    for sourceMap[semicolons] == ';' {
        semicolons++
    }
    if semicolons > 0 {
        j.AddBytes(sourceMap[:semicolons])
        sourceMap = sourceMap[semicolons:]
        prevEndState.GeneratedColumn = 0
        startState.GeneratedColumn = 0
    }

    // Strip off the first mapping from the buffer. The first mapping should be
    // for the start of the original file (the printer always generates one for
    // the start of the file).
    generatedColumn, i := DecodeVLQ(sourceMap, 0)
    sourceIndex, i := DecodeVLQ(sourceMap, i)
    originalLine, i := DecodeVLQ(sourceMap, i)
    originalColumn, i := DecodeVLQ(sourceMap, i)
    sourceMap = sourceMap[i:]

    // Rewrite the first mapping to be relative to the end state of the previous
    // chunk. We now know what the end state is because we're in the second pass
    // where all chunks have already been generated.
    startState.SourceIndex += sourceIndex
    startState.GeneratedColumn += generatedColumn
    startState.OriginalLine += originalLine
    startState.OriginalColumn += originalColumn
    j.AddBytes(appendMappingToBuffer(nil, j.LastByte(), prevEndState, startState))

    // Then append everything after that without modification.
    j.AddBytes(sourceMap)
}
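A simplified sketch of how a caller could drive AppendSourceMapChunk to stitch parallel chunks together (not part of the vendored file; esbuild's real bundler also threads each chunk's FinalGeneratedColumn and per-file source index offsets into startState, which this sketch omits):

package sourcemap

import "github.com/evanw/esbuild/internal/helpers"

// joinChunksSketch concatenates the "mappings" payloads of several chunks,
// rebasing each chunk's first segment against the previous chunk's end state.
func joinChunksSketch(chunks []Chunk) []byte {
    j := helpers.Joiner{}
    prevEnd := SourceMapState{}
    for _, chunk := range chunks {
        if chunk.ShouldIgnore || len(chunk.Buffer) == 0 {
            continue
        }
        // startState would normally carry this chunk's line/column/source
        // offsets within the final output file; zero keeps the sketch simple.
        AppendSourceMapChunk(&j, prevEnd, SourceMapState{}, chunk.Buffer)
        prevEnd = chunk.EndState
    }
    return j.Done()
}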
func appendMappingToBuffer(buffer []byte, lastByte byte, prevState SourceMapState, currentState SourceMapState) []byte {
    // Put commas in between mappings
    if lastByte != 0 && lastByte != ';' && lastByte != '"' {
        buffer = append(buffer, ',')
    }

    // Record the generated column (the line is recorded using ';' elsewhere)
    buffer = append(buffer, EncodeVLQ(currentState.GeneratedColumn-prevState.GeneratedColumn)...)
    prevState.GeneratedColumn = currentState.GeneratedColumn

    // Record the generated source
    buffer = append(buffer, EncodeVLQ(currentState.SourceIndex-prevState.SourceIndex)...)
    prevState.SourceIndex = currentState.SourceIndex

    // Record the original line
    buffer = append(buffer, EncodeVLQ(currentState.OriginalLine-prevState.OriginalLine)...)
    prevState.OriginalLine = currentState.OriginalLine

    // Record the original column
    buffer = append(buffer, EncodeVLQ(currentState.OriginalColumn-prevState.OriginalColumn)...)
    prevState.OriginalColumn = currentState.OriginalColumn

    return buffer
}
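A worked example of the delta encoding above (an illustration, not part of the vendored file): two mappings on one generated line, at generated columns 0 and 8, both pointing into source 0 at original line 2, columns 0 and 8.

package sourcemap

import "fmt"

func exampleDeltaEncoding() {
    buf := appendMappingToBuffer(nil, 0, SourceMapState{},
        SourceMapState{OriginalLine: 2})
    buf = appendMappingToBuffer(buf, buf[len(buf)-1],
        SourceMapState{OriginalLine: 2},
        SourceMapState{GeneratedColumn: 8, OriginalLine: 2, OriginalColumn: 8})
    fmt.Println(string(buf)) // "AAEA,QAAQ"
    // The first segment encodes 0/0/2/0 relative to the zero state; the second
    // stores only the deltas +8/+0/+0/+8 from the previous segment.
}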
type LineOffsetTable struct {
    byteOffsetToStartOfLine int32

    // The source map specification is very loose and does not specify what
    // column numbers actually mean. The popular "source-map" library from Mozilla
    // appears to interpret them as counts of UTF-16 code units, so we generate
    // those too for compatibility.
    //
    // We keep mapping tables around to accelerate conversion from byte offsets
    // to UTF-16 code unit counts. However, this mapping takes up a lot of memory
    // and generates a lot of garbage. Since most JavaScript is ASCII and the
    // mapping for ASCII is 1:1, we avoid creating a table for ASCII-only lines
    // as an optimization.
    byteOffsetToFirstNonASCII int32
    columnsForNonASCII        []int32
}

func GenerateLineOffsetTables(contents string, approximateLineCount int32) []LineOffsetTable {
    var ColumnsForNonASCII []int32
    ByteOffsetToFirstNonASCII := int32(0)
    lineByteOffset := 0
    columnByteOffset := 0
    column := int32(0)

    // Preallocate the top-level table using the approximate line count from the lexer
    lineOffsetTables := make([]LineOffsetTable, 0, approximateLineCount)

    for i, c := range contents {
        // Mark the start of the next line
        if column == 0 {
            lineByteOffset = i
        }

        // Start the mapping if this character is non-ASCII
        if c > 0x7F && ColumnsForNonASCII == nil {
            columnByteOffset = i - lineByteOffset
            ByteOffsetToFirstNonASCII = int32(columnByteOffset)
            ColumnsForNonASCII = []int32{}
        }

        // Update the per-byte column offsets
        if ColumnsForNonASCII != nil {
            for lineBytesSoFar := i - lineByteOffset; columnByteOffset <= lineBytesSoFar; columnByteOffset++ {
                ColumnsForNonASCII = append(ColumnsForNonASCII, column)
            }
        }

        switch c {
        case '\r', '\n', '\u2028', '\u2029':
            // Handle Windows-specific "\r\n" newlines
            if c == '\r' && i+1 < len(contents) && contents[i+1] == '\n' {
                column++
                continue
            }

            lineOffsetTables = append(lineOffsetTables, LineOffsetTable{
                byteOffsetToStartOfLine:   int32(lineByteOffset),
                byteOffsetToFirstNonASCII: ByteOffsetToFirstNonASCII,
                columnsForNonASCII:        ColumnsForNonASCII,
            })
            columnByteOffset = 0
            ByteOffsetToFirstNonASCII = 0
            ColumnsForNonASCII = nil
            column = 0

        default:
            // Mozilla's "source-map" library counts columns using UTF-16 code units
            if c <= 0xFFFF {
                column++
            } else {
                column += 2
            }
        }
    }

    // Mark the start of the next line
    if column == 0 {
        lineByteOffset = len(contents)
    }

    // Do one last update for the column at the end of the file
    if ColumnsForNonASCII != nil {
        for lineBytesSoFar := len(contents) - lineByteOffset; columnByteOffset <= lineBytesSoFar; columnByteOffset++ {
            ColumnsForNonASCII = append(ColumnsForNonASCII, column)
        }
    }

    lineOffsetTables = append(lineOffsetTables, LineOffsetTable{
        byteOffsetToStartOfLine:   int32(lineByteOffset),
        byteOffsetToFirstNonASCII: ByteOffsetToFirstNonASCII,
        columnsForNonASCII:        ColumnsForNonASCII,
    })
    return lineOffsetTables
}

type Chunk struct {
    Buffer []byte

    // This end state will be used to rewrite the start of the following source
    // map chunk so that the delta-encoded VLQ numbers are preserved.
    EndState SourceMapState

    // There probably isn't a source mapping at the end of the file (nor should
    // there be) but if we're appending another source map chunk after this one,
    // we'll need to know how many characters were in the last line we generated.
    FinalGeneratedColumn int

    ShouldIgnore bool
}

type ChunkBuilder struct {
    inputSourceMap      *SourceMap
    sourceMap           []byte
    prevLoc             logger.Loc
    prevState           SourceMapState
    lastGeneratedUpdate int
    generatedColumn     int
    hasPrevState        bool
    lineOffsetTables    []LineOffsetTable

    // This is a workaround for a bug in the popular "source-map" library:
    // https://github.com/mozilla/source-map/issues/261. The library will
    // sometimes return null when querying a source map unless every line
    // starts with a mapping at column zero.
    //
    // The workaround is to replicate the previous mapping if a line ends
    // up not starting with a mapping. This is done lazily because we want
    // to avoid replicating the previous mapping if we don't need to.
    lineStartsWithMapping     bool
    coverLinesWithoutMappings bool
}

func MakeChunkBuilder(inputSourceMap *SourceMap, lineOffsetTables []LineOffsetTable) ChunkBuilder {
    return ChunkBuilder{
        inputSourceMap:   inputSourceMap,
        prevLoc:          logger.Loc{Start: -1},
        lineOffsetTables: lineOffsetTables,

        // We automatically repeat the previous source mapping if we ever generate
        // a line that doesn't start with a mapping. This helps give files more
        // complete mapping coverage without gaps.
        //
        // However, we probably shouldn't do this if the input file has a nested
        // source map that we will be remapping through. We have no idea what state
        // that source map is in and it could be pretty scrambled.
        //
        // I've seen cases where blindly repeating the last mapping for subsequent
        // lines gives very strange and unhelpful results with source maps from
        // other tools.
        coverLinesWithoutMappings: inputSourceMap == nil,
    }
}

func (b *ChunkBuilder) AddSourceMapping(loc logger.Loc, output []byte) {
    if loc == b.prevLoc {
        return
    }
    b.prevLoc = loc

    // Binary search to find the line
    lineOffsetTables := b.lineOffsetTables
    count := len(lineOffsetTables)
    originalLine := 0
    for count > 0 {
        step := count / 2
        i := originalLine + step
        if lineOffsetTables[i].byteOffsetToStartOfLine <= loc.Start {
            originalLine = i + 1
            count = count - step - 1
        } else {
            count = step
        }
    }
    originalLine--

    // Use the line to compute the column
    line := &lineOffsetTables[originalLine]
    originalColumn := int(loc.Start - line.byteOffsetToStartOfLine)
    if line.columnsForNonASCII != nil && originalColumn >= int(line.byteOffsetToFirstNonASCII) {
        originalColumn = int(line.columnsForNonASCII[originalColumn-int(line.byteOffsetToFirstNonASCII)])
    }

    b.updateGeneratedLineAndColumn(output)

    // If this line doesn't start with a mapping and we're about to add a mapping
    // that's not at the start, insert a mapping first so the line starts with one.
    if b.coverLinesWithoutMappings && !b.lineStartsWithMapping && b.generatedColumn > 0 && b.hasPrevState {
        b.appendMappingWithoutRemapping(SourceMapState{
            GeneratedLine:   b.prevState.GeneratedLine,
            GeneratedColumn: 0,
            SourceIndex:     b.prevState.SourceIndex,
            OriginalLine:    b.prevState.OriginalLine,
            OriginalColumn:  b.prevState.OriginalColumn,
        })
    }

    b.appendMapping(SourceMapState{
        GeneratedLine:   b.prevState.GeneratedLine,
        GeneratedColumn: b.generatedColumn,
        OriginalLine:    originalLine,
        OriginalColumn:  originalColumn,
    })

    // This line now has a mapping on it, so don't insert another one
    b.lineStartsWithMapping = true
}

func (b *ChunkBuilder) GenerateChunk(output []byte) Chunk {
    b.updateGeneratedLineAndColumn(output)
    shouldIgnore := true
    for _, c := range b.sourceMap {
        if c != ';' {
            shouldIgnore = false
            break
        }
    }
    return Chunk{
        Buffer:               b.sourceMap,
        EndState:             b.prevState,
        FinalGeneratedColumn: b.generatedColumn,
        ShouldIgnore:         shouldIgnore,
    }
}

// Scan over the printed text since the last source mapping and update the
// generated line and column numbers
func (b *ChunkBuilder) updateGeneratedLineAndColumn(output []byte) {
    for i, c := range string(output[b.lastGeneratedUpdate:]) {
        switch c {
        case '\r', '\n', '\u2028', '\u2029':
            // Handle Windows-specific "\r\n" newlines
            if c == '\r' {
                newlineCheck := b.lastGeneratedUpdate + i + 1
                if newlineCheck < len(output) && output[newlineCheck] == '\n' {
                    continue
                }
            }

            // If we're about to move to the next line and the previous line didn't have
            // any mappings, add a mapping at the start of the previous line.
            if b.coverLinesWithoutMappings && !b.lineStartsWithMapping && b.hasPrevState {
                b.appendMappingWithoutRemapping(SourceMapState{
                    GeneratedLine:   b.prevState.GeneratedLine,
                    GeneratedColumn: 0,
                    SourceIndex:     b.prevState.SourceIndex,
                    OriginalLine:    b.prevState.OriginalLine,
                    OriginalColumn:  b.prevState.OriginalColumn,
                })
            }

            b.prevState.GeneratedLine++
            b.prevState.GeneratedColumn = 0
            b.generatedColumn = 0
            b.sourceMap = append(b.sourceMap, ';')

            // This new line doesn't have a mapping yet
            b.lineStartsWithMapping = false

        default:
            // Mozilla's "source-map" library counts columns using UTF-16 code units
            if c <= 0xFFFF {
                b.generatedColumn++
            } else {
                b.generatedColumn += 2
            }
        }
    }

    b.lastGeneratedUpdate = len(output)
}

func (b *ChunkBuilder) appendMapping(currentState SourceMapState) {
    // If the input file had a source map, map all the way back to the original
    if b.inputSourceMap != nil {
        mapping := b.inputSourceMap.Find(
            int32(currentState.OriginalLine),
            int32(currentState.OriginalColumn))

        // Some locations won't have a mapping
        if mapping == nil {
            return
        }

        currentState.SourceIndex = int(mapping.SourceIndex)
        currentState.OriginalLine = int(mapping.OriginalLine)
        currentState.OriginalColumn = int(mapping.OriginalColumn)
    }

    b.appendMappingWithoutRemapping(currentState)
}

func (b *ChunkBuilder) appendMappingWithoutRemapping(currentState SourceMapState) {
    var lastByte byte
    if len(b.sourceMap) != 0 {
        lastByte = b.sourceMap[len(b.sourceMap)-1]
    }

    b.sourceMap = appendMappingToBuffer(b.sourceMap, lastByte, b.prevState, currentState)
    b.prevState = currentState
    b.hasPrevState = true
}
22	vendor/github.com/evanw/esbuild/internal/xxhash/LICENSE.txt (generated, vendored)
@@ -1,22 +0,0 @@
Copyright (c) 2016 Caleb Spare

MIT License

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
1
vendor/github.com/evanw/esbuild/internal/xxhash/README.md
generated
vendored
@ -1 +0,0 @@
This Go implementation of xxHash is from https://github.com/cespare/xxhash.
235
vendor/github.com/evanw/esbuild/internal/xxhash/xxhash.go
generated
vendored
@ -1,235 +0,0 @@
// Package xxhash implements the 64-bit variant of xxHash (XXH64) as described
// at http://cyan4973.github.io/xxHash/.
package xxhash

import (
	"encoding/binary"
	"errors"
	"math/bits"
)

const (
	prime1 uint64 = 11400714785074694791
	prime2 uint64 = 14029467366897019727
	prime3 uint64 = 1609587929392839161
	prime4 uint64 = 9650029242287828579
	prime5 uint64 = 2870177450012600261
)

// NOTE(caleb): I'm using both consts and vars of the primes. Using consts where
// possible in the Go code is worth a small (but measurable) performance boost
// by avoiding some MOVQs. Vars are needed for the asm and also are useful for
// convenience in the Go code in a few places where we need to intentionally
// avoid constant arithmetic (e.g., v1 := prime1 + prime2 fails because the
// result overflows a uint64).
var (
	prime1v = prime1
	prime2v = prime2
	prime3v = prime3
	prime4v = prime4
	prime5v = prime5
)

// Digest implements hash.Hash64.
type Digest struct {
	v1    uint64
	v2    uint64
	v3    uint64
	v4    uint64
	total uint64
	mem   [32]byte
	n     int // how much of mem is used
}

// New creates a new Digest that computes the 64-bit xxHash algorithm.
func New() *Digest {
	var d Digest
	d.Reset()
	return &d
}

// Reset clears the Digest's state so that it can be reused.
func (d *Digest) Reset() {
	d.v1 = prime1v + prime2
	d.v2 = prime2
	d.v3 = 0
	d.v4 = -prime1v
	d.total = 0
	d.n = 0
}

// Size always returns 8 bytes.
func (d *Digest) Size() int { return 8 }

// BlockSize always returns 32 bytes.
func (d *Digest) BlockSize() int { return 32 }

// Write adds more data to d. It always returns len(b), nil.
func (d *Digest) Write(b []byte) (n int, err error) {
	n = len(b)
	d.total += uint64(n)

	if d.n+n < 32 {
		// This new data doesn't even fill the current block.
		copy(d.mem[d.n:], b)
		d.n += n
		return
	}

	if d.n > 0 {
		// Finish off the partial block.
		copy(d.mem[d.n:], b)
		d.v1 = round(d.v1, u64(d.mem[0:8]))
		d.v2 = round(d.v2, u64(d.mem[8:16]))
		d.v3 = round(d.v3, u64(d.mem[16:24]))
		d.v4 = round(d.v4, u64(d.mem[24:32]))
		b = b[32-d.n:]
		d.n = 0
	}

	if len(b) >= 32 {
		// One or more full blocks left.
		nw := writeBlocks(d, b)
		b = b[nw:]
	}

	// Store any remaining partial block.
	copy(d.mem[:], b)
	d.n = len(b)

	return
}

// Sum appends the current hash to b and returns the resulting slice.
func (d *Digest) Sum(b []byte) []byte {
	s := d.Sum64()
	return append(
		b,
		byte(s>>56),
		byte(s>>48),
		byte(s>>40),
		byte(s>>32),
		byte(s>>24),
		byte(s>>16),
		byte(s>>8),
		byte(s),
	)
}

// Sum64 returns the current hash.
func (d *Digest) Sum64() uint64 {
	var h uint64

	if d.total >= 32 {
		v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4
		h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4)
		h = mergeRound(h, v1)
		h = mergeRound(h, v2)
		h = mergeRound(h, v3)
		h = mergeRound(h, v4)
	} else {
		h = d.v3 + prime5
	}

	h += d.total

	i, end := 0, d.n
	for ; i+8 <= end; i += 8 {
		k1 := round(0, u64(d.mem[i:i+8]))
		h ^= k1
		h = rol27(h)*prime1 + prime4
	}
	if i+4 <= end {
		h ^= uint64(u32(d.mem[i:i+4])) * prime1
		h = rol23(h)*prime2 + prime3
		i += 4
	}
	for i < end {
		h ^= uint64(d.mem[i]) * prime5
		h = rol11(h) * prime1
		i++
	}

	h ^= h >> 33
	h *= prime2
	h ^= h >> 29
	h *= prime3
	h ^= h >> 32

	return h
}

const (
	magic         = "xxh\x06"
	marshaledSize = len(magic) + 8*5 + 32
)

// MarshalBinary implements the encoding.BinaryMarshaler interface.
func (d *Digest) MarshalBinary() ([]byte, error) {
	b := make([]byte, 0, marshaledSize)
	b = append(b, magic...)
	b = appendUint64(b, d.v1)
	b = appendUint64(b, d.v2)
	b = appendUint64(b, d.v3)
	b = appendUint64(b, d.v4)
	b = appendUint64(b, d.total)
	b = append(b, d.mem[:d.n]...)
	b = b[:len(b)+len(d.mem)-d.n]
	return b, nil
}

// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
func (d *Digest) UnmarshalBinary(b []byte) error {
	if len(b) < len(magic) || string(b[:len(magic)]) != magic {
		return errors.New("xxhash: invalid hash state identifier")
	}
	if len(b) != marshaledSize {
		return errors.New("xxhash: invalid hash state size")
	}
	b = b[len(magic):]
	b, d.v1 = consumeUint64(b)
	b, d.v2 = consumeUint64(b)
	b, d.v3 = consumeUint64(b)
	b, d.v4 = consumeUint64(b)
	b, d.total = consumeUint64(b)
	copy(d.mem[:], b)
	d.n = int(d.total % uint64(len(d.mem)))
	return nil
}

func appendUint64(b []byte, x uint64) []byte {
	var a [8]byte
	binary.LittleEndian.PutUint64(a[:], x)
	return append(b, a[:]...)
}

func consumeUint64(b []byte) ([]byte, uint64) {
	x := u64(b)
	return b[8:], x
}

func u64(b []byte) uint64 { return binary.LittleEndian.Uint64(b) }
func u32(b []byte) uint32 { return binary.LittleEndian.Uint32(b) }

func round(acc, input uint64) uint64 {
	acc += input * prime2
	acc = rol31(acc)
	acc *= prime1
	return acc
}

func mergeRound(acc, val uint64) uint64 {
	val = round(0, val)
	acc ^= val
	acc = acc*prime1 + prime4
	return acc
}

func rol1(x uint64) uint64  { return bits.RotateLeft64(x, 1) }
func rol7(x uint64) uint64  { return bits.RotateLeft64(x, 7) }
func rol11(x uint64) uint64 { return bits.RotateLeft64(x, 11) }
func rol12(x uint64) uint64 { return bits.RotateLeft64(x, 12) }
func rol18(x uint64) uint64 { return bits.RotateLeft64(x, 18) }
func rol23(x uint64) uint64 { return bits.RotateLeft64(x, 23) }
func rol27(x uint64) uint64 { return bits.RotateLeft64(x, 27) }
func rol31(x uint64) uint64 { return bits.RotateLeft64(x, 31) }
74
vendor/github.com/evanw/esbuild/internal/xxhash/xxhash_other.go
generated
vendored
@ -1,74 +0,0 @@
package xxhash

// Sum64 computes the 64-bit xxHash digest of b.
func Sum64(b []byte) uint64 {
	// A simpler version would be
	//   d := New()
	//   d.Write(b)
	//   return d.Sum64()
	// but this is faster, particularly for small inputs.

	n := len(b)
	var h uint64

	if n >= 32 {
		v1 := prime1v + prime2
		v2 := prime2
		v3 := uint64(0)
		v4 := -prime1v
		for len(b) >= 32 {
			v1 = round(v1, u64(b[0:8:len(b)]))
			v2 = round(v2, u64(b[8:16:len(b)]))
			v3 = round(v3, u64(b[16:24:len(b)]))
			v4 = round(v4, u64(b[24:32:len(b)]))
			b = b[32:len(b):len(b)]
		}
		h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4)
		h = mergeRound(h, v1)
		h = mergeRound(h, v2)
		h = mergeRound(h, v3)
		h = mergeRound(h, v4)
	} else {
		h = prime5
	}

	h += uint64(n)

	i, end := 0, len(b)
	for ; i+8 <= end; i += 8 {
		k1 := round(0, u64(b[i:i+8:len(b)]))
		h ^= k1
		h = rol27(h)*prime1 + prime4
	}
	if i+4 <= end {
		h ^= uint64(u32(b[i:i+4:len(b)])) * prime1
		h = rol23(h)*prime2 + prime3
		i += 4
	}
	for ; i < end; i++ {
		h ^= uint64(b[i]) * prime5
		h = rol11(h) * prime1
	}

	h ^= h >> 33
	h *= prime2
	h ^= h >> 29
	h *= prime3
	h ^= h >> 32

	return h
}

func writeBlocks(d *Digest, b []byte) int {
	v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4
	n := len(b)
	for len(b) >= 32 {
		v1 = round(v1, u64(b[0:8:len(b)]))
		v2 = round(v2, u64(b[8:16:len(b)]))
		v3 = round(v3, u64(b[16:24:len(b)]))
		v4 = round(v4, u64(b[24:32:len(b)]))
		b = b[32:len(b):len(b)]
	}
	d.v1, d.v2, d.v3, d.v4 = v1, v2, v3, v4
	return n - len(b)
}
566
vendor/github.com/evanw/esbuild/pkg/api/api.go
generated
vendored
@ -1,566 +0,0 @@
// This API exposes esbuild's two main operations: building and transforming.
// It's intended for integrating esbuild into other tools as a library.
//
// If you are just trying to run esbuild from Go without the overhead of
// creating a child process, there is also an API for the command-line
// interface itself: https://godoc.org/github.com/evanw/esbuild/pkg/cli.
//
// Build API
//
// This function runs an end-to-end build operation. It takes an array of file
// paths as entry points, parses them and all of their dependencies, and
// returns the output files to write to the file system. The available options
// roughly correspond to esbuild's command-line flags.
//
// Example usage:
//
//     package main
//
//     import (
//         "os"
//
//         "github.com/evanw/esbuild/pkg/api"
//     )
//
//     func main() {
//         result := api.Build(api.BuildOptions{
//             EntryPoints: []string{"input.js"},
//             Outfile:     "output.js",
//             Bundle:      true,
//             Write:       true,
//             LogLevel:    api.LogLevelInfo,
//         })
//
//         if len(result.Errors) > 0 {
//             os.Exit(1)
//         }
//     }
//
// Transform API
//
// This function transforms a string of source code into JavaScript. It can be
// used to minify JavaScript, convert TypeScript/JSX to JavaScript, or convert
// newer JavaScript to older JavaScript. The available options roughly
// correspond to esbuild's command-line flags.
//
// Example usage:
//
//     package main
//
//     import (
//         "fmt"
//         "os"
//
//         "github.com/evanw/esbuild/pkg/api"
//     )
//
//     func main() {
//         jsx := `
//             import * as React from 'react'
//             import * as ReactDOM from 'react-dom'
//
//             ReactDOM.render(
//                 <h1>Hello, world!</h1>,
//                 document.getElementById('root')
//             );
//         `
//
//         result := api.Transform(jsx, api.TransformOptions{
//             Loader: api.LoaderJSX,
//         })
//
//         fmt.Printf("%d errors and %d warnings\n",
//             len(result.Errors), len(result.Warnings))
//
//         os.Stdout.Write(result.Code)
//     }
//
package api

type SourceMap uint8

const (
	SourceMapNone SourceMap = iota
	SourceMapInline
	SourceMapLinked
	SourceMapExternal
	SourceMapInlineAndExternal
)

type SourcesContent uint8

const (
	SourcesContentInclude SourcesContent = iota
	SourcesContentExclude
)

type LegalComments uint8

const (
	LegalCommentsDefault LegalComments = iota
	LegalCommentsNone
	LegalCommentsInline
	LegalCommentsEndOfFile
	LegalCommentsLinked
	LegalCommentsExternal
)

type JSXMode uint8

const (
	JSXModeTransform JSXMode = iota
	JSXModePreserve
)

type Target uint8

const (
	DefaultTarget Target = iota
	ESNext
	ES5
	ES2015
	ES2016
	ES2017
	ES2018
	ES2019
	ES2020
	ES2021
)

type Loader uint8

const (
	LoaderNone Loader = iota
	LoaderJS
	LoaderJSX
	LoaderTS
	LoaderTSX
	LoaderJSON
	LoaderText
	LoaderBase64
	LoaderDataURL
	LoaderFile
	LoaderBinary
	LoaderCSS
	LoaderDefault
)

type Platform uint8

const (
	PlatformBrowser Platform = iota
	PlatformNode
	PlatformNeutral
)

type Format uint8

const (
	FormatDefault Format = iota
	FormatIIFE
	FormatCommonJS
	FormatESModule
)

type EngineName uint8

const (
	EngineChrome EngineName = iota
	EngineEdge
	EngineFirefox
	EngineIOS
	EngineNode
	EngineSafari
)

type Engine struct {
	Name    EngineName
	Version string
}

type Location struct {
	File       string
	Namespace  string
	Line       int // 1-based
	Column     int // 0-based, in bytes
	Length     int // in bytes
	LineText   string
	Suggestion string
}

type Message struct {
	PluginName string
	Text       string
	Location   *Location
	Notes      []Note

	// Optional user-specified data that is passed through unmodified. You can
	// use this to stash the original error, for example.
	Detail interface{}
}

type Note struct {
	Text     string
	Location *Location
}

type StderrColor uint8

const (
	ColorIfTerminal StderrColor = iota
	ColorNever
	ColorAlways
)

type LogLevel uint8

const (
	LogLevelSilent LogLevel = iota
	LogLevelVerbose
	LogLevelDebug
	LogLevelInfo
	LogLevelWarning
	LogLevelError
)

type Charset uint8

const (
	CharsetDefault Charset = iota
	CharsetASCII
	CharsetUTF8
)

type TreeShaking uint8

const (
	TreeShakingDefault TreeShaking = iota
	TreeShakingFalse
	TreeShakingTrue
)

////////////////////////////////////////////////////////////////////////////////
// Build API

type BuildOptions struct {
	Color    StderrColor // Documentation: https://esbuild.github.io/api/#color
	LogLimit int         // Documentation: https://esbuild.github.io/api/#log-limit
	LogLevel LogLevel    // Documentation: https://esbuild.github.io/api/#log-level

	Sourcemap      SourceMap      // Documentation: https://esbuild.github.io/api/#sourcemap
	SourceRoot     string         // Documentation: https://esbuild.github.io/api/#source-root
	SourcesContent SourcesContent // Documentation: https://esbuild.github.io/api/#sources-content

	Target  Target   // Documentation: https://esbuild.github.io/api/#target
	Engines []Engine // Documentation: https://esbuild.github.io/api/#target

	MinifyWhitespace  bool          // Documentation: https://esbuild.github.io/api/#minify
	MinifyIdentifiers bool          // Documentation: https://esbuild.github.io/api/#minify
	MinifySyntax      bool          // Documentation: https://esbuild.github.io/api/#minify
	Charset           Charset       // Documentation: https://esbuild.github.io/api/#charset
	TreeShaking       TreeShaking   // Documentation: https://esbuild.github.io/api/#tree-shaking
	IgnoreAnnotations bool          // Documentation: https://esbuild.github.io/api/#ignore-annotations
	LegalComments     LegalComments // Documentation: https://esbuild.github.io/api/#legal-comments

	JSXMode     JSXMode // Documentation: https://esbuild.github.io/api/#jsx-mode
	JSXFactory  string  // Documentation: https://esbuild.github.io/api/#jsx-factory
	JSXFragment string  // Documentation: https://esbuild.github.io/api/#jsx-fragment

	Define    map[string]string // Documentation: https://esbuild.github.io/api/#define
	Pure      []string          // Documentation: https://esbuild.github.io/api/#pure
	KeepNames bool              // Documentation: https://esbuild.github.io/api/#keep-names

	GlobalName        string            // Documentation: https://esbuild.github.io/api/#global-name
	Bundle            bool              // Documentation: https://esbuild.github.io/api/#bundle
	PreserveSymlinks  bool              // Documentation: https://esbuild.github.io/api/#preserve-symlinks
	Splitting         bool              // Documentation: https://esbuild.github.io/api/#splitting
	Outfile           string            // Documentation: https://esbuild.github.io/api/#outfile
	Metafile          bool              // Documentation: https://esbuild.github.io/api/#metafile
	Outdir            string            // Documentation: https://esbuild.github.io/api/#outdir
	Outbase           string            // Documentation: https://esbuild.github.io/api/#outbase
	AbsWorkingDir     string            // Documentation: https://esbuild.github.io/api/#working-directory
	Platform          Platform          // Documentation: https://esbuild.github.io/api/#platform
	Format            Format            // Documentation: https://esbuild.github.io/api/#format
	External          []string          // Documentation: https://esbuild.github.io/api/#external
	MainFields        []string          // Documentation: https://esbuild.github.io/api/#main-fields
	Conditions        []string          // Documentation: https://esbuild.github.io/api/#conditions
	Loader            map[string]Loader // Documentation: https://esbuild.github.io/api/#loader
	ResolveExtensions []string          // Documentation: https://esbuild.github.io/api/#resolve-extensions
	Tsconfig          string            // Documentation: https://esbuild.github.io/api/#tsconfig
	OutExtensions     map[string]string // Documentation: https://esbuild.github.io/api/#out-extension
	PublicPath        string            // Documentation: https://esbuild.github.io/api/#public-path
	Inject            []string          // Documentation: https://esbuild.github.io/api/#inject
	Banner            map[string]string // Documentation: https://esbuild.github.io/api/#banner
	Footer            map[string]string // Documentation: https://esbuild.github.io/api/#footer
	NodePaths         []string          // Documentation: https://esbuild.github.io/api/#node-paths

	EntryNames string // Documentation: https://esbuild.github.io/api/#entry-names
	ChunkNames string // Documentation: https://esbuild.github.io/api/#chunk-names
	AssetNames string // Documentation: https://esbuild.github.io/api/#asset-names

	EntryPoints         []string     // Documentation: https://esbuild.github.io/api/#entry-points
	EntryPointsAdvanced []EntryPoint // Documentation: https://esbuild.github.io/api/#entry-points

	Stdin          *StdinOptions // Documentation: https://esbuild.github.io/api/#stdin
	Write          bool          // Documentation: https://esbuild.github.io/api/#write
	AllowOverwrite bool          // Documentation: https://esbuild.github.io/api/#allow-overwrite
	Incremental    bool          // Documentation: https://esbuild.github.io/api/#incremental
	Plugins        []Plugin      // Documentation: https://esbuild.github.io/plugins/

	Watch *WatchMode // Documentation: https://esbuild.github.io/api/#watch
}

type EntryPoint struct {
	InputPath  string
	OutputPath string
}

type WatchMode struct {
	OnRebuild func(BuildResult)
}

type StdinOptions struct {
	Contents   string
	ResolveDir string
	Sourcefile string
	Loader     Loader
}

type BuildResult struct {
	Errors   []Message
	Warnings []Message

	OutputFiles []OutputFile
	Metafile    string

	Rebuild func() BuildResult // Only when "Incremental: true"
	Stop    func()             // Only when "Watch: true"
}

type OutputFile struct {
	Path     string
	Contents []byte
}

// Documentation: https://esbuild.github.io/api/#build-api
func Build(options BuildOptions) BuildResult {
	return buildImpl(options).result
}

////////////////////////////////////////////////////////////////////////////////
// Transform API

type TransformOptions struct {
	Color    StderrColor // Documentation: https://esbuild.github.io/api/#color
	LogLimit int         // Documentation: https://esbuild.github.io/api/#log-limit
	LogLevel LogLevel    // Documentation: https://esbuild.github.io/api/#log-level

	Sourcemap      SourceMap      // Documentation: https://esbuild.github.io/api/#sourcemap
	SourceRoot     string         // Documentation: https://esbuild.github.io/api/#source-root
	SourcesContent SourcesContent // Documentation: https://esbuild.github.io/api/#sources-content

	Target  Target   // Documentation: https://esbuild.github.io/api/#target
	Engines []Engine // Documentation: https://esbuild.github.io/api/#target

	Format     Format // Documentation: https://esbuild.github.io/api/#format
	GlobalName string // Documentation: https://esbuild.github.io/api/#global-name

	MinifyWhitespace  bool          // Documentation: https://esbuild.github.io/api/#minify
	MinifyIdentifiers bool          // Documentation: https://esbuild.github.io/api/#minify
	MinifySyntax      bool          // Documentation: https://esbuild.github.io/api/#minify
	Charset           Charset       // Documentation: https://esbuild.github.io/api/#charset
	TreeShaking       TreeShaking   // Documentation: https://esbuild.github.io/api/#tree-shaking
	IgnoreAnnotations bool          // Documentation: https://esbuild.github.io/api/#ignore-annotations
	LegalComments     LegalComments // Documentation: https://esbuild.github.io/api/#legal-comments

	JSXMode     JSXMode // Documentation: https://esbuild.github.io/api/#jsx
	JSXFactory  string  // Documentation: https://esbuild.github.io/api/#jsx-factory
	JSXFragment string  // Documentation: https://esbuild.github.io/api/#jsx-fragment

	TsconfigRaw string // Documentation: https://esbuild.github.io/api/#tsconfig-raw
	Banner      string // Documentation: https://esbuild.github.io/api/#banner
	Footer      string // Documentation: https://esbuild.github.io/api/#footer

	Define    map[string]string // Documentation: https://esbuild.github.io/api/#define
	Pure      []string          // Documentation: https://esbuild.github.io/api/#pure
	KeepNames bool              // Documentation: https://esbuild.github.io/api/#keep-names

	Sourcefile string // Documentation: https://esbuild.github.io/api/#sourcefile
	Loader     Loader // Documentation: https://esbuild.github.io/api/#loader
}

type TransformResult struct {
	Errors   []Message
	Warnings []Message

	Code []byte
	Map  []byte
}

// Documentation: https://esbuild.github.io/api/#transform-api
func Transform(input string, options TransformOptions) TransformResult {
	return transformImpl(input, options)
}

////////////////////////////////////////////////////////////////////////////////
// Serve API

// Documentation: https://esbuild.github.io/api/#serve-arguments
type ServeOptions struct {
	Port      uint16
	Host      string
	Servedir  string
	OnRequest func(ServeOnRequestArgs)
}

type ServeOnRequestArgs struct {
	RemoteAddress string
	Method        string
	Path          string
	Status        int
	TimeInMS      int // The time to generate the response, not to send it
}

// Documentation: https://esbuild.github.io/api/#serve-return-values
type ServeResult struct {
	Port uint16
	Host string
	Wait func() error
	Stop func()
}

// Documentation: https://esbuild.github.io/api/#serve
func Serve(serveOptions ServeOptions, buildOptions BuildOptions) (ServeResult, error) {
	return serveImpl(serveOptions, buildOptions)
}

////////////////////////////////////////////////////////////////////////////////
// Plugin API

type SideEffects uint8

const (
	SideEffectsTrue SideEffects = iota
	SideEffectsFalse
)

type Plugin struct {
	Name  string
	Setup func(PluginBuild)
}

type PluginBuild struct {
	InitialOptions *BuildOptions
	OnStart        func(callback func() (OnStartResult, error))
	OnEnd          func(callback func(result *BuildResult))
	OnResolve      func(options OnResolveOptions, callback func(OnResolveArgs) (OnResolveResult, error))
	OnLoad         func(options OnLoadOptions, callback func(OnLoadArgs) (OnLoadResult, error))
}

type OnStartResult struct {
	Errors   []Message
	Warnings []Message
}

type OnResolveOptions struct {
	Filter    string
	Namespace string
}

type OnResolveArgs struct {
	Path       string
	Importer   string
	Namespace  string
	ResolveDir string
	Kind       ResolveKind
	PluginData interface{}
}

type OnResolveResult struct {
	PluginName string

	Errors   []Message
	Warnings []Message

	Path        string
	External    bool
	SideEffects SideEffects
	Namespace   string
	Suffix      string
	PluginData  interface{}

	WatchFiles []string
	WatchDirs  []string
}

type OnLoadOptions struct {
	Filter    string
	Namespace string
}

type OnLoadArgs struct {
	Path       string
	Namespace  string
	Suffix     string
	PluginData interface{}
}

type OnLoadResult struct {
	PluginName string

	Errors   []Message
	Warnings []Message

	Contents   *string
	ResolveDir string
	Loader     Loader
	PluginData interface{}

	WatchFiles []string
	WatchDirs  []string
}

type ResolveKind uint8

const (
	ResolveEntryPoint ResolveKind = iota
	ResolveJSImportStatement
	ResolveJSRequireCall
	ResolveJSDynamicImport
	ResolveJSRequireResolve
	ResolveCSSImportRule
	ResolveCSSURLToken
)

////////////////////////////////////////////////////////////////////////////////
// FormatMessages API

type MessageKind uint8

const (
	ErrorMessage MessageKind = iota
	WarningMessage
)

type FormatMessagesOptions struct {
	TerminalWidth int
	Kind          MessageKind
	Color         bool
}

func FormatMessages(msgs []Message, opts FormatMessagesOptions) []string {
	return formatMsgsImpl(msgs, opts)
}

////////////////////////////////////////////////////////////////////////////////
// AnalyzeMetafile API

type AnalyzeMetafileOptions struct {
	Color   bool
	Verbose bool
}

// Documentation: https://esbuild.github.io/api/#analyze
func AnalyzeMetafile(metafile string, opts AnalyzeMetafileOptions) string {
	return analyzeMetafileImpl(metafile, opts)
}
1949
vendor/github.com/evanw/esbuild/pkg/api/api_impl.go
generated
vendored
File diff suppressed because it is too large
620
vendor/github.com/evanw/esbuild/pkg/api/serve_other.go
generated
vendored
@ -1,620 +0,0 @@
//go:build !js || !wasm
// +build !js !wasm

package api

import (
	"fmt"
	"net"
	"net/http"
	"path"
	"sort"
	"strconv"
	"strings"
	"sync"
	"syscall"
	"time"

	"github.com/evanw/esbuild/internal/config"
	"github.com/evanw/esbuild/internal/fs"
	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/logger"
)

////////////////////////////////////////////////////////////////////////////////
// Serve API

type apiHandler struct {
	mutex            sync.Mutex
	outdirPathPrefix string
	servedir         string
	options          *config.Options
	onRequest        func(ServeOnRequestArgs)
	rebuild          func() BuildResult
	currentBuild     *runningBuild
	fs               fs.FS
	serveWaitGroup   sync.WaitGroup
	serveError       error
}

type runningBuild struct {
	waitGroup sync.WaitGroup
	result    BuildResult
}

func (h *apiHandler) build() BuildResult {
	build := func() *runningBuild {
		h.mutex.Lock()
		defer h.mutex.Unlock()
		if h.currentBuild == nil {
			build := &runningBuild{}
			build.waitGroup.Add(1)
			h.currentBuild = build

			// Build on another thread
			go func() {
				result := h.rebuild()
				h.rebuild = result.Rebuild
				build.result = result
				build.waitGroup.Done()

				// Build results stay valid for a little bit afterward since a page
				// load may involve multiple requests and don't want to rebuild
				// separately for each of those requests.
				time.Sleep(250 * time.Millisecond)
				h.mutex.Lock()
				defer h.mutex.Unlock()
				h.currentBuild = nil
			}()
		}
		return h.currentBuild
	}()
	build.waitGroup.Wait()
	return build.result
}

func escapeForHTML(text string) string {
	text = strings.ReplaceAll(text, "&", "&amp;")
	text = strings.ReplaceAll(text, "<", "&lt;")
	text = strings.ReplaceAll(text, ">", "&gt;")
	return text
}

func escapeForAttribute(text string) string {
	text = escapeForHTML(text)
	text = strings.ReplaceAll(text, "\"", "&quot;")
	text = strings.ReplaceAll(text, "'", "&#39;")
	return text
}

func (h *apiHandler) notifyRequest(duration time.Duration, req *http.Request, status int) {
	if h.onRequest != nil {
		h.onRequest(ServeOnRequestArgs{
			RemoteAddress: req.RemoteAddr,
			Method:        req.Method,
			Path:          req.URL.Path,
			Status:        status,
			TimeInMS:      int(duration.Milliseconds()),
		})
	}
}

func errorsToString(errors []Message) string {
	stderrOptions := logger.OutputOptions{IncludeSource: true}
	terminalOptions := logger.TerminalInfo{}
	sb := strings.Builder{}
	limit := 5
	for i, msg := range convertMessagesToInternal(nil, logger.Error, errors) {
		if i == limit {
			sb.WriteString(fmt.Sprintf("%d out of %d errors shown\n", limit, len(errors)))
			break
		}
		sb.WriteString(msg.String(stderrOptions, terminalOptions))
	}
	return sb.String()
}

func (h *apiHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
	start := time.Now()

	// Handle get requests
	if req.Method == "GET" && strings.HasPrefix(req.URL.Path, "/") {
		res.Header().Set("Access-Control-Allow-Origin", "*")
		queryPath := path.Clean(req.URL.Path)[1:]
		result := h.build()

		// Requests fail if the build had errors
		if len(result.Errors) > 0 {
			go h.notifyRequest(time.Since(start), req, http.StatusServiceUnavailable)
			res.Header().Set("Content-Type", "text/plain; charset=utf-8")
			res.WriteHeader(http.StatusServiceUnavailable)
			res.Write([]byte(errorsToString(result.Errors)))
			return
		}

		var kind fs.EntryKind
		var fileContents fs.OpenedFile
		dirEntries := make(map[string]bool)
		fileEntries := make(map[string]bool)

		// Check for a match with the results if we're within the output directory
		if strings.HasPrefix(queryPath, h.outdirPathPrefix) {
			outdirQueryPath := queryPath[len(h.outdirPathPrefix):]
			if strings.HasPrefix(outdirQueryPath, "/") {
				outdirQueryPath = outdirQueryPath[1:]
			}
			resultKind, inMemoryBytes := h.matchQueryPathToResult(outdirQueryPath, &result, dirEntries, fileEntries)
			kind = resultKind
			fileContents = &fs.InMemoryOpenedFile{Contents: inMemoryBytes}
		} else {
			// Create a fake directory entry for the output path so that it appears to be a real directory
			p := h.outdirPathPrefix
			for p != "" {
				var dir string
				var base string
				if slash := strings.IndexByte(p, '/'); slash == -1 {
					base = p
				} else {
					dir = p[:slash]
					base = p[slash+1:]
				}
				if dir == queryPath {
					kind = fs.DirEntry
					dirEntries[base] = true
					break
				}
				p = dir
			}
		}

		// Check for a file in the fallback directory
		if h.servedir != "" && kind != fs.FileEntry {
			absPath := h.fs.Join(h.servedir, queryPath)
			if absDir := h.fs.Dir(absPath); absDir != absPath {
				if entries, err, _ := h.fs.ReadDirectory(absDir); err == nil {
					if entry, _ := entries.Get(h.fs.Base(absPath)); entry != nil && entry.Kind(h.fs) == fs.FileEntry {
						if contents, err, _ := h.fs.OpenFile(absPath); err == nil {
							defer contents.Close()
							fileContents = contents
							kind = fs.FileEntry
						} else if err != syscall.ENOENT {
							go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
							res.WriteHeader(http.StatusInternalServerError)
							res.Write([]byte(fmt.Sprintf("500 - Internal server error: %s", err.Error())))
							return
						}
					}
				}
			}
		}

		// Check for a directory in the fallback directory
		var fallbackIndexName string
		if h.servedir != "" && kind != fs.FileEntry {
			if entries, err, _ := h.fs.ReadDirectory(h.fs.Join(h.servedir, queryPath)); err == nil {
				kind = fs.DirEntry
				for _, name := range entries.SortedKeys() {
					entry, _ := entries.Get(name)
					switch entry.Kind(h.fs) {
					case fs.DirEntry:
						dirEntries[name] = true
					case fs.FileEntry:
						fileEntries[name] = true
						if name == "index.html" {
							fallbackIndexName = name
						}
					}
				}
			} else if err != syscall.ENOENT {
				go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
				res.WriteHeader(http.StatusInternalServerError)
				res.Write([]byte(fmt.Sprintf("500 - Internal server error: %s", err.Error())))
				return
			}
		}

		// Redirect to a trailing slash for directories
		if kind == fs.DirEntry && !strings.HasSuffix(req.URL.Path, "/") {
			res.Header().Set("Location", req.URL.Path+"/")
			go h.notifyRequest(time.Since(start), req, http.StatusFound)
			res.WriteHeader(http.StatusFound)
			res.Write(nil)
			return
		}

		// Serve a "index.html" file if present
		if kind == fs.DirEntry && fallbackIndexName != "" {
			queryPath += "/" + fallbackIndexName
			if contents, err, _ := h.fs.OpenFile(h.fs.Join(h.servedir, queryPath)); err == nil {
				defer contents.Close()
				fileContents = contents
				kind = fs.FileEntry
			} else if err != syscall.ENOENT {
				go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
				res.WriteHeader(http.StatusInternalServerError)
				res.Write([]byte(fmt.Sprintf("500 - Internal server error: %s", err.Error())))
				return
			}
		}

		// Serve a file
		if kind == fs.FileEntry {
			// Default to serving the whole file
			status := http.StatusOK
			fileContentsLen := fileContents.Len()
			begin := 0
			end := fileContentsLen
			isRange := false

			// Handle range requests so that video playback works in Safari
			if rangeBegin, rangeEnd, ok := parseRangeHeader(req.Header.Get("Range"), fileContentsLen); ok && rangeBegin < rangeEnd {
				// Note: The content range is inclusive so subtract 1 from the end
				isRange = true
				begin = rangeBegin
				end = rangeEnd
				status = http.StatusPartialContent
			}

			// Try to read the range from the file, which may fail
			fileBytes, err := fileContents.Read(begin, end)
			if err != nil {
				go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
				res.WriteHeader(http.StatusInternalServerError)
				res.Write([]byte(fmt.Sprintf("500 - Internal server error: %s", err.Error())))
				return
			}

			// If we get here, the request was successful
			if contentType := helpers.MimeTypeByExtension(path.Ext(queryPath)); contentType != "" {
				res.Header().Set("Content-Type", contentType)
			} else {
				res.Header().Set("Content-Type", "application/octet-stream")
			}
			if isRange {
				res.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", begin, end-1, fileContentsLen))
			}
			res.Header().Set("Content-Length", fmt.Sprintf("%d", len(fileBytes)))
			go h.notifyRequest(time.Since(start), req, status)
			res.WriteHeader(status)
			res.Write(fileBytes)
			return
		}

		// Serve a directory listing
		if kind == fs.DirEntry {
			html := respondWithDirList(queryPath, dirEntries, fileEntries)
			res.Header().Set("Content-Type", "text/html; charset=utf-8")
			res.Header().Set("Content-Length", fmt.Sprintf("%d", len(html)))
			go h.notifyRequest(time.Since(start), req, http.StatusOK)
			res.Write(html)
			return
		}
	}

	// Default to a 404
	res.Header().Set("Content-Type", "text/plain; charset=utf-8")
	go h.notifyRequest(time.Since(start), req, http.StatusNotFound)
	res.WriteHeader(http.StatusNotFound)
	res.Write([]byte("404 - Not Found"))
}

// Handle enough of the range specification so that video playback works in Safari
func parseRangeHeader(r string, contentLength int) (int, int, bool) {
	if strings.HasPrefix(r, "bytes=") {
		r = r[len("bytes="):]
		if dash := strings.IndexByte(r, '-'); dash != -1 {
			// Note: The range is inclusive so the limit is deliberately "length - 1"
			if begin, ok := parseRangeInt(r[:dash], contentLength-1); ok {
				if end, ok := parseRangeInt(r[dash+1:], contentLength-1); ok {
					// Note: The range is inclusive so a range of "0-1" is two bytes long
					return begin, end + 1, true
				}
			}
		}
	}
	return 0, 0, false
}

func parseRangeInt(text string, maxValue int) (int, bool) {
	if text == "" {
		return 0, false
	}
	value := 0
	for _, c := range text {
		if c < '0' || c > '9' {
			return 0, false
		}
		value = value*10 + int(c-'0')
		if value > maxValue {
			return 0, false
		}
	}
	return value, true
}

func (h *apiHandler) matchQueryPathToResult(
	queryPath string,
	result *BuildResult,
	dirEntries map[string]bool,
	fileEntries map[string]bool,
) (fs.EntryKind, []byte) {
	queryIsDir := false
	queryDir := queryPath
	if queryDir != "" {
		queryDir += "/"
	}

	// Check the output files for a match
	for _, file := range result.OutputFiles {
		if relPath, ok := h.fs.Rel(h.options.AbsOutputDir, file.Path); ok {
			relPath = strings.ReplaceAll(relPath, "\\", "/")

			// An exact match
			if relPath == queryPath {
				return fs.FileEntry, file.Contents
			}

			// A match inside this directory
			if strings.HasPrefix(relPath, queryDir) {
				entry := relPath[len(queryDir):]
				queryIsDir = true
				if slash := strings.IndexByte(entry, '/'); slash == -1 {
					fileEntries[entry] = true
				} else if dir := entry[:slash]; !dirEntries[dir] {
					dirEntries[dir] = true
				}
			}
		}
	}

	// Treat this as a directory if it's non-empty
	if queryIsDir {
		return fs.DirEntry, nil
	}

	return 0, nil
}

func respondWithDirList(queryPath string, dirEntries map[string]bool, fileEntries map[string]bool) []byte {
	queryPath = "/" + queryPath
	queryDir := queryPath
	if queryDir != "/" {
		queryDir += "/"
	}
	html := strings.Builder{}
	html.WriteString(`<!doctype html>`)
	html.WriteString(`<meta charset="utf8">`)
	html.WriteString(`<title>Directory: `)
	html.WriteString(escapeForHTML(queryDir))
	html.WriteString(`</title>`)
	html.WriteString(`<h1>Directory: `)
	html.WriteString(escapeForHTML(queryDir))
	html.WriteString(`</h1>`)
	html.WriteString(`<ul>`)

	// Link to the parent directory
	if queryPath != "/" {
		parentDir := path.Dir(queryPath)
		if parentDir != "/" {
			parentDir += "/"
		}
		html.WriteString(fmt.Sprintf(`<li><a href="%s">../</a></li>`, escapeForAttribute(parentDir)))
	}

	// Link to child directories
	strings := make([]string, 0, len(dirEntries)+len(fileEntries))
	for entry := range dirEntries {
		strings = append(strings, entry)
	}
	sort.Strings(strings)
	for _, entry := range strings {
		html.WriteString(fmt.Sprintf(`<li><a href="%s/">%s/</a></li>`, escapeForAttribute(path.Join(queryPath, entry)), escapeForHTML(entry)))
	}

	// Link to files in the directory
	strings = strings[:0]
	for entry := range fileEntries {
		strings = append(strings, entry)
	}
	sort.Strings(strings)
	for _, entry := range strings {
		html.WriteString(fmt.Sprintf(`<li><a href="%s">%s</a></li>`, escapeForAttribute(path.Join(queryPath, entry)), escapeForHTML(entry)))
	}

	html.WriteString(`</ul>`)
	return []byte(html.String())
}

// This is used to make error messages platform-independent
func prettyPrintPath(fs fs.FS, path string) string {
	if relPath, ok := fs.Rel(fs.Cwd(), path); ok {
		return strings.ReplaceAll(relPath, "\\", "/")
	}
	return path
}

func serveImpl(serveOptions ServeOptions, buildOptions BuildOptions) (ServeResult, error) {
	realFS, err := fs.RealFS(fs.RealFSOptions{
		AbsWorkingDir: buildOptions.AbsWorkingDir,

		// This is a long-lived file system object so do not cache calls to
		// ReadDirectory() (they are normally cached for the duration of a build
		// for performance).
		DoNotCache: true,
	})
	if err != nil {
		return ServeResult{}, err
	}
	buildOptions.Incremental = true
	buildOptions.Write = false

	// Watch and serve are both different ways of rebuilding, and cannot be combined
	if buildOptions.Watch != nil {
		return ServeResult{}, fmt.Errorf("Cannot use \"watch\" with \"serve\"")
	}

	// Validate the fallback path
	if serveOptions.Servedir != "" {
		if absPath, ok := realFS.Abs(serveOptions.Servedir); ok {
			serveOptions.Servedir = absPath
		} else {
			return ServeResult{}, fmt.Errorf("Invalid serve path: %s", serveOptions.Servedir)
		}
	}

	// If there is no output directory, set the output directory to something so
	// the build doesn't try to write to stdout. Make sure not to set this to a
	// path that may contain the user's files in it since we don't want to get
	// errors about overwriting input files.
	outdirPathPrefix := ""
	if buildOptions.Outdir == "" && buildOptions.Outfile == "" {
		buildOptions.Outdir = realFS.Join(realFS.Cwd(), "...")
	} else if serveOptions.Servedir != "" {
		// Compute the output directory
		var outdir string
		if buildOptions.Outdir != "" {
			if absPath, ok := realFS.Abs(buildOptions.Outdir); ok {
				outdir = absPath
			} else {
				return ServeResult{}, fmt.Errorf("Invalid outdir path: %s", buildOptions.Outdir)
			}
		} else {
			if absPath, ok := realFS.Abs(buildOptions.Outfile); ok {
				outdir = realFS.Dir(absPath)
			} else {
				return ServeResult{}, fmt.Errorf("Invalid outdir path: %s", buildOptions.Outfile)
			}
		}

		// Make sure the output directory is contained in the fallback directory
		relPath, ok := realFS.Rel(serveOptions.Servedir, outdir)
		if !ok {
			return ServeResult{}, fmt.Errorf(
				"Cannot compute relative path from %q to %q\n", serveOptions.Servedir, outdir)
		}
		relPath = strings.ReplaceAll(relPath, "\\", "/") // Fix paths on Windows
		if relPath == ".." || strings.HasPrefix(relPath, "../") {
			return ServeResult{}, fmt.Errorf(
				"Output directory %q must be contained in serve directory %q",
				prettyPrintPath(realFS, outdir),
				prettyPrintPath(realFS, serveOptions.Servedir),
			)
		}
		if relPath != "." {
			outdirPathPrefix = relPath
		}
	}

	// Determine the host
	var listener net.Listener
	network := "tcp4"
	host := "0.0.0.0"
	if serveOptions.Host != "" {
		host = serveOptions.Host

		// Only use "tcp4" if this is an IPv4 address, otherwise use "tcp"
		if ip := net.ParseIP(host); ip == nil || ip.To4() == nil {
			network = "tcp"
		}
	}

	// Pick the port
	if serveOptions.Port == 0 {
		// Default to picking a "800X" port
		for port := 8000; port <= 8009; port++ {
			if result, err := net.Listen(network, net.JoinHostPort(host, fmt.Sprintf("%d", port))); err == nil {
				listener = result
				break
			}
		}
	}
	if listener == nil {
		// Otherwise pick the provided port
		if result, err := net.Listen(network, net.JoinHostPort(host, fmt.Sprintf("%d", serveOptions.Port))); err != nil {
			return ServeResult{}, err
		} else {
			listener = result
		}
	}

	// Try listening on the provided port
	addr := listener.Addr().String()

	// Extract the real port in case we passed a port of "0"
	var result ServeResult
	if host, text, err := net.SplitHostPort(addr); err == nil {
		if port, err := strconv.ParseInt(text, 10, 32); err == nil {
			result.Port = uint16(port)
			result.Host = host
		}
	}

	var stoppingMutex sync.Mutex
	isStopping := false

	// The first build will just build normally
	var handler *apiHandler
	handler = &apiHandler{
		onRequest:        serveOptions.OnRequest,
		outdirPathPrefix: outdirPathPrefix,
		servedir:         serveOptions.Servedir,
		rebuild: func() BuildResult {
			stoppingMutex.Lock()
			defer stoppingMutex.Unlock()

			// Don't start more rebuilds if we were told to stop
			if isStopping {
				return BuildResult{}
			}

			build := buildImpl(buildOptions)
			if handler.options == nil {
				handler.options = &build.options
			}
			return build.result
		},
		fs: realFS,
	}

	// When wait is called, block until the server's call to "Serve()" returns
	result.Wait = func() error {
		handler.serveWaitGroup.Wait()
		return handler.serveError
	}

	// Create the server
	server := &http.Server{Addr: addr, Handler: handler}

	// When stop is called, block further rebuilds and then close the server
	result.Stop = func() {
		stoppingMutex.Lock()
		defer stoppingMutex.Unlock()

		// Only try to close the server once
		if isStopping {
			return
		}
		isStopping = true

		// Close the server and wait for it to close
		server.Close()
		handler.serveWaitGroup.Wait()
	}

	// Start the server and signal on "serveWaitGroup" when it stops
	handler.serveWaitGroup.Add(1)
	go func() {
		if err := server.Serve(listener); err != http.ErrServerClosed {
			handler.serveError = err
		}
		handler.serveWaitGroup.Done()
	}()

	// Start the first build shortly after this function returns (but not
	// immediately so that stuff we print right after this will come first)
	go func() {
		time.Sleep(10 * time.Millisecond)
		handler.build()
	}()
	return result, nil
}
12
vendor/github.com/evanw/esbuild/pkg/api/serve_wasm.go
generated
vendored
@ -1,12 +0,0 @@
|
|||||||
//go:build js && wasm
// +build js,wasm

package api

import "fmt"

// Remove the serve API in the WebAssembly build. This removes 2.7mb of stuff.

func serveImpl(serveOptions ServeOptions, buildOptions BuildOptions) (ServeResult, error) {
	return ServeResult{}, fmt.Errorf("The \"serve\" API is not supported when using WebAssembly")
}
25 vendor/github.com/gorilla/websocket/.gitignore generated vendored
@ -1,25 +0,0 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so

# Folders
_obj
_test

# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out

*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*

_testmain.go

*.exe

.idea/
*.iml
9 vendor/github.com/gorilla/websocket/AUTHORS generated vendored
@ -1,9 +0,0 @@
# This is the official list of Gorilla WebSocket authors for copyright
# purposes.
#
# Please keep the list sorted.

Gary Burd <gary@beagledreams.com>
Google LLC (https://opensource.google.com/)
Joachim Bauch <mail@joachim-bauch.de>
22 vendor/github.com/gorilla/websocket/LICENSE generated vendored
@ -1,22 +0,0 @@
Copyright (c) 2013 The Gorilla WebSocket Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
64 vendor/github.com/gorilla/websocket/README.md generated vendored
@ -1,64 +0,0 @@
# Gorilla WebSocket

[![GoDoc](https://godoc.org/github.com/gorilla/websocket?status.svg)](https://godoc.org/github.com/gorilla/websocket)
[![CircleCI](https://circleci.com/gh/gorilla/websocket.svg?style=svg)](https://circleci.com/gh/gorilla/websocket)

Gorilla WebSocket is a [Go](http://golang.org/) implementation of the
[WebSocket](http://www.rfc-editor.org/rfc/rfc6455.txt) protocol.

### Documentation

* [API Reference](https://pkg.go.dev/github.com/gorilla/websocket?tab=doc)
* [Chat example](https://github.com/gorilla/websocket/tree/master/examples/chat)
* [Command example](https://github.com/gorilla/websocket/tree/master/examples/command)
* [Client and server example](https://github.com/gorilla/websocket/tree/master/examples/echo)
* [File watch example](https://github.com/gorilla/websocket/tree/master/examples/filewatch)

### Status

The Gorilla WebSocket package provides a complete and tested implementation of
the [WebSocket](http://www.rfc-editor.org/rfc/rfc6455.txt) protocol. The
package API is stable.

### Installation

    go get github.com/gorilla/websocket

### Protocol Compliance

The Gorilla WebSocket package passes the server tests in the [Autobahn Test
Suite](https://github.com/crossbario/autobahn-testsuite) using the application in the
[examples/autobahn subdirectory](https://github.com/gorilla/websocket/tree/master/examples/autobahn).

### Gorilla WebSocket compared with other packages

<table>
<tr>
<th></th>
<th><a href="http://godoc.org/github.com/gorilla/websocket">github.com/gorilla</a></th>
<th><a href="http://godoc.org/golang.org/x/net/websocket">golang.org/x/net</a></th>
</tr>
<tr><td colspan="3"><a href="http://tools.ietf.org/html/rfc6455">RFC 6455</a> Features</td></tr>
<tr><td>Passes <a href="https://github.com/crossbario/autobahn-testsuite">Autobahn Test Suite</a></td><td><a href="https://github.com/gorilla/websocket/tree/master/examples/autobahn">Yes</a></td><td>No</td></tr>
<tr><td>Receive <a href="https://tools.ietf.org/html/rfc6455#section-5.4">fragmented</a> message</td><td>Yes</td><td><a href="https://code.google.com/p/go/issues/detail?id=7632">No</a>, see note 1</td></tr>
<tr><td>Send <a href="https://tools.ietf.org/html/rfc6455#section-5.5.1">close</a> message</td><td><a href="http://godoc.org/github.com/gorilla/websocket#hdr-Control_Messages">Yes</a></td><td><a href="https://code.google.com/p/go/issues/detail?id=4588">No</a></td></tr>
<tr><td>Send <a href="https://tools.ietf.org/html/rfc6455#section-5.5.2">pings</a> and receive <a href="https://tools.ietf.org/html/rfc6455#section-5.5.3">pongs</a></td><td><a href="http://godoc.org/github.com/gorilla/websocket#hdr-Control_Messages">Yes</a></td><td>No</td></tr>
<tr><td>Get the <a href="https://tools.ietf.org/html/rfc6455#section-5.6">type</a> of a received data message</td><td>Yes</td><td>Yes, see note 2</td></tr>
<tr><td colspan="3">Other Features</td></tr>
<tr><td><a href="https://tools.ietf.org/html/rfc7692">Compression Extensions</a></td><td>Experimental</td><td>No</td></tr>
<tr><td>Read message using io.Reader</td><td><a href="http://godoc.org/github.com/gorilla/websocket#Conn.NextReader">Yes</a></td><td>No, see note 3</td></tr>
<tr><td>Write message using io.WriteCloser</td><td><a href="http://godoc.org/github.com/gorilla/websocket#Conn.NextWriter">Yes</a></td><td>No, see note 3</td></tr>
</table>

Notes:

1. Large messages are fragmented in [Chrome's new WebSocket implementation](http://www.ietf.org/mail-archive/web/hybi/current/msg10503.html).
2. The application can get the type of a received data message by implementing
   a [Codec marshal](http://godoc.org/golang.org/x/net/websocket#Codec.Marshal)
   function.
3. The go.net io.Reader and io.Writer operate across WebSocket frame boundaries.
   Read returns when the input buffer is full or a frame boundary is
   encountered. Each call to Write sends a single frame message. The Gorilla
   io.Reader and io.WriteCloser operate on a single WebSocket message.
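To make the removed package's role concrete, here is a minimal client along the lines of the examples linked above; the URL is illustrative:

package main

import (
	"log"

	"github.com/gorilla/websocket"
)

func main() {
	// Dial performs the HTTP upgrade handshake and returns a *Conn.
	conn, _, err := websocket.DefaultDialer.Dial("ws://localhost:8080/echo", nil)
	if err != nil {
		log.Fatal("dial:", err)
	}
	defer conn.Close()

	if err := conn.WriteMessage(websocket.TextMessage, []byte("hello")); err != nil {
		log.Fatal("write:", err)
	}
	_, msg, err := conn.ReadMessage()
	if err != nil {
		log.Fatal("read:", err)
	}
	log.Printf("recv: %s", msg)
}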
395 vendor/github.com/gorilla/websocket/client.go generated vendored
@ -1,395 +0,0 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package websocket

import (
	"bytes"
	"context"
	"crypto/tls"
	"errors"
	"io"
	"io/ioutil"
	"net"
	"net/http"
	"net/http/httptrace"
	"net/url"
	"strings"
	"time"
)

// ErrBadHandshake is returned when the server response to opening handshake is
// invalid.
var ErrBadHandshake = errors.New("websocket: bad handshake")

var errInvalidCompression = errors.New("websocket: invalid compression negotiation")

// NewClient creates a new client connection using the given net connection.
// The URL u specifies the host and request URI. Use requestHeader to specify
// the origin (Origin), subprotocols (Sec-WebSocket-Protocol) and cookies
// (Cookie). Use the response.Header to get the selected subprotocol
// (Sec-WebSocket-Protocol) and cookies (Set-Cookie).
//
// If the WebSocket handshake fails, ErrBadHandshake is returned along with a
// non-nil *http.Response so that callers can handle redirects, authentication,
// etc.
//
// Deprecated: Use Dialer instead.
func NewClient(netConn net.Conn, u *url.URL, requestHeader http.Header, readBufSize, writeBufSize int) (c *Conn, response *http.Response, err error) {
	d := Dialer{
		ReadBufferSize:  readBufSize,
		WriteBufferSize: writeBufSize,
		NetDial: func(net, addr string) (net.Conn, error) {
			return netConn, nil
		},
	}
	return d.Dial(u.String(), requestHeader)
}

// A Dialer contains options for connecting to WebSocket server.
type Dialer struct {
	// NetDial specifies the dial function for creating TCP connections. If
	// NetDial is nil, net.Dial is used.
	NetDial func(network, addr string) (net.Conn, error)

	// NetDialContext specifies the dial function for creating TCP connections. If
	// NetDialContext is nil, net.DialContext is used.
	NetDialContext func(ctx context.Context, network, addr string) (net.Conn, error)

	// Proxy specifies a function to return a proxy for a given
	// Request. If the function returns a non-nil error, the
	// request is aborted with the provided error.
	// If Proxy is nil or returns a nil *URL, no proxy is used.
	Proxy func(*http.Request) (*url.URL, error)

	// TLSClientConfig specifies the TLS configuration to use with tls.Client.
	// If nil, the default configuration is used.
	TLSClientConfig *tls.Config

	// HandshakeTimeout specifies the duration for the handshake to complete.
	HandshakeTimeout time.Duration

	// ReadBufferSize and WriteBufferSize specify I/O buffer sizes in bytes. If a buffer
	// size is zero, then a useful default size is used. The I/O buffer sizes
	// do not limit the size of the messages that can be sent or received.
	ReadBufferSize, WriteBufferSize int

	// WriteBufferPool is a pool of buffers for write operations. If the value
	// is not set, then write buffers are allocated to the connection for the
	// lifetime of the connection.
	//
	// A pool is most useful when the application has a modest volume of writes
	// across a large number of connections.
	//
	// Applications should use a single pool for each unique value of
	// WriteBufferSize.
	WriteBufferPool BufferPool

	// Subprotocols specifies the client's requested subprotocols.
	Subprotocols []string

	// EnableCompression specifies if the client should attempt to negotiate
	// per message compression (RFC 7692). Setting this value to true does not
	// guarantee that compression will be supported. Currently only "no context
	// takeover" modes are supported.
	EnableCompression bool

	// Jar specifies the cookie jar.
	// If Jar is nil, cookies are not sent in requests and ignored
	// in responses.
	Jar http.CookieJar
}

// Dial creates a new client connection by calling DialContext with a background context.
func (d *Dialer) Dial(urlStr string, requestHeader http.Header) (*Conn, *http.Response, error) {
	return d.DialContext(context.Background(), urlStr, requestHeader)
}

var errMalformedURL = errors.New("malformed ws or wss URL")

func hostPortNoPort(u *url.URL) (hostPort, hostNoPort string) {
	hostPort = u.Host
	hostNoPort = u.Host
	if i := strings.LastIndex(u.Host, ":"); i > strings.LastIndex(u.Host, "]") {
		hostNoPort = hostNoPort[:i]
	} else {
		switch u.Scheme {
		case "wss":
			hostPort += ":443"
		case "https":
			hostPort += ":443"
		default:
			hostPort += ":80"
		}
	}
	return hostPort, hostNoPort
}

// DefaultDialer is a dialer with all fields set to the default values.
var DefaultDialer = &Dialer{
	Proxy:            http.ProxyFromEnvironment,
	HandshakeTimeout: 45 * time.Second,
}

// nilDialer is dialer to use when receiver is nil.
var nilDialer = *DefaultDialer

// DialContext creates a new client connection. Use requestHeader to specify the
// origin (Origin), subprotocols (Sec-WebSocket-Protocol) and cookies (Cookie).
// Use the response.Header to get the selected subprotocol
// (Sec-WebSocket-Protocol) and cookies (Set-Cookie).
//
// The context will be used in the request and in the Dialer.
//
// If the WebSocket handshake fails, ErrBadHandshake is returned along with a
// non-nil *http.Response so that callers can handle redirects, authentication,
// etcetera. The response body may not contain the entire response and does not
// need to be closed by the application.
func (d *Dialer) DialContext(ctx context.Context, urlStr string, requestHeader http.Header) (*Conn, *http.Response, error) {
	if d == nil {
		d = &nilDialer
	}

	challengeKey, err := generateChallengeKey()
	if err != nil {
		return nil, nil, err
	}

	u, err := url.Parse(urlStr)
	if err != nil {
		return nil, nil, err
	}

	switch u.Scheme {
	case "ws":
		u.Scheme = "http"
	case "wss":
		u.Scheme = "https"
	default:
		return nil, nil, errMalformedURL
	}

	if u.User != nil {
		// User name and password are not allowed in websocket URIs.
		return nil, nil, errMalformedURL
	}

	req := &http.Request{
		Method:     "GET",
		URL:        u,
		Proto:      "HTTP/1.1",
		ProtoMajor: 1,
		ProtoMinor: 1,
		Header:     make(http.Header),
		Host:       u.Host,
	}
	req = req.WithContext(ctx)

	// Set the cookies present in the cookie jar of the dialer
	if d.Jar != nil {
		for _, cookie := range d.Jar.Cookies(u) {
			req.AddCookie(cookie)
		}
	}

	// Set the request headers using the capitalization for names and values in
	// RFC examples. Although the capitalization shouldn't matter, there are
	// servers that depend on it. The Header.Set method is not used because the
	// method canonicalizes the header names.
	req.Header["Upgrade"] = []string{"websocket"}
	req.Header["Connection"] = []string{"Upgrade"}
	req.Header["Sec-WebSocket-Key"] = []string{challengeKey}
	req.Header["Sec-WebSocket-Version"] = []string{"13"}
	if len(d.Subprotocols) > 0 {
		req.Header["Sec-WebSocket-Protocol"] = []string{strings.Join(d.Subprotocols, ", ")}
	}
	for k, vs := range requestHeader {
		switch {
		case k == "Host":
			if len(vs) > 0 {
				req.Host = vs[0]
			}
		case k == "Upgrade" ||
			k == "Connection" ||
			k == "Sec-Websocket-Key" ||
			k == "Sec-Websocket-Version" ||
			k == "Sec-Websocket-Extensions" ||
			(k == "Sec-Websocket-Protocol" && len(d.Subprotocols) > 0):
			return nil, nil, errors.New("websocket: duplicate header not allowed: " + k)
		case k == "Sec-Websocket-Protocol":
			req.Header["Sec-WebSocket-Protocol"] = vs
		default:
			req.Header[k] = vs
		}
	}

	if d.EnableCompression {
		req.Header["Sec-WebSocket-Extensions"] = []string{"permessage-deflate; server_no_context_takeover; client_no_context_takeover"}
	}

	if d.HandshakeTimeout != 0 {
		var cancel func()
		ctx, cancel = context.WithTimeout(ctx, d.HandshakeTimeout)
		defer cancel()
	}

	// Get network dial function.
	var netDial func(network, add string) (net.Conn, error)

	if d.NetDialContext != nil {
		netDial = func(network, addr string) (net.Conn, error) {
			return d.NetDialContext(ctx, network, addr)
		}
	} else if d.NetDial != nil {
		netDial = d.NetDial
	} else {
		netDialer := &net.Dialer{}
		netDial = func(network, addr string) (net.Conn, error) {
			return netDialer.DialContext(ctx, network, addr)
		}
	}

	// If needed, wrap the dial function to set the connection deadline.
	if deadline, ok := ctx.Deadline(); ok {
		forwardDial := netDial
		netDial = func(network, addr string) (net.Conn, error) {
			c, err := forwardDial(network, addr)
			if err != nil {
				return nil, err
			}
			err = c.SetDeadline(deadline)
			if err != nil {
				c.Close()
				return nil, err
			}
			return c, nil
		}
	}

	// If needed, wrap the dial function to connect through a proxy.
	if d.Proxy != nil {
		proxyURL, err := d.Proxy(req)
		if err != nil {
			return nil, nil, err
		}
		if proxyURL != nil {
			dialer, err := proxy_FromURL(proxyURL, netDialerFunc(netDial))
			if err != nil {
				return nil, nil, err
			}
			netDial = dialer.Dial
		}
	}

	hostPort, hostNoPort := hostPortNoPort(u)
	trace := httptrace.ContextClientTrace(ctx)
	if trace != nil && trace.GetConn != nil {
		trace.GetConn(hostPort)
	}

	netConn, err := netDial("tcp", hostPort)
	if trace != nil && trace.GotConn != nil {
		trace.GotConn(httptrace.GotConnInfo{
			Conn: netConn,
		})
	}
	if err != nil {
		return nil, nil, err
	}

	defer func() {
		if netConn != nil {
			netConn.Close()
		}
	}()

	if u.Scheme == "https" {
		cfg := cloneTLSConfig(d.TLSClientConfig)
		if cfg.ServerName == "" {
			cfg.ServerName = hostNoPort
		}
		tlsConn := tls.Client(netConn, cfg)
		netConn = tlsConn

		var err error
		if trace != nil {
			err = doHandshakeWithTrace(trace, tlsConn, cfg)
		} else {
			err = doHandshake(tlsConn, cfg)
		}

		if err != nil {
			return nil, nil, err
		}
	}

	conn := newConn(netConn, false, d.ReadBufferSize, d.WriteBufferSize, d.WriteBufferPool, nil, nil)

	if err := req.Write(netConn); err != nil {
		return nil, nil, err
	}

	if trace != nil && trace.GotFirstResponseByte != nil {
		if peek, err := conn.br.Peek(1); err == nil && len(peek) == 1 {
			trace.GotFirstResponseByte()
		}
	}

	resp, err := http.ReadResponse(conn.br, req)
	if err != nil {
		return nil, nil, err
	}

	if d.Jar != nil {
		if rc := resp.Cookies(); len(rc) > 0 {
			d.Jar.SetCookies(u, rc)
		}
	}

	if resp.StatusCode != 101 ||
		!strings.EqualFold(resp.Header.Get("Upgrade"), "websocket") ||
		!strings.EqualFold(resp.Header.Get("Connection"), "upgrade") ||
		resp.Header.Get("Sec-Websocket-Accept") != computeAcceptKey(challengeKey) {
		// Before closing the network connection on return from this
		// function, slurp up some of the response to aid application
		// debugging.
		buf := make([]byte, 1024)
		n, _ := io.ReadFull(resp.Body, buf)
		resp.Body = ioutil.NopCloser(bytes.NewReader(buf[:n]))
		return nil, resp, ErrBadHandshake
	}

	for _, ext := range parseExtensions(resp.Header) {
		if ext[""] != "permessage-deflate" {
			continue
		}
		_, snct := ext["server_no_context_takeover"]
		_, cnct := ext["client_no_context_takeover"]
		if !snct || !cnct {
			return nil, resp, errInvalidCompression
		}
		conn.newCompressionWriter = compressNoContextTakeover
		conn.newDecompressionReader = decompressNoContextTakeover
		break
	}

	resp.Body = ioutil.NopCloser(bytes.NewReader([]byte{}))
	conn.subprotocol = resp.Header.Get("Sec-Websocket-Protocol")

	netConn.SetDeadline(time.Time{})
	netConn = nil // to avoid close in defer.
	return conn, resp, nil
}

func doHandshake(tlsConn *tls.Conn, cfg *tls.Config) error {
	if err := tlsConn.Handshake(); err != nil {
		return err
	}
	if !cfg.InsecureSkipVerify {
		if err := tlsConn.VerifyHostname(cfg.ServerName); err != nil {
			return err
		}
	}
	return nil
}
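A sketch of how DialContext and the Dialer fields above compose in practice; the timeout, subprotocol, and URL are illustrative:

package main

import (
	"context"
	"log"
	"time"

	"github.com/gorilla/websocket"
)

func main() {
	dialer := &websocket.Dialer{
		HandshakeTimeout:  10 * time.Second,
		Subprotocols:      []string{"chat"},
		EnableCompression: true, // negotiation may still be declined; see client.go above
	}

	// The context bounds the whole dial, independent of HandshakeTimeout.
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	conn, resp, err := dialer.DialContext(ctx, "wss://example.com/ws", nil)
	if err != nil {
		// On handshake failure resp is non-nil and aids debugging.
		if resp != nil {
			log.Printf("handshake failed with status %d", resp.StatusCode)
		}
		log.Fatal(err)
	}
	defer conn.Close()

	log.Printf("negotiated subprotocol: %q", conn.Subprotocol())
}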
16 vendor/github.com/gorilla/websocket/client_clone.go generated vendored
@ -1,16 +0,0 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build go1.8

package websocket

import "crypto/tls"

func cloneTLSConfig(cfg *tls.Config) *tls.Config {
	if cfg == nil {
		return &tls.Config{}
	}
	return cfg.Clone()
}
38 vendor/github.com/gorilla/websocket/client_clone_legacy.go generated vendored
@ -1,38 +0,0 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !go1.8

package websocket

import "crypto/tls"

// cloneTLSConfig clones all public fields except the fields
// SessionTicketsDisabled and SessionTicketKey. This avoids copying the
// sync.Mutex in the sync.Once and makes it safe to call cloneTLSConfig on a
// config in active use.
func cloneTLSConfig(cfg *tls.Config) *tls.Config {
	if cfg == nil {
		return &tls.Config{}
	}
	return &tls.Config{
		Rand:                     cfg.Rand,
		Time:                     cfg.Time,
		Certificates:             cfg.Certificates,
		NameToCertificate:        cfg.NameToCertificate,
		GetCertificate:           cfg.GetCertificate,
		RootCAs:                  cfg.RootCAs,
		NextProtos:               cfg.NextProtos,
		ServerName:               cfg.ServerName,
		ClientAuth:               cfg.ClientAuth,
		ClientCAs:                cfg.ClientCAs,
		InsecureSkipVerify:       cfg.InsecureSkipVerify,
		CipherSuites:             cfg.CipherSuites,
		PreferServerCipherSuites: cfg.PreferServerCipherSuites,
		ClientSessionCache:       cfg.ClientSessionCache,
		MinVersion:               cfg.MinVersion,
		MaxVersion:               cfg.MaxVersion,
		CurvePreferences:         cfg.CurvePreferences,
	}
}
148 vendor/github.com/gorilla/websocket/compression.go generated vendored
@ -1,148 +0,0 @@
// Copyright 2017 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package websocket

import (
	"compress/flate"
	"errors"
	"io"
	"strings"
	"sync"
)

const (
	minCompressionLevel     = -2 // flate.HuffmanOnly not defined in Go < 1.6
	maxCompressionLevel     = flate.BestCompression
	defaultCompressionLevel = 1
)

var (
	flateWriterPools [maxCompressionLevel - minCompressionLevel + 1]sync.Pool
	flateReaderPool  = sync.Pool{New: func() interface{} {
		return flate.NewReader(nil)
	}}
)

func decompressNoContextTakeover(r io.Reader) io.ReadCloser {
	const tail =
	// Add four bytes as specified in RFC
	"\x00\x00\xff\xff" +
		// Add final block to squelch unexpected EOF error from flate reader.
		"\x01\x00\x00\xff\xff"

	fr, _ := flateReaderPool.Get().(io.ReadCloser)
	fr.(flate.Resetter).Reset(io.MultiReader(r, strings.NewReader(tail)), nil)
	return &flateReadWrapper{fr}
}

func isValidCompressionLevel(level int) bool {
	return minCompressionLevel <= level && level <= maxCompressionLevel
}

func compressNoContextTakeover(w io.WriteCloser, level int) io.WriteCloser {
	p := &flateWriterPools[level-minCompressionLevel]
	tw := &truncWriter{w: w}
	fw, _ := p.Get().(*flate.Writer)
	if fw == nil {
		fw, _ = flate.NewWriter(tw, level)
	} else {
		fw.Reset(tw)
	}
	return &flateWriteWrapper{fw: fw, tw: tw, p: p}
}

// truncWriter is an io.Writer that writes all but the last four bytes of the
// stream to another io.Writer.
type truncWriter struct {
	w io.WriteCloser
	n int
	p [4]byte
}

func (w *truncWriter) Write(p []byte) (int, error) {
	n := 0

	// fill buffer first for simplicity.
	if w.n < len(w.p) {
		n = copy(w.p[w.n:], p)
		p = p[n:]
		w.n += n
		if len(p) == 0 {
			return n, nil
		}
	}

	m := len(p)
	if m > len(w.p) {
		m = len(w.p)
	}

	if nn, err := w.w.Write(w.p[:m]); err != nil {
		return n + nn, err
	}

	copy(w.p[:], w.p[m:])
	copy(w.p[len(w.p)-m:], p[len(p)-m:])
	nn, err := w.w.Write(p[:len(p)-m])
	return n + nn, err
}

type flateWriteWrapper struct {
	fw *flate.Writer
	tw *truncWriter
	p  *sync.Pool
}

func (w *flateWriteWrapper) Write(p []byte) (int, error) {
	if w.fw == nil {
		return 0, errWriteClosed
	}
	return w.fw.Write(p)
}

func (w *flateWriteWrapper) Close() error {
	if w.fw == nil {
		return errWriteClosed
	}
	err1 := w.fw.Flush()
	w.p.Put(w.fw)
	w.fw = nil
	if w.tw.p != [4]byte{0, 0, 0xff, 0xff} {
		return errors.New("websocket: internal error, unexpected bytes at end of flate stream")
	}
	err2 := w.tw.w.Close()
	if err1 != nil {
		return err1
	}
	return err2
}

type flateReadWrapper struct {
	fr io.ReadCloser
}

func (r *flateReadWrapper) Read(p []byte) (int, error) {
	if r.fr == nil {
		return 0, io.ErrClosedPipe
	}
	n, err := r.fr.Read(p)
	if err == io.EOF {
		// Preemptively place the reader back in the pool. This helps with
		// scenarios where the application does not call NextReader() soon after
		// this final read.
		r.Close()
	}
	return n, err
}

func (r *flateReadWrapper) Close() error {
	if r.fr == nil {
		return io.ErrClosedPipe
	}
	err := r.fr.Close()
	flateReaderPool.Put(r.fr)
	r.fr = nil
	return err
}
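The wrappers above are wired in only when permessage-deflate is negotiated; from the application side that negotiation is requested with a single flag. A sketch of the server side (handler path and port are illustrative):

package main

import (
	"compress/flate"
	"log"
	"net/http"

	"github.com/gorilla/websocket"
)

// Upgrader with compression enabled; the peer may still decline, in which
// case frames are sent uncompressed.
var upgrader = websocket.Upgrader{EnableCompression: true}

func handler(w http.ResponseWriter, r *http.Request) {
	conn, err := upgrader.Upgrade(w, r, nil)
	if err != nil {
		log.Println(err)
		return
	}
	defer conn.Close()

	// Per-message control once the connection is established.
	conn.EnableWriteCompression(true)
	if err := conn.SetCompressionLevel(flate.BestSpeed); err != nil {
		log.Println(err)
	}
}

func main() {
	http.HandleFunc("/ws", handler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}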
1201 vendor/github.com/gorilla/websocket/conn.go generated vendored
File diff suppressed because it is too large
15 vendor/github.com/gorilla/websocket/conn_write.go generated vendored
@ -1,15 +0,0 @@
// Copyright 2016 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build go1.8

package websocket

import "net"

func (c *Conn) writeBufs(bufs ...[]byte) error {
	b := net.Buffers(bufs)
	_, err := b.WriteTo(c.conn)
	return err
}
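net.Buffers is the standard library's vectored-write helper: where the platform supports it, all slices go out in one writev-style system call instead of one Write per slice, which is what the legacy fallback below does. A standalone illustration (host and payload are illustrative):

package main

import (
	"log"
	"net"
)

func main() {
	conn, err := net.Dial("tcp", "example.com:80")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Header and body are written together rather than in two calls.
	bufs := net.Buffers{
		[]byte("HEAD / HTTP/1.0\r\n"),
		[]byte("Host: example.com\r\n\r\n"),
	}
	if _, err := bufs.WriteTo(conn); err != nil {
		log.Fatal(err)
	}
}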
18 vendor/github.com/gorilla/websocket/conn_write_legacy.go generated vendored
@ -1,18 +0,0 @@
// Copyright 2016 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !go1.8

package websocket

func (c *Conn) writeBufs(bufs ...[]byte) error {
	for _, buf := range bufs {
		if len(buf) > 0 {
			if _, err := c.conn.Write(buf); err != nil {
				return err
			}
		}
	}
	return nil
}
227 vendor/github.com/gorilla/websocket/doc.go generated vendored
@ -1,227 +0,0 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package websocket implements the WebSocket protocol defined in RFC 6455.
//
// Overview
//
// The Conn type represents a WebSocket connection. A server application calls
// the Upgrader.Upgrade method from an HTTP request handler to get a *Conn:
//
//  var upgrader = websocket.Upgrader{
//      ReadBufferSize:  1024,
//      WriteBufferSize: 1024,
//  }
//
//  func handler(w http.ResponseWriter, r *http.Request) {
//      conn, err := upgrader.Upgrade(w, r, nil)
//      if err != nil {
//          log.Println(err)
//          return
//      }
//      ... Use conn to send and receive messages.
//  }
//
// Call the connection's WriteMessage and ReadMessage methods to send and
// receive messages as a slice of bytes. This snippet of code shows how to echo
// messages using these methods:
//
//  for {
//      messageType, p, err := conn.ReadMessage()
//      if err != nil {
//          log.Println(err)
//          return
//      }
//      if err := conn.WriteMessage(messageType, p); err != nil {
//          log.Println(err)
//          return
//      }
//  }
//
// In the above snippet of code, p is a []byte and messageType is an int with value
// websocket.BinaryMessage or websocket.TextMessage.
//
// An application can also send and receive messages using the io.WriteCloser
// and io.Reader interfaces. To send a message, call the connection NextWriter
// method to get an io.WriteCloser, write the message to the writer and close
// the writer when done. To receive a message, call the connection NextReader
// method to get an io.Reader and read until io.EOF is returned. This snippet
// shows how to echo messages using the NextWriter and NextReader methods:
//
//  for {
//      messageType, r, err := conn.NextReader()
//      if err != nil {
//          return
//      }
//      w, err := conn.NextWriter(messageType)
//      if err != nil {
//          return err
//      }
//      if _, err := io.Copy(w, r); err != nil {
//          return err
//      }
//      if err := w.Close(); err != nil {
//          return err
//      }
//  }
//
// Data Messages
//
// The WebSocket protocol distinguishes between text and binary data messages.
// Text messages are interpreted as UTF-8 encoded text. The interpretation of
// binary messages is left to the application.
//
// This package uses the TextMessage and BinaryMessage integer constants to
// identify the two data message types. The ReadMessage and NextReader methods
// return the type of the received message. The messageType argument to the
// WriteMessage and NextWriter methods specifies the type of a sent message.
//
// It is the application's responsibility to ensure that text messages are
// valid UTF-8 encoded text.
//
// Control Messages
//
// The WebSocket protocol defines three types of control messages: close, ping
// and pong. Call the connection WriteControl, WriteMessage or NextWriter
// methods to send a control message to the peer.
//
// Connections handle received close messages by calling the handler function
// set with the SetCloseHandler method and by returning a *CloseError from the
// NextReader, ReadMessage or the message Read method. The default close
// handler sends a close message to the peer.
//
// Connections handle received ping messages by calling the handler function
// set with the SetPingHandler method. The default ping handler sends a pong
// message to the peer.
//
// Connections handle received pong messages by calling the handler function
// set with the SetPongHandler method. The default pong handler does nothing.
// If an application sends ping messages, then the application should set a
// pong handler to receive the corresponding pong.
//
// The control message handler functions are called from the NextReader,
// ReadMessage and message reader Read methods. The default close and ping
// handlers can block these methods for a short time when the handler writes to
// the connection.
//
// The application must read the connection to process close, ping and pong
// messages sent from the peer. If the application is not otherwise interested
// in messages from the peer, then the application should start a goroutine to
// read and discard messages from the peer. A simple example is:
//
//  func readLoop(c *websocket.Conn) {
//      for {
//          if _, _, err := c.NextReader(); err != nil {
//              c.Close()
//              break
//          }
//      }
//  }
//
// Concurrency
//
// Connections support one concurrent reader and one concurrent writer.
//
// Applications are responsible for ensuring that no more than one goroutine
// calls the write methods (NextWriter, SetWriteDeadline, WriteMessage,
// WriteJSON, EnableWriteCompression, SetCompressionLevel) concurrently and
// that no more than one goroutine calls the read methods (NextReader,
// SetReadDeadline, ReadMessage, ReadJSON, SetPongHandler, SetPingHandler)
// concurrently.
//
// The Close and WriteControl methods can be called concurrently with all other
// methods.
//
// Origin Considerations
//
// Web browsers allow Javascript applications to open a WebSocket connection to
// any host. It's up to the server to enforce an origin policy using the Origin
// request header sent by the browser.
//
// The Upgrader calls the function specified in the CheckOrigin field to check
// the origin. If the CheckOrigin function returns false, then the Upgrade
// method fails the WebSocket handshake with HTTP status 403.
//
// If the CheckOrigin field is nil, then the Upgrader uses a safe default: fail
// the handshake if the Origin request header is present and the Origin host is
// not equal to the Host request header.
//
// The deprecated package-level Upgrade function does not perform origin
// checking. The application is responsible for checking the Origin header
// before calling the Upgrade function.
//
// Buffers
//
// Connections buffer network input and output to reduce the number
// of system calls when reading or writing messages.
//
// Write buffers are also used for constructing WebSocket frames. See RFC 6455,
// Section 5 for a discussion of message framing. A WebSocket frame header is
// written to the network each time a write buffer is flushed to the network.
// Decreasing the size of the write buffer can increase the amount of framing
// overhead on the connection.
//
// The buffer sizes in bytes are specified by the ReadBufferSize and
// WriteBufferSize fields in the Dialer and Upgrader. The Dialer uses a default
// size of 4096 when a buffer size field is set to zero. The Upgrader reuses
// buffers created by the HTTP server when a buffer size field is set to zero.
// The HTTP server buffers have a size of 4096 at the time of this writing.
//
// The buffer sizes do not limit the size of a message that can be read or
// written by a connection.
//
// Buffers are held for the lifetime of the connection by default. If the
// Dialer or Upgrader WriteBufferPool field is set, then a connection holds the
// write buffer only when writing a message.
//
// Applications should tune the buffer sizes to balance memory use and
// performance. Increasing the buffer size uses more memory, but can reduce the
// number of system calls to read or write the network. In the case of writing,
// increasing the buffer size can reduce the number of frame headers written to
// the network.
//
// Some guidelines for setting buffer parameters are:
//
// Limit the buffer sizes to the maximum expected message size. Buffers larger
// than the largest message do not provide any benefit.
//
// Depending on the distribution of message sizes, setting the buffer size to
// a value less than the maximum expected message size can greatly reduce memory
// use with a small impact on performance. Here's an example: If 99% of the
// messages are smaller than 256 bytes and the maximum message size is 512
// bytes, then a buffer size of 256 bytes will result in 1.01 more system calls
// than a buffer size of 512 bytes. The memory savings is 50%.
//
// A write buffer pool is useful when the application has a modest number of
// writes over a large number of connections. When buffers are pooled, a larger
// buffer size has a reduced impact on total memory use and has the benefit of
// reducing system calls and frame overhead.
//
// Compression EXPERIMENTAL
//
// Per message compression extensions (RFC 7692) are experimentally supported
// by this package in a limited capacity. Setting the EnableCompression option
// to true in Dialer or Upgrader will attempt to negotiate per message deflate
// support.
//
//  var upgrader = websocket.Upgrader{
//      EnableCompression: true,
//  }
//
// If compression was successfully negotiated with the connection's peer, any
// message received in compressed form will be automatically decompressed.
// All Read methods will return uncompressed bytes.
//
// Per message compression of messages written to a connection can be enabled
// or disabled by calling the corresponding Conn method:
//
//  conn.EnableWriteCompression(false)
//
// Currently this package does not support compression with "context takeover".
// This means that messages must be compressed and decompressed in isolation,
// without retaining sliding window or dictionary state across messages. For
// more details refer to RFC 7692.
//
// Use of compression is experimental and may result in decreased performance.
package websocket
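The origin policy described above is enforced through the Upgrader's CheckOrigin field; a sketch of an explicit allow-list (the allowed origin and port are illustrative):

package main

import (
	"log"
	"net/http"

	"github.com/gorilla/websocket"
)

var upgrader = websocket.Upgrader{
	// Allow only a known frontend origin. Browsers send the Origin
	// header; non-browser clients may omit it entirely.
	CheckOrigin: func(r *http.Request) bool {
		origin := r.Header.Get("Origin")
		return origin == "" || origin == "https://app.example.com"
	},
}

func handler(w http.ResponseWriter, r *http.Request) {
	// Upgrade fails with HTTP 403 if CheckOrigin returns false.
	conn, err := upgrader.Upgrade(w, r, nil)
	if err != nil {
		log.Println(err)
		return
	}
	conn.Close()
}

func main() {
	http.HandleFunc("/ws", handler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}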
42 vendor/github.com/gorilla/websocket/join.go generated vendored
@ -1,42 +0,0 @@
// Copyright 2019 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package websocket

import (
	"io"
	"strings"
)

// JoinMessages concatenates received messages to create a single io.Reader.
// The string term is appended to each message. The returned reader does not
// support concurrent calls to the Read method.
func JoinMessages(c *Conn, term string) io.Reader {
	return &joinReader{c: c, term: term}
}

type joinReader struct {
	c    *Conn
	term string
	r    io.Reader
}

func (r *joinReader) Read(p []byte) (int, error) {
	if r.r == nil {
		var err error
		_, r.r, err = r.c.NextReader()
		if err != nil {
			return 0, err
		}
		if r.term != "" {
			r.r = io.MultiReader(r.r, strings.NewReader(r.term))
		}
	}
	n, err := r.r.Read(p)
	if err == io.EOF {
		err = nil
		r.r = nil
	}
	return n, err
}
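A sketch of consuming a connection as a newline-terminated stream with JoinMessages, for example to feed a line scanner; the package name is illustrative:

package wsutil

import (
	"bufio"
	"log"

	"github.com/gorilla/websocket"
)

// readLines drains conn as one stream, with "\n" appended to each message
// so a line scanner sees message boundaries.
func readLines(conn *websocket.Conn) {
	scanner := bufio.NewScanner(websocket.JoinMessages(conn, "\n"))
	for scanner.Scan() {
		log.Printf("message: %s", scanner.Text())
	}
	if err := scanner.Err(); err != nil {
		log.Println(err)
	}
}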
60 vendor/github.com/gorilla/websocket/json.go generated vendored
@ -1,60 +0,0 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package websocket

import (
	"encoding/json"
	"io"
)

// WriteJSON writes the JSON encoding of v as a message.
//
// Deprecated: Use c.WriteJSON instead.
func WriteJSON(c *Conn, v interface{}) error {
	return c.WriteJSON(v)
}

// WriteJSON writes the JSON encoding of v as a message.
//
// See the documentation for encoding/json Marshal for details about the
// conversion of Go values to JSON.
func (c *Conn) WriteJSON(v interface{}) error {
	w, err := c.NextWriter(TextMessage)
	if err != nil {
		return err
	}
	err1 := json.NewEncoder(w).Encode(v)
	err2 := w.Close()
	if err1 != nil {
		return err1
	}
	return err2
}

// ReadJSON reads the next JSON-encoded message from the connection and stores
// it in the value pointed to by v.
//
// Deprecated: Use c.ReadJSON instead.
func ReadJSON(c *Conn, v interface{}) error {
	return c.ReadJSON(v)
}

// ReadJSON reads the next JSON-encoded message from the connection and stores
// it in the value pointed to by v.
//
// See the documentation for the encoding/json Unmarshal function for details
// about the conversion of JSON to a Go value.
func (c *Conn) ReadJSON(v interface{}) error {
	_, r, err := c.NextReader()
	if err != nil {
		return err
	}
	err = json.NewDecoder(r).Decode(v)
	if err == io.EOF {
		// One value is expected in the message.
		err = io.ErrUnexpectedEOF
	}
	return err
}
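A sketch of the JSON helpers in use; the payload type and package name are illustrative:

package wsutil

import "github.com/gorilla/websocket"

// Message is an illustrative payload type.
type Message struct {
	Kind string `json:"kind"`
	Body string `json:"body"`
}

// echoJSON reads one JSON message and sends it back.
func echoJSON(conn *websocket.Conn) error {
	var msg Message
	// ReadJSON decodes exactly one JSON value per WebSocket message.
	if err := conn.ReadJSON(&msg); err != nil {
		return err
	}
	// WriteJSON sends the value as a single text message.
	return conn.WriteJSON(msg)
}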
Some files were not shown because too many files have changed in this diff