Add support for copying files and folders.
vendor/github.com/evanw/esbuild/internal/js_parser/global_name_parser.go (generated, vendored, new file, 48 lines)
@@ -0,0 +1,48 @@
package js_parser

import (
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

func ParseGlobalName(log logger.Log, source logger.Source) (result []string, ok bool) {
	ok = true
	defer func() {
		r := recover()
		if _, isLexerPanic := r.(js_lexer.LexerPanic); isLexerPanic {
			ok = false
		} else if r != nil {
			panic(r)
		}
	}()

	lexer := js_lexer.NewLexerGlobalName(log, source)

	// Start off with an identifier
	result = append(result, lexer.Identifier)
	lexer.Expect(js_lexer.TIdentifier)

	// Follow with dot or index expressions
	for lexer.Token != js_lexer.TEndOfFile {
		switch lexer.Token {
		case js_lexer.TDot:
			lexer.Next()
			if !lexer.IsIdentifierOrKeyword() {
				lexer.Expect(js_lexer.TIdentifier)
			}
			result = append(result, lexer.Identifier)
			lexer.Next()

		case js_lexer.TOpenBracket:
			lexer.Next()
			result = append(result, js_lexer.UTF16ToString(lexer.StringLiteral()))
			lexer.Expect(js_lexer.TStringLiteral)
			lexer.Expect(js_lexer.TCloseBracket)

		default:
			lexer.Expect(js_lexer.TDot)
		}
	}

	return
}
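Editor's note: a minimal sketch of how this new API might be called (illustrative only, not part of the diff; the log and source values are assumed to come from esbuild's logger package, and their construction is elided here):

	// Hypothetical usage of ParseGlobalName: splitting a global name such as
	// `window.MyLib["v2"]` into its path segments.
	var log logger.Log       // assumed: a logger from the logger package
	var source logger.Source // assumed: wraps the text `window.MyLib["v2"]`
	if parts, ok := ParseGlobalName(log, source); ok {
		_ = parts // parts == []string{"window", "MyLib", "v2"}
	}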
vendor/github.com/evanw/esbuild/internal/js_parser/js_parser.go (generated, vendored, new file, 15160 lines)
File diff suppressed because it is too large
vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower.go (generated, vendored, new file, 2980 lines)
File diff suppressed because it is too large
vendor/github.com/evanw/esbuild/internal/js_parser/json_parser.go (generated, vendored, new file, 187 lines)
@@ -0,0 +1,187 @@
package js_parser

import (
	"fmt"

	"github.com/evanw/esbuild/internal/helpers"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/logger"
)

type jsonParser struct {
	log                            logger.Log
	source                         logger.Source
	tracker                        logger.LineColumnTracker
	lexer                          js_lexer.Lexer
	options                        JSONOptions
	suppressWarningsAboutWeirdCode bool
}

func (p *jsonParser) parseMaybeTrailingComma(closeToken js_lexer.T) bool {
	commaRange := p.lexer.Range()
	p.lexer.Expect(js_lexer.TComma)

	if p.lexer.Token == closeToken {
		if !p.options.AllowTrailingCommas {
			p.log.Add(logger.Error, &p.tracker, commaRange, "JSON does not support trailing commas")
		}
		return false
	}

	return true
}

func (p *jsonParser) parseExpr() js_ast.Expr {
	loc := p.lexer.Loc()

	switch p.lexer.Token {
	case js_lexer.TFalse:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: false}}

	case js_lexer.TTrue:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: true}}

	case js_lexer.TNull:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: js_ast.ENullShared}

	case js_lexer.TStringLiteral:
		value := p.lexer.StringLiteral()
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EString{Value: value}}

	case js_lexer.TNumericLiteral:
		value := p.lexer.Number
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.ENumber{Value: value}}

	case js_lexer.TMinus:
		p.lexer.Next()
		value := p.lexer.Number
		p.lexer.Expect(js_lexer.TNumericLiteral)
		return js_ast.Expr{Loc: loc, Data: &js_ast.ENumber{Value: -value}}

	case js_lexer.TOpenBracket:
		p.lexer.Next()
		isSingleLine := !p.lexer.HasNewlineBefore
		items := []js_ast.Expr{}

		for p.lexer.Token != js_lexer.TCloseBracket {
			if len(items) > 0 {
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
				if !p.parseMaybeTrailingComma(js_lexer.TCloseBracket) {
					break
				}
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
			}

			item := p.parseExpr()
			items = append(items, item)
		}

		if p.lexer.HasNewlineBefore {
			isSingleLine = false
		}
		p.lexer.Expect(js_lexer.TCloseBracket)
		return js_ast.Expr{Loc: loc, Data: &js_ast.EArray{
			Items:        items,
			IsSingleLine: isSingleLine,
		}}

	case js_lexer.TOpenBrace:
		p.lexer.Next()
		isSingleLine := !p.lexer.HasNewlineBefore
		properties := []js_ast.Property{}
		duplicates := make(map[string]logger.Range)

		for p.lexer.Token != js_lexer.TCloseBrace {
			if len(properties) > 0 {
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
				if !p.parseMaybeTrailingComma(js_lexer.TCloseBrace) {
					break
				}
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
			}

			keyString := p.lexer.StringLiteral()
			keyRange := p.lexer.Range()
			key := js_ast.Expr{Loc: keyRange.Loc, Data: &js_ast.EString{Value: keyString}}
			p.lexer.Expect(js_lexer.TStringLiteral)

			// Warn about duplicate keys
			if !p.suppressWarningsAboutWeirdCode {
				keyText := js_lexer.UTF16ToString(keyString)
				if prevRange, ok := duplicates[keyText]; ok {
					p.log.AddWithNotes(logger.Warning, &p.tracker, keyRange, fmt.Sprintf("Duplicate key %q in object literal", keyText),
						[]logger.MsgData{p.tracker.MsgData(prevRange, fmt.Sprintf("The original key %q is here:", keyText))})
				} else {
					duplicates[keyText] = keyRange
				}
			}

			p.lexer.Expect(js_lexer.TColon)
			value := p.parseExpr()

			property := js_ast.Property{
				Kind:       js_ast.PropertyNormal,
				Key:        key,
				ValueOrNil: value,
			}
			properties = append(properties, property)
		}

		if p.lexer.HasNewlineBefore {
			isSingleLine = false
		}
		p.lexer.Expect(js_lexer.TCloseBrace)
		return js_ast.Expr{Loc: loc, Data: &js_ast.EObject{
			Properties:   properties,
			IsSingleLine: isSingleLine,
		}}

	default:
		p.lexer.Unexpected()
		return js_ast.Expr{}
	}
}

type JSONOptions struct {
	AllowComments       bool
	AllowTrailingCommas bool
}

func ParseJSON(log logger.Log, source logger.Source, options JSONOptions) (result js_ast.Expr, ok bool) {
	ok = true
	defer func() {
		r := recover()
		if _, isLexerPanic := r.(js_lexer.LexerPanic); isLexerPanic {
			ok = false
		} else if r != nil {
			panic(r)
		}
	}()

	p := &jsonParser{
		log:                            log,
		source:                         source,
		tracker:                        logger.MakeLineColumnTracker(&source),
		options:                        options,
		lexer:                          js_lexer.NewLexerJSON(log, source, options.AllowComments),
		suppressWarningsAboutWeirdCode: helpers.IsInsideNodeModules(source.KeyPath.Text),
	}

	result = p.parseExpr()
	p.lexer.Expect(js_lexer.TEndOfFile)
	return
}
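Editor's note: a minimal sketch of calling ParseJSON with the options that enable the JSON superset used by files like tsconfig.json (illustrative only, not part of the diff; log and source construction is assumed):

	// Hypothetical usage of ParseJSON: comments and trailing commas are only
	// accepted when the corresponding JSONOptions flags are set.
	var log logger.Log       // assumed: a logger from the logger package
	var source logger.Source // assumed: wraps `{"a": 1, /* note */ "b": [2, 3,],}`
	expr, ok := ParseJSON(log, source, JSONOptions{
		AllowComments:       true,
		AllowTrailingCommas: true,
	})
	_, _ = expr, ok // on success, expr.Data is a *js_ast.EObject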
vendor/github.com/evanw/esbuild/internal/js_parser/sourcemap_parser.go (generated, vendored, new file, 251 lines)
@@ -0,0 +1,251 @@
package js_parser

import (
	"fmt"
	"sort"

	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/sourcemap"
)

// Specification: https://sourcemaps.info/spec.html
func ParseSourceMap(log logger.Log, source logger.Source) *sourcemap.SourceMap {
	expr, ok := ParseJSON(log, source, JSONOptions{})
	if !ok {
		return nil
	}

	obj, ok := expr.Data.(*js_ast.EObject)
	tracker := logger.MakeLineColumnTracker(&source)
	if !ok {
		log.Add(logger.Error, &tracker, logger.Range{Loc: expr.Loc}, "Invalid source map")
		return nil
	}

	var sources []string
	var sourcesContent []sourcemap.SourceContent
	var mappingsRaw []uint16
	var mappingsStart int32
	hasVersion := false

	for _, prop := range obj.Properties {
		keyRange := source.RangeOfString(prop.Key.Loc)

		switch js_lexer.UTF16ToString(prop.Key.Data.(*js_ast.EString).Value) {
		case "sections":
			log.Add(logger.Warning, &tracker, keyRange, "Source maps with \"sections\" are not supported")
			return nil

		case "version":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.ENumber); ok && value.Value == 3 {
				hasVersion = true
			}

		case "mappings":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.EString); ok {
				mappingsRaw = value.Value
				mappingsStart = prop.ValueOrNil.Loc.Start + 1
			}

		case "sources":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.EArray); ok {
				sources = nil
				for _, item := range value.Items {
					if element, ok := item.Data.(*js_ast.EString); ok {
						sources = append(sources, js_lexer.UTF16ToString(element.Value))
					} else {
						sources = append(sources, "")
					}
				}
			}

		case "sourcesContent":
			if value, ok := prop.ValueOrNil.Data.(*js_ast.EArray); ok {
				sourcesContent = nil
				for _, item := range value.Items {
					if element, ok := item.Data.(*js_ast.EString); ok {
						sourcesContent = append(sourcesContent, sourcemap.SourceContent{
							Value:  element.Value,
							Quoted: source.TextForRange(source.RangeOfString(item.Loc)),
						})
					} else {
						sourcesContent = append(sourcesContent, sourcemap.SourceContent{})
					}
				}
			}
		}
	}

	// Silently fail if the version was missing or incorrect
	if !hasVersion {
		return nil
	}

	// Silently fail if the source map is pointless (i.e. empty)
	if len(sources) == 0 || len(mappingsRaw) == 0 {
		return nil
	}

	var mappings mappingArray
	mappingsLen := len(mappingsRaw)
	sourcesLen := len(sources)
	generatedLine := 0
	generatedColumn := 0
	sourceIndex := 0
	originalLine := 0
	originalColumn := 0
	current := 0
	errorText := ""
	errorLen := 0
	needSort := false

	// Parse the mappings
	for current < mappingsLen {
		// Handle a line break
		if mappingsRaw[current] == ';' {
			generatedLine++
			generatedColumn = 0
			current++
			continue
		}

		// Read the generated column
		generatedColumnDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing generated column"
			errorLen = i
			break
		}
		if generatedColumnDelta < 0 {
			// This would mess up binary search
			needSort = true
		}
		generatedColumn += generatedColumnDelta
		if generatedColumn < 0 {
			errorText = fmt.Sprintf("Invalid generated column value: %d", generatedColumn)
			errorLen = i
			break
		}
		current += i

		// According to the specification, it's valid for a mapping to have 1,
		// 4, or 5 variable-length fields. Having one field means there's no
		// original location information, which is pretty useless. Just ignore
		// those entries.
		if current == mappingsLen {
			break
		}
		switch mappingsRaw[current] {
		case ',':
			current++
			continue
		case ';':
			continue
		}

		// Read the original source
		sourceIndexDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing source index"
			errorLen = i
			break
		}
		sourceIndex += sourceIndexDelta
		if sourceIndex < 0 || sourceIndex >= sourcesLen {
			errorText = fmt.Sprintf("Invalid source index value: %d", sourceIndex)
			errorLen = i
			break
		}
		current += i

		// Read the original line
		originalLineDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing original line"
			errorLen = i
			break
		}
		originalLine += originalLineDelta
		if originalLine < 0 {
			errorText = fmt.Sprintf("Invalid original line value: %d", originalLine)
			errorLen = i
			break
		}
		current += i

		// Read the original column
		originalColumnDelta, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:])
		if !ok {
			errorText = "Missing original column"
			errorLen = i
			break
		}
		originalColumn += originalColumnDelta
		if originalColumn < 0 {
			errorText = fmt.Sprintf("Invalid original column value: %d", originalColumn)
			errorLen = i
			break
		}
		current += i

		// Ignore the optional name index
		if _, i, ok := sourcemap.DecodeVLQUTF16(mappingsRaw[current:]); ok {
			current += i
		}

		// Handle the next character
		if current < mappingsLen {
			if c := mappingsRaw[current]; c == ',' {
				current++
			} else if c != ';' {
				errorText = fmt.Sprintf("Invalid character after mapping: %q",
					js_lexer.UTF16ToString(mappingsRaw[current:current+1]))
				errorLen = 1
				break
			}
		}

		mappings = append(mappings, sourcemap.Mapping{
			GeneratedLine:   int32(generatedLine),
			GeneratedColumn: int32(generatedColumn),
			SourceIndex:     int32(sourceIndex),
			OriginalLine:    int32(originalLine),
			OriginalColumn:  int32(originalColumn),
		})
	}

	if errorText != "" {
		r := logger.Range{Loc: logger.Loc{Start: mappingsStart + int32(current)}, Len: int32(errorLen)}
		log.Add(logger.Warning, &tracker, r,
			fmt.Sprintf("Bad \"mappings\" data in source map at character %d: %s", current, errorText))
		return nil
	}

	if needSort {
		// If we get here, some mappings are out of order. Lines can't be out of
		// order by construction but columns can. This is a pretty rare situation
		// because almost all source map generators always write out mappings in
		// order as they write the output instead of scrambling the order.
		sort.Stable(mappings)
	}

	return &sourcemap.SourceMap{
		Sources:        sources,
		SourcesContent: sourcesContent,
		Mappings:       mappings,
	}
}

// This type is just so we can use Go's native sort function
type mappingArray []sourcemap.Mapping

func (a mappingArray) Len() int          { return len(a) }
func (a mappingArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }

func (a mappingArray) Less(i int, j int) bool {
	ai := a[i]
	aj := a[j]
	return ai.GeneratedLine < aj.GeneratedLine || (ai.GeneratedLine == aj.GeneratedLine && ai.GeneratedColumn <= aj.GeneratedColumn)
}
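Editor's note: a worked example of the VLQ decoding loop above, using a hypothetical mappings string. In base64 VLQ, 'A' encodes a delta of 0 and 'C' a delta of +1, so the segment "AACA" decodes to the four deltas [0, 0, 1, 0]: generated column +0, source index +0, original line +1, original column +0. A sketch of the entry point (illustrative only, not part of the diff; log and source construction is assumed):

	// Hypothetical usage of ParseSourceMap on a source assumed to wrap JSON like:
	//   {"version": 3, "sources": ["a.ts"], "mappings": "AAAA;AACA"}
	var log logger.Log       // assumed: a logger from the logger package
	var source logger.Source // assumed
	if sm := ParseSourceMap(log, source); sm != nil {
		// sm.Mappings is sorted by generated position, ready for binary search.
		_ = sm.Mappings
	}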
vendor/github.com/evanw/esbuild/internal/js_parser/ts_parser.go (generated, vendored, new file, 1601 lines)
File diff suppressed because it is too large