Add support for copying files and folders.

2021-12-19 14:31:57 +01:00
parent 161cb79b88
commit 311339685c
450 changed files with 232338 additions and 3 deletions

@@ -0,0 +1,27 @@
package helpers

import "bytes"

type BitSet struct {
	entries []byte
}

func NewBitSet(bitCount uint) BitSet {
	return BitSet{make([]byte, (bitCount+7)/8)}
}

func (bs BitSet) HasBit(bit uint) bool {
	return (bs.entries[bit/8] & (1 << (bit & 7))) != 0
}

func (bs BitSet) SetBit(bit uint) {
	bs.entries[bit/8] |= 1 << (bit & 7)
}

func (bs BitSet) Equals(other BitSet) bool {
	return bytes.Equal(bs.entries, other.entries)
}

func (bs BitSet) String() string {
	return string(bs.entries)
}
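A brief usage sketch of the BitSet type above (an editor's illustration, not part of this commit; the test name is made up and the code is written as if it lived in the same helpers package). Bits are addressed by index and packed eight per byte, and two sets built the same way compare equal:

package helpers

import "testing"

// Hypothetical sketch: set and query individual bits, then compare two sets.
func TestBitSetSketch(t *testing.T) {
	a := NewBitSet(10)
	b := NewBitSet(10)
	a.SetBit(3)
	b.SetBit(3)
	if !a.HasBit(3) || a.HasBit(4) {
		t.Fatal("expected only bit 3 to be set")
	}
	if !a.Equals(b) {
		t.Fatal("expected identical bit sets to be equal")
	}
}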

@@ -0,0 +1,89 @@
package helpers

import (
	"strings"
	"unicode/utf8"
)

func RemoveMultiLineCommentIndent(prefix string, text string) string {
	// Figure out the initial indent
	indent := 0
seekBackwardToNewline:
	for len(prefix) > 0 {
		c, size := utf8.DecodeLastRuneInString(prefix)
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			break seekBackwardToNewline
		}
		prefix = prefix[:len(prefix)-size]
		indent++
	}

	// Split the comment into lines
	var lines []string
	start := 0
	for i, c := range text {
		switch c {
		case '\r', '\n':
			// Don't double-append for Windows style "\r\n" newlines
			if start <= i {
				lines = append(lines, text[start:i])
			}

			start = i + 1

			// Ignore the second part of Windows style "\r\n" newlines
			if c == '\r' && start < len(text) && text[start] == '\n' {
				start++
			}

		case '\u2028', '\u2029':
			lines = append(lines, text[start:i])
			start = i + 3
		}
	}
	lines = append(lines, text[start:])

	// Find the minimum indent over all lines after the first line
	for _, line := range lines[1:] {
		lineIndent := 0
		for _, c := range line {
			if c != ' ' && c != '\t' {
				break
			}
			lineIndent++
		}
		if indent > lineIndent {
			indent = lineIndent
		}
	}

	// Trim the indent off of all lines after the first line
	for i, line := range lines {
		if i > 0 {
			lines[i] = line[indent:]
		}
	}
	return strings.Join(lines, "\n")
}

func EscapeClosingTag(text string, slashTag string) string {
	i := strings.Index(text, "</")
	if i < 0 {
		return text
	}
	var b strings.Builder
	for {
		b.WriteString(text[:i+1])
		text = text[i+1:]
		if len(text) >= len(slashTag) && strings.EqualFold(text[:len(slashTag)], slashTag) {
			b.WriteByte('\\')
		}
		i = strings.Index(text, "</")
		if i < 0 {
			break
		}
	}
	b.WriteString(text)
	return b.String()
}
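A short sketch of both helpers above (an editor's illustration, not part of this commit; the test name is invented and the snippet assumes it sits in the same helpers package). It shows the comment's leading indentation being stripped from continuation lines, and "</script" being escaped so embedded text cannot terminate a surrounding script tag:

package helpers

import "testing"

// Hypothetical sketch exercising the two comment/text helpers.
func TestCommentHelpersSketch(t *testing.T) {
	// The one-character indent in front of the comment is removed from every
	// line after the first one.
	got := RemoveMultiLineCommentIndent("\t", "/*\n\t * middle\n\t */")
	if got != "/*\n * middle\n */" {
		t.Fatalf("unexpected result: %q", got)
	}

	// "</script" becomes "<\/script" so it can be safely embedded in HTML.
	if escaped := EscapeClosingTag("</script>", "/script"); escaped != "<\\/script>" {
		t.Fatalf("unexpected result: %q", escaped)
	}
}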

@@ -0,0 +1,14 @@
package helpers

// From: http://boost.sourceforge.net/doc/html/boost/hash_combine.html
func HashCombine(seed uint32, hash uint32) uint32 {
	return seed ^ (hash + 0x9e3779b9 + (seed << 6) + (seed >> 2))
}

func HashCombineString(seed uint32, text string) uint32 {
	seed = HashCombine(seed, uint32(len(text)))
	for _, c := range text {
		seed = HashCombine(seed, uint32(c))
	}
	return seed
}
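A quick sketch of the Boost-style hash combiner above (an editor's illustration, not part of this commit; the test name is made up). The point is that several fields can be folded into a single 32-bit key and that the result is a pure function of the seed and the inputs:

package helpers

import "testing"

// Hypothetical sketch: fold a number and a string into one deterministic key.
func TestHashCombineSketch(t *testing.T) {
	key1 := HashCombineString(HashCombine(0, 3), "entry.js")
	key2 := HashCombineString(HashCombine(0, 3), "entry.js")
	if key1 != key2 {
		t.Fatal("expected the combined hash to be deterministic")
	}
}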

@@ -0,0 +1,86 @@
package helpers

import (
	"bytes"
	"strings"
)

// This provides an efficient way to join lots of big string and byte slices
// together. It avoids the cost of repeatedly reallocating as the buffer grows
// by measuring exactly how big the buffer should be and then allocating once.
// This is a measurable speedup.
type Joiner struct {
	lastByte byte
	strings  []joinerString
	bytes    []joinerBytes
	length   uint32
}

type joinerString struct {
	data   string
	offset uint32
}

type joinerBytes struct {
	data   []byte
	offset uint32
}

func (j *Joiner) AddString(data string) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.strings = append(j.strings, joinerString{data, j.length})
	j.length += uint32(len(data))
}

func (j *Joiner) AddBytes(data []byte) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.bytes = append(j.bytes, joinerBytes{data, j.length})
	j.length += uint32(len(data))
}

func (j *Joiner) LastByte() byte {
	return j.lastByte
}

func (j *Joiner) Length() uint32 {
	return j.length
}

func (j *Joiner) EnsureNewlineAtEnd() {
	if j.length > 0 && j.lastByte != '\n' {
		j.AddString("\n")
	}
}

func (j *Joiner) Done() []byte {
	if len(j.strings) == 0 && len(j.bytes) == 1 && j.bytes[0].offset == 0 {
		// No need to allocate if there was only a single byte array written
		return j.bytes[0].data
	}
	buffer := make([]byte, j.length)
	for _, item := range j.strings {
		copy(buffer[item.offset:], item.data)
	}
	for _, item := range j.bytes {
		copy(buffer[item.offset:], item.data)
	}
	return buffer
}

func (j *Joiner) Contains(s string, b []byte) bool {
	for _, item := range j.strings {
		if strings.Contains(item.data, s) {
			return true
		}
	}
	for _, item := range j.bytes {
		if bytes.Contains(item.data, b) {
			return true
		}
	}
	return false
}
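A usage sketch of the Joiner above (an editor's illustration, not part of this commit; the test name is made up and the code assumes the same helpers package). Each part is recorded together with its final offset, and Done performs the single allocation and copies every part into place:

package helpers

import "testing"

// Hypothetical sketch: append string and byte parts, then materialize once.
func TestJoinerSketch(t *testing.T) {
	var j Joiner
	j.AddString("let a = 1;")
	j.AddBytes([]byte("let b = 2;"))
	j.EnsureNewlineAtEnd()

	if j.Length() != uint32(len("let a = 1;let b = 2;\n")) {
		t.Fatal("unexpected length")
	}
	if got := string(j.Done()); got != "let a = 1;let b = 2;\n" {
		t.Fatalf("unexpected result: %q", got)
	}
	// Note that Contains checks each part individually, so it only finds
	// substrings that do not span a boundary between parts.
	if !j.Contains("b = 2", []byte("b = 2")) {
		t.Fatal("expected the joined content to contain the substring")
	}
}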

@@ -0,0 +1,31 @@
package helpers

import "strings"

var builtinTypesLower = map[string]string{
	".css":  "text/css; charset=utf-8",
	".gif":  "image/gif",
	".htm":  "text/html; charset=utf-8",
	".html": "text/html; charset=utf-8",
	".jpeg": "image/jpeg",
	".jpg":  "image/jpeg",
	".js":   "text/javascript; charset=utf-8",
	".json": "application/json",
	".mjs":  "text/javascript; charset=utf-8",
	".pdf":  "application/pdf",
	".png":  "image/png",
	".svg":  "image/svg+xml",
	".wasm": "application/wasm",
	".webp": "image/webp",
	".xml":  "text/xml; charset=utf-8",
}

// This is used instead of Go's built-in "mime.TypeByExtension" function because
// that function is broken on Windows: https://github.com/golang/go/issues/32350.
func MimeTypeByExtension(ext string) string {
	contentType := builtinTypesLower[ext]
	if contentType == "" {
		contentType = builtinTypesLower[strings.ToLower(ext)]
	}
	return contentType
}
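A small sketch of the lookup above (an editor's illustration, not part of this commit; the test name is made up). Because an exact match is tried first and then the lowercased extension, the lookup is effectively case-insensitive, and unknown extensions return an empty string:

package helpers

import "testing"

// Hypothetical sketch: case-insensitive lookup with an empty-string fallback.
func TestMimeTypeSketch(t *testing.T) {
	if got := MimeTypeByExtension(".JS"); got != "text/javascript; charset=utf-8" {
		t.Fatalf("unexpected type: %q", got)
	}
	if got := MimeTypeByExtension(".unknown"); got != "" {
		t.Fatalf("expected an empty string for unknown extensions, got %q", got)
	}
}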

@@ -0,0 +1,22 @@
package helpers

import "strings"

func IsInsideNodeModules(path string) bool {
	for {
		// This is written in a platform-independent manner because it's run on
		// user-specified paths which can be arbitrary non-file-system things. So
		// for example Windows paths may end up being used on Unix or URLs may end
		// up being used on Windows. Be consistently agnostic to which kind of
		// slash is used on all platforms.
		slash := strings.LastIndexAny(path, "/\\")
		if slash == -1 {
			return false
		}
		dir, base := path[:slash], path[slash+1:]
		if base == "node_modules" {
			return true
		}
		path = dir
	}
}
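A sketch of the slash-agnostic check above (an editor's illustration, not part of this commit; the test name and paths are made up). Both slash styles are recognized, and only a complete "node_modules" path component counts:

package helpers

import "testing"

// Hypothetical sketch: Unix and Windows separators, plus a near-miss component.
func TestIsInsideNodeModulesSketch(t *testing.T) {
	if !IsInsideNodeModules("/home/user/project/node_modules/react/index.js") {
		t.Fatal("expected a Unix-style node_modules path to match")
	}
	if !IsInsideNodeModules(`C:\project\node_modules\react\index.js`) {
		t.Fatal("expected a Windows-style node_modules path to match")
	}
	if IsInsideNodeModules("/home/user/my_node_modules_fork/index.js") {
		t.Fatal("expected a partial match not to count")
	}
}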

@@ -0,0 +1,50 @@
package helpers

import (
	"runtime/debug"
	"strings"
)

func PrettyPrintedStack() string {
	lines := strings.Split(strings.TrimSpace(string(debug.Stack())), "\n")

	// Strip the first "goroutine" line
	if len(lines) > 0 {
		if first := lines[0]; strings.HasPrefix(first, "goroutine ") && strings.HasSuffix(first, ":") {
			lines = lines[1:]
		}
	}

	sb := strings.Builder{}

	for _, line := range lines {
		// Indented lines are source locations
		if strings.HasPrefix(line, "\t") {
			line = line[1:]
			line = strings.TrimPrefix(line, "github.com/evanw/esbuild/")
			if offset := strings.LastIndex(line, " +0x"); offset != -1 {
				line = line[:offset]
			}
			sb.WriteString(" (")
			sb.WriteString(line)
			sb.WriteString(")")
			continue
		}

		// Other lines are function calls
		if sb.Len() > 0 {
			sb.WriteByte('\n')
		}
		if strings.HasSuffix(line, ")") {
			if paren := strings.LastIndexByte(line, '('); paren != -1 {
				line = line[:paren]
			}
		}
		if slash := strings.LastIndexByte(line, '/'); slash != -1 {
			line = line[slash+1:]
		}
		sb.WriteString(line)
	}

	return sb.String()
}
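A sketch of the stack formatter above (an editor's illustration, not part of this commit; the test name is made up). The function renders debug.Stack() as one compact call per line, with the source location appended in parentheses, so the current function name should show up in the output:

package helpers

import (
	"strings"
	"testing"
)

// Hypothetical sketch: the formatted stack includes the calling frame.
func TestPrettyPrintedStackSketch(t *testing.T) {
	stack := PrettyPrintedStack()
	if stack == "" {
		t.Fatal("expected a non-empty stack trace")
	}
	if !strings.Contains(stack, "TestPrettyPrintedStackSketch") {
		t.Fatal("expected the current function to appear in the trace")
	}
}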

@@ -0,0 +1,94 @@
package helpers

import (
	"fmt"
	"strings"
	"sync"
	"time"

	"github.com/evanw/esbuild/internal/logger"
)

type Timer struct {
	mutex sync.Mutex
	data  []timerData
}

type timerData struct {
	name  string
	time  time.Time
	isEnd bool
}

func (t *Timer) Begin(name string) {
	if t != nil {
		t.data = append(t.data, timerData{
			name: name,
			time: time.Now(),
		})
	}
}

func (t *Timer) End(name string) {
	if t != nil {
		t.data = append(t.data, timerData{
			name:  name,
			time:  time.Now(),
			isEnd: true,
		})
	}
}

func (t *Timer) Fork() *Timer {
	if t != nil {
		return &Timer{}
	}
	return nil
}

func (t *Timer) Join(other *Timer) {
	if t != nil && other != nil {
		t.mutex.Lock()
		defer t.mutex.Unlock()
		t.data = append(t.data, other.data...)
	}
}

func (t *Timer) Log(log logger.Log) {
	if t == nil {
		return
	}

	type pair struct {
		timerData
		index uint32
	}

	var notes []logger.MsgData
	var stack []pair
	indent := 0

	for _, item := range t.data {
		if !item.isEnd {
			top := pair{timerData: item, index: uint32(len(notes))}
			notes = append(notes, logger.MsgData{})
			stack = append(stack, top)
			indent++
		} else {
			indent--
			last := len(stack) - 1
			top := stack[last]
			stack = stack[:last]
			if item.name != top.name {
				panic("Internal error")
			}
			notes[top.index].Text = fmt.Sprintf("%s%s: %dms",
				strings.Repeat(" ", indent),
				top.name,
				item.time.Sub(top.time).Milliseconds())
		}
	}

	log.AddWithNotes(logger.Info, nil, logger.Range{},
		"Timing information (times may not nest hierarchically due to parallelism)", notes)
}