mirror of git://gcc.gnu.org/git/gcc.git

commit 501699af16
parent 34c5f21a38

libgo: Update to weekly.2012-02-22 release.

From-SVN: r184819
@@ -4,6 +4,9 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
+// Test that the Go environment variables are present and accessible through
+// package os and package runtime.
+
 package main
 
 import (
@@ -12,18 +15,14 @@ import (
 )
 
 func main() {
-    ga, e0 := os.Getenverror("GOARCH")
-    if e0 != nil {
-        print("$GOARCH: ", e0.Error(), "\n")
-        os.Exit(1)
-    }
+    ga := os.Getenv("GOARCH")
     if ga != runtime.GOARCH {
        print("$GOARCH=", ga, "!= runtime.GOARCH=", runtime.GOARCH, "\n")
        os.Exit(1)
     }
-    xxx, e1 := os.Getenverror("DOES_NOT_EXIST")
-    if e1 != os.ENOENV {
-        print("$DOES_NOT_EXIST=", xxx, "; err = ", e1.Error(), "\n")
+    xxx := os.Getenv("DOES_NOT_EXIST")
+    if xxx != "" {
+        print("$DOES_NOT_EXIST=", xxx, "\n")
        os.Exit(1)
     }
 }
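The test above tracks the library change in this release: os.Getenverror and os.ENOENV are gone, and os.Getenv alone remains, reporting an unset variable only as the empty string. A minimal sketch of the new-style check (the variable name is made up for illustration):

package main

import "os"

func main() {
    // With the weekly.2012-02-22 libraries there is no error return;
    // an unset variable and an empty one both come back as "".
    if v := os.Getenv("SOME_HYPOTHETICAL_VAR"); v == "" {
        println("SOME_HYPOTHETICAL_VAR is unset or empty")
    }
}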
@@ -7,7 +7,7 @@
 package main
 
 import (
-    "os"
+    "errors"
     "strconv"
 )
 
@@ -44,7 +44,7 @@ func main() {
     }
     mm := make(map[string]error)
     trace = ""
-    mm["abc"] = os.EINVAL
+    mm["abc"] = errors.New("invalid")
     *i(), mm[f()] = strconv.Atoi(h())
     if mm["abc"] != nil || trace != "ifh" {
         println("BUG1", mm["abc"], trace)
@@ -1,4 +1,4 @@
-43cf9b39b647
+96bd78e7d35e
 
 The first line of this file holds the Mercurial revision number of the
 last merge done from the master library sources.
@@ -504,7 +504,7 @@ runtime1.c: $(srcdir)/runtime/runtime1.goc goc2c
     mv -f $@.tmp $@
 
 sema.c: $(srcdir)/runtime/sema.goc goc2c
-    ./goc2c --gcc --go-prefix libgo_runtime $< > $@.tmp
+    ./goc2c --gcc --go-prefix libgo_sync $< > $@.tmp
     mv -f $@.tmp $@
 
 sigqueue.c: $(srcdir)/runtime/sigqueue.goc goc2c
@@ -847,6 +847,7 @@ go_sync_files = \
     go/sync/cond.go \
     go/sync/mutex.go \
     go/sync/once.go \
+    go/sync/runtime.go \
     go/sync/rwmutex.go \
     go/sync/waitgroup.go
 
@@ -878,6 +879,7 @@ go_time_files = \
     go/time/tick.go \
     go/time/time.go \
     go/time/zoneinfo.go \
+    go/time/zoneinfo_read.go \
     go/time/zoneinfo_unix.go
 
 go_unicode_files = \
@@ -1091,6 +1093,7 @@ go_exp_norm_files = \
     go/exp/norm/composition.go \
     go/exp/norm/forminfo.go \
     go/exp/norm/input.go \
+    go/exp/norm/iter.go \
     go/exp/norm/normalize.go \
     go/exp/norm/readwriter.go \
     go/exp/norm/tables.go \
@@ -1132,7 +1135,8 @@ go_go_doc_files = \
     go/go/doc/example.go \
     go/go/doc/exports.go \
     go/go/doc/filter.go \
-    go/go/doc/reader.go
+    go/go/doc/reader.go \
+    go/go/doc/synopsis.go
 go_go_parser_files = \
     go/go/parser/interface.go \
     go/go/parser/parser.go
@@ -1159,7 +1163,6 @@ go_hash_fnv_files = \
 
 go_html_template_files = \
     go/html/template/attr.go \
-    go/html/template/clone.go \
     go/html/template/content.go \
     go/html/template/context.go \
     go/html/template/css.go \
@@ -1157,6 +1157,7 @@ go_sync_files = \
     go/sync/cond.go \
     go/sync/mutex.go \
     go/sync/once.go \
+    go/sync/runtime.go \
     go/sync/rwmutex.go \
     go/sync/waitgroup.go
 
@@ -1182,6 +1183,7 @@ go_time_files = \
     go/time/tick.go \
     go/time/time.go \
     go/time/zoneinfo.go \
+    go/time/zoneinfo_read.go \
     go/time/zoneinfo_unix.go
 
 go_unicode_files = \
@@ -1427,6 +1429,7 @@ go_exp_norm_files = \
     go/exp/norm/composition.go \
     go/exp/norm/forminfo.go \
     go/exp/norm/input.go \
+    go/exp/norm/iter.go \
     go/exp/norm/normalize.go \
     go/exp/norm/readwriter.go \
     go/exp/norm/tables.go \
@@ -1474,7 +1477,8 @@ go_go_doc_files = \
     go/go/doc/example.go \
     go/go/doc/exports.go \
     go/go/doc/filter.go \
-    go/go/doc/reader.go
+    go/go/doc/reader.go \
+    go/go/doc/synopsis.go
 
 go_go_parser_files = \
     go/go/parser/interface.go \
@@ -1508,7 +1512,6 @@ go_hash_fnv_files = \
 
 go_html_template_files = \
     go/html/template/attr.go \
-    go/html/template/clone.go \
     go/html/template/content.go \
     go/html/template/context.go \
     go/html/template/css.go \
@@ -4318,7 +4321,7 @@ runtime1.c: $(srcdir)/runtime/runtime1.goc goc2c
     mv -f $@.tmp $@
 
 sema.c: $(srcdir)/runtime/sema.goc goc2c
-    ./goc2c --gcc --go-prefix libgo_runtime $< > $@.tmp
+    ./goc2c --gcc --go-prefix libgo_sync $< > $@.tmp
     mv -f $@.tmp $@
 
 sigqueue.c: $(srcdir)/runtime/sigqueue.goc goc2c
@@ -106,9 +106,12 @@ func (b *Reader) Peek(n int) ([]byte, error) {
     if m > n {
         m = n
     }
-    err := b.readErr()
-    if m < n && err == nil {
-        err = ErrBufferFull
+    var err error
+    if m < n {
+        err = b.readErr()
+        if err == nil {
+            err = ErrBufferFull
+        }
     }
     return b.buf[b.r : b.r+m], err
 }
@@ -539,6 +539,27 @@ func TestPeek(t *testing.T) {
     if _, err := buf.Peek(1); err != io.EOF {
         t.Fatalf("want EOF got %v", err)
     }
+
+    // Test for issue 3022, not exposing a reader's error on a successful Peek.
+    buf = NewReaderSize(dataAndEOFReader("abcd"), 32)
+    if s, err := buf.Peek(2); string(s) != "ab" || err != nil {
+        t.Errorf(`Peek(2) on "abcd", EOF = %q, %v; want "ab", nil`, string(s), err)
+    }
+    if s, err := buf.Peek(4); string(s) != "abcd" || err != nil {
+        t.Errorf(`Peek(4) on "abcd", EOF = %q, %v; want "abcd", nil`, string(s), err)
+    }
+    if n, err := buf.Read(p[0:5]); string(p[0:n]) != "abcd" || err != nil {
+        t.Fatalf("Read after peek = %q, %v; want abcd, EOF", p[0:n], err)
+    }
+    if n, err := buf.Read(p[0:1]); string(p[0:n]) != "" || err != io.EOF {
+        t.Fatalf(`second Read after peek = %q, %v; want "", EOF`, p[0:n], err)
+    }
+}
+
+type dataAndEOFReader string
+
+func (r dataAndEOFReader) Read(p []byte) (int, error) {
+    return copy(p, r), io.EOF
 }
 
 func TestPeekThenUnreadRune(t *testing.T) {
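The two bufio hunks above change Peek so that a pending read error is only consulted when fewer than n bytes are available; a successful Peek no longer exposes the reader's error (issue 3022). A small caller-side sketch of that behavior, using a reader that returns its data and io.EOF from the same call, in the spirit of the test's dataAndEOFReader:

package main

import (
    "bufio"
    "fmt"
    "io"
)

// eofReader hands back all of its data together with io.EOF.
type eofReader string

func (r eofReader) Read(p []byte) (int, error) {
    return copy(p, r), io.EOF
}

func main() {
    b := bufio.NewReader(eofReader("abcd"))
    p, err := b.Peek(2)
    fmt.Printf("%q %v\n", p, err) // "ab" <nil>: enough bytes buffered, EOF not surfaced
    p, err = b.Peek(10)
    fmt.Printf("%q %v\n", p, err) // "abcd" EOF: fewer than 10 bytes will ever arrive
}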
@@ -13,6 +13,7 @@ import (
 
 // Compare returns an integer comparing the two byte arrays lexicographically.
 // The result will be 0 if a==b, -1 if a < b, and +1 if a > b
+// A nil argument is equivalent to an empty slice.
 func Compare(a, b []byte) int {
     m := len(a)
     if m > len(b) {
@@ -37,6 +38,7 @@ func Compare(a, b []byte) int {
 }
 
 // Equal returns a boolean reporting whether a == b.
+// A nil argument is equivalent to an empty slice.
 func Equal(a, b []byte) bool
 
 func equalPortable(a, b []byte) bool {
@@ -46,32 +46,39 @@ type BinOpTest struct {
     i int
 }
 
-var comparetests = []BinOpTest{
-    {"", "", 0},
-    {"a", "", 1},
-    {"", "a", -1},
-    {"abc", "abc", 0},
-    {"ab", "abc", -1},
-    {"abc", "ab", 1},
-    {"x", "ab", 1},
-    {"ab", "x", -1},
-    {"x", "a", 1},
-    {"b", "x", -1},
+var compareTests = []struct {
+    a, b []byte
+    i int
+}{
+    {[]byte(""), []byte(""), 0},
+    {[]byte("a"), []byte(""), 1},
+    {[]byte(""), []byte("a"), -1},
+    {[]byte("abc"), []byte("abc"), 0},
+    {[]byte("ab"), []byte("abc"), -1},
+    {[]byte("abc"), []byte("ab"), 1},
+    {[]byte("x"), []byte("ab"), 1},
+    {[]byte("ab"), []byte("x"), -1},
+    {[]byte("x"), []byte("a"), 1},
+    {[]byte("b"), []byte("x"), -1},
+    // nil tests
+    {nil, nil, 0},
+    {[]byte(""), nil, 0},
+    {nil, []byte(""), 0},
+    {[]byte("a"), nil, 1},
+    {nil, []byte("a"), -1},
 }
 
 func TestCompare(t *testing.T) {
-    for _, tt := range comparetests {
-        a := []byte(tt.a)
-        b := []byte(tt.b)
-        cmp := Compare(a, b)
+    for _, tt := range compareTests {
+        cmp := Compare(tt.a, tt.b)
         if cmp != tt.i {
             t.Errorf(`Compare(%q, %q) = %v`, tt.a, tt.b, cmp)
         }
-        eql := Equal(a, b)
+        eql := Equal(tt.a, tt.b)
         if eql != (tt.i == 0) {
             t.Errorf(`Equal(%q, %q) = %v`, tt.a, tt.b, eql)
         }
-        eql = EqualPortable(a, b)
+        eql = EqualPortable(tt.a, tt.b)
         if eql != (tt.i == 0) {
             t.Errorf(`EqualPortable(%q, %q) = %v`, tt.a, tt.b, eql)
         }
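A short, runnable illustration of the nil-argument rule documented in the hunks above: a nil slice behaves like an empty slice for both Compare and Equal.

package main

import (
    "bytes"
    "fmt"
)

func main() {
    fmt.Println(bytes.Compare(nil, []byte("")))  // 0: nil compares equal to empty
    fmt.Println(bytes.Equal(nil, []byte("")))    // true
    fmt.Println(bytes.Compare(nil, []byte("a"))) // -1: nil sorts before non-empty
    fmt.Println(bytes.Compare([]byte("a"), nil)) // 1
}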
@@ -11,18 +11,18 @@ import (
     "os"
 )
 
-// Hello world!
 func ExampleBuffer() {
     var b Buffer // A Buffer needs no initialization.
     b.Write([]byte("Hello "))
     b.Write([]byte("world!"))
     b.WriteTo(os.Stdout)
+    // Output: Hello world!
 }
 
-// Gophers rule!
 func ExampleBuffer_reader() {
     // A Buffer can turn a string or a []byte into an io.Reader.
     buf := NewBufferString("R29waGVycyBydWxlIQ==")
     dec := base64.NewDecoder(base64.StdEncoding, buf)
     io.Copy(os.Stdout, dec)
+    // Output: Gophers rule!
 }
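The hunk above moves each example's expected output from a doc comment into a trailing "// Output:" comment, which go test compares against what the example actually prints. A sketch of the convention on an unrelated function (not part of this commit):

package bytes_test

import (
    "bytes"
    "fmt"
)

// go test runs ExampleContains and checks its stdout against the
// text following "// Output:".
func ExampleContains() {
    fmt.Println(bytes.Contains([]byte("seafood"), []byte("foo")))
    // Output: true
}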
@@ -306,6 +306,9 @@ func TestDeflateInflateString(t *testing.T) {
             t.Error(err)
         }
         testToFromWithLimit(t, gold, test.label, test.limit)
+        if testing.Short() {
+            break
+        }
     }
 }
 
@@ -363,6 +366,10 @@ func TestWriterDict(t *testing.T) {
 
 // See http://code.google.com/p/go/issues/detail?id=2508
 func TestRegression2508(t *testing.T) {
+    if testing.Short() {
+        t.Logf("test disabled with -short")
+        return
+    }
     w, err := NewWriter(ioutil.Discard, 1)
     if err != nil {
         t.Fatalf("NewWriter: %v", err)
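Both hunks above use the standard testing.Short() guard so the slow deflate cases are skipped under go test -short. A generic sketch of the pattern, with a hypothetical package and test name:

package mypkg

import "testing"

func TestExpensiveRoundTrip(t *testing.T) {
    if testing.Short() {
        t.Logf("test disabled with -short")
        return
    }
    // ... the expensive work runs only in a full test run ...
}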
@@ -57,11 +57,26 @@ func (pq *PriorityQueue) Pop() interface{} {
     return item
 }
 
-// 99:seven 88:five 77:zero 66:nine 55:three 44:two 33:six 22:one 11:four 00:eight
-func ExampleInterface() {
-    // The full code of this example, including the methods that implement
-    // heap.Interface, is in the file src/pkg/container/heap/example_test.go.
+// update is not used by the example but shows how to take the top item from
+// the queue, update its priority and value, and put it back.
+func (pq *PriorityQueue) update(value string, priority int) {
+    item := heap.Pop(pq).(*Item)
+    item.value = value
+    item.priority = priority
+    heap.Push(pq, item)
+}
+
+// changePriority is not used by the example but shows how to change the
+// priority of an arbitrary item.
+func (pq *PriorityQueue) changePriority(item *Item, priority int) {
+    heap.Remove(pq, item.index)
+    item.priority = priority
+    heap.Push(pq, item)
+}
+
+// This example pushes 10 items into a PriorityQueue and takes them out in
+// order of priority.
+func Example() {
     const nItem = 10
     // Random priorities for the items (a permutation of 0..9, times 11)).
     priorities := [nItem]int{
@@ -85,21 +100,6 @@ func ExampleInterface() {
         item := heap.Pop(&pq).(*Item)
         fmt.Printf("%.2d:%s ", item.priority, item.value)
     }
-}
-
-// update is not used by the example but shows how to take the top item from the queue,
-// update its priority and value, and put it back.
-func (pq *PriorityQueue) update(value string, priority int) {
-    item := heap.Pop(pq).(*Item)
-    item.value = value
-    item.priority = priority
-    heap.Push(pq, item)
-}
-
-// changePriority is not used by the example but shows how to change the priority of an arbitrary
-// item.
-func (pq *PriorityQueue) changePriority(item *Item, priority int) {
-    heap.Remove(pq, item.index)
-    item.priority = priority
-    heap.Push(pq, item)
+    // Output:
+    // 99:seven 88:five 77:zero 66:nine 55:three 44:two 33:six 22:one 11:four 00:eight
 }
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// Package dsa implements the Digital Signature Algorithm, as defined in FIPS 186-3
+// Package dsa implements the Digital Signature Algorithm, as defined in FIPS 186-3.
 package dsa
 
 import (
@@ -2,9 +2,10 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-package md5
+package md5_test
 
 import (
+    "crypto/md5"
     "fmt"
     "io"
     "testing"
@@ -52,7 +53,7 @@ var golden = []md5Test{
 func TestGolden(t *testing.T) {
     for i := 0; i < len(golden); i++ {
         g := golden[i]
-        c := New()
+        c := md5.New()
         for j := 0; j < 3; j++ {
             if j < 2 {
                 io.WriteString(c, g.in)
@@ -69,3 +70,11 @@ func TestGolden(t *testing.T) {
         }
     }
 }
+
+func ExampleNew() {
+    h := md5.New()
+    io.WriteString(h, "The fog is getting thicker!")
+    io.WriteString(h, "And Leon's getting laaarger!")
+    fmt.Printf("%x", h.Sum(nil))
+    // Output: e2c569be17396eca2a2e3c11578123ed
+}
@@ -4,9 +4,10 @@
 
 // SHA1 hash algorithm. See RFC 3174.
 
-package sha1
+package sha1_test
 
 import (
+    "crypto/sha1"
     "fmt"
     "io"
     "testing"
@@ -54,7 +55,7 @@ var golden = []sha1Test{
 func TestGolden(t *testing.T) {
     for i := 0; i < len(golden); i++ {
         g := golden[i]
-        c := New()
+        c := sha1.New()
         for j := 0; j < 3; j++ {
             if j < 2 {
                 io.WriteString(c, g.in)
@@ -71,3 +72,10 @@ func TestGolden(t *testing.T) {
         }
     }
 }
+
+func ExampleNew() {
+    h := sha1.New()
+    io.WriteString(h, "His money is twice tainted: 'taint yours and 'taint mine.")
+    fmt.Printf("% x", h.Sum(nil))
+    // Output: 59 7f 6a 54 00 10 f9 4c 15 d7 18 06 a9 9a 2c 87 10 e7 47 bd
+}
@@ -273,7 +273,7 @@ func (c *Conn) clientHandshake() error {
     masterSecret, clientMAC, serverMAC, clientKey, serverKey, clientIV, serverIV :=
         keysFromPreMasterSecret(c.vers, preMasterSecret, hello.random, serverHello.random, suite.macLen, suite.keyLen, suite.ivLen)
 
-    clientCipher := suite.cipher(clientKey, clientIV, false /* not for reading */ )
+    clientCipher := suite.cipher(clientKey, clientIV, false /* not for reading */)
     clientHash := suite.mac(c.vers, clientMAC)
     c.out.prepareCipherSpec(c.vers, clientCipher, clientHash)
     c.writeRecord(recordTypeChangeCipherSpec, []byte{1})
@@ -294,7 +294,7 @@ func (c *Conn) clientHandshake() error {
     finishedHash.Write(finished.marshal())
     c.writeRecord(recordTypeHandshake, finished.marshal())
 
-    serverCipher := suite.cipher(serverKey, serverIV, true /* for reading */ )
+    serverCipher := suite.cipher(serverKey, serverIV, true /* for reading */)
     serverHash := suite.mac(c.vers, serverMAC)
     c.in.prepareCipherSpec(c.vers, serverCipher, serverHash)
     c.readRecord(recordTypeChangeCipherSpec)
@@ -295,7 +295,7 @@ FindCipherSuite:
     masterSecret, clientMAC, serverMAC, clientKey, serverKey, clientIV, serverIV :=
         keysFromPreMasterSecret(c.vers, preMasterSecret, clientHello.random, hello.random, suite.macLen, suite.keyLen, suite.ivLen)
 
-    clientCipher := suite.cipher(clientKey, clientIV, true /* for reading */ )
+    clientCipher := suite.cipher(clientKey, clientIV, true /* for reading */)
     clientHash := suite.mac(c.vers, clientMAC)
     c.in.prepareCipherSpec(c.vers, clientCipher, clientHash)
     c.readRecord(recordTypeChangeCipherSpec)
@@ -333,7 +333,7 @@ FindCipherSuite:
 
     finishedHash.Write(clientFinished.marshal())
 
-    serverCipher := suite.cipher(serverKey, serverIV, false /* not for reading */ )
+    serverCipher := suite.cipher(serverKey, serverIV, false /* not for reading */)
     serverHash := suite.mac(c.vers, serverMAC)
     c.out.prepareCipherSpec(c.vers, serverCipher, serverHash)
     c.writeRecord(recordTypeChangeCipherSpec, []byte{1})
@@ -17,8 +17,8 @@ import (
 // subsetTypeArgs takes a slice of arguments from callers of the sql
 // package and converts them into a slice of the driver package's
 // "subset types".
-func subsetTypeArgs(args []interface{}) ([]interface{}, error) {
-    out := make([]interface{}, len(args))
+func subsetTypeArgs(args []interface{}) ([]driver.Value, error) {
+    out := make([]driver.Value, len(args))
     for n, arg := range args {
         var err error
         out[n], err = driver.DefaultParameterConverter.ConvertValue(arg)
@@ -6,21 +6,20 @@
 // drivers as used by package sql.
 //
 // Most code should use package sql.
-//
-// Drivers only need to be aware of a subset of Go's types. The sql package
-// will convert all types into one of the following:
+package driver
+
+import "errors"
+
+// A driver Value is a value that drivers must be able to handle.
+// A Value is either nil or an instance of one of these types:
 //
 //   int64
 //   float64
 //   bool
-//   nil
 //   []byte
 //   string [*] everywhere except from Rows.Next.
 //   time.Time
-//
-package driver
-
-import "errors"
+type Value interface{}
 
 // Driver is the interface that must be implemented by a database
 // driver.
@@ -50,11 +49,9 @@ var ErrSkip = errors.New("driver: skip fast-path; continue as if unimplemented")
 // first prepare a query, execute the statement, and then close the
 // statement.
 //
-// All arguments are of a subset type as defined in the package docs.
-//
 // Exec may return ErrSkip.
 type Execer interface {
-    Exec(query string, args []interface{}) (Result, error)
+    Exec(query string, args []Value) (Result, error)
 }
 
 // Conn is a connection to a database. It is not used concurrently
@@ -127,18 +124,17 @@ type Stmt interface {
     NumInput() int
 
     // Exec executes a query that doesn't return rows, such
-    // as an INSERT or UPDATE. The args are all of a subset
-    // type as defined above.
-    Exec(args []interface{}) (Result, error)
+    // as an INSERT or UPDATE.
+    Exec(args []Value) (Result, error)
 
     // Exec executes a query that may return rows, such as a
-    // SELECT. The args of all of a subset type as defined above.
-    Query(args []interface{}) (Rows, error)
+    // SELECT.
+    Query(args []Value) (Rows, error)
 }
 
 // ColumnConverter may be optionally implemented by Stmt if the
 // the statement is aware of its own columns' types and can
-// convert from any type to a driver subset type.
+// convert from any type to a driver Value.
 type ColumnConverter interface {
     // ColumnConverter returns a ValueConverter for the provided
     // column index. If the type of a specific column isn't known
@@ -162,12 +158,12 @@ type Rows interface {
     // the provided slice. The provided slice will be the same
     // size as the Columns() are wide.
     //
-    // The dest slice may be populated with only with values
-    // of subset types defined above, but excluding string.
+    // The dest slice may be populated only with
+    // a driver Value type, but excluding string.
     // All string values must be converted to []byte.
     //
     // Next should return io.EOF when there are no more rows.
-    Next(dest []interface{}) error
+    Next(dest []Value) error
 }
 
 // Tx is a transaction.
@@ -190,18 +186,19 @@ func (v RowsAffected) RowsAffected() (int64, error) {
     return int64(v), nil
 }
 
-// DDLSuccess is a pre-defined Result for drivers to return when a DDL
-// command succeeds.
-var DDLSuccess ddlSuccess
+// ResultNoRows is a pre-defined Result for drivers to return when a DDL
+// command (such as a CREATE TABLE) succeeds. It returns an error for both
+// LastInsertId and RowsAffected.
+var ResultNoRows noRows
 
-type ddlSuccess struct{}
+type noRows struct{}
 
-var _ Result = ddlSuccess{}
+var _ Result = noRows{}
 
-func (ddlSuccess) LastInsertId() (int64, error) {
+func (noRows) LastInsertId() (int64, error) {
     return 0, errors.New("no LastInsertId available after DDL statement")
 }
 
-func (ddlSuccess) RowsAffected() (int64, error) {
+func (noRows) RowsAffected() (int64, error) {
     return 0, errors.New("no RowsAffected available after DDL statement")
 }
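With the renaming above, a driver-side statement receives its arguments as []driver.Value and can report a successful DDL command with the predefined ResultNoRows. A hedged sketch of such a statement; the type and its fields are hypothetical, only the database/sql/driver API is assumed:

package mydriver

import (
    "database/sql/driver"
    "errors"
)

// ddlStmt is a hypothetical prepared DDL statement.
type ddlStmt struct{ query string }

func (s *ddlStmt) Close() error  { return nil }
func (s *ddlStmt) NumInput() int { return 0 }

// Exec gets its arguments already converted to driver Values by the sql
// package; a DDL command has neither LastInsertId nor RowsAffected, so it
// returns driver.ResultNoRows.
func (s *ddlStmt) Exec(args []driver.Value) (driver.Result, error) {
    // ... send s.query to the database here ...
    return driver.ResultNoRows, nil
}

func (s *ddlStmt) Query(args []driver.Value) (driver.Rows, error) {
    return nil, errors.New("ddlStmt does not return rows")
}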
@@ -17,28 +17,28 @@ import (
 // driver package to provide consistent implementations of conversions
 // between drivers. The ValueConverters have several uses:
 //
-// * converting from the subset types as provided by the sql package
+// * converting from the Value types as provided by the sql package
 // into a database table's specific column type and making sure it
 // fits, such as making sure a particular int64 fits in a
 // table's uint16 column.
 //
 // * converting a value as given from the database into one of the
-// subset types.
+// driver Value types.
 //
-// * by the sql package, for converting from a driver's subset type
+// * by the sql package, for converting from a driver's Value type
 // to a user's type in a scan.
 type ValueConverter interface {
-    // ConvertValue converts a value to a restricted subset type.
-    ConvertValue(v interface{}) (interface{}, error)
+    // ConvertValue converts a value to a driver Value.
+    ConvertValue(v interface{}) (Value, error)
 }
 
-// SubsetValuer is the interface providing the SubsetValue method.
+// Valuer is the interface providing the Value method.
 //
-// Types implementing SubsetValuer interface are able to convert
-// themselves to one of the driver's allowed subset values.
-type SubsetValuer interface {
-    // SubsetValue returns a driver parameter subset value.
-    SubsetValue() (interface{}, error)
+// Types implementing Valuer interface are able to convert
+// themselves to a driver Value.
+type Valuer interface {
+    // Value returns a driver Value.
+    Value() (Value, error)
 }
 
 // Bool is a ValueConverter that converts input values to bools.
@@ -59,7 +59,7 @@ var _ ValueConverter = boolType{}
 
 func (boolType) String() string { return "Bool" }
 
-func (boolType) ConvertValue(src interface{}) (interface{}, error) {
+func (boolType) ConvertValue(src interface{}) (Value, error) {
     switch s := src.(type) {
     case bool:
         return s, nil
@@ -104,7 +104,7 @@ type int32Type struct{}
 
 var _ ValueConverter = int32Type{}
 
-func (int32Type) ConvertValue(v interface{}) (interface{}, error) {
+func (int32Type) ConvertValue(v interface{}) (Value, error) {
     rv := reflect.ValueOf(v)
     switch rv.Kind() {
     case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
@@ -137,7 +137,7 @@ var String stringType
 
 type stringType struct{}
 
-func (stringType) ConvertValue(v interface{}) (interface{}, error) {
+func (stringType) ConvertValue(v interface{}) (Value, error) {
     switch v.(type) {
     case string, []byte:
         return v, nil
@@ -151,7 +151,7 @@ type Null struct {
     Converter ValueConverter
 }
 
-func (n Null) ConvertValue(v interface{}) (interface{}, error) {
+func (n Null) ConvertValue(v interface{}) (Value, error) {
     if v == nil {
         return nil, nil
     }
@@ -164,28 +164,17 @@ type NotNull struct {
     Converter ValueConverter
 }
 
-func (n NotNull) ConvertValue(v interface{}) (interface{}, error) {
+func (n NotNull) ConvertValue(v interface{}) (Value, error) {
     if v == nil {
         return nil, fmt.Errorf("nil value not allowed")
     }
     return n.Converter.ConvertValue(v)
 }
 
-// IsParameterSubsetType reports whether v is of a valid type for a
-// parameter. These types are:
-//
-// int64
-// float64
-// bool
-// nil
-// []byte
-// time.Time
-// string
-//
-// This is the same list as IsScanSubsetType, with the addition of
-// string.
-func IsParameterSubsetType(v interface{}) bool {
-    if IsScanSubsetType(v) {
+// IsValue reports whether v is a valid Value parameter type.
+// Unlike IsScanValue, IsValue permits the string type.
+func IsValue(v interface{}) bool {
+    if IsScanValue(v) {
         return true
     }
     if _, ok := v.(string); ok {
@@ -194,18 +183,9 @@ func IsParameterSubsetType(v interface{}) bool {
     return false
 }
 
-// IsScanSubsetType reports whether v is of a valid type for a
-// value populated by Rows.Next. These types are:
-//
-// int64
-// float64
-// bool
-// nil
-// []byte
-// time.Time
-//
-// This is the same list as IsParameterSubsetType, without string.
-func IsScanSubsetType(v interface{}) bool {
+// IsScanValue reports whether v is a valid Value scan type.
+// Unlike IsValue, IsScanValue does not permit the string type.
+func IsScanValue(v interface{}) bool {
     if v == nil {
         return true
     }
@@ -221,7 +201,7 @@ func IsScanSubsetType(v interface{}) bool {
 // ColumnConverter.
 //
 // DefaultParameterConverter returns the given value directly if
-// IsSubsetType(value). Otherwise integer type are converted to
+// IsValue(value). Otherwise integer type are converted to
 // int64, floats to float64, and strings to []byte. Other types are
 // an error.
 var DefaultParameterConverter defaultConverter
@@ -230,18 +210,18 @@ type defaultConverter struct{}
 
 var _ ValueConverter = defaultConverter{}
 
-func (defaultConverter) ConvertValue(v interface{}) (interface{}, error) {
-    if IsParameterSubsetType(v) {
+func (defaultConverter) ConvertValue(v interface{}) (Value, error) {
+    if IsValue(v) {
         return v, nil
     }
 
-    if svi, ok := v.(SubsetValuer); ok {
-        sv, err := svi.SubsetValue()
+    if svi, ok := v.(Valuer); ok {
+        sv, err := svi.Value()
         if err != nil {
             return nil, err
         }
-        if !IsParameterSubsetType(sv) {
-            return nil, fmt.Errorf("non-subset type %T returned from SubsetValue", sv)
+        if !IsValue(sv) {
+            return nil, fmt.Errorf("non-Value type %T returned from Value", sv)
         }
         return sv, nil
     }
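The Valuer interface introduced above is what user types implement so the sql package can convert them into one of the driver Value types before handing them to a driver. A minimal sketch with a hypothetical wrapper type:

package mytypes

import (
    "database/sql/driver"
    "strings"
)

// Email is a hypothetical type that normalizes itself before storage.
type Email string

// Value implements driver.Valuer; the returned string is one of the
// allowed driver Value parameter types.
func (e Email) Value() (driver.Value, error) {
    return strings.ToLower(string(e)), nil
}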
@@ -217,7 +217,7 @@ func (c *fakeConn) Close() error {
     return nil
 }
 
-func checkSubsetTypes(args []interface{}) error {
+func checkSubsetTypes(args []driver.Value) error {
     for n, arg := range args {
         switch arg.(type) {
         case int64, float64, bool, nil, []byte, string, time.Time:
@@ -228,7 +228,7 @@ func checkSubsetTypes(args []interface{}) error {
     return nil
 }
 
-func (c *fakeConn) Exec(query string, args []interface{}) (driver.Result, error) {
+func (c *fakeConn) Exec(query string, args []driver.Value) (driver.Result, error) {
     // This is an optional interface, but it's implemented here
     // just to check that all the args of of the proper types.
     // ErrSkip is returned so the caller acts as if we didn't
@@ -379,7 +379,7 @@ func (s *fakeStmt) Close() error {
 
 var errClosed = errors.New("fakedb: statement has been closed")
 
-func (s *fakeStmt) Exec(args []interface{}) (driver.Result, error) {
+func (s *fakeStmt) Exec(args []driver.Value) (driver.Result, error) {
     if s.closed {
         return nil, errClosed
     }
@@ -392,12 +392,12 @@ func (s *fakeStmt) Exec(args []interface{}) (driver.Result, error) {
     switch s.cmd {
     case "WIPE":
         db.wipe()
-        return driver.DDLSuccess, nil
+        return driver.ResultNoRows, nil
     case "CREATE":
         if err := db.createTable(s.table, s.colName, s.colType); err != nil {
             return nil, err
         }
-        return driver.DDLSuccess, nil
+        return driver.ResultNoRows, nil
     case "INSERT":
         return s.execInsert(args)
     }
@@ -405,7 +405,7 @@ func (s *fakeStmt) Exec(args []interface{}) (driver.Result, error) {
     return nil, fmt.Errorf("unimplemented statement Exec command type of %q", s.cmd)
 }
 
-func (s *fakeStmt) execInsert(args []interface{}) (driver.Result, error) {
+func (s *fakeStmt) execInsert(args []driver.Value) (driver.Result, error) {
     db := s.c.db
     if len(args) != s.placeholders {
         panic("error in pkg db; should only get here if size is correct")
@@ -441,7 +441,7 @@ func (s *fakeStmt) execInsert(args []interface{}) (driver.Result, error) {
     return driver.RowsAffected(1), nil
 }
 
-func (s *fakeStmt) Query(args []interface{}) (driver.Rows, error) {
+func (s *fakeStmt) Query(args []driver.Value) (driver.Rows, error) {
     if s.closed {
         return nil, errClosed
     }
@@ -548,7 +548,7 @@ func (rc *rowsCursor) Columns() []string {
     return rc.cols
 }
 
-func (rc *rowsCursor) Next(dest []interface{}) error {
+func (rc *rowsCursor) Next(dest []driver.Value) error {
     if rc.closed {
         return errors.New("fakedb: cursor is closed")
     }
@@ -62,8 +62,8 @@ func (ns *NullString) Scan(value interface{}) error {
     return convertAssign(&ns.String, value)
 }
 
-// SubsetValue implements the driver SubsetValuer interface.
-func (ns NullString) SubsetValue() (interface{}, error) {
+// Value implements the driver Valuer interface.
+func (ns NullString) Value() (driver.Value, error) {
     if !ns.Valid {
         return nil, nil
     }
@@ -88,8 +88,8 @@ func (n *NullInt64) Scan(value interface{}) error {
     return convertAssign(&n.Int64, value)
 }
 
-// SubsetValue implements the driver SubsetValuer interface.
-func (n NullInt64) SubsetValue() (interface{}, error) {
+// Value implements the driver Valuer interface.
+func (n NullInt64) Value() (driver.Value, error) {
     if !n.Valid {
         return nil, nil
     }
@@ -114,8 +114,8 @@ func (n *NullFloat64) Scan(value interface{}) error {
     return convertAssign(&n.Float64, value)
 }
 
-// SubsetValue implements the driver SubsetValuer interface.
-func (n NullFloat64) SubsetValue() (interface{}, error) {
+// Value implements the driver Valuer interface.
+func (n NullFloat64) Value() (driver.Value, error) {
     if !n.Valid {
         return nil, nil
     }
@@ -140,8 +140,8 @@ func (n *NullBool) Scan(value interface{}) error {
     return convertAssign(&n.Bool, value)
 }
 
-// SubsetValue implements the driver SubsetValuer interface.
-func (n NullBool) SubsetValue() (interface{}, error) {
+// Value implements the driver Valuer interface.
+func (n NullBool) Value() (driver.Value, error) {
     if !n.Valid {
         return nil, nil
     }
@@ -523,8 +523,13 @@ func (tx *Tx) Exec(query string, args ...interface{}) (Result, error) {
     }
     defer tx.releaseConn()
 
+    sargs, err := subsetTypeArgs(args)
+    if err != nil {
+        return nil, err
+    }
+
     if execer, ok := ci.(driver.Execer); ok {
-        resi, err := execer.Exec(query, args)
+        resi, err := execer.Exec(query, sargs)
         if err == nil {
             return result{resi}, nil
         }
@@ -539,11 +544,6 @@ func (tx *Tx) Exec(query string, args ...interface{}) (Result, error) {
     }
     defer sti.Close()
 
-    sargs, err := subsetTypeArgs(args)
-    if err != nil {
-        return nil, err
-    }
-
     resi, err := sti.Exec(sargs)
     if err != nil {
         return nil, err
@@ -618,19 +618,21 @@ func (s *Stmt) Exec(args ...interface{}) (Result, error) {
         return nil, fmt.Errorf("sql: expected %d arguments, got %d", want, len(args))
     }
 
+    sargs := make([]driver.Value, len(args))
+
     // Convert args to subset types.
     if cc, ok := si.(driver.ColumnConverter); ok {
         for n, arg := range args {
             // First, see if the value itself knows how to convert
             // itself to a driver type. For example, a NullString
             // struct changing into a string or nil.
-            if svi, ok := arg.(driver.SubsetValuer); ok {
-                sv, err := svi.SubsetValue()
+            if svi, ok := arg.(driver.Valuer); ok {
+                sv, err := svi.Value()
                 if err != nil {
-                    return nil, fmt.Errorf("sql: argument index %d from SubsetValue: %v", n, err)
+                    return nil, fmt.Errorf("sql: argument index %d from Value: %v", n, err)
                 }
-                if !driver.IsParameterSubsetType(sv) {
-                    return nil, fmt.Errorf("sql: argument index %d: non-subset type %T returned from SubsetValue", n, sv)
+                if !driver.IsValue(sv) {
+                    return nil, fmt.Errorf("sql: argument index %d: non-subset type %T returned from Value", n, sv)
                 }
                 arg = sv
             }
@@ -642,25 +644,25 @@ func (s *Stmt) Exec(args ...interface{}) (Result, error) {
             // truncated), or that a nil can't go into a NOT NULL
             // column before going across the network to get the
             // same error.
-            args[n], err = cc.ColumnConverter(n).ConvertValue(arg)
+            sargs[n], err = cc.ColumnConverter(n).ConvertValue(arg)
             if err != nil {
                 return nil, fmt.Errorf("sql: converting Exec argument #%d's type: %v", n, err)
             }
-            if !driver.IsParameterSubsetType(args[n]) {
+            if !driver.IsValue(sargs[n]) {
                 return nil, fmt.Errorf("sql: driver ColumnConverter error converted %T to unsupported type %T",
-                    arg, args[n])
+                    arg, sargs[n])
             }
         }
     } else {
         for n, arg := range args {
-            args[n], err = driver.DefaultParameterConverter.ConvertValue(arg)
+            sargs[n], err = driver.DefaultParameterConverter.ConvertValue(arg)
             if err != nil {
                 return nil, fmt.Errorf("sql: converting Exec argument #%d's type: %v", n, err)
             }
         }
     }
 
-    resi, err := si.Exec(args)
+    resi, err := si.Exec(sargs)
     if err != nil {
         return nil, err
     }
@@ -829,7 +831,7 @@ type Rows struct {
     rowsi driver.Rows
 
     closed    bool
-    lastcols  []interface{}
+    lastcols  []driver.Value
     lasterr   error
     closeStmt *Stmt // if non-nil, statement to Close on close
 }
@@ -846,7 +848,7 @@ func (rs *Rows) Next() bool {
         return false
     }
     if rs.lastcols == nil {
-        rs.lastcols = make([]interface{}, len(rs.rowsi.Columns()))
+        rs.lastcols = make([]driver.Value, len(rs.rowsi.Columns()))
     }
     rs.lasterr = rs.rowsi.Next(rs.lastcols)
     if rs.lasterr == io.EOF {
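After the changes above, the Null* types work in both directions: Scan fills them from a result row, and their new Value method (driver.Valuer) turns them back into a driver Value when used as a query argument. A usage sketch; the table, column, and "?" placeholder syntax are assumptions that depend on the actual driver:

package main

import "database/sql"

func roundTrip(db *sql.DB) error {
    var name sql.NullString
    // Scan: a string or NULL from the driver populates NullString.
    if err := db.QueryRow("SELECT name FROM t WHERE id = ?", 1).Scan(&name); err != nil {
        return err
    }
    // Valuer: passing NullString back sends either a string or SQL NULL.
    _, err := db.Exec("UPDATE t SET name = ? WHERE id = ?", name, 2)
    return err
}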
@@ -31,8 +31,9 @@ type Data struct {
 }
 
 // New returns a new Data object initialized from the given parameters.
-// Clients should typically use [TODO(rsc): method to be named later] instead of calling
-// New directly.
+// Rather than calling this function directly, clients should typically use
+// the DWARF method of the File type of the appropriate package debug/elf,
+// debug/macho, or debug/pe.
 //
 // The []byte arguments are the data from the corresponding debug section
 // in the object file; for example, for an ELF object, abbrev is the contents of
@@ -28,8 +28,13 @@ typedef struct my_struct {
     volatile int vi;
     char x : 1;
     int y : 4;
+    int z[0];
     long long array[40];
+    int zz[0];
 } t_my_struct;
+typedef struct my_struct1 {
+    int zz [1];
+} t_my_struct1;
 typedef union my_union {
     volatile int vi;
     char x : 1;
@@ -65,7 +70,8 @@ t_func_void_of_char *a9;
 t_func_void_of_void *a10;
 t_func_void_of_ptr_char_dots *a11;
 t_my_struct *a12;
-t_my_union *a12a;
+t_my_struct1 *a12a;
+t_my_union *a12b;
 t_my_enum *a13;
 t_my_list *a14;
 t_my_tree *a15;

Binary file not shown.
Binary file not shown.
@@ -426,6 +426,8 @@ func (d *Data) Type(off Offset) (Type, error) {
         t.StructName, _ = e.Val(AttrName).(string)
         t.Incomplete = e.Val(AttrDeclaration) != nil
         t.Field = make([]*StructField, 0, 8)
+        var lastFieldType Type
+        var lastFieldBitOffset int64
         for kid := next(); kid != nil; kid = next() {
             if kid.Tag == TagMember {
                 f := new(StructField)
@@ -444,11 +446,32 @@ func (d *Data) Type(off Offset) (Type, error) {
                     goto Error
                 }
             }
 
+            haveBitOffset := false
             f.Name, _ = kid.Val(AttrName).(string)
             f.ByteSize, _ = kid.Val(AttrByteSize).(int64)
-            f.BitOffset, _ = kid.Val(AttrBitOffset).(int64)
+            f.BitOffset, haveBitOffset = kid.Val(AttrBitOffset).(int64)
             f.BitSize, _ = kid.Val(AttrBitSize).(int64)
             t.Field = append(t.Field, f)
+
+            bito := f.BitOffset
+            if !haveBitOffset {
+                bito = f.ByteOffset * 8
+            }
+            if bito == lastFieldBitOffset && t.Kind != "union" {
+                // Last field was zero width. Fix array length.
+                // (DWARF writes out 0-length arrays as if they were 1-length arrays.)
+                zeroArray(lastFieldType)
+            }
+            lastFieldType = f.Type
+            lastFieldBitOffset = bito
+        }
+    }
+    if t.Kind != "union" {
+        b, ok := e.Val(AttrByteSize).(int64)
+        if ok && b*8 == lastFieldBitOffset {
+            // Final field must be zero width. Fix array length.
+            zeroArray(lastFieldType)
         }
     }
@@ -579,3 +602,14 @@ Error:
     delete(d.typeCache, off)
     return nil, err
 }
+
+func zeroArray(t Type) {
+    for {
+        at, ok := t.(*ArrayType)
+        if !ok {
+            break
+        }
+        at.Count = 0
+        t = at.Type
+    }
+}
@@ -25,13 +25,22 @@ var typedefTests = map[string]string{
     "t_func_void_of_char": "func(char) void",
     "t_func_void_of_void": "func() void",
     "t_func_void_of_ptr_char_dots": "func(*char, ...) void",
-    "t_my_struct": "struct my_struct {vi volatile int@0; x char@4 : 1@7; y int@4 : 4@27; array [40]long long int@8}",
+    "t_my_struct": "struct my_struct {vi volatile int@0; x char@4 : 1@7; y int@4 : 4@27; z [0]int@8; array [40]long long int@8; zz [0]int@328}",
+    "t_my_struct1": "struct my_struct1 {zz [1]int@0}",
     "t_my_union": "union my_union {vi volatile int@0; x char@0 : 1@7; y int@0 : 4@28; array [40]long long int@0}",
     "t_my_enum": "enum my_enum {e1=1; e2=2; e3=-5; e4=1000000000000000}",
     "t_my_list": "struct list {val short int@0; next *t_my_list@8}",
     "t_my_tree": "struct tree {left *struct tree@0; right *struct tree@8; val long long unsigned int@16}",
 }
 
+// As Apple converts gcc to a clang-based front end
+// they keep breaking the DWARF output. This map lists the
+// conversion from real answer to Apple answer.
+var machoBug = map[string]string{
+    "func(*char, ...) void": "func(*char) void",
+    "enum my_enum {e1=1; e2=2; e3=-5; e4=1000000000000000}": "enum my_enum {e1=1; e2=2; e3=-5; e4=-1530494976}",
+}
+
 func elfData(t *testing.T, name string) *Data {
     f, err := elf.Open(name)
     if err != nil {
@@ -58,13 +67,13 @@ func machoData(t *testing.T, name string) *Data {
     return d
 }
 
-func TestTypedefsELF(t *testing.T) { testTypedefs(t, elfData(t, "testdata/typedef.elf")) }
+func TestTypedefsELF(t *testing.T) { testTypedefs(t, elfData(t, "testdata/typedef.elf"), "elf") }
 
 func TestTypedefsMachO(t *testing.T) {
-    testTypedefs(t, machoData(t, "testdata/typedef.macho"))
+    testTypedefs(t, machoData(t, "testdata/typedef.macho"), "macho")
 }
 
-func testTypedefs(t *testing.T, d *Data) {
+func testTypedefs(t *testing.T, d *Data, kind string) {
     r := d.Reader()
     seen := make(map[string]bool)
     for {
@@ -93,7 +102,7 @@ func testTypedefs(t *testing.T, d *Data) {
             t.Errorf("multiple definitions for %s", t1.Name)
         }
         seen[t1.Name] = true
-        if typstr != want {
+        if typstr != want && (kind != "macho" || typstr != machoBug[want]) {
             t.Errorf("%s:\n\thave %s\n\twant %s", t1.Name, typstr, want)
         }
     }
@ -6,15 +6,37 @@ package gosym
import (
	"debug/elf"
	"fmt"
	"os"
	"os/exec"
	"runtime"
	"strings"
	"testing"
)

var pclinetestBinary string

func dotest() bool {
	// For now, only works on ELF platforms.
	// TODO: convert to work with new go tool
	if runtime.GOOS != "linux" || runtime.GOARCH != "amd64" {
	return false && runtime.GOOS == "linux" && runtime.GOARCH == "amd64"
		return false
	}
	if pclinetestBinary != "" {
		return true
	}
	// This command builds pclinetest from pclinetest.asm;
	// the resulting binary looks like it was built from pclinetest.s,
	// but we have renamed it to keep it away from the go tool.
	pclinetestBinary = os.TempDir() + "/pclinetest"
	command := fmt.Sprintf("go tool 6a -o %s.6 pclinetest.asm && go tool 6l -E main -o %s %s.6",
		pclinetestBinary, pclinetestBinary, pclinetestBinary)
	cmd := exec.Command("sh", "-c", command)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		panic(err)
	}
	return true
}

func getTable(t *testing.T) *Table {
@ -149,7 +171,7 @@ func TestPCLine(t *testing.T) {
		return
	}

	f, tab := crack("_test/pclinetest", t)
	f, tab := crack(pclinetestBinary, t)
	text := f.Section(".text")
	textdat, err := text.Data()
	if err != nil {

@ -163,10 +185,13 @@ func TestPCLine(t *testing.T) {
		file, line, fn := tab.PCToLine(pc)
		off := pc - text.Addr // TODO(rsc): should not need off; bug in 8g
		wantLine += int(textdat[off])
		t.Logf("off is %d", off)
		if fn == nil {
			t.Errorf("failed to get line of PC %#x", pc)
		} else if len(file) < 12 || file[len(file)-12:] != "pclinetest.s" || line != wantLine || fn != sym {
		} else if !strings.HasSuffix(file, "pclinetest.asm") {
			t.Errorf("expected %s:%d (%s) at PC %#x, got %s:%d (%s)", "pclinetest.s", wantLine, sym.Name, pc, file, line, fn.Name)
			t.Errorf("expected %s (%s) at PC %#x, got %s (%s)", "pclinetest.asm", sym.Name, pc, file, fn.Name)
		} else if line != wantLine || fn != sym {
			t.Errorf("expected :%d (%s) at PC %#x, got :%d (%s)", wantLine, sym.Name, pc, line, fn.Name)
		}
	}
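As background for the crack/PCToLine calls above, a minimal standalone sketch of how such a symbol and line table is usually built with debug/elf and debug/gosym; the binary path, section names, and error handling here are illustrative assumptions, not code from this patch.

package main

import (
	"debug/elf"
	"debug/gosym"
	"fmt"
	"log"
)

func main() {
	// Open an ELF binary and build a gosym.Table, roughly what a helper
	// like crack does in the test above (assumed layout, not the test's code).
	f, err := elf.Open("/path/to/binary") // hypothetical path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	text := f.Section(".text")
	symtab := f.Section(".gosymtab")
	pclntab := f.Section(".gopclntab")
	if text == nil || symtab == nil || pclntab == nil {
		log.Fatal("missing .text, .gosymtab or .gopclntab section")
	}
	symdat, err := symtab.Data()
	if err != nil {
		log.Fatal(err)
	}
	pclndat, err := pclntab.Data()
	if err != nil {
		log.Fatal(err)
	}

	tab, err := gosym.NewTable(symdat, gosym.NewLineTable(pclndat, text.Addr))
	if err != nil {
		log.Fatal(err)
	}

	// Map a PC back to file, line and function, as TestPCLine does.
	file, line, fn := tab.PCToLine(text.Addr)
	fmt.Printf("%s:%d %v\n", file, line, fn)
}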
||||||
|
|
@ -464,7 +464,7 @@ func allocate(rtyp reflect.Type, p uintptr, indir int) uintptr {
|
||||||
// decodeSingle decodes a top-level value that is not a struct and stores it through p.
|
// decodeSingle decodes a top-level value that is not a struct and stores it through p.
|
||||||
// Such values are preceded by a zero, making them have the memory layout of a
|
// Such values are preceded by a zero, making them have the memory layout of a
|
||||||
// struct field (although with an illegal field number).
|
// struct field (although with an illegal field number).
|
||||||
func (dec *Decoder) decodeSingle(engine *decEngine, ut *userTypeInfo, basep uintptr) (err error) {
|
func (dec *Decoder) decodeSingle(engine *decEngine, ut *userTypeInfo, basep uintptr) {
|
||||||
state := dec.newDecoderState(&dec.buf)
|
state := dec.newDecoderState(&dec.buf)
|
||||||
state.fieldnum = singletonField
|
state.fieldnum = singletonField
|
||||||
delta := int(state.decodeUint())
|
delta := int(state.decodeUint())
|
||||||
|
|
@ -473,7 +473,7 @@ func (dec *Decoder) decodeSingle(engine *decEngine, ut *userTypeInfo, basep uint
|
||||||
}
|
}
|
||||||
instr := &engine.instr[singletonField]
|
instr := &engine.instr[singletonField]
|
||||||
if instr.indir != ut.indir {
|
if instr.indir != ut.indir {
|
||||||
return errors.New("gob: internal error: inconsistent indirection")
|
errorf("internal error: inconsistent indirection instr %d ut %d", instr.indir, ut.indir)
|
||||||
}
|
}
|
||||||
ptr := unsafe.Pointer(basep) // offset will be zero
|
ptr := unsafe.Pointer(basep) // offset will be zero
|
||||||
if instr.indir > 1 {
|
if instr.indir > 1 {
|
||||||
|
|
@ -481,10 +481,9 @@ func (dec *Decoder) decodeSingle(engine *decEngine, ut *userTypeInfo, basep uint
|
||||||
}
|
}
|
||||||
instr.op(instr, state, ptr)
|
instr.op(instr, state, ptr)
|
||||||
dec.freeDecoderState(state)
|
dec.freeDecoderState(state)
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// decodeSingle decodes a top-level struct and stores it through p.
|
// decodeStruct decodes a top-level struct and stores it through p.
|
||||||
// Indir is for the value, not the type. At the time of the call it may
|
// Indir is for the value, not the type. At the time of the call it may
|
||||||
// differ from ut.indir, which was computed when the engine was built.
|
// differ from ut.indir, which was computed when the engine was built.
|
||||||
// This state cannot arise for decodeSingle, which is called directly
|
// This state cannot arise for decodeSingle, which is called directly
|
||||||
|
|
@ -839,11 +838,10 @@ func (dec *Decoder) decOpFor(wireId typeId, rt reflect.Type, name string, inProg
|
||||||
}
|
}
|
||||||
|
|
||||||
case reflect.Map:
|
case reflect.Map:
|
||||||
name = "element of " + name
|
|
||||||
keyId := dec.wireType[wireId].MapT.Key
|
keyId := dec.wireType[wireId].MapT.Key
|
||||||
elemId := dec.wireType[wireId].MapT.Elem
|
elemId := dec.wireType[wireId].MapT.Elem
|
||||||
keyOp, keyIndir := dec.decOpFor(keyId, t.Key(), name, inProgress)
|
keyOp, keyIndir := dec.decOpFor(keyId, t.Key(), "key of "+name, inProgress)
|
||||||
elemOp, elemIndir := dec.decOpFor(elemId, t.Elem(), name, inProgress)
|
elemOp, elemIndir := dec.decOpFor(elemId, t.Elem(), "element of "+name, inProgress)
|
||||||
ovfl := overflow(name)
|
ovfl := overflow(name)
|
||||||
op = func(i *decInstr, state *decoderState, p unsafe.Pointer) {
|
op = func(i *decInstr, state *decoderState, p unsafe.Pointer) {
|
||||||
up := unsafe.Pointer(p)
|
up := unsafe.Pointer(p)
|
||||||
|
|
@ -1151,7 +1149,7 @@ func (dec *Decoder) compileDec(remoteId typeId, ut *userTypeInfo) (engine *decEn
|
||||||
|
|
||||||
// getDecEnginePtr returns the engine for the specified type.
|
// getDecEnginePtr returns the engine for the specified type.
|
||||||
func (dec *Decoder) getDecEnginePtr(remoteId typeId, ut *userTypeInfo) (enginePtr **decEngine, err error) {
|
func (dec *Decoder) getDecEnginePtr(remoteId typeId, ut *userTypeInfo) (enginePtr **decEngine, err error) {
|
||||||
rt := ut.base
|
rt := ut.user
|
||||||
decoderMap, ok := dec.decoderCache[rt]
|
decoderMap, ok := dec.decoderCache[rt]
|
||||||
if !ok {
|
if !ok {
|
||||||
decoderMap = make(map[typeId]**decEngine)
|
decoderMap = make(map[typeId]**decEngine)
|
||||||
|
|
|
||||||
|
|
@ -685,3 +685,54 @@ func TestSliceIncompatibility(t *testing.T) {
|
||||||
t.Error("expected compatibility error")
|
t.Error("expected compatibility error")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Mutually recursive slices of structs caused problems.
|
||||||
|
type Bug3 struct {
|
||||||
|
Num int
|
||||||
|
Children []*Bug3
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGobPtrSlices(t *testing.T) {
|
||||||
|
in := []*Bug3{
|
||||||
|
&Bug3{1, nil},
|
||||||
|
&Bug3{2, nil},
|
||||||
|
}
|
||||||
|
b := new(bytes.Buffer)
|
||||||
|
err := NewEncoder(b).Encode(&in)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("encode:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var out []*Bug3
|
||||||
|
err = NewDecoder(b).Decode(&out)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("decode:", err)
|
||||||
|
}
|
||||||
|
if !reflect.DeepEqual(in, out) {
|
||||||
|
t.Fatal("got %v; wanted %v", out, in)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// getDecEnginePtr cached engine for ut.base instead of ut.user so we passed
|
||||||
|
// a *map and then tried to reuse its engine to decode the inner map.
|
||||||
|
func TestPtrToMapOfMap(t *testing.T) {
|
||||||
|
Register(make(map[string]interface{}))
|
||||||
|
subdata := make(map[string]interface{})
|
||||||
|
subdata["bar"] = "baz"
|
||||||
|
data := make(map[string]interface{})
|
||||||
|
data["foo"] = subdata
|
||||||
|
|
||||||
|
b := new(bytes.Buffer)
|
||||||
|
err := NewEncoder(b).Encode(data)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("encode:", err)
|
||||||
|
}
|
||||||
|
var newData map[string]interface{}
|
||||||
|
err = NewDecoder(b).Decode(&newData)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("decode:", err)
|
||||||
|
}
|
||||||
|
if !reflect.DeepEqual(data, newData) {
|
||||||
|
t.Fatalf("expected %v got %v", data, newData)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -152,6 +152,10 @@ var idToType = make(map[typeId]gobType)
|
||||||
var builtinIdToType map[typeId]gobType // set in init() after builtins are established
|
var builtinIdToType map[typeId]gobType // set in init() after builtins are established
|
||||||
|
|
||||||
func setTypeId(typ gobType) {
|
func setTypeId(typ gobType) {
|
||||||
|
// When building recursive types, someone may get there before us.
|
||||||
|
if typ.id() != 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
nextId++
|
nextId++
|
||||||
typ.setId(nextId)
|
typ.setId(nextId)
|
||||||
idToType[nextId] = typ
|
idToType[nextId] = typ
|
||||||
|
|
@ -346,6 +350,11 @@ func newSliceType(name string) *sliceType {
|
||||||
func (s *sliceType) init(elem gobType) {
|
func (s *sliceType) init(elem gobType) {
|
||||||
// Set our type id before evaluating the element's, in case it's our own.
|
// Set our type id before evaluating the element's, in case it's our own.
|
||||||
setTypeId(s)
|
setTypeId(s)
|
||||||
|
// See the comments about ids in newTypeObject. Only slices and
|
||||||
|
// structs have mutual recursion.
|
||||||
|
if elem.id() == 0 {
|
||||||
|
setTypeId(elem)
|
||||||
|
}
|
||||||
s.Elem = elem.id()
|
s.Elem = elem.id()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -503,6 +512,13 @@ func newTypeObject(name string, ut *userTypeInfo, rt reflect.Type) (gobType, err
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
// Some mutually recursive types can cause us to be here while
|
||||||
|
// still defining the element. Fix the element type id here.
|
||||||
|
// We could do this more neatly by setting the id at the start of
|
||||||
|
// building every type, but that would break binary compatibility.
|
||||||
|
if gt.id() == 0 {
|
||||||
|
setTypeId(gt)
|
||||||
|
}
|
||||||
st.Field = append(st.Field, &fieldType{f.Name, gt.id()})
|
st.Field = append(st.Field, &fieldType{f.Name, gt.id()})
|
||||||
}
|
}
|
||||||
return st, nil
|
return st, nil
|
||||||
|
|
|
||||||
|
|
@ -496,6 +496,12 @@ func (d *decodeState) object(v reflect.Value) {
|
||||||
// Pretend this field doesn't exist.
|
// Pretend this field doesn't exist.
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
if sf.Anonymous {
|
||||||
|
// Pretend this field doesn't exist,
|
||||||
|
// so that we can do a good job with
|
||||||
|
// these in a later version.
|
||||||
|
continue
|
||||||
|
}
|
||||||
// First, tag match
|
// First, tag match
|
||||||
tagName, _ := parseTag(tag)
|
tagName, _ := parseTag(tag)
|
||||||
if tagName == key {
|
if tagName == key {
|
||||||
|
|
@ -963,3 +969,11 @@ func unquoteBytes(s []byte) (t []byte, ok bool) {
|
||||||
}
|
}
|
||||||
return b[0:w], true
|
return b[0:w], true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// The following is issue 3069.
|
||||||
|
|
||||||
|
// BUG(rsc): This package ignores anonymous (embedded) struct fields
|
||||||
|
// during encoding and decoding. A future version may assign meaning
|
||||||
|
// to them. To force an anonymous field to be ignored in all future
|
||||||
|
// versions of this package, use an explicit `json:"-"` tag in the struct
|
||||||
|
// definition.
|
||||||
|
|
|
||||||
|
|
@ -619,3 +619,32 @@ func TestRefUnmarshal(t *testing.T) {
|
||||||
t.Errorf("got %+v, want %+v", got, want)
|
t.Errorf("got %+v, want %+v", got, want)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Test that anonymous fields are ignored.
|
||||||
|
// We may assign meaning to them later.
|
||||||
|
func TestAnonymous(t *testing.T) {
|
||||||
|
type S struct {
|
||||||
|
T
|
||||||
|
N int
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := Marshal(new(S))
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Marshal: %v", err)
|
||||||
|
}
|
||||||
|
want := `{"N":0}`
|
||||||
|
if string(data) != want {
|
||||||
|
t.Fatalf("Marshal = %#q, want %#q", string(data), want)
|
||||||
|
}
|
||||||
|
|
||||||
|
var s S
|
||||||
|
if err := Unmarshal([]byte(`{"T": 1, "T": {"Y": 1}, "N": 2}`), &s); err != nil {
|
||||||
|
t.Fatalf("Unmarshal: %v", err)
|
||||||
|
}
|
||||||
|
if s.N != 2 {
|
||||||
|
t.Fatal("Unmarshal: did not set N")
|
||||||
|
}
|
||||||
|
if s.T.Y != 0 {
|
||||||
|
t.Fatal("Unmarshal: did set T.Y")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -538,6 +538,11 @@ func encodeFields(t reflect.Type) []encodeField {
		if f.PkgPath != "" {
			continue
		}
		if f.Anonymous {
			// We want to do a better job with these later,
			// so for now pretend they don't exist.
			continue
		}
		var ef encodeField
		ef.i = i
		ef.tag = f.Name
@ -57,35 +57,14 @@ const (
|
||||||
// if the field value is empty. The empty values are false, 0, any
|
// if the field value is empty. The empty values are false, 0, any
|
||||||
// nil pointer or interface value, and any array, slice, map, or
|
// nil pointer or interface value, and any array, slice, map, or
|
||||||
// string of length zero.
|
// string of length zero.
|
||||||
|
// - a non-pointer anonymous struct field is handled as if the
|
||||||
|
// fields of its value were part of the outer struct.
|
||||||
//
|
//
|
||||||
// If a field uses a tag "a>b>c", then the element c will be nested inside
|
// If a field uses a tag "a>b>c", then the element c will be nested inside
|
||||||
// parent elements a and b. Fields that appear next to each other that name
|
// parent elements a and b. Fields that appear next to each other that name
|
||||||
// the same parent will be enclosed in one XML element. For example:
|
// the same parent will be enclosed in one XML element.
|
||||||
//
|
//
|
||||||
// type Result struct {
|
// See MarshalIndent for an example.
|
||||||
// XMLName xml.Name `xml:"result"`
|
|
||||||
// Id int `xml:"id,attr"`
|
|
||||||
// FirstName string `xml:"person>name>first"`
|
|
||||||
// LastName string `xml:"person>name>last"`
|
|
||||||
// Age int `xml:"person>age"`
|
|
||||||
// Height float `xml:"person>height,omitempty"`
|
|
||||||
// Married bool `xml:"person>married"`
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// xml.Marshal(&Result{Id: 13, FirstName: "John", LastName: "Doe", Age: 42})
|
|
||||||
//
|
|
||||||
// would be marshalled as:
|
|
||||||
//
|
|
||||||
// <result>
|
|
||||||
// <person id="13">
|
|
||||||
// <name>
|
|
||||||
// <first>John</first>
|
|
||||||
// <last>Doe</last>
|
|
||||||
// </name>
|
|
||||||
// <age>42</age>
|
|
||||||
// <married>false</married>
|
|
||||||
// </person>
|
|
||||||
// </result>
|
|
||||||
//
|
//
|
||||||
// Marshal will return an error if asked to marshal a channel, function, or map.
|
// Marshal will return an error if asked to marshal a channel, function, or map.
|
||||||
func Marshal(v interface{}) ([]byte, error) {
|
func Marshal(v interface{}) ([]byte, error) {
|
||||||
|
|
@ -96,6 +75,22 @@ func Marshal(v interface{}) ([]byte, error) {
	return b.Bytes(), nil
}

// MarshalIndent works like Marshal, but each XML element begins on a new
// indented line that starts with prefix and is followed by one or more
// copies of indent according to the nesting depth.
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
	var b bytes.Buffer
	enc := NewEncoder(&b)
	enc.prefix = prefix
	enc.indent = indent
	err := enc.marshalValue(reflect.ValueOf(v), nil)
	enc.Flush()
	if err != nil {
		return nil, err
	}
	return b.Bytes(), nil
}
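A minimal sketch of how the new MarshalIndent entry point could be called from user code; the Person type and its field tags are hypothetical and only illustrate the prefix/indent behavior.

package main

import (
	"encoding/xml"
	"fmt"
)

type Person struct {
	XMLName xml.Name `xml:"person"`
	Name    string   `xml:"name"`
	Age     int      `xml:"age"`
}

func main() {
	// Empty prefix, two-space indent: each nested element starts a new line.
	out, err := xml.MarshalIndent(&Person{Name: "Grace", Age: 40}, "", "  ")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(string(out))
	// Expected shape (roughly):
	// <person>
	//   <name>Grace</name>
	//   <age>40</age>
	// </person>
}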
// An Encoder writes XML data to an output stream.
|
// An Encoder writes XML data to an output stream.
|
||||||
type Encoder struct {
|
type Encoder struct {
|
||||||
printer
|
printer
|
||||||
|
|
@ -103,7 +98,7 @@ type Encoder struct {
|
||||||
|
|
||||||
// NewEncoder returns a new encoder that writes to w.
|
// NewEncoder returns a new encoder that writes to w.
|
||||||
func NewEncoder(w io.Writer) *Encoder {
|
func NewEncoder(w io.Writer) *Encoder {
|
||||||
return &Encoder{printer{bufio.NewWriter(w)}}
|
return &Encoder{printer{Writer: bufio.NewWriter(w)}}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Encode writes the XML encoding of v to the stream.
|
// Encode writes the XML encoding of v to the stream.
|
||||||
|
|
@ -118,8 +113,14 @@ func (enc *Encoder) Encode(v interface{}) error {
|
||||||
|
|
||||||
type printer struct {
|
type printer struct {
|
||||||
*bufio.Writer
|
*bufio.Writer
|
||||||
|
indent string
|
||||||
|
prefix string
|
||||||
|
depth int
|
||||||
|
indentedIn bool
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// marshalValue writes one or more XML elements representing val.
|
||||||
|
// If val was obtained from a struct field, finfo must have its details.
|
||||||
func (p *printer) marshalValue(val reflect.Value, finfo *fieldInfo) error {
|
func (p *printer) marshalValue(val reflect.Value, finfo *fieldInfo) error {
|
||||||
if !val.IsValid() {
|
if !val.IsValid() {
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -177,6 +178,7 @@ func (p *printer) marshalValue(val reflect.Value, finfo *fieldInfo) error {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
p.writeIndent(1)
|
||||||
p.WriteByte('<')
|
p.WriteByte('<')
|
||||||
p.WriteString(name)
|
p.WriteString(name)
|
||||||
|
|
||||||
|
|
@ -216,6 +218,7 @@ func (p *printer) marshalValue(val reflect.Value, finfo *fieldInfo) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
p.writeIndent(-1)
|
||||||
p.WriteByte('<')
|
p.WriteByte('<')
|
||||||
p.WriteByte('/')
|
p.WriteByte('/')
|
||||||
p.WriteString(name)
|
p.WriteString(name)
|
||||||
|
|
@ -294,6 +297,7 @@ func (p *printer) marshalStruct(tinfo *typeInfo, val reflect.Value) error {
|
||||||
if vf.Len() == 0 {
|
if vf.Len() == 0 {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
p.writeIndent(0)
|
||||||
p.WriteString("<!--")
|
p.WriteString("<!--")
|
||||||
dashDash := false
|
dashDash := false
|
||||||
dashLast := false
|
dashLast := false
|
||||||
|
|
@ -352,6 +356,33 @@ func (p *printer) marshalStruct(tinfo *typeInfo, val reflect.Value) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (p *printer) writeIndent(depthDelta int) {
|
||||||
|
if len(p.prefix) == 0 && len(p.indent) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if depthDelta < 0 {
|
||||||
|
p.depth--
|
||||||
|
if p.indentedIn {
|
||||||
|
p.indentedIn = false
|
||||||
|
return
|
||||||
|
}
|
||||||
|
p.indentedIn = false
|
||||||
|
}
|
||||||
|
p.WriteByte('\n')
|
||||||
|
if len(p.prefix) > 0 {
|
||||||
|
p.WriteString(p.prefix)
|
||||||
|
}
|
||||||
|
if len(p.indent) > 0 {
|
||||||
|
for i := 0; i < p.depth; i++ {
|
||||||
|
p.WriteString(p.indent)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if depthDelta > 0 {
|
||||||
|
p.depth++
|
||||||
|
p.indentedIn = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
type parentStack struct {
|
type parentStack struct {
|
||||||
*printer
|
*printer
|
||||||
stack []string
|
stack []string
|
||||||
|
|
@ -367,20 +398,20 @@ func (s *parentStack) trim(parents []string) {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for i := len(s.stack) - 1; i >= split; i-- {
|
for i := len(s.stack) - 1; i >= split; i-- {
|
||||||
|
s.writeIndent(-1)
|
||||||
s.WriteString("</")
|
s.WriteString("</")
|
||||||
s.WriteString(s.stack[i])
|
s.WriteString(s.stack[i])
|
||||||
s.WriteByte('>')
|
s.WriteByte('>')
|
||||||
}
|
}
|
||||||
|
|
||||||
s.stack = parents[:split]
|
s.stack = parents[:split]
|
||||||
}
|
}
|
||||||
|
|
||||||
// push adds parent elements to the stack and writes open tags.
|
// push adds parent elements to the stack and writes open tags.
|
||||||
func (s *parentStack) push(parents []string) {
|
func (s *parentStack) push(parents []string) {
|
||||||
for i := 0; i < len(parents); i++ {
|
for i := 0; i < len(parents); i++ {
|
||||||
s.WriteString("<")
|
s.writeIndent(1)
|
||||||
|
s.WriteByte('<')
|
||||||
s.WriteString(parents[i])
|
s.WriteString(parents[i])
|
||||||
s.WriteByte('>')
|
s.WriteByte('>')
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -25,58 +25,6 @@ import (
|
||||||
// slice, or string. Well-formed data that does not fit into v is
|
// slice, or string. Well-formed data that does not fit into v is
|
||||||
// discarded.
|
// discarded.
|
||||||
//
|
//
|
||||||
// For example, given these definitions:
|
|
||||||
//
|
|
||||||
// type Email struct {
|
|
||||||
// Where string `xml:",attr"`
|
|
||||||
// Addr string
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// type Result struct {
|
|
||||||
// XMLName xml.Name `xml:"result"`
|
|
||||||
// Name string
|
|
||||||
// Phone string
|
|
||||||
// Email []Email
|
|
||||||
// Groups []string `xml:"group>value"`
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// result := Result{Name: "name", Phone: "phone", Email: nil}
|
|
||||||
//
|
|
||||||
// unmarshalling the XML input
|
|
||||||
//
|
|
||||||
// <result>
|
|
||||||
// <email where="home">
|
|
||||||
// <addr>gre@example.com</addr>
|
|
||||||
// </email>
|
|
||||||
// <email where='work'>
|
|
||||||
// <addr>gre@work.com</addr>
|
|
||||||
// </email>
|
|
||||||
// <name>Grace R. Emlin</name>
|
|
||||||
// <group>
|
|
||||||
// <value>Friends</value>
|
|
||||||
// <value>Squash</value>
|
|
||||||
// </group>
|
|
||||||
// <address>123 Main Street</address>
|
|
||||||
// </result>
|
|
||||||
//
|
|
||||||
// via Unmarshal(data, &result) is equivalent to assigning
|
|
||||||
//
|
|
||||||
// r = Result{
|
|
||||||
// xml.Name{Local: "result"},
|
|
||||||
// "Grace R. Emlin", // name
|
|
||||||
// "phone", // no phone given
|
|
||||||
// []Email{
|
|
||||||
// Email{"home", "gre@example.com"},
|
|
||||||
// Email{"work", "gre@work.com"},
|
|
||||||
// },
|
|
||||||
// []string{"Friends", "Squash"},
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// Note that the field r.Phone has not been modified and
|
|
||||||
// that the XML <address> element was discarded. Also, the field
|
|
||||||
// Groups was assigned considering the element path provided in the
|
|
||||||
// field tag.
|
|
||||||
//
|
|
||||||
// Because Unmarshal uses the reflect package, it can only assign
|
// Because Unmarshal uses the reflect package, it can only assign
|
||||||
// to exported (upper case) fields. Unmarshal uses a case-sensitive
|
// to exported (upper case) fields. Unmarshal uses a case-sensitive
|
||||||
// comparison to match XML element names to tag values and struct
|
// comparison to match XML element names to tag values and struct
|
||||||
|
|
@ -133,6 +81,9 @@ import (
|
||||||
// of the above rules and the struct has a field with tag ",any",
|
// of the above rules and the struct has a field with tag ",any",
|
||||||
// unmarshal maps the sub-element to that struct field.
|
// unmarshal maps the sub-element to that struct field.
|
||||||
//
|
//
|
||||||
|
// * A non-pointer anonymous struct field is handled as if the
|
||||||
|
// fields of its value were part of the outer struct.
|
||||||
|
//
|
||||||
// * A struct field with tag "-" is never unmarshalled into.
|
// * A struct field with tag "-" is never unmarshalled into.
|
||||||
//
|
//
|
||||||
// Unmarshal maps an XML element to a string or []byte by saving the
|
// Unmarshal maps an XML element to a string or []byte by saving the
|
||||||
|
|
|
||||||
|
|
@ -5,29 +5,49 @@
|
||||||
package errors_test
|
package errors_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
. "errors"
|
"errors"
|
||||||
|
"fmt"
|
||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestNewEqual(t *testing.T) {
|
func TestNewEqual(t *testing.T) {
|
||||||
// Different allocations should not be equal.
|
// Different allocations should not be equal.
|
||||||
if New("abc") == New("abc") {
|
if errors.New("abc") == errors.New("abc") {
|
||||||
t.Errorf(`New("abc") == New("abc")`)
|
t.Errorf(`New("abc") == New("abc")`)
|
||||||
}
|
}
|
||||||
if New("abc") == New("xyz") {
|
if errors.New("abc") == errors.New("xyz") {
|
||||||
t.Errorf(`New("abc") == New("xyz")`)
|
t.Errorf(`New("abc") == New("xyz")`)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Same allocation should be equal to itself (not crash).
|
// Same allocation should be equal to itself (not crash).
|
||||||
err := New("jkl")
|
err := errors.New("jkl")
|
||||||
if err != err {
|
if err != err {
|
||||||
t.Errorf(`err != err`)
|
t.Errorf(`err != err`)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestErrorMethod(t *testing.T) {
|
func TestErrorMethod(t *testing.T) {
|
||||||
err := New("abc")
|
err := errors.New("abc")
|
||||||
if err.Error() != "abc" {
|
if err.Error() != "abc" {
|
||||||
t.Errorf(`New("abc").Error() = %q, want %q`, err.Error(), "abc")
|
t.Errorf(`New("abc").Error() = %q, want %q`, err.Error(), "abc")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func ExampleNew() {
|
||||||
|
err := errors.New("emit macho dwarf: elf header corrupted")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Print(err)
|
||||||
|
}
|
||||||
|
// Output: emit macho dwarf: elf header corrupted
|
||||||
|
}
|
||||||
|
|
||||||
|
// The fmt package's Errorf function lets us use the package's formatting
|
||||||
|
// features to create descriptive error messages.
|
||||||
|
func ExampleNew_errorf() {
|
||||||
|
const name, id = "bimmler", 17
|
||||||
|
err := fmt.Errorf("user %q (id %d) not found", name, id)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Print(err)
|
||||||
|
}
|
||||||
|
// Output: user "bimmler" (id 17) not found
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,7 @@
|
||||||
package inotify
|
package inotify
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"io/ioutil"
|
||||||
"os"
|
"os"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
@ -16,16 +17,19 @@ func TestInotifyEvents(t *testing.T) {
|
||||||
// Create an inotify watcher instance and initialize it
|
// Create an inotify watcher instance and initialize it
|
||||||
watcher, err := NewWatcher()
|
watcher, err := NewWatcher()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("NewWatcher() failed: %s", err)
|
t.Fatalf("NewWatcher failed: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Logf("NEEDS TO BE CONVERTED TO NEW GO TOOL") // TODO
|
dir, err := ioutil.TempDir("", "inotify")
|
||||||
return
|
if err != nil {
|
||||||
|
t.Fatalf("TempDir failed: %s", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(dir)
|
||||||
|
|
||||||
// Add a watch for "_test"
|
// Add a watch for "_test"
|
||||||
err = watcher.Watch("_test")
|
err = watcher.Watch(dir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("Watcher.Watch() failed: %s", err)
|
t.Fatalf("Watch failed: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Receive errors on the error channel on a separate goroutine
|
// Receive errors on the error channel on a separate goroutine
|
||||||
|
|
@ -35,7 +39,7 @@ func TestInotifyEvents(t *testing.T) {
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
const testFile string = "_test/TestInotifyEvents.testfile"
|
testFile := dir + "/TestInotifyEvents.testfile"
|
||||||
|
|
||||||
// Receive events on the event channel on a separate goroutine
|
// Receive events on the event channel on a separate goroutine
|
||||||
eventstream := watcher.Event
|
eventstream := watcher.Event
|
||||||
|
|
@ -58,7 +62,7 @@ func TestInotifyEvents(t *testing.T) {
|
||||||
// This should add at least one event to the inotify event queue
|
// This should add at least one event to the inotify event queue
|
||||||
_, err = os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666)
|
_, err = os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("creating test file failed: %s", err)
|
t.Fatalf("creating test file: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// We expect this event to be received almost immediately, but let's wait 1 s to be sure
|
// We expect this event to be received almost immediately, but let's wait 1 s to be sure
|
||||||
|
|
@ -95,7 +99,7 @@ func TestInotifyClose(t *testing.T) {
|
||||||
t.Fatal("double Close() test failed: second Close() call didn't return")
|
t.Fatal("double Close() test failed: second Close() call didn't return")
|
||||||
}
|
}
|
||||||
|
|
||||||
err := watcher.Watch("_test")
|
err := watcher.Watch(os.TempDir())
|
||||||
if err == nil {
|
if err == nil {
|
||||||
t.Fatal("expected error on Watch() after Close(), got nil")
|
t.Fatal("expected error on Watch() after Close(), got nil")
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -66,6 +66,18 @@ func (rb *reorderBuffer) flush(out []byte) []byte {
|
||||||
return out
|
return out
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// flushCopy copies the normalized segment to buf and resets rb.
|
||||||
|
// It returns the number of bytes written to buf.
|
||||||
|
func (rb *reorderBuffer) flushCopy(buf []byte) int {
|
||||||
|
p := 0
|
||||||
|
for i := 0; i < rb.nrune; i++ {
|
||||||
|
runep := rb.rune[i]
|
||||||
|
p += copy(buf[p:], rb.byte[runep.pos:runep.pos+runep.size])
|
||||||
|
}
|
||||||
|
rb.reset()
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
// insertOrdered inserts a rune in the buffer, ordered by Canonical Combining Class.
|
// insertOrdered inserts a rune in the buffer, ordered by Canonical Combining Class.
|
||||||
// It returns false if the buffer is not large enough to hold the rune.
|
// It returns false if the buffer is not large enough to hold the rune.
|
||||||
// It is used internally by insert and insertString only.
|
// It is used internally by insert and insertString only.
|
||||||
|
|
@ -96,35 +108,44 @@ func (rb *reorderBuffer) insertOrdered(info runeInfo) bool {
|
||||||
// insert inserts the given rune in the buffer ordered by CCC.
|
// insert inserts the given rune in the buffer ordered by CCC.
|
||||||
// It returns true if the buffer was large enough to hold the decomposed rune.
|
// It returns true if the buffer was large enough to hold the decomposed rune.
|
||||||
func (rb *reorderBuffer) insert(src input, i int, info runeInfo) bool {
|
func (rb *reorderBuffer) insert(src input, i int, info runeInfo) bool {
|
||||||
if info.size == 3 {
|
if rune := src.hangul(i); rune != 0 {
|
||||||
if rune := src.hangul(i); rune != 0 {
|
return rb.decomposeHangul(rune)
|
||||||
return rb.decomposeHangul(rune)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
if info.hasDecomposition() {
|
if info.hasDecomposition() {
|
||||||
dcomp := info.decomposition()
|
return rb.insertDecomposed(info.decomposition())
|
||||||
rb.tmpBytes = inputBytes(dcomp)
|
}
|
||||||
for i := 0; i < len(dcomp); {
|
return rb.insertSingle(src, i, info)
|
||||||
info = rb.f.info(&rb.tmpBytes, i)
|
}
|
||||||
pos := rb.nbyte
|
|
||||||
if !rb.insertOrdered(info) {
|
// insertDecomposed inserts an entry in to the reorderBuffer for each rune
|
||||||
return false
|
// in dcomp. dcomp must be a sequence of decomposed UTF-8-encoded runes.
|
||||||
}
|
func (rb *reorderBuffer) insertDecomposed(dcomp []byte) bool {
|
||||||
end := i + int(info.size)
|
saveNrune, saveNbyte := rb.nrune, rb.nbyte
|
||||||
copy(rb.byte[pos:], dcomp[i:end])
|
rb.tmpBytes = inputBytes(dcomp)
|
||||||
i = end
|
for i := 0; i < len(dcomp); {
|
||||||
}
|
info := rb.f.info(&rb.tmpBytes, i)
|
||||||
} else {
|
|
||||||
// insertOrder changes nbyte
|
|
||||||
pos := rb.nbyte
|
pos := rb.nbyte
|
||||||
if !rb.insertOrdered(info) {
|
if !rb.insertOrdered(info) {
|
||||||
|
rb.nrune, rb.nbyte = saveNrune, saveNbyte
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
src.copySlice(rb.byte[pos:], i, i+int(info.size))
|
i += copy(rb.byte[pos:], dcomp[i:i+int(info.size)])
|
||||||
}
|
}
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// insertSingle inserts an entry in the reorderBuffer for the rune at
|
||||||
|
// position i. info is the runeInfo for the rune at position i.
|
||||||
|
func (rb *reorderBuffer) insertSingle(src input, i int, info runeInfo) bool {
|
||||||
|
// insertOrder changes nbyte
|
||||||
|
pos := rb.nbyte
|
||||||
|
if !rb.insertOrdered(info) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
src.copySlice(rb.byte[pos:], i, i+int(info.size))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
// appendRune inserts a rune at the end of the buffer. It is used for Hangul.
|
// appendRune inserts a rune at the end of the buffer. It is used for Hangul.
|
||||||
func (rb *reorderBuffer) appendRune(r rune) {
|
func (rb *reorderBuffer) appendRune(r rune) {
|
||||||
bn := rb.nbyte
|
bn := rb.nbyte
|
||||||
|
|
@ -182,8 +203,12 @@ const (
|
||||||
jamoLVTCount = 19 * 21 * 28
|
jamoLVTCount = 19 * 21 * 28
|
||||||
)
|
)
|
||||||
|
|
||||||
// Caller must verify that len(b) >= 3.
|
const hangulUTF8Size = 3
|
||||||
|
|
||||||
func isHangul(b []byte) bool {
|
func isHangul(b []byte) bool {
|
||||||
|
if len(b) < hangulUTF8Size {
|
||||||
|
return false
|
||||||
|
}
|
||||||
b0 := b[0]
|
b0 := b[0]
|
||||||
if b0 < hangulBase0 {
|
if b0 < hangulBase0 {
|
||||||
return false
|
return false
|
||||||
|
|
@ -202,8 +227,10 @@ func isHangul(b []byte) bool {
|
||||||
return b1 == hangulEnd1 && b[2] < hangulEnd2
|
return b1 == hangulEnd1 && b[2] < hangulEnd2
|
||||||
}
|
}
|
||||||
|
|
||||||
// Caller must verify that len(b) >= 3.
|
|
||||||
func isHangulString(b string) bool {
|
func isHangulString(b string) bool {
|
||||||
|
if len(b) < hangulUTF8Size {
|
||||||
|
return false
|
||||||
|
}
|
||||||
b0 := b[0]
|
b0 := b[0]
|
||||||
if b0 < hangulBase0 {
|
if b0 < hangulBase0 {
|
||||||
return false
|
return false
|
||||||
|
|
@ -234,6 +261,22 @@ func isHangulWithoutJamoT(b []byte) bool {
	return c < jamoLVTCount && c%jamoTCount == 0
}

// decomposeHangul writes the decomposed Hangul to buf and returns the number
// of bytes written. len(buf) should be at least 9.
func decomposeHangul(buf []byte, r rune) int {
	const JamoUTF8Len = 3
	r -= hangulBase
	x := r % jamoTCount
	r /= jamoTCount
	utf8.EncodeRune(buf, jamoLBase+r/jamoVCount)
	utf8.EncodeRune(buf[JamoUTF8Len:], jamoVBase+r%jamoVCount)
	if x != 0 {
		utf8.EncodeRune(buf[2*JamoUTF8Len:], jamoTBase+x)
		return 3 * JamoUTF8Len
	}
	return 2 * JamoUTF8Len
}

// decomposeHangul algorithmically decomposes a Hangul rune into
// its Jamo components.
// See http://unicode.org/reports/tr15/#Hangul for details on decomposing Hangul.
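The function above follows the standard Unicode Hangul decomposition arithmetic; the standalone sketch below reproduces the same mapping with the published constants, independent of this package's internal names.

package main

import "fmt"

// Standard Unicode Hangul decomposition constants (see UAX #15 / TR15).
const (
	sBase  = 0xAC00
	lBase  = 0x1100
	vBase  = 0x1161
	tBase  = 0x11A7
	vCount = 21
	tCount = 28
)

// decompose returns the L, V (and optional T) Jamo for a precomposed
// Hangul syllable, mirroring the arithmetic used by decomposeHangul above.
func decompose(r rune) []rune {
	s := r - sBase
	l := lBase + s/(vCount*tCount)
	v := vBase + (s%(vCount*tCount))/tCount
	t := s % tCount
	if t == 0 {
		return []rune{l, v}
	}
	return []rune{l, v, tBase + t}
}

func main() {
	for _, j := range decompose('한') { // U+D55C
		fmt.Printf("%U ", j) // prints U+1112 U+1161 U+11AB
	}
	fmt.Println()
}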
@ -47,14 +47,14 @@ func runTests(t *testing.T, name string, fm Form, f insertFunc, tests []TestCase
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFlush(t *testing.T) {
|
type flushFunc func(rb *reorderBuffer) []byte
|
||||||
|
|
||||||
|
func testFlush(t *testing.T, name string, fn flushFunc) {
|
||||||
rb := reorderBuffer{}
|
rb := reorderBuffer{}
|
||||||
rb.init(NFC, nil)
|
rb.init(NFC, nil)
|
||||||
out := make([]byte, 0)
|
out := fn(&rb)
|
||||||
|
|
||||||
out = rb.flush(out)
|
|
||||||
if len(out) != 0 {
|
if len(out) != 0 {
|
||||||
t.Errorf("wrote bytes on flush of empty buffer. (len(out) = %d)", len(out))
|
t.Errorf("%s: wrote bytes on flush of empty buffer. (len(out) = %d)", name, len(out))
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, r := range []rune("world!") {
|
for _, r := range []rune("world!") {
|
||||||
|
|
@ -65,16 +65,32 @@ func TestFlush(t *testing.T) {
|
||||||
out = rb.flush(out)
|
out = rb.flush(out)
|
||||||
want := "Hello world!"
|
want := "Hello world!"
|
||||||
if string(out) != want {
|
if string(out) != want {
|
||||||
t.Errorf(`output after flush was "%s"; want "%s"`, string(out), want)
|
t.Errorf(`%s: output after flush was "%s"; want "%s"`, name, string(out), want)
|
||||||
}
|
}
|
||||||
if rb.nrune != 0 {
|
if rb.nrune != 0 {
|
||||||
t.Errorf("flush: non-null size of info buffer (rb.nrune == %d)", rb.nrune)
|
t.Errorf("%s: non-null size of info buffer (rb.nrune == %d)", name, rb.nrune)
|
||||||
}
|
}
|
||||||
if rb.nbyte != 0 {
|
if rb.nbyte != 0 {
|
||||||
t.Errorf("flush: non-null size of byte buffer (rb.nbyte == %d)", rb.nbyte)
|
t.Errorf("%s: non-null size of byte buffer (rb.nbyte == %d)", name, rb.nbyte)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func flushF(rb *reorderBuffer) []byte {
|
||||||
|
out := make([]byte, 0)
|
||||||
|
return rb.flush(out)
|
||||||
|
}
|
||||||
|
|
||||||
|
func flushCopyF(rb *reorderBuffer) []byte {
|
||||||
|
out := make([]byte, MaxSegmentSize)
|
||||||
|
n := rb.flushCopy(out)
|
||||||
|
return out[:n]
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFlush(t *testing.T) {
|
||||||
|
testFlush(t, "flush", flushF)
|
||||||
|
testFlush(t, "flushCopy", flushCopyF)
|
||||||
|
}
|
||||||
|
|
||||||
var insertTests = []TestCase{
|
var insertTests = []TestCase{
|
||||||
{[]rune{'a'}, []rune{'a'}},
|
{[]rune{'a'}, []rune{'a'}},
|
||||||
{[]rune{0x300}, []rune{0x300}},
|
{[]rune{0x300}, []rune{0x300}},
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ package norm
import "unicode/utf8"

type input interface {
	skipASCII(p int) int
	skipASCII(p, max int) int
	skipNonStarter(p int) int
	appendSlice(buf []byte, s, e int) []byte
	copySlice(buf []byte, s, e int)

@ -18,8 +18,8 @@ type input interface {

type inputString string

func (s inputString) skipASCII(p int) int {
func (s inputString) skipASCII(p, max int) int {
	for ; p < len(s) && s[p] < utf8.RuneSelf; p++ {
	for ; p < max && s[p] < utf8.RuneSelf; p++ {
	}
	return p
}

@ -59,8 +59,8 @@ func (s inputString) hangul(p int) rune {

type inputBytes []byte

func (s inputBytes) skipASCII(p int) int {
func (s inputBytes) skipASCII(p, max int) int {
	for ; p < len(s) && s[p] < utf8.RuneSelf; p++ {
	for ; p < max && s[p] < utf8.RuneSelf; p++ {
	}
	return p
}
@ -0,0 +1,286 @@
|
||||||
|
// Copyright 2011 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package norm
|
||||||
|
|
||||||
|
const MaxSegmentSize = maxByteBufferSize
|
||||||
|
|
||||||
|
// An Iter iterates over a string or byte slice, while normalizing it
|
||||||
|
// to a given Form.
|
||||||
|
type Iter struct {
|
||||||
|
rb reorderBuffer
|
||||||
|
info runeInfo // first character saved from previous iteration
|
||||||
|
next iterFunc // implementation of next depends on form
|
||||||
|
|
||||||
|
p int // current position in input source
|
||||||
|
outStart int // start of current segment in output buffer
|
||||||
|
inStart int // start of current segment in input source
|
||||||
|
maxp int // position in output buffer after which not to start a new segment
|
||||||
|
maxseg int // for tracking an excess of combining characters
|
||||||
|
|
||||||
|
tccc uint8
|
||||||
|
done bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type iterFunc func(*Iter, []byte) int
|
||||||
|
|
||||||
|
// SetInput initializes i to iterate over src after normalizing it to Form f.
|
||||||
|
func (i *Iter) SetInput(f Form, src []byte) {
|
||||||
|
i.rb.init(f, src)
|
||||||
|
if i.rb.f.composing {
|
||||||
|
i.next = nextComposed
|
||||||
|
} else {
|
||||||
|
i.next = nextDecomposed
|
||||||
|
}
|
||||||
|
i.p = 0
|
||||||
|
if i.done = len(src) == 0; !i.done {
|
||||||
|
i.info = i.rb.f.info(i.rb.src, i.p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetInputString initializes i to iterate over src after normalizing it to Form f.
|
||||||
|
func (i *Iter) SetInputString(f Form, src string) {
|
||||||
|
i.rb.initString(f, src)
|
||||||
|
if i.rb.f.composing {
|
||||||
|
i.next = nextComposed
|
||||||
|
} else {
|
||||||
|
i.next = nextDecomposed
|
||||||
|
}
|
||||||
|
i.p = 0
|
||||||
|
if i.done = len(src) == 0; !i.done {
|
||||||
|
i.info = i.rb.f.info(i.rb.src, i.p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pos returns the byte position at which the next call to Next will commence processing.
|
||||||
|
func (i *Iter) Pos() int {
|
||||||
|
return i.p
|
||||||
|
}
|
||||||
|
|
||||||
|
// Done returns true if there is no more input to process.
|
||||||
|
func (i *Iter) Done() bool {
|
||||||
|
return i.done
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next writes f(i.input[i.Pos():n]...) to buffer buf, where n is the
|
||||||
|
// largest boundary of i.input such that the result fits in buf.
|
||||||
|
// It returns the number of bytes written to buf.
|
||||||
|
// len(buf) should be at least MaxSegmentSize.
|
||||||
|
// Done must be false before calling Next.
|
||||||
|
func (i *Iter) Next(buf []byte) int {
|
||||||
|
return i.next(i, buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (i *Iter) initNext(outn, inStart int) {
|
||||||
|
i.outStart = 0
|
||||||
|
i.inStart = inStart
|
||||||
|
i.maxp = outn - MaxSegmentSize
|
||||||
|
i.maxseg = MaxSegmentSize
|
||||||
|
}
|
||||||
|
|
||||||
|
// setStart resets the start of the new segment to the given position.
|
||||||
|
// It returns true if there is not enough room for the new segment.
|
||||||
|
func (i *Iter) setStart(outp, inp int) bool {
|
||||||
|
if outp > i.maxp {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
i.outStart = outp
|
||||||
|
i.inStart = inp
|
||||||
|
i.maxseg = outp + MaxSegmentSize
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func min(a, b int) int {
|
||||||
|
if a < b {
|
||||||
|
return a
|
||||||
|
}
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// nextDecomposed is the implementation of Next for forms NFD and NFKD.
|
||||||
|
func nextDecomposed(i *Iter, out []byte) int {
|
||||||
|
var outp int
|
||||||
|
i.initNext(len(out), i.p)
|
||||||
|
doFast:
|
||||||
|
inCopyStart, outCopyStart := i.p, outp // invariant xCopyStart <= i.xStart
|
||||||
|
for {
|
||||||
|
if sz := int(i.info.size); sz <= 1 {
|
||||||
|
// ASCII or illegal byte. Either way, advance by 1.
|
||||||
|
i.p++
|
||||||
|
outp++
|
||||||
|
max := min(i.rb.nsrc, len(out)-outp+i.p)
|
||||||
|
if np := i.rb.src.skipASCII(i.p, max); np > i.p {
|
||||||
|
outp += np - i.p
|
||||||
|
i.p = np
|
||||||
|
if i.p >= i.rb.nsrc {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
// ASCII may combine with consecutive runes.
|
||||||
|
if i.setStart(outp-1, i.p-1) {
|
||||||
|
i.p--
|
||||||
|
outp--
|
||||||
|
i.info.size = 1
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if d := i.info.decomposition(); d != nil {
|
||||||
|
i.rb.src.copySlice(out[outCopyStart:], inCopyStart, i.p)
|
||||||
|
p := outp + len(d)
|
||||||
|
if p > i.maxseg && i.setStart(outp, i.p) {
|
||||||
|
return outp
|
||||||
|
}
|
||||||
|
copy(out[outp:], d)
|
||||||
|
outp = p
|
||||||
|
i.p += sz
|
||||||
|
inCopyStart, outCopyStart = i.p, outp
|
||||||
|
} else if r := i.rb.src.hangul(i.p); r != 0 {
|
||||||
|
i.rb.src.copySlice(out[outCopyStart:], inCopyStart, i.p)
|
||||||
|
for {
|
||||||
|
outp += decomposeHangul(out[outp:], r)
|
||||||
|
i.p += hangulUTF8Size
|
||||||
|
if r = i.rb.src.hangul(i.p); r == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if i.setStart(outp, i.p) {
|
||||||
|
return outp
|
||||||
|
}
|
||||||
|
}
|
||||||
|
inCopyStart, outCopyStart = i.p, outp
|
||||||
|
} else {
|
||||||
|
p := outp + sz
|
||||||
|
if p > i.maxseg && i.setStart(outp, i.p) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
outp = p
|
||||||
|
i.p += sz
|
||||||
|
}
|
||||||
|
if i.p >= i.rb.nsrc {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
prevCC := i.info.tccc
|
||||||
|
i.info = i.rb.f.info(i.rb.src, i.p)
|
||||||
|
if cc := i.info.ccc; cc == 0 {
|
||||||
|
if i.setStart(outp, i.p) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
} else if cc < prevCC {
|
||||||
|
goto doNorm
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if inCopyStart != i.p {
|
||||||
|
i.rb.src.copySlice(out[outCopyStart:], inCopyStart, i.p)
|
||||||
|
}
|
||||||
|
i.done = i.p >= i.rb.nsrc
|
||||||
|
return outp
|
||||||
|
doNorm:
|
||||||
|
// Insert what we have decomposed so far in the reorderBuffer.
|
||||||
|
// As we will only reorder, there will always be enough room.
|
||||||
|
i.rb.src.copySlice(out[outCopyStart:], inCopyStart, i.p)
|
||||||
|
if !i.rb.insertDecomposed(out[i.outStart:outp]) {
|
||||||
|
// Start over to prevent decompositions from crossing segment boundaries.
|
||||||
|
// This is a rare occurance.
|
||||||
|
i.p = i.inStart
|
||||||
|
i.info = i.rb.f.info(i.rb.src, i.p)
|
||||||
|
}
|
||||||
|
outp = i.outStart
|
||||||
|
for {
|
||||||
|
if !i.rb.insert(i.rb.src, i.p, i.info) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if i.p += int(i.info.size); i.p >= i.rb.nsrc {
|
||||||
|
outp += i.rb.flushCopy(out[outp:])
|
||||||
|
i.done = true
|
||||||
|
return outp
|
||||||
|
}
|
||||||
|
i.info = i.rb.f.info(i.rb.src, i.p)
|
||||||
|
if i.info.ccc == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// new segment or too many combining characters: exit normalization
|
||||||
|
if outp += i.rb.flushCopy(out[outp:]); i.setStart(outp, i.p) {
|
||||||
|
return outp
|
||||||
|
}
|
||||||
|
goto doFast
|
||||||
|
}
|
||||||
|
|
||||||
|
// nextComposed is the implementation of Next for forms NFC and NFKC.
|
||||||
|
func nextComposed(i *Iter, out []byte) int {
|
||||||
|
var outp int
|
||||||
|
i.initNext(len(out), i.p)
|
||||||
|
doFast:
|
||||||
|
inCopyStart, outCopyStart := i.p, outp // invariant xCopyStart <= i.xStart
|
||||||
|
var prevCC uint8
|
||||||
|
for {
|
||||||
|
if !i.info.isYesC() {
|
||||||
|
goto doNorm
|
||||||
|
}
|
||||||
|
if cc := i.info.ccc; cc == 0 {
|
||||||
|
if i.setStart(outp, i.p) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
} else if cc < prevCC {
|
||||||
|
goto doNorm
|
||||||
|
}
|
||||||
|
prevCC = i.info.tccc
|
||||||
|
sz := int(i.info.size)
|
||||||
|
if sz == 0 {
|
||||||
|
sz = 1 // illegal rune: copy byte-by-byte
|
||||||
|
}
|
||||||
|
p := outp + sz
|
||||||
|
if p > i.maxseg && i.setStart(outp, i.p) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
outp = p
|
||||||
|
i.p += sz
|
||||||
|
max := min(i.rb.nsrc, len(out)-outp+i.p)
|
||||||
|
if np := i.rb.src.skipASCII(i.p, max); np > i.p {
|
||||||
|
outp += np - i.p
|
||||||
|
i.p = np
|
||||||
|
if i.p >= i.rb.nsrc {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
// ASCII may combine with consecutive runes.
|
||||||
|
if i.setStart(outp-1, i.p-1) {
|
||||||
|
i.p--
|
||||||
|
outp--
|
||||||
|
i.info = runeInfo{size: 1}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if i.p >= i.rb.nsrc {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
i.info = i.rb.f.info(i.rb.src, i.p)
|
||||||
|
}
|
||||||
|
if inCopyStart != i.p {
|
||||||
|
i.rb.src.copySlice(out[outCopyStart:], inCopyStart, i.p)
|
||||||
|
}
|
||||||
|
i.done = i.p >= i.rb.nsrc
|
||||||
|
return outp
|
||||||
|
doNorm:
|
||||||
|
i.rb.src.copySlice(out[outCopyStart:], inCopyStart, i.inStart)
|
||||||
|
outp, i.p = i.outStart, i.inStart
|
||||||
|
i.info = i.rb.f.info(i.rb.src, i.p)
|
||||||
|
for {
|
||||||
|
if !i.rb.insert(i.rb.src, i.p, i.info) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if i.p += int(i.info.size); i.p >= i.rb.nsrc {
|
||||||
|
i.rb.compose()
|
||||||
|
outp += i.rb.flushCopy(out[outp:])
|
||||||
|
i.done = true
|
||||||
|
return outp
|
||||||
|
}
|
||||||
|
i.info = i.rb.f.info(i.rb.src, i.p)
|
||||||
|
if i.info.boundaryBefore() {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
i.rb.compose()
|
||||||
|
if outp += i.rb.flushCopy(out[outp:]); i.setStart(outp, i.p) {
|
||||||
|
return outp
|
||||||
|
}
|
||||||
|
goto doFast
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,186 @@
|
||||||
|
// Copyright 2011 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package norm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
var iterBufSizes = []int{
|
||||||
|
MaxSegmentSize,
|
||||||
|
1.5 * MaxSegmentSize,
|
||||||
|
2 * MaxSegmentSize,
|
||||||
|
3 * MaxSegmentSize,
|
||||||
|
100 * MaxSegmentSize,
|
||||||
|
}
|
||||||
|
|
||||||
|
func doIterNorm(f Form, buf []byte, s string) []byte {
|
||||||
|
acc := []byte{}
|
||||||
|
i := Iter{}
|
||||||
|
i.SetInputString(f, s)
|
||||||
|
for !i.Done() {
|
||||||
|
n := i.Next(buf)
|
||||||
|
acc = append(acc, buf[:n]...)
|
||||||
|
}
|
||||||
|
return acc
|
||||||
|
}
|
||||||
|
|
||||||
|
func runIterTests(t *testing.T, name string, f Form, tests []AppendTest, norm bool) {
|
||||||
|
for i, test := range tests {
|
||||||
|
in := test.left + test.right
|
||||||
|
gold := test.out
|
||||||
|
if norm {
|
||||||
|
gold = string(f.AppendString(nil, test.out))
|
||||||
|
}
|
||||||
|
for _, sz := range iterBufSizes {
|
||||||
|
buf := make([]byte, sz)
|
||||||
|
out := string(doIterNorm(f, buf, in))
|
||||||
|
if len(out) != len(gold) {
|
||||||
|
const msg = "%s:%d:%d: length is %d; want %d"
|
||||||
|
t.Errorf(msg, name, i, sz, len(out), len(gold))
|
||||||
|
}
|
||||||
|
if out != gold {
|
||||||
|
// Find first rune that differs and show context.
|
||||||
|
ir := []rune(out)
|
||||||
|
ig := []rune(gold)
|
||||||
|
for j := 0; j < len(ir) && j < len(ig); j++ {
|
||||||
|
if ir[j] == ig[j] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if j -= 3; j < 0 {
|
||||||
|
j = 0
|
||||||
|
}
|
||||||
|
for e := j + 7; j < e && j < len(ir) && j < len(ig); j++ {
|
||||||
|
const msg = "%s:%d:%d: runeAt(%d) = %U; want %U"
|
||||||
|
t.Errorf(msg, name, i, sz, j, ir[j], ig[j])
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func rep(r rune, n int) string {
|
||||||
|
return strings.Repeat(string(r), n)
|
||||||
|
}
|
||||||
|
|
||||||
|
var iterTests = []AppendTest{
|
||||||
|
{"", ascii, ascii},
|
||||||
|
{"", txt_all, txt_all},
|
||||||
|
{"", "a" + rep(0x0300, MaxSegmentSize/2), "a" + rep(0x0300, MaxSegmentSize/2)},
|
||||||
|
}
|
||||||
|
|
||||||
|
var iterTestsD = []AppendTest{
|
||||||
|
{ // segment overflow on unchanged character
|
||||||
|
"",
|
||||||
|
"a" + rep(0x0300, MaxSegmentSize/2) + "\u0316",
|
||||||
|
"a" + rep(0x0300, MaxSegmentSize/2-1) + "\u0316\u0300",
|
||||||
|
},
|
||||||
|
{ // segment overflow on unchanged character + start value
|
||||||
|
"",
|
||||||
|
"a" + rep(0x0300, MaxSegmentSize/2+maxCombiningChars+4) + "\u0316",
|
||||||
|
"a" + rep(0x0300, MaxSegmentSize/2+maxCombiningChars) + "\u0316" + rep(0x300, 4),
|
||||||
|
},
|
||||||
|
{ // segment overflow on decomposition
|
||||||
|
"",
|
||||||
|
"a" + rep(0x0300, MaxSegmentSize/2-1) + "\u0340",
|
||||||
|
"a" + rep(0x0300, MaxSegmentSize/2),
|
||||||
|
},
|
||||||
|
{ // segment overflow on decomposition + start value
|
||||||
|
"",
|
||||||
|
"a" + rep(0x0300, MaxSegmentSize/2-1) + "\u0340" + rep(0x300, maxCombiningChars+4) + "\u0320",
|
||||||
|
"a" + rep(0x0300, MaxSegmentSize/2-1) + rep(0x300, maxCombiningChars+1) + "\u0320" + rep(0x300, 4),
|
||||||
|
},
|
||||||
|
{ // start value after ASCII overflow
|
||||||
|
"",
|
||||||
|
rep('a', MaxSegmentSize) + rep(0x300, maxCombiningChars+2) + "\u0320",
|
||||||
|
rep('a', MaxSegmentSize) + rep(0x300, maxCombiningChars) + "\u0320\u0300\u0300",
|
||||||
|
},
|
||||||
|
{ // start value after Hangul overflow
|
||||||
|
"",
|
||||||
|
rep(0xAC00, MaxSegmentSize/6) + rep(0x300, maxCombiningChars+2) + "\u0320",
|
||||||
|
strings.Repeat("\u1100\u1161", MaxSegmentSize/6) + rep(0x300, maxCombiningChars-1) + "\u0320" + rep(0x300, 3),
|
||||||
|
},
|
||||||
|
{ // start value after cc=0
|
||||||
|
"",
|
||||||
|
"您您" + rep(0x300, maxCombiningChars+4) + "\u0320",
|
||||||
|
"您您" + rep(0x300, maxCombiningChars) + "\u0320" + rep(0x300, 4),
|
||||||
|
},
|
||||||
|
{ // start value after normalization
|
||||||
|
"",
|
||||||
|
"\u0300\u0320a" + rep(0x300, maxCombiningChars+4) + "\u0320",
|
||||||
|
"\u0320\u0300a" + rep(0x300, maxCombiningChars) + "\u0320" + rep(0x300, 4),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var iterTestsC = []AppendTest{
|
||||||
|
{ // ordering of non-composing combining characters
|
||||||
|
"",
|
||||||
|
"\u0305\u0316",
|
||||||
|
"\u0316\u0305",
|
||||||
|
},
|
||||||
|
{ // segment overflow
|
||||||
|
"",
|
||||||
|
"a" + rep(0x0305, MaxSegmentSize/2+4) + "\u0316",
|
||||||
|
"a" + rep(0x0305, MaxSegmentSize/2-1) + "\u0316" + rep(0x305, 5),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIterNextD(t *testing.T) {
|
||||||
|
runIterTests(t, "IterNextD1", NFKD, appendTests, true)
|
||||||
|
runIterTests(t, "IterNextD2", NFKD, iterTests, true)
|
||||||
|
runIterTests(t, "IterNextD3", NFKD, iterTestsD, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIterNextC(t *testing.T) {
|
||||||
|
runIterTests(t, "IterNextC1", NFKC, appendTests, true)
|
||||||
|
runIterTests(t, "IterNextC2", NFKC, iterTests, true)
|
||||||
|
runIterTests(t, "IterNextC3", NFKC, iterTestsC, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
type SegmentTest struct {
|
||||||
|
in string
|
||||||
|
out []string
|
||||||
|
}
|
||||||
|
|
||||||
|
var segmentTests = []SegmentTest{
|
||||||
|
{rep('a', MaxSegmentSize), []string{rep('a', MaxSegmentSize), ""}},
|
||||||
|
{rep('a', MaxSegmentSize+2), []string{rep('a', MaxSegmentSize-1), "aaa", ""}},
|
||||||
|
{rep('a', MaxSegmentSize) + "\u0300aa", []string{rep('a', MaxSegmentSize-1), "a\u0300", "aa", ""}},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note that, by design, segmentation is equal for composing and decomposing forms.
|
||||||
|
func TestIterSegmentation(t *testing.T) {
|
||||||
|
segmentTest(t, "SegmentTestD", NFD, segmentTests)
|
||||||
|
segmentTest(t, "SegmentTestC", NFC, segmentTests)
|
||||||
|
}
|
||||||
|
|
||||||
|
func segmentTest(t *testing.T, name string, f Form, tests []SegmentTest) {
|
||||||
|
iter := Iter{}
|
||||||
|
for i, tt := range segmentTests {
|
||||||
|
buf := make([]byte, MaxSegmentSize)
|
||||||
|
iter.SetInputString(f, tt.in)
|
||||||
|
for j, seg := range tt.out {
|
||||||
|
if seg == "" {
|
||||||
|
if !iter.Done() {
|
||||||
|
n := iter.Next(buf)
|
||||||
|
res := string(buf[:n])
|
||||||
|
t.Errorf(`%s:%d:%d: expected Done()==true, found segment "%s"`, name, i, j, res)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if iter.Done() {
|
||||||
|
t.Errorf("%s:%d:%d: Done()==true, want false", name, i, j)
|
||||||
|
}
|
||||||
|
n := iter.Next(buf)
|
||||||
|
seg = f.String(seg)
|
||||||
|
if res := string(buf[:n]); res != seg {
|
||||||
|
t.Errorf(`%s:%d:%d" segment was "%s" (%d); want "%s" (%d)`, name, i, j, res, len(res), seg, len(seg))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
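
For context, here is a minimal sketch of how the Iter API exercised by these tests is typically driven from outside the package. It only uses methods shown in this change (SetInputString, Done, Next) and MaxSegmentSize; the "exp/norm" import path is an assumption about this snapshot's package layout.

package main

import (
	"fmt"

	"exp/norm" // assumed import path for this snapshot
)

func main() {
	var it norm.Iter
	it.SetInputString(norm.NFC, "re\u0301sume\u0301") // "résumé" with combining accents
	buf := make([]byte, norm.MaxSegmentSize)
	for !it.Done() {
		n := it.Next(buf) // copies the next normalized segment into buf
		fmt.Printf("%q\n", buf[:n])
	}
}
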
@ -243,7 +243,7 @@ func quickSpan(rb *reorderBuffer, i int) int {
	lastSegStart := i
	src, n := rb.src, rb.nsrc
	for i < n {
-		if j := src.skipASCII(i); i != j {
+		if j := src.skipASCII(i, n); i != j {
			i = j
			lastSegStart = i - 1
			lastCC = 0

@ -448,11 +448,16 @@ func decomposeToLastBoundary(rb *reorderBuffer, buf []byte) []byte {
	}
	// Check that decomposition doesn't result in overflow.
	if info.hasDecomposition() {
-		dcomp := info.decomposition()
-		for i := 0; i < len(dcomp); {
-			inf := rb.f.info(inputBytes(dcomp), i)
-			i += int(inf.size)
-			n++
+		if isHangul(buf) {
+			i += int(info.size)
+			n++
+		} else {
+			dcomp := info.decomposition()
+			for i := 0; i < len(dcomp); {
+				inf := rb.f.info(inputBytes(dcomp), i)
+				i += int(inf.size)
+				n++
+			}
		}
	} else {
		n++

@ -5,6 +5,7 @@
package norm

import (
+	"bytes"
	"strings"
	"testing"
)

@ -495,15 +496,40 @@ func TestAppend(t *testing.T) {
|
||||||
runAppendTests(t, "TestString", NFKC, stringF, appendTests)
|
runAppendTests(t, "TestString", NFKC, stringF, appendTests)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func appendBench(f Form, in []byte) func() {
|
||||||
|
buf := make([]byte, 0, 4*len(in))
|
||||||
|
return func() {
|
||||||
|
f.Append(buf, in...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func iterBench(f Form, in []byte) func() {
|
||||||
|
buf := make([]byte, 4*len(in))
|
||||||
|
iter := Iter{}
|
||||||
|
return func() {
|
||||||
|
iter.SetInput(f, in)
|
||||||
|
for !iter.Done() {
|
||||||
|
iter.Next(buf)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendBenchmarks(bm []func(), f Form, in []byte) []func() {
|
||||||
|
//bm = append(bm, appendBench(f, in))
|
||||||
|
bm = append(bm, iterBench(f, in))
|
||||||
|
return bm
|
||||||
|
}
|
||||||
|
|
||||||
func doFormBenchmark(b *testing.B, inf, f Form, s string) {
|
func doFormBenchmark(b *testing.B, inf, f Form, s string) {
|
||||||
b.StopTimer()
|
b.StopTimer()
|
||||||
in := inf.Bytes([]byte(s))
|
in := inf.Bytes([]byte(s))
|
||||||
buf := make([]byte, 2*len(in))
|
bm := appendBenchmarks(nil, f, in)
|
||||||
b.SetBytes(int64(len(in)))
|
b.SetBytes(int64(len(in) * len(bm)))
|
||||||
b.StartTimer()
|
b.StartTimer()
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
buf = f.Append(buf[0:0], in...)
|
for _, fn := range bm {
|
||||||
buf = buf[0:0]
|
fn()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -549,17 +575,21 @@ func BenchmarkNormalizeHangulNFD2NFD(b *testing.B) {
|
||||||
doFormBenchmark(b, NFD, NFD, txt_kr)
|
doFormBenchmark(b, NFD, NFD, txt_kr)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var forms = []Form{NFC, NFD, NFKC, NFKD}
|
||||||
|
|
||||||
func doTextBenchmark(b *testing.B, s string) {
|
func doTextBenchmark(b *testing.B, s string) {
|
||||||
b.StopTimer()
|
b.StopTimer()
|
||||||
b.SetBytes(int64(len(s)) * 4)
|
|
||||||
in := []byte(s)
|
in := []byte(s)
|
||||||
var buf = make([]byte, 0, 2*len(in))
|
bm := []func(){}
|
||||||
|
for _, f := range forms {
|
||||||
|
bm = appendBenchmarks(bm, f, in)
|
||||||
|
}
|
||||||
|
b.SetBytes(int64(len(s) * len(bm)))
|
||||||
b.StartTimer()
|
b.StartTimer()
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
NFC.Append(buf, in...)
|
for _, f := range bm {
|
||||||
NFD.Append(buf, in...)
|
f()
|
||||||
NFKC.Append(buf, in...)
|
}
|
||||||
NFKD.Append(buf, in...)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -584,6 +614,11 @@ func BenchmarkJapanese(b *testing.B) {
func BenchmarkChinese(b *testing.B) {
	doTextBenchmark(b, txt_cn)
}
+func BenchmarkOverflow(b *testing.B) {
+	doTextBenchmark(b, overflow)
+}
+
+var overflow = string(bytes.Repeat([]byte("\u035D"), 4096)) + "\u035B"

// Tests sampled from the Canonical ordering tests (Part 2) of
// http://unicode.org/Public/UNIDATA/NormalizationTest.txt

@ -220,6 +220,17 @@ func cmpIsNormal(t *Test, name string, f norm.Form, test string, result, want bo
|
||||||
func doTest(t *Test, f norm.Form, gold, test string) {
|
func doTest(t *Test, f norm.Form, gold, test string) {
|
||||||
result := f.Bytes([]byte(test))
|
result := f.Bytes([]byte(test))
|
||||||
cmpResult(t, "Bytes", f, gold, test, string(result))
|
cmpResult(t, "Bytes", f, gold, test, string(result))
|
||||||
|
sresult := f.String(test)
|
||||||
|
cmpResult(t, "String", f, gold, test, sresult)
|
||||||
|
buf := make([]byte, norm.MaxSegmentSize)
|
||||||
|
acc := []byte{}
|
||||||
|
i := norm.Iter{}
|
||||||
|
i.SetInputString(f, test)
|
||||||
|
for !i.Done() {
|
||||||
|
n := i.Next(buf)
|
||||||
|
acc = append(acc, buf[:n]...)
|
||||||
|
}
|
||||||
|
cmpResult(t, "Iter.Next", f, gold, test, string(acc))
|
||||||
for i := range test {
|
for i := range test {
|
||||||
out := f.Append(f.Bytes([]byte(test[:i])), []byte(test[i:])...)
|
out := f.Append(f.Bytes([]byte(test[:i])), []byte(test[i:])...)
|
||||||
cmpResult(t, fmt.Sprintf(":Append:%d", i), f, gold, test, string(out))
|
cmpResult(t, fmt.Sprintf(":Append:%d", i), f, gold, test, string(out))
|
||||||
|
|
|
||||||
|
|
@ -98,9 +98,9 @@ func (s *socks5) Dial(network, addr string) (net.Conn, error) {

	buf = append(buf, socks5Version)
	if len(s.user) > 0 && len(s.user) < 256 && len(s.password) < 256 {
-		buf = append(buf, 2, /* num auth methods */ socks5AuthNone, socks5AuthPassword)
+		buf = append(buf, 2 /* num auth methods */, socks5AuthNone, socks5AuthPassword)
	} else {
-		buf = append(buf, 1, /* num auth methods */ socks5AuthNone)
+		buf = append(buf, 1 /* num auth methods */, socks5AuthNone)
	}

	if _, err = conn.Write(buf); err != nil {

@ -139,7 +139,7 @@ func (s *socks5) Dial(network, addr string) (net.Conn, error) {
	}

	buf = buf[:0]
-	buf = append(buf, socks5Version, socks5Connect, 0 /* reserved */ )
+	buf = append(buf, socks5Version, socks5Connect, 0 /* reserved */)

	if ip := net.ParseIP(host); ip != nil {
		if len(ip) == 4 {

@ -389,12 +389,12 @@ func (t *Terminal) Write(buf []byte) (n int, err error) {

	// We have a prompt and possibly user input on the screen. We
	// have to clear it first.
-	t.move(0, /* up */ 0, /* down */ t.cursorX, /* left */ 0 /* right */ )
+	t.move(0 /* up */, 0 /* down */, t.cursorX /* left */, 0 /* right */)
	t.cursorX = 0
	t.clearLineToRight()

	for t.cursorY > 0 {
-		t.move(1, /* up */ 0, 0, 0)
+		t.move(1 /* up */, 0, 0, 0)
		t.cursorY--
		t.clearLineToRight()
	}

@ -7,6 +7,7 @@
package winfsnotify

import (
+	"io/ioutil"
	"os"
	"testing"
	"time"

@ -115,7 +116,13 @@ func TestNotifyClose(t *testing.T) {
		t.Fatal("double Close() test failed: second Close() call didn't return")
	}

-	err := watcher.Watch("_test")
+	dir, err := ioutil.TempDir("", "wininotify")
+	if err != nil {
+		t.Fatalf("TempDir failed: %s", err)
+	}
+	defer os.RemoveAll(dir)
+
+	err = watcher.Watch(dir)
	if err == nil {
		t.Fatal("expected error on Watch() after Close(), got nil")
	}

@ -2,28 +2,31 @@
|
||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// Extract example functions from package ASTs.
|
// Extract example functions from file ASTs.
|
||||||
|
|
||||||
package doc
|
package doc
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"go/ast"
|
"go/ast"
|
||||||
"go/printer"
|
|
||||||
"go/token"
|
"go/token"
|
||||||
|
"regexp"
|
||||||
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
"unicode"
|
"unicode"
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Example struct {
|
type Example struct {
|
||||||
Name string // name of the item being demonstrated
|
Name string // name of the item being exemplified
|
||||||
Body *printer.CommentedNode // code
|
Doc string // example function doc string
|
||||||
Output string // expected output
|
Code ast.Node
|
||||||
|
Comments []*ast.CommentGroup
|
||||||
|
Output string // expected output
|
||||||
}
|
}
|
||||||
|
|
||||||
func Examples(pkg *ast.Package) []*Example {
|
func Examples(files ...*ast.File) []*Example {
|
||||||
var list []*Example
|
var list []*Example
|
||||||
for _, file := range pkg.Files {
|
for _, file := range files {
|
||||||
hasTests := false // file contains tests or benchmarks
|
hasTests := false // file contains tests or benchmarks
|
||||||
numDecl := 0 // number of non-import declarations in the file
|
numDecl := 0 // number of non-import declarations in the file
|
||||||
var flist []*Example
|
var flist []*Example
|
||||||
|
|
@ -45,26 +48,54 @@ func Examples(pkg *ast.Package) []*Example {
|
||||||
if !isTest(name, "Example") {
|
if !isTest(name, "Example") {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
var doc string
|
||||||
|
if f.Doc != nil {
|
||||||
|
doc = f.Doc.Text()
|
||||||
|
}
|
||||||
flist = append(flist, &Example{
|
flist = append(flist, &Example{
|
||||||
Name: name[len("Example"):],
|
Name: name[len("Example"):],
|
||||||
Body: &printer.CommentedNode{
|
Doc: doc,
|
||||||
Node: f.Body,
|
Code: f.Body,
|
||||||
Comments: file.Comments,
|
Comments: file.Comments,
|
||||||
},
|
Output: exampleOutput(f, file.Comments),
|
||||||
Output: f.Doc.Text(),
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if !hasTests && numDecl > 1 && len(flist) == 1 {
|
if !hasTests && numDecl > 1 && len(flist) == 1 {
|
||||||
// If this file only has one example function, some
|
// If this file only has one example function, some
|
||||||
// other top-level declarations, and no tests or
|
// other top-level declarations, and no tests or
|
||||||
// benchmarks, use the whole file as the example.
|
// benchmarks, use the whole file as the example.
|
||||||
flist[0].Body.Node = file
|
flist[0].Code = file
|
||||||
}
|
}
|
||||||
list = append(list, flist...)
|
list = append(list, flist...)
|
||||||
}
|
}
|
||||||
|
sort.Sort(exampleByName(list))
|
||||||
return list
|
return list
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var outputPrefix = regexp.MustCompile(`(?i)^[[:space:]]*output:`)
|
||||||
|
|
||||||
|
func exampleOutput(fun *ast.FuncDecl, comments []*ast.CommentGroup) string {
|
||||||
|
// find the last comment in the function
|
||||||
|
var last *ast.CommentGroup
|
||||||
|
for _, cg := range comments {
|
||||||
|
if cg.Pos() < fun.Pos() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if cg.End() > fun.End() {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
last = cg
|
||||||
|
}
|
||||||
|
if last != nil {
|
||||||
|
// test that it begins with the correct prefix
|
||||||
|
text := last.Text()
|
||||||
|
if loc := outputPrefix.FindStringIndex(text); loc != nil {
|
||||||
|
return strings.TrimSpace(text[loc[1]:])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "" // no suitable comment found
|
||||||
|
}
|
||||||
|
|
||||||
// isTest tells whether name looks like a test, example, or benchmark.
|
// isTest tells whether name looks like a test, example, or benchmark.
|
||||||
// It is a Test (say) if there is a character after Test that is not a
|
// It is a Test (say) if there is a character after Test that is not a
|
||||||
// lower-case letter. (We don't want Testiness.)
|
// lower-case letter. (We don't want Testiness.)
|
||||||
|
|
@ -78,3 +109,9 @@ func isTest(name, prefix string) bool {
|
||||||
rune, _ := utf8.DecodeRuneInString(name[len(prefix):])
|
rune, _ := utf8.DecodeRuneInString(name[len(prefix):])
|
||||||
return !unicode.IsLower(rune)
|
return !unicode.IsLower(rune)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type exampleByName []*Example
|
||||||
|
|
||||||
|
func (s exampleByName) Len() int { return len(s) }
|
||||||
|
func (s exampleByName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||||
|
func (s exampleByName) Less(i, j int) bool { return s[i].Name < s[j].Name }
|
||||||
|
|
|
||||||
|
|
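
The new Examples signature above takes parsed files instead of an *ast.Package. A minimal sketch of calling it is shown below; the file name is hypothetical, and parser.ParseComments is needed so that output comments survive parsing.

package main

import (
	"fmt"
	"go/doc"
	"go/parser"
	"go/token"
)

func main() {
	fset := token.NewFileSet()
	// hypothetical example test file
	f, err := parser.ParseFile(fset, "sort_example_test.go", nil, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	for _, ex := range doc.Examples(f) {
		fmt.Printf("Example%s wants output %q\n", ex.Name, ex.Output)
	}
}
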
@ -439,8 +439,10 @@ func (r *reader) readFile(src *ast.File) {
|
||||||
// gets to (re-)use the declaration documentation
|
// gets to (re-)use the declaration documentation
|
||||||
// if there's none associated with the spec itself
|
// if there's none associated with the spec itself
|
||||||
fake := &ast.GenDecl{
|
fake := &ast.GenDecl{
|
||||||
d.Doc, d.Pos(), token.TYPE, token.NoPos,
|
Doc: d.Doc,
|
||||||
[]ast.Spec{s}, token.NoPos,
|
TokPos: d.Pos(),
|
||||||
|
Tok: token.TYPE,
|
||||||
|
Specs: []ast.Spec{s},
|
||||||
}
|
}
|
||||||
r.readType(fake, s)
|
r.readType(fake, s)
|
||||||
}
|
}
|
||||||
|
|
@ -460,7 +462,7 @@ func (r *reader) readFile(src *ast.File) {
|
||||||
// non-empty BUG comment; collect comment without BUG prefix
|
// non-empty BUG comment; collect comment without BUG prefix
|
||||||
list := append([]*ast.Comment(nil), c.List...) // make a copy
|
list := append([]*ast.Comment(nil), c.List...) // make a copy
|
||||||
list[0].Text = text[m[1]:]
|
list[0].Text = text[m[1]:]
|
||||||
r.bugs = append(r.bugs, (&ast.CommentGroup{list}).Text())
|
r.bugs = append(r.bugs, (&ast.CommentGroup{List: list}).Text())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -530,7 +532,7 @@ func customizeRecv(f *Func, recvTypeName string, embeddedIsPtr bool, level int)
|
||||||
_, origRecvIsPtr := newField.Type.(*ast.StarExpr)
|
_, origRecvIsPtr := newField.Type.(*ast.StarExpr)
|
||||||
var typ ast.Expr = ast.NewIdent(recvTypeName)
|
var typ ast.Expr = ast.NewIdent(recvTypeName)
|
||||||
if !embeddedIsPtr && origRecvIsPtr {
|
if !embeddedIsPtr && origRecvIsPtr {
|
||||||
typ = &ast.StarExpr{token.NoPos, typ}
|
typ = &ast.StarExpr{X: typ}
|
||||||
}
|
}
|
||||||
newField.Type = typ
|
newField.Type = typ
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,52 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package doc

import "unicode"

// firstSentenceLen returns the length of the first sentence in s.
// The sentence ends after the first period followed by space and
// not preceded by exactly one uppercase letter.
//
func firstSentenceLen(s string) int {
	var ppp, pp, p rune
	for i, q := range s {
		if q == '\n' || q == '\r' || q == '\t' {
			q = ' '
		}
		if q == ' ' && p == '.' && (!unicode.IsUpper(pp) || unicode.IsUpper(ppp)) {
			return i
		}
		ppp, pp, p = pp, p, q
	}
	return len(s)
}

// Synopsis returns a cleaned version of the first sentence in s.
// That sentence ends after the first period followed by space and
// not preceded by exactly one uppercase letter. The result string
// has no \n, \r, or \t characters and uses only single spaces between
// words.
//
func Synopsis(s string) string {
	n := firstSentenceLen(s)
	var b []byte
	p := byte(' ')
	for i := 0; i < n; i++ {
		q := s[i]
		if q == '\n' || q == '\r' || q == '\t' {
			q = ' '
		}
		if q != ' ' || p != ' ' {
			b = append(b, q)
			p = q
		}
	}
	// remove trailing blank, if any
	if n := len(b); n > 0 && p == ' ' {
		b = b[0 : n-1]
	}
	return string(b)
}

@ -0,0 +1,44 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package doc

import "testing"

var tests = []struct {
	txt string
	fsl int
	syn string
}{
	{"", 0, ""},
	{"foo", 3, "foo"},
	{"foo.", 4, "foo."},
	{"foo.bar", 7, "foo.bar"},
	{" foo. ", 6, "foo."},
	{" foo\t bar.\n", 12, "foo bar."},
	{" foo\t bar.\n", 12, "foo bar."},
	{"a b\n\nc\r\rd\t\t", 12, "a b c d"},
	{"a b\n\nc\r\rd\t\t . BLA", 15, "a b c d ."},
	{"Package poems by T.S.Eliot. To rhyme...", 27, "Package poems by T.S.Eliot."},
	{"Package poems by T. S. Eliot. To rhyme...", 29, "Package poems by T. S. Eliot."},
	{"foo implements the foo ABI. The foo ABI is...", 27, "foo implements the foo ABI."},
	{"Package\nfoo. ..", 12, "Package foo."},
	{"P . Q.", 3, "P ."},
	{"P. Q. ", 8, "P. Q."},
	{"Package Καλημέρα κόσμε.", 36, "Package Καλημέρα κόσμε."},
	{"Package こんにちは 世界\n", 31, "Package こんにちは 世界"},
}

func TestSynopsis(t *testing.T) {
	for _, e := range tests {
		fsl := firstSentenceLen(e.txt)
		if fsl != e.fsl {
			t.Errorf("got fsl = %d; want %d for %q\n", fsl, e.fsl, e.txt)
		}
		syn := Synopsis(e.txt)
		if syn != e.syn {
			t.Errorf("got syn = %q; want %q for %q\n", syn, e.syn, e.txt)
		}
	}
}

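
A small illustration of the exported Synopsis helper added above; the input string is chosen only for illustration. Only the first sentence survives, and newlines and runs of spaces collapse to single spaces.

package main

import (
	"fmt"
	"go/doc"
)

func main() {
	s := "Package doc extracts source code documentation.\nIt is used by godoc."
	fmt.Println(doc.Synopsis(s))
	// prints: Package doc extracts source code documentation.
}
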
@ -16,7 +16,7 @@ var matchBenchmarks = flag.String("test.bench", "", "regular expression to selec
|
||||||
var benchTime = flag.Float64("test.benchtime", 1, "approximate run time for each benchmark, in seconds")
|
var benchTime = flag.Float64("test.benchtime", 1, "approximate run time for each benchmark, in seconds")
|
||||||
|
|
||||||
// An internal type but exported because it is cross-package; part of the implementation
|
// An internal type but exported because it is cross-package; part of the implementation
|
||||||
// of gotest.
|
// of go test.
|
||||||
type InternalBenchmark struct {
|
type InternalBenchmark struct {
|
||||||
Name string
|
Name string
|
||||||
F func(b *B)
|
F func(b *B)
|
||||||
|
|
@ -213,7 +213,7 @@ func (r BenchmarkResult) String() string {
|
||||||
}
|
}
|
||||||
|
|
||||||
// An internal function but exported because it is cross-package; part of the implementation
|
// An internal function but exported because it is cross-package; part of the implementation
|
||||||
// of gotest.
|
// of go test.
|
||||||
func RunBenchmarks(matchString func(pat, str string) (bool, error), benchmarks []InternalBenchmark) {
|
func RunBenchmarks(matchString func(pat, str string) (bool, error), benchmarks []InternalBenchmark) {
|
||||||
// If no flag was specified, don't run benchmarks.
|
// If no flag was specified, don't run benchmarks.
|
||||||
if len(*matchBenchmarks) == 0 {
|
if len(*matchBenchmarks) == 0 {
|
||||||
|
|
@ -281,7 +281,7 @@ func (b *B) trimOutput() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Benchmark benchmarks a single function. Useful for creating
|
// Benchmark benchmarks a single function. Useful for creating
|
||||||
// custom benchmarks that do not use gotest.
|
// custom benchmarks that do not use go test.
|
||||||
func Benchmark(f func(b *B)) BenchmarkResult {
|
func Benchmark(f func(b *B)) BenchmarkResult {
|
||||||
b := &B{
|
b := &B{
|
||||||
common: common{
|
common: common{
|
||||||
|
|
|
||||||
|
|
@ -27,7 +27,7 @@ VARIABLES
|
||||||
// The short flag requests that tests run more quickly, but its functionality
|
// The short flag requests that tests run more quickly, but its functionality
|
||||||
// is provided by test writers themselves. The testing package is just its
|
// is provided by test writers themselves. The testing package is just its
|
||||||
// home. The all.bash installation script sets it to make installation more
|
// home. The all.bash installation script sets it to make installation more
|
||||||
// efficient, but by default the flag is off so a plain "gotest" will do a
|
// efficient, but by default the flag is off so a plain "go test" will do a
|
||||||
// full test of the package.
|
// full test of the package.
|
||||||
short = flag.Bool("test.short", false, "run smaller test suite to save time")
|
short = flag.Bool("test.short", false, "run smaller test suite to save time")
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// Package testing provides support for automated testing of Go packages.
|
// Package testing provides support for automated testing of Go packages.
|
||||||
// It is intended to be used in concert with the ``gotest'' utility, which automates
|
// It is intended to be used in concert with the ``go test'' utility, which automates
|
||||||
// execution of any function of the form
|
// execution of any function of the form
|
||||||
// func TestXxx(*testing.T)
|
// func TestXxx(*testing.T)
|
||||||
// where Xxx can be any alphanumeric string (but the first letter must not be in
|
// where Xxx can be any alphanumeric string (but the first letter must not be in
|
||||||
|
|
@ -12,7 +12,7 @@
|
||||||
//
|
//
|
||||||
// Functions of the form
|
// Functions of the form
|
||||||
// func BenchmarkXxx(*testing.B)
|
// func BenchmarkXxx(*testing.B)
|
||||||
// are considered benchmarks, and are executed by gotest when the -test.bench
|
// are considered benchmarks, and are executed by go test when the -test.bench
|
||||||
// flag is provided.
|
// flag is provided.
|
||||||
//
|
//
|
||||||
// A sample benchmark function looks like this:
|
// A sample benchmark function looks like this:
|
||||||
|
|
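
For reference, a minimal benchmark of the BenchmarkXxx form described in the hunk above; the package and function names are hypothetical, and this is not the sample that appears in the package documentation itself. The measured work must run b.N times.

package sample_test // hypothetical package

import "testing"

// BenchmarkAppend performs the measured work b.N times.
func BenchmarkAppend(b *testing.B) {
	for i := 0; i < b.N; i++ {
		s := make([]int, 0, 16)
		for j := 0; j < 16; j++ {
			s = append(s, j)
		}
		_ = s
	}
}
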
@ -53,7 +53,7 @@ var (
|
||||||
// The short flag requests that tests run more quickly, but its functionality
|
// The short flag requests that tests run more quickly, but its functionality
|
||||||
// is provided by test writers themselves. The testing package is just its
|
// is provided by test writers themselves. The testing package is just its
|
||||||
// home. The all.bash installation script sets it to make installation more
|
// home. The all.bash installation script sets it to make installation more
|
||||||
// efficient, but by default the flag is off so a plain "gotest" will do a
|
// efficient, but by default the flag is off so a plain "go test" will do a
|
||||||
// full test of the package.
|
// full test of the package.
|
||||||
short = flag.Bool("test.short", false, "run smaller test suite to save time")
|
short = flag.Bool("test.short", false, "run smaller test suite to save time")
|
||||||
|
|
||||||
|
|
@ -205,7 +205,7 @@ func (t *T) Parallel() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// An internal type but exported because it is cross-package; part of the implementation
|
// An internal type but exported because it is cross-package; part of the implementation
|
||||||
// of gotest.
|
// of go test.
|
||||||
type InternalTest struct {
|
type InternalTest struct {
|
||||||
Name string
|
Name string
|
||||||
F func(*T)
|
F func(*T)
|
||||||
|
|
@ -227,7 +227,7 @@ func tRunner(t *T, test *InternalTest) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// An internal function but exported because it is cross-package; part of the implementation
|
// An internal function but exported because it is cross-package; part of the implementation
|
||||||
// of gotest.
|
// of go test.
|
||||||
func Main(matchString func(pat, str string) (bool, error), tests []InternalTest, benchmarks []InternalBenchmark, examples []InternalExample) {
|
func Main(matchString func(pat, str string) (bool, error), tests []InternalTest, benchmarks []InternalBenchmark, examples []InternalExample) {
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
parseCpuList()
|
parseCpuList()
|
||||||
|
|
|
||||||
|
|
@ -249,7 +249,7 @@ func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
comment = &ast.Comment{p.pos, p.lit}
|
comment = &ast.Comment{Slash: p.pos, Text: p.lit}
|
||||||
p.next0()
|
p.next0()
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
@ -270,7 +270,7 @@ func (p *parser) consumeCommentGroup() (comments *ast.CommentGroup, endline int)
|
||||||
}
|
}
|
||||||
|
|
||||||
// add comment group to the comments list
|
// add comment group to the comments list
|
||||||
comments = &ast.CommentGroup{list}
|
comments = &ast.CommentGroup{List: list}
|
||||||
p.comments = append(p.comments, comments)
|
p.comments = append(p.comments, comments)
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
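
The parser hunks above and below mechanically convert positional composite literals into keyed ones. A tiny sketch of the pattern outside the parser (illustrative only): keyed fields keep a literal valid even if go/ast later grows or reorders struct fields, whereas a positional literal would break or change meaning silently.

package main

import (
	"fmt"
	"go/ast"
	"go/token"
)

func main() {
	// Keyed form: each field is named explicitly.
	lit := &ast.BasicLit{ValuePos: token.NoPos, Kind: token.INT, Value: "42"}
	fmt.Println(lit.Kind, lit.Value)
}
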
@ -391,7 +391,7 @@ func (p *parser) parseIdent() *ast.Ident {
|
||||||
} else {
|
} else {
|
||||||
p.expect(token.IDENT) // use expect() error handling
|
p.expect(token.IDENT) // use expect() error handling
|
||||||
}
|
}
|
||||||
return &ast.Ident{pos, name, nil}
|
return &ast.Ident{NamePos: pos, Name: name}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseIdentList() (list []*ast.Ident) {
|
func (p *parser) parseIdentList() (list []*ast.Ident) {
|
||||||
|
|
@ -469,7 +469,7 @@ func (p *parser) parseType() ast.Expr {
|
||||||
pos := p.pos
|
pos := p.pos
|
||||||
p.errorExpected(pos, "type")
|
p.errorExpected(pos, "type")
|
||||||
p.next() // make progress
|
p.next() // make progress
|
||||||
return &ast.BadExpr{pos, p.pos}
|
return &ast.BadExpr{From: pos, To: p.pos}
|
||||||
}
|
}
|
||||||
|
|
||||||
return typ
|
return typ
|
||||||
|
|
@ -489,7 +489,7 @@ func (p *parser) parseTypeName() ast.Expr {
|
||||||
p.next()
|
p.next()
|
||||||
p.resolve(ident)
|
p.resolve(ident)
|
||||||
sel := p.parseIdent()
|
sel := p.parseIdent()
|
||||||
return &ast.SelectorExpr{ident, sel}
|
return &ast.SelectorExpr{X: ident, Sel: sel}
|
||||||
}
|
}
|
||||||
|
|
||||||
return ident
|
return ident
|
||||||
|
|
@ -503,7 +503,7 @@ func (p *parser) parseArrayType(ellipsisOk bool) ast.Expr {
|
||||||
lbrack := p.expect(token.LBRACK)
|
lbrack := p.expect(token.LBRACK)
|
||||||
var len ast.Expr
|
var len ast.Expr
|
||||||
if ellipsisOk && p.tok == token.ELLIPSIS {
|
if ellipsisOk && p.tok == token.ELLIPSIS {
|
||||||
len = &ast.Ellipsis{p.pos, nil}
|
len = &ast.Ellipsis{Ellipsis: p.pos}
|
||||||
p.next()
|
p.next()
|
||||||
} else if p.tok != token.RBRACK {
|
} else if p.tok != token.RBRACK {
|
||||||
len = p.parseRhs()
|
len = p.parseRhs()
|
||||||
|
|
@ -511,7 +511,7 @@ func (p *parser) parseArrayType(ellipsisOk bool) ast.Expr {
|
||||||
p.expect(token.RBRACK)
|
p.expect(token.RBRACK)
|
||||||
elt := p.parseType()
|
elt := p.parseType()
|
||||||
|
|
||||||
return &ast.ArrayType{lbrack, len, elt}
|
return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
|
func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
|
||||||
|
|
@ -521,7 +521,7 @@ func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
|
||||||
if !isIdent {
|
if !isIdent {
|
||||||
pos := x.Pos()
|
pos := x.Pos()
|
||||||
p.errorExpected(pos, "identifier")
|
p.errorExpected(pos, "identifier")
|
||||||
ident = &ast.Ident{pos, "_", nil}
|
ident = &ast.Ident{NamePos: pos, Name: "_"}
|
||||||
}
|
}
|
||||||
idents[i] = ident
|
idents[i] = ident
|
||||||
}
|
}
|
||||||
|
|
@ -541,7 +541,7 @@ func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
|
||||||
// optional tag
|
// optional tag
|
||||||
var tag *ast.BasicLit
|
var tag *ast.BasicLit
|
||||||
if p.tok == token.STRING {
|
if p.tok == token.STRING {
|
||||||
tag = &ast.BasicLit{p.pos, p.tok, p.lit}
|
tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
|
||||||
p.next()
|
p.next()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -557,13 +557,13 @@ func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
|
||||||
if n := len(list); n > 1 || !isTypeName(deref(typ)) {
|
if n := len(list); n > 1 || !isTypeName(deref(typ)) {
|
||||||
pos := typ.Pos()
|
pos := typ.Pos()
|
||||||
p.errorExpected(pos, "anonymous field")
|
p.errorExpected(pos, "anonymous field")
|
||||||
typ = &ast.BadExpr{pos, list[n-1].End()}
|
typ = &ast.BadExpr{From: pos, To: list[n-1].End()}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
p.expectSemi() // call before accessing p.linecomment
|
p.expectSemi() // call before accessing p.linecomment
|
||||||
|
|
||||||
field := &ast.Field{doc, idents, typ, tag, p.lineComment}
|
field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment}
|
||||||
p.declare(field, nil, scope, ast.Var, idents...)
|
p.declare(field, nil, scope, ast.Var, idents...)
|
||||||
|
|
||||||
return field
|
return field
|
||||||
|
|
@ -586,7 +586,14 @@ func (p *parser) parseStructType() *ast.StructType {
|
||||||
}
|
}
|
||||||
rbrace := p.expect(token.RBRACE)
|
rbrace := p.expect(token.RBRACE)
|
||||||
|
|
||||||
return &ast.StructType{pos, &ast.FieldList{lbrace, list, rbrace}, false}
|
return &ast.StructType{
|
||||||
|
Struct: pos,
|
||||||
|
Fields: &ast.FieldList{
|
||||||
|
Opening: lbrace,
|
||||||
|
List: list,
|
||||||
|
Closing: rbrace,
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parsePointerType() *ast.StarExpr {
|
func (p *parser) parsePointerType() *ast.StarExpr {
|
||||||
|
|
@ -597,7 +604,7 @@ func (p *parser) parsePointerType() *ast.StarExpr {
|
||||||
star := p.expect(token.MUL)
|
star := p.expect(token.MUL)
|
||||||
base := p.parseType()
|
base := p.parseType()
|
||||||
|
|
||||||
return &ast.StarExpr{star, base}
|
return &ast.StarExpr{Star: star, X: base}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) tryVarType(isParam bool) ast.Expr {
|
func (p *parser) tryVarType(isParam bool) ast.Expr {
|
||||||
|
|
@ -607,9 +614,9 @@ func (p *parser) tryVarType(isParam bool) ast.Expr {
|
||||||
typ := p.tryIdentOrType(isParam) // don't use parseType so we can provide better error message
|
typ := p.tryIdentOrType(isParam) // don't use parseType so we can provide better error message
|
||||||
if typ == nil {
|
if typ == nil {
|
||||||
p.error(pos, "'...' parameter is missing type")
|
p.error(pos, "'...' parameter is missing type")
|
||||||
typ = &ast.BadExpr{pos, p.pos}
|
typ = &ast.BadExpr{From: pos, To: p.pos}
|
||||||
}
|
}
|
||||||
return &ast.Ellipsis{pos, typ}
|
return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
|
||||||
}
|
}
|
||||||
return p.tryIdentOrType(false)
|
return p.tryIdentOrType(false)
|
||||||
}
|
}
|
||||||
|
|
@ -620,7 +627,7 @@ func (p *parser) parseVarType(isParam bool) ast.Expr {
|
||||||
pos := p.pos
|
pos := p.pos
|
||||||
p.errorExpected(pos, "type")
|
p.errorExpected(pos, "type")
|
||||||
p.next() // make progress
|
p.next() // make progress
|
||||||
typ = &ast.BadExpr{pos, p.pos}
|
typ = &ast.BadExpr{From: pos, To: p.pos}
|
||||||
}
|
}
|
||||||
return typ
|
return typ
|
||||||
}
|
}
|
||||||
|
|
@ -661,7 +668,7 @@ func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params [
|
||||||
if typ != nil {
|
if typ != nil {
|
||||||
// IdentifierList Type
|
// IdentifierList Type
|
||||||
idents := p.makeIdentList(list)
|
idents := p.makeIdentList(list)
|
||||||
field := &ast.Field{nil, idents, typ, nil, nil}
|
field := &ast.Field{Names: idents, Type: typ}
|
||||||
params = append(params, field)
|
params = append(params, field)
|
||||||
// Go spec: The scope of an identifier denoting a function
|
// Go spec: The scope of an identifier denoting a function
|
||||||
// parameter or result variable is the function body.
|
// parameter or result variable is the function body.
|
||||||
|
|
@ -673,7 +680,7 @@ func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params [
|
||||||
for p.tok != token.RPAREN && p.tok != token.EOF {
|
for p.tok != token.RPAREN && p.tok != token.EOF {
|
||||||
idents := p.parseIdentList()
|
idents := p.parseIdentList()
|
||||||
typ := p.parseVarType(ellipsisOk)
|
typ := p.parseVarType(ellipsisOk)
|
||||||
field := &ast.Field{nil, idents, typ, nil, nil}
|
field := &ast.Field{Names: idents, Type: typ}
|
||||||
params = append(params, field)
|
params = append(params, field)
|
||||||
// Go spec: The scope of an identifier denoting a function
|
// Go spec: The scope of an identifier denoting a function
|
||||||
// parameter or result variable is the function body.
|
// parameter or result variable is the function body.
|
||||||
|
|
@ -708,7 +715,7 @@ func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldLi
|
||||||
}
|
}
|
||||||
rparen := p.expect(token.RPAREN)
|
rparen := p.expect(token.RPAREN)
|
||||||
|
|
||||||
return &ast.FieldList{lparen, params, rparen}
|
return &ast.FieldList{Opening: lparen, List: params, Closing: rparen}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
|
func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
|
||||||
|
|
@ -750,7 +757,7 @@ func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
|
||||||
scope := ast.NewScope(p.topScope) // function scope
|
scope := ast.NewScope(p.topScope) // function scope
|
||||||
params, results := p.parseSignature(scope)
|
params, results := p.parseSignature(scope)
|
||||||
|
|
||||||
return &ast.FuncType{pos, params, results}, scope
|
return &ast.FuncType{Func: pos, Params: params, Results: results}, scope
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
|
func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
|
||||||
|
|
@ -767,7 +774,7 @@ func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
|
||||||
idents = []*ast.Ident{ident}
|
idents = []*ast.Ident{ident}
|
||||||
scope := ast.NewScope(nil) // method scope
|
scope := ast.NewScope(nil) // method scope
|
||||||
params, results := p.parseSignature(scope)
|
params, results := p.parseSignature(scope)
|
||||||
typ = &ast.FuncType{token.NoPos, params, results}
|
typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
|
||||||
} else {
|
} else {
|
||||||
// embedded interface
|
// embedded interface
|
||||||
typ = x
|
typ = x
|
||||||
|
|
@ -775,7 +782,7 @@ func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
|
||||||
}
|
}
|
||||||
p.expectSemi() // call before accessing p.linecomment
|
p.expectSemi() // call before accessing p.linecomment
|
||||||
|
|
||||||
spec := &ast.Field{doc, idents, typ, nil, p.lineComment}
|
spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment}
|
||||||
p.declare(spec, nil, scope, ast.Fun, idents...)
|
p.declare(spec, nil, scope, ast.Fun, idents...)
|
||||||
|
|
||||||
return spec
|
return spec
|
||||||
|
|
@ -795,7 +802,14 @@ func (p *parser) parseInterfaceType() *ast.InterfaceType {
|
||||||
}
|
}
|
||||||
rbrace := p.expect(token.RBRACE)
|
rbrace := p.expect(token.RBRACE)
|
||||||
|
|
||||||
return &ast.InterfaceType{pos, &ast.FieldList{lbrace, list, rbrace}, false}
|
return &ast.InterfaceType{
|
||||||
|
Interface: pos,
|
||||||
|
Methods: &ast.FieldList{
|
||||||
|
Opening: lbrace,
|
||||||
|
List: list,
|
||||||
|
Closing: rbrace,
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseMapType() *ast.MapType {
|
func (p *parser) parseMapType() *ast.MapType {
|
||||||
|
|
@ -809,7 +823,7 @@ func (p *parser) parseMapType() *ast.MapType {
|
||||||
p.expect(token.RBRACK)
|
p.expect(token.RBRACK)
|
||||||
value := p.parseType()
|
value := p.parseType()
|
||||||
|
|
||||||
return &ast.MapType{pos, key, value}
|
return &ast.MapType{Map: pos, Key: key, Value: value}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseChanType() *ast.ChanType {
|
func (p *parser) parseChanType() *ast.ChanType {
|
||||||
|
|
@ -832,7 +846,7 @@ func (p *parser) parseChanType() *ast.ChanType {
|
||||||
}
|
}
|
||||||
value := p.parseType()
|
value := p.parseType()
|
||||||
|
|
||||||
return &ast.ChanType{pos, dir, value}
|
return &ast.ChanType{Begin: pos, Dir: dir, Value: value}
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the result is an identifier, it is not resolved.
|
// If the result is an identifier, it is not resolved.
|
||||||
|
|
@ -860,7 +874,7 @@ func (p *parser) tryIdentOrType(ellipsisOk bool) ast.Expr {
|
||||||
p.next()
|
p.next()
|
||||||
typ := p.parseType()
|
typ := p.parseType()
|
||||||
rparen := p.expect(token.RPAREN)
|
rparen := p.expect(token.RPAREN)
|
||||||
return &ast.ParenExpr{lparen, typ, rparen}
|
return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
|
||||||
}
|
}
|
||||||
|
|
||||||
// no type found
|
// no type found
|
||||||
|
|
@ -903,7 +917,7 @@ func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
|
||||||
p.closeScope()
|
p.closeScope()
|
||||||
rbrace := p.expect(token.RBRACE)
|
rbrace := p.expect(token.RBRACE)
|
||||||
|
|
||||||
return &ast.BlockStmt{lbrace, list, rbrace}
|
return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseBlockStmt() *ast.BlockStmt {
|
func (p *parser) parseBlockStmt() *ast.BlockStmt {
|
||||||
|
|
@ -917,7 +931,7 @@ func (p *parser) parseBlockStmt() *ast.BlockStmt {
|
||||||
p.closeScope()
|
p.closeScope()
|
||||||
rbrace := p.expect(token.RBRACE)
|
rbrace := p.expect(token.RBRACE)
|
||||||
|
|
||||||
return &ast.BlockStmt{lbrace, list, rbrace}
|
return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ----------------------------------------------------------------------------
|
// ----------------------------------------------------------------------------
|
||||||
|
|
@ -938,7 +952,7 @@ func (p *parser) parseFuncTypeOrLit() ast.Expr {
|
||||||
body := p.parseBody(scope)
|
body := p.parseBody(scope)
|
||||||
p.exprLev--
|
p.exprLev--
|
||||||
|
|
||||||
return &ast.FuncLit{typ, body}
|
return &ast.FuncLit{Type: typ, Body: body}
|
||||||
}
|
}
|
||||||
|
|
||||||
// parseOperand may return an expression or a raw type (incl. array
|
// parseOperand may return an expression or a raw type (incl. array
|
||||||
|
|
@ -959,7 +973,7 @@ func (p *parser) parseOperand(lhs bool) ast.Expr {
|
||||||
return x
|
return x
|
||||||
|
|
||||||
case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
|
case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
|
||||||
x := &ast.BasicLit{p.pos, p.tok, p.lit}
|
x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
|
||||||
p.next()
|
p.next()
|
||||||
return x
|
return x
|
||||||
|
|
||||||
|
|
@ -970,7 +984,7 @@ func (p *parser) parseOperand(lhs bool) ast.Expr {
|
||||||
x := p.parseRhsOrType() // types may be parenthesized: (some type)
|
x := p.parseRhsOrType() // types may be parenthesized: (some type)
|
||||||
p.exprLev--
|
p.exprLev--
|
||||||
rparen := p.expect(token.RPAREN)
|
rparen := p.expect(token.RPAREN)
|
||||||
return &ast.ParenExpr{lparen, x, rparen}
|
return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
|
||||||
|
|
||||||
case token.FUNC:
|
case token.FUNC:
|
||||||
return p.parseFuncTypeOrLit()
|
return p.parseFuncTypeOrLit()
|
||||||
|
|
@ -987,7 +1001,7 @@ func (p *parser) parseOperand(lhs bool) ast.Expr {
|
||||||
pos := p.pos
|
pos := p.pos
|
||||||
p.errorExpected(pos, "operand")
|
p.errorExpected(pos, "operand")
|
||||||
p.next() // make progress
|
p.next() // make progress
|
||||||
return &ast.BadExpr{pos, p.pos}
|
return &ast.BadExpr{From: pos, To: p.pos}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseSelector(x ast.Expr) ast.Expr {
|
func (p *parser) parseSelector(x ast.Expr) ast.Expr {
|
||||||
|
|
@ -997,7 +1011,7 @@ func (p *parser) parseSelector(x ast.Expr) ast.Expr {
|
||||||
|
|
||||||
sel := p.parseIdent()
|
sel := p.parseIdent()
|
||||||
|
|
||||||
return &ast.SelectorExpr{x, sel}
|
return &ast.SelectorExpr{X: x, Sel: sel}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
|
func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
|
||||||
|
|
@ -1015,7 +1029,7 @@ func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
|
||||||
}
|
}
|
||||||
p.expect(token.RPAREN)
|
p.expect(token.RPAREN)
|
||||||
|
|
||||||
return &ast.TypeAssertExpr{x, typ}
|
return &ast.TypeAssertExpr{X: x, Type: typ}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
|
func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
|
||||||
|
|
@ -1041,9 +1055,9 @@ func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
|
||||||
rbrack := p.expect(token.RBRACK)
|
rbrack := p.expect(token.RBRACK)
|
||||||
|
|
||||||
if isSlice {
|
if isSlice {
|
||||||
return &ast.SliceExpr{x, lbrack, low, high, rbrack}
|
return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: low, High: high, Rbrack: rbrack}
|
||||||
}
|
}
|
||||||
return &ast.IndexExpr{x, lbrack, low, rbrack}
|
return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: low, Rbrack: rbrack}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
|
func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
|
||||||
|
|
@ -1069,7 +1083,7 @@ func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
|
||||||
p.exprLev--
|
p.exprLev--
|
||||||
rparen := p.expectClosing(token.RPAREN, "argument list")
|
rparen := p.expectClosing(token.RPAREN, "argument list")
|
||||||
|
|
||||||
return &ast.CallExpr{fun, lparen, list, ellipsis, rparen}
|
return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseElement(keyOk bool) ast.Expr {
|
func (p *parser) parseElement(keyOk bool) ast.Expr {
|
||||||
|
|
@ -1086,7 +1100,7 @@ func (p *parser) parseElement(keyOk bool) ast.Expr {
|
||||||
if p.tok == token.COLON {
|
if p.tok == token.COLON {
|
||||||
colon := p.pos
|
colon := p.pos
|
||||||
p.next()
|
p.next()
|
||||||
return &ast.KeyValueExpr{x, colon, p.parseElement(false)}
|
return &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseElement(false)}
|
||||||
}
|
}
|
||||||
p.resolve(x) // not a map key
|
p.resolve(x) // not a map key
|
||||||
}
|
}
|
||||||
|
|
@ -1123,7 +1137,7 @@ func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
|
||||||
}
|
}
|
||||||
p.exprLev--
|
p.exprLev--
|
||||||
rbrace := p.expectClosing(token.RBRACE, "composite literal")
|
rbrace := p.expectClosing(token.RBRACE, "composite literal")
|
||||||
return &ast.CompositeLit{typ, lbrace, elts, rbrace}
|
return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
|
||||||
}
|
}
|
||||||
|
|
||||||
// checkExpr checks that x is an expression (and not a type).
|
// checkExpr checks that x is an expression (and not a type).
|
||||||
|
|
@ -1152,7 +1166,7 @@ func (p *parser) checkExpr(x ast.Expr) ast.Expr {
|
||||||
default:
|
default:
|
||||||
// all other nodes are not proper expressions
|
// all other nodes are not proper expressions
|
||||||
p.errorExpected(x.Pos(), "expression")
|
p.errorExpected(x.Pos(), "expression")
|
||||||
x = &ast.BadExpr{x.Pos(), x.End()}
|
x = &ast.BadExpr{From: x.Pos(), To: x.End()}
|
||||||
}
|
}
|
||||||
return x
|
return x
|
||||||
}
|
}
|
||||||
|
|
@ -1215,7 +1229,7 @@ func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
|
||||||
case *ast.ArrayType:
|
case *ast.ArrayType:
|
||||||
if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
|
if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
|
||||||
p.error(len.Pos(), "expected array length, found '...'")
|
p.error(len.Pos(), "expected array length, found '...'")
|
||||||
x = &ast.BadExpr{x.Pos(), x.End()}
|
x = &ast.BadExpr{From: x.Pos(), To: x.End()}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1247,7 +1261,7 @@ L:
|
||||||
pos := p.pos
|
pos := p.pos
|
||||||
p.next() // make progress
|
p.next() // make progress
|
||||||
p.errorExpected(pos, "selector or type assertion")
|
p.errorExpected(pos, "selector or type assertion")
|
||||||
x = &ast.BadExpr{pos, p.pos}
|
x = &ast.BadExpr{From: pos, To: p.pos}
|
||||||
}
|
}
|
||||||
case token.LBRACK:
|
case token.LBRACK:
|
||||||
if lhs {
|
if lhs {
|
||||||
|
|
@ -1288,7 +1302,7 @@ func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
|
||||||
pos, op := p.pos, p.tok
|
pos, op := p.pos, p.tok
|
||||||
p.next()
|
p.next()
|
||||||
x := p.parseUnaryExpr(false)
|
x := p.parseUnaryExpr(false)
|
||||||
return &ast.UnaryExpr{pos, op, p.checkExpr(x)}
|
return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}
|
||||||
|
|
||||||
case token.ARROW:
|
case token.ARROW:
|
||||||
// channel type or receive expression
|
// channel type or receive expression
|
||||||
|
|
@ -1297,18 +1311,18 @@ func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
|
||||||
if p.tok == token.CHAN {
|
if p.tok == token.CHAN {
|
||||||
p.next()
|
p.next()
|
||||||
value := p.parseType()
|
value := p.parseType()
|
||||||
return &ast.ChanType{pos, ast.RECV, value}
|
return &ast.ChanType{Begin: pos, Dir: ast.RECV, Value: value}
|
||||||
}
|
}
|
||||||
|
|
||||||
x := p.parseUnaryExpr(false)
|
x := p.parseUnaryExpr(false)
|
||||||
return &ast.UnaryExpr{pos, token.ARROW, p.checkExpr(x)}
|
return &ast.UnaryExpr{OpPos: pos, Op: token.ARROW, X: p.checkExpr(x)}
|
||||||
|
|
||||||
case token.MUL:
|
case token.MUL:
|
||||||
// pointer type or unary "*" expression
|
// pointer type or unary "*" expression
|
||||||
pos := p.pos
|
pos := p.pos
|
||||||
p.next()
|
p.next()
|
||||||
x := p.parseUnaryExpr(false)
|
x := p.parseUnaryExpr(false)
|
||||||
return &ast.StarExpr{pos, p.checkExprOrType(x)}
|
return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.parsePrimaryExpr(lhs)
|
return p.parsePrimaryExpr(lhs)
|
||||||
|
|
@ -1330,7 +1344,7 @@ func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
|
||||||
lhs = false
|
lhs = false
|
||||||
}
|
}
|
||||||
y := p.parseBinaryExpr(false, prec+1)
|
y := p.parseBinaryExpr(false, prec+1)
|
||||||
x = &ast.BinaryExpr{p.checkExpr(x), pos, op, p.checkExpr(y)}
|
x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1392,12 +1406,12 @@ func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
|
||||||
if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
|
if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
|
||||||
pos := p.pos
|
pos := p.pos
|
||||||
p.next()
|
p.next()
|
||||||
y = []ast.Expr{&ast.UnaryExpr{pos, token.RANGE, p.parseRhs()}}
|
y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
|
||||||
isRange = true
|
isRange = true
|
||||||
} else {
|
} else {
|
||||||
y = p.parseRhsList()
|
y = p.parseRhsList()
|
||||||
}
|
}
|
||||||
as := &ast.AssignStmt{x, pos, tok, y}
|
as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
|
||||||
if tok == token.DEFINE {
|
if tok == token.DEFINE {
|
||||||
p.shortVarDecl(as, x)
|
p.shortVarDecl(as, x)
|
||||||
}
|
}
|
||||||
|
|
@ -1418,7 +1432,7 @@ func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
|
||||||
// Go spec: The scope of a label is the body of the function
|
// Go spec: The scope of a label is the body of the function
|
||||||
// in which it is declared and excludes the body of any nested
|
// in which it is declared and excludes the body of any nested
|
||||||
// function.
|
// function.
|
||||||
stmt := &ast.LabeledStmt{label, colon, p.parseStmt()}
|
stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
|
||||||
p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
|
p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
|
||||||
return stmt, false
|
return stmt, false
|
||||||
}
|
}
|
||||||
|
|
@ -1429,24 +1443,24 @@ func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
|
||||||
// before the ':' that caused the problem. Thus, use the (latest) colon
|
// before the ':' that caused the problem. Thus, use the (latest) colon
|
||||||
// position for error reporting.
|
// position for error reporting.
|
||||||
p.error(colon, "illegal label declaration")
|
p.error(colon, "illegal label declaration")
|
||||||
return &ast.BadStmt{x[0].Pos(), colon + 1}, false
|
return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
|
||||||
|
|
||||||
case token.ARROW:
|
case token.ARROW:
|
||||||
// send statement
|
// send statement
|
||||||
arrow := p.pos
|
arrow := p.pos
|
||||||
p.next()
|
p.next()
|
||||||
y := p.parseRhs()
|
y := p.parseRhs()
|
||||||
return &ast.SendStmt{x[0], arrow, y}, false
|
return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
|
||||||
|
|
||||||
case token.INC, token.DEC:
|
case token.INC, token.DEC:
|
||||||
// increment or decrement
|
// increment or decrement
|
||||||
s := &ast.IncDecStmt{x[0], p.pos, p.tok}
|
s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
|
||||||
p.next()
|
p.next()
|
||||||
return s, false
|
return s, false
|
||||||
}
|
}
|
||||||
|
|
||||||
// expression
|
// expression
|
||||||
return &ast.ExprStmt{x[0]}, false
|
return &ast.ExprStmt{X: x[0]}, false
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseCallExpr() *ast.CallExpr {
|
func (p *parser) parseCallExpr() *ast.CallExpr {
|
||||||
|
|
@ -1467,10 +1481,10 @@ func (p *parser) parseGoStmt() ast.Stmt {
|
||||||
call := p.parseCallExpr()
|
call := p.parseCallExpr()
|
||||||
p.expectSemi()
|
p.expectSemi()
|
||||||
if call == nil {
|
if call == nil {
|
||||||
return &ast.BadStmt{pos, pos + 2} // len("go")
|
return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ast.GoStmt{pos, call}
|
return &ast.GoStmt{Go: pos, Call: call}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseDeferStmt() ast.Stmt {
|
func (p *parser) parseDeferStmt() ast.Stmt {
|
||||||
|
|
@ -1482,10 +1496,10 @@ func (p *parser) parseDeferStmt() ast.Stmt {
|
||||||
call := p.parseCallExpr()
|
call := p.parseCallExpr()
|
||||||
p.expectSemi()
|
p.expectSemi()
|
||||||
if call == nil {
|
if call == nil {
|
||||||
return &ast.BadStmt{pos, pos + 5} // len("defer")
|
return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ast.DeferStmt{pos, call}
|
return &ast.DeferStmt{Defer: pos, Call: call}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseReturnStmt() *ast.ReturnStmt {
|
func (p *parser) parseReturnStmt() *ast.ReturnStmt {
|
||||||
|
|
@ -1501,7 +1515,7 @@ func (p *parser) parseReturnStmt() *ast.ReturnStmt {
|
||||||
}
|
}
|
||||||
p.expectSemi()
|
p.expectSemi()
|
||||||
|
|
||||||
return &ast.ReturnStmt{pos, x}
|
return &ast.ReturnStmt{Return: pos, Results: x}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
|
func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
|
||||||
|
|
@ -1519,7 +1533,7 @@ func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
|
||||||
}
|
}
|
||||||
p.expectSemi()
|
p.expectSemi()
|
||||||
|
|
||||||
return &ast.BranchStmt{pos, tok, label}
|
return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) makeExpr(s ast.Stmt) ast.Expr {
|
func (p *parser) makeExpr(s ast.Stmt) ast.Expr {
|
||||||
|
|
@ -1530,7 +1544,7 @@ func (p *parser) makeExpr(s ast.Stmt) ast.Expr {
|
||||||
return p.checkExpr(es.X)
|
return p.checkExpr(es.X)
|
||||||
}
|
}
|
||||||
p.error(s.Pos(), "expected condition, found simple statement")
|
p.error(s.Pos(), "expected condition, found simple statement")
|
||||||
return &ast.BadExpr{s.Pos(), s.End()}
|
return &ast.BadExpr{From: s.Pos(), To: s.End()}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseIfStmt() *ast.IfStmt {
|
func (p *parser) parseIfStmt() *ast.IfStmt {
|
||||||
|
|
@ -1572,7 +1586,7 @@ func (p *parser) parseIfStmt() *ast.IfStmt {
|
||||||
p.expectSemi()
|
p.expectSemi()
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ast.IfStmt{pos, s, x, body, else_}
|
return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseTypeList() (list []ast.Expr) {
|
func (p *parser) parseTypeList() (list []ast.Expr) {
|
||||||
|
|
@ -1612,7 +1626,7 @@ func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
|
||||||
body := p.parseStmtList()
|
body := p.parseStmtList()
|
||||||
p.closeScope()
|
p.closeScope()
|
||||||
|
|
||||||
return &ast.CaseClause{pos, list, colon, body}
|
return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
|
||||||
}
|
}
|
||||||
|
|
||||||
func isTypeSwitchAssert(x ast.Expr) bool {
|
func isTypeSwitchAssert(x ast.Expr) bool {
|
||||||
|
|
@ -1681,13 +1695,13 @@ func (p *parser) parseSwitchStmt() ast.Stmt {
|
||||||
}
|
}
|
||||||
rbrace := p.expect(token.RBRACE)
|
rbrace := p.expect(token.RBRACE)
|
||||||
p.expectSemi()
|
p.expectSemi()
|
||||||
body := &ast.BlockStmt{lbrace, list, rbrace}
|
body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
|
||||||
|
|
||||||
if typeSwitch {
|
if typeSwitch {
|
||||||
return &ast.TypeSwitchStmt{pos, s1, s2, body}
|
return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ast.SwitchStmt{pos, s1, p.makeExpr(s2), body}
|
return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2), Body: body}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseCommClause() *ast.CommClause {
|
func (p *parser) parseCommClause() *ast.CommClause {
|
||||||
|
|
@ -1710,7 +1724,7 @@ func (p *parser) parseCommClause() *ast.CommClause {
|
||||||
arrow := p.pos
|
arrow := p.pos
|
||||||
p.next()
|
p.next()
|
||||||
rhs := p.parseRhs()
|
rhs := p.parseRhs()
|
||||||
comm = &ast.SendStmt{lhs[0], arrow, rhs}
|
comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
|
||||||
} else {
|
} else {
|
||||||
// RecvStmt
|
// RecvStmt
|
||||||
if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
|
if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
|
||||||
|
|
@ -1723,7 +1737,7 @@ func (p *parser) parseCommClause() *ast.CommClause {
|
||||||
pos := p.pos
|
pos := p.pos
|
||||||
p.next()
|
p.next()
|
||||||
rhs := p.parseRhs()
|
rhs := p.parseRhs()
|
||||||
as := &ast.AssignStmt{lhs, pos, tok, []ast.Expr{rhs}}
|
as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
|
||||||
if tok == token.DEFINE {
|
if tok == token.DEFINE {
|
||||||
p.shortVarDecl(as, lhs)
|
p.shortVarDecl(as, lhs)
|
||||||
}
|
}
|
||||||
|
|
@ -1734,7 +1748,7 @@ func (p *parser) parseCommClause() *ast.CommClause {
|
||||||
p.errorExpected(lhs[0].Pos(), "1 expression")
|
p.errorExpected(lhs[0].Pos(), "1 expression")
|
||||||
// continue with first expression
|
// continue with first expression
|
||||||
}
|
}
|
||||||
comm = &ast.ExprStmt{lhs[0]}
|
comm = &ast.ExprStmt{X: lhs[0]}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
|
@@ -1745,7 +1759,7 @@ func (p *parser) parseCommClause() *ast.CommClause {
|
||||||
body := p.parseStmtList()
|
body := p.parseStmtList()
|
||||||
p.closeScope()
|
p.closeScope()
|
||||||
|
|
||||||
return &ast.CommClause{pos, comm, colon, body}
|
return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseSelectStmt() *ast.SelectStmt {
|
func (p *parser) parseSelectStmt() *ast.SelectStmt {
|
||||||
|
|
@@ -1761,9 +1775,9 @@ func (p *parser) parseSelectStmt() *ast.SelectStmt {
|
||||||
}
|
}
|
||||||
rbrace := p.expect(token.RBRACE)
|
rbrace := p.expect(token.RBRACE)
|
||||||
p.expectSemi()
|
p.expectSemi()
|
||||||
body := &ast.BlockStmt{lbrace, list, rbrace}
|
body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
|
||||||
|
|
||||||
return &ast.SelectStmt{pos, body}
|
return &ast.SelectStmt{Select: pos, Body: body}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseForStmt() ast.Stmt {
|
func (p *parser) parseForStmt() ast.Stmt {
|
||||||
|
|
@@ -1812,16 +1826,30 @@ func (p *parser) parseForStmt() ast.Stmt {
|
||||||
key = as.Lhs[0]
|
key = as.Lhs[0]
|
||||||
default:
|
default:
|
||||||
p.errorExpected(as.Lhs[0].Pos(), "1 or 2 expressions")
|
p.errorExpected(as.Lhs[0].Pos(), "1 or 2 expressions")
|
||||||
return &ast.BadStmt{pos, body.End()}
|
return &ast.BadStmt{From: pos, To: body.End()}
|
||||||
}
|
}
|
||||||
// parseSimpleStmt returned a right-hand side that
|
// parseSimpleStmt returned a right-hand side that
|
||||||
// is a single unary expression of the form "range x"
|
// is a single unary expression of the form "range x"
|
||||||
x := as.Rhs[0].(*ast.UnaryExpr).X
|
x := as.Rhs[0].(*ast.UnaryExpr).X
|
||||||
return &ast.RangeStmt{pos, key, value, as.TokPos, as.Tok, x, body}
|
return &ast.RangeStmt{
|
||||||
|
For: pos,
|
||||||
|
Key: key,
|
||||||
|
Value: value,
|
||||||
|
TokPos: as.TokPos,
|
||||||
|
Tok: as.Tok,
|
||||||
|
X: x,
|
||||||
|
Body: body,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// regular for statement
|
// regular for statement
|
||||||
return &ast.ForStmt{pos, s1, p.makeExpr(s2), s3, body}
|
return &ast.ForStmt{
|
||||||
|
For: pos,
|
||||||
|
Init: s1,
|
||||||
|
Cond: p.makeExpr(s2),
|
||||||
|
Post: s3,
|
||||||
|
Body: body,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseStmt() (s ast.Stmt) {
|
func (p *parser) parseStmt() (s ast.Stmt) {
|
||||||
|
|
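
An illustrative sketch, not part of this commit: the parser hunks above switch the ast composite literals (BranchStmt, IfStmt, RangeStmt, ForStmt and so on) to keyed fields. Building nodes the same way in client code keeps the literals compiling if fields are ever added to the node types; the expression below is invented for the example.

package main

import (
	"go/ast"
	"go/printer"
	"go/token"
	"os"
)

func main() {
	// 1 + 2 as a go/ast node, written with keyed fields only.
	expr := &ast.BinaryExpr{
		X:  &ast.BasicLit{Kind: token.INT, Value: "1"},
		Op: token.ADD,
		Y:  &ast.BasicLit{Kind: token.INT, Value: "2"},
	}
	fset := token.NewFileSet()
	printer.Fprint(os.Stdout, fset, expr) // prints: 1 + 2
}
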
@@ -1831,12 +1859,12 @@ func (p *parser) parseStmt() (s ast.Stmt) {
|
||||||
|
|
||||||
switch p.tok {
|
switch p.tok {
|
||||||
case token.CONST, token.TYPE, token.VAR:
|
case token.CONST, token.TYPE, token.VAR:
|
||||||
s = &ast.DeclStmt{p.parseDecl()}
|
s = &ast.DeclStmt{Decl: p.parseDecl()}
|
||||||
case
|
case
|
||||||
// tokens that may start a top-level expression
|
// tokens that may start an expression
|
||||||
token.IDENT, token.INT, token.FLOAT, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operand
|
token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
|
||||||
token.LBRACK, token.STRUCT, // composite type
|
token.LBRACK, token.STRUCT, // composite types
|
||||||
token.MUL, token.AND, token.ARROW, token.ADD, token.SUB, token.XOR: // unary operators
|
token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
|
||||||
s, _ = p.parseSimpleStmt(labelOk)
|
s, _ = p.parseSimpleStmt(labelOk)
|
||||||
// because of the required look-ahead, labeled statements are
|
// because of the required look-ahead, labeled statements are
|
||||||
// parsed by parseSimpleStmt - don't expect a semicolon after
|
// parsed by parseSimpleStmt - don't expect a semicolon after
|
||||||
|
|
@@ -1864,17 +1892,17 @@ func (p *parser) parseStmt() (s ast.Stmt) {
|
||||||
case token.FOR:
|
case token.FOR:
|
||||||
s = p.parseForStmt()
|
s = p.parseForStmt()
|
||||||
case token.SEMICOLON:
|
case token.SEMICOLON:
|
||||||
s = &ast.EmptyStmt{p.pos}
|
s = &ast.EmptyStmt{Semicolon: p.pos}
|
||||||
p.next()
|
p.next()
|
||||||
case token.RBRACE:
|
case token.RBRACE:
|
||||||
// a semicolon may be omitted before a closing "}"
|
// a semicolon may be omitted before a closing "}"
|
||||||
s = &ast.EmptyStmt{p.pos}
|
s = &ast.EmptyStmt{Semicolon: p.pos}
|
||||||
default:
|
default:
|
||||||
// no statement found
|
// no statement found
|
||||||
pos := p.pos
|
pos := p.pos
|
||||||
p.errorExpected(pos, "statement")
|
p.errorExpected(pos, "statement")
|
||||||
p.next() // make progress
|
p.next() // make progress
|
||||||
s = &ast.BadStmt{pos, p.pos}
|
s = &ast.BadStmt{From: pos, To: p.pos}
|
||||||
}
|
}
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
@@ -1893,7 +1921,7 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
|
||||||
var ident *ast.Ident
|
var ident *ast.Ident
|
||||||
switch p.tok {
|
switch p.tok {
|
||||||
case token.PERIOD:
|
case token.PERIOD:
|
||||||
ident = &ast.Ident{p.pos, ".", nil}
|
ident = &ast.Ident{NamePos: p.pos, Name: "."}
|
||||||
p.next()
|
p.next()
|
||||||
case token.IDENT:
|
case token.IDENT:
|
||||||
ident = p.parseIdent()
|
ident = p.parseIdent()
|
||||||
|
|
@@ -1901,7 +1929,7 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
|
||||||
|
|
||||||
var path *ast.BasicLit
|
var path *ast.BasicLit
|
||||||
if p.tok == token.STRING {
|
if p.tok == token.STRING {
|
||||||
path = &ast.BasicLit{p.pos, p.tok, p.lit}
|
path = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
|
||||||
p.next()
|
p.next()
|
||||||
} else {
|
} else {
|
||||||
p.expect(token.STRING) // use expect() error handling
|
p.expect(token.STRING) // use expect() error handling
|
||||||
|
|
@@ -1909,7 +1937,12 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
|
||||||
p.expectSemi() // call before accessing p.linecomment
|
p.expectSemi() // call before accessing p.linecomment
|
||||||
|
|
||||||
// collect imports
|
// collect imports
|
||||||
spec := &ast.ImportSpec{doc, ident, path, p.lineComment, token.NoPos}
|
spec := &ast.ImportSpec{
|
||||||
|
Doc: doc,
|
||||||
|
Name: ident,
|
||||||
|
Path: path,
|
||||||
|
Comment: p.lineComment,
|
||||||
|
}
|
||||||
p.imports = append(p.imports, spec)
|
p.imports = append(p.imports, spec)
|
||||||
|
|
||||||
return spec
|
return spec
|
||||||
|
|
@@ -1933,7 +1966,13 @@ func parseConstSpec(p *parser, doc *ast.CommentGroup, iota int) ast.Spec {
|
||||||
// a function begins at the end of the ConstSpec or VarSpec and ends at
|
// a function begins at the end of the ConstSpec or VarSpec and ends at
|
||||||
// the end of the innermost containing block.
|
// the end of the innermost containing block.
|
||||||
// (Global identifiers are resolved in a separate phase after parsing.)
|
// (Global identifiers are resolved in a separate phase after parsing.)
|
||||||
spec := &ast.ValueSpec{doc, idents, typ, values, p.lineComment}
|
spec := &ast.ValueSpec{
|
||||||
|
Doc: doc,
|
||||||
|
Names: idents,
|
||||||
|
Type: typ,
|
||||||
|
Values: values,
|
||||||
|
Comment: p.lineComment,
|
||||||
|
}
|
||||||
p.declare(spec, iota, p.topScope, ast.Con, idents...)
|
p.declare(spec, iota, p.topScope, ast.Con, idents...)
|
||||||
|
|
||||||
return spec
|
return spec
|
||||||
|
|
@@ -1950,7 +1989,7 @@ func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
|
||||||
// at the identifier in the TypeSpec and ends at the end of the innermost
|
// at the identifier in the TypeSpec and ends at the end of the innermost
|
||||||
// containing block.
|
// containing block.
|
||||||
// (Global identifiers are resolved in a separate phase after parsing.)
|
// (Global identifiers are resolved in a separate phase after parsing.)
|
||||||
spec := &ast.TypeSpec{doc, ident, nil, nil}
|
spec := &ast.TypeSpec{Doc: doc, Name: ident}
|
||||||
p.declare(spec, nil, p.topScope, ast.Typ, ident)
|
p.declare(spec, nil, p.topScope, ast.Typ, ident)
|
||||||
|
|
||||||
spec.Type = p.parseType()
|
spec.Type = p.parseType()
|
||||||
|
|
@@ -1978,7 +2017,13 @@ func parseVarSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
|
||||||
// a function begins at the end of the ConstSpec or VarSpec and ends at
|
// a function begins at the end of the ConstSpec or VarSpec and ends at
|
||||||
// the end of the innermost containing block.
|
// the end of the innermost containing block.
|
||||||
// (Global identifiers are resolved in a separate phase after parsing.)
|
// (Global identifiers are resolved in a separate phase after parsing.)
|
||||||
spec := &ast.ValueSpec{doc, idents, typ, values, p.lineComment}
|
spec := &ast.ValueSpec{
|
||||||
|
Doc: doc,
|
||||||
|
Names: idents,
|
||||||
|
Type: typ,
|
||||||
|
Values: values,
|
||||||
|
Comment: p.lineComment,
|
||||||
|
}
|
||||||
p.declare(spec, nil, p.topScope, ast.Var, idents...)
|
p.declare(spec, nil, p.topScope, ast.Var, idents...)
|
||||||
|
|
||||||
return spec
|
return spec
|
||||||
|
|
@@ -2005,7 +2050,14 @@ func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.Gen
|
||||||
list = append(list, f(p, nil, 0))
|
list = append(list, f(p, nil, 0))
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ast.GenDecl{doc, pos, keyword, lparen, list, rparen}
|
return &ast.GenDecl{
|
||||||
|
Doc: doc,
|
||||||
|
TokPos: pos,
|
||||||
|
Tok: keyword,
|
||||||
|
Lparen: lparen,
|
||||||
|
Specs: list,
|
||||||
|
Rparen: rparen,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
|
func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
|
||||||
|
|
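
A small consumer sketch, again not from the commit, showing which fields the keyed ast.GenDecl and ast.ValueSpec literals above fill in; the constant block being parsed here is made up.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

const src = `package p

const (
	A = iota // first
	B        // second
)
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	for _, decl := range f.Decls {
		gen, ok := decl.(*ast.GenDecl)
		if !ok || gen.Tok != token.CONST {
			continue
		}
		for _, spec := range gen.Specs {
			vs := spec.(*ast.ValueSpec)
			// Names and Comment are among the fields parseConstSpec names explicitly above.
			fmt.Printf("%s: %q\n", vs.Names[0].Name, vs.Comment.Text())
		}
	}
}
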
@@ -2018,7 +2070,7 @@ func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
|
||||||
// must have exactly one receiver
|
// must have exactly one receiver
|
||||||
if par.NumFields() != 1 {
|
if par.NumFields() != 1 {
|
||||||
p.errorExpected(par.Opening, "exactly one receiver")
|
p.errorExpected(par.Opening, "exactly one receiver")
|
||||||
par.List = []*ast.Field{{Type: &ast.BadExpr{par.Opening, par.Closing + 1}}}
|
par.List = []*ast.Field{{Type: &ast.BadExpr{From: par.Opening, To: par.Closing + 1}}}
|
||||||
return par
|
return par
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -2027,7 +2079,7 @@ func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList {
|
||||||
base := deref(recv.Type)
|
base := deref(recv.Type)
|
||||||
if _, isIdent := base.(*ast.Ident); !isIdent {
|
if _, isIdent := base.(*ast.Ident); !isIdent {
|
||||||
p.errorExpected(base.Pos(), "(unqualified) identifier")
|
p.errorExpected(base.Pos(), "(unqualified) identifier")
|
||||||
par.List = []*ast.Field{{Type: &ast.BadExpr{recv.Pos(), recv.End()}}}
|
par.List = []*ast.Field{{Type: &ast.BadExpr{From: recv.Pos(), To: recv.End()}}}
|
||||||
}
|
}
|
||||||
|
|
||||||
return par
|
return par
|
||||||
|
|
@@ -2057,7 +2109,17 @@ func (p *parser) parseFuncDecl() *ast.FuncDecl {
|
||||||
}
|
}
|
||||||
p.expectSemi()
|
p.expectSemi()
|
||||||
|
|
||||||
decl := &ast.FuncDecl{doc, recv, ident, &ast.FuncType{pos, params, results}, body}
|
decl := &ast.FuncDecl{
|
||||||
|
Doc: doc,
|
||||||
|
Recv: recv,
|
||||||
|
Name: ident,
|
||||||
|
Type: &ast.FuncType{
|
||||||
|
Func: pos,
|
||||||
|
Params: params,
|
||||||
|
Results: results,
|
||||||
|
},
|
||||||
|
Body: body,
|
||||||
|
}
|
||||||
if recv == nil {
|
if recv == nil {
|
||||||
// Go spec: The scope of an identifier denoting a constant, type,
|
// Go spec: The scope of an identifier denoting a constant, type,
|
||||||
// variable, or function (but not method) declared at top level
|
// variable, or function (but not method) declared at top level
|
||||||
|
|
@@ -2096,7 +2158,7 @@ func (p *parser) parseDecl() ast.Decl {
|
||||||
pos := p.pos
|
pos := p.pos
|
||||||
p.errorExpected(pos, "declaration")
|
p.errorExpected(pos, "declaration")
|
||||||
p.next() // make progress
|
p.next() // make progress
|
||||||
decl := &ast.BadDecl{pos, p.pos}
|
decl := &ast.BadDecl{From: pos, To: p.pos}
|
||||||
return decl
|
return decl
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -2155,5 +2217,14 @@ func (p *parser) parseFile() *ast.File {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ast.File{doc, pos, ident, decls, p.pkgScope, p.imports, p.unresolved[0:i], p.comments}
|
return &ast.File{
|
||||||
|
Doc: doc,
|
||||||
|
Package: pos,
|
||||||
|
Name: ident,
|
||||||
|
Decls: decls,
|
||||||
|
Scope: p.pkgScope,
|
||||||
|
Imports: p.imports,
|
||||||
|
Unresolved: p.unresolved[0:i],
|
||||||
|
Comments: p.comments,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
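
One more hedged illustration before the printer changes: the keyed ast.File literal at the end of parseFile above is what callers of go/parser receive. The snippet below only reads fields that the literal names (Name, Decls, Imports, Comments); the source string is invented.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

const src = `package demo

import "fmt" // the only import

func hello() { fmt.Println("hi") }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	fmt.Println("package:", f.Name.Name)
	for _, imp := range f.Imports {
		fmt.Println("import:", imp.Path.Value)
	}
	fmt.Println("decls:", len(f.Decls), "comment groups:", len(f.Comments))
}
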
@@ -87,7 +87,6 @@ const (
|
||||||
commaSep // elements are separated by commas
|
commaSep // elements are separated by commas
|
||||||
commaTerm // list is optionally terminated by a comma
|
commaTerm // list is optionally terminated by a comma
|
||||||
noIndent // no extra indentation in multi-line lists
|
noIndent // no extra indentation in multi-line lists
|
||||||
periodSep // elements are separated by periods
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Sets multiLine to true if the identifier list spans multiple lines.
|
// Sets multiLine to true if the identifier list spans multiple lines.
|
||||||
|
|
@@ -133,7 +132,9 @@ func (p *printer) exprList(prev0 token.Pos, list []ast.Expr, depth int, mode exp
|
||||||
for i, x := range list {
|
for i, x := range list {
|
||||||
if i > 0 {
|
if i > 0 {
|
||||||
if mode&commaSep != 0 {
|
if mode&commaSep != 0 {
|
||||||
p.print(token.COMMA)
|
// use position of expression following the comma as
|
||||||
|
// comma position for correct comment placement
|
||||||
|
p.print(x.Pos(), token.COMMA)
|
||||||
}
|
}
|
||||||
p.print(blank)
|
p.print(blank)
|
||||||
}
|
}
|
||||||
|
|
@@ -213,14 +214,18 @@ func (p *printer) exprList(prev0 token.Pos, list []ast.Expr, depth int, mode exp
|
||||||
}
|
}
|
||||||
|
|
||||||
if i > 0 {
|
if i > 0 {
|
||||||
switch {
|
needsLinebreak := prevLine < line && prevLine > 0 && line > 0
|
||||||
case mode&commaSep != 0:
|
if mode&commaSep != 0 {
|
||||||
|
// use position of expression following the comma as
|
||||||
|
// comma position for correct comment placement, but
|
||||||
|
// only if the expression is on the same line
|
||||||
|
if !needsLinebreak {
|
||||||
|
p.print(x.Pos())
|
||||||
|
}
|
||||||
p.print(token.COMMA)
|
p.print(token.COMMA)
|
||||||
case mode&periodSep != 0:
|
|
||||||
p.print(token.PERIOD)
|
|
||||||
}
|
}
|
||||||
needsBlank := mode&periodSep == 0 // period-separated list elements don't need a blank
|
needsBlank := true
|
||||||
if prevLine < line && prevLine > 0 && line > 0 {
|
if needsLinebreak {
|
||||||
// lines are broken using newlines so comments remain aligned
|
// lines are broken using newlines so comments remain aligned
|
||||||
// unless forceFF is set or there are multiple expressions on
|
// unless forceFF is set or there are multiple expressions on
|
||||||
// the same line in which case formfeed is used
|
// the same line in which case formfeed is used
|
||||||
|
|
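
The exprList change above prints the comma at the position of the expression that follows it (when that expression is on the same line), which is what keeps a comment between two arguments attached to the right one. A rough way to observe the effect through the public API; the exact output spacing is go/printer's business and is not asserted here.

package main

import (
	"go/parser"
	"go/printer"
	"go/token"
	"os"
)

const src = `package p

var _ = f(x /* comment */, y)
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	// Reprint the file; the comment should stay between x and the comma
	// rather than drifting behind y (compare the comments.golden hunks later
	// in this diff).
	printer.Fprint(os.Stdout, fset, f)
}
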
@@ -287,11 +292,18 @@ func (p *printer) parameters(fields *ast.FieldList, multiLine *bool) {
|
||||||
parLineBeg = parLineEnd
|
parLineBeg = parLineEnd
|
||||||
}
|
}
|
||||||
// separating "," if needed
|
// separating "," if needed
|
||||||
|
needsLinebreak := 0 < prevLine && prevLine < parLineBeg
|
||||||
if i > 0 {
|
if i > 0 {
|
||||||
|
// use position of parameter following the comma as
|
||||||
|
// comma position for correct comma placement, but
|
||||||
|
// only if the next parameter is on the same line
|
||||||
|
if !needsLinebreak {
|
||||||
|
p.print(par.Pos())
|
||||||
|
}
|
||||||
p.print(token.COMMA)
|
p.print(token.COMMA)
|
||||||
}
|
}
|
||||||
// separator if needed (linebreak or blank)
|
// separator if needed (linebreak or blank)
|
||||||
if 0 < prevLine && prevLine < parLineBeg && p.linebreak(parLineBeg, 0, ws, true) {
|
if needsLinebreak && p.linebreak(parLineBeg, 0, ws, true) {
|
||||||
// break line if the opening "(" or previous parameter ended on a different line
|
// break line if the opening "(" or previous parameter ended on a different line
|
||||||
ws = ignore
|
ws = ignore
|
||||||
*multiLine = true
|
*multiLine = true
|
||||||
|
|
@@ -316,7 +328,7 @@ func (p *printer) parameters(fields *ast.FieldList, multiLine *bool) {
|
||||||
// if the closing ")" is on a separate line from the last parameter,
|
// if the closing ")" is on a separate line from the last parameter,
|
||||||
// print an additional "," and line break
|
// print an additional "," and line break
|
||||||
if closing := p.lineFor(fields.Closing); 0 < prevLine && prevLine < closing {
|
if closing := p.lineFor(fields.Closing); 0 < prevLine && prevLine < closing {
|
||||||
p.print(",")
|
p.print(token.COMMA)
|
||||||
p.linebreak(closing, 0, ignore, true)
|
p.linebreak(closing, 0, ignore, true)
|
||||||
}
|
}
|
||||||
// unindent if we indented
|
// unindent if we indented
|
||||||
|
|
@@ -374,7 +386,7 @@ func (p *printer) isOneLineFieldList(list []*ast.Field) bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *printer) setLineComment(text string) {
|
func (p *printer) setLineComment(text string) {
|
||||||
p.setComment(&ast.CommentGroup{[]*ast.Comment{{token.NoPos, text}}})
|
p.setComment(&ast.CommentGroup{List: []*ast.Comment{{Slash: token.NoPos, Text: text}}})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *printer) fieldList(fields *ast.FieldList, isStruct, isIncomplete bool) {
|
func (p *printer) fieldList(fields *ast.FieldList, isStruct, isIncomplete bool) {
|
||||||
|
|
@@ -397,6 +409,7 @@ func (p *printer) fieldList(fields *ast.FieldList, isStruct, isIncomplete bool)
|
||||||
f := list[0]
|
f := list[0]
|
||||||
for i, x := range f.Names {
|
for i, x := range f.Names {
|
||||||
if i > 0 {
|
if i > 0 {
|
||||||
|
// no comments so no need for comma position
|
||||||
p.print(token.COMMA, blank)
|
p.print(token.COMMA, blank)
|
||||||
}
|
}
|
||||||
p.expr(x, ignoreMultiLine)
|
p.expr(x, ignoreMultiLine)
|
||||||
|
|
@@ -668,63 +681,6 @@ func isBinary(expr ast.Expr) bool {
|
||||||
return ok
|
return ok
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the expression contains one or more selector expressions, splits it into
|
|
||||||
// two expressions at the rightmost period. Writes entire expr to suffix when
|
|
||||||
// selector isn't found. Rewrites AST nodes for calls, index expressions and
|
|
||||||
// type assertions, all of which may be found in selector chains, to make them
|
|
||||||
// parts of the chain.
|
|
||||||
func splitSelector(expr ast.Expr) (body, suffix ast.Expr) {
|
|
||||||
switch x := expr.(type) {
|
|
||||||
case *ast.SelectorExpr:
|
|
||||||
body, suffix = x.X, x.Sel
|
|
||||||
return
|
|
||||||
case *ast.CallExpr:
|
|
||||||
body, suffix = splitSelector(x.Fun)
|
|
||||||
if body != nil {
|
|
||||||
suffix = &ast.CallExpr{suffix, x.Lparen, x.Args, x.Ellipsis, x.Rparen}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
case *ast.IndexExpr:
|
|
||||||
body, suffix = splitSelector(x.X)
|
|
||||||
if body != nil {
|
|
||||||
suffix = &ast.IndexExpr{suffix, x.Lbrack, x.Index, x.Rbrack}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
case *ast.SliceExpr:
|
|
||||||
body, suffix = splitSelector(x.X)
|
|
||||||
if body != nil {
|
|
||||||
suffix = &ast.SliceExpr{suffix, x.Lbrack, x.Low, x.High, x.Rbrack}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
case *ast.TypeAssertExpr:
|
|
||||||
body, suffix = splitSelector(x.X)
|
|
||||||
if body != nil {
|
|
||||||
suffix = &ast.TypeAssertExpr{suffix, x.Type}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
suffix = expr
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert an expression into an expression list split at the periods of
|
|
||||||
// selector expressions.
|
|
||||||
func selectorExprList(expr ast.Expr) (list []ast.Expr) {
|
|
||||||
// split expression
|
|
||||||
for expr != nil {
|
|
||||||
var suffix ast.Expr
|
|
||||||
expr, suffix = splitSelector(expr)
|
|
||||||
list = append(list, suffix)
|
|
||||||
}
|
|
||||||
|
|
||||||
// reverse list
|
|
||||||
for i, j := 0, len(list)-1; i < j; i, j = i+1, j-1 {
|
|
||||||
list[i], list[j] = list[j], list[i]
|
|
||||||
}
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sets multiLine to true if the expression spans multiple lines.
|
// Sets multiLine to true if the expression spans multiple lines.
|
||||||
func (p *printer) expr1(expr ast.Expr, prec1, depth int, multiLine *bool) {
|
func (p *printer) expr1(expr ast.Expr, prec1, depth int, multiLine *bool) {
|
||||||
p.print(expr.Pos())
|
p.print(expr.Pos())
|
||||||
|
|
@@ -798,8 +754,14 @@ func (p *printer) expr1(expr ast.Expr, prec1, depth int, multiLine *bool) {
|
||||||
}
|
}
|
||||||
|
|
||||||
case *ast.SelectorExpr:
|
case *ast.SelectorExpr:
|
||||||
parts := selectorExprList(expr)
|
p.expr1(x.X, token.HighestPrec, depth, multiLine)
|
||||||
p.exprList(token.NoPos, parts, depth, periodSep, multiLine, token.NoPos)
|
p.print(token.PERIOD)
|
||||||
|
if line := p.lineFor(x.Sel.Pos()); p.pos.IsValid() && p.pos.Line < line {
|
||||||
|
p.print(indent, newline, x.Sel.Pos(), x.Sel, unindent)
|
||||||
|
*multiLine = true
|
||||||
|
} else {
|
||||||
|
p.print(x.Sel.Pos(), x.Sel)
|
||||||
|
}
|
||||||
|
|
||||||
case *ast.TypeAssertExpr:
|
case *ast.TypeAssertExpr:
|
||||||
p.expr1(x.X, token.HighestPrec, depth, multiLine)
|
p.expr1(x.X, token.HighestPrec, depth, multiLine)
|
||||||
|
|
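
With splitSelector and selectorExprList gone, a SelectorExpr is now printed directly: the selector only moves onto its own indented line when it already starts on a later line than its receiver. A sketch of the observable behaviour (the chained calls below are invented and never type-checked, which is fine for parsing and printing):

package main

import (
	"go/parser"
	"go/printer"
	"go/token"
	"os"
)

const src = `package p

var _ = new(T).
	Method(1, 2,
		3).
	Field
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	// The line breaks before Method and Field are kept in the output.
	printer.Fprint(os.Stdout, fset, f)
}
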
@@ -1180,7 +1142,9 @@ func (p *printer) stmt(stmt ast.Stmt, nextIsRBrace bool, multiLine *bool) {
|
||||||
p.print(token.FOR, blank)
|
p.print(token.FOR, blank)
|
||||||
p.expr(s.Key, multiLine)
|
p.expr(s.Key, multiLine)
|
||||||
if s.Value != nil {
|
if s.Value != nil {
|
||||||
p.print(token.COMMA, blank)
|
// use position of value following the comma as
|
||||||
|
// comma position for correct comment placement
|
||||||
|
p.print(s.Value.Pos(), token.COMMA, blank)
|
||||||
p.expr(s.Value, multiLine)
|
p.expr(s.Value, multiLine)
|
||||||
}
|
}
|
||||||
p.print(blank, s.TokPos, s.Tok, blank, token.RANGE, blank)
|
p.print(blank, s.TokPos, s.Tok, blank, token.RANGE, blank)
|
||||||
|
|
|
||||||
|
|
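
The RangeStmt case above does the same comma repositioning for loops of the form "for key /* c */, value := range x"; the new comments test cases later in this diff pin the expected layout. A hedged driver using go/format (go/format merely wraps go/printer and is not part of this patch):

package main

import (
	"fmt"
	"go/format"
)

func main() {
	src := []byte(`package p

func _() {
	for a /* comment */, b := range x {
		_, _ = a, b
	}
}
`)
	out, err := format.Source(src)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}
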
@@ -686,9 +686,11 @@ func (p *printer) intersperseComments(next token.Position, tok token.Token) (wro
|
||||||
}
|
}
|
||||||
|
|
||||||
if last != nil {
|
if last != nil {
|
||||||
if last.Text[1] == '*' && p.lineFor(last.Pos()) == next.Line {
|
// if the last comment is a /*-style comment and the next item
|
||||||
// the last comment is a /*-style comment and the next item
|
// follows on the same line but is not a comma or a "closing"
|
||||||
// follows on the same line: separate with an extra blank
|
// token, add an extra blank for separation
|
||||||
|
if last.Text[1] == '*' && p.lineFor(last.Pos()) == next.Line && tok != token.COMMA &&
|
||||||
|
tok != token.RPAREN && tok != token.RBRACK && tok != token.RBRACE {
|
||||||
p.writeByte(' ', 1)
|
p.writeByte(' ', 1)
|
||||||
}
|
}
|
||||||
// ensure that there is a line break after a //-style comment,
|
// ensure that there is a line break after a //-style comment,
|
||||||
|
|
|
||||||
|
|
@@ -283,10 +283,10 @@ func fibo(n int) {
|
||||||
t.Error("expected offset 1") // error in test
|
t.Error("expected offset 1") // error in test
|
||||||
}
|
}
|
||||||
|
|
||||||
testComment(t, f, len(src), &ast.Comment{pos, "//-style comment"})
|
testComment(t, f, len(src), &ast.Comment{Slash: pos, Text: "//-style comment"})
|
||||||
testComment(t, f, len(src), &ast.Comment{pos, "/*-style comment */"})
|
testComment(t, f, len(src), &ast.Comment{Slash: pos, Text: "/*-style comment */"})
|
||||||
testComment(t, f, len(src), &ast.Comment{pos, "/*-style \n comment */"})
|
testComment(t, f, len(src), &ast.Comment{Slash: pos, Text: "/*-style \n comment */"})
|
||||||
testComment(t, f, len(src), &ast.Comment{pos, "/*-style comment \n\n\n */"})
|
testComment(t, f, len(src), &ast.Comment{Slash: pos, Text: "/*-style comment \n\n\n */"})
|
||||||
}
|
}
|
||||||
|
|
||||||
type visitor chan *ast.Ident
|
type visitor chan *ast.Ident
|
||||||
|
|
|
||||||
|
|
@@ -405,16 +405,17 @@ func _() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Some interesting interspersed comments.
|
// Some interesting interspersed comments.
|
||||||
|
// See below for more common cases.
|
||||||
func _( /* this */ x /* is */ /* an */ int) {
|
func _( /* this */ x /* is */ /* an */ int) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func _( /* no params */ ) {}
|
func _( /* no params */) {}
|
||||||
|
|
||||||
func _() {
|
func _() {
|
||||||
f( /* no args */ )
|
f( /* no args */)
|
||||||
}
|
}
|
||||||
|
|
||||||
func ( /* comment1 */ T /* comment2 */ ) _() {}
|
func ( /* comment1 */ T /* comment2 */) _() {}
|
||||||
|
|
||||||
func _() { /* one-line functions with comments are formatted as multi-line functions */
|
func _() { /* one-line functions with comments are formatted as multi-line functions */
|
||||||
}
|
}
|
||||||
|
|
@@ -425,7 +426,7 @@ func _() {
|
||||||
}
|
}
|
||||||
|
|
||||||
func _() {
|
func _() {
|
||||||
_ = []int{0, 1 /* don't introduce a newline after this comment - was issue 1365 */ }
|
_ = []int{0, 1 /* don't introduce a newline after this comment - was issue 1365 */}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Test cases from issue 1542:
|
// Test cases from issue 1542:
|
||||||
|
|
@@ -448,8 +449,9 @@ func _() {
|
||||||
_ = a
|
_ = a
|
||||||
}
|
}
|
||||||
|
|
||||||
// Comments immediately adjacent to punctuation (for which the go/printer
|
// Comments immediately adjacent to punctuation followed by a newline
|
||||||
// may only have estimated position information) must remain after the punctuation.
|
// remain after the punctuation (looks better and permits alignment of
|
||||||
|
// comments).
|
||||||
func _() {
|
func _() {
|
||||||
_ = T{
|
_ = T{
|
||||||
1, // comment after comma
|
1, // comment after comma
|
||||||
|
|
@@ -479,6 +481,35 @@ func _() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// If there is no newline following punctuation, commas move before the punctuation.
|
||||||
|
// This way, commas interspersed in lists stay with the respective expression.
|
||||||
|
func f(x /* comment */, y int, z int /* comment */, u, v, w int /* comment */) {
|
||||||
|
f(x /* comment */, y)
|
||||||
|
f(x, /* comment */
|
||||||
|
y)
|
||||||
|
f(
|
||||||
|
x, /* comment */
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func g(
|
||||||
|
x int, /* comment */
|
||||||
|
) {
|
||||||
|
}
|
||||||
|
|
||||||
|
type _ struct {
|
||||||
|
a, b /* comment */, c int
|
||||||
|
}
|
||||||
|
|
||||||
|
type _ struct {
|
||||||
|
a, b /* comment */, c int
|
||||||
|
}
|
||||||
|
|
||||||
|
func _() {
|
||||||
|
for a /* comment */, b := range x {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Print line directives correctly.
|
// Print line directives correctly.
|
||||||
|
|
||||||
// The following is a legal line directive.
|
// The following is a legal line directive.
|
||||||
|
|
|
||||||
|
|
@@ -411,6 +411,7 @@ func _() {
|
||||||
|
|
||||||
|
|
||||||
// Some interesting interspersed comments.
|
// Some interesting interspersed comments.
|
||||||
|
// See below for more common cases.
|
||||||
func _(/* this */x/* is *//* an */ int) {
|
func _(/* this */x/* is *//* an */ int) {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -453,8 +454,9 @@ func _() {
|
||||||
_ = a
|
_ = a
|
||||||
}
|
}
|
||||||
|
|
||||||
// Comments immediately adjacent to punctuation (for which the go/printer
|
// Comments immediately adjacent to punctuation followed by a newline
|
||||||
// may only have estimated position information) must remain after the punctuation.
|
// remain after the punctuation (looks better and permits alignment of
|
||||||
|
// comments).
|
||||||
func _() {
|
func _() {
|
||||||
_ = T{
|
_ = T{
|
||||||
1, // comment after comma
|
1, // comment after comma
|
||||||
|
|
@@ -486,6 +488,31 @@ func _() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// If there is no newline following punctuation, commas move before the punctuation.
|
||||||
|
// This way, commas interspersed in lists stay with the respective expression.
|
||||||
|
func f(x/* comment */, y int, z int /* comment */, u, v, w int /* comment */) {
|
||||||
|
f(x /* comment */, y)
|
||||||
|
f(x /* comment */,
|
||||||
|
y)
|
||||||
|
f(
|
||||||
|
x /* comment */,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func g(
|
||||||
|
x int /* comment */,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
type _ struct {
|
||||||
|
a, b /* comment */, c int
|
||||||
|
}
|
||||||
|
|
||||||
|
type _ struct { a, b /* comment */, c int }
|
||||||
|
|
||||||
|
func _() {
|
||||||
|
for a /* comment */, b := range x {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Print line directives correctly.
|
// Print line directives correctly.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@@ -545,7 +545,7 @@ func _() {
|
||||||
// handle multiline argument list correctly
|
// handle multiline argument list correctly
|
||||||
_ = new(T).
|
_ = new(T).
|
||||||
foo(
|
foo(
|
||||||
1).
|
1).
|
||||||
foo(2)
|
foo(2)
|
||||||
|
|
||||||
_ = new(T).foo(
|
_ = new(T).foo(
|
||||||
|
|
@@ -587,12 +587,12 @@ func _() {
|
||||||
_ = new(T).
|
_ = new(T).
|
||||||
Field.
|
Field.
|
||||||
Array[3+
|
Array[3+
|
||||||
4].
|
4].
|
||||||
Table["foo"].
|
Table["foo"].
|
||||||
Blob.(*Type).
|
Blob.(*Type).
|
||||||
Slices[1:4].
|
Slices[1:4].
|
||||||
Method(1, 2,
|
Method(1, 2,
|
||||||
3).
|
3).
|
||||||
Thingy
|
Thingy
|
||||||
|
|
||||||
_ = a.b.c
|
_ = a.b.c
|
||||||
|
|
|
||||||
|
|
@@ -545,7 +545,7 @@ func _() {
|
||||||
// handle multiline argument list correctly
|
// handle multiline argument list correctly
|
||||||
_ = new(T).
|
_ = new(T).
|
||||||
foo(
|
foo(
|
||||||
1).
|
1).
|
||||||
foo(2)
|
foo(2)
|
||||||
|
|
||||||
_ = new(T).foo(
|
_ = new(T).foo(
|
||||||
|
|
@@ -587,12 +587,12 @@ func _() {
|
||||||
_ = new(T).
|
_ = new(T).
|
||||||
Field.
|
Field.
|
||||||
Array[3+
|
Array[3+
|
||||||
4].
|
4].
|
||||||
Table["foo"].
|
Table["foo"].
|
||||||
Blob.(*Type).
|
Blob.(*Type).
|
||||||
Slices[1:4].
|
Slices[1:4].
|
||||||
Method(1, 2,
|
Method(1, 2,
|
||||||
3).
|
3).
|
||||||
Thingy
|
Thingy
|
||||||
|
|
||||||
_ = a.b.c
|
_ = a.b.c
|
||||||
|
|
|
||||||
|
|
@@ -2,21 +2,9 @@
|
||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// Package scanner implements a scanner for Go source text. Takes a []byte as
|
// Package scanner implements a scanner for Go source text.
|
||||||
// source which can then be tokenized through repeated calls to the Scan
|
// It takes a []byte as source which can then be tokenized
|
||||||
// function. Typical use:
|
// through repeated calls to the Scan method.
|
||||||
//
|
|
||||||
// var s scanner.Scanner
|
|
||||||
// fset := token.NewFileSet() // position information is relative to fset
|
|
||||||
// file := fset.AddFile(filename, fset.Base(), len(src)) // register file
|
|
||||||
// s.Init(file, src, nil /* no error handler */, 0)
|
|
||||||
// for {
|
|
||||||
// pos, tok, lit := s.Scan()
|
|
||||||
// if tok == token.EOF {
|
|
||||||
// break
|
|
||||||
// }
|
|
||||||
// // do something here with pos, tok, and lit
|
|
||||||
// }
|
|
||||||
//
|
//
|
||||||
package scanner
|
package scanner
|
||||||
|
|
||||||
|
|
|
||||||
|
|
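
The package comment above now just says "repeated calls to the Scan method"; the worked example it used to contain survives on the removed side of the hunk. For convenience, here it is again as a self-contained sketch (file name and source text invented):

package main

import (
	"fmt"
	"go/scanner"
	"go/token"
)

func main() {
	src := []byte("cos(x) + 1i*sin(x) // Euler")

	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("calc.go", fset.Base(), len(src))
	s.Init(file, src, nil /* no error handler */, scanner.ScanComments)

	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%s\t%s\t%q\n", fset.Position(pos), tok, lit)
	}
}
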
@@ -1,90 +0,0 @@
|
||||||
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package template
|
|
||||||
|
|
||||||
import (
|
|
||||||
"text/template/parse"
|
|
||||||
)
|
|
||||||
|
|
||||||
// clone clones a template Node.
|
|
||||||
func clone(n parse.Node) parse.Node {
|
|
||||||
switch t := n.(type) {
|
|
||||||
case *parse.ActionNode:
|
|
||||||
return cloneAction(t)
|
|
||||||
case *parse.IfNode:
|
|
||||||
b := new(parse.IfNode)
|
|
||||||
copyBranch(&b.BranchNode, &t.BranchNode)
|
|
||||||
return b
|
|
||||||
case *parse.ListNode:
|
|
||||||
return cloneList(t)
|
|
||||||
case *parse.RangeNode:
|
|
||||||
b := new(parse.RangeNode)
|
|
||||||
copyBranch(&b.BranchNode, &t.BranchNode)
|
|
||||||
return b
|
|
||||||
case *parse.TemplateNode:
|
|
||||||
return cloneTemplate(t)
|
|
||||||
case *parse.TextNode:
|
|
||||||
return cloneText(t)
|
|
||||||
case *parse.WithNode:
|
|
||||||
b := new(parse.WithNode)
|
|
||||||
copyBranch(&b.BranchNode, &t.BranchNode)
|
|
||||||
return b
|
|
||||||
}
|
|
||||||
panic("cloning " + n.String() + " is unimplemented")
|
|
||||||
}
|
|
||||||
|
|
||||||
// cloneAction returns a deep clone of n.
|
|
||||||
func cloneAction(n *parse.ActionNode) *parse.ActionNode {
|
|
||||||
// We use keyless fields because they won't compile if a field is added.
|
|
||||||
return &parse.ActionNode{n.NodeType, n.Line, clonePipe(n.Pipe)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// cloneList returns a deep clone of n.
|
|
||||||
func cloneList(n *parse.ListNode) *parse.ListNode {
|
|
||||||
if n == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
// We use keyless fields because they won't compile if a field is added.
|
|
||||||
c := parse.ListNode{n.NodeType, make([]parse.Node, len(n.Nodes))}
|
|
||||||
for i, child := range n.Nodes {
|
|
||||||
c.Nodes[i] = clone(child)
|
|
||||||
}
|
|
||||||
return &c
|
|
||||||
}
|
|
||||||
|
|
||||||
// clonePipe returns a shallow clone of n.
|
|
||||||
// The escaper does not modify pipe descendants in place so there's no need to
|
|
||||||
// clone deeply.
|
|
||||||
func clonePipe(n *parse.PipeNode) *parse.PipeNode {
|
|
||||||
if n == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
// We use keyless fields because they won't compile if a field is added.
|
|
||||||
return &parse.PipeNode{n.NodeType, n.Line, n.Decl, n.Cmds}
|
|
||||||
}
|
|
||||||
|
|
||||||
// cloneTemplate returns a deep clone of n.
|
|
||||||
func cloneTemplate(n *parse.TemplateNode) *parse.TemplateNode {
|
|
||||||
// We use keyless fields because they won't compile if a field is added.
|
|
||||||
return &parse.TemplateNode{n.NodeType, n.Line, n.Name, clonePipe(n.Pipe)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// cloneText clones the given node sharing its []byte.
|
|
||||||
func cloneText(n *parse.TextNode) *parse.TextNode {
|
|
||||||
// We use keyless fields because they won't compile if a field is added.
|
|
||||||
return &parse.TextNode{n.NodeType, n.Text}
|
|
||||||
}
|
|
||||||
|
|
||||||
// copyBranch clones src into dst.
|
|
||||||
func copyBranch(dst, src *parse.BranchNode) {
|
|
||||||
// We use keyless fields because they won't compile if a field is added.
|
|
||||||
*dst = parse.BranchNode{
|
|
||||||
src.NodeType,
|
|
||||||
src.Line,
|
|
||||||
clonePipe(src.Pipe),
|
|
||||||
cloneList(src.List),
|
|
||||||
cloneList(src.ElseList),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@@ -7,86 +7,109 @@ package template
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"testing"
|
"testing"
|
||||||
|
"text/template/parse"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestClone(t *testing.T) {
|
func TestAddParseTree(t *testing.T) {
|
||||||
tests := []struct {
|
root := Must(New("root").Parse(`{{define "a"}} {{.}} {{template "b"}} {{.}} "></a>{{end}}`))
|
||||||
input, want, wantClone string
|
tree, err := parse.Parse("t", `{{define "b"}}<a href="{{end}}`, "", "", nil, nil)
|
||||||
}{
|
if err != nil {
|
||||||
{
|
t.Fatal(err)
|
||||||
`Hello, {{if true}}{{"<World>"}}{{end}}!`,
|
|
||||||
"Hello, <World>!",
|
|
||||||
"Hello, <World>!",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
`Hello, {{if false}}{{.X}}{{else}}{{"<World>"}}{{end}}!`,
|
|
||||||
"Hello, <World>!",
|
|
||||||
"Hello, <World>!",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
`Hello, {{with "<World>"}}{{.}}{{end}}!`,
|
|
||||||
"Hello, <World>!",
|
|
||||||
"Hello, <World>!",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
`{{range .}}<p>{{.}}</p>{{end}}`,
|
|
||||||
"<p>foo</p><p><bar></p><p>baz</p>",
|
|
||||||
"<p>foo</p><p><bar></p><p>baz</p>",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
`Hello, {{"<World>" | html}}!`,
|
|
||||||
"Hello, <World>!",
|
|
||||||
"Hello, <World>!",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
`Hello{{if 1}}, World{{else}}{{template "d"}}{{end}}!`,
|
|
||||||
"Hello, World!",
|
|
||||||
"Hello, World!",
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
added := Must(root.AddParseTree("b", tree["b"]))
|
||||||
for _, test := range tests {
|
b := new(bytes.Buffer)
|
||||||
s, err := New("s").Parse(test.input)
|
err = added.ExecuteTemplate(b, "a", "1>0")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Errorf("input=%q: unexpected parse error %v", test.input, err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
|
if got, want := b.String(), ` 1>0 <a href=" 1%3e0 "></a>`; got != want {
|
||||||
d, _ := New("d").Parse(test.input)
|
t.Errorf("got %q want %q", got, want)
|
||||||
// Hack: just replace the root of the tree.
|
}
|
||||||
d.text.Root = cloneList(s.text.Root)
|
}
|
||||||
|
|
||||||
if want, got := s.text.Root.String(), d.text.Root.String(); want != got {
|
func TestClone(t *testing.T) {
|
||||||
t.Errorf("want %q, got %q", want, got)
|
// The {{.}} will be executed with data "<i>*/" in different contexts.
|
||||||
}
|
// In the t0 template, it will be in a text context.
|
||||||
|
// In the t1 template, it will be in a URL context.
|
||||||
err = escapeTemplates(d, "d")
|
// In the t2 template, it will be in a JavaScript context.
|
||||||
if err != nil {
|
// In the t3 template, it will be in a CSS context.
|
||||||
t.Errorf("%q: failed to escape: %s", test.input, err)
|
const tmpl = `{{define "a"}}{{template "lhs"}}{{.}}{{template "rhs"}}{{end}}`
|
||||||
continue
|
b := new(bytes.Buffer)
|
||||||
}
|
|
||||||
|
// Create an incomplete template t0.
|
||||||
if want, got := "s", s.Name(); want != got {
|
t0 := Must(New("t0").Parse(tmpl))
|
||||||
t.Errorf("want %q, got %q", want, got)
|
|
||||||
continue
|
// Clone t0 as t1.
|
||||||
}
|
t1 := Must(t0.Clone())
|
||||||
if want, got := "d", d.Name(); want != got {
|
Must(t1.Parse(`{{define "lhs"}} <a href=" {{end}}`))
|
||||||
t.Errorf("want %q, got %q", want, got)
|
Must(t1.Parse(`{{define "rhs"}} "></a> {{end}}`))
|
||||||
continue
|
|
||||||
}
|
// Execute t1.
|
||||||
|
b.Reset()
|
||||||
data := []string{"foo", "<bar>", "baz"}
|
if err := t1.ExecuteTemplate(b, "a", "<i>*/"); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
var b bytes.Buffer
|
}
|
||||||
d.Execute(&b, data)
|
if got, want := b.String(), ` <a href=" %3ci%3e*/ "></a> `; got != want {
|
||||||
if got := b.String(); got != test.wantClone {
|
t.Errorf("t1: got %q want %q", got, want)
|
||||||
t.Errorf("input=%q: want %q, got %q", test.input, test.wantClone, got)
|
}
|
||||||
}
|
|
||||||
|
// Clone t0 as t2.
|
||||||
// Make sure escaping d did not affect s.
|
t2 := Must(t0.Clone())
|
||||||
b.Reset()
|
Must(t2.Parse(`{{define "lhs"}} <p onclick="javascript: {{end}}`))
|
||||||
s.text.Execute(&b, data)
|
Must(t2.Parse(`{{define "rhs"}} "></p> {{end}}`))
|
||||||
if got := b.String(); got != test.want {
|
|
||||||
t.Errorf("input=%q: want %q, got %q", test.input, test.want, got)
|
// Execute t2.
|
||||||
}
|
b.Reset()
|
||||||
|
if err := t2.ExecuteTemplate(b, "a", "<i>*/"); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if got, want := b.String(), ` <p onclick="javascript: "\u003ci\u003e*/" "></p> `; got != want {
|
||||||
|
t.Errorf("t2: got %q want %q", got, want)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clone t0 as t3, but do not execute t3 yet.
|
||||||
|
t3 := Must(t0.Clone())
|
||||||
|
Must(t3.Parse(`{{define "lhs"}} <style> {{end}}`))
|
||||||
|
Must(t3.Parse(`{{define "rhs"}} </style> {{end}}`))
|
||||||
|
|
||||||
|
// Complete t0.
|
||||||
|
Must(t0.Parse(`{{define "lhs"}} ( {{end}}`))
|
||||||
|
Must(t0.Parse(`{{define "rhs"}} ) {{end}}`))
|
||||||
|
|
||||||
|
// Clone t0 as t4. Redefining the "lhs" template should fail.
|
||||||
|
t4 := Must(t0.Clone())
|
||||||
|
if _, err := t4.Parse(`{{define "lhs"}} FAIL {{end}}`); err == nil {
|
||||||
|
t.Error(`redefine "lhs": got nil err want non-nil`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute t0.
|
||||||
|
b.Reset()
|
||||||
|
if err := t0.ExecuteTemplate(b, "a", "<i>*/"); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if got, want := b.String(), ` ( <i>*/ ) `; got != want {
|
||||||
|
t.Errorf("t0: got %q want %q", got, want)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clone t0. This should fail, as t0 has already executed.
|
||||||
|
if _, err := t0.Clone(); err == nil {
|
||||||
|
t.Error(`t0.Clone(): got nil err want non-nil`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Similarly, cloning sub-templates should fail.
|
||||||
|
if _, err := t0.Lookup("a").Clone(); err == nil {
|
||||||
|
t.Error(`t0.Lookup("a").Clone(): got nil err want non-nil`)
|
||||||
|
}
|
||||||
|
if _, err := t0.Lookup("lhs").Clone(); err == nil {
|
||||||
|
t.Error(`t0.Lookup("lhs").Clone(): got nil err want non-nil`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute t3.
|
||||||
|
b.Reset()
|
||||||
|
if err := t3.ExecuteTemplate(b, "a", "<i>*/"); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if got, want := b.String(), ` <style> ZgotmplZ </style> `; got != want {
|
||||||
|
t.Errorf("t3: got %q want %q", got, want)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
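
TestClone above is the authoritative behaviour check; the following is only a minimal usage sketch of the same Clone API (clone before anything executes, then give each clone its own definitions), with invented template names and bodies.

package main

import (
	"html/template"
	"os"
)

func main() {
	base := template.Must(template.New("root").Parse(
		`{{define "page"}}<body>{{template "content" .}}</body>{{end}}`))

	a := template.Must(base.Clone())
	template.Must(a.Parse(`{{define "content"}}<p>{{.}}</p>{{end}}`))

	b := template.Must(base.Clone())
	template.Must(b.Parse(`{{define "content"}}<pre>{{.}}</pre>{{end}}`))

	for _, t := range []*template.Template{a, b} {
		// {{.}} is HTML-escaped, so "<i>hi</i>" comes out as &lt;i&gt;hi&lt;/i&gt;.
		if err := t.ExecuteTemplate(os.Stdout, "page", "<i>hi</i>"); err != nil {
			panic(err)
		}
	}
}
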
@@ -85,6 +85,22 @@ func indirect(a interface{}) interface{} {
|
||||||
return v.Interface()
|
return v.Interface()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
errorType = reflect.TypeOf((*error)(nil)).Elem()
|
||||||
|
fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem()
|
||||||
|
)
|
||||||
|
|
||||||
|
// indirectToStringerOrError returns the value, after dereferencing as many times
|
||||||
|
// as necessary to reach the base type (or nil) or an implementation of fmt.Stringer
|
||||||
|
// or error,
|
||||||
|
func indirectToStringerOrError(a interface{}) interface{} {
|
||||||
|
v := reflect.ValueOf(a)
|
||||||
|
for !v.Type().Implements(fmtStringerType) && !v.Type().Implements(errorType) && v.Kind() == reflect.Ptr && !v.IsNil() {
|
||||||
|
v = v.Elem()
|
||||||
|
}
|
||||||
|
return v.Interface()
|
||||||
|
}
|
||||||
|
|
||||||
// stringify converts its arguments to a string and the type of the content.
|
// stringify converts its arguments to a string and the type of the content.
|
||||||
// All pointers are dereferenced, as in the text/template package.
|
// All pointers are dereferenced, as in the text/template package.
|
||||||
func stringify(args ...interface{}) (string, contentType) {
|
func stringify(args ...interface{}) (string, contentType) {
|
||||||
|
|
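
indirectToStringerOrError above stops dereferencing as soon as the value implements fmt.Stringer or error, so String and Error methods declared on pointer receivers are still used. The same helper lifted into a runnable sketch; the version type is invented for the demonstration.

package main

import (
	"fmt"
	"reflect"
)

var (
	errorType       = reflect.TypeOf((*error)(nil)).Elem()
	fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem()
)

func indirectToStringerOrError(a interface{}) interface{} {
	v := reflect.ValueOf(a)
	for !v.Type().Implements(fmtStringerType) && !v.Type().Implements(errorType) &&
		v.Kind() == reflect.Ptr && !v.IsNil() {
		v = v.Elem()
	}
	return v.Interface()
}

type version struct{ major, minor int }

func (v *version) String() string { return fmt.Sprintf("v%d.%d", v.major, v.minor) }

func main() {
	p := &version{1, 2}
	pp := &p
	// **version is unwrapped only as far as *version, which has String.
	fmt.Println(indirectToStringerOrError(pp)) // v1.2
}
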
@@ -107,7 +123,7 @@ func stringify(args ...interface{}) (string, contentType) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for i, arg := range args {
|
for i, arg := range args {
|
||||||
args[i] = indirect(arg)
|
args[i] = indirectToStringerOrError(arg)
|
||||||
}
|
}
|
||||||
return fmt.Sprint(args...), contentTypePlain
|
return fmt.Sprint(args...), contentTypePlain
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@@ -6,6 +6,7 @@ package template
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
@@ -219,3 +220,42 @@ func TestTypedContent(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Test that we print using the String method. Was issue 3073.
|
||||||
|
type stringer struct {
|
||||||
|
v int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *stringer) String() string {
|
||||||
|
return fmt.Sprintf("string=%d", s.v)
|
||||||
|
}
|
||||||
|
|
||||||
|
type errorer struct {
|
||||||
|
v int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *errorer) Error() string {
|
||||||
|
return fmt.Sprintf("error=%d", s.v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestStringer(t *testing.T) {
|
||||||
|
s := &stringer{3}
|
||||||
|
b := new(bytes.Buffer)
|
||||||
|
tmpl := Must(New("x").Parse("{{.}}"))
|
||||||
|
if err := tmpl.Execute(b, s); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
var expect = "string=3"
|
||||||
|
if b.String() != expect {
|
||||||
|
t.Errorf("expected %q got %q", expect, b.String())
|
||||||
|
}
|
||||||
|
e := &errorer{7}
|
||||||
|
b.Reset()
|
||||||
|
if err := tmpl.Execute(b, e); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
expect = "error=7"
|
||||||
|
if b.String() != expect {
|
||||||
|
t.Errorf("expected %q got %q", expect, b.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@@ -17,11 +17,11 @@ Introduction
|
||||||
This package wraps package text/template so you can share its template API
|
This package wraps package text/template so you can share its template API
|
||||||
to parse and execute HTML templates safely.
|
to parse and execute HTML templates safely.
|
||||||
|
|
||||||
set, err := new(template.Set).Parse(...)
|
tmpl, err := template.New("name").Parse(...)
|
||||||
// Error checking elided
|
// Error checking elided
|
||||||
err = set.Execute(out, "Foo", data)
|
err = tmpl.Execute(out, "Foo", data)
|
||||||
|
|
||||||
If successful, set will now be injection-safe. Otherwise, err is an error
|
If successful, tmpl will now be injection-safe. Otherwise, err is an error
|
||||||
defined in the docs for ErrorCode.
|
defined in the docs for ErrorCode.
|
||||||
|
|
||||||
HTML templates treat data values as plain text which should be encoded so they
|
HTML templates treat data values as plain text which should be encoded so they
|
||||||
|
|
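
A runnable counterpart to the Introduction snippet above, using the signatures shown later in this commit (Execute takes a writer and the data; a template name is only needed with ExecuteTemplate); the template text is invented.

package main

import (
	"html/template"
	"os"
)

func main() {
	tmpl, err := template.New("name").Parse(`<p>Hello, {{.}}!</p>`)
	if err != nil {
		panic(err)
	}
	// Untrusted data is escaped: prints <p>Hello, &lt;World&gt;!</p>
	if err := tmpl.Execute(os.Stdout, "<World>"); err != nil {
		panic(err)
	}
}
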
@@ -172,18 +172,18 @@ This package assumes that template authors are trusted, that Execute's data
|
||||||
parameter is not, and seeks to preserve the properties below in the face
|
parameter is not, and seeks to preserve the properties below in the face
|
||||||
of untrusted data:
|
of untrusted data:
|
||||||
|
|
||||||
Structure Preservation Property
|
Structure Preservation Property:
|
||||||
"... when a template author writes an HTML tag in a safe templating language,
|
"... when a template author writes an HTML tag in a safe templating language,
|
||||||
the browser will interpret the corresponding portion of the output as a tag
|
the browser will interpret the corresponding portion of the output as a tag
|
||||||
regardless of the values of untrusted data, and similarly for other structures
|
regardless of the values of untrusted data, and similarly for other structures
|
||||||
such as attribute boundaries and JS and CSS string boundaries."
|
such as attribute boundaries and JS and CSS string boundaries."
|
||||||
|
|
||||||
Code Effect Property
|
Code Effect Property:
|
||||||
"... only code specified by the template author should run as a result of
|
"... only code specified by the template author should run as a result of
|
||||||
injecting the template output into a page and all code specified by the
|
injecting the template output into a page and all code specified by the
|
||||||
template author should run as a result of the same."
|
template author should run as a result of the same."
|
||||||
|
|
||||||
Least Surprise Property
|
Least Surprise Property:
|
||||||
"A developer (or code reviewer) familiar with HTML, CSS, and JavaScript, who
|
"A developer (or code reviewer) familiar with HTML, CSS, and JavaScript, who
|
||||||
knows that contextual autoescaping happens should be able to look at a {{.}}
|
knows that contextual autoescaping happens should be able to look at a {{.}}
|
||||||
and correctly infer what sanitization happens."
|
and correctly infer what sanitization happens."
|
||||||
|
|
|
||||||
|
|
@@ -46,30 +46,30 @@ func escapeTemplates(tmpl *Template, names ...string) error {
|
||||||
|
|
||||||
// funcMap maps command names to functions that render their inputs safe.
|
// funcMap maps command names to functions that render their inputs safe.
|
||||||
var funcMap = template.FuncMap{
|
var funcMap = template.FuncMap{
|
||||||
"exp_template_html_attrescaper": attrEscaper,
|
"html_template_attrescaper": attrEscaper,
|
||||||
"exp_template_html_commentescaper": commentEscaper,
|
"html_template_commentescaper": commentEscaper,
|
||||||
"exp_template_html_cssescaper": cssEscaper,
|
"html_template_cssescaper": cssEscaper,
|
||||||
"exp_template_html_cssvaluefilter": cssValueFilter,
|
"html_template_cssvaluefilter": cssValueFilter,
|
||||||
"exp_template_html_htmlnamefilter": htmlNameFilter,
|
"html_template_htmlnamefilter": htmlNameFilter,
|
||||||
"exp_template_html_htmlescaper": htmlEscaper,
|
"html_template_htmlescaper": htmlEscaper,
|
||||||
"exp_template_html_jsregexpescaper": jsRegexpEscaper,
|
"html_template_jsregexpescaper": jsRegexpEscaper,
|
||||||
"exp_template_html_jsstrescaper": jsStrEscaper,
|
"html_template_jsstrescaper": jsStrEscaper,
|
||||||
"exp_template_html_jsvalescaper": jsValEscaper,
|
"html_template_jsvalescaper": jsValEscaper,
|
||||||
"exp_template_html_nospaceescaper": htmlNospaceEscaper,
|
"html_template_nospaceescaper": htmlNospaceEscaper,
|
||||||
"exp_template_html_rcdataescaper": rcdataEscaper,
|
"html_template_rcdataescaper": rcdataEscaper,
|
||||||
"exp_template_html_urlescaper": urlEscaper,
|
"html_template_urlescaper": urlEscaper,
|
||||||
"exp_template_html_urlfilter": urlFilter,
|
"html_template_urlfilter": urlFilter,
|
||||||
"exp_template_html_urlnormalizer": urlNormalizer,
|
"html_template_urlnormalizer": urlNormalizer,
|
||||||
}
|
}
|
||||||
|
|
||||||
// equivEscapers matches contextual escapers to equivalent template builtins.
|
// equivEscapers matches contextual escapers to equivalent template builtins.
|
||||||
var equivEscapers = map[string]string{
|
var equivEscapers = map[string]string{
|
||||||
"exp_template_html_attrescaper": "html",
|
"html_template_attrescaper": "html",
|
||||||
"exp_template_html_htmlescaper": "html",
|
"html_template_htmlescaper": "html",
|
||||||
"exp_template_html_nospaceescaper": "html",
|
"html_template_nospaceescaper": "html",
|
||||||
"exp_template_html_rcdataescaper": "html",
|
"html_template_rcdataescaper": "html",
|
||||||
"exp_template_html_urlescaper": "urlquery",
|
"html_template_urlescaper": "urlquery",
|
||||||
"exp_template_html_urlnormalizer": "urlquery",
|
"html_template_urlnormalizer": "urlquery",
|
||||||
}
|
}
|
||||||
|
|
||||||
// escaper collects type inferences about templates and changes needed to make
|
// escaper collects type inferences about templates and changes needed to make
|
||||||
|
|
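
The html_template_* functions renamed above are what escapeAction splices into action pipelines, one per syntactic context. A hedged end-to-end illustration; the template text and data are invented, and the exact escaped output is left to the escapers themselves.

package main

import (
	"html/template"
	"os"
)

func main() {
	t := template.Must(template.New("demo").Parse(`<p>{{.}}</p>
<a href="/search?q={{.}}">link</a>
<script>var x = {{.}};</script>
`))
	// The same {{.}} is escaped as HTML text, as a URL query value, and as a
	// JavaScript value, depending on where it appears.
	if err := t.Execute(os.Stdout, `a<b&"c"`); err != nil {
		panic(err)
	}
}
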
@@ -147,17 +147,17 @@ func (e *escaper) escapeAction(c context, n *parse.ActionNode) context {
|
||||||
case stateURL, stateCSSDqStr, stateCSSSqStr, stateCSSDqURL, stateCSSSqURL, stateCSSURL:
|
case stateURL, stateCSSDqStr, stateCSSSqStr, stateCSSDqURL, stateCSSSqURL, stateCSSURL:
|
||||||
switch c.urlPart {
|
switch c.urlPart {
|
||||||
case urlPartNone:
|
case urlPartNone:
|
||||||
s = append(s, "exp_template_html_urlfilter")
|
s = append(s, "html_template_urlfilter")
|
||||||
fallthrough
|
fallthrough
|
||||||
case urlPartPreQuery:
|
case urlPartPreQuery:
|
||||||
switch c.state {
|
switch c.state {
|
||||||
case stateCSSDqStr, stateCSSSqStr:
|
case stateCSSDqStr, stateCSSSqStr:
|
||||||
s = append(s, "exp_template_html_cssescaper")
|
s = append(s, "html_template_cssescaper")
|
||||||
default:
|
default:
|
||||||
s = append(s, "exp_template_html_urlnormalizer")
|
s = append(s, "html_template_urlnormalizer")
|
||||||
}
|
}
|
||||||
case urlPartQueryOrFrag:
|
case urlPartQueryOrFrag:
|
||||||
s = append(s, "exp_template_html_urlescaper")
|
s = append(s, "html_template_urlescaper")
|
||||||
case urlPartUnknown:
|
case urlPartUnknown:
|
||||||
return context{
|
return context{
|
||||||
state: stateError,
|
state: stateError,
|
||||||
|
|
@@ -167,27 +167,27 @@ func (e *escaper) escapeAction(c context, n *parse.ActionNode) context {
|
||||||
panic(c.urlPart.String())
|
panic(c.urlPart.String())
|
||||||
}
|
}
|
||||||
case stateJS:
|
case stateJS:
|
||||||
s = append(s, "exp_template_html_jsvalescaper")
|
s = append(s, "html_template_jsvalescaper")
|
||||||
// A slash after a value starts a div operator.
|
// A slash after a value starts a div operator.
|
||||||
c.jsCtx = jsCtxDivOp
|
c.jsCtx = jsCtxDivOp
|
||||||
case stateJSDqStr, stateJSSqStr:
|
case stateJSDqStr, stateJSSqStr:
|
||||||
s = append(s, "exp_template_html_jsstrescaper")
|
s = append(s, "html_template_jsstrescaper")
|
||||||
case stateJSRegexp:
|
case stateJSRegexp:
|
||||||
s = append(s, "exp_template_html_jsregexpescaper")
|
s = append(s, "html_template_jsregexpescaper")
|
||||||
case stateCSS:
|
case stateCSS:
|
||||||
s = append(s, "exp_template_html_cssvaluefilter")
|
s = append(s, "html_template_cssvaluefilter")
|
||||||
case stateText:
|
case stateText:
|
||||||
s = append(s, "exp_template_html_htmlescaper")
|
s = append(s, "html_template_htmlescaper")
|
||||||
case stateRCDATA:
|
case stateRCDATA:
|
||||||
s = append(s, "exp_template_html_rcdataescaper")
|
s = append(s, "html_template_rcdataescaper")
|
||||||
case stateAttr:
|
case stateAttr:
|
||||||
// Handled below in delim check.
|
// Handled below in delim check.
|
||||||
case stateAttrName, stateTag:
|
case stateAttrName, stateTag:
|
||||||
c.state = stateAttrName
|
c.state = stateAttrName
|
||||||
s = append(s, "exp_template_html_htmlnamefilter")
|
s = append(s, "html_template_htmlnamefilter")
|
||||||
default:
|
default:
|
||||||
if isComment(c.state) {
|
if isComment(c.state) {
|
||||||
s = append(s, "exp_template_html_commentescaper")
|
s = append(s, "html_template_commentescaper")
|
||||||
} else {
|
} else {
|
||||||
panic("unexpected state " + c.state.String())
|
panic("unexpected state " + c.state.String())
|
||||||
}
|
}
|
||||||
|
|
@@ -196,9 +196,9 @@ func (e *escaper) escapeAction(c context, n *parse.ActionNode) context {
|
||||||
case delimNone:
|
case delimNone:
|
||||||
// No extra-escaping needed for raw text content.
|
// No extra-escaping needed for raw text content.
|
||||||
case delimSpaceOrTagEnd:
|
case delimSpaceOrTagEnd:
|
||||||
s = append(s, "exp_template_html_nospaceescaper")
|
s = append(s, "html_template_nospaceescaper")
|
||||||
default:
|
default:
|
||||||
s = append(s, "exp_template_html_attrescaper")
|
s = append(s, "html_template_attrescaper")
|
||||||
}
|
}
|
||||||
e.editActionNode(n, s)
|
e.editActionNode(n, s)
|
||||||
return c
|
return c
|
||||||
|
|
@@ -260,22 +260,22 @@ func ensurePipelineContains(p *parse.PipeNode, s []string) {
|
||||||
// redundantFuncs[a][b] implies that funcMap[b](funcMap[a](x)) == funcMap[a](x)
|
// redundantFuncs[a][b] implies that funcMap[b](funcMap[a](x)) == funcMap[a](x)
|
||||||
// for all x.
|
// for all x.
|
||||||
var redundantFuncs = map[string]map[string]bool{
|
var redundantFuncs = map[string]map[string]bool{
|
||||||
"exp_template_html_commentescaper": {
|
"html_template_commentescaper": {
|
||||||
"exp_template_html_attrescaper": true,
|
"html_template_attrescaper": true,
|
||||||
"exp_template_html_nospaceescaper": true,
|
"html_template_nospaceescaper": true,
|
||||||
"exp_template_html_htmlescaper": true,
|
"html_template_htmlescaper": true,
|
||||||
},
|
},
|
||||||
"exp_template_html_cssescaper": {
|
"html_template_cssescaper": {
|
||||||
"exp_template_html_attrescaper": true,
|
"html_template_attrescaper": true,
|
||||||
},
|
},
|
||||||
"exp_template_html_jsregexpescaper": {
|
"html_template_jsregexpescaper": {
|
||||||
"exp_template_html_attrescaper": true,
|
"html_template_attrescaper": true,
|
||||||
},
|
},
|
||||||
"exp_template_html_jsstrescaper": {
|
"html_template_jsstrescaper": {
|
||||||
"exp_template_html_attrescaper": true,
|
"html_template_attrescaper": true,
|
||||||
},
|
},
|
||||||
"exp_template_html_urlescaper": {
|
"html_template_urlescaper": {
|
||||||
"exp_template_html_urlnormalizer": true,
|
"html_template_urlnormalizer": true,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -505,7 +505,7 @@ func (e *escaper) escapeTree(c context, name string, line int) (context, string)
|
||||||
dt := e.template(dname)
|
dt := e.template(dname)
|
||||||
if dt == nil {
|
if dt == nil {
|
||||||
dt = template.New(dname)
|
dt = template.New(dname)
|
||||||
dt.Tree = &parse.Tree{Name: dname, Root: cloneList(t.Root)}
|
dt.Tree = &parse.Tree{Name: dname, Root: t.Root.CopyList()}
|
||||||
e.derived[dname] = dt
|
e.derived[dname] = dt
|
||||||
}
|
}
|
||||||
t = dt
|
t = dt
|
||||||
|
|
|
||||||
|
|
@@ -50,7 +50,7 @@ func (t *Template) Execute(wr io.Writer, data interface{}) (err error) {
|
||||||
// ExecuteTemplate applies the template associated with t that has the given
|
// ExecuteTemplate applies the template associated with t that has the given
|
||||||
// name to the specified data object and writes the output to wr.
|
// name to the specified data object and writes the output to wr.
|
||||||
func (t *Template) ExecuteTemplate(wr io.Writer, name string, data interface{}) error {
|
func (t *Template) ExecuteTemplate(wr io.Writer, name string, data interface{}) error {
|
||||||
tmpl, err := t.lookupAndEscapeTemplate(wr, name)
|
tmpl, err := t.lookupAndEscapeTemplate(name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
@ -60,7 +60,7 @@ func (t *Template) ExecuteTemplate(wr io.Writer, name string, data interface{})
|
||||||
// lookupAndEscapeTemplate guarantees that the template with the given name
|
// lookupAndEscapeTemplate guarantees that the template with the given name
|
||||||
// is escaped, or returns an error if it cannot be. It returns the named
|
// is escaped, or returns an error if it cannot be. It returns the named
|
||||||
// template.
|
// template.
|
||||||
func (t *Template) lookupAndEscapeTemplate(wr io.Writer, name string) (tmpl *Template, err error) {
|
func (t *Template) lookupAndEscapeTemplate(name string) (tmpl *Template, err error) {
|
||||||
t.nameSpace.mu.Lock()
|
t.nameSpace.mu.Lock()
|
||||||
defer t.nameSpace.mu.Unlock()
|
defer t.nameSpace.mu.Unlock()
|
||||||
tmpl = t.set[name]
|
tmpl = t.set[name]
|
||||||
|
|
@ -106,14 +106,71 @@ func (t *Template) Parse(src string) (*Template, error) {
|
||||||
return t, nil
|
return t, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// AddParseTree is unimplemented.
|
// AddParseTree creates a new template with the name and parse tree
|
||||||
func (t *Template) AddParseTree(name string, tree *parse.Tree) error {
|
// and associates it with t.
|
||||||
return fmt.Errorf("html/template: AddParseTree unimplemented")
|
//
|
||||||
|
// It returns an error if t has already been executed.
|
||||||
|
func (t *Template) AddParseTree(name string, tree *parse.Tree) (*Template, error) {
|
||||||
|
t.nameSpace.mu.Lock()
|
||||||
|
defer t.nameSpace.mu.Unlock()
|
||||||
|
if t.escaped {
|
||||||
|
return nil, fmt.Errorf("html/template: cannot AddParseTree to %q after it has executed", t.Name())
|
||||||
|
}
|
||||||
|
text, err := t.text.AddParseTree(name, tree)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
ret := &Template{
|
||||||
|
false,
|
||||||
|
text,
|
||||||
|
t.nameSpace,
|
||||||
|
}
|
||||||
|
t.set[name] = ret
|
||||||
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clone is unimplemented.
|
// Clone returns a duplicate of the template, including all associated
|
||||||
func (t *Template) Clone(name string) error {
|
// templates. The actual representation is not copied, but the name space of
|
||||||
return fmt.Errorf("html/template: Clone unimplemented")
|
// associated templates is, so further calls to Parse in the copy will add
|
||||||
|
// templates to the copy but not to the original. Clone can be used to prepare
|
||||||
|
// common templates and use them with variant definitions for other templates
|
||||||
|
// by adding the variants after the clone is made.
|
||||||
|
//
|
||||||
|
// It returns an error if t has already been executed.
|
||||||
|
func (t *Template) Clone() (*Template, error) {
|
||||||
|
t.nameSpace.mu.Lock()
|
||||||
|
defer t.nameSpace.mu.Unlock()
|
||||||
|
if t.escaped {
|
||||||
|
return nil, fmt.Errorf("html/template: cannot Clone %q after it has executed", t.Name())
|
||||||
|
}
|
||||||
|
textClone, err := t.text.Clone()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
ret := &Template{
|
||||||
|
false,
|
||||||
|
textClone,
|
||||||
|
&nameSpace{
|
||||||
|
set: make(map[string]*Template),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, x := range textClone.Templates() {
|
||||||
|
name := x.Name()
|
||||||
|
src := t.set[name]
|
||||||
|
if src == nil || src.escaped {
|
||||||
|
return nil, fmt.Errorf("html/template: cannot Clone %q after it has executed", t.Name())
|
||||||
|
}
|
||||||
|
x.Tree = &parse.Tree{
|
||||||
|
Name: x.Tree.Name,
|
||||||
|
Root: x.Tree.Root.CopyList(),
|
||||||
|
}
|
||||||
|
ret.set[name] = &Template{
|
||||||
|
false,
|
||||||
|
x,
|
||||||
|
ret.nameSpace,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// New allocates a new HTML template with the given name.
|
// New allocates a new HTML template with the given name.
|
||||||
|
|
|
||||||
|
|
@@ -0,0 +1,79 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This example demonstrates decoding a JPEG image and examining its pixels.
+package image_test
+
+import (
+"fmt"
+"image"
+"log"
+"os"
+
+// Package image/jpeg is not used explicitly in the code below,
+// but is imported for its initialization side-effect, which allows
+// image.Decode to understand JPEG formatted images. Uncomment these
+// two lines to also understand GIF and PNG images:
+// _ "image/gif"
+// _ "image/png"
+_ "image/jpeg"
+)
+
+func Example() {
+// Open the file.
+file, err := os.Open("testdata/video-001.jpeg")
+if err != nil {
+log.Fatal(err)
+}
+defer file.Close()
+
+// Decode the image.
+m, _, err := image.Decode(file)
+if err != nil {
+log.Fatal(err)
+}
+bounds := m.Bounds()
+
+// Calculate a 16-bin histogram for m's red, green, blue and alpha components.
+//
+// An image's bounds do not necessarily start at (0, 0), so the two loops start
+// at bounds.Min.Y and bounds.Min.X. Looping over Y first and X second is more
+// likely to result in better memory access patterns than X first and Y second.
+var histogram [16][4]int
+for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
+for x := bounds.Min.X; x < bounds.Max.X; x++ {
+r, g, b, a := m.At(x, y).RGBA()
+// A color's RGBA method returns values in the range [0, 65535].
+// Shifting by 12 reduces this to the range [0, 15].
+histogram[r>>12][0]++
+histogram[g>>12][1]++
+histogram[b>>12][2]++
+histogram[a>>12][3]++
+}
+}
+
+// Print the results.
+fmt.Printf("%-14s %6s %6s %6s %6s\n", "bin", "red", "green", "blue", "alpha")
+for i, x := range histogram {
+fmt.Printf("0x%04x-0x%04x: %6d %6d %6d %6d\n", i<<12, (i+1)<<12-1, x[0], x[1], x[2], x[3])
+}
+// Output:
+// bin red green blue alpha
+// 0x0000-0x0fff: 471 819 7596 0
+// 0x1000-0x1fff: 576 2892 726 0
+// 0x2000-0x2fff: 1038 2330 943 0
+// 0x3000-0x3fff: 883 2321 1014 0
+// 0x4000-0x4fff: 501 1295 525 0
+// 0x5000-0x5fff: 302 962 242 0
+// 0x6000-0x6fff: 219 358 150 0
+// 0x7000-0x7fff: 352 281 192 0
+// 0x8000-0x8fff: 3688 216 246 0
+// 0x9000-0x9fff: 2277 237 283 0
+// 0xa000-0xafff: 971 254 357 0
+// 0xb000-0xbfff: 317 306 429 0
+// 0xc000-0xcfff: 203 402 401 0
+// 0xd000-0xdfff: 256 394 241 0
+// 0xe000-0xefff: 378 343 173 0
+// 0xf000-0xffff: 3018 2040 1932 15450
+}

@@ -50,6 +50,9 @@ func TestYCbCr(t *testing.T) {
 testYCbCr(t, r, subsampleRatio, delta)
 }
 }
+if testing.Short() {
+break
+}
 }
 }

@@ -49,7 +49,7 @@ func TempFile(dir, prefix string) (f *os.File, err error) {
 for i := 0; i < 10000; i++ {
 name := filepath.Join(dir, prefix+nextSuffix())
 f, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0600)
-if pe, ok := err.(*os.PathError); ok && pe.Err == os.EEXIST {
+if os.IsExist(err) {
 if nconflict++; nconflict > 10 {
 rand = reseed()
 }
@@ -76,7 +76,7 @@ func TempDir(dir, prefix string) (name string, err error) {
 for i := 0; i < 10000; i++ {
 try := filepath.Join(dir, prefix+nextSuffix())
 err = os.Mkdir(try, 0700)
-if pe, ok := err.(*os.PathError); ok && pe.Err == os.EEXIST {
+if os.IsExist(err) {
 if nconflict++; nconflict > 10 {
 rand = reseed()
 }

@@ -2,6 +2,8 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.

+// +build !windows,!plan9
+
 // Package syslog provides a simple interface to the system log service. It
 // can send messages to the syslog daemon using UNIX domain sockets, UDP, or
 // TCP connections.
@@ -1,6 +1,9 @@
 // Copyright 2009 The Go Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.

+// +build !windows,!plan9
+
 package syslog

 import (
@@ -2,6 +2,8 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.

+// +build !windows,!plan9
+
 package syslog

 import (

@@ -512,6 +512,9 @@ func TestStringPowers(t *testing.T) {
 t.Errorf("failed at %d ** %d in base %d: %s != %s", b, p, b, xs, xs2)
 }
 }
+if b >= 3 && testing.Short() {
+break
+}
 }
 }

@@ -49,9 +49,10 @@ func (r *Rand) Int() int {
 }

 // Int63n returns, as an int64, a non-negative pseudo-random number in [0,n).
+// It panics if n <= 0.
 func (r *Rand) Int63n(n int64) int64 {
 if n <= 0 {
-return 0
+panic("invalid argument to Int63n")
 }
 max := int64((1 << 63) - 1 - (1<<63)%uint64(n))
 v := r.Int63()
@@ -62,9 +63,10 @@ func (r *Rand) Int63n(n int64) int64 {
 }

 // Int31n returns, as an int32, a non-negative pseudo-random number in [0,n).
+// It panics if n <= 0.
 func (r *Rand) Int31n(n int32) int32 {
 if n <= 0 {
-return 0
+panic("invalid argument to Int31n")
 }
 max := int32((1 << 31) - 1 - (1<<31)%uint32(n))
 v := r.Int31()
@@ -75,7 +77,11 @@ func (r *Rand) Int31n(n int32) int32 {
 }

 // Intn returns, as an int, a non-negative pseudo-random number in [0,n).
+// It panics if n <= 0.
 func (r *Rand) Intn(n int) int {
+if n <= 0 {
+panic("invalid argument to Intn")
+}
 if n <= 1<<31-1 {
 return int(r.Int31n(int32(n)))
 }
@@ -125,12 +131,15 @@ func Int31() int32 { return globalRand.Int31() }
 func Int() int { return globalRand.Int() }

 // Int63n returns, as an int64, a non-negative pseudo-random number in [0,n).
+// It panics if n <= 0.
 func Int63n(n int64) int64 { return globalRand.Int63n(n) }

 // Int31n returns, as an int32, a non-negative pseudo-random number in [0,n).
+// It panics if n <= 0.
 func Int31n(n int32) int32 { return globalRand.Int31n(n) }

 // Intn returns, as an int, a non-negative pseudo-random number in [0,n).
+// It panics if n <= 0.
 func Intn(n int) int { return globalRand.Intn(n) }

 // Float64 returns, as a float64, a pseudo-random number in [0.0,1.0).

@@ -141,6 +141,9 @@ func TestNonStandardNormalValues(t *testing.T) {
 for m := 0.5; m < mmax; m *= 2 {
 for _, seed := range testSeeds {
 testNormalDistribution(t, numTestSamples, m, sd, seed)
+if testing.Short() {
+break
+}
 }
 }
 }
@@ -191,6 +194,9 @@ func TestNonStandardExponentialValues(t *testing.T) {
 for rate := 0.05; rate < 10; rate *= 2 {
 for _, seed := range testSeeds {
 testExponentialDistribution(t, numTestSamples, rate, seed)
+if testing.Short() {
+break
+}
 }
 }
 }

@@ -14,7 +14,7 @@ import (
 )

 // If an IPv6 tunnel is running, we can try dialing a real IPv6 address.
-var ipv6 = flag.Bool("ipv6", false, "assume ipv6 tunnel is present")
+var testIPv6 = flag.Bool("ipv6", false, "assume ipv6 tunnel is present")

 // fd is already connected to the destination, port 80.
 // Run an HTTP request to fetch the appropriate page.
@@ -130,7 +130,7 @@ func TestDialGoogleIPv6(t *testing.T) {
 return
 }
 // Only run tcp6 if the kernel will take it.
-if !*ipv6 || !supportsIPv6 {
+if !*testIPv6 || !supportsIPv6 {
 return
 }

@@ -252,7 +252,9 @@ func (s *pollServer) Run() {
 } else {
 netfd := s.LookupFD(fd, mode)
 if netfd == nil {
-print("pollServer: unexpected wakeup for fd=", fd, " mode=", string(mode), "\n")
+// This can happen because the WaitFD runs without
+// holding s's lock, so there might be a pending wakeup
+// for an fd that has been evicted. No harm done.
 continue
 }
 s.WakeFD(netfd, mode, nil)
@@ -506,7 +508,7 @@ func (fd *netFD) Write(p []byte) (int, error) {
 }
 defer fd.decref()
 if fd.sysfile == nil {
-return 0, os.EINVAL
+return 0, syscall.EINVAL
 }

 var err error

@@ -335,7 +335,7 @@ func (fd *netFD) Close() error {

 func (fd *netFD) shutdown(how int) error {
 if fd == nil || fd.sysfd == syscall.InvalidHandle {
-return os.EINVAL
+return syscall.EINVAL
 }
 err := syscall.Shutdown(fd.sysfd, how)
 if err != nil {
@@ -369,7 +369,7 @@ func (o *readOp) Name() string {

 func (fd *netFD) Read(buf []byte) (int, error) {
 if fd == nil {
-return 0, os.EINVAL
+return 0, syscall.EINVAL
 }
 fd.rio.Lock()
 defer fd.rio.Unlock()
@@ -378,7 +378,7 @@ func (fd *netFD) Read(buf []byte) (int, error) {
 }
 defer fd.decref()
 if fd.sysfd == syscall.InvalidHandle {
-return 0, os.EINVAL
+return 0, syscall.EINVAL
 }
 var o readOp
 o.Init(fd, buf, 'r')
@@ -408,7 +408,7 @@ func (o *readFromOp) Name() string {

 func (fd *netFD) ReadFrom(buf []byte) (n int, sa syscall.Sockaddr, err error) {
 if fd == nil {
-return 0, nil, os.EINVAL
+return 0, nil, syscall.EINVAL
 }
 if len(buf) == 0 {
 return 0, nil, nil
@@ -447,7 +447,7 @@ func (o *writeOp) Name() string {

 func (fd *netFD) Write(buf []byte) (int, error) {
 if fd == nil {
-return 0, os.EINVAL
+return 0, syscall.EINVAL
 }
 fd.wio.Lock()
 defer fd.wio.Unlock()
@@ -478,7 +478,7 @@ func (o *writeToOp) Name() string {

 func (fd *netFD) WriteTo(buf []byte, sa syscall.Sockaddr) (int, error) {
 if fd == nil {
-return 0, os.EINVAL
+return 0, syscall.EINVAL
 }
 if len(buf) == 0 {
 return 0, nil
@@ -490,7 +490,7 @@ func (fd *netFD) WriteTo(buf []byte, sa syscall.Sockaddr) (int, error) {
 }
 defer fd.decref()
 if fd.sysfd == syscall.InvalidHandle {
-return 0, os.EINVAL
+return 0, syscall.EINVAL
 }
 var o writeToOp
 o.Init(fd, buf, 'w')
@@ -578,10 +578,12 @@ func (fd *netFD) dup() (*os.File, error) {
 return nil, os.NewSyscallError("dup", syscall.EWINDOWS)
 }

+var errNoSupport = errors.New("address family not supported")
+
 func (fd *netFD) ReadMsg(p []byte, oob []byte) (n, oobn, flags int, sa syscall.Sockaddr, err error) {
-return 0, 0, 0, nil, os.EAFNOSUPPORT
+return 0, 0, 0, nil, errNoSupport
 }

 func (fd *netFD) WriteMsg(p []byte, oob []byte, sa syscall.Sockaddr) (n int, oobn int, err error) {
-return 0, 0, os.EAFNOSUPPORT
+return 0, 0, errNoSupport
 }

@@ -28,7 +28,7 @@ func newFileFD(f *os.File) (*netFD, error) {
 switch sa.(type) {
 default:
 closesocket(fd)
-return nil, os.EINVAL
+return nil, syscall.EINVAL
 case *syscall.SockaddrInet4:
 family = syscall.AF_INET
 if proto == syscall.SOCK_DGRAM {
@@ -84,7 +84,7 @@ func FileConn(f *os.File) (c Conn, err error) {
 return newIPConn(fd), nil
 }
 fd.Close()
-return nil, os.EINVAL
+return nil, syscall.EINVAL
 }

 // FileListener returns a copy of the network listener corresponding
@@ -103,7 +103,7 @@ func FileListener(f *os.File) (l Listener, err error) {
 return &UnixListener{fd, laddr.Name}, nil
 }
 fd.Close()
-return nil, os.EINVAL
+return nil, syscall.EINVAL
 }

 // FilePacketConn returns a copy of the packet network connection
@@ -122,5 +122,5 @@ func FilePacketConn(f *os.File) (c PacketConn, err error) {
 return newUnixConn(fd), nil
 }
 fd.Close()
-return nil, os.EINVAL
+return nil, syscall.EINVAL
 }

@@ -6,6 +6,7 @@ package net

 import (
 "os"
+"syscall"
 )

 // FileConn returns a copy of the network connection corresponding to
@@ -13,7 +14,7 @@ import (
 // finished. Closing c does not affect f, and closing f does not
 // affect c.
 func FileConn(f *os.File) (c Conn, err error) {
-return nil, os.EPLAN9
+return nil, syscall.EPLAN9
 }

 // FileListener returns a copy of the network listener corresponding
@@ -21,7 +22,7 @@ func FileConn(f *os.File) (c Conn, err error) {
 // when finished. Closing c does not affect l, and closing l does not
 // affect c.
 func FileListener(f *os.File) (l Listener, err error) {
-return nil, os.EPLAN9
+return nil, syscall.EPLAN9
 }

 // FilePacketConn returns a copy of the packet network connection
@@ -29,5 +30,5 @@ func FileListener(f *os.File) (l Listener, err error) {
 // responsibility to close f when finished. Closing c does not affect
 // f, and closing f does not affect c.
 func FilePacketConn(f *os.File) (c PacketConn, err error) {
-return nil, os.EPLAN9
+return nil, syscall.EPLAN9
 }

@@ -34,7 +34,7 @@ var hosttests = []hostTest{

 func TestLookupStaticHost(t *testing.T) {
 p := hostsPath
-hostsPath = "hosts_testdata"
+hostsPath = "testdata/hosts"
 for i := 0; i < len(hosttests); i++ {
 tt := hosttests[i]
 ips := lookupStaticHost(tt.host)

@@ -128,6 +128,34 @@ var readSetCookiesTests = []struct {
 Raw: "NID=99=YsDT5i3E-CXax-; expires=Wed, 23-Nov-2011 01:05:03 GMT; path=/; domain=.google.ch; HttpOnly",
 }},
 },
+{
+Header{"Set-Cookie": {".ASPXAUTH=7E3AA; expires=Wed, 07-Mar-2012 14:25:06 GMT; path=/; HttpOnly"}},
+[]*Cookie{{
+Name: ".ASPXAUTH",
+Value: "7E3AA",
+Path: "/",
+Expires: time.Date(2012, 3, 7, 14, 25, 6, 0, time.UTC),
+RawExpires: "Wed, 07-Mar-2012 14:25:06 GMT",
+HttpOnly: true,
+Raw: ".ASPXAUTH=7E3AA; expires=Wed, 07-Mar-2012 14:25:06 GMT; path=/; HttpOnly",
+}},
+},
+{
+Header{"Set-Cookie": {"ASP.NET_SessionId=foo; path=/; HttpOnly"}},
+[]*Cookie{{
+Name: "ASP.NET_SessionId",
+Value: "foo",
+Path: "/",
+HttpOnly: true,
+Raw: "ASP.NET_SessionId=foo; path=/; HttpOnly",
+}},
+},
+
+// TODO(bradfitz): users have reported seeing this in the
+// wild, but do browsers handle it? RFC 6265 just says "don't
+// do that" (section 3) and then never mentions header folding
+// again.
+// Header{"Set-Cookie": {"ASP.NET_SessionId=foo; path=/; HttpOnly, .ASPXAUTH=7E3AA; expires=Wed, 07-Mar-2012 14:25:06 GMT; path=/; HttpOnly"}},
 }

 func toJSON(v interface{}) string {

@@ -0,0 +1,51 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package http_test
+
+import (
+"fmt"
+"io/ioutil"
+"log"
+"net/http"
+)
+
+func ExampleHijacker() {
+http.HandleFunc("/hijack", func(w http.ResponseWriter, r *http.Request) {
+hj, ok := w.(http.Hijacker)
+if !ok {
+http.Error(w, "webserver doesn't support hijacking", http.StatusInternalServerError)
+return
+}
+conn, bufrw, err := hj.Hijack()
+if err != nil {
+http.Error(w, err.Error(), http.StatusInternalServerError)
+return
+}
+// Don't forget to close the connection:
+defer conn.Close()
+bufrw.WriteString("Now we're speaking raw TCP. Say hi: ")
+bufrw.Flush()
+s, err := bufrw.ReadString('\n')
+if err != nil {
+log.Printf("error reading string: %v", err)
+return
+}
+fmt.Fprintf(bufrw, "You said: %q\nBye.\n", s)
+bufrw.Flush()
+})
+}
+
+func ExampleGet() {
+res, err := http.Get("http://www.google.com/robots.txt")
+if err != nil {
+log.Fatal(err)
+}
+robots, err := ioutil.ReadAll(res.Body)
+if err != nil {
+log.Fatal(err)
+}
+res.Body.Close()
+fmt.Printf("%s", robots)
+}

@@ -6,6 +6,7 @@ package http_test

 import (
 "bytes"
+"errors"
 "fmt"
 "io"
 "io/ioutil"
@@ -131,7 +132,7 @@ func TestFileServerCleans(t *testing.T) {
 ch := make(chan string, 1)
 fs := FileServer(&testFileSystem{func(name string) (File, error) {
 ch <- name
-return nil, os.ENOENT
+return nil, errors.New("file does not exist")
 }})
 tests := []struct {
 reqPath, openArg string
@@ -398,11 +399,15 @@ func TestLinuxSendfile(t *testing.T) {
 return
 }

-_, err = Get(fmt.Sprintf("http://%s/", ln.Addr()))
+res, err := Get(fmt.Sprintf("http://%s/", ln.Addr()))
 if err != nil {
-t.Errorf("http client error: %v", err)
-return
+t.Fatalf("http client error: %v", err)
 }
+_, err = io.Copy(ioutil.Discard, res.Body)
+if err != nil {
+t.Fatalf("client body read error: %v", err)
+}
+res.Body.Close()

 // Force child to exit cleanly.
 Get(fmt.Sprintf("http://%s/quit", ln.Addr()))

@@ -13,12 +13,12 @@ import (
 "net"
 "net/http"
 "net/textproto"
-"os"
 "sync"
 )

 var (
 ErrPersistEOF = &http.ProtocolError{ErrorString: "persistent connection closed"}
+ErrClosed = &http.ProtocolError{ErrorString: "connection closed by user"}
 ErrPipeline = &http.ProtocolError{ErrorString: "pipeline error"}
 )

@@ -191,7 +191,7 @@ func (sc *ServerConn) Write(req *http.Request, resp *http.Response) error {
 }
 if sc.c == nil { // connection closed by user in the meantime
 defer sc.lk.Unlock()
-return os.EBADF
+return ErrClosed
 }
 c := sc.c
 if sc.nread <= sc.nwritten {

@@ -22,9 +22,9 @@
 //
 // go tool pprof http://localhost:6060/debug/pprof/profile
 //
-// Or to look at the thread creation profile:
+// Or to view all available profiles:
 //
-// go tool pprof http://localhost:6060/debug/pprof/thread
+// go tool pprof http://localhost:6060/debug/pprof/
 //
 // For a study of the facility in action, visit
 //
@@ -36,7 +36,9 @@ import (
 "bufio"
 "bytes"
 "fmt"
+"html/template"
 "io"
+"log"
 "net/http"
 "os"
 "runtime"
@@ -47,11 +49,10 @@ import (
 )

 func init() {
+http.Handle("/debug/pprof/", http.HandlerFunc(Index))
 http.Handle("/debug/pprof/cmdline", http.HandlerFunc(Cmdline))
 http.Handle("/debug/pprof/profile", http.HandlerFunc(Profile))
-http.Handle("/debug/pprof/heap", http.HandlerFunc(Heap))
 http.Handle("/debug/pprof/symbol", http.HandlerFunc(Symbol))
-http.Handle("/debug/pprof/thread", http.HandlerFunc(Thread))
 }

 // Cmdline responds with the running program's
@@ -62,20 +63,6 @@ func Cmdline(w http.ResponseWriter, r *http.Request) {
 fmt.Fprintf(w, strings.Join(os.Args, "\x00"))
 }

-// Heap responds with the pprof-formatted heap profile.
-// The package initialization registers it as /debug/pprof/heap.
-func Heap(w http.ResponseWriter, r *http.Request) {
-w.Header().Set("Content-Type", "text/plain; charset=utf-8")
-pprof.WriteHeapProfile(w)
-}
-
-// Thread responds with the pprof-formatted thread creation profile.
-// The package initialization registers it as /debug/pprof/thread.
-func Thread(w http.ResponseWriter, r *http.Request) {
-w.Header().Set("Content-Type", "text/plain; charset=utf-8")
-pprof.WriteThreadProfile(w)
-}
-
 // Profile responds with the pprof-formatted cpu profile.
 // The package initialization registers it as /debug/pprof/profile.
 func Profile(w http.ResponseWriter, r *http.Request) {
@@ -147,3 +134,61 @@ func Symbol(w http.ResponseWriter, r *http.Request) {

 w.Write(buf.Bytes())
 }
+
+// Handler returns an HTTP handler that serves the named profile.
+func Handler(name string) http.Handler {
+return handler(name)
+}
+
+type handler string
+
+func (name handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+w.Header().Set("Content-Type", "text/plain; charset=utf-8")
+debug, _ := strconv.Atoi(r.FormValue("debug"))
+p := pprof.Lookup(string(name))
+if p == nil {
+w.WriteHeader(404)
+fmt.Fprintf(w, "Unknown profile: %s\n", name)
+return
+}
+p.WriteTo(w, debug)
+return
+}
+
+// Index responds with the pprof-formatted profile named by the request.
+// For example, "/debug/pprof/heap" serves the "heap" profile.
+// Index responds to a request for "/debug/pprof/" with an HTML page
+// listing the available profiles.
+func Index(w http.ResponseWriter, r *http.Request) {
+if strings.HasPrefix(r.URL.Path, "/debug/pprof/") {
+name := r.URL.Path[len("/debug/pprof/"):]
+if name != "" {
+handler(name).ServeHTTP(w, r)
+return
+}
+}
+
+profiles := pprof.Profiles()
+if err := indexTmpl.Execute(w, profiles); err != nil {
+log.Print(err)
+}
+}
+
+var indexTmpl = template.Must(template.New("index").Parse(`<html>
+<head>
+<title>/debug/pprof/</title>
+</head>
+/debug/pprof/<br>
+<br>
+<body>
+profiles:<br>
+<table>
+{{range .}}
+<tr><td align=right>{{.Count}}<td><a href="/debug/pprof/{{.Name}}?debug=1">{{.Name}}</a>
+{{end}}
+</table>
+<br>
+<a href="/debug/pprof/goroutine?debug=2">full goroutine stack dump</a><br>
+</body>
+</html>
+`))

@@ -186,7 +186,7 @@ func (r *Request) Cookies() []*Cookie {
 return readCookies(r.Header, "")
 }

-var ErrNoCookie = errors.New("http: named cookied not present")
+var ErrNoCookie = errors.New("http: named cookie not present")

 // Cookie returns the named cookie provided in the request or
 // ErrNoCookie if not found.
@@ -486,7 +486,7 @@ func ReadRequest(b *bufio.Reader) (req *Request, err error) {
 rawurl = "http://" + rawurl
 }

-if req.URL, err = url.ParseRequest(rawurl); err != nil {
+if req.URL, err = url.ParseRequestURI(rawurl); err != nil {
 return nil, err
 }

@@ -245,8 +245,7 @@ func TestServerTimeouts(t *testing.T) {
 fmt.Fprintf(res, "req=%d", reqNum)
 })

-const second = 1000000000 /* nanos */
-server := &Server{Handler: handler, ReadTimeout: 0.25 * second, WriteTimeout: 0.25 * second}
+server := &Server{Handler: handler, ReadTimeout: 250 * time.Millisecond, WriteTimeout: 250 * time.Millisecond}
 go server.Serve(l)

 url := fmt.Sprintf("http://%s/", addr)
@@ -277,7 +276,7 @@ func TestServerTimeouts(t *testing.T) {
 if n != 0 || err != io.EOF {
 t.Errorf("Read = %v, %v, wanted %v, %v", n, err, 0, io.EOF)
 }
-if latency < 200*time.Millisecond /* fudge from 0.25 above */ {
+if latency < 200*time.Millisecond /* fudge from 250 ms above */ {
 t.Errorf("got EOF after %s, want >= %s", latency, 200*time.Millisecond)
 }

@@ -12,7 +12,6 @@ package http
 import (
 "bufio"
 "bytes"
-"crypto/rand"
 "crypto/tls"
 "errors"
 "fmt"
@@ -985,6 +984,7 @@ type Server struct {
 ReadTimeout time.Duration // maximum duration before timing out read of the request
 WriteTimeout time.Duration // maximum duration before timing out write of the response
 MaxHeaderBytes int // maximum size of request headers, DefaultMaxHeaderBytes if 0
+TLSConfig *tls.Config // optional TLS config, used by ListenAndServeTLS
 }

 // ListenAndServe listens on the TCP network address srv.Addr and then
@@ -1121,9 +1121,12 @@ func (srv *Server) ListenAndServeTLS(certFile, keyFile string) error {
 if addr == "" {
 addr = ":https"
 }
-config := &tls.Config{
-Rand: rand.Reader,
-NextProtos: []string{"http/1.1"},
+config := &tls.Config{}
+if srv.TLSConfig != nil {
+*config = *srv.TLSConfig
+}
+if config.NextProtos == nil {
+config.NextProtos = []string{"http/1.1"}
 }

 var err error

@@ -648,7 +648,7 @@ func TestTransportPersistConnLeak(t *testing.T) {
 tr := &Transport{}
 c := &Client{Transport: tr}

-n0 := runtime.Goroutines()
+n0 := runtime.NumGoroutine()

 const numReq = 25
 didReqCh := make(chan bool)
@@ -669,7 +669,7 @@ func TestTransportPersistConnLeak(t *testing.T) {
 <-gotReqCh
 }

-nhigh := runtime.Goroutines()
+nhigh := runtime.NumGoroutine()

 // Tell all handlers to unblock and reply.
 for i := 0; i < numReq; i++ {
@@ -685,7 +685,7 @@ func TestTransportPersistConnLeak(t *testing.T) {
 time.Sleep(100 * time.Millisecond)
 runtime.GC()
 runtime.GC() // even more.
-nfinal := runtime.Goroutines()
+nfinal := runtime.NumGoroutine()

 growth := nfinal - n0

@@ -166,13 +166,13 @@ func interfaceMulticastAddrTable(ifindex int) ([]Addr, error) {
 return nil, err
 }
 }
-ifmat4 := parseProcNetIGMP(ifi)
-ifmat6 := parseProcNetIGMP6(ifi)
+ifmat4 := parseProcNetIGMP("/proc/net/igmp", ifi)
+ifmat6 := parseProcNetIGMP6("/proc/net/igmp6", ifi)
 return append(ifmat4, ifmat6...), nil
 }

-func parseProcNetIGMP(ifi *Interface) []Addr {
-fd, err := open("/proc/net/igmp")
+func parseProcNetIGMP(path string, ifi *Interface) []Addr {
+fd, err := open(path)
 if err != nil {
 return nil
 }
@@ -185,23 +185,26 @@ func parseProcNetIGMP(ifi *Interface) []Addr {
 fd.readLine() // skip first line
 b := make([]byte, IPv4len)
 for l, ok := fd.readLine(); ok; l, ok = fd.readLine() {
-f := getFields(l)
-switch len(f) {
-case 4:
+f := splitAtBytes(l, " :\r\t\n")
+if len(f) < 4 {
+continue
+}
+switch {
+case l[0] != ' ' && l[0] != '\t': // new interface line
+name = f[1]
+case len(f[0]) == 8:
 if ifi == nil || name == ifi.Name {
 fmt.Sscanf(f[0], "%08x", &b)
 ifma := IPAddr{IP: IPv4(b[3], b[2], b[1], b[0])}
 ifmat = append(ifmat, ifma.toAddr())
 }
-case 5:
-name = f[1]
 }
 }
 return ifmat
 }

-func parseProcNetIGMP6(ifi *Interface) []Addr {
-fd, err := open("/proc/net/igmp6")
+func parseProcNetIGMP6(path string, ifi *Interface) []Addr {
+fd, err := open(path)
 if err != nil {
 return nil
 }
@@ -210,7 +213,10 @@ func parseProcNetIGMP6(ifi *Interface) []Addr {
 var ifmat []Addr
 b := make([]byte, IPv6len)
 for l, ok := fd.readLine(); ok; l, ok = fd.readLine() {
-f := getFields(l)
+f := splitAtBytes(l, " \r\t\n")
+if len(f) < 6 {
+continue
+}
 if ifi == nil || f[1] == ifi.Name {
 fmt.Sscanf(f[2], "%32x", &b)
 ifma := IPAddr{IP: IP{b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15]}}

@@ -31,17 +31,17 @@ func TestInterfaces(t *testing.T) {
 for _, ifi := range ift {
 ifxi, err := InterfaceByIndex(ifi.Index)
 if err != nil {
-t.Fatalf("InterfaceByIndex(%#q) failed: %v", ifi.Index, err)
+t.Fatalf("InterfaceByIndex(%q) failed: %v", ifi.Index, err)
 }
 if !sameInterface(ifxi, &ifi) {
-t.Fatalf("InterfaceByIndex(%#q) = %v, want %v", ifi.Index, *ifxi, ifi)
+t.Fatalf("InterfaceByIndex(%q) = %v, want %v", ifi.Index, *ifxi, ifi)
 }
 ifxn, err := InterfaceByName(ifi.Name)
 if err != nil {
-t.Fatalf("InterfaceByName(%#q) failed: %v", ifi.Name, err)
+t.Fatalf("InterfaceByName(%q) failed: %v", ifi.Name, err)
 }
 if !sameInterface(ifxn, &ifi) {
-t.Fatalf("InterfaceByName(%#q) = %v, want %v", ifi.Name, *ifxn, ifi)
+t.Fatalf("InterfaceByName(%q) = %v, want %v", ifi.Name, *ifxn, ifi)
 }
 t.Logf("%q: flags %q, ifindex %v, mtu %v\n", ifi.Name, ifi.Flags.String(), ifi.Index, ifi.MTU)
 t.Logf("\thardware address %q", ifi.HardwareAddr.String())

@@ -7,6 +7,7 @@ package net
 import (
 "bytes"
 "os"
+"syscall"
 "testing"
 "time"
 )
@@ -15,7 +16,7 @@ var icmpTests = []struct {
 net string
 laddr string
 raddr string
-ipv6 bool
+ipv6 bool // test with underlying AF_INET6 socket
 }{
 {"ip4:icmp", "", "127.0.0.1", false},
 {"ip6:icmp", "", "::1", true},
@@ -34,15 +35,15 @@ func TestICMP(t *testing.T) {
 }
 id := os.Getpid() & 0xffff
 seqnum++
-echo := newICMPEchoRequest(tt.ipv6, id, seqnum, 128, []byte("Go Go Gadget Ping!!!"))
-exchangeICMPEcho(t, tt.net, tt.laddr, tt.raddr, tt.ipv6, echo)
+echo := newICMPEchoRequest(tt.net, id, seqnum, 128, []byte("Go Go Gadget Ping!!!"))
+exchangeICMPEcho(t, tt.net, tt.laddr, tt.raddr, echo)
 }
 }

-func exchangeICMPEcho(t *testing.T, net, laddr, raddr string, ipv6 bool, echo []byte) {
+func exchangeICMPEcho(t *testing.T, net, laddr, raddr string, echo []byte) {
 c, err := ListenPacket(net, laddr)
 if err != nil {
-t.Errorf("ListenPacket(%#q, %#q) failed: %v", net, laddr, err)
+t.Errorf("ListenPacket(%q, %q) failed: %v", net, laddr, err)
 return
 }
 c.SetDeadline(time.Now().Add(100 * time.Millisecond))
@@ -50,12 +51,12 @@ func exchangeICMPEcho(t *testing.T, net, laddr, raddr string, ipv6 bool, echo []

 ra, err := ResolveIPAddr(net, raddr)
 if err != nil {
-t.Errorf("ResolveIPAddr(%#q, %#q) failed: %v", net, raddr, err)
+t.Errorf("ResolveIPAddr(%q, %q) failed: %v", net, raddr, err)
 return
 }

 waitForReady := make(chan bool)
-go icmpEchoTransponder(t, net, raddr, ipv6, waitForReady)
+go icmpEchoTransponder(t, net, raddr, waitForReady)
 <-waitForReady

 _, err = c.WriteTo(echo, ra)
@@ -71,11 +72,15 @@ func exchangeICMPEcho(t *testing.T, net, laddr, raddr string, ipv6 bool, echo []
 t.Errorf("ReadFrom failed: %v", err)
 return
 }
-if !ipv6 && reply[0] != ICMP4_ECHO_REPLY {
-continue
-}
-if ipv6 && reply[0] != ICMP6_ECHO_REPLY {
-continue
+switch c.(*IPConn).fd.family {
+case syscall.AF_INET:
+if reply[0] != ICMP4_ECHO_REPLY {
+continue
+}
+case syscall.AF_INET6:
+if reply[0] != ICMP6_ECHO_REPLY {
+continue
+}
 }
 xid, xseqnum := parseICMPEchoReply(echo)
 rid, rseqnum := parseICMPEchoReply(reply)
@@ -87,11 +92,11 @@ func exchangeICMPEcho(t *testing.T, net, laddr, raddr string, ipv6 bool, echo []
 }
 }

-func icmpEchoTransponder(t *testing.T, net, raddr string, ipv6 bool, waitForReady chan bool) {
+func icmpEchoTransponder(t *testing.T, net, raddr string, waitForReady chan bool) {
 c, err := Dial(net, raddr)
 if err != nil {
 waitForReady <- true
-t.Errorf("Dial(%#q, %#q) failed: %v", net, raddr, err)
+t.Errorf("Dial(%q, %q) failed: %v", net, raddr, err)
 return
 }
 c.SetDeadline(time.Now().Add(100 * time.Millisecond))
@@ -106,18 +111,23 @@ func icmpEchoTransponder(t *testing.T, net, raddr string, ipv6 bool, waitForRead
 t.Errorf("Read failed: %v", err)
 return
 }
-if !ipv6 && echo[0] != ICMP4_ECHO_REQUEST {
-continue
-}
-if ipv6 && echo[0] != ICMP6_ECHO_REQUEST {
-continue
+switch c.(*IPConn).fd.family {
+case syscall.AF_INET:
+if echo[0] != ICMP4_ECHO_REQUEST {
+continue
+}
+case syscall.AF_INET6:
+if echo[0] != ICMP6_ECHO_REQUEST {
+continue
+}
 }
 break
 }

-if !ipv6 {
+switch c.(*IPConn).fd.family {
+case syscall.AF_INET:
 echo[0] = ICMP4_ECHO_REPLY
-} else {
+case syscall.AF_INET6:
 echo[0] = ICMP6_ECHO_REPLY
 }

@@ -135,11 +145,15 @@ const (
 ICMP6_ECHO_REPLY = 129
 )

-func newICMPEchoRequest(ipv6 bool, id, seqnum, msglen int, filler []byte) []byte {
-if !ipv6 {
+func newICMPEchoRequest(net string, id, seqnum, msglen int, filler []byte) []byte {
+afnet, _, _ := parseDialNetwork(net)
+switch afnet {
+case "ip4":
 return newICMPv4EchoRequest(id, seqnum, msglen, filler)
+case "ip6":
+return newICMPv6EchoRequest(id, seqnum, msglen, filler)
 }
-return newICMPv6EchoRequest(id, seqnum, msglen, filler)
+return nil
 }

 func newICMPv4EchoRequest(id, seqnum, msglen int, filler []byte) []byte {
