libgo: Update to weekly.2011-12-22.

From-SVN: r183150
Ian Lance Taylor 2012-01-13 05:11:45 +00:00
parent f83fa0bf8f
commit df4aa89a5e
195 changed files with 3634 additions and 1287 deletions

View File

@ -12,9 +12,9 @@ func main() {
var t testing.T var t testing.T
// make sure error mentions that // make sure error mentions that
// ch is unexported, not just "ch not found". // name is unexported, not just "name not found".
t.ch = nil // ERROR "unexported" t.name = nil // ERROR "unexported"
println(testing.anyLowercaseName("asdf")) // ERROR "unexported" "undefined: testing.anyLowercaseName" println(testing.anyLowercaseName("asdf")) // ERROR "unexported" "undefined: testing.anyLowercaseName"
} }

View File

@ -1,4 +1,4 @@
82fdc445f2ff 4a8268927758
The first line of this file holds the Mercurial revision number of the The first line of this file holds the Mercurial revision number of the
last merge done from the master library sources. last merge done from the master library sources.

View File

@ -279,8 +279,7 @@ toolexeclibgoimage_DATA = \
image/gif.gox \ image/gif.gox \
image/jpeg.gox \ image/jpeg.gox \
image/png.gox \ image/png.gox \
image/tiff.gox \ image/tiff.gox
image/ycbcr.gox
toolexeclibgoindexdir = $(toolexeclibgodir)/index toolexeclibgoindexdir = $(toolexeclibgodir)/index
@ -586,7 +585,8 @@ go_image_files = \
go/image/format.go \ go/image/format.go \
go/image/geom.go \ go/image/geom.go \
go/image/image.go \ go/image/image.go \
go/image/names.go go/image/names.go \
go/image/ycbcr.go
go_io_files = \ go_io_files = \
go/io/multi.go \ go/io/multi.go \
@ -654,10 +654,15 @@ if LIBGO_IS_LINUX
go_net_fd_os_file = go/net/fd_linux.go go_net_fd_os_file = go/net/fd_linux.go
go_net_newpollserver_file = go/net/newpollserver.go go_net_newpollserver_file = go/net/newpollserver.go
else # !LIBGO_IS_LINUX && !LIBGO_IS_RTEMS else # !LIBGO_IS_LINUX && !LIBGO_IS_RTEMS
if LIBGO_IS_NETBSD
go_net_fd_os_file = go/net/fd_netbsd.go
go_net_newpollserver_file = go/net/newpollserver.go
else # !LIBGO_IS_NETBSD && !LIBGO_IS_LINUX && !LIBGO_IS_RTEMS
# By default use select with pipes. Most systems should have # By default use select with pipes. Most systems should have
# something better. # something better.
go_net_fd_os_file = go/net/fd_select.go go_net_fd_os_file = go/net/fd_select.go
go_net_newpollserver_file = go/net/newpollserver.go go_net_newpollserver_file = go/net/newpollserver.go
endif # !LIBGO_IS_NETBSD
endif # !LIBGO_IS_LINUX endif # !LIBGO_IS_LINUX
endif # !LIBGO_IS_RTEMS endif # !LIBGO_IS_RTEMS
@ -688,8 +693,12 @@ endif
if LIBGO_IS_LINUX if LIBGO_IS_LINUX
go_net_interface_file = go/net/interface_linux.go go_net_interface_file = go/net/interface_linux.go
else else
if LIBGO_IS_NETBSD
go_net_interface_file = go/net/interface_netbsd.go
else
go_net_interface_file = go/net/interface_stub.go go_net_interface_file = go/net/interface_stub.go
endif endif
endif
go_net_files = \ go_net_files = \
go/net/cgo_unix.go \ go/net/cgo_unix.go \
@ -845,6 +854,7 @@ go_strconv_files = \
go/strconv/atof.go \ go/strconv/atof.go \
go/strconv/atoi.go \ go/strconv/atoi.go \
go/strconv/decimal.go \ go/strconv/decimal.go \
go/strconv/extfloat.go \
go/strconv/ftoa.go \ go/strconv/ftoa.go \
go/strconv/itoa.go \ go/strconv/itoa.go \
go/strconv/quote.go go/strconv/quote.go
@ -880,7 +890,8 @@ go_syslog_c_files = \
go_testing_files = \ go_testing_files = \
go/testing/benchmark.go \ go/testing/benchmark.go \
go/testing/example.go \ go/testing/example.go \
go/testing/testing.go go/testing/testing.go \
go/testing/wrapper.go
go_time_files = \ go_time_files = \
go/time/format.go \ go/time/format.go \
@ -1197,7 +1208,9 @@ go_go_build_files = \
go_go_doc_files = \ go_go_doc_files = \
go/go/doc/comment.go \ go/go/doc/comment.go \
go/go/doc/doc.go \ go/go/doc/doc.go \
go/go/doc/example.go go/go/doc/example.go \
go/go/doc/exports.go \
go/go/doc/filter.go
go_go_parser_files = \ go_go_parser_files = \
go/go/parser/interface.go \ go/go/parser/interface.go \
go/go/parser/parser.go go/go/parser/parser.go
@ -1241,7 +1254,8 @@ go_image_bmp_files = \
go/image/bmp/reader.go go/image/bmp/reader.go
go_image_color_files = \ go_image_color_files = \
go/image/color/color.go go/image/color/color.go \
go/image/color/ycbcr.go
go_image_draw_files = \ go_image_draw_files = \
go/image/draw/draw.go go/image/draw/draw.go
@ -1266,9 +1280,6 @@ go_image_tiff_files = \
go/image/tiff/consts.go \ go/image/tiff/consts.go \
go/image/tiff/reader.go go/image/tiff/reader.go
go_image_ycbcr_files = \
go/image/ycbcr/ycbcr.go
go_index_suffixarray_files = \ go_index_suffixarray_files = \
go/index/suffixarray/qsufsort.go \ go/index/suffixarray/qsufsort.go \
go/index/suffixarray/suffixarray.go go/index/suffixarray/suffixarray.go
@ -1318,6 +1329,7 @@ go_net_http_files = \
go/net/http/filetransport.go \ go/net/http/filetransport.go \
go/net/http/fs.go \ go/net/http/fs.go \
go/net/http/header.go \ go/net/http/header.go \
go/net/http/jar.go \
go/net/http/lex.go \ go/net/http/lex.go \
go/net/http/request.go \ go/net/http/request.go \
go/net/http/response.go \ go/net/http/response.go \
@ -1761,7 +1773,6 @@ libgo_go_objs = \
image/jpeg.lo \ image/jpeg.lo \
image/png.lo \ image/png.lo \
image/tiff.lo \ image/tiff.lo \
image/ycbcr.lo \
index/suffixarray.lo \ index/suffixarray.lo \
io/ioutil.lo \ io/ioutil.lo \
log/syslog.lo \ log/syslog.lo \
@ -3066,16 +3077,6 @@ image/tiff/check: $(CHECK_DEPS)
@$(CHECK) @$(CHECK)
.PHONY: image/tiff/check .PHONY: image/tiff/check
@go_include@ image/ycbcr.lo.dep
image/ycbcr.lo.dep: $(go_image_ycbcr_files)
$(BUILDDEPS)
image/ycbcr.lo: $(go_image_ycbcr_files)
$(BUILDPACKAGE)
image/ycbcr/check: $(CHECK_DEPS)
@$(MKDIR_P) image/ycbcr
@$(CHECK)
.PHONY: image/ycbcr/check
@go_include@ index/suffixarray.lo.dep @go_include@ index/suffixarray.lo.dep
index/suffixarray.lo.dep: $(go_index_suffixarray_files) index/suffixarray.lo.dep: $(go_index_suffixarray_files)
$(BUILDDEPS) $(BUILDDEPS)
@ -3728,8 +3729,6 @@ image/png.gox: image/png.lo
$(BUILDGOX) $(BUILDGOX)
image/tiff.gox: image/tiff.lo image/tiff.gox: image/tiff.lo
$(BUILDGOX) $(BUILDGOX)
image/ycbcr.gox: image/ycbcr.lo
$(BUILDGOX)
index/suffixarray.gox: index/suffixarray.lo index/suffixarray.gox: index/suffixarray.lo
$(BUILDGOX) $(BUILDGOX)
@ -3938,11 +3937,11 @@ TEST_PACKAGES = \
hash/crc32/check \ hash/crc32/check \
hash/crc64/check \ hash/crc64/check \
hash/fnv/check \ hash/fnv/check \
image/color/check \
image/draw/check \ image/draw/check \
image/jpeg/check \ image/jpeg/check \
image/png/check \ image/png/check \
image/tiff/check \ image/tiff/check \
image/ycbcr/check \
index/suffixarray/check \ index/suffixarray/check \
io/ioutil/check \ io/ioutil/check \
log/syslog/check \ log/syslog/check \

View File

@ -167,20 +167,19 @@ am__DEPENDENCIES_2 = bufio/bufio.lo bytes/bytes.lo bytes/index.lo \
hash/fnv.lo net/http/cgi.lo net/http/fcgi.lo \ hash/fnv.lo net/http/cgi.lo net/http/fcgi.lo \
net/http/httptest.lo net/http/httputil.lo net/http/pprof.lo \ net/http/httptest.lo net/http/httputil.lo net/http/pprof.lo \
image/bmp.lo image/color.lo image/draw.lo image/gif.lo \ image/bmp.lo image/color.lo image/draw.lo image/gif.lo \
image/jpeg.lo image/png.lo image/tiff.lo image/ycbcr.lo \ image/jpeg.lo image/png.lo image/tiff.lo index/suffixarray.lo \
index/suffixarray.lo io/ioutil.lo log/syslog.lo \ io/ioutil.lo log/syslog.lo log/syslog/syslog_c.lo math/big.lo \
log/syslog/syslog_c.lo math/big.lo math/cmplx.lo math/rand.lo \ math/cmplx.lo math/rand.lo mime/mime.lo mime/multipart.lo \
mime/mime.lo mime/multipart.lo net/dict.lo net/http.lo \ net/dict.lo net/http.lo net/mail.lo net/rpc.lo net/smtp.lo \
net/mail.lo net/rpc.lo net/smtp.lo net/textproto.lo net/url.lo \ net/textproto.lo net/url.lo old/netchan.lo old/regexp.lo \
old/netchan.lo old/regexp.lo old/template.lo \ old/template.lo $(am__DEPENDENCIES_1) os/user.lo os/signal.lo \
$(am__DEPENDENCIES_1) os/user.lo os/signal.lo path/filepath.lo \ path/filepath.lo regexp/syntax.lo net/rpc/jsonrpc.lo \
regexp/syntax.lo net/rpc/jsonrpc.lo runtime/debug.lo \ runtime/debug.lo runtime/pprof.lo sync/atomic.lo \
runtime/pprof.lo sync/atomic.lo sync/atomic_c.lo \ sync/atomic_c.lo syscall/syscall.lo syscall/errno.lo \
syscall/syscall.lo syscall/errno.lo syscall/wait.lo \ syscall/wait.lo text/scanner.lo text/tabwriter.lo \
text/scanner.lo text/tabwriter.lo text/template.lo \ text/template.lo text/template/parse.lo testing/testing.lo \
text/template/parse.lo testing/testing.lo testing/iotest.lo \ testing/iotest.lo testing/quick.lo testing/script.lo \
testing/quick.lo testing/script.lo unicode/utf16.lo \ unicode/utf16.lo unicode/utf8.lo
unicode/utf8.lo
libgo_la_DEPENDENCIES = $(am__DEPENDENCIES_2) $(am__DEPENDENCIES_1) \ libgo_la_DEPENDENCIES = $(am__DEPENDENCIES_2) $(am__DEPENDENCIES_1) \
$(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \
$(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1)
@ -732,8 +731,7 @@ toolexeclibgoimage_DATA = \
image/gif.gox \ image/gif.gox \
image/jpeg.gox \ image/jpeg.gox \
image/png.gox \ image/png.gox \
image/tiff.gox \ image/tiff.gox
image/ycbcr.gox
toolexeclibgoindexdir = $(toolexeclibgodir)/index toolexeclibgoindexdir = $(toolexeclibgodir)/index
toolexeclibgoindex_DATA = \ toolexeclibgoindex_DATA = \
@ -972,7 +970,8 @@ go_image_files = \
go/image/format.go \ go/image/format.go \
go/image/geom.go \ go/image/geom.go \
go/image/image.go \ go/image/image.go \
go/image/names.go go/image/names.go \
go/image/ycbcr.go
go_io_files = \ go_io_files = \
go/io/multi.go \ go/io/multi.go \
@ -1034,10 +1033,12 @@ go_mime_files = \
# By default use select with pipes. Most systems should have # By default use select with pipes. Most systems should have
# something better. # something better.
@LIBGO_IS_LINUX_FALSE@@LIBGO_IS_RTEMS_FALSE@go_net_fd_os_file = go/net/fd_select.go @LIBGO_IS_LINUX_FALSE@@LIBGO_IS_NETBSD_FALSE@@LIBGO_IS_RTEMS_FALSE@go_net_fd_os_file = go/net/fd_select.go
@LIBGO_IS_LINUX_FALSE@@LIBGO_IS_NETBSD_TRUE@@LIBGO_IS_RTEMS_FALSE@go_net_fd_os_file = go/net/fd_netbsd.go
@LIBGO_IS_LINUX_TRUE@@LIBGO_IS_RTEMS_FALSE@go_net_fd_os_file = go/net/fd_linux.go @LIBGO_IS_LINUX_TRUE@@LIBGO_IS_RTEMS_FALSE@go_net_fd_os_file = go/net/fd_linux.go
@LIBGO_IS_RTEMS_TRUE@go_net_fd_os_file = go/net/fd_select.go @LIBGO_IS_RTEMS_TRUE@go_net_fd_os_file = go/net/fd_select.go
@LIBGO_IS_LINUX_FALSE@@LIBGO_IS_RTEMS_FALSE@go_net_newpollserver_file = go/net/newpollserver.go @LIBGO_IS_LINUX_FALSE@@LIBGO_IS_NETBSD_FALSE@@LIBGO_IS_RTEMS_FALSE@go_net_newpollserver_file = go/net/newpollserver.go
@LIBGO_IS_LINUX_FALSE@@LIBGO_IS_NETBSD_TRUE@@LIBGO_IS_RTEMS_FALSE@go_net_newpollserver_file = go/net/newpollserver.go
@LIBGO_IS_LINUX_TRUE@@LIBGO_IS_RTEMS_FALSE@go_net_newpollserver_file = go/net/newpollserver.go @LIBGO_IS_LINUX_TRUE@@LIBGO_IS_RTEMS_FALSE@go_net_newpollserver_file = go/net/newpollserver.go
@LIBGO_IS_RTEMS_TRUE@go_net_newpollserver_file = go/net/newpollserver_rtems.go @LIBGO_IS_RTEMS_TRUE@go_net_newpollserver_file = go/net/newpollserver_rtems.go
@LIBGO_IS_IRIX_FALSE@@LIBGO_IS_LINUX_FALSE@@LIBGO_IS_SOLARIS_FALSE@go_net_cgo_file = go/net/cgo_bsd.go @LIBGO_IS_IRIX_FALSE@@LIBGO_IS_LINUX_FALSE@@LIBGO_IS_SOLARIS_FALSE@go_net_cgo_file = go/net/cgo_bsd.go
@ -1050,7 +1051,8 @@ go_mime_files = \
@LIBGO_IS_LINUX_TRUE@go_net_sock_file = go/net/sock_linux.go @LIBGO_IS_LINUX_TRUE@go_net_sock_file = go/net/sock_linux.go
@LIBGO_IS_LINUX_FALSE@go_net_sendfile_file = go/net/sendfile_stub.go @LIBGO_IS_LINUX_FALSE@go_net_sendfile_file = go/net/sendfile_stub.go
@LIBGO_IS_LINUX_TRUE@go_net_sendfile_file = go/net/sendfile_linux.go @LIBGO_IS_LINUX_TRUE@go_net_sendfile_file = go/net/sendfile_linux.go
@LIBGO_IS_LINUX_FALSE@go_net_interface_file = go/net/interface_stub.go @LIBGO_IS_LINUX_FALSE@@LIBGO_IS_NETBSD_FALSE@go_net_interface_file = go/net/interface_stub.go
@LIBGO_IS_LINUX_FALSE@@LIBGO_IS_NETBSD_TRUE@go_net_interface_file = go/net/interface_netbsd.go
@LIBGO_IS_LINUX_TRUE@go_net_interface_file = go/net/interface_linux.go @LIBGO_IS_LINUX_TRUE@go_net_interface_file = go/net/interface_linux.go
go_net_files = \ go_net_files = \
go/net/cgo_unix.go \ go/net/cgo_unix.go \
@ -1165,6 +1167,7 @@ go_strconv_files = \
go/strconv/atof.go \ go/strconv/atof.go \
go/strconv/atoi.go \ go/strconv/atoi.go \
go/strconv/decimal.go \ go/strconv/decimal.go \
go/strconv/extfloat.go \
go/strconv/ftoa.go \ go/strconv/ftoa.go \
go/strconv/itoa.go \ go/strconv/itoa.go \
go/strconv/quote.go go/strconv/quote.go
@ -1194,7 +1197,8 @@ go_syslog_c_files = \
go_testing_files = \ go_testing_files = \
go/testing/benchmark.go \ go/testing/benchmark.go \
go/testing/example.go \ go/testing/example.go \
go/testing/testing.go go/testing/testing.go \
go/testing/wrapper.go
go_time_files = \ go_time_files = \
go/time/format.go \ go/time/format.go \
@ -1563,7 +1567,9 @@ go_go_build_files = \
go_go_doc_files = \ go_go_doc_files = \
go/go/doc/comment.go \ go/go/doc/comment.go \
go/go/doc/doc.go \ go/go/doc/doc.go \
go/go/doc/example.go go/go/doc/example.go \
go/go/doc/exports.go \
go/go/doc/filter.go
go_go_parser_files = \ go_go_parser_files = \
go/go/parser/interface.go \ go/go/parser/interface.go \
@ -1614,7 +1620,8 @@ go_image_bmp_files = \
go/image/bmp/reader.go go/image/bmp/reader.go
go_image_color_files = \ go_image_color_files = \
go/image/color/color.go go/image/color/color.go \
go/image/color/ycbcr.go
go_image_draw_files = \ go_image_draw_files = \
go/image/draw/draw.go go/image/draw/draw.go
@ -1639,9 +1646,6 @@ go_image_tiff_files = \
go/image/tiff/consts.go \ go/image/tiff/consts.go \
go/image/tiff/reader.go go/image/tiff/reader.go
go_image_ycbcr_files = \
go/image/ycbcr/ycbcr.go
go_index_suffixarray_files = \ go_index_suffixarray_files = \
go/index/suffixarray/qsufsort.go \ go/index/suffixarray/qsufsort.go \
go/index/suffixarray/suffixarray.go go/index/suffixarray/suffixarray.go
@ -1694,6 +1698,7 @@ go_net_http_files = \
go/net/http/filetransport.go \ go/net/http/filetransport.go \
go/net/http/fs.go \ go/net/http/fs.go \
go/net/http/header.go \ go/net/http/header.go \
go/net/http/jar.go \
go/net/http/lex.go \ go/net/http/lex.go \
go/net/http/request.go \ go/net/http/request.go \
go/net/http/response.go \ go/net/http/response.go \
@ -2043,7 +2048,6 @@ libgo_go_objs = \
image/jpeg.lo \ image/jpeg.lo \
image/png.lo \ image/png.lo \
image/tiff.lo \ image/tiff.lo \
image/ycbcr.lo \
index/suffixarray.lo \ index/suffixarray.lo \
io/ioutil.lo \ io/ioutil.lo \
log/syslog.lo \ log/syslog.lo \
@ -2299,11 +2303,11 @@ TEST_PACKAGES = \
hash/crc32/check \ hash/crc32/check \
hash/crc64/check \ hash/crc64/check \
hash/fnv/check \ hash/fnv/check \
image/color/check \
image/draw/check \ image/draw/check \
image/jpeg/check \ image/jpeg/check \
image/png/check \ image/png/check \
image/tiff/check \ image/tiff/check \
image/ycbcr/check \
index/suffixarray/check \ index/suffixarray/check \
io/ioutil/check \ io/ioutil/check \
log/syslog/check \ log/syslog/check \
@ -5657,16 +5661,6 @@ image/tiff/check: $(CHECK_DEPS)
@$(CHECK) @$(CHECK)
.PHONY: image/tiff/check .PHONY: image/tiff/check
@go_include@ image/ycbcr.lo.dep
image/ycbcr.lo.dep: $(go_image_ycbcr_files)
$(BUILDDEPS)
image/ycbcr.lo: $(go_image_ycbcr_files)
$(BUILDPACKAGE)
image/ycbcr/check: $(CHECK_DEPS)
@$(MKDIR_P) image/ycbcr
@$(CHECK)
.PHONY: image/ycbcr/check
@go_include@ index/suffixarray.lo.dep @go_include@ index/suffixarray.lo.dep
index/suffixarray.lo.dep: $(go_index_suffixarray_files) index/suffixarray.lo.dep: $(go_index_suffixarray_files)
$(BUILDDEPS) $(BUILDDEPS)
@ -6314,8 +6308,6 @@ image/png.gox: image/png.lo
$(BUILDGOX) $(BUILDGOX)
image/tiff.gox: image/tiff.lo image/tiff.gox: image/tiff.lo
$(BUILDGOX) $(BUILDGOX)
image/ycbcr.gox: image/ycbcr.lo
$(BUILDGOX)
index/suffixarray.gox: index/suffixarray.lo index/suffixarray.gox: index/suffixarray.lo
$(BUILDGOX) $(BUILDGOX)

libgo/configure vendored
View File

@ -657,6 +657,8 @@ LIBGO_IS_SOLARIS_FALSE
LIBGO_IS_SOLARIS_TRUE LIBGO_IS_SOLARIS_TRUE
LIBGO_IS_RTEMS_FALSE LIBGO_IS_RTEMS_FALSE
LIBGO_IS_RTEMS_TRUE LIBGO_IS_RTEMS_TRUE
LIBGO_IS_NETBSD_FALSE
LIBGO_IS_NETBSD_TRUE
LIBGO_IS_LINUX_FALSE LIBGO_IS_LINUX_FALSE
LIBGO_IS_LINUX_TRUE LIBGO_IS_LINUX_TRUE
LIBGO_IS_IRIX_FALSE LIBGO_IS_IRIX_FALSE
@ -11097,7 +11099,7 @@ else
lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
lt_status=$lt_dlunknown lt_status=$lt_dlunknown
cat > conftest.$ac_ext <<_LT_EOF cat > conftest.$ac_ext <<_LT_EOF
#line 11100 "configure" #line 11102 "configure"
#include "confdefs.h" #include "confdefs.h"
#if HAVE_DLFCN_H #if HAVE_DLFCN_H
@ -11203,7 +11205,7 @@ else
lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
lt_status=$lt_dlunknown lt_status=$lt_dlunknown
cat > conftest.$ac_ext <<_LT_EOF cat > conftest.$ac_ext <<_LT_EOF
#line 11206 "configure" #line 11208 "configure"
#include "confdefs.h" #include "confdefs.h"
#if HAVE_DLFCN_H #if HAVE_DLFCN_H
@ -13473,6 +13475,7 @@ case ${host} in
*-*-freebsd*) is_freebsd=yes; GOOS=freebsd ;; *-*-freebsd*) is_freebsd=yes; GOOS=freebsd ;;
*-*-irix6*) is_irix=yes; GOOS=irix ;; *-*-irix6*) is_irix=yes; GOOS=irix ;;
*-*-linux*) is_linux=yes; GOOS=linux ;; *-*-linux*) is_linux=yes; GOOS=linux ;;
*-*-netbsd*) is_netbsd=yes; GOOS=netbsd ;;
*-*-rtems*) is_rtems=yes; GOOS=rtems ;; *-*-rtems*) is_rtems=yes; GOOS=rtems ;;
*-*-solaris2*) is_solaris=yes; GOOS=solaris ;; *-*-solaris2*) is_solaris=yes; GOOS=solaris ;;
esac esac
@ -13508,6 +13511,14 @@ else
LIBGO_IS_LINUX_FALSE= LIBGO_IS_LINUX_FALSE=
fi fi
if test $is_netbsd = yes; then
LIBGO_IS_NETBSD_TRUE=
LIBGO_IS_NETBSD_FALSE='#'
else
LIBGO_IS_NETBSD_TRUE='#'
LIBGO_IS_NETBSD_FALSE=
fi
if test $is_rtems = yes; then if test $is_rtems = yes; then
LIBGO_IS_RTEMS_TRUE= LIBGO_IS_RTEMS_TRUE=
LIBGO_IS_RTEMS_FALSE='#' LIBGO_IS_RTEMS_FALSE='#'
@ -14938,6 +14949,10 @@ if test -z "${LIBGO_IS_LINUX_TRUE}" && test -z "${LIBGO_IS_LINUX_FALSE}"; then
as_fn_error "conditional \"LIBGO_IS_LINUX\" was never defined. as_fn_error "conditional \"LIBGO_IS_LINUX\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5 Usually this means the macro was only invoked conditionally." "$LINENO" 5
fi fi
if test -z "${LIBGO_IS_NETBSD_TRUE}" && test -z "${LIBGO_IS_NETBSD_FALSE}"; then
as_fn_error "conditional \"LIBGO_IS_NETBSD\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5
fi
if test -z "${LIBGO_IS_RTEMS_TRUE}" && test -z "${LIBGO_IS_RTEMS_FALSE}"; then if test -z "${LIBGO_IS_RTEMS_TRUE}" && test -z "${LIBGO_IS_RTEMS_FALSE}"; then
as_fn_error "conditional \"LIBGO_IS_RTEMS\" was never defined. as_fn_error "conditional \"LIBGO_IS_RTEMS\" was never defined.
Usually this means the macro was only invoked conditionally." "$LINENO" 5 Usually this means the macro was only invoked conditionally." "$LINENO" 5

View File

@ -134,6 +134,7 @@ case ${host} in
*-*-freebsd*) is_freebsd=yes; GOOS=freebsd ;; *-*-freebsd*) is_freebsd=yes; GOOS=freebsd ;;
*-*-irix6*) is_irix=yes; GOOS=irix ;; *-*-irix6*) is_irix=yes; GOOS=irix ;;
*-*-linux*) is_linux=yes; GOOS=linux ;; *-*-linux*) is_linux=yes; GOOS=linux ;;
*-*-netbsd*) is_netbsd=yes; GOOS=netbsd ;;
*-*-rtems*) is_rtems=yes; GOOS=rtems ;; *-*-rtems*) is_rtems=yes; GOOS=rtems ;;
*-*-solaris2*) is_solaris=yes; GOOS=solaris ;; *-*-solaris2*) is_solaris=yes; GOOS=solaris ;;
esac esac
@ -141,6 +142,7 @@ AM_CONDITIONAL(LIBGO_IS_DARWIN, test $is_darwin = yes)
AM_CONDITIONAL(LIBGO_IS_FREEBSD, test $is_freebsd = yes) AM_CONDITIONAL(LIBGO_IS_FREEBSD, test $is_freebsd = yes)
AM_CONDITIONAL(LIBGO_IS_IRIX, test $is_irix = yes) AM_CONDITIONAL(LIBGO_IS_IRIX, test $is_irix = yes)
AM_CONDITIONAL(LIBGO_IS_LINUX, test $is_linux = yes) AM_CONDITIONAL(LIBGO_IS_LINUX, test $is_linux = yes)
AM_CONDITIONAL(LIBGO_IS_NETBSD, test $is_netbsd = yes)
AM_CONDITIONAL(LIBGO_IS_RTEMS, test $is_rtems = yes) AM_CONDITIONAL(LIBGO_IS_RTEMS, test $is_rtems = yes)
AM_CONDITIONAL(LIBGO_IS_SOLARIS, test $is_solaris = yes) AM_CONDITIONAL(LIBGO_IS_SOLARIS, test $is_solaris = yes)
AC_SUBST(GOOS) AC_SUBST(GOOS)

View File

@ -163,10 +163,10 @@ func readTestZip(t *testing.T, zt ZipTest) {
done := make(chan bool) done := make(chan bool)
for i := 0; i < 5; i++ { for i := 0; i < 5; i++ {
for j, ft := range zt.File { for j, ft := range zt.File {
go func() { go func(j int, ft ZipTestFile) {
readTestFile(t, ft, z.File[j]) readTestFile(t, ft, z.File[j])
done <- true done <- true
}() }(j, ft)
n++ n++
} }
} }
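The fix above is the classic Go loop-variable capture problem: the unparameterized closures all shared the same j and ft, so goroutines that ran after the loop advanced read the wrong file. A minimal, self-contained sketch of the corrected pattern (the names and values here are illustrative, not from the commit):

package main

import "fmt"

func main() {
	names := []string{"a", "b", "c"}
	done := make(chan bool)
	for i, name := range names {
		// Pass the loop variables as arguments so each goroutine
		// gets its own copy of i and name, as in the zip test fix.
		go func(i int, name string) {
			fmt.Println(i, name)
			done <- true
		}(i, name)
	}
	for i := 0; i < len(names); i++ {
		<-done
	}
}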

View File

@ -96,12 +96,28 @@ func msDosTimeToTime(dosDate, dosTime uint16) time.Time {
) )
} }
// timeToMsDosTime converts a time.Time to an MS-DOS date and time.
// The resolution is 2s.
// See: http://msdn.microsoft.com/en-us/library/ms724274(v=VS.85).aspx
func timeToMsDosTime(t time.Time) (fDate uint16, fTime uint16) {
t = t.In(time.UTC)
fDate = uint16(t.Day() + int(t.Month())<<5 + (t.Year()-1980)<<9)
fTime = uint16(t.Second()/2 + t.Minute()<<5 + t.Hour()<<11)
return
}
// ModTime returns the modification time. // ModTime returns the modification time.
// The resolution is 2s. // The resolution is 2s.
func (h *FileHeader) ModTime() time.Time { func (h *FileHeader) ModTime() time.Time {
return msDosTimeToTime(h.ModifiedDate, h.ModifiedTime) return msDosTimeToTime(h.ModifiedDate, h.ModifiedTime)
} }
// SetModTime sets the ModifiedTime and ModifiedDate fields to the given time.
// The resolution is 2s.
func (h *FileHeader) SetModTime(t time.Time) {
h.ModifiedDate, h.ModifiedTime = timeToMsDosTime(t)
}
// traditional names for Unix constants // traditional names for Unix constants
const ( const (
s_IFMT = 0xf000 s_IFMT = 0xf000
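SetModTime and the new timeToMsDosTime helper store the timestamp in the MS-DOS format used by zip, so values round-trip in UTC at 2-second resolution. A rough usage sketch (the file name and timestamp are made up for illustration):

package main

import (
	"archive/zip"
	"fmt"
	"time"
)

func main() {
	fh := &zip.FileHeader{Name: "hello.txt"}
	// Fills in ModifiedDate and ModifiedTime using the MS-DOS encoding;
	// reading the value back gives the time truncated to 2s, in UTC.
	fh.SetModTime(time.Date(2011, time.December, 22, 12, 34, 56, 0, time.UTC))
	fmt.Println(fh.ModTime()) // 2011-12-22 12:34:56 +0000 UTC
}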

View File

@ -11,6 +11,7 @@ import (
"fmt" "fmt"
"io" "io"
"testing" "testing"
"time"
) )
type stringReaderAt string type stringReaderAt string
@ -55,3 +56,13 @@ func TestOver65kFiles(t *testing.T) {
} }
} }
} }
func TestModTime(t *testing.T) {
var testTime = time.Date(2009, time.November, 10, 23, 45, 58, 0, time.UTC)
fh := new(FileHeader)
fh.SetModTime(testTime)
outTime := fh.ModTime()
if !outTime.Equal(testTime) {
t.Errorf("times don't match: got %s, want %s", outTime, testTime)
}
}

View File

@ -16,7 +16,6 @@ const N = 10000 // make this bigger for a larger (and slower) test
var data string // test data for write tests var data string // test data for write tests
var bytes []byte // test data; same as data but as a slice. var bytes []byte // test data; same as data but as a slice.
func init() { func init() {
bytes = make([]byte, N) bytes = make([]byte, N)
for i := 0; i < N; i++ { for i := 0; i < N; i++ {

View File

@ -289,8 +289,7 @@ func bmIndexByte(b *testing.B, index func([]byte, byte) int, n int) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
j := index(buf, 'x') j := index(buf, 'x')
if j != n-1 { if j != n-1 {
println("bad index", j) b.Fatal("bad index", j)
panic("bad index")
} }
} }
buf[n-1] = '\x00' buf[n-1] = '\x00'
@ -317,7 +316,7 @@ func bmEqual(b *testing.B, equal func([]byte, []byte) bool, n int) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
eq := equal(buf1, buf2) eq := equal(buf1, buf2)
if !eq { if !eq {
panic("bad equal") b.Fatal("bad equal")
} }
} }
buf1[n-1] = '\x00' buf1[n-1] = '\x00'
@ -339,8 +338,7 @@ func bmIndex(b *testing.B, index func([]byte, []byte) int, n int) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
j := index(buf, buf[n-7:]) j := index(buf, buf[n-7:])
if j != n-7 { if j != n-7 {
println("bad index", j) b.Fatal("bad index", j)
panic("bad index")
} }
} }
buf[n-1] = '\x00' buf[n-1] = '\x00'
@ -362,8 +360,7 @@ func bmIndexEasy(b *testing.B, index func([]byte, []byte) int, n int) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
j := index(buf, buf[n-7:]) j := index(buf, buf[n-7:])
if j != n-7 { if j != n-7 {
println("bad index", j) b.Fatal("bad index", j)
panic("bad index")
} }
} }
buf[n-1] = '\x00' buf[n-1] = '\x00'
@ -385,8 +382,7 @@ func bmCount(b *testing.B, count func([]byte, []byte) int, n int) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
j := count(buf, buf[n-7:]) j := count(buf, buf[n-7:])
if j != 1 { if j != 1 {
println("bad count", j) b.Fatal("bad count", j)
panic("bad count")
} }
} }
buf[n-1] = '\x00' buf[n-1] = '\x00'
@ -408,8 +404,7 @@ func bmCountEasy(b *testing.B, count func([]byte, []byte) int, n int) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
j := count(buf, buf[n-7:]) j := count(buf, buf[n-7:])
if j != 1 { if j != 1 {
println("bad count", j) b.Fatal("bad count", j)
panic("bad count")
} }
} }
buf[n-1] = '\x00' buf[n-1] = '\x00'

View File

@ -0,0 +1,24 @@
package bytes_test
import (
. "bytes"
"encoding/base64"
"io"
"os"
)
// Hello world!
func ExampleBuffer() {
var b Buffer // A Buffer needs no initialization.
b.Write([]byte("Hello "))
b.Write([]byte("world!"))
b.WriteTo(os.Stdout)
}
// Gophers rule!
func ExampleBuffer_reader() {
// A Buffer can turn a string or a []byte into an io.Reader.
buf := NewBufferString("R29waGVycyBydWxlIQ==")
dec := base64.NewDecoder(base64.StdEncoding, buf)
io.Copy(os.Stdout, dec)
}

View File

@ -356,7 +356,7 @@ func BenchmarkEncrypt(b *testing.B) {
tt := encryptTests[0] tt := encryptTests[0]
c, err := NewCipher(tt.key) c, err := NewCipher(tt.key)
if err != nil { if err != nil {
panic("NewCipher") b.Fatal("NewCipher:", err)
} }
out := make([]byte, len(tt.in)) out := make([]byte, len(tt.in))
b.StartTimer() b.StartTimer()

View File

@ -71,3 +71,6 @@ func RegisterHash(h Hash, f func() hash.Hash) {
} }
hashes[h] = f hashes[h] = f
} }
// PrivateKey represents a private key using an unspecified algorithm.
type PrivateKey interface{}

View File

@ -16,6 +16,7 @@ import (
// PublicKeyType is the armor type for a PGP public key. // PublicKeyType is the armor type for a PGP public key.
var PublicKeyType = "PGP PUBLIC KEY BLOCK" var PublicKeyType = "PGP PUBLIC KEY BLOCK"
// PrivateKeyType is the armor type for a PGP private key. // PrivateKeyType is the armor type for a PGP private key.
var PrivateKeyType = "PGP PRIVATE KEY BLOCK" var PrivateKeyType = "PGP PRIVATE KEY BLOCK"

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build darwin freebsd linux openbsd // +build darwin freebsd linux netbsd openbsd
// Unix cryptographically secure pseudorandom number // Unix cryptographically secure pseudorandom number
// generator. // generator.

View File

@ -5,8 +5,8 @@
package tls package tls
import ( import (
"crypto"
"crypto/rand" "crypto/rand"
"crypto/rsa"
"crypto/x509" "crypto/x509"
"io" "io"
"strings" "strings"
@ -255,7 +255,7 @@ func (c *Config) BuildNameToCertificate() {
// A Certificate is a chain of one or more certificates, leaf first. // A Certificate is a chain of one or more certificates, leaf first.
type Certificate struct { type Certificate struct {
Certificate [][]byte Certificate [][]byte
PrivateKey *rsa.PrivateKey PrivateKey crypto.PrivateKey // supported types: *rsa.PrivateKey
// OCSPStaple contains an optional OCSP response which will be served // OCSPStaple contains an optional OCSP response which will be served
// to clients that request it. // to clients that request it.
OCSPStaple []byte OCSPStaple []byte
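Since Certificate.PrivateKey is now the empty-interface crypto.PrivateKey, callers that need the concrete key must type-assert it, exactly as the handshake and key-agreement code below now does. A hedged sketch of what client code might look like (the PEM file names are placeholders):

package main

import (
	"crypto/rsa"
	"crypto/tls"
	"log"
)

func main() {
	cert, err := tls.LoadX509KeyPair("cert.pem", "key.pem")
	if err != nil {
		log.Fatal(err)
	}
	// PrivateKey is crypto.PrivateKey (an interface{}); at this point
	// *rsa.PrivateKey is the only supported concrete type.
	key, ok := cert.PrivateKey.(*rsa.PrivateKey)
	if !ok {
		log.Fatal("certificate key is not RSA")
	}
	_ = key
}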

View File

@ -234,7 +234,7 @@ func (c *Conn) clientHandshake() error {
digest := make([]byte, 0, 36) digest := make([]byte, 0, 36)
digest = finishedHash.serverMD5.Sum(digest) digest = finishedHash.serverMD5.Sum(digest)
digest = finishedHash.serverSHA1.Sum(digest) digest = finishedHash.serverSHA1.Sum(digest)
signed, err := rsa.SignPKCS1v15(c.config.rand(), c.config.Certificates[0].PrivateKey, crypto.MD5SHA1, digest) signed, err := rsa.SignPKCS1v15(c.config.rand(), c.config.Certificates[0].PrivateKey.(*rsa.PrivateKey), crypto.MD5SHA1, digest)
if err != nil { if err != nil {
return c.sendAlert(alertInternalError) return c.sendAlert(alertInternalError)
} }

View File

@ -44,7 +44,7 @@ func (ka rsaKeyAgreement) processClientKeyExchange(config *Config, ckx *clientKe
ciphertext = ckx.ciphertext[2:] ciphertext = ckx.ciphertext[2:]
} }
err = rsa.DecryptPKCS1v15SessionKey(config.rand(), config.Certificates[0].PrivateKey, ciphertext, preMasterSecret) err = rsa.DecryptPKCS1v15SessionKey(config.rand(), config.Certificates[0].PrivateKey.(*rsa.PrivateKey), ciphertext, preMasterSecret)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -147,7 +147,7 @@ Curve:
copy(serverECDHParams[4:], ecdhePublic) copy(serverECDHParams[4:], ecdhePublic)
md5sha1 := md5SHA1Hash(clientHello.random, hello.random, serverECDHParams) md5sha1 := md5SHA1Hash(clientHello.random, hello.random, serverECDHParams)
sig, err := rsa.SignPKCS1v15(config.rand(), config.Certificates[0].PrivateKey, crypto.MD5SHA1, md5sha1) sig, err := rsa.SignPKCS1v15(config.rand(), config.Certificates[0].PrivateKey.(*rsa.PrivateKey), crypto.MD5SHA1, md5sha1)
if err != nil { if err != nil {
return nil, errors.New("failed to sign ECDHE parameters: " + err.Error()) return nil, errors.New("failed to sign ECDHE parameters: " + err.Error())
} }

View File

@ -8,7 +8,7 @@ package tls
// Note: We disable -Werror here because the code in this file uses a deprecated API to stay // Note: We disable -Werror here because the code in this file uses a deprecated API to stay
// compatible with both Mac OS X 10.6 and 10.7. Using a deprecated function on Darwin generates // compatible with both Mac OS X 10.6 and 10.7. Using a deprecated function on Darwin generates
// a warning. // a warning.
#cgo CFLAGS: -Wno-error #cgo CFLAGS: -Wno-error -Wno-deprecated-declarations
#cgo LDFLAGS: -framework CoreFoundation -framework Security #cgo LDFLAGS: -framework CoreFoundation -framework Security
#include <CoreFoundation/CoreFoundation.h> #include <CoreFoundation/CoreFoundation.h>
#include <Security/Security.h> #include <Security/Security.h>

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build plan9 // +build plan9 darwin/nocgo
package tls package tls

View File

@ -28,6 +28,9 @@ func NewCertPool() *CertPool {
// given certificate. If no such certificate can be found or the signature // given certificate. If no such certificate can be found or the signature
// doesn't match, it returns nil. // doesn't match, it returns nil.
func (s *CertPool) findVerifiedParents(cert *Certificate) (parents []int) { func (s *CertPool) findVerifiedParents(cert *Certificate) (parents []int) {
if s == nil {
return
}
var candidates []int var candidates []int
if len(cert.AuthorityKeyId) > 0 { if len(cert.AuthorityKeyId) > 0 {

View File

@ -19,6 +19,7 @@ type verifyTest struct {
roots []string roots []string
currentTime int64 currentTime int64
dnsName string dnsName string
nilRoots bool
errorCallback func(*testing.T, int, error) bool errorCallback func(*testing.T, int, error) bool
expectedChains [][]string expectedChains [][]string
@ -45,6 +46,14 @@ var verifyTests = []verifyTest{
errorCallback: expectHostnameError, errorCallback: expectHostnameError,
}, },
{
leaf: googleLeaf,
intermediates: []string{thawteIntermediate},
nilRoots: true, // verifies that we don't crash
currentTime: 1302726541,
dnsName: "www.google.com",
errorCallback: expectAuthorityUnknown,
},
{ {
leaf: googleLeaf, leaf: googleLeaf,
intermediates: []string{thawteIntermediate}, intermediates: []string{thawteIntermediate},
@ -136,6 +145,9 @@ func TestVerify(t *testing.T) {
DNSName: test.dnsName, DNSName: test.dnsName,
CurrentTime: time.Unix(test.currentTime, 0), CurrentTime: time.Unix(test.currentTime, 0),
} }
if test.nilRoots {
opts.Roots = nil
}
for j, root := range test.roots { for j, root := range test.roots {
ok := opts.Roots.AppendCertsFromPEM([]byte(root)) ok := opts.Roots.AppendCertsFromPEM([]byte(root))
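The nil receiver check in findVerifiedParents, exercised by the new nilRoots test case, means verifying against a nil root pool fails cleanly with an unknown-authority error instead of dereferencing a nil *CertPool. A rough sketch using a throwaway self-signed certificate (errors ignored for brevity; everything here is illustrative):

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"fmt"
	"math/big"
	"time"
)

func main() {
	key, _ := rsa.GenerateKey(rand.Reader, 1024)
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{CommonName: "example"},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(time.Hour),
	}
	der, _ := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	cert, _ := x509.ParseCertificate(der)

	// Roots is left at its zero value (nil): with the guard above this
	// no longer crashes, it just reports an unknown authority.
	_, err := cert.Verify(x509.VerifyOptions{})
	fmt.Println(err)
}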

View File

@ -981,6 +981,7 @@ func CreateCertificate(rand io.Reader, template, parent *Certificate, pub *rsa.P
// pemCRLPrefix is the magic string that indicates that we have a PEM encoded // pemCRLPrefix is the magic string that indicates that we have a PEM encoded
// CRL. // CRL.
var pemCRLPrefix = []byte("-----BEGIN X509 CRL") var pemCRLPrefix = []byte("-----BEGIN X509 CRL")
// pemType is the type of a PEM encoded CRL. // pemType is the type of a PEM encoded CRL.
var pemType = "X509 CRL" var pemType = "X509 CRL"

View File

@ -13,7 +13,8 @@ import (
func dotest() bool { func dotest() bool {
// For now, only works on ELF platforms. // For now, only works on ELF platforms.
return syscall.OS == "linux" && os.Getenv("GOARCH") == "amd64" // TODO: convert to work with new go tool
return false && syscall.OS == "linux" && os.Getenv("GOARCH") == "amd64"
} }
func getTable(t *testing.T) *Table { func getTable(t *testing.T) *Table {

View File

@ -171,11 +171,42 @@ func (br *byteSliceReader) Read(p []byte) (int, error) {
return n, nil return n, nil
} }
func BenchmarkRead(b *testing.B) { func BenchmarkReadSlice1000Int32s(b *testing.B) {
bsr := &byteSliceReader{}
slice := make([]int32, 1000)
buf := make([]byte, len(slice)*4)
b.SetBytes(int64(len(buf)))
b.ResetTimer()
for i := 0; i < b.N; i++ {
bsr.remain = buf
Read(bsr, BigEndian, slice)
}
}
func BenchmarkReadStruct(b *testing.B) {
bsr := &byteSliceReader{}
var buf bytes.Buffer
Write(&buf, BigEndian, &s)
n := TotalSize(reflect.ValueOf(s))
b.SetBytes(int64(n))
t := s
b.ResetTimer()
for i := 0; i < b.N; i++ {
bsr.remain = buf.Bytes()
Read(bsr, BigEndian, &t)
}
b.StopTimer()
if !reflect.DeepEqual(s, t) {
b.Fatal("no match")
}
}
func BenchmarkReadInts(b *testing.B) {
var ls Struct var ls Struct
bsr := &byteSliceReader{} bsr := &byteSliceReader{}
var r io.Reader = bsr var r io.Reader = bsr
b.SetBytes(2 * (1 + 2 + 4 + 8))
b.ResetTimer()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
bsr.remain = big bsr.remain = big
Read(r, BigEndian, &ls.Int8) Read(r, BigEndian, &ls.Int8)
@ -196,25 +227,19 @@ func BenchmarkRead(b *testing.B) {
for i := range want.Array { for i := range want.Array {
want.Array[i] = 0 want.Array[i] = 0
} }
b.StopTimer()
if !reflect.DeepEqual(ls, want) { if !reflect.DeepEqual(ls, want) {
panic("no match") panic("no match")
} }
} }
func BenchmarkWrite(b *testing.B) { func BenchmarkWriteInts(b *testing.B) {
buf := new(bytes.Buffer) buf := new(bytes.Buffer)
var w io.Writer = buf var w io.Writer = buf
b.SetBytes(2 * (1 + 2 + 4 + 8))
b.ResetTimer()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
buf.Reset() buf.Reset()
Write(w, BigEndian, &s.Int8)
Write(w, BigEndian, &s.Int16)
Write(w, BigEndian, &s.Int32)
Write(w, BigEndian, &s.Int64)
Write(w, BigEndian, &s.Uint8)
Write(w, BigEndian, &s.Uint16)
Write(w, BigEndian, &s.Uint32)
Write(w, BigEndian, &s.Uint64)
Write(w, BigEndian, s.Int8) Write(w, BigEndian, s.Int8)
Write(w, BigEndian, s.Int16) Write(w, BigEndian, s.Int16)
Write(w, BigEndian, s.Int32) Write(w, BigEndian, s.Int32)
@ -224,11 +249,8 @@ func BenchmarkWrite(b *testing.B) {
Write(w, BigEndian, s.Uint32) Write(w, BigEndian, s.Uint32)
Write(w, BigEndian, s.Uint64) Write(w, BigEndian, s.Uint64)
} }
b.StopTimer()
if !bytes.Equal(buf.Bytes()[:30], big[:30]) { if !bytes.Equal(buf.Bytes(), big[:30]) {
panic("first half doesn't match") b.Fatalf("first half doesn't match: %x %x", buf.Bytes(), big[:30])
}
if !bytes.Equal(buf.Bytes()[30:], big[:30]) {
panic("second half doesn't match")
} }
} }

View File

@ -165,6 +165,7 @@ func TestNonCanonicalZero(t *testing.T) {
func BenchmarkPutUvarint32(b *testing.B) { func BenchmarkPutUvarint32(b *testing.B) {
buf := make([]byte, MaxVarintLen32) buf := make([]byte, MaxVarintLen32)
b.SetBytes(4)
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
for j := uint(0); j < MaxVarintLen32; j++ { for j := uint(0); j < MaxVarintLen32; j++ {
PutUvarint(buf, 1<<(j*7)) PutUvarint(buf, 1<<(j*7))
@ -174,6 +175,7 @@ func BenchmarkPutUvarint32(b *testing.B) {
func BenchmarkPutUvarint64(b *testing.B) { func BenchmarkPutUvarint64(b *testing.B) {
buf := make([]byte, MaxVarintLen64) buf := make([]byte, MaxVarintLen64)
b.SetBytes(8)
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
for j := uint(0); j < MaxVarintLen64; j++ { for j := uint(0); j < MaxVarintLen64; j++ {
PutUvarint(buf, 1<<(j*7)) PutUvarint(buf, 1<<(j*7))

View File

@ -102,12 +102,15 @@ func TestIntCodec(t *testing.T) {
// The result of encoding a true boolean with field number 7 // The result of encoding a true boolean with field number 7
var boolResult = []byte{0x07, 0x01} var boolResult = []byte{0x07, 0x01}
// The result of encoding a number 17 with field number 7 // The result of encoding a number 17 with field number 7
var signedResult = []byte{0x07, 2 * 17} var signedResult = []byte{0x07, 2 * 17}
var unsignedResult = []byte{0x07, 17} var unsignedResult = []byte{0x07, 17}
var floatResult = []byte{0x07, 0xFE, 0x31, 0x40} var floatResult = []byte{0x07, 0xFE, 0x31, 0x40}
// The result of encoding a number 17+19i with field number 7 // The result of encoding a number 17+19i with field number 7
var complexResult = []byte{0x07, 0xFE, 0x31, 0x40, 0xFE, 0x33, 0x40} var complexResult = []byte{0x07, 0xFE, 0x31, 0x40, 0xFE, 0x33, 0x40}
// The result of encoding "hello" with field number 7 // The result of encoding "hello" with field number 7
var bytesResult = []byte{0x07, 0x05, 'h', 'e', 'l', 'l', 'o'} var bytesResult = []byte{0x07, 0x05, 'h', 'e', 'l', 'l', 'o'}

View File

@ -469,7 +469,14 @@ func (enc *Encoder) encodeInterface(b *bytes.Buffer, iv reflect.Value) {
// isZero returns whether the value is the zero of its type. // isZero returns whether the value is the zero of its type.
func isZero(val reflect.Value) bool { func isZero(val reflect.Value) bool {
switch val.Kind() { switch val.Kind() {
case reflect.Array, reflect.Map, reflect.Slice, reflect.String: case reflect.Array:
for i := 0; i < val.Len(); i++ {
if !isZero(val.Index(i)) {
return false
}
}
return true
case reflect.Map, reflect.Slice, reflect.String:
return val.Len() == 0 return val.Len() == 0
case reflect.Bool: case reflect.Bool:
return !val.Bool() return !val.Bool()
@ -483,6 +490,13 @@ func isZero(val reflect.Value) bool {
return val.Float() == 0 return val.Float() == 0
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return val.Uint() == 0 return val.Uint() == 0
case reflect.Struct:
for i := 0; i < val.NumField(); i++ {
if !isZero(val.Field(i)) {
return false
}
}
return true
} }
panic("unknown type in isZero " + val.Type().String()) panic("unknown type in isZero " + val.Type().String())
} }

View File

@ -13,6 +13,7 @@ import (
"io" "io"
"strings" "strings"
"testing" "testing"
"time"
) )
// Types that implement the GobEncoder/Decoder interfaces. // Types that implement the GobEncoder/Decoder interfaces.
@ -526,3 +527,50 @@ func TestGobEncoderExtraIndirect(t *testing.T) {
t.Errorf("got = %q, want %q", got, gdb) t.Errorf("got = %q, want %q", got, gdb)
} }
} }
// Another bug: this caused a crash with the new Go1 Time type.
// We throw in a gob-encoding array, to test another case of isZero
type isZeroBug struct {
T time.Time
S string
I int
A isZeroBugArray
}
type isZeroBugArray [2]uint8
// Receiver is value, not pointer, to test isZero of array.
func (a isZeroBugArray) GobEncode() (b []byte, e error) {
b = append(b, a[:]...)
return b, nil
}
func (a *isZeroBugArray) GobDecode(data []byte) error {
println("DECODE")
if len(data) != len(a) {
return io.EOF
}
a[0] = data[0]
a[1] = data[1]
return nil
}
func TestGobEncodeIsZero(t *testing.T) {
x := isZeroBug{time.Now(), "hello", -55, isZeroBugArray{1, 2}}
b := new(bytes.Buffer)
enc := NewEncoder(b)
err := enc.Encode(x)
if err != nil {
t.Fatal("encode:", err)
}
var y isZeroBug
dec := NewDecoder(b)
err = dec.Decode(&y)
if err != nil {
t.Fatal("decode:", err)
}
if x != y {
t.Fatalf("%v != %v", x, y)
}
}

View File

@ -39,7 +39,7 @@ func benchmarkEndToEnd(r io.Reader, w io.Writer, b *testing.B) {
func BenchmarkEndToEndPipe(b *testing.B) { func BenchmarkEndToEndPipe(b *testing.B) {
r, w, err := os.Pipe() r, w, err := os.Pipe()
if err != nil { if err != nil {
panic("can't get pipe:" + err.Error()) b.Fatal("can't get pipe:", err)
} }
benchmarkEndToEnd(r, w, b) benchmarkEndToEnd(r, w, b)
} }

View File

@ -130,6 +130,7 @@ func userType(rt reflect.Type) *userTypeInfo {
} }
return ut return ut
} }
// A typeId represents a gob Type as an integer that can be passed on the wire. // A typeId represents a gob Type as an integer that can be passed on the wire.
// Internally, typeIds are used as keys to a map to recover the underlying type info. // Internally, typeIds are used as keys to a map to recover the underlying type info.
type typeId int32 type typeId int32

View File

@ -84,7 +84,7 @@ func BenchmarkCodeEncoder(b *testing.B) {
enc := NewEncoder(ioutil.Discard) enc := NewEncoder(ioutil.Discard)
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
if err := enc.Encode(&codeStruct); err != nil { if err := enc.Encode(&codeStruct); err != nil {
panic(err) b.Fatal("Encode:", err)
} }
} }
b.SetBytes(int64(len(codeJSON))) b.SetBytes(int64(len(codeJSON)))
@ -98,7 +98,7 @@ func BenchmarkCodeMarshal(b *testing.B) {
} }
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
if _, err := Marshal(&codeStruct); err != nil { if _, err := Marshal(&codeStruct); err != nil {
panic(err) b.Fatal("Marshal:", err)
} }
} }
b.SetBytes(int64(len(codeJSON))) b.SetBytes(int64(len(codeJSON)))
@ -120,7 +120,7 @@ func BenchmarkCodeDecoder(b *testing.B) {
buf.WriteByte('\n') buf.WriteByte('\n')
buf.WriteByte('\n') buf.WriteByte('\n')
if err := dec.Decode(&r); err != nil { if err := dec.Decode(&r); err != nil {
panic(err) b.Fatal("Decode:", err)
} }
} }
b.SetBytes(int64(len(codeJSON))) b.SetBytes(int64(len(codeJSON)))
@ -135,7 +135,7 @@ func BenchmarkCodeUnmarshal(b *testing.B) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
var r codeResponse var r codeResponse
if err := Unmarshal(codeJSON, &r); err != nil { if err := Unmarshal(codeJSON, &r); err != nil {
panic(err) b.Fatal("Unmmarshal:", err)
} }
} }
b.SetBytes(int64(len(codeJSON))) b.SetBytes(int64(len(codeJSON)))
@ -150,7 +150,7 @@ func BenchmarkCodeUnmarshalReuse(b *testing.B) {
var r codeResponse var r codeResponse
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
if err := Unmarshal(codeJSON, &r); err != nil { if err := Unmarshal(codeJSON, &r); err != nil {
panic(err) b.Fatal("Unmmarshal:", err)
} }
} }
b.SetBytes(int64(len(codeJSON))) b.SetBytes(int64(len(codeJSON)))

View File

@ -228,7 +228,9 @@ func (d *decodeState) value(v reflect.Value) {
// Feed in an empty string - the shortest, simplest value - // Feed in an empty string - the shortest, simplest value -
// so that it knows we got to the end of the value. // so that it knows we got to the end of the value.
if d.scan.redo { if d.scan.redo {
panic("redo") // rewind.
d.scan.redo = false
d.scan.step = stateBeginValue
} }
d.scan.step(&d.scan, '"') d.scan.step(&d.scan, '"')
d.scan.step(&d.scan, '"') d.scan.step(&d.scan, '"')
@ -317,25 +319,22 @@ func (d *decodeState) array(v reflect.Value) {
} }
v = pv v = pv
// Decoding into nil interface? Switch to non-reflect code.
iv := v
ok := iv.Kind() == reflect.Interface
if ok {
iv.Set(reflect.ValueOf(d.arrayInterface()))
return
}
// Check type of target. // Check type of target.
av := v switch v.Kind() {
if av.Kind() != reflect.Array && av.Kind() != reflect.Slice { default:
d.saveError(&UnmarshalTypeError{"array", v.Type()}) d.saveError(&UnmarshalTypeError{"array", v.Type()})
d.off-- d.off--
d.next() d.next()
return return
case reflect.Interface:
// Decoding into nil interface? Switch to non-reflect code.
v.Set(reflect.ValueOf(d.arrayInterface()))
return
case reflect.Array:
case reflect.Slice:
break
} }
sv := v
i := 0 i := 0
for { for {
// Look ahead for ] - can only happen on first iteration. // Look ahead for ] - can only happen on first iteration.
@ -349,23 +348,25 @@ func (d *decodeState) array(v reflect.Value) {
d.scan.undo(op) d.scan.undo(op)
// Get element of array, growing if necessary. // Get element of array, growing if necessary.
if i >= av.Cap() && sv.IsValid() { if v.Kind() == reflect.Slice {
newcap := sv.Cap() + sv.Cap()/2 // Grow slice if necessary
if newcap < 4 { if i >= v.Cap() {
newcap = 4 newcap := v.Cap() + v.Cap()/2
if newcap < 4 {
newcap = 4
}
newv := reflect.MakeSlice(v.Type(), v.Len(), newcap)
reflect.Copy(newv, v)
v.Set(newv)
}
if i >= v.Len() {
v.SetLen(i + 1)
} }
newv := reflect.MakeSlice(sv.Type(), sv.Len(), newcap)
reflect.Copy(newv, sv)
sv.Set(newv)
}
if i >= av.Len() && sv.IsValid() {
// Must be slice; gave up on array during i >= av.Cap().
sv.SetLen(i + 1)
} }
// Decode into element. if i < v.Len() {
if i < av.Len() { // Decode into element.
d.value(av.Index(i)) d.value(v.Index(i))
} else { } else {
// Ran out of fixed array: skip. // Ran out of fixed array: skip.
d.value(reflect.Value{}) d.value(reflect.Value{})
@ -382,19 +383,19 @@ func (d *decodeState) array(v reflect.Value) {
} }
} }
if i < av.Len() { if i < v.Len() {
if !sv.IsValid() { if v.Kind() == reflect.Array {
// Array. Zero the rest. // Array. Zero the rest.
z := reflect.Zero(av.Type().Elem()) z := reflect.Zero(v.Type().Elem())
for ; i < av.Len(); i++ { for ; i < v.Len(); i++ {
av.Index(i).Set(z) v.Index(i).Set(z)
} }
} else { } else {
sv.SetLen(i) v.SetLen(i)
} }
} }
if i == 0 && av.Kind() == reflect.Slice && sv.IsNil() { if i == 0 && v.Kind() == reflect.Slice {
sv.Set(reflect.MakeSlice(sv.Type(), 0, 0)) v.Set(reflect.MakeSlice(v.Type(), 0, 0))
} }
} }
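After this rewrite the decoder treats fixed-size arrays as first-class targets alongside slices: surplus JSON elements are skipped and missing ones are zeroed, matching the new cases added to decode_test.go below. A small sketch, assuming the behavior those test cases describe:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var a [5]int
	// Only three elements in the input; the rest of the array is zeroed.
	if err := json.Unmarshal([]byte(`[1, 2, 3]`), &a); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(a) // [1 2 3 0 0]

	var b [1]int
	// Elements beyond the array length are skipped.
	if err := json.Unmarshal([]byte(`[1, 2, 3]`), &b); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(b) // [1]
}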

View File

@ -6,6 +6,7 @@ package json
import ( import (
"bytes" "bytes"
"fmt"
"reflect" "reflect"
"strings" "strings"
"testing" "testing"
@ -73,6 +74,12 @@ var unmarshalTests = []unmarshalTest{
// syntax errors // syntax errors
{`{"X": "foo", "Y"}`, nil, nil, &SyntaxError{"invalid character '}' after object key", 17}}, {`{"X": "foo", "Y"}`, nil, nil, &SyntaxError{"invalid character '}' after object key", 17}},
{`[1, 2, 3+]`, nil, nil, &SyntaxError{"invalid character '+' after array element", 9}},
// array tests
{`[1, 2, 3]`, new([3]int), [3]int{1, 2, 3}, nil},
{`[1, 2, 3]`, new([1]int), [1]int{1}, nil},
{`[1, 2, 3]`, new([5]int), [5]int{1, 2, 3, 0, 0}, nil},
// composite tests // composite tests
{allValueIndent, new(All), allValue, nil}, {allValueIndent, new(All), allValue, nil},
@ -242,6 +249,38 @@ func TestHTMLEscape(t *testing.T) {
} }
} }
// WrongString is a struct that's misusing the ,string modifier.
type WrongString struct {
Message string `json:"result,string"`
}
type wrongStringTest struct {
in, err string
}
// TODO(bradfitz): as part of Issue 2331, fix these tests' expected
// error values to be helpful, rather than the confusing messages they
// are now.
var wrongStringTests = []wrongStringTest{
{`{"result":"x"}`, "JSON decoder out of sync - data changing underfoot?"},
{`{"result":"foo"}`, "json: cannot unmarshal bool into Go value of type string"},
{`{"result":"123"}`, "json: cannot unmarshal number into Go value of type string"},
}
// If people misuse the ,string modifier, the error message should be
// helpful, telling the user that they're doing it wrong.
func TestErrorMessageFromMisusedString(t *testing.T) {
for n, tt := range wrongStringTests {
r := strings.NewReader(tt.in)
var s WrongString
err := NewDecoder(r).Decode(&s)
got := fmt.Sprintf("%v", err)
if got != tt.err {
t.Errorf("%d. got err = %q, want %q", n, got, tt.err)
}
}
}
func noSpace(c rune) rune { func noSpace(c rune) rune {
if isSpace(c) { if isSpace(c) {
return -1 return -1

View File

@ -197,6 +197,7 @@ var hex = "0123456789abcdef"
// An encodeState encodes JSON into a bytes.Buffer. // An encodeState encodes JSON into a bytes.Buffer.
type encodeState struct { type encodeState struct {
bytes.Buffer // accumulated output bytes.Buffer // accumulated output
scratch [64]byte
} }
func (e *encodeState) marshal(v interface{}) (err error) { func (e *encodeState) marshal(v interface{}) (err error) {
@ -275,14 +276,26 @@ func (e *encodeState) reflectValueQuoted(v reflect.Value, quoted bool) {
} }
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
writeString(e, strconv.FormatInt(v.Int(), 10)) b := strconv.AppendInt(e.scratch[:0], v.Int(), 10)
if quoted {
writeString(e, string(b))
} else {
e.Write(b)
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
writeString(e, strconv.FormatUint(v.Uint(), 10)) b := strconv.AppendUint(e.scratch[:0], v.Uint(), 10)
if quoted {
writeString(e, string(b))
} else {
e.Write(b)
}
case reflect.Float32, reflect.Float64: case reflect.Float32, reflect.Float64:
writeString(e, strconv.FormatFloat(v.Float(), 'g', -1, v.Type().Bits())) b := strconv.AppendFloat(e.scratch[:0], v.Float(), 'g', -1, v.Type().Bits())
if quoted {
writeString(e, string(b))
} else {
e.Write(b)
}
case reflect.String: case reflect.String:
if quoted { if quoted {
sb, err := Marshal(v.String()) sb, err := Marshal(v.String())

View File

@ -394,7 +394,7 @@ func TestUnmarshal(t *testing.T) {
if err != nil { if err != nil {
t.Errorf("#%d: unexpected error: %#v", i, err) t.Errorf("#%d: unexpected error: %#v", i, err)
} else if got, want := dest, test.Value; !reflect.DeepEqual(got, want) { } else if got, want := dest, test.Value; !reflect.DeepEqual(got, want) {
t.Errorf("#%d: unmarshal(%#s) = %#v, want %#v", i, test.ExpectXML, got, want) t.Errorf("#%d: unmarshal(%q) = %#v, want %#v", i, test.ExpectXML, got, want)
} }
} }
} }

View File

@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build linux
package inotify package inotify
import ( import (
@ -17,6 +19,9 @@ func TestInotifyEvents(t *testing.T) {
t.Fatalf("NewWatcher() failed: %s", err) t.Fatalf("NewWatcher() failed: %s", err)
} }
t.Logf("NEEDS TO BE CONVERTED TO NEW GO TOOL") // TODO
return
// Add a watch for "_test" // Add a watch for "_test"
err = watcher.Watch("_test") err = watcher.Watch("_test")
if err != nil { if err != nil {

View File

@ -22,10 +22,10 @@ var drivers = make(map[string]driver.Driver)
// it panics. // it panics.
func Register(name string, driver driver.Driver) { func Register(name string, driver driver.Driver) {
if driver == nil { if driver == nil {
panic("db: Register driver is nil") panic("sql: Register driver is nil")
} }
if _, dup := drivers[name]; dup { if _, dup := drivers[name]; dup {
panic("db: Register called twice for driver " + name) panic("sql: Register called twice for driver " + name)
} }
drivers[name] = driver drivers[name] = driver
} }
@ -80,7 +80,7 @@ type ScannerInto interface {
// ErrNoRows is returned by Scan when QueryRow doesn't return a // ErrNoRows is returned by Scan when QueryRow doesn't return a
// row. In such a case, QueryRow returns a placeholder *Row value that // row. In such a case, QueryRow returns a placeholder *Row value that
// defers this error until a Scan. // defers this error until a Scan.
var ErrNoRows = errors.New("db: no rows in result set") var ErrNoRows = errors.New("sql: no rows in result set")
// DB is a database handle. It's safe for concurrent use by multiple // DB is a database handle. It's safe for concurrent use by multiple
// goroutines. // goroutines.
@ -102,7 +102,7 @@ type DB struct {
func Open(driverName, dataSourceName string) (*DB, error) { func Open(driverName, dataSourceName string) (*DB, error) {
driver, ok := drivers[driverName] driver, ok := drivers[driverName]
if !ok { if !ok {
return nil, fmt.Errorf("db: unknown driver %q (forgotten import?)", driverName) return nil, fmt.Errorf("sql: unknown driver %q (forgotten import?)", driverName)
} }
return &DB{driver: driver, dsn: dataSourceName}, nil return &DB{driver: driver, dsn: dataSourceName}, nil
} }
@ -514,7 +514,7 @@ func (s *Stmt) Exec(args ...interface{}) (Result, error) {
// placeholders, so we won't sanity check input here and instead let the // placeholders, so we won't sanity check input here and instead let the
// driver deal with errors. // driver deal with errors.
if want := si.NumInput(); want != -1 && len(args) != want { if want := si.NumInput(); want != -1 && len(args) != want {
return nil, fmt.Errorf("db: expected %d arguments, got %d", want, len(args)) return nil, fmt.Errorf("sql: expected %d arguments, got %d", want, len(args))
} }
// Convert args to subset types. // Convert args to subset types.
@ -522,10 +522,10 @@ func (s *Stmt) Exec(args ...interface{}) (Result, error) {
for n, arg := range args { for n, arg := range args {
args[n], err = cc.ColumnConverter(n).ConvertValue(arg) args[n], err = cc.ColumnConverter(n).ConvertValue(arg)
if err != nil { if err != nil {
return nil, fmt.Errorf("db: converting Exec argument #%d's type: %v", n, err) return nil, fmt.Errorf("sql: converting Exec argument #%d's type: %v", n, err)
} }
if !driver.IsParameterSubsetType(args[n]) { if !driver.IsParameterSubsetType(args[n]) {
return nil, fmt.Errorf("db: driver ColumnConverter error converted %T to unsupported type %T", return nil, fmt.Errorf("sql: driver ColumnConverter error converted %T to unsupported type %T",
arg, args[n]) arg, args[n])
} }
} }
@ -533,7 +533,7 @@ func (s *Stmt) Exec(args ...interface{}) (Result, error) {
for n, arg := range args { for n, arg := range args {
args[n], err = driver.DefaultParameterConverter.ConvertValue(arg) args[n], err = driver.DefaultParameterConverter.ConvertValue(arg)
if err != nil { if err != nil {
return nil, fmt.Errorf("db: converting Exec argument #%d's type: %v", n, err) return nil, fmt.Errorf("sql: converting Exec argument #%d's type: %v", n, err)
} }
} }
} }
@ -555,7 +555,7 @@ func (s *Stmt) connStmt() (ci driver.Conn, releaseConn func(), si driver.Stmt, e
s.mu.Lock() s.mu.Lock()
if s.closed { if s.closed {
s.mu.Unlock() s.mu.Unlock()
err = errors.New("db: statement is closed") err = errors.New("sql: statement is closed")
return return
} }
@ -617,7 +617,7 @@ func (s *Stmt) Query(args ...interface{}) (*Rows, error) {
// placeholders, so we won't sanity check input here and instead let the // placeholders, so we won't sanity check input here and instead let the
// driver deal with errors. // driver deal with errors.
if want := si.NumInput(); want != -1 && len(args) != want { if want := si.NumInput(); want != -1 && len(args) != want {
return nil, fmt.Errorf("db: statement expects %d inputs; got %d", si.NumInput(), len(args)) return nil, fmt.Errorf("sql: statement expects %d inputs; got %d", si.NumInput(), len(args))
} }
sargs, err := subsetTypeArgs(args) sargs, err := subsetTypeArgs(args)
if err != nil { if err != nil {
@ -737,27 +737,40 @@ func (rs *Rows) Err() error {
return rs.lasterr return rs.lasterr
} }
// Columns returns the column names.
// Columns returns an error if the rows are closed, or if the rows
// are from QueryRow and there was a deferred error.
func (rs *Rows) Columns() ([]string, error) {
if rs.closed {
return nil, errors.New("sql: Rows are closed")
}
if rs.rowsi == nil {
return nil, errors.New("sql: no Rows available")
}
return rs.rowsi.Columns(), nil
}
// Scan copies the columns in the current row into the values pointed // Scan copies the columns in the current row into the values pointed
// at by dest. If dest contains pointers to []byte, the slices should // at by dest. If dest contains pointers to []byte, the slices should
// not be modified and should only be considered valid until the next // not be modified and should only be considered valid until the next
// call to Next or Scan. // call to Next or Scan.
func (rs *Rows) Scan(dest ...interface{}) error { func (rs *Rows) Scan(dest ...interface{}) error {
if rs.closed { if rs.closed {
return errors.New("db: Rows closed") return errors.New("sql: Rows closed")
} }
if rs.lasterr != nil { if rs.lasterr != nil {
return rs.lasterr return rs.lasterr
} }
if rs.lastcols == nil { if rs.lastcols == nil {
return errors.New("db: Scan called without calling Next") return errors.New("sql: Scan called without calling Next")
} }
if len(dest) != len(rs.lastcols) { if len(dest) != len(rs.lastcols) {
return fmt.Errorf("db: expected %d destination arguments in Scan, not %d", len(rs.lastcols), len(dest)) return fmt.Errorf("sql: expected %d destination arguments in Scan, not %d", len(rs.lastcols), len(dest))
} }
for i, sv := range rs.lastcols { for i, sv := range rs.lastcols {
err := convertAssign(dest[i], sv) err := convertAssign(dest[i], sv)
if err != nil { if err != nil {
return fmt.Errorf("db: Scan error on column index %d: %v", i, err) return fmt.Errorf("sql: Scan error on column index %d: %v", i, err)
} }
} }
return nil return nil
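
A minimal sketch (not part of the patch) of how a caller might use the new Rows.Columns method; the helper name and package are hypothetical, and the import path is written as database/sql even though the package may still live at exp/sql in this snapshot.

package sqlutil // hypothetical helper package

import (
    "database/sql"
    "fmt"
)

// printColumns lists the column names of a result set via the new
// Rows.Columns method; it returns the method's error if the rows are
// already closed or not yet available.
func printColumns(rows *sql.Rows) error {
    cols, err := rows.Columns()
    if err != nil {
        return err
    }
    for i, name := range cols {
        fmt.Printf("column %d: %s\n", i, name)
    }
    return nil
}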

View File

@ -75,6 +75,23 @@ func TestQuery(t *testing.T) {
} }
} }
func TestRowsColumns(t *testing.T) {
db := newTestDB(t, "people")
defer closeDB(t, db)
rows, err := db.Query("SELECT|people|age,name|")
if err != nil {
t.Fatalf("Query: %v", err)
}
cols, err := rows.Columns()
if err != nil {
t.Fatalf("Columns: %v", err)
}
want := []string{"age", "name"}
if !reflect.DeepEqual(cols, want) {
t.Errorf("got %#v; want %#v", cols, want)
}
}
func TestQueryRow(t *testing.T) { func TestQueryRow(t *testing.T) {
db := newTestDB(t, "people") db := newTestDB(t, "people")
defer closeDB(t, db) defer closeDB(t, db)
@ -187,12 +204,12 @@ func TestExec(t *testing.T) {
{[]interface{}{7, 9}, ""}, {[]interface{}{7, 9}, ""},
// Invalid conversions: // Invalid conversions:
{[]interface{}{"Brad", int64(0xFFFFFFFF)}, "db: converting Exec argument #1's type: sql/driver: value 4294967295 overflows int32"}, {[]interface{}{"Brad", int64(0xFFFFFFFF)}, "sql: converting Exec argument #1's type: sql/driver: value 4294967295 overflows int32"},
{[]interface{}{"Brad", "strconv fail"}, "db: converting Exec argument #1's type: sql/driver: value \"strconv fail\" can't be converted to int32"}, {[]interface{}{"Brad", "strconv fail"}, "sql: converting Exec argument #1's type: sql/driver: value \"strconv fail\" can't be converted to int32"},
// Wrong number of args: // Wrong number of args:
{[]interface{}{}, "db: expected 2 arguments, got 0"}, {[]interface{}{}, "sql: expected 2 arguments, got 0"},
{[]interface{}{1, 2, 3}, "db: expected 2 arguments, got 3"}, {[]interface{}{1, 2, 3}, "sql: expected 2 arguments, got 3"},
} }
for n, et := range execTests { for n, et := range execTests {
_, err := stmt.Exec(et.args...) _, err := stmt.Exec(et.args...)

View File

@ -283,8 +283,8 @@ func (p *publickeyAuth) method() string {
return "publickey" return "publickey"
} }
// ClientAuthPublickey returns a ClientAuth using public key authentication. // ClientAuthKeyring returns a ClientAuth using public key authentication.
func ClientAuthPublickey(impl ClientKeyring) ClientAuth { func ClientAuthKeyring(impl ClientKeyring) ClientAuth {
return &publickeyAuth{impl} return &publickeyAuth{impl}
} }
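
A minimal sketch (not from the patch) of the renamed constructor in use; the user name is made up, and keyring stands for any value implementing ssh.ClientKeyring.

package sshutil // hypothetical

import "exp/ssh"

// keyringConfig builds a ClientConfig that authenticates with the supplied
// keyring via ClientAuthKeyring (the new name for ClientAuthPublickey).
func keyringConfig(keyring ssh.ClientKeyring) *ssh.ClientConfig {
    return &ssh.ClientConfig{
        User: "gopher",
        Auth: []ssh.ClientAuth{
            ssh.ClientAuthKeyring(keyring),
        },
    }
}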

View File

@ -122,7 +122,7 @@ var (
PasswordCallback: func(user, pass string) bool { PasswordCallback: func(user, pass string) bool {
return user == "testuser" && pass == string(clientPassword) return user == "testuser" && pass == string(clientPassword)
}, },
PubKeyCallback: func(user, algo string, pubkey []byte) bool { PublicKeyCallback: func(user, algo string, pubkey []byte) bool {
key := clientKeychain.keys[0].(*rsa.PrivateKey).PublicKey key := clientKeychain.keys[0].(*rsa.PrivateKey).PublicKey
expected := []byte(serializePublickey(key)) expected := []byte(serializePublickey(key))
algoname := algoName(key) algoname := algoName(key)
@ -179,7 +179,7 @@ func TestClientAuthPublickey(t *testing.T) {
config := &ClientConfig{ config := &ClientConfig{
User: "testuser", User: "testuser",
Auth: []ClientAuth{ Auth: []ClientAuth{
ClientAuthPublickey(clientKeychain), ClientAuthKeyring(clientKeychain),
}, },
} }
c, err := Dial("tcp", newMockAuthServer(t), config) c, err := Dial("tcp", newMockAuthServer(t), config)
@ -210,7 +210,7 @@ func TestClientAuthWrongPassword(t *testing.T) {
User: "testuser", User: "testuser",
Auth: []ClientAuth{ Auth: []ClientAuth{
ClientAuthPassword(wrongPw), ClientAuthPassword(wrongPw),
ClientAuthPublickey(clientKeychain), ClientAuthKeyring(clientKeychain),
}, },
} }
@ -228,7 +228,7 @@ func TestClientAuthInvalidPublickey(t *testing.T) {
config := &ClientConfig{ config := &ClientConfig{
User: "testuser", User: "testuser",
Auth: []ClientAuth{ Auth: []ClientAuth{
ClientAuthPublickey(kc), ClientAuthKeyring(kc),
}, },
} }
@ -246,7 +246,7 @@ func TestClientAuthRSAandDSA(t *testing.T) {
config := &ClientConfig{ config := &ClientConfig{
User: "testuser", User: "testuser",
Auth: []ClientAuth{ Auth: []ClientAuth{
ClientAuthPublickey(kc), ClientAuthKeyring(kc),
}, },
} }
c, err := Dial("tcp", newMockAuthServer(t), config) c, err := Dial("tcp", newMockAuthServer(t), config)

View File

@ -50,7 +50,7 @@ func TestFuncPublickeyAuth(t *testing.T) {
config := &ClientConfig{ config := &ClientConfig{
User: *sshuser, User: *sshuser,
Auth: []ClientAuth{ Auth: []ClientAuth{
ClientAuthPublickey(kc), ClientAuthKeyring(kc),
}, },
} }
conn, err := Dial("tcp", "localhost:22", config) conn, err := Dial("tcp", "localhost:22", config)

View File

@ -36,10 +36,10 @@ type ServerConfig struct {
// several goroutines. // several goroutines.
PasswordCallback func(user, password string) bool PasswordCallback func(user, password string) bool
// PubKeyCallback, if non-nil, is called when a client attempts public // PublicKeyCallback, if non-nil, is called when a client attempts public
// key authentication. It must return true iff the given public key is // key authentication. It must return true iff the given public key is
// valid for the given user. // valid for the given user.
PubKeyCallback func(user, algo string, pubkey []byte) bool PublicKeyCallback func(user, algo string, pubkey []byte) bool
// Cryptographic-related configuration. // Cryptographic-related configuration.
Crypto CryptoConfig Crypto CryptoConfig
@ -359,7 +359,7 @@ func isAcceptableAlgo(algo string) bool {
// testPubKey returns true if the given public key is acceptable for the user. // testPubKey returns true if the given public key is acceptable for the user.
func (s *ServerConn) testPubKey(user, algo string, pubKey []byte) bool { func (s *ServerConn) testPubKey(user, algo string, pubKey []byte) bool {
if s.config.PubKeyCallback == nil || !isAcceptableAlgo(algo) { if s.config.PublicKeyCallback == nil || !isAcceptableAlgo(algo) {
return false return false
} }
@ -369,7 +369,7 @@ func (s *ServerConn) testPubKey(user, algo string, pubKey []byte) bool {
} }
} }
result := s.config.PubKeyCallback(user, algo, pubKey) result := s.config.PublicKeyCallback(user, algo, pubKey)
if len(s.cachedPubKeys) < maxCachedPubKeys { if len(s.cachedPubKeys) < maxCachedPubKeys {
c := cachedPubKey{ c := cachedPubKey{
user: user, user: user,
@ -425,7 +425,7 @@ userAuthLoop:
break userAuthLoop break userAuthLoop
} }
case "publickey": case "publickey":
if s.config.PubKeyCallback == nil { if s.config.PublicKeyCallback == nil {
break break
} }
payload := userAuthReq.Payload payload := userAuthReq.Payload
@ -499,7 +499,7 @@ userAuthLoop:
if s.config.PasswordCallback != nil { if s.config.PasswordCallback != nil {
failureMsg.Methods = append(failureMsg.Methods, "password") failureMsg.Methods = append(failureMsg.Methods, "password")
} }
if s.config.PubKeyCallback != nil { if s.config.PublicKeyCallback != nil {
failureMsg.Methods = append(failureMsg.Methods, "publickey") failureMsg.Methods = append(failureMsg.Methods, "publickey")
} }
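
A minimal sketch (not from the patch) of a ServerConfig using the renamed PublicKeyCallback field next to PasswordCallback; the credentials and the authorizedKey value are assumptions.

package sshutil // hypothetical

import (
    "bytes"

    "exp/ssh"
)

// newServerConfig accepts password logins for one user and a single,
// pre-authorized wire-format public key.
func newServerConfig(authorizedKey []byte) *ssh.ServerConfig {
    config := new(ssh.ServerConfig)
    config.PasswordCallback = func(user, pass string) bool {
        return user == "gopher" && pass == "secret"
    }
    // PublicKeyCallback (formerly PubKeyCallback) must return true iff the
    // given key is valid for the given user.
    config.PublicKeyCallback = func(user, algo string, pubkey []byte) bool {
        return user == "gopher" && bytes.Equal(pubkey, authorizedKey)
    }
    return config
}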

View File

@ -68,10 +68,12 @@ type Session struct {
*clientChan // the channel backing this session *clientChan // the channel backing this session
started bool // true once Start, Run or Shell is invoked. started bool // true once Start, Run or Shell is invoked.
closeAfterWait []io.Closer copyFuncs []func() error
copyFuncs []func() error errch chan error // one send per copyFunc
errch chan error // one send per copyFunc
// true if pipe method is active
stdinpipe, stdoutpipe, stderrpipe bool
} }
// RFC 4254 Section 6.4. // RFC 4254 Section 6.4.
@ -237,11 +239,9 @@ func (s *Session) waitForResponse() error {
func (s *Session) start() error { func (s *Session) start() error {
s.started = true s.started = true
type F func(*Session) error type F func(*Session)
for _, setupFd := range []F{(*Session).stdin, (*Session).stdout, (*Session).stderr} { for _, setupFd := range []F{(*Session).stdin, (*Session).stdout, (*Session).stderr} {
if err := setupFd(s); err != nil { setupFd(s)
return err
}
} }
s.errch = make(chan error, len(s.copyFuncs)) s.errch = make(chan error, len(s.copyFuncs))
@ -274,9 +274,6 @@ func (s *Session) Wait() error {
copyError = err copyError = err
} }
} }
for _, fd := range s.closeAfterWait {
fd.Close()
}
if waitErr != nil { if waitErr != nil {
return waitErr return waitErr
} }
@ -341,7 +338,10 @@ func (s *Session) wait() error {
return &ExitError{wm} return &ExitError{wm}
} }
func (s *Session) stdin() error { func (s *Session) stdin() {
if s.stdinpipe {
return
}
if s.Stdin == nil { if s.Stdin == nil {
s.Stdin = new(bytes.Buffer) s.Stdin = new(bytes.Buffer)
} }
@ -352,10 +352,12 @@ func (s *Session) stdin() error {
} }
return err return err
}) })
return nil
} }
func (s *Session) stdout() error { func (s *Session) stdout() {
if s.stdoutpipe {
return
}
if s.Stdout == nil { if s.Stdout == nil {
s.Stdout = ioutil.Discard s.Stdout = ioutil.Discard
} }
@ -363,10 +365,12 @@ func (s *Session) stdout() error {
_, err := io.Copy(s.Stdout, s.clientChan.stdout) _, err := io.Copy(s.Stdout, s.clientChan.stdout)
return err return err
}) })
return nil
} }
func (s *Session) stderr() error { func (s *Session) stderr() {
if s.stderrpipe {
return
}
if s.Stderr == nil { if s.Stderr == nil {
s.Stderr = ioutil.Discard s.Stderr = ioutil.Discard
} }
@ -374,7 +378,6 @@ func (s *Session) stderr() error {
_, err := io.Copy(s.Stderr, s.clientChan.stderr) _, err := io.Copy(s.Stderr, s.clientChan.stderr)
return err return err
}) })
return nil
} }
// StdinPipe returns a pipe that will be connected to the // StdinPipe returns a pipe that will be connected to the
@ -386,10 +389,8 @@ func (s *Session) StdinPipe() (io.WriteCloser, error) {
if s.started { if s.started {
return nil, errors.New("ssh: StdinPipe after process started") return nil, errors.New("ssh: StdinPipe after process started")
} }
pr, pw := io.Pipe() s.stdinpipe = true
s.Stdin = pr return s.clientChan.stdin, nil
s.closeAfterWait = append(s.closeAfterWait, pr)
return pw, nil
} }
// StdoutPipe returns a pipe that will be connected to the // StdoutPipe returns a pipe that will be connected to the
@ -398,17 +399,15 @@ func (s *Session) StdinPipe() (io.WriteCloser, error) {
// stdout and stderr streams. If the StdoutPipe reader is // stdout and stderr streams. If the StdoutPipe reader is
// not serviced fast enough, it may eventually cause the // not serviced fast enough, it may eventually cause the
// remote command to block. // remote command to block.
func (s *Session) StdoutPipe() (io.ReadCloser, error) { func (s *Session) StdoutPipe() (io.Reader, error) {
if s.Stdout != nil { if s.Stdout != nil {
return nil, errors.New("ssh: Stdout already set") return nil, errors.New("ssh: Stdout already set")
} }
if s.started { if s.started {
return nil, errors.New("ssh: StdoutPipe after process started") return nil, errors.New("ssh: StdoutPipe after process started")
} }
pr, pw := io.Pipe() s.stdoutpipe = true
s.Stdout = pw return s.clientChan.stdout, nil
s.closeAfterWait = append(s.closeAfterWait, pw)
return pr, nil
} }
// StderrPipe returns a pipe that will be connected to the // StderrPipe returns a pipe that will be connected to the
@ -417,17 +416,15 @@ func (s *Session) StdoutPipe() (io.ReadCloser, error) {
// stdout and stderr streams. If the StderrPipe reader is // stdout and stderr streams. If the StderrPipe reader is
// not serviced fast enough, it may eventually cause the // not serviced fast enough, it may eventually cause the
// remote command to block. // remote command to block.
func (s *Session) StderrPipe() (io.ReadCloser, error) { func (s *Session) StderrPipe() (io.Reader, error) {
if s.Stderr != nil { if s.Stderr != nil {
return nil, errors.New("ssh: Stderr already set") return nil, errors.New("ssh: Stderr already set")
} }
if s.started { if s.started {
return nil, errors.New("ssh: StderrPipe after process started") return nil, errors.New("ssh: StderrPipe after process started")
} }
pr, pw := io.Pipe() s.stderrpipe = true
s.Stderr = pw return s.clientChan.stderr, nil
s.closeAfterWait = append(s.closeAfterWait, pw)
return pr, nil
} }
// TODO(dfc) add Output and CombinedOutput helpers // TODO(dfc) add Output and CombinedOutput helpers
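
A minimal sketch (not from the patch) of the revised pipe behaviour; the helper name is hypothetical and it assumes an already established session.

package sshutil // hypothetical

import (
    "io/ioutil"

    "exp/ssh"
)

// runAndCapture starts cmd on the session and collects its output.
// StdoutPipe now returns an io.Reader backed directly by the channel's
// stdout stream, so it should be drained before Wait is called.
func runAndCapture(session *ssh.Session, cmd string) ([]byte, error) {
    stdout, err := session.StdoutPipe()
    if err != nil {
        return nil, err
    }
    if err := session.Start(cmd); err != nil {
        return nil, err
    }
    out, err := ioutil.ReadAll(stdout)
    if err != nil {
        return nil, err
    }
    return out, session.Wait()
}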

View File

@ -20,7 +20,7 @@ func dial(handler serverType, t *testing.T) *ClientConn {
serverConfig.PasswordCallback = func(user, pass string) bool { serverConfig.PasswordCallback = func(user, pass string) bool {
return user == "testuser" && pass == string(pw) return user == "testuser" && pass == string(pw)
} }
serverConfig.PubKeyCallback = nil serverConfig.PublicKeyCallback = nil
l, err := Listen("tcp", "127.0.0.1:0", serverConfig) l, err := Listen("tcp", "127.0.0.1:0", serverConfig)
if err != nil { if err != nil {

View File

@ -10,6 +10,7 @@ import (
"io" "io"
"net" "net"
) )
// Dial initiates a connection to the addr from the remote host. // Dial initiates a connection to the addr from the remote host.
// addr is resolved using net.ResolveTCPAddr before connection. // addr is resolved using net.ResolveTCPAddr before connection.
// This could allow an observer to observe the DNS name of the // This could allow an observer to observe the DNS name of the

View File

@ -2,13 +2,56 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build linux
package terminal package terminal
import "io" import (
"io"
"sync"
)
// EscapeCodes contains escape sequences that can be written to the terminal in
// order to achieve different styles of text.
type EscapeCodes struct {
// Foreground colors
Black, Red, Green, Yellow, Blue, Magenta, Cyan, White []byte
// Reset all attributes
Reset []byte
}
var vt100EscapeCodes = EscapeCodes{
Black: []byte{keyEscape, '[', '3', '0', 'm'},
Red: []byte{keyEscape, '[', '3', '1', 'm'},
Green: []byte{keyEscape, '[', '3', '2', 'm'},
Yellow: []byte{keyEscape, '[', '3', '3', 'm'},
Blue: []byte{keyEscape, '[', '3', '4', 'm'},
Magenta: []byte{keyEscape, '[', '3', '5', 'm'},
Cyan: []byte{keyEscape, '[', '3', '6', 'm'},
White: []byte{keyEscape, '[', '3', '7', 'm'},
Reset: []byte{keyEscape, '[', '0', 'm'},
}
// Terminal contains the state for running a VT100 terminal that is capable of // Terminal contains the state for running a VT100 terminal that is capable of
// reading lines of input. // reading lines of input.
type Terminal struct { type Terminal struct {
// AutoCompleteCallback, if non-nil, is called for each keypress
// with the full input line and the current position of the cursor.
// If it returns a nil newLine, the key press is processed normally.
// Otherwise it returns a replacement line and the new cursor position.
AutoCompleteCallback func(line []byte, pos, key int) (newLine []byte, newPos int)
// Escape contains a pointer to the escape codes for this terminal.
// It's always a valid pointer, although the escape codes themselves
// may be empty if the terminal doesn't support them.
Escape *EscapeCodes
// lock protects the terminal and the state in this object from
// concurrent processing of a key press and a Write() call.
lock sync.Mutex
c io.ReadWriter c io.ReadWriter
prompt string prompt string
@ -16,6 +59,8 @@ type Terminal struct {
line []byte line []byte
// pos is the logical position of the cursor in line // pos is the logical position of the cursor in line
pos int pos int
// echo is true if local echo is enabled
echo bool
// cursorX contains the current X value of the cursor where the left // cursorX contains the current X value of the cursor where the left
// edge is 0. cursorY contains the row number where the first row of // edge is 0. cursorY contains the row number where the first row of
@ -40,10 +85,12 @@ type Terminal struct {
// "> "). // "> ").
func NewTerminal(c io.ReadWriter, prompt string) *Terminal { func NewTerminal(c io.ReadWriter, prompt string) *Terminal {
return &Terminal{ return &Terminal{
Escape: &vt100EscapeCodes,
c: c, c: c,
prompt: prompt, prompt: prompt,
termWidth: 80, termWidth: 80,
termHeight: 24, termHeight: 24,
echo: true,
} }
} }
@ -109,18 +156,11 @@ func bytesToKey(b []byte) (int, []byte) {
// queue appends data to the end of t.outBuf // queue appends data to the end of t.outBuf
func (t *Terminal) queue(data []byte) { func (t *Terminal) queue(data []byte) {
if len(t.outBuf)+len(data) > cap(t.outBuf) { t.outBuf = append(t.outBuf, data...)
newOutBuf := make([]byte, len(t.outBuf), 2*(len(t.outBuf)+len(data)))
copy(newOutBuf, t.outBuf)
t.outBuf = newOutBuf
}
oldLen := len(t.outBuf)
t.outBuf = t.outBuf[:len(t.outBuf)+len(data)]
copy(t.outBuf[oldLen:], data)
} }
var eraseUnderCursor = []byte{' ', keyEscape, '[', 'D'} var eraseUnderCursor = []byte{' ', keyEscape, '[', 'D'}
var space = []byte{' '}
func isPrintable(key int) bool { func isPrintable(key int) bool {
return key >= 32 && key < 127 return key >= 32 && key < 127
@ -129,6 +169,10 @@ func isPrintable(key int) bool {
// moveCursorToPos appends data to t.outBuf which will move the cursor to the // moveCursorToPos appends data to t.outBuf which will move the cursor to the
// given, logical position in the text. // given, logical position in the text.
func (t *Terminal) moveCursorToPos(pos int) { func (t *Terminal) moveCursorToPos(pos int) {
if !t.echo {
return
}
x := len(t.prompt) + pos x := len(t.prompt) + pos
y := x / t.termWidth y := x / t.termWidth
x = x % t.termWidth x = x % t.termWidth
@ -153,6 +197,12 @@ func (t *Terminal) moveCursorToPos(pos int) {
right = x - t.cursorX right = x - t.cursorX
} }
t.cursorX = x
t.cursorY = y
t.move(up, down, left, right)
}
func (t *Terminal) move(up, down, left, right int) {
movement := make([]byte, 3*(up+down+left+right)) movement := make([]byte, 3*(up+down+left+right))
m := movement m := movement
for i := 0; i < up; i++ { for i := 0; i < up; i++ {
@ -180,11 +230,14 @@ func (t *Terminal) moveCursorToPos(pos int) {
m = m[3:] m = m[3:]
} }
t.cursorX = x
t.cursorY = y
t.queue(movement) t.queue(movement)
} }
func (t *Terminal) clearLineToRight() {
op := []byte{keyEscape, '[', 'K'}
t.queue(op)
}
const maxLineLength = 4096 const maxLineLength = 4096
// handleKey processes the given key and, optionally, returns a line of text // handleKey processes the given key and, optionally, returns a line of text
@ -196,12 +249,15 @@ func (t *Terminal) handleKey(key int) (line string, ok bool) {
return return
} }
t.pos-- t.pos--
t.moveCursorToPos(t.pos)
copy(t.line[t.pos:], t.line[1+t.pos:]) copy(t.line[t.pos:], t.line[1+t.pos:])
t.line = t.line[:len(t.line)-1] t.line = t.line[:len(t.line)-1]
t.writeLine(t.line[t.pos:]) if t.echo {
t.moveCursorToPos(t.pos) t.writeLine(t.line[t.pos:])
}
t.queue(eraseUnderCursor) t.queue(eraseUnderCursor)
t.moveCursorToPos(t.pos)
case keyAltLeft: case keyAltLeft:
// move left by a word. // move left by a word.
if t.pos == 0 { if t.pos == 0 {
@ -260,6 +316,25 @@ func (t *Terminal) handleKey(key int) (line string, ok bool) {
t.cursorY = 0 t.cursorY = 0
t.maxLine = 0 t.maxLine = 0
default: default:
if t.AutoCompleteCallback != nil {
t.lock.Unlock()
newLine, newPos := t.AutoCompleteCallback(t.line, t.pos, key)
t.lock.Lock()
if newLine != nil {
if t.echo {
t.moveCursorToPos(0)
t.writeLine(newLine)
for i := len(newLine); i < len(t.line); i++ {
t.writeLine(space)
}
t.moveCursorToPos(newPos)
}
t.line = newLine
t.pos = newPos
return
}
}
if !isPrintable(key) { if !isPrintable(key) {
return return
} }
@ -274,7 +349,9 @@ func (t *Terminal) handleKey(key int) (line string, ok bool) {
t.line = t.line[:len(t.line)+1] t.line = t.line[:len(t.line)+1]
copy(t.line[t.pos+1:], t.line[t.pos:]) copy(t.line[t.pos+1:], t.line[t.pos:])
t.line[t.pos] = byte(key) t.line[t.pos] = byte(key)
t.writeLine(t.line[t.pos:]) if t.echo {
t.writeLine(t.line[t.pos:])
}
t.pos++ t.pos++
t.moveCursorToPos(t.pos) t.moveCursorToPos(t.pos)
} }
@ -283,15 +360,6 @@ func (t *Terminal) handleKey(key int) (line string, ok bool) {
func (t *Terminal) writeLine(line []byte) { func (t *Terminal) writeLine(line []byte) {
for len(line) != 0 { for len(line) != 0 {
if t.cursorX == t.termWidth {
t.queue([]byte("\r\n"))
t.cursorX = 0
t.cursorY++
if t.cursorY > t.maxLine {
t.maxLine = t.cursorY
}
}
remainingOnLine := t.termWidth - t.cursorX remainingOnLine := t.termWidth - t.cursorX
todo := len(line) todo := len(line)
if todo > remainingOnLine { if todo > remainingOnLine {
@ -300,16 +368,95 @@ func (t *Terminal) writeLine(line []byte) {
t.queue(line[:todo]) t.queue(line[:todo])
t.cursorX += todo t.cursorX += todo
line = line[todo:] line = line[todo:]
if t.cursorX == t.termWidth {
t.cursorX = 0
t.cursorY++
if t.cursorY > t.maxLine {
t.maxLine = t.cursorY
}
}
} }
} }
func (t *Terminal) Write(buf []byte) (n int, err error) { func (t *Terminal) Write(buf []byte) (n int, err error) {
return t.c.Write(buf) t.lock.Lock()
defer t.lock.Unlock()
if t.cursorX == 0 && t.cursorY == 0 {
// This is the easy case: there's nothing on the screen that we
// have to move out of the way.
return t.c.Write(buf)
}
// We have a prompt and possibly user input on the screen. We
// have to clear it first.
t.move(0, /* up */ 0, /* down */ t.cursorX, /* left */ 0 /* right */ )
t.cursorX = 0
t.clearLineToRight()
for t.cursorY > 0 {
t.move(1, /* up */ 0, 0, 0)
t.cursorY--
t.clearLineToRight()
}
if _, err = t.c.Write(t.outBuf); err != nil {
return
}
t.outBuf = t.outBuf[:0]
if n, err = t.c.Write(buf); err != nil {
return
}
t.queue([]byte(t.prompt))
chars := len(t.prompt)
if t.echo {
t.queue(t.line)
chars += len(t.line)
}
t.cursorX = chars % t.termWidth
t.cursorY = chars / t.termWidth
t.moveCursorToPos(t.pos)
if _, err = t.c.Write(t.outBuf); err != nil {
return
}
t.outBuf = t.outBuf[:0]
return
}
// ReadPassword temporarily changes the prompt and reads a password, without
// echo, from the terminal.
func (t *Terminal) ReadPassword(prompt string) (line string, err error) {
t.lock.Lock()
defer t.lock.Unlock()
oldPrompt := t.prompt
t.prompt = prompt
t.echo = false
line, err = t.readLine()
t.prompt = oldPrompt
t.echo = true
return
} }
// ReadLine returns a line of input from the terminal. // ReadLine returns a line of input from the terminal.
func (t *Terminal) ReadLine() (line string, err error) { func (t *Terminal) ReadLine() (line string, err error) {
if t.cursorX == 0 { t.lock.Lock()
defer t.lock.Unlock()
return t.readLine()
}
func (t *Terminal) readLine() (line string, err error) {
// t.lock must be held at this point
if t.cursorX == 0 && t.cursorY == 0 {
t.writeLine([]byte(t.prompt)) t.writeLine([]byte(t.prompt))
t.c.Write(t.outBuf) t.c.Write(t.outBuf)
t.outBuf = t.outBuf[:0] t.outBuf = t.outBuf[:0]
@ -320,7 +467,11 @@ func (t *Terminal) ReadLine() (line string, err error) {
// containing a partial key sequence // containing a partial key sequence
readBuf := t.inBuf[len(t.remainder):] readBuf := t.inBuf[len(t.remainder):]
var n int var n int
t.lock.Unlock()
n, err = t.c.Read(readBuf) n, err = t.c.Read(readBuf)
t.lock.Lock()
if err != nil { if err != nil {
return return
} }
@ -358,5 +509,8 @@ func (t *Terminal) ReadLine() (line string, err error) {
} }
func (t *Terminal) SetSize(width, height int) { func (t *Terminal) SetSize(width, height int) {
t.lock.Lock()
defer t.lock.Unlock()
t.termWidth, t.termHeight = width, height t.termWidth, t.termHeight = width, height
} }
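
A minimal sketch (not from the patch) exercising the new Terminal features added above: the Escape codes, AutoCompleteCallback, and ReadPassword. The rw value stands for any io.ReadWriter connected to a VT100-style terminal (for example an SSH channel), and the completion rule is made up.

package termutil // hypothetical

import (
    "io"

    "exp/terminal"
)

// prompt reads a password and then a normal line of input.
func prompt(rw io.ReadWriter) (password, line string, err error) {
    t := terminal.NewTerminal(rw, "> ")

    // Hypothetical completion: a tab turns any partial input into "help".
    t.AutoCompleteCallback = func(input []byte, pos, key int) ([]byte, int) {
        if key == '\t' {
            return []byte("help"), len("help")
        }
        return nil, 0
    }

    // Color a status message using the terminal's escape codes.
    t.Write(t.Escape.Green)
    t.Write([]byte("ready\r\n"))
    t.Write(t.Escape.Reset)

    if password, err = t.ReadPassword("password: "); err != nil {
        return
    }
    line, err = t.ReadLine()
    return
}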

View File

@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build linux
package terminal package terminal
import ( import (

View File

@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build linux
// Package terminal provides support functions for dealing with terminals, as // Package terminal provides support functions for dealing with terminals, as
// commonly found on UNIX systems. // commonly found on UNIX systems.
// //
@ -9,7 +11,7 @@
// //
// oldState, err := terminal.MakeRaw(0) // oldState, err := terminal.MakeRaw(0)
// if err != nil { // if err != nil {
// panic(err.String()) // panic(err)
// } // }
// defer terminal.Restore(0, oldState) // defer terminal.Restore(0, oldState)
package terminal package terminal
@ -17,6 +19,7 @@ package terminal
import ( import (
"io" "io"
"syscall" "syscall"
"unsafe"
) )
// State contains the state of a terminal. // State contains the state of a terminal.
@ -57,6 +60,18 @@ func Restore(fd int, state *State) error {
return err return err
} }
func ioctl(int, int, unsafe.Pointer) int __asm__("ioctl")
// GetSize returns the dimensions of the given terminal.
func GetSize(fd int) (width, height int, err error) {
var dimensions [4]uint16
if ioctl(fd, syscall.TIOCGWINSZ, unsafe.Pointer(&dimensions)) < 0 {
return -1, -1, syscall.GetErrno()
}
return int(dimensions[1]), int(dimensions[0]), nil
}
// ReadPassword reads a line of input from a terminal without local echo. This // ReadPassword reads a line of input from a terminal without local echo. This
// is commonly used for inputting passwords and other sensitive data. The slice // is commonly used for inputting passwords and other sensitive data. The slice
// returned does not include the \n. // returned does not include the \n.
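
A short sketch (not from the patch) of the new GetSize function, assuming the package is imported as exp/terminal and that file descriptor 0 refers to a terminal.

package main

import (
    "fmt"

    "exp/terminal"
)

func main() {
    // Query the dimensions of the terminal on standard input.
    width, height, err := terminal.GetSize(0)
    if err != nil {
        panic(err)
    }
    fmt.Printf("%d columns, %d rows\n", width, height)
}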

View File

@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build windows
// Package winfsnotify allows the user to receive // Package winfsnotify allows the user to receive
// file system event notifications on Windows. // file system event notifications on Windows.
package winfsnotify package winfsnotify

View File

@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build windows
package winfsnotify package winfsnotify
import ( import (

View File

@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build windows
package main package main
import ( import (

View File

@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build windows
package main package main
import ( import (

View File

@ -1,3 +1,4 @@
// +build windows
// mksyscall_windows.pl winapi.go // mksyscall_windows.pl winapi.go
// MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT // MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT

View File

@ -506,78 +506,42 @@ func BenchmarkSprintfFloat(b *testing.B) {
} }
} }
var mallocBuf bytes.Buffer
var mallocTest = []struct {
count int
desc string
fn func()
}{
{0, `Sprintf("")`, func() { Sprintf("") }},
{1, `Sprintf("xxx")`, func() { Sprintf("xxx") }},
{1, `Sprintf("%x")`, func() { Sprintf("%x", 7) }},
{2, `Sprintf("%s")`, func() { Sprintf("%s", "hello") }},
{1, `Sprintf("%x %x")`, func() { Sprintf("%x", 7, 112) }},
{1, `Sprintf("%g")`, func() { Sprintf("%g", 3.14159) }},
{0, `Fprintf(buf, "%x %x %x")`, func() { mallocBuf.Reset(); Fprintf(&mallocBuf, "%x %x %x", 7, 8, 9) }},
{1, `Fprintf(buf, "%s")`, func() { mallocBuf.Reset(); Fprintf(&mallocBuf, "%s", "hello") }},
}
var _ bytes.Buffer
func TestCountMallocs(t *testing.T) { func TestCountMallocs(t *testing.T) {
if testing.Short() { if testing.Short() {
return return
} }
const N = 100 for _, mt := range mallocTest {
runtime.UpdateMemStats() const N = 100
mallocs := 0 - runtime.MemStats.Mallocs runtime.UpdateMemStats()
for i := 0; i < N; i++ { mallocs := 0 - runtime.MemStats.Mallocs
Sprintf("") for i := 0; i < N; i++ {
mt.fn()
}
runtime.UpdateMemStats()
mallocs += runtime.MemStats.Mallocs
if mallocs/N != uint64(mt.count) {
t.Errorf("%s: expected %d mallocs, got %d", mt.desc, mt.count, mallocs/N)
}
} }
runtime.UpdateMemStats()
mallocs += runtime.MemStats.Mallocs
Printf("mallocs per Sprintf(\"\"): %d\n", mallocs/N)
runtime.UpdateMemStats()
mallocs = 0 - runtime.MemStats.Mallocs
for i := 0; i < N; i++ {
Sprintf("xxx")
}
runtime.UpdateMemStats()
mallocs += runtime.MemStats.Mallocs
Printf("mallocs per Sprintf(\"xxx\"): %d\n", mallocs/N)
runtime.UpdateMemStats()
mallocs = 0 - runtime.MemStats.Mallocs
for i := 0; i < N; i++ {
Sprintf("%x", i)
}
runtime.UpdateMemStats()
mallocs += runtime.MemStats.Mallocs
Printf("mallocs per Sprintf(\"%%x\"): %d\n", mallocs/N)
runtime.UpdateMemStats()
mallocs = 0 - runtime.MemStats.Mallocs
for i := 0; i < N; i++ {
Sprintf("%s", "hello")
}
runtime.UpdateMemStats()
mallocs += runtime.MemStats.Mallocs
Printf("mallocs per Sprintf(\"%%s\"): %d\n", mallocs/N)
runtime.UpdateMemStats()
mallocs = 0 - runtime.MemStats.Mallocs
for i := 0; i < N; i++ {
Sprintf("%x %x", i, i)
}
runtime.UpdateMemStats()
mallocs += runtime.MemStats.Mallocs
Printf("mallocs per Sprintf(\"%%x %%x\"): %d\n", mallocs/N)
runtime.UpdateMemStats()
mallocs = 0 - runtime.MemStats.Mallocs
for i := 0; i < N; i++ {
Sprintf("%g", 3.14159)
}
runtime.UpdateMemStats()
mallocs += runtime.MemStats.Mallocs
Printf("mallocs per Sprintf(\"%%g\"): %d\n", mallocs/N)
buf := new(bytes.Buffer)
runtime.UpdateMemStats()
mallocs = 0 - runtime.MemStats.Mallocs
for i := 0; i < N; i++ {
buf.Reset()
Fprintf(buf, "%x %x %x", i, i, i)
}
runtime.UpdateMemStats()
mallocs += runtime.MemStats.Mallocs
Printf("mallocs per Fprintf(buf, \"%%x %%x %%x\"): %d\n", mallocs/N)
runtime.UpdateMemStats()
mallocs = 0 - runtime.MemStats.Mallocs
for i := 0; i < N; i++ {
buf.Reset()
Fprintf(buf, "%s", "hello")
}
runtime.UpdateMemStats()
mallocs += runtime.MemStats.Mallocs
Printf("mallocs per Fprintf(buf, \"%%s\"): %d\n", mallocs/N)
} }
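
A minimal sketch (not part of the patch) of the bookkeeping the rewritten test uses, factored into a helper; the package and function names are hypothetical.

package fmtutil

import "runtime"

// mallocsPerCall returns the average number of allocations per call to fn,
// using the same UpdateMemStats accounting as TestCountMallocs above.
func mallocsPerCall(n int, fn func()) uint64 {
    runtime.UpdateMemStats()
    mallocs := 0 - runtime.MemStats.Mallocs
    for i := 0; i < n; i++ {
        fn()
    }
    runtime.UpdateMemStats()
    mallocs += runtime.MemStats.Mallocs
    return mallocs / uint64(n)
}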
type flagPrinter struct{} type flagPrinter struct{}

View File

@ -154,12 +154,17 @@ func putint(buf []byte, base, val uint64, digits string) int {
return i - 1 return i - 1
} }
var (
trueBytes = []byte("true")
falseBytes = []byte("false")
)
// fmt_boolean formats a boolean. // fmt_boolean formats a boolean.
func (f *fmt) fmt_boolean(v bool) { func (f *fmt) fmt_boolean(v bool) {
if v { if v {
f.padString("true") f.pad(trueBytes)
} else { } else {
f.padString("false") f.pad(falseBytes)
} }
} }
@ -283,31 +288,18 @@ func (f *fmt) fmt_s(s string) {
} }
// fmt_sx formats a string as a hexadecimal encoding of its bytes. // fmt_sx formats a string as a hexadecimal encoding of its bytes.
func (f *fmt) fmt_sx(s string) { func (f *fmt) fmt_sx(s, digits string) {
t := "" // TODO: Avoid buffer by pre-padding.
var b bytes.Buffer
for i := 0; i < len(s); i++ { for i := 0; i < len(s); i++ {
if i > 0 && f.space { if i > 0 && f.space {
t += " " b.WriteByte(' ')
} }
v := s[i] v := s[i]
t += string(ldigits[v>>4]) b.WriteByte(digits[v>>4])
t += string(ldigits[v&0xF]) b.WriteByte(digits[v&0xF])
} }
f.padString(t) f.pad(b.Bytes())
}
// fmt_sX formats a string as an uppercase hexadecimal encoding of its bytes.
func (f *fmt) fmt_sX(s string) {
t := ""
for i := 0; i < len(s); i++ {
if i > 0 && f.space {
t += " "
}
v := s[i]
t += string(udigits[v>>4])
t += string(udigits[v&0xF])
}
f.padString(t)
} }
// fmt_q formats a string as a double-quoted, escaped Go string constant. // fmt_q formats a string as a double-quoted, escaped Go string constant.
@ -329,13 +321,13 @@ func (f *fmt) fmt_q(s string) {
// fmt_qc formats the integer as a single-quoted, escaped Go character constant. // fmt_qc formats the integer as a single-quoted, escaped Go character constant.
// If the character is not valid Unicode, it will print '\ufffd'. // If the character is not valid Unicode, it will print '\ufffd'.
func (f *fmt) fmt_qc(c int64) { func (f *fmt) fmt_qc(c int64) {
var quoted string var quoted []byte
if f.plus { if f.plus {
quoted = strconv.QuoteRuneToASCII(rune(c)) quoted = strconv.AppendQuoteRuneToASCII(f.intbuf[0:0], rune(c))
} else { } else {
quoted = strconv.QuoteRune(rune(c)) quoted = strconv.AppendQuoteRune(f.intbuf[0:0], rune(c))
} }
f.padString(quoted) f.pad(quoted)
} }
// floating-point // floating-point
@ -347,57 +339,70 @@ func doPrec(f *fmt, def int) int {
return def return def
} }
// Add a plus sign or space to the floating-point string representation if missing and required. // formatFloat formats a float64; it is an efficient equivalent to f.pad(strconv.FormatFloat()...).
func (f *fmt) plusSpace(s string) { func (f *fmt) formatFloat(v float64, verb byte, prec, n int) {
if s[0] != '-' { // We leave one byte at the beginning of f.intbuf for a sign if needed,
// and make it a space, which we might be able to use.
f.intbuf[0] = ' '
slice := strconv.AppendFloat(f.intbuf[0:1], v, verb, prec, n)
// Add a plus sign or space to the floating-point string representation if missing and required.
// The formatted number starts at slice[1].
switch slice[1] {
case '-', '+':
// We're set; drop the leading space.
slice = slice[1:]
default:
// There's no sign, but we might need one.
if f.plus { if f.plus {
s = "+" + s slice[0] = '+'
} else if f.space { } else if f.space {
s = " " + s // space is already there
} else {
slice = slice[1:]
} }
} }
f.padString(s) f.pad(slice)
} }
// fmt_e64 formats a float64 in the form -1.23e+12. // fmt_e64 formats a float64 in the form -1.23e+12.
func (f *fmt) fmt_e64(v float64) { f.plusSpace(strconv.FormatFloat(v, 'e', doPrec(f, 6), 64)) } func (f *fmt) fmt_e64(v float64) { f.formatFloat(v, 'e', doPrec(f, 6), 64) }
// fmt_E64 formats a float64 in the form -1.23E+12. // fmt_E64 formats a float64 in the form -1.23E+12.
func (f *fmt) fmt_E64(v float64) { f.plusSpace(strconv.FormatFloat(v, 'E', doPrec(f, 6), 64)) } func (f *fmt) fmt_E64(v float64) { f.formatFloat(v, 'E', doPrec(f, 6), 64) }
// fmt_f64 formats a float64 in the form -1.23. // fmt_f64 formats a float64 in the form -1.23.
func (f *fmt) fmt_f64(v float64) { f.plusSpace(strconv.FormatFloat(v, 'f', doPrec(f, 6), 64)) } func (f *fmt) fmt_f64(v float64) { f.formatFloat(v, 'f', doPrec(f, 6), 64) }
// fmt_g64 formats a float64 in the 'f' or 'e' form according to size. // fmt_g64 formats a float64 in the 'f' or 'e' form according to size.
func (f *fmt) fmt_g64(v float64) { f.plusSpace(strconv.FormatFloat(v, 'g', doPrec(f, -1), 64)) } func (f *fmt) fmt_g64(v float64) { f.formatFloat(v, 'g', doPrec(f, -1), 64) }
// fmt_G64 formats a float64 in the 'f' or 'E' form according to size. // fmt_G64 formats a float64 in the 'f' or 'E' form according to size.
func (f *fmt) fmt_G64(v float64) { f.plusSpace(strconv.FormatFloat(v, 'G', doPrec(f, -1), 64)) } func (f *fmt) fmt_G64(v float64) { f.formatFloat(v, 'G', doPrec(f, -1), 64) }
// fmt_fb64 formats a float64 in the form -123p3 (exponent is power of 2). // fmt_fb64 formats a float64 in the form -123p3 (exponent is power of 2).
func (f *fmt) fmt_fb64(v float64) { f.plusSpace(strconv.FormatFloat(v, 'b', 0, 64)) } func (f *fmt) fmt_fb64(v float64) { f.formatFloat(v, 'b', 0, 64) }
// float32 // float32
// cannot defer to float64 versions // cannot defer to float64 versions
// because it will get rounding wrong in corner cases. // because it will get rounding wrong in corner cases.
// fmt_e32 formats a float32 in the form -1.23e+12. // fmt_e32 formats a float32 in the form -1.23e+12.
func (f *fmt) fmt_e32(v float32) { f.plusSpace(strconv.FormatFloat(float64(v), 'e', doPrec(f, 6), 32)) } func (f *fmt) fmt_e32(v float32) { f.formatFloat(float64(v), 'e', doPrec(f, 6), 32) }
// fmt_E32 formats a float32 in the form -1.23E+12. // fmt_E32 formats a float32 in the form -1.23E+12.
func (f *fmt) fmt_E32(v float32) { f.plusSpace(strconv.FormatFloat(float64(v), 'E', doPrec(f, 6), 32)) } func (f *fmt) fmt_E32(v float32) { f.formatFloat(float64(v), 'E', doPrec(f, 6), 32) }
// fmt_f32 formats a float32 in the form -1.23. // fmt_f32 formats a float32 in the form -1.23.
func (f *fmt) fmt_f32(v float32) { f.plusSpace(strconv.FormatFloat(float64(v), 'f', doPrec(f, 6), 32)) } func (f *fmt) fmt_f32(v float32) { f.formatFloat(float64(v), 'f', doPrec(f, 6), 32) }
// fmt_g32 formats a float32 in the 'f' or 'e' form according to size. // fmt_g32 formats a float32 in the 'f' or 'e' form according to size.
func (f *fmt) fmt_g32(v float32) { f.plusSpace(strconv.FormatFloat(float64(v), 'g', doPrec(f, -1), 32)) } func (f *fmt) fmt_g32(v float32) { f.formatFloat(float64(v), 'g', doPrec(f, -1), 32) }
// fmt_G32 formats a float32 in the 'f' or 'E' form according to size. // fmt_G32 formats a float32 in the 'f' or 'E' form according to size.
func (f *fmt) fmt_G32(v float32) { f.plusSpace(strconv.FormatFloat(float64(v), 'G', doPrec(f, -1), 32)) } func (f *fmt) fmt_G32(v float32) { f.formatFloat(float64(v), 'G', doPrec(f, -1), 32) }
// fmt_fb32 formats a float32 in the form -123p3 (exponent is power of 2). // fmt_fb32 formats a float32 in the form -123p3 (exponent is power of 2).
func (f *fmt) fmt_fb32(v float32) { f.padString(strconv.FormatFloat(float64(v), 'b', 0, 32)) } func (f *fmt) fmt_fb32(v float32) { f.formatFloat(float64(v), 'b', 0, 32) }
// fmt_c64 formats a complex64 according to the verb. // fmt_c64 formats a complex64 according to the verb.
func (f *fmt) fmt_c64(v complex64, verb rune) { func (f *fmt) fmt_c64(v complex64, verb rune) {
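
For reference, a few Sprintf-style calls (not from the patch) showing the sign and space flags that formatFloat now applies in place of the old plusSpace helper:

package main

import "fmt"

func main() {
    fmt.Printf("[%+.2f]\n", 3.14159) // [+3.14]  plus flag forces a sign
    fmt.Printf("[% .2f]\n", 3.14159) // [ 3.14]  space flag reserves a sign column
    fmt.Printf("[%.2f]\n", -3.14159) // [-3.14]  negative values keep their sign
}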

View File

@ -503,9 +503,9 @@ func (p *pp) fmtString(v string, verb rune, goSyntax bool) {
case 's': case 's':
p.fmt.fmt_s(v) p.fmt.fmt_s(v)
case 'x': case 'x':
p.fmt.fmt_sx(v) p.fmt.fmt_sx(v, ldigits)
case 'X': case 'X':
p.fmt.fmt_sX(v) p.fmt.fmt_sx(v, udigits)
case 'q': case 'q':
p.fmt.fmt_q(v) p.fmt.fmt_q(v)
default: default:
@ -542,9 +542,9 @@ func (p *pp) fmtBytes(v []byte, verb rune, goSyntax bool, depth int) {
case 's': case 's':
p.fmt.fmt_s(s) p.fmt.fmt_s(s)
case 'x': case 'x':
p.fmt.fmt_sx(s) p.fmt.fmt_sx(s, ldigits)
case 'X': case 'X':
p.fmt.fmt_sX(s) p.fmt.fmt_sx(s, udigits)
case 'q': case 'q':
p.fmt.fmt_q(s) p.fmt.fmt_q(s)
default: default:

View File

@ -80,7 +80,7 @@ func (s *Scope) String() string {
type Object struct { type Object struct {
Kind ObjKind Kind ObjKind
Name string // declared name Name string // declared name
Decl interface{} // corresponding Field, XxxSpec, FuncDecl, or LabeledStmt; or nil Decl interface{} // corresponding Field, XxxSpec, FuncDecl, LabeledStmt, or AssignStmt; or nil
Data interface{} // object-specific data; or nil Data interface{} // object-specific data; or nil
Type interface{} // place holder for type information; may be nil Type interface{} // place holder for type information; may be nil
} }
@ -125,6 +125,12 @@ func (obj *Object) Pos() token.Pos {
if d.Label.Name == name { if d.Label.Name == name {
return d.Label.Pos() return d.Label.Pos()
} }
case *AssignStmt:
for _, x := range d.Lhs {
if ident, isIdent := x.(*Ident); isIdent && ident.Name == name {
return ident.Pos()
}
}
} }
return token.NoPos return token.NoPos
} }
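
A short sketch (not from the patch) of why the new AssignStmt case matters: objects declared with := carry an *ast.AssignStmt as their Decl, and Pos now resolves their position. The source snippet is made up.

package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"
)

func main() {
    fset := token.NewFileSet()
    src := "package p\nfunc f() { x := 1; _ = x }\n"
    file, err := parser.ParseFile(fset, "p.go", src, 0)
    if err != nil {
        panic(err)
    }
    ast.Inspect(file, func(n ast.Node) bool {
        if id, ok := n.(*ast.Ident); ok && id.Obj != nil && id.Obj.Kind == ast.Var {
            // For x, Obj.Decl is the *ast.AssignStmt of "x := 1", so Pos
            // reports the position of the identifier on its left-hand side.
            fmt.Println(id.Name, "declared at", fset.Position(id.Obj.Pos()))
        }
        return true
    })
}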

View File

@ -46,8 +46,9 @@ var buildPkgs = []struct {
{ {
"go/build/cgotest", "go/build/cgotest",
&DirInfo{ &DirInfo{
CgoFiles: []string{"cgotest.go"}, CgoFiles: ifCgo([]string{"cgotest.go"}),
CFiles: []string{"cgotest.c"}, CFiles: []string{"cgotest.c"},
HFiles: []string{"cgotest.h"},
Imports: []string{"C", "unsafe"}, Imports: []string{"C", "unsafe"},
TestImports: []string{}, TestImports: []string{},
Package: "cgotest", Package: "cgotest",
@ -55,6 +56,13 @@ var buildPkgs = []struct {
}, },
} }
func ifCgo(x []string) []string {
if DefaultContext.CgoEnabled {
return x
}
return nil
}
const cmdtestOutput = "3" const cmdtestOutput = "3"
func TestBuild(t *testing.T) { func TestBuild(t *testing.T) {
@ -71,6 +79,10 @@ func TestBuild(t *testing.T) {
continue continue
} }
if tt.dir == "go/build/cgotest" && len(info.CgoFiles) == 0 {
continue
}
s, err := Build(tree, tt.dir, info) s, err := Build(tree, tt.dir, info)
if err != nil { if err != nil {
t.Errorf("Build(%#q): %v", tt.dir, err) t.Errorf("Build(%#q): %v", tt.dir, err)

View File

@ -26,9 +26,9 @@ import (
// A Context specifies the supporting context for a build. // A Context specifies the supporting context for a build.
type Context struct { type Context struct {
GOARCH string // target architecture GOARCH string // target architecture
GOOS string // target operating system GOOS string // target operating system
// TODO(rsc,adg): GOPATH CgoEnabled bool // whether cgo can be used
// By default, ScanDir uses the operating system's // By default, ScanDir uses the operating system's
// file system calls to read directories and files. // file system calls to read directories and files.
@ -75,9 +75,36 @@ func (ctxt *Context) readFile(dir, file string) (string, []byte, error) {
// The DefaultContext is the default Context for builds. // The DefaultContext is the default Context for builds.
// It uses the GOARCH and GOOS environment variables // It uses the GOARCH and GOOS environment variables
// if set, or else the compiled code's GOARCH and GOOS. // if set, or else the compiled code's GOARCH and GOOS.
var DefaultContext = Context{ var DefaultContext = defaultContext()
GOARCH: envOr("GOARCH", runtime.GOARCH),
GOOS: envOr("GOOS", runtime.GOOS), var cgoEnabled = map[string]bool{
"darwin/386": true,
"darwin/amd64": true,
"linux/386": true,
"linux/amd64": true,
"freebsd/386": true,
"freebsd/amd64": true,
"windows/386": true,
"windows/amd64": true,
}
func defaultContext() Context {
var c Context
c.GOARCH = envOr("GOARCH", runtime.GOARCH)
c.GOOS = envOr("GOOS", runtime.GOOS)
s := os.Getenv("CGO_ENABLED")
switch s {
case "1":
c.CgoEnabled = true
case "0":
c.CgoEnabled = false
default:
c.CgoEnabled = cgoEnabled[c.GOOS+"/"+c.GOARCH]
}
return c
} }
func envOr(name, def string) string { func envOr(name, def string) string {
@ -96,8 +123,9 @@ type DirInfo struct {
// Source files // Source files
GoFiles []string // .go files in dir (excluding CgoFiles) GoFiles []string // .go files in dir (excluding CgoFiles)
HFiles []string // .h files in dir
CFiles []string // .c files in dir CFiles []string // .c files in dir
SFiles []string // .s files in dir SFiles []string // .s (and, when using cgo, .S) files in dir
CgoFiles []string // .go files that import "C" CgoFiles []string // .go files that import "C"
// Cgo directives // Cgo directives
@ -135,6 +163,7 @@ func (ctxt *Context) ScanDir(dir string) (info *DirInfo, err error) {
return nil, err return nil, err
} }
var Sfiles []string // files with ".S" (capital S)
var di DirInfo var di DirInfo
imported := make(map[string]bool) imported := make(map[string]bool)
testImported := make(map[string]bool) testImported := make(map[string]bool)
@ -154,7 +183,7 @@ func (ctxt *Context) ScanDir(dir string) (info *DirInfo, err error) {
ext := path.Ext(name) ext := path.Ext(name)
switch ext { switch ext {
case ".go", ".c", ".s": case ".go", ".c", ".s", ".h", ".S":
// tentatively okay // tentatively okay
default: default:
// skip // skip
@ -175,9 +204,15 @@ func (ctxt *Context) ScanDir(dir string) (info *DirInfo, err error) {
case ".c": case ".c":
di.CFiles = append(di.CFiles, name) di.CFiles = append(di.CFiles, name)
continue continue
case ".h":
di.HFiles = append(di.HFiles, name)
continue
case ".s": case ".s":
di.SFiles = append(di.SFiles, name) di.SFiles = append(di.SFiles, name)
continue continue
case ".S":
Sfiles = append(Sfiles, name)
continue
} }
pf, err := parser.ParseFile(fset, filename, data, parser.ImportsOnly|parser.ParseComments) pf, err := parser.ParseFile(fset, filename, data, parser.ImportsOnly|parser.ParseComments)
@ -256,7 +291,9 @@ func (ctxt *Context) ScanDir(dir string) (info *DirInfo, err error) {
} }
} }
if isCgo { if isCgo {
di.CgoFiles = append(di.CgoFiles, name) if ctxt.CgoEnabled {
di.CgoFiles = append(di.CgoFiles, name)
}
} else if isTest { } else if isTest {
if pkg == string(pf.Name.Name) { if pkg == string(pf.Name.Name) {
di.TestGoFiles = append(di.TestGoFiles, name) di.TestGoFiles = append(di.TestGoFiles, name)
@ -282,6 +319,15 @@ func (ctxt *Context) ScanDir(dir string) (info *DirInfo, err error) {
di.TestImports[i] = p di.TestImports[i] = p
i++ i++
} }
// add the .S files only if we are using cgo
// (which means gcc will compile them).
// The standard assemblers expect .s files.
if len(di.CgoFiles) > 0 {
di.SFiles = append(di.SFiles, Sfiles...)
sort.Strings(di.SFiles)
}
// File name lists are sorted because ReadDir sorts. // File name lists are sorted because ReadDir sorts.
sort.Strings(di.Imports) sort.Strings(di.Imports)
sort.Strings(di.TestImports) sort.Strings(di.TestImports)
@ -289,7 +335,6 @@ func (ctxt *Context) ScanDir(dir string) (info *DirInfo, err error) {
} }
var slashslash = []byte("//") var slashslash = []byte("//")
var plusBuild = []byte("+build")
// shouldBuild reports whether it is okay to use this file, // shouldBuild reports whether it is okay to use this file,
// The rule is that in the file's leading run of // comments // The rule is that in the file's leading run of // comments
@ -510,14 +555,22 @@ func splitQuoted(s string) (r []string, err error) {
// //
// $GOOS // $GOOS
// $GOARCH // $GOARCH
// $GOOS/$GOARCH // cgo (if cgo is enabled)
// nocgo (if cgo is disabled)
// a slash-separated list of any of these
// //
func (ctxt *Context) matchOSArch(name string) bool { func (ctxt *Context) matchOSArch(name string) bool {
if ctxt.CgoEnabled && name == "cgo" {
return true
}
if !ctxt.CgoEnabled && name == "nocgo" {
return true
}
if name == ctxt.GOOS || name == ctxt.GOARCH { if name == ctxt.GOOS || name == ctxt.GOARCH {
return true return true
} }
i := strings.Index(name, "/") i := strings.Index(name, "/")
return i >= 0 && name[:i] == ctxt.GOOS && name[i+1:] == ctxt.GOARCH return i >= 0 && ctxt.matchOSArch(name[:i]) && ctxt.matchOSArch(name[i+1:])
} }
// goodOSArchFile returns false if the name contains a $GOOS or $GOARCH // goodOSArchFile returns false if the name contains a $GOOS or $GOARCH
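
A small sketch (not from the patch) of the new knob from the consumer side; it only prints what the default context decided, which reflects CGO_ENABLED and the cgoEnabled table above.

package main

import (
    "fmt"
    "go/build"
)

func main() {
    ctxt := build.DefaultContext
    fmt.Println("GOOS:", ctxt.GOOS, "GOARCH:", ctxt.GOARCH)
    fmt.Println("cgo enabled:", ctxt.CgoEnabled)
    // Build constraints may now also use the names "cgo" and "nocgo",
    // alone or in slash-separated lists such as "linux/cgo".
}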

View File

@ -57,7 +57,7 @@ func (t *Tree) PkgDir() string {
func (t *Tree) BinDir() string { func (t *Tree) BinDir() string {
if t.Goroot { if t.Goroot {
if gobin := os.Getenv("GOBIN"); gobin != "" { if gobin := os.Getenv("GOBIN"); gobin != "" {
return gobin return filepath.Clean(gobin)
} }
} }
return filepath.Join(t.Path, "bin") return filepath.Join(t.Path, "bin")
@ -85,8 +85,8 @@ func (t *Tree) HasPkg(pkg string) bool {
} }
var ( var (
ErrNotFound = errors.New("go/build: package could not be found locally") ErrNotFound = errors.New("package could not be found locally")
ErrTreeNotFound = errors.New("go/build: no valid GOROOT or GOPATH could be found") ErrTreeNotFound = errors.New("no valid GOROOT or GOPATH could be found")
) )
// FindTree takes an import or filesystem path and returns the // FindTree takes an import or filesystem path and returns the
@ -151,7 +151,7 @@ func init() {
root := runtime.GOROOT() root := runtime.GOROOT()
t, err := newTree(root) t, err := newTree(root)
if err != nil { if err != nil {
log.Printf("go/build: invalid GOROOT %q: %v", root, err) log.Printf("invalid GOROOT %q: %v", root, err)
} else { } else {
t.Goroot = true t.Goroot = true
Path = []*Tree{t} Path = []*Tree{t}
@ -163,7 +163,7 @@ func init() {
} }
t, err := newTree(p) t, err := newTree(p)
if err != nil { if err != nil {
log.Printf("go/build: invalid GOPATH %q: %v", p, err) log.Printf("invalid GOPATH %q: %v", p, err)
continue continue
} }
Path = append(Path, t) Path = append(Path, t)

View File

@ -13,17 +13,32 @@ import (
) )
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// Collection of documentation info
type typeDoc struct { // embeddedType describes the type of an anonymous field.
//
type embeddedType struct {
typ *typeInfo // the corresponding base type
ptr bool // if set, the anonymous field type is a pointer
}
type typeInfo struct {
// len(decl.Specs) == 1, and the element type is *ast.TypeSpec // len(decl.Specs) == 1, and the element type is *ast.TypeSpec
// if the type declaration hasn't been seen yet, decl is nil // if the type declaration hasn't been seen yet, decl is nil
decl *ast.GenDecl decl *ast.GenDecl
// values, factory functions, and methods associated with the type embedded []embeddedType
forward *TypeDoc // forward link to processed type documentation
// declarations associated with the type
values []*ast.GenDecl // consts and vars values []*ast.GenDecl // consts and vars
factories map[string]*ast.FuncDecl factories map[string]*ast.FuncDecl
methods map[string]*ast.FuncDecl methods map[string]*ast.FuncDecl
} }
func (info *typeInfo) addEmbeddedType(embedded *typeInfo, isPtr bool) {
info.embedded = append(info.embedded, embeddedType{embedded, isPtr})
}
// docReader accumulates documentation for a single package. // docReader accumulates documentation for a single package.
// It modifies the AST: Comments (declaration documentation) // It modifies the AST: Comments (declaration documentation)
// that have been collected by the DocReader are set to nil // that have been collected by the DocReader are set to nil
@ -32,17 +47,19 @@ type typeDoc struct {
// printing the corresponding AST node). // printing the corresponding AST node).
// //
type docReader struct { type docReader struct {
doc *ast.CommentGroup // package documentation, if any doc *ast.CommentGroup // package documentation, if any
pkgName string pkgName string
values []*ast.GenDecl // consts and vars values []*ast.GenDecl // consts and vars
types map[string]*typeDoc types map[string]*typeInfo
funcs map[string]*ast.FuncDecl embedded map[string]*typeInfo // embedded types, possibly not exported
bugs []*ast.CommentGroup funcs map[string]*ast.FuncDecl
bugs []*ast.CommentGroup
} }
func (doc *docReader) init(pkgName string) { func (doc *docReader) init(pkgName string) {
doc.pkgName = pkgName doc.pkgName = pkgName
doc.types = make(map[string]*typeDoc) doc.types = make(map[string]*typeInfo)
doc.embedded = make(map[string]*typeInfo)
doc.funcs = make(map[string]*ast.FuncDecl) doc.funcs = make(map[string]*ast.FuncDecl)
} }
@ -52,56 +69,40 @@ func (doc *docReader) addDoc(comments *ast.CommentGroup) {
doc.doc = comments doc.doc = comments
return return
} }
// More than one package comment: Usually there will be only // More than one package comment: Usually there will be only
// one file with a package comment, but it's better to collect // one file with a package comment, but it's better to collect
// all comments than drop them on the floor. // all comments than drop them on the floor.
// (This code isn't particularly clever - no amortized doubling is blankComment := &ast.Comment{token.NoPos, "//"}
// used - but this situation occurs rarely and is not time-critical.) list := append(doc.doc.List, blankComment)
n1 := len(doc.doc.List) doc.doc.List = append(list, comments.List...)
n2 := len(comments.List)
list := make([]*ast.Comment, n1+1+n2) // + 1 for separator line
copy(list, doc.doc.List)
list[n1] = &ast.Comment{token.NoPos, "//"} // separator line
copy(list[n1+1:], comments.List)
doc.doc = &ast.CommentGroup{list}
} }
func (doc *docReader) addType(decl *ast.GenDecl) { func (doc *docReader) lookupTypeInfo(name string) *typeInfo {
spec := decl.Specs[0].(*ast.TypeSpec) if name == "" || name == "_" {
typ := doc.lookupTypeDoc(spec.Name.Name)
// typ should always be != nil since declared types
// are always named - be conservative and check
if typ != nil {
// a type should be added at most once, so typ.decl
// should be nil - if it isn't, simply overwrite it
typ.decl = decl
}
}
func (doc *docReader) lookupTypeDoc(name string) *typeDoc {
if name == "" {
return nil // no type docs for anonymous types return nil // no type docs for anonymous types
} }
if tdoc, found := doc.types[name]; found { if info, found := doc.types[name]; found {
return tdoc return info
} }
// type wasn't found - add one without declaration // type wasn't found - add one without declaration
tdoc := &typeDoc{nil, nil, make(map[string]*ast.FuncDecl), make(map[string]*ast.FuncDecl)} info := &typeInfo{
doc.types[name] = tdoc factories: make(map[string]*ast.FuncDecl),
return tdoc methods: make(map[string]*ast.FuncDecl),
}
doc.types[name] = info
return info
} }
func baseTypeName(typ ast.Expr) string { func baseTypeName(typ ast.Expr, allTypes bool) string {
switch t := typ.(type) { switch t := typ.(type) {
case *ast.Ident: case *ast.Ident:
// if the type is not exported, the effect to // if the type is not exported, the effect to
// a client is as if there were no type name // a client is as if there were no type name
if t.IsExported() { if t.IsExported() || allTypes {
return t.Name return t.Name
} }
case *ast.StarExpr: case *ast.StarExpr:
return baseTypeName(t.X) return baseTypeName(t.X, allTypes)
} }
return "" return ""
} }
@ -120,7 +121,7 @@ func (doc *docReader) addValue(decl *ast.GenDecl) {
switch { switch {
case v.Type != nil: case v.Type != nil:
// a type is present; determine its name // a type is present; determine its name
name = baseTypeName(v.Type) name = baseTypeName(v.Type, false)
case decl.Tok == token.CONST: case decl.Tok == token.CONST:
// no type is present but we have a constant declaration; // no type is present but we have a constant declaration;
// use the previous type name (w/o more type information // use the previous type name (w/o more type information
@ -148,7 +149,7 @@ func (doc *docReader) addValue(decl *ast.GenDecl) {
values := &doc.values values := &doc.values
if domName != "" && domFreq >= int(float64(len(decl.Specs))*threshold) { if domName != "" && domFreq >= int(float64(len(decl.Specs))*threshold) {
// typed entries are sufficiently frequent // typed entries are sufficiently frequent
typ := doc.lookupTypeDoc(domName) typ := doc.lookupTypeInfo(domName)
if typ != nil { if typ != nil {
values = &typ.values // associate with that type values = &typ.values // associate with that type
} }
@ -175,10 +176,13 @@ func setFunc(table map[string]*ast.FuncDecl, f *ast.FuncDecl) {
} }
func (doc *docReader) addFunc(fun *ast.FuncDecl) { func (doc *docReader) addFunc(fun *ast.FuncDecl) {
// strip function body
fun.Body = nil
// determine if it should be associated with a type // determine if it should be associated with a type
if fun.Recv != nil { if fun.Recv != nil {
// method // method
typ := doc.lookupTypeDoc(baseTypeName(fun.Recv.List[0].Type)) typ := doc.lookupTypeInfo(baseTypeName(fun.Recv.List[0].Type, false))
if typ != nil { if typ != nil {
// exported receiver type // exported receiver type
setFunc(typ.methods, fun) setFunc(typ.methods, fun)
@ -199,8 +203,8 @@ func (doc *docReader) addFunc(fun *ast.FuncDecl) {
// exactly one (named or anonymous) result associated // exactly one (named or anonymous) result associated
// with the first type in result signature (there may // with the first type in result signature (there may
// be more than one result) // be more than one result)
tname := baseTypeName(res.Type) tname := baseTypeName(res.Type, false)
typ := doc.lookupTypeDoc(tname) typ := doc.lookupTypeInfo(tname)
if typ != nil { if typ != nil {
// named and exported result type // named and exported result type
setFunc(typ.factories, fun) setFunc(typ.factories, fun)
@ -224,10 +228,17 @@ func (doc *docReader) addDecl(decl ast.Decl) {
case token.TYPE: case token.TYPE:
// types are handled individually // types are handled individually
for _, spec := range d.Specs { for _, spec := range d.Specs {
// make a (fake) GenDecl node for this TypeSpec tspec := spec.(*ast.TypeSpec)
// add the type to the documentation
info := doc.lookupTypeInfo(tspec.Name.Name)
if info == nil {
continue // no name - ignore the type
}
// Make a (fake) GenDecl node for this TypeSpec
// (we need to do this here - as opposed to just // (we need to do this here - as opposed to just
// for printing - so we don't lose the GenDecl // for printing - so we don't lose the GenDecl
// documentation) // documentation). Since a new GenDecl node is
// created, there's no need to nil out d.Doc.
// //
// TODO(gri): Consider just collecting the TypeSpec // TODO(gri): Consider just collecting the TypeSpec
// node (and copy in the GenDecl.doc if there is no // node (and copy in the GenDecl.doc if there is no
@ -235,8 +246,32 @@ func (doc *docReader) addDecl(decl ast.Decl) {
// makeTypeDocs below). Simpler data structures, but // makeTypeDocs below). Simpler data structures, but
// would lose GenDecl documentation if the TypeSpec // would lose GenDecl documentation if the TypeSpec
// has documentation as well. // has documentation as well.
doc.addType(&ast.GenDecl{d.Doc, d.Pos(), token.TYPE, token.NoPos, []ast.Spec{spec}, token.NoPos}) fake := &ast.GenDecl{d.Doc, d.Pos(), token.TYPE, token.NoPos,
// A new GenDecl node is created, no need to nil out d.Doc. []ast.Spec{tspec}, token.NoPos}
// A type should be added at most once, so info.decl
// should be nil - if it isn't, simply overwrite it.
info.decl = fake
// Look for anonymous fields that might contribute methods.
var fields *ast.FieldList
switch typ := spec.(*ast.TypeSpec).Type.(type) {
case *ast.StructType:
fields = typ.Fields
case *ast.InterfaceType:
fields = typ.Methods
}
if fields != nil {
for _, field := range fields.List {
if len(field.Names) == 0 {
// anonymous field - add corresponding type
// to the info and collect it in doc
name := baseTypeName(field.Type, true)
if embedded := doc.lookupTypeInfo(name); embedded != nil {
_, ptr := field.Type.(*ast.StarExpr)
info.addEmbeddedType(embedded, ptr)
}
}
}
}
} }
} }
} }
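
The anonymous-field walk added above is what feeds method promotion into the documentation: an embedded type's methods become part of the embedding type's documented method set. A minimal, self-contained illustration of the language rule being tracked (all names below are made up for the example):

    package main

    import "fmt"

    type Base struct{}

    // Hello is declared on Base.
    func (Base) Hello() string { return "hello" }

    // Wrapper embeds Base as an anonymous field, so Hello is promoted
    // into Wrapper's method set - exactly the relationship the new
    // addEmbeddedType bookkeeping records.
    type Wrapper struct {
        Base
    }

    func main() {
        var w Wrapper
        fmt.Println(w.Hello()) // "hello", via the promoted method
    }
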
@ -285,19 +320,15 @@ func (doc *docReader) addFile(src *ast.File) {
src.Comments = nil // consumed unassociated comments - remove from ast.File node src.Comments = nil // consumed unassociated comments - remove from ast.File node
} }
func NewFileDoc(file *ast.File) *PackageDoc { func NewPackageDoc(pkg *ast.Package, importpath string, exportsOnly bool) *PackageDoc {
var r docReader
r.init(file.Name.Name)
r.addFile(file)
return r.newDoc("", nil)
}
func NewPackageDoc(pkg *ast.Package, importpath string) *PackageDoc {
var r docReader var r docReader
r.init(pkg.Name) r.init(pkg.Name)
filenames := make([]string, len(pkg.Files)) filenames := make([]string, len(pkg.Files))
i := 0 i := 0
for filename, f := range pkg.Files { for filename, f := range pkg.Files {
if exportsOnly {
r.fileExports(f)
}
r.addFile(f) r.addFile(f)
filenames[i] = filename filenames[i] = filename
i++ i++
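
For context, a rough sketch of how a tool might call the widened constructor; it assumes the go/parser and go/doc APIs as they stood at this revision (NewPackageDoc was later replaced), so treat the exact calls as illustrative rather than authoritative:

    package main

    import (
        "fmt"
        "go/doc"
        "go/parser"
        "go/token"
    )

    func main() {
        fset := token.NewFileSet()
        pkgs, err := parser.ParseDir(fset, ".", nil, parser.ParseComments)
        if err != nil {
            panic(err)
        }
        for path, pkg := range pkgs {
            // exportsOnly=true strips unexported declarations before
            // the documentation is built (see fileExports below).
            d := doc.NewPackageDoc(pkg, path, true)
            fmt.Printf("%s: %d documented types\n", path, len(d.Types))
        }
    }
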
@ -397,6 +428,25 @@ func makeFuncDocs(m map[string]*ast.FuncDecl) []*FuncDoc {
return d return d
} }
type methodSet map[string]*FuncDoc
func (mset methodSet) add(m *FuncDoc) {
if mset[m.Name] == nil {
mset[m.Name] = m
}
}
func (mset methodSet) sortedList() []*FuncDoc {
list := make([]*FuncDoc, len(mset))
i := 0
for _, m := range mset {
list[i] = m
i++
}
sort.Sort(sortFuncDoc(list))
return list
}
// TypeDoc is the documentation for a declared type. // TypeDoc is the documentation for a declared type.
// Consts and Vars are sorted lists of constants and variables of (mostly) that type. // Consts and Vars are sorted lists of constants and variables of (mostly) that type.
// Factories is a sorted list of factory functions that return that type. // Factories is a sorted list of factory functions that return that type.
@ -407,7 +457,9 @@ type TypeDoc struct {
Consts []*ValueDoc Consts []*ValueDoc
Vars []*ValueDoc Vars []*ValueDoc
Factories []*FuncDoc Factories []*FuncDoc
Methods []*FuncDoc methods []*FuncDoc // top-level methods only
embedded methodSet // embedded methods only
Methods []*FuncDoc // all methods including embedded ones
Decl *ast.GenDecl Decl *ast.GenDecl
order int order int
} }
@ -429,11 +481,17 @@ func (p sortTypeDoc) Less(i, j int) bool {
// NOTE(rsc): This would appear not to be correct for type ( ) // NOTE(rsc): This would appear not to be correct for type ( )
// blocks, but the doc extractor above has split them into // blocks, but the doc extractor above has split them into
// individual declarations. // individual declarations.
func (doc *docReader) makeTypeDocs(m map[string]*typeDoc) []*TypeDoc { func (doc *docReader) makeTypeDocs(m map[string]*typeInfo) []*TypeDoc {
d := make([]*TypeDoc, len(m)) // TODO(gri) Consider computing the embedded method information
// before calling makeTypeDocs. Then this function can
// be single-phased again. Also, it might simplify some
// of the logic.
//
// phase 1: associate collected declarations with TypeDocs
list := make([]*TypeDoc, len(m))
i := 0 i := 0
for _, old := range m { for _, old := range m {
// all typeDocs should have a declaration associated with // all typeInfos should have a declaration associated with
// them after processing an entire package - be conservative // them after processing an entire package - be conservative
// and check // and check
if decl := old.decl; decl != nil { if decl := old.decl; decl != nil {
@ -451,10 +509,16 @@ func (doc *docReader) makeTypeDocs(m map[string]*typeDoc) []*TypeDoc {
t.Consts = makeValueDocs(old.values, token.CONST) t.Consts = makeValueDocs(old.values, token.CONST)
t.Vars = makeValueDocs(old.values, token.VAR) t.Vars = makeValueDocs(old.values, token.VAR)
t.Factories = makeFuncDocs(old.factories) t.Factories = makeFuncDocs(old.factories)
t.Methods = makeFuncDocs(old.methods) t.methods = makeFuncDocs(old.methods)
// The list of embedded types' methods is computed from the list
// of embedded types, some of which may not have been processed
// yet (i.e., their forward link is nil) - do this in a 2nd phase.
// The final list of methods can only be computed after that -
// do this in a 3rd phase.
t.Decl = old.decl t.Decl = old.decl
t.order = i t.order = i
d[i] = t old.forward = t // old has been processed
list[i] = t
i++ i++
} else { } else {
// no corresponding type declaration found - move any associated // no corresponding type declaration found - move any associated
@ -477,9 +541,99 @@ func (doc *docReader) makeTypeDocs(m map[string]*typeDoc) []*TypeDoc {
} }
} }
} }
d = d[0:i] // some types may have been ignored list = list[0:i] // some types may have been ignored
sort.Sort(sortTypeDoc(d))
return d // phase 2: collect embedded methods for each processed typeInfo
for _, old := range m {
if t := old.forward; t != nil {
// old has been processed into t; collect embedded
// methods for t from the list of processed embedded
// types in old (and thus for which the methods are known)
typ := t.Type
if _, ok := typ.Type.(*ast.StructType); ok {
// struct
t.embedded = make(methodSet)
collectEmbeddedMethods(t.embedded, old, typ.Name.Name)
} else {
// interface
// TODO(gri) fix this
}
}
}
// phase 3: compute final method set for each TypeDoc
for _, d := range list {
if len(d.embedded) > 0 {
// there are embedded methods - exclude
// the ones with names conflicting with
// non-embedded methods
mset := make(methodSet)
// top-level methods have priority
for _, m := range d.methods {
mset.add(m)
}
// add non-conflicting embedded methods
for _, m := range d.embedded {
mset.add(m)
}
d.Methods = mset.sortedList()
} else {
// no embedded methods
d.Methods = d.methods
}
}
sort.Sort(sortTypeDoc(list))
return list
}
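
The three phases mirror Go's own method-promotion rules: a method declared directly on the type shadows a promoted one of the same name, and shallower embeddings shadow deeper ones. A small runnable example of the conflict that phase 3 resolves:

    package main

    import "fmt"

    type Inner struct{}

    func (Inner) Name() string { return "Inner.Name" }
    func (Inner) Size() int    { return 1 }

    type Outer struct {
        Inner
    }

    // Outer's own Name shadows the promoted Inner.Name, just as the
    // top-level methods take priority over embedded ones in phase 3.
    func (Outer) Name() string { return "Outer.Name" }

    func main() {
        var o Outer
        fmt.Println(o.Name()) // "Outer.Name" - the embedded method is hidden
        fmt.Println(o.Size()) // 1 - promoted from Inner
    }
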
// collectEmbeddedMethods collects the embedded methods from all
// processed embedded types found in info in mset. It considers
// embedded types at the most shallow level first so that more
// deeply nested embedded methods with conflicting names are
// excluded.
//
func collectEmbeddedMethods(mset methodSet, info *typeInfo, recvTypeName string) {
for _, e := range info.embedded {
if e.typ.forward != nil { // == e was processed
for _, m := range e.typ.forward.methods {
mset.add(customizeRecv(m, e.ptr, recvTypeName))
}
collectEmbeddedMethods(mset, e.typ, recvTypeName)
}
}
}
func customizeRecv(m *FuncDoc, embeddedIsPtr bool, recvTypeName string) *FuncDoc {
if m == nil || m.Decl == nil || m.Decl.Recv == nil || len(m.Decl.Recv.List) != 1 {
return m // shouldn't happen, but be safe
}
// copy existing receiver field and set new type
// TODO(gri) is receiver type computation correct?
// what about deeply nested embeddings?
newField := *m.Decl.Recv.List[0]
_, origRecvIsPtr := newField.Type.(*ast.StarExpr)
var typ ast.Expr = ast.NewIdent(recvTypeName)
if embeddedIsPtr || origRecvIsPtr {
typ = &ast.StarExpr{token.NoPos, typ}
}
newField.Type = typ
// copy existing receiver field list and set new receiver field
newFieldList := *m.Decl.Recv
newFieldList.List = []*ast.Field{&newField}
// copy existing function declaration and set new receiver field list
newFuncDecl := *m.Decl
newFuncDecl.Recv = &newFieldList
// copy existing function documentation and set new declaration
newM := *m
newM.Decl = &newFuncDecl
newM.Recv = typ
return &newM
} }
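
customizeRecv only affects how a promoted method is presented: the documented receiver is rewritten to name the embedding type, and it stays a pointer if either the embedding is by pointer or the original receiver was one. A hypothetical example of the resulting presentation:

    package example

    type Base struct{}

    // Close has a pointer receiver on Base.
    func (b *Base) Close() error { return nil }

    // Wrapper embeds Base by value.
    type Wrapper struct {
        Base
    }

    // Under Wrapper, the promoted method would be documented roughly as
    //
    //    func (w *Wrapper) Close() error
    //
    // the receiver stays a pointer because origRecvIsPtr is true above.
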
func makeBugDocs(list []*ast.CommentGroup) []string { func makeBugDocs(list []*ast.CommentGroup) []string {
@ -523,104 +677,3 @@ func (doc *docReader) newDoc(importpath string, filenames []string) *PackageDoc
p.Bugs = makeBugDocs(doc.bugs) p.Bugs = makeBugDocs(doc.bugs)
return p return p
} }
// ----------------------------------------------------------------------------
// Filtering by name
type Filter func(string) bool
func matchFields(fields *ast.FieldList, f Filter) bool {
if fields != nil {
for _, field := range fields.List {
for _, name := range field.Names {
if f(name.Name) {
return true
}
}
}
}
return false
}
func matchDecl(d *ast.GenDecl, f Filter) bool {
for _, d := range d.Specs {
switch v := d.(type) {
case *ast.ValueSpec:
for _, name := range v.Names {
if f(name.Name) {
return true
}
}
case *ast.TypeSpec:
if f(v.Name.Name) {
return true
}
switch t := v.Type.(type) {
case *ast.StructType:
if matchFields(t.Fields, f) {
return true
}
case *ast.InterfaceType:
if matchFields(t.Methods, f) {
return true
}
}
}
}
return false
}
func filterValueDocs(a []*ValueDoc, f Filter) []*ValueDoc {
w := 0
for _, vd := range a {
if matchDecl(vd.Decl, f) {
a[w] = vd
w++
}
}
return a[0:w]
}
func filterFuncDocs(a []*FuncDoc, f Filter) []*FuncDoc {
w := 0
for _, fd := range a {
if f(fd.Name) {
a[w] = fd
w++
}
}
return a[0:w]
}
func filterTypeDocs(a []*TypeDoc, f Filter) []*TypeDoc {
w := 0
for _, td := range a {
n := 0 // number of matches
if matchDecl(td.Decl, f) {
n = 1
} else {
// type name doesn't match, but we may have matching consts, vars, factories or methods
td.Consts = filterValueDocs(td.Consts, f)
td.Vars = filterValueDocs(td.Vars, f)
td.Factories = filterFuncDocs(td.Factories, f)
td.Methods = filterFuncDocs(td.Methods, f)
n += len(td.Consts) + len(td.Vars) + len(td.Factories) + len(td.Methods)
}
if n > 0 {
a[w] = td
w++
}
}
return a[0:w]
}
// Filter eliminates documentation for names that don't pass through the filter f.
// TODO: Recognize "Type.Method" as a name.
//
func (p *PackageDoc) Filter(f Filter) {
p.Consts = filterValueDocs(p.Consts, f)
p.Vars = filterValueDocs(p.Vars, f)
p.Types = filterTypeDocs(p.Types, f)
p.Funcs = filterFuncDocs(p.Funcs, f)
p.Doc = "" // don't show top-level package doc
}

libgo/go/go/doc/exports.go Normal file
@ -0,0 +1,167 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file implements export filtering of an AST.
package doc
import "go/ast"
func filterIdentList(list []*ast.Ident) []*ast.Ident {
j := 0
for _, x := range list {
if ast.IsExported(x.Name) {
list[j] = x
j++
}
}
return list[0:j]
}
func baseName(x ast.Expr) *ast.Ident {
switch t := x.(type) {
case *ast.Ident:
return t
case *ast.SelectorExpr:
if _, ok := t.X.(*ast.Ident); ok {
return t.Sel
}
case *ast.StarExpr:
return baseName(t.X)
}
return nil
}
func (doc *docReader) filterFieldList(fields *ast.FieldList) (removedFields bool) {
if fields == nil {
return false
}
list := fields.List
j := 0
for _, f := range list {
keepField := false
if len(f.Names) == 0 {
// anonymous field
name := baseName(f.Type)
keepField = name != nil && name.IsExported()
} else {
n := len(f.Names)
f.Names = filterIdentList(f.Names)
if len(f.Names) < n {
removedFields = true
}
keepField = len(f.Names) > 0
}
if keepField {
doc.filterType(f.Type)
list[j] = f
j++
}
}
if j < len(list) {
removedFields = true
}
fields.List = list[0:j]
return
}
func (doc *docReader) filterParamList(fields *ast.FieldList) bool {
if fields == nil {
return false
}
var b bool
for _, f := range fields.List {
if doc.filterType(f.Type) {
b = true
}
}
return b
}
func (doc *docReader) filterType(typ ast.Expr) bool {
switch t := typ.(type) {
case *ast.Ident:
return ast.IsExported(t.Name)
case *ast.ParenExpr:
return doc.filterType(t.X)
case *ast.ArrayType:
return doc.filterType(t.Elt)
case *ast.StructType:
if doc.filterFieldList(t.Fields) {
t.Incomplete = true
}
return len(t.Fields.List) > 0
case *ast.FuncType:
b1 := doc.filterParamList(t.Params)
b2 := doc.filterParamList(t.Results)
return b1 || b2
case *ast.InterfaceType:
if doc.filterFieldList(t.Methods) {
t.Incomplete = true
}
return len(t.Methods.List) > 0
case *ast.MapType:
b1 := doc.filterType(t.Key)
b2 := doc.filterType(t.Value)
return b1 || b2
case *ast.ChanType:
return doc.filterType(t.Value)
}
return false
}
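
Taken together, filterFieldList and filterType drop unexported names in place and mark the enclosing struct or interface as Incomplete when anything was removed. A hypothetical before/after, with the printed form only approximated here:

    package example

    // Before export filtering:
    type Config struct {
        Addr    string // exported - kept
        Timeout int    // exported - kept
        dirty   bool   // unexported - removed; Fields marked Incomplete
    }

    // After filtering, the documentation output corresponds roughly to:
    //
    //    type Config struct {
    //        Addr    string
    //        Timeout int
    //        // contains filtered or unexported fields
    //    }
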
func (doc *docReader) filterSpec(spec ast.Spec) bool {
switch s := spec.(type) {
case *ast.ValueSpec:
s.Names = filterIdentList(s.Names)
if len(s.Names) > 0 {
doc.filterType(s.Type)
return true
}
case *ast.TypeSpec:
if ast.IsExported(s.Name.Name) {
doc.filterType(s.Type)
return true
}
}
return false
}
func (doc *docReader) filterSpecList(list []ast.Spec) []ast.Spec {
j := 0
for _, s := range list {
if doc.filterSpec(s) {
list[j] = s
j++
}
}
return list[0:j]
}
func (doc *docReader) filterDecl(decl ast.Decl) bool {
switch d := decl.(type) {
case *ast.GenDecl:
d.Specs = doc.filterSpecList(d.Specs)
return len(d.Specs) > 0
case *ast.FuncDecl:
return ast.IsExported(d.Name.Name)
}
return false
}
// fileExports trims the AST for a Go file in place such that
// only exported nodes remain. fileExports returns true if
// there are exported declarations; otherwise it returns false.
//
func (doc *docReader) fileExports(src *ast.File) bool {
j := 0
for _, d := range src.Decls {
if doc.filterDecl(d) {
src.Decls[j] = d
j++
}
}
src.Decls = src.Decls[0:j]
return j > 0
}

libgo/go/go/doc/filter.go Normal file
@ -0,0 +1,105 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package doc
import "go/ast"
type Filter func(string) bool
func matchFields(fields *ast.FieldList, f Filter) bool {
if fields != nil {
for _, field := range fields.List {
for _, name := range field.Names {
if f(name.Name) {
return true
}
}
}
}
return false
}
func matchDecl(d *ast.GenDecl, f Filter) bool {
for _, d := range d.Specs {
switch v := d.(type) {
case *ast.ValueSpec:
for _, name := range v.Names {
if f(name.Name) {
return true
}
}
case *ast.TypeSpec:
if f(v.Name.Name) {
return true
}
switch t := v.Type.(type) {
case *ast.StructType:
if matchFields(t.Fields, f) {
return true
}
case *ast.InterfaceType:
if matchFields(t.Methods, f) {
return true
}
}
}
}
return false
}
func filterValueDocs(a []*ValueDoc, f Filter) []*ValueDoc {
w := 0
for _, vd := range a {
if matchDecl(vd.Decl, f) {
a[w] = vd
w++
}
}
return a[0:w]
}
func filterFuncDocs(a []*FuncDoc, f Filter) []*FuncDoc {
w := 0
for _, fd := range a {
if f(fd.Name) {
a[w] = fd
w++
}
}
return a[0:w]
}
func filterTypeDocs(a []*TypeDoc, f Filter) []*TypeDoc {
w := 0
for _, td := range a {
n := 0 // number of matches
if matchDecl(td.Decl, f) {
n = 1
} else {
// type name doesn't match, but we may have matching consts, vars, factories or methods
td.Consts = filterValueDocs(td.Consts, f)
td.Vars = filterValueDocs(td.Vars, f)
td.Factories = filterFuncDocs(td.Factories, f)
td.Methods = filterFuncDocs(td.Methods, f)
n += len(td.Consts) + len(td.Vars) + len(td.Factories) + len(td.Methods)
}
if n > 0 {
a[w] = td
w++
}
}
return a[0:w]
}
// Filter eliminates documentation for names that don't pass through the filter f.
// TODO: Recognize "Type.Method" as a name.
//
func (p *PackageDoc) Filter(f Filter) {
p.Consts = filterValueDocs(p.Consts, f)
p.Vars = filterValueDocs(p.Vars, f)
p.Types = filterTypeDocs(p.Types, f)
p.Funcs = filterFuncDocs(p.Funcs, f)
p.Doc = "" // don't show top-level package doc
}

@ -144,28 +144,31 @@ func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjK
} }
} }
func (p *parser) shortVarDecl(idents []*ast.Ident) { func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
// Go spec: A short variable declaration may redeclare variables // Go spec: A short variable declaration may redeclare variables
// provided they were originally declared in the same block with // provided they were originally declared in the same block with
// the same type, and at least one of the non-blank variables is new. // the same type, and at least one of the non-blank variables is new.
n := 0 // number of new variables n := 0 // number of new variables
for _, ident := range idents { for _, x := range list {
assert(ident.Obj == nil, "identifier already declared or resolved") if ident, isIdent := x.(*ast.Ident); isIdent {
obj := ast.NewObj(ast.Var, ident.Name) assert(ident.Obj == nil, "identifier already declared or resolved")
// short var declarations cannot have redeclaration errors obj := ast.NewObj(ast.Var, ident.Name)
// and are not global => no need to remember the respective // remember corresponding assignment for other tools
// declaration obj.Decl = decl
ident.Obj = obj ident.Obj = obj
if ident.Name != "_" { if ident.Name != "_" {
if alt := p.topScope.Insert(obj); alt != nil { if alt := p.topScope.Insert(obj); alt != nil {
ident.Obj = alt // redeclaration ident.Obj = alt // redeclaration
} else { } else {
n++ // new declaration n++ // new declaration
}
} }
} else {
p.errorExpected(x.Pos(), "identifier")
} }
} }
if n == 0 && p.mode&DeclarationErrors != 0 { if n == 0 && p.mode&DeclarationErrors != 0 {
p.error(idents[0].Pos(), "no new variables on left side of :=") p.error(list[0].Pos(), "no new variables on left side of :=")
} }
} }
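
The rule enforced here is the usual short-variable-declaration semantics: at least one non-blank name on the left of := must be new, while the rest are silently reused; the new obj.Decl link additionally lets other tools walk from such an identifier back to its defining assignment. For example:

    package main

    import (
        "fmt"
        "os"
    )

    func main() {
        f, err := os.Open("a.txt") // declares both f and err
        g, err := os.Open("b.txt") // ok: g is new, err is only reassigned
        if err != nil {
            fmt.Println(err)
        }
        _, _ = f, g

        // f, err := os.Open("c.txt") // would not compile:
        // "no new variables on left side of :="
    }
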
@ -522,7 +525,7 @@ func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
for i, x := range list { for i, x := range list {
ident, isIdent := x.(*ast.Ident) ident, isIdent := x.(*ast.Ident)
if !isIdent { if !isIdent {
pos := x.(ast.Expr).Pos() pos := x.Pos()
p.errorExpected(pos, "identifier") p.errorExpected(pos, "identifier")
ident = &ast.Ident{pos, "_", nil} ident = &ast.Ident{pos, "_", nil}
} }
@ -1400,10 +1403,11 @@ func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
} else { } else {
y = p.parseRhsList() y = p.parseRhsList()
} }
as := &ast.AssignStmt{x, pos, tok, y}
if tok == token.DEFINE { if tok == token.DEFINE {
p.shortVarDecl(p.makeIdentList(x)) p.shortVarDecl(as, x)
} }
return &ast.AssignStmt{x, pos, tok, y}, isRange return as, isRange
} }
if len(x) > 1 { if len(x) > 1 {
@ -1715,34 +1719,28 @@ func (p *parser) parseCommClause() *ast.CommClause {
comm = &ast.SendStmt{lhs[0], arrow, rhs} comm = &ast.SendStmt{lhs[0], arrow, rhs}
} else { } else {
// RecvStmt // RecvStmt
pos := p.pos if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
tok := p.tok
var rhs ast.Expr
if tok == token.ASSIGN || tok == token.DEFINE {
// RecvStmt with assignment // RecvStmt with assignment
if len(lhs) > 2 { if len(lhs) > 2 {
p.errorExpected(lhs[0].Pos(), "1 or 2 expressions") p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
// continue with first two expressions // continue with first two expressions
lhs = lhs[0:2] lhs = lhs[0:2]
} }
pos := p.pos
p.next() p.next()
rhs = p.parseRhs() rhs := p.parseRhs()
if tok == token.DEFINE && lhs != nil { as := &ast.AssignStmt{lhs, pos, tok, []ast.Expr{rhs}}
p.shortVarDecl(p.makeIdentList(lhs)) if tok == token.DEFINE {
p.shortVarDecl(as, lhs)
} }
comm = as
} else { } else {
// rhs must be single receive operation // lhs must be single receive operation
if len(lhs) > 1 { if len(lhs) > 1 {
p.errorExpected(lhs[0].Pos(), "1 expression") p.errorExpected(lhs[0].Pos(), "1 expression")
// continue with first expression // continue with first expression
} }
rhs = lhs[0] comm = &ast.ExprStmt{lhs[0]}
lhs = nil // there is no lhs
}
if lhs != nil {
comm = &ast.AssignStmt{lhs, pos, tok, []ast.Expr{rhs}}
} else {
comm = &ast.ExprStmt{rhs}
} }
} }
} else { } else {
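
After this change a select communication clause is built either as an *ast.AssignStmt (receive with = or :=, with := cases also registered via shortVarDecl) or as a plain *ast.ExprStmt for a bare receive. The two source shapes involved:

    package main

    import "fmt"

    func main() {
        ch := make(chan int, 1)

        ch <- 42
        select {
        case v, ok := <-ch: // parsed as an *ast.AssignStmt with token.DEFINE
            fmt.Println(v, ok)
        }

        ch <- 7
        select {
        case <-ch: // bare receive: parsed as an *ast.ExprStmt
            fmt.Println("received")
        }
    }
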

@ -39,7 +39,10 @@ import (
// future (not yet interspersed) comments in this function. // future (not yet interspersed) comments in this function.
// //
func (p *printer) linebreak(line, min int, ws whiteSpace, newSection bool) (printedBreak bool) { func (p *printer) linebreak(line, min int, ws whiteSpace, newSection bool) (printedBreak bool) {
n := p.nlines(line-p.pos.Line, min) n := nlimit(line - p.pos.Line)
if n < min {
n = min
}
if n > 0 { if n > 0 {
p.print(ws) p.print(ws)
if newSection { if newSection {
@ -361,9 +364,10 @@ func (p *printer) fieldList(fields *ast.FieldList, isStruct, isIncomplete bool)
lbrace := fields.Opening lbrace := fields.Opening
list := fields.List list := fields.List
rbrace := fields.Closing rbrace := fields.Closing
hasComments := isIncomplete || p.commentBefore(p.fset.Position(rbrace))
srcIsOneLine := lbrace.IsValid() && rbrace.IsValid() && p.fset.Position(lbrace).Line == p.fset.Position(rbrace).Line srcIsOneLine := lbrace.IsValid() && rbrace.IsValid() && p.fset.Position(lbrace).Line == p.fset.Position(rbrace).Line
if !isIncomplete && !p.commentBefore(p.fset.Position(rbrace)) && srcIsOneLine { if !hasComments && srcIsOneLine {
// possibly a one-line struct/interface // possibly a one-line struct/interface
if len(list) == 0 { if len(list) == 0 {
// no blank between keyword and {} in this case // no blank between keyword and {} in this case
@ -388,9 +392,13 @@ func (p *printer) fieldList(fields *ast.FieldList, isStruct, isIncomplete bool)
return return
} }
} }
// hasComments || !srcIsOneLine
p.print(blank, lbrace, token.LBRACE, indent)
if hasComments || len(list) > 0 {
p.print(formfeed)
}
// at least one entry or incomplete
p.print(blank, lbrace, token.LBRACE, indent, formfeed)
if isStruct { if isStruct {
sep := vtab sep := vtab
@ -1509,9 +1517,14 @@ func (p *printer) file(src *ast.File) {
prev := tok prev := tok
tok = declToken(d) tok = declToken(d)
// if the declaration token changed (e.g., from CONST to TYPE) // if the declaration token changed (e.g., from CONST to TYPE)
// or the next declaration has documentation associated with it,
// print an empty line between top-level declarations // print an empty line between top-level declarations
// (because p.linebreak is called with the position of d, which
// is past any documentation, the minimum requirement is satisfied
// even w/o the extra getDoc(d) nil-check - leave it in case the
// linebreak logic improves - there's already a TODO).
min := 1 min := 1
if prev != tok { if prev != tok || getDoc(d) != nil {
min = 2 min = 2
} }
p.linebreak(p.fset.Position(d.Pos()).Line, min, ignore, false) p.linebreak(p.fset.Position(d.Pos()).Line, min, ignore, false)

@ -18,8 +18,11 @@ import (
"text/tabwriter" "text/tabwriter"
) )
const debug = false // enable for debugging const (
const infinity = 1 << 30 maxNewlines = 2 // max. number of newlines between source text
debug = false // enable for debugging
infinity = 1 << 30
)
type whiteSpace byte type whiteSpace byte
@ -89,21 +92,7 @@ func (p *printer) internalError(msg ...interface{}) {
} }
} }
// nlines returns the adjusted number of linebreaks given the desired number // writeByte writes ch to p.output and updates p.pos.
// of breaks n such that min <= result <= max.
//
func (p *printer) nlines(n, min int) int {
const max = 2 // max. number of newlines
switch {
case n < min:
return min
case n > max:
return max
}
return n
}
// writeByte writes a single byte to p.output and updates p.pos.
func (p *printer) writeByte(ch byte) { func (p *printer) writeByte(ch byte) {
p.output.WriteByte(ch) p.output.WriteByte(ch)
p.pos.Offset++ p.pos.Offset++
@ -128,13 +117,11 @@ func (p *printer) writeByte(ch byte) {
} }
} }
// writeNewlines writes up to n newlines to p.output and updates p.pos. // writeByteN writes ch n times to p.output and updates p.pos.
// The actual number of newlines written is limited by nlines. func (p *printer) writeByteN(ch byte, n int) {
// nl must be one of '\n' or '\f'. for n > 0 {
// p.writeByte(ch)
func (p *printer) writeNewlines(n int, nl byte) { n--
for n = p.nlines(n, 0); n > 0; n-- {
p.writeByte(nl)
} }
} }
@ -223,8 +210,8 @@ func (p *printer) writeCommentPrefix(pos, next token.Position, prev, comment *as
} }
if pos.IsValid() && pos.Filename != p.last.Filename { if pos.IsValid() && pos.Filename != p.last.Filename {
// comment in a different file - separate with newlines (writeNewlines will limit the number) // comment in a different file - separate with newlines
p.writeNewlines(10, '\f') p.writeByteN('\f', maxNewlines)
return return
} }
@ -270,6 +257,7 @@ func (p *printer) writeCommentPrefix(pos, next token.Position, prev, comment *as
} else { } else {
// comment on a different line: // comment on a different line:
// separate with at least one line break // separate with at least one line break
droppedLinebreak := false
if prev == nil { if prev == nil {
// first comment of a comment group // first comment of a comment group
j := 0 j := 0
@ -295,6 +283,7 @@ func (p *printer) writeCommentPrefix(pos, next token.Position, prev, comment *as
case newline, formfeed: case newline, formfeed:
// TODO(gri): may want to keep formfeed info in some cases // TODO(gri): may want to keep formfeed info in some cases
p.wsbuf[i] = ignore p.wsbuf[i] = ignore
droppedLinebreak = true
} }
j = i j = i
break break
@ -302,25 +291,41 @@ func (p *printer) writeCommentPrefix(pos, next token.Position, prev, comment *as
p.writeWhitespace(j) p.writeWhitespace(j)
} }
// turn off indent if we're about to print a line directive. // determine number of linebreaks before the comment
indent := p.indent n := 0
if strings.HasPrefix(comment.Text, linePrefix) { if pos.IsValid() && p.last.IsValid() {
p.indent = 0 n = pos.Line - p.last.Line
if n < 0 { // should never happen
n = 0
}
} }
// use formfeeds to break columns before a comment; // at the package scope level only (p.indent == 0),
// this is analogous to using formfeeds to separate // add an extra newline if we dropped one before:
// individual lines of /*-style comments - but make // this preserves a blank line before documentation
// sure there is at least one line break if the previous // comments at the package scope level (issue 2570)
// comment was a line comment if p.indent == 0 && droppedLinebreak {
n := pos.Line - p.last.Line // if !pos.IsValid(), pos.Line == 0, and n will be 0 n++
if n <= 0 && prev != nil && prev.Text[1] == '/' { }
// make sure there is at least one line break
// if the previous comment was a line comment
if n == 0 && prev != nil && prev.Text[1] == '/' {
n = 1 n = 1
} }
if n > 0 { if n > 0 {
p.writeNewlines(n, '\f') // turn off indent if we're about to print a line directive
indent := p.indent
if strings.HasPrefix(comment.Text, linePrefix) {
p.indent = 0
}
// use formfeeds to break columns before a comment;
// this is analogous to using formfeeds to separate
// individual lines of /*-style comments
p.writeByteN('\f', nlimit(n))
p.indent = indent // restore indent
} }
p.indent = indent
} }
} }
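
The droppedLinebreak bookkeeping exists so that a blank line separating one declaration from the documentation comment of the next is kept at package scope (issue 2570). With this change, re-printing source shaped like the illustrative snippet below keeps the empty line instead of pulling the comment up against T1:

    package example

    type T1 struct{}

    // T2 has a documentation comment; the blank line above it is now
    // preserved when the file is re-printed.
    type T2 struct{}
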
@ -550,10 +555,11 @@ func (p *printer) writeComment(comment *ast.Comment) {
// writeCommentSuffix writes a line break after a comment if indicated // writeCommentSuffix writes a line break after a comment if indicated
// and processes any leftover indentation information. If a line break // and processes any leftover indentation information. If a line break
// is needed, the kind of break (newline vs formfeed) depends on the // is needed, the kind of break (newline vs formfeed) depends on the
// pending whitespace. writeCommentSuffix returns true if a pending // pending whitespace. The writeCommentSuffix result indicates if a
// formfeed was dropped from the whitespace buffer. // newline was written or if a formfeed was dropped from the whitespace
// buffer.
// //
func (p *printer) writeCommentSuffix(needsLinebreak bool) (droppedFF bool) { func (p *printer) writeCommentSuffix(needsLinebreak bool) (wroteNewline, droppedFF bool) {
for i, ch := range p.wsbuf { for i, ch := range p.wsbuf {
switch ch { switch ch {
case blank, vtab: case blank, vtab:
@ -566,6 +572,7 @@ func (p *printer) writeCommentSuffix(needsLinebreak bool) (droppedFF bool) {
// but remember if we dropped any formfeeds // but remember if we dropped any formfeeds
if needsLinebreak { if needsLinebreak {
needsLinebreak = false needsLinebreak = false
wroteNewline = true
} else { } else {
if ch == formfeed { if ch == formfeed {
droppedFF = true droppedFF = true
@ -579,6 +586,7 @@ func (p *printer) writeCommentSuffix(needsLinebreak bool) (droppedFF bool) {
// make sure we have a line break // make sure we have a line break
if needsLinebreak { if needsLinebreak {
p.writeByte('\n') p.writeByte('\n')
wroteNewline = true
} }
return return
@ -587,10 +595,10 @@ func (p *printer) writeCommentSuffix(needsLinebreak bool) (droppedFF bool) {
// intersperseComments consumes all comments that appear before the next token // intersperseComments consumes all comments that appear before the next token
// tok and prints it together with the buffered whitespace (i.e., the whitespace // tok and prints it together with the buffered whitespace (i.e., the whitespace
// that needs to be written before the next token). A heuristic is used to mix // that needs to be written before the next token). A heuristic is used to mix
// the comments and whitespace. intersperseComments returns true if a pending // the comments and whitespace. The intersperseComments result indicates if a
// formfeed was dropped from the whitespace buffer. // newline was written or if a formfeed was dropped from the whitespace buffer.
// //
func (p *printer) intersperseComments(next token.Position, tok token.Token) (droppedFF bool) { func (p *printer) intersperseComments(next token.Position, tok token.Token) (wroteNewline, droppedFF bool) {
var last *ast.Comment var last *ast.Comment
for ; p.commentBefore(next); p.cindex++ { for ; p.commentBefore(next); p.cindex++ {
for _, c := range p.comments[p.cindex].List { for _, c := range p.comments[p.cindex].List {
@ -618,7 +626,7 @@ func (p *printer) intersperseComments(next token.Position, tok token.Token) (dro
// no comment was written - we should never reach here since // no comment was written - we should never reach here since
// intersperseComments should not be called in that case // intersperseComments should not be called in that case
p.internalError("intersperseComments called without pending comments") p.internalError("intersperseComments called without pending comments")
return false return
} }
// whiteWhitespace writes the first n whitespace entries. // whiteWhitespace writes the first n whitespace entries.
@ -671,6 +679,14 @@ func (p *printer) writeWhitespace(n int) {
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// Printing interface // Printing interface
// nlimit limits n to maxNewlines. // nlimit limits n to maxNewlines.
func nlimit(n int) int {
if n > maxNewlines {
n = maxNewlines
}
return n
}
func mayCombine(prev token.Token, next byte) (b bool) { func mayCombine(prev token.Token, next byte) (b bool) {
switch prev { switch prev {
case token.INT: case token.INT:
@ -765,17 +781,22 @@ func (p *printer) print(args ...interface{}) {
p.pos = next p.pos = next
if data != "" { if data != "" {
nl := byte('\n') wroteNewline, droppedFF := p.flush(next, tok)
if p.flush(next, tok) {
nl = '\f' // dropped formfeed before
}
// intersperse extra newlines if present in the source // intersperse extra newlines if present in the source
// (don't do this in flush as it will cause extra newlines // (don't do this in flush as it will cause extra newlines
// at the end of a file) - use formfeeds if we dropped one // at the end of a file)
// before n := nlimit(next.Line - p.pos.Line)
if n := next.Line - p.pos.Line; n > 0 { // don't exceed maxNewlines if we already wrote one
p.writeNewlines(n, nl) if wroteNewline && n == maxNewlines {
n = maxNewlines - 1
}
if n > 0 {
ch := byte('\n')
if droppedFF {
ch = '\f' // use formfeed since we dropped one before
}
p.writeByteN(ch, n)
} }
p.writeItem(next, data, isLit) p.writeItem(next, data, isLit)
@ -790,16 +811,15 @@ func (p *printer) commentBefore(next token.Position) bool {
return p.cindex < len(p.comments) && p.fset.Position(p.comments[p.cindex].List[0].Pos()).Offset < next.Offset return p.cindex < len(p.comments) && p.fset.Position(p.comments[p.cindex].List[0].Pos()).Offset < next.Offset
} }
// Flush prints any pending comments and whitespace occurring // Flush prints any pending comments and whitespace occurring textually
// textually before the position of the next token tok. Flush // before the position of the next token tok. The Flush result indicates
// returns true if a pending formfeed character was dropped // if a newline was written or if a formfeed was dropped from the whitespace
// from the whitespace buffer as a result of interspersing // buffer.
// comments.
// //
func (p *printer) flush(next token.Position, tok token.Token) (droppedFF bool) { func (p *printer) flush(next token.Position, tok token.Token) (wroteNewline, droppedFF bool) {
if p.commentBefore(next) { if p.commentBefore(next) {
// if there are comments before the next item, intersperse them // if there are comments before the next item, intersperse them
droppedFF = p.intersperseComments(next, tok) wroteNewline, droppedFF = p.intersperseComments(next, tok)
} else { } else {
// otherwise, write any leftover whitespace // otherwise, write any leftover whitespace
p.writeWhitespace(len(p.wsbuf)) p.writeWhitespace(len(p.wsbuf))
@ -810,7 +830,8 @@ func (p *printer) flush(next token.Position, tok token.Token) (droppedFF bool) {
// getNode returns the ast.CommentGroup associated with n, if any. // getNode returns the ast.CommentGroup associated with n, if any.
func getDoc(n ast.Node) *ast.CommentGroup { func getDoc(n ast.Node) *ast.CommentGroup {
switch n := n.(type) { switch n := n.(type) {
// *ast.Fields cannot be printed separately - ignore for now case *ast.Field:
return n.Doc
case *ast.ImportSpec: case *ast.ImportSpec:
return n.Doc return n.Doc
case *ast.ValueSpec: case *ast.ValueSpec:

@ -106,7 +106,7 @@ type S3 struct {
var x int // x var x int // x
var () var ()
// This comment SHOULD be associated with the next declaration. // This comment SHOULD be associated with f0.
func f0() { func f0() {
const pi = 3.14 // pi const pi = 3.14 // pi
var s1 struct{} /* an empty struct */ /* foo */ var s1 struct{} /* an empty struct */ /* foo */
@ -115,8 +115,9 @@ func f0() {
var s2 struct{} = struct{}{} var s2 struct{} = struct{}{}
x := pi x := pi
} }
// //
// NO SPACE HERE // This comment should be associated with f1, with one blank line before the comment.
// //
func f1() { func f1() {
f0() f0()

@ -107,7 +107,7 @@ var x int // x
var () var ()
// This comment SHOULD be associated with the next declaration. // This comment SHOULD be associated with f0.
func f0() { func f0() {
const pi = 3.14 // pi const pi = 3.14 // pi
var s1 struct {} /* an empty struct */ /* foo */ var s1 struct {} /* an empty struct */ /* foo */
@ -117,7 +117,7 @@ func f0() {
x := pi x := pi
} }
// //
// NO SPACE HERE // This comment should be associated with f1, with one blank line before the comment.
// //
func f1() { func f1() {
f0() f0()
@ -130,7 +130,7 @@ func f1() {
func _() { func _() {
// this comment should be properly indented // this comment should be properly indented
} }

@ -115,6 +115,18 @@ import _ "io"
var _ int var _ int
// at least one empty line between declarations of the same kind
// if there is associated documentation (was issue 2570)
type T1 struct{}
// T2 comment
type T2 struct {
} // should be a two-line struct
// T3 comment
type T2 struct {
} // should be a two-line struct
// printing of constant literals // printing of constant literals
const ( const (
_ = "foobar" _ = "foobar"
@ -286,6 +298,15 @@ type _ struct {
} }
} }
// no blank lines in empty structs and interfaces, but leave 1- or 2-line layout alone
type _ struct{}
type _ struct {
}
type _ interface{}
type _ interface {
}
// no tabs for single or ungrouped decls // no tabs for single or ungrouped decls
func _() { func _() {
const xxxxxx = 0 const xxxxxx = 0

@ -115,6 +115,20 @@ import (
import _ "io" import _ "io"
var _ int var _ int
// at least one empty line between declarations of the same kind
// if there is associated documentation (was issue 2570)
type T1 struct{}
// T2 comment
type T2 struct {
} // should be a two-line struct
// T3 comment
type T2 struct {
} // should be a two-line struct
// printing of constant literals // printing of constant literals
const ( const (
@ -293,6 +307,18 @@ type _ struct {
} }
// no blank lines in empty structs and interfaces, but leave 1- or 2-line layout alone
type _ struct{ }
type _ struct {
}
type _ interface{ }
type _ interface {
}
// no tabs for single or ungrouped decls // no tabs for single or ungrouped decls
func _() { func _() {
const xxxxxx = 0 const xxxxxx = 0

@ -271,7 +271,6 @@ func _() {
// Known bug: The first use call may have more than one empty line before // Known bug: The first use call may have more than one empty line before
// (see go/printer/nodes.go, func linebreak). // (see go/printer/nodes.go, func linebreak).
use(x) use(x)
if x < x { if x < x {
@ -386,7 +385,6 @@ L: // A comment on the same line as the label, followed by a single empty line.
// Known bug: There may be more than one empty line before MoreCode() // Known bug: There may be more than one empty line before MoreCode()
// (see go/printer/nodes.go, func linebreak). // (see go/printer/nodes.go, func linebreak).
MoreCode() MoreCode()
} }

@ -426,13 +426,16 @@ func (S *Scanner) scanString() {
S.next() S.next()
} }
func (S *Scanner) scanRawString() { func (S *Scanner) scanRawString() (hasCR bool) {
// '`' opening already consumed // '`' opening already consumed
offs := S.offset - 1 offs := S.offset - 1
for S.ch != '`' { for S.ch != '`' {
ch := S.ch ch := S.ch
S.next() S.next()
if ch == '\r' {
hasCR = true
}
if ch < 0 { if ch < 0 {
S.error(offs, "string not terminated") S.error(offs, "string not terminated")
break break
@ -440,6 +443,7 @@ func (S *Scanner) scanRawString() {
} }
S.next() S.next()
return
} }
func (S *Scanner) skipWhitespace() { func (S *Scanner) skipWhitespace() {
@ -490,6 +494,18 @@ func (S *Scanner) switch4(tok0, tok1 token.Token, ch2 rune, tok2, tok3 token.Tok
return tok0 return tok0
} }
func stripCR(b []byte) []byte {
c := make([]byte, len(b))
i := 0
for _, ch := range b {
if ch != '\r' {
c[i] = ch
i++
}
}
return c[:i]
}
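
Carriage returns are discarded from the value of a raw string literal, so the scanner now notes whether a '\r' was seen and strips it from the literal it returns. A quick way to observe the new behaviour, assuming the go/scanner API of this period:

    package main

    import (
        "fmt"
        "go/scanner"
        "go/token"
    )

    func main() {
        src := []byte("`first\r\nsecond`") // raw string literal containing a CR
        fset := token.NewFileSet()
        file := fset.AddFile("x.go", fset.Base(), len(src))

        var s scanner.Scanner
        s.Init(file, src, nil, 0)
        _, tok, lit := s.Scan()
        fmt.Printf("%s %q\n", tok, lit) // STRING "`first\nsecond`" - the CR is gone
    }
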
// Scan scans the next token and returns the token position, // Scan scans the next token and returns the token position,
// the token, and the literal string corresponding to the // the token, and the literal string corresponding to the
// token. The source end is indicated by token.EOF. // token. The source end is indicated by token.EOF.
@ -518,6 +534,7 @@ scanAgain:
insertSemi := false insertSemi := false
offs := S.offset offs := S.offset
tok := token.ILLEGAL tok := token.ILLEGAL
hasCR := false
// determine token value // determine token value
switch ch := S.ch; { switch ch := S.ch; {
@ -556,7 +573,7 @@ scanAgain:
case '`': case '`':
insertSemi = true insertSemi = true
tok = token.STRING tok = token.STRING
S.scanRawString() hasCR = S.scanRawString()
case ':': case ':':
tok = S.switch2(token.COLON, token.DEFINE) tok = S.switch2(token.COLON, token.DEFINE)
case '.': case '.':
@ -663,5 +680,9 @@ scanAgain:
// TODO(gri): The scanner API should change such that the literal string // TODO(gri): The scanner API should change such that the literal string
// is only valid if an actual literal was scanned. This will // is only valid if an actual literal was scanned. This will
// permit a more efficient implementation. // permit a more efficient implementation.
return S.file.Pos(offs), tok, string(S.src[offs:S.offset]) lit := S.src[offs:S.offset]
if hasCR {
lit = stripCR(lit)
}
return S.file.Pos(offs), tok, string(lit)
} }

@ -83,6 +83,8 @@ var tokens = [...]elt{
"`", "`",
literal, literal,
}, },
{token.STRING, "`\r`", literal},
{token.STRING, "`foo\r\nbar`", literal},
// Operators and delimiters // Operators and delimiters
{token.ADD, "+", operator}, {token.ADD, "+", operator},
@ -239,8 +241,16 @@ func TestScan(t *testing.T) {
if tok != e.tok { if tok != e.tok {
t.Errorf("bad token for %q: got %s, expected %s", lit, tok, e.tok) t.Errorf("bad token for %q: got %s, expected %s", lit, tok, e.tok)
} }
if e.tok.IsLiteral() && lit != e.lit { if e.tok.IsLiteral() {
t.Errorf("bad literal for %q: got %q, expected %q", lit, lit, e.lit) // no CRs in raw string literals
elit := e.lit
if elit[0] == '`' {
elit = string(stripCR([]byte(elit)))
epos.Offset += len(e.lit) - len(lit) // correct position
}
if lit != elit {
t.Errorf("bad literal for %q: got %q, expected %q", lit, lit, elit)
}
} }
if tokenclass(tok) != e.class { if tokenclass(tok) != e.class {
t.Errorf("bad class for %q: got %d, expected %d", lit, tokenclass(tok), e.class) t.Errorf("bad class for %q: got %d, expected %d", lit, tokenclass(tok), e.class)

@ -7,7 +7,7 @@ package html
// Section 12.2.3.2 of the HTML5 specification says "The following elements // Section 12.2.3.2 of the HTML5 specification says "The following elements
// have varying levels of special parsing rules". // have varying levels of special parsing rules".
// http://www.whatwg.org/specs/web-apps/current-work/multipage/parsing.html#the-stack-of-open-elements // http://www.whatwg.org/specs/web-apps/current-work/multipage/parsing.html#the-stack-of-open-elements
var isSpecialElement = map[string]bool{ var isSpecialElementMap = map[string]bool{
"address": true, "address": true,
"applet": true, "applet": true,
"area": true, "area": true,
@ -88,3 +88,13 @@ var isSpecialElement = map[string]bool{
"wbr": true, "wbr": true,
"xmp": true, "xmp": true,
} }
func isSpecialElement(element *Node) bool {
switch element.Namespace {
case "", "html":
return isSpecialElementMap[element.Data]
case "svg":
return element.Data == "foreignObject"
}
return false
}

@ -319,10 +319,7 @@ func (p *parser) resetInsertionMode() {
case "html": case "html":
p.im = beforeHeadIM p.im = beforeHeadIM
default: default:
if p.top().Namespace == "" { continue
continue
}
p.im = inForeignContentIM
} }
return return
} }
@ -705,7 +702,7 @@ func inBodyIM(p *parser) bool {
case "address", "div", "p": case "address", "div", "p":
continue continue
default: default:
if !isSpecialElement[node.Data] { if !isSpecialElement(node) {
continue continue
} }
} }
@ -723,7 +720,7 @@ func inBodyIM(p *parser) bool {
case "address", "div", "p": case "address", "div", "p":
continue continue
default: default:
if !isSpecialElement[node.Data] { if !isSpecialElement(node) {
continue continue
} }
} }
@ -814,7 +811,6 @@ func inBodyIM(p *parser) bool {
// TODO: adjust foreign attributes. // TODO: adjust foreign attributes.
p.addElement(p.tok.Data, p.tok.Attr) p.addElement(p.tok.Data, p.tok.Attr)
p.top().Namespace = namespace p.top().Namespace = namespace
p.im = inForeignContentIM
return true return true
case "caption", "col", "colgroup", "frame", "head", "tbody", "td", "tfoot", "th", "thead", "tr": case "caption", "col", "colgroup", "frame", "head", "tbody", "td", "tfoot", "th", "thead", "tr":
// Ignore the token. // Ignore the token.
@ -895,7 +891,7 @@ func (p *parser) inBodyEndTagFormatting(tag string) {
// Steps 5-6. Find the furthest block. // Steps 5-6. Find the furthest block.
var furthestBlock *Node var furthestBlock *Node
for _, e := range p.oe[feIndex:] { for _, e := range p.oe[feIndex:] {
if isSpecialElement[e.Data] { if isSpecialElement(e) {
furthestBlock = e furthestBlock = e
break break
} }
@ -988,7 +984,7 @@ func (p *parser) inBodyEndTagOther(tag string) {
p.oe = p.oe[:i] p.oe = p.oe[:i]
break break
} }
if isSpecialElement[p.oe[i].Data] { if isSpecialElement(p.oe[i]) {
break break
} }
} }
@ -1206,6 +1202,13 @@ func inTableBodyIM(p *parser) bool {
add = true add = true
data = "tr" data = "tr"
consumed = false consumed = false
case "caption", "col", "colgroup", "tbody", "tfoot", "thead":
if !p.popUntil(tableScopeStopTags, "tbody", "thead", "tfoot") {
// Ignore the token.
return true
}
p.im = inTableIM
return false
default: default:
// TODO. // TODO.
} }
@ -1569,6 +1572,19 @@ func afterAfterFramesetIM(p *parser) bool {
Type: CommentNode, Type: CommentNode,
Data: p.tok.Data, Data: p.tok.Data,
}) })
case TextToken:
// Ignore all text but whitespace.
s := strings.Map(func(c rune) rune {
switch c {
case ' ', '\t', '\n', '\f', '\r':
return c
}
return -1
}, p.tok.Data)
if s != "" {
p.reconstructActiveFormattingElements()
p.addText(s)
}
case StartTagToken: case StartTagToken:
switch p.tok.Data { switch p.tok.Data {
case "html": case "html":
@ -1583,8 +1599,19 @@ func afterAfterFramesetIM(p *parser) bool {
} }
// Section 12.2.5.5. // Section 12.2.5.5.
func inForeignContentIM(p *parser) bool { func parseForeignContent(p *parser) bool {
switch p.tok.Type { switch p.tok.Type {
case TextToken:
// TODO: HTML integration points.
if p.top().Namespace == "" {
inBodyIM(p)
p.resetInsertionMode()
return true
}
if p.framesetOK {
p.framesetOK = strings.TrimLeft(p.tok.Data, whitespace) == ""
}
p.addText(p.tok.Data)
case CommentToken: case CommentToken:
p.addChild(&Node{ p.addChild(&Node{
Type: CommentNode, Type: CommentNode,
@ -1592,7 +1619,14 @@ func inForeignContentIM(p *parser) bool {
}) })
case StartTagToken: case StartTagToken:
if breakout[p.tok.Data] { if breakout[p.tok.Data] {
// TODO. for i := len(p.oe) - 1; i >= 0; i-- {
// TODO: HTML, MathML integration points.
if p.oe[i].Namespace == "" {
p.oe = p.oe[:i+1]
break
}
}
return false
} }
switch p.top().Namespace { switch p.top().Namespace {
case "mathml": case "mathml":
@ -1606,13 +1640,36 @@ func inForeignContentIM(p *parser) bool {
// TODO: adjust foreign attributes. // TODO: adjust foreign attributes.
p.addElement(p.tok.Data, p.tok.Attr) p.addElement(p.tok.Data, p.tok.Attr)
case EndTagToken: case EndTagToken:
// TODO. for i := len(p.oe) - 1; i >= 0; i-- {
if p.oe[i].Namespace == "" {
return p.im(p)
}
if strings.EqualFold(p.oe[i].Data, p.tok.Data) {
p.oe = p.oe[:i]
break
}
}
return true
default: default:
// Ignore the token. // Ignore the token.
} }
return true return true
} }
// Section 12.2.5.
func (p *parser) inForeignContent() bool {
if len(p.oe) == 0 {
return false
}
n := p.oe[len(p.oe)-1]
if n.Namespace == "" {
return false
}
// TODO: MathML, HTML integration points.
// TODO: MathML's annotation-xml combining with SVG's svg.
return true
}
func (p *parser) parse() error { func (p *parser) parse() error {
// Iterate until EOF. Any other error will cause an early return. // Iterate until EOF. Any other error will cause an early return.
consumed := true consumed := true
@ -1625,7 +1682,11 @@ func (p *parser) parse() error {
return err return err
} }
} }
consumed = p.im(p) if p.inForeignContent() {
consumed = parseForeignContent(p)
} else {
consumed = p.im(p)
}
} }
// Loop until the final token (the ErrorToken signifying EOF) is consumed. // Loop until the final token (the ErrorToken signifying EOF) is consumed.
for { for {

@ -172,7 +172,8 @@ func TestParser(t *testing.T) {
{"tests3.dat", -1}, {"tests3.dat", -1},
{"tests4.dat", -1}, {"tests4.dat", -1},
{"tests5.dat", -1}, {"tests5.dat", -1},
{"tests6.dat", 36}, {"tests6.dat", 47},
{"tests10.dat", 16},
} }
for _, tf := range testFiles { for _, tf := range testFiles {
f, err := os.Open("testdata/webkit/" + tf.filename) f, err := os.Open("testdata/webkit/" + tf.filename)

@ -183,11 +183,11 @@ const (
func (e *Error) Error() string { func (e *Error) Error() string {
if e.Line != 0 { if e.Line != 0 {
return fmt.Sprintf("exp/template/html:%s:%d: %s", e.Name, e.Line, e.Description) return fmt.Sprintf("html/template:%s:%d: %s", e.Name, e.Line, e.Description)
} else if e.Name != "" { } else if e.Name != "" {
return fmt.Sprintf("exp/template/html:%s: %s", e.Name, e.Description) return fmt.Sprintf("html/template:%s: %s", e.Name, e.Description)
} }
return "exp/template/html: " + e.Description return "html/template: " + e.Description
} }
// errorf creates an error given a format string f and args. // errorf creates an error given a format string f and args.

@ -486,9 +486,17 @@ func (e *escaper) escapeTree(c context, name string, line int) (context, string)
} }
t := e.template(name) t := e.template(name)
if t == nil { if t == nil {
// Two cases: The template exists but is empty, or has never been mentioned at
// all. Distinguish the cases in the error messages.
if e.tmpl.set[name] != nil {
return context{
state: stateError,
err: errorf(ErrNoSuchTemplate, line, "%q is an incomplete or empty template", name),
}, dname
}
return context{ return context{
state: stateError, state: stateError,
err: errorf(ErrNoSuchTemplate, line, "no such template %s", name), err: errorf(ErrNoSuchTemplate, line, "no such template %q", name),
}, dname }, dname
} }
if dname != name { if dname != name {

@ -928,7 +928,7 @@ func TestErrors(t *testing.T) {
}, },
{ {
`{{template "foo"}}`, `{{template "foo"}}`,
"z:1: no such template foo", "z:1: no such template \"foo\"",
}, },
{ {
`<div{{template "y"}}>` + `<div{{template "y"}}>` +
@ -944,23 +944,23 @@ func TestErrors(t *testing.T) {
}, },
{ {
`<input type=button value=onclick=>`, `<input type=button value=onclick=>`,
`exp/template/html:z: "=" in unquoted attr: "onclick="`, `html/template:z: "=" in unquoted attr: "onclick="`,
}, },
{ {
`<input type=button value= onclick=>`, `<input type=button value= onclick=>`,
`exp/template/html:z: "=" in unquoted attr: "onclick="`, `html/template:z: "=" in unquoted attr: "onclick="`,
}, },
{ {
`<input type=button value= 1+1=2>`, `<input type=button value= 1+1=2>`,
`exp/template/html:z: "=" in unquoted attr: "1+1=2"`, `html/template:z: "=" in unquoted attr: "1+1=2"`,
}, },
{ {
"<a class=`foo>", "<a class=`foo>",
"exp/template/html:z: \"`\" in unquoted attr: \"`foo\"", "html/template:z: \"`\" in unquoted attr: \"`foo\"",
}, },
{ {
`<a style=font:'Arial'>`, `<a style=font:'Arial'>`,
`exp/template/html:z: "'" in unquoted attr: "font:'Arial'"`, `html/template:z: "'" in unquoted attr: "font:'Arial'"`,
}, },
{ {
`<a=foo>`, `<a=foo>`,

@ -0,0 +1,99 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package color
// RGBToYCbCr converts an RGB triple to a Y'CbCr triple. All components lie
// within the range [0, 255].
func RGBToYCbCr(r, g, b uint8) (uint8, uint8, uint8) {
// The JFIF specification says:
// Y' = 0.2990*R + 0.5870*G + 0.1140*B
// Cb = -0.1687*R - 0.3313*G + 0.5000*B + 128
// Cr = 0.5000*R - 0.4187*G - 0.0813*B + 128
// http://www.w3.org/Graphics/JPEG/jfif3.pdf says Y but means Y'.
r1 := int(r)
g1 := int(g)
b1 := int(b)
yy := (19595*r1 + 38470*g1 + 7471*b1 + 1<<15) >> 16
cb := (-11056*r1 - 21712*g1 + 32768*b1 + 257<<15) >> 16
cr := (32768*r1 - 27440*g1 - 5328*b1 + 257<<15) >> 16
if yy < 0 {
yy = 0
} else if yy > 255 {
yy = 255
}
if cb < 0 {
cb = 0
} else if cb > 255 {
cb = 255
}
if cr < 0 {
cr = 0
} else if cr > 255 {
cr = 255
}
return uint8(yy), uint8(cb), uint8(cr)
}
// YCbCrToRGB converts a Y'CbCr triple to an RGB triple. All components lie
// within the range [0, 255].
func YCbCrToRGB(y, cb, cr uint8) (uint8, uint8, uint8) {
// The JFIF specification says:
// R = Y' + 1.40200*(Cr-128)
// G = Y' - 0.34414*(Cb-128) - 0.71414*(Cr-128)
// B = Y' + 1.77200*(Cb-128)
// http://www.w3.org/Graphics/JPEG/jfif3.pdf says Y but means Y'.
yy1 := int(y)<<16 + 1<<15
cb1 := int(cb) - 128
cr1 := int(cr) - 128
r := (yy1 + 91881*cr1) >> 16
g := (yy1 - 22554*cb1 - 46802*cr1) >> 16
b := (yy1 + 116130*cb1) >> 16
if r < 0 {
r = 0
} else if r > 255 {
r = 255
}
if g < 0 {
g = 0
} else if g > 255 {
g = 255
}
if b < 0 {
b = 0
} else if b > 255 {
b = 255
}
return uint8(r), uint8(g), uint8(b)
}
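
The integer arithmetic in both converters is a 16-bit fixed-point rendering of the JFIF formulas quoted in the comments: each coefficient is the floating-point factor times 65536, rounded (19595 ≈ 0.2990·65536, 91881 ≈ 1.40200·65536, and so on); the +1<<15 term rounds the result, and 257<<15 folds the +128 chroma offset into the same addition (257<<15 = 128<<16 + 1<<15). A quick check of the constants:

    package main

    import (
        "fmt"
        "math"
    )

    func main() {
        round := func(x float64) int { return int(math.Floor(x*65536 + 0.5)) }

        // RGB -> Y'CbCr coefficients used above.
        fmt.Println(round(0.2990), round(0.5870), round(0.1140)) // 19595 38470 7471
        fmt.Println(round(0.1687), round(0.3313), round(0.5000)) // 11056 21712 32768
        fmt.Println(round(0.4187), round(0.0813))                // 27440 5328

        // Y'CbCr -> RGB coefficients.
        fmt.Println(round(1.40200), round(0.34414)) // 91881 22554
        fmt.Println(round(0.71414), round(1.77200)) // 46802 116130
    }
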
// YCbCr represents a fully opaque 24-bit Y'CbCr color, having 8 bits each for
// one luma and two chroma components.
//
// JPEG, VP8, the MPEG family and other codecs use this color model. Such
// codecs often use the terms YUV and Y'CbCr interchangeably, but strictly
// speaking, the term YUV applies only to analog video signals, and Y' (luma)
// is Y (luminance) after applying gamma correction.
//
// Conversion between RGB and Y'CbCr is lossy and there are multiple, slightly
// different formulae for converting between the two. This package follows
// the JFIF specification at http://www.w3.org/Graphics/JPEG/jfif3.pdf.
type YCbCr struct {
Y, Cb, Cr uint8
}
func (c YCbCr) RGBA() (uint32, uint32, uint32, uint32) {
r, g, b := YCbCrToRGB(c.Y, c.Cb, c.Cr)
return uint32(r) * 0x101, uint32(g) * 0x101, uint32(b) * 0x101, 0xffff
}
// YCbCrModel is the Model for Y'CbCr colors.
var YCbCrModel Model = ModelFunc(func(c Color) Color {
if _, ok := c.(YCbCr); ok {
return c
}
r, g, b, _ := c.RGBA()
y, u, v := RGBToYCbCr(uint8(r>>8), uint8(g>>8), uint8(b>>8))
return YCbCr{y, u, v}
})

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package ycbcr package color
import ( import (
"testing" "testing"

@ -7,7 +7,6 @@ package draw
import ( import (
"image" "image"
"image/color" "image/color"
"image/ycbcr"
"testing" "testing"
) )
@ -51,7 +50,7 @@ func bench(b *testing.B, dcm, scm, mcm color.Model, op Op) {
} }
dst = dst1 dst = dst1
default: default:
panic("unreachable") b.Fatal("unknown destination color model", dcm)
} }
var src image.Image var src image.Image
@ -97,7 +96,7 @@ func bench(b *testing.B, dcm, scm, mcm color.Model, op Op) {
} }
} }
src = src1 src = src1
case ycbcr.YCbCrColorModel: case color.YCbCrModel:
yy := make([]uint8, srcw*srch) yy := make([]uint8, srcw*srch)
cb := make([]uint8, srcw*srch) cb := make([]uint8, srcw*srch)
cr := make([]uint8, srcw*srch) cr := make([]uint8, srcw*srch)
@ -106,17 +105,17 @@ func bench(b *testing.B, dcm, scm, mcm color.Model, op Op) {
cb[i] = uint8(5 * i % 0x100) cb[i] = uint8(5 * i % 0x100)
cr[i] = uint8(7 * i % 0x100) cr[i] = uint8(7 * i % 0x100)
} }
src = &ycbcr.YCbCr{ src = &image.YCbCr{
Y: yy, Y: yy,
Cb: cb, Cb: cb,
Cr: cr, Cr: cr,
YStride: srcw, YStride: srcw,
CStride: srcw, CStride: srcw,
SubsampleRatio: ycbcr.SubsampleRatio444, SubsampleRatio: image.YCbCrSubsampleRatio444,
Rect: image.Rect(0, 0, srcw, srch), Rect: image.Rect(0, 0, srcw, srch),
} }
default: default:
panic("unreachable") b.Fatal("unknown source color model", scm)
} }
var mask image.Image var mask image.Image
@ -137,7 +136,7 @@ func bench(b *testing.B, dcm, scm, mcm color.Model, op Op) {
} }
mask = mask1 mask = mask1
default: default:
panic("unreachable") b.Fatal("unknown mask color model", mcm)
} }
b.StartTimer() b.StartTimer()
@ -177,7 +176,7 @@ func BenchmarkNRGBASrc(b *testing.B) {
} }
func BenchmarkYCbCr(b *testing.B) { func BenchmarkYCbCr(b *testing.B) {
bench(b, color.RGBAModel, ycbcr.YCbCrColorModel, nil, Over) bench(b, color.RGBAModel, color.YCbCrModel, nil, Over)
} }
func BenchmarkGlyphOver(b *testing.B) { func BenchmarkGlyphOver(b *testing.B) {

@ -11,7 +11,6 @@ package draw
import ( import (
"image" "image"
"image/color" "image/color"
"image/ycbcr"
) )
// m is the maximum color value returned by image.Color.RGBA. // m is the maximum color value returned by image.Color.RGBA.
@ -81,7 +80,7 @@ func DrawMask(dst Image, r image.Rectangle, src image.Image, sp image.Point, mas
case *image.NRGBA: case *image.NRGBA:
drawNRGBAOver(dst0, r, src0, sp) drawNRGBAOver(dst0, r, src0, sp)
return return
case *ycbcr.YCbCr: case *image.YCbCr:
drawYCbCr(dst0, r, src0, sp) drawYCbCr(dst0, r, src0, sp)
return return
} }
@ -104,7 +103,7 @@ func DrawMask(dst Image, r image.Rectangle, src image.Image, sp image.Point, mas
case *image.NRGBA: case *image.NRGBA:
drawNRGBASrc(dst0, r, src0, sp) drawNRGBASrc(dst0, r, src0, sp)
return return
case *ycbcr.YCbCr: case *image.YCbCr:
drawYCbCr(dst0, r, src0, sp) drawYCbCr(dst0, r, src0, sp)
return return
} }
@ -346,8 +345,8 @@ func drawNRGBASrc(dst *image.RGBA, r image.Rectangle, src *image.NRGBA, sp image
} }
} }
func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *ycbcr.YCbCr, sp image.Point) { func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *image.YCbCr, sp image.Point) {
// A YCbCr image is always fully opaque, and so if the mask is implicitly nil // An image.YCbCr is always fully opaque, and so if the mask is implicitly nil
// (i.e. fully opaque) then the op is effectively always Src. // (i.e. fully opaque) then the op is effectively always Src.
var ( var (
yy, cb, cr uint8 yy, cb, cr uint8
@ -357,7 +356,7 @@ func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *ycbcr.YCbCr, sp image.Po
y0 := r.Min.Y - dst.Rect.Min.Y y0 := r.Min.Y - dst.Rect.Min.Y
y1 := r.Max.Y - dst.Rect.Min.Y y1 := r.Max.Y - dst.Rect.Min.Y
switch src.SubsampleRatio { switch src.SubsampleRatio {
case ycbcr.SubsampleRatio422: case image.YCbCrSubsampleRatio422:
for y, sy := y0, sp.Y; y != y1; y, sy = y+1, sy+1 { for y, sy := y0, sp.Y; y != y1; y, sy = y+1, sy+1 {
dpix := dst.Pix[y*dst.Stride:] dpix := dst.Pix[y*dst.Stride:]
for x, sx := x0, sp.X; x != x1; x, sx = x+4, sx+1 { for x, sx := x0, sp.X; x != x1; x, sx = x+4, sx+1 {
@ -365,14 +364,14 @@ func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *ycbcr.YCbCr, sp image.Po
yy = src.Y[sy*src.YStride+sx] yy = src.Y[sy*src.YStride+sx]
cb = src.Cb[sy*src.CStride+i] cb = src.Cb[sy*src.CStride+i]
cr = src.Cr[sy*src.CStride+i] cr = src.Cr[sy*src.CStride+i]
rr, gg, bb := ycbcr.YCbCrToRGB(yy, cb, cr) rr, gg, bb := color.YCbCrToRGB(yy, cb, cr)
dpix[x+0] = rr dpix[x+0] = rr
dpix[x+1] = gg dpix[x+1] = gg
dpix[x+2] = bb dpix[x+2] = bb
dpix[x+3] = 255 dpix[x+3] = 255
} }
} }
case ycbcr.SubsampleRatio420: case image.YCbCrSubsampleRatio420:
for y, sy := y0, sp.Y; y != y1; y, sy = y+1, sy+1 { for y, sy := y0, sp.Y; y != y1; y, sy = y+1, sy+1 {
dpix := dst.Pix[y*dst.Stride:] dpix := dst.Pix[y*dst.Stride:]
for x, sx := x0, sp.X; x != x1; x, sx = x+4, sx+1 { for x, sx := x0, sp.X; x != x1; x, sx = x+4, sx+1 {
@ -380,7 +379,7 @@ func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *ycbcr.YCbCr, sp image.Po
yy = src.Y[sy*src.YStride+sx] yy = src.Y[sy*src.YStride+sx]
cb = src.Cb[j*src.CStride+i] cb = src.Cb[j*src.CStride+i]
cr = src.Cr[j*src.CStride+i] cr = src.Cr[j*src.CStride+i]
rr, gg, bb := ycbcr.YCbCrToRGB(yy, cb, cr) rr, gg, bb := color.YCbCrToRGB(yy, cb, cr)
dpix[x+0] = rr dpix[x+0] = rr
dpix[x+1] = gg dpix[x+1] = gg
dpix[x+2] = bb dpix[x+2] = bb
@ -395,7 +394,7 @@ func drawYCbCr(dst *image.RGBA, r image.Rectangle, src *ycbcr.YCbCr, sp image.Po
yy = src.Y[sy*src.YStride+sx] yy = src.Y[sy*src.YStride+sx]
cb = src.Cb[sy*src.CStride+sx] cb = src.Cb[sy*src.CStride+sx]
cr = src.Cr[sy*src.CStride+sx] cr = src.Cr[sy*src.CStride+sx]
rr, gg, bb := ycbcr.YCbCrToRGB(yy, cb, cr) rr, gg, bb := color.YCbCrToRGB(yy, cb, cr)
dpix[x+0] = rr dpix[x+0] = rr
dpix[x+1] = gg dpix[x+1] = gg
dpix[x+2] = bb dpix[x+2] = bb

View File

@ -7,7 +7,6 @@ package draw
import ( import (
"image" "image"
"image/color" "image/color"
"image/ycbcr"
"testing" "testing"
) )
@ -56,13 +55,13 @@ func vgradGreenNRGBA(alpha int) image.Image {
} }
func vgradCr() image.Image { func vgradCr() image.Image {
m := &ycbcr.YCbCr{ m := &image.YCbCr{
Y: make([]byte, 16*16), Y: make([]byte, 16*16),
Cb: make([]byte, 16*16), Cb: make([]byte, 16*16),
Cr: make([]byte, 16*16), Cr: make([]byte, 16*16),
YStride: 16, YStride: 16,
CStride: 16, CStride: 16,
SubsampleRatio: ycbcr.SubsampleRatio444, SubsampleRatio: image.YCbCrSubsampleRatio444,
Rect: image.Rect(0, 0, 16, 16), Rect: image.Rect(0, 0, 16, 16),
} }
for y := 0; y < 16; y++ { for y := 0; y < 16; y++ {

View File

@ -11,7 +11,6 @@ import (
"bufio" "bufio"
"image" "image"
"image/color" "image/color"
"image/ycbcr"
"io" "io"
) )
@ -97,7 +96,7 @@ type decoder struct {
r Reader r Reader
width, height int width, height int
img1 *image.Gray img1 *image.Gray
img3 *ycbcr.YCbCr img3 *image.YCbCr
ri int // Restart Interval. ri int // Restart Interval.
nComp int nComp int
comp [nColorComponent]component comp [nColorComponent]component
@ -203,20 +202,20 @@ func (d *decoder) makeImg(h0, v0, mxx, myy int) {
d.img1 = m.SubImage(image.Rect(0, 0, d.width, d.height)).(*image.Gray) d.img1 = m.SubImage(image.Rect(0, 0, d.width, d.height)).(*image.Gray)
return return
} }
var subsampleRatio ycbcr.SubsampleRatio var subsampleRatio image.YCbCrSubsampleRatio
n := h0 * v0 n := h0 * v0
switch n { switch n {
case 1: case 1:
subsampleRatio = ycbcr.SubsampleRatio444 subsampleRatio = image.YCbCrSubsampleRatio444
case 2: case 2:
subsampleRatio = ycbcr.SubsampleRatio422 subsampleRatio = image.YCbCrSubsampleRatio422
case 4: case 4:
subsampleRatio = ycbcr.SubsampleRatio420 subsampleRatio = image.YCbCrSubsampleRatio420
default: default:
panic("unreachable") panic("unreachable")
} }
b := make([]byte, mxx*myy*(1*8*8*n+2*8*8)) b := make([]byte, mxx*myy*(1*8*8*n+2*8*8))
d.img3 = &ycbcr.YCbCr{ d.img3 = &image.YCbCr{
Y: b[mxx*myy*(0*8*8*n+0*8*8) : mxx*myy*(1*8*8*n+0*8*8)], Y: b[mxx*myy*(0*8*8*n+0*8*8) : mxx*myy*(1*8*8*n+0*8*8)],
Cb: b[mxx*myy*(1*8*8*n+0*8*8) : mxx*myy*(1*8*8*n+1*8*8)], Cb: b[mxx*myy*(1*8*8*n+0*8*8) : mxx*myy*(1*8*8*n+1*8*8)],
Cr: b[mxx*myy*(1*8*8*n+1*8*8) : mxx*myy*(1*8*8*n+2*8*8)], Cr: b[mxx*myy*(1*8*8*n+1*8*8) : mxx*myy*(1*8*8*n+2*8*8)],
@ -466,7 +465,7 @@ func DecodeConfig(r io.Reader) (image.Config, error) {
case nGrayComponent: case nGrayComponent:
return image.Config{color.GrayModel, d.width, d.height}, nil return image.Config{color.GrayModel, d.width, d.height}, nil
case nColorComponent: case nColorComponent:
return image.Config{ycbcr.YCbCrColorModel, d.width, d.height}, nil return image.Config{color.YCbCrModel, d.width, d.height}, nil
} }
return image.Config{}, FormatError("missing SOF marker") return image.Config{}, FormatError("missing SOF marker")
} }
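DecodeConfig now reports color.YCbCrModel for three-component JPEGs. A hedged sketch of inspecting the reported model from caller code; the file name is a placeholder:

package main

import (
	"fmt"
	"image/color"
	"image/jpeg"
	"os"
)

func main() {
	f, err := os.Open("photo.jpg") // placeholder path
	if err != nil {
		fmt.Println("open:", err)
		return
	}
	defer f.Close()

	cfg, err := jpeg.DecodeConfig(f)
	if err != nil {
		fmt.Println("DecodeConfig:", err)
		return
	}
	// For a three-component JPEG, cfg.ColorModel is color.YCbCrModel;
	// probing it with a conversion avoids relying on model identity.
	if _, ok := cfg.ColorModel.Convert(color.RGBA{}).(color.YCbCr); ok {
		fmt.Println("color JPEG:", cfg.Width, "x", cfg.Height)
	} else {
		fmt.Println("grayscale JPEG:", cfg.Width, "x", cfg.Height)
	}
}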

View File

@ -8,7 +8,7 @@ import (
"bufio" "bufio"
"errors" "errors"
"image" "image"
"image/ycbcr" "image/color"
"io" "io"
) )
@ -379,7 +379,7 @@ func toYCbCr(m image.Image, p image.Point, yBlock, cbBlock, crBlock *block) {
for j := 0; j < 8; j++ { for j := 0; j < 8; j++ {
for i := 0; i < 8; i++ { for i := 0; i < 8; i++ {
r, g, b, _ := m.At(min(p.X+i, xmax), min(p.Y+j, ymax)).RGBA() r, g, b, _ := m.At(min(p.X+i, xmax), min(p.Y+j, ymax)).RGBA()
yy, cb, cr := ycbcr.RGBToYCbCr(uint8(r>>8), uint8(g>>8), uint8(b>>8)) yy, cb, cr := color.RGBToYCbCr(uint8(r>>8), uint8(g>>8), uint8(b>>8))
yBlock[8*j+i] = int(yy) yBlock[8*j+i] = int(yy)
cbBlock[8*j+i] = int(cb) cbBlock[8*j+i] = int(cb)
crBlock[8*j+i] = int(cr) crBlock[8*j+i] = int(cr)
@ -404,7 +404,7 @@ func rgbaToYCbCr(m *image.RGBA, p image.Point, yBlock, cbBlock, crBlock *block)
sx = xmax sx = xmax
} }
pix := m.Pix[offset+sx*4:] pix := m.Pix[offset+sx*4:]
yy, cb, cr := ycbcr.RGBToYCbCr(pix[0], pix[1], pix[2]) yy, cb, cr := color.RGBToYCbCr(pix[0], pix[1], pix[2])
yBlock[8*j+i] = int(yy) yBlock[8*j+i] = int(yy)
cbBlock[8*j+i] = int(cb) cbBlock[8*j+i] = int(cb)
crBlock[8*j+i] = int(cr) crBlock[8*j+i] = int(cr)

View File

@ -105,7 +105,7 @@ func BenchmarkEncodeRGBOpaque(b *testing.B) {
} }
} }
if !img.Opaque() { if !img.Opaque() {
panic("expected image to be opaque") b.Fatal("expected image to be opaque")
} }
b.SetBytes(640 * 480 * 4) b.SetBytes(640 * 480 * 4)
b.StartTimer() b.StartTimer()

View File

@ -125,7 +125,7 @@ func BenchmarkEncodeRGBOpaque(b *testing.B) {
} }
} }
if !img.Opaque() { if !img.Opaque() {
panic("expected image to be opaque") b.Fatal("expected image to be opaque")
} }
b.SetBytes(640 * 480 * 4) b.SetBytes(640 * 480 * 4)
b.StartTimer() b.StartTimer()
@ -138,7 +138,7 @@ func BenchmarkEncodeRGBA(b *testing.B) {
b.StopTimer() b.StopTimer()
img := image.NewRGBA(image.Rect(0, 0, 640, 480)) img := image.NewRGBA(image.Rect(0, 0, 640, 480))
if img.Opaque() { if img.Opaque() {
panic("expected image to not be opaque") b.Fatal("expected image to not be opaque")
} }
b.SetBytes(640 * 480 * 4) b.SetBytes(640 * 480 * 4)
b.StartTimer() b.StartTimer()

View File

@ -113,7 +113,7 @@ func BenchmarkDecode(b *testing.B) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
_, err := Decode(r) _, err := Decode(r)
if err != nil { if err != nil {
panic(err) b.Fatal("Decode:", err)
} }
} }
} }

libgo/go/image/ycbcr.go (new file, 87 lines)
View File

@ -0,0 +1,87 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package image
import (
"image/color"
)
// YCbCrSubsampleRatio is the chroma subsample ratio used in a YCbCr image.
type YCbCrSubsampleRatio int
const (
YCbCrSubsampleRatio444 YCbCrSubsampleRatio = iota
YCbCrSubsampleRatio422
YCbCrSubsampleRatio420
)
// YCbCr is an in-memory image of Y'CbCr colors. There is one Y sample per
// pixel, but each Cb and Cr sample can span one or more pixels.
// YStride is the Y slice index delta between vertically adjacent pixels.
// CStride is the Cb and Cr slice index delta between vertically adjacent pixels
// that map to separate chroma samples.
// It is not an absolute requirement, but YStride and len(Y) are typically
// multiples of 8, and:
// For 4:4:4, CStride == YStride/1 && len(Cb) == len(Cr) == len(Y)/1.
// For 4:2:2, CStride == YStride/2 && len(Cb) == len(Cr) == len(Y)/2.
// For 4:2:0, CStride == YStride/2 && len(Cb) == len(Cr) == len(Y)/4.
type YCbCr struct {
Y []uint8
Cb []uint8
Cr []uint8
YStride int
CStride int
SubsampleRatio YCbCrSubsampleRatio
Rect Rectangle
}
func (p *YCbCr) ColorModel() color.Model {
return color.YCbCrModel
}
func (p *YCbCr) Bounds() Rectangle {
return p.Rect
}
func (p *YCbCr) At(x, y int) color.Color {
if !(Point{x, y}.In(p.Rect)) {
return color.YCbCr{}
}
switch p.SubsampleRatio {
case YCbCrSubsampleRatio422:
i := x / 2
return color.YCbCr{
p.Y[y*p.YStride+x],
p.Cb[y*p.CStride+i],
p.Cr[y*p.CStride+i],
}
case YCbCrSubsampleRatio420:
i, j := x/2, y/2
return color.YCbCr{
p.Y[y*p.YStride+x],
p.Cb[j*p.CStride+i],
p.Cr[j*p.CStride+i],
}
}
// Default to 4:4:4 subsampling.
return color.YCbCr{
p.Y[y*p.YStride+x],
p.Cb[y*p.CStride+x],
p.Cr[y*p.CStride+x],
}
}
// SubImage returns an image representing the portion of the image p visible
// through r. The returned value shares pixels with the original image.
func (p *YCbCr) SubImage(r Rectangle) Image {
q := new(YCbCr)
*q = *p
q.Rect = q.Rect.Intersect(r)
return q
}
func (p *YCbCr) Opaque() bool {
return true
}
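To make the stride and length relationships in the type comment concrete, here is a hedged sketch that builds a small 4:2:0 image by hand and exercises the index arithmetic used by At above; the 8x8 size and sample values are arbitrary, and no constructor helper is assumed to exist at this revision:

package main

import (
	"fmt"
	"image"
)

func main() {
	const w, h = 8, 8 // luma plane is 8x8; the chroma planes are 4x4 for 4:2:0
	m := &image.YCbCr{
		Y:              make([]uint8, w*h),
		Cb:             make([]uint8, (w/2)*(h/2)), // len(Cb) == len(Y)/4
		Cr:             make([]uint8, (w/2)*(h/2)),
		YStride:        w,
		CStride:        w / 2, // CStride == YStride/2
		SubsampleRatio: image.YCbCrSubsampleRatio420,
		Rect:           image.Rect(0, 0, w, h),
	}

	// For 4:2:0 the pixel at (x, y) reads Y[y*YStride+x] and
	// Cb[(y/2)*CStride + x/2] (likewise Cr) - the j*CStride+i indexing in At.
	x, y := 5, 3
	i, j := x/2, y/2
	m.Y[y*m.YStride+x] = 200
	m.Cb[j*m.CStride+i] = 100
	m.Cr[j*m.CStride+i] = 150
	fmt.Println(m.At(x, y)) // {200 100 150}
}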

View File

@ -1,184 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package ycbcr provides images from the Y'CbCr color model.
//
// JPEG, VP8, the MPEG family and other codecs use this color model. Such
// codecs often use the terms YUV and Y'CbCr interchangeably, but strictly
// speaking, the term YUV applies only to analog video signals.
//
// Conversion between RGB and Y'CbCr is lossy and there are multiple, slightly
// different formulae for converting between the two. This package follows
// the JFIF specification at http://www.w3.org/Graphics/JPEG/jfif3.pdf.
package ycbcr
import (
"image"
"image/color"
)
// RGBToYCbCr converts an RGB triple to a YCbCr triple. All components lie
// within the range [0, 255].
func RGBToYCbCr(r, g, b uint8) (uint8, uint8, uint8) {
// The JFIF specification says:
// Y' = 0.2990*R + 0.5870*G + 0.1140*B
// Cb = -0.1687*R - 0.3313*G + 0.5000*B + 128
// Cr = 0.5000*R - 0.4187*G - 0.0813*B + 128
// http://www.w3.org/Graphics/JPEG/jfif3.pdf says Y but means Y'.
r1 := int(r)
g1 := int(g)
b1 := int(b)
yy := (19595*r1 + 38470*g1 + 7471*b1 + 1<<15) >> 16
cb := (-11056*r1 - 21712*g1 + 32768*b1 + 257<<15) >> 16
cr := (32768*r1 - 27440*g1 - 5328*b1 + 257<<15) >> 16
if yy < 0 {
yy = 0
} else if yy > 255 {
yy = 255
}
if cb < 0 {
cb = 0
} else if cb > 255 {
cb = 255
}
if cr < 0 {
cr = 0
} else if cr > 255 {
cr = 255
}
return uint8(yy), uint8(cb), uint8(cr)
}
// YCbCrToRGB converts a YCbCr triple to an RGB triple. All components lie
// within the range [0, 255].
func YCbCrToRGB(y, cb, cr uint8) (uint8, uint8, uint8) {
// The JFIF specification says:
// R = Y' + 1.40200*(Cr-128)
// G = Y' - 0.34414*(Cb-128) - 0.71414*(Cr-128)
// B = Y' + 1.77200*(Cb-128)
// http://www.w3.org/Graphics/JPEG/jfif3.pdf says Y but means Y'.
yy1 := int(y)<<16 + 1<<15
cb1 := int(cb) - 128
cr1 := int(cr) - 128
r := (yy1 + 91881*cr1) >> 16
g := (yy1 - 22554*cb1 - 46802*cr1) >> 16
b := (yy1 + 116130*cb1) >> 16
if r < 0 {
r = 0
} else if r > 255 {
r = 255
}
if g < 0 {
g = 0
} else if g > 255 {
g = 255
}
if b < 0 {
b = 0
} else if b > 255 {
b = 255
}
return uint8(r), uint8(g), uint8(b)
}
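These conversion routines now live in image/color as color.RGBToYCbCr and color.YCbCrToRGB, which the draw and jpeg changes above already call. A hedged round-trip sketch; the conversion is fixed-point and lossy, so the round trip is close but not exact:

package main

import (
	"fmt"
	"image/color"
)

func main() {
	r, g, b := uint8(250), uint8(100), uint8(50)

	// Forward: JFIF-style RGB -> Y'CbCr, all components in [0, 255].
	y, cb, cr := color.RGBToYCbCr(r, g, b)
	fmt.Println("Y'CbCr:", y, cb, cr)

	// Back again; expect values close to, but not exactly, the inputs
	// because the fixed-point conversion rounds.
	r2, g2, b2 := color.YCbCrToRGB(y, cb, cr)
	fmt.Println("RGB:   ", r2, g2, b2)
}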
// YCbCrColor represents a fully opaque 24-bit Y'CbCr color, having 8 bits for
// each of one luma and two chroma components.
type YCbCrColor struct {
Y, Cb, Cr uint8
}
func (c YCbCrColor) RGBA() (uint32, uint32, uint32, uint32) {
r, g, b := YCbCrToRGB(c.Y, c.Cb, c.Cr)
return uint32(r) * 0x101, uint32(g) * 0x101, uint32(b) * 0x101, 0xffff
}
func toYCbCrColor(c color.Color) color.Color {
if _, ok := c.(YCbCrColor); ok {
return c
}
r, g, b, _ := c.RGBA()
y, u, v := RGBToYCbCr(uint8(r>>8), uint8(g>>8), uint8(b>>8))
return YCbCrColor{y, u, v}
}
// YCbCrColorModel is the color model for YCbCrColor.
var YCbCrColorModel color.Model = color.ModelFunc(toYCbCrColor)
// SubsampleRatio is the chroma subsample ratio used in a YCbCr image.
type SubsampleRatio int
const (
SubsampleRatio444 SubsampleRatio = iota
SubsampleRatio422
SubsampleRatio420
)
// YCbCr is an in-memory image of YCbCr colors. There is one Y sample per pixel,
// but each Cb and Cr sample can span one or more pixels.
// YStride is the Y slice index delta between vertically adjacent pixels.
// CStride is the Cb and Cr slice index delta between vertically adjacent pixels
// that map to separate chroma samples.
// It is not an absolute requirement, but YStride and len(Y) are typically
// multiples of 8, and:
// For 4:4:4, CStride == YStride/1 && len(Cb) == len(Cr) == len(Y)/1.
// For 4:2:2, CStride == YStride/2 && len(Cb) == len(Cr) == len(Y)/2.
// For 4:2:0, CStride == YStride/2 && len(Cb) == len(Cr) == len(Y)/4.
type YCbCr struct {
Y []uint8
Cb []uint8
Cr []uint8
YStride int
CStride int
SubsampleRatio SubsampleRatio
Rect image.Rectangle
}
func (p *YCbCr) ColorModel() color.Model {
return YCbCrColorModel
}
func (p *YCbCr) Bounds() image.Rectangle {
return p.Rect
}
func (p *YCbCr) At(x, y int) color.Color {
if !(image.Point{x, y}.In(p.Rect)) {
return YCbCrColor{}
}
switch p.SubsampleRatio {
case SubsampleRatio422:
i := x / 2
return YCbCrColor{
p.Y[y*p.YStride+x],
p.Cb[y*p.CStride+i],
p.Cr[y*p.CStride+i],
}
case SubsampleRatio420:
i, j := x/2, y/2
return YCbCrColor{
p.Y[y*p.YStride+x],
p.Cb[j*p.CStride+i],
p.Cr[j*p.CStride+i],
}
}
// Default to 4:4:4 subsampling.
return YCbCrColor{
p.Y[y*p.YStride+x],
p.Cb[y*p.CStride+x],
p.Cr[y*p.CStride+x],
}
}
// SubImage returns an image representing the portion of the image p visible
// through r. The returned value shares pixels with the original image.
func (p *YCbCr) SubImage(r image.Rectangle) image.Image {
q := new(YCbCr)
*q = *p
q.Rect = q.Rect.Intersect(r)
return q
}
func (p *YCbCr) Opaque() bool {
return true
}

View File

@ -37,7 +37,11 @@ func TestReadFile(t *testing.T) {
} }
func TestWriteFile(t *testing.T) { func TestWriteFile(t *testing.T) {
filename := "_test/rumpelstilzchen" f, err := TempFile("", "ioutil-test")
if err != nil {
t.Fatal(err)
}
filename := f.Name()
data := "Programming today is a race between software engineers striving to " + data := "Programming today is a race between software engineers striving to " +
"build bigger and better idiot-proof programs, and the Universe trying " + "build bigger and better idiot-proof programs, and the Universe trying " +
"to produce bigger and better idiots. So far, the Universe is winning." "to produce bigger and better idiots. So far, the Universe is winning."
@ -56,6 +60,7 @@ func TestWriteFile(t *testing.T) {
} }
// cleanup // cleanup
f.Close()
os.Remove(filename) // ignore error os.Remove(filename) // ignore error
} }
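The test now creates its scratch file with TempFile instead of relying on a fixed _test path. The same pattern in ordinary code, sketched with placeholder names:

package main

import (
	"fmt"
	"io/ioutil"
	"os"
)

func main() {
	// TempFile creates and opens a fresh file in the default temp directory;
	// the "demo-" prefix is just an example.
	f, err := ioutil.TempFile("", "demo-")
	if err != nil {
		fmt.Println("TempFile:", err)
		return
	}
	name := f.Name()
	defer os.Remove(name) // clean up, ignoring the error as the test does
	defer f.Close()

	if err := ioutil.WriteFile(name, []byte("hello"), 0600); err != nil {
		fmt.Println("WriteFile:", err)
		return
	}
	data, err := ioutil.ReadFile(name)
	if err != nil {
		fmt.Println("ReadFile:", err)
		return
	}
	fmt.Printf("%s\n", data)
}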
@ -66,26 +71,28 @@ func TestReadDir(t *testing.T) {
t.Fatalf("ReadDir %s: error expected, none found", dirname) t.Fatalf("ReadDir %s: error expected, none found", dirname)
} }
dirname = "." dirname = ".."
list, err := ReadDir(dirname) list, err := ReadDir(dirname)
if err != nil { if err != nil {
t.Fatalf("ReadDir %s: %v", dirname, err) t.Fatalf("ReadDir %s: %v", dirname, err)
} }
foundTest := false /* Does not work in gccgo testing environment.
foundTestDir := false foundFile := false
foundSubDir := false
for _, dir := range list { for _, dir := range list {
switch { switch {
case !dir.IsDir() && dir.Name() == "ioutil_test.go": case !dir.IsDir() && dir.Name() == "io_test.go":
foundTest = true foundFile = true
case dir.IsDir() && dir.Name() == "_test": case dir.IsDir() && dir.Name() == "ioutil":
foundTestDir = true foundSubDir = true
} }
} }
if !foundTest { if !foundFile {
t.Fatalf("ReadDir %s: test file not found", dirname) t.Fatalf("ReadDir %s: io_test.go file not found", dirname)
} }
if !foundTestDir { if !foundSubDir {
t.Fatalf("ReadDir %s: _test directory not found", dirname) t.Fatalf("ReadDir %s: ioutil directory not found", dirname)
} }
*/
} }
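The directory assertions are commented out because the gccgo test layout differs from the upstream tree; ReadDir itself is unchanged. A minimal hedged sketch of the API:

package main

import (
	"fmt"
	"io/ioutil"
)

func main() {
	// ReadDir returns the directory's entries sorted by name.
	entries, err := ioutil.ReadDir(".")
	if err != nil {
		fmt.Println("ReadDir:", err)
		return
	}
	for _, fi := range entries {
		kind := "file"
		if fi.IsDir() {
			kind = "dir"
		}
		fmt.Println(kind, fi.Name())
	}
}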

View File

@ -92,11 +92,13 @@ func (w *Writer) Emerg(m string) (err error) {
_, err = w.writeString(LOG_EMERG, m) _, err = w.writeString(LOG_EMERG, m)
return err return err
} }
// Crit logs a message using the LOG_CRIT priority. // Crit logs a message using the LOG_CRIT priority.
func (w *Writer) Crit(m string) (err error) { func (w *Writer) Crit(m string) (err error) {
_, err = w.writeString(LOG_CRIT, m) _, err = w.writeString(LOG_CRIT, m)
return err return err
} }
// ERR logs a message using the LOG_ERR priority. // ERR logs a message using the LOG_ERR priority.
func (w *Writer) Err(m string) (err error) { func (w *Writer) Err(m string) (err error) {
_, err = w.writeString(LOG_ERR, m) _, err = w.writeString(LOG_ERR, m)
@ -114,11 +116,13 @@ func (w *Writer) Notice(m string) (err error) {
_, err = w.writeString(LOG_NOTICE, m) _, err = w.writeString(LOG_NOTICE, m)
return err return err
} }
// Info logs a message using the LOG_INFO priority. // Info logs a message using the LOG_INFO priority.
func (w *Writer) Info(m string) (err error) { func (w *Writer) Info(m string) (err error) {
_, err = w.writeString(LOG_INFO, m) _, err = w.writeString(LOG_INFO, m)
return err return err
} }
// Debug logs a message using the LOG_DEBUG priority. // Debug logs a message using the LOG_DEBUG priority.
func (w *Writer) Debug(m string) (err error) { func (w *Writer) Debug(m string) (err error) {
_, err = w.writeString(LOG_DEBUG, m) _, err = w.writeString(LOG_DEBUG, m)

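The added blank lines restore the usual separation between the priority helpers. For context, a hedged sketch of using them; the syslog.New call assumes the (priority, prefix) signature of this era and a reachable system logger:

package main

import (
	"fmt"
	"log/syslog"
)

func main() {
	// Assumed constructor shape for this era: New(priority, prefix).
	w, err := syslog.New(syslog.LOG_INFO, "demo")
	if err != nil {
		fmt.Println("syslog unavailable:", err)
		return
	}
	defer w.Close()

	// Each helper sends one message at its fixed priority.
	w.Info("service started")
	w.Err("something went wrong")
	w.Debug("verbose detail")
}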
View File

@ -22,6 +22,7 @@ var vf = []float64{
1.8253080916808550e+00, 1.8253080916808550e+00,
-8.6859247685756013e+00, -8.6859247685756013e+00,
} }
// The expected results below were computed by the high precision calculators // The expected results below were computed by the high precision calculators
// at http://keisan.casio.com/. More exact input values (array vf[], above) // at http://keisan.casio.com/. More exact input values (array vf[], above)
// were obtained by printing them with "%.26f". The answers were calculated // were obtained by printing them with "%.26f". The answers were calculated
@ -159,6 +160,7 @@ var cos = []float64{
-2.517729313893103197176091e-01, -2.517729313893103197176091e-01,
-7.39241351595676573201918e-01, -7.39241351595676573201918e-01,
} }
// Results for 100000 * Pi + vf[i] // Results for 100000 * Pi + vf[i]
var cosLarge = []float64{ var cosLarge = []float64{
2.634752141185559426744e-01, 2.634752141185559426744e-01,
@ -514,6 +516,7 @@ var sin = []float64{
9.6778633541687993721617774e-01, 9.6778633541687993721617774e-01,
-6.734405869050344734943028e-01, -6.734405869050344734943028e-01,
} }
// Results for 100000 * Pi + vf[i] // Results for 100000 * Pi + vf[i]
var sinLarge = []float64{ var sinLarge = []float64{
-9.646661658548936063912e-01, -9.646661658548936063912e-01,
@ -563,6 +566,7 @@ var tan = []float64{
-3.843885560201130679995041e+00, -3.843885560201130679995041e+00,
9.10988793377685105753416e-01, 9.10988793377685105753416e-01,
} }
// Results for 100000 * Pi + vf[i] // Results for 100000 * Pi + vf[i]
var tanLarge = []float64{ var tanLarge = []float64{
-3.66131656475596512705e+00, -3.66131656475596512705e+00,

View File

@ -9,6 +9,7 @@ import (
"encoding/gob" "encoding/gob"
"encoding/hex" "encoding/hex"
"fmt" "fmt"
"math/rand"
"testing" "testing"
"testing/quick" "testing/quick"
) )
@ -1405,3 +1406,9 @@ func TestIntGobEncoding(t *testing.T) {
} }
} }
} }
func TestIssue2607(t *testing.T) {
// This code sequence used to hang.
n := NewInt(10)
n.Rand(rand.New(rand.NewSource(9)), n)
}
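TestIssue2607 pins the regression where passing the receiver as its own limit made Rand loop forever; the nat.random change in the next hunk allocates fresh storage when the receiver aliases the limit instead of reusing it. A hedged sketch of the now-safe call shape:

package main

import (
	"fmt"
	"math/big"
	"math/rand"
)

func main() {
	rng := rand.New(rand.NewSource(9))

	n := big.NewInt(10)
	// The receiver doubles as the limit here; before the aliasing fix this
	// combination could hang. The result is a pseudo-random value in [0, 10).
	n.Rand(rng, n)
	fmt.Println(n)
}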

View File

@ -1196,12 +1196,16 @@ func (x nat) powersOfTwoDecompose() (q nat, k int) {
// random creates a random integer in [0..limit), using the space in z if // random creates a random integer in [0..limit), using the space in z if
// possible. n is the bit length of limit. // possible. n is the bit length of limit.
func (z nat) random(rand *rand.Rand, limit nat, n int) nat { func (z nat) random(rand *rand.Rand, limit nat, n int) nat {
if alias(z, limit) {
z = nil // z is an alias for limit - cannot reuse
}
z = z.make(len(limit))
bitLengthOfMSW := uint(n % _W) bitLengthOfMSW := uint(n % _W)
if bitLengthOfMSW == 0 { if bitLengthOfMSW == 0 {
bitLengthOfMSW = _W bitLengthOfMSW = _W
} }
mask := Word((1 << bitLengthOfMSW) - 1) mask := Word((1 << bitLengthOfMSW) - 1)
z = z.make(len(limit))
for { for {
for i := range z { for i := range z {

View File

@ -5,7 +5,6 @@
package big package big
import ( import (
"fmt"
"io" "io"
"strings" "strings"
"testing" "testing"
@ -402,7 +401,7 @@ func ScanHelper(b *testing.B, base int, x, y Word) {
var s string var s string
s = z.string(lowercaseDigits[0:base]) s = z.string(lowercaseDigits[0:base])
if t := toString(z, lowercaseDigits[0:base]); t != s { if t := toString(z, lowercaseDigits[0:base]); t != s {
panic(fmt.Sprintf("scanning: got %s; want %s", s, t)) b.Fatalf("scanning: got %s; want %s", s, t)
} }
b.StartTimer() b.StartTimer()

View File

@ -98,6 +98,7 @@ var _sin = [...]float64{
8.33333333332211858878E-3, // 0x3f8111111110f7d0 8.33333333332211858878E-3, // 0x3f8111111110f7d0
-1.66666666666666307295E-1, // 0xbfc5555555555548 -1.66666666666666307295E-1, // 0xbfc5555555555548
} }
// cos coefficients // cos coefficients
var _cos = [...]float64{ var _cos = [...]float64{
-1.13585365213876817300E-11, // 0xbda8fa49a0861a9b -1.13585365213876817300E-11, // 0xbda8fa49a0861a9b

Some files were not shown because too many files have changed in this diff.