diff --git a/.gitignore b/.gitignore
index 50057a7..c6fa73f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,4 @@
gin-bin
mondash
-Godeps/_workspace/
*.sh
data
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..19b9baf
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,15 @@
+FROM alpine
+
+MAINTAINER Knut Ahlers
+
+ENV GOPATH /go:/go/src/github.com/Luzifer/mondash/Godeps/_workspace
+EXPOSE 3000
+
+ADD . /go/src/github.com/Luzifer/mondash
+WORKDIR /go/src/github.com/Luzifer/mondash
+
+RUN apk --update add git go ca-certificates \
+ && go install -ldflags "-X main.version=$(git describe --tags || git rev-parse --short HEAD || echo dev)" \
+ && apk del --purge go git
+
+ENTRYPOINT ["/go/bin/mondash"]
diff --git a/Godeps/Godeps.json b/Godeps/Godeps.json
index 6bc5d23..23dca1f 100644
--- a/Godeps/Godeps.json
+++ b/Godeps/Godeps.json
@@ -1,7 +1,12 @@
{
"ImportPath": "github.com/Luzifer/mondash",
- "GoVersion": "go1.4.2",
+ "GoVersion": "go1.5.3",
"Deps": [
+ {
+ "ImportPath": "code.google.com/p/go.text/transform",
+ "Comment": "null-104",
+ "Rev": "7ab6d3749429e2c62c53eecadf3e4ec5e715c11b"
+ },
{
"ImportPath": "code.google.com/p/go.text/unicode/norm",
"Comment": "null-104",
@@ -47,11 +52,6 @@
"Comment": "v0.6.4-2-g168a70b",
"Rev": "168a70b9c21a4f60166d7925b690356605907adb"
},
- {
- "ImportPath": "github.com/codegangsta/inject",
- "Comment": "v1.0-rc1-4-g4b81725",
- "Rev": "4b8172520a03fa190f427bbd284db01b459bfce7"
- },
{
"ImportPath": "github.com/extemporalgenome/slug",
"Rev": "de70af6e24d0e6bdd6eaef276d2e9910e68a1397"
@@ -62,22 +62,25 @@
},
{
"ImportPath": "github.com/flosch/pongo2",
- "Comment": "v1.0-rc1-128-g97072c3",
- "Rev": "97072c3325f05afc51c66ae2561b8f3b1543aee4"
+ "Comment": "v1.0-rc1-174-gf5d79aa",
+ "Rev": "f5d79aa0a914c08eb7f51a96cd7b2dbbe46fca46"
},
{
"ImportPath": "github.com/flosch/pongo2-addons",
"Rev": "bb4da1901facc08b4edd9261050143b95e36d0a1"
},
{
- "ImportPath": "github.com/go-martini/martini",
- "Comment": "v1.0-39-g42b0b68",
- "Rev": "42b0b68fb383aac4f72331d0bd0948001b9be080"
+ "ImportPath": "github.com/gorilla/context",
+ "Rev": "1c83b3eabd45b6d76072b66b746c20815fb2872d"
+ },
+ {
+ "ImportPath": "github.com/gorilla/mux",
+ "Rev": "49c024275504f0341e5a9971eb7ba7fa3dc7af40"
},
{
"ImportPath": "github.com/russross/blackfriday",
- "Comment": "v1.2-21-g7c8f3c1",
- "Rev": "7c8f3c1dcc7e0f87c62af34ee193fc3251f6d8a4"
+ "Comment": "v1.3",
+ "Rev": "8cec3a854e68dba10faabbe31c089abf4a3e57a6"
},
{
"ImportPath": "github.com/shurcooL/sanitized_anchor_name",
@@ -90,10 +93,6 @@
{
"ImportPath": "github.com/vaughan0/go-ini",
"Rev": "a98ad7ee00ec53921f08832bc06ecf7fd600e6a1"
- },
- {
- "ImportPath": "golang.org/x/text/transform",
- "Rev": "8ec34a0272a20b3a6b12ecab475dd99e16610e4c"
}
]
}
diff --git a/Godeps/_workspace/.gitignore b/Godeps/_workspace/.gitignore
new file mode 100644
index 0000000..f037d68
--- /dev/null
+++ b/Godeps/_workspace/.gitignore
@@ -0,0 +1,2 @@
+/pkg
+/bin
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/transform/transform.go b/Godeps/_workspace/src/code.google.com/p/go.text/transform/transform.go
new file mode 100644
index 0000000..157ee78
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/transform/transform.go
@@ -0,0 +1,616 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package transform provides reader and writer wrappers that transform the
+// bytes passing through as well as various transformations. Example
+// transformations provided by other packages include normalization and
+// conversion between character sets.
+package transform
+
+import (
+ "bytes"
+ "errors"
+ "io"
+ "unicode/utf8"
+)
+
+var (
+ // ErrShortDst means that the destination buffer was too short to
+ // receive all of the transformed bytes.
+ ErrShortDst = errors.New("transform: short destination buffer")
+
+ // ErrShortSrc means that the source buffer has insufficient data to
+ // complete the transformation.
+ ErrShortSrc = errors.New("transform: short source buffer")
+
+ // errInconsistentByteCount means that Transform returned success (nil
+ // error) but also returned nSrc inconsistent with the src argument.
+ errInconsistentByteCount = errors.New("transform: inconsistent byte count returned")
+
+ // errShortInternal means that an internal buffer is not large enough
+ // to make progress and the Transform operation must be aborted.
+ errShortInternal = errors.New("transform: short internal buffer")
+)
+
+// Transformer transforms bytes.
+type Transformer interface {
+ // Transform writes to dst the transformed bytes read from src, and
+ // returns the number of dst bytes written and src bytes read. The
+ // atEOF argument tells whether src represents the last bytes of the
+ // input.
+ //
+ // Callers should always process the nDst bytes produced and account
+ // for the nSrc bytes consumed before considering the error err.
+ //
+ // A nil error means that all of the transformed bytes (whether freshly
+ // transformed from src or left over from previous Transform calls)
+ // were written to dst. A nil error can be returned regardless of
+ // whether atEOF is true. If err is nil then nSrc must equal len(src);
+ // the converse is not necessarily true.
+ //
+ // ErrShortDst means that dst was too short to receive all of the
+ // transformed bytes. ErrShortSrc means that src had insufficient data
+ // to complete the transformation. If both conditions apply, then
+ // either error may be returned. Other than the error conditions listed
+ // here, implementations are free to report other errors that arise.
+ Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error)
+
+ // Reset resets the state and allows a Transformer to be reused.
+ Reset()
+}
+
+// NopResetter can be embedded by implementations of Transformer to add a nop
+// Reset method.
+type NopResetter struct{}
+
+// Reset implements the Reset method of the Transformer interface.
+func (NopResetter) Reset() {}
+
+// Reader wraps another io.Reader by transforming the bytes read.
+type Reader struct {
+ r io.Reader
+ t Transformer
+ err error
+
+ // dst[dst0:dst1] contains bytes that have been transformed by t but
+ // not yet copied out via Read.
+ dst []byte
+ dst0, dst1 int
+
+ // src[src0:src1] contains bytes that have been read from r but not
+ // yet transformed through t.
+ src []byte
+ src0, src1 int
+
+ // transformComplete is whether the transformation is complete,
+ // regardless of whether or not it was successful.
+ transformComplete bool
+}
+
+const defaultBufSize = 4096
+
+// NewReader returns a new Reader that wraps r by transforming the bytes read
+// via t. It calls Reset on t.
+func NewReader(r io.Reader, t Transformer) *Reader {
+ t.Reset()
+ return &Reader{
+ r: r,
+ t: t,
+ dst: make([]byte, defaultBufSize),
+ src: make([]byte, defaultBufSize),
+ }
+}
+
+// Read implements the io.Reader interface.
+func (r *Reader) Read(p []byte) (int, error) {
+ n, err := 0, error(nil)
+ for {
+ // Copy out any transformed bytes and return the final error if we are done.
+ if r.dst0 != r.dst1 {
+ n = copy(p, r.dst[r.dst0:r.dst1])
+ r.dst0 += n
+ if r.dst0 == r.dst1 && r.transformComplete {
+ return n, r.err
+ }
+ return n, nil
+ } else if r.transformComplete {
+ return 0, r.err
+ }
+
+ // Try to transform some source bytes, or to flush the transformer if we
+ // are out of source bytes. We do this even if r.r.Read returned an error.
+ // As the io.Reader documentation says, "process the n > 0 bytes returned
+ // before considering the error".
+ if r.src0 != r.src1 || r.err != nil {
+ r.dst0 = 0
+ r.dst1, n, err = r.t.Transform(r.dst, r.src[r.src0:r.src1], r.err == io.EOF)
+ r.src0 += n
+
+ switch {
+ case err == nil:
+ if r.src0 != r.src1 {
+ r.err = errInconsistentByteCount
+ }
+ // The Transform call was successful; we are complete if we
+ // cannot read more bytes into src.
+ r.transformComplete = r.err != nil
+ continue
+ case err == ErrShortDst && (r.dst1 != 0 || n != 0):
+ // Make room in dst by copying out, and try again.
+ continue
+ case err == ErrShortSrc && r.src1-r.src0 != len(r.src) && r.err == nil:
+ // Read more bytes into src via the code below, and try again.
+ default:
+ r.transformComplete = true
+ // The reader error (r.err) takes precedence over the
+ // transformer error (err) unless r.err is nil or io.EOF.
+ if r.err == nil || r.err == io.EOF {
+ r.err = err
+ }
+ continue
+ }
+ }
+
+ // Move any untransformed source bytes to the start of the buffer
+ // and read more bytes.
+ if r.src0 != 0 {
+ r.src0, r.src1 = 0, copy(r.src, r.src[r.src0:r.src1])
+ }
+ n, r.err = r.r.Read(r.src[r.src1:])
+ r.src1 += n
+ }
+}
+
+// TODO: implement ReadByte (and ReadRune??).
+
+// Writer wraps another io.Writer by transforming the bytes written.
+// The user needs to call Close to flush unwritten bytes that may
+// be buffered.
+type Writer struct {
+ w io.Writer
+ t Transformer
+ dst []byte
+
+ // src[:n] contains bytes that have not yet passed through t.
+ src []byte
+ n int
+}
+
+// NewWriter returns a new Writer that wraps w by transforming the bytes written
+// via t. It calls Reset on t.
+func NewWriter(w io.Writer, t Transformer) *Writer {
+ t.Reset()
+ return &Writer{
+ w: w,
+ t: t,
+ dst: make([]byte, defaultBufSize),
+ src: make([]byte, defaultBufSize),
+ }
+}
+
+// Write implements the io.Writer interface. If there are not enough
+// bytes available to complete a Transform, the bytes will be buffered
+// for the next write. Call Close to convert the remaining bytes.
+func (w *Writer) Write(data []byte) (n int, err error) {
+ src := data
+ if w.n > 0 {
+ // Append bytes from data to the last remainder.
+ // TODO: limit the amount copied on first try.
+ n = copy(w.src[w.n:], data)
+ w.n += n
+ src = w.src[:w.n]
+ }
+ for {
+ nDst, nSrc, err := w.t.Transform(w.dst, src, false)
+ if _, werr := w.w.Write(w.dst[:nDst]); werr != nil {
+ return n, werr
+ }
+ src = src[nSrc:]
+ if w.n > 0 && len(src) <= n {
+ // Enough bytes from w.src have been consumed. We make src point
+ // to data instead to reduce the copying.
+ w.n = 0
+ n -= len(src)
+ src = data[n:]
+ if n < len(data) && (err == nil || err == ErrShortSrc) {
+ continue
+ }
+ } else {
+ n += nSrc
+ }
+ switch {
+ case err == ErrShortDst && (nDst > 0 || nSrc > 0):
+ case err == ErrShortSrc && len(src) < len(w.src):
+ m := copy(w.src, src)
+ // If w.n > 0, bytes from data were already copied to w.src and n
+ // was already set to the number of bytes consumed.
+ if w.n == 0 {
+ n += m
+ }
+ w.n = m
+ return n, nil
+ case err == nil && w.n > 0:
+ return n, errInconsistentByteCount
+ default:
+ return n, err
+ }
+ }
+}
+
+// Close implements the io.Closer interface.
+func (w *Writer) Close() error {
+ for src := w.src[:w.n]; len(src) > 0; {
+ nDst, nSrc, err := w.t.Transform(w.dst, src, true)
+ if nDst == 0 {
+ return err
+ }
+ if _, werr := w.w.Write(w.dst[:nDst]); werr != nil {
+ return werr
+ }
+ if err != ErrShortDst {
+ return err
+ }
+ src = src[nSrc:]
+ }
+ return nil
+}
+
+type nop struct{ NopResetter }
+
+func (nop) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ n := copy(dst, src)
+ if n < len(src) {
+ err = ErrShortDst
+ }
+ return n, n, err
+}
+
+type discard struct{ NopResetter }
+
+func (discard) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ return 0, len(src), nil
+}
+
+var (
+ // Discard is a Transformer for which all Transform calls succeed
+ // by consuming all bytes and writing nothing.
+ Discard Transformer = discard{}
+
+ // Nop is a Transformer that copies src to dst.
+ Nop Transformer = nop{}
+)
+
+// chain is a sequence of links. A chain with N Transformers has N+1 links and
+// N+1 buffers. Of those N+1 buffers, the first and last are the src and dst
+// buffers given to chain.Transform and the middle N-1 buffers are intermediate
+// buffers owned by the chain. The i'th link transforms bytes from the i'th
+// buffer chain.link[i].b at read offset chain.link[i].p to the i+1'th buffer
+// chain.link[i+1].b at write offset chain.link[i+1].n, for i in [0, N).
+type chain struct {
+ link []link
+ err error
+ // errStart is the index at which the error occurred plus 1. Processing
+ // errStart at this level at the next call to Transform. As long as
+ // errStart > 0, chain will not consume any more source bytes.
+ errStart int
+}
+
+func (c *chain) fatalError(errIndex int, err error) {
+ if i := errIndex + 1; i > c.errStart {
+ c.errStart = i
+ c.err = err
+ }
+}
+
+type link struct {
+ t Transformer
+ // b[p:n] holds the bytes to be transformed by t.
+ b []byte
+ p int
+ n int
+}
+
+func (l *link) src() []byte {
+ return l.b[l.p:l.n]
+}
+
+func (l *link) dst() []byte {
+ return l.b[l.n:]
+}
+
+// Chain returns a Transformer that applies t in sequence.
+func Chain(t ...Transformer) Transformer {
+ if len(t) == 0 {
+ return nop{}
+ }
+ c := &chain{link: make([]link, len(t)+1)}
+ for i, tt := range t {
+ c.link[i].t = tt
+ }
+ // Allocate intermediate buffers.
+ b := make([][defaultBufSize]byte, len(t)-1)
+ for i := range b {
+ c.link[i+1].b = b[i][:]
+ }
+ return c
+}
+
+// Reset resets the state of Chain. It calls Reset on all the Transformers.
+func (c *chain) Reset() {
+ for i, l := range c.link {
+ if l.t != nil {
+ l.t.Reset()
+ }
+ c.link[i].p, c.link[i].n = 0, 0
+ }
+}
+
+// Transform applies the transformers of c in sequence.
+func (c *chain) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ // Set up src and dst in the chain.
+ srcL := &c.link[0]
+ dstL := &c.link[len(c.link)-1]
+ srcL.b, srcL.p, srcL.n = src, 0, len(src)
+ dstL.b, dstL.n = dst, 0
+ var lastFull, needProgress bool // for detecting progress
+
+ // i is the index of the next Transformer to apply, for i in [low, high].
+ // low is the lowest index for which c.link[low] may still produce bytes.
+ // high is the highest index for which c.link[high] has a Transformer.
+ // The error returned by Transform determines whether to increase or
+ // decrease i. We try to completely fill a buffer before converting it.
+ for low, i, high := c.errStart, c.errStart, len(c.link)-2; low <= i && i <= high; {
+ in, out := &c.link[i], &c.link[i+1]
+ nDst, nSrc, err0 := in.t.Transform(out.dst(), in.src(), atEOF && low == i)
+ out.n += nDst
+ in.p += nSrc
+ if i > 0 && in.p == in.n {
+ in.p, in.n = 0, 0
+ }
+ needProgress, lastFull = lastFull, false
+ switch err0 {
+ case ErrShortDst:
+ // Process the destination buffer next. Return if we are already
+ // at the high index.
+ if i == high {
+ return dstL.n, srcL.p, ErrShortDst
+ }
+ if out.n != 0 {
+ i++
+ // If the Transformer at the next index is not able to process any
+ // source bytes there is nothing that can be done to make progress
+ // and the bytes will remain unprocessed. lastFull is used to
+ // detect this and break out of the loop with a fatal error.
+ lastFull = true
+ continue
+ }
+ // The destination buffer was too small, but is completely empty.
+ // Return a fatal error as this transformation can never complete.
+ c.fatalError(i, errShortInternal)
+ case ErrShortSrc:
+ if i == 0 {
+ // Save ErrShortSrc in err. All other errors take precedence.
+ err = ErrShortSrc
+ break
+ }
+ // Source bytes were depleted before filling up the destination buffer.
+ // Verify we made some progress, move the remaining bytes to the errStart
+ // and try to get more source bytes.
+ if needProgress && nSrc == 0 || in.n-in.p == len(in.b) {
+ // There were not enough source bytes to proceed while the source
+ // buffer cannot hold any more bytes. Return a fatal error as this
+ // transformation can never complete.
+ c.fatalError(i, errShortInternal)
+ break
+ }
+ // in.b is an internal buffer and we can make progress.
+ in.p, in.n = 0, copy(in.b, in.src())
+ fallthrough
+ case nil:
+ // if i == low, we have depleted the bytes at index i or any lower levels.
+ // In that case we increase low and i. In all other cases we decrease i to
+ // fetch more bytes before proceeding to the next index.
+ if i > low {
+ i--
+ continue
+ }
+ default:
+ c.fatalError(i, err0)
+ }
+ // Exhausted level low or fatal error: increase low and continue
+ // to process the bytes accepted so far.
+ i++
+ low = i
+ }
+
+ // If c.errStart > 0, this means we found a fatal error. We will clear
+ // all upstream buffers. At this point, no more progress can be made
+ // downstream, as Transform would have bailed while handling ErrShortDst.
+ if c.errStart > 0 {
+ for i := 1; i < c.errStart; i++ {
+ c.link[i].p, c.link[i].n = 0, 0
+ }
+ err, c.errStart, c.err = c.err, 0, nil
+ }
+ return dstL.n, srcL.p, err
+}
+
+// RemoveFunc returns a Transformer that removes from the input all runes r for
+// which f(r) is true. Illegal bytes in the input are replaced by RuneError.
+func RemoveFunc(f func(r rune) bool) Transformer {
+ return removeF(f)
+}
+
+type removeF func(r rune) bool
+
+func (removeF) Reset() {}
+
+// Transform implements the Transformer interface.
+func (t removeF) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ for r, sz := rune(0), 0; len(src) > 0; src = src[sz:] {
+
+ if r = rune(src[0]); r < utf8.RuneSelf {
+ sz = 1
+ } else {
+ r, sz = utf8.DecodeRune(src)
+
+ if sz == 1 {
+ // Invalid rune.
+ if !atEOF && !utf8.FullRune(src) {
+ err = ErrShortSrc
+ break
+ }
+ // We replace illegal bytes with RuneError. Not doing so might
+ // otherwise turn a sequence of invalid UTF-8 into valid UTF-8.
+ // The resulting byte sequence may subsequently contain runes
+ // for which t(r) is true that were passed unnoticed.
+ if !t(r) {
+ if nDst+3 > len(dst) {
+ err = ErrShortDst
+ break
+ }
+ nDst += copy(dst[nDst:], "\uFFFD")
+ }
+ nSrc++
+ continue
+ }
+ }
+
+ if !t(r) {
+ if nDst+sz > len(dst) {
+ err = ErrShortDst
+ break
+ }
+ nDst += copy(dst[nDst:], src[:sz])
+ }
+ nSrc += sz
+ }
+ return
+}
+
+// grow returns a new []byte that is longer than b, and copies the first n bytes
+// of b to the start of the new slice.
+func grow(b []byte, n int) []byte {
+ m := len(b)
+ if m <= 256 {
+ m *= 2
+ } else {
+ m += m >> 1
+ }
+ buf := make([]byte, m)
+ copy(buf, b[:n])
+ return buf
+}
+
+const initialBufSize = 128
+
+// String returns a string with the result of converting s[:n] using t, where
+// n <= len(s). If err == nil, n will be len(s). It calls Reset on t.
+func String(t Transformer, s string) (result string, n int, err error) {
+ if s == "" {
+ return "", 0, nil
+ }
+
+ t.Reset()
+
+ // Allocate only once. Note that both dst and src escape when passed to
+ // Transform.
+ buf := [2 * initialBufSize]byte{}
+ dst := buf[:initialBufSize:initialBufSize]
+ src := buf[initialBufSize : 2*initialBufSize]
+
+ // Avoid allocation if the transformed string is identical to the original.
+ // After this loop, pDst will point to the furthest point in s for which it
+ // could be detected that t gives equal results, src[:nSrc] will
+// indicate the last processed chunk of s for which the output is not equal
+ // and dst[:nDst] will be the transform of this chunk.
+ var nDst, nSrc int
+ pDst := 0 // Used as index in both src and dst in this loop.
+ for {
+ n := copy(src, s[pDst:])
+ nDst, nSrc, err = t.Transform(dst, src[:n], pDst+n == len(s))
+
+ // Note 1: we will not enter the loop with pDst == len(s) and we will
+ // not end the loop with it either. So if nSrc is 0, this means there is
+ // some kind of error from which we cannot recover given the current
+ // buffer sizes. We will give up in this case.
+ // Note 2: it is not entirely correct to simply do a bytes.Equal as
+ // a Transformer may buffer internally. It will work in most cases,
+ // though, and no harm is done if it doesn't work.
+ // TODO: let transformers implement an optional Spanner interface, akin
+ // to norm's QuickSpan. This would even allow us to avoid any allocation.
+ if nSrc == 0 || !bytes.Equal(dst[:nDst], src[:nSrc]) {
+ break
+ }
+
+ if pDst += nDst; pDst == len(s) {
+ return s, pDst, nil
+ }
+ }
+
+ // Move the bytes seen so far to dst.
+ pSrc := pDst + nSrc
+ if pDst+nDst <= initialBufSize {
+ copy(dst[pDst:], dst[:nDst])
+ } else {
+ b := make([]byte, len(s)+nDst-nSrc)
+ copy(b[pDst:], dst[:nDst])
+ dst = b
+ }
+ copy(dst, s[:pDst])
+ pDst += nDst
+
+ if err != nil && err != ErrShortDst && err != ErrShortSrc {
+ return string(dst[:pDst]), pSrc, err
+ }
+
+ // Complete the string with the remainder.
+ for {
+ n := copy(src, s[pSrc:])
+ nDst, nSrc, err = t.Transform(dst[pDst:], src[:n], pSrc+n == len(s))
+ pDst += nDst
+ pSrc += nSrc
+
+ switch err {
+ case nil:
+ if pSrc == len(s) {
+ return string(dst[:pDst]), pSrc, nil
+ }
+ case ErrShortDst:
+ // Do not grow as long as we can make progress. This may avoid
+ // excessive allocations.
+ if nDst == 0 {
+ dst = grow(dst, pDst)
+ }
+ case ErrShortSrc:
+ if nSrc == 0 {
+ src = grow(src, 0)
+ }
+ default:
+ return string(dst[:pDst]), pSrc, err
+ }
+ }
+}
+
+// Bytes returns a new byte slice with the result of converting b[:n] using t,
+// where n <= len(b). If err == nil, n will be len(b). It calls Reset on t.
+func Bytes(t Transformer, b []byte) (result []byte, n int, err error) {
+ t.Reset()
+ dst := make([]byte, len(b))
+ pDst, pSrc := 0, 0
+ for {
+ nDst, nSrc, err := t.Transform(dst[pDst:], b[pSrc:], true)
+ pDst += nDst
+ pSrc += nSrc
+ if err != ErrShortDst {
+ return dst[:pDst], pSrc, err
+ }
+
+ // Grow the destination buffer, but do not grow as long as we can make
+ // progress. This may avoid excessive allocations.
+ if nDst == 0 {
+ dst = grow(dst, pDst)
+ }
+ }
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/Makefile b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/Makefile
new file mode 100644
index 0000000..b4f5d35
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/Makefile
@@ -0,0 +1,23 @@
+# Copyright 2011 The Go Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file.
+
+maketables: maketables.go triegen.go
+ go build $^
+
+normregtest: normregtest.go
+ go build $^
+
+tables: maketables
+ ./maketables > tables.go
+ gofmt -w tables.go
+
+# Downloads from www.unicode.org, so not part
+# of standard test scripts.
+test: testtables regtest
+
+testtables: maketables
+ ./maketables -test > data_test.go && go test -tags=test
+
+regtest: normregtest
+ ./normregtest
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/composition.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/composition.go
new file mode 100644
index 0000000..d17b278
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/composition.go
@@ -0,0 +1,514 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package norm
+
+import "unicode/utf8"
+
+const (
+ maxNonStarters = 30
+ // The maximum number of characters needed for a buffer is
+	// maxNonStarters + 1 for the starter + 1 for the CGJ
+ maxBufferSize = maxNonStarters + 2
+ maxNFCExpansion = 3 // NFC(0x1D160)
+ maxNFKCExpansion = 18 // NFKC(0xFDFA)
+
+ maxByteBufferSize = utf8.UTFMax * maxBufferSize // 128
+)
+
+// ssState is used for reporting the segment state after inserting a rune.
+// It is returned by streamSafe.next.
+type ssState int
+
+const (
+ // Indicates a rune was successfully added to the segment.
+ ssSuccess ssState = iota
+ // Indicates a rune starts a new segment and should not be added.
+ ssStarter
+ // Indicates a rune caused a segment overflow and a CGJ should be inserted.
+ ssOverflow
+)
+
+// streamSafe implements the policy of when a CGJ should be inserted.
+type streamSafe uint8
+
+// mkStreamSafe is a shorthand for declaring a streamSafe var and calling
+// first on it.
+func mkStreamSafe(p Properties) streamSafe {
+ return streamSafe(p.nTrailingNonStarters())
+}
+
+// first inserts the first rune of a segment.
+func (ss *streamSafe) first(p Properties) {
+ if *ss != 0 {
+ panic("!= 0")
+ }
+ *ss = streamSafe(p.nTrailingNonStarters())
+}
+
+// next returns a ssState value to indicate whether a rune represented by p
+// can be inserted.
+func (ss *streamSafe) next(p Properties) ssState {
+ if *ss > maxNonStarters {
+ panic("streamSafe was not reset")
+ }
+ n := p.nLeadingNonStarters()
+ if *ss += streamSafe(n); *ss > maxNonStarters {
+ *ss = 0
+ return ssOverflow
+ }
+ // The Stream-Safe Text Processing prescribes that the counting can stop
+ // as soon as a starter is encountered. However, there are some starters,
+ // like Jamo V and T, that can combine with other runes, leaving their
+ // successive non-starters appended to the previous, possibly causing an
+ // overflow. We will therefore consider any rune with a non-zero nLead to
+// be a non-starter. Note that it always holds that if nLead > 0 then
+ // nLead == nTrail.
+ if n == 0 {
+ *ss = 0
+ return ssStarter
+ }
+ return ssSuccess
+}
+
+// backwards is used for checking for overflow and segment starts
+// when traversing a string backwards. Users do not need to call first
+// for the first rune. The state of the streamSafe retains the count of
+// the non-starters loaded.
+func (ss *streamSafe) backwards(p Properties) ssState {
+ if *ss > maxNonStarters {
+ panic("streamSafe was not reset")
+ }
+ c := *ss + streamSafe(p.nTrailingNonStarters())
+ if c > maxNonStarters {
+ return ssOverflow
+ }
+ *ss = c
+ if p.nLeadingNonStarters() == 0 {
+ return ssStarter
+ }
+ return ssSuccess
+}
+
+func (ss streamSafe) isMax() bool {
+ return ss == maxNonStarters
+}
+
+// GraphemeJoiner is inserted after maxNonStarters non-starter runes.
+const GraphemeJoiner = "\u034F"
+
+// reorderBuffer is used to normalize a single segment. Characters inserted with
+// insert are decomposed and reordered based on CCC. The compose method can
+// be used to recombine characters. Note that the byte buffer does not hold
+// the UTF-8 characters in order. Only the rune array is maintained in sorted
+// order. flush writes the resulting segment to a byte array.
+type reorderBuffer struct {
+ rune [maxBufferSize]Properties // Per character info.
+ byte [maxByteBufferSize]byte // UTF-8 buffer. Referenced by runeInfo.pos.
+	nbyte  uint8                     // Number of bytes.
+ ss streamSafe // For limiting length of non-starter sequence.
+ nrune int // Number of runeInfos.
+ f formInfo
+
+ src input
+ nsrc int
+ tmpBytes input
+
+ out []byte
+ flushF func(*reorderBuffer) bool
+}
+
+func (rb *reorderBuffer) init(f Form, src []byte) {
+ rb.f = *formTable[f]
+ rb.src.setBytes(src)
+ rb.nsrc = len(src)
+ rb.ss = 0
+}
+
+func (rb *reorderBuffer) initString(f Form, src string) {
+ rb.f = *formTable[f]
+ rb.src.setString(src)
+ rb.nsrc = len(src)
+ rb.ss = 0
+}
+
+func (rb *reorderBuffer) setFlusher(out []byte, f func(*reorderBuffer) bool) {
+ rb.out = out
+ rb.flushF = f
+}
+
+// reset discards all characters from the buffer.
+func (rb *reorderBuffer) reset() {
+ rb.nrune = 0
+ rb.nbyte = 0
+ rb.ss = 0
+}
+
+func (rb *reorderBuffer) doFlush() bool {
+ if rb.f.composing {
+ rb.compose()
+ }
+ res := rb.flushF(rb)
+ rb.reset()
+ return res
+}
+
+// appendFlush appends the normalized segment to rb.out.
+func appendFlush(rb *reorderBuffer) bool {
+ for i := 0; i < rb.nrune; i++ {
+ start := rb.rune[i].pos
+ end := start + rb.rune[i].size
+ rb.out = append(rb.out, rb.byte[start:end]...)
+ }
+ return true
+}
+
+// flush appends the normalized segment to out and resets rb.
+func (rb *reorderBuffer) flush(out []byte) []byte {
+ for i := 0; i < rb.nrune; i++ {
+ start := rb.rune[i].pos
+ end := start + rb.rune[i].size
+ out = append(out, rb.byte[start:end]...)
+ }
+ rb.reset()
+ return out
+}
+
+// flushCopy copies the normalized segment to buf and resets rb.
+// It returns the number of bytes written to buf.
+func (rb *reorderBuffer) flushCopy(buf []byte) int {
+ p := 0
+ for i := 0; i < rb.nrune; i++ {
+ runep := rb.rune[i]
+ p += copy(buf[p:], rb.byte[runep.pos:runep.pos+runep.size])
+ }
+ rb.reset()
+ return p
+}
+
+// insertOrdered inserts a rune in the buffer, ordered by Canonical Combining Class.
+// It returns false if the buffer is not large enough to hold the rune.
+// It is used internally by insert and insertString only.
+func (rb *reorderBuffer) insertOrdered(info Properties) {
+ n := rb.nrune
+ b := rb.rune[:]
+ cc := info.ccc
+ if cc > 0 {
+ // Find insertion position + move elements to make room.
+ for ; n > 0; n-- {
+ if b[n-1].ccc <= cc {
+ break
+ }
+ b[n] = b[n-1]
+ }
+ }
+ rb.nrune += 1
+ pos := uint8(rb.nbyte)
+ rb.nbyte += utf8.UTFMax
+ info.pos = pos
+ b[n] = info
+}
+
+// insertErr is an error code returned by insert. Using this type instead
+// of error improves performance up to 20% for many of the benchmarks.
+type insertErr int
+
+const (
+ iSuccess insertErr = -iota
+ iShortDst
+ iShortSrc
+)
+
+// insertFlush inserts the given rune in the buffer ordered by CCC.
+// If a decomposition with multiple segments is encountered, the leading
+// ones are flushed.
+// It returns a non-zero error code if the rune was not inserted.
+func (rb *reorderBuffer) insertFlush(src input, i int, info Properties) insertErr {
+ if rune := src.hangul(i); rune != 0 {
+ rb.decomposeHangul(rune)
+ return iSuccess
+ }
+ if info.hasDecomposition() {
+ return rb.insertDecomposed(info.Decomposition())
+ }
+ rb.insertSingle(src, i, info)
+ return iSuccess
+}
+
+// insertUnsafe inserts the given rune in the buffer ordered by CCC.
+// It is assumed there is sufficient space to hold the runes. It is the
+// responsibility of the caller to ensure this. This can be done by checking
+// the state returned by the streamSafe type.
+func (rb *reorderBuffer) insertUnsafe(src input, i int, info Properties) {
+ if rune := src.hangul(i); rune != 0 {
+ rb.decomposeHangul(rune)
+ }
+ if info.hasDecomposition() {
+ // TODO: inline.
+ rb.insertDecomposed(info.Decomposition())
+ } else {
+ rb.insertSingle(src, i, info)
+ }
+}
+
+// insertDecomposed inserts an entry into the reorderBuffer for each rune
+// in dcomp. dcomp must be a sequence of decomposed UTF-8-encoded runes.
+// It flushes the buffer on each new segment start.
+func (rb *reorderBuffer) insertDecomposed(dcomp []byte) insertErr {
+ rb.tmpBytes.setBytes(dcomp)
+ for i := 0; i < len(dcomp); {
+ info := rb.f.info(rb.tmpBytes, i)
+ if info.BoundaryBefore() && rb.nrune > 0 && !rb.doFlush() {
+ return iShortDst
+ }
+ i += copy(rb.byte[rb.nbyte:], dcomp[i:i+int(info.size)])
+ rb.insertOrdered(info)
+ }
+ return iSuccess
+}
+
+// insertSingle inserts an entry in the reorderBuffer for the rune at
+// position i. info is the runeInfo for the rune at position i.
+func (rb *reorderBuffer) insertSingle(src input, i int, info Properties) {
+ src.copySlice(rb.byte[rb.nbyte:], i, i+int(info.size))
+ rb.insertOrdered(info)
+}
+
+// insertCGJ inserts a Combining Grapheme Joiner (0x034f) into rb.
+func (rb *reorderBuffer) insertCGJ() {
+ rb.insertSingle(input{str: GraphemeJoiner}, 0, Properties{size: uint8(len(GraphemeJoiner))})
+}
+
+// appendRune inserts a rune at the end of the buffer. It is used for Hangul.
+func (rb *reorderBuffer) appendRune(r rune) {
+ bn := rb.nbyte
+ sz := utf8.EncodeRune(rb.byte[bn:], rune(r))
+ rb.nbyte += utf8.UTFMax
+ rb.rune[rb.nrune] = Properties{pos: bn, size: uint8(sz)}
+ rb.nrune++
+}
+
+// assignRune sets a rune at position pos. It is used for Hangul and recomposition.
+func (rb *reorderBuffer) assignRune(pos int, r rune) {
+ bn := rb.rune[pos].pos
+ sz := utf8.EncodeRune(rb.byte[bn:], rune(r))
+ rb.rune[pos] = Properties{pos: bn, size: uint8(sz)}
+}
+
+// runeAt returns the rune at position n. It is used for Hangul and recomposition.
+func (rb *reorderBuffer) runeAt(n int) rune {
+ inf := rb.rune[n]
+ r, _ := utf8.DecodeRune(rb.byte[inf.pos : inf.pos+inf.size])
+ return r
+}
+
+// bytesAt returns the UTF-8 encoding of the rune at position n.
+// It is used for Hangul and recomposition.
+func (rb *reorderBuffer) bytesAt(n int) []byte {
+ inf := rb.rune[n]
+ return rb.byte[inf.pos : int(inf.pos)+int(inf.size)]
+}
+
+// For Hangul we combine algorithmically, instead of using tables.
+const (
+ hangulBase = 0xAC00 // UTF-8(hangulBase) -> EA B0 80
+ hangulBase0 = 0xEA
+ hangulBase1 = 0xB0
+ hangulBase2 = 0x80
+
+ hangulEnd = hangulBase + jamoLVTCount // UTF-8(0xD7A4) -> ED 9E A4
+ hangulEnd0 = 0xED
+ hangulEnd1 = 0x9E
+ hangulEnd2 = 0xA4
+
+ jamoLBase = 0x1100 // UTF-8(jamoLBase) -> E1 84 00
+ jamoLBase0 = 0xE1
+ jamoLBase1 = 0x84
+ jamoLEnd = 0x1113
+ jamoVBase = 0x1161
+ jamoVEnd = 0x1176
+ jamoTBase = 0x11A7
+ jamoTEnd = 0x11C3
+
+ jamoTCount = 28
+ jamoVCount = 21
+ jamoVTCount = 21 * 28
+ jamoLVTCount = 19 * 21 * 28
+)
+
+const hangulUTF8Size = 3
+
+func isHangul(b []byte) bool {
+ if len(b) < hangulUTF8Size {
+ return false
+ }
+ b0 := b[0]
+ if b0 < hangulBase0 {
+ return false
+ }
+ b1 := b[1]
+ switch {
+ case b0 == hangulBase0:
+ return b1 >= hangulBase1
+ case b0 < hangulEnd0:
+ return true
+ case b0 > hangulEnd0:
+ return false
+ case b1 < hangulEnd1:
+ return true
+ }
+ return b1 == hangulEnd1 && b[2] < hangulEnd2
+}
+
+func isHangulString(b string) bool {
+ if len(b) < hangulUTF8Size {
+ return false
+ }
+ b0 := b[0]
+ if b0 < hangulBase0 {
+ return false
+ }
+ b1 := b[1]
+ switch {
+ case b0 == hangulBase0:
+ return b1 >= hangulBase1
+ case b0 < hangulEnd0:
+ return true
+ case b0 > hangulEnd0:
+ return false
+ case b1 < hangulEnd1:
+ return true
+ }
+ return b1 == hangulEnd1 && b[2] < hangulEnd2
+}
+
+// Caller must ensure len(b) >= 2.
+func isJamoVT(b []byte) bool {
+ // True if (rune & 0xff00) == jamoLBase
+ return b[0] == jamoLBase0 && (b[1]&0xFC) == jamoLBase1
+}
+
+func isHangulWithoutJamoT(b []byte) bool {
+ c, _ := utf8.DecodeRune(b)
+ c -= hangulBase
+ return c < jamoLVTCount && c%jamoTCount == 0
+}
+
+// decomposeHangul writes the decomposed Hangul to buf and returns the number
+// of bytes written. len(buf) should be at least 9.
+func decomposeHangul(buf []byte, r rune) int {
+ const JamoUTF8Len = 3
+ r -= hangulBase
+ x := r % jamoTCount
+ r /= jamoTCount
+ utf8.EncodeRune(buf, jamoLBase+r/jamoVCount)
+ utf8.EncodeRune(buf[JamoUTF8Len:], jamoVBase+r%jamoVCount)
+ if x != 0 {
+ utf8.EncodeRune(buf[2*JamoUTF8Len:], jamoTBase+x)
+ return 3 * JamoUTF8Len
+ }
+ return 2 * JamoUTF8Len
+}
+
+// decomposeHangul algorithmically decomposes a Hangul rune into
+// its Jamo components.
+// See http://unicode.org/reports/tr15/#Hangul for details on decomposing Hangul.
+func (rb *reorderBuffer) decomposeHangul(r rune) {
+ r -= hangulBase
+ x := r % jamoTCount
+ r /= jamoTCount
+ rb.appendRune(jamoLBase + r/jamoVCount)
+ rb.appendRune(jamoVBase + r%jamoVCount)
+ if x != 0 {
+ rb.appendRune(jamoTBase + x)
+ }
+}
+
+// combineHangul algorithmically combines Jamo character components into Hangul.
+// See http://unicode.org/reports/tr15/#Hangul for details on combining Hangul.
+func (rb *reorderBuffer) combineHangul(s, i, k int) {
+ b := rb.rune[:]
+ bn := rb.nrune
+ for ; i < bn; i++ {
+ cccB := b[k-1].ccc
+ cccC := b[i].ccc
+ if cccB == 0 {
+ s = k - 1
+ }
+ if s != k-1 && cccB >= cccC {
+ // b[i] is blocked by greater-equal cccX below it
+ b[k] = b[i]
+ k++
+ } else {
+ l := rb.runeAt(s) // also used to compare to hangulBase
+ v := rb.runeAt(i) // also used to compare to jamoT
+ switch {
+ case jamoLBase <= l && l < jamoLEnd &&
+ jamoVBase <= v && v < jamoVEnd:
+ // 11xx plus 116x to LV
+ rb.assignRune(s, hangulBase+
+ (l-jamoLBase)*jamoVTCount+(v-jamoVBase)*jamoTCount)
+ case hangulBase <= l && l < hangulEnd &&
+ jamoTBase < v && v < jamoTEnd &&
+ ((l-hangulBase)%jamoTCount) == 0:
+ // ACxx plus 11Ax to LVT
+ rb.assignRune(s, l+v-jamoTBase)
+ default:
+ b[k] = b[i]
+ k++
+ }
+ }
+ }
+ rb.nrune = k
+}
+
+// compose recombines the runes in the buffer.
+// It should only be used to recompose a single segment, as it will not
+// handle alternations between Hangul and non-Hangul characters correctly.
+func (rb *reorderBuffer) compose() {
+ // UAX #15, section X5 , including Corrigendum #5
+ // "In any character sequence beginning with starter S, a character C is
+ // blocked from S if and only if there is some character B between S
+ // and C, and either B is a starter or it has the same or higher
+ // combining class as C."
+ bn := rb.nrune
+ if bn == 0 {
+ return
+ }
+ k := 1
+ b := rb.rune[:]
+ for s, i := 0, 1; i < bn; i++ {
+ if isJamoVT(rb.bytesAt(i)) {
+ // Redo from start in Hangul mode. Necessary to support
+ // U+320E..U+321E in NFKC mode.
+ rb.combineHangul(s, i, k)
+ return
+ }
+ ii := b[i]
+ // We can only use combineForward as a filter if we later
+ // get the info for the combined character. This is more
+ // expensive than using the filter. Using combinesBackward()
+ // is safe.
+ if ii.combinesBackward() {
+ cccB := b[k-1].ccc
+ cccC := ii.ccc
+ blocked := false // b[i] blocked by starter or greater or equal CCC?
+ if cccB == 0 {
+ s = k - 1
+ } else {
+ blocked = s != k-1 && cccB >= cccC
+ }
+ if !blocked {
+ combined := combine(rb.runeAt(s), rb.runeAt(i))
+ if combined != 0 {
+ rb.assignRune(s, combined)
+ continue
+ }
+ }
+ }
+ b[k] = b[i]
+ k++
+ }
+ rb.nrune = k
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/forminfo.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/forminfo.go
new file mode 100644
index 0000000..15a67c6
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/forminfo.go
@@ -0,0 +1,256 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package norm
+
+// This file contains Form-specific logic and wrappers for data in tables.go.
+
+// Rune info is stored in a separate trie per composing form. A composing form
+// and its corresponding decomposing form share the same trie. Each trie maps
+// a rune to a uint16. The values take two forms. For v >= 0x8000:
+// bits
+// 15: 1 (inverse of NFD_QD bit of qcInfo)
+// 13..7: qcInfo (see below). isYesD is always true (no decompostion).
+// 6..0: ccc (compressed CCC value).
+// For v < 0x8000, the respective rune has a decomposition and v is an index
+// into a byte array of UTF-8 decomposition sequences and additional info and
+// has the form:
+// * [ []]
+// The header contains the number of bytes in the decomposition (excluding this
+// length byte). The two most significant bits of this length byte correspond
+// to bit 5 and 4 of qcInfo (see below). The byte sequence itself starts at v+1.
+// The byte sequence is followed by a trailing and leading CCC if the values
+// for these are not zero. The value of v determines which ccc are appended
+// to the sequences. For v < firstCCC, there are none, for v >= firstCCC,
+// the sequence is followed by a trailing ccc, and for v >= firstLeadingCC
+// there is an additional leading ccc. The value of tccc itself is the
+// trailing CCC shifted left 2 bits. The two least-significant bits of tccc
+// are the number of trailing non-starters.
+
+const (
+ qcInfoMask = 0x3F // to clear all but the relevant bits in a qcInfo
+ headerLenMask = 0x3F // extract the length value from the header byte
+ headerFlagsMask = 0xC0 // extract the qcInfo bits from the header byte
+)
+
+// Properties provides access to normalization properties of a rune.
+type Properties struct {
+ pos uint8 // start position in reorderBuffer; used in composition.go
+ size uint8 // length of UTF-8 encoding of this rune
+ ccc uint8 // leading canonical combining class (ccc if not decomposition)
+ tccc uint8 // trailing canonical combining class (ccc if not decomposition)
+ nLead uint8 // number of leading non-starters.
+ flags qcInfo // quick check flags
+ index uint16
+}
+
+// functions dispatchable per form
+type lookupFunc func(b input, i int) Properties
+
+// formInfo holds Form-specific functions and tables.
+type formInfo struct {
+ form Form
+ composing, compatibility bool // form type
+ info lookupFunc
+ nextMain iterFunc
+}
+
+var formTable []*formInfo
+
+func init() {
+ formTable = make([]*formInfo, 4)
+
+ for i := range formTable {
+ f := &formInfo{}
+ formTable[i] = f
+ f.form = Form(i)
+ if Form(i) == NFKD || Form(i) == NFKC {
+ f.compatibility = true
+ f.info = lookupInfoNFKC
+ } else {
+ f.info = lookupInfoNFC
+ }
+ f.nextMain = nextDecomposed
+ if Form(i) == NFC || Form(i) == NFKC {
+ f.nextMain = nextComposed
+ f.composing = true
+ }
+ }
+}
+
+// We do not distinguish between boundaries for NFC, NFD, etc. to avoid
+// unexpected behavior for the user. For example, in NFD, there is a boundary
+// after 'a'. However, 'a' might combine with modifiers, so from the application's
+// perspective it is not a good boundary. We will therefore always use the
+// boundaries for the combining variants.
+
+// BoundaryBefore returns true if this rune starts a new segment and
+// cannot combine with any rune on the left.
+func (p Properties) BoundaryBefore() bool {
+ if p.ccc == 0 && !p.combinesBackward() {
+ return true
+ }
+ // We assume that the CCC of the first character in a decomposition
+ // is always non-zero if different from info.ccc and that we can return
+ // false at this point. This is verified by maketables.
+ return false
+}
+
+// BoundaryAfter returns true if runes cannot combine with or otherwise
+// interact with this or previous runes.
+func (p Properties) BoundaryAfter() bool {
+ // TODO: loosen these conditions.
+ return p.isInert()
+}
+
+// We pack quick check data in 4 bits:
+// 5: Combines forward (0 == false, 1 == true)
+// 4..3: NFC_QC Yes(00), No (10), or Maybe (11)
+// 2: NFD_QC Yes (0) or No (1). No also means there is a decomposition.
+// 1..0: Number of trailing non-starters.
+//
+// When all 4 bits are zero, the character is inert, meaning it is never
+// influenced by normalization.
+type qcInfo uint8
+
+func (p Properties) isYesC() bool { return p.flags&0x10 == 0 }
+func (p Properties) isYesD() bool { return p.flags&0x4 == 0 }
+
+func (p Properties) combinesForward() bool { return p.flags&0x20 != 0 }
+func (p Properties) combinesBackward() bool { return p.flags&0x8 != 0 } // == isMaybe
+func (p Properties) hasDecomposition() bool { return p.flags&0x4 != 0 } // == isNoD
+
+func (p Properties) isInert() bool {
+ return p.flags&qcInfoMask == 0 && p.ccc == 0
+}
+
+func (p Properties) multiSegment() bool {
+ return p.index >= firstMulti && p.index < endMulti
+}
+
+func (p Properties) nLeadingNonStarters() uint8 {
+ return p.nLead
+}
+
+func (p Properties) nTrailingNonStarters() uint8 {
+ return uint8(p.flags & 0x03)
+}
+
+// Decomposition returns the decomposition for the underlying rune
+// or nil if there is none.
+func (p Properties) Decomposition() []byte {
+ // TODO: create the decomposition for Hangul?
+ if p.index == 0 {
+ return nil
+ }
+ i := p.index
+ n := decomps[i] & headerLenMask
+ i++
+ return decomps[i : i+uint16(n)]
+}
+
+// Size returns the length of UTF-8 encoding of the rune.
+func (p Properties) Size() int {
+ return int(p.size)
+}
+
+// CCC returns the canonical combining class of the underlying rune.
+func (p Properties) CCC() uint8 {
+ if p.index >= firstCCCZeroExcept {
+ return 0
+ }
+ return ccc[p.ccc]
+}
+
+// LeadCCC returns the CCC of the first rune in the decomposition.
+// If there is no decomposition, LeadCCC equals CCC.
+func (p Properties) LeadCCC() uint8 {
+ return ccc[p.ccc]
+}
+
+// TrailCCC returns the CCC of the last rune in the decomposition.
+// If there is no decomposition, TrailCCC equals CCC.
+func (p Properties) TrailCCC() uint8 {
+ return ccc[p.tccc]
+}
+
+// Recomposition
+// We use 32-bit keys instead of 64-bit for the two codepoint keys.
+// This clips off the bits of three entries, but we know this will not
+// result in a collision. In the unlikely event that changes to
+// UnicodeData.txt introduce collisions, the compiler will catch it.
+// Note that the recomposition map for NFC and NFKC are identical.
+
+// combine returns the combined rune or 0 if it doesn't exist.
+func combine(a, b rune) rune {
+ key := uint32(uint16(a))<<16 + uint32(uint16(b))
+ return recompMap[key]
+}
+
+func lookupInfoNFC(b input, i int) Properties {
+ v, sz := b.charinfoNFC(i)
+ return compInfo(v, sz)
+}
+
+func lookupInfoNFKC(b input, i int) Properties {
+ v, sz := b.charinfoNFKC(i)
+ return compInfo(v, sz)
+}
+
+// Properties returns properties for the first rune in s.
+func (f Form) Properties(s []byte) Properties {
+ if f == NFC || f == NFD {
+ return compInfo(nfcData.lookup(s))
+ }
+ return compInfo(nfkcData.lookup(s))
+}
+
+// PropertiesString returns properties for the first rune in s.
+func (f Form) PropertiesString(s string) Properties {
+ if f == NFC || f == NFD {
+ return compInfo(nfcData.lookupString(s))
+ }
+ return compInfo(nfkcData.lookupString(s))
+}
+
+// compInfo converts the information contained in v and sz
+// to a Properties. See the comment at the top of the file
+// for more information on the format.
+func compInfo(v uint16, sz int) Properties {
+ if v == 0 {
+ return Properties{size: uint8(sz)}
+ } else if v >= 0x8000 {
+ p := Properties{
+ size: uint8(sz),
+ ccc: uint8(v),
+ tccc: uint8(v),
+ flags: qcInfo(v >> 8),
+ }
+ if p.ccc > 0 || p.combinesBackward() {
+ p.nLead = uint8(p.flags & 0x3)
+ }
+ return p
+ }
+ // has decomposition
+ h := decomps[v]
+ f := (qcInfo(h&headerFlagsMask) >> 2) | 0x4
+ p := Properties{size: uint8(sz), flags: f, index: v}
+ if v >= firstCCC {
+ v += uint16(h&headerLenMask) + 1
+ c := decomps[v]
+ p.tccc = c >> 2
+ p.flags |= qcInfo(c & 0x3)
+ if v >= firstLeadingCCC {
+ p.nLead = c & 0x3
+ if v >= firstStarterWithNLead {
+ // We were tricked. Remove the decomposition.
+ p.flags &= 0x03
+ p.index = 0
+ return p
+ }
+ p.ccc = decomps[v+1]
+ }
+ }
+ return p
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/input.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/input.go
new file mode 100644
index 0000000..045d4cc
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/input.go
@@ -0,0 +1,105 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package norm
+
+import "unicode/utf8"
+
+type input struct {
+ str string
+ bytes []byte
+}
+
+func inputBytes(str []byte) input {
+ return input{bytes: str}
+}
+
+func inputString(str string) input {
+ return input{str: str}
+}
+
+func (in *input) setBytes(str []byte) {
+ in.str = ""
+ in.bytes = str
+}
+
+func (in *input) setString(str string) {
+ in.str = str
+ in.bytes = nil
+}
+
+func (in *input) _byte(p int) byte {
+ if in.bytes == nil {
+ return in.str[p]
+ }
+ return in.bytes[p]
+}
+
+func (in *input) skipASCII(p, max int) int {
+ if in.bytes == nil {
+ for ; p < max && in.str[p] < utf8.RuneSelf; p++ {
+ }
+ } else {
+ for ; p < max && in.bytes[p] < utf8.RuneSelf; p++ {
+ }
+ }
+ return p
+}
+
+func (in *input) skipContinuationBytes(p int) int {
+ if in.bytes == nil {
+ for ; p < len(in.str) && !utf8.RuneStart(in.str[p]); p++ {
+ }
+ } else {
+ for ; p < len(in.bytes) && !utf8.RuneStart(in.bytes[p]); p++ {
+ }
+ }
+ return p
+}
+
+func (in *input) appendSlice(buf []byte, b, e int) []byte {
+ if in.bytes != nil {
+ return append(buf, in.bytes[b:e]...)
+ }
+ for i := b; i < e; i++ {
+ buf = append(buf, in.str[i])
+ }
+ return buf
+}
+
+func (in *input) copySlice(buf []byte, b, e int) int {
+ if in.bytes == nil {
+ return copy(buf, in.str[b:e])
+ }
+ return copy(buf, in.bytes[b:e])
+}
+
+func (in *input) charinfoNFC(p int) (uint16, int) {
+ if in.bytes == nil {
+ return nfcData.lookupString(in.str[p:])
+ }
+ return nfcData.lookup(in.bytes[p:])
+}
+
+func (in *input) charinfoNFKC(p int) (uint16, int) {
+ if in.bytes == nil {
+ return nfkcData.lookupString(in.str[p:])
+ }
+ return nfkcData.lookup(in.bytes[p:])
+}
+
+func (in *input) hangul(p int) (r rune) {
+ if in.bytes == nil {
+ if !isHangulString(in.str[p:]) {
+ return 0
+ }
+ r, _ = utf8.DecodeRuneInString(in.str[p:])
+ } else {
+ if !isHangul(in.bytes[p:]) {
+ return 0
+ }
+ r, _ = utf8.DecodeRune(in.bytes[p:])
+ }
+ return r
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/iter.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/iter.go
new file mode 100644
index 0000000..0a42a72
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/iter.go
@@ -0,0 +1,450 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package norm
+
+import (
+ "fmt"
+ "unicode/utf8"
+)
+
+// MaxSegmentSize is the maximum size of a byte buffer needed to consider any
+// sequence of starter and non-starter runes for the purpose of normalization.
+const MaxSegmentSize = maxByteBufferSize
+
+// An Iter iterates over a string or byte slice, while normalizing it
+// to a given Form.
+type Iter struct {
+ rb reorderBuffer
+ buf [maxByteBufferSize]byte
+ info Properties // first character saved from previous iteration
+ next iterFunc // implementation of next depends on form
+ asciiF iterFunc
+
+ p int // current position in input source
+ multiSeg []byte // remainder of multi-segment decomposition
+}
+
+type iterFunc func(*Iter) []byte
+
+// Init initializes i to iterate over src after normalizing it to Form f.
+func (i *Iter) Init(f Form, src []byte) {
+ i.p = 0
+ if len(src) == 0 {
+ i.setDone()
+ i.rb.nsrc = 0
+ return
+ }
+ i.multiSeg = nil
+ i.rb.init(f, src)
+ i.next = i.rb.f.nextMain
+ i.asciiF = nextASCIIBytes
+ i.info = i.rb.f.info(i.rb.src, i.p)
+}
+
+// InitString initializes i to iterate over src after normalizing it to Form f.
+func (i *Iter) InitString(f Form, src string) {
+ i.p = 0
+ if len(src) == 0 {
+ i.setDone()
+ i.rb.nsrc = 0
+ return
+ }
+ i.multiSeg = nil
+ i.rb.initString(f, src)
+ i.next = i.rb.f.nextMain
+ i.asciiF = nextASCIIString
+ i.info = i.rb.f.info(i.rb.src, i.p)
+}
+
+// Seek sets the segment to be returned by the next call to Next to start
+// at position p. It is the responsibility of the caller to set p to the
+// start of a UTF8 rune.
+func (i *Iter) Seek(offset int64, whence int) (int64, error) {
+ var abs int64
+ switch whence {
+ case 0:
+ abs = offset
+ case 1:
+ abs = int64(i.p) + offset
+ case 2:
+ abs = int64(i.rb.nsrc) + offset
+ default:
+ return 0, fmt.Errorf("norm: invalid whence")
+ }
+ if abs < 0 {
+ return 0, fmt.Errorf("norm: negative position")
+ }
+ if int(abs) >= i.rb.nsrc {
+ i.setDone()
+ return int64(i.p), nil
+ }
+ i.p = int(abs)
+ i.multiSeg = nil
+ i.next = i.rb.f.nextMain
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ return abs, nil
+}
+
+// returnSlice returns a slice of the underlying input type as a byte slice.
+// If the underlying is of type []byte, it will simply return a slice.
+// If the underlying is of type string, it will copy the slice to the buffer
+// and return that.
+func (i *Iter) returnSlice(a, b int) []byte {
+ if i.rb.src.bytes == nil {
+ return i.buf[:copy(i.buf[:], i.rb.src.str[a:b])]
+ }
+ return i.rb.src.bytes[a:b]
+}
+
+// Pos returns the byte position at which the next call to Next will commence processing.
+func (i *Iter) Pos() int {
+ return i.p
+}
+
+func (i *Iter) setDone() {
+ i.next = nextDone
+ i.p = i.rb.nsrc
+}
+
+// Done returns true if there is no more input to process.
+func (i *Iter) Done() bool {
+ return i.p >= i.rb.nsrc
+}
+
+// Next returns f(i.input[i.Pos():n]), where n is a boundary of i.input.
+// For any input a and b for which f(a) == f(b), subsequent calls
+// to Next will return the same segments.
+// Modifying runes are grouped together with the preceding starter, if such a starter exists.
+// Although not guaranteed, n will typically be the smallest possible n.
+func (i *Iter) Next() []byte {
+ return i.next(i)
+}
+
+func nextASCIIBytes(i *Iter) []byte {
+ p := i.p + 1
+ if p >= i.rb.nsrc {
+ i.setDone()
+ return i.rb.src.bytes[i.p:p]
+ }
+ if i.rb.src.bytes[p] < utf8.RuneSelf {
+ p0 := i.p
+ i.p = p
+ return i.rb.src.bytes[p0:p]
+ }
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ i.next = i.rb.f.nextMain
+ return i.next(i)
+}
+
+func nextASCIIString(i *Iter) []byte {
+ p := i.p + 1
+ if p >= i.rb.nsrc {
+ i.buf[0] = i.rb.src.str[i.p]
+ i.setDone()
+ return i.buf[:1]
+ }
+ if i.rb.src.str[p] < utf8.RuneSelf {
+ i.buf[0] = i.rb.src.str[i.p]
+ i.p = p
+ return i.buf[:1]
+ }
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ i.next = i.rb.f.nextMain
+ return i.next(i)
+}
+
+func nextHangul(i *Iter) []byte {
+ p := i.p
+ next := p + hangulUTF8Size
+ if next >= i.rb.nsrc {
+ i.setDone()
+ } else if i.rb.src.hangul(next) == 0 {
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ i.next = i.rb.f.nextMain
+ return i.next(i)
+ }
+ i.p = next
+ return i.buf[:decomposeHangul(i.buf[:], i.rb.src.hangul(p))]
+}
+
+func nextDone(i *Iter) []byte {
+ return nil
+}
+
+// nextMulti is used for iterating over multi-segment decompositions
+// for decomposing normal forms.
+func nextMulti(i *Iter) []byte {
+ j := 0
+ d := i.multiSeg
+ // skip first rune
+ for j = 1; j < len(d) && !utf8.RuneStart(d[j]); j++ {
+ }
+ for j < len(d) {
+ info := i.rb.f.info(input{bytes: d}, j)
+ if info.BoundaryBefore() {
+ i.multiSeg = d[j:]
+ return d[:j]
+ }
+ j += int(info.size)
+ }
+ // treat last segment as normal decomposition
+ i.next = i.rb.f.nextMain
+ return i.next(i)
+}
+
+// nextMultiNorm is used for iterating over multi-segment decompositions
+// for composing normal forms.
+func nextMultiNorm(i *Iter) []byte {
+ j := 0
+ d := i.multiSeg
+ for j < len(d) {
+ info := i.rb.f.info(input{bytes: d}, j)
+ if info.BoundaryBefore() {
+ i.rb.compose()
+ seg := i.buf[:i.rb.flushCopy(i.buf[:])]
+ i.rb.ss.first(info)
+ i.rb.insertUnsafe(input{bytes: d}, j, info)
+ i.multiSeg = d[j+int(info.size):]
+ return seg
+ }
+ i.rb.ss.next(info)
+ i.rb.insertUnsafe(input{bytes: d}, j, info)
+ j += int(info.size)
+ }
+ i.multiSeg = nil
+ i.next = nextComposed
+ return doNormComposed(i)
+}
+
+// nextDecomposed is the implementation of Next for forms NFD and NFKD.
+func nextDecomposed(i *Iter) (next []byte) {
+ outp := 0
+ inCopyStart, outCopyStart := i.p, 0
+ ss := mkStreamSafe(i.info)
+ for {
+ if sz := int(i.info.size); sz <= 1 {
+ p := i.p
+ i.p++ // ASCII or illegal byte. Either way, advance by 1.
+ if i.p >= i.rb.nsrc {
+ i.setDone()
+ return i.returnSlice(p, i.p)
+ } else if i.rb.src._byte(i.p) < utf8.RuneSelf {
+ i.next = i.asciiF
+ return i.returnSlice(p, i.p)
+ }
+ outp++
+ } else if d := i.info.Decomposition(); d != nil {
+ // Note: If leading CCC != 0, then len(d) == 2 and last is also non-zero.
+ // Case 1: there is a leftover to copy. In this case the decomposition
+ // must begin with a modifier and should always be appended.
+ // Case 2: no leftover. Simply return d if followed by a ccc == 0 value.
+ p := outp + len(d)
+ if outp > 0 {
+ i.rb.src.copySlice(i.buf[outCopyStart:], inCopyStart, i.p)
+ if p > len(i.buf) {
+ return i.buf[:outp]
+ }
+ } else if i.info.multiSegment() {
+ // outp must be 0 as multi-segment decompositions always
+ // start a new segment.
+ if i.multiSeg == nil {
+ i.multiSeg = d
+ i.next = nextMulti
+ return nextMulti(i)
+ }
+ // We are in the last segment. Treat as normal decomposition.
+ d = i.multiSeg
+ i.multiSeg = nil
+ p = len(d)
+ }
+ prevCC := i.info.tccc
+ if i.p += sz; i.p >= i.rb.nsrc {
+ i.setDone()
+ i.info = Properties{} // Force BoundaryBefore to succeed.
+ } else {
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ }
+ switch ss.next(i.info) {
+ case ssOverflow:
+ i.next = nextCGJDecompose
+ fallthrough
+ case ssStarter:
+ if outp > 0 {
+ copy(i.buf[outp:], d)
+ return i.buf[:p]
+ }
+ return d
+ }
+ copy(i.buf[outp:], d)
+ outp = p
+ inCopyStart, outCopyStart = i.p, outp
+ if i.info.ccc < prevCC {
+ goto doNorm
+ }
+ continue
+ } else if r := i.rb.src.hangul(i.p); r != 0 {
+ outp = decomposeHangul(i.buf[:], r)
+ i.p += hangulUTF8Size
+ inCopyStart, outCopyStart = i.p, outp
+ if i.p >= i.rb.nsrc {
+ i.setDone()
+ break
+ } else if i.rb.src.hangul(i.p) != 0 {
+ i.next = nextHangul
+ return i.buf[:outp]
+ }
+ } else {
+ p := outp + sz
+ if p > len(i.buf) {
+ break
+ }
+ outp = p
+ i.p += sz
+ }
+ if i.p >= i.rb.nsrc {
+ i.setDone()
+ break
+ }
+ prevCC := i.info.tccc
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ if v := ss.next(i.info); v == ssStarter {
+ break
+ } else if v == ssOverflow {
+ i.next = nextCGJDecompose
+ break
+ }
+ if i.info.ccc < prevCC {
+ goto doNorm
+ }
+ }
+ if outCopyStart == 0 {
+ return i.returnSlice(inCopyStart, i.p)
+ } else if inCopyStart < i.p {
+ i.rb.src.copySlice(i.buf[outCopyStart:], inCopyStart, i.p)
+ }
+ return i.buf[:outp]
+doNorm:
+ // Insert what we have decomposed so far in the reorderBuffer.
+ // As we will only reorder, there will always be enough room.
+ i.rb.src.copySlice(i.buf[outCopyStart:], inCopyStart, i.p)
+ i.rb.insertDecomposed(i.buf[0:outp])
+ return doNormDecomposed(i)
+}
+
+func doNormDecomposed(i *Iter) []byte {
+ for {
+ if s := i.rb.ss.next(i.info); s == ssOverflow {
+ i.next = nextCGJDecompose
+ break
+ }
+ i.rb.insertUnsafe(i.rb.src, i.p, i.info)
+ if i.p += int(i.info.size); i.p >= i.rb.nsrc {
+ i.setDone()
+ break
+ }
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ if i.info.ccc == 0 {
+ break
+ }
+ }
+ // new segment or too many combining characters: exit normalization
+ return i.buf[:i.rb.flushCopy(i.buf[:])]
+}
+
+func nextCGJDecompose(i *Iter) []byte {
+ i.rb.ss = 0
+ i.rb.insertCGJ()
+ i.next = nextDecomposed
+ buf := doNormDecomposed(i)
+ return buf
+}
+
+// nextComposed is the implementation of Next for forms NFC and NFKC.
+func nextComposed(i *Iter) []byte {
+ outp, startp := 0, i.p
+ var prevCC uint8
+ ss := mkStreamSafe(i.info)
+ for {
+ if !i.info.isYesC() {
+ goto doNorm
+ }
+ prevCC = i.info.tccc
+ sz := int(i.info.size)
+ if sz == 0 {
+ sz = 1 // illegal rune: copy byte-by-byte
+ }
+ p := outp + sz
+ if p > len(i.buf) {
+ break
+ }
+ outp = p
+ i.p += sz
+ if i.p >= i.rb.nsrc {
+ i.setDone()
+ break
+ } else if i.rb.src._byte(i.p) < utf8.RuneSelf {
+ i.next = i.asciiF
+ break
+ }
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ if v := ss.next(i.info); v == ssStarter {
+ break
+ } else if v == ssOverflow {
+ i.next = nextCGJCompose
+ break
+ }
+ if i.info.ccc < prevCC {
+ goto doNorm
+ }
+ }
+ return i.returnSlice(startp, i.p)
+doNorm:
+ i.p = startp
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ if i.info.multiSegment() {
+ d := i.info.Decomposition()
+ info := i.rb.f.info(input{bytes: d}, 0)
+ i.rb.insertUnsafe(input{bytes: d}, 0, info)
+ i.multiSeg = d[int(info.size):]
+ i.next = nextMultiNorm
+ return nextMultiNorm(i)
+ }
+ i.rb.ss.first(i.info)
+ i.rb.insertUnsafe(i.rb.src, i.p, i.info)
+ return doNormComposed(i)
+}
+
+func doNormComposed(i *Iter) []byte {
+ // First rune should already be inserted.
+ for {
+ if i.p += int(i.info.size); i.p >= i.rb.nsrc {
+ i.setDone()
+ break
+ }
+ i.info = i.rb.f.info(i.rb.src, i.p)
+ if s := i.rb.ss.next(i.info); s == ssStarter {
+ break
+ } else if s == ssOverflow {
+ i.next = nextCGJCompose
+ break
+ }
+ i.rb.insertUnsafe(i.rb.src, i.p, i.info)
+ }
+ i.rb.compose()
+ seg := i.buf[:i.rb.flushCopy(i.buf[:])]
+ return seg
+}
+
+func nextCGJCompose(i *Iter) []byte {
+ i.rb.ss = 0 // instead of first
+ i.rb.insertCGJ()
+ i.next = nextComposed
+ // Note that we treat any rune with nLeadingNonStarters > 0 as a non-starter,
+ // even if they are not. This is particularly dubious for U+FF9E and UFF9A.
+ // If we ever change that, insert a check here.
+ i.rb.ss.first(i.info)
+ i.rb.insertUnsafe(i.rb.src, i.p, i.info)
+ return doNormComposed(i)
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/maketables.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/maketables.go
new file mode 100644
index 0000000..f113615
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/maketables.go
@@ -0,0 +1,1033 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+// Normalization table generator.
+// Data read from the web.
+// See forminfo.go for a description of the trie values associated with each rune.
+
+package main
+
+import (
+ "bytes"
+ "flag"
+ "fmt"
+ "io"
+ "log"
+ "net/http"
+ "os"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "unicode"
+
+ "code.google.com/p/go.text/internal/triegen"
+ "code.google.com/p/go.text/internal/ucd"
+)
+
+func main() {
+ flag.Parse()
+ loadUnicodeData()
+ compactCCC()
+ loadCompositionExclusions()
+ completeCharFields(FCanonical)
+ completeCharFields(FCompatibility)
+ computeNonStarterCounts()
+ verifyComputed()
+ printChars()
+ if *test {
+ testDerived()
+ printTestdata()
+ } else {
+ makeTables()
+ }
+}
+
+var url = flag.String("url",
+ "http://www.unicode.org/Public/"+unicode.Version+"/ucd/",
+ "URL of Unicode database directory")
+var tablelist = flag.String("tables",
+ "all",
+ "comma-separated list of which tables to generate; "+
+ "can be 'decomp', 'recomp', 'info' and 'all'")
+var test = flag.Bool("test",
+ false,
+ "test existing tables against DerivedNormalizationProps and generate test data for regression testing")
+var verbose = flag.Bool("verbose",
+ false,
+ "write data to stdout as it is parsed")
+var localFiles = flag.Bool("local",
+ false,
+ "data files have been copied to the current directory; for debugging only")
+
+var logger = log.New(os.Stderr, "", log.Lshortfile)
+
+const MaxChar = 0x10FFFF // anything above this shouldn't exist
+
+// Quick Check properties of runes allow us to quickly
+// determine whether a rune may occur in a normal form.
+// For a given normal form, a rune may be guaranteed to occur
+// verbatim (QC=Yes), may or may not combine with another
+// rune (QC=Maybe), or may not occur (QC=No).
+type QCResult int
+
+const (
+ QCUnknown QCResult = iota
+ QCYes
+ QCNo
+ QCMaybe
+)
+
+func (r QCResult) String() string {
+ switch r {
+ case QCYes:
+ return "Yes"
+ case QCNo:
+ return "No"
+ case QCMaybe:
+ return "Maybe"
+ }
+ return "***UNKNOWN***"
+}
+
+const (
+ FCanonical = iota // NFC or NFD
+ FCompatibility // NFKC or NFKD
+ FNumberOfFormTypes
+)
+
+const (
+ MComposed = iota // NFC or NFKC
+ MDecomposed // NFD or NFKD
+ MNumberOfModes
+)
+
+// This contains only the properties we're interested in.
+type Char struct {
+ name string
+ codePoint rune // if zero, this index is not a valid code point.
+ ccc uint8 // canonical combining class
+ origCCC uint8
+ excludeInComp bool // from CompositionExclusions.txt
+ compatDecomp bool // it has a compatibility expansion
+
+ nTrailingNonStarters uint8
+ nLeadingNonStarters uint8 // must be equal to trailing if non-zero
+
+ forms [FNumberOfFormTypes]FormInfo // For FCanonical and FCompatibility
+
+ state State
+}
+
+var chars = make([]Char, MaxChar+1)
+var cccMap = make(map[uint8]uint8)
+
+func (c Char) String() string {
+ buf := new(bytes.Buffer)
+
+ fmt.Fprintf(buf, "%U [%s]:\n", c.codePoint, c.name)
+ fmt.Fprintf(buf, " ccc: %v\n", c.ccc)
+ fmt.Fprintf(buf, " excludeInComp: %v\n", c.excludeInComp)
+ fmt.Fprintf(buf, " compatDecomp: %v\n", c.compatDecomp)
+ fmt.Fprintf(buf, " state: %v\n", c.state)
+ fmt.Fprintf(buf, " NFC:\n")
+ fmt.Fprint(buf, c.forms[FCanonical])
+ fmt.Fprintf(buf, " NFKC:\n")
+ fmt.Fprint(buf, c.forms[FCompatibility])
+
+ return buf.String()
+}
+
+// In UnicodeData.txt, some ranges are marked like this:
+// 3400;;Lo;0;L;;;;;N;;;;;
+// 4DB5;;Lo;0;L;;;;;N;;;;;
+// parseCharacter keeps a state variable indicating the weirdness.
+type State int
+
+const (
+ SNormal State = iota // known to be zero for the type
+ SFirst
+ SLast
+ SMissing
+)
+
+var lastChar = rune('\u0000')
+
+func (c Char) isValid() bool {
+ return c.codePoint != 0 && c.state != SMissing
+}
+
+type FormInfo struct {
+ quickCheck [MNumberOfModes]QCResult // index: MComposed or MDecomposed
+ verified [MNumberOfModes]bool // index: MComposed or MDecomposed
+
+ combinesForward bool // May combine with rune on the right
+ combinesBackward bool // May combine with rune on the left
+ isOneWay bool // Never appears in result
+ inDecomp bool // Some decompositions result in this char.
+ decomp Decomposition
+ expandedDecomp Decomposition
+}
+
+func (f FormInfo) String() string {
+ buf := bytes.NewBuffer(make([]byte, 0))
+
+ fmt.Fprintf(buf, " quickCheck[C]: %v\n", f.quickCheck[MComposed])
+ fmt.Fprintf(buf, " quickCheck[D]: %v\n", f.quickCheck[MDecomposed])
+ fmt.Fprintf(buf, " cmbForward: %v\n", f.combinesForward)
+ fmt.Fprintf(buf, " cmbBackward: %v\n", f.combinesBackward)
+ fmt.Fprintf(buf, " isOneWay: %v\n", f.isOneWay)
+ fmt.Fprintf(buf, " inDecomp: %v\n", f.inDecomp)
+ fmt.Fprintf(buf, " decomposition: %X\n", f.decomp)
+ fmt.Fprintf(buf, " expandedDecomp: %X\n", f.expandedDecomp)
+
+ return buf.String()
+}
+
+type Decomposition []rune
+
+func openReader(file string) (input io.ReadCloser) {
+ if *localFiles {
+ f, err := os.Open(file)
+ if err != nil {
+ logger.Fatal(err)
+ }
+ input = f
+ } else {
+ path := *url + file
+ resp, err := http.Get(path)
+ if err != nil {
+ logger.Fatal(err)
+ }
+ if resp.StatusCode != 200 {
+ logger.Fatal("bad GET status for "+file, resp.Status)
+ }
+ input = resp.Body
+ }
+ return
+}
+
+func parseDecomposition(s string, skipfirst bool) (a []rune, err error) {
+ decomp := strings.Split(s, " ")
+ if len(decomp) > 0 && skipfirst {
+ decomp = decomp[1:]
+ }
+ for _, d := range decomp {
+ point, err := strconv.ParseUint(d, 16, 64)
+ if err != nil {
+ return a, err
+ }
+ a = append(a, rune(point))
+ }
+ return a, nil
+}
+
+func loadUnicodeData() {
+ f := openReader("UnicodeData.txt")
+ defer f.Close()
+ p := ucd.New(f)
+ for p.Next() {
+ r := p.Rune(ucd.CodePoint)
+ char := &chars[r]
+
+ char.ccc = uint8(p.Uint(ucd.CanonicalCombiningClass))
+ decmap := p.String(ucd.DecompMapping)
+
+ exp, err := parseDecomposition(decmap, false)
+ isCompat := false
+ if err != nil {
+ if len(decmap) > 0 {
+ exp, err = parseDecomposition(decmap, true)
+ if err != nil {
+ logger.Fatalf(`%U: bad decomp |%v|: "%s"`, r, decmap, err)
+ }
+ isCompat = true
+ }
+ }
+
+ char.name = p.String(ucd.Name)
+ char.codePoint = r
+ char.forms[FCompatibility].decomp = exp
+ if !isCompat {
+ char.forms[FCanonical].decomp = exp
+ } else {
+ char.compatDecomp = true
+ }
+ if len(decmap) > 0 {
+ char.forms[FCompatibility].decomp = exp
+ }
+ }
+ if err := p.Err(); err != nil {
+ logger.Fatal(err)
+ }
+}
+
+// compactCCC converts the sparse set of CCC values to a contiguous one,
+// reducing the number of bits needed from 8 to 6.
+func compactCCC() {
+ m := make(map[uint8]uint8)
+ for i := range chars {
+ c := &chars[i]
+ m[c.ccc] = 0
+ }
+ cccs := []int{}
+ for v, _ := range m {
+ cccs = append(cccs, int(v))
+ }
+ sort.Ints(cccs)
+ for i, c := range cccs {
+ cccMap[uint8(i)] = uint8(c)
+ m[uint8(c)] = uint8(i)
+ }
+ for i := range chars {
+ c := &chars[i]
+ c.origCCC = c.ccc
+ c.ccc = m[c.ccc]
+ }
+ if len(m) >= 1<<6 {
+ log.Fatalf("too many difference CCC values: %d >= 64", len(m))
+ }
+}
+
+// CompositionExclusions.txt has form:
+// 0958 # ...
+// See http://unicode.org/reports/tr44/ for full explanation
+func loadCompositionExclusions() {
+ f := openReader("CompositionExclusions.txt")
+ defer f.Close()
+ p := ucd.New(f)
+ for p.Next() {
+ c := &chars[p.Rune(0)]
+ if c.excludeInComp {
+ logger.Fatalf("%U: Duplicate entry in exclusions.", c.codePoint)
+ }
+ c.excludeInComp = true
+ }
+ if e := p.Err(); e != nil {
+ logger.Fatal(e)
+ }
+}
+
+// hasCompatDecomp returns true if any of the recursive
+// decompositions contains a compatibility expansion.
+// In this case, the character may not occur in NFK*.
+func hasCompatDecomp(r rune) bool {
+ c := &chars[r]
+ if c.compatDecomp {
+ return true
+ }
+ for _, d := range c.forms[FCompatibility].decomp {
+ if hasCompatDecomp(d) {
+ return true
+ }
+ }
+ return false
+}
+
+// Hangul related constants.
+const (
+ HangulBase = 0xAC00
+ HangulEnd = 0xD7A4 // hangulBase + Jamo combinations (19 * 21 * 28)
+
+ JamoLBase = 0x1100
+ JamoLEnd = 0x1113
+ JamoVBase = 0x1161
+ JamoVEnd = 0x1176
+ JamoTBase = 0x11A8
+ JamoTEnd = 0x11C3
+
+ JamoLVTCount = 19 * 21 * 28
+ JamoTCount = 28
+)
+
+func isHangul(r rune) bool {
+ return HangulBase <= r && r < HangulEnd
+}
+
+func isHangulWithoutJamoT(r rune) bool {
+ if !isHangul(r) {
+ return false
+ }
+ r -= HangulBase
+ return r < JamoLVTCount && r%JamoTCount == 0
+}
+
+func ccc(r rune) uint8 {
+ return chars[r].ccc
+}
+
+// Insert a rune in a buffer, ordered by Canonical Combining Class.
+func insertOrdered(b Decomposition, r rune) Decomposition {
+ n := len(b)
+ b = append(b, 0)
+ cc := ccc(r)
+ if cc > 0 {
+ // Use bubble sort.
+ for ; n > 0; n-- {
+ if ccc(b[n-1]) <= cc {
+ break
+ }
+ b[n] = b[n-1]
+ }
+ }
+ b[n] = r
+ return b
+}
+
+// Recursively decompose.
+func decomposeRecursive(form int, r rune, d Decomposition) Decomposition {
+ dcomp := chars[r].forms[form].decomp
+ if len(dcomp) == 0 {
+ return insertOrdered(d, r)
+ }
+ for _, c := range dcomp {
+ d = decomposeRecursive(form, c, d)
+ }
+ return d
+}
+
+func completeCharFields(form int) {
+ // Phase 0: pre-expand decomposition.
+ for i := range chars {
+ f := &chars[i].forms[form]
+ if len(f.decomp) == 0 {
+ continue
+ }
+ exp := make(Decomposition, 0)
+ for _, c := range f.decomp {
+ exp = decomposeRecursive(form, c, exp)
+ }
+ f.expandedDecomp = exp
+ }
+
+ // Phase 1: composition exclusion, mark decomposition.
+ for i := range chars {
+ c := &chars[i]
+ f := &c.forms[form]
+
+ // Marks script-specific exclusions and version restricted.
+ f.isOneWay = c.excludeInComp
+
+ // Singletons
+ f.isOneWay = f.isOneWay || len(f.decomp) == 1
+
+ // Non-starter decompositions
+ if len(f.decomp) > 1 {
+ chk := c.ccc != 0 || chars[f.decomp[0]].ccc != 0
+ f.isOneWay = f.isOneWay || chk
+ }
+
+ // Runes that decompose into more than two runes.
+ f.isOneWay = f.isOneWay || len(f.decomp) > 2
+
+ if form == FCompatibility {
+ f.isOneWay = f.isOneWay || hasCompatDecomp(c.codePoint)
+ }
+
+ for _, r := range f.decomp {
+ chars[r].forms[form].inDecomp = true
+ }
+ }
+
+ // Phase 2: forward and backward combining.
+ for i := range chars {
+ c := &chars[i]
+ f := &c.forms[form]
+
+ if !f.isOneWay && len(f.decomp) == 2 {
+ f0 := &chars[f.decomp[0]].forms[form]
+ f1 := &chars[f.decomp[1]].forms[form]
+ if !f0.isOneWay {
+ f0.combinesForward = true
+ }
+ if !f1.isOneWay {
+ f1.combinesBackward = true
+ }
+ }
+ if isHangulWithoutJamoT(rune(i)) {
+ f.combinesForward = true
+ }
+ }
+
+ // Phase 3: quick check values.
+ for i := range chars {
+ c := &chars[i]
+ f := &c.forms[form]
+
+ switch {
+ case len(f.decomp) > 0:
+ f.quickCheck[MDecomposed] = QCNo
+ case isHangul(rune(i)):
+ f.quickCheck[MDecomposed] = QCNo
+ default:
+ f.quickCheck[MDecomposed] = QCYes
+ }
+ switch {
+ case f.isOneWay:
+ f.quickCheck[MComposed] = QCNo
+ case (i & 0xffff00) == JamoLBase:
+ f.quickCheck[MComposed] = QCYes
+ if JamoLBase <= i && i < JamoLEnd {
+ f.combinesForward = true
+ }
+ if JamoVBase <= i && i < JamoVEnd {
+ f.quickCheck[MComposed] = QCMaybe
+ f.combinesBackward = true
+ f.combinesForward = true
+ }
+ if JamoTBase <= i && i < JamoTEnd {
+ f.quickCheck[MComposed] = QCMaybe
+ f.combinesBackward = true
+ }
+ case !f.combinesBackward:
+ f.quickCheck[MComposed] = QCYes
+ default:
+ f.quickCheck[MComposed] = QCMaybe
+ }
+ }
+}
+
+func computeNonStarterCounts() {
+ // Phase 4: leading and trailing non-starter count
+ for i := range chars {
+ c := &chars[i]
+
+ runes := []rune{rune(i)}
+ // We always use FCompatibility so that the CGJ insertion points do not
+ // change for repeated normalizations with different forms.
+ if exp := c.forms[FCompatibility].expandedDecomp; len(exp) > 0 {
+ runes = exp
+ }
+ for _, r := range runes {
+ if chars[r].ccc == 0 {
+ break
+ }
+ c.nLeadingNonStarters++
+ }
+ for i := len(runes) - 1; i >= 0; i-- {
+ if chars[runes[i]].ccc == 0 {
+ break
+ }
+ c.nTrailingNonStarters++
+ }
+
+ // We consider runes that combine backwards to be non-starters for the
+ // purpose of Stream-Safe Text Processing.
+ for _, f := range c.forms {
+ if c.ccc == 0 && f.combinesBackward {
+ if len(c.forms[FCompatibility].expandedDecomp) > 0 {
+ log.Fatalf("%U: CCC==0 modifier with an expansion is not supported.", i)
+ }
+ c.nTrailingNonStarters = 1
+ c.nLeadingNonStarters = 1
+ }
+ }
+
+ if isHangul(rune(i)) {
+ c.nTrailingNonStarters = 2
+ if isHangulWithoutJamoT(rune(i)) {
+ c.nTrailingNonStarters = 1
+ }
+ }
+
+ if l, t := c.nLeadingNonStarters, c.nTrailingNonStarters; l > 0 && l != t {
+ log.Fatalf("%U: number of leading and trailing non-starters should be equal (%d vs %d)", i, l, t)
+ }
+ if t := c.nTrailingNonStarters; t > 3 {
+ log.Fatalf("%U: number of trailing non-starters is %d > 3", t)
+ }
+ }
+}
+
+func printBytes(b []byte, name string) {
+ fmt.Printf("// %s: %d bytes\n", name, len(b))
+ fmt.Printf("var %s = [...]byte {", name)
+ for i, c := range b {
+ switch {
+ case i%64 == 0:
+ fmt.Printf("\n// Bytes %x - %x\n", i, i+63)
+ case i%8 == 0:
+ fmt.Printf("\n")
+ }
+ fmt.Printf("0x%.2X, ", c)
+ }
+ fmt.Print("\n}\n\n")
+}
+
+// See forminfo.go for format.
+func makeEntry(f *FormInfo, c *Char) uint16 {
+ e := uint16(0)
+ if r := c.codePoint; HangulBase <= r && r < HangulEnd {
+ e |= 0x40
+ }
+ if f.combinesForward {
+ e |= 0x20
+ }
+ if f.quickCheck[MDecomposed] == QCNo {
+ e |= 0x4
+ }
+ switch f.quickCheck[MComposed] {
+ case QCYes:
+ case QCNo:
+ e |= 0x10
+ case QCMaybe:
+ e |= 0x18
+ default:
+ log.Fatalf("Illegal quickcheck value %v.", f.quickCheck[MComposed])
+ }
+ e |= uint16(c.nTrailingNonStarters)
+ return e
+}
+
+// decompSet keeps track of unique decompositions, grouped by whether
+// the decomposition is followed by a trailing and/or leading CCC.
+type decompSet [7]map[string]bool
+
+const (
+ normalDecomp = iota
+ firstMulti
+ firstCCC
+ endMulti
+ firstLeadingCCC
+ firstCCCZeroExcept
+ firstStarterWithNLead
+ lastDecomp
+)
+
+var cname = []string{"firstMulti", "firstCCC", "endMulti", "firstLeadingCCC", "firstCCCZeroExcept", "firstStarterWithNLead", "lastDecomp"}
+
+func makeDecompSet() decompSet {
+ m := decompSet{}
+ for i := range m {
+ m[i] = make(map[string]bool)
+ }
+ return m
+}
+func (m *decompSet) insert(key int, s string) {
+ m[key][s] = true
+}
+
+func printCharInfoTables() int {
+ mkstr := func(r rune, f *FormInfo) (int, string) {
+ d := f.expandedDecomp
+ s := string([]rune(d))
+ if max := 1 << 6; len(s) >= max {
+ const msg = "%U: too many bytes in decomposition: %d >= %d"
+ logger.Fatalf(msg, r, len(s), max)
+ }
+ head := uint8(len(s))
+ if f.quickCheck[MComposed] != QCYes {
+ head |= 0x40
+ }
+ if f.combinesForward {
+ head |= 0x80
+ }
+ s = string([]byte{head}) + s
+
+ lccc := ccc(d[0])
+ tccc := ccc(d[len(d)-1])
+ cc := ccc(r)
+ if cc != 0 && lccc == 0 && tccc == 0 {
+ logger.Fatalf("%U: trailing and leading ccc are 0 for non-zero ccc %d", r, cc)
+ }
+ if tccc < lccc && lccc != 0 {
+ const msg = "%U: lccc (%d) must be <= tcc (%d)"
+ logger.Fatalf(msg, r, lccc, tccc)
+ }
+ index := normalDecomp
+ nTrail := chars[r].nTrailingNonStarters
+ if tccc > 0 || lccc > 0 || nTrail > 0 {
+ tccc <<= 2
+ tccc |= nTrail
+ s += string([]byte{tccc})
+ index = endMulti
+ for _, r := range d[1:] {
+ if ccc(r) == 0 {
+ index = firstCCC
+ }
+ }
+ if lccc > 0 {
+ s += string([]byte{lccc})
+ if index == firstCCC {
+ logger.Fatalf("%U: multi-segment decomposition not supported for decompositions with leading CCC != 0", r)
+ }
+ index = firstLeadingCCC
+ }
+ if cc != lccc {
+ if cc != 0 {
+ logger.Fatalf("%U: for lccc != ccc, expected ccc to be 0; was %d", r, cc)
+ }
+ index = firstCCCZeroExcept
+ }
+ } else if len(d) > 1 {
+ index = firstMulti
+ }
+ return index, s
+ }
+
+ decompSet := makeDecompSet()
+ const nLeadStr = "\x00\x01" // 0-byte length and tccc with nTrail.
+ decompSet.insert(firstStarterWithNLead, nLeadStr)
+
+ // Store the uniqued decompositions in a byte buffer,
+ // preceded by their byte length.
+ for _, c := range chars {
+ for _, f := range c.forms {
+ if len(f.expandedDecomp) == 0 {
+ continue
+ }
+ if f.combinesBackward {
+ logger.Fatalf("%U: combinesBackward and decompose", c.codePoint)
+ }
+ index, s := mkstr(c.codePoint, &f)
+ decompSet.insert(index, s)
+ }
+ }
+
+ decompositions := bytes.NewBuffer(make([]byte, 0, 10000))
+ size := 0
+ positionMap := make(map[string]uint16)
+ decompositions.WriteString("\000")
+ fmt.Println("const (")
+ for i, m := range decompSet {
+ sa := []string{}
+ for s := range m {
+ sa = append(sa, s)
+ }
+ sort.Strings(sa)
+ for _, s := range sa {
+ p := decompositions.Len()
+ decompositions.WriteString(s)
+ positionMap[s] = uint16(p)
+ }
+ if cname[i] != "" {
+ fmt.Printf("%s = 0x%X\n", cname[i], decompositions.Len())
+ }
+ }
+ fmt.Println("maxDecomp = 0x8000")
+ fmt.Println(")")
+ b := decompositions.Bytes()
+ printBytes(b, "decomps")
+ size += len(b)
+
+ varnames := []string{"nfc", "nfkc"}
+ for i := 0; i < FNumberOfFormTypes; i++ {
+ trie := triegen.NewTrie(varnames[i])
+
+ for r, c := range chars {
+ f := c.forms[i]
+ d := f.expandedDecomp
+ if len(d) != 0 {
+ _, key := mkstr(c.codePoint, &f)
+ trie.Insert(rune(r), uint64(positionMap[key]))
+ if c.ccc != ccc(d[0]) {
+ // We assume the lead ccc of a decomposition !=0 in this case.
+ if ccc(d[0]) == 0 {
+ logger.Fatalf("Expected leading CCC to be non-zero; ccc is %d", c.ccc)
+ }
+ }
+ } else if c.nLeadingNonStarters > 0 && len(f.expandedDecomp) == 0 && c.ccc == 0 && !f.combinesBackward {
+ // Handle cases where it can't be detected that the nLead should be equal
+ // to nTrail.
+ trie.Insert(c.codePoint, uint64(positionMap[nLeadStr]))
+ } else if v := makeEntry(&f, &c)<<8 | uint16(c.ccc); v != 0 {
+ trie.Insert(c.codePoint, uint64(0x8000|v))
+ }
+ }
+ sz, err := trie.Gen(os.Stdout, triegen.Compact(&normCompacter{name: varnames[i]}))
+ if err != nil {
+ logger.Fatal(err)
+ }
+ size += sz
+ }
+ return size
+}
+
+func contains(sa []string, s string) bool {
+ for _, a := range sa {
+ if a == s {
+ return true
+ }
+ }
+ return false
+}
+
+// Extract the version number from the URL.
+func version() string {
+ // From http://www.unicode.org/standard/versions/#Version_Numbering:
+ // for the later Unicode versions, data files are located in
+ // versioned directories.
+ fields := strings.Split(*url, "/")
+ for _, f := range fields {
+ if match, _ := regexp.MatchString(`[0-9]\.[0-9]\.[0-9]`, f); match {
+ return f
+ }
+ }
+ logger.Fatal("unknown version")
+ return "Unknown"
+}
+
+const fileHeader = `// Generated by running
+// maketables --tables=%s --url=%s
+// DO NOT EDIT
+
+package norm
+
+`
+
+func makeTables() {
+ size := 0
+ if *tablelist == "" {
+ return
+ }
+ list := strings.Split(*tablelist, ",")
+ if *tablelist == "all" {
+ list = []string{"recomp", "info"}
+ }
+ fmt.Printf(fileHeader, *tablelist, *url)
+
+ // Compute maximum decomposition size.
+ max := 0
+ for _, c := range chars {
+ if n := len(string(c.forms[FCompatibility].expandedDecomp)); n > max {
+ max = n
+ }
+ }
+
+ fmt.Println("const (")
+ fmt.Println("\t// Version is the Unicode edition from which the tables are derived.")
+ fmt.Printf("\tVersion = %q\n", version())
+ fmt.Println()
+ fmt.Println("\t// MaxTransformChunkSize indicates the maximum number of bytes that Transform")
+ fmt.Println("\t// may need to write atomically for any Form. Making a destination buffer at")
+ fmt.Println("\t// least this size ensures that Transform can always make progress and that")
+ fmt.Println("\t// the user does not need to grow the buffer on an ErrShortDst.")
+ fmt.Printf("\tMaxTransformChunkSize = %d+maxNonStarters*4\n", len(string(0x034F))+max)
+ fmt.Println(")\n")
+
+ // Print the CCC remap table.
+ size += len(cccMap)
+ fmt.Printf("var ccc = [%d]uint8{", len(cccMap))
+ for i := 0; i < len(cccMap); i++ {
+ if i%8 == 0 {
+ fmt.Println()
+ }
+ fmt.Printf("%3d, ", cccMap[uint8(i)])
+ }
+ fmt.Println("\n}\n")
+
+ if contains(list, "info") {
+ size += printCharInfoTables()
+ }
+
+ if contains(list, "recomp") {
+ // Note that we use 32 bit keys, instead of 64 bit.
+ // This clips the bits of three entries, but we know
+ // this won't cause a collision. The compiler will catch
+ // any changes made to UnicodeData.txt that introduces
+ // a collision.
+ // Note that the recomposition map for NFC and NFKC
+ // are identical.
+
+ // Recomposition map
+ nrentries := 0
+ for _, c := range chars {
+ f := c.forms[FCanonical]
+ if !f.isOneWay && len(f.decomp) > 0 {
+ nrentries++
+ }
+ }
+ sz := nrentries * 8
+ size += sz
+ fmt.Printf("// recompMap: %d bytes (entries only)\n", sz)
+ fmt.Println("var recompMap = map[uint32]rune{")
+ for i, c := range chars {
+ f := c.forms[FCanonical]
+ d := f.decomp
+ if !f.isOneWay && len(d) > 0 {
+ key := uint32(uint16(d[0]))<<16 + uint32(uint16(d[1]))
+ fmt.Printf("0x%.8X: 0x%.4X,\n", key, i)
+ }
+ }
+ fmt.Printf("}\n\n")
+ }
+
+ fmt.Printf("// Total size of tables: %dKB (%d bytes)\n", (size+512)/1024, size)
+}
+
+func printChars() {
+ if *verbose {
+ for _, c := range chars {
+ if !c.isValid() || c.state == SMissing {
+ continue
+ }
+ fmt.Println(c)
+ }
+ }
+}
+
+// verifyComputed does various consistency tests.
+func verifyComputed() {
+ for i, c := range chars {
+ for _, f := range c.forms {
+ isNo := (f.quickCheck[MDecomposed] == QCNo)
+ if (len(f.decomp) > 0) != isNo && !isHangul(rune(i)) {
+ log.Fatalf("%U: NF*D QC must be No if rune decomposes", i)
+ }
+
+ isMaybe := f.quickCheck[MComposed] == QCMaybe
+ if f.combinesBackward != isMaybe {
+ log.Fatalf("%U: NF*C QC must be Maybe if combinesBackward", i)
+ }
+ if len(f.decomp) > 0 && f.combinesForward && isMaybe {
+ log.Fatalf("%U: NF*C QC must be Yes or No if combinesForward and decomposes", i)
+ }
+
+ if len(f.expandedDecomp) != 0 {
+ continue
+ }
+ if a, b := c.nLeadingNonStarters > 0, (c.ccc > 0 || f.combinesBackward); a != b {
+ // We accept these two runes to be treated differently (it only affects
+				// segment breaking in iteration, most likely on improper use), but
+ // reconsider if more characters are added.
+ if i != 0xFF9E && i != 0xFF9F {
+ log.Fatalf("%U: nLead was %v; want %v", i, a, b)
+ }
+ }
+ }
+ nfc := c.forms[FCanonical]
+ nfkc := c.forms[FCompatibility]
+ if nfc.combinesBackward != nfkc.combinesBackward {
+ logger.Fatalf("%U: Cannot combine combinesBackward\n", c.codePoint)
+ }
+ }
+}
+
+// Use values in DerivedNormalizationProps.txt to compare against the
+// values we computed.
+// DerivedNormalizationProps.txt has form:
+// 00C0..00C5 ; NFD_QC; N # ...
+// 0374 ; NFD_QC; N # ...
+// See http://unicode.org/reports/tr44/ for full explanation
+func testDerived() {
+ f := openReader("DerivedNormalizationProps.txt")
+ defer f.Close()
+ p := ucd.New(f)
+ for p.Next() {
+ r := p.Rune(0)
+ c := &chars[r]
+
+ var ftype, mode int
+ qt := p.String(1)
+ switch qt {
+ case "NFC_QC":
+ ftype, mode = FCanonical, MComposed
+ case "NFD_QC":
+ ftype, mode = FCanonical, MDecomposed
+ case "NFKC_QC":
+ ftype, mode = FCompatibility, MComposed
+ case "NFKD_QC":
+ ftype, mode = FCompatibility, MDecomposed
+ default:
+ continue
+ }
+ var qr QCResult
+ switch p.String(2) {
+ case "Y":
+ qr = QCYes
+ case "N":
+ qr = QCNo
+ case "M":
+ qr = QCMaybe
+ default:
+ log.Fatalf(`Unexpected quick check value "%s"`, p.String(2))
+ }
+ if got := c.forms[ftype].quickCheck[mode]; got != qr {
+ logger.Printf("%U: FAILED %s (was %v need %v)\n", r, qt, got, qr)
+ }
+ c.forms[ftype].verified[mode] = true
+ }
+ if err := p.Err(); err != nil {
+ logger.Fatal(err)
+ }
+ // Any unspecified value must be QCYes. Verify this.
+ for i, c := range chars {
+ for j, fd := range c.forms {
+ for k, qr := range fd.quickCheck {
+ if !fd.verified[k] && qr != QCYes {
+ m := "%U: FAIL F:%d M:%d (was %v need Yes) %s\n"
+ logger.Printf(m, i, j, k, qr, c.name)
+ }
+ }
+ }
+ }
+}
+
+var testHeader = `// Generated by running
+// maketables --test --url=%s
+// +build test
+
+package norm
+
+const (
+ Yes = iota
+ No
+ Maybe
+)
+
+type formData struct {
+ qc uint8
+ combinesForward bool
+ decomposition string
+}
+
+type runeData struct {
+ r rune
+ ccc uint8
+ nLead uint8
+ nTrail uint8
+ f [2]formData // 0: canonical; 1: compatibility
+}
+
+func f(qc uint8, cf bool, dec string) [2]formData {
+ return [2]formData{{qc, cf, dec}, {qc, cf, dec}}
+}
+
+func g(qc, qck uint8, cf, cfk bool, d, dk string) [2]formData {
+ return [2]formData{{qc, cf, d}, {qck, cfk, dk}}
+}
+
+var testData = []runeData{
+`
+
+func printTestdata() {
+ type lastInfo struct {
+ ccc uint8
+ nLead uint8
+ nTrail uint8
+ f string
+ }
+ last := lastInfo{}
+ fmt.Printf(testHeader, *url)
+ for r, c := range chars {
+ f := c.forms[FCanonical]
+ qc, cf, d := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
+ f = c.forms[FCompatibility]
+ qck, cfk, dk := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
+ s := ""
+ if d == dk && qc == qck && cf == cfk {
+ s = fmt.Sprintf("f(%s, %v, %q)", qc, cf, d)
+ } else {
+ s = fmt.Sprintf("g(%s, %s, %v, %v, %q, %q)", qc, qck, cf, cfk, d, dk)
+ }
+ current := lastInfo{c.ccc, c.nLeadingNonStarters, c.nTrailingNonStarters, s}
+ if last != current {
+ fmt.Printf("\t{0x%x, %d, %d, %d, %s},\n", r, c.origCCC, c.nLeadingNonStarters, c.nTrailingNonStarters, s)
+ last = current
+ }
+ }
+ fmt.Println("}")
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/normalize.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/normalize.go
new file mode 100644
index 0000000..d817284
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/normalize.go
@@ -0,0 +1,524 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package norm contains types and functions for normalizing Unicode strings.
+package norm
+
+import "unicode/utf8"
+
+// A Form denotes a canonical representation of Unicode code points.
+// The Unicode-defined normalization and equivalence forms are:
+//
+// NFC Unicode Normalization Form C
+// NFD Unicode Normalization Form D
+// NFKC Unicode Normalization Form KC
+// NFKD Unicode Normalization Form KD
+//
+// For a Form f, this documentation uses the notation f(x) to mean
+// the bytes or string x converted to the given form.
+// A position n in x is called a boundary if conversion to the form can
+// proceed independently on both sides:
+// f(x) == append(f(x[0:n]), f(x[n:])...)
+//
+// References: http://unicode.org/reports/tr15/ and
+// http://unicode.org/notes/tn5/.
+type Form int
+
+const (
+ NFC Form = iota
+ NFD
+ NFKC
+ NFKD
+)
+
+// Bytes returns f(b). May return b if f(b) = b.
+func (f Form) Bytes(b []byte) []byte {
+ src := inputBytes(b)
+ ft := formTable[f]
+ n, ok := ft.quickSpan(src, 0, len(b), true)
+ if ok {
+ return b
+ }
+ out := make([]byte, n, len(b))
+ copy(out, b[0:n])
+ rb := reorderBuffer{f: *ft, src: src, nsrc: len(b), out: out, flushF: appendFlush}
+ return doAppendInner(&rb, n)
+}
+
+// String returns f(s).
+func (f Form) String(s string) string {
+ src := inputString(s)
+ ft := formTable[f]
+ n, ok := ft.quickSpan(src, 0, len(s), true)
+ if ok {
+ return s
+ }
+ out := make([]byte, n, len(s))
+ copy(out, s[0:n])
+ rb := reorderBuffer{f: *ft, src: src, nsrc: len(s), out: out, flushF: appendFlush}
+ return string(doAppendInner(&rb, n))
+}
+
+// IsNormal returns true if b == f(b).
+func (f Form) IsNormal(b []byte) bool {
+ src := inputBytes(b)
+ ft := formTable[f]
+ bp, ok := ft.quickSpan(src, 0, len(b), true)
+ if ok {
+ return true
+ }
+ rb := reorderBuffer{f: *ft, src: src, nsrc: len(b)}
+ rb.setFlusher(nil, cmpNormalBytes)
+ for bp < len(b) {
+ rb.out = b[bp:]
+ if bp = decomposeSegment(&rb, bp, true); bp < 0 {
+ return false
+ }
+ bp, _ = rb.f.quickSpan(rb.src, bp, len(b), true)
+ }
+ return true
+}
+
+func cmpNormalBytes(rb *reorderBuffer) bool {
+ b := rb.out
+ for i := 0; i < rb.nrune; i++ {
+ info := rb.rune[i]
+ if int(info.size) > len(b) {
+ return false
+ }
+ p := info.pos
+ pe := p + info.size
+ for ; p < pe; p++ {
+ if b[0] != rb.byte[p] {
+ return false
+ }
+ b = b[1:]
+ }
+ }
+ return true
+}
+
+// IsNormalString returns true if s == f(s).
+func (f Form) IsNormalString(s string) bool {
+ src := inputString(s)
+ ft := formTable[f]
+ bp, ok := ft.quickSpan(src, 0, len(s), true)
+ if ok {
+ return true
+ }
+ rb := reorderBuffer{f: *ft, src: src, nsrc: len(s)}
+ rb.setFlusher(nil, func(rb *reorderBuffer) bool {
+ for i := 0; i < rb.nrune; i++ {
+ info := rb.rune[i]
+ if bp+int(info.size) > len(s) {
+ return false
+ }
+ p := info.pos
+ pe := p + info.size
+ for ; p < pe; p++ {
+ if s[bp] != rb.byte[p] {
+ return false
+ }
+ bp++
+ }
+ }
+ return true
+ })
+ for bp < len(s) {
+ if bp = decomposeSegment(&rb, bp, true); bp < 0 {
+ return false
+ }
+ bp, _ = rb.f.quickSpan(rb.src, bp, len(s), true)
+ }
+ return true
+}
+
+// patchTail fixes a case where a rune may be incorrectly normalized
+// if it is followed by illegal continuation bytes. It returns the
+// patched buffer and whether the decomposition is still in progress.
+func patchTail(rb *reorderBuffer) bool {
+ info, p := lastRuneStart(&rb.f, rb.out)
+ if p == -1 || info.size == 0 {
+ return true
+ }
+ end := p + int(info.size)
+ extra := len(rb.out) - end
+ if extra > 0 {
+ // Potentially allocating memory. However, this only
+ // happens with ill-formed UTF-8.
+ x := make([]byte, 0)
+ x = append(x, rb.out[len(rb.out)-extra:]...)
+ rb.out = rb.out[:end]
+ decomposeToLastBoundary(rb)
+ rb.doFlush()
+ rb.out = append(rb.out, x...)
+ return false
+ }
+ buf := rb.out[p:]
+ rb.out = rb.out[:p]
+ decomposeToLastBoundary(rb)
+ if s := rb.ss.next(info); s == ssStarter {
+ rb.doFlush()
+ rb.ss.first(info)
+ } else if s == ssOverflow {
+ rb.doFlush()
+ rb.insertCGJ()
+ rb.ss = 0
+ }
+ rb.insertUnsafe(inputBytes(buf), 0, info)
+ return true
+}
+
+func appendQuick(rb *reorderBuffer, i int) int {
+ if rb.nsrc == i {
+ return i
+ }
+ end, _ := rb.f.quickSpan(rb.src, i, rb.nsrc, true)
+ rb.out = rb.src.appendSlice(rb.out, i, end)
+ return end
+}
+
+// Append returns f(append(out, b...)).
+// The buffer out must be nil, empty, or equal to f(out).
+func (f Form) Append(out []byte, src ...byte) []byte {
+ return f.doAppend(out, inputBytes(src), len(src))
+}
+
+func (f Form) doAppend(out []byte, src input, n int) []byte {
+ if n == 0 {
+ return out
+ }
+ ft := formTable[f]
+ // Attempt to do a quickSpan first so we can avoid initializing the reorderBuffer.
+ if len(out) == 0 {
+ p, _ := ft.quickSpan(src, 0, n, true)
+ out = src.appendSlice(out, 0, p)
+ if p == n {
+ return out
+ }
+ rb := reorderBuffer{f: *ft, src: src, nsrc: n, out: out, flushF: appendFlush}
+ return doAppendInner(&rb, p)
+ }
+ rb := reorderBuffer{f: *ft, src: src, nsrc: n}
+ return doAppend(&rb, out, 0)
+}
+
+func doAppend(rb *reorderBuffer, out []byte, p int) []byte {
+ rb.setFlusher(out, appendFlush)
+ src, n := rb.src, rb.nsrc
+ doMerge := len(out) > 0
+ if q := src.skipContinuationBytes(p); q > p {
+ // Move leading non-starters to destination.
+ rb.out = src.appendSlice(rb.out, p, q)
+ p = q
+ doMerge = patchTail(rb)
+ }
+ fd := &rb.f
+ if doMerge {
+ var info Properties
+ if p < n {
+ info = fd.info(src, p)
+ if !info.BoundaryBefore() || info.nLeadingNonStarters() > 0 {
+ if p == 0 {
+ decomposeToLastBoundary(rb)
+ }
+ p = decomposeSegment(rb, p, true)
+ }
+ }
+ if info.size == 0 {
+ rb.doFlush()
+ // Append incomplete UTF-8 encoding.
+ return src.appendSlice(rb.out, p, n)
+ }
+ if rb.nrune > 0 {
+ return doAppendInner(rb, p)
+ }
+ }
+ p = appendQuick(rb, p)
+ return doAppendInner(rb, p)
+}
+
+func doAppendInner(rb *reorderBuffer, p int) []byte {
+ for n := rb.nsrc; p < n; {
+ p = decomposeSegment(rb, p, true)
+ p = appendQuick(rb, p)
+ }
+ return rb.out
+}
+
+// AppendString returns f(append(out, []byte(s))).
+// The buffer out must be nil, empty, or equal to f(out).
+// It is the string counterpart of Append.
+func (f Form) AppendString(out []byte, src string) []byte {
+	return f.doAppend(out, inputString(src), len(src))
+}
+
+// QuickSpan returns a boundary n such that b[0:n] == f(b[0:n]).
+// It is not guaranteed to return the largest such n.
+// The ok result of quickSpan is intentionally discarded here.
+func (f Form) QuickSpan(b []byte) int {
+	n, _ := formTable[f].quickSpan(inputBytes(b), 0, len(b), true)
+	return n
+}
+
+// quickSpan returns a boundary n such that src[0:n] == f(src[0:n]) and
+// whether any non-normalized parts were found. If atEOF is false, n will
+// not point past the last segment if this segment might become
+// non-normalized by appending other runes.
+func (f *formInfo) quickSpan(src input, i, end int, atEOF bool) (n int, ok bool) {
+	var lastCC uint8
+	ss := streamSafe(0)
+	lastSegStart := i
+	for n = end; i < n; {
+		if j := src.skipASCII(i, n); i != j {
+			i = j
+			// Treat the last ASCII byte consumed as the current segment
+			// start and reset the combining-class state.
+			lastSegStart = i - 1
+			lastCC = 0
+			ss = 0
+			continue
+		}
+		info := f.info(src, i)
+		if info.size == 0 {
+			if atEOF {
+				// include incomplete runes
+				return n, true
+			}
+			return lastSegStart, true
+		}
+		// This block needs to be before the next, because it is possible to
+		// have an overflow for runes that are starters (e.g. with U+FF9E).
+		switch ss.next(info) {
+		case ssStarter:
+			ss.first(info)
+			lastSegStart = i
+		case ssOverflow:
+			return lastSegStart, false
+		case ssSuccess:
+			if lastCC > info.ccc {
+				// Combining classes out of canonical order: not normalized.
+				return lastSegStart, false
+			}
+		}
+		if f.composing {
+			if !info.isYesC() {
+				break
+			}
+		} else {
+			if !info.isYesD() {
+				break
+			}
+		}
+		lastCC = info.ccc
+		i += int(info.size)
+	}
+	if i == n {
+		if !atEOF {
+			n = lastSegStart
+		}
+		return n, true
+	}
+	return lastSegStart, false
+}
+
+// QuickSpanString returns a boundary n such that s[0:n] == f(s[0:n]).
+// It is not guaranteed to return the largest such n.
+func (f Form) QuickSpanString(s string) int {
+	n, _ := formTable[f].quickSpan(inputString(s), 0, len(s), true)
+	return n
+}
+
+// FirstBoundary returns the position i of the first boundary in b
+// or -1 if b contains no boundary.
+// See firstBoundary for the shared implementation.
+func (f Form) FirstBoundary(b []byte) int {
+	return f.firstBoundary(inputBytes(b), len(b))
+}
+
+// firstBoundary is the shared implementation of FirstBoundary and
+// FirstBoundaryInString over the input abstraction.
+func (f Form) firstBoundary(src input, nsrc int) int {
+	// A boundary cannot start inside a rune; skip leading continuation bytes.
+	i := src.skipContinuationBytes(0)
+	if i >= nsrc {
+		return -1
+	}
+	fd := formTable[f]
+	ss := streamSafe(0)
+	// We should call ss.first here, but we can't as the first rune is
+	// skipped already. This means FirstBoundary can't really determine
+	// CGJ insertion points correctly. Luckily it doesn't have to.
+	// TODO: consider adding NextBoundary
+	for {
+		info := fd.info(src, i)
+		if info.size == 0 {
+			return -1
+		}
+		if s := ss.next(info); s != ssSuccess {
+			return i
+		}
+		i += int(info.size)
+		if i >= nsrc {
+			if !info.BoundaryAfter() && !ss.isMax() {
+				return -1
+			}
+			return nsrc
+		}
+	}
+}
+
+// FirstBoundaryInString returns the position i of the first boundary in s
+// or -1 if s contains no boundary.
+// It is the string counterpart of FirstBoundary.
+func (f Form) FirstBoundaryInString(s string) int {
+	return f.firstBoundary(inputString(s), len(s))
+}
+
+// LastBoundary returns the position i of the last boundary in b
+// or -1 if b contains no boundary.
+// See lastBoundary for the implementation.
+func (f Form) LastBoundary(b []byte) int {
+	return lastBoundary(formTable[f], b)
+}
+
+// lastBoundary scans backwards from the end of b over at most a
+// stream-safe run of non-starters to find the last boundary position.
+func lastBoundary(fd *formInfo, b []byte) int {
+	i := len(b)
+	info, p := lastRuneStart(fd, b)
+	if p == -1 {
+		return -1
+	}
+	if info.size == 0 { // ends with incomplete rune
+		if p == 0 { // starts with incomplete rune
+			return -1
+		}
+		i = p
+		info, p = lastRuneStart(fd, b[:i])
+		if p == -1 { // incomplete UTF-8 encoding or non-starter bytes without a starter
+			return i
+		}
+	}
+	if p+int(info.size) != i { // trailing non-starter bytes: illegal UTF-8
+		return i
+	}
+	if info.BoundaryAfter() {
+		return i
+	}
+	ss := streamSafe(0)
+	v := ss.backwards(info)
+	// Walk runes backwards until a starter is found or the stream-safe
+	// budget overflows.
+	for i = p; i >= 0 && v != ssStarter; i = p {
+		info, p = lastRuneStart(fd, b[:i])
+		if v = ss.backwards(info); v == ssOverflow {
+			break
+		}
+		if p+int(info.size) != i {
+			if p == -1 { // no boundary found
+				return -1
+			}
+			return i // boundary after an illegal UTF-8 encoding
+		}
+	}
+	return i
+}
+
+// decomposeSegment scans the first segment in src into rb. It inserts 0x034f
+// (Grapheme Joiner) when it encounters a sequence of more than 30 non-starters
+// and returns the number of bytes consumed from src or iShortDst or iShortSrc.
+// A return of 0 means the rune at sp could not be decoded.
+func decomposeSegment(rb *reorderBuffer, sp int, atEOF bool) int {
+	// Force one character to be consumed.
+	info := rb.f.info(rb.src, sp)
+	if info.size == 0 {
+		return 0
+	}
+	if rb.nrune > 0 {
+		if s := rb.ss.next(info); s == ssStarter {
+			goto end
+		} else if s == ssOverflow {
+			rb.insertCGJ()
+			goto end
+		}
+	} else {
+		rb.ss.first(info)
+	}
+	if err := rb.insertFlush(rb.src, sp, info); err != iSuccess {
+		return int(err)
+	}
+	// Consume runes until the next starter, an overflow, or the end of input.
+	for {
+		sp += int(info.size)
+		if sp >= rb.nsrc {
+			if !atEOF && !info.BoundaryAfter() {
+				return int(iShortSrc)
+			}
+			break
+		}
+		info = rb.f.info(rb.src, sp)
+		if info.size == 0 {
+			if !atEOF {
+				return int(iShortSrc)
+			}
+			break
+		}
+		if s := rb.ss.next(info); s == ssStarter {
+			break
+		} else if s == ssOverflow {
+			rb.insertCGJ()
+			break
+		}
+		if err := rb.insertFlush(rb.src, sp, info); err != iSuccess {
+			return int(err)
+		}
+	}
+end:
+	if !rb.doFlush() {
+		return int(iShortDst)
+	}
+	return sp
+}
+
+// lastRuneStart returns the runeInfo and position of the last
+// rune in buf or the zero runeInfo and -1 if no rune was found.
+func lastRuneStart(fd *formInfo, buf []byte) (Properties, int) {
+	// Scan backwards to the first byte that can start a UTF-8 sequence.
+	p := len(buf) - 1
+	for ; p >= 0 && !utf8.RuneStart(buf[p]); p-- {
+	}
+	if p < 0 {
+		return Properties{}, -1
+	}
+	return fd.info(inputBytes(buf), p), p
+}
+
+// decomposeToLastBoundary finds an open segment at the end of the buffer
+// and scans it into rb. Returns the buffer minus the last segment.
+func decomposeToLastBoundary(rb *reorderBuffer) {
+	fd := &rb.f
+	info, i := lastRuneStart(fd, rb.out)
+	if int(info.size) != len(rb.out)-i {
+		// illegal trailing continuation bytes
+		return
+	}
+	if info.BoundaryAfter() {
+		return
+	}
+	var add [maxNonStarters + 1]Properties // stores runeInfo in reverse order
+	padd := 0
+	ss := streamSafe(0)
+	p := len(rb.out)
+	// Walk backwards collecting runes until a starter or overflow.
+	for {
+		add[padd] = info
+		v := ss.backwards(info)
+		if v == ssOverflow {
+			// Note that if we have an overflow, the string we are appending to
+			// is not correctly normalized. In this case the behavior is undefined.
+			break
+		}
+		padd++
+		p -= int(info.size)
+		if v == ssStarter || p < 0 {
+			break
+		}
+		info, i = lastRuneStart(fd, rb.out[:p])
+		if int(info.size) != p-i {
+			break
+		}
+	}
+	rb.ss = ss
+	// Copy bytes for insertion as we may need to overwrite rb.out.
+	var buf [maxBufferSize * utf8.UTFMax]byte
+	cp := buf[:copy(buf[:], rb.out[p:])]
+	rb.out = rb.out[:p]
+	// Re-insert the collected runes in forward order.
+	for padd--; padd >= 0; padd-- {
+		info = add[padd]
+		rb.insertUnsafe(inputBytes(cp), 0, info)
+		cp = cp[info.size:]
+	}
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/normregtest.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/normregtest.go
new file mode 100644
index 0000000..ab07c68
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/normregtest.go
@@ -0,0 +1,318 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "flag"
+ "fmt"
+ "log"
+ "net/http"
+ "os"
+ "path"
+ "regexp"
+ "runtime"
+ "strconv"
+ "strings"
+ "time"
+ "unicode"
+ "unicode/utf8"
+
+ "code.google.com/p/go.text/unicode/norm"
+)
+
+// main runs every test suite; PASS is printed only when the comparison
+// helpers recorded no mismatches in errorCount.
+func main() {
+	flag.Parse()
+	loadTestData()
+	CharacterByCharacterTests()
+	StandardTests()
+	PerformanceTest()
+	if errorCount == 0 {
+		fmt.Println("PASS")
+	}
+}
+
+const file = "NormalizationTest.txt"
+
+// Command-line configuration for where NormalizationTest.txt is fetched from.
+var url = flag.String("url",
+	"http://www.unicode.org/Public/"+unicode.Version+"/ucd/"+file,
+	"URL of Unicode database directory")
+var localFiles = flag.Bool("local",
+	false,
+	"data files have been copied to the current directory; for debugging only")
+
+var logger = log.New(os.Stderr, "", log.Lshortfile)
+
+// This regression test runs the test set in NormalizationTest.txt
+// (taken from http://www.unicode.org/Public//ucd/).
+//
+// NormalizationTest.txt has form:
+// @Part0 # Specific cases
+// #
+// 1E0A;1E0A;0044 0307;1E0A;0044 0307; # (Ḋ; Ḋ; D◌̇; Ḋ; D◌̇; ) LATIN CAPITAL LETTER D WITH DOT ABOVE
+// 1E0C;1E0C;0044 0323;1E0C;0044 0323; # (Ḍ; Ḍ; D◌̣; Ḍ; D◌̣; ) LATIN CAPITAL LETTER D WITH DOT BELOW
+//
+// Each test has 5 columns (c1, c2, c3, c4, c5), where
+// (c1, c2, c3, c4, c5) == (c1, NFC(c1), NFD(c1), NFKC(c1), NFKD(c1))
+//
+// CONFORMANCE:
+// 1. The following invariants must be true for all conformant implementations
+//
+// NFC
+// c2 == NFC(c1) == NFC(c2) == NFC(c3)
+// c4 == NFC(c4) == NFC(c5)
+//
+// NFD
+// c3 == NFD(c1) == NFD(c2) == NFD(c3)
+// c5 == NFD(c4) == NFD(c5)
+//
+// NFKC
+// c4 == NFKC(c1) == NFKC(c2) == NFKC(c3) == NFKC(c4) == NFKC(c5)
+//
+// NFKD
+// c5 == NFKD(c1) == NFKD(c2) == NFKD(c3) == NFKD(c4) == NFKD(c5)
+//
+// 2. For every code point X assigned in this version of Unicode that is not
+// specifically listed in Part 1, the following invariants must be true
+// for all conformant implementations:
+//
+// X == NFC(X) == NFD(X) == NFKC(X) == NFKD(X)
+//
+
+// Column types.
+// These index Test.cols: cRaw is the input, the others its expected forms.
+const (
+	cRaw = iota
+	cNFC
+	cNFD
+	cNFKC
+	cNFKD
+	cMaxColumns
+)
+
+// Holds data from NormalizationTest.txt
+var part []Part
+
+// Part is one "@Part" section of the test file.
+type Part struct {
+	name string
+	number int
+	tests []Test
+}
+
+// Test is one five-column line of the test file.
+type Test struct {
+	name string
+	partnr int
+	number int
+	r rune // used for character by character test
+	cols [cMaxColumns]string // Each has 5 entries, see below.
+}
+
+// Name returns "<part name>:<number>", or just the part name for
+// synthetic tests created with a negative number.
+func (t Test) Name() string {
+	if t.number < 0 {
+		return part[t.partnr].name
+	}
+	return fmt.Sprintf("%s:%d", part[t.partnr].name, t.number)
+}
+
+var partRe = regexp.MustCompile(`@Part(\d) # (.*)$`)
+
+// testRe captures the five semicolon-terminated columns plus the trailing comment.
+var testRe = regexp.MustCompile(`^` + strings.Repeat(`([\dA-F ]+);`, 5) + ` # (.*)$`)
+
+var counter int
+
+// Load the data from NormalizationTest.txt
+func loadTestData() {
+	if *localFiles {
+		pwd, _ := os.Getwd()
+		*url = "file://" + path.Join(pwd, file)
+	}
+	// Register a file:// transport so the -local flag can reuse the HTTP path.
+	t := &http.Transport{}
+	t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/")))
+	c := &http.Client{Transport: t}
+	resp, err := c.Get(*url)
+	if err != nil {
+		logger.Fatal(err)
+	}
+	if resp.StatusCode != 200 {
+		logger.Fatal("bad GET status for "+file, resp.Status)
+	}
+	f := resp.Body
+	defer f.Close()
+	scanner := bufio.NewScanner(f)
+	for scanner.Scan() {
+		line := scanner.Text()
+		if len(line) == 0 || line[0] == '#' {
+			continue
+		}
+		m := partRe.FindStringSubmatch(line)
+		if m != nil {
+			if len(m) < 3 {
+				logger.Fatal("Failed to parse Part: ", line)
+			}
+			i, err := strconv.Atoi(m[1])
+			if err != nil {
+				logger.Fatal(err)
+			}
+			name := m[2]
+			// Drops the last captured character — presumably a trailing
+			// '\r' or space; TODO confirm against the data file.
+			part = append(part, Part{name: name[:len(name)-1], number: i})
+			continue
+		}
+		m = testRe.FindStringSubmatch(line)
+		if m == nil || len(m) < 7 {
+			logger.Fatalf(`Failed to parse: "%s" result: %#v`, line, m)
+		}
+		test := Test{name: m[6], partnr: len(part) - 1, number: counter}
+		counter++
+		for j := 1; j < len(m)-1; j++ {
+			for _, split := range strings.Split(m[j], " ") {
+				r, err := strconv.ParseUint(split, 16, 64)
+				if err != nil {
+					logger.Fatal(err)
+				}
+				if test.r == 0 {
+					// save for CharacterByCharacterTests
+					test.r = rune(r)
+				}
+				var buf [utf8.UTFMax]byte
+				sz := utf8.EncodeRune(buf[:], rune(r))
+				test.cols[j-1] += string(buf[:sz])
+			}
+		}
+		// Note: the local part shadows the package-level slice of the same name.
+		part := &part[len(part)-1]
+		part.tests = append(part.tests, test)
+	}
+	if scanner.Err() != nil {
+		logger.Fatal(scanner.Err())
+	}
+}
+
+// fstr maps norm.Form values to their printable names for log output.
+var fstr = []string{"NFC", "NFD", "NFKC", "NFKD"}
+
+// errorCount tallies all mismatches; nonzero suppresses the final PASS.
+var errorCount int
+
+// cmpResult records a mismatch between result and gold; logging stops
+// after 20 errors but the count keeps increasing.
+func cmpResult(t *Test, name string, f norm.Form, gold, test, result string) {
+	if gold != result {
+		errorCount++
+		if errorCount > 20 {
+			return
+		}
+		logger.Printf("%s:%s: %s(%+q)=%+q; want %+q: %s",
+			t.Name(), name, fstr[f], test, result, gold, t.name)
+	}
+}
+
+// cmpIsNormal records a wrong IsNormal/IsNormalString verdict; logging
+// stops after 20 errors but the count keeps increasing.
+func cmpIsNormal(t *Test, name string, f norm.Form, test string, result, want bool) {
+	if result != want {
+		errorCount++
+		if errorCount > 20 {
+			return
+		}
+		logger.Printf("%s:%s: %s(%+q)=%v; want %v", t.Name(), name, fstr[f], test, result, want)
+	}
+}
+
+// doTest checks a single (form, golden, input) triple through every public
+// API surface: Bytes, String, Iter, Transform, incremental Append at each
+// split point, and the IsNormal predicates.
+func doTest(t *Test, f norm.Form, gold, test string) {
+	testb := []byte(test)
+	result := f.Bytes(testb)
+	cmpResult(t, "Bytes", f, gold, test, string(result))
+
+	sresult := f.String(test)
+	cmpResult(t, "String", f, gold, test, sresult)
+
+	acc := []byte{}
+	i := norm.Iter{}
+	i.InitString(f, test)
+	for !i.Done() {
+		acc = append(acc, i.Next()...)
+	}
+	cmpResult(t, "Iter.Next", f, gold, test, string(acc))
+
+	buf := make([]byte, 128)
+	acc = nil
+	for p := 0; p < len(testb); {
+		nDst, nSrc, _ := f.Transform(buf, testb[p:], true)
+		acc = append(acc, buf[:nDst]...)
+		p += nSrc
+	}
+	cmpResult(t, "Transform", f, gold, test, string(acc))
+
+	// Normalize every prefix, then append the remainder through Append.
+	for i := range test {
+		out := f.Append(f.Bytes([]byte(test[:i])), []byte(test[i:])...)
+		cmpResult(t, fmt.Sprintf(":Append:%d", i), f, gold, test, string(out))
+	}
+	cmpIsNormal(t, "IsNormal", f, test, f.IsNormal([]byte(test)), test == gold)
+	cmpIsNormal(t, "IsNormalString", f, test, f.IsNormalString(test), test == gold)
+}
+
+// doConformanceTests verifies the conformance invariants for one test row
+// (see the invariant table in the file comment). partn is currently unused.
+func doConformanceTests(t *Test, partn int) {
+	for i := 0; i <= 2; i++ {
+		doTest(t, norm.NFC, t.cols[1], t.cols[i])
+		doTest(t, norm.NFD, t.cols[2], t.cols[i])
+		doTest(t, norm.NFKC, t.cols[3], t.cols[i])
+		doTest(t, norm.NFKD, t.cols[4], t.cols[i])
+	}
+	for i := 3; i <= 4; i++ {
+		doTest(t, norm.NFC, t.cols[3], t.cols[i])
+		doTest(t, norm.NFD, t.cols[4], t.cols[i])
+		doTest(t, norm.NFKC, t.cols[3], t.cols[i])
+		doTest(t, norm.NFKD, t.cols[4], t.cols[i])
+	}
+}
+
+// CharacterByCharacterTests checks that every code point not explicitly
+// listed in Part 1 is a fixed point of all four normalization forms.
+func CharacterByCharacterTests() {
+	tests := part[1].tests
+	var last rune = 0
+	for i := 0; i <= len(tests); i++ { // last one is special case
+		var r rune
+		if i == len(tests) {
+			r = 0x2FA1E // Don't have to go to 0x10FFFF
+		} else {
+			r = tests[i].r
+		}
+		for last++; last < r; last++ {
+			// Check all characters that were not explicitly listed in the test.
+			t := &Test{partnr: 1, number: -1}
+			char := string(last)
+			doTest(t, norm.NFC, char, char)
+			doTest(t, norm.NFD, char, char)
+			doTest(t, norm.NFKC, char, char)
+			doTest(t, norm.NFKD, char, char)
+		}
+		if i < len(tests) {
+			doConformanceTests(&tests[i], 1)
+		}
+	}
+}
+
+// StandardTests runs the conformance checks for parts 0, 2, and 3;
+// part 1 is exercised by CharacterByCharacterTests.
+func StandardTests() {
+	for _, j := range []int{0, 2, 3} {
+		for _, test := range part[j].tests {
+			doConformanceTests(&test, j)
+		}
+	}
+}
+
+// PerformanceTest verifies that normalization is O(n). If any of the
+// code does not properly check for maxCombiningChars, normalization
+// may exhibit O(n**2) behavior.
+func PerformanceTest() {
+	runtime.GOMAXPROCS(2)
+	success := make(chan bool, 1)
+	go func() {
+		// 1M copies of one combining mark followed by a different one.
+		buf := bytes.Repeat([]byte("\u035D"), 1024*1024)
+		buf = append(buf, "\u035B"...)
+		norm.NFC.Append(nil, buf...)
+		success <- true
+	}()
+	timeout := time.After(1 * time.Second)
+	select {
+	case <-success:
+		// test completed before the timeout
+	case <-timeout:
+		errorCount++
+		logger.Printf(`unexpectedly long time to complete PerformanceTest`)
+	}
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/readwriter.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/readwriter.go
new file mode 100644
index 0000000..4fa0e04
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/readwriter.go
@@ -0,0 +1,126 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package norm
+
+import "io"
+
+// normWriter wraps an io.Writer, buffering bytes that end short of a
+// normalization boundary until more input (or Close) arrives.
+type normWriter struct {
+	rb reorderBuffer
+	w io.Writer
+	buf []byte
+}
+
+// Write implements the standard write interface. If the last characters are
+// not at a normalization boundary, the bytes will be buffered for the next
+// write. The remaining bytes will be written on close.
+func (w *normWriter) Write(data []byte) (n int, err error) {
+	// Process data in pieces to keep w.buf size bounded.
+	const chunk = 4000
+
+	for len(data) > 0 {
+		// Normalize into w.buf.
+		m := len(data)
+		if m > chunk {
+			m = chunk
+		}
+		w.rb.src = inputBytes(data[:m])
+		w.rb.nsrc = m
+		w.buf = doAppend(&w.rb, w.buf, 0)
+		data = data[m:]
+		n += m
+
+		// Write out complete prefix, save remainder.
+		// Note that lastBoundary looks back at most 31 runes.
+		i := lastBoundary(&w.rb.f, w.buf)
+		if i == -1 {
+			// No boundary yet: keep everything buffered.
+			i = 0
+		}
+		if i > 0 {
+			if _, err = w.w.Write(w.buf[:i]); err != nil {
+				break
+			}
+			bn := copy(w.buf, w.buf[i:])
+			w.buf = w.buf[:bn]
+		}
+	}
+	return n, err
+}
+
+// Close forces data that remains in the buffer to be written.
+// It does not close the underlying writer.
+func (w *normWriter) Close() error {
+	if len(w.buf) > 0 {
+		_, err := w.w.Write(w.buf)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// Writer returns a new writer that implements Write(b)
+// by writing f(b) to w. The returned writer may use
+// an internal buffer to maintain state across Write calls.
+// Calling its Close method writes any buffered data to w.
+func (f Form) Writer(w io.Writer) io.WriteCloser {
+	wr := &normWriter{rb: reorderBuffer{}, w: w}
+	wr.rb.init(f, nil)
+	return wr
+}
+
+// normReader incrementally normalizes data read from r. outbuf holds
+// normalized output; bytes at or past lastBoundary may still change and
+// are carried over to the next fill.
+type normReader struct {
+	rb reorderBuffer
+	r io.Reader
+	inbuf []byte
+	outbuf []byte
+	bufStart int
+	lastBoundary int
+	err error
+}
+
+// Read implements the standard read interface.
+func (r *normReader) Read(p []byte) (int, error) {
+	for {
+		// Drain any pending normalized output first.
+		if r.lastBoundary-r.bufStart > 0 {
+			n := copy(p, r.outbuf[r.bufStart:r.lastBoundary])
+			r.bufStart += n
+			if r.lastBoundary-r.bufStart > 0 {
+				return n, nil
+			}
+			return n, r.err
+		}
+		if r.err != nil {
+			return 0, r.err
+		}
+		// Carry the unfinished tail (past the last boundary) into the next round.
+		outn := copy(r.outbuf, r.outbuf[r.lastBoundary:])
+		r.outbuf = r.outbuf[0:outn]
+		r.bufStart = 0
+
+		n, err := r.r.Read(r.inbuf)
+		r.rb.src = inputBytes(r.inbuf[0:n])
+		r.rb.nsrc, r.err = n, err
+		if n > 0 {
+			r.outbuf = doAppend(&r.rb, r.outbuf, 0)
+		}
+		if err == io.EOF {
+			r.lastBoundary = len(r.outbuf)
+		} else {
+			r.lastBoundary = lastBoundary(&r.rb.f, r.outbuf)
+			if r.lastBoundary == -1 {
+				r.lastBoundary = 0
+			}
+		}
+	}
+	// Unreachable: the loop above only exits via return.
+	panic("should not reach here")
+}
+
+// Reader returns a new reader that implements Read
+// by reading data from r and returning f(data).
+func (f Form) Reader(r io.Reader) io.Reader {
+	// chunk bounds each underlying read (same size normWriter uses).
+	const chunk = 4000
+	buf := make([]byte, chunk)
+	rr := &normReader{rb: reorderBuffer{}, r: r, inbuf: buf}
+	rr.rb.init(f, buf)
+	return rr
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/tables.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/tables.go
new file mode 100644
index 0000000..932c4ec
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/tables.go
@@ -0,0 +1,7549 @@
+// Generated by running
+// maketables --tables=all --url=http://www.unicode.org/Public/7.0.0/ucd/
+// DO NOT EDIT
+
+package norm
+
+const (
+ // Version is the Unicode edition from which the tables are derived.
+ Version = "7.0.0"
+
+ // MaxTransformChunkSize indicates the maximum number of bytes that Transform
+ // may need to write atomically for any Form. Making a destination buffer at
+ // least this size ensures that Transform can always make progress and that
+ // the user does not need to grow the buffer on an ErrShortDst.
+ MaxTransformChunkSize = 35 + maxNonStarters*4
+)
+
+var ccc = [55]uint8{
+ 0, 1, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20,
+ 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, 36,
+ 84, 91, 103, 107, 118, 122, 129, 130,
+ 132, 202, 214, 216, 218, 220, 222, 224,
+ 226, 228, 230, 232, 233, 234, 240,
+}
+
+const (
+ firstMulti = 0x18E1
+ firstCCC = 0x2EC5
+ endMulti = 0x2F9B
+ firstLeadingCCC = 0x49E9
+ firstCCCZeroExcept = 0x49FF
+ firstStarterWithNLead = 0x4A26
+ lastDecomp = 0x4A28
+ maxDecomp = 0x8000
+)
+
+// decomps: 18984 bytes
+var decomps = [...]byte{
+ // Bytes 0 - 3f
+ 0x00, 0x41, 0x20, 0x41, 0x21, 0x41, 0x22, 0x41,
+ 0x23, 0x41, 0x24, 0x41, 0x25, 0x41, 0x26, 0x41,
+ 0x27, 0x41, 0x28, 0x41, 0x29, 0x41, 0x2A, 0x41,
+ 0x2B, 0x41, 0x2C, 0x41, 0x2D, 0x41, 0x2E, 0x41,
+ 0x2F, 0x41, 0x30, 0x41, 0x31, 0x41, 0x32, 0x41,
+ 0x33, 0x41, 0x34, 0x41, 0x35, 0x41, 0x36, 0x41,
+ 0x37, 0x41, 0x38, 0x41, 0x39, 0x41, 0x3A, 0x41,
+ 0x3B, 0x41, 0x3C, 0x41, 0x3D, 0x41, 0x3E, 0x41,
+ // Bytes 40 - 7f
+ 0x3F, 0x41, 0x40, 0x41, 0x41, 0x41, 0x42, 0x41,
+ 0x43, 0x41, 0x44, 0x41, 0x45, 0x41, 0x46, 0x41,
+ 0x47, 0x41, 0x48, 0x41, 0x49, 0x41, 0x4A, 0x41,
+ 0x4B, 0x41, 0x4C, 0x41, 0x4D, 0x41, 0x4E, 0x41,
+ 0x4F, 0x41, 0x50, 0x41, 0x51, 0x41, 0x52, 0x41,
+ 0x53, 0x41, 0x54, 0x41, 0x55, 0x41, 0x56, 0x41,
+ 0x57, 0x41, 0x58, 0x41, 0x59, 0x41, 0x5A, 0x41,
+ 0x5B, 0x41, 0x5C, 0x41, 0x5D, 0x41, 0x5E, 0x41,
+ // Bytes 80 - bf
+ 0x5F, 0x41, 0x60, 0x41, 0x61, 0x41, 0x62, 0x41,
+ 0x63, 0x41, 0x64, 0x41, 0x65, 0x41, 0x66, 0x41,
+ 0x67, 0x41, 0x68, 0x41, 0x69, 0x41, 0x6A, 0x41,
+ 0x6B, 0x41, 0x6C, 0x41, 0x6D, 0x41, 0x6E, 0x41,
+ 0x6F, 0x41, 0x70, 0x41, 0x71, 0x41, 0x72, 0x41,
+ 0x73, 0x41, 0x74, 0x41, 0x75, 0x41, 0x76, 0x41,
+ 0x77, 0x41, 0x78, 0x41, 0x79, 0x41, 0x7A, 0x41,
+ 0x7B, 0x41, 0x7C, 0x41, 0x7D, 0x41, 0x7E, 0x42,
+ // Bytes c0 - ff
+ 0xC2, 0xA2, 0x42, 0xC2, 0xA3, 0x42, 0xC2, 0xA5,
+ 0x42, 0xC2, 0xA6, 0x42, 0xC2, 0xAC, 0x42, 0xC2,
+ 0xB7, 0x42, 0xC3, 0x86, 0x42, 0xC3, 0xB0, 0x42,
+ 0xC4, 0xA6, 0x42, 0xC4, 0xA7, 0x42, 0xC4, 0xB1,
+ 0x42, 0xC5, 0x8B, 0x42, 0xC5, 0x93, 0x42, 0xC6,
+ 0x8E, 0x42, 0xC6, 0x90, 0x42, 0xC6, 0xAB, 0x42,
+ 0xC8, 0xA2, 0x42, 0xC8, 0xB7, 0x42, 0xC9, 0x90,
+ 0x42, 0xC9, 0x91, 0x42, 0xC9, 0x92, 0x42, 0xC9,
+ // Bytes 100 - 13f
+ 0x94, 0x42, 0xC9, 0x95, 0x42, 0xC9, 0x99, 0x42,
+ 0xC9, 0x9B, 0x42, 0xC9, 0x9C, 0x42, 0xC9, 0x9F,
+ 0x42, 0xC9, 0xA1, 0x42, 0xC9, 0xA3, 0x42, 0xC9,
+ 0xA5, 0x42, 0xC9, 0xA6, 0x42, 0xC9, 0xA8, 0x42,
+ 0xC9, 0xA9, 0x42, 0xC9, 0xAA, 0x42, 0xC9, 0xAB,
+ 0x42, 0xC9, 0xAD, 0x42, 0xC9, 0xAF, 0x42, 0xC9,
+ 0xB0, 0x42, 0xC9, 0xB1, 0x42, 0xC9, 0xB2, 0x42,
+ 0xC9, 0xB3, 0x42, 0xC9, 0xB4, 0x42, 0xC9, 0xB5,
+ // Bytes 140 - 17f
+ 0x42, 0xC9, 0xB8, 0x42, 0xC9, 0xB9, 0x42, 0xC9,
+ 0xBB, 0x42, 0xCA, 0x81, 0x42, 0xCA, 0x82, 0x42,
+ 0xCA, 0x83, 0x42, 0xCA, 0x89, 0x42, 0xCA, 0x8A,
+ 0x42, 0xCA, 0x8B, 0x42, 0xCA, 0x8C, 0x42, 0xCA,
+ 0x90, 0x42, 0xCA, 0x91, 0x42, 0xCA, 0x92, 0x42,
+ 0xCA, 0x95, 0x42, 0xCA, 0x9D, 0x42, 0xCA, 0x9F,
+ 0x42, 0xCA, 0xB9, 0x42, 0xCE, 0x91, 0x42, 0xCE,
+ 0x92, 0x42, 0xCE, 0x93, 0x42, 0xCE, 0x94, 0x42,
+ // Bytes 180 - 1bf
+ 0xCE, 0x95, 0x42, 0xCE, 0x96, 0x42, 0xCE, 0x97,
+ 0x42, 0xCE, 0x98, 0x42, 0xCE, 0x99, 0x42, 0xCE,
+ 0x9A, 0x42, 0xCE, 0x9B, 0x42, 0xCE, 0x9C, 0x42,
+ 0xCE, 0x9D, 0x42, 0xCE, 0x9E, 0x42, 0xCE, 0x9F,
+ 0x42, 0xCE, 0xA0, 0x42, 0xCE, 0xA1, 0x42, 0xCE,
+ 0xA3, 0x42, 0xCE, 0xA4, 0x42, 0xCE, 0xA5, 0x42,
+ 0xCE, 0xA6, 0x42, 0xCE, 0xA7, 0x42, 0xCE, 0xA8,
+ 0x42, 0xCE, 0xA9, 0x42, 0xCE, 0xB1, 0x42, 0xCE,
+ // Bytes 1c0 - 1ff
+ 0xB2, 0x42, 0xCE, 0xB3, 0x42, 0xCE, 0xB4, 0x42,
+ 0xCE, 0xB5, 0x42, 0xCE, 0xB6, 0x42, 0xCE, 0xB7,
+ 0x42, 0xCE, 0xB8, 0x42, 0xCE, 0xB9, 0x42, 0xCE,
+ 0xBA, 0x42, 0xCE, 0xBB, 0x42, 0xCE, 0xBC, 0x42,
+ 0xCE, 0xBD, 0x42, 0xCE, 0xBE, 0x42, 0xCE, 0xBF,
+ 0x42, 0xCF, 0x80, 0x42, 0xCF, 0x81, 0x42, 0xCF,
+ 0x82, 0x42, 0xCF, 0x83, 0x42, 0xCF, 0x84, 0x42,
+ 0xCF, 0x85, 0x42, 0xCF, 0x86, 0x42, 0xCF, 0x87,
+ // Bytes 200 - 23f
+ 0x42, 0xCF, 0x88, 0x42, 0xCF, 0x89, 0x42, 0xCF,
+ 0x9C, 0x42, 0xCF, 0x9D, 0x42, 0xD0, 0xBD, 0x42,
+ 0xD1, 0x8A, 0x42, 0xD1, 0x8C, 0x42, 0xD7, 0x90,
+ 0x42, 0xD7, 0x91, 0x42, 0xD7, 0x92, 0x42, 0xD7,
+ 0x93, 0x42, 0xD7, 0x94, 0x42, 0xD7, 0x9B, 0x42,
+ 0xD7, 0x9C, 0x42, 0xD7, 0x9D, 0x42, 0xD7, 0xA2,
+ 0x42, 0xD7, 0xA8, 0x42, 0xD7, 0xAA, 0x42, 0xD8,
+ 0xA1, 0x42, 0xD8, 0xA7, 0x42, 0xD8, 0xA8, 0x42,
+ // Bytes 240 - 27f
+ 0xD8, 0xA9, 0x42, 0xD8, 0xAA, 0x42, 0xD8, 0xAB,
+ 0x42, 0xD8, 0xAC, 0x42, 0xD8, 0xAD, 0x42, 0xD8,
+ 0xAE, 0x42, 0xD8, 0xAF, 0x42, 0xD8, 0xB0, 0x42,
+ 0xD8, 0xB1, 0x42, 0xD8, 0xB2, 0x42, 0xD8, 0xB3,
+ 0x42, 0xD8, 0xB4, 0x42, 0xD8, 0xB5, 0x42, 0xD8,
+ 0xB6, 0x42, 0xD8, 0xB7, 0x42, 0xD8, 0xB8, 0x42,
+ 0xD8, 0xB9, 0x42, 0xD8, 0xBA, 0x42, 0xD9, 0x81,
+ 0x42, 0xD9, 0x82, 0x42, 0xD9, 0x83, 0x42, 0xD9,
+ // Bytes 280 - 2bf
+ 0x84, 0x42, 0xD9, 0x85, 0x42, 0xD9, 0x86, 0x42,
+ 0xD9, 0x87, 0x42, 0xD9, 0x88, 0x42, 0xD9, 0x89,
+ 0x42, 0xD9, 0x8A, 0x42, 0xD9, 0xAE, 0x42, 0xD9,
+ 0xAF, 0x42, 0xD9, 0xB1, 0x42, 0xD9, 0xB9, 0x42,
+ 0xD9, 0xBA, 0x42, 0xD9, 0xBB, 0x42, 0xD9, 0xBE,
+ 0x42, 0xD9, 0xBF, 0x42, 0xDA, 0x80, 0x42, 0xDA,
+ 0x83, 0x42, 0xDA, 0x84, 0x42, 0xDA, 0x86, 0x42,
+ 0xDA, 0x87, 0x42, 0xDA, 0x88, 0x42, 0xDA, 0x8C,
+ // Bytes 2c0 - 2ff
+ 0x42, 0xDA, 0x8D, 0x42, 0xDA, 0x8E, 0x42, 0xDA,
+ 0x91, 0x42, 0xDA, 0x98, 0x42, 0xDA, 0xA1, 0x42,
+ 0xDA, 0xA4, 0x42, 0xDA, 0xA6, 0x42, 0xDA, 0xA9,
+ 0x42, 0xDA, 0xAD, 0x42, 0xDA, 0xAF, 0x42, 0xDA,
+ 0xB1, 0x42, 0xDA, 0xB3, 0x42, 0xDA, 0xBA, 0x42,
+ 0xDA, 0xBB, 0x42, 0xDA, 0xBE, 0x42, 0xDB, 0x81,
+ 0x42, 0xDB, 0x85, 0x42, 0xDB, 0x86, 0x42, 0xDB,
+ 0x87, 0x42, 0xDB, 0x88, 0x42, 0xDB, 0x89, 0x42,
+ // Bytes 300 - 33f
+ 0xDB, 0x8B, 0x42, 0xDB, 0x8C, 0x42, 0xDB, 0x90,
+ 0x42, 0xDB, 0x92, 0x43, 0xE0, 0xBC, 0x8B, 0x43,
+ 0xE1, 0x83, 0x9C, 0x43, 0xE1, 0x84, 0x80, 0x43,
+ 0xE1, 0x84, 0x81, 0x43, 0xE1, 0x84, 0x82, 0x43,
+ 0xE1, 0x84, 0x83, 0x43, 0xE1, 0x84, 0x84, 0x43,
+ 0xE1, 0x84, 0x85, 0x43, 0xE1, 0x84, 0x86, 0x43,
+ 0xE1, 0x84, 0x87, 0x43, 0xE1, 0x84, 0x88, 0x43,
+ 0xE1, 0x84, 0x89, 0x43, 0xE1, 0x84, 0x8A, 0x43,
+ // Bytes 340 - 37f
+ 0xE1, 0x84, 0x8B, 0x43, 0xE1, 0x84, 0x8C, 0x43,
+ 0xE1, 0x84, 0x8D, 0x43, 0xE1, 0x84, 0x8E, 0x43,
+ 0xE1, 0x84, 0x8F, 0x43, 0xE1, 0x84, 0x90, 0x43,
+ 0xE1, 0x84, 0x91, 0x43, 0xE1, 0x84, 0x92, 0x43,
+ 0xE1, 0x84, 0x94, 0x43, 0xE1, 0x84, 0x95, 0x43,
+ 0xE1, 0x84, 0x9A, 0x43, 0xE1, 0x84, 0x9C, 0x43,
+ 0xE1, 0x84, 0x9D, 0x43, 0xE1, 0x84, 0x9E, 0x43,
+ 0xE1, 0x84, 0xA0, 0x43, 0xE1, 0x84, 0xA1, 0x43,
+ // Bytes 380 - 3bf
+ 0xE1, 0x84, 0xA2, 0x43, 0xE1, 0x84, 0xA3, 0x43,
+ 0xE1, 0x84, 0xA7, 0x43, 0xE1, 0x84, 0xA9, 0x43,
+ 0xE1, 0x84, 0xAB, 0x43, 0xE1, 0x84, 0xAC, 0x43,
+ 0xE1, 0x84, 0xAD, 0x43, 0xE1, 0x84, 0xAE, 0x43,
+ 0xE1, 0x84, 0xAF, 0x43, 0xE1, 0x84, 0xB2, 0x43,
+ 0xE1, 0x84, 0xB6, 0x43, 0xE1, 0x85, 0x80, 0x43,
+ 0xE1, 0x85, 0x87, 0x43, 0xE1, 0x85, 0x8C, 0x43,
+ 0xE1, 0x85, 0x97, 0x43, 0xE1, 0x85, 0x98, 0x43,
+ // Bytes 3c0 - 3ff
+ 0xE1, 0x85, 0x99, 0x43, 0xE1, 0x85, 0xA0, 0x43,
+ 0xE1, 0x85, 0xA1, 0x43, 0xE1, 0x85, 0xA2, 0x43,
+ 0xE1, 0x85, 0xA3, 0x43, 0xE1, 0x85, 0xA4, 0x43,
+ 0xE1, 0x85, 0xA5, 0x43, 0xE1, 0x85, 0xA6, 0x43,
+ 0xE1, 0x85, 0xA7, 0x43, 0xE1, 0x85, 0xA8, 0x43,
+ 0xE1, 0x85, 0xA9, 0x43, 0xE1, 0x85, 0xAA, 0x43,
+ 0xE1, 0x85, 0xAB, 0x43, 0xE1, 0x85, 0xAC, 0x43,
+ 0xE1, 0x85, 0xAD, 0x43, 0xE1, 0x85, 0xAE, 0x43,
+ // Bytes 400 - 43f
+ 0xE1, 0x85, 0xAF, 0x43, 0xE1, 0x85, 0xB0, 0x43,
+ 0xE1, 0x85, 0xB1, 0x43, 0xE1, 0x85, 0xB2, 0x43,
+ 0xE1, 0x85, 0xB3, 0x43, 0xE1, 0x85, 0xB4, 0x43,
+ 0xE1, 0x85, 0xB5, 0x43, 0xE1, 0x86, 0x84, 0x43,
+ 0xE1, 0x86, 0x85, 0x43, 0xE1, 0x86, 0x88, 0x43,
+ 0xE1, 0x86, 0x91, 0x43, 0xE1, 0x86, 0x92, 0x43,
+ 0xE1, 0x86, 0x94, 0x43, 0xE1, 0x86, 0x9E, 0x43,
+ 0xE1, 0x86, 0xA1, 0x43, 0xE1, 0x86, 0xAA, 0x43,
+ // Bytes 440 - 47f
+ 0xE1, 0x86, 0xAC, 0x43, 0xE1, 0x86, 0xAD, 0x43,
+ 0xE1, 0x86, 0xB0, 0x43, 0xE1, 0x86, 0xB1, 0x43,
+ 0xE1, 0x86, 0xB2, 0x43, 0xE1, 0x86, 0xB3, 0x43,
+ 0xE1, 0x86, 0xB4, 0x43, 0xE1, 0x86, 0xB5, 0x43,
+ 0xE1, 0x87, 0x87, 0x43, 0xE1, 0x87, 0x88, 0x43,
+ 0xE1, 0x87, 0x8C, 0x43, 0xE1, 0x87, 0x8E, 0x43,
+ 0xE1, 0x87, 0x93, 0x43, 0xE1, 0x87, 0x97, 0x43,
+ 0xE1, 0x87, 0x99, 0x43, 0xE1, 0x87, 0x9D, 0x43,
+ // Bytes 480 - 4bf
+ 0xE1, 0x87, 0x9F, 0x43, 0xE1, 0x87, 0xB1, 0x43,
+ 0xE1, 0x87, 0xB2, 0x43, 0xE1, 0xB4, 0x82, 0x43,
+ 0xE1, 0xB4, 0x96, 0x43, 0xE1, 0xB4, 0x97, 0x43,
+ 0xE1, 0xB4, 0x9C, 0x43, 0xE1, 0xB4, 0x9D, 0x43,
+ 0xE1, 0xB4, 0xA5, 0x43, 0xE1, 0xB5, 0xBB, 0x43,
+ 0xE1, 0xB6, 0x85, 0x43, 0xE2, 0x80, 0x82, 0x43,
+ 0xE2, 0x80, 0x83, 0x43, 0xE2, 0x80, 0x90, 0x43,
+ 0xE2, 0x80, 0x93, 0x43, 0xE2, 0x80, 0x94, 0x43,
+ // Bytes 4c0 - 4ff
+ 0xE2, 0x82, 0xA9, 0x43, 0xE2, 0x86, 0x90, 0x43,
+ 0xE2, 0x86, 0x91, 0x43, 0xE2, 0x86, 0x92, 0x43,
+ 0xE2, 0x86, 0x93, 0x43, 0xE2, 0x88, 0x82, 0x43,
+ 0xE2, 0x88, 0x87, 0x43, 0xE2, 0x88, 0x91, 0x43,
+ 0xE2, 0x88, 0x92, 0x43, 0xE2, 0x94, 0x82, 0x43,
+ 0xE2, 0x96, 0xA0, 0x43, 0xE2, 0x97, 0x8B, 0x43,
+ 0xE2, 0xA6, 0x85, 0x43, 0xE2, 0xA6, 0x86, 0x43,
+ 0xE2, 0xB5, 0xA1, 0x43, 0xE3, 0x80, 0x81, 0x43,
+ // Bytes 500 - 53f
+ 0xE3, 0x80, 0x82, 0x43, 0xE3, 0x80, 0x88, 0x43,
+ 0xE3, 0x80, 0x89, 0x43, 0xE3, 0x80, 0x8A, 0x43,
+ 0xE3, 0x80, 0x8B, 0x43, 0xE3, 0x80, 0x8C, 0x43,
+ 0xE3, 0x80, 0x8D, 0x43, 0xE3, 0x80, 0x8E, 0x43,
+ 0xE3, 0x80, 0x8F, 0x43, 0xE3, 0x80, 0x90, 0x43,
+ 0xE3, 0x80, 0x91, 0x43, 0xE3, 0x80, 0x92, 0x43,
+ 0xE3, 0x80, 0x94, 0x43, 0xE3, 0x80, 0x95, 0x43,
+ 0xE3, 0x80, 0x96, 0x43, 0xE3, 0x80, 0x97, 0x43,
+ // Bytes 540 - 57f
+ 0xE3, 0x82, 0xA1, 0x43, 0xE3, 0x82, 0xA2, 0x43,
+ 0xE3, 0x82, 0xA3, 0x43, 0xE3, 0x82, 0xA4, 0x43,
+ 0xE3, 0x82, 0xA5, 0x43, 0xE3, 0x82, 0xA6, 0x43,
+ 0xE3, 0x82, 0xA7, 0x43, 0xE3, 0x82, 0xA8, 0x43,
+ 0xE3, 0x82, 0xA9, 0x43, 0xE3, 0x82, 0xAA, 0x43,
+ 0xE3, 0x82, 0xAB, 0x43, 0xE3, 0x82, 0xAD, 0x43,
+ 0xE3, 0x82, 0xAF, 0x43, 0xE3, 0x82, 0xB1, 0x43,
+ 0xE3, 0x82, 0xB3, 0x43, 0xE3, 0x82, 0xB5, 0x43,
+ // Bytes 580 - 5bf
+ 0xE3, 0x82, 0xB7, 0x43, 0xE3, 0x82, 0xB9, 0x43,
+ 0xE3, 0x82, 0xBB, 0x43, 0xE3, 0x82, 0xBD, 0x43,
+ 0xE3, 0x82, 0xBF, 0x43, 0xE3, 0x83, 0x81, 0x43,
+ 0xE3, 0x83, 0x83, 0x43, 0xE3, 0x83, 0x84, 0x43,
+ 0xE3, 0x83, 0x86, 0x43, 0xE3, 0x83, 0x88, 0x43,
+ 0xE3, 0x83, 0x8A, 0x43, 0xE3, 0x83, 0x8B, 0x43,
+ 0xE3, 0x83, 0x8C, 0x43, 0xE3, 0x83, 0x8D, 0x43,
+ 0xE3, 0x83, 0x8E, 0x43, 0xE3, 0x83, 0x8F, 0x43,
+ // Bytes 5c0 - 5ff
+ 0xE3, 0x83, 0x92, 0x43, 0xE3, 0x83, 0x95, 0x43,
+ 0xE3, 0x83, 0x98, 0x43, 0xE3, 0x83, 0x9B, 0x43,
+ 0xE3, 0x83, 0x9E, 0x43, 0xE3, 0x83, 0x9F, 0x43,
+ 0xE3, 0x83, 0xA0, 0x43, 0xE3, 0x83, 0xA1, 0x43,
+ 0xE3, 0x83, 0xA2, 0x43, 0xE3, 0x83, 0xA3, 0x43,
+ 0xE3, 0x83, 0xA4, 0x43, 0xE3, 0x83, 0xA5, 0x43,
+ 0xE3, 0x83, 0xA6, 0x43, 0xE3, 0x83, 0xA7, 0x43,
+ 0xE3, 0x83, 0xA8, 0x43, 0xE3, 0x83, 0xA9, 0x43,
+ // Bytes 600 - 63f
+ 0xE3, 0x83, 0xAA, 0x43, 0xE3, 0x83, 0xAB, 0x43,
+ 0xE3, 0x83, 0xAC, 0x43, 0xE3, 0x83, 0xAD, 0x43,
+ 0xE3, 0x83, 0xAF, 0x43, 0xE3, 0x83, 0xB0, 0x43,
+ 0xE3, 0x83, 0xB1, 0x43, 0xE3, 0x83, 0xB2, 0x43,
+ 0xE3, 0x83, 0xB3, 0x43, 0xE3, 0x83, 0xBB, 0x43,
+ 0xE3, 0x83, 0xBC, 0x43, 0xE3, 0x92, 0x9E, 0x43,
+ 0xE3, 0x92, 0xB9, 0x43, 0xE3, 0x92, 0xBB, 0x43,
+ 0xE3, 0x93, 0x9F, 0x43, 0xE3, 0x94, 0x95, 0x43,
+ // Bytes 640 - 67f
+ 0xE3, 0x9B, 0xAE, 0x43, 0xE3, 0x9B, 0xBC, 0x43,
+ 0xE3, 0x9E, 0x81, 0x43, 0xE3, 0xA0, 0xAF, 0x43,
+ 0xE3, 0xA1, 0xA2, 0x43, 0xE3, 0xA1, 0xBC, 0x43,
+ 0xE3, 0xA3, 0x87, 0x43, 0xE3, 0xA3, 0xA3, 0x43,
+ 0xE3, 0xA4, 0x9C, 0x43, 0xE3, 0xA4, 0xBA, 0x43,
+ 0xE3, 0xA8, 0xAE, 0x43, 0xE3, 0xA9, 0xAC, 0x43,
+ 0xE3, 0xAB, 0xA4, 0x43, 0xE3, 0xAC, 0x88, 0x43,
+ 0xE3, 0xAC, 0x99, 0x43, 0xE3, 0xAD, 0x89, 0x43,
+ // Bytes 680 - 6bf
+ 0xE3, 0xAE, 0x9D, 0x43, 0xE3, 0xB0, 0x98, 0x43,
+ 0xE3, 0xB1, 0x8E, 0x43, 0xE3, 0xB4, 0xB3, 0x43,
+ 0xE3, 0xB6, 0x96, 0x43, 0xE3, 0xBA, 0xAC, 0x43,
+ 0xE3, 0xBA, 0xB8, 0x43, 0xE3, 0xBC, 0x9B, 0x43,
+ 0xE3, 0xBF, 0xBC, 0x43, 0xE4, 0x80, 0x88, 0x43,
+ 0xE4, 0x80, 0x98, 0x43, 0xE4, 0x80, 0xB9, 0x43,
+ 0xE4, 0x81, 0x86, 0x43, 0xE4, 0x82, 0x96, 0x43,
+ 0xE4, 0x83, 0xA3, 0x43, 0xE4, 0x84, 0xAF, 0x43,
+ // Bytes 6c0 - 6ff
+ 0xE4, 0x88, 0x82, 0x43, 0xE4, 0x88, 0xA7, 0x43,
+ 0xE4, 0x8A, 0xA0, 0x43, 0xE4, 0x8C, 0x81, 0x43,
+ 0xE4, 0x8C, 0xB4, 0x43, 0xE4, 0x8D, 0x99, 0x43,
+ 0xE4, 0x8F, 0x95, 0x43, 0xE4, 0x8F, 0x99, 0x43,
+ 0xE4, 0x90, 0x8B, 0x43, 0xE4, 0x91, 0xAB, 0x43,
+ 0xE4, 0x94, 0xAB, 0x43, 0xE4, 0x95, 0x9D, 0x43,
+ 0xE4, 0x95, 0xA1, 0x43, 0xE4, 0x95, 0xAB, 0x43,
+ 0xE4, 0x97, 0x97, 0x43, 0xE4, 0x97, 0xB9, 0x43,
+ // Bytes 700 - 73f
+ 0xE4, 0x98, 0xB5, 0x43, 0xE4, 0x9A, 0xBE, 0x43,
+ 0xE4, 0x9B, 0x87, 0x43, 0xE4, 0xA6, 0x95, 0x43,
+ 0xE4, 0xA7, 0xA6, 0x43, 0xE4, 0xA9, 0xAE, 0x43,
+ 0xE4, 0xA9, 0xB6, 0x43, 0xE4, 0xAA, 0xB2, 0x43,
+ 0xE4, 0xAC, 0xB3, 0x43, 0xE4, 0xAF, 0x8E, 0x43,
+ 0xE4, 0xB3, 0x8E, 0x43, 0xE4, 0xB3, 0xAD, 0x43,
+ 0xE4, 0xB3, 0xB8, 0x43, 0xE4, 0xB5, 0x96, 0x43,
+ 0xE4, 0xB8, 0x80, 0x43, 0xE4, 0xB8, 0x81, 0x43,
+ // Bytes 740 - 77f
+ 0xE4, 0xB8, 0x83, 0x43, 0xE4, 0xB8, 0x89, 0x43,
+ 0xE4, 0xB8, 0x8A, 0x43, 0xE4, 0xB8, 0x8B, 0x43,
+ 0xE4, 0xB8, 0x8D, 0x43, 0xE4, 0xB8, 0x99, 0x43,
+ 0xE4, 0xB8, 0xA6, 0x43, 0xE4, 0xB8, 0xA8, 0x43,
+ 0xE4, 0xB8, 0xAD, 0x43, 0xE4, 0xB8, 0xB2, 0x43,
+ 0xE4, 0xB8, 0xB6, 0x43, 0xE4, 0xB8, 0xB8, 0x43,
+ 0xE4, 0xB8, 0xB9, 0x43, 0xE4, 0xB8, 0xBD, 0x43,
+ 0xE4, 0xB8, 0xBF, 0x43, 0xE4, 0xB9, 0x81, 0x43,
+ // Bytes 780 - 7bf
+ 0xE4, 0xB9, 0x99, 0x43, 0xE4, 0xB9, 0x9D, 0x43,
+ 0xE4, 0xBA, 0x82, 0x43, 0xE4, 0xBA, 0x85, 0x43,
+ 0xE4, 0xBA, 0x86, 0x43, 0xE4, 0xBA, 0x8C, 0x43,
+ 0xE4, 0xBA, 0x94, 0x43, 0xE4, 0xBA, 0xA0, 0x43,
+ 0xE4, 0xBA, 0xA4, 0x43, 0xE4, 0xBA, 0xAE, 0x43,
+ 0xE4, 0xBA, 0xBA, 0x43, 0xE4, 0xBB, 0x80, 0x43,
+ 0xE4, 0xBB, 0x8C, 0x43, 0xE4, 0xBB, 0xA4, 0x43,
+ 0xE4, 0xBC, 0x81, 0x43, 0xE4, 0xBC, 0x91, 0x43,
+ // Bytes 7c0 - 7ff
+ 0xE4, 0xBD, 0xA0, 0x43, 0xE4, 0xBE, 0x80, 0x43,
+ 0xE4, 0xBE, 0x86, 0x43, 0xE4, 0xBE, 0x8B, 0x43,
+ 0xE4, 0xBE, 0xAE, 0x43, 0xE4, 0xBE, 0xBB, 0x43,
+ 0xE4, 0xBE, 0xBF, 0x43, 0xE5, 0x80, 0x82, 0x43,
+ 0xE5, 0x80, 0xAB, 0x43, 0xE5, 0x81, 0xBA, 0x43,
+ 0xE5, 0x82, 0x99, 0x43, 0xE5, 0x83, 0x8F, 0x43,
+ 0xE5, 0x83, 0x9A, 0x43, 0xE5, 0x83, 0xA7, 0x43,
+ 0xE5, 0x84, 0xAA, 0x43, 0xE5, 0x84, 0xBF, 0x43,
+ // Bytes 800 - 83f
+ 0xE5, 0x85, 0x80, 0x43, 0xE5, 0x85, 0x85, 0x43,
+ 0xE5, 0x85, 0x8D, 0x43, 0xE5, 0x85, 0x94, 0x43,
+ 0xE5, 0x85, 0xA4, 0x43, 0xE5, 0x85, 0xA5, 0x43,
+ 0xE5, 0x85, 0xA7, 0x43, 0xE5, 0x85, 0xA8, 0x43,
+ 0xE5, 0x85, 0xA9, 0x43, 0xE5, 0x85, 0xAB, 0x43,
+ 0xE5, 0x85, 0xAD, 0x43, 0xE5, 0x85, 0xB7, 0x43,
+ 0xE5, 0x86, 0x80, 0x43, 0xE5, 0x86, 0x82, 0x43,
+ 0xE5, 0x86, 0x8D, 0x43, 0xE5, 0x86, 0x92, 0x43,
+ // Bytes 840 - 87f
+ 0xE5, 0x86, 0x95, 0x43, 0xE5, 0x86, 0x96, 0x43,
+ 0xE5, 0x86, 0x97, 0x43, 0xE5, 0x86, 0x99, 0x43,
+ 0xE5, 0x86, 0xA4, 0x43, 0xE5, 0x86, 0xAB, 0x43,
+ 0xE5, 0x86, 0xAC, 0x43, 0xE5, 0x86, 0xB5, 0x43,
+ 0xE5, 0x86, 0xB7, 0x43, 0xE5, 0x87, 0x89, 0x43,
+ 0xE5, 0x87, 0x8C, 0x43, 0xE5, 0x87, 0x9C, 0x43,
+ 0xE5, 0x87, 0x9E, 0x43, 0xE5, 0x87, 0xA0, 0x43,
+ 0xE5, 0x87, 0xB5, 0x43, 0xE5, 0x88, 0x80, 0x43,
+ // Bytes 880 - 8bf
+ 0xE5, 0x88, 0x83, 0x43, 0xE5, 0x88, 0x87, 0x43,
+ 0xE5, 0x88, 0x97, 0x43, 0xE5, 0x88, 0x9D, 0x43,
+ 0xE5, 0x88, 0xA9, 0x43, 0xE5, 0x88, 0xBA, 0x43,
+ 0xE5, 0x88, 0xBB, 0x43, 0xE5, 0x89, 0x86, 0x43,
+ 0xE5, 0x89, 0x8D, 0x43, 0xE5, 0x89, 0xB2, 0x43,
+ 0xE5, 0x89, 0xB7, 0x43, 0xE5, 0x8A, 0x89, 0x43,
+ 0xE5, 0x8A, 0x9B, 0x43, 0xE5, 0x8A, 0xA3, 0x43,
+ 0xE5, 0x8A, 0xB3, 0x43, 0xE5, 0x8A, 0xB4, 0x43,
+ // Bytes 8c0 - 8ff
+ 0xE5, 0x8B, 0x87, 0x43, 0xE5, 0x8B, 0x89, 0x43,
+ 0xE5, 0x8B, 0x92, 0x43, 0xE5, 0x8B, 0x9E, 0x43,
+ 0xE5, 0x8B, 0xA4, 0x43, 0xE5, 0x8B, 0xB5, 0x43,
+ 0xE5, 0x8B, 0xB9, 0x43, 0xE5, 0x8B, 0xBA, 0x43,
+ 0xE5, 0x8C, 0x85, 0x43, 0xE5, 0x8C, 0x86, 0x43,
+ 0xE5, 0x8C, 0x95, 0x43, 0xE5, 0x8C, 0x97, 0x43,
+ 0xE5, 0x8C, 0x9A, 0x43, 0xE5, 0x8C, 0xB8, 0x43,
+ 0xE5, 0x8C, 0xBB, 0x43, 0xE5, 0x8C, 0xBF, 0x43,
+ // Bytes 900 - 93f
+ 0xE5, 0x8D, 0x81, 0x43, 0xE5, 0x8D, 0x84, 0x43,
+ 0xE5, 0x8D, 0x85, 0x43, 0xE5, 0x8D, 0x89, 0x43,
+ 0xE5, 0x8D, 0x91, 0x43, 0xE5, 0x8D, 0x94, 0x43,
+ 0xE5, 0x8D, 0x9A, 0x43, 0xE5, 0x8D, 0x9C, 0x43,
+ 0xE5, 0x8D, 0xA9, 0x43, 0xE5, 0x8D, 0xB0, 0x43,
+ 0xE5, 0x8D, 0xB3, 0x43, 0xE5, 0x8D, 0xB5, 0x43,
+ 0xE5, 0x8D, 0xBD, 0x43, 0xE5, 0x8D, 0xBF, 0x43,
+ 0xE5, 0x8E, 0x82, 0x43, 0xE5, 0x8E, 0xB6, 0x43,
+ // Bytes 940 - 97f
+ 0xE5, 0x8F, 0x83, 0x43, 0xE5, 0x8F, 0x88, 0x43,
+ 0xE5, 0x8F, 0x8A, 0x43, 0xE5, 0x8F, 0x8C, 0x43,
+ 0xE5, 0x8F, 0x9F, 0x43, 0xE5, 0x8F, 0xA3, 0x43,
+ 0xE5, 0x8F, 0xA5, 0x43, 0xE5, 0x8F, 0xAB, 0x43,
+ 0xE5, 0x8F, 0xAF, 0x43, 0xE5, 0x8F, 0xB1, 0x43,
+ 0xE5, 0x8F, 0xB3, 0x43, 0xE5, 0x90, 0x86, 0x43,
+ 0xE5, 0x90, 0x88, 0x43, 0xE5, 0x90, 0x8D, 0x43,
+ 0xE5, 0x90, 0x8F, 0x43, 0xE5, 0x90, 0x9D, 0x43,
+ // Bytes 980 - 9bf
+ 0xE5, 0x90, 0xB8, 0x43, 0xE5, 0x90, 0xB9, 0x43,
+ 0xE5, 0x91, 0x82, 0x43, 0xE5, 0x91, 0x88, 0x43,
+ 0xE5, 0x91, 0xA8, 0x43, 0xE5, 0x92, 0x9E, 0x43,
+ 0xE5, 0x92, 0xA2, 0x43, 0xE5, 0x92, 0xBD, 0x43,
+ 0xE5, 0x93, 0xB6, 0x43, 0xE5, 0x94, 0x90, 0x43,
+ 0xE5, 0x95, 0x8F, 0x43, 0xE5, 0x95, 0x93, 0x43,
+ 0xE5, 0x95, 0x95, 0x43, 0xE5, 0x95, 0xA3, 0x43,
+ 0xE5, 0x96, 0x84, 0x43, 0xE5, 0x96, 0x87, 0x43,
+ // Bytes 9c0 - 9ff
+ 0xE5, 0x96, 0x99, 0x43, 0xE5, 0x96, 0x9D, 0x43,
+ 0xE5, 0x96, 0xAB, 0x43, 0xE5, 0x96, 0xB3, 0x43,
+ 0xE5, 0x96, 0xB6, 0x43, 0xE5, 0x97, 0x80, 0x43,
+ 0xE5, 0x97, 0x82, 0x43, 0xE5, 0x97, 0xA2, 0x43,
+ 0xE5, 0x98, 0x86, 0x43, 0xE5, 0x99, 0x91, 0x43,
+ 0xE5, 0x99, 0xA8, 0x43, 0xE5, 0x99, 0xB4, 0x43,
+ 0xE5, 0x9B, 0x97, 0x43, 0xE5, 0x9B, 0x9B, 0x43,
+ 0xE5, 0x9B, 0xB9, 0x43, 0xE5, 0x9C, 0x96, 0x43,
+ // Bytes a00 - a3f
+ 0xE5, 0x9C, 0x97, 0x43, 0xE5, 0x9C, 0x9F, 0x43,
+ 0xE5, 0x9C, 0xB0, 0x43, 0xE5, 0x9E, 0x8B, 0x43,
+ 0xE5, 0x9F, 0x8E, 0x43, 0xE5, 0x9F, 0xB4, 0x43,
+ 0xE5, 0xA0, 0x8D, 0x43, 0xE5, 0xA0, 0xB1, 0x43,
+ 0xE5, 0xA0, 0xB2, 0x43, 0xE5, 0xA1, 0x80, 0x43,
+ 0xE5, 0xA1, 0x9A, 0x43, 0xE5, 0xA1, 0x9E, 0x43,
+ 0xE5, 0xA2, 0xA8, 0x43, 0xE5, 0xA2, 0xAC, 0x43,
+ 0xE5, 0xA2, 0xB3, 0x43, 0xE5, 0xA3, 0x98, 0x43,
+ // Bytes a40 - a7f
+ 0xE5, 0xA3, 0x9F, 0x43, 0xE5, 0xA3, 0xAB, 0x43,
+ 0xE5, 0xA3, 0xAE, 0x43, 0xE5, 0xA3, 0xB0, 0x43,
+ 0xE5, 0xA3, 0xB2, 0x43, 0xE5, 0xA3, 0xB7, 0x43,
+ 0xE5, 0xA4, 0x82, 0x43, 0xE5, 0xA4, 0x86, 0x43,
+ 0xE5, 0xA4, 0x8A, 0x43, 0xE5, 0xA4, 0x95, 0x43,
+ 0xE5, 0xA4, 0x9A, 0x43, 0xE5, 0xA4, 0x9C, 0x43,
+ 0xE5, 0xA4, 0xA2, 0x43, 0xE5, 0xA4, 0xA7, 0x43,
+ 0xE5, 0xA4, 0xA9, 0x43, 0xE5, 0xA5, 0x84, 0x43,
+ // Bytes a80 - abf
+ 0xE5, 0xA5, 0x88, 0x43, 0xE5, 0xA5, 0x91, 0x43,
+ 0xE5, 0xA5, 0x94, 0x43, 0xE5, 0xA5, 0xA2, 0x43,
+ 0xE5, 0xA5, 0xB3, 0x43, 0xE5, 0xA7, 0x98, 0x43,
+ 0xE5, 0xA7, 0xAC, 0x43, 0xE5, 0xA8, 0x9B, 0x43,
+ 0xE5, 0xA8, 0xA7, 0x43, 0xE5, 0xA9, 0xA2, 0x43,
+ 0xE5, 0xA9, 0xA6, 0x43, 0xE5, 0xAA, 0xB5, 0x43,
+ 0xE5, 0xAC, 0x88, 0x43, 0xE5, 0xAC, 0xA8, 0x43,
+ 0xE5, 0xAC, 0xBE, 0x43, 0xE5, 0xAD, 0x90, 0x43,
+ // Bytes ac0 - aff
+ 0xE5, 0xAD, 0x97, 0x43, 0xE5, 0xAD, 0xA6, 0x43,
+ 0xE5, 0xAE, 0x80, 0x43, 0xE5, 0xAE, 0x85, 0x43,
+ 0xE5, 0xAE, 0x97, 0x43, 0xE5, 0xAF, 0x83, 0x43,
+ 0xE5, 0xAF, 0x98, 0x43, 0xE5, 0xAF, 0xA7, 0x43,
+ 0xE5, 0xAF, 0xAE, 0x43, 0xE5, 0xAF, 0xB3, 0x43,
+ 0xE5, 0xAF, 0xB8, 0x43, 0xE5, 0xAF, 0xBF, 0x43,
+ 0xE5, 0xB0, 0x86, 0x43, 0xE5, 0xB0, 0x8F, 0x43,
+ 0xE5, 0xB0, 0xA2, 0x43, 0xE5, 0xB0, 0xB8, 0x43,
+ // Bytes b00 - b3f
+ 0xE5, 0xB0, 0xBF, 0x43, 0xE5, 0xB1, 0xA0, 0x43,
+ 0xE5, 0xB1, 0xA2, 0x43, 0xE5, 0xB1, 0xA4, 0x43,
+ 0xE5, 0xB1, 0xA5, 0x43, 0xE5, 0xB1, 0xAE, 0x43,
+ 0xE5, 0xB1, 0xB1, 0x43, 0xE5, 0xB2, 0x8D, 0x43,
+ 0xE5, 0xB3, 0x80, 0x43, 0xE5, 0xB4, 0x99, 0x43,
+ 0xE5, 0xB5, 0x83, 0x43, 0xE5, 0xB5, 0x90, 0x43,
+ 0xE5, 0xB5, 0xAB, 0x43, 0xE5, 0xB5, 0xAE, 0x43,
+ 0xE5, 0xB5, 0xBC, 0x43, 0xE5, 0xB6, 0xB2, 0x43,
+ // Bytes b40 - b7f
+ 0xE5, 0xB6, 0xBA, 0x43, 0xE5, 0xB7, 0x9B, 0x43,
+ 0xE5, 0xB7, 0xA1, 0x43, 0xE5, 0xB7, 0xA2, 0x43,
+ 0xE5, 0xB7, 0xA5, 0x43, 0xE5, 0xB7, 0xA6, 0x43,
+ 0xE5, 0xB7, 0xB1, 0x43, 0xE5, 0xB7, 0xBD, 0x43,
+ 0xE5, 0xB7, 0xBE, 0x43, 0xE5, 0xB8, 0xA8, 0x43,
+ 0xE5, 0xB8, 0xBD, 0x43, 0xE5, 0xB9, 0xA9, 0x43,
+ 0xE5, 0xB9, 0xB2, 0x43, 0xE5, 0xB9, 0xB4, 0x43,
+ 0xE5, 0xB9, 0xBA, 0x43, 0xE5, 0xB9, 0xBC, 0x43,
+ // Bytes b80 - bbf
+ 0xE5, 0xB9, 0xBF, 0x43, 0xE5, 0xBA, 0xA6, 0x43,
+ 0xE5, 0xBA, 0xB0, 0x43, 0xE5, 0xBA, 0xB3, 0x43,
+ 0xE5, 0xBA, 0xB6, 0x43, 0xE5, 0xBB, 0x89, 0x43,
+ 0xE5, 0xBB, 0x8A, 0x43, 0xE5, 0xBB, 0x92, 0x43,
+ 0xE5, 0xBB, 0x93, 0x43, 0xE5, 0xBB, 0x99, 0x43,
+ 0xE5, 0xBB, 0xAC, 0x43, 0xE5, 0xBB, 0xB4, 0x43,
+ 0xE5, 0xBB, 0xBE, 0x43, 0xE5, 0xBC, 0x84, 0x43,
+ 0xE5, 0xBC, 0x8B, 0x43, 0xE5, 0xBC, 0x93, 0x43,
+ // Bytes bc0 - bff
+ 0xE5, 0xBC, 0xA2, 0x43, 0xE5, 0xBD, 0x90, 0x43,
+ 0xE5, 0xBD, 0x93, 0x43, 0xE5, 0xBD, 0xA1, 0x43,
+ 0xE5, 0xBD, 0xA2, 0x43, 0xE5, 0xBD, 0xA9, 0x43,
+ 0xE5, 0xBD, 0xAB, 0x43, 0xE5, 0xBD, 0xB3, 0x43,
+ 0xE5, 0xBE, 0x8B, 0x43, 0xE5, 0xBE, 0x8C, 0x43,
+ 0xE5, 0xBE, 0x97, 0x43, 0xE5, 0xBE, 0x9A, 0x43,
+ 0xE5, 0xBE, 0xA9, 0x43, 0xE5, 0xBE, 0xAD, 0x43,
+ 0xE5, 0xBF, 0x83, 0x43, 0xE5, 0xBF, 0x8D, 0x43,
+ // Bytes c00 - c3f
+ 0xE5, 0xBF, 0x97, 0x43, 0xE5, 0xBF, 0xB5, 0x43,
+ 0xE5, 0xBF, 0xB9, 0x43, 0xE6, 0x80, 0x92, 0x43,
+ 0xE6, 0x80, 0x9C, 0x43, 0xE6, 0x81, 0xB5, 0x43,
+ 0xE6, 0x82, 0x81, 0x43, 0xE6, 0x82, 0x94, 0x43,
+ 0xE6, 0x83, 0x87, 0x43, 0xE6, 0x83, 0x98, 0x43,
+ 0xE6, 0x83, 0xA1, 0x43, 0xE6, 0x84, 0x88, 0x43,
+ 0xE6, 0x85, 0x84, 0x43, 0xE6, 0x85, 0x88, 0x43,
+ 0xE6, 0x85, 0x8C, 0x43, 0xE6, 0x85, 0x8E, 0x43,
+ // Bytes c40 - c7f
+ 0xE6, 0x85, 0xA0, 0x43, 0xE6, 0x85, 0xA8, 0x43,
+ 0xE6, 0x85, 0xBA, 0x43, 0xE6, 0x86, 0x8E, 0x43,
+ 0xE6, 0x86, 0x90, 0x43, 0xE6, 0x86, 0xA4, 0x43,
+ 0xE6, 0x86, 0xAF, 0x43, 0xE6, 0x86, 0xB2, 0x43,
+ 0xE6, 0x87, 0x9E, 0x43, 0xE6, 0x87, 0xB2, 0x43,
+ 0xE6, 0x87, 0xB6, 0x43, 0xE6, 0x88, 0x80, 0x43,
+ 0xE6, 0x88, 0x88, 0x43, 0xE6, 0x88, 0x90, 0x43,
+ 0xE6, 0x88, 0x9B, 0x43, 0xE6, 0x88, 0xAE, 0x43,
+ // Bytes c80 - cbf
+ 0xE6, 0x88, 0xB4, 0x43, 0xE6, 0x88, 0xB6, 0x43,
+ 0xE6, 0x89, 0x8B, 0x43, 0xE6, 0x89, 0x93, 0x43,
+ 0xE6, 0x89, 0x9D, 0x43, 0xE6, 0x8A, 0x95, 0x43,
+ 0xE6, 0x8A, 0xB1, 0x43, 0xE6, 0x8B, 0x89, 0x43,
+ 0xE6, 0x8B, 0x8F, 0x43, 0xE6, 0x8B, 0x93, 0x43,
+ 0xE6, 0x8B, 0x94, 0x43, 0xE6, 0x8B, 0xBC, 0x43,
+ 0xE6, 0x8B, 0xBE, 0x43, 0xE6, 0x8C, 0x87, 0x43,
+ 0xE6, 0x8C, 0xBD, 0x43, 0xE6, 0x8D, 0x90, 0x43,
+ // Bytes cc0 - cff
+ 0xE6, 0x8D, 0x95, 0x43, 0xE6, 0x8D, 0xA8, 0x43,
+ 0xE6, 0x8D, 0xBB, 0x43, 0xE6, 0x8E, 0x83, 0x43,
+ 0xE6, 0x8E, 0xA0, 0x43, 0xE6, 0x8E, 0xA9, 0x43,
+ 0xE6, 0x8F, 0x84, 0x43, 0xE6, 0x8F, 0x85, 0x43,
+ 0xE6, 0x8F, 0xA4, 0x43, 0xE6, 0x90, 0x9C, 0x43,
+ 0xE6, 0x90, 0xA2, 0x43, 0xE6, 0x91, 0x92, 0x43,
+ 0xE6, 0x91, 0xA9, 0x43, 0xE6, 0x91, 0xB7, 0x43,
+ 0xE6, 0x91, 0xBE, 0x43, 0xE6, 0x92, 0x9A, 0x43,
+ // Bytes d00 - d3f
+ 0xE6, 0x92, 0x9D, 0x43, 0xE6, 0x93, 0x84, 0x43,
+ 0xE6, 0x94, 0xAF, 0x43, 0xE6, 0x94, 0xB4, 0x43,
+ 0xE6, 0x95, 0x8F, 0x43, 0xE6, 0x95, 0x96, 0x43,
+ 0xE6, 0x95, 0xAC, 0x43, 0xE6, 0x95, 0xB8, 0x43,
+ 0xE6, 0x96, 0x87, 0x43, 0xE6, 0x96, 0x97, 0x43,
+ 0xE6, 0x96, 0x99, 0x43, 0xE6, 0x96, 0xA4, 0x43,
+ 0xE6, 0x96, 0xB0, 0x43, 0xE6, 0x96, 0xB9, 0x43,
+ 0xE6, 0x97, 0x85, 0x43, 0xE6, 0x97, 0xA0, 0x43,
+ // Bytes d40 - d7f
+ 0xE6, 0x97, 0xA2, 0x43, 0xE6, 0x97, 0xA3, 0x43,
+ 0xE6, 0x97, 0xA5, 0x43, 0xE6, 0x98, 0x93, 0x43,
+ 0xE6, 0x98, 0xA0, 0x43, 0xE6, 0x99, 0x89, 0x43,
+ 0xE6, 0x99, 0xB4, 0x43, 0xE6, 0x9A, 0x88, 0x43,
+ 0xE6, 0x9A, 0x91, 0x43, 0xE6, 0x9A, 0x9C, 0x43,
+ 0xE6, 0x9A, 0xB4, 0x43, 0xE6, 0x9B, 0x86, 0x43,
+ 0xE6, 0x9B, 0xB0, 0x43, 0xE6, 0x9B, 0xB4, 0x43,
+ 0xE6, 0x9B, 0xB8, 0x43, 0xE6, 0x9C, 0x80, 0x43,
+ // Bytes d80 - dbf
+ 0xE6, 0x9C, 0x88, 0x43, 0xE6, 0x9C, 0x89, 0x43,
+ 0xE6, 0x9C, 0x97, 0x43, 0xE6, 0x9C, 0x9B, 0x43,
+ 0xE6, 0x9C, 0xA1, 0x43, 0xE6, 0x9C, 0xA8, 0x43,
+ 0xE6, 0x9D, 0x8E, 0x43, 0xE6, 0x9D, 0x93, 0x43,
+ 0xE6, 0x9D, 0x96, 0x43, 0xE6, 0x9D, 0x9E, 0x43,
+ 0xE6, 0x9D, 0xBB, 0x43, 0xE6, 0x9E, 0x85, 0x43,
+ 0xE6, 0x9E, 0x97, 0x43, 0xE6, 0x9F, 0xB3, 0x43,
+ 0xE6, 0x9F, 0xBA, 0x43, 0xE6, 0xA0, 0x97, 0x43,
+ // Bytes dc0 - dff
+ 0xE6, 0xA0, 0x9F, 0x43, 0xE6, 0xA0, 0xAA, 0x43,
+ 0xE6, 0xA1, 0x92, 0x43, 0xE6, 0xA2, 0x81, 0x43,
+ 0xE6, 0xA2, 0x85, 0x43, 0xE6, 0xA2, 0x8E, 0x43,
+ 0xE6, 0xA2, 0xA8, 0x43, 0xE6, 0xA4, 0x94, 0x43,
+ 0xE6, 0xA5, 0x82, 0x43, 0xE6, 0xA6, 0xA3, 0x43,
+ 0xE6, 0xA7, 0xAA, 0x43, 0xE6, 0xA8, 0x82, 0x43,
+ 0xE6, 0xA8, 0x93, 0x43, 0xE6, 0xAA, 0xA8, 0x43,
+ 0xE6, 0xAB, 0x93, 0x43, 0xE6, 0xAB, 0x9B, 0x43,
+ // Bytes e00 - e3f
+ 0xE6, 0xAC, 0x84, 0x43, 0xE6, 0xAC, 0xA0, 0x43,
+ 0xE6, 0xAC, 0xA1, 0x43, 0xE6, 0xAD, 0x94, 0x43,
+ 0xE6, 0xAD, 0xA2, 0x43, 0xE6, 0xAD, 0xA3, 0x43,
+ 0xE6, 0xAD, 0xB2, 0x43, 0xE6, 0xAD, 0xB7, 0x43,
+ 0xE6, 0xAD, 0xB9, 0x43, 0xE6, 0xAE, 0x9F, 0x43,
+ 0xE6, 0xAE, 0xAE, 0x43, 0xE6, 0xAE, 0xB3, 0x43,
+ 0xE6, 0xAE, 0xBA, 0x43, 0xE6, 0xAE, 0xBB, 0x43,
+ 0xE6, 0xAF, 0x8B, 0x43, 0xE6, 0xAF, 0x8D, 0x43,
+ // Bytes e40 - e7f
+ 0xE6, 0xAF, 0x94, 0x43, 0xE6, 0xAF, 0x9B, 0x43,
+ 0xE6, 0xB0, 0x8F, 0x43, 0xE6, 0xB0, 0x94, 0x43,
+ 0xE6, 0xB0, 0xB4, 0x43, 0xE6, 0xB1, 0x8E, 0x43,
+ 0xE6, 0xB1, 0xA7, 0x43, 0xE6, 0xB2, 0x88, 0x43,
+ 0xE6, 0xB2, 0xBF, 0x43, 0xE6, 0xB3, 0x8C, 0x43,
+ 0xE6, 0xB3, 0x8D, 0x43, 0xE6, 0xB3, 0xA5, 0x43,
+ 0xE6, 0xB3, 0xA8, 0x43, 0xE6, 0xB4, 0x96, 0x43,
+ 0xE6, 0xB4, 0x9B, 0x43, 0xE6, 0xB4, 0x9E, 0x43,
+ // Bytes e80 - ebf
+ 0xE6, 0xB4, 0xB4, 0x43, 0xE6, 0xB4, 0xBE, 0x43,
+ 0xE6, 0xB5, 0x81, 0x43, 0xE6, 0xB5, 0xA9, 0x43,
+ 0xE6, 0xB5, 0xAA, 0x43, 0xE6, 0xB5, 0xB7, 0x43,
+ 0xE6, 0xB5, 0xB8, 0x43, 0xE6, 0xB6, 0x85, 0x43,
+ 0xE6, 0xB7, 0x8B, 0x43, 0xE6, 0xB7, 0x9A, 0x43,
+ 0xE6, 0xB7, 0xAA, 0x43, 0xE6, 0xB7, 0xB9, 0x43,
+ 0xE6, 0xB8, 0x9A, 0x43, 0xE6, 0xB8, 0xAF, 0x43,
+ 0xE6, 0xB9, 0xAE, 0x43, 0xE6, 0xBA, 0x80, 0x43,
+ // Bytes ec0 - eff
+ 0xE6, 0xBA, 0x9C, 0x43, 0xE6, 0xBA, 0xBA, 0x43,
+ 0xE6, 0xBB, 0x87, 0x43, 0xE6, 0xBB, 0x8B, 0x43,
+ 0xE6, 0xBB, 0x91, 0x43, 0xE6, 0xBB, 0x9B, 0x43,
+ 0xE6, 0xBC, 0x8F, 0x43, 0xE6, 0xBC, 0x94, 0x43,
+ 0xE6, 0xBC, 0xA2, 0x43, 0xE6, 0xBC, 0xA3, 0x43,
+ 0xE6, 0xBD, 0xAE, 0x43, 0xE6, 0xBF, 0x86, 0x43,
+ 0xE6, 0xBF, 0xAB, 0x43, 0xE6, 0xBF, 0xBE, 0x43,
+ 0xE7, 0x80, 0x9B, 0x43, 0xE7, 0x80, 0x9E, 0x43,
+ // Bytes f00 - f3f
+ 0xE7, 0x80, 0xB9, 0x43, 0xE7, 0x81, 0x8A, 0x43,
+ 0xE7, 0x81, 0xAB, 0x43, 0xE7, 0x81, 0xB0, 0x43,
+ 0xE7, 0x81, 0xB7, 0x43, 0xE7, 0x81, 0xBD, 0x43,
+ 0xE7, 0x82, 0x99, 0x43, 0xE7, 0x82, 0xAD, 0x43,
+ 0xE7, 0x83, 0x88, 0x43, 0xE7, 0x83, 0x99, 0x43,
+ 0xE7, 0x84, 0xA1, 0x43, 0xE7, 0x85, 0x85, 0x43,
+ 0xE7, 0x85, 0x89, 0x43, 0xE7, 0x85, 0xAE, 0x43,
+ 0xE7, 0x86, 0x9C, 0x43, 0xE7, 0x87, 0x8E, 0x43,
+ // Bytes f40 - f7f
+ 0xE7, 0x87, 0x90, 0x43, 0xE7, 0x88, 0x90, 0x43,
+ 0xE7, 0x88, 0x9B, 0x43, 0xE7, 0x88, 0xA8, 0x43,
+ 0xE7, 0x88, 0xAA, 0x43, 0xE7, 0x88, 0xAB, 0x43,
+ 0xE7, 0x88, 0xB5, 0x43, 0xE7, 0x88, 0xB6, 0x43,
+ 0xE7, 0x88, 0xBB, 0x43, 0xE7, 0x88, 0xBF, 0x43,
+ 0xE7, 0x89, 0x87, 0x43, 0xE7, 0x89, 0x90, 0x43,
+ 0xE7, 0x89, 0x99, 0x43, 0xE7, 0x89, 0x9B, 0x43,
+ 0xE7, 0x89, 0xA2, 0x43, 0xE7, 0x89, 0xB9, 0x43,
+ // Bytes f80 - fbf
+ 0xE7, 0x8A, 0x80, 0x43, 0xE7, 0x8A, 0x95, 0x43,
+ 0xE7, 0x8A, 0xAC, 0x43, 0xE7, 0x8A, 0xAF, 0x43,
+ 0xE7, 0x8B, 0x80, 0x43, 0xE7, 0x8B, 0xBC, 0x43,
+ 0xE7, 0x8C, 0xAA, 0x43, 0xE7, 0x8D, 0xB5, 0x43,
+ 0xE7, 0x8D, 0xBA, 0x43, 0xE7, 0x8E, 0x84, 0x43,
+ 0xE7, 0x8E, 0x87, 0x43, 0xE7, 0x8E, 0x89, 0x43,
+ 0xE7, 0x8E, 0x8B, 0x43, 0xE7, 0x8E, 0xA5, 0x43,
+ 0xE7, 0x8E, 0xB2, 0x43, 0xE7, 0x8F, 0x9E, 0x43,
+ // Bytes fc0 - fff
+ 0xE7, 0x90, 0x86, 0x43, 0xE7, 0x90, 0x89, 0x43,
+ 0xE7, 0x90, 0xA2, 0x43, 0xE7, 0x91, 0x87, 0x43,
+ 0xE7, 0x91, 0x9C, 0x43, 0xE7, 0x91, 0xA9, 0x43,
+ 0xE7, 0x91, 0xB1, 0x43, 0xE7, 0x92, 0x85, 0x43,
+ 0xE7, 0x92, 0x89, 0x43, 0xE7, 0x92, 0x98, 0x43,
+ 0xE7, 0x93, 0x8A, 0x43, 0xE7, 0x93, 0x9C, 0x43,
+ 0xE7, 0x93, 0xA6, 0x43, 0xE7, 0x94, 0x86, 0x43,
+ 0xE7, 0x94, 0x98, 0x43, 0xE7, 0x94, 0x9F, 0x43,
+ // Bytes 1000 - 103f
+ 0xE7, 0x94, 0xA4, 0x43, 0xE7, 0x94, 0xA8, 0x43,
+ 0xE7, 0x94, 0xB0, 0x43, 0xE7, 0x94, 0xB2, 0x43,
+ 0xE7, 0x94, 0xB3, 0x43, 0xE7, 0x94, 0xB7, 0x43,
+ 0xE7, 0x94, 0xBB, 0x43, 0xE7, 0x94, 0xBE, 0x43,
+ 0xE7, 0x95, 0x99, 0x43, 0xE7, 0x95, 0xA5, 0x43,
+ 0xE7, 0x95, 0xB0, 0x43, 0xE7, 0x96, 0x8B, 0x43,
+ 0xE7, 0x96, 0x92, 0x43, 0xE7, 0x97, 0xA2, 0x43,
+ 0xE7, 0x98, 0x90, 0x43, 0xE7, 0x98, 0x9D, 0x43,
+ // Bytes 1040 - 107f
+ 0xE7, 0x98, 0x9F, 0x43, 0xE7, 0x99, 0x82, 0x43,
+ 0xE7, 0x99, 0xA9, 0x43, 0xE7, 0x99, 0xB6, 0x43,
+ 0xE7, 0x99, 0xBD, 0x43, 0xE7, 0x9A, 0xAE, 0x43,
+ 0xE7, 0x9A, 0xBF, 0x43, 0xE7, 0x9B, 0x8A, 0x43,
+ 0xE7, 0x9B, 0x9B, 0x43, 0xE7, 0x9B, 0xA3, 0x43,
+ 0xE7, 0x9B, 0xA7, 0x43, 0xE7, 0x9B, 0xAE, 0x43,
+ 0xE7, 0x9B, 0xB4, 0x43, 0xE7, 0x9C, 0x81, 0x43,
+ 0xE7, 0x9C, 0x9E, 0x43, 0xE7, 0x9C, 0x9F, 0x43,
+ // Bytes 1080 - 10bf
+ 0xE7, 0x9D, 0x80, 0x43, 0xE7, 0x9D, 0x8A, 0x43,
+ 0xE7, 0x9E, 0x8B, 0x43, 0xE7, 0x9E, 0xA7, 0x43,
+ 0xE7, 0x9F, 0x9B, 0x43, 0xE7, 0x9F, 0xA2, 0x43,
+ 0xE7, 0x9F, 0xB3, 0x43, 0xE7, 0xA1, 0x8E, 0x43,
+ 0xE7, 0xA1, 0xAB, 0x43, 0xE7, 0xA2, 0x8C, 0x43,
+ 0xE7, 0xA2, 0x91, 0x43, 0xE7, 0xA3, 0x8A, 0x43,
+ 0xE7, 0xA3, 0x8C, 0x43, 0xE7, 0xA3, 0xBB, 0x43,
+ 0xE7, 0xA4, 0xAA, 0x43, 0xE7, 0xA4, 0xBA, 0x43,
+ // Bytes 10c0 - 10ff
+ 0xE7, 0xA4, 0xBC, 0x43, 0xE7, 0xA4, 0xBE, 0x43,
+ 0xE7, 0xA5, 0x88, 0x43, 0xE7, 0xA5, 0x89, 0x43,
+ 0xE7, 0xA5, 0x90, 0x43, 0xE7, 0xA5, 0x96, 0x43,
+ 0xE7, 0xA5, 0x9D, 0x43, 0xE7, 0xA5, 0x9E, 0x43,
+ 0xE7, 0xA5, 0xA5, 0x43, 0xE7, 0xA5, 0xBF, 0x43,
+ 0xE7, 0xA6, 0x81, 0x43, 0xE7, 0xA6, 0x8D, 0x43,
+ 0xE7, 0xA6, 0x8E, 0x43, 0xE7, 0xA6, 0x8F, 0x43,
+ 0xE7, 0xA6, 0xAE, 0x43, 0xE7, 0xA6, 0xB8, 0x43,
+ // Bytes 1100 - 113f
+ 0xE7, 0xA6, 0xBE, 0x43, 0xE7, 0xA7, 0x8A, 0x43,
+ 0xE7, 0xA7, 0x98, 0x43, 0xE7, 0xA7, 0xAB, 0x43,
+ 0xE7, 0xA8, 0x9C, 0x43, 0xE7, 0xA9, 0x80, 0x43,
+ 0xE7, 0xA9, 0x8A, 0x43, 0xE7, 0xA9, 0x8F, 0x43,
+ 0xE7, 0xA9, 0xB4, 0x43, 0xE7, 0xA9, 0xBA, 0x43,
+ 0xE7, 0xAA, 0x81, 0x43, 0xE7, 0xAA, 0xB1, 0x43,
+ 0xE7, 0xAB, 0x8B, 0x43, 0xE7, 0xAB, 0xAE, 0x43,
+ 0xE7, 0xAB, 0xB9, 0x43, 0xE7, 0xAC, 0xA0, 0x43,
+ // Bytes 1140 - 117f
+ 0xE7, 0xAE, 0x8F, 0x43, 0xE7, 0xAF, 0x80, 0x43,
+ 0xE7, 0xAF, 0x86, 0x43, 0xE7, 0xAF, 0x89, 0x43,
+ 0xE7, 0xB0, 0xBE, 0x43, 0xE7, 0xB1, 0xA0, 0x43,
+ 0xE7, 0xB1, 0xB3, 0x43, 0xE7, 0xB1, 0xBB, 0x43,
+ 0xE7, 0xB2, 0x92, 0x43, 0xE7, 0xB2, 0xBE, 0x43,
+ 0xE7, 0xB3, 0x92, 0x43, 0xE7, 0xB3, 0x96, 0x43,
+ 0xE7, 0xB3, 0xA3, 0x43, 0xE7, 0xB3, 0xA7, 0x43,
+ 0xE7, 0xB3, 0xA8, 0x43, 0xE7, 0xB3, 0xB8, 0x43,
+ // Bytes 1180 - 11bf
+ 0xE7, 0xB4, 0x80, 0x43, 0xE7, 0xB4, 0x90, 0x43,
+ 0xE7, 0xB4, 0xA2, 0x43, 0xE7, 0xB4, 0xAF, 0x43,
+ 0xE7, 0xB5, 0x82, 0x43, 0xE7, 0xB5, 0x9B, 0x43,
+ 0xE7, 0xB5, 0xA3, 0x43, 0xE7, 0xB6, 0xA0, 0x43,
+ 0xE7, 0xB6, 0xBE, 0x43, 0xE7, 0xB7, 0x87, 0x43,
+ 0xE7, 0xB7, 0xB4, 0x43, 0xE7, 0xB8, 0x82, 0x43,
+ 0xE7, 0xB8, 0x89, 0x43, 0xE7, 0xB8, 0xB7, 0x43,
+ 0xE7, 0xB9, 0x81, 0x43, 0xE7, 0xB9, 0x85, 0x43,
+ // Bytes 11c0 - 11ff
+ 0xE7, 0xBC, 0xB6, 0x43, 0xE7, 0xBC, 0xBE, 0x43,
+ 0xE7, 0xBD, 0x91, 0x43, 0xE7, 0xBD, 0xB2, 0x43,
+ 0xE7, 0xBD, 0xB9, 0x43, 0xE7, 0xBD, 0xBA, 0x43,
+ 0xE7, 0xBE, 0x85, 0x43, 0xE7, 0xBE, 0x8A, 0x43,
+ 0xE7, 0xBE, 0x95, 0x43, 0xE7, 0xBE, 0x9A, 0x43,
+ 0xE7, 0xBE, 0xBD, 0x43, 0xE7, 0xBF, 0xBA, 0x43,
+ 0xE8, 0x80, 0x81, 0x43, 0xE8, 0x80, 0x85, 0x43,
+ 0xE8, 0x80, 0x8C, 0x43, 0xE8, 0x80, 0x92, 0x43,
+ // Bytes 1200 - 123f
+ 0xE8, 0x80, 0xB3, 0x43, 0xE8, 0x81, 0x86, 0x43,
+ 0xE8, 0x81, 0xA0, 0x43, 0xE8, 0x81, 0xAF, 0x43,
+ 0xE8, 0x81, 0xB0, 0x43, 0xE8, 0x81, 0xBE, 0x43,
+ 0xE8, 0x81, 0xBF, 0x43, 0xE8, 0x82, 0x89, 0x43,
+ 0xE8, 0x82, 0x8B, 0x43, 0xE8, 0x82, 0xAD, 0x43,
+ 0xE8, 0x82, 0xB2, 0x43, 0xE8, 0x84, 0x83, 0x43,
+ 0xE8, 0x84, 0xBE, 0x43, 0xE8, 0x87, 0x98, 0x43,
+ 0xE8, 0x87, 0xA3, 0x43, 0xE8, 0x87, 0xA8, 0x43,
+ // Bytes 1240 - 127f
+ 0xE8, 0x87, 0xAA, 0x43, 0xE8, 0x87, 0xAD, 0x43,
+ 0xE8, 0x87, 0xB3, 0x43, 0xE8, 0x87, 0xBC, 0x43,
+ 0xE8, 0x88, 0x81, 0x43, 0xE8, 0x88, 0x84, 0x43,
+ 0xE8, 0x88, 0x8C, 0x43, 0xE8, 0x88, 0x98, 0x43,
+ 0xE8, 0x88, 0x9B, 0x43, 0xE8, 0x88, 0x9F, 0x43,
+ 0xE8, 0x89, 0xAE, 0x43, 0xE8, 0x89, 0xAF, 0x43,
+ 0xE8, 0x89, 0xB2, 0x43, 0xE8, 0x89, 0xB8, 0x43,
+ 0xE8, 0x89, 0xB9, 0x43, 0xE8, 0x8A, 0x8B, 0x43,
+ // Bytes 1280 - 12bf
+ 0xE8, 0x8A, 0x91, 0x43, 0xE8, 0x8A, 0x9D, 0x43,
+ 0xE8, 0x8A, 0xB1, 0x43, 0xE8, 0x8A, 0xB3, 0x43,
+ 0xE8, 0x8A, 0xBD, 0x43, 0xE8, 0x8B, 0xA5, 0x43,
+ 0xE8, 0x8B, 0xA6, 0x43, 0xE8, 0x8C, 0x9D, 0x43,
+ 0xE8, 0x8C, 0xA3, 0x43, 0xE8, 0x8C, 0xB6, 0x43,
+ 0xE8, 0x8D, 0x92, 0x43, 0xE8, 0x8D, 0x93, 0x43,
+ 0xE8, 0x8D, 0xA3, 0x43, 0xE8, 0x8E, 0xAD, 0x43,
+ 0xE8, 0x8E, 0xBD, 0x43, 0xE8, 0x8F, 0x89, 0x43,
+ // Bytes 12c0 - 12ff
+ 0xE8, 0x8F, 0x8A, 0x43, 0xE8, 0x8F, 0x8C, 0x43,
+ 0xE8, 0x8F, 0x9C, 0x43, 0xE8, 0x8F, 0xA7, 0x43,
+ 0xE8, 0x8F, 0xAF, 0x43, 0xE8, 0x8F, 0xB1, 0x43,
+ 0xE8, 0x90, 0xBD, 0x43, 0xE8, 0x91, 0x89, 0x43,
+ 0xE8, 0x91, 0x97, 0x43, 0xE8, 0x93, 0xAE, 0x43,
+ 0xE8, 0x93, 0xB1, 0x43, 0xE8, 0x93, 0xB3, 0x43,
+ 0xE8, 0x93, 0xBC, 0x43, 0xE8, 0x94, 0x96, 0x43,
+ 0xE8, 0x95, 0xA4, 0x43, 0xE8, 0x97, 0x8D, 0x43,
+ // Bytes 1300 - 133f
+ 0xE8, 0x97, 0xBA, 0x43, 0xE8, 0x98, 0x86, 0x43,
+ 0xE8, 0x98, 0x92, 0x43, 0xE8, 0x98, 0xAD, 0x43,
+ 0xE8, 0x98, 0xBF, 0x43, 0xE8, 0x99, 0x8D, 0x43,
+ 0xE8, 0x99, 0x90, 0x43, 0xE8, 0x99, 0x9C, 0x43,
+ 0xE8, 0x99, 0xA7, 0x43, 0xE8, 0x99, 0xA9, 0x43,
+ 0xE8, 0x99, 0xAB, 0x43, 0xE8, 0x9A, 0x88, 0x43,
+ 0xE8, 0x9A, 0xA9, 0x43, 0xE8, 0x9B, 0xA2, 0x43,
+ 0xE8, 0x9C, 0x8E, 0x43, 0xE8, 0x9C, 0xA8, 0x43,
+ // Bytes 1340 - 137f
+ 0xE8, 0x9D, 0xAB, 0x43, 0xE8, 0x9D, 0xB9, 0x43,
+ 0xE8, 0x9E, 0x86, 0x43, 0xE8, 0x9E, 0xBA, 0x43,
+ 0xE8, 0x9F, 0xA1, 0x43, 0xE8, 0xA0, 0x81, 0x43,
+ 0xE8, 0xA0, 0x9F, 0x43, 0xE8, 0xA1, 0x80, 0x43,
+ 0xE8, 0xA1, 0x8C, 0x43, 0xE8, 0xA1, 0xA0, 0x43,
+ 0xE8, 0xA1, 0xA3, 0x43, 0xE8, 0xA3, 0x82, 0x43,
+ 0xE8, 0xA3, 0x8F, 0x43, 0xE8, 0xA3, 0x97, 0x43,
+ 0xE8, 0xA3, 0x9E, 0x43, 0xE8, 0xA3, 0xA1, 0x43,
+ // Bytes 1380 - 13bf
+ 0xE8, 0xA3, 0xB8, 0x43, 0xE8, 0xA3, 0xBA, 0x43,
+ 0xE8, 0xA4, 0x90, 0x43, 0xE8, 0xA5, 0x81, 0x43,
+ 0xE8, 0xA5, 0xA4, 0x43, 0xE8, 0xA5, 0xBE, 0x43,
+ 0xE8, 0xA6, 0x86, 0x43, 0xE8, 0xA6, 0x8B, 0x43,
+ 0xE8, 0xA6, 0x96, 0x43, 0xE8, 0xA7, 0x92, 0x43,
+ 0xE8, 0xA7, 0xA3, 0x43, 0xE8, 0xA8, 0x80, 0x43,
+ 0xE8, 0xAA, 0xA0, 0x43, 0xE8, 0xAA, 0xAA, 0x43,
+ 0xE8, 0xAA, 0xBF, 0x43, 0xE8, 0xAB, 0x8B, 0x43,
+ // Bytes 13c0 - 13ff
+ 0xE8, 0xAB, 0x92, 0x43, 0xE8, 0xAB, 0x96, 0x43,
+ 0xE8, 0xAB, 0xAD, 0x43, 0xE8, 0xAB, 0xB8, 0x43,
+ 0xE8, 0xAB, 0xBE, 0x43, 0xE8, 0xAC, 0x81, 0x43,
+ 0xE8, 0xAC, 0xB9, 0x43, 0xE8, 0xAD, 0x98, 0x43,
+ 0xE8, 0xAE, 0x80, 0x43, 0xE8, 0xAE, 0x8A, 0x43,
+ 0xE8, 0xB0, 0xB7, 0x43, 0xE8, 0xB1, 0x86, 0x43,
+ 0xE8, 0xB1, 0x88, 0x43, 0xE8, 0xB1, 0x95, 0x43,
+ 0xE8, 0xB1, 0xB8, 0x43, 0xE8, 0xB2, 0x9D, 0x43,
+ // Bytes 1400 - 143f
+ 0xE8, 0xB2, 0xA1, 0x43, 0xE8, 0xB2, 0xA9, 0x43,
+ 0xE8, 0xB2, 0xAB, 0x43, 0xE8, 0xB3, 0x81, 0x43,
+ 0xE8, 0xB3, 0x82, 0x43, 0xE8, 0xB3, 0x87, 0x43,
+ 0xE8, 0xB3, 0x88, 0x43, 0xE8, 0xB3, 0x93, 0x43,
+ 0xE8, 0xB4, 0x88, 0x43, 0xE8, 0xB4, 0x9B, 0x43,
+ 0xE8, 0xB5, 0xA4, 0x43, 0xE8, 0xB5, 0xB0, 0x43,
+ 0xE8, 0xB5, 0xB7, 0x43, 0xE8, 0xB6, 0xB3, 0x43,
+ 0xE8, 0xB6, 0xBC, 0x43, 0xE8, 0xB7, 0x8B, 0x43,
+ // Bytes 1440 - 147f
+ 0xE8, 0xB7, 0xAF, 0x43, 0xE8, 0xB7, 0xB0, 0x43,
+ 0xE8, 0xBA, 0xAB, 0x43, 0xE8, 0xBB, 0x8A, 0x43,
+ 0xE8, 0xBB, 0x94, 0x43, 0xE8, 0xBC, 0xA6, 0x43,
+ 0xE8, 0xBC, 0xAA, 0x43, 0xE8, 0xBC, 0xB8, 0x43,
+ 0xE8, 0xBC, 0xBB, 0x43, 0xE8, 0xBD, 0xA2, 0x43,
+ 0xE8, 0xBE, 0x9B, 0x43, 0xE8, 0xBE, 0x9E, 0x43,
+ 0xE8, 0xBE, 0xB0, 0x43, 0xE8, 0xBE, 0xB5, 0x43,
+ 0xE8, 0xBE, 0xB6, 0x43, 0xE9, 0x80, 0xA3, 0x43,
+ // Bytes 1480 - 14bf
+ 0xE9, 0x80, 0xB8, 0x43, 0xE9, 0x81, 0x8A, 0x43,
+ 0xE9, 0x81, 0xA9, 0x43, 0xE9, 0x81, 0xB2, 0x43,
+ 0xE9, 0x81, 0xBC, 0x43, 0xE9, 0x82, 0x8F, 0x43,
+ 0xE9, 0x82, 0x91, 0x43, 0xE9, 0x82, 0x94, 0x43,
+ 0xE9, 0x83, 0x8E, 0x43, 0xE9, 0x83, 0x9E, 0x43,
+ 0xE9, 0x83, 0xB1, 0x43, 0xE9, 0x83, 0xBD, 0x43,
+ 0xE9, 0x84, 0x91, 0x43, 0xE9, 0x84, 0x9B, 0x43,
+ 0xE9, 0x85, 0x89, 0x43, 0xE9, 0x85, 0xAA, 0x43,
+ // Bytes 14c0 - 14ff
+ 0xE9, 0x86, 0x99, 0x43, 0xE9, 0x86, 0xB4, 0x43,
+ 0xE9, 0x87, 0x86, 0x43, 0xE9, 0x87, 0x8C, 0x43,
+ 0xE9, 0x87, 0x8F, 0x43, 0xE9, 0x87, 0x91, 0x43,
+ 0xE9, 0x88, 0xB4, 0x43, 0xE9, 0x88, 0xB8, 0x43,
+ 0xE9, 0x89, 0xB6, 0x43, 0xE9, 0x89, 0xBC, 0x43,
+ 0xE9, 0x8B, 0x97, 0x43, 0xE9, 0x8B, 0x98, 0x43,
+ 0xE9, 0x8C, 0x84, 0x43, 0xE9, 0x8D, 0x8A, 0x43,
+ 0xE9, 0x8F, 0xB9, 0x43, 0xE9, 0x90, 0x95, 0x43,
+ // Bytes 1500 - 153f
+ 0xE9, 0x95, 0xB7, 0x43, 0xE9, 0x96, 0x80, 0x43,
+ 0xE9, 0x96, 0x8B, 0x43, 0xE9, 0x96, 0xAD, 0x43,
+ 0xE9, 0x96, 0xB7, 0x43, 0xE9, 0x98, 0x9C, 0x43,
+ 0xE9, 0x98, 0xAE, 0x43, 0xE9, 0x99, 0x8B, 0x43,
+ 0xE9, 0x99, 0x8D, 0x43, 0xE9, 0x99, 0xB5, 0x43,
+ 0xE9, 0x99, 0xB8, 0x43, 0xE9, 0x99, 0xBC, 0x43,
+ 0xE9, 0x9A, 0x86, 0x43, 0xE9, 0x9A, 0xA3, 0x43,
+ 0xE9, 0x9A, 0xB6, 0x43, 0xE9, 0x9A, 0xB7, 0x43,
+ // Bytes 1540 - 157f
+ 0xE9, 0x9A, 0xB8, 0x43, 0xE9, 0x9A, 0xB9, 0x43,
+ 0xE9, 0x9B, 0x83, 0x43, 0xE9, 0x9B, 0xA2, 0x43,
+ 0xE9, 0x9B, 0xA3, 0x43, 0xE9, 0x9B, 0xA8, 0x43,
+ 0xE9, 0x9B, 0xB6, 0x43, 0xE9, 0x9B, 0xB7, 0x43,
+ 0xE9, 0x9C, 0xA3, 0x43, 0xE9, 0x9C, 0xB2, 0x43,
+ 0xE9, 0x9D, 0x88, 0x43, 0xE9, 0x9D, 0x91, 0x43,
+ 0xE9, 0x9D, 0x96, 0x43, 0xE9, 0x9D, 0x9E, 0x43,
+ 0xE9, 0x9D, 0xA2, 0x43, 0xE9, 0x9D, 0xA9, 0x43,
+ // Bytes 1580 - 15bf
+ 0xE9, 0x9F, 0x8B, 0x43, 0xE9, 0x9F, 0x9B, 0x43,
+ 0xE9, 0x9F, 0xA0, 0x43, 0xE9, 0x9F, 0xAD, 0x43,
+ 0xE9, 0x9F, 0xB3, 0x43, 0xE9, 0x9F, 0xBF, 0x43,
+ 0xE9, 0xA0, 0x81, 0x43, 0xE9, 0xA0, 0x85, 0x43,
+ 0xE9, 0xA0, 0x8B, 0x43, 0xE9, 0xA0, 0x98, 0x43,
+ 0xE9, 0xA0, 0xA9, 0x43, 0xE9, 0xA0, 0xBB, 0x43,
+ 0xE9, 0xA1, 0x9E, 0x43, 0xE9, 0xA2, 0xA8, 0x43,
+ 0xE9, 0xA3, 0x9B, 0x43, 0xE9, 0xA3, 0x9F, 0x43,
+ // Bytes 15c0 - 15ff
+ 0xE9, 0xA3, 0xA2, 0x43, 0xE9, 0xA3, 0xAF, 0x43,
+ 0xE9, 0xA3, 0xBC, 0x43, 0xE9, 0xA4, 0xA8, 0x43,
+ 0xE9, 0xA4, 0xA9, 0x43, 0xE9, 0xA6, 0x96, 0x43,
+ 0xE9, 0xA6, 0x99, 0x43, 0xE9, 0xA6, 0xA7, 0x43,
+ 0xE9, 0xA6, 0xAC, 0x43, 0xE9, 0xA7, 0x82, 0x43,
+ 0xE9, 0xA7, 0xB1, 0x43, 0xE9, 0xA7, 0xBE, 0x43,
+ 0xE9, 0xA9, 0xAA, 0x43, 0xE9, 0xAA, 0xA8, 0x43,
+ 0xE9, 0xAB, 0x98, 0x43, 0xE9, 0xAB, 0x9F, 0x43,
+ // Bytes 1600 - 163f
+ 0xE9, 0xAC, 0x92, 0x43, 0xE9, 0xAC, 0xA5, 0x43,
+ 0xE9, 0xAC, 0xAF, 0x43, 0xE9, 0xAC, 0xB2, 0x43,
+ 0xE9, 0xAC, 0xBC, 0x43, 0xE9, 0xAD, 0x9A, 0x43,
+ 0xE9, 0xAD, 0xAF, 0x43, 0xE9, 0xB1, 0x80, 0x43,
+ 0xE9, 0xB1, 0x97, 0x43, 0xE9, 0xB3, 0xA5, 0x43,
+ 0xE9, 0xB3, 0xBD, 0x43, 0xE9, 0xB5, 0xA7, 0x43,
+ 0xE9, 0xB6, 0xB4, 0x43, 0xE9, 0xB7, 0xBA, 0x43,
+ 0xE9, 0xB8, 0x9E, 0x43, 0xE9, 0xB9, 0xB5, 0x43,
+ // Bytes 1640 - 167f
+ 0xE9, 0xB9, 0xBF, 0x43, 0xE9, 0xBA, 0x97, 0x43,
+ 0xE9, 0xBA, 0x9F, 0x43, 0xE9, 0xBA, 0xA5, 0x43,
+ 0xE9, 0xBA, 0xBB, 0x43, 0xE9, 0xBB, 0x83, 0x43,
+ 0xE9, 0xBB, 0x8D, 0x43, 0xE9, 0xBB, 0x8E, 0x43,
+ 0xE9, 0xBB, 0x91, 0x43, 0xE9, 0xBB, 0xB9, 0x43,
+ 0xE9, 0xBB, 0xBD, 0x43, 0xE9, 0xBB, 0xBE, 0x43,
+ 0xE9, 0xBC, 0x85, 0x43, 0xE9, 0xBC, 0x8E, 0x43,
+ 0xE9, 0xBC, 0x8F, 0x43, 0xE9, 0xBC, 0x93, 0x43,
+ // Bytes 1680 - 16bf
+ 0xE9, 0xBC, 0x96, 0x43, 0xE9, 0xBC, 0xA0, 0x43,
+ 0xE9, 0xBC, 0xBB, 0x43, 0xE9, 0xBD, 0x83, 0x43,
+ 0xE9, 0xBD, 0x8A, 0x43, 0xE9, 0xBD, 0x92, 0x43,
+ 0xE9, 0xBE, 0x8D, 0x43, 0xE9, 0xBE, 0x8E, 0x43,
+ 0xE9, 0xBE, 0x9C, 0x43, 0xE9, 0xBE, 0x9F, 0x43,
+ 0xE9, 0xBE, 0xA0, 0x43, 0xEA, 0x9C, 0xA7, 0x43,
+ 0xEA, 0x9D, 0xAF, 0x43, 0xEA, 0xAC, 0xB7, 0x43,
+ 0xEA, 0xAD, 0x92, 0x44, 0xF0, 0xA0, 0x84, 0xA2,
+ // Bytes 16c0 - 16ff
+ 0x44, 0xF0, 0xA0, 0x94, 0x9C, 0x44, 0xF0, 0xA0,
+ 0x94, 0xA5, 0x44, 0xF0, 0xA0, 0x95, 0x8B, 0x44,
+ 0xF0, 0xA0, 0x98, 0xBA, 0x44, 0xF0, 0xA0, 0xA0,
+ 0x84, 0x44, 0xF0, 0xA0, 0xA3, 0x9E, 0x44, 0xF0,
+ 0xA0, 0xA8, 0xAC, 0x44, 0xF0, 0xA0, 0xAD, 0xA3,
+ 0x44, 0xF0, 0xA1, 0x93, 0xA4, 0x44, 0xF0, 0xA1,
+ 0x9A, 0xA8, 0x44, 0xF0, 0xA1, 0x9B, 0xAA, 0x44,
+ 0xF0, 0xA1, 0xA7, 0x88, 0x44, 0xF0, 0xA1, 0xAC,
+ // Bytes 1700 - 173f
+ 0x98, 0x44, 0xF0, 0xA1, 0xB4, 0x8B, 0x44, 0xF0,
+ 0xA1, 0xB7, 0xA4, 0x44, 0xF0, 0xA1, 0xB7, 0xA6,
+ 0x44, 0xF0, 0xA2, 0x86, 0x83, 0x44, 0xF0, 0xA2,
+ 0x86, 0x9F, 0x44, 0xF0, 0xA2, 0x8C, 0xB1, 0x44,
+ 0xF0, 0xA2, 0x9B, 0x94, 0x44, 0xF0, 0xA2, 0xA1,
+ 0x84, 0x44, 0xF0, 0xA2, 0xA1, 0x8A, 0x44, 0xF0,
+ 0xA2, 0xAC, 0x8C, 0x44, 0xF0, 0xA2, 0xAF, 0xB1,
+ 0x44, 0xF0, 0xA3, 0x80, 0x8A, 0x44, 0xF0, 0xA3,
+ // Bytes 1740 - 177f
+ 0x8A, 0xB8, 0x44, 0xF0, 0xA3, 0x8D, 0x9F, 0x44,
+ 0xF0, 0xA3, 0x8E, 0x93, 0x44, 0xF0, 0xA3, 0x8E,
+ 0x9C, 0x44, 0xF0, 0xA3, 0x8F, 0x83, 0x44, 0xF0,
+ 0xA3, 0x8F, 0x95, 0x44, 0xF0, 0xA3, 0x91, 0xAD,
+ 0x44, 0xF0, 0xA3, 0x9A, 0xA3, 0x44, 0xF0, 0xA3,
+ 0xA2, 0xA7, 0x44, 0xF0, 0xA3, 0xAA, 0x8D, 0x44,
+ 0xF0, 0xA3, 0xAB, 0xBA, 0x44, 0xF0, 0xA3, 0xB2,
+ 0xBC, 0x44, 0xF0, 0xA3, 0xB4, 0x9E, 0x44, 0xF0,
+ // Bytes 1780 - 17bf
+ 0xA3, 0xBB, 0x91, 0x44, 0xF0, 0xA3, 0xBD, 0x9E,
+ 0x44, 0xF0, 0xA3, 0xBE, 0x8E, 0x44, 0xF0, 0xA4,
+ 0x89, 0xA3, 0x44, 0xF0, 0xA4, 0x8B, 0xAE, 0x44,
+ 0xF0, 0xA4, 0x8E, 0xAB, 0x44, 0xF0, 0xA4, 0x98,
+ 0x88, 0x44, 0xF0, 0xA4, 0x9C, 0xB5, 0x44, 0xF0,
+ 0xA4, 0xA0, 0x94, 0x44, 0xF0, 0xA4, 0xB0, 0xB6,
+ 0x44, 0xF0, 0xA4, 0xB2, 0x92, 0x44, 0xF0, 0xA4,
+ 0xBE, 0xA1, 0x44, 0xF0, 0xA4, 0xBE, 0xB8, 0x44,
+ // Bytes 17c0 - 17ff
+ 0xF0, 0xA5, 0x81, 0x84, 0x44, 0xF0, 0xA5, 0x83,
+ 0xB2, 0x44, 0xF0, 0xA5, 0x83, 0xB3, 0x44, 0xF0,
+ 0xA5, 0x84, 0x99, 0x44, 0xF0, 0xA5, 0x84, 0xB3,
+ 0x44, 0xF0, 0xA5, 0x89, 0x89, 0x44, 0xF0, 0xA5,
+ 0x90, 0x9D, 0x44, 0xF0, 0xA5, 0x98, 0xA6, 0x44,
+ 0xF0, 0xA5, 0x9A, 0x9A, 0x44, 0xF0, 0xA5, 0x9B,
+ 0x85, 0x44, 0xF0, 0xA5, 0xA5, 0xBC, 0x44, 0xF0,
+ 0xA5, 0xAA, 0xA7, 0x44, 0xF0, 0xA5, 0xAE, 0xAB,
+ // Bytes 1800 - 183f
+ 0x44, 0xF0, 0xA5, 0xB2, 0x80, 0x44, 0xF0, 0xA5,
+ 0xB3, 0x90, 0x44, 0xF0, 0xA5, 0xBE, 0x86, 0x44,
+ 0xF0, 0xA6, 0x87, 0x9A, 0x44, 0xF0, 0xA6, 0x88,
+ 0xA8, 0x44, 0xF0, 0xA6, 0x89, 0x87, 0x44, 0xF0,
+ 0xA6, 0x8B, 0x99, 0x44, 0xF0, 0xA6, 0x8C, 0xBE,
+ 0x44, 0xF0, 0xA6, 0x93, 0x9A, 0x44, 0xF0, 0xA6,
+ 0x94, 0xA3, 0x44, 0xF0, 0xA6, 0x96, 0xA8, 0x44,
+ 0xF0, 0xA6, 0x9E, 0xA7, 0x44, 0xF0, 0xA6, 0x9E,
+ // Bytes 1840 - 187f
+ 0xB5, 0x44, 0xF0, 0xA6, 0xAC, 0xBC, 0x44, 0xF0,
+ 0xA6, 0xB0, 0xB6, 0x44, 0xF0, 0xA6, 0xB3, 0x95,
+ 0x44, 0xF0, 0xA6, 0xB5, 0xAB, 0x44, 0xF0, 0xA6,
+ 0xBC, 0xAC, 0x44, 0xF0, 0xA6, 0xBE, 0xB1, 0x44,
+ 0xF0, 0xA7, 0x83, 0x92, 0x44, 0xF0, 0xA7, 0x8F,
+ 0x8A, 0x44, 0xF0, 0xA7, 0x99, 0xA7, 0x44, 0xF0,
+ 0xA7, 0xA2, 0xAE, 0x44, 0xF0, 0xA7, 0xA5, 0xA6,
+ 0x44, 0xF0, 0xA7, 0xB2, 0xA8, 0x44, 0xF0, 0xA7,
+ // Bytes 1880 - 18bf
+ 0xBB, 0x93, 0x44, 0xF0, 0xA7, 0xBC, 0xAF, 0x44,
+ 0xF0, 0xA8, 0x97, 0x92, 0x44, 0xF0, 0xA8, 0x97,
+ 0xAD, 0x44, 0xF0, 0xA8, 0x9C, 0xAE, 0x44, 0xF0,
+ 0xA8, 0xAF, 0xBA, 0x44, 0xF0, 0xA8, 0xB5, 0xB7,
+ 0x44, 0xF0, 0xA9, 0x85, 0x85, 0x44, 0xF0, 0xA9,
+ 0x87, 0x9F, 0x44, 0xF0, 0xA9, 0x88, 0x9A, 0x44,
+ 0xF0, 0xA9, 0x90, 0x8A, 0x44, 0xF0, 0xA9, 0x92,
+ 0x96, 0x44, 0xF0, 0xA9, 0x96, 0xB6, 0x44, 0xF0,
+ // Bytes 18c0 - 18ff
+ 0xA9, 0xAC, 0xB0, 0x44, 0xF0, 0xAA, 0x83, 0x8E,
+ 0x44, 0xF0, 0xAA, 0x84, 0x85, 0x44, 0xF0, 0xAA,
+ 0x88, 0x8E, 0x44, 0xF0, 0xAA, 0x8A, 0x91, 0x44,
+ 0xF0, 0xAA, 0x8E, 0x92, 0x44, 0xF0, 0xAA, 0x98,
+ 0x80, 0x06, 0xE0, 0xA7, 0x87, 0xE0, 0xA6, 0xBE,
+ 0x06, 0xE0, 0xA7, 0x87, 0xE0, 0xA7, 0x97, 0x06,
+ 0xE0, 0xAD, 0x87, 0xE0, 0xAC, 0xBE, 0x06, 0xE0,
+ 0xAD, 0x87, 0xE0, 0xAD, 0x96, 0x06, 0xE0, 0xAD,
+ // Bytes 1900 - 193f
+ 0x87, 0xE0, 0xAD, 0x97, 0x06, 0xE0, 0xAE, 0x92,
+ 0xE0, 0xAF, 0x97, 0x06, 0xE0, 0xAF, 0x86, 0xE0,
+ 0xAE, 0xBE, 0x06, 0xE0, 0xAF, 0x86, 0xE0, 0xAF,
+ 0x97, 0x06, 0xE0, 0xAF, 0x87, 0xE0, 0xAE, 0xBE,
+ 0x06, 0xE0, 0xB2, 0xBF, 0xE0, 0xB3, 0x95, 0x06,
+ 0xE0, 0xB3, 0x86, 0xE0, 0xB3, 0x95, 0x06, 0xE0,
+ 0xB3, 0x86, 0xE0, 0xB3, 0x96, 0x06, 0xE0, 0xB5,
+ 0x86, 0xE0, 0xB4, 0xBE, 0x06, 0xE0, 0xB5, 0x86,
+ // Bytes 1940 - 197f
+ 0xE0, 0xB5, 0x97, 0x06, 0xE0, 0xB5, 0x87, 0xE0,
+ 0xB4, 0xBE, 0x06, 0xE0, 0xB7, 0x99, 0xE0, 0xB7,
+ 0x9F, 0x06, 0xE1, 0x80, 0xA5, 0xE1, 0x80, 0xAE,
+ 0x06, 0xE1, 0xAC, 0x85, 0xE1, 0xAC, 0xB5, 0x06,
+ 0xE1, 0xAC, 0x87, 0xE1, 0xAC, 0xB5, 0x06, 0xE1,
+ 0xAC, 0x89, 0xE1, 0xAC, 0xB5, 0x06, 0xE1, 0xAC,
+ 0x8B, 0xE1, 0xAC, 0xB5, 0x06, 0xE1, 0xAC, 0x8D,
+ 0xE1, 0xAC, 0xB5, 0x06, 0xE1, 0xAC, 0x91, 0xE1,
+ // Bytes 1980 - 19bf
+ 0xAC, 0xB5, 0x06, 0xE1, 0xAC, 0xBA, 0xE1, 0xAC,
+ 0xB5, 0x06, 0xE1, 0xAC, 0xBC, 0xE1, 0xAC, 0xB5,
+ 0x06, 0xE1, 0xAC, 0xBE, 0xE1, 0xAC, 0xB5, 0x06,
+ 0xE1, 0xAC, 0xBF, 0xE1, 0xAC, 0xB5, 0x06, 0xE1,
+ 0xAD, 0x82, 0xE1, 0xAC, 0xB5, 0x08, 0xF0, 0x91,
+ 0x84, 0xB1, 0xF0, 0x91, 0x84, 0xA7, 0x08, 0xF0,
+ 0x91, 0x84, 0xB2, 0xF0, 0x91, 0x84, 0xA7, 0x08,
+ 0xF0, 0x91, 0x8D, 0x87, 0xF0, 0x91, 0x8C, 0xBE,
+ // Bytes 19c0 - 19ff
+ 0x08, 0xF0, 0x91, 0x8D, 0x87, 0xF0, 0x91, 0x8D,
+ 0x97, 0x08, 0xF0, 0x91, 0x92, 0xB9, 0xF0, 0x91,
+ 0x92, 0xB0, 0x08, 0xF0, 0x91, 0x92, 0xB9, 0xF0,
+ 0x91, 0x92, 0xBA, 0x08, 0xF0, 0x91, 0x92, 0xB9,
+ 0xF0, 0x91, 0x92, 0xBD, 0x08, 0xF0, 0x91, 0x96,
+ 0xB8, 0xF0, 0x91, 0x96, 0xAF, 0x08, 0xF0, 0x91,
+ 0x96, 0xB9, 0xF0, 0x91, 0x96, 0xAF, 0x09, 0xE0,
+ 0xB3, 0x86, 0xE0, 0xB3, 0x82, 0xE0, 0xB3, 0x95,
+ // Bytes 1a00 - 1a3f
+ 0x42, 0x21, 0x21, 0x42, 0x21, 0x3F, 0x42, 0x2E,
+ 0x2E, 0x42, 0x30, 0x2C, 0x42, 0x30, 0x2E, 0x42,
+ 0x31, 0x2C, 0x42, 0x31, 0x2E, 0x42, 0x31, 0x30,
+ 0x42, 0x31, 0x31, 0x42, 0x31, 0x32, 0x42, 0x31,
+ 0x33, 0x42, 0x31, 0x34, 0x42, 0x31, 0x35, 0x42,
+ 0x31, 0x36, 0x42, 0x31, 0x37, 0x42, 0x31, 0x38,
+ 0x42, 0x31, 0x39, 0x42, 0x32, 0x2C, 0x42, 0x32,
+ 0x2E, 0x42, 0x32, 0x30, 0x42, 0x32, 0x31, 0x42,
+ // Bytes 1a40 - 1a7f
+ 0x32, 0x32, 0x42, 0x32, 0x33, 0x42, 0x32, 0x34,
+ 0x42, 0x32, 0x35, 0x42, 0x32, 0x36, 0x42, 0x32,
+ 0x37, 0x42, 0x32, 0x38, 0x42, 0x32, 0x39, 0x42,
+ 0x33, 0x2C, 0x42, 0x33, 0x2E, 0x42, 0x33, 0x30,
+ 0x42, 0x33, 0x31, 0x42, 0x33, 0x32, 0x42, 0x33,
+ 0x33, 0x42, 0x33, 0x34, 0x42, 0x33, 0x35, 0x42,
+ 0x33, 0x36, 0x42, 0x33, 0x37, 0x42, 0x33, 0x38,
+ 0x42, 0x33, 0x39, 0x42, 0x34, 0x2C, 0x42, 0x34,
+ // Bytes 1a80 - 1abf
+ 0x2E, 0x42, 0x34, 0x30, 0x42, 0x34, 0x31, 0x42,
+ 0x34, 0x32, 0x42, 0x34, 0x33, 0x42, 0x34, 0x34,
+ 0x42, 0x34, 0x35, 0x42, 0x34, 0x36, 0x42, 0x34,
+ 0x37, 0x42, 0x34, 0x38, 0x42, 0x34, 0x39, 0x42,
+ 0x35, 0x2C, 0x42, 0x35, 0x2E, 0x42, 0x35, 0x30,
+ 0x42, 0x36, 0x2C, 0x42, 0x36, 0x2E, 0x42, 0x37,
+ 0x2C, 0x42, 0x37, 0x2E, 0x42, 0x38, 0x2C, 0x42,
+ 0x38, 0x2E, 0x42, 0x39, 0x2C, 0x42, 0x39, 0x2E,
+ // Bytes 1ac0 - 1aff
+ 0x42, 0x3D, 0x3D, 0x42, 0x3F, 0x21, 0x42, 0x3F,
+ 0x3F, 0x42, 0x41, 0x55, 0x42, 0x42, 0x71, 0x42,
+ 0x43, 0x44, 0x42, 0x44, 0x4A, 0x42, 0x44, 0x5A,
+ 0x42, 0x44, 0x7A, 0x42, 0x47, 0x42, 0x42, 0x47,
+ 0x79, 0x42, 0x48, 0x50, 0x42, 0x48, 0x56, 0x42,
+ 0x48, 0x67, 0x42, 0x48, 0x7A, 0x42, 0x49, 0x49,
+ 0x42, 0x49, 0x4A, 0x42, 0x49, 0x55, 0x42, 0x49,
+ 0x56, 0x42, 0x49, 0x58, 0x42, 0x4B, 0x42, 0x42,
+ // Bytes 1b00 - 1b3f
+ 0x4B, 0x4B, 0x42, 0x4B, 0x4D, 0x42, 0x4C, 0x4A,
+ 0x42, 0x4C, 0x6A, 0x42, 0x4D, 0x42, 0x42, 0x4D,
+ 0x43, 0x42, 0x4D, 0x44, 0x42, 0x4D, 0x56, 0x42,
+ 0x4D, 0x57, 0x42, 0x4E, 0x4A, 0x42, 0x4E, 0x6A,
+ 0x42, 0x4E, 0x6F, 0x42, 0x50, 0x48, 0x42, 0x50,
+ 0x52, 0x42, 0x50, 0x61, 0x42, 0x52, 0x73, 0x42,
+ 0x53, 0x44, 0x42, 0x53, 0x4D, 0x42, 0x53, 0x53,
+ 0x42, 0x53, 0x76, 0x42, 0x54, 0x4D, 0x42, 0x56,
+ // Bytes 1b40 - 1b7f
+ 0x49, 0x42, 0x57, 0x43, 0x42, 0x57, 0x5A, 0x42,
+ 0x57, 0x62, 0x42, 0x58, 0x49, 0x42, 0x63, 0x63,
+ 0x42, 0x63, 0x64, 0x42, 0x63, 0x6D, 0x42, 0x64,
+ 0x42, 0x42, 0x64, 0x61, 0x42, 0x64, 0x6C, 0x42,
+ 0x64, 0x6D, 0x42, 0x64, 0x7A, 0x42, 0x65, 0x56,
+ 0x42, 0x66, 0x66, 0x42, 0x66, 0x69, 0x42, 0x66,
+ 0x6C, 0x42, 0x66, 0x6D, 0x42, 0x68, 0x61, 0x42,
+ 0x69, 0x69, 0x42, 0x69, 0x6A, 0x42, 0x69, 0x6E,
+ // Bytes 1b80 - 1bbf
+ 0x42, 0x69, 0x76, 0x42, 0x69, 0x78, 0x42, 0x6B,
+ 0x41, 0x42, 0x6B, 0x56, 0x42, 0x6B, 0x57, 0x42,
+ 0x6B, 0x67, 0x42, 0x6B, 0x6C, 0x42, 0x6B, 0x6D,
+ 0x42, 0x6B, 0x74, 0x42, 0x6C, 0x6A, 0x42, 0x6C,
+ 0x6D, 0x42, 0x6C, 0x6E, 0x42, 0x6C, 0x78, 0x42,
+ 0x6D, 0x32, 0x42, 0x6D, 0x33, 0x42, 0x6D, 0x41,
+ 0x42, 0x6D, 0x56, 0x42, 0x6D, 0x57, 0x42, 0x6D,
+ 0x62, 0x42, 0x6D, 0x67, 0x42, 0x6D, 0x6C, 0x42,
+ // Bytes 1bc0 - 1bff
+ 0x6D, 0x6D, 0x42, 0x6D, 0x73, 0x42, 0x6E, 0x41,
+ 0x42, 0x6E, 0x46, 0x42, 0x6E, 0x56, 0x42, 0x6E,
+ 0x57, 0x42, 0x6E, 0x6A, 0x42, 0x6E, 0x6D, 0x42,
+ 0x6E, 0x73, 0x42, 0x6F, 0x56, 0x42, 0x70, 0x41,
+ 0x42, 0x70, 0x46, 0x42, 0x70, 0x56, 0x42, 0x70,
+ 0x57, 0x42, 0x70, 0x63, 0x42, 0x70, 0x73, 0x42,
+ 0x73, 0x72, 0x42, 0x73, 0x74, 0x42, 0x76, 0x69,
+ 0x42, 0x78, 0x69, 0x43, 0x28, 0x31, 0x29, 0x43,
+ // Bytes 1c00 - 1c3f
+ 0x28, 0x32, 0x29, 0x43, 0x28, 0x33, 0x29, 0x43,
+ 0x28, 0x34, 0x29, 0x43, 0x28, 0x35, 0x29, 0x43,
+ 0x28, 0x36, 0x29, 0x43, 0x28, 0x37, 0x29, 0x43,
+ 0x28, 0x38, 0x29, 0x43, 0x28, 0x39, 0x29, 0x43,
+ 0x28, 0x41, 0x29, 0x43, 0x28, 0x42, 0x29, 0x43,
+ 0x28, 0x43, 0x29, 0x43, 0x28, 0x44, 0x29, 0x43,
+ 0x28, 0x45, 0x29, 0x43, 0x28, 0x46, 0x29, 0x43,
+ 0x28, 0x47, 0x29, 0x43, 0x28, 0x48, 0x29, 0x43,
+ // Bytes 1c40 - 1c7f
+ 0x28, 0x49, 0x29, 0x43, 0x28, 0x4A, 0x29, 0x43,
+ 0x28, 0x4B, 0x29, 0x43, 0x28, 0x4C, 0x29, 0x43,
+ 0x28, 0x4D, 0x29, 0x43, 0x28, 0x4E, 0x29, 0x43,
+ 0x28, 0x4F, 0x29, 0x43, 0x28, 0x50, 0x29, 0x43,
+ 0x28, 0x51, 0x29, 0x43, 0x28, 0x52, 0x29, 0x43,
+ 0x28, 0x53, 0x29, 0x43, 0x28, 0x54, 0x29, 0x43,
+ 0x28, 0x55, 0x29, 0x43, 0x28, 0x56, 0x29, 0x43,
+ 0x28, 0x57, 0x29, 0x43, 0x28, 0x58, 0x29, 0x43,
+ // Bytes 1c80 - 1cbf
+ 0x28, 0x59, 0x29, 0x43, 0x28, 0x5A, 0x29, 0x43,
+ 0x28, 0x61, 0x29, 0x43, 0x28, 0x62, 0x29, 0x43,
+ 0x28, 0x63, 0x29, 0x43, 0x28, 0x64, 0x29, 0x43,
+ 0x28, 0x65, 0x29, 0x43, 0x28, 0x66, 0x29, 0x43,
+ 0x28, 0x67, 0x29, 0x43, 0x28, 0x68, 0x29, 0x43,
+ 0x28, 0x69, 0x29, 0x43, 0x28, 0x6A, 0x29, 0x43,
+ 0x28, 0x6B, 0x29, 0x43, 0x28, 0x6C, 0x29, 0x43,
+ 0x28, 0x6D, 0x29, 0x43, 0x28, 0x6E, 0x29, 0x43,
+ // Bytes 1cc0 - 1cff
+ 0x28, 0x6F, 0x29, 0x43, 0x28, 0x70, 0x29, 0x43,
+ 0x28, 0x71, 0x29, 0x43, 0x28, 0x72, 0x29, 0x43,
+ 0x28, 0x73, 0x29, 0x43, 0x28, 0x74, 0x29, 0x43,
+ 0x28, 0x75, 0x29, 0x43, 0x28, 0x76, 0x29, 0x43,
+ 0x28, 0x77, 0x29, 0x43, 0x28, 0x78, 0x29, 0x43,
+ 0x28, 0x79, 0x29, 0x43, 0x28, 0x7A, 0x29, 0x43,
+ 0x2E, 0x2E, 0x2E, 0x43, 0x31, 0x30, 0x2E, 0x43,
+ 0x31, 0x31, 0x2E, 0x43, 0x31, 0x32, 0x2E, 0x43,
+ // Bytes 1d00 - 1d3f
+ 0x31, 0x33, 0x2E, 0x43, 0x31, 0x34, 0x2E, 0x43,
+ 0x31, 0x35, 0x2E, 0x43, 0x31, 0x36, 0x2E, 0x43,
+ 0x31, 0x37, 0x2E, 0x43, 0x31, 0x38, 0x2E, 0x43,
+ 0x31, 0x39, 0x2E, 0x43, 0x32, 0x30, 0x2E, 0x43,
+ 0x3A, 0x3A, 0x3D, 0x43, 0x3D, 0x3D, 0x3D, 0x43,
+ 0x43, 0x6F, 0x2E, 0x43, 0x46, 0x41, 0x58, 0x43,
+ 0x47, 0x48, 0x7A, 0x43, 0x47, 0x50, 0x61, 0x43,
+ 0x49, 0x49, 0x49, 0x43, 0x4C, 0x54, 0x44, 0x43,
+ // Bytes 1d40 - 1d7f
+ 0x4C, 0xC2, 0xB7, 0x43, 0x4D, 0x48, 0x7A, 0x43,
+ 0x4D, 0x50, 0x61, 0x43, 0x4D, 0xCE, 0xA9, 0x43,
+ 0x50, 0x50, 0x4D, 0x43, 0x50, 0x50, 0x56, 0x43,
+ 0x50, 0x54, 0x45, 0x43, 0x54, 0x45, 0x4C, 0x43,
+ 0x54, 0x48, 0x7A, 0x43, 0x56, 0x49, 0x49, 0x43,
+ 0x58, 0x49, 0x49, 0x43, 0x61, 0x2F, 0x63, 0x43,
+ 0x61, 0x2F, 0x73, 0x43, 0x61, 0xCA, 0xBE, 0x43,
+ 0x62, 0x61, 0x72, 0x43, 0x63, 0x2F, 0x6F, 0x43,
+ // Bytes 1d80 - 1dbf
+ 0x63, 0x2F, 0x75, 0x43, 0x63, 0x61, 0x6C, 0x43,
+ 0x63, 0x6D, 0x32, 0x43, 0x63, 0x6D, 0x33, 0x43,
+ 0x64, 0x6D, 0x32, 0x43, 0x64, 0x6D, 0x33, 0x43,
+ 0x65, 0x72, 0x67, 0x43, 0x66, 0x66, 0x69, 0x43,
+ 0x66, 0x66, 0x6C, 0x43, 0x67, 0x61, 0x6C, 0x43,
+ 0x68, 0x50, 0x61, 0x43, 0x69, 0x69, 0x69, 0x43,
+ 0x6B, 0x48, 0x7A, 0x43, 0x6B, 0x50, 0x61, 0x43,
+ 0x6B, 0x6D, 0x32, 0x43, 0x6B, 0x6D, 0x33, 0x43,
+ // Bytes 1dc0 - 1dff
+ 0x6B, 0xCE, 0xA9, 0x43, 0x6C, 0x6F, 0x67, 0x43,
+ 0x6C, 0xC2, 0xB7, 0x43, 0x6D, 0x69, 0x6C, 0x43,
+ 0x6D, 0x6D, 0x32, 0x43, 0x6D, 0x6D, 0x33, 0x43,
+ 0x6D, 0x6F, 0x6C, 0x43, 0x72, 0x61, 0x64, 0x43,
+ 0x76, 0x69, 0x69, 0x43, 0x78, 0x69, 0x69, 0x43,
+ 0xC2, 0xB0, 0x43, 0x43, 0xC2, 0xB0, 0x46, 0x43,
+ 0xCA, 0xBC, 0x6E, 0x43, 0xCE, 0xBC, 0x41, 0x43,
+ 0xCE, 0xBC, 0x46, 0x43, 0xCE, 0xBC, 0x56, 0x43,
+ // Bytes 1e00 - 1e3f
+ 0xCE, 0xBC, 0x57, 0x43, 0xCE, 0xBC, 0x67, 0x43,
+ 0xCE, 0xBC, 0x6C, 0x43, 0xCE, 0xBC, 0x6D, 0x43,
+ 0xCE, 0xBC, 0x73, 0x44, 0x28, 0x31, 0x30, 0x29,
+ 0x44, 0x28, 0x31, 0x31, 0x29, 0x44, 0x28, 0x31,
+ 0x32, 0x29, 0x44, 0x28, 0x31, 0x33, 0x29, 0x44,
+ 0x28, 0x31, 0x34, 0x29, 0x44, 0x28, 0x31, 0x35,
+ 0x29, 0x44, 0x28, 0x31, 0x36, 0x29, 0x44, 0x28,
+ 0x31, 0x37, 0x29, 0x44, 0x28, 0x31, 0x38, 0x29,
+ // Bytes 1e40 - 1e7f
+ 0x44, 0x28, 0x31, 0x39, 0x29, 0x44, 0x28, 0x32,
+ 0x30, 0x29, 0x44, 0x30, 0xE7, 0x82, 0xB9, 0x44,
+ 0x31, 0xE2, 0x81, 0x84, 0x44, 0x31, 0xE6, 0x97,
+ 0xA5, 0x44, 0x31, 0xE6, 0x9C, 0x88, 0x44, 0x31,
+ 0xE7, 0x82, 0xB9, 0x44, 0x32, 0xE6, 0x97, 0xA5,
+ 0x44, 0x32, 0xE6, 0x9C, 0x88, 0x44, 0x32, 0xE7,
+ 0x82, 0xB9, 0x44, 0x33, 0xE6, 0x97, 0xA5, 0x44,
+ 0x33, 0xE6, 0x9C, 0x88, 0x44, 0x33, 0xE7, 0x82,
+ // Bytes 1e80 - 1ebf
+ 0xB9, 0x44, 0x34, 0xE6, 0x97, 0xA5, 0x44, 0x34,
+ 0xE6, 0x9C, 0x88, 0x44, 0x34, 0xE7, 0x82, 0xB9,
+ 0x44, 0x35, 0xE6, 0x97, 0xA5, 0x44, 0x35, 0xE6,
+ 0x9C, 0x88, 0x44, 0x35, 0xE7, 0x82, 0xB9, 0x44,
+ 0x36, 0xE6, 0x97, 0xA5, 0x44, 0x36, 0xE6, 0x9C,
+ 0x88, 0x44, 0x36, 0xE7, 0x82, 0xB9, 0x44, 0x37,
+ 0xE6, 0x97, 0xA5, 0x44, 0x37, 0xE6, 0x9C, 0x88,
+ 0x44, 0x37, 0xE7, 0x82, 0xB9, 0x44, 0x38, 0xE6,
+ // Bytes 1ec0 - 1eff
+ 0x97, 0xA5, 0x44, 0x38, 0xE6, 0x9C, 0x88, 0x44,
+ 0x38, 0xE7, 0x82, 0xB9, 0x44, 0x39, 0xE6, 0x97,
+ 0xA5, 0x44, 0x39, 0xE6, 0x9C, 0x88, 0x44, 0x39,
+ 0xE7, 0x82, 0xB9, 0x44, 0x56, 0x49, 0x49, 0x49,
+ 0x44, 0x61, 0x2E, 0x6D, 0x2E, 0x44, 0x6B, 0x63,
+ 0x61, 0x6C, 0x44, 0x70, 0x2E, 0x6D, 0x2E, 0x44,
+ 0x76, 0x69, 0x69, 0x69, 0x44, 0xD5, 0xA5, 0xD6,
+ 0x82, 0x44, 0xD5, 0xB4, 0xD5, 0xA5, 0x44, 0xD5,
+ // Bytes 1f00 - 1f3f
+ 0xB4, 0xD5, 0xAB, 0x44, 0xD5, 0xB4, 0xD5, 0xAD,
+ 0x44, 0xD5, 0xB4, 0xD5, 0xB6, 0x44, 0xD5, 0xBE,
+ 0xD5, 0xB6, 0x44, 0xD7, 0x90, 0xD7, 0x9C, 0x44,
+ 0xD8, 0xA7, 0xD9, 0xB4, 0x44, 0xD8, 0xA8, 0xD8,
+ 0xAC, 0x44, 0xD8, 0xA8, 0xD8, 0xAD, 0x44, 0xD8,
+ 0xA8, 0xD8, 0xAE, 0x44, 0xD8, 0xA8, 0xD8, 0xB1,
+ 0x44, 0xD8, 0xA8, 0xD8, 0xB2, 0x44, 0xD8, 0xA8,
+ 0xD9, 0x85, 0x44, 0xD8, 0xA8, 0xD9, 0x86, 0x44,
+ // Bytes 1f40 - 1f7f
+ 0xD8, 0xA8, 0xD9, 0x87, 0x44, 0xD8, 0xA8, 0xD9,
+ 0x89, 0x44, 0xD8, 0xA8, 0xD9, 0x8A, 0x44, 0xD8,
+ 0xAA, 0xD8, 0xAC, 0x44, 0xD8, 0xAA, 0xD8, 0xAD,
+ 0x44, 0xD8, 0xAA, 0xD8, 0xAE, 0x44, 0xD8, 0xAA,
+ 0xD8, 0xB1, 0x44, 0xD8, 0xAA, 0xD8, 0xB2, 0x44,
+ 0xD8, 0xAA, 0xD9, 0x85, 0x44, 0xD8, 0xAA, 0xD9,
+ 0x86, 0x44, 0xD8, 0xAA, 0xD9, 0x87, 0x44, 0xD8,
+ 0xAA, 0xD9, 0x89, 0x44, 0xD8, 0xAA, 0xD9, 0x8A,
+ // Bytes 1f80 - 1fbf
+ 0x44, 0xD8, 0xAB, 0xD8, 0xAC, 0x44, 0xD8, 0xAB,
+ 0xD8, 0xB1, 0x44, 0xD8, 0xAB, 0xD8, 0xB2, 0x44,
+ 0xD8, 0xAB, 0xD9, 0x85, 0x44, 0xD8, 0xAB, 0xD9,
+ 0x86, 0x44, 0xD8, 0xAB, 0xD9, 0x87, 0x44, 0xD8,
+ 0xAB, 0xD9, 0x89, 0x44, 0xD8, 0xAB, 0xD9, 0x8A,
+ 0x44, 0xD8, 0xAC, 0xD8, 0xAD, 0x44, 0xD8, 0xAC,
+ 0xD9, 0x85, 0x44, 0xD8, 0xAC, 0xD9, 0x89, 0x44,
+ 0xD8, 0xAC, 0xD9, 0x8A, 0x44, 0xD8, 0xAD, 0xD8,
+ // Bytes 1fc0 - 1fff
+ 0xAC, 0x44, 0xD8, 0xAD, 0xD9, 0x85, 0x44, 0xD8,
+ 0xAD, 0xD9, 0x89, 0x44, 0xD8, 0xAD, 0xD9, 0x8A,
+ 0x44, 0xD8, 0xAE, 0xD8, 0xAC, 0x44, 0xD8, 0xAE,
+ 0xD8, 0xAD, 0x44, 0xD8, 0xAE, 0xD9, 0x85, 0x44,
+ 0xD8, 0xAE, 0xD9, 0x89, 0x44, 0xD8, 0xAE, 0xD9,
+ 0x8A, 0x44, 0xD8, 0xB3, 0xD8, 0xAC, 0x44, 0xD8,
+ 0xB3, 0xD8, 0xAD, 0x44, 0xD8, 0xB3, 0xD8, 0xAE,
+ 0x44, 0xD8, 0xB3, 0xD8, 0xB1, 0x44, 0xD8, 0xB3,
+ // Bytes 2000 - 203f
+ 0xD9, 0x85, 0x44, 0xD8, 0xB3, 0xD9, 0x87, 0x44,
+ 0xD8, 0xB3, 0xD9, 0x89, 0x44, 0xD8, 0xB3, 0xD9,
+ 0x8A, 0x44, 0xD8, 0xB4, 0xD8, 0xAC, 0x44, 0xD8,
+ 0xB4, 0xD8, 0xAD, 0x44, 0xD8, 0xB4, 0xD8, 0xAE,
+ 0x44, 0xD8, 0xB4, 0xD8, 0xB1, 0x44, 0xD8, 0xB4,
+ 0xD9, 0x85, 0x44, 0xD8, 0xB4, 0xD9, 0x87, 0x44,
+ 0xD8, 0xB4, 0xD9, 0x89, 0x44, 0xD8, 0xB4, 0xD9,
+ 0x8A, 0x44, 0xD8, 0xB5, 0xD8, 0xAD, 0x44, 0xD8,
+ // Bytes 2040 - 207f
+ 0xB5, 0xD8, 0xAE, 0x44, 0xD8, 0xB5, 0xD8, 0xB1,
+ 0x44, 0xD8, 0xB5, 0xD9, 0x85, 0x44, 0xD8, 0xB5,
+ 0xD9, 0x89, 0x44, 0xD8, 0xB5, 0xD9, 0x8A, 0x44,
+ 0xD8, 0xB6, 0xD8, 0xAC, 0x44, 0xD8, 0xB6, 0xD8,
+ 0xAD, 0x44, 0xD8, 0xB6, 0xD8, 0xAE, 0x44, 0xD8,
+ 0xB6, 0xD8, 0xB1, 0x44, 0xD8, 0xB6, 0xD9, 0x85,
+ 0x44, 0xD8, 0xB6, 0xD9, 0x89, 0x44, 0xD8, 0xB6,
+ 0xD9, 0x8A, 0x44, 0xD8, 0xB7, 0xD8, 0xAD, 0x44,
+ // Bytes 2080 - 20bf
+ 0xD8, 0xB7, 0xD9, 0x85, 0x44, 0xD8, 0xB7, 0xD9,
+ 0x89, 0x44, 0xD8, 0xB7, 0xD9, 0x8A, 0x44, 0xD8,
+ 0xB8, 0xD9, 0x85, 0x44, 0xD8, 0xB9, 0xD8, 0xAC,
+ 0x44, 0xD8, 0xB9, 0xD9, 0x85, 0x44, 0xD8, 0xB9,
+ 0xD9, 0x89, 0x44, 0xD8, 0xB9, 0xD9, 0x8A, 0x44,
+ 0xD8, 0xBA, 0xD8, 0xAC, 0x44, 0xD8, 0xBA, 0xD9,
+ 0x85, 0x44, 0xD8, 0xBA, 0xD9, 0x89, 0x44, 0xD8,
+ 0xBA, 0xD9, 0x8A, 0x44, 0xD9, 0x81, 0xD8, 0xAC,
+ // Bytes 20c0 - 20ff
+ 0x44, 0xD9, 0x81, 0xD8, 0xAD, 0x44, 0xD9, 0x81,
+ 0xD8, 0xAE, 0x44, 0xD9, 0x81, 0xD9, 0x85, 0x44,
+ 0xD9, 0x81, 0xD9, 0x89, 0x44, 0xD9, 0x81, 0xD9,
+ 0x8A, 0x44, 0xD9, 0x82, 0xD8, 0xAD, 0x44, 0xD9,
+ 0x82, 0xD9, 0x85, 0x44, 0xD9, 0x82, 0xD9, 0x89,
+ 0x44, 0xD9, 0x82, 0xD9, 0x8A, 0x44, 0xD9, 0x83,
+ 0xD8, 0xA7, 0x44, 0xD9, 0x83, 0xD8, 0xAC, 0x44,
+ 0xD9, 0x83, 0xD8, 0xAD, 0x44, 0xD9, 0x83, 0xD8,
+ // Bytes 2100 - 213f
+ 0xAE, 0x44, 0xD9, 0x83, 0xD9, 0x84, 0x44, 0xD9,
+ 0x83, 0xD9, 0x85, 0x44, 0xD9, 0x83, 0xD9, 0x89,
+ 0x44, 0xD9, 0x83, 0xD9, 0x8A, 0x44, 0xD9, 0x84,
+ 0xD8, 0xA7, 0x44, 0xD9, 0x84, 0xD8, 0xAC, 0x44,
+ 0xD9, 0x84, 0xD8, 0xAD, 0x44, 0xD9, 0x84, 0xD8,
+ 0xAE, 0x44, 0xD9, 0x84, 0xD9, 0x85, 0x44, 0xD9,
+ 0x84, 0xD9, 0x87, 0x44, 0xD9, 0x84, 0xD9, 0x89,
+ 0x44, 0xD9, 0x84, 0xD9, 0x8A, 0x44, 0xD9, 0x85,
+ // Bytes 2140 - 217f
+ 0xD8, 0xA7, 0x44, 0xD9, 0x85, 0xD8, 0xAC, 0x44,
+ 0xD9, 0x85, 0xD8, 0xAD, 0x44, 0xD9, 0x85, 0xD8,
+ 0xAE, 0x44, 0xD9, 0x85, 0xD9, 0x85, 0x44, 0xD9,
+ 0x85, 0xD9, 0x89, 0x44, 0xD9, 0x85, 0xD9, 0x8A,
+ 0x44, 0xD9, 0x86, 0xD8, 0xAC, 0x44, 0xD9, 0x86,
+ 0xD8, 0xAD, 0x44, 0xD9, 0x86, 0xD8, 0xAE, 0x44,
+ 0xD9, 0x86, 0xD8, 0xB1, 0x44, 0xD9, 0x86, 0xD8,
+ 0xB2, 0x44, 0xD9, 0x86, 0xD9, 0x85, 0x44, 0xD9,
+ // Bytes 2180 - 21bf
+ 0x86, 0xD9, 0x86, 0x44, 0xD9, 0x86, 0xD9, 0x87,
+ 0x44, 0xD9, 0x86, 0xD9, 0x89, 0x44, 0xD9, 0x86,
+ 0xD9, 0x8A, 0x44, 0xD9, 0x87, 0xD8, 0xAC, 0x44,
+ 0xD9, 0x87, 0xD9, 0x85, 0x44, 0xD9, 0x87, 0xD9,
+ 0x89, 0x44, 0xD9, 0x87, 0xD9, 0x8A, 0x44, 0xD9,
+ 0x88, 0xD9, 0xB4, 0x44, 0xD9, 0x8A, 0xD8, 0xAC,
+ 0x44, 0xD9, 0x8A, 0xD8, 0xAD, 0x44, 0xD9, 0x8A,
+ 0xD8, 0xAE, 0x44, 0xD9, 0x8A, 0xD8, 0xB1, 0x44,
+ // Bytes 21c0 - 21ff
+ 0xD9, 0x8A, 0xD8, 0xB2, 0x44, 0xD9, 0x8A, 0xD9,
+ 0x85, 0x44, 0xD9, 0x8A, 0xD9, 0x86, 0x44, 0xD9,
+ 0x8A, 0xD9, 0x87, 0x44, 0xD9, 0x8A, 0xD9, 0x89,
+ 0x44, 0xD9, 0x8A, 0xD9, 0x8A, 0x44, 0xD9, 0x8A,
+ 0xD9, 0xB4, 0x44, 0xDB, 0x87, 0xD9, 0xB4, 0x45,
+ 0x28, 0xE1, 0x84, 0x80, 0x29, 0x45, 0x28, 0xE1,
+ 0x84, 0x82, 0x29, 0x45, 0x28, 0xE1, 0x84, 0x83,
+ 0x29, 0x45, 0x28, 0xE1, 0x84, 0x85, 0x29, 0x45,
+ // Bytes 2200 - 223f
+ 0x28, 0xE1, 0x84, 0x86, 0x29, 0x45, 0x28, 0xE1,
+ 0x84, 0x87, 0x29, 0x45, 0x28, 0xE1, 0x84, 0x89,
+ 0x29, 0x45, 0x28, 0xE1, 0x84, 0x8B, 0x29, 0x45,
+ 0x28, 0xE1, 0x84, 0x8C, 0x29, 0x45, 0x28, 0xE1,
+ 0x84, 0x8E, 0x29, 0x45, 0x28, 0xE1, 0x84, 0x8F,
+ 0x29, 0x45, 0x28, 0xE1, 0x84, 0x90, 0x29, 0x45,
+ 0x28, 0xE1, 0x84, 0x91, 0x29, 0x45, 0x28, 0xE1,
+ 0x84, 0x92, 0x29, 0x45, 0x28, 0xE4, 0xB8, 0x80,
+ // Bytes 2240 - 227f
+ 0x29, 0x45, 0x28, 0xE4, 0xB8, 0x83, 0x29, 0x45,
+ 0x28, 0xE4, 0xB8, 0x89, 0x29, 0x45, 0x28, 0xE4,
+ 0xB9, 0x9D, 0x29, 0x45, 0x28, 0xE4, 0xBA, 0x8C,
+ 0x29, 0x45, 0x28, 0xE4, 0xBA, 0x94, 0x29, 0x45,
+ 0x28, 0xE4, 0xBB, 0xA3, 0x29, 0x45, 0x28, 0xE4,
+ 0xBC, 0x81, 0x29, 0x45, 0x28, 0xE4, 0xBC, 0x91,
+ 0x29, 0x45, 0x28, 0xE5, 0x85, 0xAB, 0x29, 0x45,
+ 0x28, 0xE5, 0x85, 0xAD, 0x29, 0x45, 0x28, 0xE5,
+ // Bytes 2280 - 22bf
+ 0x8A, 0xB4, 0x29, 0x45, 0x28, 0xE5, 0x8D, 0x81,
+ 0x29, 0x45, 0x28, 0xE5, 0x8D, 0x94, 0x29, 0x45,
+ 0x28, 0xE5, 0x90, 0x8D, 0x29, 0x45, 0x28, 0xE5,
+ 0x91, 0xBC, 0x29, 0x45, 0x28, 0xE5, 0x9B, 0x9B,
+ 0x29, 0x45, 0x28, 0xE5, 0x9C, 0x9F, 0x29, 0x45,
+ 0x28, 0xE5, 0xAD, 0xA6, 0x29, 0x45, 0x28, 0xE6,
+ 0x97, 0xA5, 0x29, 0x45, 0x28, 0xE6, 0x9C, 0x88,
+ 0x29, 0x45, 0x28, 0xE6, 0x9C, 0x89, 0x29, 0x45,
+ // Bytes 22c0 - 22ff
+ 0x28, 0xE6, 0x9C, 0xA8, 0x29, 0x45, 0x28, 0xE6,
+ 0xA0, 0xAA, 0x29, 0x45, 0x28, 0xE6, 0xB0, 0xB4,
+ 0x29, 0x45, 0x28, 0xE7, 0x81, 0xAB, 0x29, 0x45,
+ 0x28, 0xE7, 0x89, 0xB9, 0x29, 0x45, 0x28, 0xE7,
+ 0x9B, 0xA3, 0x29, 0x45, 0x28, 0xE7, 0xA4, 0xBE,
+ 0x29, 0x45, 0x28, 0xE7, 0xA5, 0x9D, 0x29, 0x45,
+ 0x28, 0xE7, 0xA5, 0xAD, 0x29, 0x45, 0x28, 0xE8,
+ 0x87, 0xAA, 0x29, 0x45, 0x28, 0xE8, 0x87, 0xB3,
+ // Bytes 2300 - 233f
+ 0x29, 0x45, 0x28, 0xE8, 0xB2, 0xA1, 0x29, 0x45,
+ 0x28, 0xE8, 0xB3, 0x87, 0x29, 0x45, 0x28, 0xE9,
+ 0x87, 0x91, 0x29, 0x45, 0x30, 0xE2, 0x81, 0x84,
+ 0x33, 0x45, 0x31, 0x30, 0xE6, 0x97, 0xA5, 0x45,
+ 0x31, 0x30, 0xE6, 0x9C, 0x88, 0x45, 0x31, 0x30,
+ 0xE7, 0x82, 0xB9, 0x45, 0x31, 0x31, 0xE6, 0x97,
+ 0xA5, 0x45, 0x31, 0x31, 0xE6, 0x9C, 0x88, 0x45,
+ 0x31, 0x31, 0xE7, 0x82, 0xB9, 0x45, 0x31, 0x32,
+ // Bytes 2340 - 237f
+ 0xE6, 0x97, 0xA5, 0x45, 0x31, 0x32, 0xE6, 0x9C,
+ 0x88, 0x45, 0x31, 0x32, 0xE7, 0x82, 0xB9, 0x45,
+ 0x31, 0x33, 0xE6, 0x97, 0xA5, 0x45, 0x31, 0x33,
+ 0xE7, 0x82, 0xB9, 0x45, 0x31, 0x34, 0xE6, 0x97,
+ 0xA5, 0x45, 0x31, 0x34, 0xE7, 0x82, 0xB9, 0x45,
+ 0x31, 0x35, 0xE6, 0x97, 0xA5, 0x45, 0x31, 0x35,
+ 0xE7, 0x82, 0xB9, 0x45, 0x31, 0x36, 0xE6, 0x97,
+ 0xA5, 0x45, 0x31, 0x36, 0xE7, 0x82, 0xB9, 0x45,
+ // Bytes 2380 - 23bf
+ 0x31, 0x37, 0xE6, 0x97, 0xA5, 0x45, 0x31, 0x37,
+ 0xE7, 0x82, 0xB9, 0x45, 0x31, 0x38, 0xE6, 0x97,
+ 0xA5, 0x45, 0x31, 0x38, 0xE7, 0x82, 0xB9, 0x45,
+ 0x31, 0x39, 0xE6, 0x97, 0xA5, 0x45, 0x31, 0x39,
+ 0xE7, 0x82, 0xB9, 0x45, 0x31, 0xE2, 0x81, 0x84,
+ 0x32, 0x45, 0x31, 0xE2, 0x81, 0x84, 0x33, 0x45,
+ 0x31, 0xE2, 0x81, 0x84, 0x34, 0x45, 0x31, 0xE2,
+ 0x81, 0x84, 0x35, 0x45, 0x31, 0xE2, 0x81, 0x84,
+ // Bytes 23c0 - 23ff
+ 0x36, 0x45, 0x31, 0xE2, 0x81, 0x84, 0x37, 0x45,
+ 0x31, 0xE2, 0x81, 0x84, 0x38, 0x45, 0x31, 0xE2,
+ 0x81, 0x84, 0x39, 0x45, 0x32, 0x30, 0xE6, 0x97,
+ 0xA5, 0x45, 0x32, 0x30, 0xE7, 0x82, 0xB9, 0x45,
+ 0x32, 0x31, 0xE6, 0x97, 0xA5, 0x45, 0x32, 0x31,
+ 0xE7, 0x82, 0xB9, 0x45, 0x32, 0x32, 0xE6, 0x97,
+ 0xA5, 0x45, 0x32, 0x32, 0xE7, 0x82, 0xB9, 0x45,
+ 0x32, 0x33, 0xE6, 0x97, 0xA5, 0x45, 0x32, 0x33,
+ // Bytes 2400 - 243f
+ 0xE7, 0x82, 0xB9, 0x45, 0x32, 0x34, 0xE6, 0x97,
+ 0xA5, 0x45, 0x32, 0x34, 0xE7, 0x82, 0xB9, 0x45,
+ 0x32, 0x35, 0xE6, 0x97, 0xA5, 0x45, 0x32, 0x36,
+ 0xE6, 0x97, 0xA5, 0x45, 0x32, 0x37, 0xE6, 0x97,
+ 0xA5, 0x45, 0x32, 0x38, 0xE6, 0x97, 0xA5, 0x45,
+ 0x32, 0x39, 0xE6, 0x97, 0xA5, 0x45, 0x32, 0xE2,
+ 0x81, 0x84, 0x33, 0x45, 0x32, 0xE2, 0x81, 0x84,
+ 0x35, 0x45, 0x33, 0x30, 0xE6, 0x97, 0xA5, 0x45,
+ // Bytes 2440 - 247f
+ 0x33, 0x31, 0xE6, 0x97, 0xA5, 0x45, 0x33, 0xE2,
+ 0x81, 0x84, 0x34, 0x45, 0x33, 0xE2, 0x81, 0x84,
+ 0x35, 0x45, 0x33, 0xE2, 0x81, 0x84, 0x38, 0x45,
+ 0x34, 0xE2, 0x81, 0x84, 0x35, 0x45, 0x35, 0xE2,
+ 0x81, 0x84, 0x36, 0x45, 0x35, 0xE2, 0x81, 0x84,
+ 0x38, 0x45, 0x37, 0xE2, 0x81, 0x84, 0x38, 0x45,
+ 0x41, 0xE2, 0x88, 0x95, 0x6D, 0x45, 0x56, 0xE2,
+ 0x88, 0x95, 0x6D, 0x45, 0x6D, 0xE2, 0x88, 0x95,
+ // Bytes 2480 - 24bf
+ 0x73, 0x46, 0x31, 0xE2, 0x81, 0x84, 0x31, 0x30,
+ 0x46, 0x43, 0xE2, 0x88, 0x95, 0x6B, 0x67, 0x46,
+ 0x6D, 0xE2, 0x88, 0x95, 0x73, 0x32, 0x46, 0xD8,
+ 0xA8, 0xD8, 0xAD, 0xD9, 0x8A, 0x46, 0xD8, 0xA8,
+ 0xD8, 0xAE, 0xD9, 0x8A, 0x46, 0xD8, 0xAA, 0xD8,
+ 0xAC, 0xD9, 0x85, 0x46, 0xD8, 0xAA, 0xD8, 0xAC,
+ 0xD9, 0x89, 0x46, 0xD8, 0xAA, 0xD8, 0xAC, 0xD9,
+ 0x8A, 0x46, 0xD8, 0xAA, 0xD8, 0xAD, 0xD8, 0xAC,
+ // Bytes 24c0 - 24ff
+ 0x46, 0xD8, 0xAA, 0xD8, 0xAD, 0xD9, 0x85, 0x46,
+ 0xD8, 0xAA, 0xD8, 0xAE, 0xD9, 0x85, 0x46, 0xD8,
+ 0xAA, 0xD8, 0xAE, 0xD9, 0x89, 0x46, 0xD8, 0xAA,
+ 0xD8, 0xAE, 0xD9, 0x8A, 0x46, 0xD8, 0xAA, 0xD9,
+ 0x85, 0xD8, 0xAC, 0x46, 0xD8, 0xAA, 0xD9, 0x85,
+ 0xD8, 0xAD, 0x46, 0xD8, 0xAA, 0xD9, 0x85, 0xD8,
+ 0xAE, 0x46, 0xD8, 0xAA, 0xD9, 0x85, 0xD9, 0x89,
+ 0x46, 0xD8, 0xAA, 0xD9, 0x85, 0xD9, 0x8A, 0x46,
+ // Bytes 2500 - 253f
+ 0xD8, 0xAC, 0xD8, 0xAD, 0xD9, 0x89, 0x46, 0xD8,
+ 0xAC, 0xD8, 0xAD, 0xD9, 0x8A, 0x46, 0xD8, 0xAC,
+ 0xD9, 0x85, 0xD8, 0xAD, 0x46, 0xD8, 0xAC, 0xD9,
+ 0x85, 0xD9, 0x89, 0x46, 0xD8, 0xAC, 0xD9, 0x85,
+ 0xD9, 0x8A, 0x46, 0xD8, 0xAD, 0xD8, 0xAC, 0xD9,
+ 0x8A, 0x46, 0xD8, 0xAD, 0xD9, 0x85, 0xD9, 0x89,
+ 0x46, 0xD8, 0xAD, 0xD9, 0x85, 0xD9, 0x8A, 0x46,
+ 0xD8, 0xB3, 0xD8, 0xAC, 0xD8, 0xAD, 0x46, 0xD8,
+ // Bytes 2540 - 257f
+ 0xB3, 0xD8, 0xAC, 0xD9, 0x89, 0x46, 0xD8, 0xB3,
+ 0xD8, 0xAD, 0xD8, 0xAC, 0x46, 0xD8, 0xB3, 0xD8,
+ 0xAE, 0xD9, 0x89, 0x46, 0xD8, 0xB3, 0xD8, 0xAE,
+ 0xD9, 0x8A, 0x46, 0xD8, 0xB3, 0xD9, 0x85, 0xD8,
+ 0xAC, 0x46, 0xD8, 0xB3, 0xD9, 0x85, 0xD8, 0xAD,
+ 0x46, 0xD8, 0xB3, 0xD9, 0x85, 0xD9, 0x85, 0x46,
+ 0xD8, 0xB4, 0xD8, 0xAC, 0xD9, 0x8A, 0x46, 0xD8,
+ 0xB4, 0xD8, 0xAD, 0xD9, 0x85, 0x46, 0xD8, 0xB4,
+ // Bytes 2580 - 25bf
+ 0xD8, 0xAD, 0xD9, 0x8A, 0x46, 0xD8, 0xB4, 0xD9,
+ 0x85, 0xD8, 0xAE, 0x46, 0xD8, 0xB4, 0xD9, 0x85,
+ 0xD9, 0x85, 0x46, 0xD8, 0xB5, 0xD8, 0xAD, 0xD8,
+ 0xAD, 0x46, 0xD8, 0xB5, 0xD8, 0xAD, 0xD9, 0x8A,
+ 0x46, 0xD8, 0xB5, 0xD9, 0x84, 0xD9, 0x89, 0x46,
+ 0xD8, 0xB5, 0xD9, 0x84, 0xDB, 0x92, 0x46, 0xD8,
+ 0xB5, 0xD9, 0x85, 0xD9, 0x85, 0x46, 0xD8, 0xB6,
+ 0xD8, 0xAD, 0xD9, 0x89, 0x46, 0xD8, 0xB6, 0xD8,
+ // Bytes 25c0 - 25ff
+ 0xAD, 0xD9, 0x8A, 0x46, 0xD8, 0xB6, 0xD8, 0xAE,
+ 0xD9, 0x85, 0x46, 0xD8, 0xB7, 0xD9, 0x85, 0xD8,
+ 0xAD, 0x46, 0xD8, 0xB7, 0xD9, 0x85, 0xD9, 0x85,
+ 0x46, 0xD8, 0xB7, 0xD9, 0x85, 0xD9, 0x8A, 0x46,
+ 0xD8, 0xB9, 0xD8, 0xAC, 0xD9, 0x85, 0x46, 0xD8,
+ 0xB9, 0xD9, 0x85, 0xD9, 0x85, 0x46, 0xD8, 0xB9,
+ 0xD9, 0x85, 0xD9, 0x89, 0x46, 0xD8, 0xB9, 0xD9,
+ 0x85, 0xD9, 0x8A, 0x46, 0xD8, 0xBA, 0xD9, 0x85,
+ // Bytes 2600 - 263f
+ 0xD9, 0x85, 0x46, 0xD8, 0xBA, 0xD9, 0x85, 0xD9,
+ 0x89, 0x46, 0xD8, 0xBA, 0xD9, 0x85, 0xD9, 0x8A,
+ 0x46, 0xD9, 0x81, 0xD8, 0xAE, 0xD9, 0x85, 0x46,
+ 0xD9, 0x81, 0xD9, 0x85, 0xD9, 0x8A, 0x46, 0xD9,
+ 0x82, 0xD9, 0x84, 0xDB, 0x92, 0x46, 0xD9, 0x82,
+ 0xD9, 0x85, 0xD8, 0xAD, 0x46, 0xD9, 0x82, 0xD9,
+ 0x85, 0xD9, 0x85, 0x46, 0xD9, 0x82, 0xD9, 0x85,
+ 0xD9, 0x8A, 0x46, 0xD9, 0x83, 0xD9, 0x85, 0xD9,
+ // Bytes 2640 - 267f
+ 0x85, 0x46, 0xD9, 0x83, 0xD9, 0x85, 0xD9, 0x8A,
+ 0x46, 0xD9, 0x84, 0xD8, 0xAC, 0xD8, 0xAC, 0x46,
+ 0xD9, 0x84, 0xD8, 0xAC, 0xD9, 0x85, 0x46, 0xD9,
+ 0x84, 0xD8, 0xAC, 0xD9, 0x8A, 0x46, 0xD9, 0x84,
+ 0xD8, 0xAD, 0xD9, 0x85, 0x46, 0xD9, 0x84, 0xD8,
+ 0xAD, 0xD9, 0x89, 0x46, 0xD9, 0x84, 0xD8, 0xAD,
+ 0xD9, 0x8A, 0x46, 0xD9, 0x84, 0xD8, 0xAE, 0xD9,
+ 0x85, 0x46, 0xD9, 0x84, 0xD9, 0x85, 0xD8, 0xAD,
+ // Bytes 2680 - 26bf
+ 0x46, 0xD9, 0x84, 0xD9, 0x85, 0xD9, 0x8A, 0x46,
+ 0xD9, 0x85, 0xD8, 0xAC, 0xD8, 0xAD, 0x46, 0xD9,
+ 0x85, 0xD8, 0xAC, 0xD8, 0xAE, 0x46, 0xD9, 0x85,
+ 0xD8, 0xAC, 0xD9, 0x85, 0x46, 0xD9, 0x85, 0xD8,
+ 0xAC, 0xD9, 0x8A, 0x46, 0xD9, 0x85, 0xD8, 0xAD,
+ 0xD8, 0xAC, 0x46, 0xD9, 0x85, 0xD8, 0xAD, 0xD9,
+ 0x85, 0x46, 0xD9, 0x85, 0xD8, 0xAD, 0xD9, 0x8A,
+ 0x46, 0xD9, 0x85, 0xD8, 0xAE, 0xD8, 0xAC, 0x46,
+ // Bytes 26c0 - 26ff
+ 0xD9, 0x85, 0xD8, 0xAE, 0xD9, 0x85, 0x46, 0xD9,
+ 0x85, 0xD8, 0xAE, 0xD9, 0x8A, 0x46, 0xD9, 0x85,
+ 0xD9, 0x85, 0xD9, 0x8A, 0x46, 0xD9, 0x86, 0xD8,
+ 0xAC, 0xD8, 0xAD, 0x46, 0xD9, 0x86, 0xD8, 0xAC,
+ 0xD9, 0x85, 0x46, 0xD9, 0x86, 0xD8, 0xAC, 0xD9,
+ 0x89, 0x46, 0xD9, 0x86, 0xD8, 0xAC, 0xD9, 0x8A,
+ 0x46, 0xD9, 0x86, 0xD8, 0xAD, 0xD9, 0x85, 0x46,
+ 0xD9, 0x86, 0xD8, 0xAD, 0xD9, 0x89, 0x46, 0xD9,
+ // Bytes 2700 - 273f
+ 0x86, 0xD8, 0xAD, 0xD9, 0x8A, 0x46, 0xD9, 0x86,
+ 0xD9, 0x85, 0xD9, 0x89, 0x46, 0xD9, 0x86, 0xD9,
+ 0x85, 0xD9, 0x8A, 0x46, 0xD9, 0x87, 0xD9, 0x85,
+ 0xD8, 0xAC, 0x46, 0xD9, 0x87, 0xD9, 0x85, 0xD9,
+ 0x85, 0x46, 0xD9, 0x8A, 0xD8, 0xAC, 0xD9, 0x8A,
+ 0x46, 0xD9, 0x8A, 0xD8, 0xAD, 0xD9, 0x8A, 0x46,
+ 0xD9, 0x8A, 0xD9, 0x85, 0xD9, 0x85, 0x46, 0xD9,
+ 0x8A, 0xD9, 0x85, 0xD9, 0x8A, 0x46, 0xD9, 0x8A,
+ // Bytes 2740 - 277f
+ 0xD9, 0x94, 0xD8, 0xA7, 0x46, 0xD9, 0x8A, 0xD9,
+ 0x94, 0xD8, 0xAC, 0x46, 0xD9, 0x8A, 0xD9, 0x94,
+ 0xD8, 0xAD, 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD8,
+ 0xAE, 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD8, 0xB1,
+ 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD8, 0xB2, 0x46,
+ 0xD9, 0x8A, 0xD9, 0x94, 0xD9, 0x85, 0x46, 0xD9,
+ 0x8A, 0xD9, 0x94, 0xD9, 0x86, 0x46, 0xD9, 0x8A,
+ 0xD9, 0x94, 0xD9, 0x87, 0x46, 0xD9, 0x8A, 0xD9,
+ // Bytes 2780 - 27bf
+ 0x94, 0xD9, 0x88, 0x46, 0xD9, 0x8A, 0xD9, 0x94,
+ 0xD9, 0x89, 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD9,
+ 0x8A, 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xDB, 0x86,
+ 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xDB, 0x87, 0x46,
+ 0xD9, 0x8A, 0xD9, 0x94, 0xDB, 0x88, 0x46, 0xD9,
+ 0x8A, 0xD9, 0x94, 0xDB, 0x90, 0x46, 0xD9, 0x8A,
+ 0xD9, 0x94, 0xDB, 0x95, 0x46, 0xE0, 0xB9, 0x8D,
+ 0xE0, 0xB8, 0xB2, 0x46, 0xE0, 0xBA, 0xAB, 0xE0,
+ // Bytes 27c0 - 27ff
+ 0xBA, 0x99, 0x46, 0xE0, 0xBA, 0xAB, 0xE0, 0xBA,
+ 0xA1, 0x46, 0xE0, 0xBB, 0x8D, 0xE0, 0xBA, 0xB2,
+ 0x46, 0xE0, 0xBD, 0x80, 0xE0, 0xBE, 0xB5, 0x46,
+ 0xE0, 0xBD, 0x82, 0xE0, 0xBE, 0xB7, 0x46, 0xE0,
+ 0xBD, 0x8C, 0xE0, 0xBE, 0xB7, 0x46, 0xE0, 0xBD,
+ 0x91, 0xE0, 0xBE, 0xB7, 0x46, 0xE0, 0xBD, 0x96,
+ 0xE0, 0xBE, 0xB7, 0x46, 0xE0, 0xBD, 0x9B, 0xE0,
+ 0xBE, 0xB7, 0x46, 0xE0, 0xBE, 0x90, 0xE0, 0xBE,
+ // Bytes 2800 - 283f
+ 0xB5, 0x46, 0xE0, 0xBE, 0x92, 0xE0, 0xBE, 0xB7,
+ 0x46, 0xE0, 0xBE, 0x9C, 0xE0, 0xBE, 0xB7, 0x46,
+ 0xE0, 0xBE, 0xA1, 0xE0, 0xBE, 0xB7, 0x46, 0xE0,
+ 0xBE, 0xA6, 0xE0, 0xBE, 0xB7, 0x46, 0xE0, 0xBE,
+ 0xAB, 0xE0, 0xBE, 0xB7, 0x46, 0xE1, 0x84, 0x80,
+ 0xE1, 0x85, 0xA1, 0x46, 0xE1, 0x84, 0x82, 0xE1,
+ 0x85, 0xA1, 0x46, 0xE1, 0x84, 0x83, 0xE1, 0x85,
+ 0xA1, 0x46, 0xE1, 0x84, 0x85, 0xE1, 0x85, 0xA1,
+ // Bytes 2840 - 287f
+ 0x46, 0xE1, 0x84, 0x86, 0xE1, 0x85, 0xA1, 0x46,
+ 0xE1, 0x84, 0x87, 0xE1, 0x85, 0xA1, 0x46, 0xE1,
+ 0x84, 0x89, 0xE1, 0x85, 0xA1, 0x46, 0xE1, 0x84,
+ 0x8B, 0xE1, 0x85, 0xA1, 0x46, 0xE1, 0x84, 0x8B,
+ 0xE1, 0x85, 0xAE, 0x46, 0xE1, 0x84, 0x8C, 0xE1,
+ 0x85, 0xA1, 0x46, 0xE1, 0x84, 0x8E, 0xE1, 0x85,
+ 0xA1, 0x46, 0xE1, 0x84, 0x8F, 0xE1, 0x85, 0xA1,
+ 0x46, 0xE1, 0x84, 0x90, 0xE1, 0x85, 0xA1, 0x46,
+ // Bytes 2880 - 28bf
+ 0xE1, 0x84, 0x91, 0xE1, 0x85, 0xA1, 0x46, 0xE1,
+ 0x84, 0x92, 0xE1, 0x85, 0xA1, 0x46, 0xE2, 0x80,
+ 0xB2, 0xE2, 0x80, 0xB2, 0x46, 0xE2, 0x80, 0xB5,
+ 0xE2, 0x80, 0xB5, 0x46, 0xE2, 0x88, 0xAB, 0xE2,
+ 0x88, 0xAB, 0x46, 0xE2, 0x88, 0xAE, 0xE2, 0x88,
+ 0xAE, 0x46, 0xE3, 0x81, 0xBB, 0xE3, 0x81, 0x8B,
+ 0x46, 0xE3, 0x82, 0x88, 0xE3, 0x82, 0x8A, 0x46,
+ 0xE3, 0x82, 0xAD, 0xE3, 0x83, 0xAD, 0x46, 0xE3,
+ // Bytes 28c0 - 28ff
+ 0x82, 0xB3, 0xE3, 0x82, 0xB3, 0x46, 0xE3, 0x82,
+ 0xB3, 0xE3, 0x83, 0x88, 0x46, 0xE3, 0x83, 0x88,
+ 0xE3, 0x83, 0xB3, 0x46, 0xE3, 0x83, 0x8A, 0xE3,
+ 0x83, 0x8E, 0x46, 0xE3, 0x83, 0x9B, 0xE3, 0x83,
+ 0xB3, 0x46, 0xE3, 0x83, 0x9F, 0xE3, 0x83, 0xAA,
+ 0x46, 0xE3, 0x83, 0xAA, 0xE3, 0x83, 0xA9, 0x46,
+ 0xE3, 0x83, 0xAC, 0xE3, 0x83, 0xA0, 0x46, 0xE5,
+ 0xA4, 0xA7, 0xE6, 0xAD, 0xA3, 0x46, 0xE5, 0xB9,
+ // Bytes 2900 - 293f
+ 0xB3, 0xE6, 0x88, 0x90, 0x46, 0xE6, 0x98, 0x8E,
+ 0xE6, 0xB2, 0xBB, 0x46, 0xE6, 0x98, 0xAD, 0xE5,
+ 0x92, 0x8C, 0x47, 0x72, 0x61, 0x64, 0xE2, 0x88,
+ 0x95, 0x73, 0x47, 0xE3, 0x80, 0x94, 0x53, 0xE3,
+ 0x80, 0x95, 0x48, 0x28, 0xE1, 0x84, 0x80, 0xE1,
+ 0x85, 0xA1, 0x29, 0x48, 0x28, 0xE1, 0x84, 0x82,
+ 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x28, 0xE1, 0x84,
+ 0x83, 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x28, 0xE1,
+ // Bytes 2940 - 297f
+ 0x84, 0x85, 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x28,
+ 0xE1, 0x84, 0x86, 0xE1, 0x85, 0xA1, 0x29, 0x48,
+ 0x28, 0xE1, 0x84, 0x87, 0xE1, 0x85, 0xA1, 0x29,
+ 0x48, 0x28, 0xE1, 0x84, 0x89, 0xE1, 0x85, 0xA1,
+ 0x29, 0x48, 0x28, 0xE1, 0x84, 0x8B, 0xE1, 0x85,
+ 0xA1, 0x29, 0x48, 0x28, 0xE1, 0x84, 0x8C, 0xE1,
+ 0x85, 0xA1, 0x29, 0x48, 0x28, 0xE1, 0x84, 0x8C,
+ 0xE1, 0x85, 0xAE, 0x29, 0x48, 0x28, 0xE1, 0x84,
+ // Bytes 2980 - 29bf
+ 0x8E, 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x28, 0xE1,
+ 0x84, 0x8F, 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x28,
+ 0xE1, 0x84, 0x90, 0xE1, 0x85, 0xA1, 0x29, 0x48,
+ 0x28, 0xE1, 0x84, 0x91, 0xE1, 0x85, 0xA1, 0x29,
+ 0x48, 0x28, 0xE1, 0x84, 0x92, 0xE1, 0x85, 0xA1,
+ 0x29, 0x48, 0x72, 0x61, 0x64, 0xE2, 0x88, 0x95,
+ 0x73, 0x32, 0x48, 0xD8, 0xA7, 0xD9, 0x83, 0xD8,
+ 0xA8, 0xD8, 0xB1, 0x48, 0xD8, 0xA7, 0xD9, 0x84,
+ // Bytes 29c0 - 29ff
+ 0xD9, 0x84, 0xD9, 0x87, 0x48, 0xD8, 0xB1, 0xD8,
+ 0xB3, 0xD9, 0x88, 0xD9, 0x84, 0x48, 0xD8, 0xB1,
+ 0xDB, 0x8C, 0xD8, 0xA7, 0xD9, 0x84, 0x48, 0xD8,
+ 0xB5, 0xD9, 0x84, 0xD8, 0xB9, 0xD9, 0x85, 0x48,
+ 0xD8, 0xB9, 0xD9, 0x84, 0xD9, 0x8A, 0xD9, 0x87,
+ 0x48, 0xD9, 0x85, 0xD8, 0xAD, 0xD9, 0x85, 0xD8,
+ 0xAF, 0x48, 0xD9, 0x88, 0xD8, 0xB3, 0xD9, 0x84,
+ 0xD9, 0x85, 0x49, 0xE2, 0x80, 0xB2, 0xE2, 0x80,
+ // Bytes 2a00 - 2a3f
+ 0xB2, 0xE2, 0x80, 0xB2, 0x49, 0xE2, 0x80, 0xB5,
+ 0xE2, 0x80, 0xB5, 0xE2, 0x80, 0xB5, 0x49, 0xE2,
+ 0x88, 0xAB, 0xE2, 0x88, 0xAB, 0xE2, 0x88, 0xAB,
+ 0x49, 0xE2, 0x88, 0xAE, 0xE2, 0x88, 0xAE, 0xE2,
+ 0x88, 0xAE, 0x49, 0xE3, 0x80, 0x94, 0xE4, 0xB8,
+ 0x89, 0xE3, 0x80, 0x95, 0x49, 0xE3, 0x80, 0x94,
+ 0xE4, 0xBA, 0x8C, 0xE3, 0x80, 0x95, 0x49, 0xE3,
+ 0x80, 0x94, 0xE5, 0x8B, 0x9D, 0xE3, 0x80, 0x95,
+ // Bytes 2a40 - 2a7f
+ 0x49, 0xE3, 0x80, 0x94, 0xE5, 0xAE, 0x89, 0xE3,
+ 0x80, 0x95, 0x49, 0xE3, 0x80, 0x94, 0xE6, 0x89,
+ 0x93, 0xE3, 0x80, 0x95, 0x49, 0xE3, 0x80, 0x94,
+ 0xE6, 0x95, 0x97, 0xE3, 0x80, 0x95, 0x49, 0xE3,
+ 0x80, 0x94, 0xE6, 0x9C, 0xAC, 0xE3, 0x80, 0x95,
+ 0x49, 0xE3, 0x80, 0x94, 0xE7, 0x82, 0xB9, 0xE3,
+ 0x80, 0x95, 0x49, 0xE3, 0x80, 0x94, 0xE7, 0x9B,
+ 0x97, 0xE3, 0x80, 0x95, 0x49, 0xE3, 0x82, 0xA2,
+ // Bytes 2a80 - 2abf
+ 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0xAB, 0x49, 0xE3,
+ 0x82, 0xA4, 0xE3, 0x83, 0xB3, 0xE3, 0x83, 0x81,
+ 0x49, 0xE3, 0x82, 0xA6, 0xE3, 0x82, 0xA9, 0xE3,
+ 0x83, 0xB3, 0x49, 0xE3, 0x82, 0xAA, 0xE3, 0x83,
+ 0xB3, 0xE3, 0x82, 0xB9, 0x49, 0xE3, 0x82, 0xAA,
+ 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0xA0, 0x49, 0xE3,
+ 0x82, 0xAB, 0xE3, 0x82, 0xA4, 0xE3, 0x83, 0xAA,
+ 0x49, 0xE3, 0x82, 0xB1, 0xE3, 0x83, 0xBC, 0xE3,
+ // Bytes 2ac0 - 2aff
+ 0x82, 0xB9, 0x49, 0xE3, 0x82, 0xB3, 0xE3, 0x83,
+ 0xAB, 0xE3, 0x83, 0x8A, 0x49, 0xE3, 0x82, 0xBB,
+ 0xE3, 0x83, 0xB3, 0xE3, 0x83, 0x81, 0x49, 0xE3,
+ 0x82, 0xBB, 0xE3, 0x83, 0xB3, 0xE3, 0x83, 0x88,
+ 0x49, 0xE3, 0x83, 0x86, 0xE3, 0x82, 0x99, 0xE3,
+ 0x82, 0xB7, 0x49, 0xE3, 0x83, 0x88, 0xE3, 0x82,
+ 0x99, 0xE3, 0x83, 0xAB, 0x49, 0xE3, 0x83, 0x8E,
+ 0xE3, 0x83, 0x83, 0xE3, 0x83, 0x88, 0x49, 0xE3,
+ // Bytes 2b00 - 2b3f
+ 0x83, 0x8F, 0xE3, 0x82, 0xA4, 0xE3, 0x83, 0x84,
+ 0x49, 0xE3, 0x83, 0x92, 0xE3, 0x82, 0x99, 0xE3,
+ 0x83, 0xAB, 0x49, 0xE3, 0x83, 0x92, 0xE3, 0x82,
+ 0x9A, 0xE3, 0x82, 0xB3, 0x49, 0xE3, 0x83, 0x95,
+ 0xE3, 0x83, 0xA9, 0xE3, 0x83, 0xB3, 0x49, 0xE3,
+ 0x83, 0x98, 0xE3, 0x82, 0x9A, 0xE3, 0x82, 0xBD,
+ 0x49, 0xE3, 0x83, 0x98, 0xE3, 0x83, 0xAB, 0xE3,
+ 0x83, 0x84, 0x49, 0xE3, 0x83, 0x9B, 0xE3, 0x83,
+ // Bytes 2b40 - 2b7f
+ 0xBC, 0xE3, 0x83, 0xAB, 0x49, 0xE3, 0x83, 0x9B,
+ 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0xB3, 0x49, 0xE3,
+ 0x83, 0x9E, 0xE3, 0x82, 0xA4, 0xE3, 0x83, 0xAB,
+ 0x49, 0xE3, 0x83, 0x9E, 0xE3, 0x83, 0x83, 0xE3,
+ 0x83, 0x8F, 0x49, 0xE3, 0x83, 0x9E, 0xE3, 0x83,
+ 0xAB, 0xE3, 0x82, 0xAF, 0x49, 0xE3, 0x83, 0xA4,
+ 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0xAB, 0x49, 0xE3,
+ 0x83, 0xA6, 0xE3, 0x82, 0xA2, 0xE3, 0x83, 0xB3,
+ // Bytes 2b80 - 2bbf
+ 0x49, 0xE3, 0x83, 0xAF, 0xE3, 0x83, 0x83, 0xE3,
+ 0x83, 0x88, 0x4C, 0xE1, 0x84, 0x8C, 0xE1, 0x85,
+ 0xAE, 0xE1, 0x84, 0x8B, 0xE1, 0x85, 0xB4, 0x4C,
+ 0xE2, 0x80, 0xB2, 0xE2, 0x80, 0xB2, 0xE2, 0x80,
+ 0xB2, 0xE2, 0x80, 0xB2, 0x4C, 0xE2, 0x88, 0xAB,
+ 0xE2, 0x88, 0xAB, 0xE2, 0x88, 0xAB, 0xE2, 0x88,
+ 0xAB, 0x4C, 0xE3, 0x82, 0xA2, 0xE3, 0x83, 0xAB,
+ 0xE3, 0x83, 0x95, 0xE3, 0x82, 0xA1, 0x4C, 0xE3,
+ // Bytes 2bc0 - 2bff
+ 0x82, 0xA8, 0xE3, 0x83, 0xBC, 0xE3, 0x82, 0xAB,
+ 0xE3, 0x83, 0xBC, 0x4C, 0xE3, 0x82, 0xAB, 0xE3,
+ 0x82, 0x99, 0xE3, 0x83, 0xAD, 0xE3, 0x83, 0xB3,
+ 0x4C, 0xE3, 0x82, 0xAB, 0xE3, 0x82, 0x99, 0xE3,
+ 0x83, 0xB3, 0xE3, 0x83, 0x9E, 0x4C, 0xE3, 0x82,
+ 0xAB, 0xE3, 0x83, 0xA9, 0xE3, 0x83, 0x83, 0xE3,
+ 0x83, 0x88, 0x4C, 0xE3, 0x82, 0xAB, 0xE3, 0x83,
+ 0xAD, 0xE3, 0x83, 0xAA, 0xE3, 0x83, 0xBC, 0x4C,
+ // Bytes 2c00 - 2c3f
+ 0xE3, 0x82, 0xAD, 0xE3, 0x82, 0x99, 0xE3, 0x83,
+ 0x8B, 0xE3, 0x83, 0xBC, 0x4C, 0xE3, 0x82, 0xAD,
+ 0xE3, 0x83, 0xA5, 0xE3, 0x83, 0xAA, 0xE3, 0x83,
+ 0xBC, 0x4C, 0xE3, 0x82, 0xAF, 0xE3, 0x82, 0x99,
+ 0xE3, 0x83, 0xA9, 0xE3, 0x83, 0xA0, 0x4C, 0xE3,
+ 0x82, 0xAF, 0xE3, 0x83, 0xAD, 0xE3, 0x83, 0xBC,
+ 0xE3, 0x83, 0x8D, 0x4C, 0xE3, 0x82, 0xB5, 0xE3,
+ 0x82, 0xA4, 0xE3, 0x82, 0xAF, 0xE3, 0x83, 0xAB,
+ // Bytes 2c40 - 2c7f
+ 0x4C, 0xE3, 0x82, 0xBF, 0xE3, 0x82, 0x99, 0xE3,
+ 0x83, 0xBC, 0xE3, 0x82, 0xB9, 0x4C, 0xE3, 0x83,
+ 0x8F, 0xE3, 0x82, 0x9A, 0xE3, 0x83, 0xBC, 0xE3,
+ 0x83, 0x84, 0x4C, 0xE3, 0x83, 0x92, 0xE3, 0x82,
+ 0x9A, 0xE3, 0x82, 0xAF, 0xE3, 0x83, 0xAB, 0x4C,
+ 0xE3, 0x83, 0x95, 0xE3, 0x82, 0xA3, 0xE3, 0x83,
+ 0xBC, 0xE3, 0x83, 0x88, 0x4C, 0xE3, 0x83, 0x98,
+ 0xE3, 0x82, 0x99, 0xE3, 0x83, 0xBC, 0xE3, 0x82,
+ // Bytes 2c80 - 2cbf
+ 0xBF, 0x4C, 0xE3, 0x83, 0x98, 0xE3, 0x82, 0x9A,
+ 0xE3, 0x83, 0x8B, 0xE3, 0x83, 0x92, 0x4C, 0xE3,
+ 0x83, 0x98, 0xE3, 0x82, 0x9A, 0xE3, 0x83, 0xB3,
+ 0xE3, 0x82, 0xB9, 0x4C, 0xE3, 0x83, 0x9B, 0xE3,
+ 0x82, 0x99, 0xE3, 0x83, 0xAB, 0xE3, 0x83, 0x88,
+ 0x4C, 0xE3, 0x83, 0x9E, 0xE3, 0x82, 0xA4, 0xE3,
+ 0x82, 0xAF, 0xE3, 0x83, 0xAD, 0x4C, 0xE3, 0x83,
+ 0x9F, 0xE3, 0x82, 0xAF, 0xE3, 0x83, 0xAD, 0xE3,
+ // Bytes 2cc0 - 2cff
+ 0x83, 0xB3, 0x4C, 0xE3, 0x83, 0xA1, 0xE3, 0x83,
+ 0xBC, 0xE3, 0x83, 0x88, 0xE3, 0x83, 0xAB, 0x4C,
+ 0xE3, 0x83, 0xAA, 0xE3, 0x83, 0x83, 0xE3, 0x83,
+ 0x88, 0xE3, 0x83, 0xAB, 0x4C, 0xE3, 0x83, 0xAB,
+ 0xE3, 0x83, 0x92, 0xE3, 0x82, 0x9A, 0xE3, 0x83,
+ 0xBC, 0x4C, 0xE6, 0xA0, 0xAA, 0xE5, 0xBC, 0x8F,
+ 0xE4, 0xBC, 0x9A, 0xE7, 0xA4, 0xBE, 0x4E, 0x28,
+ 0xE1, 0x84, 0x8B, 0xE1, 0x85, 0xA9, 0xE1, 0x84,
+ // Bytes 2d00 - 2d3f
+ 0x92, 0xE1, 0x85, 0xAE, 0x29, 0x4F, 0xD8, 0xAC,
+ 0xD9, 0x84, 0x20, 0xD8, 0xAC, 0xD9, 0x84, 0xD8,
+ 0xA7, 0xD9, 0x84, 0xD9, 0x87, 0x4F, 0xE1, 0x84,
+ 0x8E, 0xE1, 0x85, 0xA1, 0xE1, 0x86, 0xB7, 0xE1,
+ 0x84, 0x80, 0xE1, 0x85, 0xA9, 0x4F, 0xE3, 0x82,
+ 0xA2, 0xE3, 0x83, 0x8F, 0xE3, 0x82, 0x9A, 0xE3,
+ 0x83, 0xBC, 0xE3, 0x83, 0x88, 0x4F, 0xE3, 0x82,
+ 0xA2, 0xE3, 0x83, 0xB3, 0xE3, 0x83, 0x98, 0xE3,
+ // Bytes 2d40 - 2d7f
+ 0x82, 0x9A, 0xE3, 0x82, 0xA2, 0x4F, 0xE3, 0x82,
+ 0xAD, 0xE3, 0x83, 0xAD, 0xE3, 0x83, 0xAF, 0xE3,
+ 0x83, 0x83, 0xE3, 0x83, 0x88, 0x4F, 0xE3, 0x82,
+ 0xB5, 0xE3, 0x83, 0xB3, 0xE3, 0x83, 0x81, 0xE3,
+ 0x83, 0xBC, 0xE3, 0x83, 0xA0, 0x4F, 0xE3, 0x83,
+ 0x8F, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0xBC, 0xE3,
+ 0x83, 0xAC, 0xE3, 0x83, 0xAB, 0x4F, 0xE3, 0x83,
+ 0x98, 0xE3, 0x82, 0xAF, 0xE3, 0x82, 0xBF, 0xE3,
+ // Bytes 2d80 - 2dbf
+ 0x83, 0xBC, 0xE3, 0x83, 0xAB, 0x4F, 0xE3, 0x83,
+ 0x9B, 0xE3, 0x82, 0x9A, 0xE3, 0x82, 0xA4, 0xE3,
+ 0x83, 0xB3, 0xE3, 0x83, 0x88, 0x4F, 0xE3, 0x83,
+ 0x9E, 0xE3, 0x83, 0xB3, 0xE3, 0x82, 0xB7, 0xE3,
+ 0x83, 0xA7, 0xE3, 0x83, 0xB3, 0x4F, 0xE3, 0x83,
+ 0xA1, 0xE3, 0x82, 0xAB, 0xE3, 0x82, 0x99, 0xE3,
+ 0x83, 0x88, 0xE3, 0x83, 0xB3, 0x4F, 0xE3, 0x83,
+ 0xAB, 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0x95, 0xE3,
+ // Bytes 2dc0 - 2dff
+ 0x82, 0x99, 0xE3, 0x83, 0xAB, 0x51, 0x28, 0xE1,
+ 0x84, 0x8B, 0xE1, 0x85, 0xA9, 0xE1, 0x84, 0x8C,
+ 0xE1, 0x85, 0xA5, 0xE1, 0x86, 0xAB, 0x29, 0x52,
+ 0xE3, 0x82, 0xAD, 0xE3, 0x82, 0x99, 0xE3, 0x83,
+ 0xAB, 0xE3, 0x82, 0xBF, 0xE3, 0x82, 0x99, 0xE3,
+ 0x83, 0xBC, 0x52, 0xE3, 0x82, 0xAD, 0xE3, 0x83,
+ 0xAD, 0xE3, 0x82, 0xAF, 0xE3, 0x82, 0x99, 0xE3,
+ 0x83, 0xA9, 0xE3, 0x83, 0xA0, 0x52, 0xE3, 0x82,
+ // Bytes 2e00 - 2e3f
+ 0xAD, 0xE3, 0x83, 0xAD, 0xE3, 0x83, 0xA1, 0xE3,
+ 0x83, 0xBC, 0xE3, 0x83, 0x88, 0xE3, 0x83, 0xAB,
+ 0x52, 0xE3, 0x82, 0xAF, 0xE3, 0x82, 0x99, 0xE3,
+ 0x83, 0xA9, 0xE3, 0x83, 0xA0, 0xE3, 0x83, 0x88,
+ 0xE3, 0x83, 0xB3, 0x52, 0xE3, 0x82, 0xAF, 0xE3,
+ 0x83, 0xAB, 0xE3, 0x82, 0xBB, 0xE3, 0x82, 0x99,
+ 0xE3, 0x82, 0xA4, 0xE3, 0x83, 0xAD, 0x52, 0xE3,
+ 0x83, 0x8F, 0xE3, 0x82, 0x9A, 0xE3, 0x83, 0xBC,
+ // Bytes 2e40 - 2e7f
+ 0xE3, 0x82, 0xBB, 0xE3, 0x83, 0xB3, 0xE3, 0x83,
+ 0x88, 0x52, 0xE3, 0x83, 0x92, 0xE3, 0x82, 0x9A,
+ 0xE3, 0x82, 0xA2, 0xE3, 0x82, 0xB9, 0xE3, 0x83,
+ 0x88, 0xE3, 0x83, 0xAB, 0x52, 0xE3, 0x83, 0x95,
+ 0xE3, 0x82, 0x99, 0xE3, 0x83, 0x83, 0xE3, 0x82,
+ 0xB7, 0xE3, 0x82, 0xA7, 0xE3, 0x83, 0xAB, 0x52,
+ 0xE3, 0x83, 0x9F, 0xE3, 0x83, 0xAA, 0xE3, 0x83,
+ 0x8F, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0xBC, 0xE3,
+ // Bytes 2e80 - 2ebf
+ 0x83, 0xAB, 0x52, 0xE3, 0x83, 0xAC, 0xE3, 0x83,
+ 0xB3, 0xE3, 0x83, 0x88, 0xE3, 0x82, 0xB1, 0xE3,
+ 0x82, 0x99, 0xE3, 0x83, 0xB3, 0x61, 0xD8, 0xB5,
+ 0xD9, 0x84, 0xD9, 0x89, 0x20, 0xD8, 0xA7, 0xD9,
+ 0x84, 0xD9, 0x84, 0xD9, 0x87, 0x20, 0xD8, 0xB9,
+ 0xD9, 0x84, 0xD9, 0x8A, 0xD9, 0x87, 0x20, 0xD9,
+ 0x88, 0xD8, 0xB3, 0xD9, 0x84, 0xD9, 0x85, 0x86,
+ 0xE0, 0xB3, 0x86, 0xE0, 0xB3, 0x82, 0x86, 0xE0,
+ // Bytes 2ec0 - 2eff
+ 0xB7, 0x99, 0xE0, 0xB7, 0x8F, 0x09, 0xE0, 0xB7,
+ 0x99, 0xE0, 0xB7, 0x8F, 0xE0, 0xB7, 0x8A, 0x11,
+ 0x44, 0x44, 0x5A, 0xCC, 0x8C, 0xC9, 0x44, 0x44,
+ 0x7A, 0xCC, 0x8C, 0xC9, 0x44, 0x64, 0x7A, 0xCC,
+ 0x8C, 0xC9, 0x46, 0xD9, 0x84, 0xD8, 0xA7, 0xD9,
+ 0x93, 0xC9, 0x46, 0xD9, 0x84, 0xD8, 0xA7, 0xD9,
+ 0x94, 0xC9, 0x46, 0xD9, 0x84, 0xD8, 0xA7, 0xD9,
+ 0x95, 0xB5, 0x49, 0xE3, 0x83, 0xA1, 0xE3, 0x82,
+ // Bytes 2f00 - 2f3f
+ 0xAB, 0xE3, 0x82, 0x99, 0x0D, 0x4C, 0xE3, 0x82,
+ 0xAD, 0xE3, 0x82, 0x99, 0xE3, 0x82, 0xAB, 0xE3,
+ 0x82, 0x99, 0x0D, 0x4C, 0xE3, 0x82, 0xB3, 0xE3,
+ 0x83, 0xBC, 0xE3, 0x83, 0x9B, 0xE3, 0x82, 0x9A,
+ 0x0D, 0x4C, 0xE3, 0x83, 0xA4, 0xE3, 0x83, 0xBC,
+ 0xE3, 0x83, 0x88, 0xE3, 0x82, 0x99, 0x0D, 0x4F,
+ 0xE3, 0x82, 0xA4, 0xE3, 0x83, 0x8B, 0xE3, 0x83,
+ 0xB3, 0xE3, 0x82, 0xAF, 0xE3, 0x82, 0x99, 0x0D,
+ // Bytes 2f40 - 2f7f
+ 0x4F, 0xE3, 0x82, 0xB7, 0xE3, 0x83, 0xAA, 0xE3,
+ 0x83, 0xB3, 0xE3, 0x82, 0xAF, 0xE3, 0x82, 0x99,
+ 0x0D, 0x4F, 0xE3, 0x83, 0x98, 0xE3, 0x82, 0x9A,
+ 0xE3, 0x83, 0xBC, 0xE3, 0x82, 0xB7, 0xE3, 0x82,
+ 0x99, 0x0D, 0x4F, 0xE3, 0x83, 0x9B, 0xE3, 0x82,
+ 0x9A, 0xE3, 0x83, 0xB3, 0xE3, 0x83, 0x88, 0xE3,
+ 0x82, 0x99, 0x0D, 0x52, 0xE3, 0x82, 0xA8, 0xE3,
+ 0x82, 0xB9, 0xE3, 0x82, 0xAF, 0xE3, 0x83, 0xBC,
+ // Bytes 2f80 - 2fbf
+ 0xE3, 0x83, 0x88, 0xE3, 0x82, 0x99, 0x0D, 0x52,
+ 0xE3, 0x83, 0x95, 0xE3, 0x82, 0xA1, 0xE3, 0x83,
+ 0xA9, 0xE3, 0x83, 0x83, 0xE3, 0x83, 0x88, 0xE3,
+ 0x82, 0x99, 0x0D, 0x03, 0x3C, 0xCC, 0xB8, 0x05,
+ 0x03, 0x3D, 0xCC, 0xB8, 0x05, 0x03, 0x3E, 0xCC,
+ 0xB8, 0x05, 0x03, 0x41, 0xCC, 0x80, 0xC9, 0x03,
+ 0x41, 0xCC, 0x81, 0xC9, 0x03, 0x41, 0xCC, 0x83,
+ 0xC9, 0x03, 0x41, 0xCC, 0x84, 0xC9, 0x03, 0x41,
+ // Bytes 2fc0 - 2fff
+ 0xCC, 0x89, 0xC9, 0x03, 0x41, 0xCC, 0x8C, 0xC9,
+ 0x03, 0x41, 0xCC, 0x8F, 0xC9, 0x03, 0x41, 0xCC,
+ 0x91, 0xC9, 0x03, 0x41, 0xCC, 0xA5, 0xB5, 0x03,
+ 0x41, 0xCC, 0xA8, 0xA5, 0x03, 0x42, 0xCC, 0x87,
+ 0xC9, 0x03, 0x42, 0xCC, 0xA3, 0xB5, 0x03, 0x42,
+ 0xCC, 0xB1, 0xB5, 0x03, 0x43, 0xCC, 0x81, 0xC9,
+ 0x03, 0x43, 0xCC, 0x82, 0xC9, 0x03, 0x43, 0xCC,
+ 0x87, 0xC9, 0x03, 0x43, 0xCC, 0x8C, 0xC9, 0x03,
+ // Bytes 3000 - 303f
+ 0x44, 0xCC, 0x87, 0xC9, 0x03, 0x44, 0xCC, 0x8C,
+ 0xC9, 0x03, 0x44, 0xCC, 0xA3, 0xB5, 0x03, 0x44,
+ 0xCC, 0xA7, 0xA5, 0x03, 0x44, 0xCC, 0xAD, 0xB5,
+ 0x03, 0x44, 0xCC, 0xB1, 0xB5, 0x03, 0x45, 0xCC,
+ 0x80, 0xC9, 0x03, 0x45, 0xCC, 0x81, 0xC9, 0x03,
+ 0x45, 0xCC, 0x83, 0xC9, 0x03, 0x45, 0xCC, 0x86,
+ 0xC9, 0x03, 0x45, 0xCC, 0x87, 0xC9, 0x03, 0x45,
+ 0xCC, 0x88, 0xC9, 0x03, 0x45, 0xCC, 0x89, 0xC9,
+ // Bytes 3040 - 307f
+ 0x03, 0x45, 0xCC, 0x8C, 0xC9, 0x03, 0x45, 0xCC,
+ 0x8F, 0xC9, 0x03, 0x45, 0xCC, 0x91, 0xC9, 0x03,
+ 0x45, 0xCC, 0xA8, 0xA5, 0x03, 0x45, 0xCC, 0xAD,
+ 0xB5, 0x03, 0x45, 0xCC, 0xB0, 0xB5, 0x03, 0x46,
+ 0xCC, 0x87, 0xC9, 0x03, 0x47, 0xCC, 0x81, 0xC9,
+ 0x03, 0x47, 0xCC, 0x82, 0xC9, 0x03, 0x47, 0xCC,
+ 0x84, 0xC9, 0x03, 0x47, 0xCC, 0x86, 0xC9, 0x03,
+ 0x47, 0xCC, 0x87, 0xC9, 0x03, 0x47, 0xCC, 0x8C,
+ // Bytes 3080 - 30bf
+ 0xC9, 0x03, 0x47, 0xCC, 0xA7, 0xA5, 0x03, 0x48,
+ 0xCC, 0x82, 0xC9, 0x03, 0x48, 0xCC, 0x87, 0xC9,
+ 0x03, 0x48, 0xCC, 0x88, 0xC9, 0x03, 0x48, 0xCC,
+ 0x8C, 0xC9, 0x03, 0x48, 0xCC, 0xA3, 0xB5, 0x03,
+ 0x48, 0xCC, 0xA7, 0xA5, 0x03, 0x48, 0xCC, 0xAE,
+ 0xB5, 0x03, 0x49, 0xCC, 0x80, 0xC9, 0x03, 0x49,
+ 0xCC, 0x81, 0xC9, 0x03, 0x49, 0xCC, 0x82, 0xC9,
+ 0x03, 0x49, 0xCC, 0x83, 0xC9, 0x03, 0x49, 0xCC,
+ // Bytes 30c0 - 30ff
+ 0x84, 0xC9, 0x03, 0x49, 0xCC, 0x86, 0xC9, 0x03,
+ 0x49, 0xCC, 0x87, 0xC9, 0x03, 0x49, 0xCC, 0x89,
+ 0xC9, 0x03, 0x49, 0xCC, 0x8C, 0xC9, 0x03, 0x49,
+ 0xCC, 0x8F, 0xC9, 0x03, 0x49, 0xCC, 0x91, 0xC9,
+ 0x03, 0x49, 0xCC, 0xA3, 0xB5, 0x03, 0x49, 0xCC,
+ 0xA8, 0xA5, 0x03, 0x49, 0xCC, 0xB0, 0xB5, 0x03,
+ 0x4A, 0xCC, 0x82, 0xC9, 0x03, 0x4B, 0xCC, 0x81,
+ 0xC9, 0x03, 0x4B, 0xCC, 0x8C, 0xC9, 0x03, 0x4B,
+ // Bytes 3100 - 313f
+ 0xCC, 0xA3, 0xB5, 0x03, 0x4B, 0xCC, 0xA7, 0xA5,
+ 0x03, 0x4B, 0xCC, 0xB1, 0xB5, 0x03, 0x4C, 0xCC,
+ 0x81, 0xC9, 0x03, 0x4C, 0xCC, 0x8C, 0xC9, 0x03,
+ 0x4C, 0xCC, 0xA7, 0xA5, 0x03, 0x4C, 0xCC, 0xAD,
+ 0xB5, 0x03, 0x4C, 0xCC, 0xB1, 0xB5, 0x03, 0x4D,
+ 0xCC, 0x81, 0xC9, 0x03, 0x4D, 0xCC, 0x87, 0xC9,
+ 0x03, 0x4D, 0xCC, 0xA3, 0xB5, 0x03, 0x4E, 0xCC,
+ 0x80, 0xC9, 0x03, 0x4E, 0xCC, 0x81, 0xC9, 0x03,
+ // Bytes 3140 - 317f
+ 0x4E, 0xCC, 0x83, 0xC9, 0x03, 0x4E, 0xCC, 0x87,
+ 0xC9, 0x03, 0x4E, 0xCC, 0x8C, 0xC9, 0x03, 0x4E,
+ 0xCC, 0xA3, 0xB5, 0x03, 0x4E, 0xCC, 0xA7, 0xA5,
+ 0x03, 0x4E, 0xCC, 0xAD, 0xB5, 0x03, 0x4E, 0xCC,
+ 0xB1, 0xB5, 0x03, 0x4F, 0xCC, 0x80, 0xC9, 0x03,
+ 0x4F, 0xCC, 0x81, 0xC9, 0x03, 0x4F, 0xCC, 0x86,
+ 0xC9, 0x03, 0x4F, 0xCC, 0x89, 0xC9, 0x03, 0x4F,
+ 0xCC, 0x8B, 0xC9, 0x03, 0x4F, 0xCC, 0x8C, 0xC9,
+ // Bytes 3180 - 31bf
+ 0x03, 0x4F, 0xCC, 0x8F, 0xC9, 0x03, 0x4F, 0xCC,
+ 0x91, 0xC9, 0x03, 0x50, 0xCC, 0x81, 0xC9, 0x03,
+ 0x50, 0xCC, 0x87, 0xC9, 0x03, 0x52, 0xCC, 0x81,
+ 0xC9, 0x03, 0x52, 0xCC, 0x87, 0xC9, 0x03, 0x52,
+ 0xCC, 0x8C, 0xC9, 0x03, 0x52, 0xCC, 0x8F, 0xC9,
+ 0x03, 0x52, 0xCC, 0x91, 0xC9, 0x03, 0x52, 0xCC,
+ 0xA7, 0xA5, 0x03, 0x52, 0xCC, 0xB1, 0xB5, 0x03,
+ 0x53, 0xCC, 0x82, 0xC9, 0x03, 0x53, 0xCC, 0x87,
+ // Bytes 31c0 - 31ff
+ 0xC9, 0x03, 0x53, 0xCC, 0xA6, 0xB5, 0x03, 0x53,
+ 0xCC, 0xA7, 0xA5, 0x03, 0x54, 0xCC, 0x87, 0xC9,
+ 0x03, 0x54, 0xCC, 0x8C, 0xC9, 0x03, 0x54, 0xCC,
+ 0xA3, 0xB5, 0x03, 0x54, 0xCC, 0xA6, 0xB5, 0x03,
+ 0x54, 0xCC, 0xA7, 0xA5, 0x03, 0x54, 0xCC, 0xAD,
+ 0xB5, 0x03, 0x54, 0xCC, 0xB1, 0xB5, 0x03, 0x55,
+ 0xCC, 0x80, 0xC9, 0x03, 0x55, 0xCC, 0x81, 0xC9,
+ 0x03, 0x55, 0xCC, 0x82, 0xC9, 0x03, 0x55, 0xCC,
+ // Bytes 3200 - 323f
+ 0x86, 0xC9, 0x03, 0x55, 0xCC, 0x89, 0xC9, 0x03,
+ 0x55, 0xCC, 0x8A, 0xC9, 0x03, 0x55, 0xCC, 0x8B,
+ 0xC9, 0x03, 0x55, 0xCC, 0x8C, 0xC9, 0x03, 0x55,
+ 0xCC, 0x8F, 0xC9, 0x03, 0x55, 0xCC, 0x91, 0xC9,
+ 0x03, 0x55, 0xCC, 0xA3, 0xB5, 0x03, 0x55, 0xCC,
+ 0xA4, 0xB5, 0x03, 0x55, 0xCC, 0xA8, 0xA5, 0x03,
+ 0x55, 0xCC, 0xAD, 0xB5, 0x03, 0x55, 0xCC, 0xB0,
+ 0xB5, 0x03, 0x56, 0xCC, 0x83, 0xC9, 0x03, 0x56,
+ // Bytes 3240 - 327f
+ 0xCC, 0xA3, 0xB5, 0x03, 0x57, 0xCC, 0x80, 0xC9,
+ 0x03, 0x57, 0xCC, 0x81, 0xC9, 0x03, 0x57, 0xCC,
+ 0x82, 0xC9, 0x03, 0x57, 0xCC, 0x87, 0xC9, 0x03,
+ 0x57, 0xCC, 0x88, 0xC9, 0x03, 0x57, 0xCC, 0xA3,
+ 0xB5, 0x03, 0x58, 0xCC, 0x87, 0xC9, 0x03, 0x58,
+ 0xCC, 0x88, 0xC9, 0x03, 0x59, 0xCC, 0x80, 0xC9,
+ 0x03, 0x59, 0xCC, 0x81, 0xC9, 0x03, 0x59, 0xCC,
+ 0x82, 0xC9, 0x03, 0x59, 0xCC, 0x83, 0xC9, 0x03,
+ // Bytes 3280 - 32bf
+ 0x59, 0xCC, 0x84, 0xC9, 0x03, 0x59, 0xCC, 0x87,
+ 0xC9, 0x03, 0x59, 0xCC, 0x88, 0xC9, 0x03, 0x59,
+ 0xCC, 0x89, 0xC9, 0x03, 0x59, 0xCC, 0xA3, 0xB5,
+ 0x03, 0x5A, 0xCC, 0x81, 0xC9, 0x03, 0x5A, 0xCC,
+ 0x82, 0xC9, 0x03, 0x5A, 0xCC, 0x87, 0xC9, 0x03,
+ 0x5A, 0xCC, 0x8C, 0xC9, 0x03, 0x5A, 0xCC, 0xA3,
+ 0xB5, 0x03, 0x5A, 0xCC, 0xB1, 0xB5, 0x03, 0x61,
+ 0xCC, 0x80, 0xC9, 0x03, 0x61, 0xCC, 0x81, 0xC9,
+ // Bytes 32c0 - 32ff
+ 0x03, 0x61, 0xCC, 0x83, 0xC9, 0x03, 0x61, 0xCC,
+ 0x84, 0xC9, 0x03, 0x61, 0xCC, 0x89, 0xC9, 0x03,
+ 0x61, 0xCC, 0x8C, 0xC9, 0x03, 0x61, 0xCC, 0x8F,
+ 0xC9, 0x03, 0x61, 0xCC, 0x91, 0xC9, 0x03, 0x61,
+ 0xCC, 0xA5, 0xB5, 0x03, 0x61, 0xCC, 0xA8, 0xA5,
+ 0x03, 0x62, 0xCC, 0x87, 0xC9, 0x03, 0x62, 0xCC,
+ 0xA3, 0xB5, 0x03, 0x62, 0xCC, 0xB1, 0xB5, 0x03,
+ 0x63, 0xCC, 0x81, 0xC9, 0x03, 0x63, 0xCC, 0x82,
+ // Bytes 3300 - 333f
+ 0xC9, 0x03, 0x63, 0xCC, 0x87, 0xC9, 0x03, 0x63,
+ 0xCC, 0x8C, 0xC9, 0x03, 0x64, 0xCC, 0x87, 0xC9,
+ 0x03, 0x64, 0xCC, 0x8C, 0xC9, 0x03, 0x64, 0xCC,
+ 0xA3, 0xB5, 0x03, 0x64, 0xCC, 0xA7, 0xA5, 0x03,
+ 0x64, 0xCC, 0xAD, 0xB5, 0x03, 0x64, 0xCC, 0xB1,
+ 0xB5, 0x03, 0x65, 0xCC, 0x80, 0xC9, 0x03, 0x65,
+ 0xCC, 0x81, 0xC9, 0x03, 0x65, 0xCC, 0x83, 0xC9,
+ 0x03, 0x65, 0xCC, 0x86, 0xC9, 0x03, 0x65, 0xCC,
+ // Bytes 3340 - 337f
+ 0x87, 0xC9, 0x03, 0x65, 0xCC, 0x88, 0xC9, 0x03,
+ 0x65, 0xCC, 0x89, 0xC9, 0x03, 0x65, 0xCC, 0x8C,
+ 0xC9, 0x03, 0x65, 0xCC, 0x8F, 0xC9, 0x03, 0x65,
+ 0xCC, 0x91, 0xC9, 0x03, 0x65, 0xCC, 0xA8, 0xA5,
+ 0x03, 0x65, 0xCC, 0xAD, 0xB5, 0x03, 0x65, 0xCC,
+ 0xB0, 0xB5, 0x03, 0x66, 0xCC, 0x87, 0xC9, 0x03,
+ 0x67, 0xCC, 0x81, 0xC9, 0x03, 0x67, 0xCC, 0x82,
+ 0xC9, 0x03, 0x67, 0xCC, 0x84, 0xC9, 0x03, 0x67,
+ // Bytes 3380 - 33bf
+ 0xCC, 0x86, 0xC9, 0x03, 0x67, 0xCC, 0x87, 0xC9,
+ 0x03, 0x67, 0xCC, 0x8C, 0xC9, 0x03, 0x67, 0xCC,
+ 0xA7, 0xA5, 0x03, 0x68, 0xCC, 0x82, 0xC9, 0x03,
+ 0x68, 0xCC, 0x87, 0xC9, 0x03, 0x68, 0xCC, 0x88,
+ 0xC9, 0x03, 0x68, 0xCC, 0x8C, 0xC9, 0x03, 0x68,
+ 0xCC, 0xA3, 0xB5, 0x03, 0x68, 0xCC, 0xA7, 0xA5,
+ 0x03, 0x68, 0xCC, 0xAE, 0xB5, 0x03, 0x68, 0xCC,
+ 0xB1, 0xB5, 0x03, 0x69, 0xCC, 0x80, 0xC9, 0x03,
+ // Bytes 33c0 - 33ff
+ 0x69, 0xCC, 0x81, 0xC9, 0x03, 0x69, 0xCC, 0x82,
+ 0xC9, 0x03, 0x69, 0xCC, 0x83, 0xC9, 0x03, 0x69,
+ 0xCC, 0x84, 0xC9, 0x03, 0x69, 0xCC, 0x86, 0xC9,
+ 0x03, 0x69, 0xCC, 0x89, 0xC9, 0x03, 0x69, 0xCC,
+ 0x8C, 0xC9, 0x03, 0x69, 0xCC, 0x8F, 0xC9, 0x03,
+ 0x69, 0xCC, 0x91, 0xC9, 0x03, 0x69, 0xCC, 0xA3,
+ 0xB5, 0x03, 0x69, 0xCC, 0xA8, 0xA5, 0x03, 0x69,
+ 0xCC, 0xB0, 0xB5, 0x03, 0x6A, 0xCC, 0x82, 0xC9,
+ // Bytes 3400 - 343f
+ 0x03, 0x6A, 0xCC, 0x8C, 0xC9, 0x03, 0x6B, 0xCC,
+ 0x81, 0xC9, 0x03, 0x6B, 0xCC, 0x8C, 0xC9, 0x03,
+ 0x6B, 0xCC, 0xA3, 0xB5, 0x03, 0x6B, 0xCC, 0xA7,
+ 0xA5, 0x03, 0x6B, 0xCC, 0xB1, 0xB5, 0x03, 0x6C,
+ 0xCC, 0x81, 0xC9, 0x03, 0x6C, 0xCC, 0x8C, 0xC9,
+ 0x03, 0x6C, 0xCC, 0xA7, 0xA5, 0x03, 0x6C, 0xCC,
+ 0xAD, 0xB5, 0x03, 0x6C, 0xCC, 0xB1, 0xB5, 0x03,
+ 0x6D, 0xCC, 0x81, 0xC9, 0x03, 0x6D, 0xCC, 0x87,
+ // Bytes 3440 - 347f
+ 0xC9, 0x03, 0x6D, 0xCC, 0xA3, 0xB5, 0x03, 0x6E,
+ 0xCC, 0x80, 0xC9, 0x03, 0x6E, 0xCC, 0x81, 0xC9,
+ 0x03, 0x6E, 0xCC, 0x83, 0xC9, 0x03, 0x6E, 0xCC,
+ 0x87, 0xC9, 0x03, 0x6E, 0xCC, 0x8C, 0xC9, 0x03,
+ 0x6E, 0xCC, 0xA3, 0xB5, 0x03, 0x6E, 0xCC, 0xA7,
+ 0xA5, 0x03, 0x6E, 0xCC, 0xAD, 0xB5, 0x03, 0x6E,
+ 0xCC, 0xB1, 0xB5, 0x03, 0x6F, 0xCC, 0x80, 0xC9,
+ 0x03, 0x6F, 0xCC, 0x81, 0xC9, 0x03, 0x6F, 0xCC,
+ // Bytes 3480 - 34bf
+ 0x86, 0xC9, 0x03, 0x6F, 0xCC, 0x89, 0xC9, 0x03,
+ 0x6F, 0xCC, 0x8B, 0xC9, 0x03, 0x6F, 0xCC, 0x8C,
+ 0xC9, 0x03, 0x6F, 0xCC, 0x8F, 0xC9, 0x03, 0x6F,
+ 0xCC, 0x91, 0xC9, 0x03, 0x70, 0xCC, 0x81, 0xC9,
+ 0x03, 0x70, 0xCC, 0x87, 0xC9, 0x03, 0x72, 0xCC,
+ 0x81, 0xC9, 0x03, 0x72, 0xCC, 0x87, 0xC9, 0x03,
+ 0x72, 0xCC, 0x8C, 0xC9, 0x03, 0x72, 0xCC, 0x8F,
+ 0xC9, 0x03, 0x72, 0xCC, 0x91, 0xC9, 0x03, 0x72,
+ // Bytes 34c0 - 34ff
+ 0xCC, 0xA7, 0xA5, 0x03, 0x72, 0xCC, 0xB1, 0xB5,
+ 0x03, 0x73, 0xCC, 0x82, 0xC9, 0x03, 0x73, 0xCC,
+ 0x87, 0xC9, 0x03, 0x73, 0xCC, 0xA6, 0xB5, 0x03,
+ 0x73, 0xCC, 0xA7, 0xA5, 0x03, 0x74, 0xCC, 0x87,
+ 0xC9, 0x03, 0x74, 0xCC, 0x88, 0xC9, 0x03, 0x74,
+ 0xCC, 0x8C, 0xC9, 0x03, 0x74, 0xCC, 0xA3, 0xB5,
+ 0x03, 0x74, 0xCC, 0xA6, 0xB5, 0x03, 0x74, 0xCC,
+ 0xA7, 0xA5, 0x03, 0x74, 0xCC, 0xAD, 0xB5, 0x03,
+ // Bytes 3500 - 353f
+ 0x74, 0xCC, 0xB1, 0xB5, 0x03, 0x75, 0xCC, 0x80,
+ 0xC9, 0x03, 0x75, 0xCC, 0x81, 0xC9, 0x03, 0x75,
+ 0xCC, 0x82, 0xC9, 0x03, 0x75, 0xCC, 0x86, 0xC9,
+ 0x03, 0x75, 0xCC, 0x89, 0xC9, 0x03, 0x75, 0xCC,
+ 0x8A, 0xC9, 0x03, 0x75, 0xCC, 0x8B, 0xC9, 0x03,
+ 0x75, 0xCC, 0x8C, 0xC9, 0x03, 0x75, 0xCC, 0x8F,
+ 0xC9, 0x03, 0x75, 0xCC, 0x91, 0xC9, 0x03, 0x75,
+ 0xCC, 0xA3, 0xB5, 0x03, 0x75, 0xCC, 0xA4, 0xB5,
+ // Bytes 3540 - 357f
+ 0x03, 0x75, 0xCC, 0xA8, 0xA5, 0x03, 0x75, 0xCC,
+ 0xAD, 0xB5, 0x03, 0x75, 0xCC, 0xB0, 0xB5, 0x03,
+ 0x76, 0xCC, 0x83, 0xC9, 0x03, 0x76, 0xCC, 0xA3,
+ 0xB5, 0x03, 0x77, 0xCC, 0x80, 0xC9, 0x03, 0x77,
+ 0xCC, 0x81, 0xC9, 0x03, 0x77, 0xCC, 0x82, 0xC9,
+ 0x03, 0x77, 0xCC, 0x87, 0xC9, 0x03, 0x77, 0xCC,
+ 0x88, 0xC9, 0x03, 0x77, 0xCC, 0x8A, 0xC9, 0x03,
+ 0x77, 0xCC, 0xA3, 0xB5, 0x03, 0x78, 0xCC, 0x87,
+ // Bytes 3580 - 35bf
+ 0xC9, 0x03, 0x78, 0xCC, 0x88, 0xC9, 0x03, 0x79,
+ 0xCC, 0x80, 0xC9, 0x03, 0x79, 0xCC, 0x81, 0xC9,
+ 0x03, 0x79, 0xCC, 0x82, 0xC9, 0x03, 0x79, 0xCC,
+ 0x83, 0xC9, 0x03, 0x79, 0xCC, 0x84, 0xC9, 0x03,
+ 0x79, 0xCC, 0x87, 0xC9, 0x03, 0x79, 0xCC, 0x88,
+ 0xC9, 0x03, 0x79, 0xCC, 0x89, 0xC9, 0x03, 0x79,
+ 0xCC, 0x8A, 0xC9, 0x03, 0x79, 0xCC, 0xA3, 0xB5,
+ 0x03, 0x7A, 0xCC, 0x81, 0xC9, 0x03, 0x7A, 0xCC,
+ // Bytes 35c0 - 35ff
+ 0x82, 0xC9, 0x03, 0x7A, 0xCC, 0x87, 0xC9, 0x03,
+ 0x7A, 0xCC, 0x8C, 0xC9, 0x03, 0x7A, 0xCC, 0xA3,
+ 0xB5, 0x03, 0x7A, 0xCC, 0xB1, 0xB5, 0x04, 0xC2,
+ 0xA8, 0xCC, 0x80, 0xCA, 0x04, 0xC2, 0xA8, 0xCC,
+ 0x81, 0xCA, 0x04, 0xC2, 0xA8, 0xCD, 0x82, 0xCA,
+ 0x04, 0xC3, 0x86, 0xCC, 0x81, 0xC9, 0x04, 0xC3,
+ 0x86, 0xCC, 0x84, 0xC9, 0x04, 0xC3, 0x98, 0xCC,
+ 0x81, 0xC9, 0x04, 0xC3, 0xA6, 0xCC, 0x81, 0xC9,
+ // Bytes 3600 - 363f
+ 0x04, 0xC3, 0xA6, 0xCC, 0x84, 0xC9, 0x04, 0xC3,
+ 0xB8, 0xCC, 0x81, 0xC9, 0x04, 0xC5, 0xBF, 0xCC,
+ 0x87, 0xC9, 0x04, 0xC6, 0xB7, 0xCC, 0x8C, 0xC9,
+ 0x04, 0xCA, 0x92, 0xCC, 0x8C, 0xC9, 0x04, 0xCE,
+ 0x91, 0xCC, 0x80, 0xC9, 0x04, 0xCE, 0x91, 0xCC,
+ 0x81, 0xC9, 0x04, 0xCE, 0x91, 0xCC, 0x84, 0xC9,
+ 0x04, 0xCE, 0x91, 0xCC, 0x86, 0xC9, 0x04, 0xCE,
+ 0x91, 0xCD, 0x85, 0xD9, 0x04, 0xCE, 0x95, 0xCC,
+ // Bytes 3640 - 367f
+ 0x80, 0xC9, 0x04, 0xCE, 0x95, 0xCC, 0x81, 0xC9,
+ 0x04, 0xCE, 0x97, 0xCC, 0x80, 0xC9, 0x04, 0xCE,
+ 0x97, 0xCC, 0x81, 0xC9, 0x04, 0xCE, 0x97, 0xCD,
+ 0x85, 0xD9, 0x04, 0xCE, 0x99, 0xCC, 0x80, 0xC9,
+ 0x04, 0xCE, 0x99, 0xCC, 0x81, 0xC9, 0x04, 0xCE,
+ 0x99, 0xCC, 0x84, 0xC9, 0x04, 0xCE, 0x99, 0xCC,
+ 0x86, 0xC9, 0x04, 0xCE, 0x99, 0xCC, 0x88, 0xC9,
+ 0x04, 0xCE, 0x9F, 0xCC, 0x80, 0xC9, 0x04, 0xCE,
+ // Bytes 3680 - 36bf
+ 0x9F, 0xCC, 0x81, 0xC9, 0x04, 0xCE, 0xA1, 0xCC,
+ 0x94, 0xC9, 0x04, 0xCE, 0xA5, 0xCC, 0x80, 0xC9,
+ 0x04, 0xCE, 0xA5, 0xCC, 0x81, 0xC9, 0x04, 0xCE,
+ 0xA5, 0xCC, 0x84, 0xC9, 0x04, 0xCE, 0xA5, 0xCC,
+ 0x86, 0xC9, 0x04, 0xCE, 0xA5, 0xCC, 0x88, 0xC9,
+ 0x04, 0xCE, 0xA9, 0xCC, 0x80, 0xC9, 0x04, 0xCE,
+ 0xA9, 0xCC, 0x81, 0xC9, 0x04, 0xCE, 0xA9, 0xCD,
+ 0x85, 0xD9, 0x04, 0xCE, 0xB1, 0xCC, 0x84, 0xC9,
+ // Bytes 36c0 - 36ff
+ 0x04, 0xCE, 0xB1, 0xCC, 0x86, 0xC9, 0x04, 0xCE,
+ 0xB1, 0xCD, 0x85, 0xD9, 0x04, 0xCE, 0xB5, 0xCC,
+ 0x80, 0xC9, 0x04, 0xCE, 0xB5, 0xCC, 0x81, 0xC9,
+ 0x04, 0xCE, 0xB7, 0xCD, 0x85, 0xD9, 0x04, 0xCE,
+ 0xB9, 0xCC, 0x80, 0xC9, 0x04, 0xCE, 0xB9, 0xCC,
+ 0x81, 0xC9, 0x04, 0xCE, 0xB9, 0xCC, 0x84, 0xC9,
+ 0x04, 0xCE, 0xB9, 0xCC, 0x86, 0xC9, 0x04, 0xCE,
+ 0xB9, 0xCD, 0x82, 0xC9, 0x04, 0xCE, 0xBF, 0xCC,
+ // Bytes 3700 - 373f
+ 0x80, 0xC9, 0x04, 0xCE, 0xBF, 0xCC, 0x81, 0xC9,
+ 0x04, 0xCF, 0x81, 0xCC, 0x93, 0xC9, 0x04, 0xCF,
+ 0x81, 0xCC, 0x94, 0xC9, 0x04, 0xCF, 0x85, 0xCC,
+ 0x80, 0xC9, 0x04, 0xCF, 0x85, 0xCC, 0x81, 0xC9,
+ 0x04, 0xCF, 0x85, 0xCC, 0x84, 0xC9, 0x04, 0xCF,
+ 0x85, 0xCC, 0x86, 0xC9, 0x04, 0xCF, 0x85, 0xCD,
+ 0x82, 0xC9, 0x04, 0xCF, 0x89, 0xCD, 0x85, 0xD9,
+ 0x04, 0xCF, 0x92, 0xCC, 0x81, 0xC9, 0x04, 0xCF,
+ // Bytes 3740 - 377f
+ 0x92, 0xCC, 0x88, 0xC9, 0x04, 0xD0, 0x86, 0xCC,
+ 0x88, 0xC9, 0x04, 0xD0, 0x90, 0xCC, 0x86, 0xC9,
+ 0x04, 0xD0, 0x90, 0xCC, 0x88, 0xC9, 0x04, 0xD0,
+ 0x93, 0xCC, 0x81, 0xC9, 0x04, 0xD0, 0x95, 0xCC,
+ 0x80, 0xC9, 0x04, 0xD0, 0x95, 0xCC, 0x86, 0xC9,
+ 0x04, 0xD0, 0x95, 0xCC, 0x88, 0xC9, 0x04, 0xD0,
+ 0x96, 0xCC, 0x86, 0xC9, 0x04, 0xD0, 0x96, 0xCC,
+ 0x88, 0xC9, 0x04, 0xD0, 0x97, 0xCC, 0x88, 0xC9,
+ // Bytes 3780 - 37bf
+ 0x04, 0xD0, 0x98, 0xCC, 0x80, 0xC9, 0x04, 0xD0,
+ 0x98, 0xCC, 0x84, 0xC9, 0x04, 0xD0, 0x98, 0xCC,
+ 0x86, 0xC9, 0x04, 0xD0, 0x98, 0xCC, 0x88, 0xC9,
+ 0x04, 0xD0, 0x9A, 0xCC, 0x81, 0xC9, 0x04, 0xD0,
+ 0x9E, 0xCC, 0x88, 0xC9, 0x04, 0xD0, 0xA3, 0xCC,
+ 0x84, 0xC9, 0x04, 0xD0, 0xA3, 0xCC, 0x86, 0xC9,
+ 0x04, 0xD0, 0xA3, 0xCC, 0x88, 0xC9, 0x04, 0xD0,
+ 0xA3, 0xCC, 0x8B, 0xC9, 0x04, 0xD0, 0xA7, 0xCC,
+ // Bytes 37c0 - 37ff
+ 0x88, 0xC9, 0x04, 0xD0, 0xAB, 0xCC, 0x88, 0xC9,
+ 0x04, 0xD0, 0xAD, 0xCC, 0x88, 0xC9, 0x04, 0xD0,
+ 0xB0, 0xCC, 0x86, 0xC9, 0x04, 0xD0, 0xB0, 0xCC,
+ 0x88, 0xC9, 0x04, 0xD0, 0xB3, 0xCC, 0x81, 0xC9,
+ 0x04, 0xD0, 0xB5, 0xCC, 0x80, 0xC9, 0x04, 0xD0,
+ 0xB5, 0xCC, 0x86, 0xC9, 0x04, 0xD0, 0xB5, 0xCC,
+ 0x88, 0xC9, 0x04, 0xD0, 0xB6, 0xCC, 0x86, 0xC9,
+ 0x04, 0xD0, 0xB6, 0xCC, 0x88, 0xC9, 0x04, 0xD0,
+ // Bytes 3800 - 383f
+ 0xB7, 0xCC, 0x88, 0xC9, 0x04, 0xD0, 0xB8, 0xCC,
+ 0x80, 0xC9, 0x04, 0xD0, 0xB8, 0xCC, 0x84, 0xC9,
+ 0x04, 0xD0, 0xB8, 0xCC, 0x86, 0xC9, 0x04, 0xD0,
+ 0xB8, 0xCC, 0x88, 0xC9, 0x04, 0xD0, 0xBA, 0xCC,
+ 0x81, 0xC9, 0x04, 0xD0, 0xBE, 0xCC, 0x88, 0xC9,
+ 0x04, 0xD1, 0x83, 0xCC, 0x84, 0xC9, 0x04, 0xD1,
+ 0x83, 0xCC, 0x86, 0xC9, 0x04, 0xD1, 0x83, 0xCC,
+ 0x88, 0xC9, 0x04, 0xD1, 0x83, 0xCC, 0x8B, 0xC9,
+ // Bytes 3840 - 387f
+ 0x04, 0xD1, 0x87, 0xCC, 0x88, 0xC9, 0x04, 0xD1,
+ 0x8B, 0xCC, 0x88, 0xC9, 0x04, 0xD1, 0x8D, 0xCC,
+ 0x88, 0xC9, 0x04, 0xD1, 0x96, 0xCC, 0x88, 0xC9,
+ 0x04, 0xD1, 0xB4, 0xCC, 0x8F, 0xC9, 0x04, 0xD1,
+ 0xB5, 0xCC, 0x8F, 0xC9, 0x04, 0xD3, 0x98, 0xCC,
+ 0x88, 0xC9, 0x04, 0xD3, 0x99, 0xCC, 0x88, 0xC9,
+ 0x04, 0xD3, 0xA8, 0xCC, 0x88, 0xC9, 0x04, 0xD3,
+ 0xA9, 0xCC, 0x88, 0xC9, 0x04, 0xD8, 0xA7, 0xD9,
+ // Bytes 3880 - 38bf
+ 0x93, 0xC9, 0x04, 0xD8, 0xA7, 0xD9, 0x94, 0xC9,
+ 0x04, 0xD8, 0xA7, 0xD9, 0x95, 0xB5, 0x04, 0xD9,
+ 0x88, 0xD9, 0x94, 0xC9, 0x04, 0xD9, 0x8A, 0xD9,
+ 0x94, 0xC9, 0x04, 0xDB, 0x81, 0xD9, 0x94, 0xC9,
+ 0x04, 0xDB, 0x92, 0xD9, 0x94, 0xC9, 0x04, 0xDB,
+ 0x95, 0xD9, 0x94, 0xC9, 0x05, 0x41, 0xCC, 0x82,
+ 0xCC, 0x80, 0xCA, 0x05, 0x41, 0xCC, 0x82, 0xCC,
+ 0x81, 0xCA, 0x05, 0x41, 0xCC, 0x82, 0xCC, 0x83,
+ // Bytes 38c0 - 38ff
+ 0xCA, 0x05, 0x41, 0xCC, 0x82, 0xCC, 0x89, 0xCA,
+ 0x05, 0x41, 0xCC, 0x86, 0xCC, 0x80, 0xCA, 0x05,
+ 0x41, 0xCC, 0x86, 0xCC, 0x81, 0xCA, 0x05, 0x41,
+ 0xCC, 0x86, 0xCC, 0x83, 0xCA, 0x05, 0x41, 0xCC,
+ 0x86, 0xCC, 0x89, 0xCA, 0x05, 0x41, 0xCC, 0x87,
+ 0xCC, 0x84, 0xCA, 0x05, 0x41, 0xCC, 0x88, 0xCC,
+ 0x84, 0xCA, 0x05, 0x41, 0xCC, 0x8A, 0xCC, 0x81,
+ 0xCA, 0x05, 0x41, 0xCC, 0xA3, 0xCC, 0x82, 0xCA,
+ // Bytes 3900 - 393f
+ 0x05, 0x41, 0xCC, 0xA3, 0xCC, 0x86, 0xCA, 0x05,
+ 0x43, 0xCC, 0xA7, 0xCC, 0x81, 0xCA, 0x05, 0x45,
+ 0xCC, 0x82, 0xCC, 0x80, 0xCA, 0x05, 0x45, 0xCC,
+ 0x82, 0xCC, 0x81, 0xCA, 0x05, 0x45, 0xCC, 0x82,
+ 0xCC, 0x83, 0xCA, 0x05, 0x45, 0xCC, 0x82, 0xCC,
+ 0x89, 0xCA, 0x05, 0x45, 0xCC, 0x84, 0xCC, 0x80,
+ 0xCA, 0x05, 0x45, 0xCC, 0x84, 0xCC, 0x81, 0xCA,
+ 0x05, 0x45, 0xCC, 0xA3, 0xCC, 0x82, 0xCA, 0x05,
+ // Bytes 3940 - 397f
+ 0x45, 0xCC, 0xA7, 0xCC, 0x86, 0xCA, 0x05, 0x49,
+ 0xCC, 0x88, 0xCC, 0x81, 0xCA, 0x05, 0x4C, 0xCC,
+ 0xA3, 0xCC, 0x84, 0xCA, 0x05, 0x4F, 0xCC, 0x82,
+ 0xCC, 0x80, 0xCA, 0x05, 0x4F, 0xCC, 0x82, 0xCC,
+ 0x81, 0xCA, 0x05, 0x4F, 0xCC, 0x82, 0xCC, 0x83,
+ 0xCA, 0x05, 0x4F, 0xCC, 0x82, 0xCC, 0x89, 0xCA,
+ 0x05, 0x4F, 0xCC, 0x83, 0xCC, 0x81, 0xCA, 0x05,
+ 0x4F, 0xCC, 0x83, 0xCC, 0x84, 0xCA, 0x05, 0x4F,
+ // Bytes 3980 - 39bf
+ 0xCC, 0x83, 0xCC, 0x88, 0xCA, 0x05, 0x4F, 0xCC,
+ 0x84, 0xCC, 0x80, 0xCA, 0x05, 0x4F, 0xCC, 0x84,
+ 0xCC, 0x81, 0xCA, 0x05, 0x4F, 0xCC, 0x87, 0xCC,
+ 0x84, 0xCA, 0x05, 0x4F, 0xCC, 0x88, 0xCC, 0x84,
+ 0xCA, 0x05, 0x4F, 0xCC, 0x9B, 0xCC, 0x80, 0xCA,
+ 0x05, 0x4F, 0xCC, 0x9B, 0xCC, 0x81, 0xCA, 0x05,
+ 0x4F, 0xCC, 0x9B, 0xCC, 0x83, 0xCA, 0x05, 0x4F,
+ 0xCC, 0x9B, 0xCC, 0x89, 0xCA, 0x05, 0x4F, 0xCC,
+ // Bytes 39c0 - 39ff
+ 0x9B, 0xCC, 0xA3, 0xB6, 0x05, 0x4F, 0xCC, 0xA3,
+ 0xCC, 0x82, 0xCA, 0x05, 0x4F, 0xCC, 0xA8, 0xCC,
+ 0x84, 0xCA, 0x05, 0x52, 0xCC, 0xA3, 0xCC, 0x84,
+ 0xCA, 0x05, 0x53, 0xCC, 0x81, 0xCC, 0x87, 0xCA,
+ 0x05, 0x53, 0xCC, 0x8C, 0xCC, 0x87, 0xCA, 0x05,
+ 0x53, 0xCC, 0xA3, 0xCC, 0x87, 0xCA, 0x05, 0x55,
+ 0xCC, 0x83, 0xCC, 0x81, 0xCA, 0x05, 0x55, 0xCC,
+ 0x84, 0xCC, 0x88, 0xCA, 0x05, 0x55, 0xCC, 0x88,
+ // Bytes 3a00 - 3a3f
+ 0xCC, 0x80, 0xCA, 0x05, 0x55, 0xCC, 0x88, 0xCC,
+ 0x81, 0xCA, 0x05, 0x55, 0xCC, 0x88, 0xCC, 0x84,
+ 0xCA, 0x05, 0x55, 0xCC, 0x88, 0xCC, 0x8C, 0xCA,
+ 0x05, 0x55, 0xCC, 0x9B, 0xCC, 0x80, 0xCA, 0x05,
+ 0x55, 0xCC, 0x9B, 0xCC, 0x81, 0xCA, 0x05, 0x55,
+ 0xCC, 0x9B, 0xCC, 0x83, 0xCA, 0x05, 0x55, 0xCC,
+ 0x9B, 0xCC, 0x89, 0xCA, 0x05, 0x55, 0xCC, 0x9B,
+ 0xCC, 0xA3, 0xB6, 0x05, 0x61, 0xCC, 0x82, 0xCC,
+ // Bytes 3a40 - 3a7f
+ 0x80, 0xCA, 0x05, 0x61, 0xCC, 0x82, 0xCC, 0x81,
+ 0xCA, 0x05, 0x61, 0xCC, 0x82, 0xCC, 0x83, 0xCA,
+ 0x05, 0x61, 0xCC, 0x82, 0xCC, 0x89, 0xCA, 0x05,
+ 0x61, 0xCC, 0x86, 0xCC, 0x80, 0xCA, 0x05, 0x61,
+ 0xCC, 0x86, 0xCC, 0x81, 0xCA, 0x05, 0x61, 0xCC,
+ 0x86, 0xCC, 0x83, 0xCA, 0x05, 0x61, 0xCC, 0x86,
+ 0xCC, 0x89, 0xCA, 0x05, 0x61, 0xCC, 0x87, 0xCC,
+ 0x84, 0xCA, 0x05, 0x61, 0xCC, 0x88, 0xCC, 0x84,
+ // Bytes 3a80 - 3abf
+ 0xCA, 0x05, 0x61, 0xCC, 0x8A, 0xCC, 0x81, 0xCA,
+ 0x05, 0x61, 0xCC, 0xA3, 0xCC, 0x82, 0xCA, 0x05,
+ 0x61, 0xCC, 0xA3, 0xCC, 0x86, 0xCA, 0x05, 0x63,
+ 0xCC, 0xA7, 0xCC, 0x81, 0xCA, 0x05, 0x65, 0xCC,
+ 0x82, 0xCC, 0x80, 0xCA, 0x05, 0x65, 0xCC, 0x82,
+ 0xCC, 0x81, 0xCA, 0x05, 0x65, 0xCC, 0x82, 0xCC,
+ 0x83, 0xCA, 0x05, 0x65, 0xCC, 0x82, 0xCC, 0x89,
+ 0xCA, 0x05, 0x65, 0xCC, 0x84, 0xCC, 0x80, 0xCA,
+ // Bytes 3ac0 - 3aff
+ 0x05, 0x65, 0xCC, 0x84, 0xCC, 0x81, 0xCA, 0x05,
+ 0x65, 0xCC, 0xA3, 0xCC, 0x82, 0xCA, 0x05, 0x65,
+ 0xCC, 0xA7, 0xCC, 0x86, 0xCA, 0x05, 0x69, 0xCC,
+ 0x88, 0xCC, 0x81, 0xCA, 0x05, 0x6C, 0xCC, 0xA3,
+ 0xCC, 0x84, 0xCA, 0x05, 0x6F, 0xCC, 0x82, 0xCC,
+ 0x80, 0xCA, 0x05, 0x6F, 0xCC, 0x82, 0xCC, 0x81,
+ 0xCA, 0x05, 0x6F, 0xCC, 0x82, 0xCC, 0x83, 0xCA,
+ 0x05, 0x6F, 0xCC, 0x82, 0xCC, 0x89, 0xCA, 0x05,
+ // Bytes 3b00 - 3b3f
+ 0x6F, 0xCC, 0x83, 0xCC, 0x81, 0xCA, 0x05, 0x6F,
+ 0xCC, 0x83, 0xCC, 0x84, 0xCA, 0x05, 0x6F, 0xCC,
+ 0x83, 0xCC, 0x88, 0xCA, 0x05, 0x6F, 0xCC, 0x84,
+ 0xCC, 0x80, 0xCA, 0x05, 0x6F, 0xCC, 0x84, 0xCC,
+ 0x81, 0xCA, 0x05, 0x6F, 0xCC, 0x87, 0xCC, 0x84,
+ 0xCA, 0x05, 0x6F, 0xCC, 0x88, 0xCC, 0x84, 0xCA,
+ 0x05, 0x6F, 0xCC, 0x9B, 0xCC, 0x80, 0xCA, 0x05,
+ 0x6F, 0xCC, 0x9B, 0xCC, 0x81, 0xCA, 0x05, 0x6F,
+ // Bytes 3b40 - 3b7f
+ 0xCC, 0x9B, 0xCC, 0x83, 0xCA, 0x05, 0x6F, 0xCC,
+ 0x9B, 0xCC, 0x89, 0xCA, 0x05, 0x6F, 0xCC, 0x9B,
+ 0xCC, 0xA3, 0xB6, 0x05, 0x6F, 0xCC, 0xA3, 0xCC,
+ 0x82, 0xCA, 0x05, 0x6F, 0xCC, 0xA8, 0xCC, 0x84,
+ 0xCA, 0x05, 0x72, 0xCC, 0xA3, 0xCC, 0x84, 0xCA,
+ 0x05, 0x73, 0xCC, 0x81, 0xCC, 0x87, 0xCA, 0x05,
+ 0x73, 0xCC, 0x8C, 0xCC, 0x87, 0xCA, 0x05, 0x73,
+ 0xCC, 0xA3, 0xCC, 0x87, 0xCA, 0x05, 0x75, 0xCC,
+ // Bytes 3b80 - 3bbf
+ 0x83, 0xCC, 0x81, 0xCA, 0x05, 0x75, 0xCC, 0x84,
+ 0xCC, 0x88, 0xCA, 0x05, 0x75, 0xCC, 0x88, 0xCC,
+ 0x80, 0xCA, 0x05, 0x75, 0xCC, 0x88, 0xCC, 0x81,
+ 0xCA, 0x05, 0x75, 0xCC, 0x88, 0xCC, 0x84, 0xCA,
+ 0x05, 0x75, 0xCC, 0x88, 0xCC, 0x8C, 0xCA, 0x05,
+ 0x75, 0xCC, 0x9B, 0xCC, 0x80, 0xCA, 0x05, 0x75,
+ 0xCC, 0x9B, 0xCC, 0x81, 0xCA, 0x05, 0x75, 0xCC,
+ 0x9B, 0xCC, 0x83, 0xCA, 0x05, 0x75, 0xCC, 0x9B,
+ // Bytes 3bc0 - 3bff
+ 0xCC, 0x89, 0xCA, 0x05, 0x75, 0xCC, 0x9B, 0xCC,
+ 0xA3, 0xB6, 0x05, 0xE1, 0xBE, 0xBF, 0xCC, 0x80,
+ 0xCA, 0x05, 0xE1, 0xBE, 0xBF, 0xCC, 0x81, 0xCA,
+ 0x05, 0xE1, 0xBE, 0xBF, 0xCD, 0x82, 0xCA, 0x05,
+ 0xE1, 0xBF, 0xBE, 0xCC, 0x80, 0xCA, 0x05, 0xE1,
+ 0xBF, 0xBE, 0xCC, 0x81, 0xCA, 0x05, 0xE1, 0xBF,
+ 0xBE, 0xCD, 0x82, 0xCA, 0x05, 0xE2, 0x86, 0x90,
+ 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x86, 0x92, 0xCC,
+ // Bytes 3c00 - 3c3f
+ 0xB8, 0x05, 0x05, 0xE2, 0x86, 0x94, 0xCC, 0xB8,
+ 0x05, 0x05, 0xE2, 0x87, 0x90, 0xCC, 0xB8, 0x05,
+ 0x05, 0xE2, 0x87, 0x92, 0xCC, 0xB8, 0x05, 0x05,
+ 0xE2, 0x87, 0x94, 0xCC, 0xB8, 0x05, 0x05, 0xE2,
+ 0x88, 0x83, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x88,
+ 0x88, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x88, 0x8B,
+ 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x88, 0xA3, 0xCC,
+ 0xB8, 0x05, 0x05, 0xE2, 0x88, 0xA5, 0xCC, 0xB8,
+ // Bytes 3c40 - 3c7f
+ 0x05, 0x05, 0xE2, 0x88, 0xBC, 0xCC, 0xB8, 0x05,
+ 0x05, 0xE2, 0x89, 0x83, 0xCC, 0xB8, 0x05, 0x05,
+ 0xE2, 0x89, 0x85, 0xCC, 0xB8, 0x05, 0x05, 0xE2,
+ 0x89, 0x88, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89,
+ 0x8D, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xA1,
+ 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xA4, 0xCC,
+ 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xA5, 0xCC, 0xB8,
+ 0x05, 0x05, 0xE2, 0x89, 0xB2, 0xCC, 0xB8, 0x05,
+ // Bytes 3c80 - 3cbf
+ 0x05, 0xE2, 0x89, 0xB3, 0xCC, 0xB8, 0x05, 0x05,
+ 0xE2, 0x89, 0xB6, 0xCC, 0xB8, 0x05, 0x05, 0xE2,
+ 0x89, 0xB7, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89,
+ 0xBA, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xBB,
+ 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xBC, 0xCC,
+ 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xBD, 0xCC, 0xB8,
+ 0x05, 0x05, 0xE2, 0x8A, 0x82, 0xCC, 0xB8, 0x05,
+ 0x05, 0xE2, 0x8A, 0x83, 0xCC, 0xB8, 0x05, 0x05,
+ // Bytes 3cc0 - 3cff
+ 0xE2, 0x8A, 0x86, 0xCC, 0xB8, 0x05, 0x05, 0xE2,
+ 0x8A, 0x87, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A,
+ 0x91, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0x92,
+ 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0xA2, 0xCC,
+ 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0xA8, 0xCC, 0xB8,
+ 0x05, 0x05, 0xE2, 0x8A, 0xA9, 0xCC, 0xB8, 0x05,
+ 0x05, 0xE2, 0x8A, 0xAB, 0xCC, 0xB8, 0x05, 0x05,
+ 0xE2, 0x8A, 0xB2, 0xCC, 0xB8, 0x05, 0x05, 0xE2,
+ // Bytes 3d00 - 3d3f
+ 0x8A, 0xB3, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A,
+ 0xB4, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0xB5,
+ 0xCC, 0xB8, 0x05, 0x06, 0xCE, 0x91, 0xCC, 0x93,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0x91, 0xCC, 0x94,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0x95, 0xCC, 0x93,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0x95, 0xCC, 0x93,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0x95, 0xCC, 0x94,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0x95, 0xCC, 0x94,
+ // Bytes 3d40 - 3d7f
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0x97, 0xCC, 0x93,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0x97, 0xCC, 0x94,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0x99, 0xCC, 0x93,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0x99, 0xCC, 0x93,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0x99, 0xCC, 0x93,
+ 0xCD, 0x82, 0xCA, 0x06, 0xCE, 0x99, 0xCC, 0x94,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0x99, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0x99, 0xCC, 0x94,
+ // Bytes 3d80 - 3dbf
+ 0xCD, 0x82, 0xCA, 0x06, 0xCE, 0x9F, 0xCC, 0x93,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0x9F, 0xCC, 0x93,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0x9F, 0xCC, 0x94,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0x9F, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0xA5, 0xCC, 0x94,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0xA5, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0xA5, 0xCC, 0x94,
+ 0xCD, 0x82, 0xCA, 0x06, 0xCE, 0xA9, 0xCC, 0x93,
+ // Bytes 3dc0 - 3dff
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xA9, 0xCC, 0x94,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB1, 0xCC, 0x80,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB1, 0xCC, 0x81,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB1, 0xCC, 0x93,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB1, 0xCC, 0x94,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB1, 0xCD, 0x82,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB5, 0xCC, 0x93,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0xB5, 0xCC, 0x93,
+ // Bytes 3e00 - 3e3f
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0xB5, 0xCC, 0x94,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0xB5, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0xB7, 0xCC, 0x80,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB7, 0xCC, 0x81,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB7, 0xCC, 0x93,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB7, 0xCC, 0x94,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB7, 0xCD, 0x82,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCE, 0xB9, 0xCC, 0x88,
+ // Bytes 3e40 - 3e7f
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0xB9, 0xCC, 0x88,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0xB9, 0xCC, 0x88,
+ 0xCD, 0x82, 0xCA, 0x06, 0xCE, 0xB9, 0xCC, 0x93,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0xB9, 0xCC, 0x93,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0xB9, 0xCC, 0x93,
+ 0xCD, 0x82, 0xCA, 0x06, 0xCE, 0xB9, 0xCC, 0x94,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0xB9, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0xB9, 0xCC, 0x94,
+ // Bytes 3e80 - 3ebf
+ 0xCD, 0x82, 0xCA, 0x06, 0xCE, 0xBF, 0xCC, 0x93,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0xBF, 0xCC, 0x93,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCE, 0xBF, 0xCC, 0x94,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCE, 0xBF, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCF, 0x85, 0xCC, 0x88,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCF, 0x85, 0xCC, 0x88,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCF, 0x85, 0xCC, 0x88,
+ 0xCD, 0x82, 0xCA, 0x06, 0xCF, 0x85, 0xCC, 0x93,
+ // Bytes 3ec0 - 3eff
+ 0xCC, 0x80, 0xCA, 0x06, 0xCF, 0x85, 0xCC, 0x93,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCF, 0x85, 0xCC, 0x93,
+ 0xCD, 0x82, 0xCA, 0x06, 0xCF, 0x85, 0xCC, 0x94,
+ 0xCC, 0x80, 0xCA, 0x06, 0xCF, 0x85, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCA, 0x06, 0xCF, 0x85, 0xCC, 0x94,
+ 0xCD, 0x82, 0xCA, 0x06, 0xCF, 0x89, 0xCC, 0x80,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCF, 0x89, 0xCC, 0x81,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCF, 0x89, 0xCC, 0x93,
+ // Bytes 3f00 - 3f3f
+ 0xCD, 0x85, 0xDA, 0x06, 0xCF, 0x89, 0xCC, 0x94,
+ 0xCD, 0x85, 0xDA, 0x06, 0xCF, 0x89, 0xCD, 0x82,
+ 0xCD, 0x85, 0xDA, 0x06, 0xE0, 0xA4, 0xA8, 0xE0,
+ 0xA4, 0xBC, 0x09, 0x06, 0xE0, 0xA4, 0xB0, 0xE0,
+ 0xA4, 0xBC, 0x09, 0x06, 0xE0, 0xA4, 0xB3, 0xE0,
+ 0xA4, 0xBC, 0x09, 0x06, 0xE0, 0xB1, 0x86, 0xE0,
+ 0xB1, 0x96, 0x85, 0x06, 0xE0, 0xB7, 0x99, 0xE0,
+ 0xB7, 0x8A, 0x11, 0x06, 0xE3, 0x81, 0x86, 0xE3,
+ // Bytes 3f40 - 3f7f
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x8B, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x8D, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x8F, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x91, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x93, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x95, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x97, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x99, 0xE3,
+ // Bytes 3f80 - 3fbf
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x9B, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x9D, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0x9F, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xA1, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xA4, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xA6, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xA8, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xAF, 0xE3,
+ // Bytes 3fc0 - 3fff
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xAF, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x81, 0xB2, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xB2, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x81, 0xB5, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xB5, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x81, 0xB8, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xB8, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x81, 0xBB, 0xE3,
+ // Bytes 4000 - 403f
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x81, 0xBB, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x82, 0x9D, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xA6, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xAB, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xAD, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xAF, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xB1, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xB3, 0xE3,
+ // Bytes 4040 - 407f
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xB5, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xB7, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xB9, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xBB, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xBD, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x82, 0xBF, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x81, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x84, 0xE3,
+ // Bytes 4080 - 40bf
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x86, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x88, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x8F, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x8F, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x83, 0x92, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x92, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x83, 0x95, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x95, 0xE3,
+ // Bytes 40c0 - 40ff
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x83, 0x98, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x98, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x83, 0x9B, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0x9B, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x06, 0xE3, 0x83, 0xAF, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0xB0, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0xB1, 0xE3,
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0xB2, 0xE3,
+ // Bytes 4100 - 413f
+ 0x82, 0x99, 0x0D, 0x06, 0xE3, 0x83, 0xBD, 0xE3,
+ 0x82, 0x99, 0x0D, 0x08, 0xCE, 0x91, 0xCC, 0x93,
+ 0xCC, 0x80, 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0x91,
+ 0xCC, 0x93, 0xCC, 0x81, 0xCD, 0x85, 0xDB, 0x08,
+ 0xCE, 0x91, 0xCC, 0x93, 0xCD, 0x82, 0xCD, 0x85,
+ 0xDB, 0x08, 0xCE, 0x91, 0xCC, 0x94, 0xCC, 0x80,
+ 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0x91, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0x91,
+ // Bytes 4140 - 417f
+ 0xCC, 0x94, 0xCD, 0x82, 0xCD, 0x85, 0xDB, 0x08,
+ 0xCE, 0x97, 0xCC, 0x93, 0xCC, 0x80, 0xCD, 0x85,
+ 0xDB, 0x08, 0xCE, 0x97, 0xCC, 0x93, 0xCC, 0x81,
+ 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0x97, 0xCC, 0x93,
+ 0xCD, 0x82, 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0x97,
+ 0xCC, 0x94, 0xCC, 0x80, 0xCD, 0x85, 0xDB, 0x08,
+ 0xCE, 0x97, 0xCC, 0x94, 0xCC, 0x81, 0xCD, 0x85,
+ 0xDB, 0x08, 0xCE, 0x97, 0xCC, 0x94, 0xCD, 0x82,
+ // Bytes 4180 - 41bf
+ 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xA9, 0xCC, 0x93,
+ 0xCC, 0x80, 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xA9,
+ 0xCC, 0x93, 0xCC, 0x81, 0xCD, 0x85, 0xDB, 0x08,
+ 0xCE, 0xA9, 0xCC, 0x93, 0xCD, 0x82, 0xCD, 0x85,
+ 0xDB, 0x08, 0xCE, 0xA9, 0xCC, 0x94, 0xCC, 0x80,
+ 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xA9, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xA9,
+ 0xCC, 0x94, 0xCD, 0x82, 0xCD, 0x85, 0xDB, 0x08,
+ // Bytes 41c0 - 41ff
+ 0xCE, 0xB1, 0xCC, 0x93, 0xCC, 0x80, 0xCD, 0x85,
+ 0xDB, 0x08, 0xCE, 0xB1, 0xCC, 0x93, 0xCC, 0x81,
+ 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xB1, 0xCC, 0x93,
+ 0xCD, 0x82, 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xB1,
+ 0xCC, 0x94, 0xCC, 0x80, 0xCD, 0x85, 0xDB, 0x08,
+ 0xCE, 0xB1, 0xCC, 0x94, 0xCC, 0x81, 0xCD, 0x85,
+ 0xDB, 0x08, 0xCE, 0xB1, 0xCC, 0x94, 0xCD, 0x82,
+ 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xB7, 0xCC, 0x93,
+ // Bytes 4200 - 423f
+ 0xCC, 0x80, 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xB7,
+ 0xCC, 0x93, 0xCC, 0x81, 0xCD, 0x85, 0xDB, 0x08,
+ 0xCE, 0xB7, 0xCC, 0x93, 0xCD, 0x82, 0xCD, 0x85,
+ 0xDB, 0x08, 0xCE, 0xB7, 0xCC, 0x94, 0xCC, 0x80,
+ 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xB7, 0xCC, 0x94,
+ 0xCC, 0x81, 0xCD, 0x85, 0xDB, 0x08, 0xCE, 0xB7,
+ 0xCC, 0x94, 0xCD, 0x82, 0xCD, 0x85, 0xDB, 0x08,
+ 0xCF, 0x89, 0xCC, 0x93, 0xCC, 0x80, 0xCD, 0x85,
+ // Bytes 4240 - 427f
+ 0xDB, 0x08, 0xCF, 0x89, 0xCC, 0x93, 0xCC, 0x81,
+ 0xCD, 0x85, 0xDB, 0x08, 0xCF, 0x89, 0xCC, 0x93,
+ 0xCD, 0x82, 0xCD, 0x85, 0xDB, 0x08, 0xCF, 0x89,
+ 0xCC, 0x94, 0xCC, 0x80, 0xCD, 0x85, 0xDB, 0x08,
+ 0xCF, 0x89, 0xCC, 0x94, 0xCC, 0x81, 0xCD, 0x85,
+ 0xDB, 0x08, 0xCF, 0x89, 0xCC, 0x94, 0xCD, 0x82,
+ 0xCD, 0x85, 0xDB, 0x08, 0xF0, 0x91, 0x82, 0x99,
+ 0xF0, 0x91, 0x82, 0xBA, 0x09, 0x08, 0xF0, 0x91,
+ // Bytes 4280 - 42bf
+ 0x82, 0x9B, 0xF0, 0x91, 0x82, 0xBA, 0x09, 0x08,
+ 0xF0, 0x91, 0x82, 0xA5, 0xF0, 0x91, 0x82, 0xBA,
+ 0x09, 0x42, 0xC2, 0xB4, 0x01, 0x43, 0x20, 0xCC,
+ 0x81, 0xC9, 0x43, 0x20, 0xCC, 0x83, 0xC9, 0x43,
+ 0x20, 0xCC, 0x84, 0xC9, 0x43, 0x20, 0xCC, 0x85,
+ 0xC9, 0x43, 0x20, 0xCC, 0x86, 0xC9, 0x43, 0x20,
+ 0xCC, 0x87, 0xC9, 0x43, 0x20, 0xCC, 0x88, 0xC9,
+ 0x43, 0x20, 0xCC, 0x8A, 0xC9, 0x43, 0x20, 0xCC,
+ // Bytes 42c0 - 42ff
+ 0x8B, 0xC9, 0x43, 0x20, 0xCC, 0x93, 0xC9, 0x43,
+ 0x20, 0xCC, 0x94, 0xC9, 0x43, 0x20, 0xCC, 0xA7,
+ 0xA5, 0x43, 0x20, 0xCC, 0xA8, 0xA5, 0x43, 0x20,
+ 0xCC, 0xB3, 0xB5, 0x43, 0x20, 0xCD, 0x82, 0xC9,
+ 0x43, 0x20, 0xCD, 0x85, 0xD9, 0x43, 0x20, 0xD9,
+ 0x8B, 0x59, 0x43, 0x20, 0xD9, 0x8C, 0x5D, 0x43,
+ 0x20, 0xD9, 0x8D, 0x61, 0x43, 0x20, 0xD9, 0x8E,
+ 0x65, 0x43, 0x20, 0xD9, 0x8F, 0x69, 0x43, 0x20,
+ // Bytes 4300 - 433f
+ 0xD9, 0x90, 0x6D, 0x43, 0x20, 0xD9, 0x91, 0x71,
+ 0x43, 0x20, 0xD9, 0x92, 0x75, 0x43, 0x41, 0xCC,
+ 0x8A, 0xC9, 0x43, 0x73, 0xCC, 0x87, 0xC9, 0x44,
+ 0x20, 0xE3, 0x82, 0x99, 0x0D, 0x44, 0x20, 0xE3,
+ 0x82, 0x9A, 0x0D, 0x44, 0xC2, 0xA8, 0xCC, 0x81,
+ 0xCA, 0x44, 0xCE, 0x91, 0xCC, 0x81, 0xC9, 0x44,
+ 0xCE, 0x95, 0xCC, 0x81, 0xC9, 0x44, 0xCE, 0x97,
+ 0xCC, 0x81, 0xC9, 0x44, 0xCE, 0x99, 0xCC, 0x81,
+ // Bytes 4340 - 437f
+ 0xC9, 0x44, 0xCE, 0x9F, 0xCC, 0x81, 0xC9, 0x44,
+ 0xCE, 0xA5, 0xCC, 0x81, 0xC9, 0x44, 0xCE, 0xA5,
+ 0xCC, 0x88, 0xC9, 0x44, 0xCE, 0xA9, 0xCC, 0x81,
+ 0xC9, 0x44, 0xCE, 0xB1, 0xCC, 0x81, 0xC9, 0x44,
+ 0xCE, 0xB5, 0xCC, 0x81, 0xC9, 0x44, 0xCE, 0xB7,
+ 0xCC, 0x81, 0xC9, 0x44, 0xCE, 0xB9, 0xCC, 0x81,
+ 0xC9, 0x44, 0xCE, 0xBF, 0xCC, 0x81, 0xC9, 0x44,
+ 0xCF, 0x85, 0xCC, 0x81, 0xC9, 0x44, 0xCF, 0x89,
+ // Bytes 4380 - 43bf
+ 0xCC, 0x81, 0xC9, 0x44, 0xD7, 0x90, 0xD6, 0xB7,
+ 0x31, 0x44, 0xD7, 0x90, 0xD6, 0xB8, 0x35, 0x44,
+ 0xD7, 0x90, 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0x91,
+ 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0x91, 0xD6, 0xBF,
+ 0x49, 0x44, 0xD7, 0x92, 0xD6, 0xBC, 0x41, 0x44,
+ 0xD7, 0x93, 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0x94,
+ 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0x95, 0xD6, 0xB9,
+ 0x39, 0x44, 0xD7, 0x95, 0xD6, 0xBC, 0x41, 0x44,
+ // Bytes 43c0 - 43ff
+ 0xD7, 0x96, 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0x98,
+ 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0x99, 0xD6, 0xB4,
+ 0x25, 0x44, 0xD7, 0x99, 0xD6, 0xBC, 0x41, 0x44,
+ 0xD7, 0x9A, 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0x9B,
+ 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0x9B, 0xD6, 0xBF,
+ 0x49, 0x44, 0xD7, 0x9C, 0xD6, 0xBC, 0x41, 0x44,
+ 0xD7, 0x9E, 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0xA0,
+ 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0xA1, 0xD6, 0xBC,
+ // Bytes 4400 - 443f
+ 0x41, 0x44, 0xD7, 0xA3, 0xD6, 0xBC, 0x41, 0x44,
+ 0xD7, 0xA4, 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0xA4,
+ 0xD6, 0xBF, 0x49, 0x44, 0xD7, 0xA6, 0xD6, 0xBC,
+ 0x41, 0x44, 0xD7, 0xA7, 0xD6, 0xBC, 0x41, 0x44,
+ 0xD7, 0xA8, 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0xA9,
+ 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0xA9, 0xD7, 0x81,
+ 0x4D, 0x44, 0xD7, 0xA9, 0xD7, 0x82, 0x51, 0x44,
+ 0xD7, 0xAA, 0xD6, 0xBC, 0x41, 0x44, 0xD7, 0xB2,
+ // Bytes 4440 - 447f
+ 0xD6, 0xB7, 0x31, 0x44, 0xD8, 0xA7, 0xD9, 0x8B,
+ 0x59, 0x44, 0xD8, 0xA7, 0xD9, 0x93, 0xC9, 0x44,
+ 0xD8, 0xA7, 0xD9, 0x94, 0xC9, 0x44, 0xD8, 0xA7,
+ 0xD9, 0x95, 0xB5, 0x44, 0xD8, 0xB0, 0xD9, 0xB0,
+ 0x79, 0x44, 0xD8, 0xB1, 0xD9, 0xB0, 0x79, 0x44,
+ 0xD9, 0x80, 0xD9, 0x8B, 0x59, 0x44, 0xD9, 0x80,
+ 0xD9, 0x8E, 0x65, 0x44, 0xD9, 0x80, 0xD9, 0x8F,
+ 0x69, 0x44, 0xD9, 0x80, 0xD9, 0x90, 0x6D, 0x44,
+ // Bytes 4480 - 44bf
+ 0xD9, 0x80, 0xD9, 0x91, 0x71, 0x44, 0xD9, 0x80,
+ 0xD9, 0x92, 0x75, 0x44, 0xD9, 0x87, 0xD9, 0xB0,
+ 0x79, 0x44, 0xD9, 0x88, 0xD9, 0x94, 0xC9, 0x44,
+ 0xD9, 0x89, 0xD9, 0xB0, 0x79, 0x44, 0xD9, 0x8A,
+ 0xD9, 0x94, 0xC9, 0x44, 0xDB, 0x92, 0xD9, 0x94,
+ 0xC9, 0x44, 0xDB, 0x95, 0xD9, 0x94, 0xC9, 0x45,
+ 0x20, 0xCC, 0x88, 0xCC, 0x80, 0xCA, 0x45, 0x20,
+ 0xCC, 0x88, 0xCC, 0x81, 0xCA, 0x45, 0x20, 0xCC,
+ // Bytes 44c0 - 44ff
+ 0x88, 0xCD, 0x82, 0xCA, 0x45, 0x20, 0xCC, 0x93,
+ 0xCC, 0x80, 0xCA, 0x45, 0x20, 0xCC, 0x93, 0xCC,
+ 0x81, 0xCA, 0x45, 0x20, 0xCC, 0x93, 0xCD, 0x82,
+ 0xCA, 0x45, 0x20, 0xCC, 0x94, 0xCC, 0x80, 0xCA,
+ 0x45, 0x20, 0xCC, 0x94, 0xCC, 0x81, 0xCA, 0x45,
+ 0x20, 0xCC, 0x94, 0xCD, 0x82, 0xCA, 0x45, 0x20,
+ 0xD9, 0x8C, 0xD9, 0x91, 0x72, 0x45, 0x20, 0xD9,
+ 0x8D, 0xD9, 0x91, 0x72, 0x45, 0x20, 0xD9, 0x8E,
+ // Bytes 4500 - 453f
+ 0xD9, 0x91, 0x72, 0x45, 0x20, 0xD9, 0x8F, 0xD9,
+ 0x91, 0x72, 0x45, 0x20, 0xD9, 0x90, 0xD9, 0x91,
+ 0x72, 0x45, 0x20, 0xD9, 0x91, 0xD9, 0xB0, 0x7A,
+ 0x45, 0xE2, 0xAB, 0x9D, 0xCC, 0xB8, 0x05, 0x46,
+ 0xCE, 0xB9, 0xCC, 0x88, 0xCC, 0x81, 0xCA, 0x46,
+ 0xCF, 0x85, 0xCC, 0x88, 0xCC, 0x81, 0xCA, 0x46,
+ 0xD7, 0xA9, 0xD6, 0xBC, 0xD7, 0x81, 0x4E, 0x46,
+ 0xD7, 0xA9, 0xD6, 0xBC, 0xD7, 0x82, 0x52, 0x46,
+ // Bytes 4540 - 457f
+ 0xD9, 0x80, 0xD9, 0x8E, 0xD9, 0x91, 0x72, 0x46,
+ 0xD9, 0x80, 0xD9, 0x8F, 0xD9, 0x91, 0x72, 0x46,
+ 0xD9, 0x80, 0xD9, 0x90, 0xD9, 0x91, 0x72, 0x46,
+ 0xE0, 0xA4, 0x95, 0xE0, 0xA4, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA4, 0x96, 0xE0, 0xA4, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA4, 0x97, 0xE0, 0xA4, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA4, 0x9C, 0xE0, 0xA4, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA4, 0xA1, 0xE0, 0xA4, 0xBC, 0x09, 0x46,
+ // Bytes 4580 - 45bf
+ 0xE0, 0xA4, 0xA2, 0xE0, 0xA4, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA4, 0xAB, 0xE0, 0xA4, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA4, 0xAF, 0xE0, 0xA4, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA6, 0xA1, 0xE0, 0xA6, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA6, 0xA2, 0xE0, 0xA6, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA6, 0xAF, 0xE0, 0xA6, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA8, 0x96, 0xE0, 0xA8, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA8, 0x97, 0xE0, 0xA8, 0xBC, 0x09, 0x46,
+ // Bytes 45c0 - 45ff
+ 0xE0, 0xA8, 0x9C, 0xE0, 0xA8, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA8, 0xAB, 0xE0, 0xA8, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA8, 0xB2, 0xE0, 0xA8, 0xBC, 0x09, 0x46,
+ 0xE0, 0xA8, 0xB8, 0xE0, 0xA8, 0xBC, 0x09, 0x46,
+ 0xE0, 0xAC, 0xA1, 0xE0, 0xAC, 0xBC, 0x09, 0x46,
+ 0xE0, 0xAC, 0xA2, 0xE0, 0xAC, 0xBC, 0x09, 0x46,
+ 0xE0, 0xBE, 0xB2, 0xE0, 0xBE, 0x80, 0x9D, 0x46,
+ 0xE0, 0xBE, 0xB3, 0xE0, 0xBE, 0x80, 0x9D, 0x46,
+ // Bytes 4600 - 463f
+ 0xE3, 0x83, 0x86, 0xE3, 0x82, 0x99, 0x0D, 0x48,
+ 0xF0, 0x9D, 0x85, 0x97, 0xF0, 0x9D, 0x85, 0xA5,
+ 0xAD, 0x48, 0xF0, 0x9D, 0x85, 0x98, 0xF0, 0x9D,
+ 0x85, 0xA5, 0xAD, 0x48, 0xF0, 0x9D, 0x86, 0xB9,
+ 0xF0, 0x9D, 0x85, 0xA5, 0xAD, 0x48, 0xF0, 0x9D,
+ 0x86, 0xBA, 0xF0, 0x9D, 0x85, 0xA5, 0xAD, 0x49,
+ 0xE0, 0xBE, 0xB2, 0xE0, 0xBD, 0xB1, 0xE0, 0xBE,
+ 0x80, 0x9E, 0x49, 0xE0, 0xBE, 0xB3, 0xE0, 0xBD,
+ // Bytes 4640 - 467f
+ 0xB1, 0xE0, 0xBE, 0x80, 0x9E, 0x4C, 0xF0, 0x9D,
+ 0x85, 0x98, 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D,
+ 0x85, 0xAE, 0xAE, 0x4C, 0xF0, 0x9D, 0x85, 0x98,
+ 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D, 0x85, 0xAF,
+ 0xAE, 0x4C, 0xF0, 0x9D, 0x85, 0x98, 0xF0, 0x9D,
+ 0x85, 0xA5, 0xF0, 0x9D, 0x85, 0xB0, 0xAE, 0x4C,
+ 0xF0, 0x9D, 0x85, 0x98, 0xF0, 0x9D, 0x85, 0xA5,
+ 0xF0, 0x9D, 0x85, 0xB1, 0xAE, 0x4C, 0xF0, 0x9D,
+ // Bytes 4680 - 46bf
+ 0x85, 0x98, 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D,
+ 0x85, 0xB2, 0xAE, 0x4C, 0xF0, 0x9D, 0x86, 0xB9,
+ 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D, 0x85, 0xAE,
+ 0xAE, 0x4C, 0xF0, 0x9D, 0x86, 0xB9, 0xF0, 0x9D,
+ 0x85, 0xA5, 0xF0, 0x9D, 0x85, 0xAF, 0xAE, 0x4C,
+ 0xF0, 0x9D, 0x86, 0xBA, 0xF0, 0x9D, 0x85, 0xA5,
+ 0xF0, 0x9D, 0x85, 0xAE, 0xAE, 0x4C, 0xF0, 0x9D,
+ 0x86, 0xBA, 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D,
+ // Bytes 46c0 - 46ff
+ 0x85, 0xAF, 0xAE, 0x83, 0x41, 0xCC, 0x82, 0xC9,
+ 0x83, 0x41, 0xCC, 0x86, 0xC9, 0x83, 0x41, 0xCC,
+ 0x87, 0xC9, 0x83, 0x41, 0xCC, 0x88, 0xC9, 0x83,
+ 0x41, 0xCC, 0x8A, 0xC9, 0x83, 0x41, 0xCC, 0xA3,
+ 0xB5, 0x83, 0x43, 0xCC, 0xA7, 0xA5, 0x83, 0x45,
+ 0xCC, 0x82, 0xC9, 0x83, 0x45, 0xCC, 0x84, 0xC9,
+ 0x83, 0x45, 0xCC, 0xA3, 0xB5, 0x83, 0x45, 0xCC,
+ 0xA7, 0xA5, 0x83, 0x49, 0xCC, 0x88, 0xC9, 0x83,
+ // Bytes 4700 - 473f
+ 0x4C, 0xCC, 0xA3, 0xB5, 0x83, 0x4F, 0xCC, 0x82,
+ 0xC9, 0x83, 0x4F, 0xCC, 0x83, 0xC9, 0x83, 0x4F,
+ 0xCC, 0x84, 0xC9, 0x83, 0x4F, 0xCC, 0x87, 0xC9,
+ 0x83, 0x4F, 0xCC, 0x88, 0xC9, 0x83, 0x4F, 0xCC,
+ 0x9B, 0xAD, 0x83, 0x4F, 0xCC, 0xA3, 0xB5, 0x83,
+ 0x4F, 0xCC, 0xA8, 0xA5, 0x83, 0x52, 0xCC, 0xA3,
+ 0xB5, 0x83, 0x53, 0xCC, 0x81, 0xC9, 0x83, 0x53,
+ 0xCC, 0x8C, 0xC9, 0x83, 0x53, 0xCC, 0xA3, 0xB5,
+ // Bytes 4740 - 477f
+ 0x83, 0x55, 0xCC, 0x83, 0xC9, 0x83, 0x55, 0xCC,
+ 0x84, 0xC9, 0x83, 0x55, 0xCC, 0x88, 0xC9, 0x83,
+ 0x55, 0xCC, 0x9B, 0xAD, 0x83, 0x61, 0xCC, 0x82,
+ 0xC9, 0x83, 0x61, 0xCC, 0x86, 0xC9, 0x83, 0x61,
+ 0xCC, 0x87, 0xC9, 0x83, 0x61, 0xCC, 0x88, 0xC9,
+ 0x83, 0x61, 0xCC, 0x8A, 0xC9, 0x83, 0x61, 0xCC,
+ 0xA3, 0xB5, 0x83, 0x63, 0xCC, 0xA7, 0xA5, 0x83,
+ 0x65, 0xCC, 0x82, 0xC9, 0x83, 0x65, 0xCC, 0x84,
+ // Bytes 4780 - 47bf
+ 0xC9, 0x83, 0x65, 0xCC, 0xA3, 0xB5, 0x83, 0x65,
+ 0xCC, 0xA7, 0xA5, 0x83, 0x69, 0xCC, 0x88, 0xC9,
+ 0x83, 0x6C, 0xCC, 0xA3, 0xB5, 0x83, 0x6F, 0xCC,
+ 0x82, 0xC9, 0x83, 0x6F, 0xCC, 0x83, 0xC9, 0x83,
+ 0x6F, 0xCC, 0x84, 0xC9, 0x83, 0x6F, 0xCC, 0x87,
+ 0xC9, 0x83, 0x6F, 0xCC, 0x88, 0xC9, 0x83, 0x6F,
+ 0xCC, 0x9B, 0xAD, 0x83, 0x6F, 0xCC, 0xA3, 0xB5,
+ 0x83, 0x6F, 0xCC, 0xA8, 0xA5, 0x83, 0x72, 0xCC,
+ // Bytes 47c0 - 47ff
+ 0xA3, 0xB5, 0x83, 0x73, 0xCC, 0x81, 0xC9, 0x83,
+ 0x73, 0xCC, 0x8C, 0xC9, 0x83, 0x73, 0xCC, 0xA3,
+ 0xB5, 0x83, 0x75, 0xCC, 0x83, 0xC9, 0x83, 0x75,
+ 0xCC, 0x84, 0xC9, 0x83, 0x75, 0xCC, 0x88, 0xC9,
+ 0x83, 0x75, 0xCC, 0x9B, 0xAD, 0x84, 0xCE, 0x91,
+ 0xCC, 0x93, 0xC9, 0x84, 0xCE, 0x91, 0xCC, 0x94,
+ 0xC9, 0x84, 0xCE, 0x95, 0xCC, 0x93, 0xC9, 0x84,
+ 0xCE, 0x95, 0xCC, 0x94, 0xC9, 0x84, 0xCE, 0x97,
+ // Bytes 4800 - 483f
+ 0xCC, 0x93, 0xC9, 0x84, 0xCE, 0x97, 0xCC, 0x94,
+ 0xC9, 0x84, 0xCE, 0x99, 0xCC, 0x93, 0xC9, 0x84,
+ 0xCE, 0x99, 0xCC, 0x94, 0xC9, 0x84, 0xCE, 0x9F,
+ 0xCC, 0x93, 0xC9, 0x84, 0xCE, 0x9F, 0xCC, 0x94,
+ 0xC9, 0x84, 0xCE, 0xA5, 0xCC, 0x94, 0xC9, 0x84,
+ 0xCE, 0xA9, 0xCC, 0x93, 0xC9, 0x84, 0xCE, 0xA9,
+ 0xCC, 0x94, 0xC9, 0x84, 0xCE, 0xB1, 0xCC, 0x80,
+ 0xC9, 0x84, 0xCE, 0xB1, 0xCC, 0x81, 0xC9, 0x84,
+ // Bytes 4840 - 487f
+ 0xCE, 0xB1, 0xCC, 0x93, 0xC9, 0x84, 0xCE, 0xB1,
+ 0xCC, 0x94, 0xC9, 0x84, 0xCE, 0xB1, 0xCD, 0x82,
+ 0xC9, 0x84, 0xCE, 0xB5, 0xCC, 0x93, 0xC9, 0x84,
+ 0xCE, 0xB5, 0xCC, 0x94, 0xC9, 0x84, 0xCE, 0xB7,
+ 0xCC, 0x80, 0xC9, 0x84, 0xCE, 0xB7, 0xCC, 0x81,
+ 0xC9, 0x84, 0xCE, 0xB7, 0xCC, 0x93, 0xC9, 0x84,
+ 0xCE, 0xB7, 0xCC, 0x94, 0xC9, 0x84, 0xCE, 0xB7,
+ 0xCD, 0x82, 0xC9, 0x84, 0xCE, 0xB9, 0xCC, 0x88,
+ // Bytes 4880 - 48bf
+ 0xC9, 0x84, 0xCE, 0xB9, 0xCC, 0x93, 0xC9, 0x84,
+ 0xCE, 0xB9, 0xCC, 0x94, 0xC9, 0x84, 0xCE, 0xBF,
+ 0xCC, 0x93, 0xC9, 0x84, 0xCE, 0xBF, 0xCC, 0x94,
+ 0xC9, 0x84, 0xCF, 0x85, 0xCC, 0x88, 0xC9, 0x84,
+ 0xCF, 0x85, 0xCC, 0x93, 0xC9, 0x84, 0xCF, 0x85,
+ 0xCC, 0x94, 0xC9, 0x84, 0xCF, 0x89, 0xCC, 0x80,
+ 0xC9, 0x84, 0xCF, 0x89, 0xCC, 0x81, 0xC9, 0x84,
+ 0xCF, 0x89, 0xCC, 0x93, 0xC9, 0x84, 0xCF, 0x89,
+ // Bytes 48c0 - 48ff
+ 0xCC, 0x94, 0xC9, 0x84, 0xCF, 0x89, 0xCD, 0x82,
+ 0xC9, 0x86, 0xCE, 0x91, 0xCC, 0x93, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCE, 0x91, 0xCC, 0x93, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCE, 0x91, 0xCC, 0x93, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCE, 0x91, 0xCC, 0x94, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCE, 0x91, 0xCC, 0x94, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCE, 0x91, 0xCC, 0x94, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCE, 0x97, 0xCC, 0x93, 0xCC, 0x80,
+ // Bytes 4900 - 493f
+ 0xCA, 0x86, 0xCE, 0x97, 0xCC, 0x93, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCE, 0x97, 0xCC, 0x93, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCE, 0x97, 0xCC, 0x94, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCE, 0x97, 0xCC, 0x94, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCE, 0x97, 0xCC, 0x94, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCE, 0xA9, 0xCC, 0x93, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCE, 0xA9, 0xCC, 0x93, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCE, 0xA9, 0xCC, 0x93, 0xCD, 0x82,
+ // Bytes 4940 - 497f
+ 0xCA, 0x86, 0xCE, 0xA9, 0xCC, 0x94, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCE, 0xA9, 0xCC, 0x94, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCE, 0xA9, 0xCC, 0x94, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCE, 0xB1, 0xCC, 0x93, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCE, 0xB1, 0xCC, 0x93, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCE, 0xB1, 0xCC, 0x93, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCE, 0xB1, 0xCC, 0x94, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCE, 0xB1, 0xCC, 0x94, 0xCC, 0x81,
+ // Bytes 4980 - 49bf
+ 0xCA, 0x86, 0xCE, 0xB1, 0xCC, 0x94, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCE, 0xB7, 0xCC, 0x93, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCE, 0xB7, 0xCC, 0x93, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCE, 0xB7, 0xCC, 0x93, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCE, 0xB7, 0xCC, 0x94, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCE, 0xB7, 0xCC, 0x94, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCE, 0xB7, 0xCC, 0x94, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCF, 0x89, 0xCC, 0x93, 0xCC, 0x80,
+ // Bytes 49c0 - 49ff
+ 0xCA, 0x86, 0xCF, 0x89, 0xCC, 0x93, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCF, 0x89, 0xCC, 0x93, 0xCD, 0x82,
+ 0xCA, 0x86, 0xCF, 0x89, 0xCC, 0x94, 0xCC, 0x80,
+ 0xCA, 0x86, 0xCF, 0x89, 0xCC, 0x94, 0xCC, 0x81,
+ 0xCA, 0x86, 0xCF, 0x89, 0xCC, 0x94, 0xCD, 0x82,
+ 0xCA, 0x42, 0xCC, 0x80, 0xC9, 0x32, 0x42, 0xCC,
+ 0x81, 0xC9, 0x32, 0x42, 0xCC, 0x93, 0xC9, 0x32,
+ 0x44, 0xCC, 0x88, 0xCC, 0x81, 0xCA, 0x32, 0x43,
+ // Bytes 4a00 - 4a3f
+ 0xE3, 0x82, 0x99, 0x0D, 0x03, 0x43, 0xE3, 0x82,
+ 0x9A, 0x0D, 0x03, 0x46, 0xE0, 0xBD, 0xB1, 0xE0,
+ 0xBD, 0xB2, 0x9E, 0x26, 0x46, 0xE0, 0xBD, 0xB1,
+ 0xE0, 0xBD, 0xB4, 0xA2, 0x26, 0x46, 0xE0, 0xBD,
+ 0xB1, 0xE0, 0xBE, 0x80, 0x9E, 0x26, 0x00, 0x01,
+}
+
+// lookup returns the trie value for the first UTF-8 encoding in s and
+// the width in bytes of this encoding. The size will be 0 if s does not
+// hold enough bytes to complete the encoding. len(s) must be greater than 0.
+func (t *nfcTrie) lookup(s []byte) (v uint16, sz int) {
+ c0 := s[0]
+ switch {
+ case c0 < 0x80: // is ASCII
+ return nfcValues[c0], 1
+ case c0 < 0xC0:
+ return 0, 1 // Illegal UTF-8: not a starter, not ASCII.
+ case c0 < 0xE0: // 2-byte UTF-8
+ if len(s) < 2 {
+ return 0, 0
+ }
+ i := nfcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c1), 2
+ case c0 < 0xF0: // 3-byte UTF-8
+ if len(s) < 3 {
+ return 0, 0
+ }
+ i := nfcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ o := uint32(i)<<6 + uint32(c1)
+ i = nfcIndex[o]
+ c2 := s[2]
+ if c2 < 0x80 || 0xC0 <= c2 {
+ return 0, 2 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c2), 3
+ case c0 < 0xF8: // 4-byte UTF-8
+ if len(s) < 4 {
+ return 0, 0
+ }
+ i := nfcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ o := uint32(i)<<6 + uint32(c1)
+ i = nfcIndex[o]
+ c2 := s[2]
+ if c2 < 0x80 || 0xC0 <= c2 {
+ return 0, 2 // Illegal UTF-8: not a continuation byte.
+ }
+ o = uint32(i)<<6 + uint32(c2)
+ i = nfcIndex[o]
+ c3 := s[3]
+ if c3 < 0x80 || 0xC0 <= c3 {
+ return 0, 3 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c3), 4
+ }
+ // Illegal rune
+ return 0, 1
+}
+
+// lookupUnsafe returns the trie value for the first UTF-8 encoding in s.
+// s must start with a full and valid UTF-8 encoded rune.
+func (t *nfcTrie) lookupUnsafe(s []byte) uint16 {
+ c0 := s[0]
+ if c0 < 0x80 { // is ASCII
+ return nfcValues[c0]
+ }
+ i := nfcIndex[c0]
+ if c0 < 0xE0 { // 2-byte UTF-8
+ return t.lookupValue(uint32(i), s[1])
+ }
+ i = nfcIndex[uint32(i)<<6+uint32(s[1])]
+ if c0 < 0xF0 { // 3-byte UTF-8
+ return t.lookupValue(uint32(i), s[2])
+ }
+ i = nfcIndex[uint32(i)<<6+uint32(s[2])]
+ if c0 < 0xF8 { // 4-byte UTF-8
+ return t.lookupValue(uint32(i), s[3])
+ }
+ return 0
+}
+
+// lookupString returns the trie value for the first UTF-8 encoding in s and
+// the width in bytes of this encoding. The size will be 0 if s does not
+// hold enough bytes to complete the encoding. len(s) must be greater than 0.
+func (t *nfcTrie) lookupString(s string) (v uint16, sz int) {
+ c0 := s[0]
+ switch {
+ case c0 < 0x80: // is ASCII
+ return nfcValues[c0], 1
+ case c0 < 0xC0:
+ return 0, 1 // Illegal UTF-8: not a starter, not ASCII.
+ case c0 < 0xE0: // 2-byte UTF-8
+ if len(s) < 2 {
+ return 0, 0
+ }
+ i := nfcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c1), 2
+ case c0 < 0xF0: // 3-byte UTF-8
+ if len(s) < 3 {
+ return 0, 0
+ }
+ i := nfcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ o := uint32(i)<<6 + uint32(c1)
+ i = nfcIndex[o]
+ c2 := s[2]
+ if c2 < 0x80 || 0xC0 <= c2 {
+ return 0, 2 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c2), 3
+ case c0 < 0xF8: // 4-byte UTF-8
+ if len(s) < 4 {
+ return 0, 0
+ }
+ i := nfcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ o := uint32(i)<<6 + uint32(c1)
+ i = nfcIndex[o]
+ c2 := s[2]
+ if c2 < 0x80 || 0xC0 <= c2 {
+ return 0, 2 // Illegal UTF-8: not a continuation byte.
+ }
+ o = uint32(i)<<6 + uint32(c2)
+ i = nfcIndex[o]
+ c3 := s[3]
+ if c3 < 0x80 || 0xC0 <= c3 {
+ return 0, 3 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c3), 4
+ }
+ // Illegal rune
+ return 0, 1
+}
+
+// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s.
+// s must start with a full and valid UTF-8 encoded rune.
+func (t *nfcTrie) lookupStringUnsafe(s string) uint16 {
+ c0 := s[0]
+ if c0 < 0x80 { // is ASCII
+ return nfcValues[c0]
+ }
+ i := nfcIndex[c0]
+ if c0 < 0xE0 { // 2-byte UTF-8
+ return t.lookupValue(uint32(i), s[1])
+ }
+ i = nfcIndex[uint32(i)<<6+uint32(s[1])]
+ if c0 < 0xF0 { // 3-byte UTF-8
+ return t.lookupValue(uint32(i), s[2])
+ }
+ i = nfcIndex[uint32(i)<<6+uint32(s[2])]
+ if c0 < 0xF8 { // 4-byte UTF-8
+ return t.lookupValue(uint32(i), s[3])
+ }
+ return 0
+}
+
+// nfcTrie. Total size: 10176 bytes (9.94 KiB). Checksum: d7c74933ce923a3f.
+type nfcTrie struct{}
+
+func newNfcTrie(i int) *nfcTrie {
+ return &nfcTrie{}
+}
+
+// lookupValue determines the type of block n and looks up the value for b.
+func (t *nfcTrie) lookupValue(n uint32, b byte) uint16 {
+ switch {
+ case n < 44:
+ return uint16(nfcValues[n<<6+uint32(b)])
+ default:
+ n -= 44
+ return uint16(nfcSparse.lookup(n, b))
+ }
+}
+
+// nfcValues: 46 blocks, 2944 entries, 5888 bytes
+// The third block is the zero block.
+var nfcValues = [2944]uint16{
+ // Block 0x0, offset 0x0
+ 0x3c: 0xa000, 0x3d: 0xa000, 0x3e: 0xa000,
+ // Block 0x1, offset 0x40
+ 0x41: 0xa000, 0x42: 0xa000, 0x43: 0xa000, 0x44: 0xa000, 0x45: 0xa000,
+ 0x46: 0xa000, 0x47: 0xa000, 0x48: 0xa000, 0x49: 0xa000, 0x4a: 0xa000, 0x4b: 0xa000,
+ 0x4c: 0xa000, 0x4d: 0xa000, 0x4e: 0xa000, 0x4f: 0xa000, 0x50: 0xa000,
+ 0x52: 0xa000, 0x53: 0xa000, 0x54: 0xa000, 0x55: 0xa000, 0x56: 0xa000, 0x57: 0xa000,
+ 0x58: 0xa000, 0x59: 0xa000, 0x5a: 0xa000,
+ 0x61: 0xa000, 0x62: 0xa000, 0x63: 0xa000,
+ 0x64: 0xa000, 0x65: 0xa000, 0x66: 0xa000, 0x67: 0xa000, 0x68: 0xa000, 0x69: 0xa000,
+ 0x6a: 0xa000, 0x6b: 0xa000, 0x6c: 0xa000, 0x6d: 0xa000, 0x6e: 0xa000, 0x6f: 0xa000,
+ 0x70: 0xa000, 0x72: 0xa000, 0x73: 0xa000, 0x74: 0xa000, 0x75: 0xa000,
+ 0x76: 0xa000, 0x77: 0xa000, 0x78: 0xa000, 0x79: 0xa000, 0x7a: 0xa000,
+ // Block 0x2, offset 0x80
+ // Block 0x3, offset 0xc0
+ 0xc0: 0x2faa, 0xc1: 0x2faf, 0xc2: 0x46c3, 0xc3: 0x2fb4, 0xc4: 0x46d2, 0xc5: 0x46d7,
+ 0xc6: 0xa000, 0xc7: 0x46e1, 0xc8: 0x301d, 0xc9: 0x3022, 0xca: 0x46e6, 0xcb: 0x3036,
+ 0xcc: 0x30a9, 0xcd: 0x30ae, 0xce: 0x30b3, 0xcf: 0x46fa, 0xd1: 0x313f,
+ 0xd2: 0x3162, 0xd3: 0x3167, 0xd4: 0x4704, 0xd5: 0x4709, 0xd6: 0x4718,
+ 0xd8: 0xa000, 0xd9: 0x31ee, 0xda: 0x31f3, 0xdb: 0x31f8, 0xdc: 0x474a, 0xdd: 0x3270,
+ 0xe0: 0x32b6, 0xe1: 0x32bb, 0xe2: 0x4754, 0xe3: 0x32c0,
+ 0xe4: 0x4763, 0xe5: 0x4768, 0xe6: 0xa000, 0xe7: 0x4772, 0xe8: 0x3329, 0xe9: 0x332e,
+ 0xea: 0x4777, 0xeb: 0x3342, 0xec: 0x33ba, 0xed: 0x33bf, 0xee: 0x33c4, 0xef: 0x478b,
+ 0xf1: 0x3450, 0xf2: 0x3473, 0xf3: 0x3478, 0xf4: 0x4795, 0xf5: 0x479a,
+ 0xf6: 0x47a9, 0xf8: 0xa000, 0xf9: 0x3504, 0xfa: 0x3509, 0xfb: 0x350e,
+ 0xfc: 0x47db, 0xfd: 0x358b, 0xff: 0x35a4,
+ // Block 0x4, offset 0x100
+ 0x100: 0x2fb9, 0x101: 0x32c5, 0x102: 0x46c8, 0x103: 0x4759, 0x104: 0x2fd7, 0x105: 0x32e3,
+ 0x106: 0x2feb, 0x107: 0x32f7, 0x108: 0x2ff0, 0x109: 0x32fc, 0x10a: 0x2ff5, 0x10b: 0x3301,
+ 0x10c: 0x2ffa, 0x10d: 0x3306, 0x10e: 0x3004, 0x10f: 0x3310,
+ 0x112: 0x46eb, 0x113: 0x477c, 0x114: 0x302c, 0x115: 0x3338, 0x116: 0x3031, 0x117: 0x333d,
+ 0x118: 0x304f, 0x119: 0x335b, 0x11a: 0x3040, 0x11b: 0x334c, 0x11c: 0x3068, 0x11d: 0x3374,
+ 0x11e: 0x3072, 0x11f: 0x337e, 0x120: 0x3077, 0x121: 0x3383, 0x122: 0x3081, 0x123: 0x338d,
+ 0x124: 0x3086, 0x125: 0x3392, 0x128: 0x30b8, 0x129: 0x33c9,
+ 0x12a: 0x30bd, 0x12b: 0x33ce, 0x12c: 0x30c2, 0x12d: 0x33d3, 0x12e: 0x30e5, 0x12f: 0x33f1,
+ 0x130: 0x30c7, 0x134: 0x30ef, 0x135: 0x33fb,
+ 0x136: 0x3103, 0x137: 0x3414, 0x139: 0x310d, 0x13a: 0x341e, 0x13b: 0x3117,
+ 0x13c: 0x3428, 0x13d: 0x3112, 0x13e: 0x3423,
+ // Block 0x5, offset 0x140
+ 0x143: 0x313a, 0x144: 0x344b, 0x145: 0x3153,
+ 0x146: 0x3464, 0x147: 0x3149, 0x148: 0x345a,
+ 0x14c: 0x470e, 0x14d: 0x479f, 0x14e: 0x316c, 0x14f: 0x347d, 0x150: 0x3176, 0x151: 0x3487,
+ 0x154: 0x3194, 0x155: 0x34a5, 0x156: 0x31ad, 0x157: 0x34be,
+ 0x158: 0x319e, 0x159: 0x34af, 0x15a: 0x4731, 0x15b: 0x47c2, 0x15c: 0x31b7, 0x15d: 0x34c8,
+ 0x15e: 0x31c6, 0x15f: 0x34d7, 0x160: 0x4736, 0x161: 0x47c7, 0x162: 0x31df, 0x163: 0x34f5,
+ 0x164: 0x31d0, 0x165: 0x34e6, 0x168: 0x4740, 0x169: 0x47d1,
+ 0x16a: 0x4745, 0x16b: 0x47d6, 0x16c: 0x31fd, 0x16d: 0x3513, 0x16e: 0x3207, 0x16f: 0x351d,
+ 0x170: 0x320c, 0x171: 0x3522, 0x172: 0x322a, 0x173: 0x3540, 0x174: 0x324d, 0x175: 0x3563,
+ 0x176: 0x3275, 0x177: 0x3590, 0x178: 0x3289, 0x179: 0x3298, 0x17a: 0x35b8, 0x17b: 0x32a2,
+ 0x17c: 0x35c2, 0x17d: 0x32a7, 0x17e: 0x35c7, 0x17f: 0xa000,
+ // Block 0x6, offset 0x180
+ 0x184: 0x8100, 0x185: 0x8100,
+ 0x186: 0x8100,
+ 0x18d: 0x2fc3, 0x18e: 0x32cf, 0x18f: 0x30d1, 0x190: 0x33dd, 0x191: 0x317b,
+ 0x192: 0x348c, 0x193: 0x3211, 0x194: 0x3527, 0x195: 0x3a0a, 0x196: 0x3b99, 0x197: 0x3a03,
+ 0x198: 0x3b92, 0x199: 0x3a11, 0x19a: 0x3ba0, 0x19b: 0x39fc, 0x19c: 0x3b8b,
+ 0x19e: 0x38eb, 0x19f: 0x3a7a, 0x1a0: 0x38e4, 0x1a1: 0x3a73, 0x1a2: 0x35ee, 0x1a3: 0x3600,
+ 0x1a6: 0x307c, 0x1a7: 0x3388, 0x1a8: 0x30f9, 0x1a9: 0x340a,
+ 0x1aa: 0x4727, 0x1ab: 0x47b8, 0x1ac: 0x39cb, 0x1ad: 0x3b5a, 0x1ae: 0x3612, 0x1af: 0x3618,
+ 0x1b0: 0x3400, 0x1b4: 0x3063, 0x1b5: 0x336f,
+ 0x1b8: 0x3135, 0x1b9: 0x3446, 0x1ba: 0x38f2, 0x1bb: 0x3a81,
+ 0x1bc: 0x35e8, 0x1bd: 0x35fa, 0x1be: 0x35f4, 0x1bf: 0x3606,
+ // Block 0x7, offset 0x1c0
+ 0x1c0: 0x2fc8, 0x1c1: 0x32d4, 0x1c2: 0x2fcd, 0x1c3: 0x32d9, 0x1c4: 0x3045, 0x1c5: 0x3351,
+ 0x1c6: 0x304a, 0x1c7: 0x3356, 0x1c8: 0x30d6, 0x1c9: 0x33e2, 0x1ca: 0x30db, 0x1cb: 0x33e7,
+ 0x1cc: 0x3180, 0x1cd: 0x3491, 0x1ce: 0x3185, 0x1cf: 0x3496, 0x1d0: 0x31a3, 0x1d1: 0x34b4,
+ 0x1d2: 0x31a8, 0x1d3: 0x34b9, 0x1d4: 0x3216, 0x1d5: 0x352c, 0x1d6: 0x321b, 0x1d7: 0x3531,
+ 0x1d8: 0x31c1, 0x1d9: 0x34d2, 0x1da: 0x31da, 0x1db: 0x34f0,
+ 0x1de: 0x3095, 0x1df: 0x33a1,
+ 0x1e6: 0x46cd, 0x1e7: 0x475e, 0x1e8: 0x46f5, 0x1e9: 0x4786,
+ 0x1ea: 0x399a, 0x1eb: 0x3b29, 0x1ec: 0x3977, 0x1ed: 0x3b06, 0x1ee: 0x4713, 0x1ef: 0x47a4,
+ 0x1f0: 0x3993, 0x1f1: 0x3b22, 0x1f2: 0x327f, 0x1f3: 0x359a,
+ // Block 0x8, offset 0x200
+ 0x200: 0x9932, 0x201: 0x9932, 0x202: 0x9932, 0x203: 0x9932, 0x204: 0x9932, 0x205: 0x8132,
+ 0x206: 0x9932, 0x207: 0x9932, 0x208: 0x9932, 0x209: 0x9932, 0x20a: 0x9932, 0x20b: 0x9932,
+ 0x20c: 0x9932, 0x20d: 0x8132, 0x20e: 0x8132, 0x20f: 0x9932, 0x210: 0x8132, 0x211: 0x9932,
+ 0x212: 0x8132, 0x213: 0x9932, 0x214: 0x9932, 0x215: 0x8133, 0x216: 0x812d, 0x217: 0x812d,
+ 0x218: 0x812d, 0x219: 0x812d, 0x21a: 0x8133, 0x21b: 0x992b, 0x21c: 0x812d, 0x21d: 0x812d,
+ 0x21e: 0x812d, 0x21f: 0x812d, 0x220: 0x812d, 0x221: 0x8129, 0x222: 0x8129, 0x223: 0x992d,
+ 0x224: 0x992d, 0x225: 0x992d, 0x226: 0x992d, 0x227: 0x9929, 0x228: 0x9929, 0x229: 0x812d,
+ 0x22a: 0x812d, 0x22b: 0x812d, 0x22c: 0x812d, 0x22d: 0x992d, 0x22e: 0x992d, 0x22f: 0x812d,
+ 0x230: 0x992d, 0x231: 0x992d, 0x232: 0x812d, 0x233: 0x812d, 0x234: 0x8101, 0x235: 0x8101,
+ 0x236: 0x8101, 0x237: 0x8101, 0x238: 0x9901, 0x239: 0x812d, 0x23a: 0x812d, 0x23b: 0x812d,
+ 0x23c: 0x812d, 0x23d: 0x8132, 0x23e: 0x8132, 0x23f: 0x8132,
+ // Block 0x9, offset 0x240
+ 0x240: 0x49e9, 0x241: 0x49ee, 0x242: 0x9932, 0x243: 0x49f3, 0x244: 0x49f8, 0x245: 0x9936,
+ 0x246: 0x8132, 0x247: 0x812d, 0x248: 0x812d, 0x249: 0x812d, 0x24a: 0x8132, 0x24b: 0x8132,
+ 0x24c: 0x8132, 0x24d: 0x812d, 0x24e: 0x812d, 0x250: 0x8132, 0x251: 0x8132,
+ 0x252: 0x8132, 0x253: 0x812d, 0x254: 0x812d, 0x255: 0x812d, 0x256: 0x812d, 0x257: 0x8132,
+ 0x258: 0x8133, 0x259: 0x812d, 0x25a: 0x812d, 0x25b: 0x8132, 0x25c: 0x8134, 0x25d: 0x8135,
+ 0x25e: 0x8135, 0x25f: 0x8134, 0x260: 0x8135, 0x261: 0x8135, 0x262: 0x8134, 0x263: 0x8132,
+ 0x264: 0x8132, 0x265: 0x8132, 0x266: 0x8132, 0x267: 0x8132, 0x268: 0x8132, 0x269: 0x8132,
+ 0x26a: 0x8132, 0x26b: 0x8132, 0x26c: 0x8132, 0x26d: 0x8132, 0x26e: 0x8132, 0x26f: 0x8132,
+ 0x274: 0x0170,
+ 0x27a: 0x8100,
+ 0x27e: 0x0037,
+ // Block 0xa, offset 0x280
+ 0x284: 0x8100, 0x285: 0x35dc,
+ 0x286: 0x3624, 0x287: 0x00ce, 0x288: 0x3642, 0x289: 0x364e, 0x28a: 0x3660,
+ 0x28c: 0x367e, 0x28e: 0x3690, 0x28f: 0x36ae, 0x290: 0x3e43, 0x291: 0xa000,
+ 0x295: 0xa000, 0x297: 0xa000,
+ 0x299: 0xa000,
+ 0x29f: 0xa000, 0x2a1: 0xa000,
+ 0x2a5: 0xa000, 0x2a9: 0xa000,
+ 0x2aa: 0x3672, 0x2ab: 0x36a2, 0x2ac: 0x4839, 0x2ad: 0x36d2, 0x2ae: 0x4863, 0x2af: 0x36e4,
+ 0x2b0: 0x3eab, 0x2b1: 0xa000, 0x2b5: 0xa000,
+ 0x2b7: 0xa000, 0x2b9: 0xa000,
+ 0x2bf: 0xa000,
+ // Block 0xb, offset 0x2c0
+ 0x2c0: 0x375c, 0x2c1: 0x3768, 0x2c3: 0x3756,
+ 0x2c6: 0xa000, 0x2c7: 0x3744,
+ 0x2cc: 0x3798, 0x2cd: 0x3780, 0x2ce: 0x37aa, 0x2d0: 0xa000,
+ 0x2d3: 0xa000, 0x2d5: 0xa000, 0x2d6: 0xa000, 0x2d7: 0xa000,
+ 0x2d8: 0xa000, 0x2d9: 0x378c, 0x2da: 0xa000,
+ 0x2de: 0xa000, 0x2e3: 0xa000,
+ 0x2e7: 0xa000,
+ 0x2eb: 0xa000, 0x2ed: 0xa000,
+ 0x2f0: 0xa000, 0x2f3: 0xa000, 0x2f5: 0xa000,
+ 0x2f6: 0xa000, 0x2f7: 0xa000, 0x2f8: 0xa000, 0x2f9: 0x3810, 0x2fa: 0xa000,
+ 0x2fe: 0xa000,
+ // Block 0xc, offset 0x300
+ 0x301: 0x376e, 0x302: 0x37f2,
+ 0x310: 0x374a, 0x311: 0x37ce,
+ 0x312: 0x3750, 0x313: 0x37d4, 0x316: 0x3762, 0x317: 0x37e6,
+ 0x318: 0xa000, 0x319: 0xa000, 0x31a: 0x3864, 0x31b: 0x386a, 0x31c: 0x3774, 0x31d: 0x37f8,
+ 0x31e: 0x377a, 0x31f: 0x37fe, 0x322: 0x3786, 0x323: 0x380a,
+ 0x324: 0x3792, 0x325: 0x3816, 0x326: 0x379e, 0x327: 0x3822, 0x328: 0xa000, 0x329: 0xa000,
+ 0x32a: 0x3870, 0x32b: 0x3876, 0x32c: 0x37c8, 0x32d: 0x384c, 0x32e: 0x37a4, 0x32f: 0x3828,
+ 0x330: 0x37b0, 0x331: 0x3834, 0x332: 0x37b6, 0x333: 0x383a, 0x334: 0x37bc, 0x335: 0x3840,
+ 0x338: 0x37c2, 0x339: 0x3846,
+ // Block 0xd, offset 0x340
+ 0x351: 0x812d,
+ 0x352: 0x8132, 0x353: 0x8132, 0x354: 0x8132, 0x355: 0x8132, 0x356: 0x812d, 0x357: 0x8132,
+ 0x358: 0x8132, 0x359: 0x8132, 0x35a: 0x812e, 0x35b: 0x812d, 0x35c: 0x8132, 0x35d: 0x8132,
+ 0x35e: 0x8132, 0x35f: 0x8132, 0x360: 0x8132, 0x361: 0x8132, 0x362: 0x812d, 0x363: 0x812d,
+ 0x364: 0x812d, 0x365: 0x812d, 0x366: 0x812d, 0x367: 0x812d, 0x368: 0x8132, 0x369: 0x8132,
+ 0x36a: 0x812d, 0x36b: 0x8132, 0x36c: 0x8132, 0x36d: 0x812e, 0x36e: 0x8131, 0x36f: 0x8132,
+ 0x370: 0x8105, 0x371: 0x8106, 0x372: 0x8107, 0x373: 0x8108, 0x374: 0x8109, 0x375: 0x810a,
+ 0x376: 0x810b, 0x377: 0x810c, 0x378: 0x810d, 0x379: 0x810e, 0x37a: 0x810e, 0x37b: 0x810f,
+ 0x37c: 0x8110, 0x37d: 0x8111, 0x37f: 0x8112,
+ // Block 0xe, offset 0x380
+ 0x388: 0xa000, 0x38a: 0xa000, 0x38b: 0x8116,
+ 0x38c: 0x8117, 0x38d: 0x8118, 0x38e: 0x8119, 0x38f: 0x811a, 0x390: 0x811b, 0x391: 0x811c,
+ 0x392: 0x811d, 0x393: 0x9932, 0x394: 0x9932, 0x395: 0x992d, 0x396: 0x812d, 0x397: 0x8132,
+ 0x398: 0x8132, 0x399: 0x8132, 0x39a: 0x8132, 0x39b: 0x8132, 0x39c: 0x812d, 0x39d: 0x8132,
+ 0x39e: 0x8132, 0x39f: 0x812d,
+ 0x3b0: 0x811e,
+ // Block 0xf, offset 0x3c0
+ 0x3c5: 0xa000,
+ 0x3c6: 0x1958, 0x3c7: 0xa000, 0x3c8: 0x195f, 0x3c9: 0xa000, 0x3ca: 0x1966, 0x3cb: 0xa000,
+ 0x3cc: 0x196d, 0x3cd: 0xa000, 0x3ce: 0x1974, 0x3d1: 0xa000,
+ 0x3d2: 0x197b,
+ 0x3f4: 0x8102, 0x3f5: 0x9900,
+ 0x3fa: 0xa000, 0x3fb: 0x1982,
+ 0x3fc: 0xa000, 0x3fd: 0x1989, 0x3fe: 0xa000, 0x3ff: 0xa000,
+ // Block 0x10, offset 0x400
+ 0x400: 0x2fd2, 0x401: 0x32de, 0x402: 0x2fdc, 0x403: 0x32e8, 0x404: 0x2fe1, 0x405: 0x32ed,
+ 0x406: 0x2fe6, 0x407: 0x32f2, 0x408: 0x3907, 0x409: 0x3a96, 0x40a: 0x2fff, 0x40b: 0x330b,
+ 0x40c: 0x3009, 0x40d: 0x3315, 0x40e: 0x3018, 0x40f: 0x3324, 0x410: 0x300e, 0x411: 0x331a,
+ 0x412: 0x3013, 0x413: 0x331f, 0x414: 0x392a, 0x415: 0x3ab9, 0x416: 0x3931, 0x417: 0x3ac0,
+ 0x418: 0x3054, 0x419: 0x3360, 0x41a: 0x3059, 0x41b: 0x3365, 0x41c: 0x393f, 0x41d: 0x3ace,
+ 0x41e: 0x305e, 0x41f: 0x336a, 0x420: 0x306d, 0x421: 0x3379, 0x422: 0x308b, 0x423: 0x3397,
+ 0x424: 0x309a, 0x425: 0x33a6, 0x426: 0x3090, 0x427: 0x339c, 0x428: 0x309f, 0x429: 0x33ab,
+ 0x42a: 0x30a4, 0x42b: 0x33b0, 0x42c: 0x30ea, 0x42d: 0x33f6, 0x42e: 0x3946, 0x42f: 0x3ad5,
+ 0x430: 0x30f4, 0x431: 0x3405, 0x432: 0x30fe, 0x433: 0x340f, 0x434: 0x3108, 0x435: 0x3419,
+ 0x436: 0x46ff, 0x437: 0x4790, 0x438: 0x394d, 0x439: 0x3adc, 0x43a: 0x3121, 0x43b: 0x3432,
+ 0x43c: 0x311c, 0x43d: 0x342d, 0x43e: 0x3126, 0x43f: 0x3437,
+ // Block 0x11, offset 0x440
+ 0x440: 0x312b, 0x441: 0x343c, 0x442: 0x3130, 0x443: 0x3441, 0x444: 0x3144, 0x445: 0x3455,
+ 0x446: 0x314e, 0x447: 0x345f, 0x448: 0x315d, 0x449: 0x346e, 0x44a: 0x3158, 0x44b: 0x3469,
+ 0x44c: 0x3970, 0x44d: 0x3aff, 0x44e: 0x397e, 0x44f: 0x3b0d, 0x450: 0x3985, 0x451: 0x3b14,
+ 0x452: 0x398c, 0x453: 0x3b1b, 0x454: 0x318a, 0x455: 0x349b, 0x456: 0x318f, 0x457: 0x34a0,
+ 0x458: 0x3199, 0x459: 0x34aa, 0x45a: 0x472c, 0x45b: 0x47bd, 0x45c: 0x39d2, 0x45d: 0x3b61,
+ 0x45e: 0x31b2, 0x45f: 0x34c3, 0x460: 0x31bc, 0x461: 0x34cd, 0x462: 0x473b, 0x463: 0x47cc,
+ 0x464: 0x39d9, 0x465: 0x3b68, 0x466: 0x39e0, 0x467: 0x3b6f, 0x468: 0x39e7, 0x469: 0x3b76,
+ 0x46a: 0x31cb, 0x46b: 0x34dc, 0x46c: 0x31d5, 0x46d: 0x34eb, 0x46e: 0x31e9, 0x46f: 0x34ff,
+ 0x470: 0x31e4, 0x471: 0x34fa, 0x472: 0x3225, 0x473: 0x353b, 0x474: 0x3234, 0x475: 0x354a,
+ 0x476: 0x322f, 0x477: 0x3545, 0x478: 0x39ee, 0x479: 0x3b7d, 0x47a: 0x39f5, 0x47b: 0x3b84,
+ 0x47c: 0x3239, 0x47d: 0x354f, 0x47e: 0x323e, 0x47f: 0x3554,
+ // Block 0x12, offset 0x480
+ 0x480: 0x3243, 0x481: 0x3559, 0x482: 0x3248, 0x483: 0x355e, 0x484: 0x3257, 0x485: 0x356d,
+ 0x486: 0x3252, 0x487: 0x3568, 0x488: 0x325c, 0x489: 0x3577, 0x48a: 0x3261, 0x48b: 0x357c,
+ 0x48c: 0x3266, 0x48d: 0x3581, 0x48e: 0x3284, 0x48f: 0x359f, 0x490: 0x329d, 0x491: 0x35bd,
+ 0x492: 0x32ac, 0x493: 0x35cc, 0x494: 0x32b1, 0x495: 0x35d1, 0x496: 0x33b5, 0x497: 0x34e1,
+ 0x498: 0x3572, 0x499: 0x35ae, 0x49b: 0x360c,
+ 0x4a0: 0x46dc, 0x4a1: 0x476d, 0x4a2: 0x2fbe, 0x4a3: 0x32ca,
+ 0x4a4: 0x38b3, 0x4a5: 0x3a42, 0x4a6: 0x38ac, 0x4a7: 0x3a3b, 0x4a8: 0x38c1, 0x4a9: 0x3a50,
+ 0x4aa: 0x38ba, 0x4ab: 0x3a49, 0x4ac: 0x38f9, 0x4ad: 0x3a88, 0x4ae: 0x38cf, 0x4af: 0x3a5e,
+ 0x4b0: 0x38c8, 0x4b1: 0x3a57, 0x4b2: 0x38dd, 0x4b3: 0x3a6c, 0x4b4: 0x38d6, 0x4b5: 0x3a65,
+ 0x4b6: 0x3900, 0x4b7: 0x3a8f, 0x4b8: 0x46f0, 0x4b9: 0x4781, 0x4ba: 0x303b, 0x4bb: 0x3347,
+ 0x4bc: 0x3027, 0x4bd: 0x3333, 0x4be: 0x3915, 0x4bf: 0x3aa4,
+ // Block 0x13, offset 0x4c0
+ 0x4c0: 0x390e, 0x4c1: 0x3a9d, 0x4c2: 0x3923, 0x4c3: 0x3ab2, 0x4c4: 0x391c, 0x4c5: 0x3aab,
+ 0x4c6: 0x3938, 0x4c7: 0x3ac7, 0x4c8: 0x30cc, 0x4c9: 0x33d8, 0x4ca: 0x30e0, 0x4cb: 0x33ec,
+ 0x4cc: 0x4722, 0x4cd: 0x47b3, 0x4ce: 0x3171, 0x4cf: 0x3482, 0x4d0: 0x395b, 0x4d1: 0x3aea,
+ 0x4d2: 0x3954, 0x4d3: 0x3ae3, 0x4d4: 0x3969, 0x4d5: 0x3af8, 0x4d6: 0x3962, 0x4d7: 0x3af1,
+ 0x4d8: 0x39c4, 0x4d9: 0x3b53, 0x4da: 0x39a8, 0x4db: 0x3b37, 0x4dc: 0x39a1, 0x4dd: 0x3b30,
+ 0x4de: 0x39b6, 0x4df: 0x3b45, 0x4e0: 0x39af, 0x4e1: 0x3b3e, 0x4e2: 0x39bd, 0x4e3: 0x3b4c,
+ 0x4e4: 0x3220, 0x4e5: 0x3536, 0x4e6: 0x3202, 0x4e7: 0x3518, 0x4e8: 0x3a1f, 0x4e9: 0x3bae,
+ 0x4ea: 0x3a18, 0x4eb: 0x3ba7, 0x4ec: 0x3a2d, 0x4ed: 0x3bbc, 0x4ee: 0x3a26, 0x4ef: 0x3bb5,
+ 0x4f0: 0x3a34, 0x4f1: 0x3bc3, 0x4f2: 0x326b, 0x4f3: 0x3586, 0x4f4: 0x3293, 0x4f5: 0x35b3,
+ 0x4f6: 0x328e, 0x4f7: 0x35a9, 0x4f8: 0x327a, 0x4f9: 0x3595,
+ // Block 0x14, offset 0x500
+ 0x500: 0x483f, 0x501: 0x4845, 0x502: 0x4959, 0x503: 0x4971, 0x504: 0x4961, 0x505: 0x4979,
+ 0x506: 0x4969, 0x507: 0x4981, 0x508: 0x47e5, 0x509: 0x47eb, 0x50a: 0x48c9, 0x50b: 0x48e1,
+ 0x50c: 0x48d1, 0x50d: 0x48e9, 0x50e: 0x48d9, 0x50f: 0x48f1, 0x510: 0x4851, 0x511: 0x4857,
+ 0x512: 0x3df3, 0x513: 0x3e03, 0x514: 0x3dfb, 0x515: 0x3e0b,
+ 0x518: 0x47f1, 0x519: 0x47f7, 0x51a: 0x3d23, 0x51b: 0x3d33, 0x51c: 0x3d2b, 0x51d: 0x3d3b,
+ 0x520: 0x4869, 0x521: 0x486f, 0x522: 0x4989, 0x523: 0x49a1,
+ 0x524: 0x4991, 0x525: 0x49a9, 0x526: 0x4999, 0x527: 0x49b1, 0x528: 0x47fd, 0x529: 0x4803,
+ 0x52a: 0x48f9, 0x52b: 0x4911, 0x52c: 0x4901, 0x52d: 0x4919, 0x52e: 0x4909, 0x52f: 0x4921,
+ 0x530: 0x4881, 0x531: 0x4887, 0x532: 0x3e53, 0x533: 0x3e6b, 0x534: 0x3e5b, 0x535: 0x3e73,
+ 0x536: 0x3e63, 0x537: 0x3e7b, 0x538: 0x4809, 0x539: 0x480f, 0x53a: 0x3d53, 0x53b: 0x3d6b,
+ 0x53c: 0x3d5b, 0x53d: 0x3d73, 0x53e: 0x3d63, 0x53f: 0x3d7b,
+ // Block 0x15, offset 0x540
+ 0x540: 0x488d, 0x541: 0x4893, 0x542: 0x3e83, 0x543: 0x3e93, 0x544: 0x3e8b, 0x545: 0x3e9b,
+ 0x548: 0x4815, 0x549: 0x481b, 0x54a: 0x3d83, 0x54b: 0x3d93,
+ 0x54c: 0x3d8b, 0x54d: 0x3d9b, 0x550: 0x489f, 0x551: 0x48a5,
+ 0x552: 0x3ebb, 0x553: 0x3ed3, 0x554: 0x3ec3, 0x555: 0x3edb, 0x556: 0x3ecb, 0x557: 0x3ee3,
+ 0x559: 0x4821, 0x55b: 0x3da3, 0x55d: 0x3dab,
+ 0x55f: 0x3db3, 0x560: 0x48b7, 0x561: 0x48bd, 0x562: 0x49b9, 0x563: 0x49d1,
+ 0x564: 0x49c1, 0x565: 0x49d9, 0x566: 0x49c9, 0x567: 0x49e1, 0x568: 0x4827, 0x569: 0x482d,
+ 0x56a: 0x4929, 0x56b: 0x4941, 0x56c: 0x4931, 0x56d: 0x4949, 0x56e: 0x4939, 0x56f: 0x4951,
+ 0x570: 0x4833, 0x571: 0x4359, 0x572: 0x36cc, 0x573: 0x435f, 0x574: 0x485d, 0x575: 0x4365,
+ 0x576: 0x36de, 0x577: 0x436b, 0x578: 0x36fc, 0x579: 0x4371, 0x57a: 0x3714, 0x57b: 0x4377,
+ 0x57c: 0x48ab, 0x57d: 0x437d,
+ // Block 0x16, offset 0x580
+ 0x580: 0x3ddb, 0x581: 0x3de3, 0x582: 0x41bf, 0x583: 0x41dd, 0x584: 0x41c9, 0x585: 0x41e7,
+ 0x586: 0x41d3, 0x587: 0x41f1, 0x588: 0x3d13, 0x589: 0x3d1b, 0x58a: 0x410b, 0x58b: 0x4129,
+ 0x58c: 0x4115, 0x58d: 0x4133, 0x58e: 0x411f, 0x58f: 0x413d, 0x590: 0x3e23, 0x591: 0x3e2b,
+ 0x592: 0x41fb, 0x593: 0x4219, 0x594: 0x4205, 0x595: 0x4223, 0x596: 0x420f, 0x597: 0x422d,
+ 0x598: 0x3d43, 0x599: 0x3d4b, 0x59a: 0x4147, 0x59b: 0x4165, 0x59c: 0x4151, 0x59d: 0x416f,
+ 0x59e: 0x415b, 0x59f: 0x4179, 0x5a0: 0x3efb, 0x5a1: 0x3f03, 0x5a2: 0x4237, 0x5a3: 0x4255,
+ 0x5a4: 0x4241, 0x5a5: 0x425f, 0x5a6: 0x424b, 0x5a7: 0x4269, 0x5a8: 0x3dbb, 0x5a9: 0x3dc3,
+ 0x5aa: 0x4183, 0x5ab: 0x41a1, 0x5ac: 0x418d, 0x5ad: 0x41ab, 0x5ae: 0x4197, 0x5af: 0x41b5,
+ 0x5b0: 0x36c0, 0x5b1: 0x36ba, 0x5b2: 0x3dcb, 0x5b3: 0x36c6, 0x5b4: 0x3dd3,
+ 0x5b6: 0x484b, 0x5b7: 0x3deb, 0x5b8: 0x3630, 0x5b9: 0x362a, 0x5ba: 0x361e, 0x5bb: 0x4329,
+ 0x5bc: 0x3636, 0x5bd: 0x8100, 0x5be: 0x01d3, 0x5bf: 0xa100,
+ // Block 0x17, offset 0x5c0
+ 0x5c0: 0x8100, 0x5c1: 0x35e2, 0x5c2: 0x3e13, 0x5c3: 0x36d8, 0x5c4: 0x3e1b,
+ 0x5c6: 0x4875, 0x5c7: 0x3e33, 0x5c8: 0x363c, 0x5c9: 0x432f, 0x5ca: 0x3648, 0x5cb: 0x4335,
+ 0x5cc: 0x3654, 0x5cd: 0x3bca, 0x5ce: 0x3bd1, 0x5cf: 0x3bd8, 0x5d0: 0x36f0, 0x5d1: 0x36ea,
+ 0x5d2: 0x3e3b, 0x5d3: 0x451f, 0x5d6: 0x36f6, 0x5d7: 0x3e4b,
+ 0x5d8: 0x366c, 0x5d9: 0x3666, 0x5da: 0x365a, 0x5db: 0x433b, 0x5dd: 0x3bdf,
+ 0x5de: 0x3be6, 0x5df: 0x3bed, 0x5e0: 0x3726, 0x5e1: 0x3720, 0x5e2: 0x3ea3, 0x5e3: 0x4527,
+ 0x5e4: 0x3708, 0x5e5: 0x370e, 0x5e6: 0x372c, 0x5e7: 0x3eb3, 0x5e8: 0x369c, 0x5e9: 0x3696,
+ 0x5ea: 0x368a, 0x5eb: 0x4347, 0x5ec: 0x3684, 0x5ed: 0x35d6, 0x5ee: 0x4323, 0x5ef: 0x0081,
+ 0x5f2: 0x3eeb, 0x5f3: 0x3732, 0x5f4: 0x3ef3,
+ 0x5f6: 0x48c3, 0x5f7: 0x3f0b, 0x5f8: 0x3678, 0x5f9: 0x4341, 0x5fa: 0x36a8, 0x5fb: 0x4353,
+ 0x5fc: 0x36b4, 0x5fd: 0x4291, 0x5fe: 0xa100,
+ // Block 0x18, offset 0x600
+ 0x601: 0x3c41, 0x603: 0xa000, 0x604: 0x3c48, 0x605: 0xa000,
+ 0x607: 0x3c4f, 0x608: 0xa000, 0x609: 0x3c56,
+ 0x60d: 0xa000,
+ 0x620: 0x2fa0, 0x621: 0xa000, 0x622: 0x3c64,
+ 0x624: 0xa000, 0x625: 0xa000,
+ 0x62d: 0x3c5d, 0x62e: 0x2f9b, 0x62f: 0x2fa5,
+ 0x630: 0x3c6b, 0x631: 0x3c72, 0x632: 0xa000, 0x633: 0xa000, 0x634: 0x3c79, 0x635: 0x3c80,
+ 0x636: 0xa000, 0x637: 0xa000, 0x638: 0x3c87, 0x639: 0x3c8e, 0x63a: 0xa000, 0x63b: 0xa000,
+ 0x63c: 0xa000, 0x63d: 0xa000,
+ // Block 0x19, offset 0x640
+ 0x640: 0x3c95, 0x641: 0x3c9c, 0x642: 0xa000, 0x643: 0xa000, 0x644: 0x3cb1, 0x645: 0x3cb8,
+ 0x646: 0xa000, 0x647: 0xa000, 0x648: 0x3cbf, 0x649: 0x3cc6,
+ 0x651: 0xa000,
+ 0x652: 0xa000,
+ 0x662: 0xa000,
+ 0x668: 0xa000, 0x669: 0xa000,
+ 0x66b: 0xa000, 0x66c: 0x3cdb, 0x66d: 0x3ce2, 0x66e: 0x3ce9, 0x66f: 0x3cf0,
+ 0x672: 0xa000, 0x673: 0xa000, 0x674: 0xa000, 0x675: 0xa000,
+ // Block 0x1a, offset 0x680
+ 0x686: 0xa000, 0x68b: 0xa000,
+ 0x68c: 0x3f43, 0x68d: 0xa000, 0x68e: 0x3f4b, 0x68f: 0xa000, 0x690: 0x3f53, 0x691: 0xa000,
+ 0x692: 0x3f5b, 0x693: 0xa000, 0x694: 0x3f63, 0x695: 0xa000, 0x696: 0x3f6b, 0x697: 0xa000,
+ 0x698: 0x3f73, 0x699: 0xa000, 0x69a: 0x3f7b, 0x69b: 0xa000, 0x69c: 0x3f83, 0x69d: 0xa000,
+ 0x69e: 0x3f8b, 0x69f: 0xa000, 0x6a0: 0x3f93, 0x6a1: 0xa000, 0x6a2: 0x3f9b,
+ 0x6a4: 0xa000, 0x6a5: 0x3fa3, 0x6a6: 0xa000, 0x6a7: 0x3fab, 0x6a8: 0xa000, 0x6a9: 0x3fb3,
+ 0x6af: 0xa000,
+ 0x6b0: 0x3fbb, 0x6b1: 0x3fc3, 0x6b2: 0xa000, 0x6b3: 0x3fcb, 0x6b4: 0x3fd3, 0x6b5: 0xa000,
+ 0x6b6: 0x3fdb, 0x6b7: 0x3fe3, 0x6b8: 0xa000, 0x6b9: 0x3feb, 0x6ba: 0x3ff3, 0x6bb: 0xa000,
+ 0x6bc: 0x3ffb, 0x6bd: 0x4003,
+ // Block 0x1b, offset 0x6c0
+ 0x6d4: 0x3f3b,
+ 0x6d9: 0x9903, 0x6da: 0x9903, 0x6db: 0x8100, 0x6dc: 0x8100, 0x6dd: 0xa000,
+ 0x6de: 0x400b,
+ 0x6e6: 0xa000,
+ 0x6eb: 0xa000, 0x6ec: 0x401b, 0x6ed: 0xa000, 0x6ee: 0x4023, 0x6ef: 0xa000,
+ 0x6f0: 0x402b, 0x6f1: 0xa000, 0x6f2: 0x4033, 0x6f3: 0xa000, 0x6f4: 0x403b, 0x6f5: 0xa000,
+ 0x6f6: 0x4043, 0x6f7: 0xa000, 0x6f8: 0x404b, 0x6f9: 0xa000, 0x6fa: 0x4053, 0x6fb: 0xa000,
+ 0x6fc: 0x405b, 0x6fd: 0xa000, 0x6fe: 0x4063, 0x6ff: 0xa000,
+ // Block 0x1c, offset 0x700
+ 0x700: 0x406b, 0x701: 0xa000, 0x702: 0x4073, 0x704: 0xa000, 0x705: 0x407b,
+ 0x706: 0xa000, 0x707: 0x4083, 0x708: 0xa000, 0x709: 0x408b,
+ 0x70f: 0xa000, 0x710: 0x4093, 0x711: 0x409b,
+ 0x712: 0xa000, 0x713: 0x40a3, 0x714: 0x40ab, 0x715: 0xa000, 0x716: 0x40b3, 0x717: 0x40bb,
+ 0x718: 0xa000, 0x719: 0x40c3, 0x71a: 0x40cb, 0x71b: 0xa000, 0x71c: 0x40d3, 0x71d: 0x40db,
+ 0x72f: 0xa000,
+ 0x730: 0xa000, 0x731: 0xa000, 0x732: 0xa000, 0x734: 0x4013,
+ 0x737: 0x40e3, 0x738: 0x40eb, 0x739: 0x40f3, 0x73a: 0x40fb,
+ 0x73d: 0xa000, 0x73e: 0x4103,
+ // Block 0x1d, offset 0x740
+ 0x740: 0x13ef, 0x741: 0x0d73, 0x742: 0x144b, 0x743: 0x1417, 0x744: 0x0ecf, 0x745: 0x0763,
+ 0x746: 0x0957, 0x747: 0x169f, 0x748: 0x169f, 0x749: 0x0a83, 0x74a: 0x14d3, 0x74b: 0x09bb,
+ 0x74c: 0x0a7f, 0x74d: 0x0c67, 0x74e: 0x1047, 0x74f: 0x11d7, 0x750: 0x130f, 0x751: 0x134b,
+ 0x752: 0x137f, 0x753: 0x1493, 0x754: 0x0deb, 0x755: 0x0e77, 0x756: 0x0f23, 0x757: 0x0fbb,
+ 0x758: 0x12d7, 0x759: 0x14bb, 0x75a: 0x15e7, 0x75b: 0x0787, 0x75c: 0x092b, 0x75d: 0x0dff,
+ 0x75e: 0x0f47, 0x75f: 0x130b, 0x760: 0x1637, 0x761: 0x0b2b, 0x762: 0x0eef, 0x763: 0x12fb,
+ 0x764: 0x138f, 0x765: 0x0c9b, 0x766: 0x1233, 0x767: 0x1357, 0x768: 0x0b97, 0x769: 0x0d87,
+ 0x76a: 0x0e8f, 0x76b: 0x0f93, 0x76c: 0x149f, 0x76d: 0x07c7, 0x76e: 0x085f, 0x76f: 0x08cb,
+ 0x770: 0x0d03, 0x771: 0x0df7, 0x772: 0x0f43, 0x773: 0x1067, 0x774: 0x11ef, 0x775: 0x1303,
+ 0x776: 0x131b, 0x777: 0x143f, 0x778: 0x1563, 0x779: 0x1617, 0x77a: 0x1633, 0x77b: 0x10a3,
+ 0x77c: 0x10e3, 0x77d: 0x119b, 0x77e: 0x12bb, 0x77f: 0x14ef,
+ // Block 0x1e, offset 0x780
+ 0x780: 0x163f, 0x781: 0x13c3, 0x782: 0x0a3f, 0x783: 0x0bb3, 0x784: 0x1153, 0x785: 0x1213,
+ 0x786: 0x0f77, 0x787: 0x10ab, 0x788: 0x140f, 0x789: 0x155b, 0x78a: 0x0a3b, 0x78b: 0x0b07,
+ 0x78c: 0x0def, 0x78d: 0x0ea3, 0x78e: 0x0ed7, 0x78f: 0x118b, 0x790: 0x11b3, 0x791: 0x151b,
+ 0x792: 0x08c7, 0x793: 0x121f, 0x794: 0x086b, 0x795: 0x0867, 0x796: 0x110f, 0x797: 0x119f,
+ 0x798: 0x12d3, 0x799: 0x1523, 0x79a: 0x13df, 0x79b: 0x0c9f, 0x79c: 0x0deb, 0x79d: 0x13cf,
+ 0x79e: 0x076f, 0x79f: 0x0adb, 0x7a0: 0x0c0b, 0x7a1: 0x0fa7, 0x7a2: 0x1027, 0x7a3: 0x08eb,
+ 0x7a4: 0x10b3, 0x7a5: 0x07d7, 0x7a6: 0x0bef, 0x7a7: 0x074f, 0x7a8: 0x0e63, 0x7a9: 0x0d1b,
+ 0x7aa: 0x1187, 0x7ab: 0x093f, 0x7ac: 0x0a2b, 0x7ad: 0x1073, 0x7ae: 0x12db, 0x7af: 0x13b3,
+ 0x7b0: 0x0e2f, 0x7b1: 0x146f, 0x7b2: 0x0e5b, 0x7b3: 0x0caf, 0x7b4: 0x1293, 0x7b5: 0x0ccf,
+ 0x7b6: 0x1023, 0x7b7: 0x07a3, 0x7b8: 0x081f, 0x7b9: 0x0863, 0x7ba: 0x0dcb, 0x7bb: 0x1173,
+ 0x7bc: 0x126b, 0x7bd: 0x13bf, 0x7be: 0x14cf, 0x7bf: 0x08d3,
+ // Block 0x1f, offset 0x7c0
+ 0x7c0: 0x0987, 0x7c1: 0x0a8f, 0x7c2: 0x0ba7, 0x7c3: 0x0d37, 0x7c4: 0x0ef3, 0x7c5: 0x10b7,
+ 0x7c6: 0x150b, 0x7c7: 0x15ef, 0x7c8: 0x1643, 0x7c9: 0x165b, 0x7ca: 0x08af, 0x7cb: 0x0d6b,
+ 0x7cc: 0x0e1b, 0x7cd: 0x1463, 0x7ce: 0x0b73, 0x7cf: 0x0c4f, 0x7d0: 0x0c6b, 0x7d1: 0x0cfb,
+ 0x7d2: 0x0ee3, 0x7d3: 0x0f2f, 0x7d4: 0x0fdf, 0x7d5: 0x1103, 0x7d6: 0x11a7, 0x7d7: 0x120b,
+ 0x7d8: 0x1453, 0x7d9: 0x12e3, 0x7da: 0x147b, 0x7db: 0x14f3, 0x7dc: 0x0887, 0x7dd: 0x08b3,
+ 0x7de: 0x099b, 0x7df: 0x0f1f, 0x7e0: 0x136b, 0x7e1: 0x13b3, 0x7e2: 0x0b93, 0x7e3: 0x0c03,
+ 0x7e4: 0x0cc7, 0x7e5: 0x0e27, 0x7e6: 0x114f, 0x7e7: 0x0f9b, 0x7e8: 0x07b3, 0x7e9: 0x09f7,
+ 0x7ea: 0x0adb, 0x7eb: 0x0b3f, 0x7ec: 0x0c0f, 0x7ed: 0x0fb7, 0x7ee: 0x0fd3, 0x7ef: 0x11e3,
+ 0x7f0: 0x1203, 0x7f1: 0x14d7, 0x7f2: 0x1557, 0x7f3: 0x1567, 0x7f4: 0x15a3, 0x7f5: 0x07cb,
+ 0x7f6: 0x10f7, 0x7f7: 0x14c3, 0x7f8: 0x153f, 0x7f9: 0x0c27, 0x7fa: 0x078f, 0x7fb: 0x07ef,
+ 0x7fc: 0x0adf, 0x7fd: 0x0aff, 0x7fe: 0x0d27, 0x7ff: 0x0deb,
+ // Block 0x20, offset 0x800
+ 0x800: 0x0f3b, 0x801: 0x1043, 0x802: 0x12ef, 0x803: 0x148f, 0x804: 0x1697, 0x805: 0x0d5b,
+ 0x806: 0x1517, 0x807: 0x08ab, 0x808: 0x0da7, 0x809: 0x0db3, 0x80a: 0x0e87, 0x80b: 0x0ebf,
+ 0x80c: 0x0fc3, 0x80d: 0x101f, 0x80e: 0x109f, 0x80f: 0x1183, 0x810: 0x15af, 0x811: 0x0827,
+ 0x812: 0x0c7b, 0x813: 0x1527, 0x814: 0x07df, 0x815: 0x0b23, 0x816: 0x0ea7, 0x817: 0x1457,
+ 0x818: 0x0bdf, 0x819: 0x0c2f, 0x81a: 0x0dbb, 0x81b: 0x0fa7, 0x81c: 0x152f, 0x81d: 0x088f,
+ 0x81e: 0x0977, 0x81f: 0x0b0f, 0x820: 0x0d4b, 0x821: 0x0d97, 0x822: 0x0dd7, 0x823: 0x0e6b,
+ 0x824: 0x0fbf, 0x825: 0x1033, 0x826: 0x11cf, 0x827: 0x136f, 0x828: 0x137b, 0x829: 0x14cb,
+ 0x82a: 0x154b, 0x82b: 0x08fb, 0x82c: 0x0ec3, 0x82d: 0x097b, 0x82e: 0x0f3f, 0x82f: 0x0fe3,
+ 0x830: 0x12ff, 0x831: 0x1533, 0x832: 0x161f, 0x833: 0x1647, 0x834: 0x0daf, 0x835: 0x0e9f,
+ 0x836: 0x123b, 0x837: 0x112f, 0x838: 0x113b, 0x839: 0x115f, 0x83a: 0x0f8f, 0x83b: 0x0f17,
+ 0x83c: 0x13db, 0x83d: 0x07ab, 0x83e: 0x12a3, 0x83f: 0x0893,
+ // Block 0x21, offset 0x840
+ 0x840: 0x0883, 0x841: 0x0b83, 0x842: 0x0ca3, 0x843: 0x116b, 0x844: 0x0acb, 0x845: 0x0e7b,
+ 0x846: 0x0d67, 0x847: 0x145f, 0x848: 0x135f, 0x849: 0x151f, 0x84a: 0x139b, 0x84b: 0x0b9f,
+ 0x84c: 0x07ff, 0x84d: 0x09d3, 0x850: 0x0a27,
+ 0x852: 0x0d57, 0x855: 0x086f, 0x856: 0x0f97, 0x857: 0x105b,
+ 0x858: 0x10bf, 0x859: 0x10db, 0x85a: 0x10df, 0x85b: 0x10f3, 0x85c: 0x156f, 0x85d: 0x1163,
+ 0x85e: 0x11e7, 0x860: 0x1307, 0x862: 0x13cb,
+ 0x865: 0x147f, 0x866: 0x14ab,
+ 0x86a: 0x15c3, 0x86b: 0x15c7, 0x86c: 0x15cb, 0x86d: 0x162f, 0x86e: 0x14a3, 0x86f: 0x153b,
+ 0x870: 0x07cf, 0x871: 0x07f3, 0x872: 0x0807, 0x873: 0x08c3, 0x874: 0x08cf, 0x875: 0x090f,
+ 0x876: 0x09c3, 0x877: 0x09df, 0x878: 0x09e7, 0x879: 0x0a23, 0x87a: 0x0a2f, 0x87b: 0x0b0b,
+ 0x87c: 0x0b13, 0x87d: 0x0c1b, 0x87e: 0x0c43, 0x87f: 0x0c4b,
+ // Block 0x22, offset 0x880
+ 0x880: 0x0c63, 0x881: 0x0d0f, 0x882: 0x0d3f, 0x883: 0x0d5f, 0x884: 0x0dcf, 0x885: 0x0e93,
+ 0x886: 0x0eaf, 0x887: 0x0edf, 0x888: 0x0f33, 0x889: 0x0f53, 0x88a: 0x0fc7, 0x88b: 0x10a7,
+ 0x88c: 0x10c3, 0x88d: 0x10cb, 0x88e: 0x10c7, 0x88f: 0x10cf, 0x890: 0x10d3, 0x891: 0x10d7,
+ 0x892: 0x10eb, 0x893: 0x10ef, 0x894: 0x1113, 0x895: 0x1127, 0x896: 0x1143, 0x897: 0x11a7,
+ 0x898: 0x11af, 0x899: 0x11b7, 0x89a: 0x11cb, 0x89b: 0x11f3, 0x89c: 0x1243, 0x89d: 0x1277,
+ 0x89e: 0x1277, 0x89f: 0x12df, 0x8a0: 0x1387, 0x8a1: 0x139f, 0x8a2: 0x13d3, 0x8a3: 0x13d7,
+ 0x8a4: 0x141b, 0x8a5: 0x141f, 0x8a6: 0x1477, 0x8a7: 0x147f, 0x8a8: 0x154f, 0x8a9: 0x1593,
+ 0x8aa: 0x15ab, 0x8ab: 0x0c13, 0x8ac: 0x1792, 0x8ad: 0x125b,
+ 0x8b0: 0x0757, 0x8b1: 0x085b, 0x8b2: 0x081b, 0x8b3: 0x07c3, 0x8b4: 0x0803, 0x8b5: 0x082f,
+ 0x8b6: 0x08bf, 0x8b7: 0x08db, 0x8b8: 0x09c3, 0x8b9: 0x09af, 0x8ba: 0x09bf, 0x8bb: 0x09db,
+ 0x8bc: 0x0a27, 0x8bd: 0x0a37, 0x8be: 0x0a7b, 0x8bf: 0x0a87,
+ // Block 0x23, offset 0x8c0
+ 0x8c0: 0x0aa3, 0x8c1: 0x0ab3, 0x8c2: 0x0b9b, 0x8c3: 0x0ba3, 0x8c4: 0x0bd3, 0x8c5: 0x0bf3,
+ 0x8c6: 0x0c23, 0x8c7: 0x0c3b, 0x8c8: 0x0c2b, 0x8c9: 0x0c4b, 0x8ca: 0x0c3f, 0x8cb: 0x0c63,
+ 0x8cc: 0x0c7f, 0x8cd: 0x0cd7, 0x8ce: 0x0ce3, 0x8cf: 0x0ceb, 0x8d0: 0x0d13, 0x8d1: 0x0d57,
+ 0x8d2: 0x0d87, 0x8d3: 0x0d8b, 0x8d4: 0x0d9f, 0x8d5: 0x0e1f, 0x8d6: 0x0e2f, 0x8d7: 0x0e87,
+ 0x8d8: 0x0ed3, 0x8d9: 0x0ecb, 0x8da: 0x0edf, 0x8db: 0x0efb, 0x8dc: 0x0f33, 0x8dd: 0x108b,
+ 0x8de: 0x0f57, 0x8df: 0x0f8b, 0x8e0: 0x0f97, 0x8e1: 0x0fd7, 0x8e2: 0x0ff3, 0x8e3: 0x1017,
+ 0x8e4: 0x103b, 0x8e5: 0x103f, 0x8e6: 0x105b, 0x8e7: 0x105f, 0x8e8: 0x106f, 0x8e9: 0x1083,
+ 0x8ea: 0x107f, 0x8eb: 0x10af, 0x8ec: 0x112b, 0x8ed: 0x1143, 0x8ee: 0x115b, 0x8ef: 0x1193,
+ 0x8f0: 0x11a7, 0x8f1: 0x11c3, 0x8f2: 0x11f3, 0x8f3: 0x12a7, 0x8f4: 0x12cf, 0x8f5: 0x1343,
+ 0x8f6: 0x138b, 0x8f7: 0x1397, 0x8f8: 0x139f, 0x8f9: 0x13b7, 0x8fa: 0x13cb, 0x8fb: 0x13bb,
+ 0x8fc: 0x13d3, 0x8fd: 0x13cf, 0x8fe: 0x13c7, 0x8ff: 0x13d7,
+ // Block 0x24, offset 0x900
+ 0x900: 0x13e3, 0x901: 0x141f, 0x902: 0x145b, 0x903: 0x148b, 0x904: 0x14bf, 0x905: 0x14df,
+ 0x906: 0x152b, 0x907: 0x154f, 0x908: 0x156f, 0x909: 0x1583, 0x90a: 0x1593, 0x90b: 0x159f,
+ 0x90c: 0x15ab, 0x90d: 0x15ff, 0x90e: 0x169f, 0x90f: 0x1729, 0x910: 0x1724, 0x911: 0x1756,
+ 0x912: 0x067f, 0x913: 0x06a7, 0x914: 0x06ab, 0x915: 0x17d8, 0x916: 0x1805, 0x917: 0x187d,
+ 0x918: 0x168b, 0x919: 0x169b,
+ // Block 0x25, offset 0x940
+ 0x940: 0x0773, 0x941: 0x076b, 0x942: 0x077b, 0x943: 0x16bb, 0x944: 0x07bf, 0x945: 0x07cf,
+ 0x946: 0x07d3, 0x947: 0x07db, 0x948: 0x07e3, 0x949: 0x07e7, 0x94a: 0x07f3, 0x94b: 0x07eb,
+ 0x94c: 0x062b, 0x94d: 0x16cf, 0x94e: 0x0807, 0x94f: 0x080b, 0x950: 0x080f, 0x951: 0x082b,
+ 0x952: 0x16c0, 0x953: 0x062f, 0x954: 0x0817, 0x955: 0x0837, 0x956: 0x16ca, 0x957: 0x0847,
+ 0x958: 0x084f, 0x959: 0x07af, 0x95a: 0x0857, 0x95b: 0x085b, 0x95c: 0x18a5, 0x95d: 0x0877,
+ 0x95e: 0x087f, 0x95f: 0x0637, 0x960: 0x0897, 0x961: 0x089b, 0x962: 0x08a3, 0x963: 0x08a7,
+ 0x964: 0x063b, 0x965: 0x08bf, 0x966: 0x08c3, 0x967: 0x08cf, 0x968: 0x08db, 0x969: 0x08df,
+ 0x96a: 0x08e3, 0x96b: 0x08eb, 0x96c: 0x090b, 0x96d: 0x090f, 0x96e: 0x0917, 0x96f: 0x0927,
+ 0x970: 0x092f, 0x971: 0x0933, 0x972: 0x0933, 0x973: 0x0933, 0x974: 0x16de, 0x975: 0x0f0b,
+ 0x976: 0x0947, 0x977: 0x094f, 0x978: 0x16e3, 0x979: 0x095b, 0x97a: 0x0963, 0x97b: 0x096b,
+ 0x97c: 0x0993, 0x97d: 0x097f, 0x97e: 0x098b, 0x97f: 0x098f,
+ // Block 0x26, offset 0x980
+ 0x980: 0x0997, 0x981: 0x099f, 0x982: 0x09a3, 0x983: 0x09ab, 0x984: 0x09b3, 0x985: 0x09b7,
+ 0x986: 0x09b7, 0x987: 0x09bf, 0x988: 0x09c7, 0x989: 0x09cb, 0x98a: 0x09d7, 0x98b: 0x09fb,
+ 0x98c: 0x09df, 0x98d: 0x09ff, 0x98e: 0x09e3, 0x98f: 0x09eb, 0x990: 0x0883, 0x991: 0x0a47,
+ 0x992: 0x0a0f, 0x993: 0x0a13, 0x994: 0x0a17, 0x995: 0x0a0b, 0x996: 0x0a1f, 0x997: 0x0a1b,
+ 0x998: 0x0a33, 0x999: 0x16e8, 0x99a: 0x0a4f, 0x99b: 0x0a53, 0x99c: 0x0a5b, 0x99d: 0x0a67,
+ 0x99e: 0x0a6f, 0x99f: 0x0a8b, 0x9a0: 0x16ed, 0x9a1: 0x16f2, 0x9a2: 0x0a97, 0x9a3: 0x0a9b,
+ 0x9a4: 0x0a9f, 0x9a5: 0x0a93, 0x9a6: 0x0aa7, 0x9a7: 0x063f, 0x9a8: 0x0643, 0x9a9: 0x0aaf,
+ 0x9aa: 0x0ab7, 0x9ab: 0x0ab7, 0x9ac: 0x16f7, 0x9ad: 0x0ad3, 0x9ae: 0x0ad7, 0x9af: 0x0adb,
+ 0x9b0: 0x0ae3, 0x9b1: 0x16fc, 0x9b2: 0x0aeb, 0x9b3: 0x0aef, 0x9b4: 0x0bc7, 0x9b5: 0x0af7,
+ 0x9b6: 0x0647, 0x9b7: 0x0b03, 0x9b8: 0x0b13, 0x9b9: 0x0b1f, 0x9ba: 0x0b1b, 0x9bb: 0x1706,
+ 0x9bc: 0x0b27, 0x9bd: 0x170b, 0x9be: 0x0b33, 0x9bf: 0x0b2f,
+ // Block 0x27, offset 0x9c0
+ 0x9c0: 0x0b37, 0x9c1: 0x0b47, 0x9c2: 0x0b4b, 0x9c3: 0x064b, 0x9c4: 0x0b5b, 0x9c5: 0x0b63,
+ 0x9c6: 0x0b67, 0x9c7: 0x0b6b, 0x9c8: 0x064f, 0x9c9: 0x1710, 0x9ca: 0x0653, 0x9cb: 0x0b87,
+ 0x9cc: 0x0b8b, 0x9cd: 0x0b8f, 0x9ce: 0x0b97, 0x9cf: 0x18d7, 0x9d0: 0x0baf, 0x9d1: 0x171a,
+ 0x9d2: 0x171a, 0x9d3: 0x124f, 0x9d4: 0x0bbf, 0x9d5: 0x0bbf, 0x9d6: 0x0657, 0x9d7: 0x173d,
+ 0x9d8: 0x180f, 0x9d9: 0x0bcf, 0x9da: 0x0bd7, 0x9db: 0x065b, 0x9dc: 0x0beb, 0x9dd: 0x0bfb,
+ 0x9de: 0x0bff, 0x9df: 0x0c07, 0x9e0: 0x0c17, 0x9e1: 0x0663, 0x9e2: 0x065f, 0x9e3: 0x0c1b,
+ 0x9e4: 0x171f, 0x9e5: 0x0c1f, 0x9e6: 0x0c33, 0x9e7: 0x0c37, 0x9e8: 0x0c3b, 0x9e9: 0x0c37,
+ 0x9ea: 0x0c47, 0x9eb: 0x0c4b, 0x9ec: 0x0c5b, 0x9ed: 0x0c53, 0x9ee: 0x0c57, 0x9ef: 0x0c5f,
+ 0x9f0: 0x0c63, 0x9f1: 0x0c67, 0x9f2: 0x0c73, 0x9f3: 0x0c77, 0x9f4: 0x0c8f, 0x9f5: 0x0c97,
+ 0x9f6: 0x0ca7, 0x9f7: 0x0cbb, 0x9f8: 0x172e, 0x9f9: 0x0cb7, 0x9fa: 0x0cab, 0x9fb: 0x0cc3,
+ 0x9fc: 0x0ccb, 0x9fd: 0x0cdf, 0x9fe: 0x1733, 0x9ff: 0x0ce7,
+ // Block 0x28, offset 0xa00
+ 0xa00: 0x0cdb, 0xa01: 0x0cd3, 0xa02: 0x0667, 0xa03: 0x0cef, 0xa04: 0x0cf7, 0xa05: 0x0cff,
+ 0xa06: 0x0cf3, 0xa07: 0x066b, 0xa08: 0x0d0f, 0xa09: 0x0d17, 0xa0a: 0x1738, 0xa0b: 0x0d43,
+ 0xa0c: 0x0d77, 0xa0d: 0x0d53, 0xa0e: 0x0677, 0xa0f: 0x0d5f, 0xa10: 0x0673, 0xa11: 0x066f,
+ 0xa12: 0x083b, 0xa13: 0x083f, 0xa14: 0x0d7b, 0xa15: 0x0d63, 0xa16: 0x1223, 0xa17: 0x06db,
+ 0xa18: 0x0d87, 0xa19: 0x0d8b, 0xa1a: 0x0d8f, 0xa1b: 0x0da3, 0xa1c: 0x0d9b, 0xa1d: 0x1751,
+ 0xa1e: 0x067b, 0xa1f: 0x0db7, 0xa20: 0x0dab, 0xa21: 0x0dc7, 0xa22: 0x0dcf, 0xa23: 0x175b,
+ 0xa24: 0x0dd3, 0xa25: 0x0dbf, 0xa26: 0x0ddb, 0xa27: 0x067f, 0xa28: 0x0ddf, 0xa29: 0x0de3,
+ 0xa2a: 0x0de7, 0xa2b: 0x0df3, 0xa2c: 0x1760, 0xa2d: 0x0dfb, 0xa2e: 0x0683, 0xa2f: 0x0e07,
+ 0xa30: 0x1765, 0xa31: 0x0e0b, 0xa32: 0x0687, 0xa33: 0x0e17, 0xa34: 0x0e23, 0xa35: 0x0e2f,
+ 0xa36: 0x0e33, 0xa37: 0x176a, 0xa38: 0x1701, 0xa39: 0x176f, 0xa3a: 0x0e53, 0xa3b: 0x1774,
+ 0xa3c: 0x0e5f, 0xa3d: 0x0e67, 0xa3e: 0x0e57, 0xa3f: 0x0e73,
+ // Block 0x29, offset 0xa40
+ 0xa40: 0x0e83, 0xa41: 0x0e93, 0xa42: 0x0e87, 0xa43: 0x0e8b, 0xa44: 0x0e97, 0xa45: 0x0e9b,
+ 0xa46: 0x1779, 0xa47: 0x0e7f, 0xa48: 0x0eb3, 0xa49: 0x0eb7, 0xa4a: 0x068b, 0xa4b: 0x0ecb,
+ 0xa4c: 0x0ec7, 0xa4d: 0x177e, 0xa4e: 0x0eab, 0xa4f: 0x0ee7, 0xa50: 0x1783, 0xa51: 0x1788,
+ 0xa52: 0x0eeb, 0xa53: 0x0eff, 0xa54: 0x0efb, 0xa55: 0x0ef7, 0xa56: 0x068f, 0xa57: 0x0f03,
+ 0xa58: 0x0f13, 0xa59: 0x0f0f, 0xa5a: 0x0f1b, 0xa5b: 0x16c5, 0xa5c: 0x0f2b, 0xa5d: 0x178d,
+ 0xa5e: 0x0f37, 0xa5f: 0x1797, 0xa60: 0x0f4b, 0xa61: 0x0f57, 0xa62: 0x0f6b, 0xa63: 0x179c,
+ 0xa64: 0x0f7f, 0xa65: 0x0f83, 0xa66: 0x17a1, 0xa67: 0x17a6, 0xa68: 0x0f9f, 0xa69: 0x0faf,
+ 0xa6a: 0x0693, 0xa6b: 0x0fb3, 0xa6c: 0x0697, 0xa6d: 0x0697, 0xa6e: 0x0fcb, 0xa6f: 0x0fcf,
+ 0xa70: 0x0fd7, 0xa71: 0x0fdb, 0xa72: 0x0fe7, 0xa73: 0x069b, 0xa74: 0x0fff, 0xa75: 0x17ab,
+ 0xa76: 0x101b, 0xa77: 0x17b0, 0xa78: 0x1027, 0xa79: 0x1715, 0xa7a: 0x1037, 0xa7b: 0x17b5,
+ 0xa7c: 0x17ba, 0xa7d: 0x17bf, 0xa7e: 0x069f, 0xa7f: 0x06a3,
+ // Block 0x2a, offset 0xa80
+ 0xa80: 0x106f, 0xa81: 0x17c9, 0xa82: 0x17c4, 0xa83: 0x17ce, 0xa84: 0x17d3, 0xa85: 0x1077,
+ 0xa86: 0x107b, 0xa87: 0x107b, 0xa88: 0x1083, 0xa89: 0x06ab, 0xa8a: 0x1087, 0xa8b: 0x06af,
+ 0xa8c: 0x06b3, 0xa8d: 0x17dd, 0xa8e: 0x109b, 0xa8f: 0x10a3, 0xa90: 0x10af, 0xa91: 0x06b7,
+ 0xa92: 0x17e2, 0xa93: 0x10d3, 0xa94: 0x17e7, 0xa95: 0x17ec, 0xa96: 0x10f3, 0xa97: 0x110b,
+ 0xa98: 0x06bb, 0xa99: 0x1113, 0xa9a: 0x1117, 0xa9b: 0x111b, 0xa9c: 0x17f1, 0xa9d: 0x17f6,
+ 0xa9e: 0x17f6, 0xa9f: 0x1133, 0xaa0: 0x06bf, 0xaa1: 0x17fb, 0xaa2: 0x1147, 0xaa3: 0x114b,
+ 0xaa4: 0x06c3, 0xaa5: 0x1800, 0xaa6: 0x1167, 0xaa7: 0x06c7, 0xaa8: 0x1177, 0xaa9: 0x116f,
+ 0xaaa: 0x117f, 0xaab: 0x180a, 0xaac: 0x1197, 0xaad: 0x06cb, 0xaae: 0x11a3, 0xaaf: 0x11ab,
+ 0xab0: 0x11bb, 0xab1: 0x06cf, 0xab2: 0x1814, 0xab3: 0x1819, 0xab4: 0x06d3, 0xab5: 0x181e,
+ 0xab6: 0x11d3, 0xab7: 0x1823, 0xab8: 0x11df, 0xab9: 0x11eb, 0xaba: 0x11f3, 0xabb: 0x1828,
+ 0xabc: 0x182d, 0xabd: 0x1207, 0xabe: 0x1832, 0xabf: 0x120f,
+ // Block 0x2b, offset 0xac0
+ 0xac0: 0x1742, 0xac1: 0x06d7, 0xac2: 0x1227, 0xac3: 0x122b, 0xac4: 0x06df, 0xac5: 0x122f,
+ 0xac6: 0x0aab, 0xac7: 0x1837, 0xac8: 0x183c, 0xac9: 0x1747, 0xaca: 0x174c, 0xacb: 0x124f,
+ 0xacc: 0x1253, 0xacd: 0x146b, 0xace: 0x06e3, 0xacf: 0x127f, 0xad0: 0x127b, 0xad1: 0x1283,
+ 0xad2: 0x08b7, 0xad3: 0x1287, 0xad4: 0x128b, 0xad5: 0x128f, 0xad6: 0x1297, 0xad7: 0x1841,
+ 0xad8: 0x1293, 0xad9: 0x129b, 0xada: 0x12af, 0xadb: 0x12b3, 0xadc: 0x129f, 0xadd: 0x12b7,
+ 0xade: 0x12cb, 0xadf: 0x12df, 0xae0: 0x12ab, 0xae1: 0x12bf, 0xae2: 0x12c3, 0xae3: 0x12c7,
+ 0xae4: 0x1846, 0xae5: 0x1850, 0xae6: 0x184b, 0xae7: 0x06e7, 0xae8: 0x12e7, 0xae9: 0x12eb,
+ 0xaea: 0x12f3, 0xaeb: 0x1864, 0xaec: 0x12f7, 0xaed: 0x1855, 0xaee: 0x06eb, 0xaef: 0x06ef,
+ 0xaf0: 0x185a, 0xaf1: 0x185f, 0xaf2: 0x06f3, 0xaf3: 0x1317, 0xaf4: 0x131b, 0xaf5: 0x131f,
+ 0xaf6: 0x1323, 0xaf7: 0x132f, 0xaf8: 0x132b, 0xaf9: 0x1337, 0xafa: 0x1333, 0xafb: 0x1343,
+ 0xafc: 0x133b, 0xafd: 0x133f, 0xafe: 0x1347, 0xaff: 0x06f7,
+ // Block 0x2c, offset 0xb00
+ 0xb00: 0x134f, 0xb01: 0x1353, 0xb02: 0x06fb, 0xb03: 0x1363, 0xb04: 0x1367, 0xb05: 0x1869,
+ 0xb06: 0x1373, 0xb07: 0x1377, 0xb08: 0x06ff, 0xb09: 0x1383, 0xb0a: 0x0633, 0xb0b: 0x186e,
+ 0xb0c: 0x1873, 0xb0d: 0x0703, 0xb0e: 0x0707, 0xb0f: 0x13af, 0xb10: 0x13c7, 0xb11: 0x13e3,
+ 0xb12: 0x13f3, 0xb13: 0x1878, 0xb14: 0x1407, 0xb15: 0x140b, 0xb16: 0x1423, 0xb17: 0x142f,
+ 0xb18: 0x1882, 0xb19: 0x16d4, 0xb1a: 0x143b, 0xb1b: 0x1437, 0xb1c: 0x1443, 0xb1d: 0x16d9,
+ 0xb1e: 0x144f, 0xb1f: 0x145b, 0xb20: 0x1887, 0xb21: 0x188c, 0xb22: 0x149b, 0xb23: 0x14a7,
+ 0xb24: 0x14af, 0xb25: 0x1891, 0xb26: 0x14b3, 0xb27: 0x14db, 0xb28: 0x14e7, 0xb29: 0x14eb,
+ 0xb2a: 0x14e3, 0xb2b: 0x14f7, 0xb2c: 0x14fb, 0xb2d: 0x1896, 0xb2e: 0x1507, 0xb2f: 0x070b,
+ 0xb30: 0x150f, 0xb31: 0x189b, 0xb32: 0x070f, 0xb33: 0x1547, 0xb34: 0x0b3b, 0xb35: 0x155f,
+ 0xb36: 0x18a0, 0xb37: 0x18aa, 0xb38: 0x0713, 0xb39: 0x0717, 0xb3a: 0x1587, 0xb3b: 0x18af,
+ 0xb3c: 0x071b, 0xb3d: 0x18b4, 0xb3e: 0x159f, 0xb3f: 0x159f,
+ // Block 0x2d, offset 0xb40
+ 0xb40: 0x15a7, 0xb41: 0x18b9, 0xb42: 0x15bf, 0xb43: 0x071f, 0xb44: 0x15cf, 0xb45: 0x15db,
+ 0xb46: 0x15e3, 0xb47: 0x15eb, 0xb48: 0x0723, 0xb49: 0x18be, 0xb4a: 0x15ff, 0xb4b: 0x161b,
+ 0xb4c: 0x1627, 0xb4d: 0x0727, 0xb4e: 0x072b, 0xb4f: 0x162b, 0xb50: 0x18c3, 0xb51: 0x072f,
+ 0xb52: 0x18c8, 0xb53: 0x18cd, 0xb54: 0x18d2, 0xb55: 0x164f, 0xb56: 0x0733, 0xb57: 0x1663,
+ 0xb58: 0x166b, 0xb59: 0x166f, 0xb5a: 0x1677, 0xb5b: 0x167f, 0xb5c: 0x1687, 0xb5d: 0x18dc,
+}
+
+// nfcIndex: 22 blocks, 1408 entries, 1408 bytes
+// Block 0 is the zero block.
+var nfcIndex = [1408]uint8{
+ // Block 0x0, offset 0x0
+ // Block 0x1, offset 0x40
+ // Block 0x2, offset 0x80
+ // Block 0x3, offset 0xc0
+ 0xc2: 0x2c, 0xc3: 0x01, 0xc4: 0x02, 0xc5: 0x03, 0xc6: 0x2d, 0xc7: 0x04,
+ 0xc8: 0x05, 0xca: 0x2e, 0xcb: 0x2f, 0xcc: 0x06, 0xcd: 0x07, 0xce: 0x08, 0xcf: 0x30,
+ 0xd0: 0x09, 0xd1: 0x31, 0xd2: 0x32, 0xd3: 0x0a, 0xd6: 0x0b, 0xd7: 0x33,
+ 0xd8: 0x34, 0xd9: 0x0c, 0xdb: 0x35, 0xdc: 0x36, 0xdd: 0x37, 0xdf: 0x38,
+ 0xe0: 0x02, 0xe1: 0x03, 0xe2: 0x04, 0xe3: 0x05,
+ 0xea: 0x06, 0xeb: 0x07, 0xec: 0x08, 0xed: 0x09, 0xef: 0x0a,
+ 0xf0: 0x13,
+ // Block 0x4, offset 0x100
+ 0x120: 0x39, 0x121: 0x3a, 0x123: 0x3b, 0x124: 0x3c, 0x125: 0x3d, 0x126: 0x3e, 0x127: 0x3f,
+ 0x128: 0x40, 0x129: 0x41, 0x12a: 0x42, 0x12b: 0x43, 0x12c: 0x3e, 0x12d: 0x44, 0x12e: 0x45, 0x12f: 0x46,
+ 0x131: 0x47, 0x132: 0x48, 0x133: 0x49, 0x134: 0x4a, 0x135: 0x4b, 0x137: 0x4c,
+ 0x138: 0x4d, 0x139: 0x4e, 0x13a: 0x4f, 0x13b: 0x50, 0x13c: 0x51, 0x13d: 0x52, 0x13e: 0x53, 0x13f: 0x54,
+ // Block 0x5, offset 0x140
+ 0x140: 0x55, 0x142: 0x56, 0x144: 0x57, 0x145: 0x58, 0x146: 0x59, 0x147: 0x5a,
+ 0x14d: 0x5b,
+ 0x15c: 0x5c, 0x15f: 0x5d,
+ 0x162: 0x5e, 0x164: 0x5f,
+ 0x168: 0x60, 0x169: 0x61, 0x16a: 0x62, 0x16c: 0x0d, 0x16d: 0x63, 0x16e: 0x64, 0x16f: 0x65,
+ 0x170: 0x66, 0x173: 0x67, 0x177: 0x68,
+ 0x178: 0x0e, 0x179: 0x0f, 0x17a: 0x10, 0x17b: 0x11, 0x17c: 0x12, 0x17d: 0x13, 0x17e: 0x14, 0x17f: 0x15,
+ // Block 0x6, offset 0x180
+ 0x180: 0x69, 0x183: 0x6a, 0x184: 0x6b, 0x186: 0x6c, 0x187: 0x6d,
+ 0x188: 0x6e, 0x189: 0x16, 0x18a: 0x17, 0x18b: 0x6f, 0x18c: 0x70,
+ 0x1ab: 0x71,
+ 0x1b3: 0x72, 0x1b5: 0x73, 0x1b7: 0x74,
+ // Block 0x7, offset 0x1c0
+ 0x1c0: 0x75, 0x1c1: 0x18, 0x1c2: 0x19, 0x1c3: 0x1a,
+ 0x1cc: 0x76, 0x1cd: 0x77,
+ // Block 0x8, offset 0x200
+ 0x219: 0x78, 0x21a: 0x79, 0x21b: 0x7a,
+ 0x220: 0x7b, 0x223: 0x7c, 0x224: 0x7d, 0x225: 0x7e, 0x226: 0x7f, 0x227: 0x80,
+ 0x22a: 0x81, 0x22b: 0x82, 0x22f: 0x83,
+ 0x230: 0x84, 0x231: 0x85, 0x232: 0x86, 0x233: 0x87, 0x234: 0x88, 0x235: 0x89, 0x236: 0x8a, 0x237: 0x84,
+ 0x238: 0x85, 0x239: 0x86, 0x23a: 0x87, 0x23b: 0x88, 0x23c: 0x89, 0x23d: 0x8a, 0x23e: 0x84, 0x23f: 0x85,
+ // Block 0x9, offset 0x240
+ 0x240: 0x86, 0x241: 0x87, 0x242: 0x88, 0x243: 0x89, 0x244: 0x8a, 0x245: 0x84, 0x246: 0x85, 0x247: 0x86,
+ 0x248: 0x87, 0x249: 0x88, 0x24a: 0x89, 0x24b: 0x8a, 0x24c: 0x84, 0x24d: 0x85, 0x24e: 0x86, 0x24f: 0x87,
+ 0x250: 0x88, 0x251: 0x89, 0x252: 0x8a, 0x253: 0x84, 0x254: 0x85, 0x255: 0x86, 0x256: 0x87, 0x257: 0x88,
+ 0x258: 0x89, 0x259: 0x8a, 0x25a: 0x84, 0x25b: 0x85, 0x25c: 0x86, 0x25d: 0x87, 0x25e: 0x88, 0x25f: 0x89,
+ 0x260: 0x8a, 0x261: 0x84, 0x262: 0x85, 0x263: 0x86, 0x264: 0x87, 0x265: 0x88, 0x266: 0x89, 0x267: 0x8a,
+ 0x268: 0x84, 0x269: 0x85, 0x26a: 0x86, 0x26b: 0x87, 0x26c: 0x88, 0x26d: 0x89, 0x26e: 0x8a, 0x26f: 0x84,
+ 0x270: 0x85, 0x271: 0x86, 0x272: 0x87, 0x273: 0x88, 0x274: 0x89, 0x275: 0x8a, 0x276: 0x84, 0x277: 0x85,
+ 0x278: 0x86, 0x279: 0x87, 0x27a: 0x88, 0x27b: 0x89, 0x27c: 0x8a, 0x27d: 0x84, 0x27e: 0x85, 0x27f: 0x86,
+ // Block 0xa, offset 0x280
+ 0x280: 0x87, 0x281: 0x88, 0x282: 0x89, 0x283: 0x8a, 0x284: 0x84, 0x285: 0x85, 0x286: 0x86, 0x287: 0x87,
+ 0x288: 0x88, 0x289: 0x89, 0x28a: 0x8a, 0x28b: 0x84, 0x28c: 0x85, 0x28d: 0x86, 0x28e: 0x87, 0x28f: 0x88,
+ 0x290: 0x89, 0x291: 0x8a, 0x292: 0x84, 0x293: 0x85, 0x294: 0x86, 0x295: 0x87, 0x296: 0x88, 0x297: 0x89,
+ 0x298: 0x8a, 0x299: 0x84, 0x29a: 0x85, 0x29b: 0x86, 0x29c: 0x87, 0x29d: 0x88, 0x29e: 0x89, 0x29f: 0x8a,
+ 0x2a0: 0x84, 0x2a1: 0x85, 0x2a2: 0x86, 0x2a3: 0x87, 0x2a4: 0x88, 0x2a5: 0x89, 0x2a6: 0x8a, 0x2a7: 0x84,
+ 0x2a8: 0x85, 0x2a9: 0x86, 0x2aa: 0x87, 0x2ab: 0x88, 0x2ac: 0x89, 0x2ad: 0x8a, 0x2ae: 0x84, 0x2af: 0x85,
+ 0x2b0: 0x86, 0x2b1: 0x87, 0x2b2: 0x88, 0x2b3: 0x89, 0x2b4: 0x8a, 0x2b5: 0x84, 0x2b6: 0x85, 0x2b7: 0x86,
+ 0x2b8: 0x87, 0x2b9: 0x88, 0x2ba: 0x89, 0x2bb: 0x8a, 0x2bc: 0x84, 0x2bd: 0x85, 0x2be: 0x86, 0x2bf: 0x87,
+ // Block 0xb, offset 0x2c0
+ 0x2c0: 0x88, 0x2c1: 0x89, 0x2c2: 0x8a, 0x2c3: 0x84, 0x2c4: 0x85, 0x2c5: 0x86, 0x2c6: 0x87, 0x2c7: 0x88,
+ 0x2c8: 0x89, 0x2c9: 0x8a, 0x2ca: 0x84, 0x2cb: 0x85, 0x2cc: 0x86, 0x2cd: 0x87, 0x2ce: 0x88, 0x2cf: 0x89,
+ 0x2d0: 0x8a, 0x2d1: 0x84, 0x2d2: 0x85, 0x2d3: 0x86, 0x2d4: 0x87, 0x2d5: 0x88, 0x2d6: 0x89, 0x2d7: 0x8a,
+ 0x2d8: 0x84, 0x2d9: 0x85, 0x2da: 0x86, 0x2db: 0x87, 0x2dc: 0x88, 0x2dd: 0x89, 0x2de: 0x8b,
+ // Block 0xc, offset 0x300
+ 0x324: 0x1b, 0x325: 0x1c, 0x326: 0x1d, 0x327: 0x1e,
+ 0x328: 0x1f, 0x329: 0x20, 0x32a: 0x21, 0x32b: 0x22, 0x32c: 0x8c, 0x32d: 0x8d, 0x32e: 0x8e,
+ 0x331: 0x8f, 0x332: 0x90, 0x333: 0x91, 0x334: 0x92,
+ 0x338: 0x93, 0x339: 0x94, 0x33a: 0x95, 0x33b: 0x96, 0x33e: 0x97, 0x33f: 0x98,
+ // Block 0xd, offset 0x340
+ 0x347: 0x99,
+ 0x34b: 0x9a, 0x34d: 0x9b,
+ 0x368: 0x9c, 0x36b: 0x9d,
+ // Block 0xe, offset 0x380
+ 0x381: 0x9e, 0x382: 0x9f, 0x384: 0xa0, 0x385: 0x7f, 0x387: 0x80,
+ 0x388: 0xa1, 0x38b: 0xa2, 0x38c: 0x3e, 0x38d: 0xa3,
+ 0x392: 0xa4, 0x393: 0xa5, 0x396: 0xa6, 0x397: 0xa7,
+ 0x398: 0x73, 0x39a: 0xa8,
+ // Block 0xf, offset 0x3c0
+ 0x3eb: 0xa9, 0x3ec: 0xaa,
+ // Block 0x10, offset 0x400
+ 0x432: 0xab,
+ // Block 0x11, offset 0x440
+ 0x445: 0xac, 0x446: 0xad, 0x447: 0xae,
+ 0x449: 0xaf,
+ // Block 0x12, offset 0x480
+ 0x4a3: 0xb0,
+ // Block 0x13, offset 0x4c0
+ 0x4c8: 0xb1,
+ // Block 0x14, offset 0x500
+ 0x520: 0x23, 0x521: 0x24, 0x522: 0x25, 0x523: 0x26, 0x524: 0x27, 0x525: 0x28, 0x526: 0x29, 0x527: 0x2a,
+ 0x528: 0x2b,
+ // Block 0x15, offset 0x540
+ 0x550: 0x0b, 0x551: 0x0c, 0x556: 0x0d,
+ 0x55b: 0x0e, 0x55d: 0x0f, 0x55e: 0x10, 0x55f: 0x11,
+ 0x56f: 0x12,
+}
+
+// nfcSparseOffset: 134 entries, 268 bytes
+var nfcSparseOffset = []uint16{0x0, 0x5, 0x9, 0xb, 0xd, 0x18, 0x28, 0x2a, 0x2f, 0x3a, 0x49, 0x56, 0x5e, 0x62, 0x67, 0x69, 0x78, 0x80, 0x87, 0x8a, 0x92, 0x96, 0x9a, 0x9c, 0x9e, 0xa7, 0xab, 0xb2, 0xb7, 0xba, 0xc4, 0xc6, 0xcd, 0xd5, 0xd8, 0xda, 0xdc, 0xde, 0xe3, 0xf4, 0x100, 0x102, 0x108, 0x10a, 0x10c, 0x10e, 0x110, 0x112, 0x114, 0x117, 0x11a, 0x11c, 0x11f, 0x122, 0x126, 0x12b, 0x134, 0x136, 0x139, 0x13b, 0x146, 0x155, 0x159, 0x167, 0x16a, 0x170, 0x176, 0x181, 0x185, 0x187, 0x189, 0x18b, 0x18d, 0x18f, 0x195, 0x19d, 0x1a1, 0x1a4, 0x1a6, 0x1a8, 0x1aa, 0x1ad, 0x1af, 0x1b1, 0x1b3, 0x1b5, 0x1bb, 0x1be, 0x1c0, 0x1c7, 0x1cd, 0x1d3, 0x1db, 0x1e1, 0x1e7, 0x1ed, 0x1f1, 0x1ff, 0x208, 0x20b, 0x20e, 0x210, 0x213, 0x215, 0x218, 0x21d, 0x21f, 0x221, 0x223, 0x225, 0x227, 0x229, 0x22b, 0x231, 0x234, 0x237, 0x23f, 0x246, 0x249, 0x24b, 0x253, 0x25a, 0x25d, 0x263, 0x265, 0x268, 0x26a, 0x26c, 0x26e, 0x27b, 0x285, 0x287, 0x289, 0x28b}
+
+// nfcSparseValues: 653 entries, 2612 bytes
+var nfcSparseValues = [653]valueRange{
+ // Block 0x0, offset 0x0
+ {value: 0x0000, lo: 0x04},
+ {value: 0xa100, lo: 0xa8, hi: 0xa8},
+ {value: 0x8100, lo: 0xaf, hi: 0xaf},
+ {value: 0x8100, lo: 0xb4, hi: 0xb4},
+ {value: 0x8100, lo: 0xb8, hi: 0xb8},
+ // Block 0x1, offset 0x5
+ {value: 0x0091, lo: 0x03},
+ {value: 0x471d, lo: 0xa0, hi: 0xa1},
+ {value: 0x474f, lo: 0xaf, hi: 0xb0},
+ {value: 0xa000, lo: 0xb7, hi: 0xb7},
+ // Block 0x2, offset 0x9
+ {value: 0x0000, lo: 0x01},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ // Block 0x3, offset 0xb
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8100, lo: 0x98, hi: 0x9d},
+ // Block 0x4, offset 0xd
+ {value: 0x0006, lo: 0x0a},
+ {value: 0xa000, lo: 0x81, hi: 0x81},
+ {value: 0xa000, lo: 0x85, hi: 0x85},
+ {value: 0xa000, lo: 0x89, hi: 0x89},
+ {value: 0x487b, lo: 0x8a, hi: 0x8a},
+ {value: 0x4899, lo: 0x8b, hi: 0x8b},
+ {value: 0x3702, lo: 0x8c, hi: 0x8c},
+ {value: 0x371a, lo: 0x8d, hi: 0x8d},
+ {value: 0x48b1, lo: 0x8e, hi: 0x8e},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0x3738, lo: 0x93, hi: 0x94},
+ // Block 0x5, offset 0x18
+ {value: 0x0000, lo: 0x0f},
+ {value: 0xa000, lo: 0x83, hi: 0x83},
+ {value: 0xa000, lo: 0x87, hi: 0x87},
+ {value: 0xa000, lo: 0x8b, hi: 0x8b},
+ {value: 0xa000, lo: 0x8d, hi: 0x8d},
+ {value: 0x37e0, lo: 0x90, hi: 0x90},
+ {value: 0x37ec, lo: 0x91, hi: 0x91},
+ {value: 0x37da, lo: 0x93, hi: 0x93},
+ {value: 0xa000, lo: 0x96, hi: 0x96},
+ {value: 0x3852, lo: 0x97, hi: 0x97},
+ {value: 0x381c, lo: 0x9c, hi: 0x9c},
+ {value: 0x3804, lo: 0x9d, hi: 0x9d},
+ {value: 0x382e, lo: 0x9e, hi: 0x9e},
+ {value: 0xa000, lo: 0xb4, hi: 0xb5},
+ {value: 0x3858, lo: 0xb6, hi: 0xb6},
+ {value: 0x385e, lo: 0xb7, hi: 0xb7},
+ // Block 0x6, offset 0x28
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0x83, hi: 0x87},
+ // Block 0x7, offset 0x2a
+ {value: 0x0001, lo: 0x04},
+ {value: 0x8113, lo: 0x81, hi: 0x82},
+ {value: 0x8132, lo: 0x84, hi: 0x84},
+ {value: 0x812d, lo: 0x85, hi: 0x85},
+ {value: 0x810d, lo: 0x87, hi: 0x87},
+ // Block 0x8, offset 0x2f
+ {value: 0x0000, lo: 0x0a},
+ {value: 0x8132, lo: 0x90, hi: 0x97},
+ {value: 0x8119, lo: 0x98, hi: 0x98},
+ {value: 0x811a, lo: 0x99, hi: 0x99},
+ {value: 0x811b, lo: 0x9a, hi: 0x9a},
+ {value: 0x387c, lo: 0xa2, hi: 0xa2},
+ {value: 0x3882, lo: 0xa3, hi: 0xa3},
+ {value: 0x388e, lo: 0xa4, hi: 0xa4},
+ {value: 0x3888, lo: 0xa5, hi: 0xa5},
+ {value: 0x3894, lo: 0xa6, hi: 0xa6},
+ {value: 0xa000, lo: 0xa7, hi: 0xa7},
+ // Block 0x9, offset 0x3a
+ {value: 0x0000, lo: 0x0e},
+ {value: 0x38a6, lo: 0x80, hi: 0x80},
+ {value: 0xa000, lo: 0x81, hi: 0x81},
+ {value: 0x389a, lo: 0x82, hi: 0x82},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0x38a0, lo: 0x93, hi: 0x93},
+ {value: 0xa000, lo: 0x95, hi: 0x95},
+ {value: 0x8132, lo: 0x96, hi: 0x9c},
+ {value: 0x8132, lo: 0x9f, hi: 0xa2},
+ {value: 0x812d, lo: 0xa3, hi: 0xa3},
+ {value: 0x8132, lo: 0xa4, hi: 0xa4},
+ {value: 0x8132, lo: 0xa7, hi: 0xa8},
+ {value: 0x812d, lo: 0xaa, hi: 0xaa},
+ {value: 0x8132, lo: 0xab, hi: 0xac},
+ {value: 0x812d, lo: 0xad, hi: 0xad},
+ // Block 0xa, offset 0x49
+ {value: 0x0000, lo: 0x0c},
+ {value: 0x811f, lo: 0x91, hi: 0x91},
+ {value: 0x8132, lo: 0xb0, hi: 0xb0},
+ {value: 0x812d, lo: 0xb1, hi: 0xb1},
+ {value: 0x8132, lo: 0xb2, hi: 0xb3},
+ {value: 0x812d, lo: 0xb4, hi: 0xb4},
+ {value: 0x8132, lo: 0xb5, hi: 0xb6},
+ {value: 0x812d, lo: 0xb7, hi: 0xb9},
+ {value: 0x8132, lo: 0xba, hi: 0xba},
+ {value: 0x812d, lo: 0xbb, hi: 0xbc},
+ {value: 0x8132, lo: 0xbd, hi: 0xbd},
+ {value: 0x812d, lo: 0xbe, hi: 0xbe},
+ {value: 0x8132, lo: 0xbf, hi: 0xbf},
+ // Block 0xb, offset 0x56
+ {value: 0x0005, lo: 0x07},
+ {value: 0x8132, lo: 0x80, hi: 0x80},
+ {value: 0x8132, lo: 0x81, hi: 0x81},
+ {value: 0x812d, lo: 0x82, hi: 0x83},
+ {value: 0x812d, lo: 0x84, hi: 0x85},
+ {value: 0x812d, lo: 0x86, hi: 0x87},
+ {value: 0x812d, lo: 0x88, hi: 0x89},
+ {value: 0x8132, lo: 0x8a, hi: 0x8a},
+ // Block 0xc, offset 0x5e
+ {value: 0x0000, lo: 0x03},
+ {value: 0x8132, lo: 0xab, hi: 0xb1},
+ {value: 0x812d, lo: 0xb2, hi: 0xb2},
+ {value: 0x8132, lo: 0xb3, hi: 0xb3},
+ // Block 0xd, offset 0x62
+ {value: 0x0000, lo: 0x04},
+ {value: 0x8132, lo: 0x96, hi: 0x99},
+ {value: 0x8132, lo: 0x9b, hi: 0xa3},
+ {value: 0x8132, lo: 0xa5, hi: 0xa7},
+ {value: 0x8132, lo: 0xa9, hi: 0xad},
+ // Block 0xe, offset 0x67
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0x99, hi: 0x9b},
+ // Block 0xf, offset 0x69
+ {value: 0x0000, lo: 0x0e},
+ {value: 0x8132, lo: 0xa4, hi: 0xa5},
+ {value: 0x812d, lo: 0xa6, hi: 0xa6},
+ {value: 0x8132, lo: 0xa7, hi: 0xa8},
+ {value: 0x812d, lo: 0xa9, hi: 0xa9},
+ {value: 0x8132, lo: 0xaa, hi: 0xac},
+ {value: 0x812d, lo: 0xad, hi: 0xaf},
+ {value: 0x8116, lo: 0xb0, hi: 0xb0},
+ {value: 0x8117, lo: 0xb1, hi: 0xb1},
+ {value: 0x8118, lo: 0xb2, hi: 0xb2},
+ {value: 0x8132, lo: 0xb3, hi: 0xb5},
+ {value: 0x812d, lo: 0xb6, hi: 0xb6},
+ {value: 0x8132, lo: 0xb7, hi: 0xb8},
+ {value: 0x812d, lo: 0xb9, hi: 0xba},
+ {value: 0x8132, lo: 0xbb, hi: 0xbf},
+ // Block 0x10, offset 0x78
+ {value: 0x0000, lo: 0x07},
+ {value: 0xa000, lo: 0xa8, hi: 0xa8},
+ {value: 0x3f13, lo: 0xa9, hi: 0xa9},
+ {value: 0xa000, lo: 0xb0, hi: 0xb0},
+ {value: 0x3f1b, lo: 0xb1, hi: 0xb1},
+ {value: 0xa000, lo: 0xb3, hi: 0xb3},
+ {value: 0x3f23, lo: 0xb4, hi: 0xb4},
+ {value: 0x9902, lo: 0xbc, hi: 0xbc},
+ // Block 0x11, offset 0x80
+ {value: 0x0008, lo: 0x06},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x8132, lo: 0x91, hi: 0x91},
+ {value: 0x812d, lo: 0x92, hi: 0x92},
+ {value: 0x8132, lo: 0x93, hi: 0x93},
+ {value: 0x8132, lo: 0x94, hi: 0x94},
+ {value: 0x4557, lo: 0x98, hi: 0x9f},
+ // Block 0x12, offset 0x87
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8102, lo: 0xbc, hi: 0xbc},
+ {value: 0x9900, lo: 0xbe, hi: 0xbe},
+ // Block 0x13, offset 0x8a
+ {value: 0x0007, lo: 0x07},
+ {value: 0xa000, lo: 0x87, hi: 0x87},
+ {value: 0x18e1, lo: 0x8b, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x97, hi: 0x97},
+ {value: 0x4597, lo: 0x9c, hi: 0x9c},
+ {value: 0x459f, lo: 0x9d, hi: 0x9d},
+ {value: 0x45a7, lo: 0x9f, hi: 0x9f},
+ // Block 0x14, offset 0x92
+ {value: 0x0000, lo: 0x03},
+ {value: 0x45cf, lo: 0xb3, hi: 0xb3},
+ {value: 0x45d7, lo: 0xb6, hi: 0xb6},
+ {value: 0x8102, lo: 0xbc, hi: 0xbc},
+ // Block 0x15, offset 0x96
+ {value: 0x0008, lo: 0x03},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x45af, lo: 0x99, hi: 0x9b},
+ {value: 0x45c7, lo: 0x9e, hi: 0x9e},
+ // Block 0x16, offset 0x9a
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8102, lo: 0xbc, hi: 0xbc},
+ // Block 0x17, offset 0x9c
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ // Block 0x18, offset 0x9e
+ {value: 0x0000, lo: 0x08},
+ {value: 0xa000, lo: 0x87, hi: 0x87},
+ {value: 0x18f6, lo: 0x88, hi: 0x88},
+ {value: 0x18ef, lo: 0x8b, hi: 0x8b},
+ {value: 0x18fd, lo: 0x8c, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x96, hi: 0x97},
+ {value: 0x45df, lo: 0x9c, hi: 0x9c},
+ {value: 0x45e7, lo: 0x9d, hi: 0x9d},
+ // Block 0x19, offset 0xa7
+ {value: 0x0000, lo: 0x03},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0x1904, lo: 0x94, hi: 0x94},
+ {value: 0x9900, lo: 0xbe, hi: 0xbe},
+ // Block 0x1a, offset 0xab
+ {value: 0x0000, lo: 0x06},
+ {value: 0xa000, lo: 0x86, hi: 0x87},
+ {value: 0x190b, lo: 0x8a, hi: 0x8a},
+ {value: 0x1919, lo: 0x8b, hi: 0x8b},
+ {value: 0x1912, lo: 0x8c, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x97, hi: 0x97},
+ // Block 0x1b, offset 0xb2
+ {value: 0x1801, lo: 0x04},
+ {value: 0xa000, lo: 0x86, hi: 0x86},
+ {value: 0x3f2b, lo: 0x88, hi: 0x88},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x8120, lo: 0x95, hi: 0x96},
+ // Block 0x1c, offset 0xb7
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8102, lo: 0xbc, hi: 0xbc},
+ {value: 0xa000, lo: 0xbf, hi: 0xbf},
+ // Block 0x1d, offset 0xba
+ {value: 0x0000, lo: 0x09},
+ {value: 0x1920, lo: 0x80, hi: 0x80},
+ {value: 0x9900, lo: 0x82, hi: 0x82},
+ {value: 0xa000, lo: 0x86, hi: 0x86},
+ {value: 0x1927, lo: 0x87, hi: 0x87},
+ {value: 0x192e, lo: 0x88, hi: 0x88},
+ {value: 0x2eb7, lo: 0x8a, hi: 0x8a},
+ {value: 0x19f6, lo: 0x8b, hi: 0x8b},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x95, hi: 0x96},
+ // Block 0x1e, offset 0xc4
+ {value: 0x0000, lo: 0x01},
+ {value: 0x9900, lo: 0xbe, hi: 0xbe},
+ // Block 0x1f, offset 0xc6
+ {value: 0x0000, lo: 0x06},
+ {value: 0xa000, lo: 0x86, hi: 0x87},
+ {value: 0x1935, lo: 0x8a, hi: 0x8a},
+ {value: 0x1943, lo: 0x8b, hi: 0x8b},
+ {value: 0x193c, lo: 0x8c, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x97, hi: 0x97},
+ // Block 0x20, offset 0xcd
+ {value: 0x0007, lo: 0x07},
+ {value: 0x9904, lo: 0x8a, hi: 0x8a},
+ {value: 0x9900, lo: 0x8f, hi: 0x8f},
+ {value: 0xa000, lo: 0x99, hi: 0x99},
+ {value: 0x3f33, lo: 0x9a, hi: 0x9a},
+ {value: 0x2ebe, lo: 0x9c, hi: 0x9d},
+ {value: 0x194a, lo: 0x9e, hi: 0x9e},
+ {value: 0x9900, lo: 0x9f, hi: 0x9f},
+ // Block 0x21, offset 0xd5
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8122, lo: 0xb8, hi: 0xb9},
+ {value: 0x8104, lo: 0xba, hi: 0xba},
+ // Block 0x22, offset 0xd8
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8123, lo: 0x88, hi: 0x8b},
+ // Block 0x23, offset 0xda
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8124, lo: 0xb8, hi: 0xb9},
+ // Block 0x24, offset 0xdc
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8125, lo: 0x88, hi: 0x8b},
+ // Block 0x25, offset 0xde
+ {value: 0x0000, lo: 0x04},
+ {value: 0x812d, lo: 0x98, hi: 0x99},
+ {value: 0x812d, lo: 0xb5, hi: 0xb5},
+ {value: 0x812d, lo: 0xb7, hi: 0xb7},
+ {value: 0x812b, lo: 0xb9, hi: 0xb9},
+ // Block 0x26, offset 0xe3
+ {value: 0x0000, lo: 0x10},
+ {value: 0x27d7, lo: 0x83, hi: 0x83},
+ {value: 0x27de, lo: 0x8d, hi: 0x8d},
+ {value: 0x27e5, lo: 0x92, hi: 0x92},
+ {value: 0x27ec, lo: 0x97, hi: 0x97},
+ {value: 0x27f3, lo: 0x9c, hi: 0x9c},
+ {value: 0x27d0, lo: 0xa9, hi: 0xa9},
+ {value: 0x8126, lo: 0xb1, hi: 0xb1},
+ {value: 0x8127, lo: 0xb2, hi: 0xb2},
+ {value: 0x4a0b, lo: 0xb3, hi: 0xb3},
+ {value: 0x8128, lo: 0xb4, hi: 0xb4},
+ {value: 0x4a14, lo: 0xb5, hi: 0xb5},
+ {value: 0x45ef, lo: 0xb6, hi: 0xb6},
+ {value: 0x8200, lo: 0xb7, hi: 0xb7},
+ {value: 0x45f7, lo: 0xb8, hi: 0xb8},
+ {value: 0x8200, lo: 0xb9, hi: 0xb9},
+ {value: 0x8127, lo: 0xba, hi: 0xbd},
+ // Block 0x27, offset 0xf4
+ {value: 0x0000, lo: 0x0b},
+ {value: 0x8127, lo: 0x80, hi: 0x80},
+ {value: 0x4a1d, lo: 0x81, hi: 0x81},
+ {value: 0x8132, lo: 0x82, hi: 0x83},
+ {value: 0x8104, lo: 0x84, hi: 0x84},
+ {value: 0x8132, lo: 0x86, hi: 0x87},
+ {value: 0x2801, lo: 0x93, hi: 0x93},
+ {value: 0x2808, lo: 0x9d, hi: 0x9d},
+ {value: 0x280f, lo: 0xa2, hi: 0xa2},
+ {value: 0x2816, lo: 0xa7, hi: 0xa7},
+ {value: 0x281d, lo: 0xac, hi: 0xac},
+ {value: 0x27fa, lo: 0xb9, hi: 0xb9},
+ // Block 0x28, offset 0x100
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0x86, hi: 0x86},
+ // Block 0x29, offset 0x102
+ {value: 0x0000, lo: 0x05},
+ {value: 0xa000, lo: 0xa5, hi: 0xa5},
+ {value: 0x1951, lo: 0xa6, hi: 0xa6},
+ {value: 0x9900, lo: 0xae, hi: 0xae},
+ {value: 0x8102, lo: 0xb7, hi: 0xb7},
+ {value: 0x8104, lo: 0xb9, hi: 0xba},
+ // Block 0x2a, offset 0x108
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0x8d, hi: 0x8d},
+ // Block 0x2b, offset 0x10a
+ {value: 0x0000, lo: 0x01},
+ {value: 0xa000, lo: 0x80, hi: 0x92},
+ // Block 0x2c, offset 0x10c
+ {value: 0x0000, lo: 0x01},
+ {value: 0xb900, lo: 0xa1, hi: 0xb5},
+ // Block 0x2d, offset 0x10e
+ {value: 0x0000, lo: 0x01},
+ {value: 0x9900, lo: 0xa8, hi: 0xbf},
+ // Block 0x2e, offset 0x110
+ {value: 0x0000, lo: 0x01},
+ {value: 0x9900, lo: 0x80, hi: 0x82},
+ // Block 0x2f, offset 0x112
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0x9d, hi: 0x9f},
+ // Block 0x30, offset 0x114
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x94, hi: 0x94},
+ {value: 0x8104, lo: 0xb4, hi: 0xb4},
+ // Block 0x31, offset 0x117
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x92, hi: 0x92},
+ {value: 0x8132, lo: 0x9d, hi: 0x9d},
+ // Block 0x32, offset 0x11a
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8131, lo: 0xa9, hi: 0xa9},
+ // Block 0x33, offset 0x11c
+ {value: 0x0004, lo: 0x02},
+ {value: 0x812e, lo: 0xb9, hi: 0xba},
+ {value: 0x812d, lo: 0xbb, hi: 0xbb},
+ // Block 0x34, offset 0x11f
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8132, lo: 0x97, hi: 0x97},
+ {value: 0x812d, lo: 0x98, hi: 0x98},
+ // Block 0x35, offset 0x122
+ {value: 0x0000, lo: 0x03},
+ {value: 0x8104, lo: 0xa0, hi: 0xa0},
+ {value: 0x8132, lo: 0xb5, hi: 0xbc},
+ {value: 0x812d, lo: 0xbf, hi: 0xbf},
+ // Block 0x36, offset 0x126
+ {value: 0x0000, lo: 0x04},
+ {value: 0x8132, lo: 0xb0, hi: 0xb4},
+ {value: 0x812d, lo: 0xb5, hi: 0xba},
+ {value: 0x8132, lo: 0xbb, hi: 0xbc},
+ {value: 0x812d, lo: 0xbd, hi: 0xbd},
+ // Block 0x37, offset 0x12b
+ {value: 0x0000, lo: 0x08},
+ {value: 0x1990, lo: 0x80, hi: 0x80},
+ {value: 0x1997, lo: 0x81, hi: 0x81},
+ {value: 0xa000, lo: 0x82, hi: 0x82},
+ {value: 0x199e, lo: 0x83, hi: 0x83},
+ {value: 0x8104, lo: 0x84, hi: 0x84},
+ {value: 0x8132, lo: 0xab, hi: 0xab},
+ {value: 0x812d, lo: 0xac, hi: 0xac},
+ {value: 0x8132, lo: 0xad, hi: 0xb3},
+ // Block 0x38, offset 0x134
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0xaa, hi: 0xab},
+ // Block 0x39, offset 0x136
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8102, lo: 0xa6, hi: 0xa6},
+ {value: 0x8104, lo: 0xb2, hi: 0xb3},
+ // Block 0x3a, offset 0x139
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8102, lo: 0xb7, hi: 0xb7},
+ // Block 0x3b, offset 0x13b
+ {value: 0x0000, lo: 0x0a},
+ {value: 0x8132, lo: 0x90, hi: 0x92},
+ {value: 0x8101, lo: 0x94, hi: 0x94},
+ {value: 0x812d, lo: 0x95, hi: 0x99},
+ {value: 0x8132, lo: 0x9a, hi: 0x9b},
+ {value: 0x812d, lo: 0x9c, hi: 0x9f},
+ {value: 0x8132, lo: 0xa0, hi: 0xa0},
+ {value: 0x8101, lo: 0xa2, hi: 0xa8},
+ {value: 0x812d, lo: 0xad, hi: 0xad},
+ {value: 0x8132, lo: 0xb4, hi: 0xb4},
+ {value: 0x8132, lo: 0xb8, hi: 0xb9},
+ // Block 0x3c, offset 0x146
+ {value: 0x0000, lo: 0x0e},
+ {value: 0x8132, lo: 0x80, hi: 0x81},
+ {value: 0x812d, lo: 0x82, hi: 0x82},
+ {value: 0x8132, lo: 0x83, hi: 0x89},
+ {value: 0x812d, lo: 0x8a, hi: 0x8a},
+ {value: 0x8132, lo: 0x8b, hi: 0x8c},
+ {value: 0x8135, lo: 0x8d, hi: 0x8d},
+ {value: 0x812a, lo: 0x8e, hi: 0x8e},
+ {value: 0x812d, lo: 0x8f, hi: 0x8f},
+ {value: 0x8129, lo: 0x90, hi: 0x90},
+ {value: 0x8132, lo: 0x91, hi: 0xb5},
+ {value: 0x8134, lo: 0xbc, hi: 0xbc},
+ {value: 0x812d, lo: 0xbd, hi: 0xbd},
+ {value: 0x8132, lo: 0xbe, hi: 0xbe},
+ {value: 0x812d, lo: 0xbf, hi: 0xbf},
+ // Block 0x3d, offset 0x155
+ {value: 0x0004, lo: 0x03},
+ {value: 0x04ab, lo: 0x80, hi: 0x81},
+ {value: 0x8100, lo: 0x97, hi: 0x97},
+ {value: 0x8100, lo: 0xbe, hi: 0xbe},
+ // Block 0x3e, offset 0x159
+ {value: 0x0000, lo: 0x0d},
+ {value: 0x8132, lo: 0x90, hi: 0x91},
+ {value: 0x8101, lo: 0x92, hi: 0x93},
+ {value: 0x8132, lo: 0x94, hi: 0x97},
+ {value: 0x8101, lo: 0x98, hi: 0x9a},
+ {value: 0x8132, lo: 0x9b, hi: 0x9c},
+ {value: 0x8132, lo: 0xa1, hi: 0xa1},
+ {value: 0x8101, lo: 0xa5, hi: 0xa6},
+ {value: 0x8132, lo: 0xa7, hi: 0xa7},
+ {value: 0x812d, lo: 0xa8, hi: 0xa8},
+ {value: 0x8132, lo: 0xa9, hi: 0xa9},
+ {value: 0x8101, lo: 0xaa, hi: 0xab},
+ {value: 0x812d, lo: 0xac, hi: 0xaf},
+ {value: 0x8132, lo: 0xb0, hi: 0xb0},
+ // Block 0x3f, offset 0x167
+ {value: 0x42b6, lo: 0x02},
+ {value: 0x01b8, lo: 0xa6, hi: 0xa6},
+ {value: 0x0057, lo: 0xaa, hi: 0xab},
+ // Block 0x40, offset 0x16a
+ {value: 0x0007, lo: 0x05},
+ {value: 0xa000, lo: 0x90, hi: 0x90},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0xa000, lo: 0x94, hi: 0x94},
+ {value: 0x3bf4, lo: 0x9a, hi: 0x9b},
+ {value: 0x3c02, lo: 0xae, hi: 0xae},
+ // Block 0x41, offset 0x170
+ {value: 0x000e, lo: 0x05},
+ {value: 0x3c09, lo: 0x8d, hi: 0x8e},
+ {value: 0x3c10, lo: 0x8f, hi: 0x8f},
+ {value: 0xa000, lo: 0x90, hi: 0x90},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0xa000, lo: 0x94, hi: 0x94},
+ // Block 0x42, offset 0x176
+ {value: 0x63cd, lo: 0x0a},
+ {value: 0xa000, lo: 0x83, hi: 0x83},
+ {value: 0x3c1e, lo: 0x84, hi: 0x84},
+ {value: 0xa000, lo: 0x88, hi: 0x88},
+ {value: 0x3c25, lo: 0x89, hi: 0x89},
+ {value: 0xa000, lo: 0x8b, hi: 0x8b},
+ {value: 0x3c2c, lo: 0x8c, hi: 0x8c},
+ {value: 0xa000, lo: 0xa3, hi: 0xa3},
+ {value: 0x3c33, lo: 0xa4, hi: 0xa5},
+ {value: 0x3c3a, lo: 0xa6, hi: 0xa6},
+ {value: 0xa000, lo: 0xbc, hi: 0xbc},
+ // Block 0x43, offset 0x181
+ {value: 0x0007, lo: 0x03},
+ {value: 0x3ca3, lo: 0xa0, hi: 0xa1},
+ {value: 0x3ccd, lo: 0xa2, hi: 0xa3},
+ {value: 0x3cf7, lo: 0xaa, hi: 0xad},
+ // Block 0x44, offset 0x185
+ {value: 0x0004, lo: 0x01},
+ {value: 0x0503, lo: 0xa9, hi: 0xaa},
+ // Block 0x45, offset 0x187
+ {value: 0x0000, lo: 0x01},
+ {value: 0x4518, lo: 0x9c, hi: 0x9c},
+ // Block 0x46, offset 0x189
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xaf, hi: 0xb1},
+ // Block 0x47, offset 0x18b
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0xbf, hi: 0xbf},
+ // Block 0x48, offset 0x18d
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xa0, hi: 0xbf},
+ // Block 0x49, offset 0x18f
+ {value: 0x0000, lo: 0x05},
+ {value: 0x812c, lo: 0xaa, hi: 0xaa},
+ {value: 0x8131, lo: 0xab, hi: 0xab},
+ {value: 0x8133, lo: 0xac, hi: 0xac},
+ {value: 0x812e, lo: 0xad, hi: 0xad},
+ {value: 0x812f, lo: 0xae, hi: 0xaf},
+ // Block 0x4a, offset 0x195
+ {value: 0x0000, lo: 0x07},
+ {value: 0x8100, lo: 0x84, hi: 0x84},
+ {value: 0x8100, lo: 0x87, hi: 0x87},
+ {value: 0x8100, lo: 0x90, hi: 0x90},
+ {value: 0x8100, lo: 0x9e, hi: 0x9e},
+ {value: 0x8100, lo: 0xa1, hi: 0xa1},
+ {value: 0x8100, lo: 0xb2, hi: 0xb2},
+ {value: 0x8100, lo: 0xbb, hi: 0xbb},
+ // Block 0x4b, offset 0x19d
+ {value: 0x0000, lo: 0x03},
+ {value: 0x8100, lo: 0x80, hi: 0x80},
+ {value: 0x8100, lo: 0x8b, hi: 0x8b},
+ {value: 0x8100, lo: 0x8e, hi: 0x8e},
+ // Block 0x4c, offset 0x1a1
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8132, lo: 0xaf, hi: 0xaf},
+ {value: 0x8132, lo: 0xb4, hi: 0xbd},
+ // Block 0x4d, offset 0x1a4
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0x9f, hi: 0x9f},
+ // Block 0x4e, offset 0x1a6
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xb0, hi: 0xb1},
+ // Block 0x4f, offset 0x1a8
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0x86, hi: 0x86},
+ // Block 0x50, offset 0x1aa
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x84, hi: 0x84},
+ {value: 0x8132, lo: 0xa0, hi: 0xb1},
+ // Block 0x51, offset 0x1ad
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0xab, hi: 0xad},
+ // Block 0x52, offset 0x1af
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0x93, hi: 0x93},
+ // Block 0x53, offset 0x1b1
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8102, lo: 0xb3, hi: 0xb3},
+ // Block 0x54, offset 0x1b3
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0x80, hi: 0x80},
+ // Block 0x55, offset 0x1b5
+ {value: 0x0000, lo: 0x05},
+ {value: 0x8132, lo: 0xb0, hi: 0xb0},
+ {value: 0x8132, lo: 0xb2, hi: 0xb3},
+ {value: 0x812d, lo: 0xb4, hi: 0xb4},
+ {value: 0x8132, lo: 0xb7, hi: 0xb8},
+ {value: 0x8132, lo: 0xbe, hi: 0xbf},
+ // Block 0x56, offset 0x1bb
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8132, lo: 0x81, hi: 0x81},
+ {value: 0x8104, lo: 0xb6, hi: 0xb6},
+ // Block 0x57, offset 0x1be
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0xad, hi: 0xad},
+ // Block 0x58, offset 0x1c0
+ {value: 0x0000, lo: 0x06},
+ {value: 0xe500, lo: 0x80, hi: 0x80},
+ {value: 0xc600, lo: 0x81, hi: 0x9b},
+ {value: 0xe500, lo: 0x9c, hi: 0x9c},
+ {value: 0xc600, lo: 0x9d, hi: 0xb7},
+ {value: 0xe500, lo: 0xb8, hi: 0xb8},
+ {value: 0xc600, lo: 0xb9, hi: 0xbf},
+ // Block 0x59, offset 0x1c7
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x93},
+ {value: 0xe500, lo: 0x94, hi: 0x94},
+ {value: 0xc600, lo: 0x95, hi: 0xaf},
+ {value: 0xe500, lo: 0xb0, hi: 0xb0},
+ {value: 0xc600, lo: 0xb1, hi: 0xbf},
+ // Block 0x5a, offset 0x1cd
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x8b},
+ {value: 0xe500, lo: 0x8c, hi: 0x8c},
+ {value: 0xc600, lo: 0x8d, hi: 0xa7},
+ {value: 0xe500, lo: 0xa8, hi: 0xa8},
+ {value: 0xc600, lo: 0xa9, hi: 0xbf},
+ // Block 0x5b, offset 0x1d3
+ {value: 0x0000, lo: 0x07},
+ {value: 0xc600, lo: 0x80, hi: 0x83},
+ {value: 0xe500, lo: 0x84, hi: 0x84},
+ {value: 0xc600, lo: 0x85, hi: 0x9f},
+ {value: 0xe500, lo: 0xa0, hi: 0xa0},
+ {value: 0xc600, lo: 0xa1, hi: 0xbb},
+ {value: 0xe500, lo: 0xbc, hi: 0xbc},
+ {value: 0xc600, lo: 0xbd, hi: 0xbf},
+ // Block 0x5c, offset 0x1db
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x97},
+ {value: 0xe500, lo: 0x98, hi: 0x98},
+ {value: 0xc600, lo: 0x99, hi: 0xb3},
+ {value: 0xe500, lo: 0xb4, hi: 0xb4},
+ {value: 0xc600, lo: 0xb5, hi: 0xbf},
+ // Block 0x5d, offset 0x1e1
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x8f},
+ {value: 0xe500, lo: 0x90, hi: 0x90},
+ {value: 0xc600, lo: 0x91, hi: 0xab},
+ {value: 0xe500, lo: 0xac, hi: 0xac},
+ {value: 0xc600, lo: 0xad, hi: 0xbf},
+ // Block 0x5e, offset 0x1e7
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x87},
+ {value: 0xe500, lo: 0x88, hi: 0x88},
+ {value: 0xc600, lo: 0x89, hi: 0xa3},
+ {value: 0xe500, lo: 0xa4, hi: 0xa4},
+ {value: 0xc600, lo: 0xa5, hi: 0xbf},
+ // Block 0x5f, offset 0x1ed
+ {value: 0x0000, lo: 0x03},
+ {value: 0xc600, lo: 0x80, hi: 0x87},
+ {value: 0xe500, lo: 0x88, hi: 0x88},
+ {value: 0xc600, lo: 0x89, hi: 0xa3},
+ // Block 0x60, offset 0x1f1
+ {value: 0x0006, lo: 0x0d},
+ {value: 0x43cb, lo: 0x9d, hi: 0x9d},
+ {value: 0x8115, lo: 0x9e, hi: 0x9e},
+ {value: 0x443d, lo: 0x9f, hi: 0x9f},
+ {value: 0x442b, lo: 0xaa, hi: 0xab},
+ {value: 0x452f, lo: 0xac, hi: 0xac},
+ {value: 0x4537, lo: 0xad, hi: 0xad},
+ {value: 0x4383, lo: 0xae, hi: 0xb1},
+ {value: 0x43a1, lo: 0xb2, hi: 0xb4},
+ {value: 0x43b9, lo: 0xb5, hi: 0xb6},
+ {value: 0x43c5, lo: 0xb8, hi: 0xb8},
+ {value: 0x43d1, lo: 0xb9, hi: 0xbb},
+ {value: 0x43e9, lo: 0xbc, hi: 0xbc},
+ {value: 0x43ef, lo: 0xbe, hi: 0xbe},
+ // Block 0x61, offset 0x1ff
+ {value: 0x0006, lo: 0x08},
+ {value: 0x43f5, lo: 0x80, hi: 0x81},
+ {value: 0x4401, lo: 0x83, hi: 0x84},
+ {value: 0x4413, lo: 0x86, hi: 0x89},
+ {value: 0x4437, lo: 0x8a, hi: 0x8a},
+ {value: 0x43b3, lo: 0x8b, hi: 0x8b},
+ {value: 0x439b, lo: 0x8c, hi: 0x8c},
+ {value: 0x43e3, lo: 0x8d, hi: 0x8d},
+ {value: 0x440d, lo: 0x8e, hi: 0x8e},
+ // Block 0x62, offset 0x208
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8100, lo: 0xa4, hi: 0xa5},
+ {value: 0x8100, lo: 0xb0, hi: 0xb1},
+ // Block 0x63, offset 0x20b
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8100, lo: 0x9b, hi: 0x9d},
+ {value: 0x8200, lo: 0x9e, hi: 0xa3},
+ // Block 0x64, offset 0x20e
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8100, lo: 0x90, hi: 0x90},
+ // Block 0x65, offset 0x210
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8100, lo: 0x99, hi: 0x99},
+ {value: 0x8200, lo: 0xb2, hi: 0xb4},
+ // Block 0x66, offset 0x213
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8100, lo: 0xbc, hi: 0xbd},
+ // Block 0x67, offset 0x215
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8132, lo: 0xa0, hi: 0xa6},
+ {value: 0x812d, lo: 0xa7, hi: 0xad},
+ // Block 0x68, offset 0x218
+ {value: 0x0000, lo: 0x04},
+ {value: 0x8100, lo: 0x89, hi: 0x8c},
+ {value: 0x8100, lo: 0xb0, hi: 0xb2},
+ {value: 0x8100, lo: 0xb4, hi: 0xb4},
+ {value: 0x8100, lo: 0xb6, hi: 0xbf},
+ // Block 0x69, offset 0x21d
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8100, lo: 0x81, hi: 0x8c},
+ // Block 0x6a, offset 0x21f
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8100, lo: 0xb5, hi: 0xba},
+ // Block 0x6b, offset 0x221
+ {value: 0x0000, lo: 0x01},
+ {value: 0x4a26, lo: 0x9e, hi: 0x9f},
+ // Block 0x6c, offset 0x223
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8100, lo: 0xa3, hi: 0xa3},
+ // Block 0x6d, offset 0x225
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0xbd, hi: 0xbd},
+ // Block 0x6e, offset 0x227
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0xa0, hi: 0xa0},
+ // Block 0x6f, offset 0x229
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xb6, hi: 0xba},
+ // Block 0x70, offset 0x22b
+ {value: 0x002c, lo: 0x05},
+ {value: 0x812d, lo: 0x8d, hi: 0x8d},
+ {value: 0x8132, lo: 0x8f, hi: 0x8f},
+ {value: 0x8132, lo: 0xb8, hi: 0xb8},
+ {value: 0x8101, lo: 0xb9, hi: 0xba},
+ {value: 0x8104, lo: 0xbf, hi: 0xbf},
+ // Block 0x71, offset 0x231
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8132, lo: 0xa5, hi: 0xa5},
+ {value: 0x812d, lo: 0xa6, hi: 0xa6},
+ // Block 0x72, offset 0x234
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x86, hi: 0x86},
+ {value: 0x8104, lo: 0xbf, hi: 0xbf},
+ // Block 0x73, offset 0x237
+ {value: 0x17fe, lo: 0x07},
+ {value: 0xa000, lo: 0x99, hi: 0x99},
+ {value: 0x4273, lo: 0x9a, hi: 0x9a},
+ {value: 0xa000, lo: 0x9b, hi: 0x9b},
+ {value: 0x427d, lo: 0x9c, hi: 0x9c},
+ {value: 0xa000, lo: 0xa5, hi: 0xa5},
+ {value: 0x4287, lo: 0xab, hi: 0xab},
+ {value: 0x8104, lo: 0xb9, hi: 0xba},
+ // Block 0x74, offset 0x23f
+ {value: 0x0000, lo: 0x06},
+ {value: 0x8132, lo: 0x80, hi: 0x82},
+ {value: 0x9900, lo: 0xa7, hi: 0xa7},
+ {value: 0x19a5, lo: 0xae, hi: 0xae},
+ {value: 0x19ae, lo: 0xaf, hi: 0xaf},
+ {value: 0xa000, lo: 0xb1, hi: 0xb2},
+ {value: 0x8104, lo: 0xb3, hi: 0xb4},
+ // Block 0x75, offset 0x246
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0xb5, hi: 0xb5},
+ {value: 0x8102, lo: 0xb6, hi: 0xb6},
+ // Block 0x76, offset 0x249
+ {value: 0x0002, lo: 0x01},
+ {value: 0x8102, lo: 0xa9, hi: 0xaa},
+ // Block 0x77, offset 0x24b
+ {value: 0x0000, lo: 0x07},
+ {value: 0xa000, lo: 0x87, hi: 0x87},
+ {value: 0x19b7, lo: 0x8b, hi: 0x8b},
+ {value: 0x19c0, lo: 0x8c, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x97, hi: 0x97},
+ {value: 0x8132, lo: 0xa6, hi: 0xac},
+ {value: 0x8132, lo: 0xb0, hi: 0xb4},
+ // Block 0x78, offset 0x253
+ {value: 0x7f37, lo: 0x06},
+ {value: 0x9900, lo: 0xb0, hi: 0xb0},
+ {value: 0xa000, lo: 0xb9, hi: 0xb9},
+ {value: 0x9900, lo: 0xba, hi: 0xba},
+ {value: 0x19d2, lo: 0xbb, hi: 0xbb},
+ {value: 0x19c9, lo: 0xbc, hi: 0xbd},
+ {value: 0x19db, lo: 0xbe, hi: 0xbe},
+ // Block 0x79, offset 0x25a
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x82, hi: 0x82},
+ {value: 0x8102, lo: 0x83, hi: 0x83},
+ // Block 0x7a, offset 0x25d
+ {value: 0x0000, lo: 0x05},
+ {value: 0x9900, lo: 0xaf, hi: 0xaf},
+ {value: 0xa000, lo: 0xb8, hi: 0xb9},
+ {value: 0x19e4, lo: 0xba, hi: 0xba},
+ {value: 0x19ed, lo: 0xbb, hi: 0xbb},
+ {value: 0x8104, lo: 0xbf, hi: 0xbf},
+ // Block 0x7b, offset 0x263
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8102, lo: 0x80, hi: 0x80},
+ // Block 0x7c, offset 0x265
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0xb6, hi: 0xb6},
+ {value: 0x8102, lo: 0xb7, hi: 0xb7},
+ // Block 0x7d, offset 0x268
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8101, lo: 0xb0, hi: 0xb4},
+ // Block 0x7e, offset 0x26a
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xb0, hi: 0xb6},
+ // Block 0x7f, offset 0x26c
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8101, lo: 0x9e, hi: 0x9e},
+ // Block 0x80, offset 0x26e
+ {value: 0x0000, lo: 0x0c},
+ {value: 0x4607, lo: 0x9e, hi: 0x9e},
+ {value: 0x4611, lo: 0x9f, hi: 0x9f},
+ {value: 0x4645, lo: 0xa0, hi: 0xa0},
+ {value: 0x4653, lo: 0xa1, hi: 0xa1},
+ {value: 0x4661, lo: 0xa2, hi: 0xa2},
+ {value: 0x466f, lo: 0xa3, hi: 0xa3},
+ {value: 0x467d, lo: 0xa4, hi: 0xa4},
+ {value: 0x812b, lo: 0xa5, hi: 0xa6},
+ {value: 0x8101, lo: 0xa7, hi: 0xa9},
+ {value: 0x8130, lo: 0xad, hi: 0xad},
+ {value: 0x812b, lo: 0xae, hi: 0xb2},
+ {value: 0x812d, lo: 0xbb, hi: 0xbf},
+ // Block 0x81, offset 0x27b
+ {value: 0x0000, lo: 0x09},
+ {value: 0x812d, lo: 0x80, hi: 0x82},
+ {value: 0x8132, lo: 0x85, hi: 0x89},
+ {value: 0x812d, lo: 0x8a, hi: 0x8b},
+ {value: 0x8132, lo: 0xaa, hi: 0xad},
+ {value: 0x461b, lo: 0xbb, hi: 0xbb},
+ {value: 0x4625, lo: 0xbc, hi: 0xbc},
+ {value: 0x468b, lo: 0xbd, hi: 0xbd},
+ {value: 0x46a7, lo: 0xbe, hi: 0xbe},
+ {value: 0x4699, lo: 0xbf, hi: 0xbf},
+ // Block 0x82, offset 0x285
+ {value: 0x0000, lo: 0x01},
+ {value: 0x46b5, lo: 0x80, hi: 0x80},
+ // Block 0x83, offset 0x287
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0x82, hi: 0x84},
+ // Block 0x84, offset 0x289
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0x90, hi: 0x96},
+ // Block 0x85, offset 0x28b
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8100, lo: 0x93, hi: 0x93},
+}
+
+// lookup returns the trie value for the first UTF-8 encoding in s and
+// the width in bytes of this encoding. The size will be 0 if s does not
+// hold enough bytes to complete the encoding. len(s) must be greater than 0.
+func (t *nfkcTrie) lookup(s []byte) (v uint16, sz int) {
+ c0 := s[0]
+ switch {
+ case c0 < 0x80: // is ASCII
+ return nfkcValues[c0], 1
+ case c0 < 0xC0:
+ return 0, 1 // Illegal UTF-8: not a starter, not ASCII.
+ case c0 < 0xE0: // 2-byte UTF-8
+ if len(s) < 2 {
+ return 0, 0
+ }
+ i := nfkcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c1), 2
+ case c0 < 0xF0: // 3-byte UTF-8
+ if len(s) < 3 {
+ return 0, 0
+ }
+ i := nfkcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ o := uint32(i)<<6 + uint32(c1)
+ i = nfkcIndex[o]
+ c2 := s[2]
+ if c2 < 0x80 || 0xC0 <= c2 {
+ return 0, 2 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c2), 3
+ case c0 < 0xF8: // 4-byte UTF-8
+ if len(s) < 4 {
+ return 0, 0
+ }
+ i := nfkcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ o := uint32(i)<<6 + uint32(c1)
+ i = nfkcIndex[o]
+ c2 := s[2]
+ if c2 < 0x80 || 0xC0 <= c2 {
+ return 0, 2 // Illegal UTF-8: not a continuation byte.
+ }
+ o = uint32(i)<<6 + uint32(c2)
+ i = nfkcIndex[o]
+ c3 := s[3]
+ if c3 < 0x80 || 0xC0 <= c3 {
+ return 0, 3 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c3), 4
+ }
+ // Illegal rune
+ return 0, 1
+}
+
+// lookupUnsafe returns the trie value for the first UTF-8 encoding in s.
+// s must start with a full and valid UTF-8 encoded rune.
+func (t *nfkcTrie) lookupUnsafe(s []byte) uint16 {
+ c0 := s[0]
+ if c0 < 0x80 { // is ASCII
+ return nfkcValues[c0]
+ }
+ i := nfkcIndex[c0]
+ if c0 < 0xE0 { // 2-byte UTF-8
+ return t.lookupValue(uint32(i), s[1])
+ }
+ i = nfkcIndex[uint32(i)<<6+uint32(s[1])]
+ if c0 < 0xF0 { // 3-byte UTF-8
+ return t.lookupValue(uint32(i), s[2])
+ }
+ i = nfkcIndex[uint32(i)<<6+uint32(s[2])]
+ if c0 < 0xF8 { // 4-byte UTF-8
+ return t.lookupValue(uint32(i), s[3])
+ }
+ return 0
+}
+
+// lookupString returns the trie value for the first UTF-8 encoding in s and
+// the width in bytes of this encoding. The size will be 0 if s does not
+// hold enough bytes to complete the encoding. len(s) must be greater than 0.
+func (t *nfkcTrie) lookupString(s string) (v uint16, sz int) {
+ c0 := s[0]
+ switch {
+ case c0 < 0x80: // is ASCII
+ return nfkcValues[c0], 1
+ case c0 < 0xC0:
+ return 0, 1 // Illegal UTF-8: not a starter, not ASCII.
+ case c0 < 0xE0: // 2-byte UTF-8
+ if len(s) < 2 {
+ return 0, 0
+ }
+ i := nfkcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c1), 2
+ case c0 < 0xF0: // 3-byte UTF-8
+ if len(s) < 3 {
+ return 0, 0
+ }
+ i := nfkcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ o := uint32(i)<<6 + uint32(c1)
+ i = nfkcIndex[o]
+ c2 := s[2]
+ if c2 < 0x80 || 0xC0 <= c2 {
+ return 0, 2 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c2), 3
+ case c0 < 0xF8: // 4-byte UTF-8
+ if len(s) < 4 {
+ return 0, 0
+ }
+ i := nfkcIndex[c0]
+ c1 := s[1]
+ if c1 < 0x80 || 0xC0 <= c1 {
+ return 0, 1 // Illegal UTF-8: not a continuation byte.
+ }
+ o := uint32(i)<<6 + uint32(c1)
+ i = nfkcIndex[o]
+ c2 := s[2]
+ if c2 < 0x80 || 0xC0 <= c2 {
+ return 0, 2 // Illegal UTF-8: not a continuation byte.
+ }
+ o = uint32(i)<<6 + uint32(c2)
+ i = nfkcIndex[o]
+ c3 := s[3]
+ if c3 < 0x80 || 0xC0 <= c3 {
+ return 0, 3 // Illegal UTF-8: not a continuation byte.
+ }
+ return t.lookupValue(uint32(i), c3), 4
+ }
+ // Illegal rune
+ return 0, 1
+}
+
+// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s.
+// s must start with a full and valid UTF-8 encoded rune.
+func (t *nfkcTrie) lookupStringUnsafe(s string) uint16 {
+ c0 := s[0]
+ if c0 < 0x80 { // is ASCII
+ return nfkcValues[c0]
+ }
+ i := nfkcIndex[c0]
+ if c0 < 0xE0 { // 2-byte UTF-8
+ return t.lookupValue(uint32(i), s[1])
+ }
+ i = nfkcIndex[uint32(i)<<6+uint32(s[1])]
+ if c0 < 0xF0 { // 3-byte UTF-8
+ return t.lookupValue(uint32(i), s[2])
+ }
+ i = nfkcIndex[uint32(i)<<6+uint32(s[2])]
+ if c0 < 0xF8 { // 4-byte UTF-8
+ return t.lookupValue(uint32(i), s[3])
+ }
+ return 0
+}
+
+// nfkcTrie. Total size: 16752 bytes (16.36 KiB). Checksum: 5cd2e697fcf78b3e.
+type nfkcTrie struct{}
+
+func newNfkcTrie(i int) *nfkcTrie {
+ return &nfkcTrie{}
+}
+
+// lookupValue determines the type of block n and looks up the value for b.
+func (t *nfkcTrie) lookupValue(n uint32, b byte) uint16 {
+ switch {
+ case n < 88:
+ return uint16(nfkcValues[n<<6+uint32(b)])
+ default:
+ n -= 88
+ return uint16(nfkcSparse.lookup(n, b))
+ }
+}
+
+// nfkcValues: 90 blocks, 5760 entries, 11520 bytes
+// The third block is the zero block.
+var nfkcValues = [5760]uint16{
+ // Block 0x0, offset 0x0
+ 0x3c: 0xa000, 0x3d: 0xa000, 0x3e: 0xa000,
+ // Block 0x1, offset 0x40
+ 0x41: 0xa000, 0x42: 0xa000, 0x43: 0xa000, 0x44: 0xa000, 0x45: 0xa000,
+ 0x46: 0xa000, 0x47: 0xa000, 0x48: 0xa000, 0x49: 0xa000, 0x4a: 0xa000, 0x4b: 0xa000,
+ 0x4c: 0xa000, 0x4d: 0xa000, 0x4e: 0xa000, 0x4f: 0xa000, 0x50: 0xa000,
+ 0x52: 0xa000, 0x53: 0xa000, 0x54: 0xa000, 0x55: 0xa000, 0x56: 0xa000, 0x57: 0xa000,
+ 0x58: 0xa000, 0x59: 0xa000, 0x5a: 0xa000,
+ 0x61: 0xa000, 0x62: 0xa000, 0x63: 0xa000,
+ 0x64: 0xa000, 0x65: 0xa000, 0x66: 0xa000, 0x67: 0xa000, 0x68: 0xa000, 0x69: 0xa000,
+ 0x6a: 0xa000, 0x6b: 0xa000, 0x6c: 0xa000, 0x6d: 0xa000, 0x6e: 0xa000, 0x6f: 0xa000,
+ 0x70: 0xa000, 0x72: 0xa000, 0x73: 0xa000, 0x74: 0xa000, 0x75: 0xa000,
+ 0x76: 0xa000, 0x77: 0xa000, 0x78: 0xa000, 0x79: 0xa000, 0x7a: 0xa000,
+ // Block 0x2, offset 0x80
+ // Block 0x3, offset 0xc0
+ 0xc0: 0x2faa, 0xc1: 0x2faf, 0xc2: 0x46c3, 0xc3: 0x2fb4, 0xc4: 0x46d2, 0xc5: 0x46d7,
+ 0xc6: 0xa000, 0xc7: 0x46e1, 0xc8: 0x301d, 0xc9: 0x3022, 0xca: 0x46e6, 0xcb: 0x3036,
+ 0xcc: 0x30a9, 0xcd: 0x30ae, 0xce: 0x30b3, 0xcf: 0x46fa, 0xd1: 0x313f,
+ 0xd2: 0x3162, 0xd3: 0x3167, 0xd4: 0x4704, 0xd5: 0x4709, 0xd6: 0x4718,
+ 0xd8: 0xa000, 0xd9: 0x31ee, 0xda: 0x31f3, 0xdb: 0x31f8, 0xdc: 0x474a, 0xdd: 0x3270,
+ 0xe0: 0x32b6, 0xe1: 0x32bb, 0xe2: 0x4754, 0xe3: 0x32c0,
+ 0xe4: 0x4763, 0xe5: 0x4768, 0xe6: 0xa000, 0xe7: 0x4772, 0xe8: 0x3329, 0xe9: 0x332e,
+ 0xea: 0x4777, 0xeb: 0x3342, 0xec: 0x33ba, 0xed: 0x33bf, 0xee: 0x33c4, 0xef: 0x478b,
+ 0xf1: 0x3450, 0xf2: 0x3473, 0xf3: 0x3478, 0xf4: 0x4795, 0xf5: 0x479a,
+ 0xf6: 0x47a9, 0xf8: 0xa000, 0xf9: 0x3504, 0xfa: 0x3509, 0xfb: 0x350e,
+ 0xfc: 0x47db, 0xfd: 0x358b, 0xff: 0x35a4,
+ // Block 0x4, offset 0x100
+ 0x100: 0x2fb9, 0x101: 0x32c5, 0x102: 0x46c8, 0x103: 0x4759, 0x104: 0x2fd7, 0x105: 0x32e3,
+ 0x106: 0x2feb, 0x107: 0x32f7, 0x108: 0x2ff0, 0x109: 0x32fc, 0x10a: 0x2ff5, 0x10b: 0x3301,
+ 0x10c: 0x2ffa, 0x10d: 0x3306, 0x10e: 0x3004, 0x10f: 0x3310,
+ 0x112: 0x46eb, 0x113: 0x477c, 0x114: 0x302c, 0x115: 0x3338, 0x116: 0x3031, 0x117: 0x333d,
+ 0x118: 0x304f, 0x119: 0x335b, 0x11a: 0x3040, 0x11b: 0x334c, 0x11c: 0x3068, 0x11d: 0x3374,
+ 0x11e: 0x3072, 0x11f: 0x337e, 0x120: 0x3077, 0x121: 0x3383, 0x122: 0x3081, 0x123: 0x338d,
+ 0x124: 0x3086, 0x125: 0x3392, 0x128: 0x30b8, 0x129: 0x33c9,
+ 0x12a: 0x30bd, 0x12b: 0x33ce, 0x12c: 0x30c2, 0x12d: 0x33d3, 0x12e: 0x30e5, 0x12f: 0x33f1,
+ 0x130: 0x30c7, 0x132: 0x1af0, 0x133: 0x1b7a, 0x134: 0x30ef, 0x135: 0x33fb,
+ 0x136: 0x3103, 0x137: 0x3414, 0x139: 0x310d, 0x13a: 0x341e, 0x13b: 0x3117,
+ 0x13c: 0x3428, 0x13d: 0x3112, 0x13e: 0x3423, 0x13f: 0x1d3f,
+ // Block 0x5, offset 0x140
+ 0x140: 0x1dc7, 0x143: 0x313a, 0x144: 0x344b, 0x145: 0x3153,
+ 0x146: 0x3464, 0x147: 0x3149, 0x148: 0x345a, 0x149: 0x1def,
+ 0x14c: 0x470e, 0x14d: 0x479f, 0x14e: 0x316c, 0x14f: 0x347d, 0x150: 0x3176, 0x151: 0x3487,
+ 0x154: 0x3194, 0x155: 0x34a5, 0x156: 0x31ad, 0x157: 0x34be,
+ 0x158: 0x319e, 0x159: 0x34af, 0x15a: 0x4731, 0x15b: 0x47c2, 0x15c: 0x31b7, 0x15d: 0x34c8,
+ 0x15e: 0x31c6, 0x15f: 0x34d7, 0x160: 0x4736, 0x161: 0x47c7, 0x162: 0x31df, 0x163: 0x34f5,
+ 0x164: 0x31d0, 0x165: 0x34e6, 0x168: 0x4740, 0x169: 0x47d1,
+ 0x16a: 0x4745, 0x16b: 0x47d6, 0x16c: 0x31fd, 0x16d: 0x3513, 0x16e: 0x3207, 0x16f: 0x351d,
+ 0x170: 0x320c, 0x171: 0x3522, 0x172: 0x322a, 0x173: 0x3540, 0x174: 0x324d, 0x175: 0x3563,
+ 0x176: 0x3275, 0x177: 0x3590, 0x178: 0x3289, 0x179: 0x3298, 0x17a: 0x35b8, 0x17b: 0x32a2,
+ 0x17c: 0x35c2, 0x17d: 0x32a7, 0x17e: 0x35c7, 0x17f: 0x00a7,
+ // Block 0x6, offset 0x180
+ 0x184: 0x2ed0, 0x185: 0x2ed6,
+ 0x186: 0x2edc, 0x187: 0x1b05, 0x188: 0x1b08, 0x189: 0x1b9b, 0x18a: 0x1b1a, 0x18b: 0x1b1d,
+ 0x18c: 0x1bd1, 0x18d: 0x2fc3, 0x18e: 0x32cf, 0x18f: 0x30d1, 0x190: 0x33dd, 0x191: 0x317b,
+ 0x192: 0x348c, 0x193: 0x3211, 0x194: 0x3527, 0x195: 0x3a0a, 0x196: 0x3b99, 0x197: 0x3a03,
+ 0x198: 0x3b92, 0x199: 0x3a11, 0x19a: 0x3ba0, 0x19b: 0x39fc, 0x19c: 0x3b8b,
+ 0x19e: 0x38eb, 0x19f: 0x3a7a, 0x1a0: 0x38e4, 0x1a1: 0x3a73, 0x1a2: 0x35ee, 0x1a3: 0x3600,
+ 0x1a6: 0x307c, 0x1a7: 0x3388, 0x1a8: 0x30f9, 0x1a9: 0x340a,
+ 0x1aa: 0x4727, 0x1ab: 0x47b8, 0x1ac: 0x39cb, 0x1ad: 0x3b5a, 0x1ae: 0x3612, 0x1af: 0x3618,
+ 0x1b0: 0x3400, 0x1b1: 0x1ad5, 0x1b2: 0x1ad8, 0x1b3: 0x1b62, 0x1b4: 0x3063, 0x1b5: 0x336f,
+ 0x1b8: 0x3135, 0x1b9: 0x3446, 0x1ba: 0x38f2, 0x1bb: 0x3a81,
+ 0x1bc: 0x35e8, 0x1bd: 0x35fa, 0x1be: 0x35f4, 0x1bf: 0x3606,
+ // Block 0x7, offset 0x1c0
+ 0x1c0: 0x2fc8, 0x1c1: 0x32d4, 0x1c2: 0x2fcd, 0x1c3: 0x32d9, 0x1c4: 0x3045, 0x1c5: 0x3351,
+ 0x1c6: 0x304a, 0x1c7: 0x3356, 0x1c8: 0x30d6, 0x1c9: 0x33e2, 0x1ca: 0x30db, 0x1cb: 0x33e7,
+ 0x1cc: 0x3180, 0x1cd: 0x3491, 0x1ce: 0x3185, 0x1cf: 0x3496, 0x1d0: 0x31a3, 0x1d1: 0x34b4,
+ 0x1d2: 0x31a8, 0x1d3: 0x34b9, 0x1d4: 0x3216, 0x1d5: 0x352c, 0x1d6: 0x321b, 0x1d7: 0x3531,
+ 0x1d8: 0x31c1, 0x1d9: 0x34d2, 0x1da: 0x31da, 0x1db: 0x34f0,
+ 0x1de: 0x3095, 0x1df: 0x33a1,
+ 0x1e6: 0x46cd, 0x1e7: 0x475e, 0x1e8: 0x46f5, 0x1e9: 0x4786,
+ 0x1ea: 0x399a, 0x1eb: 0x3b29, 0x1ec: 0x3977, 0x1ed: 0x3b06, 0x1ee: 0x4713, 0x1ef: 0x47a4,
+ 0x1f0: 0x3993, 0x1f1: 0x3b22, 0x1f2: 0x327f, 0x1f3: 0x359a,
+ // Block 0x8, offset 0x200
+ 0x200: 0x9932, 0x201: 0x9932, 0x202: 0x9932, 0x203: 0x9932, 0x204: 0x9932, 0x205: 0x8132,
+ 0x206: 0x9932, 0x207: 0x9932, 0x208: 0x9932, 0x209: 0x9932, 0x20a: 0x9932, 0x20b: 0x9932,
+ 0x20c: 0x9932, 0x20d: 0x8132, 0x20e: 0x8132, 0x20f: 0x9932, 0x210: 0x8132, 0x211: 0x9932,
+ 0x212: 0x8132, 0x213: 0x9932, 0x214: 0x9932, 0x215: 0x8133, 0x216: 0x812d, 0x217: 0x812d,
+ 0x218: 0x812d, 0x219: 0x812d, 0x21a: 0x8133, 0x21b: 0x992b, 0x21c: 0x812d, 0x21d: 0x812d,
+ 0x21e: 0x812d, 0x21f: 0x812d, 0x220: 0x812d, 0x221: 0x8129, 0x222: 0x8129, 0x223: 0x992d,
+ 0x224: 0x992d, 0x225: 0x992d, 0x226: 0x992d, 0x227: 0x9929, 0x228: 0x9929, 0x229: 0x812d,
+ 0x22a: 0x812d, 0x22b: 0x812d, 0x22c: 0x812d, 0x22d: 0x992d, 0x22e: 0x992d, 0x22f: 0x812d,
+ 0x230: 0x992d, 0x231: 0x992d, 0x232: 0x812d, 0x233: 0x812d, 0x234: 0x8101, 0x235: 0x8101,
+ 0x236: 0x8101, 0x237: 0x8101, 0x238: 0x9901, 0x239: 0x812d, 0x23a: 0x812d, 0x23b: 0x812d,
+ 0x23c: 0x812d, 0x23d: 0x8132, 0x23e: 0x8132, 0x23f: 0x8132,
+ // Block 0x9, offset 0x240
+ 0x240: 0x49e9, 0x241: 0x49ee, 0x242: 0x9932, 0x243: 0x49f3, 0x244: 0x49f8, 0x245: 0x9936,
+ 0x246: 0x8132, 0x247: 0x812d, 0x248: 0x812d, 0x249: 0x812d, 0x24a: 0x8132, 0x24b: 0x8132,
+ 0x24c: 0x8132, 0x24d: 0x812d, 0x24e: 0x812d, 0x250: 0x8132, 0x251: 0x8132,
+ 0x252: 0x8132, 0x253: 0x812d, 0x254: 0x812d, 0x255: 0x812d, 0x256: 0x812d, 0x257: 0x8132,
+ 0x258: 0x8133, 0x259: 0x812d, 0x25a: 0x812d, 0x25b: 0x8132, 0x25c: 0x8134, 0x25d: 0x8135,
+ 0x25e: 0x8135, 0x25f: 0x8134, 0x260: 0x8135, 0x261: 0x8135, 0x262: 0x8134, 0x263: 0x8132,
+ 0x264: 0x8132, 0x265: 0x8132, 0x266: 0x8132, 0x267: 0x8132, 0x268: 0x8132, 0x269: 0x8132,
+ 0x26a: 0x8132, 0x26b: 0x8132, 0x26c: 0x8132, 0x26d: 0x8132, 0x26e: 0x8132, 0x26f: 0x8132,
+ 0x274: 0x0170,
+ 0x27a: 0x42e0,
+ 0x27e: 0x0037,
+ // Block 0xa, offset 0x280
+ 0x284: 0x4295, 0x285: 0x44b6,
+ 0x286: 0x3624, 0x287: 0x00ce, 0x288: 0x3642, 0x289: 0x364e, 0x28a: 0x3660,
+ 0x28c: 0x367e, 0x28e: 0x3690, 0x28f: 0x36ae, 0x290: 0x3e43, 0x291: 0xa000,
+ 0x295: 0xa000, 0x297: 0xa000,
+ 0x299: 0xa000,
+ 0x29f: 0xa000, 0x2a1: 0xa000,
+ 0x2a5: 0xa000, 0x2a9: 0xa000,
+ 0x2aa: 0x3672, 0x2ab: 0x36a2, 0x2ac: 0x4839, 0x2ad: 0x36d2, 0x2ae: 0x4863, 0x2af: 0x36e4,
+ 0x2b0: 0x3eab, 0x2b1: 0xa000, 0x2b5: 0xa000,
+ 0x2b7: 0xa000, 0x2b9: 0xa000,
+ 0x2bf: 0xa000,
+ // Block 0xb, offset 0x2c0
+ 0x2c1: 0xa000, 0x2c5: 0xa000,
+ 0x2c9: 0xa000, 0x2ca: 0x487b, 0x2cb: 0x4899,
+ 0x2cc: 0x3702, 0x2cd: 0x371a, 0x2ce: 0x48b1, 0x2d0: 0x01be, 0x2d1: 0x01d0,
+ 0x2d2: 0x01ac, 0x2d3: 0x4347, 0x2d4: 0x434d, 0x2d5: 0x01fa, 0x2d6: 0x01e8,
+ 0x2f0: 0x01d6, 0x2f1: 0x01eb, 0x2f2: 0x01ee, 0x2f4: 0x0188, 0x2f5: 0x01c7,
+ 0x2f9: 0x01a6,
+ // Block 0xc, offset 0x300
+ 0x300: 0x375c, 0x301: 0x3768, 0x303: 0x3756,
+ 0x306: 0xa000, 0x307: 0x3744,
+ 0x30c: 0x3798, 0x30d: 0x3780, 0x30e: 0x37aa, 0x310: 0xa000,
+ 0x313: 0xa000, 0x315: 0xa000, 0x316: 0xa000, 0x317: 0xa000,
+ 0x318: 0xa000, 0x319: 0x378c, 0x31a: 0xa000,
+ 0x31e: 0xa000, 0x323: 0xa000,
+ 0x327: 0xa000,
+ 0x32b: 0xa000, 0x32d: 0xa000,
+ 0x330: 0xa000, 0x333: 0xa000, 0x335: 0xa000,
+ 0x336: 0xa000, 0x337: 0xa000, 0x338: 0xa000, 0x339: 0x3810, 0x33a: 0xa000,
+ 0x33e: 0xa000,
+ // Block 0xd, offset 0x340
+ 0x341: 0x376e, 0x342: 0x37f2,
+ 0x350: 0x374a, 0x351: 0x37ce,
+ 0x352: 0x3750, 0x353: 0x37d4, 0x356: 0x3762, 0x357: 0x37e6,
+ 0x358: 0xa000, 0x359: 0xa000, 0x35a: 0x3864, 0x35b: 0x386a, 0x35c: 0x3774, 0x35d: 0x37f8,
+ 0x35e: 0x377a, 0x35f: 0x37fe, 0x362: 0x3786, 0x363: 0x380a,
+ 0x364: 0x3792, 0x365: 0x3816, 0x366: 0x379e, 0x367: 0x3822, 0x368: 0xa000, 0x369: 0xa000,
+ 0x36a: 0x3870, 0x36b: 0x3876, 0x36c: 0x37c8, 0x36d: 0x384c, 0x36e: 0x37a4, 0x36f: 0x3828,
+ 0x370: 0x37b0, 0x371: 0x3834, 0x372: 0x37b6, 0x373: 0x383a, 0x374: 0x37bc, 0x375: 0x3840,
+ 0x378: 0x37c2, 0x379: 0x3846,
+ // Block 0xe, offset 0x380
+ 0x387: 0x1ef4,
+ 0x391: 0x812d,
+ 0x392: 0x8132, 0x393: 0x8132, 0x394: 0x8132, 0x395: 0x8132, 0x396: 0x812d, 0x397: 0x8132,
+ 0x398: 0x8132, 0x399: 0x8132, 0x39a: 0x812e, 0x39b: 0x812d, 0x39c: 0x8132, 0x39d: 0x8132,
+ 0x39e: 0x8132, 0x39f: 0x8132, 0x3a0: 0x8132, 0x3a1: 0x8132, 0x3a2: 0x812d, 0x3a3: 0x812d,
+ 0x3a4: 0x812d, 0x3a5: 0x812d, 0x3a6: 0x812d, 0x3a7: 0x812d, 0x3a8: 0x8132, 0x3a9: 0x8132,
+ 0x3aa: 0x812d, 0x3ab: 0x8132, 0x3ac: 0x8132, 0x3ad: 0x812e, 0x3ae: 0x8131, 0x3af: 0x8132,
+ 0x3b0: 0x8105, 0x3b1: 0x8106, 0x3b2: 0x8107, 0x3b3: 0x8108, 0x3b4: 0x8109, 0x3b5: 0x810a,
+ 0x3b6: 0x810b, 0x3b7: 0x810c, 0x3b8: 0x810d, 0x3b9: 0x810e, 0x3ba: 0x810e, 0x3bb: 0x810f,
+ 0x3bc: 0x8110, 0x3bd: 0x8111, 0x3bf: 0x8112,
+ // Block 0xf, offset 0x3c0
+ 0x3c8: 0xa000, 0x3ca: 0xa000, 0x3cb: 0x8116,
+ 0x3cc: 0x8117, 0x3cd: 0x8118, 0x3ce: 0x8119, 0x3cf: 0x811a, 0x3d0: 0x811b, 0x3d1: 0x811c,
+ 0x3d2: 0x811d, 0x3d3: 0x9932, 0x3d4: 0x9932, 0x3d5: 0x992d, 0x3d6: 0x812d, 0x3d7: 0x8132,
+ 0x3d8: 0x8132, 0x3d9: 0x8132, 0x3da: 0x8132, 0x3db: 0x8132, 0x3dc: 0x812d, 0x3dd: 0x8132,
+ 0x3de: 0x8132, 0x3df: 0x812d,
+ 0x3f0: 0x811e, 0x3f5: 0x1f17,
+ 0x3f6: 0x21a6, 0x3f7: 0x21e2, 0x3f8: 0x21dd,
+ // Block 0x10, offset 0x400
+ 0x405: 0xa000,
+ 0x406: 0x1958, 0x407: 0xa000, 0x408: 0x195f, 0x409: 0xa000, 0x40a: 0x1966, 0x40b: 0xa000,
+ 0x40c: 0x196d, 0x40d: 0xa000, 0x40e: 0x1974, 0x411: 0xa000,
+ 0x412: 0x197b,
+ 0x434: 0x8102, 0x435: 0x9900,
+ 0x43a: 0xa000, 0x43b: 0x1982,
+ 0x43c: 0xa000, 0x43d: 0x1989, 0x43e: 0xa000, 0x43f: 0xa000,
+ // Block 0x11, offset 0x440
+ 0x440: 0x0069, 0x441: 0x006b, 0x442: 0x006f, 0x443: 0x0083, 0x444: 0x00f5, 0x445: 0x00f8,
+ 0x446: 0x048b, 0x447: 0x0085, 0x448: 0x0089, 0x449: 0x008b, 0x44a: 0x0104, 0x44b: 0x0107,
+ 0x44c: 0x010a, 0x44d: 0x008f, 0x44f: 0x0097, 0x450: 0x009b, 0x451: 0x00e0,
+ 0x452: 0x009f, 0x453: 0x00fe, 0x454: 0x048f, 0x455: 0x0493, 0x456: 0x00a1, 0x457: 0x00a9,
+ 0x458: 0x00ab, 0x459: 0x049b, 0x45a: 0x012b, 0x45b: 0x00ad, 0x45c: 0x049f, 0x45d: 0x01be,
+ 0x45e: 0x01c1, 0x45f: 0x01c4, 0x460: 0x01fa, 0x461: 0x01fd, 0x462: 0x0093, 0x463: 0x00a5,
+ 0x464: 0x00ab, 0x465: 0x00ad, 0x466: 0x01be, 0x467: 0x01c1, 0x468: 0x01eb, 0x469: 0x01fa,
+ 0x46a: 0x01fd,
+ 0x478: 0x020c,
+ // Block 0x12, offset 0x480
+ 0x49b: 0x00fb, 0x49c: 0x0087, 0x49d: 0x0101,
+ 0x49e: 0x00d4, 0x49f: 0x010a, 0x4a0: 0x008d, 0x4a1: 0x010d, 0x4a2: 0x0110, 0x4a3: 0x0116,
+ 0x4a4: 0x011c, 0x4a5: 0x011f, 0x4a6: 0x0122, 0x4a7: 0x04a3, 0x4a8: 0x016a, 0x4a9: 0x0128,
+ 0x4aa: 0x04a7, 0x4ab: 0x016d, 0x4ac: 0x0131, 0x4ad: 0x012e, 0x4ae: 0x0134, 0x4af: 0x0137,
+ 0x4b0: 0x013a, 0x4b1: 0x013d, 0x4b2: 0x0140, 0x4b3: 0x014c, 0x4b4: 0x014f, 0x4b5: 0x00ec,
+ 0x4b6: 0x0152, 0x4b7: 0x0155, 0x4b8: 0x0497, 0x4b9: 0x0158, 0x4ba: 0x015b, 0x4bb: 0x00b5,
+ 0x4bc: 0x015e, 0x4bd: 0x0161, 0x4be: 0x0164, 0x4bf: 0x01d0,
+ // Block 0x13, offset 0x4c0
+ 0x4c0: 0x2fd2, 0x4c1: 0x32de, 0x4c2: 0x2fdc, 0x4c3: 0x32e8, 0x4c4: 0x2fe1, 0x4c5: 0x32ed,
+ 0x4c6: 0x2fe6, 0x4c7: 0x32f2, 0x4c8: 0x3907, 0x4c9: 0x3a96, 0x4ca: 0x2fff, 0x4cb: 0x330b,
+ 0x4cc: 0x3009, 0x4cd: 0x3315, 0x4ce: 0x3018, 0x4cf: 0x3324, 0x4d0: 0x300e, 0x4d1: 0x331a,
+ 0x4d2: 0x3013, 0x4d3: 0x331f, 0x4d4: 0x392a, 0x4d5: 0x3ab9, 0x4d6: 0x3931, 0x4d7: 0x3ac0,
+ 0x4d8: 0x3054, 0x4d9: 0x3360, 0x4da: 0x3059, 0x4db: 0x3365, 0x4dc: 0x393f, 0x4dd: 0x3ace,
+ 0x4de: 0x305e, 0x4df: 0x336a, 0x4e0: 0x306d, 0x4e1: 0x3379, 0x4e2: 0x308b, 0x4e3: 0x3397,
+ 0x4e4: 0x309a, 0x4e5: 0x33a6, 0x4e6: 0x3090, 0x4e7: 0x339c, 0x4e8: 0x309f, 0x4e9: 0x33ab,
+ 0x4ea: 0x30a4, 0x4eb: 0x33b0, 0x4ec: 0x30ea, 0x4ed: 0x33f6, 0x4ee: 0x3946, 0x4ef: 0x3ad5,
+ 0x4f0: 0x30f4, 0x4f1: 0x3405, 0x4f2: 0x30fe, 0x4f3: 0x340f, 0x4f4: 0x3108, 0x4f5: 0x3419,
+ 0x4f6: 0x46ff, 0x4f7: 0x4790, 0x4f8: 0x394d, 0x4f9: 0x3adc, 0x4fa: 0x3121, 0x4fb: 0x3432,
+ 0x4fc: 0x311c, 0x4fd: 0x342d, 0x4fe: 0x3126, 0x4ff: 0x3437,
+ // Block 0x14, offset 0x500
+ 0x500: 0x312b, 0x501: 0x343c, 0x502: 0x3130, 0x503: 0x3441, 0x504: 0x3144, 0x505: 0x3455,
+ 0x506: 0x314e, 0x507: 0x345f, 0x508: 0x315d, 0x509: 0x346e, 0x50a: 0x3158, 0x50b: 0x3469,
+ 0x50c: 0x3970, 0x50d: 0x3aff, 0x50e: 0x397e, 0x50f: 0x3b0d, 0x510: 0x3985, 0x511: 0x3b14,
+ 0x512: 0x398c, 0x513: 0x3b1b, 0x514: 0x318a, 0x515: 0x349b, 0x516: 0x318f, 0x517: 0x34a0,
+ 0x518: 0x3199, 0x519: 0x34aa, 0x51a: 0x472c, 0x51b: 0x47bd, 0x51c: 0x39d2, 0x51d: 0x3b61,
+ 0x51e: 0x31b2, 0x51f: 0x34c3, 0x520: 0x31bc, 0x521: 0x34cd, 0x522: 0x473b, 0x523: 0x47cc,
+ 0x524: 0x39d9, 0x525: 0x3b68, 0x526: 0x39e0, 0x527: 0x3b6f, 0x528: 0x39e7, 0x529: 0x3b76,
+ 0x52a: 0x31cb, 0x52b: 0x34dc, 0x52c: 0x31d5, 0x52d: 0x34eb, 0x52e: 0x31e9, 0x52f: 0x34ff,
+ 0x530: 0x31e4, 0x531: 0x34fa, 0x532: 0x3225, 0x533: 0x353b, 0x534: 0x3234, 0x535: 0x354a,
+ 0x536: 0x322f, 0x537: 0x3545, 0x538: 0x39ee, 0x539: 0x3b7d, 0x53a: 0x39f5, 0x53b: 0x3b84,
+ 0x53c: 0x3239, 0x53d: 0x354f, 0x53e: 0x323e, 0x53f: 0x3554,
+ // Block 0x15, offset 0x540
+ 0x540: 0x3243, 0x541: 0x3559, 0x542: 0x3248, 0x543: 0x355e, 0x544: 0x3257, 0x545: 0x356d,
+ 0x546: 0x3252, 0x547: 0x3568, 0x548: 0x325c, 0x549: 0x3577, 0x54a: 0x3261, 0x54b: 0x357c,
+ 0x54c: 0x3266, 0x54d: 0x3581, 0x54e: 0x3284, 0x54f: 0x359f, 0x550: 0x329d, 0x551: 0x35bd,
+ 0x552: 0x32ac, 0x553: 0x35cc, 0x554: 0x32b1, 0x555: 0x35d1, 0x556: 0x33b5, 0x557: 0x34e1,
+ 0x558: 0x3572, 0x559: 0x35ae, 0x55a: 0x1d73, 0x55b: 0x4312,
+ 0x560: 0x46dc, 0x561: 0x476d, 0x562: 0x2fbe, 0x563: 0x32ca,
+ 0x564: 0x38b3, 0x565: 0x3a42, 0x566: 0x38ac, 0x567: 0x3a3b, 0x568: 0x38c1, 0x569: 0x3a50,
+ 0x56a: 0x38ba, 0x56b: 0x3a49, 0x56c: 0x38f9, 0x56d: 0x3a88, 0x56e: 0x38cf, 0x56f: 0x3a5e,
+ 0x570: 0x38c8, 0x571: 0x3a57, 0x572: 0x38dd, 0x573: 0x3a6c, 0x574: 0x38d6, 0x575: 0x3a65,
+ 0x576: 0x3900, 0x577: 0x3a8f, 0x578: 0x46f0, 0x579: 0x4781, 0x57a: 0x303b, 0x57b: 0x3347,
+ 0x57c: 0x3027, 0x57d: 0x3333, 0x57e: 0x3915, 0x57f: 0x3aa4,
+ // Block 0x16, offset 0x580
+ 0x580: 0x390e, 0x581: 0x3a9d, 0x582: 0x3923, 0x583: 0x3ab2, 0x584: 0x391c, 0x585: 0x3aab,
+ 0x586: 0x3938, 0x587: 0x3ac7, 0x588: 0x30cc, 0x589: 0x33d8, 0x58a: 0x30e0, 0x58b: 0x33ec,
+ 0x58c: 0x4722, 0x58d: 0x47b3, 0x58e: 0x3171, 0x58f: 0x3482, 0x590: 0x395b, 0x591: 0x3aea,
+ 0x592: 0x3954, 0x593: 0x3ae3, 0x594: 0x3969, 0x595: 0x3af8, 0x596: 0x3962, 0x597: 0x3af1,
+ 0x598: 0x39c4, 0x599: 0x3b53, 0x59a: 0x39a8, 0x59b: 0x3b37, 0x59c: 0x39a1, 0x59d: 0x3b30,
+ 0x59e: 0x39b6, 0x59f: 0x3b45, 0x5a0: 0x39af, 0x5a1: 0x3b3e, 0x5a2: 0x39bd, 0x5a3: 0x3b4c,
+ 0x5a4: 0x3220, 0x5a5: 0x3536, 0x5a6: 0x3202, 0x5a7: 0x3518, 0x5a8: 0x3a1f, 0x5a9: 0x3bae,
+ 0x5aa: 0x3a18, 0x5ab: 0x3ba7, 0x5ac: 0x3a2d, 0x5ad: 0x3bbc, 0x5ae: 0x3a26, 0x5af: 0x3bb5,
+ 0x5b0: 0x3a34, 0x5b1: 0x3bc3, 0x5b2: 0x326b, 0x5b3: 0x3586, 0x5b4: 0x3293, 0x5b5: 0x35b3,
+ 0x5b6: 0x328e, 0x5b7: 0x35a9, 0x5b8: 0x327a, 0x5b9: 0x3595,
+ // Block 0x17, offset 0x5c0
+ 0x5c0: 0x483f, 0x5c1: 0x4845, 0x5c2: 0x4959, 0x5c3: 0x4971, 0x5c4: 0x4961, 0x5c5: 0x4979,
+ 0x5c6: 0x4969, 0x5c7: 0x4981, 0x5c8: 0x47e5, 0x5c9: 0x47eb, 0x5ca: 0x48c9, 0x5cb: 0x48e1,
+ 0x5cc: 0x48d1, 0x5cd: 0x48e9, 0x5ce: 0x48d9, 0x5cf: 0x48f1, 0x5d0: 0x4851, 0x5d1: 0x4857,
+ 0x5d2: 0x3df3, 0x5d3: 0x3e03, 0x5d4: 0x3dfb, 0x5d5: 0x3e0b,
+ 0x5d8: 0x47f1, 0x5d9: 0x47f7, 0x5da: 0x3d23, 0x5db: 0x3d33, 0x5dc: 0x3d2b, 0x5dd: 0x3d3b,
+ 0x5e0: 0x4869, 0x5e1: 0x486f, 0x5e2: 0x4989, 0x5e3: 0x49a1,
+ 0x5e4: 0x4991, 0x5e5: 0x49a9, 0x5e6: 0x4999, 0x5e7: 0x49b1, 0x5e8: 0x47fd, 0x5e9: 0x4803,
+ 0x5ea: 0x48f9, 0x5eb: 0x4911, 0x5ec: 0x4901, 0x5ed: 0x4919, 0x5ee: 0x4909, 0x5ef: 0x4921,
+ 0x5f0: 0x4881, 0x5f1: 0x4887, 0x5f2: 0x3e53, 0x5f3: 0x3e6b, 0x5f4: 0x3e5b, 0x5f5: 0x3e73,
+ 0x5f6: 0x3e63, 0x5f7: 0x3e7b, 0x5f8: 0x4809, 0x5f9: 0x480f, 0x5fa: 0x3d53, 0x5fb: 0x3d6b,
+ 0x5fc: 0x3d5b, 0x5fd: 0x3d73, 0x5fe: 0x3d63, 0x5ff: 0x3d7b,
+ // Block 0x18, offset 0x600
+ 0x600: 0x488d, 0x601: 0x4893, 0x602: 0x3e83, 0x603: 0x3e93, 0x604: 0x3e8b, 0x605: 0x3e9b,
+ 0x608: 0x4815, 0x609: 0x481b, 0x60a: 0x3d83, 0x60b: 0x3d93,
+ 0x60c: 0x3d8b, 0x60d: 0x3d9b, 0x610: 0x489f, 0x611: 0x48a5,
+ 0x612: 0x3ebb, 0x613: 0x3ed3, 0x614: 0x3ec3, 0x615: 0x3edb, 0x616: 0x3ecb, 0x617: 0x3ee3,
+ 0x619: 0x4821, 0x61b: 0x3da3, 0x61d: 0x3dab,
+ 0x61f: 0x3db3, 0x620: 0x48b7, 0x621: 0x48bd, 0x622: 0x49b9, 0x623: 0x49d1,
+ 0x624: 0x49c1, 0x625: 0x49d9, 0x626: 0x49c9, 0x627: 0x49e1, 0x628: 0x4827, 0x629: 0x482d,
+ 0x62a: 0x4929, 0x62b: 0x4941, 0x62c: 0x4931, 0x62d: 0x4949, 0x62e: 0x4939, 0x62f: 0x4951,
+ 0x630: 0x4833, 0x631: 0x4359, 0x632: 0x36cc, 0x633: 0x435f, 0x634: 0x485d, 0x635: 0x4365,
+ 0x636: 0x36de, 0x637: 0x436b, 0x638: 0x36fc, 0x639: 0x4371, 0x63a: 0x3714, 0x63b: 0x4377,
+ 0x63c: 0x48ab, 0x63d: 0x437d,
+ // Block 0x19, offset 0x640
+ 0x640: 0x3ddb, 0x641: 0x3de3, 0x642: 0x41bf, 0x643: 0x41dd, 0x644: 0x41c9, 0x645: 0x41e7,
+ 0x646: 0x41d3, 0x647: 0x41f1, 0x648: 0x3d13, 0x649: 0x3d1b, 0x64a: 0x410b, 0x64b: 0x4129,
+ 0x64c: 0x4115, 0x64d: 0x4133, 0x64e: 0x411f, 0x64f: 0x413d, 0x650: 0x3e23, 0x651: 0x3e2b,
+ 0x652: 0x41fb, 0x653: 0x4219, 0x654: 0x4205, 0x655: 0x4223, 0x656: 0x420f, 0x657: 0x422d,
+ 0x658: 0x3d43, 0x659: 0x3d4b, 0x65a: 0x4147, 0x65b: 0x4165, 0x65c: 0x4151, 0x65d: 0x416f,
+ 0x65e: 0x415b, 0x65f: 0x4179, 0x660: 0x3efb, 0x661: 0x3f03, 0x662: 0x4237, 0x663: 0x4255,
+ 0x664: 0x4241, 0x665: 0x425f, 0x666: 0x424b, 0x667: 0x4269, 0x668: 0x3dbb, 0x669: 0x3dc3,
+ 0x66a: 0x4183, 0x66b: 0x41a1, 0x66c: 0x418d, 0x66d: 0x41ab, 0x66e: 0x4197, 0x66f: 0x41b5,
+ 0x670: 0x36c0, 0x671: 0x36ba, 0x672: 0x3dcb, 0x673: 0x36c6, 0x674: 0x3dd3,
+ 0x676: 0x484b, 0x677: 0x3deb, 0x678: 0x3630, 0x679: 0x362a, 0x67a: 0x361e, 0x67b: 0x4329,
+ 0x67c: 0x3636, 0x67d: 0x42c2, 0x67e: 0x01d3, 0x67f: 0x42c2,
+ // Block 0x1a, offset 0x680
+ 0x680: 0x42db, 0x681: 0x44bd, 0x682: 0x3e13, 0x683: 0x36d8, 0x684: 0x3e1b,
+ 0x686: 0x4875, 0x687: 0x3e33, 0x688: 0x363c, 0x689: 0x432f, 0x68a: 0x3648, 0x68b: 0x4335,
+ 0x68c: 0x3654, 0x68d: 0x44c4, 0x68e: 0x44cb, 0x68f: 0x44d2, 0x690: 0x36f0, 0x691: 0x36ea,
+ 0x692: 0x3e3b, 0x693: 0x451f, 0x696: 0x36f6, 0x697: 0x3e4b,
+ 0x698: 0x366c, 0x699: 0x3666, 0x69a: 0x365a, 0x69b: 0x433b, 0x69d: 0x44d9,
+ 0x69e: 0x44e0, 0x69f: 0x44e7, 0x6a0: 0x3726, 0x6a1: 0x3720, 0x6a2: 0x3ea3, 0x6a3: 0x4527,
+ 0x6a4: 0x3708, 0x6a5: 0x370e, 0x6a6: 0x372c, 0x6a7: 0x3eb3, 0x6a8: 0x369c, 0x6a9: 0x3696,
+ 0x6aa: 0x368a, 0x6ab: 0x4347, 0x6ac: 0x3684, 0x6ad: 0x44af, 0x6ae: 0x44b6, 0x6af: 0x0081,
+ 0x6b2: 0x3eeb, 0x6b3: 0x3732, 0x6b4: 0x3ef3,
+ 0x6b6: 0x48c3, 0x6b7: 0x3f0b, 0x6b8: 0x3678, 0x6b9: 0x4341, 0x6ba: 0x36a8, 0x6bb: 0x4353,
+ 0x6bc: 0x36b4, 0x6bd: 0x4295, 0x6be: 0x42c7,
+ // Block 0x1b, offset 0x6c0
+ 0x6c0: 0x1d6b, 0x6c1: 0x1d6f, 0x6c2: 0x0047, 0x6c3: 0x1de7, 0x6c5: 0x1d7b,
+ 0x6c6: 0x1d7f, 0x6c7: 0x00e9, 0x6c9: 0x1deb, 0x6ca: 0x008f, 0x6cb: 0x0051,
+ 0x6cc: 0x0051, 0x6cd: 0x0051, 0x6ce: 0x0091, 0x6cf: 0x00da, 0x6d0: 0x0053, 0x6d1: 0x0053,
+ 0x6d2: 0x0059, 0x6d3: 0x0099, 0x6d5: 0x005d, 0x6d6: 0x1b20,
+ 0x6d9: 0x0061, 0x6da: 0x0063, 0x6db: 0x0065, 0x6dc: 0x0065, 0x6dd: 0x0065,
+ 0x6e0: 0x1b32, 0x6e1: 0x1d5b, 0x6e2: 0x1b3b,
+ 0x6e4: 0x0075, 0x6e6: 0x01b8, 0x6e8: 0x0075,
+ 0x6ea: 0x0057, 0x6eb: 0x430d, 0x6ec: 0x0045, 0x6ed: 0x0047, 0x6ef: 0x008b,
+ 0x6f0: 0x004b, 0x6f1: 0x004d, 0x6f3: 0x005b, 0x6f4: 0x009f, 0x6f5: 0x0215,
+ 0x6f6: 0x0218, 0x6f7: 0x021b, 0x6f8: 0x021e, 0x6f9: 0x0093, 0x6fb: 0x1d2b,
+ 0x6fc: 0x01e8, 0x6fd: 0x01c1, 0x6fe: 0x0179, 0x6ff: 0x01a0,
+ // Block 0x1c, offset 0x700
+ 0x700: 0x04db, 0x705: 0x0049,
+ 0x706: 0x0089, 0x707: 0x008b, 0x708: 0x0093, 0x709: 0x0095,
+ 0x710: 0x23c1, 0x711: 0x23cd,
+ 0x712: 0x2481, 0x713: 0x23a9, 0x714: 0x242d, 0x715: 0x23b5, 0x716: 0x2433, 0x717: 0x244b,
+ 0x718: 0x2457, 0x719: 0x23bb, 0x71a: 0x245d, 0x71b: 0x23c7, 0x71c: 0x2451, 0x71d: 0x2463,
+ 0x71e: 0x2469, 0x71f: 0x1e4f, 0x720: 0x0053, 0x721: 0x1aed, 0x722: 0x1d37, 0x723: 0x1af6,
+ 0x724: 0x006d, 0x725: 0x1b3e, 0x726: 0x1d63, 0x727: 0x1edb, 0x728: 0x1af9, 0x729: 0x0071,
+ 0x72a: 0x1b4a, 0x72b: 0x1d67, 0x72c: 0x0059, 0x72d: 0x0047, 0x72e: 0x0049, 0x72f: 0x005b,
+ 0x730: 0x0093, 0x731: 0x1b77, 0x732: 0x1dab, 0x733: 0x1b80, 0x734: 0x00ad, 0x735: 0x1bf5,
+ 0x736: 0x1ddf, 0x737: 0x1eef, 0x738: 0x1b83, 0x739: 0x00b1, 0x73a: 0x1bf8, 0x73b: 0x1de3,
+ 0x73c: 0x0099, 0x73d: 0x0087, 0x73e: 0x0089, 0x73f: 0x009b,
+ // Block 0x1d, offset 0x740
+ 0x741: 0x3c41, 0x743: 0xa000, 0x744: 0x3c48, 0x745: 0xa000,
+ 0x747: 0x3c4f, 0x748: 0xa000, 0x749: 0x3c56,
+ 0x74d: 0xa000,
+ 0x760: 0x2fa0, 0x761: 0xa000, 0x762: 0x3c64,
+ 0x764: 0xa000, 0x765: 0xa000,
+ 0x76d: 0x3c5d, 0x76e: 0x2f9b, 0x76f: 0x2fa5,
+ 0x770: 0x3c6b, 0x771: 0x3c72, 0x772: 0xa000, 0x773: 0xa000, 0x774: 0x3c79, 0x775: 0x3c80,
+ 0x776: 0xa000, 0x777: 0xa000, 0x778: 0x3c87, 0x779: 0x3c8e, 0x77a: 0xa000, 0x77b: 0xa000,
+ 0x77c: 0xa000, 0x77d: 0xa000,
+ // Block 0x1e, offset 0x780
+ 0x780: 0x3c95, 0x781: 0x3c9c, 0x782: 0xa000, 0x783: 0xa000, 0x784: 0x3cb1, 0x785: 0x3cb8,
+ 0x786: 0xa000, 0x787: 0xa000, 0x788: 0x3cbf, 0x789: 0x3cc6,
+ 0x791: 0xa000,
+ 0x792: 0xa000,
+ 0x7a2: 0xa000,
+ 0x7a8: 0xa000, 0x7a9: 0xa000,
+ 0x7ab: 0xa000, 0x7ac: 0x3cdb, 0x7ad: 0x3ce2, 0x7ae: 0x3ce9, 0x7af: 0x3cf0,
+ 0x7b2: 0xa000, 0x7b3: 0xa000, 0x7b4: 0xa000, 0x7b5: 0xa000,
+ // Block 0x1f, offset 0x7c0
+ 0x7e0: 0x0023, 0x7e1: 0x0025, 0x7e2: 0x0027, 0x7e3: 0x0029,
+ 0x7e4: 0x002b, 0x7e5: 0x002d, 0x7e6: 0x002f, 0x7e7: 0x0031, 0x7e8: 0x0033, 0x7e9: 0x1a15,
+ 0x7ea: 0x1a18, 0x7eb: 0x1a1b, 0x7ec: 0x1a1e, 0x7ed: 0x1a21, 0x7ee: 0x1a24, 0x7ef: 0x1a27,
+ 0x7f0: 0x1a2a, 0x7f1: 0x1a2d, 0x7f2: 0x1a30, 0x7f3: 0x1a39, 0x7f4: 0x1bfb, 0x7f5: 0x1bff,
+ 0x7f6: 0x1c03, 0x7f7: 0x1c07, 0x7f8: 0x1c0b, 0x7f9: 0x1c0f, 0x7fa: 0x1c13, 0x7fb: 0x1c17,
+ 0x7fc: 0x1c1b, 0x7fd: 0x1e13, 0x7fe: 0x1e18, 0x7ff: 0x1e1d,
+ // Block 0x20, offset 0x800
+ 0x800: 0x1e22, 0x801: 0x1e27, 0x802: 0x1e2c, 0x803: 0x1e31, 0x804: 0x1e36, 0x805: 0x1e3b,
+ 0x806: 0x1e40, 0x807: 0x1e45, 0x808: 0x1a12, 0x809: 0x1a36, 0x80a: 0x1a5a, 0x80b: 0x1a7e,
+ 0x80c: 0x1aa2, 0x80d: 0x1aab, 0x80e: 0x1ab1, 0x80f: 0x1ab7, 0x810: 0x1abd, 0x811: 0x1cf3,
+ 0x812: 0x1cf7, 0x813: 0x1cfb, 0x814: 0x1cff, 0x815: 0x1d03, 0x816: 0x1d07, 0x817: 0x1d0b,
+ 0x818: 0x1d0f, 0x819: 0x1d13, 0x81a: 0x1d17, 0x81b: 0x1d1b, 0x81c: 0x1c87, 0x81d: 0x1c8b,
+ 0x81e: 0x1c8f, 0x81f: 0x1c93, 0x820: 0x1c97, 0x821: 0x1c9b, 0x822: 0x1c9f, 0x823: 0x1ca3,
+ 0x824: 0x1ca7, 0x825: 0x1cab, 0x826: 0x1caf, 0x827: 0x1cb3, 0x828: 0x1cb7, 0x829: 0x1cbb,
+ 0x82a: 0x1cbf, 0x82b: 0x1cc3, 0x82c: 0x1cc7, 0x82d: 0x1ccb, 0x82e: 0x1ccf, 0x82f: 0x1cd3,
+ 0x830: 0x1cd7, 0x831: 0x1cdb, 0x832: 0x1cdf, 0x833: 0x1ce3, 0x834: 0x1ce7, 0x835: 0x1ceb,
+ 0x836: 0x0043, 0x837: 0x0045, 0x838: 0x0047, 0x839: 0x0049, 0x83a: 0x004b, 0x83b: 0x004d,
+ 0x83c: 0x004f, 0x83d: 0x0051, 0x83e: 0x0053, 0x83f: 0x0055,
+ // Block 0x21, offset 0x840
+ 0x840: 0x0737, 0x841: 0x075b, 0x842: 0x0767, 0x843: 0x0777, 0x844: 0x077f, 0x845: 0x078b,
+ 0x846: 0x0793, 0x847: 0x079b, 0x848: 0x07a7, 0x849: 0x07fb, 0x84a: 0x0813, 0x84b: 0x0823,
+ 0x84c: 0x0833, 0x84d: 0x0843, 0x84e: 0x0853, 0x84f: 0x0873, 0x850: 0x0877, 0x851: 0x087b,
+ 0x852: 0x08af, 0x853: 0x08d7, 0x854: 0x08e7, 0x855: 0x08ef, 0x856: 0x08f3, 0x857: 0x08ff,
+ 0x858: 0x091b, 0x859: 0x091f, 0x85a: 0x0937, 0x85b: 0x093b, 0x85c: 0x0943, 0x85d: 0x0953,
+ 0x85e: 0x09ef, 0x85f: 0x0a03, 0x860: 0x0a43, 0x861: 0x0a57, 0x862: 0x0a5f, 0x863: 0x0a63,
+ 0x864: 0x0a73, 0x865: 0x0a8f, 0x866: 0x0abb, 0x867: 0x0ac7, 0x868: 0x0ae7, 0x869: 0x0af3,
+ 0x86a: 0x0af7, 0x86b: 0x0afb, 0x86c: 0x0b13, 0x86d: 0x0b17, 0x86e: 0x0b43, 0x86f: 0x0b4f,
+ 0x870: 0x0b57, 0x871: 0x0b5f, 0x872: 0x0b6f, 0x873: 0x0b77, 0x874: 0x0b7f, 0x875: 0x0bab,
+ 0x876: 0x0baf, 0x877: 0x0bb7, 0x878: 0x0bbb, 0x879: 0x0bc3, 0x87a: 0x0bcb, 0x87b: 0x0bdb,
+ 0x87c: 0x0bf7, 0x87d: 0x0c6f, 0x87e: 0x0c83, 0x87f: 0x0c87,
+ // Block 0x22, offset 0x880
+ 0x880: 0x0d07, 0x881: 0x0d0b, 0x882: 0x0d1f, 0x883: 0x0d23, 0x884: 0x0d2b, 0x885: 0x0d33,
+ 0x886: 0x0d3b, 0x887: 0x0d47, 0x888: 0x0d6f, 0x889: 0x0d7f, 0x88a: 0x0d93, 0x88b: 0x0e03,
+ 0x88c: 0x0e0f, 0x88d: 0x0e1f, 0x88e: 0x0e2b, 0x88f: 0x0e37, 0x890: 0x0e3f, 0x891: 0x0e43,
+ 0x892: 0x0e47, 0x893: 0x0e4b, 0x894: 0x0e4f, 0x895: 0x0f07, 0x896: 0x0f4f, 0x897: 0x0f5b,
+ 0x898: 0x0f5f, 0x899: 0x0f63, 0x89a: 0x0f67, 0x89b: 0x0f6f, 0x89c: 0x0f73, 0x89d: 0x0f87,
+ 0x89e: 0x0fa3, 0x89f: 0x0fab, 0x8a0: 0x0feb, 0x8a1: 0x0fef, 0x8a2: 0x0ff7, 0x8a3: 0x0ffb,
+ 0x8a4: 0x1003, 0x8a5: 0x1007, 0x8a6: 0x102b, 0x8a7: 0x102f, 0x8a8: 0x104b, 0x8a9: 0x104f,
+ 0x8aa: 0x1053, 0x8ab: 0x1057, 0x8ac: 0x106b, 0x8ad: 0x108f, 0x8ae: 0x1093, 0x8af: 0x1097,
+ 0x8b0: 0x10bb, 0x8b1: 0x10fb, 0x8b2: 0x10ff, 0x8b3: 0x111f, 0x8b4: 0x112f, 0x8b5: 0x1137,
+ 0x8b6: 0x1157, 0x8b7: 0x117b, 0x8b8: 0x11bf, 0x8b9: 0x11c7, 0x8ba: 0x11db, 0x8bb: 0x11e7,
+ 0x8bc: 0x11ef, 0x8bd: 0x11f7, 0x8be: 0x11fb, 0x8bf: 0x11ff,
+ // Block 0x23, offset 0x8c0
+ 0x8c0: 0x1217, 0x8c1: 0x121b, 0x8c2: 0x1237, 0x8c3: 0x123f, 0x8c4: 0x1247, 0x8c5: 0x124b,
+ 0x8c6: 0x1257, 0x8c7: 0x125f, 0x8c8: 0x1263, 0x8c9: 0x1267, 0x8ca: 0x126f, 0x8cb: 0x1273,
+ 0x8cc: 0x1313, 0x8cd: 0x1327, 0x8ce: 0x135b, 0x8cf: 0x135f, 0x8d0: 0x1367, 0x8d1: 0x1393,
+ 0x8d2: 0x139b, 0x8d3: 0x13a3, 0x8d4: 0x13ab, 0x8d5: 0x13e7, 0x8d6: 0x13eb, 0x8d7: 0x13f3,
+ 0x8d8: 0x13f7, 0x8d9: 0x13fb, 0x8da: 0x1427, 0x8db: 0x142b, 0x8dc: 0x1433, 0x8dd: 0x1447,
+ 0x8de: 0x144b, 0x8df: 0x1467, 0x8e0: 0x146f, 0x8e1: 0x1473, 0x8e2: 0x1497, 0x8e3: 0x14b7,
+ 0x8e4: 0x14c7, 0x8e5: 0x14cb, 0x8e6: 0x14d3, 0x8e7: 0x14ff, 0x8e8: 0x1503, 0x8e9: 0x1513,
+ 0x8ea: 0x1537, 0x8eb: 0x1543, 0x8ec: 0x1553, 0x8ed: 0x156b, 0x8ee: 0x1573, 0x8ef: 0x1577,
+ 0x8f0: 0x157b, 0x8f1: 0x157f, 0x8f2: 0x158b, 0x8f3: 0x158f, 0x8f4: 0x1597, 0x8f5: 0x15b3,
+ 0x8f6: 0x15b7, 0x8f7: 0x15bb, 0x8f8: 0x15d3, 0x8f9: 0x15d7, 0x8fa: 0x15df, 0x8fb: 0x15f3,
+ 0x8fc: 0x15f7, 0x8fd: 0x15fb, 0x8fe: 0x1603, 0x8ff: 0x1607,
+ // Block 0x24, offset 0x900
+ 0x906: 0xa000, 0x90b: 0xa000,
+ 0x90c: 0x3f43, 0x90d: 0xa000, 0x90e: 0x3f4b, 0x90f: 0xa000, 0x910: 0x3f53, 0x911: 0xa000,
+ 0x912: 0x3f5b, 0x913: 0xa000, 0x914: 0x3f63, 0x915: 0xa000, 0x916: 0x3f6b, 0x917: 0xa000,
+ 0x918: 0x3f73, 0x919: 0xa000, 0x91a: 0x3f7b, 0x91b: 0xa000, 0x91c: 0x3f83, 0x91d: 0xa000,
+ 0x91e: 0x3f8b, 0x91f: 0xa000, 0x920: 0x3f93, 0x921: 0xa000, 0x922: 0x3f9b,
+ 0x924: 0xa000, 0x925: 0x3fa3, 0x926: 0xa000, 0x927: 0x3fab, 0x928: 0xa000, 0x929: 0x3fb3,
+ 0x92f: 0xa000,
+ 0x930: 0x3fbb, 0x931: 0x3fc3, 0x932: 0xa000, 0x933: 0x3fcb, 0x934: 0x3fd3, 0x935: 0xa000,
+ 0x936: 0x3fdb, 0x937: 0x3fe3, 0x938: 0xa000, 0x939: 0x3feb, 0x93a: 0x3ff3, 0x93b: 0xa000,
+ 0x93c: 0x3ffb, 0x93d: 0x4003,
+ // Block 0x25, offset 0x940
+ 0x954: 0x3f3b,
+ 0x959: 0x9903, 0x95a: 0x9903, 0x95b: 0x4317, 0x95c: 0x431d, 0x95d: 0xa000,
+ 0x95e: 0x400b, 0x95f: 0x28b0,
+ 0x966: 0xa000,
+ 0x96b: 0xa000, 0x96c: 0x401b, 0x96d: 0xa000, 0x96e: 0x4023, 0x96f: 0xa000,
+ 0x970: 0x402b, 0x971: 0xa000, 0x972: 0x4033, 0x973: 0xa000, 0x974: 0x403b, 0x975: 0xa000,
+ 0x976: 0x4043, 0x977: 0xa000, 0x978: 0x404b, 0x979: 0xa000, 0x97a: 0x4053, 0x97b: 0xa000,
+ 0x97c: 0x405b, 0x97d: 0xa000, 0x97e: 0x4063, 0x97f: 0xa000,
+ // Block 0x26, offset 0x980
+ 0x980: 0x406b, 0x981: 0xa000, 0x982: 0x4073, 0x984: 0xa000, 0x985: 0x407b,
+ 0x986: 0xa000, 0x987: 0x4083, 0x988: 0xa000, 0x989: 0x408b,
+ 0x98f: 0xa000, 0x990: 0x4093, 0x991: 0x409b,
+ 0x992: 0xa000, 0x993: 0x40a3, 0x994: 0x40ab, 0x995: 0xa000, 0x996: 0x40b3, 0x997: 0x40bb,
+ 0x998: 0xa000, 0x999: 0x40c3, 0x99a: 0x40cb, 0x99b: 0xa000, 0x99c: 0x40d3, 0x99d: 0x40db,
+ 0x9af: 0xa000,
+ 0x9b0: 0xa000, 0x9b1: 0xa000, 0x9b2: 0xa000, 0x9b4: 0x4013,
+ 0x9b7: 0x40e3, 0x9b8: 0x40eb, 0x9b9: 0x40f3, 0x9ba: 0x40fb,
+ 0x9bd: 0xa000, 0x9be: 0x4103, 0x9bf: 0x28c5,
+ // Block 0x27, offset 0x9c0
+ 0x9c0: 0x03af, 0x9c1: 0x03b3, 0x9c2: 0x0483, 0x9c3: 0x0487, 0x9c4: 0x03b7, 0x9c5: 0x03bb,
+ 0x9c6: 0x03bf, 0x9c7: 0x041b, 0x9c8: 0x041f, 0x9c9: 0x0423, 0x9ca: 0x0427, 0x9cb: 0x042b,
+ 0x9cc: 0x042f, 0x9cd: 0x0433, 0x9ce: 0x0437,
+ 0x9d2: 0x0737, 0x9d3: 0x0793, 0x9d4: 0x0743, 0x9d5: 0x09f3, 0x9d6: 0x0747, 0x9d7: 0x075f,
+ 0x9d8: 0x074b, 0x9d9: 0x100b, 0x9da: 0x077f, 0x9db: 0x0753, 0x9dc: 0x073b, 0x9dd: 0x0a77,
+ 0x9de: 0x0a07, 0x9df: 0x07a7,
+ // Block 0x28, offset 0xa00
+ 0xa00: 0x21e7, 0xa01: 0x21ed, 0xa02: 0x21f3, 0xa03: 0x21f9, 0xa04: 0x21ff, 0xa05: 0x2205,
+ 0xa06: 0x220b, 0xa07: 0x2211, 0xa08: 0x2217, 0xa09: 0x221d, 0xa0a: 0x2223, 0xa0b: 0x2229,
+ 0xa0c: 0x222f, 0xa0d: 0x2235, 0xa0e: 0x2922, 0xa0f: 0x292b, 0xa10: 0x2934, 0xa11: 0x293d,
+ 0xa12: 0x2946, 0xa13: 0x294f, 0xa14: 0x2958, 0xa15: 0x2961, 0xa16: 0x296a, 0xa17: 0x297c,
+ 0xa18: 0x2985, 0xa19: 0x298e, 0xa1a: 0x2997, 0xa1b: 0x29a0, 0xa1c: 0x2973, 0xa1d: 0x2dc5,
+ 0xa1e: 0x2cf6, 0xa20: 0x223b, 0xa21: 0x2253, 0xa22: 0x2247, 0xa23: 0x229b,
+ 0xa24: 0x2259, 0xa25: 0x2277, 0xa26: 0x2241, 0xa27: 0x2271, 0xa28: 0x224d, 0xa29: 0x2283,
+ 0xa2a: 0x22b3, 0xa2b: 0x22d1, 0xa2c: 0x22cb, 0xa2d: 0x22bf, 0xa2e: 0x230d, 0xa2f: 0x22a1,
+ 0xa30: 0x22ad, 0xa31: 0x22c5, 0xa32: 0x22b9, 0xa33: 0x22e3, 0xa34: 0x228f, 0xa35: 0x22d7,
+ 0xa36: 0x2301, 0xa37: 0x22e9, 0xa38: 0x227d, 0xa39: 0x225f, 0xa3a: 0x2295, 0xa3b: 0x22a7,
+ 0xa3c: 0x22dd, 0xa3d: 0x2265, 0xa3e: 0x2307, 0xa3f: 0x2289,
+ // Block 0x29, offset 0xa40
+ 0xa40: 0x22ef, 0xa41: 0x226b, 0xa42: 0x22f5, 0xa43: 0x22fb, 0xa44: 0x09a7, 0xa45: 0x0b7b,
+ 0xa46: 0x0d1f, 0xa47: 0x113f,
+ 0xa50: 0x1d57, 0xa51: 0x1a3c,
+ 0xa52: 0x1a3f, 0xa53: 0x1a42, 0xa54: 0x1a45, 0xa55: 0x1a48, 0xa56: 0x1a4b, 0xa57: 0x1a4e,
+ 0xa58: 0x1a51, 0xa59: 0x1a54, 0xa5a: 0x1a5d, 0xa5b: 0x1a60, 0xa5c: 0x1a63, 0xa5d: 0x1a66,
+ 0xa5e: 0x1a69, 0xa5f: 0x1a6c, 0xa60: 0x0313, 0xa61: 0x031b, 0xa62: 0x031f, 0xa63: 0x0327,
+ 0xa64: 0x032b, 0xa65: 0x032f, 0xa66: 0x0337, 0xa67: 0x033f, 0xa68: 0x0343, 0xa69: 0x034b,
+ 0xa6a: 0x034f, 0xa6b: 0x0353, 0xa6c: 0x0357, 0xa6d: 0x035b, 0xa6e: 0x2824, 0xa6f: 0x282b,
+ 0xa70: 0x2832, 0xa71: 0x2839, 0xa72: 0x2840, 0xa73: 0x2847, 0xa74: 0x284e, 0xa75: 0x2855,
+ 0xa76: 0x2863, 0xa77: 0x286a, 0xa78: 0x2871, 0xa79: 0x2878, 0xa7a: 0x287f, 0xa7b: 0x2886,
+ 0xa7c: 0x2d15, 0xa7d: 0x2b8a, 0xa7e: 0x285c,
+ // Block 0x2a, offset 0xa80
+ 0xa80: 0x0737, 0xa81: 0x0793, 0xa82: 0x0743, 0xa83: 0x09f3, 0xa84: 0x0797, 0xa85: 0x0827,
+ 0xa86: 0x073f, 0xa87: 0x0823, 0xa88: 0x0783, 0xa89: 0x08ff, 0xa8a: 0x0d7f, 0xa8b: 0x0f07,
+ 0xa8c: 0x0e4f, 0xa8d: 0x0d93, 0xa8e: 0x14d3, 0xa8f: 0x0a03, 0xa90: 0x0d47, 0xa91: 0x0dc3,
+ 0xa92: 0x0d83, 0xa93: 0x10c3, 0xa94: 0x0973, 0xa95: 0x0f7b, 0xa96: 0x13ff, 0xa97: 0x10d7,
+ 0xa98: 0x08bb, 0xa99: 0x1107, 0xa9a: 0x1013, 0xa9b: 0x0a8f, 0xa9c: 0x1487, 0xa9d: 0x07f7,
+ 0xa9e: 0x0923, 0xa9f: 0x0e6f, 0xaa0: 0x159b, 0xaa1: 0x07bb, 0xaa2: 0x084b, 0xaa3: 0x0e13,
+ 0xaa4: 0x0747, 0xaa5: 0x075f, 0xaa6: 0x074b, 0xaa7: 0x0b53, 0xaa8: 0x0967, 0xaa9: 0x08f7,
+ 0xaaa: 0x0acf, 0xaab: 0x0ac3, 0xaac: 0x1063, 0xaad: 0x07b7, 0xaae: 0x1413, 0xaaf: 0x0913,
+ 0xab0: 0x0a6b, 0xab1: 0x1a6f, 0xab2: 0x1a72, 0xab3: 0x1a75, 0xab4: 0x1a78, 0xab5: 0x1a81,
+ 0xab6: 0x1a84, 0xab7: 0x1a87, 0xab8: 0x1a8a, 0xab9: 0x1a8d, 0xaba: 0x1a90, 0xabb: 0x1a93,
+ 0xabc: 0x1a96, 0xabd: 0x1a99, 0xabe: 0x1a9c, 0xabf: 0x1aa5,
+ // Block 0x2b, offset 0xac0
+ 0xac0: 0x1e59, 0xac1: 0x1e68, 0xac2: 0x1e77, 0xac3: 0x1e86, 0xac4: 0x1e95, 0xac5: 0x1ea4,
+ 0xac6: 0x1eb3, 0xac7: 0x1ec2, 0xac8: 0x1ed1, 0xac9: 0x231f, 0xaca: 0x2331, 0xacb: 0x2343,
+ 0xacc: 0x1ae7, 0xacd: 0x1d97, 0xace: 0x1b65, 0xacf: 0x1d3b, 0xad0: 0x0543, 0xad1: 0x054b,
+ 0xad2: 0x0553, 0xad3: 0x055b, 0xad4: 0x0563, 0xad5: 0x0567, 0xad6: 0x056b, 0xad7: 0x056f,
+ 0xad8: 0x0573, 0xad9: 0x0577, 0xada: 0x057b, 0xadb: 0x057f, 0xadc: 0x0583, 0xadd: 0x0587,
+ 0xade: 0x058b, 0xadf: 0x058f, 0xae0: 0x0593, 0xae1: 0x059b, 0xae2: 0x059f, 0xae3: 0x05a3,
+ 0xae4: 0x05a7, 0xae5: 0x05ab, 0xae6: 0x05af, 0xae7: 0x05b3, 0xae8: 0x05b7, 0xae9: 0x05bb,
+ 0xaea: 0x05bf, 0xaeb: 0x05c3, 0xaec: 0x05c7, 0xaed: 0x05cb, 0xaee: 0x05cf, 0xaef: 0x05d3,
+ 0xaf0: 0x05d7, 0xaf1: 0x05db, 0xaf2: 0x05df, 0xaf3: 0x05e7, 0xaf4: 0x05ef, 0xaf5: 0x05f7,
+ 0xaf6: 0x05fb, 0xaf7: 0x05ff, 0xaf8: 0x0603, 0xaf9: 0x0607, 0xafa: 0x060b, 0xafb: 0x060f,
+ 0xafc: 0x0613, 0xafd: 0x0617, 0xafe: 0x061b,
+ // Block 0x2c, offset 0xb00
+ 0xb00: 0x2d25, 0xb01: 0x2bb1, 0xb02: 0x2d35, 0xb03: 0x2a7c, 0xb04: 0x2f2f, 0xb05: 0x2a86,
+ 0xb06: 0x2a90, 0xb07: 0x2f73, 0xb08: 0x2bbe, 0xb09: 0x2a9a, 0xb0a: 0x2aa4, 0xb0b: 0x2aae,
+ 0xb0c: 0x2be5, 0xb0d: 0x2bf2, 0xb0e: 0x2bcb, 0xb0f: 0x2bd8, 0xb10: 0x2f05, 0xb11: 0x2bff,
+ 0xb12: 0x2c0c, 0xb13: 0x2dd7, 0xb14: 0x28b7, 0xb15: 0x2dea, 0xb16: 0x2dfd, 0xb17: 0x2d45,
+ 0xb18: 0x2c19, 0xb19: 0x2e10, 0xb1a: 0x2e23, 0xb1b: 0x2c26, 0xb1c: 0x2ab8, 0xb1d: 0x2ac2,
+ 0xb1e: 0x2f13, 0xb1f: 0x2c33, 0xb20: 0x2d55, 0xb21: 0x2f40, 0xb22: 0x2acc, 0xb23: 0x2ad6,
+ 0xb24: 0x2c40, 0xb25: 0x2ae0, 0xb26: 0x2aea, 0xb27: 0x28cc, 0xb28: 0x28d3, 0xb29: 0x2af4,
+ 0xb2a: 0x2afe, 0xb2b: 0x2e36, 0xb2c: 0x2c4d, 0xb2d: 0x2d65, 0xb2e: 0x2e49, 0xb2f: 0x2c5a,
+ 0xb30: 0x2b12, 0xb31: 0x2b08, 0xb32: 0x2f87, 0xb33: 0x2c67, 0xb34: 0x2e5c, 0xb35: 0x2b1c,
+ 0xb36: 0x2d75, 0xb37: 0x2b26, 0xb38: 0x2c81, 0xb39: 0x2b30, 0xb3a: 0x2c8e, 0xb3b: 0x2f51,
+ 0xb3c: 0x2c74, 0xb3d: 0x2d85, 0xb3e: 0x2c9b, 0xb3f: 0x28da,
+ // Block 0x2d, offset 0xb40
+ 0xb40: 0x2f62, 0xb41: 0x2b3a, 0xb42: 0x2b44, 0xb43: 0x2ca8, 0xb44: 0x2b4e, 0xb45: 0x2b58,
+ 0xb46: 0x2b62, 0xb47: 0x2d95, 0xb48: 0x2cb5, 0xb49: 0x28e1, 0xb4a: 0x2e6f, 0xb4b: 0x2efa,
+ 0xb4c: 0x2da5, 0xb4d: 0x2cc2, 0xb4e: 0x2f21, 0xb4f: 0x2b6c, 0xb50: 0x2b76, 0xb51: 0x2ccf,
+ 0xb52: 0x28e8, 0xb53: 0x2cdc, 0xb54: 0x2db5, 0xb55: 0x28ef, 0xb56: 0x2e82, 0xb57: 0x2b80,
+ 0xb58: 0x1e4a, 0xb59: 0x1e5e, 0xb5a: 0x1e6d, 0xb5b: 0x1e7c, 0xb5c: 0x1e8b, 0xb5d: 0x1e9a,
+ 0xb5e: 0x1ea9, 0xb5f: 0x1eb8, 0xb60: 0x1ec7, 0xb61: 0x1ed6, 0xb62: 0x2325, 0xb63: 0x2337,
+ 0xb64: 0x2349, 0xb65: 0x2355, 0xb66: 0x2361, 0xb67: 0x236d, 0xb68: 0x2379, 0xb69: 0x2385,
+ 0xb6a: 0x2391, 0xb6b: 0x239d, 0xb6c: 0x23d9, 0xb6d: 0x23e5, 0xb6e: 0x23f1, 0xb6f: 0x23fd,
+ 0xb70: 0x2409, 0xb71: 0x1da7, 0xb72: 0x1b59, 0xb73: 0x1ac9, 0xb74: 0x1d77, 0xb75: 0x1bda,
+ 0xb76: 0x1be9, 0xb77: 0x1b5f, 0xb78: 0x1d8f, 0xb79: 0x1d93, 0xb7a: 0x1af3, 0xb7b: 0x28fd,
+ 0xb7c: 0x290b, 0xb7d: 0x28f6, 0xb7e: 0x2904, 0xb7f: 0x2ce9,
+ // Block 0x2e, offset 0xb80
+ 0xb80: 0x1bdd, 0xb81: 0x1bc5, 0xb82: 0x1df3, 0xb83: 0x1bad, 0xb84: 0x1b86, 0xb85: 0x1afc,
+ 0xb86: 0x1b0b, 0xb87: 0x1adb, 0xb88: 0x1d83, 0xb89: 0x1ee5, 0xb8a: 0x1be0, 0xb8b: 0x1bc8,
+ 0xb8c: 0x1df7, 0xb8d: 0x1e03, 0xb8e: 0x1bb9, 0xb8f: 0x1b8f, 0xb90: 0x1aea, 0xb91: 0x1daf,
+ 0xb92: 0x1d43, 0xb93: 0x1d2f, 0xb94: 0x1d5f, 0xb95: 0x1e07, 0xb96: 0x1bbc, 0xb97: 0x1b5c,
+ 0xb98: 0x1b92, 0xb99: 0x1b71, 0xb9a: 0x1bd4, 0xb9b: 0x1e0b, 0xb9c: 0x1bbf, 0xb9d: 0x1b53,
+ 0xb9e: 0x1b95, 0xb9f: 0x1dcf, 0xba0: 0x1d87, 0xba1: 0x1ba7, 0xba2: 0x1db7, 0xba3: 0x1dd3,
+ 0xba4: 0x1d8b, 0xba5: 0x1baa, 0xba6: 0x1dbb, 0xba7: 0x247b, 0xba8: 0x248f, 0xba9: 0x1b29,
+ 0xbaa: 0x1db3, 0xbab: 0x1d47, 0xbac: 0x1d33, 0xbad: 0x1ddb, 0xbae: 0x2912, 0xbaf: 0x29a9,
+ 0xbb0: 0x1bec, 0xbb1: 0x1bd7, 0xbb2: 0x1e0f, 0xbb3: 0x1bc2, 0xbb4: 0x1be3, 0xbb5: 0x1bcb,
+ 0xbb6: 0x1dfb, 0xbb7: 0x1bb0, 0xbb8: 0x1b89, 0xbb9: 0x1b14, 0xbba: 0x1be6, 0xbbb: 0x1bce,
+ 0xbbc: 0x1dff, 0xbbd: 0x1bb3, 0xbbe: 0x1b8c, 0xbbf: 0x1b17,
+ // Block 0x2f, offset 0xbc0
+ 0xbc0: 0x1dbf, 0xbc1: 0x1d4b, 0xbc2: 0x1ee0, 0xbc3: 0x1acc, 0xbc4: 0x1b4d, 0xbc5: 0x1b50,
+ 0xbc6: 0x2488, 0xbc7: 0x1d27, 0xbc8: 0x1b56, 0xbc9: 0x1ade, 0xbca: 0x1b74, 0xbcb: 0x1ae1,
+ 0xbcc: 0x1b7d, 0xbcd: 0x1aff, 0xbce: 0x1b02, 0xbcf: 0x1b98, 0xbd0: 0x1b9e, 0xbd1: 0x1ba1,
+ 0xbd2: 0x1dc3, 0xbd3: 0x1ba4, 0xbd4: 0x1bb6, 0xbd5: 0x1dcb, 0xbd6: 0x1dd7, 0xbd7: 0x1b23,
+ 0xbd8: 0x1eea, 0xbd9: 0x1d4f, 0xbda: 0x1b26, 0xbdb: 0x1bef, 0xbdc: 0x1b38, 0xbdd: 0x1b47,
+ 0xbde: 0x2475, 0xbdf: 0x246f, 0xbe0: 0x1e54, 0xbe1: 0x1e63, 0xbe2: 0x1e72, 0xbe3: 0x1e81,
+ 0xbe4: 0x1e90, 0xbe5: 0x1e9f, 0xbe6: 0x1eae, 0xbe7: 0x1ebd, 0xbe8: 0x1ecc, 0xbe9: 0x2319,
+ 0xbea: 0x232b, 0xbeb: 0x233d, 0xbec: 0x234f, 0xbed: 0x235b, 0xbee: 0x2367, 0xbef: 0x2373,
+ 0xbf0: 0x237f, 0xbf1: 0x238b, 0xbf2: 0x2397, 0xbf3: 0x23d3, 0xbf4: 0x23df, 0xbf5: 0x23eb,
+ 0xbf6: 0x23f7, 0xbf7: 0x2403, 0xbf8: 0x240f, 0xbf9: 0x2415, 0xbfa: 0x241b, 0xbfb: 0x2421,
+ 0xbfc: 0x2427, 0xbfd: 0x2439, 0xbfe: 0x243f, 0xbff: 0x1da3,
+ // Block 0x30, offset 0xc00
+ 0xc00: 0x13ef, 0xc01: 0x0d73, 0xc02: 0x144b, 0xc03: 0x1417, 0xc04: 0x0ecf, 0xc05: 0x0763,
+ 0xc06: 0x0957, 0xc07: 0x169f, 0xc08: 0x169f, 0xc09: 0x0a83, 0xc0a: 0x14d3, 0xc0b: 0x09bb,
+ 0xc0c: 0x0a7f, 0xc0d: 0x0c67, 0xc0e: 0x1047, 0xc0f: 0x11d7, 0xc10: 0x130f, 0xc11: 0x134b,
+ 0xc12: 0x137f, 0xc13: 0x1493, 0xc14: 0x0deb, 0xc15: 0x0e77, 0xc16: 0x0f23, 0xc17: 0x0fbb,
+ 0xc18: 0x12d7, 0xc19: 0x14bb, 0xc1a: 0x15e7, 0xc1b: 0x0787, 0xc1c: 0x092b, 0xc1d: 0x0dff,
+ 0xc1e: 0x0f47, 0xc1f: 0x130b, 0xc20: 0x1637, 0xc21: 0x0b2b, 0xc22: 0x0eef, 0xc23: 0x12fb,
+ 0xc24: 0x138f, 0xc25: 0x0c9b, 0xc26: 0x1233, 0xc27: 0x1357, 0xc28: 0x0b97, 0xc29: 0x0d87,
+ 0xc2a: 0x0e8f, 0xc2b: 0x0f93, 0xc2c: 0x149f, 0xc2d: 0x07c7, 0xc2e: 0x085f, 0xc2f: 0x08cb,
+ 0xc30: 0x0d03, 0xc31: 0x0df7, 0xc32: 0x0f43, 0xc33: 0x1067, 0xc34: 0x11ef, 0xc35: 0x1303,
+ 0xc36: 0x131b, 0xc37: 0x143f, 0xc38: 0x1563, 0xc39: 0x1617, 0xc3a: 0x1633, 0xc3b: 0x10a3,
+ 0xc3c: 0x10e3, 0xc3d: 0x119b, 0xc3e: 0x12bb, 0xc3f: 0x14ef,
+ // Block 0x31, offset 0xc40
+ 0xc40: 0x163f, 0xc41: 0x13c3, 0xc42: 0x0a3f, 0xc43: 0x0bb3, 0xc44: 0x1153, 0xc45: 0x1213,
+ 0xc46: 0x0f77, 0xc47: 0x10ab, 0xc48: 0x140f, 0xc49: 0x155b, 0xc4a: 0x0a3b, 0xc4b: 0x0b07,
+ 0xc4c: 0x0def, 0xc4d: 0x0ea3, 0xc4e: 0x0ed7, 0xc4f: 0x118b, 0xc50: 0x11b3, 0xc51: 0x151b,
+ 0xc52: 0x08c7, 0xc53: 0x121f, 0xc54: 0x086b, 0xc55: 0x0867, 0xc56: 0x110f, 0xc57: 0x119f,
+ 0xc58: 0x12d3, 0xc59: 0x1523, 0xc5a: 0x13df, 0xc5b: 0x0c9f, 0xc5c: 0x0deb, 0xc5d: 0x13cf,
+ 0xc5e: 0x076f, 0xc5f: 0x0adb, 0xc60: 0x0c0b, 0xc61: 0x0fa7, 0xc62: 0x1027, 0xc63: 0x08eb,
+ 0xc64: 0x10b3, 0xc65: 0x07d7, 0xc66: 0x0bef, 0xc67: 0x074f, 0xc68: 0x0e63, 0xc69: 0x0d1b,
+ 0xc6a: 0x1187, 0xc6b: 0x093f, 0xc6c: 0x0a2b, 0xc6d: 0x1073, 0xc6e: 0x12db, 0xc6f: 0x13b3,
+ 0xc70: 0x0e2f, 0xc71: 0x146f, 0xc72: 0x0e5b, 0xc73: 0x0caf, 0xc74: 0x1293, 0xc75: 0x0ccf,
+ 0xc76: 0x1023, 0xc77: 0x07a3, 0xc78: 0x081f, 0xc79: 0x0863, 0xc7a: 0x0dcb, 0xc7b: 0x1173,
+ 0xc7c: 0x126b, 0xc7d: 0x13bf, 0xc7e: 0x14cf, 0xc7f: 0x08d3,
+ // Block 0x32, offset 0xc80
+ 0xc80: 0x0987, 0xc81: 0x0a8f, 0xc82: 0x0ba7, 0xc83: 0x0d37, 0xc84: 0x0ef3, 0xc85: 0x10b7,
+ 0xc86: 0x150b, 0xc87: 0x15ef, 0xc88: 0x1643, 0xc89: 0x165b, 0xc8a: 0x08af, 0xc8b: 0x0d6b,
+ 0xc8c: 0x0e1b, 0xc8d: 0x1463, 0xc8e: 0x0b73, 0xc8f: 0x0c4f, 0xc90: 0x0c6b, 0xc91: 0x0cfb,
+ 0xc92: 0x0ee3, 0xc93: 0x0f2f, 0xc94: 0x0fdf, 0xc95: 0x1103, 0xc96: 0x11a7, 0xc97: 0x120b,
+ 0xc98: 0x1453, 0xc99: 0x12e3, 0xc9a: 0x147b, 0xc9b: 0x14f3, 0xc9c: 0x0887, 0xc9d: 0x08b3,
+ 0xc9e: 0x099b, 0xc9f: 0x0f1f, 0xca0: 0x136b, 0xca1: 0x13b3, 0xca2: 0x0b93, 0xca3: 0x0c03,
+ 0xca4: 0x0cc7, 0xca5: 0x0e27, 0xca6: 0x114f, 0xca7: 0x0f9b, 0xca8: 0x07b3, 0xca9: 0x09f7,
+ 0xcaa: 0x0adb, 0xcab: 0x0b3f, 0xcac: 0x0c0f, 0xcad: 0x0fb7, 0xcae: 0x0fd3, 0xcaf: 0x11e3,
+ 0xcb0: 0x1203, 0xcb1: 0x14d7, 0xcb2: 0x1557, 0xcb3: 0x1567, 0xcb4: 0x15a3, 0xcb5: 0x07cb,
+ 0xcb6: 0x10f7, 0xcb7: 0x14c3, 0xcb8: 0x153f, 0xcb9: 0x0c27, 0xcba: 0x078f, 0xcbb: 0x07ef,
+ 0xcbc: 0x0adf, 0xcbd: 0x0aff, 0xcbe: 0x0d27, 0xcbf: 0x0deb,
+ // Block 0x33, offset 0xcc0
+ 0xcc0: 0x0f3b, 0xcc1: 0x1043, 0xcc2: 0x12ef, 0xcc3: 0x148f, 0xcc4: 0x1697, 0xcc5: 0x0d5b,
+ 0xcc6: 0x1517, 0xcc7: 0x08ab, 0xcc8: 0x0da7, 0xcc9: 0x0db3, 0xcca: 0x0e87, 0xccb: 0x0ebf,
+ 0xccc: 0x0fc3, 0xccd: 0x101f, 0xcce: 0x109f, 0xccf: 0x1183, 0xcd0: 0x15af, 0xcd1: 0x0827,
+ 0xcd2: 0x0c7b, 0xcd3: 0x1527, 0xcd4: 0x07df, 0xcd5: 0x0b23, 0xcd6: 0x0ea7, 0xcd7: 0x1457,
+ 0xcd8: 0x0bdf, 0xcd9: 0x0c2f, 0xcda: 0x0dbb, 0xcdb: 0x0fa7, 0xcdc: 0x152f, 0xcdd: 0x088f,
+ 0xcde: 0x0977, 0xcdf: 0x0b0f, 0xce0: 0x0d4b, 0xce1: 0x0d97, 0xce2: 0x0dd7, 0xce3: 0x0e6b,
+ 0xce4: 0x0fbf, 0xce5: 0x1033, 0xce6: 0x11cf, 0xce7: 0x136f, 0xce8: 0x137b, 0xce9: 0x14cb,
+ 0xcea: 0x154b, 0xceb: 0x08fb, 0xcec: 0x0ec3, 0xced: 0x097b, 0xcee: 0x0f3f, 0xcef: 0x0fe3,
+ 0xcf0: 0x12ff, 0xcf1: 0x1533, 0xcf2: 0x161f, 0xcf3: 0x1647, 0xcf4: 0x0daf, 0xcf5: 0x0e9f,
+ 0xcf6: 0x123b, 0xcf7: 0x112f, 0xcf8: 0x113b, 0xcf9: 0x115f, 0xcfa: 0x0f8f, 0xcfb: 0x0f17,
+ 0xcfc: 0x13db, 0xcfd: 0x07ab, 0xcfe: 0x12a3, 0xcff: 0x0893,
+ // Block 0x34, offset 0xd00
+ 0xd00: 0x0883, 0xd01: 0x0b83, 0xd02: 0x0ca3, 0xd03: 0x116b, 0xd04: 0x0acb, 0xd05: 0x0e7b,
+ 0xd06: 0x0d67, 0xd07: 0x145f, 0xd08: 0x135f, 0xd09: 0x151f, 0xd0a: 0x139b, 0xd0b: 0x0b9f,
+ 0xd0c: 0x07ff, 0xd0d: 0x09d3, 0xd10: 0x0a27,
+ 0xd12: 0x0d57, 0xd15: 0x086f, 0xd16: 0x0f97, 0xd17: 0x105b,
+ 0xd18: 0x10bf, 0xd19: 0x10db, 0xd1a: 0x10df, 0xd1b: 0x10f3, 0xd1c: 0x156f, 0xd1d: 0x1163,
+ 0xd1e: 0x11e7, 0xd20: 0x1307, 0xd22: 0x13cb,
+ 0xd25: 0x147f, 0xd26: 0x14ab,
+ 0xd2a: 0x15c3, 0xd2b: 0x15c7, 0xd2c: 0x15cb, 0xd2d: 0x162f, 0xd2e: 0x14a3, 0xd2f: 0x153b,
+ 0xd30: 0x07cf, 0xd31: 0x07f3, 0xd32: 0x0807, 0xd33: 0x08c3, 0xd34: 0x08cf, 0xd35: 0x090f,
+ 0xd36: 0x09c3, 0xd37: 0x09df, 0xd38: 0x09e7, 0xd39: 0x0a23, 0xd3a: 0x0a2f, 0xd3b: 0x0b0b,
+ 0xd3c: 0x0b13, 0xd3d: 0x0c1b, 0xd3e: 0x0c43, 0xd3f: 0x0c4b,
+ // Block 0x35, offset 0xd40
+ 0xd40: 0x0c63, 0xd41: 0x0d0f, 0xd42: 0x0d3f, 0xd43: 0x0d5f, 0xd44: 0x0dcf, 0xd45: 0x0e93,
+ 0xd46: 0x0eaf, 0xd47: 0x0edf, 0xd48: 0x0f33, 0xd49: 0x0f53, 0xd4a: 0x0fc7, 0xd4b: 0x10a7,
+ 0xd4c: 0x10c3, 0xd4d: 0x10cb, 0xd4e: 0x10c7, 0xd4f: 0x10cf, 0xd50: 0x10d3, 0xd51: 0x10d7,
+ 0xd52: 0x10eb, 0xd53: 0x10ef, 0xd54: 0x1113, 0xd55: 0x1127, 0xd56: 0x1143, 0xd57: 0x11a7,
+ 0xd58: 0x11af, 0xd59: 0x11b7, 0xd5a: 0x11cb, 0xd5b: 0x11f3, 0xd5c: 0x1243, 0xd5d: 0x1277,
+ 0xd5e: 0x1277, 0xd5f: 0x12df, 0xd60: 0x1387, 0xd61: 0x139f, 0xd62: 0x13d3, 0xd63: 0x13d7,
+ 0xd64: 0x141b, 0xd65: 0x141f, 0xd66: 0x1477, 0xd67: 0x147f, 0xd68: 0x154f, 0xd69: 0x1593,
+ 0xd6a: 0x15ab, 0xd6b: 0x0c13, 0xd6c: 0x1792, 0xd6d: 0x125b,
+ 0xd70: 0x0757, 0xd71: 0x085b, 0xd72: 0x081b, 0xd73: 0x07c3, 0xd74: 0x0803, 0xd75: 0x082f,
+ 0xd76: 0x08bf, 0xd77: 0x08db, 0xd78: 0x09c3, 0xd79: 0x09af, 0xd7a: 0x09bf, 0xd7b: 0x09db,
+ 0xd7c: 0x0a27, 0xd7d: 0x0a37, 0xd7e: 0x0a7b, 0xd7f: 0x0a87,
+ // Block 0x36, offset 0xd80
+ 0xd80: 0x0aa3, 0xd81: 0x0ab3, 0xd82: 0x0b9b, 0xd83: 0x0ba3, 0xd84: 0x0bd3, 0xd85: 0x0bf3,
+ 0xd86: 0x0c23, 0xd87: 0x0c3b, 0xd88: 0x0c2b, 0xd89: 0x0c4b, 0xd8a: 0x0c3f, 0xd8b: 0x0c63,
+ 0xd8c: 0x0c7f, 0xd8d: 0x0cd7, 0xd8e: 0x0ce3, 0xd8f: 0x0ceb, 0xd90: 0x0d13, 0xd91: 0x0d57,
+ 0xd92: 0x0d87, 0xd93: 0x0d8b, 0xd94: 0x0d9f, 0xd95: 0x0e1f, 0xd96: 0x0e2f, 0xd97: 0x0e87,
+ 0xd98: 0x0ed3, 0xd99: 0x0ecb, 0xd9a: 0x0edf, 0xd9b: 0x0efb, 0xd9c: 0x0f33, 0xd9d: 0x108b,
+ 0xd9e: 0x0f57, 0xd9f: 0x0f8b, 0xda0: 0x0f97, 0xda1: 0x0fd7, 0xda2: 0x0ff3, 0xda3: 0x1017,
+ 0xda4: 0x103b, 0xda5: 0x103f, 0xda6: 0x105b, 0xda7: 0x105f, 0xda8: 0x106f, 0xda9: 0x1083,
+ 0xdaa: 0x107f, 0xdab: 0x10af, 0xdac: 0x112b, 0xdad: 0x1143, 0xdae: 0x115b, 0xdaf: 0x1193,
+ 0xdb0: 0x11a7, 0xdb1: 0x11c3, 0xdb2: 0x11f3, 0xdb3: 0x12a7, 0xdb4: 0x12cf, 0xdb5: 0x1343,
+ 0xdb6: 0x138b, 0xdb7: 0x1397, 0xdb8: 0x139f, 0xdb9: 0x13b7, 0xdba: 0x13cb, 0xdbb: 0x13bb,
+ 0xdbc: 0x13d3, 0xdbd: 0x13cf, 0xdbe: 0x13c7, 0xdbf: 0x13d7,
+ // Block 0x37, offset 0xdc0
+ 0xdc0: 0x13e3, 0xdc1: 0x141f, 0xdc2: 0x145b, 0xdc3: 0x148b, 0xdc4: 0x14bf, 0xdc5: 0x14df,
+ 0xdc6: 0x152b, 0xdc7: 0x154f, 0xdc8: 0x156f, 0xdc9: 0x1583, 0xdca: 0x1593, 0xdcb: 0x159f,
+ 0xdcc: 0x15ab, 0xdcd: 0x15ff, 0xdce: 0x169f, 0xdcf: 0x1729, 0xdd0: 0x1724, 0xdd1: 0x1756,
+ 0xdd2: 0x067f, 0xdd3: 0x06a7, 0xdd4: 0x06ab, 0xdd5: 0x17d8, 0xdd6: 0x1805, 0xdd7: 0x187d,
+ 0xdd8: 0x168b, 0xdd9: 0x169b,
+ // Block 0x38, offset 0xe00
+ 0xe00: 0x1b68, 0xe01: 0x1b6b, 0xe02: 0x1b6e, 0xe03: 0x1d9b, 0xe04: 0x1d9f, 0xe05: 0x1bf2,
+ 0xe06: 0x1bf2,
+ 0xe13: 0x1f08, 0xe14: 0x1ef9, 0xe15: 0x1efe, 0xe16: 0x1f0d, 0xe17: 0x1f03,
+ 0xe1d: 0x43cb,
+ 0xe1e: 0x8115, 0xe1f: 0x443d, 0xe20: 0x022d, 0xe21: 0x0215, 0xe22: 0x021e, 0xe23: 0x0221,
+ 0xe24: 0x0224, 0xe25: 0x0227, 0xe26: 0x022a, 0xe27: 0x0230, 0xe28: 0x0233, 0xe29: 0x0017,
+ 0xe2a: 0x442b, 0xe2b: 0x4431, 0xe2c: 0x452f, 0xe2d: 0x4537, 0xe2e: 0x4383, 0xe2f: 0x4389,
+ 0xe30: 0x438f, 0xe31: 0x4395, 0xe32: 0x43a1, 0xe33: 0x43a7, 0xe34: 0x43ad, 0xe35: 0x43b9,
+ 0xe36: 0x43bf, 0xe38: 0x43c5, 0xe39: 0x43d1, 0xe3a: 0x43d7, 0xe3b: 0x43dd,
+ 0xe3c: 0x43e9, 0xe3e: 0x43ef,
+ // Block 0x39, offset 0xe40
+ 0xe40: 0x43f5, 0xe41: 0x43fb, 0xe43: 0x4401, 0xe44: 0x4407,
+ 0xe46: 0x4413, 0xe47: 0x4419, 0xe48: 0x441f, 0xe49: 0x4425, 0xe4a: 0x4437, 0xe4b: 0x43b3,
+ 0xe4c: 0x439b, 0xe4d: 0x43e3, 0xe4e: 0x440d, 0xe4f: 0x1f12, 0xe50: 0x0299, 0xe51: 0x0299,
+ 0xe52: 0x02a2, 0xe53: 0x02a2, 0xe54: 0x02a2, 0xe55: 0x02a2, 0xe56: 0x02a5, 0xe57: 0x02a5,
+ 0xe58: 0x02a5, 0xe59: 0x02a5, 0xe5a: 0x02ab, 0xe5b: 0x02ab, 0xe5c: 0x02ab, 0xe5d: 0x02ab,
+ 0xe5e: 0x029f, 0xe5f: 0x029f, 0xe60: 0x029f, 0xe61: 0x029f, 0xe62: 0x02a8, 0xe63: 0x02a8,
+ 0xe64: 0x02a8, 0xe65: 0x02a8, 0xe66: 0x029c, 0xe67: 0x029c, 0xe68: 0x029c, 0xe69: 0x029c,
+ 0xe6a: 0x02cf, 0xe6b: 0x02cf, 0xe6c: 0x02cf, 0xe6d: 0x02cf, 0xe6e: 0x02d2, 0xe6f: 0x02d2,
+ 0xe70: 0x02d2, 0xe71: 0x02d2, 0xe72: 0x02b1, 0xe73: 0x02b1, 0xe74: 0x02b1, 0xe75: 0x02b1,
+ 0xe76: 0x02ae, 0xe77: 0x02ae, 0xe78: 0x02ae, 0xe79: 0x02ae, 0xe7a: 0x02b4, 0xe7b: 0x02b4,
+ 0xe7c: 0x02b4, 0xe7d: 0x02b4, 0xe7e: 0x02b7, 0xe7f: 0x02b7,
+ // Block 0x3a, offset 0xe80
+ 0xe80: 0x02b7, 0xe81: 0x02b7, 0xe82: 0x02c0, 0xe83: 0x02c0, 0xe84: 0x02bd, 0xe85: 0x02bd,
+ 0xe86: 0x02c3, 0xe87: 0x02c3, 0xe88: 0x02ba, 0xe89: 0x02ba, 0xe8a: 0x02c9, 0xe8b: 0x02c9,
+ 0xe8c: 0x02c6, 0xe8d: 0x02c6, 0xe8e: 0x02d5, 0xe8f: 0x02d5, 0xe90: 0x02d5, 0xe91: 0x02d5,
+ 0xe92: 0x02db, 0xe93: 0x02db, 0xe94: 0x02db, 0xe95: 0x02db, 0xe96: 0x02e1, 0xe97: 0x02e1,
+ 0xe98: 0x02e1, 0xe99: 0x02e1, 0xe9a: 0x02de, 0xe9b: 0x02de, 0xe9c: 0x02de, 0xe9d: 0x02de,
+ 0xe9e: 0x02e4, 0xe9f: 0x02e4, 0xea0: 0x02e7, 0xea1: 0x02e7, 0xea2: 0x02e7, 0xea3: 0x02e7,
+ 0xea4: 0x44a9, 0xea5: 0x44a9, 0xea6: 0x02ed, 0xea7: 0x02ed, 0xea8: 0x02ed, 0xea9: 0x02ed,
+ 0xeaa: 0x02ea, 0xeab: 0x02ea, 0xeac: 0x02ea, 0xead: 0x02ea, 0xeae: 0x0308, 0xeaf: 0x0308,
+ 0xeb0: 0x44a3, 0xeb1: 0x44a3,
+ // Block 0x3b, offset 0xec0
+ 0xed3: 0x02d8, 0xed4: 0x02d8, 0xed5: 0x02d8, 0xed6: 0x02d8, 0xed7: 0x02f6,
+ 0xed8: 0x02f6, 0xed9: 0x02f3, 0xeda: 0x02f3, 0xedb: 0x02f9, 0xedc: 0x02f9, 0xedd: 0x21e2,
+ 0xede: 0x02ff, 0xedf: 0x02ff, 0xee0: 0x02f0, 0xee1: 0x02f0, 0xee2: 0x02fc, 0xee3: 0x02fc,
+ 0xee4: 0x0305, 0xee5: 0x0305, 0xee6: 0x0305, 0xee7: 0x0305, 0xee8: 0x028d, 0xee9: 0x028d,
+ 0xeea: 0x273d, 0xeeb: 0x273d, 0xeec: 0x27ad, 0xeed: 0x27ad, 0xeee: 0x277c, 0xeef: 0x277c,
+ 0xef0: 0x2798, 0xef1: 0x2798, 0xef2: 0x2791, 0xef3: 0x2791, 0xef4: 0x279f, 0xef5: 0x279f,
+ 0xef6: 0x27a6, 0xef7: 0x27a6, 0xef8: 0x27a6, 0xef9: 0x2783, 0xefa: 0x2783, 0xefb: 0x2783,
+ 0xefc: 0x0302, 0xefd: 0x0302, 0xefe: 0x0302, 0xeff: 0x0302,
+ // Block 0x3c, offset 0xf00
+ 0xf00: 0x2744, 0xf01: 0x274b, 0xf02: 0x2767, 0xf03: 0x2783, 0xf04: 0x278a, 0xf05: 0x1f1c,
+ 0xf06: 0x1f21, 0xf07: 0x1f26, 0xf08: 0x1f35, 0xf09: 0x1f44, 0xf0a: 0x1f49, 0xf0b: 0x1f4e,
+ 0xf0c: 0x1f53, 0xf0d: 0x1f58, 0xf0e: 0x1f67, 0xf0f: 0x1f76, 0xf10: 0x1f7b, 0xf11: 0x1f80,
+ 0xf12: 0x1f8f, 0xf13: 0x1f9e, 0xf14: 0x1fa3, 0xf15: 0x1fa8, 0xf16: 0x1fad, 0xf17: 0x1fbc,
+ 0xf18: 0x1fc1, 0xf19: 0x1fd0, 0xf1a: 0x1fd5, 0xf1b: 0x1fda, 0xf1c: 0x1fe9, 0xf1d: 0x1fee,
+ 0xf1e: 0x1ff3, 0xf1f: 0x1ffd, 0xf20: 0x2039, 0xf21: 0x2048, 0xf22: 0x2057, 0xf23: 0x205c,
+ 0xf24: 0x2061, 0xf25: 0x206b, 0xf26: 0x207a, 0xf27: 0x207f, 0xf28: 0x208e, 0xf29: 0x2093,
+ 0xf2a: 0x2098, 0xf2b: 0x20a7, 0xf2c: 0x20ac, 0xf2d: 0x20bb, 0xf2e: 0x20c0, 0xf2f: 0x20c5,
+ 0xf30: 0x20ca, 0xf31: 0x20cf, 0xf32: 0x20d4, 0xf33: 0x20d9, 0xf34: 0x20de, 0xf35: 0x20e3,
+ 0xf36: 0x20e8, 0xf37: 0x20ed, 0xf38: 0x20f2, 0xf39: 0x20f7, 0xf3a: 0x20fc, 0xf3b: 0x2101,
+ 0xf3c: 0x2106, 0xf3d: 0x210b, 0xf3e: 0x2110, 0xf3f: 0x211a,
+ // Block 0x3d, offset 0xf40
+ 0xf40: 0x211f, 0xf41: 0x2124, 0xf42: 0x2129, 0xf43: 0x2133, 0xf44: 0x2138, 0xf45: 0x2142,
+ 0xf46: 0x2147, 0xf47: 0x214c, 0xf48: 0x2151, 0xf49: 0x2156, 0xf4a: 0x215b, 0xf4b: 0x2160,
+ 0xf4c: 0x2165, 0xf4d: 0x216a, 0xf4e: 0x2179, 0xf4f: 0x2188, 0xf50: 0x218d, 0xf51: 0x2192,
+ 0xf52: 0x2197, 0xf53: 0x219c, 0xf54: 0x21a1, 0xf55: 0x21ab, 0xf56: 0x21b0, 0xf57: 0x21b5,
+ 0xf58: 0x21c4, 0xf59: 0x21d3, 0xf5a: 0x21d8, 0xf5b: 0x445b, 0xf5c: 0x4461, 0xf5d: 0x4497,
+ 0xf5e: 0x44ee, 0xf5f: 0x44f5, 0xf60: 0x44fc, 0xf61: 0x4503, 0xf62: 0x450a, 0xf63: 0x4511,
+ 0xf64: 0x2759, 0xf65: 0x2760, 0xf66: 0x2767, 0xf67: 0x276e, 0xf68: 0x2783, 0xf69: 0x278a,
+ 0xf6a: 0x1f2b, 0xf6b: 0x1f30, 0xf6c: 0x1f35, 0xf6d: 0x1f3a, 0xf6e: 0x1f44, 0xf6f: 0x1f49,
+ 0xf70: 0x1f5d, 0xf71: 0x1f62, 0xf72: 0x1f67, 0xf73: 0x1f6c, 0xf74: 0x1f76, 0xf75: 0x1f7b,
+ 0xf76: 0x1f85, 0xf77: 0x1f8a, 0xf78: 0x1f8f, 0xf79: 0x1f94, 0xf7a: 0x1f9e, 0xf7b: 0x1fa3,
+ 0xf7c: 0x20cf, 0xf7d: 0x20d4, 0xf7e: 0x20e3, 0xf7f: 0x20e8,
+ // Block 0x3e, offset 0xf80
+ 0xf80: 0x20ed, 0xf81: 0x2101, 0xf82: 0x2106, 0xf83: 0x210b, 0xf84: 0x2110, 0xf85: 0x2129,
+ 0xf86: 0x2133, 0xf87: 0x2138, 0xf88: 0x213d, 0xf89: 0x2151, 0xf8a: 0x216f, 0xf8b: 0x2174,
+ 0xf8c: 0x2179, 0xf8d: 0x217e, 0xf8e: 0x2188, 0xf8f: 0x218d, 0xf90: 0x4497, 0xf91: 0x21ba,
+ 0xf92: 0x21bf, 0xf93: 0x21c4, 0xf94: 0x21c9, 0xf95: 0x21d3, 0xf96: 0x21d8, 0xf97: 0x2744,
+ 0xf98: 0x274b, 0xf99: 0x2752, 0xf9a: 0x2767, 0xf9b: 0x2775, 0xf9c: 0x1f1c, 0xf9d: 0x1f21,
+ 0xf9e: 0x1f26, 0xf9f: 0x1f35, 0xfa0: 0x1f3f, 0xfa1: 0x1f4e, 0xfa2: 0x1f53, 0xfa3: 0x1f58,
+ 0xfa4: 0x1f67, 0xfa5: 0x1f71, 0xfa6: 0x1f8f, 0xfa7: 0x1fa8, 0xfa8: 0x1fad, 0xfa9: 0x1fbc,
+ 0xfaa: 0x1fc1, 0xfab: 0x1fd0, 0xfac: 0x1fda, 0xfad: 0x1fe9, 0xfae: 0x1fee, 0xfaf: 0x1ff3,
+ 0xfb0: 0x1ffd, 0xfb1: 0x2039, 0xfb2: 0x203e, 0xfb3: 0x2048, 0xfb4: 0x2057, 0xfb5: 0x205c,
+ 0xfb6: 0x2061, 0xfb7: 0x206b, 0xfb8: 0x207a, 0xfb9: 0x208e, 0xfba: 0x2093, 0xfbb: 0x2098,
+ 0xfbc: 0x20a7, 0xfbd: 0x20ac, 0xfbe: 0x20bb, 0xfbf: 0x20c0,
+ // Block 0x3f, offset 0xfc0
+ 0xfc0: 0x20c5, 0xfc1: 0x20ca, 0xfc2: 0x20d9, 0xfc3: 0x20de, 0xfc4: 0x20f2, 0xfc5: 0x20f7,
+ 0xfc6: 0x20fc, 0xfc7: 0x2101, 0xfc8: 0x2106, 0xfc9: 0x211a, 0xfca: 0x211f, 0xfcb: 0x2124,
+ 0xfcc: 0x2129, 0xfcd: 0x212e, 0xfce: 0x2142, 0xfcf: 0x2147, 0xfd0: 0x214c, 0xfd1: 0x2151,
+ 0xfd2: 0x2160, 0xfd3: 0x2165, 0xfd4: 0x216a, 0xfd5: 0x2179, 0xfd6: 0x2183, 0xfd7: 0x2192,
+ 0xfd8: 0x2197, 0xfd9: 0x448b, 0xfda: 0x21ab, 0xfdb: 0x21b0, 0xfdc: 0x21b5, 0xfdd: 0x21c4,
+ 0xfde: 0x21ce, 0xfdf: 0x2767, 0xfe0: 0x2775, 0xfe1: 0x1f35, 0xfe2: 0x1f3f, 0xfe3: 0x1f67,
+ 0xfe4: 0x1f71, 0xfe5: 0x1f8f, 0xfe6: 0x1f99, 0xfe7: 0x1ffd, 0xfe8: 0x2002, 0xfe9: 0x2025,
+ 0xfea: 0x202a, 0xfeb: 0x2101, 0xfec: 0x2106, 0xfed: 0x2129, 0xfee: 0x2179, 0xfef: 0x2183,
+ 0xff0: 0x21c4, 0xff1: 0x21ce, 0xff2: 0x453f, 0xff3: 0x4547, 0xff4: 0x454f, 0xff5: 0x2084,
+ 0xff6: 0x2089, 0xff7: 0x209d, 0xff8: 0x20a2, 0xff9: 0x20b1, 0xffa: 0x20b6, 0xffb: 0x2007,
+ 0xffc: 0x200c, 0xffd: 0x202f, 0xffe: 0x2034, 0xfff: 0x1fc6,
+ // Block 0x40, offset 0x1000
+ 0x1000: 0x1fcb, 0x1001: 0x1fb2, 0x1002: 0x1fb7, 0x1003: 0x1fdf, 0x1004: 0x1fe4, 0x1005: 0x204d,
+ 0x1006: 0x2052, 0x1007: 0x2070, 0x1008: 0x2075, 0x1009: 0x2011, 0x100a: 0x2016, 0x100b: 0x201b,
+ 0x100c: 0x2025, 0x100d: 0x2020, 0x100e: 0x1ff8, 0x100f: 0x2043, 0x1010: 0x2066, 0x1011: 0x2084,
+ 0x1012: 0x2089, 0x1013: 0x209d, 0x1014: 0x20a2, 0x1015: 0x20b1, 0x1016: 0x20b6, 0x1017: 0x2007,
+ 0x1018: 0x200c, 0x1019: 0x202f, 0x101a: 0x2034, 0x101b: 0x1fc6, 0x101c: 0x1fcb, 0x101d: 0x1fb2,
+ 0x101e: 0x1fb7, 0x101f: 0x1fdf, 0x1020: 0x1fe4, 0x1021: 0x204d, 0x1022: 0x2052, 0x1023: 0x2070,
+ 0x1024: 0x2075, 0x1025: 0x2011, 0x1026: 0x2016, 0x1027: 0x201b, 0x1028: 0x2025, 0x1029: 0x2020,
+ 0x102a: 0x1ff8, 0x102b: 0x2043, 0x102c: 0x2066, 0x102d: 0x2011, 0x102e: 0x2016, 0x102f: 0x201b,
+ 0x1030: 0x2025, 0x1031: 0x2002, 0x1032: 0x202a, 0x1033: 0x207f, 0x1034: 0x1fe9, 0x1035: 0x1fee,
+ 0x1036: 0x1ff3, 0x1037: 0x2011, 0x1038: 0x2016, 0x1039: 0x201b, 0x103a: 0x207f, 0x103b: 0x208e,
+ 0x103c: 0x4443, 0x103d: 0x4443,
+ // Block 0x41, offset 0x1040
+ 0x1050: 0x24a4, 0x1051: 0x24b9,
+ 0x1052: 0x24b9, 0x1053: 0x24c0, 0x1054: 0x24c7, 0x1055: 0x24dc, 0x1056: 0x24e3, 0x1057: 0x24ea,
+ 0x1058: 0x250d, 0x1059: 0x250d, 0x105a: 0x2530, 0x105b: 0x2529, 0x105c: 0x2545, 0x105d: 0x2537,
+ 0x105e: 0x253e, 0x105f: 0x2561, 0x1060: 0x2561, 0x1061: 0x255a, 0x1062: 0x2568, 0x1063: 0x2568,
+ 0x1064: 0x2592, 0x1065: 0x2592, 0x1066: 0x25ae, 0x1067: 0x2576, 0x1068: 0x2576, 0x1069: 0x256f,
+ 0x106a: 0x2584, 0x106b: 0x2584, 0x106c: 0x258b, 0x106d: 0x258b, 0x106e: 0x25b5, 0x106f: 0x25c3,
+ 0x1070: 0x25c3, 0x1071: 0x25ca, 0x1072: 0x25ca, 0x1073: 0x25d1, 0x1074: 0x25d8, 0x1075: 0x25df,
+ 0x1076: 0x25e6, 0x1077: 0x25e6, 0x1078: 0x25ed, 0x1079: 0x25fb, 0x107a: 0x2609, 0x107b: 0x2602,
+ 0x107c: 0x2610, 0x107d: 0x2610, 0x107e: 0x2625, 0x107f: 0x262c,
+ // Block 0x42, offset 0x1080
+ 0x1080: 0x265d, 0x1081: 0x266b, 0x1082: 0x2664, 0x1083: 0x2648, 0x1084: 0x2648, 0x1085: 0x2672,
+ 0x1086: 0x2672, 0x1087: 0x2679, 0x1088: 0x2679, 0x1089: 0x26a3, 0x108a: 0x26aa, 0x108b: 0x26b1,
+ 0x108c: 0x2687, 0x108d: 0x2695, 0x108e: 0x26b8, 0x108f: 0x26bf,
+ 0x1092: 0x268e, 0x1093: 0x2713, 0x1094: 0x271a, 0x1095: 0x26f0, 0x1096: 0x26f7, 0x1097: 0x26db,
+ 0x1098: 0x26db, 0x1099: 0x26e2, 0x109a: 0x270c, 0x109b: 0x2705, 0x109c: 0x272f, 0x109d: 0x272f,
+ 0x109e: 0x249d, 0x109f: 0x24b2, 0x10a0: 0x24ab, 0x10a1: 0x24d5, 0x10a2: 0x24ce, 0x10a3: 0x24f8,
+ 0x10a4: 0x24f1, 0x10a5: 0x251b, 0x10a6: 0x24ff, 0x10a7: 0x2514, 0x10a8: 0x254c, 0x10a9: 0x2599,
+ 0x10aa: 0x257d, 0x10ab: 0x25bc, 0x10ac: 0x2656, 0x10ad: 0x2680, 0x10ae: 0x2728, 0x10af: 0x2721,
+ 0x10b0: 0x2736, 0x10b1: 0x26cd, 0x10b2: 0x2633, 0x10b3: 0x26fe, 0x10b4: 0x2625, 0x10b5: 0x265d,
+ 0x10b6: 0x25f4, 0x10b7: 0x2641, 0x10b8: 0x26d4, 0x10b9: 0x26c6, 0x10ba: 0x264f, 0x10bb: 0x263a,
+ 0x10bc: 0x264f, 0x10bd: 0x26d4, 0x10be: 0x2506, 0x10bf: 0x2522,
+ // Block 0x43, offset 0x10c0
+ 0x10c0: 0x269c, 0x10c1: 0x2617, 0x10c2: 0x2496, 0x10c3: 0x263a, 0x10c4: 0x25df, 0x10c5: 0x25ae,
+ 0x10c6: 0x2553, 0x10c7: 0x26e9,
+ 0x10f0: 0x25a7, 0x10f1: 0x261e, 0x10f2: 0x29bb, 0x10f3: 0x29b2, 0x10f4: 0x29e8, 0x10f5: 0x29d6,
+ 0x10f6: 0x29c4, 0x10f7: 0x29df, 0x10f8: 0x29f1, 0x10f9: 0x25a0, 0x10fa: 0x2e95, 0x10fb: 0x2d05,
+ 0x10fc: 0x29cd,
+ // Block 0x44, offset 0x1100
+ 0x1110: 0x0019, 0x1111: 0x04fb,
+ 0x1112: 0x04ff, 0x1113: 0x0035, 0x1114: 0x0037, 0x1115: 0x0003, 0x1116: 0x003f, 0x1117: 0x0537,
+ 0x1118: 0x053b, 0x1119: 0x1cef,
+ 0x1120: 0x8132, 0x1121: 0x8132, 0x1122: 0x8132, 0x1123: 0x8132,
+ 0x1124: 0x8132, 0x1125: 0x8132, 0x1126: 0x8132, 0x1127: 0x812d, 0x1128: 0x812d, 0x1129: 0x812d,
+ 0x112a: 0x812d, 0x112b: 0x812d, 0x112c: 0x812d, 0x112d: 0x812d,
+ 0x1130: 0x1a06, 0x1131: 0x04bb, 0x1132: 0x04b7, 0x1133: 0x007f, 0x1134: 0x007f, 0x1135: 0x0011,
+ 0x1136: 0x0013, 0x1137: 0x00b7, 0x1138: 0x00bb, 0x1139: 0x052f, 0x113a: 0x0533, 0x113b: 0x0523,
+ 0x113c: 0x0527, 0x113d: 0x050b, 0x113e: 0x050f, 0x113f: 0x0503,
+ // Block 0x45, offset 0x1140
+ 0x1140: 0x0507, 0x1141: 0x0513, 0x1142: 0x0517, 0x1143: 0x051b, 0x1144: 0x051f,
+ 0x1147: 0x0077, 0x1148: 0x007b, 0x1149: 0x42a4, 0x114a: 0x42a4, 0x114b: 0x42a4,
+ 0x114c: 0x42a4, 0x114d: 0x007f, 0x114e: 0x007f, 0x114f: 0x007f, 0x1150: 0x0019, 0x1151: 0x04fb,
+ 0x1152: 0x001d, 0x1154: 0x0037, 0x1155: 0x0035, 0x1156: 0x003f, 0x1157: 0x0003,
+ 0x1158: 0x04bb, 0x1159: 0x0011, 0x115a: 0x0013, 0x115b: 0x00b7, 0x115c: 0x00bb, 0x115d: 0x052f,
+ 0x115e: 0x0533, 0x115f: 0x0007, 0x1160: 0x000d, 0x1161: 0x0015, 0x1162: 0x0017, 0x1163: 0x001b,
+ 0x1164: 0x0039, 0x1165: 0x003d, 0x1166: 0x003b, 0x1168: 0x0079, 0x1169: 0x0009,
+ 0x116a: 0x000b, 0x116b: 0x0041,
+ 0x1170: 0x42e5, 0x1171: 0x4467, 0x1172: 0x42ea, 0x1174: 0x42ef,
+ 0x1176: 0x42f4, 0x1177: 0x446d, 0x1178: 0x42f9, 0x1179: 0x4473, 0x117a: 0x42fe, 0x117b: 0x4479,
+ 0x117c: 0x4303, 0x117d: 0x447f, 0x117e: 0x4308, 0x117f: 0x4485,
+ // Block 0x46, offset 0x1180
+ 0x1180: 0x0236, 0x1181: 0x4449, 0x1182: 0x4449, 0x1183: 0x444f, 0x1184: 0x444f, 0x1185: 0x4491,
+ 0x1186: 0x4491, 0x1187: 0x4455, 0x1188: 0x4455, 0x1189: 0x449d, 0x118a: 0x449d, 0x118b: 0x449d,
+ 0x118c: 0x449d, 0x118d: 0x0239, 0x118e: 0x0239, 0x118f: 0x023c, 0x1190: 0x023c, 0x1191: 0x023c,
+ 0x1192: 0x023c, 0x1193: 0x023f, 0x1194: 0x023f, 0x1195: 0x0242, 0x1196: 0x0242, 0x1197: 0x0242,
+ 0x1198: 0x0242, 0x1199: 0x0245, 0x119a: 0x0245, 0x119b: 0x0245, 0x119c: 0x0245, 0x119d: 0x0248,
+ 0x119e: 0x0248, 0x119f: 0x0248, 0x11a0: 0x0248, 0x11a1: 0x024b, 0x11a2: 0x024b, 0x11a3: 0x024b,
+ 0x11a4: 0x024b, 0x11a5: 0x024e, 0x11a6: 0x024e, 0x11a7: 0x024e, 0x11a8: 0x024e, 0x11a9: 0x0251,
+ 0x11aa: 0x0251, 0x11ab: 0x0254, 0x11ac: 0x0254, 0x11ad: 0x0257, 0x11ae: 0x0257, 0x11af: 0x025a,
+ 0x11b0: 0x025a, 0x11b1: 0x025d, 0x11b2: 0x025d, 0x11b3: 0x025d, 0x11b4: 0x025d, 0x11b5: 0x0260,
+ 0x11b6: 0x0260, 0x11b7: 0x0260, 0x11b8: 0x0260, 0x11b9: 0x0263, 0x11ba: 0x0263, 0x11bb: 0x0263,
+ 0x11bc: 0x0263, 0x11bd: 0x0266, 0x11be: 0x0266, 0x11bf: 0x0266,
+ // Block 0x47, offset 0x11c0
+ 0x11c0: 0x0266, 0x11c1: 0x0269, 0x11c2: 0x0269, 0x11c3: 0x0269, 0x11c4: 0x0269, 0x11c5: 0x026c,
+ 0x11c6: 0x026c, 0x11c7: 0x026c, 0x11c8: 0x026c, 0x11c9: 0x026f, 0x11ca: 0x026f, 0x11cb: 0x026f,
+ 0x11cc: 0x026f, 0x11cd: 0x0272, 0x11ce: 0x0272, 0x11cf: 0x0272, 0x11d0: 0x0272, 0x11d1: 0x0275,
+ 0x11d2: 0x0275, 0x11d3: 0x0275, 0x11d4: 0x0275, 0x11d5: 0x0278, 0x11d6: 0x0278, 0x11d7: 0x0278,
+ 0x11d8: 0x0278, 0x11d9: 0x027b, 0x11da: 0x027b, 0x11db: 0x027b, 0x11dc: 0x027b, 0x11dd: 0x027e,
+ 0x11de: 0x027e, 0x11df: 0x027e, 0x11e0: 0x027e, 0x11e1: 0x0281, 0x11e2: 0x0281, 0x11e3: 0x0281,
+ 0x11e4: 0x0281, 0x11e5: 0x0284, 0x11e6: 0x0284, 0x11e7: 0x0284, 0x11e8: 0x0284, 0x11e9: 0x0287,
+ 0x11ea: 0x0287, 0x11eb: 0x0287, 0x11ec: 0x0287, 0x11ed: 0x028a, 0x11ee: 0x028a, 0x11ef: 0x028d,
+ 0x11f0: 0x028d, 0x11f1: 0x0290, 0x11f2: 0x0290, 0x11f3: 0x0290, 0x11f4: 0x0290, 0x11f5: 0x2ee2,
+ 0x11f6: 0x2ee2, 0x11f7: 0x2eea, 0x11f8: 0x2eea, 0x11f9: 0x2ef2, 0x11fa: 0x2ef2, 0x11fb: 0x2115,
+ 0x11fc: 0x2115,
+ // Block 0x48, offset 0x1200
+ 0x1200: 0x0081, 0x1201: 0x0083, 0x1202: 0x0085, 0x1203: 0x0087, 0x1204: 0x0089, 0x1205: 0x008b,
+ 0x1206: 0x008d, 0x1207: 0x008f, 0x1208: 0x0091, 0x1209: 0x0093, 0x120a: 0x0095, 0x120b: 0x0097,
+ 0x120c: 0x0099, 0x120d: 0x009b, 0x120e: 0x009d, 0x120f: 0x009f, 0x1210: 0x00a1, 0x1211: 0x00a3,
+ 0x1212: 0x00a5, 0x1213: 0x00a7, 0x1214: 0x00a9, 0x1215: 0x00ab, 0x1216: 0x00ad, 0x1217: 0x00af,
+ 0x1218: 0x00b1, 0x1219: 0x00b3, 0x121a: 0x00b5, 0x121b: 0x00b7, 0x121c: 0x00b9, 0x121d: 0x00bb,
+ 0x121e: 0x00bd, 0x121f: 0x04ef, 0x1220: 0x04f3, 0x1221: 0x04ff, 0x1222: 0x0513, 0x1223: 0x0517,
+ 0x1224: 0x04fb, 0x1225: 0x0623, 0x1226: 0x061b, 0x1227: 0x053f, 0x1228: 0x0547, 0x1229: 0x054f,
+ 0x122a: 0x0557, 0x122b: 0x055f, 0x122c: 0x05e3, 0x122d: 0x05eb, 0x122e: 0x05f3, 0x122f: 0x0597,
+ 0x1230: 0x0627, 0x1231: 0x0543, 0x1232: 0x054b, 0x1233: 0x0553, 0x1234: 0x055b, 0x1235: 0x0563,
+ 0x1236: 0x0567, 0x1237: 0x056b, 0x1238: 0x056f, 0x1239: 0x0573, 0x123a: 0x0577, 0x123b: 0x057b,
+ 0x123c: 0x057f, 0x123d: 0x0583, 0x123e: 0x0587, 0x123f: 0x058b,
+ // Block 0x49, offset 0x1240
+ 0x1240: 0x058f, 0x1241: 0x0593, 0x1242: 0x059b, 0x1243: 0x059f, 0x1244: 0x05a3, 0x1245: 0x05a7,
+ 0x1246: 0x05ab, 0x1247: 0x05af, 0x1248: 0x05b3, 0x1249: 0x05b7, 0x124a: 0x05bb, 0x124b: 0x05bf,
+ 0x124c: 0x05c3, 0x124d: 0x05c7, 0x124e: 0x05cb, 0x124f: 0x05cf, 0x1250: 0x05d3, 0x1251: 0x05d7,
+ 0x1252: 0x05db, 0x1253: 0x05df, 0x1254: 0x05e7, 0x1255: 0x05ef, 0x1256: 0x05f7, 0x1257: 0x05fb,
+ 0x1258: 0x05ff, 0x1259: 0x0603, 0x125a: 0x0607, 0x125b: 0x060b, 0x125c: 0x060f, 0x125d: 0x061f,
+ 0x125e: 0x49ff, 0x125f: 0x4a05, 0x1260: 0x03c3, 0x1261: 0x0313, 0x1262: 0x0317, 0x1263: 0x043b,
+ 0x1264: 0x031b, 0x1265: 0x043f, 0x1266: 0x0443, 0x1267: 0x031f, 0x1268: 0x0323, 0x1269: 0x0327,
+ 0x126a: 0x0447, 0x126b: 0x044b, 0x126c: 0x044f, 0x126d: 0x0453, 0x126e: 0x0457, 0x126f: 0x045b,
+ 0x1270: 0x0367, 0x1271: 0x032b, 0x1272: 0x032f, 0x1273: 0x0333, 0x1274: 0x037b, 0x1275: 0x0337,
+ 0x1276: 0x033b, 0x1277: 0x033f, 0x1278: 0x0343, 0x1279: 0x0347, 0x127a: 0x034b, 0x127b: 0x034f,
+ 0x127c: 0x0353, 0x127d: 0x0357, 0x127e: 0x035b,
+ // Block 0x4a, offset 0x1280
+ 0x1280: 0x0063, 0x1281: 0x0065, 0x1282: 0x0067, 0x1283: 0x0069, 0x1284: 0x006b, 0x1285: 0x006d,
+ 0x1286: 0x006f, 0x1287: 0x0071, 0x1288: 0x0073, 0x1289: 0x0075, 0x128a: 0x0083, 0x128b: 0x0085,
+ 0x128c: 0x0087, 0x128d: 0x0089, 0x128e: 0x008b, 0x128f: 0x008d, 0x1290: 0x008f, 0x1291: 0x0091,
+ 0x1292: 0x0093, 0x1293: 0x0095, 0x1294: 0x0097, 0x1295: 0x0099, 0x1296: 0x009b, 0x1297: 0x009d,
+ 0x1298: 0x009f, 0x1299: 0x00a1, 0x129a: 0x00a3, 0x129b: 0x00a5, 0x129c: 0x00a7, 0x129d: 0x00a9,
+ 0x129e: 0x00ab, 0x129f: 0x00ad, 0x12a0: 0x00af, 0x12a1: 0x00b1, 0x12a2: 0x00b3, 0x12a3: 0x00b5,
+ 0x12a4: 0x00dd, 0x12a5: 0x00f2, 0x12a8: 0x0173, 0x12a9: 0x0176,
+ 0x12aa: 0x0179, 0x12ab: 0x017c, 0x12ac: 0x017f, 0x12ad: 0x0182, 0x12ae: 0x0185, 0x12af: 0x0188,
+ 0x12b0: 0x018b, 0x12b1: 0x018e, 0x12b2: 0x0191, 0x12b3: 0x0194, 0x12b4: 0x0197, 0x12b5: 0x019a,
+ 0x12b6: 0x019d, 0x12b7: 0x01a0, 0x12b8: 0x01a3, 0x12b9: 0x0188, 0x12ba: 0x01a6, 0x12bb: 0x01a9,
+ 0x12bc: 0x01ac, 0x12bd: 0x01af, 0x12be: 0x01b2, 0x12bf: 0x01b5,
+ // Block 0x4b, offset 0x12c0
+ 0x12c0: 0x01fd, 0x12c1: 0x0200, 0x12c2: 0x0203, 0x12c3: 0x04d3, 0x12c4: 0x01c7, 0x12c5: 0x01d0,
+ 0x12c6: 0x01d6, 0x12c7: 0x01fa, 0x12c8: 0x01eb, 0x12c9: 0x01e8, 0x12ca: 0x0206, 0x12cb: 0x0209,
+ 0x12ce: 0x0021, 0x12cf: 0x0023, 0x12d0: 0x0025, 0x12d1: 0x0027,
+ 0x12d2: 0x0029, 0x12d3: 0x002b, 0x12d4: 0x002d, 0x12d5: 0x002f, 0x12d6: 0x0031, 0x12d7: 0x0033,
+ 0x12d8: 0x0021, 0x12d9: 0x0023, 0x12da: 0x0025, 0x12db: 0x0027, 0x12dc: 0x0029, 0x12dd: 0x002b,
+ 0x12de: 0x002d, 0x12df: 0x002f, 0x12e0: 0x0031, 0x12e1: 0x0033, 0x12e2: 0x0021, 0x12e3: 0x0023,
+ 0x12e4: 0x0025, 0x12e5: 0x0027, 0x12e6: 0x0029, 0x12e7: 0x002b, 0x12e8: 0x002d, 0x12e9: 0x002f,
+ 0x12ea: 0x0031, 0x12eb: 0x0033, 0x12ec: 0x0021, 0x12ed: 0x0023, 0x12ee: 0x0025, 0x12ef: 0x0027,
+ 0x12f0: 0x0029, 0x12f1: 0x002b, 0x12f2: 0x002d, 0x12f3: 0x002f, 0x12f4: 0x0031, 0x12f5: 0x0033,
+ 0x12f6: 0x0021, 0x12f7: 0x0023, 0x12f8: 0x0025, 0x12f9: 0x0027, 0x12fa: 0x0029, 0x12fb: 0x002b,
+ 0x12fc: 0x002d, 0x12fd: 0x002f, 0x12fe: 0x0031, 0x12ff: 0x0033,
+ // Block 0x4c, offset 0x1300
+ 0x1300: 0x0239, 0x1301: 0x023c, 0x1302: 0x0248, 0x1303: 0x0251, 0x1305: 0x028a,
+ 0x1306: 0x025a, 0x1307: 0x024b, 0x1308: 0x0269, 0x1309: 0x0290, 0x130a: 0x027b, 0x130b: 0x027e,
+ 0x130c: 0x0281, 0x130d: 0x0284, 0x130e: 0x025d, 0x130f: 0x026f, 0x1310: 0x0275, 0x1311: 0x0263,
+ 0x1312: 0x0278, 0x1313: 0x0257, 0x1314: 0x0260, 0x1315: 0x0242, 0x1316: 0x0245, 0x1317: 0x024e,
+ 0x1318: 0x0254, 0x1319: 0x0266, 0x131a: 0x026c, 0x131b: 0x0272, 0x131c: 0x0293, 0x131d: 0x02e4,
+ 0x131e: 0x02cc, 0x131f: 0x0296, 0x1321: 0x023c, 0x1322: 0x0248,
+ 0x1324: 0x0287, 0x1327: 0x024b, 0x1329: 0x0290,
+ 0x132a: 0x027b, 0x132b: 0x027e, 0x132c: 0x0281, 0x132d: 0x0284, 0x132e: 0x025d, 0x132f: 0x026f,
+ 0x1330: 0x0275, 0x1331: 0x0263, 0x1332: 0x0278, 0x1334: 0x0260, 0x1335: 0x0242,
+ 0x1336: 0x0245, 0x1337: 0x024e, 0x1339: 0x0266, 0x133b: 0x0272,
+ // Block 0x4d, offset 0x1340
+ 0x1342: 0x0248,
+ 0x1347: 0x024b, 0x1349: 0x0290, 0x134b: 0x027e,
+ 0x134d: 0x0284, 0x134e: 0x025d, 0x134f: 0x026f, 0x1351: 0x0263,
+ 0x1352: 0x0278, 0x1354: 0x0260, 0x1357: 0x024e,
+ 0x1359: 0x0266, 0x135b: 0x0272, 0x135d: 0x02e4,
+ 0x135f: 0x0296, 0x1361: 0x023c, 0x1362: 0x0248,
+ 0x1364: 0x0287, 0x1367: 0x024b, 0x1368: 0x0269, 0x1369: 0x0290,
+ 0x136a: 0x027b, 0x136c: 0x0281, 0x136d: 0x0284, 0x136e: 0x025d, 0x136f: 0x026f,
+ 0x1370: 0x0275, 0x1371: 0x0263, 0x1372: 0x0278, 0x1374: 0x0260, 0x1375: 0x0242,
+ 0x1376: 0x0245, 0x1377: 0x024e, 0x1379: 0x0266, 0x137a: 0x026c, 0x137b: 0x0272,
+ 0x137c: 0x0293, 0x137e: 0x02cc,
+ // Block 0x4e, offset 0x1380
+ 0x1380: 0x0239, 0x1381: 0x023c, 0x1382: 0x0248, 0x1383: 0x0251, 0x1384: 0x0287, 0x1385: 0x028a,
+ 0x1386: 0x025a, 0x1387: 0x024b, 0x1388: 0x0269, 0x1389: 0x0290, 0x138b: 0x027e,
+ 0x138c: 0x0281, 0x138d: 0x0284, 0x138e: 0x025d, 0x138f: 0x026f, 0x1390: 0x0275, 0x1391: 0x0263,
+ 0x1392: 0x0278, 0x1393: 0x0257, 0x1394: 0x0260, 0x1395: 0x0242, 0x1396: 0x0245, 0x1397: 0x024e,
+ 0x1398: 0x0254, 0x1399: 0x0266, 0x139a: 0x026c, 0x139b: 0x0272,
+ 0x13a1: 0x023c, 0x13a2: 0x0248, 0x13a3: 0x0251,
+ 0x13a5: 0x028a, 0x13a6: 0x025a, 0x13a7: 0x024b, 0x13a8: 0x0269, 0x13a9: 0x0290,
+ 0x13ab: 0x027e, 0x13ac: 0x0281, 0x13ad: 0x0284, 0x13ae: 0x025d, 0x13af: 0x026f,
+ 0x13b0: 0x0275, 0x13b1: 0x0263, 0x13b2: 0x0278, 0x13b3: 0x0257, 0x13b4: 0x0260, 0x13b5: 0x0242,
+ 0x13b6: 0x0245, 0x13b7: 0x024e, 0x13b8: 0x0254, 0x13b9: 0x0266, 0x13ba: 0x026c, 0x13bb: 0x0272,
+ // Block 0x4f, offset 0x13c0
+ 0x13c0: 0x1a0c, 0x13c1: 0x1a09, 0x13c2: 0x1a0f, 0x13c3: 0x1a33, 0x13c4: 0x1a57, 0x13c5: 0x1a7b,
+ 0x13c6: 0x1a9f, 0x13c7: 0x1aa8, 0x13c8: 0x1aae, 0x13c9: 0x1ab4, 0x13ca: 0x1aba,
+ 0x13d0: 0x1c1f, 0x13d1: 0x1c23,
+ 0x13d2: 0x1c27, 0x13d3: 0x1c2b, 0x13d4: 0x1c2f, 0x13d5: 0x1c33, 0x13d6: 0x1c37, 0x13d7: 0x1c3b,
+ 0x13d8: 0x1c3f, 0x13d9: 0x1c43, 0x13da: 0x1c47, 0x13db: 0x1c4b, 0x13dc: 0x1c4f, 0x13dd: 0x1c53,
+ 0x13de: 0x1c57, 0x13df: 0x1c5b, 0x13e0: 0x1c5f, 0x13e1: 0x1c63, 0x13e2: 0x1c67, 0x13e3: 0x1c6b,
+ 0x13e4: 0x1c6f, 0x13e5: 0x1c73, 0x13e6: 0x1c77, 0x13e7: 0x1c7b, 0x13e8: 0x1c7f, 0x13e9: 0x1c83,
+ 0x13ea: 0x291a, 0x13eb: 0x0047, 0x13ec: 0x0065, 0x13ed: 0x1acf, 0x13ee: 0x1b44,
+ 0x13f0: 0x0043, 0x13f1: 0x0045, 0x13f2: 0x0047, 0x13f3: 0x0049, 0x13f4: 0x004b, 0x13f5: 0x004d,
+ 0x13f6: 0x004f, 0x13f7: 0x0051, 0x13f8: 0x0053, 0x13f9: 0x0055, 0x13fa: 0x0057, 0x13fb: 0x0059,
+ 0x13fc: 0x005b, 0x13fd: 0x005d, 0x13fe: 0x005f, 0x13ff: 0x0061,
+ // Block 0x50, offset 0x1400
+ 0x1400: 0x28a9, 0x1401: 0x28be, 0x1402: 0x057b,
+ 0x1410: 0x0c87, 0x1411: 0x0abf,
+ 0x1412: 0x094b, 0x1413: 0x45ff, 0x1414: 0x0793, 0x1415: 0x0a67, 0x1416: 0x13a7, 0x1417: 0x0a77,
+ 0x1418: 0x079f, 0x1419: 0x0d4f, 0x141a: 0x0f27, 0x141b: 0x0d27, 0x141c: 0x089f, 0x141d: 0x0be3,
+ 0x141e: 0x0837, 0x141f: 0x0d2f, 0x1420: 0x088b, 0x1421: 0x118f, 0x1422: 0x0ffb, 0x1423: 0x1403,
+ 0x1424: 0x0a4b, 0x1425: 0x0983, 0x1426: 0x0edb, 0x1427: 0x0c93, 0x1428: 0x0cbf, 0x1429: 0x0737,
+ 0x142a: 0x0743, 0x142b: 0x1483, 0x142c: 0x0b53, 0x142d: 0x075f, 0x142e: 0x0967, 0x142f: 0x0cb3,
+ 0x1430: 0x142b, 0x1431: 0x0c8b, 0x1432: 0x10e7, 0x1433: 0x1123, 0x1434: 0x096f, 0x1435: 0x0ebb,
+ 0x1436: 0x0d83, 0x1437: 0x0d7f, 0x1438: 0x100f, 0x1439: 0x08a3, 0x143a: 0x09cf,
+ // Block 0x51, offset 0x1440
+ 0x1440: 0x0773, 0x1441: 0x076b, 0x1442: 0x077b, 0x1443: 0x16bb, 0x1444: 0x07bf, 0x1445: 0x07cf,
+ 0x1446: 0x07d3, 0x1447: 0x07db, 0x1448: 0x07e3, 0x1449: 0x07e7, 0x144a: 0x07f3, 0x144b: 0x07eb,
+ 0x144c: 0x062b, 0x144d: 0x16cf, 0x144e: 0x0807, 0x144f: 0x080b, 0x1450: 0x080f, 0x1451: 0x082b,
+ 0x1452: 0x16c0, 0x1453: 0x062f, 0x1454: 0x0817, 0x1455: 0x0837, 0x1456: 0x16ca, 0x1457: 0x0847,
+ 0x1458: 0x084f, 0x1459: 0x07af, 0x145a: 0x0857, 0x145b: 0x085b, 0x145c: 0x18a5, 0x145d: 0x0877,
+ 0x145e: 0x087f, 0x145f: 0x0637, 0x1460: 0x0897, 0x1461: 0x089b, 0x1462: 0x08a3, 0x1463: 0x08a7,
+ 0x1464: 0x063b, 0x1465: 0x08bf, 0x1466: 0x08c3, 0x1467: 0x08cf, 0x1468: 0x08db, 0x1469: 0x08df,
+ 0x146a: 0x08e3, 0x146b: 0x08eb, 0x146c: 0x090b, 0x146d: 0x090f, 0x146e: 0x0917, 0x146f: 0x0927,
+ 0x1470: 0x092f, 0x1471: 0x0933, 0x1472: 0x0933, 0x1473: 0x0933, 0x1474: 0x16de, 0x1475: 0x0f0b,
+ 0x1476: 0x0947, 0x1477: 0x094f, 0x1478: 0x16e3, 0x1479: 0x095b, 0x147a: 0x0963, 0x147b: 0x096b,
+ 0x147c: 0x0993, 0x147d: 0x097f, 0x147e: 0x098b, 0x147f: 0x098f,
+ // Block 0x52, offset 0x1480
+ 0x1480: 0x0997, 0x1481: 0x099f, 0x1482: 0x09a3, 0x1483: 0x09ab, 0x1484: 0x09b3, 0x1485: 0x09b7,
+ 0x1486: 0x09b7, 0x1487: 0x09bf, 0x1488: 0x09c7, 0x1489: 0x09cb, 0x148a: 0x09d7, 0x148b: 0x09fb,
+ 0x148c: 0x09df, 0x148d: 0x09ff, 0x148e: 0x09e3, 0x148f: 0x09eb, 0x1490: 0x0883, 0x1491: 0x0a47,
+ 0x1492: 0x0a0f, 0x1493: 0x0a13, 0x1494: 0x0a17, 0x1495: 0x0a0b, 0x1496: 0x0a1f, 0x1497: 0x0a1b,
+ 0x1498: 0x0a33, 0x1499: 0x16e8, 0x149a: 0x0a4f, 0x149b: 0x0a53, 0x149c: 0x0a5b, 0x149d: 0x0a67,
+ 0x149e: 0x0a6f, 0x149f: 0x0a8b, 0x14a0: 0x16ed, 0x14a1: 0x16f2, 0x14a2: 0x0a97, 0x14a3: 0x0a9b,
+ 0x14a4: 0x0a9f, 0x14a5: 0x0a93, 0x14a6: 0x0aa7, 0x14a7: 0x063f, 0x14a8: 0x0643, 0x14a9: 0x0aaf,
+ 0x14aa: 0x0ab7, 0x14ab: 0x0ab7, 0x14ac: 0x16f7, 0x14ad: 0x0ad3, 0x14ae: 0x0ad7, 0x14af: 0x0adb,
+ 0x14b0: 0x0ae3, 0x14b1: 0x16fc, 0x14b2: 0x0aeb, 0x14b3: 0x0aef, 0x14b4: 0x0bc7, 0x14b5: 0x0af7,
+ 0x14b6: 0x0647, 0x14b7: 0x0b03, 0x14b8: 0x0b13, 0x14b9: 0x0b1f, 0x14ba: 0x0b1b, 0x14bb: 0x1706,
+ 0x14bc: 0x0b27, 0x14bd: 0x170b, 0x14be: 0x0b33, 0x14bf: 0x0b2f,
+ // Block 0x53, offset 0x14c0
+ 0x14c0: 0x0b37, 0x14c1: 0x0b47, 0x14c2: 0x0b4b, 0x14c3: 0x064b, 0x14c4: 0x0b5b, 0x14c5: 0x0b63,
+ 0x14c6: 0x0b67, 0x14c7: 0x0b6b, 0x14c8: 0x064f, 0x14c9: 0x1710, 0x14ca: 0x0653, 0x14cb: 0x0b87,
+ 0x14cc: 0x0b8b, 0x14cd: 0x0b8f, 0x14ce: 0x0b97, 0x14cf: 0x18d7, 0x14d0: 0x0baf, 0x14d1: 0x171a,
+ 0x14d2: 0x171a, 0x14d3: 0x124f, 0x14d4: 0x0bbf, 0x14d5: 0x0bbf, 0x14d6: 0x0657, 0x14d7: 0x173d,
+ 0x14d8: 0x180f, 0x14d9: 0x0bcf, 0x14da: 0x0bd7, 0x14db: 0x065b, 0x14dc: 0x0beb, 0x14dd: 0x0bfb,
+ 0x14de: 0x0bff, 0x14df: 0x0c07, 0x14e0: 0x0c17, 0x14e1: 0x0663, 0x14e2: 0x065f, 0x14e3: 0x0c1b,
+ 0x14e4: 0x171f, 0x14e5: 0x0c1f, 0x14e6: 0x0c33, 0x14e7: 0x0c37, 0x14e8: 0x0c3b, 0x14e9: 0x0c37,
+ 0x14ea: 0x0c47, 0x14eb: 0x0c4b, 0x14ec: 0x0c5b, 0x14ed: 0x0c53, 0x14ee: 0x0c57, 0x14ef: 0x0c5f,
+ 0x14f0: 0x0c63, 0x14f1: 0x0c67, 0x14f2: 0x0c73, 0x14f3: 0x0c77, 0x14f4: 0x0c8f, 0x14f5: 0x0c97,
+ 0x14f6: 0x0ca7, 0x14f7: 0x0cbb, 0x14f8: 0x172e, 0x14f9: 0x0cb7, 0x14fa: 0x0cab, 0x14fb: 0x0cc3,
+ 0x14fc: 0x0ccb, 0x14fd: 0x0cdf, 0x14fe: 0x1733, 0x14ff: 0x0ce7,
+ // Block 0x54, offset 0x1500
+ 0x1500: 0x0cdb, 0x1501: 0x0cd3, 0x1502: 0x0667, 0x1503: 0x0cef, 0x1504: 0x0cf7, 0x1505: 0x0cff,
+ 0x1506: 0x0cf3, 0x1507: 0x066b, 0x1508: 0x0d0f, 0x1509: 0x0d17, 0x150a: 0x1738, 0x150b: 0x0d43,
+ 0x150c: 0x0d77, 0x150d: 0x0d53, 0x150e: 0x0677, 0x150f: 0x0d5f, 0x1510: 0x0673, 0x1511: 0x066f,
+ 0x1512: 0x083b, 0x1513: 0x083f, 0x1514: 0x0d7b, 0x1515: 0x0d63, 0x1516: 0x1223, 0x1517: 0x06db,
+ 0x1518: 0x0d87, 0x1519: 0x0d8b, 0x151a: 0x0d8f, 0x151b: 0x0da3, 0x151c: 0x0d9b, 0x151d: 0x1751,
+ 0x151e: 0x067b, 0x151f: 0x0db7, 0x1520: 0x0dab, 0x1521: 0x0dc7, 0x1522: 0x0dcf, 0x1523: 0x175b,
+ 0x1524: 0x0dd3, 0x1525: 0x0dbf, 0x1526: 0x0ddb, 0x1527: 0x067f, 0x1528: 0x0ddf, 0x1529: 0x0de3,
+ 0x152a: 0x0de7, 0x152b: 0x0df3, 0x152c: 0x1760, 0x152d: 0x0dfb, 0x152e: 0x0683, 0x152f: 0x0e07,
+ 0x1530: 0x1765, 0x1531: 0x0e0b, 0x1532: 0x0687, 0x1533: 0x0e17, 0x1534: 0x0e23, 0x1535: 0x0e2f,
+ 0x1536: 0x0e33, 0x1537: 0x176a, 0x1538: 0x1701, 0x1539: 0x176f, 0x153a: 0x0e53, 0x153b: 0x1774,
+ 0x153c: 0x0e5f, 0x153d: 0x0e67, 0x153e: 0x0e57, 0x153f: 0x0e73,
+ // Block 0x55, offset 0x1540
+ 0x1540: 0x0e83, 0x1541: 0x0e93, 0x1542: 0x0e87, 0x1543: 0x0e8b, 0x1544: 0x0e97, 0x1545: 0x0e9b,
+ 0x1546: 0x1779, 0x1547: 0x0e7f, 0x1548: 0x0eb3, 0x1549: 0x0eb7, 0x154a: 0x068b, 0x154b: 0x0ecb,
+ 0x154c: 0x0ec7, 0x154d: 0x177e, 0x154e: 0x0eab, 0x154f: 0x0ee7, 0x1550: 0x1783, 0x1551: 0x1788,
+ 0x1552: 0x0eeb, 0x1553: 0x0eff, 0x1554: 0x0efb, 0x1555: 0x0ef7, 0x1556: 0x068f, 0x1557: 0x0f03,
+ 0x1558: 0x0f13, 0x1559: 0x0f0f, 0x155a: 0x0f1b, 0x155b: 0x16c5, 0x155c: 0x0f2b, 0x155d: 0x178d,
+ 0x155e: 0x0f37, 0x155f: 0x1797, 0x1560: 0x0f4b, 0x1561: 0x0f57, 0x1562: 0x0f6b, 0x1563: 0x179c,
+ 0x1564: 0x0f7f, 0x1565: 0x0f83, 0x1566: 0x17a1, 0x1567: 0x17a6, 0x1568: 0x0f9f, 0x1569: 0x0faf,
+ 0x156a: 0x0693, 0x156b: 0x0fb3, 0x156c: 0x0697, 0x156d: 0x0697, 0x156e: 0x0fcb, 0x156f: 0x0fcf,
+ 0x1570: 0x0fd7, 0x1571: 0x0fdb, 0x1572: 0x0fe7, 0x1573: 0x069b, 0x1574: 0x0fff, 0x1575: 0x17ab,
+ 0x1576: 0x101b, 0x1577: 0x17b0, 0x1578: 0x1027, 0x1579: 0x1715, 0x157a: 0x1037, 0x157b: 0x17b5,
+ 0x157c: 0x17ba, 0x157d: 0x17bf, 0x157e: 0x069f, 0x157f: 0x06a3,
+ // Block 0x56, offset 0x1580
+ 0x1580: 0x106f, 0x1581: 0x17c9, 0x1582: 0x17c4, 0x1583: 0x17ce, 0x1584: 0x17d3, 0x1585: 0x1077,
+ 0x1586: 0x107b, 0x1587: 0x107b, 0x1588: 0x1083, 0x1589: 0x06ab, 0x158a: 0x1087, 0x158b: 0x06af,
+ 0x158c: 0x06b3, 0x158d: 0x17dd, 0x158e: 0x109b, 0x158f: 0x10a3, 0x1590: 0x10af, 0x1591: 0x06b7,
+ 0x1592: 0x17e2, 0x1593: 0x10d3, 0x1594: 0x17e7, 0x1595: 0x17ec, 0x1596: 0x10f3, 0x1597: 0x110b,
+ 0x1598: 0x06bb, 0x1599: 0x1113, 0x159a: 0x1117, 0x159b: 0x111b, 0x159c: 0x17f1, 0x159d: 0x17f6,
+ 0x159e: 0x17f6, 0x159f: 0x1133, 0x15a0: 0x06bf, 0x15a1: 0x17fb, 0x15a2: 0x1147, 0x15a3: 0x114b,
+ 0x15a4: 0x06c3, 0x15a5: 0x1800, 0x15a6: 0x1167, 0x15a7: 0x06c7, 0x15a8: 0x1177, 0x15a9: 0x116f,
+ 0x15aa: 0x117f, 0x15ab: 0x180a, 0x15ac: 0x1197, 0x15ad: 0x06cb, 0x15ae: 0x11a3, 0x15af: 0x11ab,
+ 0x15b0: 0x11bb, 0x15b1: 0x06cf, 0x15b2: 0x1814, 0x15b3: 0x1819, 0x15b4: 0x06d3, 0x15b5: 0x181e,
+ 0x15b6: 0x11d3, 0x15b7: 0x1823, 0x15b8: 0x11df, 0x15b9: 0x11eb, 0x15ba: 0x11f3, 0x15bb: 0x1828,
+ 0x15bc: 0x182d, 0x15bd: 0x1207, 0x15be: 0x1832, 0x15bf: 0x120f,
+ // Block 0x57, offset 0x15c0
+ 0x15c0: 0x1742, 0x15c1: 0x06d7, 0x15c2: 0x1227, 0x15c3: 0x122b, 0x15c4: 0x06df, 0x15c5: 0x122f,
+ 0x15c6: 0x0aab, 0x15c7: 0x1837, 0x15c8: 0x183c, 0x15c9: 0x1747, 0x15ca: 0x174c, 0x15cb: 0x124f,
+ 0x15cc: 0x1253, 0x15cd: 0x146b, 0x15ce: 0x06e3, 0x15cf: 0x127f, 0x15d0: 0x127b, 0x15d1: 0x1283,
+ 0x15d2: 0x08b7, 0x15d3: 0x1287, 0x15d4: 0x128b, 0x15d5: 0x128f, 0x15d6: 0x1297, 0x15d7: 0x1841,
+ 0x15d8: 0x1293, 0x15d9: 0x129b, 0x15da: 0x12af, 0x15db: 0x12b3, 0x15dc: 0x129f, 0x15dd: 0x12b7,
+ 0x15de: 0x12cb, 0x15df: 0x12df, 0x15e0: 0x12ab, 0x15e1: 0x12bf, 0x15e2: 0x12c3, 0x15e3: 0x12c7,
+ 0x15e4: 0x1846, 0x15e5: 0x1850, 0x15e6: 0x184b, 0x15e7: 0x06e7, 0x15e8: 0x12e7, 0x15e9: 0x12eb,
+ 0x15ea: 0x12f3, 0x15eb: 0x1864, 0x15ec: 0x12f7, 0x15ed: 0x1855, 0x15ee: 0x06eb, 0x15ef: 0x06ef,
+ 0x15f0: 0x185a, 0x15f1: 0x185f, 0x15f2: 0x06f3, 0x15f3: 0x1317, 0x15f4: 0x131b, 0x15f5: 0x131f,
+ 0x15f6: 0x1323, 0x15f7: 0x132f, 0x15f8: 0x132b, 0x15f9: 0x1337, 0x15fa: 0x1333, 0x15fb: 0x1343,
+ 0x15fc: 0x133b, 0x15fd: 0x133f, 0x15fe: 0x1347, 0x15ff: 0x06f7,
+ // Block 0x58, offset 0x1600
+ 0x1600: 0x134f, 0x1601: 0x1353, 0x1602: 0x06fb, 0x1603: 0x1363, 0x1604: 0x1367, 0x1605: 0x1869,
+ 0x1606: 0x1373, 0x1607: 0x1377, 0x1608: 0x06ff, 0x1609: 0x1383, 0x160a: 0x0633, 0x160b: 0x186e,
+ 0x160c: 0x1873, 0x160d: 0x0703, 0x160e: 0x0707, 0x160f: 0x13af, 0x1610: 0x13c7, 0x1611: 0x13e3,
+ 0x1612: 0x13f3, 0x1613: 0x1878, 0x1614: 0x1407, 0x1615: 0x140b, 0x1616: 0x1423, 0x1617: 0x142f,
+ 0x1618: 0x1882, 0x1619: 0x16d4, 0x161a: 0x143b, 0x161b: 0x1437, 0x161c: 0x1443, 0x161d: 0x16d9,
+ 0x161e: 0x144f, 0x161f: 0x145b, 0x1620: 0x1887, 0x1621: 0x188c, 0x1622: 0x149b, 0x1623: 0x14a7,
+ 0x1624: 0x14af, 0x1625: 0x1891, 0x1626: 0x14b3, 0x1627: 0x14db, 0x1628: 0x14e7, 0x1629: 0x14eb,
+ 0x162a: 0x14e3, 0x162b: 0x14f7, 0x162c: 0x14fb, 0x162d: 0x1896, 0x162e: 0x1507, 0x162f: 0x070b,
+ 0x1630: 0x150f, 0x1631: 0x189b, 0x1632: 0x070f, 0x1633: 0x1547, 0x1634: 0x0b3b, 0x1635: 0x155f,
+ 0x1636: 0x18a0, 0x1637: 0x18aa, 0x1638: 0x0713, 0x1639: 0x0717, 0x163a: 0x1587, 0x163b: 0x18af,
+ 0x163c: 0x071b, 0x163d: 0x18b4, 0x163e: 0x159f, 0x163f: 0x159f,
+ // Block 0x59, offset 0x1640
+ 0x1640: 0x15a7, 0x1641: 0x18b9, 0x1642: 0x15bf, 0x1643: 0x071f, 0x1644: 0x15cf, 0x1645: 0x15db,
+ 0x1646: 0x15e3, 0x1647: 0x15eb, 0x1648: 0x0723, 0x1649: 0x18be, 0x164a: 0x15ff, 0x164b: 0x161b,
+ 0x164c: 0x1627, 0x164d: 0x0727, 0x164e: 0x072b, 0x164f: 0x162b, 0x1650: 0x18c3, 0x1651: 0x072f,
+ 0x1652: 0x18c8, 0x1653: 0x18cd, 0x1654: 0x18d2, 0x1655: 0x164f, 0x1656: 0x0733, 0x1657: 0x1663,
+ 0x1658: 0x166b, 0x1659: 0x166f, 0x165a: 0x1677, 0x165b: 0x167f, 0x165c: 0x1687, 0x165d: 0x18dc,
+}
+
+// nfkcIndex: 22 blocks, 1408 entries, 1408 bytes
+// Block 0 is the zero block.
+var nfkcIndex = [1408]uint8{
+ // Block 0x0, offset 0x0
+ // Block 0x1, offset 0x40
+ // Block 0x2, offset 0x80
+ // Block 0x3, offset 0xc0
+ 0xc2: 0x58, 0xc3: 0x01, 0xc4: 0x02, 0xc5: 0x03, 0xc6: 0x59, 0xc7: 0x04,
+ 0xc8: 0x05, 0xca: 0x5a, 0xcb: 0x5b, 0xcc: 0x06, 0xcd: 0x07, 0xce: 0x08, 0xcf: 0x09,
+ 0xd0: 0x0a, 0xd1: 0x5c, 0xd2: 0x5d, 0xd3: 0x0b, 0xd6: 0x0c, 0xd7: 0x5e,
+ 0xd8: 0x5f, 0xd9: 0x0d, 0xdb: 0x60, 0xdc: 0x61, 0xdd: 0x62, 0xdf: 0x63,
+ 0xe0: 0x02, 0xe1: 0x03, 0xe2: 0x04, 0xe3: 0x05,
+ 0xea: 0x06, 0xeb: 0x07, 0xec: 0x08, 0xed: 0x09, 0xef: 0x0a,
+ 0xf0: 0x13,
+ // Block 0x4, offset 0x100
+ 0x120: 0x64, 0x121: 0x65, 0x123: 0x66, 0x124: 0x67, 0x125: 0x68, 0x126: 0x69, 0x127: 0x6a,
+ 0x128: 0x6b, 0x129: 0x6c, 0x12a: 0x6d, 0x12b: 0x6e, 0x12c: 0x69, 0x12d: 0x6f, 0x12e: 0x70, 0x12f: 0x71,
+ 0x131: 0x72, 0x132: 0x73, 0x133: 0x74, 0x134: 0x75, 0x135: 0x76, 0x137: 0x77,
+ 0x138: 0x78, 0x139: 0x79, 0x13a: 0x7a, 0x13b: 0x7b, 0x13c: 0x7c, 0x13d: 0x7d, 0x13e: 0x7e, 0x13f: 0x7f,
+ // Block 0x5, offset 0x140
+ 0x140: 0x80, 0x142: 0x81, 0x143: 0x82, 0x144: 0x83, 0x145: 0x84, 0x146: 0x85, 0x147: 0x86,
+ 0x14d: 0x87,
+ 0x15c: 0x88, 0x15f: 0x89,
+ 0x162: 0x8a, 0x164: 0x8b,
+ 0x168: 0x8c, 0x169: 0x8d, 0x16a: 0x8e, 0x16c: 0x0e, 0x16d: 0x8f, 0x16e: 0x90, 0x16f: 0x91,
+ 0x170: 0x92, 0x173: 0x93, 0x174: 0x94, 0x175: 0x0f, 0x176: 0x10, 0x177: 0x95,
+ 0x178: 0x11, 0x179: 0x12, 0x17a: 0x13, 0x17b: 0x14, 0x17c: 0x15, 0x17d: 0x16, 0x17e: 0x17, 0x17f: 0x18,
+ // Block 0x6, offset 0x180
+ 0x180: 0x96, 0x181: 0x97, 0x182: 0x98, 0x183: 0x99, 0x184: 0x19, 0x185: 0x1a, 0x186: 0x9a, 0x187: 0x9b,
+ 0x188: 0x9c, 0x189: 0x1b, 0x18a: 0x1c, 0x18b: 0x9d, 0x18c: 0x9e,
+ 0x191: 0x1d, 0x192: 0x1e, 0x193: 0x9f,
+ 0x1a8: 0xa0, 0x1a9: 0xa1, 0x1ab: 0xa2,
+ 0x1b1: 0xa3, 0x1b3: 0xa4, 0x1b5: 0xa5, 0x1b7: 0xa6,
+ 0x1ba: 0xa7, 0x1bb: 0xa8, 0x1bc: 0x1f, 0x1bd: 0x20, 0x1be: 0x21, 0x1bf: 0xa9,
+ // Block 0x7, offset 0x1c0
+ 0x1c0: 0xaa, 0x1c1: 0x22, 0x1c2: 0x23, 0x1c3: 0x24, 0x1c4: 0xab, 0x1c5: 0xac, 0x1c6: 0x25,
+ 0x1c8: 0x26, 0x1c9: 0x27, 0x1ca: 0x28, 0x1cb: 0x29, 0x1cc: 0x2a, 0x1cd: 0x2b, 0x1ce: 0x2c, 0x1cf: 0x2d,
+ // Block 0x8, offset 0x200
+ 0x219: 0xad, 0x21a: 0xae, 0x21b: 0xaf, 0x21d: 0xb0, 0x21f: 0xb1,
+ 0x220: 0xb2, 0x223: 0xb3, 0x224: 0xb4, 0x225: 0xb5, 0x226: 0xb6, 0x227: 0xb7,
+ 0x22a: 0xb8, 0x22b: 0xb9, 0x22d: 0xba, 0x22f: 0xbb,
+ 0x230: 0xbc, 0x231: 0xbd, 0x232: 0xbe, 0x233: 0xbf, 0x234: 0xc0, 0x235: 0xc1, 0x236: 0xc2, 0x237: 0xbc,
+ 0x238: 0xbd, 0x239: 0xbe, 0x23a: 0xbf, 0x23b: 0xc0, 0x23c: 0xc1, 0x23d: 0xc2, 0x23e: 0xbc, 0x23f: 0xbd,
+ // Block 0x9, offset 0x240
+ 0x240: 0xbe, 0x241: 0xbf, 0x242: 0xc0, 0x243: 0xc1, 0x244: 0xc2, 0x245: 0xbc, 0x246: 0xbd, 0x247: 0xbe,
+ 0x248: 0xbf, 0x249: 0xc0, 0x24a: 0xc1, 0x24b: 0xc2, 0x24c: 0xbc, 0x24d: 0xbd, 0x24e: 0xbe, 0x24f: 0xbf,
+ 0x250: 0xc0, 0x251: 0xc1, 0x252: 0xc2, 0x253: 0xbc, 0x254: 0xbd, 0x255: 0xbe, 0x256: 0xbf, 0x257: 0xc0,
+ 0x258: 0xc1, 0x259: 0xc2, 0x25a: 0xbc, 0x25b: 0xbd, 0x25c: 0xbe, 0x25d: 0xbf, 0x25e: 0xc0, 0x25f: 0xc1,
+ 0x260: 0xc2, 0x261: 0xbc, 0x262: 0xbd, 0x263: 0xbe, 0x264: 0xbf, 0x265: 0xc0, 0x266: 0xc1, 0x267: 0xc2,
+ 0x268: 0xbc, 0x269: 0xbd, 0x26a: 0xbe, 0x26b: 0xbf, 0x26c: 0xc0, 0x26d: 0xc1, 0x26e: 0xc2, 0x26f: 0xbc,
+ 0x270: 0xbd, 0x271: 0xbe, 0x272: 0xbf, 0x273: 0xc0, 0x274: 0xc1, 0x275: 0xc2, 0x276: 0xbc, 0x277: 0xbd,
+ 0x278: 0xbe, 0x279: 0xbf, 0x27a: 0xc0, 0x27b: 0xc1, 0x27c: 0xc2, 0x27d: 0xbc, 0x27e: 0xbd, 0x27f: 0xbe,
+ // Block 0xa, offset 0x280
+ 0x280: 0xbf, 0x281: 0xc0, 0x282: 0xc1, 0x283: 0xc2, 0x284: 0xbc, 0x285: 0xbd, 0x286: 0xbe, 0x287: 0xbf,
+ 0x288: 0xc0, 0x289: 0xc1, 0x28a: 0xc2, 0x28b: 0xbc, 0x28c: 0xbd, 0x28d: 0xbe, 0x28e: 0xbf, 0x28f: 0xc0,
+ 0x290: 0xc1, 0x291: 0xc2, 0x292: 0xbc, 0x293: 0xbd, 0x294: 0xbe, 0x295: 0xbf, 0x296: 0xc0, 0x297: 0xc1,
+ 0x298: 0xc2, 0x299: 0xbc, 0x29a: 0xbd, 0x29b: 0xbe, 0x29c: 0xbf, 0x29d: 0xc0, 0x29e: 0xc1, 0x29f: 0xc2,
+ 0x2a0: 0xbc, 0x2a1: 0xbd, 0x2a2: 0xbe, 0x2a3: 0xbf, 0x2a4: 0xc0, 0x2a5: 0xc1, 0x2a6: 0xc2, 0x2a7: 0xbc,
+ 0x2a8: 0xbd, 0x2a9: 0xbe, 0x2aa: 0xbf, 0x2ab: 0xc0, 0x2ac: 0xc1, 0x2ad: 0xc2, 0x2ae: 0xbc, 0x2af: 0xbd,
+ 0x2b0: 0xbe, 0x2b1: 0xbf, 0x2b2: 0xc0, 0x2b3: 0xc1, 0x2b4: 0xc2, 0x2b5: 0xbc, 0x2b6: 0xbd, 0x2b7: 0xbe,
+ 0x2b8: 0xbf, 0x2b9: 0xc0, 0x2ba: 0xc1, 0x2bb: 0xc2, 0x2bc: 0xbc, 0x2bd: 0xbd, 0x2be: 0xbe, 0x2bf: 0xbf,
+ // Block 0xb, offset 0x2c0
+ 0x2c0: 0xc0, 0x2c1: 0xc1, 0x2c2: 0xc2, 0x2c3: 0xbc, 0x2c4: 0xbd, 0x2c5: 0xbe, 0x2c6: 0xbf, 0x2c7: 0xc0,
+ 0x2c8: 0xc1, 0x2c9: 0xc2, 0x2ca: 0xbc, 0x2cb: 0xbd, 0x2cc: 0xbe, 0x2cd: 0xbf, 0x2ce: 0xc0, 0x2cf: 0xc1,
+ 0x2d0: 0xc2, 0x2d1: 0xbc, 0x2d2: 0xbd, 0x2d3: 0xbe, 0x2d4: 0xbf, 0x2d5: 0xc0, 0x2d6: 0xc1, 0x2d7: 0xc2,
+ 0x2d8: 0xbc, 0x2d9: 0xbd, 0x2da: 0xbe, 0x2db: 0xbf, 0x2dc: 0xc0, 0x2dd: 0xc1, 0x2de: 0xc3,
+ // Block 0xc, offset 0x300
+ 0x324: 0x2e, 0x325: 0x2f, 0x326: 0x30, 0x327: 0x31,
+ 0x328: 0x32, 0x329: 0x33, 0x32a: 0x34, 0x32b: 0x35, 0x32c: 0x36, 0x32d: 0x37, 0x32e: 0x38, 0x32f: 0x39,
+ 0x330: 0x3a, 0x331: 0x3b, 0x332: 0x3c, 0x333: 0x3d, 0x334: 0x3e, 0x335: 0x3f, 0x336: 0x40, 0x337: 0x41,
+ 0x338: 0x42, 0x339: 0x43, 0x33a: 0x44, 0x33b: 0x45, 0x33c: 0xc4, 0x33d: 0x46, 0x33e: 0x47, 0x33f: 0xc5,
+ // Block 0xd, offset 0x340
+ 0x347: 0xc6,
+ 0x34b: 0xc7, 0x34d: 0xc8,
+ 0x368: 0xc9, 0x36b: 0xca,
+ // Block 0xe, offset 0x380
+ 0x381: 0xcb, 0x382: 0xcc, 0x384: 0xcd, 0x385: 0xb6, 0x387: 0xb7,
+ 0x388: 0xce, 0x38b: 0xcf, 0x38c: 0x69, 0x38d: 0xd0,
+ 0x392: 0xd1, 0x393: 0xd2, 0x396: 0xd3, 0x397: 0xd4,
+ 0x398: 0xd5, 0x39a: 0xd6,
+ // Block 0xf, offset 0x3c0
+ 0x3eb: 0xd7, 0x3ec: 0xd8,
+ // Block 0x10, offset 0x400
+ 0x432: 0xd9,
+ // Block 0x11, offset 0x440
+ 0x445: 0xda, 0x446: 0xdb, 0x447: 0xdc,
+ 0x449: 0xdd,
+ 0x450: 0xde, 0x451: 0xdf, 0x452: 0xe0, 0x453: 0xe1, 0x454: 0xe2, 0x455: 0xe3, 0x456: 0xe4, 0x457: 0xe5,
+ 0x458: 0xe6, 0x459: 0xe7, 0x45a: 0x48, 0x45b: 0xe8, 0x45c: 0xe9, 0x45d: 0xea, 0x45e: 0xeb, 0x45f: 0x49,
+ // Block 0x12, offset 0x480
+ 0x4a3: 0xec,
+ 0x4b8: 0x4a, 0x4b9: 0x4b, 0x4ba: 0x4c,
+ // Block 0x13, offset 0x4c0
+ 0x4c4: 0x4d, 0x4c5: 0xed, 0x4c6: 0xee,
+ 0x4c8: 0x4e, 0x4c9: 0xef,
+ // Block 0x14, offset 0x500
+ 0x520: 0x4f, 0x521: 0x50, 0x522: 0x51, 0x523: 0x52, 0x524: 0x53, 0x525: 0x54, 0x526: 0x55, 0x527: 0x56,
+ 0x528: 0x57,
+ // Block 0x15, offset 0x540
+ 0x550: 0x0b, 0x551: 0x0c, 0x556: 0x0d,
+ 0x55b: 0x0e, 0x55d: 0x0f, 0x55e: 0x10, 0x55f: 0x11,
+ 0x56f: 0x12,
+}
+
+// nfkcSparseOffset: 152 entries, 304 bytes
+var nfkcSparseOffset = []uint16{0x0, 0xe, 0x12, 0x1b, 0x25, 0x35, 0x37, 0x3c, 0x47, 0x56, 0x63, 0x6b, 0x6f, 0x74, 0x76, 0x85, 0x8d, 0x94, 0x97, 0x9f, 0xa3, 0xa7, 0xa9, 0xab, 0xb4, 0xb8, 0xbf, 0xc4, 0xc7, 0xd1, 0xd3, 0xda, 0xe2, 0xe6, 0xe8, 0xeb, 0xef, 0xf5, 0x106, 0x112, 0x114, 0x11a, 0x11c, 0x11e, 0x120, 0x122, 0x124, 0x126, 0x128, 0x12b, 0x12e, 0x130, 0x133, 0x136, 0x13a, 0x13f, 0x148, 0x14a, 0x14d, 0x14f, 0x15a, 0x165, 0x174, 0x182, 0x190, 0x1a0, 0x1ae, 0x1b5, 0x1bb, 0x1ca, 0x1ce, 0x1d0, 0x1d4, 0x1d6, 0x1d9, 0x1db, 0x1de, 0x1e0, 0x1e3, 0x1e5, 0x1e7, 0x1e9, 0x1f5, 0x1ff, 0x206, 0x213, 0x216, 0x219, 0x21b, 0x21d, 0x21f, 0x221, 0x224, 0x226, 0x228, 0x22a, 0x22c, 0x232, 0x235, 0x239, 0x23b, 0x242, 0x248, 0x24e, 0x256, 0x25c, 0x262, 0x268, 0x26c, 0x26e, 0x27d, 0x27f, 0x281, 0x283, 0x289, 0x28c, 0x28f, 0x297, 0x29e, 0x2a1, 0x2a3, 0x2ab, 0x2b2, 0x2b5, 0x2bb, 0x2bd, 0x2bf, 0x2c2, 0x2c4, 0x2c6, 0x2c8, 0x2d5, 0x2df, 0x2e1, 0x2e3, 0x2e7, 0x2ec, 0x2f8, 0x2fd, 0x306, 0x30c, 0x311, 0x315, 0x31a, 0x31e, 0x32e, 0x33c, 0x34a, 0x358, 0x35a, 0x364, 0x366}
+
+// nfkcSparseValues: 880 entries, 3520 bytes
+var nfkcSparseValues = [880]valueRange{
+ // Block 0x0, offset 0x0
+ {value: 0x0002, lo: 0x0d},
+ {value: 0x0001, lo: 0xa0, hi: 0xa0},
+ {value: 0x42b3, lo: 0xa8, hi: 0xa8},
+ {value: 0x0083, lo: 0xaa, hi: 0xaa},
+ {value: 0x429f, lo: 0xaf, hi: 0xaf},
+ {value: 0x0025, lo: 0xb2, hi: 0xb3},
+ {value: 0x4295, lo: 0xb4, hi: 0xb4},
+ {value: 0x01dc, lo: 0xb5, hi: 0xb5},
+ {value: 0x42cc, lo: 0xb8, hi: 0xb8},
+ {value: 0x0023, lo: 0xb9, hi: 0xb9},
+ {value: 0x009f, lo: 0xba, hi: 0xba},
+ {value: 0x23af, lo: 0xbc, hi: 0xbc},
+ {value: 0x23a3, lo: 0xbd, hi: 0xbd},
+ {value: 0x2445, lo: 0xbe, hi: 0xbe},
+ // Block 0x1, offset 0xe
+ {value: 0x0091, lo: 0x03},
+ {value: 0x471d, lo: 0xa0, hi: 0xa1},
+ {value: 0x474f, lo: 0xaf, hi: 0xb0},
+ {value: 0xa000, lo: 0xb7, hi: 0xb7},
+ // Block 0x2, offset 0x12
+ {value: 0x0003, lo: 0x08},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0x0091, lo: 0xb0, hi: 0xb0},
+ {value: 0x0119, lo: 0xb1, hi: 0xb1},
+ {value: 0x0095, lo: 0xb2, hi: 0xb2},
+ {value: 0x00a5, lo: 0xb3, hi: 0xb3},
+ {value: 0x0143, lo: 0xb4, hi: 0xb6},
+ {value: 0x00af, lo: 0xb7, hi: 0xb7},
+ {value: 0x00b3, lo: 0xb8, hi: 0xb8},
+ // Block 0x3, offset 0x1b
+ {value: 0x000a, lo: 0x09},
+ {value: 0x42a9, lo: 0x98, hi: 0x98},
+ {value: 0x42ae, lo: 0x99, hi: 0x9a},
+ {value: 0x42d1, lo: 0x9b, hi: 0x9b},
+ {value: 0x429a, lo: 0x9c, hi: 0x9c},
+ {value: 0x42bd, lo: 0x9d, hi: 0x9d},
+ {value: 0x0113, lo: 0xa0, hi: 0xa0},
+ {value: 0x0099, lo: 0xa1, hi: 0xa1},
+ {value: 0x00a7, lo: 0xa2, hi: 0xa3},
+ {value: 0x0167, lo: 0xa4, hi: 0xa4},
+ // Block 0x4, offset 0x25
+ {value: 0x0000, lo: 0x0f},
+ {value: 0xa000, lo: 0x83, hi: 0x83},
+ {value: 0xa000, lo: 0x87, hi: 0x87},
+ {value: 0xa000, lo: 0x8b, hi: 0x8b},
+ {value: 0xa000, lo: 0x8d, hi: 0x8d},
+ {value: 0x37e0, lo: 0x90, hi: 0x90},
+ {value: 0x37ec, lo: 0x91, hi: 0x91},
+ {value: 0x37da, lo: 0x93, hi: 0x93},
+ {value: 0xa000, lo: 0x96, hi: 0x96},
+ {value: 0x3852, lo: 0x97, hi: 0x97},
+ {value: 0x381c, lo: 0x9c, hi: 0x9c},
+ {value: 0x3804, lo: 0x9d, hi: 0x9d},
+ {value: 0x382e, lo: 0x9e, hi: 0x9e},
+ {value: 0xa000, lo: 0xb4, hi: 0xb5},
+ {value: 0x3858, lo: 0xb6, hi: 0xb6},
+ {value: 0x385e, lo: 0xb7, hi: 0xb7},
+ // Block 0x5, offset 0x35
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0x83, hi: 0x87},
+ // Block 0x6, offset 0x37
+ {value: 0x0001, lo: 0x04},
+ {value: 0x8113, lo: 0x81, hi: 0x82},
+ {value: 0x8132, lo: 0x84, hi: 0x84},
+ {value: 0x812d, lo: 0x85, hi: 0x85},
+ {value: 0x810d, lo: 0x87, hi: 0x87},
+ // Block 0x7, offset 0x3c
+ {value: 0x0000, lo: 0x0a},
+ {value: 0x8132, lo: 0x90, hi: 0x97},
+ {value: 0x8119, lo: 0x98, hi: 0x98},
+ {value: 0x811a, lo: 0x99, hi: 0x99},
+ {value: 0x811b, lo: 0x9a, hi: 0x9a},
+ {value: 0x387c, lo: 0xa2, hi: 0xa2},
+ {value: 0x3882, lo: 0xa3, hi: 0xa3},
+ {value: 0x388e, lo: 0xa4, hi: 0xa4},
+ {value: 0x3888, lo: 0xa5, hi: 0xa5},
+ {value: 0x3894, lo: 0xa6, hi: 0xa6},
+ {value: 0xa000, lo: 0xa7, hi: 0xa7},
+ // Block 0x8, offset 0x47
+ {value: 0x0000, lo: 0x0e},
+ {value: 0x38a6, lo: 0x80, hi: 0x80},
+ {value: 0xa000, lo: 0x81, hi: 0x81},
+ {value: 0x389a, lo: 0x82, hi: 0x82},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0x38a0, lo: 0x93, hi: 0x93},
+ {value: 0xa000, lo: 0x95, hi: 0x95},
+ {value: 0x8132, lo: 0x96, hi: 0x9c},
+ {value: 0x8132, lo: 0x9f, hi: 0xa2},
+ {value: 0x812d, lo: 0xa3, hi: 0xa3},
+ {value: 0x8132, lo: 0xa4, hi: 0xa4},
+ {value: 0x8132, lo: 0xa7, hi: 0xa8},
+ {value: 0x812d, lo: 0xaa, hi: 0xaa},
+ {value: 0x8132, lo: 0xab, hi: 0xac},
+ {value: 0x812d, lo: 0xad, hi: 0xad},
+ // Block 0x9, offset 0x56
+ {value: 0x0000, lo: 0x0c},
+ {value: 0x811f, lo: 0x91, hi: 0x91},
+ {value: 0x8132, lo: 0xb0, hi: 0xb0},
+ {value: 0x812d, lo: 0xb1, hi: 0xb1},
+ {value: 0x8132, lo: 0xb2, hi: 0xb3},
+ {value: 0x812d, lo: 0xb4, hi: 0xb4},
+ {value: 0x8132, lo: 0xb5, hi: 0xb6},
+ {value: 0x812d, lo: 0xb7, hi: 0xb9},
+ {value: 0x8132, lo: 0xba, hi: 0xba},
+ {value: 0x812d, lo: 0xbb, hi: 0xbc},
+ {value: 0x8132, lo: 0xbd, hi: 0xbd},
+ {value: 0x812d, lo: 0xbe, hi: 0xbe},
+ {value: 0x8132, lo: 0xbf, hi: 0xbf},
+ // Block 0xa, offset 0x63
+ {value: 0x0005, lo: 0x07},
+ {value: 0x8132, lo: 0x80, hi: 0x80},
+ {value: 0x8132, lo: 0x81, hi: 0x81},
+ {value: 0x812d, lo: 0x82, hi: 0x83},
+ {value: 0x812d, lo: 0x84, hi: 0x85},
+ {value: 0x812d, lo: 0x86, hi: 0x87},
+ {value: 0x812d, lo: 0x88, hi: 0x89},
+ {value: 0x8132, lo: 0x8a, hi: 0x8a},
+ // Block 0xb, offset 0x6b
+ {value: 0x0000, lo: 0x03},
+ {value: 0x8132, lo: 0xab, hi: 0xb1},
+ {value: 0x812d, lo: 0xb2, hi: 0xb2},
+ {value: 0x8132, lo: 0xb3, hi: 0xb3},
+ // Block 0xc, offset 0x6f
+ {value: 0x0000, lo: 0x04},
+ {value: 0x8132, lo: 0x96, hi: 0x99},
+ {value: 0x8132, lo: 0x9b, hi: 0xa3},
+ {value: 0x8132, lo: 0xa5, hi: 0xa7},
+ {value: 0x8132, lo: 0xa9, hi: 0xad},
+ // Block 0xd, offset 0x74
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0x99, hi: 0x9b},
+ // Block 0xe, offset 0x76
+ {value: 0x0000, lo: 0x0e},
+ {value: 0x8132, lo: 0xa4, hi: 0xa5},
+ {value: 0x812d, lo: 0xa6, hi: 0xa6},
+ {value: 0x8132, lo: 0xa7, hi: 0xa8},
+ {value: 0x812d, lo: 0xa9, hi: 0xa9},
+ {value: 0x8132, lo: 0xaa, hi: 0xac},
+ {value: 0x812d, lo: 0xad, hi: 0xaf},
+ {value: 0x8116, lo: 0xb0, hi: 0xb0},
+ {value: 0x8117, lo: 0xb1, hi: 0xb1},
+ {value: 0x8118, lo: 0xb2, hi: 0xb2},
+ {value: 0x8132, lo: 0xb3, hi: 0xb5},
+ {value: 0x812d, lo: 0xb6, hi: 0xb6},
+ {value: 0x8132, lo: 0xb7, hi: 0xb8},
+ {value: 0x812d, lo: 0xb9, hi: 0xba},
+ {value: 0x8132, lo: 0xbb, hi: 0xbf},
+ // Block 0xf, offset 0x85
+ {value: 0x0000, lo: 0x07},
+ {value: 0xa000, lo: 0xa8, hi: 0xa8},
+ {value: 0x3f13, lo: 0xa9, hi: 0xa9},
+ {value: 0xa000, lo: 0xb0, hi: 0xb0},
+ {value: 0x3f1b, lo: 0xb1, hi: 0xb1},
+ {value: 0xa000, lo: 0xb3, hi: 0xb3},
+ {value: 0x3f23, lo: 0xb4, hi: 0xb4},
+ {value: 0x9902, lo: 0xbc, hi: 0xbc},
+ // Block 0x10, offset 0x8d
+ {value: 0x0008, lo: 0x06},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x8132, lo: 0x91, hi: 0x91},
+ {value: 0x812d, lo: 0x92, hi: 0x92},
+ {value: 0x8132, lo: 0x93, hi: 0x93},
+ {value: 0x8132, lo: 0x94, hi: 0x94},
+ {value: 0x4557, lo: 0x98, hi: 0x9f},
+ // Block 0x11, offset 0x94
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8102, lo: 0xbc, hi: 0xbc},
+ {value: 0x9900, lo: 0xbe, hi: 0xbe},
+ // Block 0x12, offset 0x97
+ {value: 0x0007, lo: 0x07},
+ {value: 0xa000, lo: 0x87, hi: 0x87},
+ {value: 0x18e1, lo: 0x8b, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x97, hi: 0x97},
+ {value: 0x4597, lo: 0x9c, hi: 0x9c},
+ {value: 0x459f, lo: 0x9d, hi: 0x9d},
+ {value: 0x45a7, lo: 0x9f, hi: 0x9f},
+ // Block 0x13, offset 0x9f
+ {value: 0x0000, lo: 0x03},
+ {value: 0x45cf, lo: 0xb3, hi: 0xb3},
+ {value: 0x45d7, lo: 0xb6, hi: 0xb6},
+ {value: 0x8102, lo: 0xbc, hi: 0xbc},
+ // Block 0x14, offset 0xa3
+ {value: 0x0008, lo: 0x03},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x45af, lo: 0x99, hi: 0x9b},
+ {value: 0x45c7, lo: 0x9e, hi: 0x9e},
+ // Block 0x15, offset 0xa7
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8102, lo: 0xbc, hi: 0xbc},
+ // Block 0x16, offset 0xa9
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ // Block 0x17, offset 0xab
+ {value: 0x0000, lo: 0x08},
+ {value: 0xa000, lo: 0x87, hi: 0x87},
+ {value: 0x18f6, lo: 0x88, hi: 0x88},
+ {value: 0x18ef, lo: 0x8b, hi: 0x8b},
+ {value: 0x18fd, lo: 0x8c, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x96, hi: 0x97},
+ {value: 0x45df, lo: 0x9c, hi: 0x9c},
+ {value: 0x45e7, lo: 0x9d, hi: 0x9d},
+ // Block 0x18, offset 0xb4
+ {value: 0x0000, lo: 0x03},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0x1904, lo: 0x94, hi: 0x94},
+ {value: 0x9900, lo: 0xbe, hi: 0xbe},
+ // Block 0x19, offset 0xb8
+ {value: 0x0000, lo: 0x06},
+ {value: 0xa000, lo: 0x86, hi: 0x87},
+ {value: 0x190b, lo: 0x8a, hi: 0x8a},
+ {value: 0x1919, lo: 0x8b, hi: 0x8b},
+ {value: 0x1912, lo: 0x8c, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x97, hi: 0x97},
+ // Block 0x1a, offset 0xbf
+ {value: 0x1801, lo: 0x04},
+ {value: 0xa000, lo: 0x86, hi: 0x86},
+ {value: 0x3f2b, lo: 0x88, hi: 0x88},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x8120, lo: 0x95, hi: 0x96},
+ // Block 0x1b, offset 0xc4
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8102, lo: 0xbc, hi: 0xbc},
+ {value: 0xa000, lo: 0xbf, hi: 0xbf},
+ // Block 0x1c, offset 0xc7
+ {value: 0x0000, lo: 0x09},
+ {value: 0x1920, lo: 0x80, hi: 0x80},
+ {value: 0x9900, lo: 0x82, hi: 0x82},
+ {value: 0xa000, lo: 0x86, hi: 0x86},
+ {value: 0x1927, lo: 0x87, hi: 0x87},
+ {value: 0x192e, lo: 0x88, hi: 0x88},
+ {value: 0x2eb7, lo: 0x8a, hi: 0x8a},
+ {value: 0x19f6, lo: 0x8b, hi: 0x8b},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x95, hi: 0x96},
+ // Block 0x1d, offset 0xd1
+ {value: 0x0000, lo: 0x01},
+ {value: 0x9900, lo: 0xbe, hi: 0xbe},
+ // Block 0x1e, offset 0xd3
+ {value: 0x0000, lo: 0x06},
+ {value: 0xa000, lo: 0x86, hi: 0x87},
+ {value: 0x1935, lo: 0x8a, hi: 0x8a},
+ {value: 0x1943, lo: 0x8b, hi: 0x8b},
+ {value: 0x193c, lo: 0x8c, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x97, hi: 0x97},
+ // Block 0x1f, offset 0xda
+ {value: 0x0007, lo: 0x07},
+ {value: 0x9904, lo: 0x8a, hi: 0x8a},
+ {value: 0x9900, lo: 0x8f, hi: 0x8f},
+ {value: 0xa000, lo: 0x99, hi: 0x99},
+ {value: 0x3f33, lo: 0x9a, hi: 0x9a},
+ {value: 0x2ebe, lo: 0x9c, hi: 0x9d},
+ {value: 0x194a, lo: 0x9e, hi: 0x9e},
+ {value: 0x9900, lo: 0x9f, hi: 0x9f},
+ // Block 0x20, offset 0xe2
+ {value: 0x0000, lo: 0x03},
+ {value: 0x27b4, lo: 0xb3, hi: 0xb3},
+ {value: 0x8122, lo: 0xb8, hi: 0xb9},
+ {value: 0x8104, lo: 0xba, hi: 0xba},
+ // Block 0x21, offset 0xe6
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8123, lo: 0x88, hi: 0x8b},
+ // Block 0x22, offset 0xe8
+ {value: 0x0000, lo: 0x02},
+ {value: 0x27c9, lo: 0xb3, hi: 0xb3},
+ {value: 0x8124, lo: 0xb8, hi: 0xb9},
+ // Block 0x23, offset 0xeb
+ {value: 0x0000, lo: 0x03},
+ {value: 0x8125, lo: 0x88, hi: 0x8b},
+ {value: 0x27bb, lo: 0x9c, hi: 0x9c},
+ {value: 0x27c2, lo: 0x9d, hi: 0x9d},
+ // Block 0x24, offset 0xef
+ {value: 0x0000, lo: 0x05},
+ {value: 0x030b, lo: 0x8c, hi: 0x8c},
+ {value: 0x812d, lo: 0x98, hi: 0x99},
+ {value: 0x812d, lo: 0xb5, hi: 0xb5},
+ {value: 0x812d, lo: 0xb7, hi: 0xb7},
+ {value: 0x812b, lo: 0xb9, hi: 0xb9},
+ // Block 0x25, offset 0xf5
+ {value: 0x0000, lo: 0x10},
+ {value: 0x27d7, lo: 0x83, hi: 0x83},
+ {value: 0x27de, lo: 0x8d, hi: 0x8d},
+ {value: 0x27e5, lo: 0x92, hi: 0x92},
+ {value: 0x27ec, lo: 0x97, hi: 0x97},
+ {value: 0x27f3, lo: 0x9c, hi: 0x9c},
+ {value: 0x27d0, lo: 0xa9, hi: 0xa9},
+ {value: 0x8126, lo: 0xb1, hi: 0xb1},
+ {value: 0x8127, lo: 0xb2, hi: 0xb2},
+ {value: 0x4a0b, lo: 0xb3, hi: 0xb3},
+ {value: 0x8128, lo: 0xb4, hi: 0xb4},
+ {value: 0x4a14, lo: 0xb5, hi: 0xb5},
+ {value: 0x45ef, lo: 0xb6, hi: 0xb6},
+ {value: 0x462f, lo: 0xb7, hi: 0xb7},
+ {value: 0x45f7, lo: 0xb8, hi: 0xb8},
+ {value: 0x463a, lo: 0xb9, hi: 0xb9},
+ {value: 0x8127, lo: 0xba, hi: 0xbd},
+ // Block 0x26, offset 0x106
+ {value: 0x0000, lo: 0x0b},
+ {value: 0x8127, lo: 0x80, hi: 0x80},
+ {value: 0x4a1d, lo: 0x81, hi: 0x81},
+ {value: 0x8132, lo: 0x82, hi: 0x83},
+ {value: 0x8104, lo: 0x84, hi: 0x84},
+ {value: 0x8132, lo: 0x86, hi: 0x87},
+ {value: 0x2801, lo: 0x93, hi: 0x93},
+ {value: 0x2808, lo: 0x9d, hi: 0x9d},
+ {value: 0x280f, lo: 0xa2, hi: 0xa2},
+ {value: 0x2816, lo: 0xa7, hi: 0xa7},
+ {value: 0x281d, lo: 0xac, hi: 0xac},
+ {value: 0x27fa, lo: 0xb9, hi: 0xb9},
+ // Block 0x27, offset 0x112
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0x86, hi: 0x86},
+ // Block 0x28, offset 0x114
+ {value: 0x0000, lo: 0x05},
+ {value: 0xa000, lo: 0xa5, hi: 0xa5},
+ {value: 0x1951, lo: 0xa6, hi: 0xa6},
+ {value: 0x9900, lo: 0xae, hi: 0xae},
+ {value: 0x8102, lo: 0xb7, hi: 0xb7},
+ {value: 0x8104, lo: 0xb9, hi: 0xba},
+ // Block 0x29, offset 0x11a
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0x8d, hi: 0x8d},
+ // Block 0x2a, offset 0x11c
+ {value: 0x0000, lo: 0x01},
+ {value: 0x030f, lo: 0xbc, hi: 0xbc},
+ // Block 0x2b, offset 0x11e
+ {value: 0x0000, lo: 0x01},
+ {value: 0xa000, lo: 0x80, hi: 0x92},
+ // Block 0x2c, offset 0x120
+ {value: 0x0000, lo: 0x01},
+ {value: 0xb900, lo: 0xa1, hi: 0xb5},
+ // Block 0x2d, offset 0x122
+ {value: 0x0000, lo: 0x01},
+ {value: 0x9900, lo: 0xa8, hi: 0xbf},
+ // Block 0x2e, offset 0x124
+ {value: 0x0000, lo: 0x01},
+ {value: 0x9900, lo: 0x80, hi: 0x82},
+ // Block 0x2f, offset 0x126
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0x9d, hi: 0x9f},
+ // Block 0x30, offset 0x128
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x94, hi: 0x94},
+ {value: 0x8104, lo: 0xb4, hi: 0xb4},
+ // Block 0x31, offset 0x12b
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x92, hi: 0x92},
+ {value: 0x8132, lo: 0x9d, hi: 0x9d},
+ // Block 0x32, offset 0x12e
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8131, lo: 0xa9, hi: 0xa9},
+ // Block 0x33, offset 0x130
+ {value: 0x0004, lo: 0x02},
+ {value: 0x812e, lo: 0xb9, hi: 0xba},
+ {value: 0x812d, lo: 0xbb, hi: 0xbb},
+ // Block 0x34, offset 0x133
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8132, lo: 0x97, hi: 0x97},
+ {value: 0x812d, lo: 0x98, hi: 0x98},
+ // Block 0x35, offset 0x136
+ {value: 0x0000, lo: 0x03},
+ {value: 0x8104, lo: 0xa0, hi: 0xa0},
+ {value: 0x8132, lo: 0xb5, hi: 0xbc},
+ {value: 0x812d, lo: 0xbf, hi: 0xbf},
+ // Block 0x36, offset 0x13a
+ {value: 0x0000, lo: 0x04},
+ {value: 0x8132, lo: 0xb0, hi: 0xb4},
+ {value: 0x812d, lo: 0xb5, hi: 0xba},
+ {value: 0x8132, lo: 0xbb, hi: 0xbc},
+ {value: 0x812d, lo: 0xbd, hi: 0xbd},
+ // Block 0x37, offset 0x13f
+ {value: 0x0000, lo: 0x08},
+ {value: 0x1990, lo: 0x80, hi: 0x80},
+ {value: 0x1997, lo: 0x81, hi: 0x81},
+ {value: 0xa000, lo: 0x82, hi: 0x82},
+ {value: 0x199e, lo: 0x83, hi: 0x83},
+ {value: 0x8104, lo: 0x84, hi: 0x84},
+ {value: 0x8132, lo: 0xab, hi: 0xab},
+ {value: 0x812d, lo: 0xac, hi: 0xac},
+ {value: 0x8132, lo: 0xad, hi: 0xb3},
+ // Block 0x38, offset 0x148
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0xaa, hi: 0xab},
+ // Block 0x39, offset 0x14a
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8102, lo: 0xa6, hi: 0xa6},
+ {value: 0x8104, lo: 0xb2, hi: 0xb3},
+ // Block 0x3a, offset 0x14d
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8102, lo: 0xb7, hi: 0xb7},
+ // Block 0x3b, offset 0x14f
+ {value: 0x0000, lo: 0x0a},
+ {value: 0x8132, lo: 0x90, hi: 0x92},
+ {value: 0x8101, lo: 0x94, hi: 0x94},
+ {value: 0x812d, lo: 0x95, hi: 0x99},
+ {value: 0x8132, lo: 0x9a, hi: 0x9b},
+ {value: 0x812d, lo: 0x9c, hi: 0x9f},
+ {value: 0x8132, lo: 0xa0, hi: 0xa0},
+ {value: 0x8101, lo: 0xa2, hi: 0xa8},
+ {value: 0x812d, lo: 0xad, hi: 0xad},
+ {value: 0x8132, lo: 0xb4, hi: 0xb4},
+ {value: 0x8132, lo: 0xb8, hi: 0xb9},
+ // Block 0x3c, offset 0x15a
+ {value: 0x0002, lo: 0x0a},
+ {value: 0x0043, lo: 0xac, hi: 0xac},
+ {value: 0x00d1, lo: 0xad, hi: 0xad},
+ {value: 0x0045, lo: 0xae, hi: 0xae},
+ {value: 0x0049, lo: 0xb0, hi: 0xb1},
+ {value: 0x00e6, lo: 0xb2, hi: 0xb2},
+ {value: 0x004f, lo: 0xb3, hi: 0xba},
+ {value: 0x005f, lo: 0xbc, hi: 0xbc},
+ {value: 0x00ef, lo: 0xbd, hi: 0xbd},
+ {value: 0x0061, lo: 0xbe, hi: 0xbe},
+ {value: 0x0065, lo: 0xbf, hi: 0xbf},
+ // Block 0x3d, offset 0x165
+ {value: 0x0000, lo: 0x0e},
+ {value: 0x8132, lo: 0x80, hi: 0x81},
+ {value: 0x812d, lo: 0x82, hi: 0x82},
+ {value: 0x8132, lo: 0x83, hi: 0x89},
+ {value: 0x812d, lo: 0x8a, hi: 0x8a},
+ {value: 0x8132, lo: 0x8b, hi: 0x8c},
+ {value: 0x8135, lo: 0x8d, hi: 0x8d},
+ {value: 0x812a, lo: 0x8e, hi: 0x8e},
+ {value: 0x812d, lo: 0x8f, hi: 0x8f},
+ {value: 0x8129, lo: 0x90, hi: 0x90},
+ {value: 0x8132, lo: 0x91, hi: 0xb5},
+ {value: 0x8134, lo: 0xbc, hi: 0xbc},
+ {value: 0x812d, lo: 0xbd, hi: 0xbd},
+ {value: 0x8132, lo: 0xbe, hi: 0xbe},
+ {value: 0x812d, lo: 0xbf, hi: 0xbf},
+ // Block 0x3e, offset 0x174
+ {value: 0x0000, lo: 0x0d},
+ {value: 0x0001, lo: 0x80, hi: 0x8a},
+ {value: 0x04b3, lo: 0x91, hi: 0x91},
+ {value: 0x42d6, lo: 0x97, hi: 0x97},
+ {value: 0x001d, lo: 0xa4, hi: 0xa4},
+ {value: 0x1a06, lo: 0xa5, hi: 0xa5},
+ {value: 0x1cef, lo: 0xa6, hi: 0xa6},
+ {value: 0x0001, lo: 0xaf, hi: 0xaf},
+ {value: 0x288d, lo: 0xb3, hi: 0xb3},
+ {value: 0x29fa, lo: 0xb4, hi: 0xb4},
+ {value: 0x2894, lo: 0xb6, hi: 0xb6},
+ {value: 0x2a04, lo: 0xb7, hi: 0xb7},
+ {value: 0x1a00, lo: 0xbc, hi: 0xbc},
+ {value: 0x42a4, lo: 0xbe, hi: 0xbe},
+ // Block 0x3f, offset 0x182
+ {value: 0x0002, lo: 0x0d},
+ {value: 0x1ac6, lo: 0x87, hi: 0x87},
+ {value: 0x1ac3, lo: 0x88, hi: 0x88},
+ {value: 0x1a03, lo: 0x89, hi: 0x89},
+ {value: 0x2b97, lo: 0x97, hi: 0x97},
+ {value: 0x0001, lo: 0x9f, hi: 0x9f},
+ {value: 0x0021, lo: 0xb0, hi: 0xb0},
+ {value: 0x0093, lo: 0xb1, hi: 0xb1},
+ {value: 0x0029, lo: 0xb4, hi: 0xb9},
+ {value: 0x0017, lo: 0xba, hi: 0xba},
+ {value: 0x04df, lo: 0xbb, hi: 0xbb},
+ {value: 0x003b, lo: 0xbc, hi: 0xbc},
+ {value: 0x0011, lo: 0xbd, hi: 0xbe},
+ {value: 0x009d, lo: 0xbf, hi: 0xbf},
+ // Block 0x40, offset 0x190
+ {value: 0x0002, lo: 0x0f},
+ {value: 0x0021, lo: 0x80, hi: 0x89},
+ {value: 0x0017, lo: 0x8a, hi: 0x8a},
+ {value: 0x04df, lo: 0x8b, hi: 0x8b},
+ {value: 0x003b, lo: 0x8c, hi: 0x8c},
+ {value: 0x0011, lo: 0x8d, hi: 0x8e},
+ {value: 0x0083, lo: 0x90, hi: 0x90},
+ {value: 0x008b, lo: 0x91, hi: 0x91},
+ {value: 0x009f, lo: 0x92, hi: 0x92},
+ {value: 0x00b1, lo: 0x93, hi: 0x93},
+ {value: 0x0104, lo: 0x94, hi: 0x94},
+ {value: 0x0091, lo: 0x95, hi: 0x95},
+ {value: 0x0097, lo: 0x96, hi: 0x99},
+ {value: 0x00a1, lo: 0x9a, hi: 0x9a},
+ {value: 0x00a7, lo: 0x9b, hi: 0x9c},
+ {value: 0x1b2c, lo: 0xa8, hi: 0xa8},
+ // Block 0x41, offset 0x1a0
+ {value: 0x0000, lo: 0x0d},
+ {value: 0x8132, lo: 0x90, hi: 0x91},
+ {value: 0x8101, lo: 0x92, hi: 0x93},
+ {value: 0x8132, lo: 0x94, hi: 0x97},
+ {value: 0x8101, lo: 0x98, hi: 0x9a},
+ {value: 0x8132, lo: 0x9b, hi: 0x9c},
+ {value: 0x8132, lo: 0xa1, hi: 0xa1},
+ {value: 0x8101, lo: 0xa5, hi: 0xa6},
+ {value: 0x8132, lo: 0xa7, hi: 0xa7},
+ {value: 0x812d, lo: 0xa8, hi: 0xa8},
+ {value: 0x8132, lo: 0xa9, hi: 0xa9},
+ {value: 0x8101, lo: 0xaa, hi: 0xab},
+ {value: 0x812d, lo: 0xac, hi: 0xaf},
+ {value: 0x8132, lo: 0xb0, hi: 0xb0},
+ // Block 0x42, offset 0x1ae
+ {value: 0x0007, lo: 0x06},
+ {value: 0x2313, lo: 0x89, hi: 0x89},
+ {value: 0xa000, lo: 0x90, hi: 0x90},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0xa000, lo: 0x94, hi: 0x94},
+ {value: 0x3bf4, lo: 0x9a, hi: 0x9b},
+ {value: 0x3c02, lo: 0xae, hi: 0xae},
+ // Block 0x43, offset 0x1b5
+ {value: 0x000e, lo: 0x05},
+ {value: 0x3c09, lo: 0x8d, hi: 0x8e},
+ {value: 0x3c10, lo: 0x8f, hi: 0x8f},
+ {value: 0xa000, lo: 0x90, hi: 0x90},
+ {value: 0xa000, lo: 0x92, hi: 0x92},
+ {value: 0xa000, lo: 0x94, hi: 0x94},
+ // Block 0x44, offset 0x1bb
+ {value: 0x0173, lo: 0x0e},
+ {value: 0xa000, lo: 0x83, hi: 0x83},
+ {value: 0x3c1e, lo: 0x84, hi: 0x84},
+ {value: 0xa000, lo: 0x88, hi: 0x88},
+ {value: 0x3c25, lo: 0x89, hi: 0x89},
+ {value: 0xa000, lo: 0x8b, hi: 0x8b},
+ {value: 0x3c2c, lo: 0x8c, hi: 0x8c},
+ {value: 0xa000, lo: 0xa3, hi: 0xa3},
+ {value: 0x3c33, lo: 0xa4, hi: 0xa4},
+ {value: 0xa000, lo: 0xa5, hi: 0xa5},
+ {value: 0x3c3a, lo: 0xa6, hi: 0xa6},
+ {value: 0x289b, lo: 0xac, hi: 0xad},
+ {value: 0x28a2, lo: 0xaf, hi: 0xaf},
+ {value: 0x2a18, lo: 0xb0, hi: 0xb0},
+ {value: 0xa000, lo: 0xbc, hi: 0xbc},
+ // Block 0x45, offset 0x1ca
+ {value: 0x0007, lo: 0x03},
+ {value: 0x3ca3, lo: 0xa0, hi: 0xa1},
+ {value: 0x3ccd, lo: 0xa2, hi: 0xa3},
+ {value: 0x3cf7, lo: 0xaa, hi: 0xad},
+ // Block 0x46, offset 0x1ce
+ {value: 0x0004, lo: 0x01},
+ {value: 0x0503, lo: 0xa9, hi: 0xaa},
+ // Block 0x47, offset 0x1d0
+ {value: 0x0002, lo: 0x03},
+ {value: 0x0057, lo: 0x80, hi: 0x8f},
+ {value: 0x0083, lo: 0x90, hi: 0xa9},
+ {value: 0x0021, lo: 0xaa, hi: 0xaa},
+ // Block 0x48, offset 0x1d4
+ {value: 0x0000, lo: 0x01},
+ {value: 0x2ba4, lo: 0x8c, hi: 0x8c},
+ // Block 0x49, offset 0x1d6
+ {value: 0x0263, lo: 0x02},
+ {value: 0x1d1f, lo: 0xb4, hi: 0xb4},
+ {value: 0x1ac0, lo: 0xb5, hi: 0xb6},
+ // Block 0x4a, offset 0x1d9
+ {value: 0x0000, lo: 0x01},
+ {value: 0x4518, lo: 0x9c, hi: 0x9c},
+ // Block 0x4b, offset 0x1db
+ {value: 0x0000, lo: 0x02},
+ {value: 0x0095, lo: 0xbc, hi: 0xbc},
+ {value: 0x006d, lo: 0xbd, hi: 0xbd},
+ // Block 0x4c, offset 0x1de
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xaf, hi: 0xb1},
+ // Block 0x4d, offset 0x1e0
+ {value: 0x0000, lo: 0x02},
+ {value: 0x04f7, lo: 0xaf, hi: 0xaf},
+ {value: 0x8104, lo: 0xbf, hi: 0xbf},
+ // Block 0x4e, offset 0x1e3
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xa0, hi: 0xbf},
+ // Block 0x4f, offset 0x1e5
+ {value: 0x0000, lo: 0x01},
+ {value: 0x0e3b, lo: 0x9f, hi: 0x9f},
+ // Block 0x50, offset 0x1e7
+ {value: 0x0000, lo: 0x01},
+ {value: 0x16a3, lo: 0xb3, hi: 0xb3},
+ // Block 0x51, offset 0x1e9
+ {value: 0x0004, lo: 0x0b},
+ {value: 0x160b, lo: 0x80, hi: 0x82},
+ {value: 0x1623, lo: 0x83, hi: 0x83},
+ {value: 0x163b, lo: 0x84, hi: 0x85},
+ {value: 0x164b, lo: 0x86, hi: 0x89},
+ {value: 0x165f, lo: 0x8a, hi: 0x8c},
+ {value: 0x1673, lo: 0x8d, hi: 0x8d},
+ {value: 0x167b, lo: 0x8e, hi: 0x8e},
+ {value: 0x1683, lo: 0x8f, hi: 0x90},
+ {value: 0x168f, lo: 0x91, hi: 0x93},
+ {value: 0x169f, lo: 0x94, hi: 0x94},
+ {value: 0x16a7, lo: 0x95, hi: 0x95},
+ // Block 0x52, offset 0x1f5
+ {value: 0x0004, lo: 0x09},
+ {value: 0x0001, lo: 0x80, hi: 0x80},
+ {value: 0x812c, lo: 0xaa, hi: 0xaa},
+ {value: 0x8131, lo: 0xab, hi: 0xab},
+ {value: 0x8133, lo: 0xac, hi: 0xac},
+ {value: 0x812e, lo: 0xad, hi: 0xad},
+ {value: 0x812f, lo: 0xae, hi: 0xae},
+ {value: 0x812f, lo: 0xaf, hi: 0xaf},
+ {value: 0x052b, lo: 0xb6, hi: 0xb6},
+ {value: 0x08ff, lo: 0xb8, hi: 0xba},
+ // Block 0x53, offset 0x1ff
+ {value: 0x0004, lo: 0x06},
+ {value: 0x0313, lo: 0xb1, hi: 0xb2},
+ {value: 0x043b, lo: 0xb3, hi: 0xb3},
+ {value: 0x031b, lo: 0xb4, hi: 0xb4},
+ {value: 0x043f, lo: 0xb5, hi: 0xb6},
+ {value: 0x031f, lo: 0xb7, hi: 0xb9},
+ {value: 0x0447, lo: 0xba, hi: 0xbf},
+ // Block 0x54, offset 0x206
+ {value: 0x0004, lo: 0x0c},
+ {value: 0x0367, lo: 0x80, hi: 0x80},
+ {value: 0x032b, lo: 0x81, hi: 0x83},
+ {value: 0x037b, lo: 0x84, hi: 0x84},
+ {value: 0x0337, lo: 0x85, hi: 0x8e},
+ {value: 0x03c7, lo: 0x8f, hi: 0xa3},
+ {value: 0x03c3, lo: 0xa4, hi: 0xa4},
+ {value: 0x035f, lo: 0xa5, hi: 0xa6},
+ {value: 0x045f, lo: 0xa7, hi: 0xad},
+ {value: 0x036b, lo: 0xae, hi: 0xae},
+ {value: 0x047b, lo: 0xaf, hi: 0xb0},
+ {value: 0x036f, lo: 0xb1, hi: 0xb3},
+ {value: 0x037f, lo: 0xb4, hi: 0xbf},
+ // Block 0x55, offset 0x213
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8132, lo: 0xaf, hi: 0xaf},
+ {value: 0x8132, lo: 0xb4, hi: 0xbd},
+ // Block 0x56, offset 0x216
+ {value: 0x0003, lo: 0x02},
+ {value: 0x020f, lo: 0x9c, hi: 0x9d},
+ {value: 0x8132, lo: 0x9f, hi: 0x9f},
+ // Block 0x57, offset 0x219
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xb0, hi: 0xb1},
+ // Block 0x58, offset 0x21b
+ {value: 0x0000, lo: 0x01},
+ {value: 0x16af, lo: 0xb0, hi: 0xb0},
+ // Block 0x59, offset 0x21d
+ {value: 0x000c, lo: 0x01},
+ {value: 0x00d7, lo: 0xb8, hi: 0xb9},
+ // Block 0x5a, offset 0x21f
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0x86, hi: 0x86},
+ // Block 0x5b, offset 0x221
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x84, hi: 0x84},
+ {value: 0x8132, lo: 0xa0, hi: 0xb1},
+ // Block 0x5c, offset 0x224
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0xab, hi: 0xad},
+ // Block 0x5d, offset 0x226
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0x93, hi: 0x93},
+ // Block 0x5e, offset 0x228
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8102, lo: 0xb3, hi: 0xb3},
+ // Block 0x5f, offset 0x22a
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0x80, hi: 0x80},
+ // Block 0x60, offset 0x22c
+ {value: 0x0000, lo: 0x05},
+ {value: 0x8132, lo: 0xb0, hi: 0xb0},
+ {value: 0x8132, lo: 0xb2, hi: 0xb3},
+ {value: 0x812d, lo: 0xb4, hi: 0xb4},
+ {value: 0x8132, lo: 0xb7, hi: 0xb8},
+ {value: 0x8132, lo: 0xbe, hi: 0xbf},
+ // Block 0x61, offset 0x232
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8132, lo: 0x81, hi: 0x81},
+ {value: 0x8104, lo: 0xb6, hi: 0xb6},
+ // Block 0x62, offset 0x235
+ {value: 0x0008, lo: 0x03},
+ {value: 0x16ab, lo: 0x9c, hi: 0x9d},
+ {value: 0x0125, lo: 0x9e, hi: 0x9e},
+ {value: 0x16b7, lo: 0x9f, hi: 0x9f},
+ // Block 0x63, offset 0x239
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0xad, hi: 0xad},
+ // Block 0x64, offset 0x23b
+ {value: 0x0000, lo: 0x06},
+ {value: 0xe500, lo: 0x80, hi: 0x80},
+ {value: 0xc600, lo: 0x81, hi: 0x9b},
+ {value: 0xe500, lo: 0x9c, hi: 0x9c},
+ {value: 0xc600, lo: 0x9d, hi: 0xb7},
+ {value: 0xe500, lo: 0xb8, hi: 0xb8},
+ {value: 0xc600, lo: 0xb9, hi: 0xbf},
+ // Block 0x65, offset 0x242
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x93},
+ {value: 0xe500, lo: 0x94, hi: 0x94},
+ {value: 0xc600, lo: 0x95, hi: 0xaf},
+ {value: 0xe500, lo: 0xb0, hi: 0xb0},
+ {value: 0xc600, lo: 0xb1, hi: 0xbf},
+ // Block 0x66, offset 0x248
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x8b},
+ {value: 0xe500, lo: 0x8c, hi: 0x8c},
+ {value: 0xc600, lo: 0x8d, hi: 0xa7},
+ {value: 0xe500, lo: 0xa8, hi: 0xa8},
+ {value: 0xc600, lo: 0xa9, hi: 0xbf},
+ // Block 0x67, offset 0x24e
+ {value: 0x0000, lo: 0x07},
+ {value: 0xc600, lo: 0x80, hi: 0x83},
+ {value: 0xe500, lo: 0x84, hi: 0x84},
+ {value: 0xc600, lo: 0x85, hi: 0x9f},
+ {value: 0xe500, lo: 0xa0, hi: 0xa0},
+ {value: 0xc600, lo: 0xa1, hi: 0xbb},
+ {value: 0xe500, lo: 0xbc, hi: 0xbc},
+ {value: 0xc600, lo: 0xbd, hi: 0xbf},
+ // Block 0x68, offset 0x256
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x97},
+ {value: 0xe500, lo: 0x98, hi: 0x98},
+ {value: 0xc600, lo: 0x99, hi: 0xb3},
+ {value: 0xe500, lo: 0xb4, hi: 0xb4},
+ {value: 0xc600, lo: 0xb5, hi: 0xbf},
+ // Block 0x69, offset 0x25c
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x8f},
+ {value: 0xe500, lo: 0x90, hi: 0x90},
+ {value: 0xc600, lo: 0x91, hi: 0xab},
+ {value: 0xe500, lo: 0xac, hi: 0xac},
+ {value: 0xc600, lo: 0xad, hi: 0xbf},
+ // Block 0x6a, offset 0x262
+ {value: 0x0000, lo: 0x05},
+ {value: 0xc600, lo: 0x80, hi: 0x87},
+ {value: 0xe500, lo: 0x88, hi: 0x88},
+ {value: 0xc600, lo: 0x89, hi: 0xa3},
+ {value: 0xe500, lo: 0xa4, hi: 0xa4},
+ {value: 0xc600, lo: 0xa5, hi: 0xbf},
+ // Block 0x6b, offset 0x268
+ {value: 0x0000, lo: 0x03},
+ {value: 0xc600, lo: 0x80, hi: 0x87},
+ {value: 0xe500, lo: 0x88, hi: 0x88},
+ {value: 0xc600, lo: 0x89, hi: 0xa3},
+ // Block 0x6c, offset 0x26c
+ {value: 0x0002, lo: 0x01},
+ {value: 0x0003, lo: 0x81, hi: 0xbf},
+ // Block 0x6d, offset 0x26e
+ {value: 0x0004, lo: 0x0e},
+ {value: 0x03c7, lo: 0x82, hi: 0x87},
+ {value: 0x03df, lo: 0x8a, hi: 0x8f},
+ {value: 0x03f7, lo: 0x92, hi: 0x97},
+ {value: 0x040f, lo: 0x9a, hi: 0x9c},
+ {value: 0x00bf, lo: 0xa0, hi: 0xa0},
+ {value: 0x00c2, lo: 0xa1, hi: 0xa1},
+ {value: 0x00cb, lo: 0xa2, hi: 0xa2},
+ {value: 0x429f, lo: 0xa3, hi: 0xa3},
+ {value: 0x00c8, lo: 0xa4, hi: 0xa4},
+ {value: 0x00c5, lo: 0xa5, hi: 0xa5},
+ {value: 0x04bf, lo: 0xa6, hi: 0xa6},
+ {value: 0x04e3, lo: 0xa8, hi: 0xa8},
+ {value: 0x04c3, lo: 0xa9, hi: 0xac},
+ {value: 0x04e7, lo: 0xad, hi: 0xae},
+ // Block 0x6e, offset 0x27d
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0xbd, hi: 0xbd},
+ // Block 0x6f, offset 0x27f
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0xa0, hi: 0xa0},
+ // Block 0x70, offset 0x281
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xb6, hi: 0xba},
+ // Block 0x71, offset 0x283
+ {value: 0x002c, lo: 0x05},
+ {value: 0x812d, lo: 0x8d, hi: 0x8d},
+ {value: 0x8132, lo: 0x8f, hi: 0x8f},
+ {value: 0x8132, lo: 0xb8, hi: 0xb8},
+ {value: 0x8101, lo: 0xb9, hi: 0xba},
+ {value: 0x8104, lo: 0xbf, hi: 0xbf},
+ // Block 0x72, offset 0x289
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8132, lo: 0xa5, hi: 0xa5},
+ {value: 0x812d, lo: 0xa6, hi: 0xa6},
+ // Block 0x73, offset 0x28c
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x86, hi: 0x86},
+ {value: 0x8104, lo: 0xbf, hi: 0xbf},
+ // Block 0x74, offset 0x28f
+ {value: 0x17fe, lo: 0x07},
+ {value: 0xa000, lo: 0x99, hi: 0x99},
+ {value: 0x4273, lo: 0x9a, hi: 0x9a},
+ {value: 0xa000, lo: 0x9b, hi: 0x9b},
+ {value: 0x427d, lo: 0x9c, hi: 0x9c},
+ {value: 0xa000, lo: 0xa5, hi: 0xa5},
+ {value: 0x4287, lo: 0xab, hi: 0xab},
+ {value: 0x8104, lo: 0xb9, hi: 0xba},
+ // Block 0x75, offset 0x297
+ {value: 0x0000, lo: 0x06},
+ {value: 0x8132, lo: 0x80, hi: 0x82},
+ {value: 0x9900, lo: 0xa7, hi: 0xa7},
+ {value: 0x19a5, lo: 0xae, hi: 0xae},
+ {value: 0x19ae, lo: 0xaf, hi: 0xaf},
+ {value: 0xa000, lo: 0xb1, hi: 0xb2},
+ {value: 0x8104, lo: 0xb3, hi: 0xb4},
+ // Block 0x76, offset 0x29e
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0xb5, hi: 0xb5},
+ {value: 0x8102, lo: 0xb6, hi: 0xb6},
+ // Block 0x77, offset 0x2a1
+ {value: 0x0002, lo: 0x01},
+ {value: 0x8102, lo: 0xa9, hi: 0xaa},
+ // Block 0x78, offset 0x2a3
+ {value: 0x0000, lo: 0x07},
+ {value: 0xa000, lo: 0x87, hi: 0x87},
+ {value: 0x19b7, lo: 0x8b, hi: 0x8b},
+ {value: 0x19c0, lo: 0x8c, hi: 0x8c},
+ {value: 0x8104, lo: 0x8d, hi: 0x8d},
+ {value: 0x9900, lo: 0x97, hi: 0x97},
+ {value: 0x8132, lo: 0xa6, hi: 0xac},
+ {value: 0x8132, lo: 0xb0, hi: 0xb4},
+ // Block 0x79, offset 0x2ab
+ {value: 0x7f37, lo: 0x06},
+ {value: 0x9900, lo: 0xb0, hi: 0xb0},
+ {value: 0xa000, lo: 0xb9, hi: 0xb9},
+ {value: 0x9900, lo: 0xba, hi: 0xba},
+ {value: 0x19d2, lo: 0xbb, hi: 0xbb},
+ {value: 0x19c9, lo: 0xbc, hi: 0xbd},
+ {value: 0x19db, lo: 0xbe, hi: 0xbe},
+ // Block 0x7a, offset 0x2b2
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0x82, hi: 0x82},
+ {value: 0x8102, lo: 0x83, hi: 0x83},
+ // Block 0x7b, offset 0x2b5
+ {value: 0x0000, lo: 0x05},
+ {value: 0x9900, lo: 0xaf, hi: 0xaf},
+ {value: 0xa000, lo: 0xb8, hi: 0xb9},
+ {value: 0x19e4, lo: 0xba, hi: 0xba},
+ {value: 0x19ed, lo: 0xbb, hi: 0xbb},
+ {value: 0x8104, lo: 0xbf, hi: 0xbf},
+ // Block 0x7c, offset 0x2bb
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8102, lo: 0x80, hi: 0x80},
+ // Block 0x7d, offset 0x2bd
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8104, lo: 0xbf, hi: 0xbf},
+ // Block 0x7e, offset 0x2bf
+ {value: 0x0000, lo: 0x02},
+ {value: 0x8104, lo: 0xb6, hi: 0xb6},
+ {value: 0x8102, lo: 0xb7, hi: 0xb7},
+ // Block 0x7f, offset 0x2c2
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8101, lo: 0xb0, hi: 0xb4},
+ // Block 0x80, offset 0x2c4
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0xb0, hi: 0xb6},
+ // Block 0x81, offset 0x2c6
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8101, lo: 0x9e, hi: 0x9e},
+ // Block 0x82, offset 0x2c8
+ {value: 0x0000, lo: 0x0c},
+ {value: 0x4607, lo: 0x9e, hi: 0x9e},
+ {value: 0x4611, lo: 0x9f, hi: 0x9f},
+ {value: 0x4645, lo: 0xa0, hi: 0xa0},
+ {value: 0x4653, lo: 0xa1, hi: 0xa1},
+ {value: 0x4661, lo: 0xa2, hi: 0xa2},
+ {value: 0x466f, lo: 0xa3, hi: 0xa3},
+ {value: 0x467d, lo: 0xa4, hi: 0xa4},
+ {value: 0x812b, lo: 0xa5, hi: 0xa6},
+ {value: 0x8101, lo: 0xa7, hi: 0xa9},
+ {value: 0x8130, lo: 0xad, hi: 0xad},
+ {value: 0x812b, lo: 0xae, hi: 0xb2},
+ {value: 0x812d, lo: 0xbb, hi: 0xbf},
+ // Block 0x83, offset 0x2d5
+ {value: 0x0000, lo: 0x09},
+ {value: 0x812d, lo: 0x80, hi: 0x82},
+ {value: 0x8132, lo: 0x85, hi: 0x89},
+ {value: 0x812d, lo: 0x8a, hi: 0x8b},
+ {value: 0x8132, lo: 0xaa, hi: 0xad},
+ {value: 0x461b, lo: 0xbb, hi: 0xbb},
+ {value: 0x4625, lo: 0xbc, hi: 0xbc},
+ {value: 0x468b, lo: 0xbd, hi: 0xbd},
+ {value: 0x46a7, lo: 0xbe, hi: 0xbe},
+ {value: 0x4699, lo: 0xbf, hi: 0xbf},
+ // Block 0x84, offset 0x2df
+ {value: 0x0000, lo: 0x01},
+ {value: 0x46b5, lo: 0x80, hi: 0x80},
+ // Block 0x85, offset 0x2e1
+ {value: 0x0000, lo: 0x01},
+ {value: 0x8132, lo: 0x82, hi: 0x84},
+ // Block 0x86, offset 0x2e3
+ {value: 0x0002, lo: 0x03},
+ {value: 0x0043, lo: 0x80, hi: 0x99},
+ {value: 0x0083, lo: 0x9a, hi: 0xb3},
+ {value: 0x0043, lo: 0xb4, hi: 0xbf},
+ // Block 0x87, offset 0x2e7
+ {value: 0x0002, lo: 0x04},
+ {value: 0x005b, lo: 0x80, hi: 0x8d},
+ {value: 0x0083, lo: 0x8e, hi: 0x94},
+ {value: 0x0093, lo: 0x96, hi: 0xa7},
+ {value: 0x0043, lo: 0xa8, hi: 0xbf},
+ // Block 0x88, offset 0x2ec
+ {value: 0x0002, lo: 0x0b},
+ {value: 0x0073, lo: 0x80, hi: 0x81},
+ {value: 0x0083, lo: 0x82, hi: 0x9b},
+ {value: 0x0043, lo: 0x9c, hi: 0x9c},
+ {value: 0x0047, lo: 0x9e, hi: 0x9f},
+ {value: 0x004f, lo: 0xa2, hi: 0xa2},
+ {value: 0x0055, lo: 0xa5, hi: 0xa6},
+ {value: 0x005d, lo: 0xa9, hi: 0xac},
+ {value: 0x0067, lo: 0xae, hi: 0xb5},
+ {value: 0x0083, lo: 0xb6, hi: 0xb9},
+ {value: 0x008d, lo: 0xbb, hi: 0xbb},
+ {value: 0x0091, lo: 0xbd, hi: 0xbf},
+ // Block 0x89, offset 0x2f8
+ {value: 0x0002, lo: 0x04},
+ {value: 0x0097, lo: 0x80, hi: 0x83},
+ {value: 0x00a1, lo: 0x85, hi: 0x8f},
+ {value: 0x0043, lo: 0x90, hi: 0xa9},
+ {value: 0x0083, lo: 0xaa, hi: 0xbf},
+ // Block 0x8a, offset 0x2fd
+ {value: 0x0002, lo: 0x08},
+ {value: 0x00af, lo: 0x80, hi: 0x83},
+ {value: 0x0043, lo: 0x84, hi: 0x85},
+ {value: 0x0049, lo: 0x87, hi: 0x8a},
+ {value: 0x0055, lo: 0x8d, hi: 0x94},
+ {value: 0x0067, lo: 0x96, hi: 0x9c},
+ {value: 0x0083, lo: 0x9e, hi: 0xb7},
+ {value: 0x0043, lo: 0xb8, hi: 0xb9},
+ {value: 0x0049, lo: 0xbb, hi: 0xbe},
+ // Block 0x8b, offset 0x306
+ {value: 0x0002, lo: 0x05},
+ {value: 0x0053, lo: 0x80, hi: 0x84},
+ {value: 0x005f, lo: 0x86, hi: 0x86},
+ {value: 0x0067, lo: 0x8a, hi: 0x90},
+ {value: 0x0083, lo: 0x92, hi: 0xab},
+ {value: 0x0043, lo: 0xac, hi: 0xbf},
+ // Block 0x8c, offset 0x30c
+ {value: 0x0002, lo: 0x04},
+ {value: 0x006b, lo: 0x80, hi: 0x85},
+ {value: 0x0083, lo: 0x86, hi: 0x9f},
+ {value: 0x0043, lo: 0xa0, hi: 0xb9},
+ {value: 0x0083, lo: 0xba, hi: 0xbf},
+ // Block 0x8d, offset 0x311
+ {value: 0x0002, lo: 0x03},
+ {value: 0x008f, lo: 0x80, hi: 0x93},
+ {value: 0x0043, lo: 0x94, hi: 0xad},
+ {value: 0x0083, lo: 0xae, hi: 0xbf},
+ // Block 0x8e, offset 0x315
+ {value: 0x0002, lo: 0x04},
+ {value: 0x00a7, lo: 0x80, hi: 0x87},
+ {value: 0x0043, lo: 0x88, hi: 0xa1},
+ {value: 0x0083, lo: 0xa2, hi: 0xbb},
+ {value: 0x0043, lo: 0xbc, hi: 0xbf},
+ // Block 0x8f, offset 0x31a
+ {value: 0x0002, lo: 0x03},
+ {value: 0x004b, lo: 0x80, hi: 0x95},
+ {value: 0x0083, lo: 0x96, hi: 0xaf},
+ {value: 0x0043, lo: 0xb0, hi: 0xbf},
+ // Block 0x90, offset 0x31e
+ {value: 0x0003, lo: 0x0f},
+ {value: 0x01b8, lo: 0x80, hi: 0x80},
+ {value: 0x04d7, lo: 0x81, hi: 0x81},
+ {value: 0x01bb, lo: 0x82, hi: 0x9a},
+ {value: 0x04d3, lo: 0x9b, hi: 0x9b},
+ {value: 0x01c7, lo: 0x9c, hi: 0x9c},
+ {value: 0x01d0, lo: 0x9d, hi: 0x9d},
+ {value: 0x01d6, lo: 0x9e, hi: 0x9e},
+ {value: 0x01fa, lo: 0x9f, hi: 0x9f},
+ {value: 0x01eb, lo: 0xa0, hi: 0xa0},
+ {value: 0x01e8, lo: 0xa1, hi: 0xa1},
+ {value: 0x0173, lo: 0xa2, hi: 0xb2},
+ {value: 0x0188, lo: 0xb3, hi: 0xb3},
+ {value: 0x01a6, lo: 0xb4, hi: 0xba},
+ {value: 0x04d7, lo: 0xbb, hi: 0xbb},
+ {value: 0x01bb, lo: 0xbc, hi: 0xbf},
+ // Block 0x91, offset 0x32e
+ {value: 0x0003, lo: 0x0d},
+ {value: 0x01c7, lo: 0x80, hi: 0x94},
+ {value: 0x04d3, lo: 0x95, hi: 0x95},
+ {value: 0x01c7, lo: 0x96, hi: 0x96},
+ {value: 0x01d0, lo: 0x97, hi: 0x97},
+ {value: 0x01d6, lo: 0x98, hi: 0x98},
+ {value: 0x01fa, lo: 0x99, hi: 0x99},
+ {value: 0x01eb, lo: 0x9a, hi: 0x9a},
+ {value: 0x01e8, lo: 0x9b, hi: 0x9b},
+ {value: 0x0173, lo: 0x9c, hi: 0xac},
+ {value: 0x0188, lo: 0xad, hi: 0xad},
+ {value: 0x01a6, lo: 0xae, hi: 0xb4},
+ {value: 0x04d7, lo: 0xb5, hi: 0xb5},
+ {value: 0x01bb, lo: 0xb6, hi: 0xbf},
+ // Block 0x92, offset 0x33c
+ {value: 0x0003, lo: 0x0d},
+ {value: 0x01d9, lo: 0x80, hi: 0x8e},
+ {value: 0x04d3, lo: 0x8f, hi: 0x8f},
+ {value: 0x01c7, lo: 0x90, hi: 0x90},
+ {value: 0x01d0, lo: 0x91, hi: 0x91},
+ {value: 0x01d6, lo: 0x92, hi: 0x92},
+ {value: 0x01fa, lo: 0x93, hi: 0x93},
+ {value: 0x01eb, lo: 0x94, hi: 0x94},
+ {value: 0x01e8, lo: 0x95, hi: 0x95},
+ {value: 0x0173, lo: 0x96, hi: 0xa6},
+ {value: 0x0188, lo: 0xa7, hi: 0xa7},
+ {value: 0x01a6, lo: 0xa8, hi: 0xae},
+ {value: 0x04d7, lo: 0xaf, hi: 0xaf},
+ {value: 0x01bb, lo: 0xb0, hi: 0xbf},
+ // Block 0x93, offset 0x34a
+ {value: 0x0003, lo: 0x0d},
+ {value: 0x01eb, lo: 0x80, hi: 0x88},
+ {value: 0x04d3, lo: 0x89, hi: 0x89},
+ {value: 0x01c7, lo: 0x8a, hi: 0x8a},
+ {value: 0x01d0, lo: 0x8b, hi: 0x8b},
+ {value: 0x01d6, lo: 0x8c, hi: 0x8c},
+ {value: 0x01fa, lo: 0x8d, hi: 0x8d},
+ {value: 0x01eb, lo: 0x8e, hi: 0x8e},
+ {value: 0x01e8, lo: 0x8f, hi: 0x8f},
+ {value: 0x0173, lo: 0x90, hi: 0xa0},
+ {value: 0x0188, lo: 0xa1, hi: 0xa1},
+ {value: 0x01a6, lo: 0xa2, hi: 0xa8},
+ {value: 0x04d7, lo: 0xa9, hi: 0xa9},
+ {value: 0x01bb, lo: 0xaa, hi: 0xbf},
+ // Block 0x94, offset 0x358
+ {value: 0x0000, lo: 0x01},
+ {value: 0x812d, lo: 0x90, hi: 0x96},
+ // Block 0x95, offset 0x35a
+ {value: 0x0002, lo: 0x09},
+ {value: 0x0063, lo: 0x80, hi: 0x89},
+ {value: 0x1ae4, lo: 0x8a, hi: 0x8a},
+ {value: 0x1b14, lo: 0x8b, hi: 0x8b},
+ {value: 0x1b2f, lo: 0x8c, hi: 0x8c},
+ {value: 0x1b35, lo: 0x8d, hi: 0x8d},
+ {value: 0x1d53, lo: 0x8e, hi: 0x8e},
+ {value: 0x1b41, lo: 0x8f, hi: 0x8f},
+ {value: 0x1b0e, lo: 0xaa, hi: 0xaa},
+ {value: 0x1b11, lo: 0xab, hi: 0xab},
+ // Block 0x96, offset 0x364
+ {value: 0x0000, lo: 0x01},
+ {value: 0x1ad2, lo: 0x90, hi: 0x90},
+ // Block 0x97, offset 0x366
+ {value: 0x0028, lo: 0x09},
+ {value: 0x2a5e, lo: 0x80, hi: 0x80},
+ {value: 0x2a22, lo: 0x81, hi: 0x81},
+ {value: 0x2a2c, lo: 0x82, hi: 0x82},
+ {value: 0x2a40, lo: 0x83, hi: 0x84},
+ {value: 0x2a4a, lo: 0x85, hi: 0x86},
+ {value: 0x2a36, lo: 0x87, hi: 0x87},
+ {value: 0x2a54, lo: 0x88, hi: 0x88},
+ {value: 0x0be7, lo: 0x90, hi: 0x90},
+ {value: 0x095f, lo: 0x91, hi: 0x91},
+}
+
+// recompMap: 7520 bytes (entries only)
+var recompMap = map[uint32]rune{
+ 0x00410300: 0x00C0,
+ 0x00410301: 0x00C1,
+ 0x00410302: 0x00C2,
+ 0x00410303: 0x00C3,
+ 0x00410308: 0x00C4,
+ 0x0041030A: 0x00C5,
+ 0x00430327: 0x00C7,
+ 0x00450300: 0x00C8,
+ 0x00450301: 0x00C9,
+ 0x00450302: 0x00CA,
+ 0x00450308: 0x00CB,
+ 0x00490300: 0x00CC,
+ 0x00490301: 0x00CD,
+ 0x00490302: 0x00CE,
+ 0x00490308: 0x00CF,
+ 0x004E0303: 0x00D1,
+ 0x004F0300: 0x00D2,
+ 0x004F0301: 0x00D3,
+ 0x004F0302: 0x00D4,
+ 0x004F0303: 0x00D5,
+ 0x004F0308: 0x00D6,
+ 0x00550300: 0x00D9,
+ 0x00550301: 0x00DA,
+ 0x00550302: 0x00DB,
+ 0x00550308: 0x00DC,
+ 0x00590301: 0x00DD,
+ 0x00610300: 0x00E0,
+ 0x00610301: 0x00E1,
+ 0x00610302: 0x00E2,
+ 0x00610303: 0x00E3,
+ 0x00610308: 0x00E4,
+ 0x0061030A: 0x00E5,
+ 0x00630327: 0x00E7,
+ 0x00650300: 0x00E8,
+ 0x00650301: 0x00E9,
+ 0x00650302: 0x00EA,
+ 0x00650308: 0x00EB,
+ 0x00690300: 0x00EC,
+ 0x00690301: 0x00ED,
+ 0x00690302: 0x00EE,
+ 0x00690308: 0x00EF,
+ 0x006E0303: 0x00F1,
+ 0x006F0300: 0x00F2,
+ 0x006F0301: 0x00F3,
+ 0x006F0302: 0x00F4,
+ 0x006F0303: 0x00F5,
+ 0x006F0308: 0x00F6,
+ 0x00750300: 0x00F9,
+ 0x00750301: 0x00FA,
+ 0x00750302: 0x00FB,
+ 0x00750308: 0x00FC,
+ 0x00790301: 0x00FD,
+ 0x00790308: 0x00FF,
+ 0x00410304: 0x0100,
+ 0x00610304: 0x0101,
+ 0x00410306: 0x0102,
+ 0x00610306: 0x0103,
+ 0x00410328: 0x0104,
+ 0x00610328: 0x0105,
+ 0x00430301: 0x0106,
+ 0x00630301: 0x0107,
+ 0x00430302: 0x0108,
+ 0x00630302: 0x0109,
+ 0x00430307: 0x010A,
+ 0x00630307: 0x010B,
+ 0x0043030C: 0x010C,
+ 0x0063030C: 0x010D,
+ 0x0044030C: 0x010E,
+ 0x0064030C: 0x010F,
+ 0x00450304: 0x0112,
+ 0x00650304: 0x0113,
+ 0x00450306: 0x0114,
+ 0x00650306: 0x0115,
+ 0x00450307: 0x0116,
+ 0x00650307: 0x0117,
+ 0x00450328: 0x0118,
+ 0x00650328: 0x0119,
+ 0x0045030C: 0x011A,
+ 0x0065030C: 0x011B,
+ 0x00470302: 0x011C,
+ 0x00670302: 0x011D,
+ 0x00470306: 0x011E,
+ 0x00670306: 0x011F,
+ 0x00470307: 0x0120,
+ 0x00670307: 0x0121,
+ 0x00470327: 0x0122,
+ 0x00670327: 0x0123,
+ 0x00480302: 0x0124,
+ 0x00680302: 0x0125,
+ 0x00490303: 0x0128,
+ 0x00690303: 0x0129,
+ 0x00490304: 0x012A,
+ 0x00690304: 0x012B,
+ 0x00490306: 0x012C,
+ 0x00690306: 0x012D,
+ 0x00490328: 0x012E,
+ 0x00690328: 0x012F,
+ 0x00490307: 0x0130,
+ 0x004A0302: 0x0134,
+ 0x006A0302: 0x0135,
+ 0x004B0327: 0x0136,
+ 0x006B0327: 0x0137,
+ 0x004C0301: 0x0139,
+ 0x006C0301: 0x013A,
+ 0x004C0327: 0x013B,
+ 0x006C0327: 0x013C,
+ 0x004C030C: 0x013D,
+ 0x006C030C: 0x013E,
+ 0x004E0301: 0x0143,
+ 0x006E0301: 0x0144,
+ 0x004E0327: 0x0145,
+ 0x006E0327: 0x0146,
+ 0x004E030C: 0x0147,
+ 0x006E030C: 0x0148,
+ 0x004F0304: 0x014C,
+ 0x006F0304: 0x014D,
+ 0x004F0306: 0x014E,
+ 0x006F0306: 0x014F,
+ 0x004F030B: 0x0150,
+ 0x006F030B: 0x0151,
+ 0x00520301: 0x0154,
+ 0x00720301: 0x0155,
+ 0x00520327: 0x0156,
+ 0x00720327: 0x0157,
+ 0x0052030C: 0x0158,
+ 0x0072030C: 0x0159,
+ 0x00530301: 0x015A,
+ 0x00730301: 0x015B,
+ 0x00530302: 0x015C,
+ 0x00730302: 0x015D,
+ 0x00530327: 0x015E,
+ 0x00730327: 0x015F,
+ 0x0053030C: 0x0160,
+ 0x0073030C: 0x0161,
+ 0x00540327: 0x0162,
+ 0x00740327: 0x0163,
+ 0x0054030C: 0x0164,
+ 0x0074030C: 0x0165,
+ 0x00550303: 0x0168,
+ 0x00750303: 0x0169,
+ 0x00550304: 0x016A,
+ 0x00750304: 0x016B,
+ 0x00550306: 0x016C,
+ 0x00750306: 0x016D,
+ 0x0055030A: 0x016E,
+ 0x0075030A: 0x016F,
+ 0x0055030B: 0x0170,
+ 0x0075030B: 0x0171,
+ 0x00550328: 0x0172,
+ 0x00750328: 0x0173,
+ 0x00570302: 0x0174,
+ 0x00770302: 0x0175,
+ 0x00590302: 0x0176,
+ 0x00790302: 0x0177,
+ 0x00590308: 0x0178,
+ 0x005A0301: 0x0179,
+ 0x007A0301: 0x017A,
+ 0x005A0307: 0x017B,
+ 0x007A0307: 0x017C,
+ 0x005A030C: 0x017D,
+ 0x007A030C: 0x017E,
+ 0x004F031B: 0x01A0,
+ 0x006F031B: 0x01A1,
+ 0x0055031B: 0x01AF,
+ 0x0075031B: 0x01B0,
+ 0x0041030C: 0x01CD,
+ 0x0061030C: 0x01CE,
+ 0x0049030C: 0x01CF,
+ 0x0069030C: 0x01D0,
+ 0x004F030C: 0x01D1,
+ 0x006F030C: 0x01D2,
+ 0x0055030C: 0x01D3,
+ 0x0075030C: 0x01D4,
+ 0x00DC0304: 0x01D5,
+ 0x00FC0304: 0x01D6,
+ 0x00DC0301: 0x01D7,
+ 0x00FC0301: 0x01D8,
+ 0x00DC030C: 0x01D9,
+ 0x00FC030C: 0x01DA,
+ 0x00DC0300: 0x01DB,
+ 0x00FC0300: 0x01DC,
+ 0x00C40304: 0x01DE,
+ 0x00E40304: 0x01DF,
+ 0x02260304: 0x01E0,
+ 0x02270304: 0x01E1,
+ 0x00C60304: 0x01E2,
+ 0x00E60304: 0x01E3,
+ 0x0047030C: 0x01E6,
+ 0x0067030C: 0x01E7,
+ 0x004B030C: 0x01E8,
+ 0x006B030C: 0x01E9,
+ 0x004F0328: 0x01EA,
+ 0x006F0328: 0x01EB,
+ 0x01EA0304: 0x01EC,
+ 0x01EB0304: 0x01ED,
+ 0x01B7030C: 0x01EE,
+ 0x0292030C: 0x01EF,
+ 0x006A030C: 0x01F0,
+ 0x00470301: 0x01F4,
+ 0x00670301: 0x01F5,
+ 0x004E0300: 0x01F8,
+ 0x006E0300: 0x01F9,
+ 0x00C50301: 0x01FA,
+ 0x00E50301: 0x01FB,
+ 0x00C60301: 0x01FC,
+ 0x00E60301: 0x01FD,
+ 0x00D80301: 0x01FE,
+ 0x00F80301: 0x01FF,
+ 0x0041030F: 0x0200,
+ 0x0061030F: 0x0201,
+ 0x00410311: 0x0202,
+ 0x00610311: 0x0203,
+ 0x0045030F: 0x0204,
+ 0x0065030F: 0x0205,
+ 0x00450311: 0x0206,
+ 0x00650311: 0x0207,
+ 0x0049030F: 0x0208,
+ 0x0069030F: 0x0209,
+ 0x00490311: 0x020A,
+ 0x00690311: 0x020B,
+ 0x004F030F: 0x020C,
+ 0x006F030F: 0x020D,
+ 0x004F0311: 0x020E,
+ 0x006F0311: 0x020F,
+ 0x0052030F: 0x0210,
+ 0x0072030F: 0x0211,
+ 0x00520311: 0x0212,
+ 0x00720311: 0x0213,
+ 0x0055030F: 0x0214,
+ 0x0075030F: 0x0215,
+ 0x00550311: 0x0216,
+ 0x00750311: 0x0217,
+ 0x00530326: 0x0218,
+ 0x00730326: 0x0219,
+ 0x00540326: 0x021A,
+ 0x00740326: 0x021B,
+ 0x0048030C: 0x021E,
+ 0x0068030C: 0x021F,
+ 0x00410307: 0x0226,
+ 0x00610307: 0x0227,
+ 0x00450327: 0x0228,
+ 0x00650327: 0x0229,
+ 0x00D60304: 0x022A,
+ 0x00F60304: 0x022B,
+ 0x00D50304: 0x022C,
+ 0x00F50304: 0x022D,
+ 0x004F0307: 0x022E,
+ 0x006F0307: 0x022F,
+ 0x022E0304: 0x0230,
+ 0x022F0304: 0x0231,
+ 0x00590304: 0x0232,
+ 0x00790304: 0x0233,
+ 0x00A80301: 0x0385,
+ 0x03910301: 0x0386,
+ 0x03950301: 0x0388,
+ 0x03970301: 0x0389,
+ 0x03990301: 0x038A,
+ 0x039F0301: 0x038C,
+ 0x03A50301: 0x038E,
+ 0x03A90301: 0x038F,
+ 0x03CA0301: 0x0390,
+ 0x03990308: 0x03AA,
+ 0x03A50308: 0x03AB,
+ 0x03B10301: 0x03AC,
+ 0x03B50301: 0x03AD,
+ 0x03B70301: 0x03AE,
+ 0x03B90301: 0x03AF,
+ 0x03CB0301: 0x03B0,
+ 0x03B90308: 0x03CA,
+ 0x03C50308: 0x03CB,
+ 0x03BF0301: 0x03CC,
+ 0x03C50301: 0x03CD,
+ 0x03C90301: 0x03CE,
+ 0x03D20301: 0x03D3,
+ 0x03D20308: 0x03D4,
+ 0x04150300: 0x0400,
+ 0x04150308: 0x0401,
+ 0x04130301: 0x0403,
+ 0x04060308: 0x0407,
+ 0x041A0301: 0x040C,
+ 0x04180300: 0x040D,
+ 0x04230306: 0x040E,
+ 0x04180306: 0x0419,
+ 0x04380306: 0x0439,
+ 0x04350300: 0x0450,
+ 0x04350308: 0x0451,
+ 0x04330301: 0x0453,
+ 0x04560308: 0x0457,
+ 0x043A0301: 0x045C,
+ 0x04380300: 0x045D,
+ 0x04430306: 0x045E,
+ 0x0474030F: 0x0476,
+ 0x0475030F: 0x0477,
+ 0x04160306: 0x04C1,
+ 0x04360306: 0x04C2,
+ 0x04100306: 0x04D0,
+ 0x04300306: 0x04D1,
+ 0x04100308: 0x04D2,
+ 0x04300308: 0x04D3,
+ 0x04150306: 0x04D6,
+ 0x04350306: 0x04D7,
+ 0x04D80308: 0x04DA,
+ 0x04D90308: 0x04DB,
+ 0x04160308: 0x04DC,
+ 0x04360308: 0x04DD,
+ 0x04170308: 0x04DE,
+ 0x04370308: 0x04DF,
+ 0x04180304: 0x04E2,
+ 0x04380304: 0x04E3,
+ 0x04180308: 0x04E4,
+ 0x04380308: 0x04E5,
+ 0x041E0308: 0x04E6,
+ 0x043E0308: 0x04E7,
+ 0x04E80308: 0x04EA,
+ 0x04E90308: 0x04EB,
+ 0x042D0308: 0x04EC,
+ 0x044D0308: 0x04ED,
+ 0x04230304: 0x04EE,
+ 0x04430304: 0x04EF,
+ 0x04230308: 0x04F0,
+ 0x04430308: 0x04F1,
+ 0x0423030B: 0x04F2,
+ 0x0443030B: 0x04F3,
+ 0x04270308: 0x04F4,
+ 0x04470308: 0x04F5,
+ 0x042B0308: 0x04F8,
+ 0x044B0308: 0x04F9,
+ 0x06270653: 0x0622,
+ 0x06270654: 0x0623,
+ 0x06480654: 0x0624,
+ 0x06270655: 0x0625,
+ 0x064A0654: 0x0626,
+ 0x06D50654: 0x06C0,
+ 0x06C10654: 0x06C2,
+ 0x06D20654: 0x06D3,
+ 0x0928093C: 0x0929,
+ 0x0930093C: 0x0931,
+ 0x0933093C: 0x0934,
+ 0x09C709BE: 0x09CB,
+ 0x09C709D7: 0x09CC,
+ 0x0B470B56: 0x0B48,
+ 0x0B470B3E: 0x0B4B,
+ 0x0B470B57: 0x0B4C,
+ 0x0B920BD7: 0x0B94,
+ 0x0BC60BBE: 0x0BCA,
+ 0x0BC70BBE: 0x0BCB,
+ 0x0BC60BD7: 0x0BCC,
+ 0x0C460C56: 0x0C48,
+ 0x0CBF0CD5: 0x0CC0,
+ 0x0CC60CD5: 0x0CC7,
+ 0x0CC60CD6: 0x0CC8,
+ 0x0CC60CC2: 0x0CCA,
+ 0x0CCA0CD5: 0x0CCB,
+ 0x0D460D3E: 0x0D4A,
+ 0x0D470D3E: 0x0D4B,
+ 0x0D460D57: 0x0D4C,
+ 0x0DD90DCA: 0x0DDA,
+ 0x0DD90DCF: 0x0DDC,
+ 0x0DDC0DCA: 0x0DDD,
+ 0x0DD90DDF: 0x0DDE,
+ 0x1025102E: 0x1026,
+ 0x1B051B35: 0x1B06,
+ 0x1B071B35: 0x1B08,
+ 0x1B091B35: 0x1B0A,
+ 0x1B0B1B35: 0x1B0C,
+ 0x1B0D1B35: 0x1B0E,
+ 0x1B111B35: 0x1B12,
+ 0x1B3A1B35: 0x1B3B,
+ 0x1B3C1B35: 0x1B3D,
+ 0x1B3E1B35: 0x1B40,
+ 0x1B3F1B35: 0x1B41,
+ 0x1B421B35: 0x1B43,
+ 0x00410325: 0x1E00,
+ 0x00610325: 0x1E01,
+ 0x00420307: 0x1E02,
+ 0x00620307: 0x1E03,
+ 0x00420323: 0x1E04,
+ 0x00620323: 0x1E05,
+ 0x00420331: 0x1E06,
+ 0x00620331: 0x1E07,
+ 0x00C70301: 0x1E08,
+ 0x00E70301: 0x1E09,
+ 0x00440307: 0x1E0A,
+ 0x00640307: 0x1E0B,
+ 0x00440323: 0x1E0C,
+ 0x00640323: 0x1E0D,
+ 0x00440331: 0x1E0E,
+ 0x00640331: 0x1E0F,
+ 0x00440327: 0x1E10,
+ 0x00640327: 0x1E11,
+ 0x0044032D: 0x1E12,
+ 0x0064032D: 0x1E13,
+ 0x01120300: 0x1E14,
+ 0x01130300: 0x1E15,
+ 0x01120301: 0x1E16,
+ 0x01130301: 0x1E17,
+ 0x0045032D: 0x1E18,
+ 0x0065032D: 0x1E19,
+ 0x00450330: 0x1E1A,
+ 0x00650330: 0x1E1B,
+ 0x02280306: 0x1E1C,
+ 0x02290306: 0x1E1D,
+ 0x00460307: 0x1E1E,
+ 0x00660307: 0x1E1F,
+ 0x00470304: 0x1E20,
+ 0x00670304: 0x1E21,
+ 0x00480307: 0x1E22,
+ 0x00680307: 0x1E23,
+ 0x00480323: 0x1E24,
+ 0x00680323: 0x1E25,
+ 0x00480308: 0x1E26,
+ 0x00680308: 0x1E27,
+ 0x00480327: 0x1E28,
+ 0x00680327: 0x1E29,
+ 0x0048032E: 0x1E2A,
+ 0x0068032E: 0x1E2B,
+ 0x00490330: 0x1E2C,
+ 0x00690330: 0x1E2D,
+ 0x00CF0301: 0x1E2E,
+ 0x00EF0301: 0x1E2F,
+ 0x004B0301: 0x1E30,
+ 0x006B0301: 0x1E31,
+ 0x004B0323: 0x1E32,
+ 0x006B0323: 0x1E33,
+ 0x004B0331: 0x1E34,
+ 0x006B0331: 0x1E35,
+ 0x004C0323: 0x1E36,
+ 0x006C0323: 0x1E37,
+ 0x1E360304: 0x1E38,
+ 0x1E370304: 0x1E39,
+ 0x004C0331: 0x1E3A,
+ 0x006C0331: 0x1E3B,
+ 0x004C032D: 0x1E3C,
+ 0x006C032D: 0x1E3D,
+ 0x004D0301: 0x1E3E,
+ 0x006D0301: 0x1E3F,
+ 0x004D0307: 0x1E40,
+ 0x006D0307: 0x1E41,
+ 0x004D0323: 0x1E42,
+ 0x006D0323: 0x1E43,
+ 0x004E0307: 0x1E44,
+ 0x006E0307: 0x1E45,
+ 0x004E0323: 0x1E46,
+ 0x006E0323: 0x1E47,
+ 0x004E0331: 0x1E48,
+ 0x006E0331: 0x1E49,
+ 0x004E032D: 0x1E4A,
+ 0x006E032D: 0x1E4B,
+ 0x00D50301: 0x1E4C,
+ 0x00F50301: 0x1E4D,
+ 0x00D50308: 0x1E4E,
+ 0x00F50308: 0x1E4F,
+ 0x014C0300: 0x1E50,
+ 0x014D0300: 0x1E51,
+ 0x014C0301: 0x1E52,
+ 0x014D0301: 0x1E53,
+ 0x00500301: 0x1E54,
+ 0x00700301: 0x1E55,
+ 0x00500307: 0x1E56,
+ 0x00700307: 0x1E57,
+ 0x00520307: 0x1E58,
+ 0x00720307: 0x1E59,
+ 0x00520323: 0x1E5A,
+ 0x00720323: 0x1E5B,
+ 0x1E5A0304: 0x1E5C,
+ 0x1E5B0304: 0x1E5D,
+ 0x00520331: 0x1E5E,
+ 0x00720331: 0x1E5F,
+ 0x00530307: 0x1E60,
+ 0x00730307: 0x1E61,
+ 0x00530323: 0x1E62,
+ 0x00730323: 0x1E63,
+ 0x015A0307: 0x1E64,
+ 0x015B0307: 0x1E65,
+ 0x01600307: 0x1E66,
+ 0x01610307: 0x1E67,
+ 0x1E620307: 0x1E68,
+ 0x1E630307: 0x1E69,
+ 0x00540307: 0x1E6A,
+ 0x00740307: 0x1E6B,
+ 0x00540323: 0x1E6C,
+ 0x00740323: 0x1E6D,
+ 0x00540331: 0x1E6E,
+ 0x00740331: 0x1E6F,
+ 0x0054032D: 0x1E70,
+ 0x0074032D: 0x1E71,
+ 0x00550324: 0x1E72,
+ 0x00750324: 0x1E73,
+ 0x00550330: 0x1E74,
+ 0x00750330: 0x1E75,
+ 0x0055032D: 0x1E76,
+ 0x0075032D: 0x1E77,
+ 0x01680301: 0x1E78,
+ 0x01690301: 0x1E79,
+ 0x016A0308: 0x1E7A,
+ 0x016B0308: 0x1E7B,
+ 0x00560303: 0x1E7C,
+ 0x00760303: 0x1E7D,
+ 0x00560323: 0x1E7E,
+ 0x00760323: 0x1E7F,
+ 0x00570300: 0x1E80,
+ 0x00770300: 0x1E81,
+ 0x00570301: 0x1E82,
+ 0x00770301: 0x1E83,
+ 0x00570308: 0x1E84,
+ 0x00770308: 0x1E85,
+ 0x00570307: 0x1E86,
+ 0x00770307: 0x1E87,
+ 0x00570323: 0x1E88,
+ 0x00770323: 0x1E89,
+ 0x00580307: 0x1E8A,
+ 0x00780307: 0x1E8B,
+ 0x00580308: 0x1E8C,
+ 0x00780308: 0x1E8D,
+ 0x00590307: 0x1E8E,
+ 0x00790307: 0x1E8F,
+ 0x005A0302: 0x1E90,
+ 0x007A0302: 0x1E91,
+ 0x005A0323: 0x1E92,
+ 0x007A0323: 0x1E93,
+ 0x005A0331: 0x1E94,
+ 0x007A0331: 0x1E95,
+ 0x00680331: 0x1E96,
+ 0x00740308: 0x1E97,
+ 0x0077030A: 0x1E98,
+ 0x0079030A: 0x1E99,
+ 0x017F0307: 0x1E9B,
+ 0x00410323: 0x1EA0,
+ 0x00610323: 0x1EA1,
+ 0x00410309: 0x1EA2,
+ 0x00610309: 0x1EA3,
+ 0x00C20301: 0x1EA4,
+ 0x00E20301: 0x1EA5,
+ 0x00C20300: 0x1EA6,
+ 0x00E20300: 0x1EA7,
+ 0x00C20309: 0x1EA8,
+ 0x00E20309: 0x1EA9,
+ 0x00C20303: 0x1EAA,
+ 0x00E20303: 0x1EAB,
+ 0x1EA00302: 0x1EAC,
+ 0x1EA10302: 0x1EAD,
+ 0x01020301: 0x1EAE,
+ 0x01030301: 0x1EAF,
+ 0x01020300: 0x1EB0,
+ 0x01030300: 0x1EB1,
+ 0x01020309: 0x1EB2,
+ 0x01030309: 0x1EB3,
+ 0x01020303: 0x1EB4,
+ 0x01030303: 0x1EB5,
+ 0x1EA00306: 0x1EB6,
+ 0x1EA10306: 0x1EB7,
+ 0x00450323: 0x1EB8,
+ 0x00650323: 0x1EB9,
+ 0x00450309: 0x1EBA,
+ 0x00650309: 0x1EBB,
+ 0x00450303: 0x1EBC,
+ 0x00650303: 0x1EBD,
+ 0x00CA0301: 0x1EBE,
+ 0x00EA0301: 0x1EBF,
+ 0x00CA0300: 0x1EC0,
+ 0x00EA0300: 0x1EC1,
+ 0x00CA0309: 0x1EC2,
+ 0x00EA0309: 0x1EC3,
+ 0x00CA0303: 0x1EC4,
+ 0x00EA0303: 0x1EC5,
+ 0x1EB80302: 0x1EC6,
+ 0x1EB90302: 0x1EC7,
+ 0x00490309: 0x1EC8,
+ 0x00690309: 0x1EC9,
+ 0x00490323: 0x1ECA,
+ 0x00690323: 0x1ECB,
+ 0x004F0323: 0x1ECC,
+ 0x006F0323: 0x1ECD,
+ 0x004F0309: 0x1ECE,
+ 0x006F0309: 0x1ECF,
+ 0x00D40301: 0x1ED0,
+ 0x00F40301: 0x1ED1,
+ 0x00D40300: 0x1ED2,
+ 0x00F40300: 0x1ED3,
+ 0x00D40309: 0x1ED4,
+ 0x00F40309: 0x1ED5,
+ 0x00D40303: 0x1ED6,
+ 0x00F40303: 0x1ED7,
+ 0x1ECC0302: 0x1ED8,
+ 0x1ECD0302: 0x1ED9,
+ 0x01A00301: 0x1EDA,
+ 0x01A10301: 0x1EDB,
+ 0x01A00300: 0x1EDC,
+ 0x01A10300: 0x1EDD,
+ 0x01A00309: 0x1EDE,
+ 0x01A10309: 0x1EDF,
+ 0x01A00303: 0x1EE0,
+ 0x01A10303: 0x1EE1,
+ 0x01A00323: 0x1EE2,
+ 0x01A10323: 0x1EE3,
+ 0x00550323: 0x1EE4,
+ 0x00750323: 0x1EE5,
+ 0x00550309: 0x1EE6,
+ 0x00750309: 0x1EE7,
+ 0x01AF0301: 0x1EE8,
+ 0x01B00301: 0x1EE9,
+ 0x01AF0300: 0x1EEA,
+ 0x01B00300: 0x1EEB,
+ 0x01AF0309: 0x1EEC,
+ 0x01B00309: 0x1EED,
+ 0x01AF0303: 0x1EEE,
+ 0x01B00303: 0x1EEF,
+ 0x01AF0323: 0x1EF0,
+ 0x01B00323: 0x1EF1,
+ 0x00590300: 0x1EF2,
+ 0x00790300: 0x1EF3,
+ 0x00590323: 0x1EF4,
+ 0x00790323: 0x1EF5,
+ 0x00590309: 0x1EF6,
+ 0x00790309: 0x1EF7,
+ 0x00590303: 0x1EF8,
+ 0x00790303: 0x1EF9,
+ 0x03B10313: 0x1F00,
+ 0x03B10314: 0x1F01,
+ 0x1F000300: 0x1F02,
+ 0x1F010300: 0x1F03,
+ 0x1F000301: 0x1F04,
+ 0x1F010301: 0x1F05,
+ 0x1F000342: 0x1F06,
+ 0x1F010342: 0x1F07,
+ 0x03910313: 0x1F08,
+ 0x03910314: 0x1F09,
+ 0x1F080300: 0x1F0A,
+ 0x1F090300: 0x1F0B,
+ 0x1F080301: 0x1F0C,
+ 0x1F090301: 0x1F0D,
+ 0x1F080342: 0x1F0E,
+ 0x1F090342: 0x1F0F,
+ 0x03B50313: 0x1F10,
+ 0x03B50314: 0x1F11,
+ 0x1F100300: 0x1F12,
+ 0x1F110300: 0x1F13,
+ 0x1F100301: 0x1F14,
+ 0x1F110301: 0x1F15,
+ 0x03950313: 0x1F18,
+ 0x03950314: 0x1F19,
+ 0x1F180300: 0x1F1A,
+ 0x1F190300: 0x1F1B,
+ 0x1F180301: 0x1F1C,
+ 0x1F190301: 0x1F1D,
+ 0x03B70313: 0x1F20,
+ 0x03B70314: 0x1F21,
+ 0x1F200300: 0x1F22,
+ 0x1F210300: 0x1F23,
+ 0x1F200301: 0x1F24,
+ 0x1F210301: 0x1F25,
+ 0x1F200342: 0x1F26,
+ 0x1F210342: 0x1F27,
+ 0x03970313: 0x1F28,
+ 0x03970314: 0x1F29,
+ 0x1F280300: 0x1F2A,
+ 0x1F290300: 0x1F2B,
+ 0x1F280301: 0x1F2C,
+ 0x1F290301: 0x1F2D,
+ 0x1F280342: 0x1F2E,
+ 0x1F290342: 0x1F2F,
+ 0x03B90313: 0x1F30,
+ 0x03B90314: 0x1F31,
+ 0x1F300300: 0x1F32,
+ 0x1F310300: 0x1F33,
+ 0x1F300301: 0x1F34,
+ 0x1F310301: 0x1F35,
+ 0x1F300342: 0x1F36,
+ 0x1F310342: 0x1F37,
+ 0x03990313: 0x1F38,
+ 0x03990314: 0x1F39,
+ 0x1F380300: 0x1F3A,
+ 0x1F390300: 0x1F3B,
+ 0x1F380301: 0x1F3C,
+ 0x1F390301: 0x1F3D,
+ 0x1F380342: 0x1F3E,
+ 0x1F390342: 0x1F3F,
+ 0x03BF0313: 0x1F40,
+ 0x03BF0314: 0x1F41,
+ 0x1F400300: 0x1F42,
+ 0x1F410300: 0x1F43,
+ 0x1F400301: 0x1F44,
+ 0x1F410301: 0x1F45,
+ 0x039F0313: 0x1F48,
+ 0x039F0314: 0x1F49,
+ 0x1F480300: 0x1F4A,
+ 0x1F490300: 0x1F4B,
+ 0x1F480301: 0x1F4C,
+ 0x1F490301: 0x1F4D,
+ 0x03C50313: 0x1F50,
+ 0x03C50314: 0x1F51,
+ 0x1F500300: 0x1F52,
+ 0x1F510300: 0x1F53,
+ 0x1F500301: 0x1F54,
+ 0x1F510301: 0x1F55,
+ 0x1F500342: 0x1F56,
+ 0x1F510342: 0x1F57,
+ 0x03A50314: 0x1F59,
+ 0x1F590300: 0x1F5B,
+ 0x1F590301: 0x1F5D,
+ 0x1F590342: 0x1F5F,
+ 0x03C90313: 0x1F60,
+ 0x03C90314: 0x1F61,
+ 0x1F600300: 0x1F62,
+ 0x1F610300: 0x1F63,
+ 0x1F600301: 0x1F64,
+ 0x1F610301: 0x1F65,
+ 0x1F600342: 0x1F66,
+ 0x1F610342: 0x1F67,
+ 0x03A90313: 0x1F68,
+ 0x03A90314: 0x1F69,
+ 0x1F680300: 0x1F6A,
+ 0x1F690300: 0x1F6B,
+ 0x1F680301: 0x1F6C,
+ 0x1F690301: 0x1F6D,
+ 0x1F680342: 0x1F6E,
+ 0x1F690342: 0x1F6F,
+ 0x03B10300: 0x1F70,
+ 0x03B50300: 0x1F72,
+ 0x03B70300: 0x1F74,
+ 0x03B90300: 0x1F76,
+ 0x03BF0300: 0x1F78,
+ 0x03C50300: 0x1F7A,
+ 0x03C90300: 0x1F7C,
+ 0x1F000345: 0x1F80,
+ 0x1F010345: 0x1F81,
+ 0x1F020345: 0x1F82,
+ 0x1F030345: 0x1F83,
+ 0x1F040345: 0x1F84,
+ 0x1F050345: 0x1F85,
+ 0x1F060345: 0x1F86,
+ 0x1F070345: 0x1F87,
+ 0x1F080345: 0x1F88,
+ 0x1F090345: 0x1F89,
+ 0x1F0A0345: 0x1F8A,
+ 0x1F0B0345: 0x1F8B,
+ 0x1F0C0345: 0x1F8C,
+ 0x1F0D0345: 0x1F8D,
+ 0x1F0E0345: 0x1F8E,
+ 0x1F0F0345: 0x1F8F,
+ 0x1F200345: 0x1F90,
+ 0x1F210345: 0x1F91,
+ 0x1F220345: 0x1F92,
+ 0x1F230345: 0x1F93,
+ 0x1F240345: 0x1F94,
+ 0x1F250345: 0x1F95,
+ 0x1F260345: 0x1F96,
+ 0x1F270345: 0x1F97,
+ 0x1F280345: 0x1F98,
+ 0x1F290345: 0x1F99,
+ 0x1F2A0345: 0x1F9A,
+ 0x1F2B0345: 0x1F9B,
+ 0x1F2C0345: 0x1F9C,
+ 0x1F2D0345: 0x1F9D,
+ 0x1F2E0345: 0x1F9E,
+ 0x1F2F0345: 0x1F9F,
+ 0x1F600345: 0x1FA0,
+ 0x1F610345: 0x1FA1,
+ 0x1F620345: 0x1FA2,
+ 0x1F630345: 0x1FA3,
+ 0x1F640345: 0x1FA4,
+ 0x1F650345: 0x1FA5,
+ 0x1F660345: 0x1FA6,
+ 0x1F670345: 0x1FA7,
+ 0x1F680345: 0x1FA8,
+ 0x1F690345: 0x1FA9,
+ 0x1F6A0345: 0x1FAA,
+ 0x1F6B0345: 0x1FAB,
+ 0x1F6C0345: 0x1FAC,
+ 0x1F6D0345: 0x1FAD,
+ 0x1F6E0345: 0x1FAE,
+ 0x1F6F0345: 0x1FAF,
+ 0x03B10306: 0x1FB0,
+ 0x03B10304: 0x1FB1,
+ 0x1F700345: 0x1FB2,
+ 0x03B10345: 0x1FB3,
+ 0x03AC0345: 0x1FB4,
+ 0x03B10342: 0x1FB6,
+ 0x1FB60345: 0x1FB7,
+ 0x03910306: 0x1FB8,
+ 0x03910304: 0x1FB9,
+ 0x03910300: 0x1FBA,
+ 0x03910345: 0x1FBC,
+ 0x00A80342: 0x1FC1,
+ 0x1F740345: 0x1FC2,
+ 0x03B70345: 0x1FC3,
+ 0x03AE0345: 0x1FC4,
+ 0x03B70342: 0x1FC6,
+ 0x1FC60345: 0x1FC7,
+ 0x03950300: 0x1FC8,
+ 0x03970300: 0x1FCA,
+ 0x03970345: 0x1FCC,
+ 0x1FBF0300: 0x1FCD,
+ 0x1FBF0301: 0x1FCE,
+ 0x1FBF0342: 0x1FCF,
+ 0x03B90306: 0x1FD0,
+ 0x03B90304: 0x1FD1,
+ 0x03CA0300: 0x1FD2,
+ 0x03B90342: 0x1FD6,
+ 0x03CA0342: 0x1FD7,
+ 0x03990306: 0x1FD8,
+ 0x03990304: 0x1FD9,
+ 0x03990300: 0x1FDA,
+ 0x1FFE0300: 0x1FDD,
+ 0x1FFE0301: 0x1FDE,
+ 0x1FFE0342: 0x1FDF,
+ 0x03C50306: 0x1FE0,
+ 0x03C50304: 0x1FE1,
+ 0x03CB0300: 0x1FE2,
+ 0x03C10313: 0x1FE4,
+ 0x03C10314: 0x1FE5,
+ 0x03C50342: 0x1FE6,
+ 0x03CB0342: 0x1FE7,
+ 0x03A50306: 0x1FE8,
+ 0x03A50304: 0x1FE9,
+ 0x03A50300: 0x1FEA,
+ 0x03A10314: 0x1FEC,
+ 0x00A80300: 0x1FED,
+ 0x1F7C0345: 0x1FF2,
+ 0x03C90345: 0x1FF3,
+ 0x03CE0345: 0x1FF4,
+ 0x03C90342: 0x1FF6,
+ 0x1FF60345: 0x1FF7,
+ 0x039F0300: 0x1FF8,
+ 0x03A90300: 0x1FFA,
+ 0x03A90345: 0x1FFC,
+ 0x21900338: 0x219A,
+ 0x21920338: 0x219B,
+ 0x21940338: 0x21AE,
+ 0x21D00338: 0x21CD,
+ 0x21D40338: 0x21CE,
+ 0x21D20338: 0x21CF,
+ 0x22030338: 0x2204,
+ 0x22080338: 0x2209,
+ 0x220B0338: 0x220C,
+ 0x22230338: 0x2224,
+ 0x22250338: 0x2226,
+ 0x223C0338: 0x2241,
+ 0x22430338: 0x2244,
+ 0x22450338: 0x2247,
+ 0x22480338: 0x2249,
+ 0x003D0338: 0x2260,
+ 0x22610338: 0x2262,
+ 0x224D0338: 0x226D,
+ 0x003C0338: 0x226E,
+ 0x003E0338: 0x226F,
+ 0x22640338: 0x2270,
+ 0x22650338: 0x2271,
+ 0x22720338: 0x2274,
+ 0x22730338: 0x2275,
+ 0x22760338: 0x2278,
+ 0x22770338: 0x2279,
+ 0x227A0338: 0x2280,
+ 0x227B0338: 0x2281,
+ 0x22820338: 0x2284,
+ 0x22830338: 0x2285,
+ 0x22860338: 0x2288,
+ 0x22870338: 0x2289,
+ 0x22A20338: 0x22AC,
+ 0x22A80338: 0x22AD,
+ 0x22A90338: 0x22AE,
+ 0x22AB0338: 0x22AF,
+ 0x227C0338: 0x22E0,
+ 0x227D0338: 0x22E1,
+ 0x22910338: 0x22E2,
+ 0x22920338: 0x22E3,
+ 0x22B20338: 0x22EA,
+ 0x22B30338: 0x22EB,
+ 0x22B40338: 0x22EC,
+ 0x22B50338: 0x22ED,
+ 0x304B3099: 0x304C,
+ 0x304D3099: 0x304E,
+ 0x304F3099: 0x3050,
+ 0x30513099: 0x3052,
+ 0x30533099: 0x3054,
+ 0x30553099: 0x3056,
+ 0x30573099: 0x3058,
+ 0x30593099: 0x305A,
+ 0x305B3099: 0x305C,
+ 0x305D3099: 0x305E,
+ 0x305F3099: 0x3060,
+ 0x30613099: 0x3062,
+ 0x30643099: 0x3065,
+ 0x30663099: 0x3067,
+ 0x30683099: 0x3069,
+ 0x306F3099: 0x3070,
+ 0x306F309A: 0x3071,
+ 0x30723099: 0x3073,
+ 0x3072309A: 0x3074,
+ 0x30753099: 0x3076,
+ 0x3075309A: 0x3077,
+ 0x30783099: 0x3079,
+ 0x3078309A: 0x307A,
+ 0x307B3099: 0x307C,
+ 0x307B309A: 0x307D,
+ 0x30463099: 0x3094,
+ 0x309D3099: 0x309E,
+ 0x30AB3099: 0x30AC,
+ 0x30AD3099: 0x30AE,
+ 0x30AF3099: 0x30B0,
+ 0x30B13099: 0x30B2,
+ 0x30B33099: 0x30B4,
+ 0x30B53099: 0x30B6,
+ 0x30B73099: 0x30B8,
+ 0x30B93099: 0x30BA,
+ 0x30BB3099: 0x30BC,
+ 0x30BD3099: 0x30BE,
+ 0x30BF3099: 0x30C0,
+ 0x30C13099: 0x30C2,
+ 0x30C43099: 0x30C5,
+ 0x30C63099: 0x30C7,
+ 0x30C83099: 0x30C9,
+ 0x30CF3099: 0x30D0,
+ 0x30CF309A: 0x30D1,
+ 0x30D23099: 0x30D3,
+ 0x30D2309A: 0x30D4,
+ 0x30D53099: 0x30D6,
+ 0x30D5309A: 0x30D7,
+ 0x30D83099: 0x30D9,
+ 0x30D8309A: 0x30DA,
+ 0x30DB3099: 0x30DC,
+ 0x30DB309A: 0x30DD,
+ 0x30A63099: 0x30F4,
+ 0x30EF3099: 0x30F7,
+ 0x30F03099: 0x30F8,
+ 0x30F13099: 0x30F9,
+ 0x30F23099: 0x30FA,
+ 0x30FD3099: 0x30FE,
+ 0x109910BA: 0x1109A,
+ 0x109B10BA: 0x1109C,
+ 0x10A510BA: 0x110AB,
+ 0x11311127: 0x1112E,
+ 0x11321127: 0x1112F,
+ 0x1347133E: 0x1134B,
+ 0x13471357: 0x1134C,
+ 0x14B914BA: 0x114BB,
+ 0x14B914B0: 0x114BC,
+ 0x14B914BD: 0x114BE,
+ 0x15B815AF: 0x115BA,
+ 0x15B915AF: 0x115BB,
+}
+
+// Total size of tables: 52KB (53487 bytes)
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/transform.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/transform.go
new file mode 100644
index 0000000..2b70bbf
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/transform.go
@@ -0,0 +1,88 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package norm
+
+import (
+ "unicode/utf8"
+
+ "code.google.com/p/go.text/transform"
+)
+
+// Reset implements the Reset method of the transform.Transformer interface.
+// Form carries no state between calls, so there is nothing to clear.
+func (Form) Reset() {}
+
+// Transform implements the Transform method of the transform.Transformer
+// interface. It may need to write segments of up to MaxSegmentSize at once.
+// Users should either catch ErrShortDst and allow dst to grow or have dst be at
+// least of size MaxTransformChunkSize to be guaranteed of progress.
+func (f Form) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+	n := 0
+	// Cap the maximum number of src bytes to check.
+	b := src
+	eof := atEOF
+	if ns := len(dst); ns < len(b) {
+		// dst cannot hold all of src: inspect only what fits and report
+		// ErrShortDst so the caller can grow dst and call again.
+		err = transform.ErrShortDst
+		eof = false
+		b = b[:ns]
+	}
+	i, ok := formTable[f].quickSpan(inputBytes(b), n, len(b), eof)
+	// Fast path: bytes up to i are already in normal form and are copied
+	// through verbatim.
+	n += copy(dst[n:], b[n:i])
+	if !ok {
+		// Slow path: the remainder actually needs normalization.
+		nDst, nSrc, err = f.transform(dst[n:], src[n:], atEOF)
+		return nDst + n, nSrc + n, err
+	}
+	if n < len(src) && !atEOF {
+		// More input may still arrive that combines with the tail.
+		err = transform.ErrShortSrc
+	}
+	return n, n, err
+}
+
+// flushTransform copies the reorder buffer's pending runes into rb.out. It
+// reports false when rb.out may be too small for the segment (ErrShortDst).
+func flushTransform(rb *reorderBuffer) bool {
+	// Write out (must fully fit in dst, or else it is a ErrShortDst).
+	if len(rb.out) < rb.nrune*utf8.UTFMax {
+		return false
+	}
+	rb.out = rb.out[rb.flushCopy(rb.out):]
+	return true
+}
+
+// errs translates the negated status codes returned by decomposeSegment
+// (see Form.transform, which indexes with errs[-end]) into transform errors.
+var errs = []error{nil, transform.ErrShortDst, transform.ErrShortSrc}
+
+// transform implements the transform.Transformer interface. It is only called
+// when quickSpan does not pass for a given string.
+func (f Form) transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+	// TODO: get rid of reorderBuffer. See CL 23460044.
+	rb := reorderBuffer{}
+	rb.init(f, src)
+	for {
+		// Load segment into reorder buffer.
+		rb.setFlusher(dst[nDst:], flushTransform)
+		end := decomposeSegment(&rb, nSrc, atEOF)
+		if end < 0 {
+			// Negative values encode ErrShortDst/ErrShortSrc; translate
+			// via the errs table.
+			return nDst, nSrc, errs[-end]
+		}
+		nDst = len(dst) - len(rb.out)
+		nSrc = end
+
+		// Next quickSpan.
+		end = rb.nsrc
+		eof := atEOF
+		if n := nSrc + len(dst) - nDst; n < end {
+			// Remaining dst space bounds how much more src may be inspected.
+			err = transform.ErrShortDst
+			end = n
+			eof = false
+		}
+		end, ok := rb.f.quickSpan(rb.src, nSrc, end, eof)
+		n := copy(dst[nDst:], rb.src.bytes[nSrc:end])
+		nSrc += n
+		nDst += n
+		if ok {
+			if n < rb.nsrc && !atEOF {
+				// NOTE(review): signals that unconsumed input remains which
+				// may combine with later bytes — confirm against upstream.
+				err = transform.ErrShortSrc
+			}
+			return nDst, nSrc, err
+		}
+	}
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/trie.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/trie.go
new file mode 100644
index 0000000..423386b
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/trie.go
@@ -0,0 +1,54 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package norm
+
+// valueRange is one entry of a sparse block: the contiguous byte range
+// [lo, hi] maps to values starting at value. The first entry of each block
+// is a header whose fields are overloaded as noted below.
+type valueRange struct {
+	value uint16 // header: value:stride
+	lo, hi byte  // header: lo:n
+}
+
+// sparseBlocks holds a trie's sparse data: values is a flat list of
+// valueRange entries and offset indexes each block's header within it.
+type sparseBlocks struct {
+	values []valueRange
+	offset []uint16
+}
+
+// nfcSparse and nfkcSparse are the sparse lookup tables for the NFC and
+// NFKC forms, backed by the generated table data.
+var nfcSparse = sparseBlocks{
+	values: nfcSparseValues[:],
+	offset: nfcSparseOffset[:],
+}
+
+var nfkcSparse = sparseBlocks{
+	values: nfkcSparseValues[:],
+	offset: nfkcSparseOffset[:],
+}
+
+var (
+	nfcData  = newNfcTrie(0)
+	nfkcData = newNfkcTrie(0)
+)
+
+// lookupValue determines the type of block n and looks up the value for b.
+// For n < t.cutoff, the block is a simple lookup table. Otherwise, the block
+// is a list of ranges with an accompanying value. Given a matching range r,
+// the value for b is given by r.value + (b - r.lo) * stride.
+func (t *sparseBlocks) lookup(n uint32, b byte) uint16 {
+	offset := t.offset[n]
+	header := t.values[offset]
+	// The block's ranges follow the header; header.lo holds their count.
+	lo := offset + 1
+	hi := lo + uint16(header.lo)
+	// Binary search for the range containing b.
+	for lo < hi {
+		m := lo + (hi-lo)/2
+		r := t.values[m]
+		if r.lo <= b && b <= r.hi {
+			// header.value holds the stride within a range.
+			return r.value + uint16(b-r.lo)*header.value
+		}
+		if b < r.lo {
+			hi = m
+		} else {
+			lo = m + 1
+		}
+	}
+	return 0
+}
diff --git a/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/triegen.go b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/triegen.go
new file mode 100644
index 0000000..45d7119
--- /dev/null
+++ b/Godeps/_workspace/src/code.google.com/p/go.text/unicode/norm/triegen.go
@@ -0,0 +1,117 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+// Trie table generator.
+// Used by make*tables tools to generate a go file with trie data structures
+// for mapping UTF-8 to a 16-bit value. All but the last byte in a UTF-8 byte
+// sequence are used to lookup offsets in the index table to be used for the
+// next byte. The last byte is used to index into a table with 16-bit values.
+
+package main
+
+import (
+ "fmt"
+ "io"
+)
+
+// maxSparseEntries is the largest range count for which Size still reports
+// the sparse representation as usable.
+const maxSparseEntries = 16
+
+// normCompacter stores trie blocks as sparse lists of (range, value) pairs.
+// Blocks handed to Store are accumulated and later emitted as Go source by
+// Print.
+type normCompacter struct {
+	sparseBlocks [][]uint64
+	sparseOffset []uint16
+	sparseCount  int
+	name         string
+}
+
+// mostFrequentStride returns the difference between consecutive values that
+// occurs most often in a, considering only non-negative strides that start
+// from a non-zero predecessor. Ties are broken toward the smaller stride.
+func mostFrequentStride(a []uint64) int {
+	counts := make(map[int]int)
+	var v int
+	for _, x := range a {
+		if stride := int(x) - v; v != 0 && stride >= 0 {
+			counts[stride]++
+		}
+		v = int(x)
+	}
+	var maxs, maxc int
+	for stride, cnt := range counts {
+		if cnt > maxc || (cnt == maxc && stride < maxs) {
+			maxs, maxc = stride, cnt
+		}
+	}
+	return maxs
+}
+
+// countSparseEntries returns how many ranges are needed to represent a
+// sparsely: a new entry starts wherever a non-zero value breaks the
+// dominant stride.
+func countSparseEntries(a []uint64) int {
+	stride := mostFrequentStride(a)
+	var v, count int
+	for _, tv := range a {
+		if int(tv)-v != stride {
+			if tv != 0 {
+				count++
+			}
+		}
+		v = int(tv)
+	}
+	return count
+}
+
+// Size reports the encoded size in bytes of block v in the sparse
+// representation, and whether that representation applies (only when the
+// entry count is at most maxSparseEntries).
+func (c *normCompacter) Size(v []uint64) (sz int, ok bool) {
+	if n := countSparseEntries(v); n <= maxSparseEntries {
+		return (n+1)*4 + 2, true
+	}
+	return 0, false
+}
+
+// Store records block v for later printing and returns its handle: the
+// index of the block's entry in the offset table.
+func (c *normCompacter) Store(v []uint64) uint32 {
+	h := uint32(len(c.sparseOffset))
+	c.sparseBlocks = append(c.sparseBlocks, v)
+	c.sparseOffset = append(c.sparseOffset, uint16(c.sparseCount))
+	// +1 accounts for the per-block header entry.
+	c.sparseCount += countSparseEntries(v) + 1
+	return h
+}
+
+// Handler returns the name of the generated lookup function for this table.
+func (c *normCompacter) Handler() string {
+	return c.name + "Sparse.lookup"
+}
+
+// Print writes the accumulated sparse offset and value tables to w as Go
+// source, returning the first write error encountered, if any.
+func (c *normCompacter) Print(w io.Writer) (retErr error) {
+	// p formats to w, latching the first write error into retErr.
+	p := func(f string, x ...interface{}) {
+		if _, err := fmt.Fprintf(w, f, x...); retErr == nil && err != nil {
+			retErr = err
+		}
+	}
+
+	ls := len(c.sparseBlocks)
+	p("// %sSparseOffset: %d entries, %d bytes\n", c.name, ls, ls*2)
+	p("var %sSparseOffset = %#v\n\n", c.name, c.sparseOffset)
+
+	ns := c.sparseCount
+	p("// %sSparseValues: %d entries, %d bytes\n", c.name, ns, ns*4)
+	p("var %sSparseValues = [%d]valueRange {", c.name, ns)
+	for i, b := range c.sparseBlocks {
+		p("\n// Block %#x, offset %#x", i, c.sparseOffset[i])
+		var v int
+		stride := mostFrequentStride(b)
+		n := countSparseEntries(b)
+		// Header entry: value holds the stride, lo the number of ranges.
+		p("\n{value:%#04x,lo:%#02x},", stride, uint8(n))
+		for i, nv := range b {
+			// A stride break closes any open range and, for a non-zero
+			// value, opens a new one. The 0x80 offset maps the block index
+			// back to a raw byte value — presumably because blocks cover
+			// UTF-8 continuation bytes (0x80..0xBF); confirm with generator.
+			if int(nv)-v != stride {
+				if v != 0 {
+					p(",hi:%#02x},", 0x80+i-1)
+				}
+				if nv != 0 {
+					p("\n{value:%#04x,lo:%#02x", nv, 0x80+i)
+				}
+			}
+			v = int(nv)
+		}
+		if v != 0 {
+			p(",hi:%#02x},", 0x80+len(b)-1)
+		}
+	}
+	p("\n}\n\n")
+	return
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awserr/error.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awserr/error.go
new file mode 100644
index 0000000..99d8c18
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awserr/error.go
@@ -0,0 +1,105 @@
+// Package awserr represents API error interface accessors for the SDK.
+package awserr
+
+// An Error wraps lower level errors with code, message and an original error.
+// The underlying concrete error type may also satisfy other interfaces which
+// can be used to obtain more specific information about the error.
+//
+// Calling Error() or String() will always include the full information about
+// an error based on its underlying type.
+//
+// Example:
+//
+//     output, err := s3manage.Upload(svc, input, opts)
+//     if err != nil {
+//         if awsErr, ok := err.(awserr.Error); ok {
+//             // Get error details
+//             log.Println("Error:", awsErr.Code(), awsErr.Message())
+//
+//             // Prints out full error message, including original error if there was one.
+//             log.Println("Error:", awsErr.Error())
+//
+//             // Get original error
+//             if origErr := awsErr.OrigErr(); origErr != nil {
+//                 // operate on original error.
+//             }
+//         } else {
+//             fmt.Println(err.Error())
+//         }
+//     }
+//
+type Error interface {
+	// Satisfy the generic error interface.
+	error
+
+	// Returns the short phrase depicting the classification of the error.
+	Code() string
+
+	// Returns the error details message.
+	Message() string
+
+	// Returns the original error if one was set. Nil is returned if not set.
+	OrigErr() error
+}
+
+// New returns an Error object described by the code, message, and origErr.
+//
+// If origErr satisfies the Error interface it will not be wrapped within a new
+// Error object and will instead be returned.
+func New(code, message string, origErr error) Error {
+	// NOTE(review): a non-nil interface holding a typed nil Error value
+	// still passes this check and is returned as-is.
+	if e, ok := origErr.(Error); ok && e != nil {
+		return e
+	}
+	return newBaseError(code, message, origErr)
+}
+
+// A RequestFailure is an interface to extract request failure information from
+// an Error such as the request ID of the failed request returned by a service.
+// RequestFailures may not always have a requestID value if the request failed
+// prior to reaching the service such as a connection error.
+//
+// Example:
+//
+//     output, err := s3manage.Upload(svc, input, opts)
+//     if err != nil {
+//         if reqerr, ok := err.(RequestFailure); ok {
+//             log.Println("Request failed", reqerr.Code(), reqerr.Message(), reqerr.RequestID())
+//         } else {
+//             log.Println("Error:", err.Error())
+//         }
+//     }
+//
+// Combined with awserr.Error:
+//
+//     output, err := s3manage.Upload(svc, input, opts)
+//     if err != nil {
+//         if awsErr, ok := err.(awserr.Error); ok {
+//             // Generic AWS Error with Code, Message, and original error (if any)
+//             fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+//
+//             if reqErr, ok := err.(awserr.RequestFailure); ok {
+//                 // A service error occurred
+//                 fmt.Println(reqErr.StatusCode(), reqErr.RequestID())
+//             }
+//         } else {
+//             fmt.Println(err.Error())
+//         }
+//     }
+//
+type RequestFailure interface {
+	Error
+
+	// The status code of the HTTP response.
+	StatusCode() int
+
+	// The request ID returned by the service for a request failure. This will
+	// be empty if no request ID is available such as the request failed due
+	// to a connection error.
+	RequestID() string
+}
+
+// NewRequestFailure returns a new request error wrapper for the given Error,
+// HTTP status code, and service request ID.
+func NewRequestFailure(err Error, statusCode int, reqID string) RequestFailure {
+	return newRequestError(err, statusCode, reqID)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awserr/types.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awserr/types.go
new file mode 100644
index 0000000..418fc4c
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awserr/types.go
@@ -0,0 +1,135 @@
+package awserr
+
+import "fmt"
+
+// SprintError returns a string of the formatted error code.
+//
+// Both extra and origErr are optional. If they are included their lines
+// will be added, but if they are not included their lines will be ignored.
+func SprintError(code, message, extra string, origErr error) string {
+	msg := fmt.Sprintf("%s: %s", code, message)
+	if extra != "" {
+		// Extra details go indented on their own line.
+		msg = fmt.Sprintf("%s\n\t%s", msg, extra)
+	}
+	if origErr != nil {
+		// The underlying cause is chained last.
+		msg = fmt.Sprintf("%s\ncaused by: %s", msg, origErr.Error())
+	}
+	return msg
+}
+
+// A baseError wraps the code and message that define an error. It also
+// can be used to wrap an original error object.
+//
+// Should be used as the root for errors satisfying the awserr.Error. Also
+// for any error which does not fit into a specific error wrapper type.
+type baseError struct {
+	// Classification of error
+	code string
+
+	// Detailed information about error
+	message string
+
+	// Optional original error this error is based off of. Allows building
+	// chained errors.
+	origErr error
+}
+
+// newBaseError returns an error object for the code, message, and err.
+//
+// code is a short no whitespace phrase depicting the classification of
+// the error that is being created.
+//
+// message is the free flow string containing detailed information about the error.
+//
+// origErr is the error object which will be nested under the new error to be returned.
+func newBaseError(code, message string, origErr error) *baseError {
+	return &baseError{
+		code:    code,
+		message: message,
+		origErr: origErr,
+	}
+}
+
+// Error returns the string representation of the error.
+//
+// See SprintError for formatting.
+//
+// Satisfies the error interface.
+func (b baseError) Error() string {
+	return SprintError(b.code, b.message, "", b.origErr)
+}
+
+// String returns the string representation of the error.
+// Alias for Error to satisfy the stringer interface.
+func (b baseError) String() string {
+	return b.Error()
+}
+
+// Code returns the short phrase depicting the classification of the error.
+func (b baseError) Code() string {
+	return b.code
+}
+
+// Message returns the error details message.
+func (b baseError) Message() string {
+	return b.message
+}
+
+// OrigErr returns the original error if one was set. Nil is returned if no error
+// was set.
+func (b baseError) OrigErr() error {
+	return b.origErr
+}
+
+// So that the Error interface type can be included as an anonymous field
+// in the requestError struct and not conflict with the error.Error() method.
+type awsError Error
+
+// A requestError wraps a request or service error.
+//
+// Composed of baseError for code, message, and original error.
+type requestError struct {
+	awsError
+	statusCode int
+	requestID  string
+}
+
+// newRequestError returns a wrapped error with additional information for the
+// request status code and service requestID.
+//
+// Should be used to wrap all errors which involve service requests. Even if
+// the request failed without a service response, but had an HTTP status code
+// that may be meaningful.
+//
+// Also wraps original errors via the baseError.
+func newRequestError(err Error, statusCode int, requestID string) *requestError {
+	return &requestError{
+		awsError:   err,
+		statusCode: statusCode,
+		requestID:  requestID,
+	}
+}
+
+// Error returns the string representation of the error, appending the status
+// code and request ID as extra detail via SprintError.
+// Satisfies the error interface.
+func (r requestError) Error() string {
+	extra := fmt.Sprintf("status code: %d, request id: [%s]",
+		r.statusCode, r.requestID)
+	return SprintError(r.Code(), r.Message(), extra, r.OrigErr())
+}
+
+// String returns the string representation of the error.
+// Alias for Error to satisfy the stringer interface.
+func (r requestError) String() string {
+	return r.Error()
+}
+
+// StatusCode returns the wrapped status code for the error.
+func (r requestError) StatusCode() int {
+	return r.statusCode
+}
+
+// RequestID returns the wrapped requestID.
+func (r requestError) RequestID() string {
+	return r.requestID
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/copy.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/copy.go
new file mode 100644
index 0000000..99a1156
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/copy.go
@@ -0,0 +1,95 @@
+package awsutil
+
+import (
+ "io"
+ "reflect"
+)
+
+// Copy deeply copies a src structure to dst. Useful for copying request and
+// response structures.
+//
+// Can copy between structs of different type, but will only copy fields which
+// are assignable, and exist in both structs. Fields which are not assignable,
+// or do not exist in both structs are ignored.
+//
+// dst should be a pointer so the copy is visible to the caller; a nil dst
+// panics.
+func Copy(dst, src interface{}) {
+	dstval := reflect.ValueOf(dst)
+	if !dstval.IsValid() {
+		panic("Copy dst cannot be nil")
+	}
+
+	rcopy(dstval, reflect.ValueOf(src), true)
+}
+
+// CopyOf returns a copy of src while also allocating the memory for dst.
+// src must be a pointer type or this operation will fail.
+func CopyOf(src interface{}) (dst interface{}) {
+	// reflect.New allocates a zero value of the pointed-to type; Elem()
+	// panics here when src is not a pointer.
+	dsti := reflect.New(reflect.TypeOf(src).Elem())
+	dst = dsti.Interface()
+	rcopy(dsti, reflect.ValueOf(src), true)
+	return
+}
+
+// rcopy performs a recursive copy of values from the source to destination.
+//
+// root is used to skip certain aspects of the copy which are not valid
+// for the root node of a object.
+func rcopy(dst, src reflect.Value, root bool) {
+	if !src.IsValid() {
+		// Zero reflect.Value (e.g. from a nil interface): nothing to copy.
+		return
+	}
+
+	switch src.Kind() {
+	case reflect.Ptr:
+		if _, ok := src.Interface().(io.Reader); ok {
+			// io.Readers are shared rather than deep-copied, so any stream
+			// state stays with the single underlying value.
+			if dst.Kind() == reflect.Ptr && dst.Elem().CanSet() {
+				dst.Elem().Set(src)
+			} else if dst.CanSet() {
+				dst.Set(src)
+			}
+		} else {
+			e := src.Type().Elem()
+			if dst.CanSet() && !src.IsNil() {
+				dst.Set(reflect.New(e))
+			}
+			if src.Elem().IsValid() {
+				// Keep the current root state since the depth hasn't changed
+				rcopy(dst.Elem(), src.Elem(), root)
+			}
+		}
+	case reflect.Struct:
+		if !root {
+			dst.Set(reflect.New(src.Type()).Elem())
+		}
+
+		// Copy fields matched by name; fields missing from src are skipped.
+		t := dst.Type()
+		for i := 0; i < t.NumField(); i++ {
+			name := t.Field(i).Name
+			srcval := src.FieldByName(name)
+			if srcval.IsValid() {
+				rcopy(dst.FieldByName(name), srcval, false)
+			}
+		}
+	case reflect.Slice:
+		s := reflect.MakeSlice(src.Type(), src.Len(), src.Cap())
+		dst.Set(s)
+		for i := 0; i < src.Len(); i++ {
+			rcopy(dst.Index(i), src.Index(i), false)
+		}
+	case reflect.Map:
+		s := reflect.MakeMap(src.Type())
+		dst.Set(s)
+		for _, k := range src.MapKeys() {
+			v := src.MapIndex(k)
+			v2 := reflect.New(v.Type()).Elem()
+			rcopy(v2, v, false)
+			dst.SetMapIndex(k, v2)
+		}
+	default:
+		// Assign the value if possible. If its not assignable, the value would
+		// need to be converted and the impact of that may be unexpected, or is
+		// not compatible with the dst type.
+		if src.Type().AssignableTo(dst.Type()) {
+			dst.Set(src)
+		}
+	}
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/copy_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/copy_test.go
new file mode 100644
index 0000000..4ae04ac
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/copy_test.go
@@ -0,0 +1,193 @@
+package awsutil_test
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+ "github.com/stretchr/testify/assert"
+)
+
+func ExampleCopy() {
+ type Foo struct {
+ A int
+ B []*string
+ }
+
+ // Create the initial value
+ str1 := "hello"
+ str2 := "bye bye"
+ f1 := &Foo{A: 1, B: []*string{&str1, &str2}}
+
+ // Do the copy
+ var f2 Foo
+ awsutil.Copy(&f2, f1)
+
+ // Print the result
+ fmt.Println(awsutil.StringValue(f2))
+
+ // Output:
+ // {
+ // A: 1,
+ // B: ["hello","bye bye"]
+ // }
+}
+
+func TestCopy(t *testing.T) {
+ type Foo struct {
+ A int
+ B []*string
+ C map[string]*int
+ }
+
+ // Create the initial value
+ str1 := "hello"
+ str2 := "bye bye"
+ int1 := 1
+ int2 := 2
+ f1 := &Foo{
+ A: 1,
+ B: []*string{&str1, &str2},
+ C: map[string]*int{
+ "A": &int1,
+ "B": &int2,
+ },
+ }
+
+ // Do the copy
+ var f2 Foo
+ awsutil.Copy(&f2, f1)
+
+ // Values are equal
+ assert.Equal(t, f2.A, f1.A)
+ assert.Equal(t, f2.B, f1.B)
+ assert.Equal(t, f2.C, f1.C)
+
+ // But pointers are not!
+ str3 := "nothello"
+ int3 := 57
+ f2.A = 100
+ f2.B[0] = &str3
+ f2.C["B"] = &int3
+ assert.NotEqual(t, f2.A, f1.A)
+ assert.NotEqual(t, f2.B, f1.B)
+ assert.NotEqual(t, f2.C, f1.C)
+}
+
+func TestCopyIgnoreNilMembers(t *testing.T) {
+ type Foo struct {
+ A *string
+ }
+
+ f := &Foo{}
+ assert.Nil(t, f.A)
+
+ var f2 Foo
+ awsutil.Copy(&f2, f)
+ assert.Nil(t, f2.A)
+
+ fcopy := awsutil.CopyOf(f)
+ f3 := fcopy.(*Foo)
+ assert.Nil(t, f3.A)
+}
+
+func TestCopyPrimitive(t *testing.T) {
+ str := "hello"
+ var s string
+ awsutil.Copy(&s, &str)
+ assert.Equal(t, "hello", s)
+}
+
+func TestCopyNil(t *testing.T) {
+ var s string
+ awsutil.Copy(&s, nil)
+ assert.Equal(t, "", s)
+}
+
+func TestCopyReader(t *testing.T) {
+ var buf io.Reader = bytes.NewReader([]byte("hello world"))
+ var r io.Reader
+ awsutil.Copy(&r, buf)
+ b, err := ioutil.ReadAll(r)
+ assert.NoError(t, err)
+ assert.Equal(t, []byte("hello world"), b)
+
+ // empty bytes because this is not a deep copy
+ b, err = ioutil.ReadAll(buf)
+ assert.NoError(t, err)
+ assert.Equal(t, []byte(""), b)
+}
+
+func TestCopyDifferentStructs(t *testing.T) {
+ type SrcFoo struct {
+ A int
+ B []*string
+ C map[string]*int
+ SrcUnique string
+ SameNameDiffType int
+ }
+ type DstFoo struct {
+ A int
+ B []*string
+ C map[string]*int
+ DstUnique int
+ SameNameDiffType string
+ }
+
+ // Create the initial value
+ str1 := "hello"
+ str2 := "bye bye"
+ int1 := 1
+ int2 := 2
+ f1 := &SrcFoo{
+ A: 1,
+ B: []*string{&str1, &str2},
+ C: map[string]*int{
+ "A": &int1,
+ "B": &int2,
+ },
+ SrcUnique: "unique",
+ SameNameDiffType: 1,
+ }
+
+ // Do the copy
+ var f2 DstFoo
+ awsutil.Copy(&f2, f1)
+
+ // Values are equal
+ assert.Equal(t, f2.A, f1.A)
+ assert.Equal(t, f2.B, f1.B)
+ assert.Equal(t, f2.C, f1.C)
+ assert.Equal(t, "unique", f1.SrcUnique)
+ assert.Equal(t, 1, f1.SameNameDiffType)
+ assert.Equal(t, 0, f2.DstUnique)
+ assert.Equal(t, "", f2.SameNameDiffType)
+}
+
+func ExampleCopyOf() {
+ type Foo struct {
+ A int
+ B []*string
+ }
+
+ // Create the initial value
+ str1 := "hello"
+ str2 := "bye bye"
+ f1 := &Foo{A: 1, B: []*string{&str1, &str2}}
+
+ // Do the copy
+ v := awsutil.CopyOf(f1)
+ var f2 *Foo = v.(*Foo)
+
+ // Print the result
+ fmt.Println(awsutil.StringValue(f2))
+
+ // Output:
+ // {
+ // A: 1,
+ // B: ["hello","bye bye"]
+ // }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/path_value.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/path_value.go
new file mode 100644
index 0000000..7ae01ef
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/path_value.go
@@ -0,0 +1,175 @@
+package awsutil
+
+import (
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+var indexRe = regexp.MustCompile(`(.+)\[(-?\d+)?\]$`)
+
+// rValuesAtPath returns a slice of values found in value v. The values
+// in v are explored recursively so all nested values are collected.
+func rValuesAtPath(v interface{}, path string, create bool, caseSensitive bool) []reflect.Value {
+ pathparts := strings.Split(path, "||")
+ if len(pathparts) > 1 {
+ for _, pathpart := range pathparts {
+ vals := rValuesAtPath(v, pathpart, create, caseSensitive)
+ if vals != nil && len(vals) > 0 {
+ return vals
+ }
+ }
+ return nil
+ }
+
+ values := []reflect.Value{reflect.Indirect(reflect.ValueOf(v))}
+ components := strings.Split(path, ".")
+ for len(values) > 0 && len(components) > 0 {
+ var index *int64
+ var indexStar bool
+ c := strings.TrimSpace(components[0])
+ if c == "" { // no actual component, illegal syntax
+ return nil
+ } else if caseSensitive && c != "*" && strings.ToLower(c[0:1]) == c[0:1] {
+ // TODO normalize case for user
+ return nil // don't support unexported fields
+ }
+
+ // parse this component
+ if m := indexRe.FindStringSubmatch(c); m != nil {
+ c = m[1]
+ if m[2] == "" {
+ index = nil
+ indexStar = true
+ } else {
+ i, _ := strconv.ParseInt(m[2], 10, 32)
+ index = &i
+ indexStar = false
+ }
+ }
+
+ nextvals := []reflect.Value{}
+ for _, value := range values {
+ // pull component name out of struct member
+ if value.Kind() != reflect.Struct {
+ continue
+ }
+
+ if c == "*" { // pull all members
+ for i := 0; i < value.NumField(); i++ {
+ if f := reflect.Indirect(value.Field(i)); f.IsValid() {
+ nextvals = append(nextvals, f)
+ }
+ }
+ continue
+ }
+
+ value = value.FieldByNameFunc(func(name string) bool {
+ if c == name {
+ return true
+ } else if !caseSensitive && strings.ToLower(name) == strings.ToLower(c) {
+ return true
+ }
+ return false
+ })
+
+ if create && value.Kind() == reflect.Ptr && value.IsNil() {
+ value.Set(reflect.New(value.Type().Elem()))
+ value = value.Elem()
+ } else {
+ value = reflect.Indirect(value)
+ }
+
+ if value.IsValid() {
+ nextvals = append(nextvals, value)
+ }
+ }
+ values = nextvals
+
+ if indexStar || index != nil {
+ nextvals = []reflect.Value{}
+ for _, value := range values {
+ value := reflect.Indirect(value)
+ if value.Kind() != reflect.Slice {
+ continue
+ }
+
+ if indexStar { // grab all indices
+ for i := 0; i < value.Len(); i++ {
+ idx := reflect.Indirect(value.Index(i))
+ if idx.IsValid() {
+ nextvals = append(nextvals, idx)
+ }
+ }
+ continue
+ }
+
+ // pull out index
+ i := int(*index)
+ if i >= value.Len() { // check out of bounds
+ if create {
+ // TODO resize slice
+ } else {
+ continue
+ }
+ } else if i < 0 { // support negative indexing
+ i = value.Len() + i
+ }
+ value = reflect.Indirect(value.Index(i))
+
+ if value.IsValid() {
+ nextvals = append(nextvals, value)
+ }
+ }
+ values = nextvals
+ }
+
+ components = components[1:]
+ }
+ return values
+}
+
+// ValuesAtPath returns a list of objects at the lexical path inside of a structure
+func ValuesAtPath(i interface{}, path string) []interface{} {
+ if rvals := rValuesAtPath(i, path, false, true); rvals != nil {
+ vals := make([]interface{}, len(rvals))
+ for i, rval := range rvals {
+ vals[i] = rval.Interface()
+ }
+ return vals
+ }
+ return nil
+}
+
+// ValuesAtAnyPath returns a list of objects at the case-insensitive lexical
+// path inside of a structure
+func ValuesAtAnyPath(i interface{}, path string) []interface{} {
+ if rvals := rValuesAtPath(i, path, false, false); rvals != nil {
+ vals := make([]interface{}, len(rvals))
+ for i, rval := range rvals {
+ vals[i] = rval.Interface()
+ }
+ return vals
+ }
+ return nil
+}
+
+// SetValueAtPath sets an object at the lexical path inside of a structure
+func SetValueAtPath(i interface{}, path string, v interface{}) {
+ if rvals := rValuesAtPath(i, path, true, true); rvals != nil {
+ for _, rval := range rvals {
+ rval.Set(reflect.ValueOf(v))
+ }
+ }
+}
+
+// SetValueAtAnyPath sets an object at the case insensitive lexical path inside
+// of a structure
+func SetValueAtAnyPath(i interface{}, path string, v interface{}) {
+ if rvals := rValuesAtPath(i, path, true, false); rvals != nil {
+ for _, rval := range rvals {
+ rval.Set(reflect.ValueOf(v))
+ }
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/path_value_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/path_value_test.go
new file mode 100644
index 0000000..ed10aec
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/path_value_test.go
@@ -0,0 +1,65 @@
+package awsutil_test
+
+import (
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+ "github.com/stretchr/testify/assert"
+)
+
+type Struct struct {
+ A []Struct
+ z []Struct
+ B *Struct
+ D *Struct
+ C string
+}
+
+var data = Struct{
+ A: []Struct{{C: "value1"}, {C: "value2"}, {C: "value3"}},
+ z: []Struct{{C: "value1"}, {C: "value2"}, {C: "value3"}},
+ B: &Struct{B: &Struct{C: "terminal"}, D: &Struct{C: "terminal2"}},
+ C: "initial",
+}
+
+func TestValueAtPathSuccess(t *testing.T) {
+ assert.Equal(t, []interface{}{"initial"}, awsutil.ValuesAtPath(data, "C"))
+ assert.Equal(t, []interface{}{"value1"}, awsutil.ValuesAtPath(data, "A[0].C"))
+ assert.Equal(t, []interface{}{"value2"}, awsutil.ValuesAtPath(data, "A[1].C"))
+ assert.Equal(t, []interface{}{"value3"}, awsutil.ValuesAtPath(data, "A[2].C"))
+ assert.Equal(t, []interface{}{"value3"}, awsutil.ValuesAtAnyPath(data, "a[2].c"))
+ assert.Equal(t, []interface{}{"value3"}, awsutil.ValuesAtPath(data, "A[-1].C"))
+ assert.Equal(t, []interface{}{"value1", "value2", "value3"}, awsutil.ValuesAtPath(data, "A[].C"))
+ assert.Equal(t, []interface{}{"terminal"}, awsutil.ValuesAtPath(data, "B . B . C"))
+ assert.Equal(t, []interface{}{"terminal", "terminal2"}, awsutil.ValuesAtPath(data, "B.*.C"))
+ assert.Equal(t, []interface{}{"initial"}, awsutil.ValuesAtPath(data, "A.D.X || C"))
+}
+
+func TestValueAtPathFailure(t *testing.T) {
+ assert.Equal(t, []interface{}(nil), awsutil.ValuesAtPath(data, "C.x"))
+ assert.Equal(t, []interface{}(nil), awsutil.ValuesAtPath(data, ".x"))
+ assert.Equal(t, []interface{}{}, awsutil.ValuesAtPath(data, "X.Y.Z"))
+ assert.Equal(t, []interface{}{}, awsutil.ValuesAtPath(data, "A[100].C"))
+ assert.Equal(t, []interface{}{}, awsutil.ValuesAtPath(data, "A[3].C"))
+ assert.Equal(t, []interface{}{}, awsutil.ValuesAtPath(data, "B.B.C.Z"))
+ assert.Equal(t, []interface{}(nil), awsutil.ValuesAtPath(data, "z[-1].C"))
+ assert.Equal(t, []interface{}{}, awsutil.ValuesAtPath(nil, "A.B.C"))
+}
+
+func TestSetValueAtPathSuccess(t *testing.T) {
+ var s Struct
+ awsutil.SetValueAtPath(&s, "C", "test1")
+ awsutil.SetValueAtPath(&s, "B.B.C", "test2")
+ awsutil.SetValueAtPath(&s, "B.D.C", "test3")
+ assert.Equal(t, "test1", s.C)
+ assert.Equal(t, "test2", s.B.B.C)
+ assert.Equal(t, "test3", s.B.D.C)
+
+ awsutil.SetValueAtPath(&s, "B.*.C", "test0")
+ assert.Equal(t, "test0", s.B.B.C)
+ assert.Equal(t, "test0", s.B.D.C)
+
+ var s2 Struct
+ awsutil.SetValueAtAnyPath(&s2, "b.b.c", "test0")
+ assert.Equal(t, "test0", s2.B.B.C)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/string_value.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/string_value.go
new file mode 100644
index 0000000..09673a1
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/string_value.go
@@ -0,0 +1,103 @@
+package awsutil
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "reflect"
+ "strings"
+)
+
+// StringValue returns the string representation of a value.
+func StringValue(i interface{}) string {
+ var buf bytes.Buffer
+ stringValue(reflect.ValueOf(i), 0, &buf)
+ return buf.String()
+}
+
+// stringValue will recursively walk value v to build a textual
+// representation of the value.
+func stringValue(v reflect.Value, indent int, buf *bytes.Buffer) {
+ for v.Kind() == reflect.Ptr {
+ v = v.Elem()
+ }
+
+ switch v.Kind() {
+ case reflect.Struct:
+ strtype := v.Type().String()
+ if strtype == "time.Time" {
+ fmt.Fprintf(buf, "%s", v.Interface())
+ break
+ } else if strings.HasPrefix(strtype, "io.") {
+ buf.WriteString("")
+ break
+ }
+
+ buf.WriteString("{\n")
+
+ names := []string{}
+ for i := 0; i < v.Type().NumField(); i++ {
+ name := v.Type().Field(i).Name
+ f := v.Field(i)
+ if name[0:1] == strings.ToLower(name[0:1]) {
+ continue // ignore unexported fields
+ }
+ if (f.Kind() == reflect.Ptr || f.Kind() == reflect.Slice || f.Kind() == reflect.Map) && f.IsNil() {
+ continue // ignore unset fields
+ }
+ names = append(names, name)
+ }
+
+ for i, n := range names {
+ val := v.FieldByName(n)
+ buf.WriteString(strings.Repeat(" ", indent+2))
+ buf.WriteString(n + ": ")
+ stringValue(val, indent+2, buf)
+
+ if i < len(names)-1 {
+ buf.WriteString(",\n")
+ }
+ }
+
+ buf.WriteString("\n" + strings.Repeat(" ", indent) + "}")
+ case reflect.Slice:
+ nl, id, id2 := "", "", ""
+ if v.Len() > 3 {
+ nl, id, id2 = "\n", strings.Repeat(" ", indent), strings.Repeat(" ", indent+2)
+ }
+ buf.WriteString("[" + nl)
+ for i := 0; i < v.Len(); i++ {
+ buf.WriteString(id2)
+ stringValue(v.Index(i), indent+2, buf)
+
+ if i < v.Len()-1 {
+ buf.WriteString("," + nl)
+ }
+ }
+
+ buf.WriteString(nl + id + "]")
+ case reflect.Map:
+ buf.WriteString("{\n")
+
+ for i, k := range v.MapKeys() {
+ buf.WriteString(strings.Repeat(" ", indent+2))
+ buf.WriteString(k.String() + ": ")
+ stringValue(v.MapIndex(k), indent+2, buf)
+
+ if i < v.Len()-1 {
+ buf.WriteString(",\n")
+ }
+ }
+
+ buf.WriteString("\n" + strings.Repeat(" ", indent) + "}")
+ default:
+ format := "%v"
+ switch v.Interface().(type) {
+ case string:
+ format = "%q"
+ case io.ReadSeeker, io.Reader:
+ format = "buffer(%p)"
+ }
+ fmt.Fprintf(buf, format, v.Interface())
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/config.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/config.go
new file mode 100644
index 0000000..4699070
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/config.go
@@ -0,0 +1,173 @@
+package aws
+
+import (
+ "io"
+ "net/http"
+ "os"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/credentials"
+)
+
+// DefaultChainCredentials is a Credentials which will find the first available
+// credentials Value from the list of Providers.
+//
+// This should be used in the default case. Once the type of credentials are
+// known switching to the specific Credentials will be more efficient.
+var DefaultChainCredentials = credentials.NewChainCredentials(
+ []credentials.Provider{
+ &credentials.EnvProvider{},
+ &credentials.SharedCredentialsProvider{Filename: "", Profile: ""},
+ &credentials.EC2RoleProvider{ExpiryWindow: 5 * time.Minute},
+ })
+
+// The default number of retries for a service. The value of -1 indicates that
+// the service specific retry default will be used.
+const DefaultRetries = -1
+
+// DefaultConfig is the default all service configuration will be based off of.
+var DefaultConfig = &Config{
+ Credentials: DefaultChainCredentials,
+ Endpoint: "",
+ Region: os.Getenv("AWS_REGION"),
+ DisableSSL: false,
+ ManualSend: false,
+ HTTPClient: http.DefaultClient,
+ LogHTTPBody: false,
+ LogLevel: 0,
+ Logger: os.Stdout,
+ MaxRetries: DefaultRetries,
+ DisableParamValidation: false,
+ DisableComputeChecksums: false,
+ S3ForcePathStyle: false,
+}
+
+// A Config provides service configuration
+type Config struct {
+ Credentials *credentials.Credentials
+ Endpoint string
+ Region string
+ DisableSSL bool
+ ManualSend bool
+ HTTPClient *http.Client
+ LogHTTPBody bool
+ LogLevel uint
+ Logger io.Writer
+ MaxRetries int
+ DisableParamValidation bool
+ DisableComputeChecksums bool
+ S3ForcePathStyle bool
+}
+
+// Copy will return a shallow copy of the Config object.
+func (c Config) Copy() Config {
+ dst := Config{}
+ dst.Credentials = c.Credentials
+ dst.Endpoint = c.Endpoint
+ dst.Region = c.Region
+ dst.DisableSSL = c.DisableSSL
+ dst.ManualSend = c.ManualSend
+ dst.HTTPClient = c.HTTPClient
+ dst.LogHTTPBody = c.LogHTTPBody
+ dst.LogLevel = c.LogLevel
+ dst.Logger = c.Logger
+ dst.MaxRetries = c.MaxRetries
+ dst.DisableParamValidation = c.DisableParamValidation
+ dst.DisableComputeChecksums = c.DisableComputeChecksums
+ dst.S3ForcePathStyle = c.S3ForcePathStyle
+
+ return dst
+}
+
+// Merge merges the newcfg attribute values into this Config. Each attribute
+// will be merged into this config if the newcfg attribute's value is non-zero.
+// Due to this, newcfg attributes with zero values cannot be merged in. For
+// example bool attributes cannot be cleared using Merge, and must be explicitly
+// set on the Config structure.
+func (c Config) Merge(newcfg *Config) *Config {
+ if newcfg == nil {
+ return &c
+ }
+
+ cfg := Config{}
+
+ if newcfg.Credentials != nil {
+ cfg.Credentials = newcfg.Credentials
+ } else {
+ cfg.Credentials = c.Credentials
+ }
+
+ if newcfg.Endpoint != "" {
+ cfg.Endpoint = newcfg.Endpoint
+ } else {
+ cfg.Endpoint = c.Endpoint
+ }
+
+ if newcfg.Region != "" {
+ cfg.Region = newcfg.Region
+ } else {
+ cfg.Region = c.Region
+ }
+
+ if newcfg.DisableSSL {
+ cfg.DisableSSL = newcfg.DisableSSL
+ } else {
+ cfg.DisableSSL = c.DisableSSL
+ }
+
+ if newcfg.ManualSend {
+ cfg.ManualSend = newcfg.ManualSend
+ } else {
+ cfg.ManualSend = c.ManualSend
+ }
+
+ if newcfg.HTTPClient != nil {
+ cfg.HTTPClient = newcfg.HTTPClient
+ } else {
+ cfg.HTTPClient = c.HTTPClient
+ }
+
+ if newcfg.LogHTTPBody {
+ cfg.LogHTTPBody = newcfg.LogHTTPBody
+ } else {
+ cfg.LogHTTPBody = c.LogHTTPBody
+ }
+
+ if newcfg.LogLevel != 0 {
+ cfg.LogLevel = newcfg.LogLevel
+ } else {
+ cfg.LogLevel = c.LogLevel
+ }
+
+ if newcfg.Logger != nil {
+ cfg.Logger = newcfg.Logger
+ } else {
+ cfg.Logger = c.Logger
+ }
+
+ if newcfg.MaxRetries != DefaultRetries {
+ cfg.MaxRetries = newcfg.MaxRetries
+ } else {
+ cfg.MaxRetries = c.MaxRetries
+ }
+
+ if newcfg.DisableParamValidation {
+ cfg.DisableParamValidation = newcfg.DisableParamValidation
+ } else {
+ cfg.DisableParamValidation = c.DisableParamValidation
+ }
+
+ if newcfg.DisableComputeChecksums {
+ cfg.DisableComputeChecksums = newcfg.DisableComputeChecksums
+ } else {
+ cfg.DisableComputeChecksums = c.DisableComputeChecksums
+ }
+
+ if newcfg.S3ForcePathStyle {
+ cfg.S3ForcePathStyle = newcfg.S3ForcePathStyle
+ } else {
+ cfg.S3ForcePathStyle = c.S3ForcePathStyle
+ }
+
+ return &cfg
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/config_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/config_test.go
new file mode 100644
index 0000000..fc5a77b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/config_test.go
@@ -0,0 +1,92 @@
+package aws
+
+import (
+ "net/http"
+ "os"
+ "reflect"
+ "testing"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/credentials"
+)
+
+var testCredentials = credentials.NewChainCredentials([]credentials.Provider{
+ &credentials.EnvProvider{},
+ &credentials.SharedCredentialsProvider{
+ Filename: "TestFilename",
+ Profile: "TestProfile"},
+ &credentials.EC2RoleProvider{ExpiryWindow: 5 * time.Minute},
+})
+
+var copyTestConfig = Config{
+ Credentials: testCredentials,
+ Endpoint: "CopyTestEndpoint",
+ Region: "COPY_TEST_AWS_REGION",
+ DisableSSL: true,
+ ManualSend: true,
+ HTTPClient: http.DefaultClient,
+ LogHTTPBody: true,
+ LogLevel: 2,
+ Logger: os.Stdout,
+ MaxRetries: DefaultRetries,
+ DisableParamValidation: true,
+ DisableComputeChecksums: true,
+ S3ForcePathStyle: true,
+}
+
+func TestCopy(t *testing.T) {
+ want := copyTestConfig
+ got := copyTestConfig.Copy()
+ if !reflect.DeepEqual(got, want) {
+ t.Errorf("Copy() = %+v", got)
+ t.Errorf(" want %+v", want)
+ }
+}
+
+func TestCopyReturnsNewInstance(t *testing.T) {
+ want := copyTestConfig
+ got := copyTestConfig.Copy()
+ if &got == &want {
+ t.Errorf("Copy() = %p; want different instance as source %p", &got, &want)
+ }
+}
+
+var mergeTestZeroValueConfig = Config{MaxRetries: DefaultRetries}
+
+var mergeTestConfig = Config{
+ Credentials: testCredentials,
+ Endpoint: "MergeTestEndpoint",
+ Region: "MERGE_TEST_AWS_REGION",
+ DisableSSL: true,
+ ManualSend: true,
+ HTTPClient: http.DefaultClient,
+ LogHTTPBody: true,
+ LogLevel: 2,
+ Logger: os.Stdout,
+ MaxRetries: 10,
+ DisableParamValidation: true,
+ DisableComputeChecksums: true,
+ S3ForcePathStyle: true,
+}
+
+var mergeTests = []struct {
+ cfg *Config
+ in *Config
+ want *Config
+}{
+ {&Config{}, nil, &Config{}},
+ {&Config{}, &mergeTestZeroValueConfig, &Config{}},
+ {&Config{}, &mergeTestConfig, &mergeTestConfig},
+}
+
+func TestMerge(t *testing.T) {
+ for _, tt := range mergeTests {
+ got := tt.cfg.Merge(tt.in)
+ if !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("Config %+v", tt.cfg)
+ t.Errorf(" Merge(%+v)", tt.in)
+ t.Errorf(" got %+v", got)
+ t.Errorf(" want %+v", tt.want)
+ }
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/chain_provider.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/chain_provider.go
new file mode 100644
index 0000000..73ff1b9
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/chain_provider.go
@@ -0,0 +1,81 @@
+package credentials
+
+import (
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+var (
+	// ErrNoValidProvidersFoundInChain is returned when there are no valid
+ // providers in the ChainProvider.
+ ErrNoValidProvidersFoundInChain = awserr.New("NoCredentialProviders", "no valid providers in chain", nil)
+)
+
+// A ChainProvider will search for a provider which returns credentials
+// and cache that provider until Retrieve is called again.
+//
+// The ChainProvider provides a way of chaining multiple providers together
+// which will pick the first available using priority order of the Providers
+// in the list.
+//
+// If none of the Providers retrieve valid credentials Value, ChainProvider's
+// Retrieve() will return the error ErrNoValidProvidersFoundInChain.
+//
+// If a Provider is found which returns valid credentials Value ChainProvider
+// will cache that Provider for all calls to IsExpired(), until Retrieve is
+// called again.
+//
+// Example of ChainProvider to be used with an EnvProvider and EC2RoleProvider.
+// In this example EnvProvider will first check if any credentials are available
+// via the environment variables. If there are none ChainProvider will check
+// the next Provider in the list, EC2RoleProvider in this case. If EC2RoleProvider
+// does not return any credentials ChainProvider will return the error
+// ErrNoValidProvidersFoundInChain
+//
+// creds := NewChainCredentials(
+// []Provider{
+// &EnvProvider{},
+// &EC2RoleProvider{},
+// })
+// creds.Retrieve()
+//
+type ChainProvider struct {
+ Providers []Provider
+ curr Provider
+}
+
+// NewChainCredentials returns a pointer to a new Credentials object
+// wrapping a chain of providers.
+func NewChainCredentials(providers []Provider) *Credentials {
+ return NewCredentials(&ChainProvider{
+ Providers: append([]Provider{}, providers...),
+ })
+}
+
+// Retrieve returns the credentials value, or an error if no provider
+// returned credentials without error.
+//
+// If a provider is found it will be cached and any calls to IsExpired()
+// will return the expired state of the cached provider.
+func (c *ChainProvider) Retrieve() (Value, error) {
+ for _, p := range c.Providers {
+ if creds, err := p.Retrieve(); err == nil {
+ c.curr = p
+ return creds, nil
+ }
+ }
+ c.curr = nil
+
+ // TODO better error reporting. maybe report error for each failed retrieve?
+
+ return Value{}, ErrNoValidProvidersFoundInChain
+}
+
+// IsExpired will return the expired state of the currently cached provider
+// if there is one. If there is no current provider, true will be returned.
+func (c *ChainProvider) IsExpired() bool {
+ if c.curr != nil {
+ return c.curr.IsExpired()
+ }
+
+ return true
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/chain_provider_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/chain_provider_test.go
new file mode 100644
index 0000000..4fba22f
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/chain_provider_test.go
@@ -0,0 +1,73 @@
+package credentials
+
+import (
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestChainProviderGet(t *testing.T) {
+ p := &ChainProvider{
+ Providers: []Provider{
+ &stubProvider{err: awserr.New("FirstError", "first provider error", nil)},
+ &stubProvider{err: awserr.New("SecondError", "second provider error", nil)},
+ &stubProvider{
+ creds: Value{
+ AccessKeyID: "AKID",
+ SecretAccessKey: "SECRET",
+ SessionToken: "",
+ },
+ },
+ },
+ }
+
+ creds, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+ assert.Equal(t, "AKID", creds.AccessKeyID, "Expect access key ID to match")
+ assert.Equal(t, "SECRET", creds.SecretAccessKey, "Expect secret access key to match")
+ assert.Empty(t, creds.SessionToken, "Expect session token to be empty")
+}
+
+func TestChainProviderIsExpired(t *testing.T) {
+ stubProvider := &stubProvider{expired: true}
+ p := &ChainProvider{
+ Providers: []Provider{
+ stubProvider,
+ },
+ }
+
+ assert.True(t, p.IsExpired(), "Expect expired to be true before any Retrieve")
+ _, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+ assert.False(t, p.IsExpired(), "Expect not expired after retrieve")
+
+ stubProvider.expired = true
+ assert.True(t, p.IsExpired(), "Expect return of expired provider")
+
+ _, err = p.Retrieve()
+ assert.False(t, p.IsExpired(), "Expect not expired after retrieve")
+}
+
+func TestChainProviderWithNoProvider(t *testing.T) {
+ p := &ChainProvider{
+ Providers: []Provider{},
+ }
+
+ assert.True(t, p.IsExpired(), "Expect expired with no providers")
+ _, err := p.Retrieve()
+ assert.Equal(t, ErrNoValidProvidersFoundInChain, err, "Expect no providers error returned")
+}
+
+func TestChainProviderWithNoValidProvider(t *testing.T) {
+ p := &ChainProvider{
+ Providers: []Provider{
+ &stubProvider{err: awserr.New("FirstError", "first provider error", nil)},
+ &stubProvider{err: awserr.New("SecondError", "second provider error", nil)},
+ },
+ }
+
+ assert.True(t, p.IsExpired(), "Expect expired with no providers")
+ _, err := p.Retrieve()
+ assert.Equal(t, ErrNoValidProvidersFoundInChain, err, "Expect no providers error returned")
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/credentials.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/credentials.go
new file mode 100644
index 0000000..3d6ac4d
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/credentials.go
@@ -0,0 +1,219 @@
+// Package credentials provides credential retrieval and management
+//
+// The Credentials is the primary method of getting access to and managing
+// credentials Values. Using dependency injection retrieval of the credential
+// values is handled by a object which satisfies the Provider interface.
+//
+// By default the Credentials.Get() will cache the successful result of a
+// Provider's Retrieve() until Provider.IsExpired() returns true. At which
+// point Credentials will call Provider's Retrieve() to get new credential Value.
+//
+// The Provider is responsible for determining when credentials Value have expired.
+// It is also important to note that Credentials will always call Retrieve the
+// first time Credentials.Get() is called.
+//
+// Example of using the environment variable credentials.
+//
+// creds := NewEnvCredentials()
+//
+// // Retrieve the credentials value
+// credValue, err := creds.Get()
+// if err != nil {
+// // handle error
+// }
+//
+// Example of forcing credentials to expire and be refreshed on the next Get().
+// This may be helpful to proactively expire credentials and refresh them sooner
+// than they would naturally expire on their own.
+//
+// creds := NewCredentials(&EC2RoleProvider{})
+// creds.Expire()
+// credsValue, err := creds.Get()
+// // New credentials will be retrieved instead of from cache.
+//
+//
+// Custom Provider
+//
+// Each Provider built into this package also provides a helper method to generate
+// a Credentials pointer setup with the provider. To use a custom Provider just
+// create a type which satisfies the Provider interface and pass it to the
+// NewCredentials method.
+//
+// type MyProvider struct{}
+// func (m *MyProvider) Retrieve() (Value, error) {...}
+// func (m *MyProvider) IsExpired() bool {...}
+//
+// creds := NewCredentials(&MyProvider{})
+// credValue, err := creds.Get()
+//
+package credentials
+
+import (
+ "sync"
+ "time"
+)
+
+// Create an empty Credential object that can be used as dummy placeholder
+// credentials for requests that do not need to be signed.
+//
+// This Credentials can be used to configure a service to not sign requests
+// when making service API calls. For example, when accessing public
+// s3 buckets.
+//
+// svc := s3.New(&aws.Config{Credentials: AnonymousCredentials})
+// // Access public S3 buckets.
+//
+var AnonymousCredentials = NewStaticCredentials("", "", "")
+
+// A Value is the AWS credentials value for individual credential fields.
+type Value struct {
+ // AWS Access key ID
+ AccessKeyID string
+
+ // AWS Secret Access Key
+ SecretAccessKey string
+
+ // AWS Session Token
+ SessionToken string
+}
+
+// A Provider is the interface for any component which will provide credentials
+// Value. A provider is required to manage its own Expired state, and what to
+// be expired means.
+//
+// The Provider should not need to implement its own mutexes, because
+// that will be managed by Credentials.
+type Provider interface {
+	// Retrieve returns nil if it successfully retrieved the value.
+ // Error is returned if the value were not obtainable, or empty.
+ Retrieve() (Value, error)
+
+ // IsExpired returns if the credentials are no longer valid, and need
+ // to be retrieved.
+ IsExpired() bool
+}
+
+// An Expiry provides shared expiration logic to be used by credentials
+// providers to implement expiry functionality.
+//
+// The best method to use this struct is as an anonymous field within the
+// provider's struct.
+//
+// Example:
+// type EC2RoleProvider struct {
+// Expiry
+// ...
+// }
+type Expiry struct {
+ // The date/time when to expire on
+ expiration time.Time
+
+ // If set will be used by IsExpired to determine the current time.
+ // Defaults to time.Now if CurrentTime is not set. Available for testing
+ // to be able to mock out the current time.
+ CurrentTime func() time.Time
+}
+
+// SetExpiration sets the expiration IsExpired will check when called.
+//
+// If window is greater than 0 the expiration time will be reduced by the
+// window value.
+//
+// Using a window is helpful to trigger credentials to expire sooner than
+// the expiration time given to ensure no requests are made with expired
+// tokens.
+func (e *Expiry) SetExpiration(expiration time.Time, window time.Duration) {
+ e.expiration = expiration
+ if window > 0 {
+ e.expiration = e.expiration.Add(-window)
+ }
+}
+
+// IsExpired returns if the credentials are expired.
+func (e *Expiry) IsExpired() bool {
+ if e.CurrentTime == nil {
+ e.CurrentTime = time.Now
+ }
+ return e.expiration.Before(e.CurrentTime())
+}
+
+// A Credentials provides synchronous safe retrieval of AWS credentials Value.
+// Credentials will cache the credentials value until they expire. Once the value
+// expires the next Get will attempt to retrieve valid credentials.
+//
+// Credentials is safe to use across multiple goroutines and will manage the
+// synchronous state so the Providers do not need to implement their own
+// synchronization.
+//
+// The first Credentials.Get() will always call Provider.Retrieve() to get the
+// first instance of the credentials Value. All calls to Get() after that
+// will return the cached credentials Value until IsExpired() returns true.
+type Credentials struct {
+ creds Value
+ forceRefresh bool
+ m sync.Mutex
+
+ provider Provider
+}
+
+// NewCredentials returns a pointer to a new Credentials with the provider set.
+func NewCredentials(provider Provider) *Credentials {
+ return &Credentials{
+ provider: provider,
+ forceRefresh: true,
+ }
+}
+
+// Get returns the credentials value, or error if the credentials Value failed
+// to be retrieved.
+//
+// Will return the cached credentials Value if it has not expired. If the
+// credentials Value has expired the Provider's Retrieve() will be called
+// to refresh the credentials.
+//
+// If Credentials.Expire() was called the credentials Value will be force
+// expired, and the next call to Get() will cause them to be refreshed.
+func (c *Credentials) Get() (Value, error) {
+ c.m.Lock()
+ defer c.m.Unlock()
+
+ if c.isExpired() {
+ creds, err := c.provider.Retrieve()
+ if err != nil {
+ return Value{}, err
+ }
+ c.creds = creds
+ c.forceRefresh = false
+ }
+
+ return c.creds, nil
+}
+
+// Expire expires the credentials and forces them to be retrieved on the
+// next call to Get().
+//
+// This will override the Provider's expired state, and force Credentials
+// to call the Provider's Retrieve().
+func (c *Credentials) Expire() {
+ c.m.Lock()
+ defer c.m.Unlock()
+
+ c.forceRefresh = true
+}
+
+// IsExpired returns if the credentials are no longer valid, and need
+// to be retrieved.
+//
+// If the Credentials were forced to be expired with Expire() this will
+// reflect that override.
+func (c *Credentials) IsExpired() bool {
+ c.m.Lock()
+ defer c.m.Unlock()
+
+ return c.isExpired()
+}
+
+// isExpired helper method wrapping the definition of expired credentials.
+func (c *Credentials) isExpired() bool {
+ return c.forceRefresh || c.provider.IsExpired()
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/credentials_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/credentials_test.go
new file mode 100644
index 0000000..99c2b47
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/credentials_test.go
@@ -0,0 +1,62 @@
+package credentials
+
+import (
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/stretchr/testify/assert"
+)
+
+// stubProvider is a test double implementing the Provider interface with
+// canned credentials, a controllable expired flag, and an optional error.
+type stubProvider struct {
+ creds Value
+ expired bool
+ err error
+}
+
+// Retrieve clears the expired flag and returns the canned credentials and
+// error, mimicking a successful (or failing) refresh.
+func (s *stubProvider) Retrieve() (Value, error) {
+ s.expired = false
+ return s.creds, s.err
+}
+func (s *stubProvider) IsExpired() bool {
+ return s.expired
+}
+
+func TestCredentialsGet(t *testing.T) {
+ c := NewCredentials(&stubProvider{
+ creds: Value{
+ AccessKeyID: "AKID",
+ SecretAccessKey: "SECRET",
+ SessionToken: "",
+ },
+ expired: true,
+ })
+
+ creds, err := c.Get()
+ assert.Nil(t, err, "Expected no error")
+ assert.Equal(t, "AKID", creds.AccessKeyID, "Expect access key ID to match")
+ assert.Equal(t, "SECRET", creds.SecretAccessKey, "Expect secret access key to match")
+ assert.Empty(t, creds.SessionToken, "Expect session token to be empty")
+}
+
+func TestCredentialsGetWithError(t *testing.T) {
+ c := NewCredentials(&stubProvider{err: awserr.New("provider error", "", nil), expired: true})
+
+ _, err := c.Get()
+ assert.Equal(t, "provider error", err.(awserr.Error).Code(), "Expected provider error")
+}
+
+func TestCredentialsExpire(t *testing.T) {
+ stub := &stubProvider{}
+ c := NewCredentials(stub)
+
+ stub.expired = false
+ assert.True(t, c.IsExpired(), "Expected to start out expired")
+ c.Expire()
+ assert.True(t, c.IsExpired(), "Expected to be expired")
+
+ c.forceRefresh = false
+ assert.False(t, c.IsExpired(), "Expected not to be expired")
+
+ stub.expired = true
+ assert.True(t, c.IsExpired(), "Expected to be expired")
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/ec2_role_provider.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/ec2_role_provider.go
new file mode 100644
index 0000000..7691b62
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/ec2_role_provider.go
@@ -0,0 +1,163 @@
+package credentials
+
+import (
+ "bufio"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+// metadataCredentialsEndpoint is the default EC2 instance-metadata URL used
+// to list and fetch IAM role credentials.
+const metadataCredentialsEndpoint = "http://169.254.169.254/latest/meta-data/iam/security-credentials/"
+
+// A EC2RoleProvider retrieves credentials from the EC2 service, and keeps track if
+// those credentials are expired.
+//
+// Example how to configure the EC2RoleProvider with custom http Client, Endpoint
+// or ExpiryWindow
+//
+// p := &credentials.EC2RoleProvider{
+// // Pass in a custom timeout to be used when requesting
+// // IAM EC2 Role credentials.
+// Client: &http.Client{
+// Timeout: 10 * time.Second,
+// },
+// // Use default EC2 Role metadata endpoint, Alternate endpoints can be
+// // specified setting Endpoint to something else.
+// Endpoint: "",
+// // Do not use early expiry of credentials. If a non zero value is
+// // specified the credentials will be expired early
+// ExpiryWindow: 0,
+// }
+//
+type EC2RoleProvider struct {
+ Expiry
+
+ // Endpoint must be a fully qualified URL
+ Endpoint string
+
+ // HTTP client to use when connecting to EC2 service
+ Client *http.Client
+
+ // ExpiryWindow will allow the credentials to trigger refreshing prior to
+ // the credentials actually expiring. This is beneficial so race conditions
+ // with expiring credentials do not cause request to fail unexpectedly
+ // due to ExpiredTokenException exceptions.
+ //
+ // So a ExpiryWindow of 10s would cause calls to IsExpired() to return true
+ // 10 seconds before the credentials are actually expired.
+ //
+ // If ExpiryWindow is 0 or less it will be ignored.
+ ExpiryWindow time.Duration
+}
+
+// NewEC2RoleCredentials returns a pointer to a new Credentials object
+// wrapping the EC2RoleProvider.
+//
+// Takes a custom http.Client which can be configured for custom handling of
+// things such as timeout. A nil client and empty endpoint are replaced with
+// http.DefaultClient and the default metadata endpoint on first Retrieve.
+//
+// Endpoint is the URL that the EC2RoleProvider will connect to when retrieving
+// role and credentials.
+//
+// Window is the expiry window that will be subtracted from the expiry returned
+// by the role credential request. This is done so that the credentials will
+// expire sooner than their actual lifespan.
+func NewEC2RoleCredentials(client *http.Client, endpoint string, window time.Duration) *Credentials {
+ return NewCredentials(&EC2RoleProvider{
+ Endpoint: endpoint,
+ Client: client,
+ ExpiryWindow: window,
+ })
+}
+
+// Retrieve retrieves credentials from the EC2 service.
+// Error will be returned if the request fails, or unable to extract
+// the desired credentials.
+//
+// NOTE(review): Retrieve mutates m.Client and m.Endpoint when applying
+// defaults, so it is not safe for unsynchronized concurrent use on its own;
+// Credentials.Get serializes calls through its mutex.
+func (m *EC2RoleProvider) Retrieve() (Value, error) {
+ if m.Client == nil {
+ m.Client = http.DefaultClient
+ }
+ if m.Endpoint == "" {
+ m.Endpoint = metadataCredentialsEndpoint
+ }
+
+ credsList, err := requestCredList(m.Client, m.Endpoint)
+ if err != nil {
+ return Value{}, err
+ }
+
+ if len(credsList) == 0 {
+ return Value{}, awserr.New("EmptyEC2RoleList", "empty EC2 Role list", nil)
+ }
+ // Only the first role in the list is used.
+ credsName := credsList[0]
+
+ roleCreds, err := requestCred(m.Client, m.Endpoint, credsName)
+ if err != nil {
+ return Value{}, err
+ }
+
+ // Expire early by ExpiryWindow so callers refresh before actual expiry.
+ m.SetExpiration(roleCreds.Expiration, m.ExpiryWindow)
+
+ return Value{
+ AccessKeyID: roleCreds.AccessKeyID,
+ SecretAccessKey: roleCreds.SecretAccessKey,
+ SessionToken: roleCreds.Token,
+ }, nil
+}
+
+// A ec2RoleCredRespBody provides the shape for deserializing credential
+// request responses.
+type ec2RoleCredRespBody struct {
+ Expiration time.Time
+ AccessKeyID string
+ SecretAccessKey string
+ Token string
+}
+
+// requestCredList requests a list of credentials from the EC2 service.
+// If there is an error making or receiving the request, or reading its
+// body, an error will be returned.
+func requestCredList(client *http.Client, endpoint string) ([]string, error) {
+ resp, err := client.Get(endpoint)
+ if err != nil {
+ return nil, awserr.New("ListEC2Role", "failed to list EC2 Roles", err)
+ }
+ defer resp.Body.Close()
+
+ // Each line of the response body is one role name.
+ credsList := []string{}
+ s := bufio.NewScanner(resp.Body)
+ for s.Scan() {
+ credsList = append(credsList, s.Text())
+ }
+
+ if err := s.Err(); err != nil {
+ return nil, awserr.New("ReadEC2Role", "failed to read list of EC2 Roles", err)
+ }
+
+ return credsList, nil
+}
+
+// requestCred requests the credentials for a specific role from the EC2 service.
+//
+// If the credentials cannot be found, or there is an error reading the response,
+// an error will be returned.
+func requestCred(client *http.Client, endpoint, credsName string) (*ec2RoleCredRespBody, error) {
+ resp, err := client.Get(endpoint + credsName)
+ if err != nil {
+ return nil, awserr.New("GetEC2RoleCredentials",
+ fmt.Sprintf("failed to get %s EC2 Role credentials", credsName),
+ err)
+ }
+ defer resp.Body.Close()
+
+ respCreds := &ec2RoleCredRespBody{}
+ if err := json.NewDecoder(resp.Body).Decode(respCreds); err != nil {
+ return nil, awserr.New("DecodeEC2RoleCredentials",
+ fmt.Sprintf("failed to decode %s EC2 Role credentials", credsName),
+ err)
+ }
+
+ return respCreds, nil
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/ec2_role_provider_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/ec2_role_provider_test.go
new file mode 100644
index 0000000..da1549a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/ec2_role_provider_test.go
@@ -0,0 +1,108 @@
+package credentials
+
+import (
+ "fmt"
+ "github.com/stretchr/testify/assert"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+ "time"
+)
+
+// initTestServer starts an httptest server that mimics the EC2 metadata
+// credentials endpoint: "/" lists a single role ("/creds"); any other path
+// returns a canned credentials JSON body with the given expiration string.
+func initTestServer(expireOn string) *httptest.Server {
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if r.RequestURI == "/" {
+ fmt.Fprintln(w, "/creds")
+ } else {
+ fmt.Fprintf(w, `{
+ "AccessKeyId" : "accessKey",
+ "SecretAccessKey" : "secret",
+ "Token" : "token",
+ "Expiration" : "%s"
+}`, expireOn)
+ }
+ }))
+
+ return server
+}
+
+func TestEC2RoleProvider(t *testing.T) {
+ server := initTestServer("2014-12-16T01:51:37Z")
+ defer server.Close()
+
+ p := &EC2RoleProvider{Client: http.DefaultClient, Endpoint: server.URL}
+
+ creds, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.Equal(t, "accessKey", creds.AccessKeyID, "Expect access key ID to match")
+ assert.Equal(t, "secret", creds.SecretAccessKey, "Expect secret access key to match")
+ assert.Equal(t, "token", creds.SessionToken, "Expect session token to match")
+}
+
+func TestEC2RoleProviderIsExpired(t *testing.T) {
+ server := initTestServer("2014-12-16T01:51:37Z")
+ defer server.Close()
+
+ p := &EC2RoleProvider{Client: http.DefaultClient, Endpoint: server.URL}
+ // Pin "now" before the fixed expiration so expiry behavior is deterministic.
+ p.CurrentTime = func() time.Time {
+ return time.Date(2014, 12, 15, 21, 26, 0, 0, time.UTC)
+ }
+
+ assert.True(t, p.IsExpired(), "Expect creds to be expired before retrieve.")
+
+ _, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.False(t, p.IsExpired(), "Expect creds to not be expired after retrieve.")
+
+ p.CurrentTime = func() time.Time {
+ return time.Date(3014, 12, 15, 21, 26, 0, 0, time.UTC)
+ }
+
+ assert.True(t, p.IsExpired(), "Expect creds to be expired.")
+}
+
+func TestEC2RoleProviderExpiryWindowIsExpired(t *testing.T) {
+ server := initTestServer("2014-12-16T01:51:37Z")
+ defer server.Close()
+
+ p := &EC2RoleProvider{Client: http.DefaultClient, Endpoint: server.URL, ExpiryWindow: time.Hour * 1}
+ p.CurrentTime = func() time.Time {
+ return time.Date(2014, 12, 15, 0, 51, 37, 0, time.UTC)
+ }
+
+ assert.True(t, p.IsExpired(), "Expect creds to be expired before retrieve.")
+
+ _, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.False(t, p.IsExpired(), "Expect creds to not be expired after retrieve.")
+
+ // Within the one-hour ExpiryWindow of the 01:51:37 expiration.
+ p.CurrentTime = func() time.Time {
+ return time.Date(2014, 12, 16, 0, 55, 37, 0, time.UTC)
+ }
+
+ assert.True(t, p.IsExpired(), "Expect creds to be expired.")
+}
+
+func BenchmarkEC2RoleProvider(b *testing.B) {
+ server := initTestServer("2014-12-16T01:51:37Z")
+ defer server.Close()
+
+ p := &EC2RoleProvider{Client: http.DefaultClient, Endpoint: server.URL}
+ _, err := p.Retrieve()
+ if err != nil {
+ b.Fatal(err)
+ }
+
+ b.ResetTimer()
+ b.RunParallel(func(pb *testing.PB) {
+ for pb.Next() {
+ _, err := p.Retrieve()
+ if err != nil {
+ b.Fatal(err)
+ }
+ }
+ })
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/env_provider.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/env_provider.go
new file mode 100644
index 0000000..3e556be
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/env_provider.go
@@ -0,0 +1,67 @@
+package credentials
+
+import (
+ "os"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+var (
+ // ErrAccessKeyIDNotFound is returned when the AWS Access Key ID can't be
+ // found in the process's environment.
+ ErrAccessKeyIDNotFound = awserr.New("EnvAccessKeyNotFound", "AWS_ACCESS_KEY_ID or AWS_ACCESS_KEY not found in environment", nil)
+ // ErrSecretAccessKeyNotFound is returned when the AWS Secret Access Key
+ // can't be found in the process's environment.
+ ErrSecretAccessKeyNotFound = awserr.New("EnvSecretNotFound", "AWS_SECRET_ACCESS_KEY or AWS_SECRET_KEY not found in environment", nil)
+)
+
+// A EnvProvider retrieves credentials from the environment variables of the
+// running process. Environment credentials never expire.
+//
+// Environment variables used:
+// - Access Key ID: AWS_ACCESS_KEY_ID or AWS_ACCESS_KEY
+// - Secret Access Key: AWS_SECRET_ACCESS_KEY or AWS_SECRET_KEY
+type EnvProvider struct {
+ // retrieved is true once Retrieve has successfully read the environment.
+ retrieved bool
+}
+
+// NewEnvCredentials returns a pointer to a new Credentials object
+// wrapping the environment variable provider.
+func NewEnvCredentials() *Credentials {
+ return NewCredentials(&EnvProvider{})
+}
+
+// Retrieve retrieves the keys from the environment.
+//
+// AWS_ACCESS_KEY_ID takes precedence over AWS_ACCESS_KEY, and
+// AWS_SECRET_ACCESS_KEY over AWS_SECRET_KEY.
+func (e *EnvProvider) Retrieve() (Value, error) {
+ e.retrieved = false
+
+ id := os.Getenv("AWS_ACCESS_KEY_ID")
+ if id == "" {
+ id = os.Getenv("AWS_ACCESS_KEY")
+ }
+
+ secret := os.Getenv("AWS_SECRET_ACCESS_KEY")
+ if secret == "" {
+ secret = os.Getenv("AWS_SECRET_KEY")
+ }
+
+ if id == "" {
+ return Value{}, ErrAccessKeyIDNotFound
+ }
+
+ if secret == "" {
+ return Value{}, ErrSecretAccessKeyNotFound
+ }
+
+ e.retrieved = true
+ return Value{
+ AccessKeyID: id,
+ SecretAccessKey: secret,
+ SessionToken: os.Getenv("AWS_SESSION_TOKEN"),
+ }, nil
+}
+
+// IsExpired returns true until the credentials have been successfully
+// retrieved; once read from the environment they never expire.
+func (e *EnvProvider) IsExpired() bool {
+ return !e.retrieved
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/env_provider_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/env_provider_test.go
new file mode 100644
index 0000000..53f6ce2
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/env_provider_test.go
@@ -0,0 +1,70 @@
+package credentials
+
+import (
+ "github.com/stretchr/testify/assert"
+ "os"
+ "testing"
+)
+
+// These tests mutate the process environment via os.Clearenv/os.Setenv and
+// therefore must not run in parallel.
+func TestEnvProviderRetrieve(t *testing.T) {
+ os.Clearenv()
+ os.Setenv("AWS_ACCESS_KEY_ID", "access")
+ os.Setenv("AWS_SECRET_ACCESS_KEY", "secret")
+ os.Setenv("AWS_SESSION_TOKEN", "token")
+
+ e := EnvProvider{}
+ creds, err := e.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.Equal(t, "access", creds.AccessKeyID, "Expect access key ID to match")
+ assert.Equal(t, "secret", creds.SecretAccessKey, "Expect secret access key to match")
+ assert.Equal(t, "token", creds.SessionToken, "Expect session token to match")
+}
+
+func TestEnvProviderIsExpired(t *testing.T) {
+ os.Clearenv()
+ os.Setenv("AWS_ACCESS_KEY_ID", "access")
+ os.Setenv("AWS_SECRET_ACCESS_KEY", "secret")
+ os.Setenv("AWS_SESSION_TOKEN", "token")
+
+ e := EnvProvider{}
+
+ assert.True(t, e.IsExpired(), "Expect creds to be expired before retrieve.")
+
+ _, err := e.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.False(t, e.IsExpired(), "Expect creds to not be expired after retrieve.")
+}
+
+func TestEnvProviderNoAccessKeyID(t *testing.T) {
+ os.Clearenv()
+ os.Setenv("AWS_SECRET_ACCESS_KEY", "secret")
+
+ e := EnvProvider{}
+ creds, err := e.Retrieve()
+ assert.Equal(t, ErrAccessKeyIDNotFound, err, "ErrAccessKeyIDNotFound expected, but was %#v error: %#v", creds, err)
+}
+
+func TestEnvProviderNoSecretAccessKey(t *testing.T) {
+ os.Clearenv()
+ os.Setenv("AWS_ACCESS_KEY_ID", "access")
+
+ e := EnvProvider{}
+ creds, err := e.Retrieve()
+ assert.Equal(t, ErrSecretAccessKeyNotFound, err, "ErrSecretAccessKeyNotFound expected, but was %#v error: %#v", creds, err)
+}
+
+func TestEnvProviderAlternateNames(t *testing.T) {
+ os.Clearenv()
+ os.Setenv("AWS_ACCESS_KEY", "access")
+ os.Setenv("AWS_SECRET_KEY", "secret")
+
+ e := EnvProvider{}
+ creds, err := e.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.Equal(t, "access", creds.AccessKeyID, "Expected access key ID")
+ assert.Equal(t, "secret", creds.SecretAccessKey, "Expected secret access key")
+ assert.Empty(t, creds.SessionToken, "Expected no token")
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/example.ini b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/example.ini
new file mode 100644
index 0000000..aa2dc50
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/example.ini
@@ -0,0 +1,8 @@
+[default]
+aws_access_key_id = accessKey
+aws_secret_access_key = secret
+aws_session_token = token
+
+[no_token]
+aws_access_key_id = accessKey
+aws_secret_access_key = secret
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/shared_credentials_provider.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/shared_credentials_provider.go
new file mode 100644
index 0000000..7367f73
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/shared_credentials_provider.go
@@ -0,0 +1,133 @@
+package credentials
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/vaughan0/go-ini"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+var (
+ // ErrSharedCredentialsHomeNotFound is emitted when the user directory cannot be found.
+ ErrSharedCredentialsHomeNotFound = awserr.New("UserHomeNotFound", "user home directory not found.", nil)
+)
+
+// A SharedCredentialsProvider retrieves credentials from the current user's home
+// directory, and keeps track if those credentials are expired.
+//
+// Profile ini file example: $HOME/.aws/credentials
+type SharedCredentialsProvider struct {
+ // Path to the shared credentials file. If empty will default to current user's
+ // home directory.
+ Filename string
+
+ // AWS Profile to extract credentials from the shared credentials file. If empty
+ // will default to environment variable "AWS_PROFILE" or "default" if
+ // environment variable is also not set.
+ Profile string
+
+ // retrieved states if the credentials have been successfully retrieved.
+ retrieved bool
+}
+
+// NewSharedCredentials returns a pointer to a new Credentials object
+// wrapping the Profile file provider.
+func NewSharedCredentials(filename, profile string) *Credentials {
+ return NewCredentials(&SharedCredentialsProvider{
+ Filename: filename,
+ Profile: profile,
+ })
+}
+
+// Retrieve reads and extracts the shared credentials from the current
+// users home directory.
+func (p *SharedCredentialsProvider) Retrieve() (Value, error) {
+ p.retrieved = false
+
+ filename, err := p.filename()
+ if err != nil {
+ return Value{}, err
+ }
+
+ creds, err := loadProfile(filename, p.profile())
+ if err != nil {
+ return Value{}, err
+ }
+
+ p.retrieved = true
+ return creds, nil
+}
+
+// IsExpired returns true until Retrieve has successfully read the
+// credentials; shared file credentials do not otherwise expire.
+func (p *SharedCredentialsProvider) IsExpired() bool {
+ return !p.retrieved
+}
+
+// loadProfile loads from the file pointed to by shared credentials filename for profile.
+// The credentials retrieved from the profile will be returned or error. Error will be
+// returned if it fails to read from the file, or the data is invalid.
+func loadProfile(filename, profile string) (Value, error) {
+ config, err := ini.LoadFile(filename)
+ if err != nil {
+ return Value{}, awserr.New("SharedCredsLoad", "failed to load shared credentials file", err)
+ }
+ iniProfile := config.Section(profile)
+
+ id, ok := iniProfile["aws_access_key_id"]
+ if !ok {
+ return Value{}, awserr.New("SharedCredsAccessKey",
+ fmt.Sprintf("shared credentials %s in %s did not contain aws_access_key_id", profile, filename),
+ nil)
+ }
+
+ secret, ok := iniProfile["aws_secret_access_key"]
+ if !ok {
+ return Value{}, awserr.New("SharedCredsSecret",
+ fmt.Sprintf("shared credentials %s in %s did not contain aws_secret_access_key", profile, filename),
+ nil)
+ }
+
+ // Session token is optional; missing key yields an empty string.
+ token := iniProfile["aws_session_token"]
+
+ return Value{
+ AccessKeyID: id,
+ SecretAccessKey: secret,
+ SessionToken: token,
+ }, nil
+}
+
+// filename returns the filename to use to read AWS shared credentials.
+// The resolved default is cached back into p.Filename.
+//
+// Will return an error if the user's home directory path cannot be found.
+func (p *SharedCredentialsProvider) filename() (string, error) {
+ if p.Filename == "" {
+ homeDir := os.Getenv("HOME") // *nix
+ if homeDir == "" { // Windows
+ homeDir = os.Getenv("USERPROFILE")
+ }
+ if homeDir == "" {
+ return "", ErrSharedCredentialsHomeNotFound
+ }
+
+ p.Filename = filepath.Join(homeDir, ".aws", "credentials")
+ }
+
+ return p.Filename, nil
+}
+
+// profile returns the AWS shared credentials profile. If empty will read
+// environment variable "AWS_PROFILE". If that is not set profile will
+// return "default". The resolved value is cached back into p.Profile.
+func (p *SharedCredentialsProvider) profile() string {
+ if p.Profile == "" {
+ p.Profile = os.Getenv("AWS_PROFILE")
+ }
+ if p.Profile == "" {
+ p.Profile = "default"
+ }
+
+ return p.Profile
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/shared_credentials_provider_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/shared_credentials_provider_test.go
new file mode 100644
index 0000000..3621d56
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/shared_credentials_provider_test.go
@@ -0,0 +1,77 @@
+package credentials
+
+import (
+ "github.com/stretchr/testify/assert"
+ "os"
+ "testing"
+)
+
+// These tests read the checked-in example.ini fixture and mutate the process
+// environment, so they must not run in parallel.
+func TestSharedCredentialsProvider(t *testing.T) {
+ os.Clearenv()
+
+ p := SharedCredentialsProvider{Filename: "example.ini", Profile: ""}
+ creds, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.Equal(t, "accessKey", creds.AccessKeyID, "Expect access key ID to match")
+ assert.Equal(t, "secret", creds.SecretAccessKey, "Expect secret access key to match")
+ assert.Equal(t, "token", creds.SessionToken, "Expect session token to match")
+}
+
+func TestSharedCredentialsProviderIsExpired(t *testing.T) {
+ os.Clearenv()
+
+ p := SharedCredentialsProvider{Filename: "example.ini", Profile: ""}
+
+ assert.True(t, p.IsExpired(), "Expect creds to be expired before retrieve")
+
+ _, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.False(t, p.IsExpired(), "Expect creds to not be expired after retrieve")
+}
+
+func TestSharedCredentialsProviderWithAWS_PROFILE(t *testing.T) {
+ os.Clearenv()
+ os.Setenv("AWS_PROFILE", "no_token")
+
+ p := SharedCredentialsProvider{Filename: "example.ini", Profile: ""}
+ creds, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.Equal(t, "accessKey", creds.AccessKeyID, "Expect access key ID to match")
+ assert.Equal(t, "secret", creds.SecretAccessKey, "Expect secret access key to match")
+ assert.Empty(t, creds.SessionToken, "Expect no token")
+}
+
+func TestSharedCredentialsProviderWithoutTokenFromProfile(t *testing.T) {
+ os.Clearenv()
+
+ p := SharedCredentialsProvider{Filename: "example.ini", Profile: "no_token"}
+ creds, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.Equal(t, "accessKey", creds.AccessKeyID, "Expect access key ID to match")
+ assert.Equal(t, "secret", creds.SecretAccessKey, "Expect secret access key to match")
+ assert.Empty(t, creds.SessionToken, "Expect no token")
+}
+
+func BenchmarkSharedCredentialsProvider(b *testing.B) {
+ os.Clearenv()
+
+ p := SharedCredentialsProvider{Filename: "example.ini", Profile: ""}
+ _, err := p.Retrieve()
+ if err != nil {
+ b.Fatal(err)
+ }
+
+ b.ResetTimer()
+ b.RunParallel(func(pb *testing.PB) {
+ for pb.Next() {
+ _, err := p.Retrieve()
+ if err != nil {
+ b.Fatal(err)
+ }
+ }
+ })
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/static_provider.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/static_provider.go
new file mode 100644
index 0000000..a114713
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/static_provider.go
@@ -0,0 +1,42 @@
+package credentials
+
+import (
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+var (
+ // ErrStaticCredentialsEmpty is emitted when static credentials are empty.
+ ErrStaticCredentialsEmpty = awserr.New("EmptyStaticCreds", "static credentials are empty", nil)
+)
+
+// A StaticProvider is a set of credentials which are set programmatically,
+// and will never expire.
+type StaticProvider struct {
+ Value
+}
+
+// NewStaticCredentials returns a pointer to a new Credentials object
+// wrapping a static credentials value provider.
+func NewStaticCredentials(id, secret, token string) *Credentials {
+ return NewCredentials(&StaticProvider{Value: Value{
+ AccessKeyID: id,
+ SecretAccessKey: secret,
+ SessionToken: token,
+ }})
+}
+
+// Retrieve returns the credentials or error if the credentials are invalid.
+// Credentials are invalid when the access key ID or secret access key is empty.
+func (s *StaticProvider) Retrieve() (Value, error) {
+ if s.AccessKeyID == "" || s.SecretAccessKey == "" {
+ return Value{}, ErrStaticCredentialsEmpty
+ }
+
+ return s.Value, nil
+}
+
+// IsExpired returns if the credentials are expired.
+//
+// For StaticProvider, the credentials never expire.
+func (s *StaticProvider) IsExpired() bool {
+ return false
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/static_provider_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/static_provider_test.go
new file mode 100644
index 0000000..ea01236
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/static_provider_test.go
@@ -0,0 +1,34 @@
+package credentials
+
+import (
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
+// TestStaticProviderGet verifies the canned value is returned unchanged.
+func TestStaticProviderGet(t *testing.T) {
+ s := StaticProvider{
+ Value: Value{
+ AccessKeyID: "AKID",
+ SecretAccessKey: "SECRET",
+ SessionToken: "",
+ },
+ }
+
+ creds, err := s.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+ assert.Equal(t, "AKID", creds.AccessKeyID, "Expect access key ID to match")
+ assert.Equal(t, "SECRET", creds.SecretAccessKey, "Expect secret access key to match")
+ assert.Empty(t, creds.SessionToken, "Expect no session token")
+}
+
+func TestStaticProviderIsExpired(t *testing.T) {
+ s := StaticProvider{
+ Value: Value{
+ AccessKeyID: "AKID",
+ SecretAccessKey: "SECRET",
+ SessionToken: "",
+ },
+ }
+
+ assert.False(t, s.IsExpired(), "Expect static credentials to never expire")
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/stscreds/assume_role_provider.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/stscreds/assume_role_provider.go
new file mode 100644
index 0000000..1499b44
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/stscreds/assume_role_provider.go
@@ -0,0 +1,120 @@
+// Package stscreds are credential Providers to retrieve STS AWS credentials.
+//
+// STS provides multiple ways to retrieve credentials which can be used when making
+// future AWS service API operation calls.
+package stscreds
+
+import (
+ "fmt"
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/credentials"
+ "github.com/aws/aws-sdk-go/service/sts"
+ "time"
+)
+
+// AssumeRoler represents the minimal subset of the STS client API used by this provider.
+type AssumeRoler interface {
+ AssumeRole(input *sts.AssumeRoleInput) (*sts.AssumeRoleOutput, error)
+}
+
+// AssumeRoleProvider retrieves temporary credentials from the STS service, and
+// keeps track of their expiration time. This provider must be used explicitly,
+// as it is not included in the credentials chain.
+//
+// Example how to configure a service to use this provider:
+//
+// config := &aws.Config{
+// Credentials: stscreds.NewCredentials(nil, "arn-of-the-role-to-assume", 10*time.Second),
+// })
+// // Use config for creating your AWS service.
+//
+// Example how to obtain customised credentials:
+//
+// provider := &stscreds.Provider{
+// // Extend the duration to 1 hour.
+// Duration: time.Hour,
+// // Custom role name.
+// RoleSessionName: "custom-session-name",
+// }
+// creds := credentials.NewCredentials(provider)
+//
+type AssumeRoleProvider struct {
+ credentials.Expiry
+
+ // Custom STS client. If not set the default STS client will be used.
+ Client AssumeRoler
+
+ // Role to be assumed.
+ RoleARN string
+
+ // Session name, if you wish to reuse the credentials elsewhere.
+ RoleSessionName string
+
+ // Expiry duration of the STS credentials. Defaults to 15 minutes if not set.
+ Duration time.Duration
+
+ // ExpiryWindow will allow the credentials to trigger refreshing prior to
+ // the credentials actually expiring. This is beneficial so race conditions
+ // with expiring credentials do not cause request to fail unexpectedly
+ // due to ExpiredTokenException exceptions.
+ //
+ // So a ExpiryWindow of 10s would cause calls to IsExpired() to return true
+ // 10 seconds before the credentials are actually expired.
+ //
+ // If ExpiryWindow is 0 or less it will be ignored.
+ ExpiryWindow time.Duration
+}
+
+// NewCredentials returns a pointer to a new Credentials object wrapping the
+// AssumeRoleProvider. The credentials will expire every 15 minutes and the
+// role will be named after a nanosecond timestamp of this operation.
+//
+// The sts and roleARN parameters are used for building the "AssumeRole" call.
+// Pass nil as sts to use the default client.
+//
+// Window is the expiry window that will be subtracted from the expiry returned
+// by the role credential request. This is done so that the credentials will
+// expire sooner than their actual lifespan.
+func NewCredentials(client AssumeRoler, roleARN string, window time.Duration) *credentials.Credentials {
+ return credentials.NewCredentials(&AssumeRoleProvider{
+ Client: client,
+ RoleARN: roleARN,
+ ExpiryWindow: window,
+ })
+}
+
+// Retrieve generates a new set of temporary credentials using STS.
+//
+// NOTE(review): defaults are written back onto p (Client, RoleSessionName,
+// Duration), so Retrieve is not safe for unsynchronized concurrent use on
+// its own; credentials.Credentials.Get serializes calls through its mutex.
+func (p *AssumeRoleProvider) Retrieve() (credentials.Value, error) {
+
+ // Apply defaults where parameters are not set.
+ if p.Client == nil {
+ p.Client = sts.New(nil)
+ }
+ if p.RoleSessionName == "" {
+ // Try to work out a role name that will hopefully end up unique.
+ p.RoleSessionName = fmt.Sprintf("%d", time.Now().UTC().UnixNano())
+ }
+ if p.Duration == 0 {
+ // Expire as often as AWS permits.
+ p.Duration = 15 * time.Minute
+ }
+
+ roleOutput, err := p.Client.AssumeRole(&sts.AssumeRoleInput{
+ DurationSeconds: aws.Long(int64(p.Duration / time.Second)),
+ RoleARN: aws.String(p.RoleARN),
+ RoleSessionName: aws.String(p.RoleSessionName),
+ })
+
+ if err != nil {
+ return credentials.Value{}, err
+ }
+
+ // We will proactively generate new credentials before they expire.
+ p.SetExpiration(*roleOutput.Credentials.Expiration, p.ExpiryWindow)
+
+ return credentials.Value{
+ AccessKeyID: *roleOutput.Credentials.AccessKeyID,
+ SecretAccessKey: *roleOutput.Credentials.SecretAccessKey,
+ SessionToken: *roleOutput.Credentials.SessionToken,
+ }, nil
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/stscreds/assume_role_provider_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/stscreds/assume_role_provider_test.go
new file mode 100644
index 0000000..98b7690
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/credentials/stscreds/assume_role_provider_test.go
@@ -0,0 +1,58 @@
+package stscreds
+
+import (
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/service/sts"
+ "github.com/stretchr/testify/assert"
+ "testing"
+ "time"
+)
+
+// stubSTS is a test double for the AssumeRoler interface that echoes the
+// requested role ARN back as the access key ID.
+type stubSTS struct {
+}
+
+func (s *stubSTS) AssumeRole(input *sts.AssumeRoleInput) (*sts.AssumeRoleOutput, error) {
+ expiry := time.Now().Add(60 * time.Minute)
+ return &sts.AssumeRoleOutput{
+ Credentials: &sts.Credentials{
+ // Just reflect the role arn to the provider.
+ AccessKeyID: input.RoleARN,
+ SecretAccessKey: aws.String("assumedSecretAccessKey"),
+ SessionToken: aws.String("assumedSessionToken"),
+ Expiration: &expiry,
+ },
+ }, nil
+}
+
+func TestAssumeRoleProvider(t *testing.T) {
+ stub := &stubSTS{}
+ p := &AssumeRoleProvider{
+ Client: stub,
+ RoleARN: "roleARN",
+ }
+
+ creds, err := p.Retrieve()
+ assert.Nil(t, err, "Expect no error")
+
+ assert.Equal(t, "roleARN", creds.AccessKeyID, "Expect access key ID to be reflected role ARN")
+ assert.Equal(t, "assumedSecretAccessKey", creds.SecretAccessKey, "Expect secret access key to match")
+ assert.Equal(t, "assumedSessionToken", creds.SessionToken, "Expect session token to match")
+}
+
+func BenchmarkAssumeRoleProvider(b *testing.B) {
+ stub := &stubSTS{}
+ p := &AssumeRoleProvider{
+ Client: stub,
+ RoleARN: "roleARN",
+ }
+
+ b.ResetTimer()
+ b.RunParallel(func(pb *testing.PB) {
+ for pb.Next() {
+ _, err := p.Retrieve()
+ if err != nil {
+ b.Fatal(err)
+ }
+ }
+ })
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handler_functions.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handler_functions.go
new file mode 100644
index 0000000..a2a88a9
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handler_functions.go
@@ -0,0 +1,153 @@
+package aws
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "regexp"
+ "strconv"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+var sleepDelay = func(delay time.Duration) {
+ time.Sleep(delay)
+}
+
+// lener is implemented by types that can report their length via a Len method.
+type lener interface {
+ Len() int
+}
+
+// BuildContentLength builds the content length of a request based on the body,
+// or will use the HTTPRequest.Header's "Content-Length" if defined. If unable
+// to determine request body length and no "Content-Length" was specified it will panic.
+func BuildContentLength(r *Request) {
+ if slength := r.HTTPRequest.Header.Get("Content-Length"); slength != "" {
+ length, _ := strconv.ParseInt(slength, 10, 64)
+ r.HTTPRequest.ContentLength = length
+ return
+ }
+
+ var length int64
+ switch body := r.Body.(type) {
+ case nil:
+ length = 0
+ case lener:
+ length = int64(body.Len())
+ case io.Seeker:
+ r.bodyStart, _ = body.Seek(0, 1)
+ end, _ := body.Seek(0, 2)
+ body.Seek(r.bodyStart, 0) // make sure to seek back to original location
+ length = end - r.bodyStart
+ default:
+ panic("Cannot get length of body, must provide `ContentLength`")
+ }
+
+ r.HTTPRequest.ContentLength = length
+ r.HTTPRequest.Header.Set("Content-Length", fmt.Sprintf("%d", length))
+}
+
+// UserAgentHandler is a request handler for injecting User agent into requests.
+func UserAgentHandler(r *Request) {
+ r.HTTPRequest.Header.Set("User-Agent", SDKName+"/"+SDKVersion)
+}
+
+var reStatusCode = regexp.MustCompile(`^(\d+)`)
+
+// SendHandler is a request handler to send service request using HTTP client.
+func SendHandler(r *Request) {
+ var err error
+ r.HTTPResponse, err = r.Service.Config.HTTPClient.Do(r.HTTPRequest)
+ if err != nil {
+ // Capture the case where url.Error is returned for error processing
+ // response. e.g. 301 without location header comes back as string
+ // error and r.HTTPResponse is nil. Other url redirect errors will
+ // come back in a similar manner.
+ if e, ok := err.(*url.Error); ok {
+ if s := reStatusCode.FindStringSubmatch(e.Error()); s != nil {
+ code, _ := strconv.ParseInt(s[1], 10, 64)
+ r.HTTPResponse = &http.Response{
+ StatusCode: int(code),
+ Status: http.StatusText(int(code)),
+ Body: ioutil.NopCloser(bytes.NewReader([]byte{})),
+ }
+ return
+ }
+ }
+ if r.HTTPRequest == nil {
+ // Add a dummy request response object to ensure the HTTPResponse
+ // value is consistent.
+ r.HTTPResponse = &http.Response{
+ StatusCode: int(0),
+ Status: http.StatusText(int(0)),
+ Body: ioutil.NopCloser(bytes.NewReader([]byte{})),
+ }
+ }
+ // Catch all other request errors.
+ r.Error = awserr.New("RequestError", "send request failed", err)
+ r.Retryable.Set(true) // network errors are retryable
+ }
+}
+
+// ValidateResponseHandler is a request handler to validate service response.
+func ValidateResponseHandler(r *Request) {
+ if r.HTTPResponse.StatusCode == 0 || r.HTTPResponse.StatusCode >= 300 {
+ // this may be replaced by an UnmarshalError handler
+ r.Error = awserr.New("UnknownError", "unknown error", nil)
+ }
+}
+
+// AfterRetryHandler performs final checks to determine if the request should
+// be retried and how long to delay.
+func AfterRetryHandler(r *Request) {
+ // If one of the other handlers already set the retry state
+ // we don't want to override it based on the service's state
+ if !r.Retryable.IsSet() {
+ r.Retryable.Set(r.Service.ShouldRetry(r))
+ }
+
+ if r.WillRetry() {
+ r.RetryDelay = r.Service.RetryRules(r)
+ sleepDelay(r.RetryDelay)
+
+ // when the expired token exception occurs the credentials
+ // need to be expired locally so that the next request to
+ // get credentials will trigger a credentials refresh.
+ if r.Error != nil {
+ if err, ok := r.Error.(awserr.Error); ok {
+ if isCodeExpiredCreds(err.Code()) {
+ r.Config.Credentials.Expire()
+ }
+ }
+ }
+
+ r.RetryCount++
+ r.Error = nil
+ }
+}
+
+var (
+ // ErrMissingRegion is an error that is returned if region configuration is
+ // not found.
+ ErrMissingRegion error = awserr.New("MissingRegion", "could not find region configuration", nil)
+
+ // ErrMissingEndpoint is an error that is returned if an endpoint cannot be
+ // resolved for a service.
+ ErrMissingEndpoint error = awserr.New("MissingEndpoint", "'Endpoint' configuration is required for this service", nil)
+)
+
+// ValidateEndpointHandler is a request handler to validate a request had the
+// appropriate Region and Endpoint set. Will set r.Error if the endpoint or
+// region is not valid.
+func ValidateEndpointHandler(r *Request) {
+ if r.Service.SigningRegion == "" && r.Service.Config.Region == "" {
+ r.Error = ErrMissingRegion
+ } else if r.Service.Endpoint == "" {
+ r.Error = ErrMissingEndpoint
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handler_functions_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handler_functions_test.go
new file mode 100644
index 0000000..c6a30c0
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handler_functions_test.go
@@ -0,0 +1,81 @@
+package aws
+
+import (
+ "net/http"
+ "os"
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/aws/credentials"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestValidateEndpointHandler(t *testing.T) {
+ os.Clearenv()
+ svc := NewService(&Config{Region: "us-west-2"})
+ svc.Handlers.Clear()
+ svc.Handlers.Validate.PushBack(ValidateEndpointHandler)
+
+ req := NewRequest(svc, &Operation{Name: "Operation"}, nil, nil)
+ err := req.Build()
+
+ assert.NoError(t, err)
+}
+
+func TestValidateEndpointHandlerErrorRegion(t *testing.T) {
+ os.Clearenv()
+ svc := NewService(nil)
+ svc.Handlers.Clear()
+ svc.Handlers.Validate.PushBack(ValidateEndpointHandler)
+
+ req := NewRequest(svc, &Operation{Name: "Operation"}, nil, nil)
+ err := req.Build()
+
+ assert.Error(t, err)
+ assert.Equal(t, ErrMissingRegion, err)
+}
+
+type mockCredsProvider struct {
+ expired bool
+ retreiveCalled bool
+}
+
+func (m *mockCredsProvider) Retrieve() (credentials.Value, error) {
+ m.retreiveCalled = true
+ return credentials.Value{}, nil
+}
+
+func (m *mockCredsProvider) IsExpired() bool {
+ return m.expired
+}
+
+func TestAfterRetryRefreshCreds(t *testing.T) {
+ os.Clearenv()
+ credProvider := &mockCredsProvider{}
+ svc := NewService(&Config{Credentials: credentials.NewCredentials(credProvider), MaxRetries: 1})
+
+ svc.Handlers.Clear()
+ svc.Handlers.ValidateResponse.PushBack(func(r *Request) {
+ r.Error = awserr.New("UnknownError", "", nil)
+ r.HTTPResponse = &http.Response{StatusCode: 400}
+ })
+ svc.Handlers.UnmarshalError.PushBack(func(r *Request) {
+ r.Error = awserr.New("ExpiredTokenException", "", nil)
+ })
+ svc.Handlers.AfterRetry.PushBack(func(r *Request) {
+ AfterRetryHandler(r)
+ })
+
+ assert.True(t, svc.Config.Credentials.IsExpired(), "Expect to start out expired")
+ assert.False(t, credProvider.retreiveCalled)
+
+ req := NewRequest(svc, &Operation{Name: "Operation"}, nil, nil)
+ req.Send()
+
+ assert.True(t, svc.Config.Credentials.IsExpired())
+ assert.False(t, credProvider.retreiveCalled)
+
+ _, err := svc.Config.Credentials.Get()
+ assert.NoError(t, err)
+ assert.True(t, credProvider.retreiveCalled)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handlers.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handlers.go
new file mode 100644
index 0000000..1968cb9
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handlers.go
@@ -0,0 +1,85 @@
+package aws
+
+// A Handlers provides a collection of request handlers for various
+// stages of handling requests.
+type Handlers struct {
+ Validate HandlerList
+ Build HandlerList
+ Sign HandlerList
+ Send HandlerList
+ ValidateResponse HandlerList
+ Unmarshal HandlerList
+ UnmarshalMeta HandlerList
+ UnmarshalError HandlerList
+ Retry HandlerList
+ AfterRetry HandlerList
+}
+
+// copy returns a copy of this handler's lists.
+func (h *Handlers) copy() Handlers {
+ return Handlers{
+ Validate: h.Validate.copy(),
+ Build: h.Build.copy(),
+ Sign: h.Sign.copy(),
+ Send: h.Send.copy(),
+ ValidateResponse: h.ValidateResponse.copy(),
+ Unmarshal: h.Unmarshal.copy(),
+ UnmarshalError: h.UnmarshalError.copy(),
+ UnmarshalMeta: h.UnmarshalMeta.copy(),
+ Retry: h.Retry.copy(),
+ AfterRetry: h.AfterRetry.copy(),
+ }
+}
+
+// Clear removes callback functions for all handlers
+func (h *Handlers) Clear() {
+ h.Validate.Clear()
+ h.Build.Clear()
+ h.Send.Clear()
+ h.Sign.Clear()
+ h.Unmarshal.Clear()
+ h.UnmarshalMeta.Clear()
+ h.UnmarshalError.Clear()
+ h.ValidateResponse.Clear()
+ h.Retry.Clear()
+ h.AfterRetry.Clear()
+}
+
+// A HandlerList manages zero or more handlers in a list.
+type HandlerList struct {
+ list []func(*Request)
+}
+
+// copy creates a copy of the handler list.
+func (l *HandlerList) copy() HandlerList {
+ var n HandlerList
+ n.list = append([]func(*Request){}, l.list...)
+ return n
+}
+
+// Clear clears the handler list.
+func (l *HandlerList) Clear() {
+ l.list = []func(*Request){}
+}
+
+// Len returns the number of handlers in the list.
+func (l *HandlerList) Len() int {
+ return len(l.list)
+}
+
+// PushBack pushes handlers f to the back of the handler list.
+func (l *HandlerList) PushBack(f ...func(*Request)) {
+ l.list = append(l.list, f...)
+}
+
+// PushFront pushes handlers f to the front of the handler list.
+func (l *HandlerList) PushFront(f ...func(*Request)) {
+ l.list = append(f, l.list...)
+}
+
+// Run executes all handlers in the list with a given request object.
+func (l *HandlerList) Run(r *Request) {
+ for _, f := range l.list {
+ f(r)
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handlers_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handlers_test.go
new file mode 100644
index 0000000..26776f6
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/handlers_test.go
@@ -0,0 +1,31 @@
+package aws
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestHandlerList(t *testing.T) {
+ s := ""
+ r := &Request{}
+ l := HandlerList{}
+ l.PushBack(func(r *Request) {
+ s += "a"
+ r.Data = s
+ })
+ l.Run(r)
+ assert.Equal(t, "a", s)
+ assert.Equal(t, "a", r.Data)
+}
+
+func TestMultipleHandlers(t *testing.T) {
+ r := &Request{}
+ l := HandlerList{}
+ l.PushBack(func(r *Request) { r.Data = nil })
+ l.PushFront(func(r *Request) { r.Data = Boolean(true) })
+ l.Run(r)
+ if r.Data != nil {
+ t.Error("Expected handler to execute")
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/param_validator.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/param_validator.go
new file mode 100644
index 0000000..b4e95ce
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/param_validator.go
@@ -0,0 +1,89 @@
+package aws
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+// ValidateParameters is a request handler to validate the input parameters.
+// Validating parameters only has meaning if done prior to the request being sent.
+func ValidateParameters(r *Request) {
+ if r.ParamsFilled() {
+ v := validator{errors: []string{}}
+ v.validateAny(reflect.ValueOf(r.Params), "")
+
+ if count := len(v.errors); count > 0 {
+ format := "%d validation errors:\n- %s"
+ msg := fmt.Sprintf(format, count, strings.Join(v.errors, "\n- "))
+ r.Error = awserr.New("InvalidParameter", msg, nil)
+ }
+ }
+}
+
+// A validator validates values. Collects validations errors which occurs.
+type validator struct {
+ errors []string
+}
+
+// validateAny will validate any struct, slice or map type. All validations
+// are also performed recursively for nested types.
+func (v *validator) validateAny(value reflect.Value, path string) {
+ value = reflect.Indirect(value)
+ if !value.IsValid() {
+ return
+ }
+
+ switch value.Kind() {
+ case reflect.Struct:
+ v.validateStruct(value, path)
+ case reflect.Slice:
+ for i := 0; i < value.Len(); i++ {
+ v.validateAny(value.Index(i), path+fmt.Sprintf("[%d]", i))
+ }
+ case reflect.Map:
+ for _, n := range value.MapKeys() {
+ v.validateAny(value.MapIndex(n), path+fmt.Sprintf("[%q]", n.String()))
+ }
+ }
+}
+
+// validateStruct will validate the struct value's fields. If the structure has
+// nested types those types will be validated also.
+func (v *validator) validateStruct(value reflect.Value, path string) {
+ prefix := "."
+ if path == "" {
+ prefix = ""
+ }
+
+ for i := 0; i < value.Type().NumField(); i++ {
+ f := value.Type().Field(i)
+ if strings.ToLower(f.Name[0:1]) == f.Name[0:1] {
+ continue
+ }
+ fvalue := value.FieldByName(f.Name)
+
+ notset := false
+ if f.Tag.Get("required") != "" {
+ switch fvalue.Kind() {
+ case reflect.Ptr, reflect.Slice, reflect.Map:
+ if fvalue.IsNil() {
+ notset = true
+ }
+ default:
+ if !fvalue.IsValid() {
+ notset = true
+ }
+ }
+ }
+
+ if notset {
+ msg := "missing required parameter: " + path + prefix + f.Name
+ v.errors = append(v.errors, msg)
+ } else {
+ v.validateAny(fvalue, path+prefix+f.Name)
+ }
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/param_validator_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/param_validator_test.go
new file mode 100644
index 0000000..b8239f4
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/param_validator_test.go
@@ -0,0 +1,84 @@
+package aws_test
+
+import (
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/stretchr/testify/assert"
+)
+
+var service = func() *aws.Service {
+ s := &aws.Service{
+ Config: &aws.Config{},
+ ServiceName: "mock-service",
+ APIVersion: "2015-01-01",
+ }
+ return s
+}()
+
+type StructShape struct {
+ RequiredList []*ConditionalStructShape `required:"true"`
+ RequiredMap map[string]*ConditionalStructShape `required:"true"`
+ RequiredBool *bool `required:"true"`
+ OptionalStruct *ConditionalStructShape
+
+ hiddenParameter *string
+
+ metadataStructureShape
+}
+
+type metadataStructureShape struct {
+ SDKShapeTraits bool
+}
+
+type ConditionalStructShape struct {
+ Name *string `required:"true"`
+ SDKShapeTraits bool
+}
+
+func TestNoErrors(t *testing.T) {
+ input := &StructShape{
+ RequiredList: []*ConditionalStructShape{},
+ RequiredMap: map[string]*ConditionalStructShape{
+ "key1": {Name: aws.String("Name")},
+ "key2": {Name: aws.String("Name")},
+ },
+ RequiredBool: aws.Boolean(true),
+ OptionalStruct: &ConditionalStructShape{Name: aws.String("Name")},
+ }
+
+ req := aws.NewRequest(service, &aws.Operation{}, input, nil)
+ aws.ValidateParameters(req)
+ assert.NoError(t, req.Error)
+}
+
+func TestMissingRequiredParameters(t *testing.T) {
+ input := &StructShape{}
+ req := aws.NewRequest(service, &aws.Operation{}, input, nil)
+ aws.ValidateParameters(req)
+
+ assert.Error(t, req.Error)
+ assert.Equal(t, "InvalidParameter", req.Error.(awserr.Error).Code())
+ assert.Equal(t, "3 validation errors:\n- missing required parameter: RequiredList\n- missing required parameter: RequiredMap\n- missing required parameter: RequiredBool", req.Error.(awserr.Error).Message())
+}
+
+func TestNestedMissingRequiredParameters(t *testing.T) {
+ input := &StructShape{
+ RequiredList: []*ConditionalStructShape{{}},
+ RequiredMap: map[string]*ConditionalStructShape{
+ "key1": {Name: aws.String("Name")},
+ "key2": {},
+ },
+ RequiredBool: aws.Boolean(true),
+ OptionalStruct: &ConditionalStructShape{},
+ }
+
+ req := aws.NewRequest(service, &aws.Operation{}, input, nil)
+ aws.ValidateParameters(req)
+
+ assert.Error(t, req.Error)
+ assert.Equal(t, "InvalidParameter", req.Error.(awserr.Error).Code())
+ assert.Equal(t, "3 validation errors:\n- missing required parameter: RequiredList[0].Name\n- missing required parameter: RequiredMap[\"key2\"].Name\n- missing required parameter: OptionalStruct.Name", req.Error.(awserr.Error).Message())
+
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/request.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/request.go
new file mode 100644
index 0000000..68d1a4d
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/request.go
@@ -0,0 +1,312 @@
+package aws
+
+import (
+ "bytes"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "reflect"
+ "strings"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+)
+
+// A Request is the service request to be made.
+type Request struct {
+ *Service
+ Handlers Handlers
+ Time time.Time
+ ExpireTime time.Duration
+ Operation *Operation
+ HTTPRequest *http.Request
+ HTTPResponse *http.Response
+ Body io.ReadSeeker
+ bodyStart int64 // offset from beginning of Body that the request body starts
+ Params interface{}
+ Error error
+ Data interface{}
+ RequestID string
+ RetryCount uint
+ Retryable SettableBool
+ RetryDelay time.Duration
+
+ built bool
+}
+
+// An Operation is the service API operation to be made.
+type Operation struct {
+ Name string
+ HTTPMethod string
+ HTTPPath string
+ *Paginator
+}
+
+// Paginator keeps track of pagination configuration for an API operation.
+type Paginator struct {
+ InputTokens []string
+ OutputTokens []string
+ LimitToken string
+ TruncationToken string
+}
+
+// NewRequest returns a new Request pointer for the service API
+// operation and parameters.
+//
+// Params is any value of input parameters to be the request payload.
+// Data is pointer value to an object which the request's response
+// payload will be deserialized to.
+func NewRequest(service *Service, operation *Operation, params interface{}, data interface{}) *Request {
+ method := operation.HTTPMethod
+ if method == "" {
+ method = "POST"
+ }
+ p := operation.HTTPPath
+ if p == "" {
+ p = "/"
+ }
+
+ httpReq, _ := http.NewRequest(method, "", nil)
+ httpReq.URL, _ = url.Parse(service.Endpoint + p)
+
+ r := &Request{
+ Service: service,
+ Handlers: service.Handlers.copy(),
+ Time: time.Now(),
+ ExpireTime: 0,
+ Operation: operation,
+ HTTPRequest: httpReq,
+ Body: nil,
+ Params: params,
+ Error: nil,
+ Data: data,
+ }
+ r.SetBufferBody([]byte{})
+
+ return r
+}
+
+// WillRetry returns if the request can be retried.
+func (r *Request) WillRetry() bool {
+ return r.Error != nil && r.Retryable.Get() && r.RetryCount < r.Service.MaxRetries()
+}
+
+// ParamsFilled returns if the request's parameters have been populated
+// and the parameters are valid. False is returned if no parameters are
+// provided or are invalid.
+func (r *Request) ParamsFilled() bool {
+ return r.Params != nil && reflect.ValueOf(r.Params).Elem().IsValid()
+}
+
+// DataFilled returns true if the request's data for response deserialization
+// target has been set and is valid. False is returned if data is not
+// set, or is invalid.
+func (r *Request) DataFilled() bool {
+ return r.Data != nil && reflect.ValueOf(r.Data).Elem().IsValid()
+}
+
+// SetBufferBody will set the request's body bytes that will be sent to
+// the service API.
+func (r *Request) SetBufferBody(buf []byte) {
+ r.SetReaderBody(bytes.NewReader(buf))
+}
+
+// SetStringBody sets the body of the request to be backed by a string.
+func (r *Request) SetStringBody(s string) {
+ r.SetReaderBody(strings.NewReader(s))
+}
+
+// SetReaderBody will set the request's body reader.
+func (r *Request) SetReaderBody(reader io.ReadSeeker) {
+ r.HTTPRequest.Body = ioutil.NopCloser(reader)
+ r.Body = reader
+}
+
+// Presign returns the request's signed URL. Error will be returned
+// if the signing fails.
+func (r *Request) Presign(expireTime time.Duration) (string, error) {
+ r.ExpireTime = expireTime
+ r.Sign()
+ if r.Error != nil {
+ return "", r.Error
+ }
+ return r.HTTPRequest.URL.String(), nil
+}
+
+// Build will build the request's object so it can be signed and sent
+// to the service. Build will also validate all the request's parameters.
+// Any additional build Handlers set on this request will be run
+// in the order they were set.
+//
+// The request will only be built once. Multiple calls to build will have
+// no effect.
+//
+// If any Validate or Build errors occur the build will stop and the error
+// which occurred will be returned.
+func (r *Request) Build() error {
+ if !r.built {
+ r.Error = nil
+ r.Handlers.Validate.Run(r)
+ if r.Error != nil {
+ return r.Error
+ }
+ r.Handlers.Build.Run(r)
+ r.built = true
+ }
+
+ return r.Error
+}
+
+// Sign will sign the request, returning an error if errors are encountered.
+//
+// Send will build the request prior to signing. All Sign Handlers will
+// be executed in the order they were set.
+func (r *Request) Sign() error {
+ r.Build()
+ if r.Error != nil {
+ return r.Error
+ }
+
+ r.Handlers.Sign.Run(r)
+ return r.Error
+}
+
+// Send will send the request returning error if errors are encountered.
+//
+// Send will sign the request prior to sending. All Send Handlers will
+// be executed in the order they were set.
+func (r *Request) Send() error {
+ for {
+ r.Sign()
+ if r.Error != nil {
+ return r.Error
+ }
+
+ if r.Retryable.Get() {
+ // Re-seek the body back to the original point for a retry so that
+ // send will send the body's contents again in the upcoming request.
+ r.Body.Seek(r.bodyStart, 0)
+ }
+ r.Retryable.Reset()
+
+ r.Handlers.Send.Run(r)
+ if r.Error != nil {
+ r.Handlers.Retry.Run(r)
+ r.Handlers.AfterRetry.Run(r)
+ if r.Error != nil {
+ return r.Error
+ }
+ continue
+ }
+
+ r.Handlers.UnmarshalMeta.Run(r)
+ r.Handlers.ValidateResponse.Run(r)
+ if r.Error != nil {
+ r.Handlers.UnmarshalError.Run(r)
+ r.Handlers.Retry.Run(r)
+ r.Handlers.AfterRetry.Run(r)
+ if r.Error != nil {
+ return r.Error
+ }
+ continue
+ }
+
+ r.Handlers.Unmarshal.Run(r)
+ if r.Error != nil {
+ r.Handlers.Retry.Run(r)
+ r.Handlers.AfterRetry.Run(r)
+ if r.Error != nil {
+ return r.Error
+ }
+ continue
+ }
+
+ break
+ }
+
+ return nil
+}
+
+// HasNextPage returns true if this request has more pages of data available.
+func (r *Request) HasNextPage() bool {
+ return r.nextPageTokens() != nil
+}
+
+// nextPageTokens returns the tokens to use when asking for the next page of
+// data.
+func (r *Request) nextPageTokens() []interface{} {
+ if r.Operation.Paginator == nil {
+ return nil
+ }
+
+ if r.Operation.TruncationToken != "" {
+ tr := awsutil.ValuesAtAnyPath(r.Data, r.Operation.TruncationToken)
+ if tr == nil || len(tr) == 0 {
+ return nil
+ }
+ switch v := tr[0].(type) {
+ case bool:
+ if v == false {
+ return nil
+ }
+ }
+ }
+
+ found := false
+ tokens := make([]interface{}, len(r.Operation.OutputTokens))
+
+ for i, outtok := range r.Operation.OutputTokens {
+ v := awsutil.ValuesAtAnyPath(r.Data, outtok)
+ if v != nil && len(v) > 0 {
+ found = true
+ tokens[i] = v[0]
+ }
+ }
+
+ if found {
+ return tokens
+ }
+ return nil
+}
+
+// NextPage returns a new Request that can be executed to return the next
+// page of result data. Call .Send() on this request to execute it.
+func (r *Request) NextPage() *Request {
+ tokens := r.nextPageTokens()
+ if tokens == nil {
+ return nil
+ }
+
+ data := reflect.New(reflect.TypeOf(r.Data).Elem()).Interface()
+ nr := NewRequest(r.Service, r.Operation, awsutil.CopyOf(r.Params), data)
+ for i, intok := range nr.Operation.InputTokens {
+ awsutil.SetValueAtAnyPath(nr.Params, intok, tokens[i])
+ }
+ return nr
+}
+
+// EachPage iterates over each page of a paginated request object. The fn
+// parameter should be a function with the following sample signature:
+//
+// func(page *T, lastPage bool) bool {
+// return true // return false to stop iterating
+// }
+//
+// Where "T" is the structure type matching the output structure of the given
+// operation. For example, a request object generated by
+// DynamoDB.ListTablesRequest() would expect to see dynamodb.ListTablesOutput
+// as the structure "T". The lastPage value represents whether the page is
+// the last page of data or not. The return value of this function should
+// return true to keep iterating or false to stop.
+func (r *Request) EachPage(fn func(data interface{}, isLastPage bool) (shouldContinue bool)) error {
+ for page := r; page != nil; page = page.NextPage() {
+ page.Send()
+ shouldContinue := fn(page.Data, !page.HasNextPage())
+ if page.Error != nil || !shouldContinue {
+ return page.Error
+ }
+ }
+
+ return nil
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/request_pagination_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/request_pagination_test.go
new file mode 100644
index 0000000..35b7ee8
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/request_pagination_test.go
@@ -0,0 +1,305 @@
+package aws_test
+
+import (
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/test/unit"
+ "github.com/aws/aws-sdk-go/service/dynamodb"
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ = unit.Imported
+
+// Use DynamoDB methods for simplicity
+func TestPagination(t *testing.T) {
+ db := dynamodb.New(nil)
+ tokens, pages, numPages, gotToEnd := []string{}, []string{}, 0, false
+
+ reqNum := 0
+ resps := []*dynamodb.ListTablesOutput{
+ {TableNames: []*string{aws.String("Table1"), aws.String("Table2")}, LastEvaluatedTableName: aws.String("Table2")},
+ {TableNames: []*string{aws.String("Table3"), aws.String("Table4")}, LastEvaluatedTableName: aws.String("Table4")},
+ {TableNames: []*string{aws.String("Table5")}},
+ }
+
+ db.Handlers.Send.Clear() // mock sending
+ db.Handlers.Unmarshal.Clear()
+ db.Handlers.UnmarshalMeta.Clear()
+ db.Handlers.ValidateResponse.Clear()
+ db.Handlers.Build.PushBack(func(r *aws.Request) {
+ in := r.Params.(*dynamodb.ListTablesInput)
+ if in == nil {
+ tokens = append(tokens, "")
+ } else if in.ExclusiveStartTableName != nil {
+ tokens = append(tokens, *in.ExclusiveStartTableName)
+ }
+ })
+ db.Handlers.Unmarshal.PushBack(func(r *aws.Request) {
+ r.Data = resps[reqNum]
+ reqNum++
+ })
+
+ params := &dynamodb.ListTablesInput{Limit: aws.Long(2)}
+ err := db.ListTablesPages(params, func(p *dynamodb.ListTablesOutput, last bool) bool {
+ numPages++
+ for _, t := range p.TableNames {
+ pages = append(pages, *t)
+ }
+ if last {
+ if gotToEnd {
+ assert.Fail(t, "last=true happened twice")
+ }
+ gotToEnd = true
+ }
+ return true
+ })
+
+ assert.Equal(t, []string{"Table2", "Table4"}, tokens)
+ assert.Equal(t, []string{"Table1", "Table2", "Table3", "Table4", "Table5"}, pages)
+ assert.Equal(t, 3, numPages)
+ assert.True(t, gotToEnd)
+ assert.Nil(t, err)
+ assert.Nil(t, params.ExclusiveStartTableName)
+}
+
+// Use DynamoDB methods for simplicity
+func TestPaginationEachPage(t *testing.T) {
+ db := dynamodb.New(nil)
+ tokens, pages, numPages, gotToEnd := []string{}, []string{}, 0, false
+
+ reqNum := 0
+ resps := []*dynamodb.ListTablesOutput{
+ {TableNames: []*string{aws.String("Table1"), aws.String("Table2")}, LastEvaluatedTableName: aws.String("Table2")},
+ {TableNames: []*string{aws.String("Table3"), aws.String("Table4")}, LastEvaluatedTableName: aws.String("Table4")},
+ {TableNames: []*string{aws.String("Table5")}},
+ }
+
+ db.Handlers.Send.Clear() // mock sending
+ db.Handlers.Unmarshal.Clear()
+ db.Handlers.UnmarshalMeta.Clear()
+ db.Handlers.ValidateResponse.Clear()
+ db.Handlers.Build.PushBack(func(r *aws.Request) {
+ in := r.Params.(*dynamodb.ListTablesInput)
+ if in == nil {
+ tokens = append(tokens, "")
+ } else if in.ExclusiveStartTableName != nil {
+ tokens = append(tokens, *in.ExclusiveStartTableName)
+ }
+ })
+ db.Handlers.Unmarshal.PushBack(func(r *aws.Request) {
+ r.Data = resps[reqNum]
+ reqNum++
+ })
+
+ params := &dynamodb.ListTablesInput{Limit: aws.Long(2)}
+ req, _ := db.ListTablesRequest(params)
+ err := req.EachPage(func(p interface{}, last bool) bool {
+ numPages++
+ for _, t := range p.(*dynamodb.ListTablesOutput).TableNames {
+ pages = append(pages, *t)
+ }
+ if last {
+ if gotToEnd {
+ assert.Fail(t, "last=true happened twice")
+ }
+ gotToEnd = true
+ }
+
+ return true
+ })
+
+ assert.Equal(t, []string{"Table2", "Table4"}, tokens)
+ assert.Equal(t, []string{"Table1", "Table2", "Table3", "Table4", "Table5"}, pages)
+ assert.Equal(t, 3, numPages)
+ assert.True(t, gotToEnd)
+ assert.Nil(t, err)
+}
+
+// Use DynamoDB methods for simplicity
+func TestPaginationEarlyExit(t *testing.T) {
+ db := dynamodb.New(nil)
+ numPages, gotToEnd := 0, false
+
+ reqNum := 0
+ resps := []*dynamodb.ListTablesOutput{
+ {TableNames: []*string{aws.String("Table1"), aws.String("Table2")}, LastEvaluatedTableName: aws.String("Table2")},
+ {TableNames: []*string{aws.String("Table3"), aws.String("Table4")}, LastEvaluatedTableName: aws.String("Table4")},
+ {TableNames: []*string{aws.String("Table5")}},
+ }
+
+ db.Handlers.Send.Clear() // mock sending
+ db.Handlers.Unmarshal.Clear()
+ db.Handlers.UnmarshalMeta.Clear()
+ db.Handlers.ValidateResponse.Clear()
+ db.Handlers.Unmarshal.PushBack(func(r *aws.Request) {
+ r.Data = resps[reqNum]
+ reqNum++
+ })
+
+ params := &dynamodb.ListTablesInput{Limit: aws.Long(2)}
+ err := db.ListTablesPages(params, func(p *dynamodb.ListTablesOutput, last bool) bool {
+ numPages++
+ if numPages == 2 {
+ return false
+ }
+ if last {
+ if gotToEnd {
+ assert.Fail(t, "last=true happened twice")
+ }
+ gotToEnd = true
+ }
+ return true
+ })
+
+ assert.Equal(t, 2, numPages)
+ assert.False(t, gotToEnd)
+ assert.Nil(t, err)
+}
+
+func TestSkipPagination(t *testing.T) {
+ client := s3.New(nil)
+ client.Handlers.Send.Clear() // mock sending
+ client.Handlers.Unmarshal.Clear()
+ client.Handlers.UnmarshalMeta.Clear()
+ client.Handlers.ValidateResponse.Clear()
+ client.Handlers.Unmarshal.PushBack(func(r *aws.Request) {
+ r.Data = &s3.HeadBucketOutput{}
+ })
+
+ req, _ := client.HeadBucketRequest(&s3.HeadBucketInput{Bucket: aws.String("bucket")})
+
+ numPages, gotToEnd := 0, false
+ req.EachPage(func(p interface{}, last bool) bool {
+ numPages++
+ if last {
+ gotToEnd = true
+ }
+ return true
+ })
+ assert.Equal(t, 1, numPages)
+ assert.True(t, gotToEnd)
+}
+
+// Use S3 for simplicity
+func TestPaginationTruncation(t *testing.T) {
+ count := 0
+ client := s3.New(nil)
+
+ reqNum := &count
+ resps := []*s3.ListObjectsOutput{
+ {IsTruncated: aws.Boolean(true), Contents: []*s3.Object{{Key: aws.String("Key1")}}},
+ {IsTruncated: aws.Boolean(true), Contents: []*s3.Object{{Key: aws.String("Key2")}}},
+ {IsTruncated: aws.Boolean(false), Contents: []*s3.Object{{Key: aws.String("Key3")}}},
+ {IsTruncated: aws.Boolean(true), Contents: []*s3.Object{{Key: aws.String("Key4")}}},
+ }
+
+ client.Handlers.Send.Clear() // mock sending
+ client.Handlers.Unmarshal.Clear()
+ client.Handlers.UnmarshalMeta.Clear()
+ client.Handlers.ValidateResponse.Clear()
+ client.Handlers.Unmarshal.PushBack(func(r *aws.Request) {
+ r.Data = resps[*reqNum]
+ *reqNum++
+ })
+
+ params := &s3.ListObjectsInput{Bucket: aws.String("bucket")}
+
+ results := []string{}
+ err := client.ListObjectsPages(params, func(p *s3.ListObjectsOutput, last bool) bool {
+ results = append(results, *p.Contents[0].Key)
+ return true
+ })
+
+ assert.Equal(t, []string{"Key1", "Key2", "Key3"}, results)
+ assert.Nil(t, err)
+
+ // Try again without truncation token at all
+ count = 0
+ resps[1].IsTruncated = nil
+ resps[2].IsTruncated = aws.Boolean(true)
+ results = []string{}
+ err = client.ListObjectsPages(params, func(p *s3.ListObjectsOutput, last bool) bool {
+ results = append(results, *p.Contents[0].Key)
+ return true
+ })
+
+ assert.Equal(t, []string{"Key1", "Key2"}, results)
+ assert.Nil(t, err)
+
+}
+
+// Benchmarks
+var benchResps = []*dynamodb.ListTablesOutput{
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE"), aws.String("NXT")}, LastEvaluatedTableName: aws.String("NXT")},
+ {TableNames: []*string{aws.String("TABLE")}},
+}
+
+var benchDb = func() *dynamodb.DynamoDB {
+ db := dynamodb.New(nil)
+ db.Handlers.Send.Clear() // mock sending
+ db.Handlers.Unmarshal.Clear()
+ db.Handlers.UnmarshalMeta.Clear()
+ db.Handlers.ValidateResponse.Clear()
+ return db
+}
+
+func BenchmarkCodegenIterator(b *testing.B) {
+ reqNum := 0
+ db := benchDb()
+ db.Handlers.Unmarshal.PushBack(func(r *aws.Request) {
+ r.Data = benchResps[reqNum]
+ reqNum++
+ })
+
+ input := &dynamodb.ListTablesInput{Limit: aws.Long(2)}
+ iter := func(fn func(*dynamodb.ListTablesOutput, bool) bool) error {
+ page, _ := db.ListTablesRequest(input)
+ for ; page != nil; page = page.NextPage() {
+ page.Send()
+ out := page.Data.(*dynamodb.ListTablesOutput)
+ if result := fn(out, !page.HasNextPage()); page.Error != nil || !result {
+ return page.Error
+ }
+ }
+ return nil
+ }
+
+ for i := 0; i < b.N; i++ {
+ reqNum = 0
+ iter(func(p *dynamodb.ListTablesOutput, last bool) bool {
+ return true
+ })
+ }
+}
+
+func BenchmarkEachPageIterator(b *testing.B) {
+ reqNum := 0
+ db := benchDb()
+ db.Handlers.Unmarshal.PushBack(func(r *aws.Request) {
+ r.Data = benchResps[reqNum]
+ reqNum++
+ })
+
+ input := &dynamodb.ListTablesInput{Limit: aws.Long(2)}
+ for i := 0; i < b.N; i++ {
+ reqNum = 0
+ req, _ := db.ListTablesRequest(input)
+ req.EachPage(func(p interface{}, last bool) bool {
+ return true
+ })
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/request_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/request_test.go
new file mode 100644
index 0000000..fcb4718
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/request_test.go
@@ -0,0 +1,219 @@
+package aws
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "reflect"
+ "testing"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/aws/credentials"
+ "github.com/stretchr/testify/assert"
+)
+
+// testData is the minimal payload shape the mocked JSON success responses
+// decode into.
+type testData struct {
+	Data string
+}
+
+// body wraps a string in a ReadCloser so it can stand in for an HTTP
+// response body in the mocked requests below.
+func body(str string) io.ReadCloser {
+	return ioutil.NopCloser(bytes.NewReader([]byte(str)))
+}
+
+// unmarshal is a minimal Unmarshal handler: it JSON-decodes the response
+// body into req.Data (when a target was supplied) and closes the body.
+// Decode errors are deliberately ignored — the tests assert on the data.
+func unmarshal(req *Request) {
+	defer req.HTTPResponse.Body.Close()
+	if req.Data != nil {
+		json.NewDecoder(req.HTTPResponse.Body).Decode(req.Data)
+	}
+	return
+}
+
+// unmarshalError is a minimal UnmarshalError handler mirroring the AWS JSON
+// protocol error format ({"__type": ..., "message": ...}). It always sets
+// req.Error so the retry logic under test has an error code to inspect.
+func unmarshalError(req *Request) {
+	bodyBytes, err := ioutil.ReadAll(req.HTTPResponse.Body)
+	if err != nil {
+		req.Error = awserr.New("UnmarshaleError", req.HTTPResponse.Status, err)
+		return
+	}
+	if len(bodyBytes) == 0 {
+		// An empty error body still yields a RequestFailure carrying the HTTP status.
+		req.Error = awserr.NewRequestFailure(
+			awserr.New("UnmarshaleError", req.HTTPResponse.Status, fmt.Errorf("empty body")),
+			req.HTTPResponse.StatusCode,
+			"",
+		)
+		return
+	}
+	var jsonErr jsonErrorResponse
+	if err := json.Unmarshal(bodyBytes, &jsonErr); err != nil {
+		req.Error = awserr.New("UnmarshaleError", "JSON unmarshal", err)
+		return
+	}
+	req.Error = awserr.NewRequestFailure(
+		awserr.New(jsonErr.Code, jsonErr.Message, nil),
+		req.HTTPResponse.StatusCode,
+		"",
+	)
+}
+
+// jsonErrorResponse matches the wire format of AWS JSON protocol errors.
+type jsonErrorResponse struct {
+	Code    string `json:"__type"`
+	Message string `json:"message"`
+}
+
+// test that retries occur for 5xx status codes
+func TestRequestRecoverRetry5xx(t *testing.T) {
+ reqNum := 0
+ reqs := []http.Response{
+ {StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
+ {StatusCode: 501, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
+ {StatusCode: 200, Body: body(`{"data":"valid"}`)},
+ }
+
+ s := NewService(&Config{MaxRetries: 10})
+ s.Handlers.Validate.Clear()
+ s.Handlers.Unmarshal.PushBack(unmarshal)
+ s.Handlers.UnmarshalError.PushBack(unmarshalError)
+ s.Handlers.Send.Clear() // mock sending
+ s.Handlers.Send.PushBack(func(r *Request) {
+ r.HTTPResponse = &reqs[reqNum]
+ reqNum++
+ })
+ out := &testData{}
+ r := NewRequest(s, &Operation{Name: "Operation"}, nil, out)
+ err := r.Send()
+ assert.Nil(t, err)
+ assert.Equal(t, 2, int(r.RetryCount))
+ assert.Equal(t, "valid", out.Data)
+}
+
+// test that retries occur for 4xx status codes with a response type that can be retried - see `shouldRetry`
+func TestRequestRecoverRetry4xxRetryable(t *testing.T) {
+ reqNum := 0
+ reqs := []http.Response{
+ {StatusCode: 400, Body: body(`{"__type":"Throttling","message":"Rate exceeded."}`)},
+ {StatusCode: 429, Body: body(`{"__type":"ProvisionedThroughputExceededException","message":"Rate exceeded."}`)},
+ {StatusCode: 200, Body: body(`{"data":"valid"}`)},
+ }
+
+ s := NewService(&Config{MaxRetries: 10})
+ s.Handlers.Validate.Clear()
+ s.Handlers.Unmarshal.PushBack(unmarshal)
+ s.Handlers.UnmarshalError.PushBack(unmarshalError)
+ s.Handlers.Send.Clear() // mock sending
+ s.Handlers.Send.PushBack(func(r *Request) {
+ r.HTTPResponse = &reqs[reqNum]
+ reqNum++
+ })
+ out := &testData{}
+ r := NewRequest(s, &Operation{Name: "Operation"}, nil, out)
+ err := r.Send()
+ assert.Nil(t, err)
+ assert.Equal(t, 2, int(r.RetryCount))
+ assert.Equal(t, "valid", out.Data)
+}
+
+// test that retries don't occur for 4xx status codes with a response type that can't be retried
+func TestRequest4xxUnretryable(t *testing.T) {
+ s := NewService(&Config{MaxRetries: 10})
+ s.Handlers.Validate.Clear()
+ s.Handlers.Unmarshal.PushBack(unmarshal)
+ s.Handlers.UnmarshalError.PushBack(unmarshalError)
+ s.Handlers.Send.Clear() // mock sending
+ s.Handlers.Send.PushBack(func(r *Request) {
+ r.HTTPResponse = &http.Response{StatusCode: 401, Body: body(`{"__type":"SignatureDoesNotMatch","message":"Signature does not match."}`)}
+ })
+ out := &testData{}
+ r := NewRequest(s, &Operation{Name: "Operation"}, nil, out)
+ err := r.Send()
+ assert.NotNil(t, err)
+ if e, ok := err.(awserr.RequestFailure); ok {
+ assert.Equal(t, 401, e.StatusCode())
+ } else {
+ assert.Fail(t, "Expected error to be a service failure")
+ }
+ assert.Equal(t, "SignatureDoesNotMatch", err.(awserr.Error).Code())
+ assert.Equal(t, "Signature does not match.", err.(awserr.Error).Message())
+ assert.Equal(t, 0, int(r.RetryCount))
+}
+
+func TestRequestExhaustRetries(t *testing.T) {
+ delays := []time.Duration{}
+ sleepDelay = func(delay time.Duration) {
+ delays = append(delays, delay)
+ }
+
+ reqNum := 0
+ reqs := []http.Response{
+ {StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
+ {StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
+ {StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
+ {StatusCode: 500, Body: body(`{"__type":"UnknownError","message":"An error occurred."}`)},
+ }
+
+ s := NewService(&Config{MaxRetries: -1})
+ s.Handlers.Validate.Clear()
+ s.Handlers.Unmarshal.PushBack(unmarshal)
+ s.Handlers.UnmarshalError.PushBack(unmarshalError)
+ s.Handlers.Send.Clear() // mock sending
+ s.Handlers.Send.PushBack(func(r *Request) {
+ r.HTTPResponse = &reqs[reqNum]
+ reqNum++
+ })
+ r := NewRequest(s, &Operation{Name: "Operation"}, nil, nil)
+ err := r.Send()
+ assert.NotNil(t, err)
+ if e, ok := err.(awserr.RequestFailure); ok {
+ assert.Equal(t, 500, e.StatusCode())
+ } else {
+ assert.Fail(t, "Expected error to be a service failure")
+ }
+ assert.Equal(t, "UnknownError", err.(awserr.Error).Code())
+ assert.Equal(t, "An error occurred.", err.(awserr.Error).Message())
+ assert.Equal(t, 3, int(r.RetryCount))
+ assert.True(t, reflect.DeepEqual([]time.Duration{30 * time.Millisecond, 60 * time.Millisecond, 120 * time.Millisecond}, delays))
+}
+
+// test that the request is retried after the credentials are expired.
+func TestRequestRecoverExpiredCreds(t *testing.T) {
+ reqNum := 0
+ reqs := []http.Response{
+ {StatusCode: 400, Body: body(`{"__type":"ExpiredTokenException","message":"expired token"}`)},
+ {StatusCode: 200, Body: body(`{"data":"valid"}`)},
+ }
+
+ s := NewService(&Config{MaxRetries: 10, Credentials: credentials.NewStaticCredentials("AKID", "SECRET", "")})
+ s.Handlers.Validate.Clear()
+ s.Handlers.Unmarshal.PushBack(unmarshal)
+ s.Handlers.UnmarshalError.PushBack(unmarshalError)
+
+ credExpiredBeforeRetry := false
+ credExpiredAfterRetry := false
+
+ s.Handlers.AfterRetry.PushBack(func(r *Request) {
+ credExpiredAfterRetry = r.Config.Credentials.IsExpired()
+ })
+
+ s.Handlers.Sign.Clear()
+ s.Handlers.Sign.PushBack(func(r *Request) {
+ r.Config.Credentials.Get()
+ })
+ s.Handlers.Send.Clear() // mock sending
+ s.Handlers.Send.PushBack(func(r *Request) {
+ r.HTTPResponse = &reqs[reqNum]
+ reqNum++
+ })
+ out := &testData{}
+ r := NewRequest(s, &Operation{Name: "Operation"}, nil, out)
+ err := r.Send()
+ assert.Nil(t, err)
+
+ assert.False(t, credExpiredBeforeRetry, "Expect valid creds before retry check")
+ assert.True(t, credExpiredAfterRetry, "Expect expired creds after retry check")
+ assert.False(t, s.Config.Credentials.IsExpired(), "Expect valid creds after cred expired recovery")
+
+ assert.Equal(t, 1, int(r.RetryCount))
+ assert.Equal(t, "valid", out.Data)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/service.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/service.go
new file mode 100644
index 0000000..42d1be4
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/service.go
@@ -0,0 +1,177 @@
+package aws
+
+import (
+ "fmt"
+ "math"
+ "net/http"
+ "net/http/httputil"
+ "regexp"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/internal/endpoints"
+)
+
+// A Service implements the base service request and response handling
+// used by all services.
+type Service struct {
+	Config            *Config
+	Handlers          Handlers
+	ManualSend        bool
+	ServiceName       string
+	APIVersion        string
+	Endpoint          string
+	SigningName       string
+	SigningRegion     string
+	JSONVersion       string
+	TargetPrefix      string
+	RetryRules        func(*Request) time.Duration
+	ShouldRetry       func(*Request) bool
+	DefaultMaxRetries uint
+}
+
+// schemeRE matches a leading URI scheme (e.g. "https://"); buildEndpoint
+// uses it to decide whether an endpoint string already carries a scheme.
+var schemeRE = regexp.MustCompile("^([^:]+)://")
+
+// NewService will return a pointer to a new Service object initialized
+// with the given config.
+func NewService(config *Config) *Service {
+	svc := &Service{Config: config}
+	svc.Initialize()
+	return svc
+}
+
+// Initialize initializes the service: it fills in config defaults, installs
+// the default retry policy, and wires the core request lifecycle handlers.
+func (s *Service) Initialize() {
+	if s.Config == nil {
+		s.Config = &Config{}
+	}
+	if s.Config.HTTPClient == nil {
+		s.Config.HTTPClient = http.DefaultClient
+	}
+
+	if s.RetryRules == nil {
+		s.RetryRules = retryRules
+	}
+
+	if s.ShouldRetry == nil {
+		s.ShouldRetry = shouldRetry
+	}
+
+	s.DefaultMaxRetries = 3
+	s.Handlers.Validate.PushBack(ValidateEndpointHandler)
+	s.Handlers.Build.PushBack(UserAgentHandler)
+	s.Handlers.Sign.PushBack(BuildContentLength)
+	s.Handlers.Send.PushBack(SendHandler)
+	s.Handlers.AfterRetry.PushBack(AfterRetryHandler)
+	s.Handlers.ValidateResponse.PushBack(ValidateResponseHandler)
+	s.AddDebugHandlers()
+	s.buildEndpoint()
+
+	// Parameter validation is on by default and can be disabled via config.
+	if !s.Config.DisableParamValidation {
+		s.Handlers.Validate.PushBack(ValidateParameters)
+	}
+}
+
+// buildEndpoint builds the endpoint values the service will use to make requests with.
+// An explicit Config.Endpoint wins; otherwise the endpoint is resolved from
+// the region/service endpoint table. A scheme is prepended when missing,
+// honoring Config.DisableSSL.
+func (s *Service) buildEndpoint() {
+	if s.Config.Endpoint != "" {
+		s.Endpoint = s.Config.Endpoint
+	} else {
+		s.Endpoint, s.SigningRegion =
+			endpoints.EndpointForRegion(s.ServiceName, s.Config.Region)
+	}
+
+	if s.Endpoint != "" && !schemeRE.MatchString(s.Endpoint) {
+		scheme := "https"
+		if s.Config.DisableSSL {
+			scheme = "http"
+		}
+		s.Endpoint = scheme + "://" + s.Endpoint
+	}
+}
+
+// AddDebugHandlers injects debug logging handlers into the service to log request
+// debug information. The handlers are only installed when a LogLevel is set.
+func (s *Service) AddDebugHandlers() {
+	out := s.Config.Logger
+	if s.Config.LogLevel == 0 {
+		return
+	}
+
+	// Dump the signed request just before it is sent.
+	s.Handlers.Send.PushFront(func(r *Request) {
+		logBody := r.Config.LogHTTPBody
+		dumpedBody, _ := httputil.DumpRequestOut(r.HTTPRequest, logBody)
+
+		fmt.Fprintf(out, "---[ REQUEST POST-SIGN ]-----------------------------\n")
+		fmt.Fprintf(out, "%s\n", string(dumpedBody))
+		fmt.Fprintf(out, "-----------------------------------------------------\n")
+	})
+	// Dump the response (or the transport error) right after sending.
+	s.Handlers.Send.PushBack(func(r *Request) {
+		fmt.Fprintf(out, "---[ RESPONSE ]--------------------------------------\n")
+		if r.HTTPResponse != nil {
+			logBody := r.Config.LogHTTPBody
+			dumpedBody, _ := httputil.DumpResponse(r.HTTPResponse, logBody)
+			fmt.Fprintf(out, "%s\n", string(dumpedBody))
+		} else if r.Error != nil {
+			fmt.Fprintf(out, "%s\n", r.Error)
+		}
+		fmt.Fprintf(out, "-----------------------------------------------------\n")
+	})
+}
+
+// MaxRetries returns the maximum number of retries the service will make for
+// an individual API request. A negative Config.MaxRetries selects the
+// service's DefaultMaxRetries.
+func (s *Service) MaxRetries() uint {
+	if s.Config.MaxRetries < 0 {
+		return s.DefaultMaxRetries
+	}
+	return uint(s.Config.MaxRetries)
+}
+
+// retryRules returns the delay duration before retrying this request again:
+// exponential backoff of 30ms * 2^RetryCount (30ms, 60ms, 120ms, ...).
+func retryRules(r *Request) time.Duration {
+	delay := time.Duration(math.Pow(2, float64(r.RetryCount))) * 30
+	return delay * time.Millisecond
+}
+
+// retryableCodes is a collection of service response codes which are retry-able
+// without any further action.
+var retryableCodes = map[string]struct{}{
+	"RequestError":                           {},
+	"ProvisionedThroughputExceededException": {},
+	"Throttling":                             {},
+}
+
+// credsExpiredCodes is a collection of error codes which signify the credentials
+// need to be refreshed. Expired tokens require refreshing of credentials, and
+// resigning before the request can be retried.
+var credsExpiredCodes = map[string]struct{}{
+	"ExpiredToken":          {},
+	"ExpiredTokenException": {},
+	"RequestExpired":        {}, // EC2 Only
+}
+
+// isCodeRetryable reports whether the error code is retryable, either
+// directly or because it indicates expired credentials.
+func isCodeRetryable(code string) bool {
+	if _, ok := retryableCodes[code]; ok {
+		return true
+	}
+
+	return isCodeExpiredCreds(code)
+}
+
+// isCodeExpiredCreds reports whether the error code signals expired credentials.
+func isCodeExpiredCreds(code string) bool {
+	_, ok := credsExpiredCodes[code]
+	return ok
+}
+
+// shouldRetry returns if the request should be retried. Any 5xx response is
+// retryable; otherwise the decision falls to the error code (throttling or
+// expired-credential codes — see isCodeRetryable).
+//
+// NOTE(review): r.HTTPResponse is dereferenced unconditionally; callers
+// appear to guarantee it is non-nil by the time retry is evaluated — confirm.
+func shouldRetry(r *Request) bool {
+	if r.HTTPResponse.StatusCode >= 500 {
+		return true
+	}
+	if r.Error != nil {
+		if err, ok := r.Error.(awserr.Error); ok {
+			return isCodeRetryable(err.Code())
+		}
+	}
+	return false
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/types.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/types.go
new file mode 100644
index 0000000..7801cb6
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/types.go
@@ -0,0 +1,131 @@
+package aws
+
+import (
+ "fmt"
+ "io"
+ "time"
+)
+
+// String converts a Go string into a string pointer.
+func String(v string) *string {
+	return &v
+}
+
+// Boolean converts a Go bool into a boolean pointer.
+func Boolean(v bool) *bool {
+	return &v
+}
+
+// Long converts a Go int64 into a long (int64) pointer.
+func Long(v int64) *int64 {
+	return &v
+}
+
+// Double converts a Go float64 into a double (float64) pointer.
+func Double(v float64) *float64 {
+	return &v
+}
+
+// Time converts a Go time.Time into a Time pointer.
+func Time(t time.Time) *time.Time {
+	return &t
+}
+
+// ReadSeekCloser wraps an io.Reader, returning a ReaderSeekerCloser.
+func ReadSeekCloser(r io.Reader) ReaderSeekerCloser {
+	return ReaderSeekerCloser{r}
+}
+
+// ReaderSeekerCloser represents a reader that can also delegate io.Seeker and
+// io.Closer interfaces to the underlying object if they are available.
+type ReaderSeekerCloser struct {
+	r io.Reader
+}
+
+// Read reads from the reader up to size of p. The number of bytes read, and
+// error if it occurred will be returned.
+//
+// If the reader is not an io.Reader zero bytes read, and nil error will be returned.
+//
+// Performs the same functionality as io.Reader Read
+func (r ReaderSeekerCloser) Read(p []byte) (int, error) {
+	// A type switch over a nil interface matches no case, so a zero-value
+	// ReaderSeekerCloser reads nothing instead of panicking.
+	switch t := r.r.(type) {
+	case io.Reader:
+		return t.Read(p)
+	}
+	return 0, nil
+}
+
+// Seek sets the offset for the next Read to offset, interpreted according to
+// whence: 0 means relative to the origin of the file, 1 means relative to the
+// current offset, and 2 means relative to the end. Seek returns the new offset
+// and an error, if any.
+//
+// If the ReaderSeekerCloser is not an io.Seeker nothing will be done.
+func (r ReaderSeekerCloser) Seek(offset int64, whence int) (int64, error) {
+	switch t := r.r.(type) {
+	case io.Seeker:
+		return t.Seek(offset, whence)
+	}
+	return int64(0), nil
+}
+
+// Close closes the ReaderSeekerCloser.
+//
+// If the ReaderSeekerCloser is not an io.Closer nothing will be done.
+func (r ReaderSeekerCloser) Close() error {
+	switch t := r.r.(type) {
+	case io.Closer:
+		return t.Close()
+	}
+	return nil
+}
+
+// A SettableBool provides a boolean value which includes the state if
+// the value was set or unset. The set state is in addition to the value's
+// value(true|false)
+type SettableBool struct {
+	value bool
+	set   bool
+}
+
+// SetBool returns a SettableBool with a value set.
+func SetBool(value bool) SettableBool {
+	return SettableBool{value: value, set: true}
+}
+
+// Get returns the value. Will always be false if the SettableBool was not set.
+func (b *SettableBool) Get() bool {
+	if !b.set {
+		return false
+	}
+	return b.value
+}
+
+// Set sets the value and updates the state that the value has been set.
+func (b *SettableBool) Set(value bool) {
+	b.value = value
+	b.set = true
+}
+
+// IsSet returns if the value has been set.
+func (b *SettableBool) IsSet() bool {
+	return b.set
+}
+
+// Reset resets the state and value of the SettableBool to its initial default
+// state of not set and zero value.
+func (b *SettableBool) Reset() {
+	b.value = false
+	b.set = false
+}
+
+// String returns the string representation of the value if set. Zero if not set.
+// Goes through Get, so an unset value prints as "false".
+func (b *SettableBool) String() string {
+	return fmt.Sprintf("%t", b.Get())
+}
+
+// GoString returns the string representation of the SettableBool value and state.
+func (b *SettableBool) GoString() string {
+	return fmt.Sprintf("Bool{value:%t, set:%t}", b.value, b.set)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/version.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/version.go
new file mode 100644
index 0000000..94b8c72
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/version.go
@@ -0,0 +1,8 @@
+// Package aws provides core functionality for making requests to AWS services.
+package aws
+
+// SDKName is the name of this AWS SDK
+const SDKName = "aws-sdk-go"
+
+// SDKVersion is the version of this SDK
+const SDKVersion = "0.6.4"
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints.go
new file mode 100644
index 0000000..d040ccc
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints.go
@@ -0,0 +1,31 @@
+// Package endpoints validates regional endpoints for services.
+package endpoints
+
+//go:generate go run ../model/cli/gen-endpoints/main.go endpoints.json endpoints_map.go
+//go:generate gofmt -s -w endpoints_map.go
+
+import "strings"
+
+// EndpointForRegion returns an endpoint and its signing region for a service and region.
+// if the service and region pair are not found endpoint and signingRegion will be empty.
+//
+// Lookup is most-specific first: region/service, region/*, */service, then */*,
+// with "{region}" and "{service}" placeholders substituted in the matched endpoint.
+func EndpointForRegion(svcName, region string) (endpoint, signingRegion string) {
+	derivedKeys := []string{
+		region + "/" + svcName,
+		region + "/*",
+		"*/" + svcName,
+		"*/*",
+	}
+
+	for _, key := range derivedKeys {
+		if val, ok := endpointsMap.Endpoints[key]; ok {
+			ep := val.Endpoint
+			ep = strings.Replace(ep, "{region}", region, -1)
+			ep = strings.Replace(ep, "{service}", svcName, -1)
+
+			endpoint = ep
+			signingRegion = val.SigningRegion
+			return
+		}
+	}
+	return
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints.json b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints.json
new file mode 100644
index 0000000..4c58809
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints.json
@@ -0,0 +1,77 @@
+{
+ "version": 2,
+ "endpoints": {
+ "*/*": {
+ "endpoint": "{service}.{region}.amazonaws.com"
+ },
+ "cn-north-1/*": {
+ "endpoint": "{service}.{region}.amazonaws.com.cn",
+ "signatureVersion": "v4"
+ },
+ "us-gov-west-1/iam": {
+ "endpoint": "iam.us-gov.amazonaws.com"
+ },
+ "us-gov-west-1/sts": {
+ "endpoint": "sts.us-gov-west-1.amazonaws.com"
+ },
+ "us-gov-west-1/s3": {
+ "endpoint": "s3-{region}.amazonaws.com"
+ },
+ "*/cloudfront": {
+ "endpoint": "cloudfront.amazonaws.com",
+ "signingRegion": "us-east-1"
+ },
+ "*/cloudsearchdomain": {
+ "endpoint": "",
+ "signingRegion": "us-east-1"
+ },
+ "*/iam": {
+ "endpoint": "iam.amazonaws.com",
+ "signingRegion": "us-east-1"
+ },
+ "*/importexport": {
+ "endpoint": "importexport.amazonaws.com",
+ "signingRegion": "us-east-1"
+ },
+ "*/route53": {
+ "endpoint": "route53.amazonaws.com",
+ "signingRegion": "us-east-1"
+ },
+ "*/sts": {
+ "endpoint": "sts.amazonaws.com",
+ "signingRegion": "us-east-1"
+ },
+ "us-east-1/sdb": {
+ "endpoint": "sdb.amazonaws.com",
+ "signingRegion": "us-east-1"
+ },
+ "us-east-1/s3": {
+ "endpoint": "s3.amazonaws.com"
+ },
+ "us-west-1/s3": {
+ "endpoint": "s3-{region}.amazonaws.com"
+ },
+ "us-west-2/s3": {
+ "endpoint": "s3-{region}.amazonaws.com"
+ },
+ "eu-west-1/s3": {
+ "endpoint": "s3-{region}.amazonaws.com"
+ },
+ "ap-southeast-1/s3": {
+ "endpoint": "s3-{region}.amazonaws.com"
+ },
+ "ap-southeast-2/s3": {
+ "endpoint": "s3-{region}.amazonaws.com"
+ },
+ "ap-northeast-1/s3": {
+ "endpoint": "s3-{region}.amazonaws.com"
+ },
+ "sa-east-1/s3": {
+ "endpoint": "s3-{region}.amazonaws.com"
+ },
+ "eu-central-1/s3": {
+ "endpoint": "{service}.{region}.amazonaws.com",
+ "signatureVersion": "v4"
+ }
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints_map.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints_map.go
new file mode 100644
index 0000000..894c1a6
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints_map.go
@@ -0,0 +1,89 @@
+package endpoints
+
+// THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
+
+type endpointStruct struct {
+ Version int
+ Endpoints map[string]endpointEntry
+}
+
+type endpointEntry struct {
+ Endpoint string
+ SigningRegion string
+}
+
+var endpointsMap = endpointStruct{
+ Version: 2,
+ Endpoints: map[string]endpointEntry{
+ "*/*": {
+ Endpoint: "{service}.{region}.amazonaws.com",
+ },
+ "*/cloudfront": {
+ Endpoint: "cloudfront.amazonaws.com",
+ SigningRegion: "us-east-1",
+ },
+ "*/cloudsearchdomain": {
+ Endpoint: "",
+ SigningRegion: "us-east-1",
+ },
+ "*/iam": {
+ Endpoint: "iam.amazonaws.com",
+ SigningRegion: "us-east-1",
+ },
+ "*/importexport": {
+ Endpoint: "importexport.amazonaws.com",
+ SigningRegion: "us-east-1",
+ },
+ "*/route53": {
+ Endpoint: "route53.amazonaws.com",
+ SigningRegion: "us-east-1",
+ },
+ "*/sts": {
+ Endpoint: "sts.amazonaws.com",
+ SigningRegion: "us-east-1",
+ },
+ "ap-northeast-1/s3": {
+ Endpoint: "s3-{region}.amazonaws.com",
+ },
+ "ap-southeast-1/s3": {
+ Endpoint: "s3-{region}.amazonaws.com",
+ },
+ "ap-southeast-2/s3": {
+ Endpoint: "s3-{region}.amazonaws.com",
+ },
+ "cn-north-1/*": {
+ Endpoint: "{service}.{region}.amazonaws.com.cn",
+ },
+ "eu-central-1/s3": {
+ Endpoint: "{service}.{region}.amazonaws.com",
+ },
+ "eu-west-1/s3": {
+ Endpoint: "s3-{region}.amazonaws.com",
+ },
+ "sa-east-1/s3": {
+ Endpoint: "s3-{region}.amazonaws.com",
+ },
+ "us-east-1/s3": {
+ Endpoint: "s3.amazonaws.com",
+ },
+ "us-east-1/sdb": {
+ Endpoint: "sdb.amazonaws.com",
+ SigningRegion: "us-east-1",
+ },
+ "us-gov-west-1/iam": {
+ Endpoint: "iam.us-gov.amazonaws.com",
+ },
+ "us-gov-west-1/s3": {
+ Endpoint: "s3-{region}.amazonaws.com",
+ },
+ "us-gov-west-1/sts": {
+ Endpoint: "sts.us-gov-west-1.amazonaws.com",
+ },
+ "us-west-1/s3": {
+ Endpoint: "s3-{region}.amazonaws.com",
+ },
+ "us-west-2/s3": {
+ Endpoint: "s3-{region}.amazonaws.com",
+ },
+ },
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints_test.go
new file mode 100644
index 0000000..8af6587
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/endpoints/endpoints_test.go
@@ -0,0 +1,28 @@
+package endpoints
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestGlobalEndpoints verifies that region-agnostic services resolve to their
+// global endpoint with us-east-1 as the signing region, regardless of region.
+func TestGlobalEndpoints(t *testing.T) {
+	region := "mock-region-1"
+	svcs := []string{"cloudfront", "iam", "importexport", "route53", "sts"}
+
+	for _, name := range svcs {
+		ep, sr := EndpointForRegion(name, region)
+		assert.Equal(t, name+".amazonaws.com", ep)
+		assert.Equal(t, "us-east-1", sr)
+	}
+}
+
+// TestServicesInCN verifies the cn-north-1 wildcard maps every service to the
+// .amazonaws.com.cn domain.
+func TestServicesInCN(t *testing.T) {
+	region := "cn-north-1"
+	svcs := []string{"cloudfront", "iam", "importexport", "route53", "sts", "s3"}
+
+	for _, name := range svcs {
+		ep, _ := EndpointForRegion(name, region)
+		assert.Equal(t, name+"."+region+".amazonaws.com.cn", ep)
+	}
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/build.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/build.go
new file mode 100644
index 0000000..c4d8dd2
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/build.go
@@ -0,0 +1,33 @@
+// Package query provides serialisation of AWS query requests, and responses.
+package query
+
+//go:generate go run ../../fixtures/protocol/generate.go ../../fixtures/protocol/input/query.json build_test.go
+
+import (
+ "net/url"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/internal/protocol/query/queryutil"
+)
+
+// Build builds a request for an AWS Query service: the Action/Version pair
+// plus the flattened operation parameters, form-encoded.
+func Build(r *aws.Request) {
+	body := url.Values{
+		"Action":  {r.Operation.Name},
+		"Version": {r.Service.APIVersion},
+	}
+	if err := queryutil.Parse(body, r.Params, false); err != nil {
+		r.Error = awserr.New("SerializationError", "failed encoding Query request", err)
+		return
+	}
+
+	if r.ExpireTime == 0 {
+		r.HTTPRequest.Method = "POST"
+		r.HTTPRequest.Header.Set("Content-Type", "application/x-www-form-urlencoded; charset=utf-8")
+		r.SetBufferBody([]byte(body.Encode()))
+	} else { // This is a pre-signed request
+		// Pre-signed requests carry the parameters in the query string rather
+		// than a POST body, so the URL alone is self-contained.
+		r.HTTPRequest.Method = "GET"
+		r.HTTPRequest.URL.RawQuery = body.Encode()
+	}
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/build_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/build_test.go
new file mode 100644
index 0000000..b548298
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/build_test.go
@@ -0,0 +1,1491 @@
+package query_test
+
+import (
+ "bytes"
+ "encoding/json"
+ "encoding/xml"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "testing"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/protocol/query"
+ "github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil"
+ "github.com/aws/aws-sdk-go/internal/signer/v4"
+ "github.com/aws/aws-sdk-go/internal/util"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ bytes.Buffer // always import bytes
+var _ http.Request
+var _ json.Marshaler
+var _ time.Time
+var _ xmlutil.XMLNode
+var _ xml.Attr
+var _ = ioutil.Discard
+var _ = util.Trim("")
+var _ = url.Values{}
+var _ = io.EOF
+
+// InputService1ProtocolTest is a client for InputService1ProtocolTest.
+type InputService1ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService1ProtocolTest client.
+func NewInputService1ProtocolTest(config *aws.Config) *InputService1ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice1protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &InputService1ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService1ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService1ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService1TestCaseOperation1 = "OperationName"
+
+// InputService1TestCaseOperation1Request generates a request for the InputService1TestCaseOperation1 operation.
+func (c *InputService1ProtocolTest) InputService1TestCaseOperation1Request(input *InputService1TestShapeInputShape) (req *aws.Request, output *InputService1TestShapeInputService1TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService1TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &InputService1TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService1TestShapeInputService1TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService1ProtocolTest) InputService1TestCaseOperation1(input *InputService1TestShapeInputShape) (*InputService1TestShapeInputService1TestCaseOperation1Output, error) {
+ req, out := c.InputService1TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService1TestShapeInputService1TestCaseOperation1Output struct {
+ metadataInputService1TestShapeInputService1TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService1TestShapeInputService1TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService1TestShapeInputShape struct {
+ Bar *string `type:"string"`
+
+ Foo *string `type:"string"`
+
+ metadataInputService1TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService1TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService2ProtocolTest is a client for InputService2ProtocolTest.
+type InputService2ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService2ProtocolTest client.
+func NewInputService2ProtocolTest(config *aws.Config) *InputService2ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice2protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &InputService2ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService2ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService2ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService2TestCaseOperation1 = "OperationName"
+
+// InputService2TestCaseOperation1Request generates a request for the InputService2TestCaseOperation1 operation.
+func (c *InputService2ProtocolTest) InputService2TestCaseOperation1Request(input *InputService2TestShapeInputShape) (req *aws.Request, output *InputService2TestShapeInputService2TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService2TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &InputService2TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService2TestShapeInputService2TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService2ProtocolTest) InputService2TestCaseOperation1(input *InputService2TestShapeInputShape) (*InputService2TestShapeInputService2TestCaseOperation1Output, error) {
+ req, out := c.InputService2TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService2TestShapeInputService2TestCaseOperation1Output struct {
+ metadataInputService2TestShapeInputService2TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService2TestShapeInputService2TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService2TestShapeInputShape struct {
+ StructArg *InputService2TestShapeStructType `type:"structure"`
+
+ metadataInputService2TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService2TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService2TestShapeStructType struct {
+ ScalarArg *string `type:"string"`
+
+ metadataInputService2TestShapeStructType `json:"-" xml:"-"`
+}
+
+type metadataInputService2TestShapeStructType struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService3ProtocolTest is a client for InputService3ProtocolTest.
+type InputService3ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService3ProtocolTest client.
+func NewInputService3ProtocolTest(config *aws.Config) *InputService3ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice3protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &InputService3ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService3ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService3ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService3TestCaseOperation1 = "OperationName"
+
+// InputService3TestCaseOperation1Request generates a request for the InputService3TestCaseOperation1 operation.
+func (c *InputService3ProtocolTest) InputService3TestCaseOperation1Request(input *InputService3TestShapeInputShape) (req *aws.Request, output *InputService3TestShapeInputService3TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService3TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &InputService3TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService3TestShapeInputService3TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService3ProtocolTest) InputService3TestCaseOperation1(input *InputService3TestShapeInputShape) (*InputService3TestShapeInputService3TestCaseOperation1Output, error) {
+ req, out := c.InputService3TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService3TestCaseOperation2 = "OperationName"
+
+// InputService3TestCaseOperation2Request generates a request for the InputService3TestCaseOperation2 operation.
+func (c *InputService3ProtocolTest) InputService3TestCaseOperation2Request(input *InputService3TestShapeInputShape) (req *aws.Request, output *InputService3TestShapeInputService3TestCaseOperation2Output) {
+ op := &aws.Operation{
+ Name: opInputService3TestCaseOperation2,
+ }
+
+ if input == nil {
+ input = &InputService3TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService3TestShapeInputService3TestCaseOperation2Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService3ProtocolTest) InputService3TestCaseOperation2(input *InputService3TestShapeInputShape) (*InputService3TestShapeInputService3TestCaseOperation2Output, error) {
+ req, out := c.InputService3TestCaseOperation2Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService3TestShapeInputService3TestCaseOperation1Output struct {
+ metadataInputService3TestShapeInputService3TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService3TestShapeInputService3TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService3TestShapeInputService3TestCaseOperation2Output struct {
+ metadataInputService3TestShapeInputService3TestCaseOperation2Output `json:"-" xml:"-"`
+}
+
+type metadataInputService3TestShapeInputService3TestCaseOperation2Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService3TestShapeInputShape struct {
+ ListArg []*string `type:"list"`
+
+ metadataInputService3TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService3TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService4ProtocolTest is a client for InputService4ProtocolTest.
+type InputService4ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService4ProtocolTest client.
+func NewInputService4ProtocolTest(config *aws.Config) *InputService4ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice4protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &InputService4ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService4ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService4ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService4TestCaseOperation1 = "OperationName"
+
+// InputService4TestCaseOperation1Request generates a request for the InputService4TestCaseOperation1 operation.
+func (c *InputService4ProtocolTest) InputService4TestCaseOperation1Request(input *InputService4TestShapeInputShape) (req *aws.Request, output *InputService4TestShapeInputService4TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService4TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &InputService4TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService4TestShapeInputService4TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService4ProtocolTest) InputService4TestCaseOperation1(input *InputService4TestShapeInputShape) (*InputService4TestShapeInputService4TestCaseOperation1Output, error) {
+ req, out := c.InputService4TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService4TestCaseOperation2 = "OperationName"
+
+// InputService4TestCaseOperation2Request generates a request for the InputService4TestCaseOperation2 operation.
+func (c *InputService4ProtocolTest) InputService4TestCaseOperation2Request(input *InputService4TestShapeInputShape) (req *aws.Request, output *InputService4TestShapeInputService4TestCaseOperation2Output) {
+ op := &aws.Operation{
+ Name: opInputService4TestCaseOperation2,
+ }
+
+ if input == nil {
+ input = &InputService4TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService4TestShapeInputService4TestCaseOperation2Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService4ProtocolTest) InputService4TestCaseOperation2(input *InputService4TestShapeInputShape) (*InputService4TestShapeInputService4TestCaseOperation2Output, error) {
+ req, out := c.InputService4TestCaseOperation2Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService4TestShapeInputService4TestCaseOperation1Output struct {
+ metadataInputService4TestShapeInputService4TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService4TestShapeInputService4TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService4TestShapeInputService4TestCaseOperation2Output struct {
+ metadataInputService4TestShapeInputService4TestCaseOperation2Output `json:"-" xml:"-"`
+}
+
+type metadataInputService4TestShapeInputService4TestCaseOperation2Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService4TestShapeInputShape struct {
+ ListArg []*string `type:"list" flattened:"true"`
+
+ ScalarArg *string `type:"string"`
+
+ metadataInputService4TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService4TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService5ProtocolTest is a client for InputService5ProtocolTest.
+type InputService5ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService5ProtocolTest client.
+func NewInputService5ProtocolTest(config *aws.Config) *InputService5ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice5protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &InputService5ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService5ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService5ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService5TestCaseOperation1 = "OperationName"
+
+// InputService5TestCaseOperation1Request generates a request for the InputService5TestCaseOperation1 operation.
+func (c *InputService5ProtocolTest) InputService5TestCaseOperation1Request(input *InputService5TestShapeInputShape) (req *aws.Request, output *InputService5TestShapeInputService5TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService5TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &InputService5TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService5TestShapeInputService5TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService5ProtocolTest) InputService5TestCaseOperation1(input *InputService5TestShapeInputShape) (*InputService5TestShapeInputService5TestCaseOperation1Output, error) {
+ req, out := c.InputService5TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService5TestCaseOperation2 = "OperationName"
+
+// InputService5TestCaseOperation2Request generates a request for the InputService5TestCaseOperation2 operation.
+func (c *InputService5ProtocolTest) InputService5TestCaseOperation2Request(input *InputService5TestShapeInputShape) (req *aws.Request, output *InputService5TestShapeInputService5TestCaseOperation2Output) {
+ op := &aws.Operation{
+ Name: opInputService5TestCaseOperation2,
+ }
+
+ if input == nil {
+ input = &InputService5TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService5TestShapeInputService5TestCaseOperation2Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService5ProtocolTest) InputService5TestCaseOperation2(input *InputService5TestShapeInputShape) (*InputService5TestShapeInputService5TestCaseOperation2Output, error) {
+ req, out := c.InputService5TestCaseOperation2Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService5TestShapeInputService5TestCaseOperation1Output struct {
+ metadataInputService5TestShapeInputService5TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService5TestShapeInputService5TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService5TestShapeInputService5TestCaseOperation2Output struct {
+ metadataInputService5TestShapeInputService5TestCaseOperation2Output `json:"-" xml:"-"`
+}
+
+type metadataInputService5TestShapeInputService5TestCaseOperation2Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService5TestShapeInputShape struct {
+ MapArg map[string]*string `type:"map"`
+
+ metadataInputService5TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService5TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService6ProtocolTest is a client for InputService6ProtocolTest.
+type InputService6ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService6ProtocolTest client.
+func NewInputService6ProtocolTest(config *aws.Config) *InputService6ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice6protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &InputService6ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService6ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService6ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService6TestCaseOperation1 = "OperationName"
+
+// InputService6TestCaseOperation1Request generates a request for the InputService6TestCaseOperation1 operation.
+func (c *InputService6ProtocolTest) InputService6TestCaseOperation1Request(input *InputService6TestShapeInputShape) (req *aws.Request, output *InputService6TestShapeInputService6TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService6TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &InputService6TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService6TestShapeInputService6TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService6ProtocolTest) InputService6TestCaseOperation1(input *InputService6TestShapeInputShape) (*InputService6TestShapeInputService6TestCaseOperation1Output, error) {
+ req, out := c.InputService6TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService6TestShapeInputService6TestCaseOperation1Output struct {
+ metadataInputService6TestShapeInputService6TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService6TestShapeInputService6TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService6TestShapeInputShape struct {
+ MapArg map[string]*string `locationNameKey:"TheKey" locationNameValue:"TheValue" type:"map"`
+
+ metadataInputService6TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService6TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService7ProtocolTest is a client for InputService7ProtocolTest.
+type InputService7ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService7ProtocolTest client.
+func NewInputService7ProtocolTest(config *aws.Config) *InputService7ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice7protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &InputService7ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService7ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService7ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService7TestCaseOperation1 = "OperationName"
+
+// InputService7TestCaseOperation1Request generates a request for the InputService7TestCaseOperation1 operation.
+func (c *InputService7ProtocolTest) InputService7TestCaseOperation1Request(input *InputService7TestShapeInputShape) (req *aws.Request, output *InputService7TestShapeInputService7TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService7TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &InputService7TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService7TestShapeInputService7TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService7ProtocolTest) InputService7TestCaseOperation1(input *InputService7TestShapeInputShape) (*InputService7TestShapeInputService7TestCaseOperation1Output, error) {
+ req, out := c.InputService7TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService7TestShapeInputService7TestCaseOperation1Output struct {
+ metadataInputService7TestShapeInputService7TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService7TestShapeInputService7TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService7TestShapeInputShape struct {
+ BlobArg []byte `type:"blob"`
+
+ metadataInputService7TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService7TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService8ProtocolTest is a client for InputService8ProtocolTest.
+type InputService8ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService8ProtocolTest client.
+func NewInputService8ProtocolTest(config *aws.Config) *InputService8ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice8protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &InputService8ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService8ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService8ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService8TestCaseOperation1 = "OperationName"
+
+// InputService8TestCaseOperation1Request generates a request for the InputService8TestCaseOperation1 operation.
+func (c *InputService8ProtocolTest) InputService8TestCaseOperation1Request(input *InputService8TestShapeInputShape) (req *aws.Request, output *InputService8TestShapeInputService8TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService8TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &InputService8TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService8TestShapeInputService8TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService8ProtocolTest) InputService8TestCaseOperation1(input *InputService8TestShapeInputShape) (*InputService8TestShapeInputService8TestCaseOperation1Output, error) {
+ req, out := c.InputService8TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService8TestShapeInputService8TestCaseOperation1Output struct {
+ metadataInputService8TestShapeInputService8TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService8TestShapeInputService8TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService8TestShapeInputShape struct {
+ TimeArg *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+ metadataInputService8TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService8TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService9ProtocolTest is a client for InputService9ProtocolTest.
+type InputService9ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService9ProtocolTest client.
+func NewInputService9ProtocolTest(config *aws.Config) *InputService9ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice9protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &InputService9ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService9ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService9ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService9TestCaseOperation1 = "OperationName"
+
+// InputService9TestCaseOperation1Request generates a request for the InputService9TestCaseOperation1 operation.
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation1Request(input *InputService9TestShapeInputShape) (req *aws.Request, output *InputService9TestShapeInputService9TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService9TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &InputService9TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService9TestShapeInputService9TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation1(input *InputService9TestShapeInputShape) (*InputService9TestShapeInputService9TestCaseOperation1Output, error) {
+ req, out := c.InputService9TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService9TestCaseOperation2 = "OperationName"
+
+// InputService9TestCaseOperation2Request generates a request for the InputService9TestCaseOperation2 operation.
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation2Request(input *InputService9TestShapeInputShape) (req *aws.Request, output *InputService9TestShapeInputService9TestCaseOperation2Output) {
+ op := &aws.Operation{
+ Name: opInputService9TestCaseOperation2,
+ }
+
+ if input == nil {
+ input = &InputService9TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService9TestShapeInputService9TestCaseOperation2Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation2(input *InputService9TestShapeInputShape) (*InputService9TestShapeInputService9TestCaseOperation2Output, error) {
+ req, out := c.InputService9TestCaseOperation2Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService9TestCaseOperation3 = "OperationName"
+
+// InputService9TestCaseOperation3Request generates a request for the InputService9TestCaseOperation3 operation.
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation3Request(input *InputService9TestShapeInputShape) (req *aws.Request, output *InputService9TestShapeInputService9TestCaseOperation3Output) {
+ op := &aws.Operation{
+ Name: opInputService9TestCaseOperation3,
+ }
+
+ if input == nil {
+ input = &InputService9TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService9TestShapeInputService9TestCaseOperation3Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation3(input *InputService9TestShapeInputShape) (*InputService9TestShapeInputService9TestCaseOperation3Output, error) {
+ req, out := c.InputService9TestCaseOperation3Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService9TestCaseOperation4 = "OperationName"
+
+// InputService9TestCaseOperation4Request generates a request for the InputService9TestCaseOperation4 operation.
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation4Request(input *InputService9TestShapeInputShape) (req *aws.Request, output *InputService9TestShapeInputService9TestCaseOperation4Output) {
+ op := &aws.Operation{
+ Name: opInputService9TestCaseOperation4,
+ }
+
+ if input == nil {
+ input = &InputService9TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService9TestShapeInputService9TestCaseOperation4Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation4(input *InputService9TestShapeInputShape) (*InputService9TestShapeInputService9TestCaseOperation4Output, error) {
+ req, out := c.InputService9TestCaseOperation4Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService9TestCaseOperation5 = "OperationName"
+
+// InputService9TestCaseOperation5Request generates a request for the InputService9TestCaseOperation5 operation.
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation5Request(input *InputService9TestShapeInputShape) (req *aws.Request, output *InputService9TestShapeInputService9TestCaseOperation5Output) {
+ op := &aws.Operation{
+ Name: opInputService9TestCaseOperation5,
+ }
+
+ if input == nil {
+ input = &InputService9TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService9TestShapeInputService9TestCaseOperation5Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation5(input *InputService9TestShapeInputShape) (*InputService9TestShapeInputService9TestCaseOperation5Output, error) {
+ req, out := c.InputService9TestCaseOperation5Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService9TestCaseOperation6 = "OperationName"
+
+// InputService9TestCaseOperation6Request generates a request for the InputService9TestCaseOperation6 operation.
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation6Request(input *InputService9TestShapeInputShape) (req *aws.Request, output *InputService9TestShapeInputService9TestCaseOperation6Output) {
+ op := &aws.Operation{
+ Name: opInputService9TestCaseOperation6,
+ }
+
+ if input == nil {
+ input = &InputService9TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService9TestShapeInputService9TestCaseOperation6Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation6(input *InputService9TestShapeInputShape) (*InputService9TestShapeInputService9TestCaseOperation6Output, error) {
+ req, out := c.InputService9TestCaseOperation6Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService9TestShapeInputService9TestCaseOperation1Output struct {
+ metadataInputService9TestShapeInputService9TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeInputService9TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService9TestShapeInputService9TestCaseOperation2Output struct {
+ metadataInputService9TestShapeInputService9TestCaseOperation2Output `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeInputService9TestCaseOperation2Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService9TestShapeInputService9TestCaseOperation3Output struct {
+ metadataInputService9TestShapeInputService9TestCaseOperation3Output `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeInputService9TestCaseOperation3Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService9TestShapeInputService9TestCaseOperation4Output struct {
+ metadataInputService9TestShapeInputService9TestCaseOperation4Output `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeInputService9TestCaseOperation4Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService9TestShapeInputService9TestCaseOperation5Output struct {
+ metadataInputService9TestShapeInputService9TestCaseOperation5Output `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeInputService9TestCaseOperation5Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService9TestShapeInputService9TestCaseOperation6Output struct {
+ metadataInputService9TestShapeInputService9TestCaseOperation6Output `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeInputService9TestCaseOperation6Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService9TestShapeInputShape struct {
+ RecursiveStruct *InputService9TestShapeRecursiveStructType `type:"structure"`
+
+ metadataInputService9TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService9TestShapeRecursiveStructType struct {
+ NoRecurse *string `type:"string"`
+
+ RecursiveList []*InputService9TestShapeRecursiveStructType `type:"list"`
+
+ RecursiveMap map[string]*InputService9TestShapeRecursiveStructType `type:"map"`
+
+ RecursiveStruct *InputService9TestShapeRecursiveStructType `type:"structure"`
+
+ metadataInputService9TestShapeRecursiveStructType `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeRecursiveStructType struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+//
+// Tests begin here
+//
+
+func TestInputService1ProtocolTestScalarMembersCase1(t *testing.T) {
+ svc := NewInputService1ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService1TestShapeInputShape{
+ Bar: aws.String("val2"),
+ Foo: aws.String("val1"),
+ }
+ req, _ := svc.InputService1TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&Bar=val2&Foo=val1&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService2ProtocolTestNestedStructureMembersCase1(t *testing.T) {
+ svc := NewInputService2ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService2TestShapeInputShape{
+ StructArg: &InputService2TestShapeStructType{
+ ScalarArg: aws.String("foo"),
+ },
+ }
+ req, _ := svc.InputService2TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&StructArg.ScalarArg=foo&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService3ProtocolTestListTypesCase1(t *testing.T) {
+ svc := NewInputService3ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService3TestShapeInputShape{
+ ListArg: []*string{
+ aws.String("foo"),
+ aws.String("bar"),
+ aws.String("baz"),
+ },
+ }
+ req, _ := svc.InputService3TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&ListArg.member.1=foo&ListArg.member.2=bar&ListArg.member.3=baz&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService3ProtocolTestListTypesCase2(t *testing.T) {
+ svc := NewInputService3ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService3TestShapeInputShape{
+ ListArg: []*string{},
+ }
+ req, _ := svc.InputService3TestCaseOperation2Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&ListArg=&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService4ProtocolTestFlattenedListCase1(t *testing.T) {
+ svc := NewInputService4ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService4TestShapeInputShape{
+ ListArg: []*string{
+ aws.String("a"),
+ aws.String("b"),
+ aws.String("c"),
+ },
+ ScalarArg: aws.String("foo"),
+ }
+ req, _ := svc.InputService4TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&ListArg.1=a&ListArg.2=b&ListArg.3=c&ScalarArg=foo&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService4ProtocolTestFlattenedListCase2(t *testing.T) {
+ svc := NewInputService4ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService4TestShapeInputShape{
+ ListArg: []*string{},
+ ScalarArg: aws.String("foo"),
+ }
+ req, _ := svc.InputService4TestCaseOperation2Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&ListArg=&ScalarArg=foo&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService5ProtocolTestSerializeMapTypeCase1(t *testing.T) {
+ svc := NewInputService5ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService5TestShapeInputShape{
+ MapArg: map[string]*string{
+ "key1": aws.String("val1"),
+ "key2": aws.String("val2"),
+ },
+ }
+ req, _ := svc.InputService5TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&MapArg.entry.1.key=key1&MapArg.entry.1.value=val1&MapArg.entry.2.key=key2&MapArg.entry.2.value=val2&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService5ProtocolTestSerializeMapTypeCase2(t *testing.T) {
+ svc := NewInputService5ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService5TestShapeInputShape{
+ MapArg: map[string]*string{},
+ }
+ req, _ := svc.InputService5TestCaseOperation2Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&MapArg=&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService6ProtocolTestSerializeMapTypeWithLocationNameCase1(t *testing.T) {
+ svc := NewInputService6ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService6TestShapeInputShape{
+ MapArg: map[string]*string{
+ "key1": aws.String("val1"),
+ "key2": aws.String("val2"),
+ },
+ }
+ req, _ := svc.InputService6TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&MapArg.entry.1.TheKey=key1&MapArg.entry.1.TheValue=val1&MapArg.entry.2.TheKey=key2&MapArg.entry.2.TheValue=val2&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService7ProtocolTestBase64EncodedBlobsCase1(t *testing.T) {
+ svc := NewInputService7ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService7TestShapeInputShape{
+ BlobArg: []byte("foo"),
+ }
+ req, _ := svc.InputService7TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&BlobArg=Zm9v&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService8ProtocolTestTimestampValuesCase1(t *testing.T) {
+ svc := NewInputService8ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService8TestShapeInputShape{
+ TimeArg: aws.Time(time.Unix(1422172800, 0)),
+ }
+ req, _ := svc.InputService8TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&TimeArg=2015-01-25T08%3A00%3A00Z&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService9ProtocolTestRecursiveShapesCase1(t *testing.T) {
+ svc := NewInputService9ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService9TestShapeInputShape{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ NoRecurse: aws.String("foo"),
+ },
+ }
+ req, _ := svc.InputService9TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&RecursiveStruct.NoRecurse=foo&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService9ProtocolTestRecursiveShapesCase2(t *testing.T) {
+ svc := NewInputService9ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService9TestShapeInputShape{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ NoRecurse: aws.String("foo"),
+ },
+ },
+ }
+ req, _ := svc.InputService9TestCaseOperation2Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&RecursiveStruct.RecursiveStruct.NoRecurse=foo&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService9ProtocolTestRecursiveShapesCase3(t *testing.T) {
+ svc := NewInputService9ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService9TestShapeInputShape{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ NoRecurse: aws.String("foo"),
+ },
+ },
+ },
+ },
+ }
+ req, _ := svc.InputService9TestCaseOperation3Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&RecursiveStruct.RecursiveStruct.RecursiveStruct.RecursiveStruct.NoRecurse=foo&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService9ProtocolTestRecursiveShapesCase4(t *testing.T) {
+ svc := NewInputService9ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService9TestShapeInputShape{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ RecursiveList: []*InputService9TestShapeRecursiveStructType{
+ {
+ NoRecurse: aws.String("foo"),
+ },
+ {
+ NoRecurse: aws.String("bar"),
+ },
+ },
+ },
+ }
+ req, _ := svc.InputService9TestCaseOperation4Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&RecursiveStruct.RecursiveList.member.1.NoRecurse=foo&RecursiveStruct.RecursiveList.member.2.NoRecurse=bar&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService9ProtocolTestRecursiveShapesCase5(t *testing.T) {
+ svc := NewInputService9ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService9TestShapeInputShape{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ RecursiveList: []*InputService9TestShapeRecursiveStructType{
+ {
+ NoRecurse: aws.String("foo"),
+ },
+ {
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ NoRecurse: aws.String("bar"),
+ },
+ },
+ },
+ },
+ }
+ req, _ := svc.InputService9TestCaseOperation5Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&RecursiveStruct.RecursiveList.member.1.NoRecurse=foo&RecursiveStruct.RecursiveList.member.2.RecursiveStruct.NoRecurse=bar&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService9ProtocolTestRecursiveShapesCase6(t *testing.T) {
+ svc := NewInputService9ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService9TestShapeInputShape{
+ RecursiveStruct: &InputService9TestShapeRecursiveStructType{
+ RecursiveMap: map[string]*InputService9TestShapeRecursiveStructType{
+ "bar": {
+ NoRecurse: aws.String("bar"),
+ },
+ "foo": {
+ NoRecurse: aws.String("foo"),
+ },
+ },
+ },
+ }
+ req, _ := svc.InputService9TestCaseOperation6Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ query.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body, _ := ioutil.ReadAll(r.Body)
+ assert.Equal(t, util.Trim(`Action=OperationName&RecursiveStruct.RecursiveMap.entry.1.key=bar&RecursiveStruct.RecursiveMap.entry.1.value.NoRecurse=bar&RecursiveStruct.RecursiveMap.entry.2.key=foo&RecursiveStruct.RecursiveMap.entry.2.value.NoRecurse=foo&Version=2014-01-01`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/queryutil/queryutil.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/queryutil/queryutil.go
new file mode 100644
index 0000000..3b417a8
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/queryutil/queryutil.go
@@ -0,0 +1,223 @@
+package queryutil
+
+import (
+ "encoding/base64"
+ "fmt"
+ "net/url"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// Parse parses an object i and fills a url.Values object. The isEC2 flag
+// indicates if this is the EC2 Query sub-protocol.
+func Parse(body url.Values, i interface{}, isEC2 bool) error {
+ q := queryParser{isEC2: isEC2}
+ // Walk i recursively via reflection; prefix and tag start empty at the root.
+ return q.parseValue(body, reflect.ValueOf(i), "", "")
+}
+
+// elemOf dereferences pointers until a non-pointer value is reached.
+// A nil pointer yields an invalid reflect.Value, which callers must check.
+func elemOf(value reflect.Value) reflect.Value {
+ for value.Kind() == reflect.Ptr {
+ value = value.Elem()
+ }
+ return value
+}
+
+// queryParser serializes Go values into url.Values for the AWS Query
+// protocol. isEC2 selects the EC2 sub-protocol naming rules.
+type queryParser struct {
+ isEC2 bool
+}
+
+// parseValue dispatches serialization of value into v based on the "type"
+// struct tag; when the tag is absent, the type is inferred from the
+// reflect.Kind (struct, slice, map), falling back to scalar handling.
+func (q *queryParser) parseValue(v url.Values, value reflect.Value, prefix string, tag reflect.StructTag) error {
+ value = elemOf(value)
+
+ // no need to handle zero values
+ if !value.IsValid() {
+ return nil
+ }
+
+ t := tag.Get("type")
+ if t == "" {
+ // No explicit type tag: infer the shape category from the kind.
+ switch value.Kind() {
+ case reflect.Struct:
+ t = "structure"
+ case reflect.Slice:
+ t = "list"
+ case reflect.Map:
+ t = "map"
+ }
+ }
+
+ switch t {
+ case "structure":
+ return q.parseStruct(v, value, prefix)
+ case "list":
+ return q.parseList(v, value, prefix, tag)
+ case "map":
+ return q.parseMap(v, value, prefix, tag)
+ default:
+ return q.parseScalar(v, value, prefix, tag)
+ }
+}
+
+// parseStruct serializes the exported fields of a struct into v. The wire
+// name for each field is resolved in priority order: the EC2 "queryName"
+// tag (EC2 only); "locationNameList" when the field is flattened;
+// "locationName" (first letter upper-cased for EC2); finally the Go field
+// name. prefix, when non-empty, is prepended with a "." separator.
+func (q *queryParser) parseStruct(v url.Values, value reflect.Value, prefix string) error {
+ if !value.IsValid() {
+ return nil
+ }
+
+ t := value.Type()
+ for i := 0; i < value.NumField(); i++ {
+ if c := t.Field(i).Name[0:1]; strings.ToLower(c) == c {
+ continue // ignore unexported fields
+ }
+
+ value := elemOf(value.Field(i))
+ field := t.Field(i)
+ var name string
+
+ if q.isEC2 {
+ name = field.Tag.Get("queryName")
+ }
+ if name == "" {
+ if field.Tag.Get("flattened") != "" && field.Tag.Get("locationNameList") != "" {
+ name = field.Tag.Get("locationNameList")
+ } else if locName := field.Tag.Get("locationName"); locName != "" {
+ name = locName
+ }
+ // EC2 capitalizes the first letter of location names on the wire.
+ if name != "" && q.isEC2 {
+ name = strings.ToUpper(name[0:1]) + name[1:]
+ }
+ }
+ if name == "" {
+ name = field.Name
+ }
+
+ if prefix != "" {
+ name = prefix + "." + name
+ }
+
+ if err := q.parseValue(v, value, name, field.Tag); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// parseList serializes a slice into v. A non-nil but empty slice emits a
+// single empty-valued parameter under prefix. Non-flattened (non-EC2)
+// lists have ".member" appended to the prefix; members are numbered
+// starting at 1.
+func (q *queryParser) parseList(v url.Values, value reflect.Value, prefix string, tag reflect.StructTag) error {
+ // If it's empty, generate an empty value
+ if !value.IsNil() && value.Len() == 0 {
+ v.Set(prefix, "")
+ return nil
+ }
+
+ // check for unflattened list member
+ if !q.isEC2 && tag.Get("flattened") == "" {
+ prefix += ".member"
+ }
+
+ for i := 0; i < value.Len(); i++ {
+ slicePrefix := prefix
+ if slicePrefix == "" {
+ slicePrefix = strconv.Itoa(i + 1)
+ } else {
+ slicePrefix = slicePrefix + "." + strconv.Itoa(i+1)
+ }
+ if err := q.parseValue(v, value.Index(i), slicePrefix, ""); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// parseMap serializes a map into v. A non-nil but empty map emits a single
+// empty-valued parameter under prefix. Non-flattened (non-EC2) maps have
+// ".entry" appended to the prefix. Entry key/value element names default
+// to "key"/"value" and can be overridden with the "locationNameKey" and
+// "locationNameValue" tags. Entries are numbered starting at 1 in sorted
+// key order for deterministic output.
+func (q *queryParser) parseMap(v url.Values, value reflect.Value, prefix string, tag reflect.StructTag) error {
+ // If it's empty, generate an empty value
+ if !value.IsNil() && value.Len() == 0 {
+ v.Set(prefix, "")
+ return nil
+ }
+
+ // check for unflattened list member
+ if !q.isEC2 && tag.Get("flattened") == "" {
+ prefix += ".entry"
+ }
+
+ // sort keys for improved serialization consistency.
+ // this is not strictly necessary for protocol support.
+ mapKeyValues := value.MapKeys()
+ mapKeys := map[string]reflect.Value{}
+ mapKeyNames := make([]string, len(mapKeyValues))
+ for i, mapKey := range mapKeyValues {
+ name := mapKey.String()
+ mapKeys[name] = mapKey
+ mapKeyNames[i] = name
+ }
+ sort.Strings(mapKeyNames)
+
+ for i, mapKeyName := range mapKeyNames {
+ mapKey := mapKeys[mapKeyName]
+ mapValue := value.MapIndex(mapKey)
+
+ kname := tag.Get("locationNameKey")
+ if kname == "" {
+ kname = "key"
+ }
+ vname := tag.Get("locationNameValue")
+ if vname == "" {
+ vname = "value"
+ }
+
+ // serialize key
+ var keyName string
+ if prefix == "" {
+ keyName = strconv.Itoa(i+1) + "." + kname
+ } else {
+ keyName = prefix + "." + strconv.Itoa(i+1) + "." + kname
+ }
+
+ if err := q.parseValue(v, mapKey, keyName, ""); err != nil {
+ return err
+ }
+
+ // serialize value
+ var valueName string
+ if prefix == "" {
+ valueName = strconv.Itoa(i+1) + "." + vname
+ } else {
+ valueName = prefix + "." + strconv.Itoa(i+1) + "." + vname
+ }
+
+ if err := q.parseValue(v, mapValue, valueName, ""); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// parseScalar serializes a single leaf value into v under name. Supported
+// types: string, []byte (base64-encoded), bool, int64, int, float64,
+// float32, and time.Time (formatted as ISO 8601 in UTC). Any other type
+// returns an error naming the offending parameter.
+func (q *queryParser) parseScalar(v url.Values, r reflect.Value, name string, tag reflect.StructTag) error {
+ switch value := r.Interface().(type) {
+ case string:
+ v.Set(name, value)
+ case []byte:
+ // nil byte slices are omitted entirely rather than sent empty.
+ if !r.IsNil() {
+ v.Set(name, base64.StdEncoding.EncodeToString(value))
+ }
+ case bool:
+ v.Set(name, strconv.FormatBool(value))
+ case int64:
+ v.Set(name, strconv.FormatInt(value, 10))
+ case int:
+ v.Set(name, strconv.Itoa(value))
+ case float64:
+ v.Set(name, strconv.FormatFloat(value, 'f', -1, 64))
+ case float32:
+ v.Set(name, strconv.FormatFloat(float64(value), 'f', -1, 32))
+ case time.Time:
+ const ISO8601UTC = "2006-01-02T15:04:05Z"
+ v.Set(name, value.UTC().Format(ISO8601UTC))
+ default:
+ return fmt.Errorf("unsupported value for param %s: %v (%s)", name, r.Interface(), r.Type().Name())
+ }
+ return nil
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/unmarshal.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/unmarshal.go
new file mode 100644
index 0000000..e8cfa92
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/unmarshal.go
@@ -0,0 +1,29 @@
+package query
+
+//go:generate go run ../../fixtures/protocol/generate.go ../../fixtures/protocol/output/query.json unmarshal_test.go
+
+import (
+ "encoding/xml"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil"
+)
+
+// Unmarshal unmarshals a response for an AWS Query service.
+// The XML body is decoded into r.Data starting at the element named
+// "<OperationName>Result"; decode failures are surfaced as a
+// SerializationError on r.Error. The response body is always closed.
+func Unmarshal(r *aws.Request) {
+ defer r.HTTPResponse.Body.Close()
+ if r.DataFilled() {
+ decoder := xml.NewDecoder(r.HTTPResponse.Body)
+ err := xmlutil.UnmarshalXML(r.Data, decoder, r.Operation.Name+"Result")
+ if err != nil {
+ r.Error = awserr.New("SerializationError", "failed decoding Query response", err)
+ return
+ }
+ }
+}
+
+// UnmarshalMeta unmarshals header response values for an AWS Query service.
+// Currently a no-op placeholder.
+func UnmarshalMeta(r *aws.Request) {
+ // TODO implement unmarshaling of request IDs
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/unmarshal_error.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/unmarshal_error.go
new file mode 100644
index 0000000..d88ee33
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/unmarshal_error.go
@@ -0,0 +1,33 @@
+package query
+
+import (
+ "encoding/xml"
+ "io"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+// xmlErrorResponse mirrors the <ErrorResponse> XML body returned by AWS
+// Query services on failure: error code and message nested under <Error>,
+// plus the top-level <RequestId>.
+type xmlErrorResponse struct {
+ XMLName xml.Name `xml:"ErrorResponse"`
+ Code string `xml:"Error>Code"`
+ Message string `xml:"Error>Message"`
+ RequestID string `xml:"RequestId"`
+}
+
+// UnmarshalError unmarshals an error response for an AWS Query service.
+// An io.EOF from the decoder (empty body) is not treated as a decode
+// failure: a request failure is still built from the (zero-valued)
+// response together with the HTTP status code. Any other decode error is
+// reported as a SerializationError.
+func UnmarshalError(r *aws.Request) {
+ defer r.HTTPResponse.Body.Close()
+
+ resp := &xmlErrorResponse{}
+ err := xml.NewDecoder(r.HTTPResponse.Body).Decode(resp)
+ if err != nil && err != io.EOF {
+ r.Error = awserr.New("SerializationError", "failed to decode query XML error response", err)
+ } else {
+ r.Error = awserr.NewRequestFailure(
+ awserr.New(resp.Code, resp.Message, nil),
+ r.HTTPResponse.StatusCode,
+ resp.RequestID,
+ )
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/unmarshal_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/unmarshal_test.go
new file mode 100644
index 0000000..924d3d4
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/query/unmarshal_test.go
@@ -0,0 +1,1432 @@
+package query_test
+
+import (
+ "bytes"
+ "encoding/json"
+ "encoding/xml"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "testing"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/protocol/query"
+ "github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil"
+ "github.com/aws/aws-sdk-go/internal/signer/v4"
+ "github.com/aws/aws-sdk-go/internal/util"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ bytes.Buffer // always import bytes
+var _ http.Request
+var _ json.Marshaler
+var _ time.Time
+var _ xmlutil.XMLNode
+var _ xml.Attr
+var _ = ioutil.Discard
+var _ = util.Trim("")
+var _ = url.Values{}
+var _ = io.EOF
+
+// OutputService1ProtocolTest is a client for OutputService1ProtocolTest.
+type OutputService1ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService1ProtocolTest client.
+func NewOutputService1ProtocolTest(config *aws.Config) *OutputService1ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice1protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService1ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService1ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService1ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService1TestCaseOperation1 = "OperationName"
+
+// OutputService1TestCaseOperation1Request generates a request for the OutputService1TestCaseOperation1 operation.
+func (c *OutputService1ProtocolTest) OutputService1TestCaseOperation1Request(input *OutputService1TestShapeOutputService1TestCaseOperation1Input) (req *aws.Request, output *OutputService1TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService1TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService1TestShapeOutputService1TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService1TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService1ProtocolTest) OutputService1TestCaseOperation1(input *OutputService1TestShapeOutputService1TestCaseOperation1Input) (*OutputService1TestShapeOutputShape, error) {
+ req, out := c.OutputService1TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService1TestShapeOutputService1TestCaseOperation1Input struct {
+ metadataOutputService1TestShapeOutputService1TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService1TestShapeOutputService1TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService1TestShapeOutputShape struct {
+ Char *string `type:"character"`
+
+ Double *float64 `type:"double"`
+
+ FalseBool *bool `type:"boolean"`
+
+ Float *float64 `type:"float"`
+
+ Long *int64 `type:"long"`
+
+ Num *int64 `locationName:"FooNum" type:"integer"`
+
+ Str *string `type:"string"`
+
+ Timestamp *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+ TrueBool *bool `type:"boolean"`
+
+ metadataOutputService1TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService1TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService2ProtocolTest is a client for OutputService2ProtocolTest.
+type OutputService2ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService2ProtocolTest client.
+func NewOutputService2ProtocolTest(config *aws.Config) *OutputService2ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice2protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService2ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService2ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService2ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService2TestCaseOperation1 = "OperationName"
+
+// OutputService2TestCaseOperation1Request generates a request for the OutputService2TestCaseOperation1 operation.
+func (c *OutputService2ProtocolTest) OutputService2TestCaseOperation1Request(input *OutputService2TestShapeOutputService2TestCaseOperation1Input) (req *aws.Request, output *OutputService2TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService2TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService2TestShapeOutputService2TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService2TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService2ProtocolTest) OutputService2TestCaseOperation1(input *OutputService2TestShapeOutputService2TestCaseOperation1Input) (*OutputService2TestShapeOutputShape, error) {
+ req, out := c.OutputService2TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService2TestShapeOutputService2TestCaseOperation1Input struct {
+ metadataOutputService2TestShapeOutputService2TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService2TestShapeOutputService2TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService2TestShapeOutputShape struct {
+ Num *int64 `type:"integer"`
+
+ Str *string `type:"string"`
+
+ metadataOutputService2TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService2TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService3ProtocolTest is a client for OutputService3ProtocolTest.
+type OutputService3ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService3ProtocolTest client.
+func NewOutputService3ProtocolTest(config *aws.Config) *OutputService3ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice3protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService3ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService3ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService3ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService3TestCaseOperation1 = "OperationName"
+
+// OutputService3TestCaseOperation1Request generates a request for the OutputService3TestCaseOperation1 operation.
+func (c *OutputService3ProtocolTest) OutputService3TestCaseOperation1Request(input *OutputService3TestShapeOutputService3TestCaseOperation1Input) (req *aws.Request, output *OutputService3TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService3TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService3TestShapeOutputService3TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService3TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService3ProtocolTest) OutputService3TestCaseOperation1(input *OutputService3TestShapeOutputService3TestCaseOperation1Input) (*OutputService3TestShapeOutputShape, error) {
+ req, out := c.OutputService3TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService3TestShapeOutputService3TestCaseOperation1Input struct {
+ metadataOutputService3TestShapeOutputService3TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService3TestShapeOutputService3TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService3TestShapeOutputShape struct {
+ Blob []byte `type:"blob"`
+
+ metadataOutputService3TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService3TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService4ProtocolTest is a client for OutputService4ProtocolTest.
+type OutputService4ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService4ProtocolTest client.
+func NewOutputService4ProtocolTest(config *aws.Config) *OutputService4ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice4protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService4ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService4ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService4ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService4TestCaseOperation1 = "OperationName"
+
+// OutputService4TestCaseOperation1Request generates a request for the OutputService4TestCaseOperation1 operation.
+func (c *OutputService4ProtocolTest) OutputService4TestCaseOperation1Request(input *OutputService4TestShapeOutputService4TestCaseOperation1Input) (req *aws.Request, output *OutputService4TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService4TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService4TestShapeOutputService4TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService4TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService4ProtocolTest) OutputService4TestCaseOperation1(input *OutputService4TestShapeOutputService4TestCaseOperation1Input) (*OutputService4TestShapeOutputShape, error) {
+ req, out := c.OutputService4TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService4TestShapeOutputService4TestCaseOperation1Input struct {
+ metadataOutputService4TestShapeOutputService4TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService4TestShapeOutputService4TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService4TestShapeOutputShape struct {
+ ListMember []*string `type:"list"`
+
+ metadataOutputService4TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService4TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService5ProtocolTest is a client for OutputService5ProtocolTest (generated query-protocol test fixture).
+type OutputService5ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService5ProtocolTest returns a new OutputService5ProtocolTest client.
+func NewOutputService5ProtocolTest(config *aws.Config) *OutputService5ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice5protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService5ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService5ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService5ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService5TestCaseOperation1 = "OperationName"
+
+// OutputService5TestCaseOperation1Request generates a request for the OutputService5TestCaseOperation1 operation.
+func (c *OutputService5ProtocolTest) OutputService5TestCaseOperation1Request(input *OutputService5TestShapeOutputService5TestCaseOperation1Input) (req *aws.Request, output *OutputService5TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService5TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService5TestShapeOutputService5TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService5TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService5ProtocolTest) OutputService5TestCaseOperation1(input *OutputService5TestShapeOutputService5TestCaseOperation1Input) (*OutputService5TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService5TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService5TestShapeOutputService5TestCaseOperation1Input struct {
+ metadataOutputService5TestShapeOutputService5TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService5TestShapeOutputService5TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService5TestShapeOutputShape struct {
+ ListMember []*string `locationNameList:"item" type:"list"` // non-flattened list whose members arrive as <item> elements
+
+ metadataOutputService5TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService5TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService6ProtocolTest is a client for OutputService6ProtocolTest (generated query-protocol test fixture).
+type OutputService6ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService6ProtocolTest returns a new OutputService6ProtocolTest client.
+func NewOutputService6ProtocolTest(config *aws.Config) *OutputService6ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice6protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService6ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService6ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService6ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService6TestCaseOperation1 = "OperationName"
+
+// OutputService6TestCaseOperation1Request generates a request for the OutputService6TestCaseOperation1 operation.
+func (c *OutputService6ProtocolTest) OutputService6TestCaseOperation1Request(input *OutputService6TestShapeOutputService6TestCaseOperation1Input) (req *aws.Request, output *OutputService6TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService6TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService6TestShapeOutputService6TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService6TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService6ProtocolTest) OutputService6TestCaseOperation1(input *OutputService6TestShapeOutputService6TestCaseOperation1Input) (*OutputService6TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService6TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService6TestShapeOutputService6TestCaseOperation1Input struct {
+ metadataOutputService6TestShapeOutputService6TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService6TestShapeOutputService6TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService6TestShapeOutputShape struct {
+ ListMember []*string `type:"list" flattened:"true"` // flattened list: repeated <ListMember> elements, no wrapper
+
+ metadataOutputService6TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService6TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService7ProtocolTest is a client for OutputService7ProtocolTest (generated query-protocol test fixture).
+type OutputService7ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService7ProtocolTest returns a new OutputService7ProtocolTest client.
+func NewOutputService7ProtocolTest(config *aws.Config) *OutputService7ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice7protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService7ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService7ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService7ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService7TestCaseOperation1 = "OperationName"
+
+// OutputService7TestCaseOperation1Request generates a request for the OutputService7TestCaseOperation1 operation.
+func (c *OutputService7ProtocolTest) OutputService7TestCaseOperation1Request(input *OutputService7TestShapeOutputService7TestCaseOperation1Input) (req *aws.Request, output *OutputService7TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService7TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService7TestShapeOutputService7TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService7TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService7ProtocolTest) OutputService7TestCaseOperation1(input *OutputService7TestShapeOutputService7TestCaseOperation1Input) (*OutputService7TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService7TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService7TestShapeOutputService7TestCaseOperation1Input struct {
+ metadataOutputService7TestShapeOutputService7TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService7TestShapeOutputService7TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService7TestShapeOutputShape struct {
+ ListMember []*string `type:"list" flattened:"true"` // flattened list: repeated <ListMember> elements, no wrapper
+
+ metadataOutputService7TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService7TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService8ProtocolTest is a client for OutputService8ProtocolTest (generated query-protocol test fixture).
+type OutputService8ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService8ProtocolTest returns a new OutputService8ProtocolTest client.
+func NewOutputService8ProtocolTest(config *aws.Config) *OutputService8ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice8protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService8ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService8ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService8ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService8TestCaseOperation1 = "OperationName"
+
+// OutputService8TestCaseOperation1Request generates a request for the OutputService8TestCaseOperation1 operation.
+func (c *OutputService8ProtocolTest) OutputService8TestCaseOperation1Request(input *OutputService8TestShapeOutputService8TestCaseOperation1Input) (req *aws.Request, output *OutputService8TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService8TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService8TestShapeOutputService8TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService8TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService8ProtocolTest) OutputService8TestCaseOperation1(input *OutputService8TestShapeOutputService8TestCaseOperation1Input) (*OutputService8TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService8TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService8TestShapeOutputService8TestCaseOperation1Input struct {
+ metadataOutputService8TestShapeOutputService8TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService8TestShapeOutputService8TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService8TestShapeOutputShape struct {
+ List []*OutputService8TestShapeStructureShape `type:"list"` // non-flattened list of structures wrapped in <member> elements
+
+ metadataOutputService8TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService8TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService8TestShapeStructureShape struct {
+ Bar *string `type:"string"`
+
+ Baz *string `type:"string"`
+
+ Foo *string `type:"string"`
+
+ metadataOutputService8TestShapeStructureShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService8TestShapeStructureShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService9ProtocolTest is a client for OutputService9ProtocolTest (generated query-protocol test fixture).
+type OutputService9ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService9ProtocolTest returns a new OutputService9ProtocolTest client.
+func NewOutputService9ProtocolTest(config *aws.Config) *OutputService9ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice9protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService9ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService9ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService9ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService9TestCaseOperation1 = "OperationName"
+
+// OutputService9TestCaseOperation1Request generates a request for the OutputService9TestCaseOperation1 operation.
+func (c *OutputService9ProtocolTest) OutputService9TestCaseOperation1Request(input *OutputService9TestShapeOutputService9TestCaseOperation1Input) (req *aws.Request, output *OutputService9TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService9TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService9TestShapeOutputService9TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService9TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService9ProtocolTest) OutputService9TestCaseOperation1(input *OutputService9TestShapeOutputService9TestCaseOperation1Input) (*OutputService9TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService9TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService9TestShapeOutputService9TestCaseOperation1Input struct {
+ metadataOutputService9TestShapeOutputService9TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService9TestShapeOutputService9TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService9TestShapeOutputShape struct {
+ List []*OutputService9TestShapeStructureShape `type:"list" flattened:"true"` // flattened list: repeated <List> elements, each a structure
+
+ metadataOutputService9TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService9TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService9TestShapeStructureShape struct {
+ Bar *string `type:"string"`
+
+ Baz *string `type:"string"`
+
+ Foo *string `type:"string"`
+
+ metadataOutputService9TestShapeStructureShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService9TestShapeStructureShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService10ProtocolTest is a client for OutputService10ProtocolTest (generated query-protocol test fixture).
+type OutputService10ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService10ProtocolTest returns a new OutputService10ProtocolTest client.
+func NewOutputService10ProtocolTest(config *aws.Config) *OutputService10ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice10protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService10ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService10ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService10ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService10TestCaseOperation1 = "OperationName"
+
+// OutputService10TestCaseOperation1Request generates a request for the OutputService10TestCaseOperation1 operation.
+func (c *OutputService10ProtocolTest) OutputService10TestCaseOperation1Request(input *OutputService10TestShapeOutputService10TestCaseOperation1Input) (req *aws.Request, output *OutputService10TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService10TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService10TestShapeOutputService10TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService10TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService10ProtocolTest) OutputService10TestCaseOperation1(input *OutputService10TestShapeOutputService10TestCaseOperation1Input) (*OutputService10TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService10TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService10TestShapeOutputService10TestCaseOperation1Input struct {
+ metadataOutputService10TestShapeOutputService10TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService10TestShapeOutputService10TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService10TestShapeOutputShape struct {
+ List []*string `locationNameList:"NamedList" type:"list" flattened:"true"` // flattened list serialized as repeated <NamedList> elements
+
+ metadataOutputService10TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService10TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService11ProtocolTest is a client for OutputService11ProtocolTest (generated query-protocol test fixture).
+type OutputService11ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService11ProtocolTest returns a new OutputService11ProtocolTest client.
+func NewOutputService11ProtocolTest(config *aws.Config) *OutputService11ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice11protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService11ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService11ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService11ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService11TestCaseOperation1 = "OperationName"
+
+// OutputService11TestCaseOperation1Request generates a request for the OutputService11TestCaseOperation1 operation.
+func (c *OutputService11ProtocolTest) OutputService11TestCaseOperation1Request(input *OutputService11TestShapeOutputService11TestCaseOperation1Input) (req *aws.Request, output *OutputService11TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService11TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService11TestShapeOutputService11TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService11TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService11ProtocolTest) OutputService11TestCaseOperation1(input *OutputService11TestShapeOutputService11TestCaseOperation1Input) (*OutputService11TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService11TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService11TestShapeOutputService11TestCaseOperation1Input struct {
+ metadataOutputService11TestShapeOutputService11TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService11TestShapeOutputService11TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService11TestShapeOutputShape struct {
+ Map map[string]*OutputService11TestShapeStructType `type:"map"` // non-flattened map of structure values
+
+ metadataOutputService11TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService11TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService11TestShapeStructType struct {
+ Foo *string `locationName:"foo" type:"string"` // serialized with a lowercase <foo> element name
+
+ metadataOutputService11TestShapeStructType `json:"-" xml:"-"`
+}
+
+type metadataOutputService11TestShapeStructType struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService12ProtocolTest is a client for OutputService12ProtocolTest (generated query-protocol test fixture).
+type OutputService12ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService12ProtocolTest returns a new OutputService12ProtocolTest client.
+func NewOutputService12ProtocolTest(config *aws.Config) *OutputService12ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice12protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService12ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService12ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService12ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService12TestCaseOperation1 = "OperationName"
+
+// OutputService12TestCaseOperation1Request generates a request for the OutputService12TestCaseOperation1 operation.
+func (c *OutputService12ProtocolTest) OutputService12TestCaseOperation1Request(input *OutputService12TestShapeOutputService12TestCaseOperation1Input) (req *aws.Request, output *OutputService12TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService12TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService12TestShapeOutputService12TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService12TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService12ProtocolTest) OutputService12TestCaseOperation1(input *OutputService12TestShapeOutputService12TestCaseOperation1Input) (*OutputService12TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService12TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService12TestShapeOutputService12TestCaseOperation1Input struct {
+ metadataOutputService12TestShapeOutputService12TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService12TestShapeOutputService12TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService12TestShapeOutputShape struct {
+ Map map[string]*string `type:"map" flattened:"true"` // flattened map: repeated <Map> entries holding <key>/<value>
+
+ metadataOutputService12TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService12TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService13ProtocolTest is a client for OutputService13ProtocolTest (generated query-protocol test fixture).
+type OutputService13ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService13ProtocolTest returns a new OutputService13ProtocolTest client.
+func NewOutputService13ProtocolTest(config *aws.Config) *OutputService13ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice13protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService13ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService13ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService13ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService13TestCaseOperation1 = "OperationName"
+
+// OutputService13TestCaseOperation1Request generates a request for the OutputService13TestCaseOperation1 operation.
+func (c *OutputService13ProtocolTest) OutputService13TestCaseOperation1Request(input *OutputService13TestShapeOutputService13TestCaseOperation1Input) (req *aws.Request, output *OutputService13TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService13TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService13TestShapeOutputService13TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService13TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService13ProtocolTest) OutputService13TestCaseOperation1(input *OutputService13TestShapeOutputService13TestCaseOperation1Input) (*OutputService13TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService13TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService13TestShapeOutputService13TestCaseOperation1Input struct {
+ metadataOutputService13TestShapeOutputService13TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService13TestShapeOutputService13TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService13TestShapeOutputShape struct {
+ Map map[string]*string `locationName:"Attribute" locationNameKey:"Name" locationNameValue:"Value" type:"map" flattened:"true"` // flattened map renamed to <Attribute> entries with <Name>/<Value> pairs
+
+ metadataOutputService13TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService13TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService14ProtocolTest is a client for OutputService14ProtocolTest (generated query-protocol test fixture).
+type OutputService14ProtocolTest struct {
+ *aws.Service
+}
+
+// NewOutputService14ProtocolTest returns a new OutputService14ProtocolTest client.
+func NewOutputService14ProtocolTest(config *aws.Config) *OutputService14ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice14protocoltest",
+ APIVersion: "", // protocol test fixtures carry no real API version
+ }
+ service.Initialize()
+
+ // Handlers: SigV4 signing plus the query-protocol build/unmarshal pipeline
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(query.Build)
+ service.Handlers.Unmarshal.PushBack(query.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(query.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(query.UnmarshalError)
+
+ return &OutputService14ProtocolTest{service}
+}
+
+// newRequest creates a new request for an OutputService14ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService14ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService14TestCaseOperation1 = "OperationName"
+
+// OutputService14TestCaseOperation1Request generates a request for the OutputService14TestCaseOperation1 operation.
+func (c *OutputService14ProtocolTest) OutputService14TestCaseOperation1Request(input *OutputService14TestShapeOutputService14TestCaseOperation1Input) (req *aws.Request, output *OutputService14TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService14TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService14TestShapeOutputService14TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output) // output is still nil here; req.Data is attached below (codegen quirk)
+ output = &OutputService14TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService14ProtocolTest) OutputService14TestCaseOperation1(input *OutputService14TestShapeOutputService14TestCaseOperation1Input) (*OutputService14TestShapeOutputShape, error) { // sends the request and returns the unmarshaled output
+ req, out := c.OutputService14TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService14TestShapeOutputService14TestCaseOperation1Input struct {
+ metadataOutputService14TestShapeOutputService14TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService14TestShapeOutputService14TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"` // marker field carrying shape-level traits
+}
+
+type OutputService14TestShapeOutputShape struct {
+ Map map[string]*string `locationNameKey:"foo" locationNameValue:"bar" type:"map" flattened:"true"` // flattened map with custom <foo> key and <bar> value element names
+
+ metadataOutputService14TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService14TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+//
+// Tests begin here
+//
+
+func TestOutputService1ProtocolTestScalarMembersCase1(t *testing.T) { // scalar-member unmarshaling; response XML restored (tags were stripped from this patch; FooNum element name per upstream — TODO confirm against the shape)
+ svc := NewOutputService1ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><Str>myname</Str><FooNum>123</FooNum><FalseBool>false</FalseBool><TrueBool>true</TrueBool><Float>1.2</Float><Double>1.3</Double><Long>200</Long><Char>a</Char><Timestamp>2015-01-25T08:00:00Z</Timestamp></OperationNameResult><ResponseMetadata><RequestId>request-id</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService1TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "a", *out.Char)
+ assert.Equal(t, 1.3, *out.Double)
+ assert.Equal(t, false, *out.FalseBool)
+ assert.Equal(t, 1.2, *out.Float)
+ assert.Equal(t, int64(200), *out.Long)
+ assert.Equal(t, int64(123), *out.Num)
+ assert.Equal(t, "myname", *out.Str)
+ assert.Equal(t, time.Unix(1.4221728e+09, 0).UTC().String(), out.Timestamp.String())
+ assert.Equal(t, true, *out.TrueBool)
+
+}
+
+func TestOutputService2ProtocolTestNotAllMembersInResponseCase1(t *testing.T) { // partial-response unmarshaling; response XML restored (tags were stripped from this patch)
+ svc := NewOutputService2ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><Str>myname</Str></OperationNameResult><ResponseMetadata><RequestId>request-id</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService2TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "myname", *out.Str)
+
+}
+
+func TestOutputService3ProtocolTestBlobCase1(t *testing.T) { // base64 blob unmarshaling; response XML restored (tags were stripped from this patch)
+ svc := NewOutputService3ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><Blob>dmFsdWU=</Blob></OperationNameResult><ResponseMetadata><RequestId>requestid</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService3TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "value", string(out.Blob)) // "dmFsdWU=" is base64 for "value"
+
+}
+
+func TestOutputService4ProtocolTestListsCase1(t *testing.T) { // default (non-flattened) list unmarshaling; response XML restored (tags were stripped from this patch)
+ svc := NewOutputService4ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><ListMember><member>abc</member><member>123</member></ListMember></OperationNameResult><ResponseMetadata><RequestId>requestid</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService4TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "abc", *out.ListMember[0])
+ assert.Equal(t, "123", *out.ListMember[1])
+
+}
+
+func TestOutputService5ProtocolTestListWithCustomMemberNameCase1(t *testing.T) {
+ svc := NewOutputService5ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("- abc
- 123
requestid"))
+ req, out := svc.OutputService5TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "abc", *out.ListMember[0])
+ assert.Equal(t, "123", *out.ListMember[1])
+
+}
+
+func TestOutputService6ProtocolTestFlattenedListCase1(t *testing.T) { // flattened-list unmarshaling; response XML restored (tags were stripped from this patch)
+ svc := NewOutputService6ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><ListMember>abc</ListMember><ListMember>123</ListMember></OperationNameResult><ResponseMetadata><RequestId>requestid</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService6TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "abc", *out.ListMember[0])
+ assert.Equal(t, "123", *out.ListMember[1])
+
+}
+
+func TestOutputService7ProtocolTestFlattenedSingleElementListCase1(t *testing.T) { // single-element flattened list; response XML restored (tags were stripped from this patch)
+ svc := NewOutputService7ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><ListMember>abc</ListMember></OperationNameResult><ResponseMetadata><RequestId>requestid</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService7TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "abc", *out.ListMember[0])
+
+}
+
+func TestOutputService8ProtocolTestListOfStructuresCase1(t *testing.T) {
+ svc := NewOutputService8ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("firstfoofirstbarfirstbazsecondfoosecondbarsecondbaz
requestid"))
+ req, out := svc.OutputService8TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "firstbar", *out.List[0].Bar)
+ assert.Equal(t, "firstbaz", *out.List[0].Baz)
+ assert.Equal(t, "firstfoo", *out.List[0].Foo)
+ assert.Equal(t, "secondbar", *out.List[1].Bar)
+ assert.Equal(t, "secondbaz", *out.List[1].Baz)
+ assert.Equal(t, "secondfoo", *out.List[1].Foo)
+
+}
+
+func TestOutputService9ProtocolTestFlattenedListOfStructuresCase1(t *testing.T) {
+ svc := NewOutputService9ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("firstfoofirstbarfirstbaz
secondfoosecondbarsecondbaz
requestid"))
+ req, out := svc.OutputService9TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "firstbar", *out.List[0].Bar)
+ assert.Equal(t, "firstbaz", *out.List[0].Baz)
+ assert.Equal(t, "firstfoo", *out.List[0].Foo)
+ assert.Equal(t, "secondbar", *out.List[1].Bar)
+ assert.Equal(t, "secondbaz", *out.List[1].Baz)
+ assert.Equal(t, "secondfoo", *out.List[1].Foo)
+
+}
+
+func TestOutputService10ProtocolTestFlattenedListWithLocationNameCase1(t *testing.T) { // flattened list renamed via locationNameList:"NamedList"; response XML restored (tags were stripped from this patch)
+ svc := NewOutputService10ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><NamedList>a</NamedList><NamedList>b</NamedList></OperationNameResult><ResponseMetadata><RequestId>requestid</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService10TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "a", *out.List[0])
+ assert.Equal(t, "b", *out.List[1])
+
+}
+
+func TestOutputService11ProtocolTestNormalMapCase1(t *testing.T) { // non-flattened map (<entry><key>/<value>); response XML restored (payload was stripped from this patch)
+ svc := NewOutputService11ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><Map><entry><key>qux</key><value><foo>bar</foo></value></entry><entry><key>baz</key><value><foo>bam</foo></value></entry></Map></OperationNameResult><ResponseMetadata><RequestId>requestid</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService11TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "bam", *out.Map["baz"].Foo)
+ assert.Equal(t, "bar", *out.Map["qux"].Foo)
+
+}
+
+func TestOutputService12ProtocolTestFlattenedMapCase1(t *testing.T) { // flattened map (repeated <Map><key>/<value>); response XML restored (payload was stripped from this patch)
+ svc := NewOutputService12ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><Map><key>qux</key><value>bar</value></Map><Map><key>baz</key><value>bam</value></Map></OperationNameResult><ResponseMetadata><RequestId>requestid</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService12TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "bam", *out.Map["baz"])
+ assert.Equal(t, "bar", *out.Map["qux"])
+
+}
+
+func TestOutputService13ProtocolTestFlattenedMapInShapeDefinitionCase1(t *testing.T) { // flattened map renamed to <Attribute> with <Name>/<Value>; response XML restored (tags were stripped from this patch)
+ svc := NewOutputService13ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><Attribute><Name>qux</Name><Value>bar</Value></Attribute></OperationNameResult><ResponseMetadata><RequestId>requestid</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService13TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "bar", *out.Map["qux"])
+
+}
+
+func TestOutputService14ProtocolTestNamedMapCase1(t *testing.T) { // flattened map with custom <foo> key / <bar> value names; response XML restored (payload was stripped from this patch)
+ svc := NewOutputService14ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("<OperationNameResponse><OperationNameResult><Map><foo>qux</foo><bar>bar</bar></Map><Map><foo>baz</foo><bar>bam</bar></Map></OperationNameResult><ResponseMetadata><RequestId>requestid</RequestId></ResponseMetadata></OperationNameResponse>"))
+ req, out := svc.OutputService14TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ query.UnmarshalMeta(req)
+ query.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "bam", *out.Map["baz"])
+ assert.Equal(t, "bar", *out.Map["qux"])
+
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/rest/build.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/rest/build.go
new file mode 100644
index 0000000..cd5eef2
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/rest/build.go
@@ -0,0 +1,212 @@
+// Package rest provides RESTful serialisation of AWS requests and responses.
+package rest
+
+import (
+ "bytes"
+ "encoding/base64"
+ "fmt"
+ "io"
+ "net/url"
+ "path"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+// RFC822 returns an RFC822 formatted timestamp for AWS protocols
+const RFC822 = "Mon, 2 Jan 2006 15:04:05 GMT"
+
+// Whether the byte value can be sent without escaping in AWS URLs
+var noEscape [256]bool
+
+// init marks every byte in the RFC 3986 unreserved set
+// (ALPHA / DIGIT / "-" / "." / "_" / "~") as safe to send unescaped.
+func init() {
+	for i := 0; i < len(noEscape); i++ {
+		// AWS expects every character except these to be escaped
+		noEscape[i] = (i >= 'A' && i <= 'Z') ||
+			(i >= 'a' && i <= 'z') ||
+			(i >= '0' && i <= '9') ||
+			i == '-' ||
+			i == '.' ||
+			i == '_' ||
+			i == '~'
+	}
+}
+
+// Build builds the REST component of a service request.
+// It first serialises location-tagged members (URI, query string, headers)
+// from r.Params, then sets the request body from the payload field, if any.
+// Errors are reported through r.Error.
+func Build(r *aws.Request) {
+	if r.ParamsFilled() {
+		v := reflect.ValueOf(r.Params).Elem()
+		buildLocationElements(r, v)
+		buildBody(r, v)
+	}
+}
+
+// buildLocationElements walks the exported fields of the params struct and
+// serialises each member tagged with a `location` into the request URI,
+// query string, or headers. It stops at the first serialisation error.
+func buildLocationElements(r *aws.Request, v reflect.Value) {
+	query := r.HTTPRequest.URL.Query()
+
+	for i := 0; i < v.NumField(); i++ {
+		m := v.Field(i)
+		// Skip unexported fields (name starts with a lower-case letter).
+		if n := v.Type().Field(i).Name; n[0:1] == strings.ToLower(n[0:1]) {
+			continue
+		}
+
+		if m.IsValid() {
+			field := v.Type().Field(i)
+			// Wire name defaults to the Go field name unless overridden.
+			name := field.Tag.Get("locationName")
+			if name == "" {
+				name = field.Name
+			}
+			if m.Kind() == reflect.Ptr {
+				m = m.Elem()
+			}
+			// A nil pointer has no value to serialise.
+			if !m.IsValid() {
+				continue
+			}
+
+			switch field.Tag.Get("location") {
+			case "headers": // header maps
+				buildHeaderMap(r, m, field.Tag.Get("locationName"))
+			case "header":
+				buildHeader(r, m, name)
+			case "uri":
+				buildURI(r, m, name)
+			case "querystring":
+				buildQueryString(r, m, name, query)
+			}
+		}
+		// Abort on the first error recorded by a build helper.
+		if r.Error != nil {
+			return
+		}
+	}
+
+	r.HTTPRequest.URL.RawQuery = query.Encode()
+	updatePath(r.HTTPRequest.URL, r.HTTPRequest.URL.Path)
+}
+
+// buildBody sets the HTTP request body from the shape's payload member, if
+// one is declared via the SDKShapeTraits `payload` tag. Structure payloads
+// are left to the protocol body marshalers; streaming payloads
+// (io.ReadSeeker, []byte, string) are written directly. Unknown payload
+// types are reported through r.Error.
+func buildBody(r *aws.Request, v reflect.Value) {
+	if field, ok := v.Type().FieldByName("SDKShapeTraits"); ok {
+		if payloadName := field.Tag.Get("payload"); payloadName != "" {
+			pfield, _ := v.Type().FieldByName(payloadName)
+			if ptag := pfield.Tag.Get("type"); ptag != "" && ptag != "structure" {
+				payload := reflect.Indirect(v.FieldByName(payloadName))
+				if payload.IsValid() && payload.Interface() != nil {
+					switch reader := payload.Interface().(type) {
+					case io.ReadSeeker:
+						r.SetReaderBody(reader)
+					case []byte:
+						r.SetBufferBody(reader)
+					case string:
+						r.SetStringBody(reader)
+					default:
+						r.Error = awserr.New("SerializationError",
+							"failed to encode REST request",
+							fmt.Errorf("unknown payload type %s", payload.Type()))
+					}
+				}
+			}
+		}
+	}
+}
+
+// buildHeader serialises v to a string and adds it to the request headers
+// under name. A nil value is skipped; conversion errors go to r.Error.
+func buildHeader(r *aws.Request, v reflect.Value, name string) {
+	str, err := convertType(v)
+	if err != nil {
+		r.Error = awserr.New("SerializationError", "failed to encode REST request", err)
+	} else if str != nil {
+		r.HTTPRequest.Header.Add(name, *str)
+	}
+}
+
+// buildHeaderMap serialises every entry of the map v into a request header
+// named prefix + key. Conversion errors go to r.Error; nil values are skipped.
+func buildHeaderMap(r *aws.Request, v reflect.Value, prefix string) {
+	for _, key := range v.MapKeys() {
+		str, err := convertType(v.MapIndex(key))
+		if err != nil {
+			r.Error = awserr.New("SerializationError", "failed to encode REST request", err)
+		} else if str != nil {
+			r.HTTPRequest.Header.Add(prefix+key.String(), *str)
+		}
+	}
+}
+
+// buildURI substitutes the serialised value of v for the {name} and {name+}
+// placeholders in the request path. The plain form escapes '/' as well;
+// the greedy "+" form leaves path separators unescaped.
+func buildURI(r *aws.Request, v reflect.Value, name string) {
+	value, err := convertType(v)
+	if err != nil {
+		r.Error = awserr.New("SerializationError", "failed to encode REST request", err)
+	} else if value != nil {
+		uri := r.HTTPRequest.URL.Path
+		uri = strings.Replace(uri, "{"+name+"}", EscapePath(*value, true), -1)
+		uri = strings.Replace(uri, "{"+name+"+}", EscapePath(*value, false), -1)
+		r.HTTPRequest.URL.Path = uri
+	}
+}
+
+// buildQueryString serialises v and sets it as query parameter name in the
+// supplied url.Values. Nil values are skipped; errors go to r.Error.
+func buildQueryString(r *aws.Request, v reflect.Value, name string, query url.Values) {
+	str, err := convertType(v)
+	if err != nil {
+		r.Error = awserr.New("SerializationError", "failed to encode REST request", err)
+	} else if str != nil {
+		query.Set(name, *str)
+	}
+}
+
+// updatePath cleans urlPath and stores the fully formatted URI in url.Opaque.
+// net/url sends Opaque verbatim, so the percent-escaping produced by
+// EscapePath is preserved instead of being re-encoded from url.Path.
+func updatePath(url *url.URL, urlPath string) {
+	scheme, query := url.Scheme, url.RawQuery
+
+	// clean up path
+	urlPath = path.Clean(urlPath)
+
+	// get formatted URL minus scheme so we can build this into Opaque
+	url.Scheme, url.Path, url.RawQuery = "", "", ""
+	s := url.String()
+	url.Scheme = scheme
+	url.RawQuery = query
+
+	// build opaque URI
+	url.Opaque = s + urlPath
+}
+
+// EscapePath escapes part of a URL path in Amazon style: every byte outside
+// the unreserved set recorded in noEscape is percent-encoded using
+// upper-case hex. When encodeSep is false, '/' is passed through unescaped
+// so multi-segment (greedy) path parameters keep their separators.
+func EscapePath(path string, encodeSep bool) string {
+	var buf bytes.Buffer
+	for i := 0; i < len(path); i++ {
+		c := path[i]
+		if noEscape[c] || (c == '/' && !encodeSep) {
+			buf.WriteByte(c)
+		} else {
+			buf.WriteByte('%')
+			buf.WriteString(strings.ToUpper(strconv.FormatUint(uint64(c), 16)))
+		}
+	}
+	return buf.String()
+}
+
+// convertType converts a reflected value to the string form used on the wire
+// (headers, query string, URI). It returns (nil, nil) for nil/invalid values,
+// and an error for unsupported types. []byte is base64-encoded and time.Time
+// is formatted as UTC RFC822, per AWS conventions.
+func convertType(v reflect.Value) (*string, error) {
+	v = reflect.Indirect(v)
+	if !v.IsValid() {
+		return nil, nil
+	}
+
+	var str string
+	switch value := v.Interface().(type) {
+	case string:
+		str = value
+	case []byte:
+		str = base64.StdEncoding.EncodeToString(value)
+	case bool:
+		str = strconv.FormatBool(value)
+	case int64:
+		str = strconv.FormatInt(value, 10)
+	case float64:
+		str = strconv.FormatFloat(value, 'f', -1, 64)
+	case time.Time:
+		str = value.UTC().Format(RFC822)
+	default:
+		err := fmt.Errorf("Unsupported value for param %v (%s)", v.Interface(), v.Type())
+		return nil, err
+	}
+	return &str, nil
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/rest/payload.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/rest/payload.go
new file mode 100644
index 0000000..1f603bb
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/rest/payload.go
@@ -0,0 +1,45 @@
+package rest
+
+import "reflect"
+
+// PayloadMember returns the payload field member of i if there is one, or nil.
+// Only structure-typed payloads are returned; streaming payloads are handled
+// by the body (un)marshalers instead.
+func PayloadMember(i interface{}) interface{} {
+	if i == nil {
+		return nil
+	}
+
+	v := reflect.ValueOf(i).Elem()
+	if !v.IsValid() {
+		return nil
+	}
+	if field, ok := v.Type().FieldByName("SDKShapeTraits"); ok {
+		if payloadName := field.Tag.Get("payload"); payloadName != "" {
+			field, _ := v.Type().FieldByName(payloadName)
+			if field.Tag.Get("type") != "structure" {
+				return nil
+			}
+
+			payload := v.FieldByName(payloadName)
+			// NOTE(review): with `||` this returns the payload whenever it is a
+			// valid value — including a nil pointer; an `&&` would restrict the
+			// nil check to actually apply. Confirm against upstream aws-sdk-go
+			// before changing vendored code.
+			if payload.IsValid() || (payload.Kind() == reflect.Ptr && !payload.IsNil()) {
+				return payload.Interface()
+			}
+		}
+	}
+	return nil
+}
+
+// PayloadType returns the type of a payload field member of i if there is one, or "".
+// The type is read from the `type` tag of the member named by the
+// SDKShapeTraits `payload` tag (e.g. "structure", "blob").
+func PayloadType(i interface{}) string {
+	v := reflect.Indirect(reflect.ValueOf(i))
+	if !v.IsValid() {
+		return ""
+	}
+	if field, ok := v.Type().FieldByName("SDKShapeTraits"); ok {
+		if payloadName := field.Tag.Get("payload"); payloadName != "" {
+			if member, ok := v.Type().FieldByName(payloadName); ok {
+				return member.Tag.Get("type")
+			}
+		}
+	}
+	return ""
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/rest/unmarshal.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/rest/unmarshal.go
new file mode 100644
index 0000000..a4155f1
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/rest/unmarshal.go
@@ -0,0 +1,174 @@
+package rest
+
+import (
+ "encoding/base64"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+// Unmarshal unmarshals the REST component of a response in a REST service.
+// It fills the payload member from the response body first, then any
+// location-tagged (statusCode / header / headers) members of r.Data.
+func Unmarshal(r *aws.Request) {
+	if r.DataFilled() {
+		v := reflect.Indirect(reflect.ValueOf(r.Data))
+		unmarshalBody(r, v)
+		unmarshalLocationElements(r, v)
+	}
+}
+
+// unmarshalBody fills the output shape's payload member from the HTTP
+// response body. []byte and *string payloads are read fully into memory;
+// stream-typed payloads receive the response body itself (adapted via
+// aws.ReadSeekCloser for io.ReadSeeker members). Decoding errors go to r.Error.
+func unmarshalBody(r *aws.Request, v reflect.Value) {
+	if field, ok := v.Type().FieldByName("SDKShapeTraits"); ok {
+		if payloadName := field.Tag.Get("payload"); payloadName != "" {
+			pfield, _ := v.Type().FieldByName(payloadName)
+			// Structure payloads are decoded by the protocol body unmarshaler.
+			if ptag := pfield.Tag.Get("type"); ptag != "" && ptag != "structure" {
+				payload := v.FieldByName(payloadName)
+				if payload.IsValid() {
+					switch payload.Interface().(type) {
+					case []byte:
+						b, err := ioutil.ReadAll(r.HTTPResponse.Body)
+						if err != nil {
+							r.Error = awserr.New("SerializationError", "failed to decode REST response", err)
+						} else {
+							payload.Set(reflect.ValueOf(b))
+						}
+					case *string:
+						b, err := ioutil.ReadAll(r.HTTPResponse.Body)
+						if err != nil {
+							r.Error = awserr.New("SerializationError", "failed to decode REST response", err)
+						} else {
+							str := string(b)
+							payload.Set(reflect.ValueOf(&str))
+						}
+					default:
+						switch payload.Type().String() {
+						case "io.ReadSeeker":
+							// Adapt the plain ReadCloser to the io.ReadSeeker member type.
+							payload.Set(reflect.ValueOf(aws.ReadSeekCloser(r.HTTPResponse.Body)))
+						case "aws.ReadSeekCloser", "io.ReadCloser":
+							payload.Set(reflect.ValueOf(r.HTTPResponse.Body))
+						default:
+							r.Error = awserr.New("SerializationError",
+								"failed to decode REST response",
+								fmt.Errorf("unknown payload type %s", payload.Type()))
+						}
+					}
+				}
+			}
+		}
+	}
+}
+
+// unmarshalLocationElements fills location-tagged ("statusCode", "header",
+// "headers") members of the output shape from the HTTP response, stopping at
+// the first decoding error.
+func unmarshalLocationElements(r *aws.Request, v reflect.Value) {
+	for i := 0; i < v.NumField(); i++ {
+		m, field := v.Field(i), v.Type().Field(i)
+		// Skip unexported fields (name starts with a lower-case letter).
+		if n := field.Name; n[0:1] == strings.ToLower(n[0:1]) {
+			continue
+		}
+
+		if m.IsValid() {
+			// Wire name defaults to the Go field name unless overridden.
+			name := field.Tag.Get("locationName")
+			if name == "" {
+				name = field.Name
+			}
+
+			switch field.Tag.Get("location") {
+			case "statusCode":
+				unmarshalStatusCode(m, r.HTTPResponse.StatusCode)
+			case "header":
+				err := unmarshalHeader(m, r.HTTPResponse.Header.Get(name))
+				if err != nil {
+					r.Error = awserr.New("SerializationError", "failed to decode REST response", err)
+					break
+				}
+			case "headers":
+				prefix := field.Tag.Get("locationName")
+				err := unmarshalHeaderMap(m, r.HTTPResponse.Header, prefix)
+				if err != nil {
+					r.Error = awserr.New("SerializationError", "failed to decode REST response", err)
+					break
+				}
+			}
+		}
+		// Abort on the first error recorded above.
+		if r.Error != nil {
+			return
+		}
+	}
+}
+
+// unmarshalStatusCode stores the HTTP status code into v when v is an *int64
+// member; other types are silently ignored.
+func unmarshalStatusCode(v reflect.Value, statusCode int) {
+	if !v.IsValid() {
+		return
+	}
+
+	switch v.Interface().(type) {
+	case *int64:
+		s := int64(statusCode)
+		v.Set(reflect.ValueOf(&s))
+	}
+}
+
+// unmarshalHeaderMap collects every response header whose canonical key
+// starts with prefix (compared case-insensitively) into a map[string]*string
+// member, keyed by the remainder of the header name after the prefix. Only
+// the first value of each header is kept; non-string map types are ignored.
+// It always returns nil.
+func unmarshalHeaderMap(r reflect.Value, headers http.Header, prefix string) error {
+	switch r.Interface().(type) {
+	case map[string]*string: // we only support string map value types
+		out := map[string]*string{}
+		for k, v := range headers {
+			k = http.CanonicalHeaderKey(k)
+			if strings.HasPrefix(strings.ToLower(k), strings.ToLower(prefix)) {
+				out[k[len(prefix):]] = &v[0]
+			}
+		}
+		r.Set(reflect.ValueOf(out))
+	}
+	return nil
+}
+
+// unmarshalHeader parses a single header value into the member v according to
+// its Go type: string, bool, int64, float64, base64-encoded bytes, or RFC822
+// time. Empty headers are skipped for non-string members; unsupported types
+// return an error.
+func unmarshalHeader(v reflect.Value, header string) error {
+	if !v.IsValid() || (header == "" && v.Elem().Kind() != reflect.String) {
+		return nil
+	}
+
+	switch v.Interface().(type) {
+	case *string:
+		v.Set(reflect.ValueOf(&header))
+	case []byte:
+		b, err := base64.StdEncoding.DecodeString(header)
+		if err != nil {
+			return err
+		}
+		// NOTE(review): this stores a *[]byte into a []byte-typed member, which
+		// would panic in reflect.Value.Set if this case were ever hit; every
+		// other case here matches a pointer type. Likely should be `case *[]byte`
+		// — confirm against upstream aws-sdk-go before changing vendored code.
+		v.Set(reflect.ValueOf(&b))
+	case *bool:
+		b, err := strconv.ParseBool(header)
+		if err != nil {
+			return err
+		}
+		v.Set(reflect.ValueOf(&b))
+	case *int64:
+		i, err := strconv.ParseInt(header, 10, 64)
+		if err != nil {
+			return err
+		}
+		v.Set(reflect.ValueOf(&i))
+	case *float64:
+		f, err := strconv.ParseFloat(header, 64)
+		if err != nil {
+			return err
+		}
+		v.Set(reflect.ValueOf(&f))
+	case *time.Time:
+		t, err := time.Parse(RFC822, header)
+		if err != nil {
+			return err
+		}
+		v.Set(reflect.ValueOf(&t))
+	default:
+		err := fmt.Errorf("Unsupported value for param %v (%s)", v.Interface(), v.Type())
+		return err
+	}
+	return nil
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/restxml/build_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/restxml/build_test.go
new file mode 100644
index 0000000..6fcd3f8
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/restxml/build_test.go
@@ -0,0 +1,2736 @@
+package restxml_test
+
+import (
+ "bytes"
+ "encoding/json"
+ "encoding/xml"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "testing"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/protocol/restxml"
+ "github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil"
+ "github.com/aws/aws-sdk-go/internal/signer/v4"
+ "github.com/aws/aws-sdk-go/internal/util"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ bytes.Buffer // always import bytes
+var _ http.Request
+var _ json.Marshaler
+var _ time.Time
+var _ xmlutil.XMLNode
+var _ xml.Attr
+var _ = ioutil.Discard
+var _ = util.Trim("")
+var _ = url.Values{}
+var _ = io.EOF
+
+// NOTE(review): generated aws-sdk-go protocol-test fixture; regenerate with
+// the SDK's code generator rather than editing by hand.
+
+// InputService1ProtocolTest is a client for InputService1ProtocolTest.
+type InputService1ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new InputService1ProtocolTest client.
+func NewInputService1ProtocolTest(config *aws.Config) *InputService1ProtocolTest {
+	service := &aws.Service{
+		Config:      aws.DefaultConfig.Merge(config),
+		ServiceName: "inputservice1protocoltest",
+		APIVersion:  "2014-01-01",
+	}
+	service.Initialize()
+
+	// Handlers
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &InputService1ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService1ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService1ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opInputService1TestCaseOperation1 = "OperationName"
+
+// InputService1TestCaseOperation1Request generates a request for the InputService1TestCaseOperation1 operation.
+func (c *InputService1ProtocolTest) InputService1TestCaseOperation1Request(input *InputService1TestShapeInputShape) (req *aws.Request, output *InputService1TestShapeInputService1TestCaseOperation1Output) {
+	op := &aws.Operation{
+		Name:       opInputService1TestCaseOperation1,
+		HTTPMethod: "POST",
+		HTTPPath:   "/2014-01-01/hostedzone",
+	}
+
+	if input == nil {
+		input = &InputService1TestShapeInputShape{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &InputService1TestShapeInputService1TestCaseOperation1Output{}
+	req.Data = output
+	return
+}
+
+// InputService1TestCaseOperation1 sends the request and returns the decoded output.
+func (c *InputService1ProtocolTest) InputService1TestCaseOperation1(input *InputService1TestShapeInputShape) (*InputService1TestShapeInputService1TestCaseOperation1Output, error) {
+	req, out := c.InputService1TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+const opInputService1TestCaseOperation2 = "OperationName"
+
+// InputService1TestCaseOperation2Request generates a request for the InputService1TestCaseOperation2 operation.
+func (c *InputService1ProtocolTest) InputService1TestCaseOperation2Request(input *InputService1TestShapeInputShape) (req *aws.Request, output *InputService1TestShapeInputService1TestCaseOperation2Output) {
+	op := &aws.Operation{
+		Name:       opInputService1TestCaseOperation2,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/2014-01-01/hostedzone",
+	}
+
+	if input == nil {
+		input = &InputService1TestShapeInputShape{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &InputService1TestShapeInputService1TestCaseOperation2Output{}
+	req.Data = output
+	return
+}
+
+// InputService1TestCaseOperation2 sends the request and returns the decoded output.
+func (c *InputService1ProtocolTest) InputService1TestCaseOperation2(input *InputService1TestShapeInputShape) (*InputService1TestShapeInputService1TestCaseOperation2Output, error) {
+	req, out := c.InputService1TestCaseOperation2Request(input)
+	err := req.Send()
+	return out, err
+}
+
+type InputService1TestShapeInputService1TestCaseOperation1Output struct {
+	metadataInputService1TestShapeInputService1TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService1TestShapeInputService1TestCaseOperation1Output struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService1TestShapeInputService1TestCaseOperation2Output struct {
+	metadataInputService1TestShapeInputService1TestCaseOperation2Output `json:"-" xml:"-"`
+}
+
+type metadataInputService1TestShapeInputService1TestCaseOperation2Output struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService1TestShapeInputShape struct {
+	Description *string `type:"string"`
+
+	Name *string `type:"string"`
+
+	metadataInputService1TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService1TestShapeInputShape struct {
+	SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+// NOTE(review): generated aws-sdk-go protocol-test fixture; regenerate with
+// the SDK's code generator rather than editing by hand.
+
+// InputService2ProtocolTest is a client for InputService2ProtocolTest.
+type InputService2ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new InputService2ProtocolTest client.
+func NewInputService2ProtocolTest(config *aws.Config) *InputService2ProtocolTest {
+	service := &aws.Service{
+		Config:      aws.DefaultConfig.Merge(config),
+		ServiceName: "inputservice2protocoltest",
+		APIVersion:  "2014-01-01",
+	}
+	service.Initialize()
+
+	// Handlers
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &InputService2ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService2ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService2ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opInputService2TestCaseOperation1 = "OperationName"
+
+// InputService2TestCaseOperation1Request generates a request for the InputService2TestCaseOperation1 operation.
+func (c *InputService2ProtocolTest) InputService2TestCaseOperation1Request(input *InputService2TestShapeInputShape) (req *aws.Request, output *InputService2TestShapeInputService2TestCaseOperation1Output) {
+	op := &aws.Operation{
+		Name:       opInputService2TestCaseOperation1,
+		HTTPMethod: "POST",
+		HTTPPath:   "/2014-01-01/hostedzone",
+	}
+
+	if input == nil {
+		input = &InputService2TestShapeInputShape{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &InputService2TestShapeInputService2TestCaseOperation1Output{}
+	req.Data = output
+	return
+}
+
+// InputService2TestCaseOperation1 sends the request and returns the decoded output.
+func (c *InputService2ProtocolTest) InputService2TestCaseOperation1(input *InputService2TestShapeInputShape) (*InputService2TestShapeInputService2TestCaseOperation1Output, error) {
+	req, out := c.InputService2TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+type InputService2TestShapeInputService2TestCaseOperation1Output struct {
+	metadataInputService2TestShapeInputService2TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService2TestShapeInputService2TestCaseOperation1Output struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService2TestShapeInputShape struct {
+	First *bool `type:"boolean"`
+
+	Fourth *int64 `type:"integer"`
+
+	Second *bool `type:"boolean"`
+
+	Third *float64 `type:"float"`
+
+	metadataInputService2TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService2TestShapeInputShape struct {
+	SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+// NOTE(review): generated aws-sdk-go protocol-test fixture; regenerate with
+// the SDK's code generator rather than editing by hand.
+
+// InputService3ProtocolTest is a client for InputService3ProtocolTest.
+type InputService3ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new InputService3ProtocolTest client.
+func NewInputService3ProtocolTest(config *aws.Config) *InputService3ProtocolTest {
+	service := &aws.Service{
+		Config:      aws.DefaultConfig.Merge(config),
+		ServiceName: "inputservice3protocoltest",
+		APIVersion:  "2014-01-01",
+	}
+	service.Initialize()
+
+	// Handlers
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &InputService3ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService3ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService3ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opInputService3TestCaseOperation1 = "OperationName"
+
+// InputService3TestCaseOperation1Request generates a request for the InputService3TestCaseOperation1 operation.
+func (c *InputService3ProtocolTest) InputService3TestCaseOperation1Request(input *InputService3TestShapeInputShape) (req *aws.Request, output *InputService3TestShapeInputService3TestCaseOperation1Output) {
+	op := &aws.Operation{
+		Name:       opInputService3TestCaseOperation1,
+		HTTPMethod: "POST",
+		HTTPPath:   "/2014-01-01/hostedzone",
+	}
+
+	if input == nil {
+		input = &InputService3TestShapeInputShape{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &InputService3TestShapeInputService3TestCaseOperation1Output{}
+	req.Data = output
+	return
+}
+
+// InputService3TestCaseOperation1 sends the request and returns the decoded output.
+func (c *InputService3ProtocolTest) InputService3TestCaseOperation1(input *InputService3TestShapeInputShape) (*InputService3TestShapeInputService3TestCaseOperation1Output, error) {
+	req, out := c.InputService3TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+type InputService3TestShapeInputService3TestCaseOperation1Output struct {
+	metadataInputService3TestShapeInputService3TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService3TestShapeInputService3TestCaseOperation1Output struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService3TestShapeInputShape struct {
+	Description *string `type:"string"`
+
+	SubStructure *InputService3TestShapeSubStructure `type:"structure"`
+
+	metadataInputService3TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService3TestShapeInputShape struct {
+	SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+type InputService3TestShapeSubStructure struct {
+	Bar *string `type:"string"`
+
+	Foo *string `type:"string"`
+
+	metadataInputService3TestShapeSubStructure `json:"-" xml:"-"`
+}
+
+type metadataInputService3TestShapeSubStructure struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// NOTE(review): generated aws-sdk-go protocol-test fixture; regenerate with
+// the SDK's code generator rather than editing by hand.
+
+// InputService4ProtocolTest is a client for InputService4ProtocolTest.
+type InputService4ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new InputService4ProtocolTest client.
+func NewInputService4ProtocolTest(config *aws.Config) *InputService4ProtocolTest {
+	service := &aws.Service{
+		Config:      aws.DefaultConfig.Merge(config),
+		ServiceName: "inputservice4protocoltest",
+		APIVersion:  "2014-01-01",
+	}
+	service.Initialize()
+
+	// Handlers
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &InputService4ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService4ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService4ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opInputService4TestCaseOperation1 = "OperationName"
+
+// InputService4TestCaseOperation1Request generates a request for the InputService4TestCaseOperation1 operation.
+func (c *InputService4ProtocolTest) InputService4TestCaseOperation1Request(input *InputService4TestShapeInputShape) (req *aws.Request, output *InputService4TestShapeInputService4TestCaseOperation1Output) {
+	op := &aws.Operation{
+		Name:       opInputService4TestCaseOperation1,
+		HTTPMethod: "POST",
+		HTTPPath:   "/2014-01-01/hostedzone",
+	}
+
+	if input == nil {
+		input = &InputService4TestShapeInputShape{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &InputService4TestShapeInputService4TestCaseOperation1Output{}
+	req.Data = output
+	return
+}
+
+// InputService4TestCaseOperation1 sends the request and returns the decoded output.
+func (c *InputService4ProtocolTest) InputService4TestCaseOperation1(input *InputService4TestShapeInputShape) (*InputService4TestShapeInputService4TestCaseOperation1Output, error) {
+	req, out := c.InputService4TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+type InputService4TestShapeInputService4TestCaseOperation1Output struct {
+	metadataInputService4TestShapeInputService4TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService4TestShapeInputService4TestCaseOperation1Output struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService4TestShapeInputShape struct {
+	Description *string `type:"string"`
+
+	SubStructure *InputService4TestShapeSubStructure `type:"structure"`
+
+	metadataInputService4TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService4TestShapeInputShape struct {
+	SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+type InputService4TestShapeSubStructure struct {
+	Bar *string `type:"string"`
+
+	Foo *string `type:"string"`
+
+	metadataInputService4TestShapeSubStructure `json:"-" xml:"-"`
+}
+
+type metadataInputService4TestShapeSubStructure struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// NOTE(review): generated aws-sdk-go protocol-test fixture; regenerate with
+// the SDK's code generator rather than editing by hand.
+
+// InputService5ProtocolTest is a client for InputService5ProtocolTest.
+type InputService5ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new InputService5ProtocolTest client.
+func NewInputService5ProtocolTest(config *aws.Config) *InputService5ProtocolTest {
+	service := &aws.Service{
+		Config:      aws.DefaultConfig.Merge(config),
+		ServiceName: "inputservice5protocoltest",
+		APIVersion:  "2014-01-01",
+	}
+	service.Initialize()
+
+	// Handlers
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &InputService5ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService5ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService5ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opInputService5TestCaseOperation1 = "OperationName"
+
+// InputService5TestCaseOperation1Request generates a request for the InputService5TestCaseOperation1 operation.
+func (c *InputService5ProtocolTest) InputService5TestCaseOperation1Request(input *InputService5TestShapeInputShape) (req *aws.Request, output *InputService5TestShapeInputService5TestCaseOperation1Output) {
+	op := &aws.Operation{
+		Name:       opInputService5TestCaseOperation1,
+		HTTPMethod: "POST",
+		HTTPPath:   "/2014-01-01/hostedzone",
+	}
+
+	if input == nil {
+		input = &InputService5TestShapeInputShape{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &InputService5TestShapeInputService5TestCaseOperation1Output{}
+	req.Data = output
+	return
+}
+
+// InputService5TestCaseOperation1 sends the request and returns the decoded output.
+func (c *InputService5ProtocolTest) InputService5TestCaseOperation1(input *InputService5TestShapeInputShape) (*InputService5TestShapeInputService5TestCaseOperation1Output, error) {
+	req, out := c.InputService5TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+type InputService5TestShapeInputService5TestCaseOperation1Output struct {
+	metadataInputService5TestShapeInputService5TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService5TestShapeInputService5TestCaseOperation1Output struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService5TestShapeInputShape struct {
+	ListParam []*string `type:"list"`
+
+	metadataInputService5TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService5TestShapeInputShape struct {
+	SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+// NOTE(review): generated aws-sdk-go protocol-test fixture; regenerate with
+// the SDK's code generator rather than editing by hand.
+
+// InputService6ProtocolTest is a client for InputService6ProtocolTest.
+type InputService6ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new InputService6ProtocolTest client.
+func NewInputService6ProtocolTest(config *aws.Config) *InputService6ProtocolTest {
+	service := &aws.Service{
+		Config:      aws.DefaultConfig.Merge(config),
+		ServiceName: "inputservice6protocoltest",
+		APIVersion:  "2014-01-01",
+	}
+	service.Initialize()
+
+	// Handlers
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &InputService6ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService6ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService6ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opInputService6TestCaseOperation1 = "OperationName"
+
+// InputService6TestCaseOperation1Request generates a request for the InputService6TestCaseOperation1 operation.
+func (c *InputService6ProtocolTest) InputService6TestCaseOperation1Request(input *InputService6TestShapeInputShape) (req *aws.Request, output *InputService6TestShapeInputService6TestCaseOperation1Output) {
+	op := &aws.Operation{
+		Name:       opInputService6TestCaseOperation1,
+		HTTPMethod: "POST",
+		HTTPPath:   "/2014-01-01/hostedzone",
+	}
+
+	if input == nil {
+		input = &InputService6TestShapeInputShape{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &InputService6TestShapeInputService6TestCaseOperation1Output{}
+	req.Data = output
+	return
+}
+
+// InputService6TestCaseOperation1 sends the request and returns the decoded output.
+func (c *InputService6ProtocolTest) InputService6TestCaseOperation1(input *InputService6TestShapeInputShape) (*InputService6TestShapeInputService6TestCaseOperation1Output, error) {
+	req, out := c.InputService6TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+type InputService6TestShapeInputService6TestCaseOperation1Output struct {
+	metadataInputService6TestShapeInputService6TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService6TestShapeInputService6TestCaseOperation1Output struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService6TestShapeInputShape struct {
+	ListParam []*string `locationName:"AlternateName" locationNameList:"NotMember" type:"list"`
+
+	metadataInputService6TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService6TestShapeInputShape struct {
+	SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+// NOTE(review): generated aws-sdk-go protocol-test fixture; regenerate with
+// the SDK's code generator rather than editing by hand.
+
+// InputService7ProtocolTest is a client for InputService7ProtocolTest.
+type InputService7ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new InputService7ProtocolTest client.
+func NewInputService7ProtocolTest(config *aws.Config) *InputService7ProtocolTest {
+	service := &aws.Service{
+		Config:      aws.DefaultConfig.Merge(config),
+		ServiceName: "inputservice7protocoltest",
+		APIVersion:  "2014-01-01",
+	}
+	service.Initialize()
+
+	// Handlers
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &InputService7ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService7ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService7ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opInputService7TestCaseOperation1 = "OperationName"
+
+// InputService7TestCaseOperation1Request generates a request for the InputService7TestCaseOperation1 operation.
+func (c *InputService7ProtocolTest) InputService7TestCaseOperation1Request(input *InputService7TestShapeInputShape) (req *aws.Request, output *InputService7TestShapeInputService7TestCaseOperation1Output) {
+	op := &aws.Operation{
+		Name:       opInputService7TestCaseOperation1,
+		HTTPMethod: "POST",
+		HTTPPath:   "/2014-01-01/hostedzone",
+	}
+
+	if input == nil {
+		input = &InputService7TestShapeInputShape{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &InputService7TestShapeInputService7TestCaseOperation1Output{}
+	req.Data = output
+	return
+}
+
+// InputService7TestCaseOperation1 sends the request and returns the decoded output.
+func (c *InputService7ProtocolTest) InputService7TestCaseOperation1(input *InputService7TestShapeInputShape) (*InputService7TestShapeInputService7TestCaseOperation1Output, error) {
+	req, out := c.InputService7TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+type InputService7TestShapeInputService7TestCaseOperation1Output struct {
+	metadataInputService7TestShapeInputService7TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService7TestShapeInputService7TestCaseOperation1Output struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService7TestShapeInputShape struct {
+	ListParam []*string `type:"list" flattened:"true"`
+
+	metadataInputService7TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService7TestShapeInputShape struct {
+	SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+// InputService8ProtocolTest is a client for InputService8ProtocolTest.
+type InputService8ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService8ProtocolTest client.
+func NewInputService8ProtocolTest(config *aws.Config) *InputService8ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice8protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService8ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService8ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService8ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService8TestCaseOperation1 = "OperationName"
+
+// InputService8TestCaseOperation1Request generates a request for the InputService8TestCaseOperation1 operation.
+func (c *InputService8ProtocolTest) InputService8TestCaseOperation1Request(input *InputService8TestShapeInputShape) (req *aws.Request, output *InputService8TestShapeInputService8TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService8TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/2014-01-01/hostedzone",
+ }
+
+ if input == nil {
+ input = &InputService8TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService8TestShapeInputService8TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService8ProtocolTest) InputService8TestCaseOperation1(input *InputService8TestShapeInputShape) (*InputService8TestShapeInputService8TestCaseOperation1Output, error) {
+ req, out := c.InputService8TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService8TestShapeInputService8TestCaseOperation1Output struct {
+ metadataInputService8TestShapeInputService8TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService8TestShapeInputService8TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService8TestShapeInputShape struct {
+ ListParam []*string `locationName:"item" type:"list" flattened:"true"`
+
+ metadataInputService8TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService8TestShapeInputShape struct {
+ SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+// InputService9ProtocolTest is a client for InputService9ProtocolTest.
+type InputService9ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService9ProtocolTest client.
+func NewInputService9ProtocolTest(config *aws.Config) *InputService9ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice9protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService9ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService9ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService9ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService9TestCaseOperation1 = "OperationName"
+
+// InputService9TestCaseOperation1Request generates a request for the InputService9TestCaseOperation1 operation.
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation1Request(input *InputService9TestShapeInputShape) (req *aws.Request, output *InputService9TestShapeInputService9TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService9TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/2014-01-01/hostedzone",
+ }
+
+ if input == nil {
+ input = &InputService9TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService9TestShapeInputService9TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService9ProtocolTest) InputService9TestCaseOperation1(input *InputService9TestShapeInputShape) (*InputService9TestShapeInputService9TestCaseOperation1Output, error) {
+ req, out := c.InputService9TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService9TestShapeInputService9TestCaseOperation1Output struct {
+ metadataInputService9TestShapeInputService9TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeInputService9TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService9TestShapeInputShape struct {
+ ListParam []*InputService9TestShapeSingleFieldStruct `locationName:"item" type:"list" flattened:"true"`
+
+ metadataInputService9TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeInputShape struct {
+ SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+type InputService9TestShapeSingleFieldStruct struct {
+ Element *string `locationName:"value" type:"string"`
+
+ metadataInputService9TestShapeSingleFieldStruct `json:"-" xml:"-"`
+}
+
+type metadataInputService9TestShapeSingleFieldStruct struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService10ProtocolTest is a client for InputService10ProtocolTest.
+type InputService10ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService10ProtocolTest client.
+func NewInputService10ProtocolTest(config *aws.Config) *InputService10ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice10protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService10ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService10ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService10ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService10TestCaseOperation1 = "OperationName"
+
+// InputService10TestCaseOperation1Request generates a request for the InputService10TestCaseOperation1 operation.
+func (c *InputService10ProtocolTest) InputService10TestCaseOperation1Request(input *InputService10TestShapeInputShape) (req *aws.Request, output *InputService10TestShapeInputService10TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService10TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/2014-01-01/hostedzone",
+ }
+
+ if input == nil {
+ input = &InputService10TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService10TestShapeInputService10TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService10ProtocolTest) InputService10TestCaseOperation1(input *InputService10TestShapeInputShape) (*InputService10TestShapeInputService10TestCaseOperation1Output, error) {
+ req, out := c.InputService10TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService10TestShapeInputService10TestCaseOperation1Output struct {
+ metadataInputService10TestShapeInputService10TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService10TestShapeInputService10TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService10TestShapeInputShape struct {
+ StructureParam *InputService10TestShapeStructureShape `type:"structure"`
+
+ metadataInputService10TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService10TestShapeInputShape struct {
+ SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+type InputService10TestShapeStructureShape struct {
+ B []byte `locationName:"b" type:"blob"`
+
+ T *time.Time `locationName:"t" type:"timestamp" timestampFormat:"iso8601"`
+
+ metadataInputService10TestShapeStructureShape `json:"-" xml:"-"`
+}
+
+type metadataInputService10TestShapeStructureShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService11ProtocolTest is a client for InputService11ProtocolTest.
+type InputService11ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService11ProtocolTest client.
+func NewInputService11ProtocolTest(config *aws.Config) *InputService11ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice11protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService11ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService11ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService11ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService11TestCaseOperation1 = "OperationName"
+
+// InputService11TestCaseOperation1Request generates a request for the InputService11TestCaseOperation1 operation.
+func (c *InputService11ProtocolTest) InputService11TestCaseOperation1Request(input *InputService11TestShapeInputShape) (req *aws.Request, output *InputService11TestShapeInputService11TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService11TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ }
+
+ if input == nil {
+ input = &InputService11TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService11TestShapeInputService11TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService11ProtocolTest) InputService11TestCaseOperation1(input *InputService11TestShapeInputShape) (*InputService11TestShapeInputService11TestCaseOperation1Output, error) {
+ req, out := c.InputService11TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService11TestShapeInputService11TestCaseOperation1Output struct {
+ metadataInputService11TestShapeInputService11TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService11TestShapeInputService11TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService11TestShapeInputShape struct {
+ Foo map[string]*string `location:"headers" locationName:"x-foo-" type:"map"`
+
+ metadataInputService11TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService11TestShapeInputShape struct {
+ SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+// InputService12ProtocolTest is a client for InputService12ProtocolTest.
+type InputService12ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService12ProtocolTest client.
+func NewInputService12ProtocolTest(config *aws.Config) *InputService12ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice12protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService12ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService12ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService12ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService12TestCaseOperation1 = "OperationName"
+
+// InputService12TestCaseOperation1Request generates a request for the InputService12TestCaseOperation1 operation.
+func (c *InputService12ProtocolTest) InputService12TestCaseOperation1Request(input *InputService12TestShapeInputShape) (req *aws.Request, output *InputService12TestShapeInputService12TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService12TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ }
+
+ if input == nil {
+ input = &InputService12TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService12TestShapeInputService12TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService12ProtocolTest) InputService12TestCaseOperation1(input *InputService12TestShapeInputShape) (*InputService12TestShapeInputService12TestCaseOperation1Output, error) {
+ req, out := c.InputService12TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService12TestShapeInputService12TestCaseOperation1Output struct {
+ metadataInputService12TestShapeInputService12TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService12TestShapeInputService12TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService12TestShapeInputShape struct {
+ Foo *string `locationName:"foo" type:"string"`
+
+ metadataInputService12TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService12TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure" payload:"Foo"`
+}
+
+// InputService13ProtocolTest is a client for InputService13ProtocolTest.
+type InputService13ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService13ProtocolTest client.
+func NewInputService13ProtocolTest(config *aws.Config) *InputService13ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice13protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService13ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService13ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService13ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService13TestCaseOperation1 = "OperationName"
+
+// InputService13TestCaseOperation1Request generates a request for the InputService13TestCaseOperation1 operation.
+func (c *InputService13ProtocolTest) InputService13TestCaseOperation1Request(input *InputService13TestShapeInputShape) (req *aws.Request, output *InputService13TestShapeInputService13TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService13TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ }
+
+ if input == nil {
+ input = &InputService13TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService13TestShapeInputService13TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService13ProtocolTest) InputService13TestCaseOperation1(input *InputService13TestShapeInputShape) (*InputService13TestShapeInputService13TestCaseOperation1Output, error) {
+ req, out := c.InputService13TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService13TestCaseOperation2 = "OperationName"
+
+// InputService13TestCaseOperation2Request generates a request for the InputService13TestCaseOperation2 operation.
+func (c *InputService13ProtocolTest) InputService13TestCaseOperation2Request(input *InputService13TestShapeInputShape) (req *aws.Request, output *InputService13TestShapeInputService13TestCaseOperation2Output) {
+ op := &aws.Operation{
+ Name: opInputService13TestCaseOperation2,
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ }
+
+ if input == nil {
+ input = &InputService13TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService13TestShapeInputService13TestCaseOperation2Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService13ProtocolTest) InputService13TestCaseOperation2(input *InputService13TestShapeInputShape) (*InputService13TestShapeInputService13TestCaseOperation2Output, error) {
+ req, out := c.InputService13TestCaseOperation2Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService13TestShapeInputService13TestCaseOperation1Output struct {
+ metadataInputService13TestShapeInputService13TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService13TestShapeInputService13TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService13TestShapeInputService13TestCaseOperation2Output struct {
+ metadataInputService13TestShapeInputService13TestCaseOperation2Output `json:"-" xml:"-"`
+}
+
+type metadataInputService13TestShapeInputService13TestCaseOperation2Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService13TestShapeInputShape struct {
+ Foo []byte `locationName:"foo" type:"blob"`
+
+ metadataInputService13TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService13TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure" payload:"Foo"`
+}
+
+// InputService14ProtocolTest is a client for InputService14ProtocolTest.
+type InputService14ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService14ProtocolTest client.
+func NewInputService14ProtocolTest(config *aws.Config) *InputService14ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice14protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService14ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService14ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService14ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService14TestCaseOperation1 = "OperationName"
+
+// InputService14TestCaseOperation1Request generates a request for the InputService14TestCaseOperation1 operation.
+func (c *InputService14ProtocolTest) InputService14TestCaseOperation1Request(input *InputService14TestShapeInputShape) (req *aws.Request, output *InputService14TestShapeInputService14TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService14TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ }
+
+ if input == nil {
+ input = &InputService14TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService14TestShapeInputService14TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService14ProtocolTest) InputService14TestCaseOperation1(input *InputService14TestShapeInputShape) (*InputService14TestShapeInputService14TestCaseOperation1Output, error) {
+ req, out := c.InputService14TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService14TestCaseOperation2 = "OperationName"
+
+// InputService14TestCaseOperation2Request generates a request for the InputService14TestCaseOperation2 operation.
+func (c *InputService14ProtocolTest) InputService14TestCaseOperation2Request(input *InputService14TestShapeInputShape) (req *aws.Request, output *InputService14TestShapeInputService14TestCaseOperation2Output) {
+ op := &aws.Operation{
+ Name: opInputService14TestCaseOperation2,
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ }
+
+ if input == nil {
+ input = &InputService14TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService14TestShapeInputService14TestCaseOperation2Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService14ProtocolTest) InputService14TestCaseOperation2(input *InputService14TestShapeInputShape) (*InputService14TestShapeInputService14TestCaseOperation2Output, error) {
+ req, out := c.InputService14TestCaseOperation2Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService14TestCaseOperation3 = "OperationName"
+
+// InputService14TestCaseOperation3Request generates a request for the InputService14TestCaseOperation3 operation.
+func (c *InputService14ProtocolTest) InputService14TestCaseOperation3Request(input *InputService14TestShapeInputShape) (req *aws.Request, output *InputService14TestShapeInputService14TestCaseOperation3Output) {
+ op := &aws.Operation{
+ Name: opInputService14TestCaseOperation3,
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ }
+
+ if input == nil {
+ input = &InputService14TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService14TestShapeInputService14TestCaseOperation3Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService14ProtocolTest) InputService14TestCaseOperation3(input *InputService14TestShapeInputShape) (*InputService14TestShapeInputService14TestCaseOperation3Output, error) {
+ req, out := c.InputService14TestCaseOperation3Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService14TestShapeFooShape struct {
+ Baz *string `locationName:"baz" type:"string"`
+
+ metadataInputService14TestShapeFooShape `json:"-" xml:"-"`
+}
+
+type metadataInputService14TestShapeFooShape struct {
+ SDKShapeTraits bool `locationName:"foo" type:"structure"`
+}
+
+type InputService14TestShapeInputService14TestCaseOperation1Output struct {
+ metadataInputService14TestShapeInputService14TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService14TestShapeInputService14TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService14TestShapeInputService14TestCaseOperation2Output struct {
+ metadataInputService14TestShapeInputService14TestCaseOperation2Output `json:"-" xml:"-"`
+}
+
+type metadataInputService14TestShapeInputService14TestCaseOperation2Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService14TestShapeInputService14TestCaseOperation3Output struct {
+ metadataInputService14TestShapeInputService14TestCaseOperation3Output `json:"-" xml:"-"`
+}
+
+type metadataInputService14TestShapeInputService14TestCaseOperation3Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService14TestShapeInputShape struct {
+ Foo *InputService14TestShapeFooShape `locationName:"foo" type:"structure"`
+
+ metadataInputService14TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService14TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure" payload:"Foo"`
+}
+
+// InputService15ProtocolTest is a client for InputService15ProtocolTest.
+type InputService15ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService15ProtocolTest client.
+func NewInputService15ProtocolTest(config *aws.Config) *InputService15ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice15protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService15ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService15ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService15ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService15TestCaseOperation1 = "OperationName"
+
+// InputService15TestCaseOperation1Request generates a request for the InputService15TestCaseOperation1 operation.
+func (c *InputService15ProtocolTest) InputService15TestCaseOperation1Request(input *InputService15TestShapeInputShape) (req *aws.Request, output *InputService15TestShapeInputService15TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService15TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ }
+
+ if input == nil {
+ input = &InputService15TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService15TestShapeInputService15TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService15ProtocolTest) InputService15TestCaseOperation1(input *InputService15TestShapeInputShape) (*InputService15TestShapeInputService15TestCaseOperation1Output, error) {
+ req, out := c.InputService15TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService15TestShapeGrant struct {
+ Grantee *InputService15TestShapeGrantee `type:"structure"`
+
+ metadataInputService15TestShapeGrant `json:"-" xml:"-"`
+}
+
+type metadataInputService15TestShapeGrant struct {
+ SDKShapeTraits bool `locationName:"Grant" type:"structure"`
+}
+
+type InputService15TestShapeGrantee struct {
+ EmailAddress *string `type:"string"`
+
+ Type *string `locationName:"xsi:type" type:"string" xmlAttribute:"true"`
+
+ metadataInputService15TestShapeGrantee `json:"-" xml:"-"`
+}
+
+type metadataInputService15TestShapeGrantee struct {
+ SDKShapeTraits bool `type:"structure" xmlPrefix:"xsi" xmlURI:"http://www.w3.org/2001/XMLSchema-instance"`
+}
+
+type InputService15TestShapeInputService15TestCaseOperation1Output struct {
+ metadataInputService15TestShapeInputService15TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService15TestShapeInputService15TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService15TestShapeInputShape struct {
+ Grant *InputService15TestShapeGrant `locationName:"Grant" type:"structure"`
+
+ metadataInputService15TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService15TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure" payload:"Grant"`
+}
+
+// InputService16ProtocolTest is a client for InputService16ProtocolTest.
+type InputService16ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService16ProtocolTest client.
+func NewInputService16ProtocolTest(config *aws.Config) *InputService16ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice16protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService16ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService16ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService16ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService16TestCaseOperation1 = "OperationName"
+
+// InputService16TestCaseOperation1Request generates a request for the InputService16TestCaseOperation1 operation.
+func (c *InputService16ProtocolTest) InputService16TestCaseOperation1Request(input *InputService16TestShapeInputShape) (req *aws.Request, output *InputService16TestShapeInputService16TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService16TestCaseOperation1,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}/{Key+}",
+ }
+
+ if input == nil {
+ input = &InputService16TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService16TestShapeInputService16TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService16ProtocolTest) InputService16TestCaseOperation1(input *InputService16TestShapeInputShape) (*InputService16TestShapeInputService16TestCaseOperation1Output, error) {
+ req, out := c.InputService16TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService16TestShapeInputService16TestCaseOperation1Output struct {
+ metadataInputService16TestShapeInputService16TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService16TestShapeInputService16TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService16TestShapeInputShape struct {
+ Bucket *string `location:"uri" type:"string"`
+
+ Key *string `location:"uri" type:"string"`
+
+ metadataInputService16TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService16TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService17ProtocolTest is a client for InputService17ProtocolTest.
+type InputService17ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService17ProtocolTest client.
+func NewInputService17ProtocolTest(config *aws.Config) *InputService17ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice17protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService17ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService17ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService17ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService17TestCaseOperation1 = "OperationName"
+
+// InputService17TestCaseOperation1Request generates a request for the InputService17TestCaseOperation1 operation.
+func (c *InputService17ProtocolTest) InputService17TestCaseOperation1Request(input *InputService17TestShapeInputShape) (req *aws.Request, output *InputService17TestShapeInputService17TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService17TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/path",
+ }
+
+ if input == nil {
+ input = &InputService17TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService17TestShapeInputService17TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService17ProtocolTest) InputService17TestCaseOperation1(input *InputService17TestShapeInputShape) (*InputService17TestShapeInputService17TestCaseOperation1Output, error) {
+ req, out := c.InputService17TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService17TestCaseOperation2 = "OperationName"
+
+// InputService17TestCaseOperation2Request generates a request for the InputService17TestCaseOperation2 operation.
+func (c *InputService17ProtocolTest) InputService17TestCaseOperation2Request(input *InputService17TestShapeInputShape) (req *aws.Request, output *InputService17TestShapeInputService17TestCaseOperation2Output) {
+ op := &aws.Operation{
+ Name: opInputService17TestCaseOperation2,
+ HTTPMethod: "POST",
+ HTTPPath: "/path?abc=mno",
+ }
+
+ if input == nil {
+ input = &InputService17TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService17TestShapeInputService17TestCaseOperation2Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService17ProtocolTest) InputService17TestCaseOperation2(input *InputService17TestShapeInputShape) (*InputService17TestShapeInputService17TestCaseOperation2Output, error) {
+ req, out := c.InputService17TestCaseOperation2Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService17TestShapeInputService17TestCaseOperation1Output struct {
+ metadataInputService17TestShapeInputService17TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService17TestShapeInputService17TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService17TestShapeInputService17TestCaseOperation2Output struct {
+ metadataInputService17TestShapeInputService17TestCaseOperation2Output `json:"-" xml:"-"`
+}
+
+type metadataInputService17TestShapeInputService17TestCaseOperation2Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService17TestShapeInputShape struct {
+ Foo *string `location:"querystring" locationName:"param-name" type:"string"`
+
+ metadataInputService17TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService17TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService18ProtocolTest is a client for InputService18ProtocolTest.
+type InputService18ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService18ProtocolTest client.
+func NewInputService18ProtocolTest(config *aws.Config) *InputService18ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice18protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService18ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService18ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService18ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService18TestCaseOperation1 = "OperationName"
+
+// InputService18TestCaseOperation1Request generates a request for the InputService18TestCaseOperation1 operation.
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation1Request(input *InputService18TestShapeInputShape) (req *aws.Request, output *InputService18TestShapeInputService18TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService18TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/path",
+ }
+
+ if input == nil {
+ input = &InputService18TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService18TestShapeInputService18TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation1(input *InputService18TestShapeInputShape) (*InputService18TestShapeInputService18TestCaseOperation1Output, error) {
+ req, out := c.InputService18TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService18TestCaseOperation2 = "OperationName"
+
+// InputService18TestCaseOperation2Request generates a request for the InputService18TestCaseOperation2 operation.
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation2Request(input *InputService18TestShapeInputShape) (req *aws.Request, output *InputService18TestShapeInputService18TestCaseOperation2Output) {
+ op := &aws.Operation{
+ Name: opInputService18TestCaseOperation2,
+ HTTPMethod: "POST",
+ HTTPPath: "/path",
+ }
+
+ if input == nil {
+ input = &InputService18TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService18TestShapeInputService18TestCaseOperation2Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation2(input *InputService18TestShapeInputShape) (*InputService18TestShapeInputService18TestCaseOperation2Output, error) {
+ req, out := c.InputService18TestCaseOperation2Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService18TestCaseOperation3 = "OperationName"
+
+// InputService18TestCaseOperation3Request generates a request for the InputService18TestCaseOperation3 operation.
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation3Request(input *InputService18TestShapeInputShape) (req *aws.Request, output *InputService18TestShapeInputService18TestCaseOperation3Output) {
+ op := &aws.Operation{
+ Name: opInputService18TestCaseOperation3,
+ HTTPMethod: "POST",
+ HTTPPath: "/path",
+ }
+
+ if input == nil {
+ input = &InputService18TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService18TestShapeInputService18TestCaseOperation3Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation3(input *InputService18TestShapeInputShape) (*InputService18TestShapeInputService18TestCaseOperation3Output, error) {
+ req, out := c.InputService18TestCaseOperation3Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService18TestCaseOperation4 = "OperationName"
+
+// InputService18TestCaseOperation4Request generates a request for the InputService18TestCaseOperation4 operation.
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation4Request(input *InputService18TestShapeInputShape) (req *aws.Request, output *InputService18TestShapeInputService18TestCaseOperation4Output) {
+ op := &aws.Operation{
+ Name: opInputService18TestCaseOperation4,
+ HTTPMethod: "POST",
+ HTTPPath: "/path",
+ }
+
+ if input == nil {
+ input = &InputService18TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService18TestShapeInputService18TestCaseOperation4Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation4(input *InputService18TestShapeInputShape) (*InputService18TestShapeInputService18TestCaseOperation4Output, error) {
+ req, out := c.InputService18TestCaseOperation4Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService18TestCaseOperation5 = "OperationName"
+
+// InputService18TestCaseOperation5Request generates a request for the InputService18TestCaseOperation5 operation.
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation5Request(input *InputService18TestShapeInputShape) (req *aws.Request, output *InputService18TestShapeInputService18TestCaseOperation5Output) {
+ op := &aws.Operation{
+ Name: opInputService18TestCaseOperation5,
+ HTTPMethod: "POST",
+ HTTPPath: "/path",
+ }
+
+ if input == nil {
+ input = &InputService18TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService18TestShapeInputService18TestCaseOperation5Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation5(input *InputService18TestShapeInputShape) (*InputService18TestShapeInputService18TestCaseOperation5Output, error) {
+ req, out := c.InputService18TestCaseOperation5Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opInputService18TestCaseOperation6 = "OperationName"
+
+// InputService18TestCaseOperation6Request generates a request for the InputService18TestCaseOperation6 operation.
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation6Request(input *InputService18TestShapeInputShape) (req *aws.Request, output *InputService18TestShapeInputService18TestCaseOperation6Output) {
+ op := &aws.Operation{
+ Name: opInputService18TestCaseOperation6,
+ HTTPMethod: "POST",
+ HTTPPath: "/path",
+ }
+
+ if input == nil {
+ input = &InputService18TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService18TestShapeInputService18TestCaseOperation6Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService18ProtocolTest) InputService18TestCaseOperation6(input *InputService18TestShapeInputShape) (*InputService18TestShapeInputService18TestCaseOperation6Output, error) {
+ req, out := c.InputService18TestCaseOperation6Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService18TestShapeInputService18TestCaseOperation1Output struct {
+ metadataInputService18TestShapeInputService18TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService18TestShapeInputService18TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService18TestShapeInputService18TestCaseOperation2Output struct {
+ metadataInputService18TestShapeInputService18TestCaseOperation2Output `json:"-" xml:"-"`
+}
+
+type metadataInputService18TestShapeInputService18TestCaseOperation2Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService18TestShapeInputService18TestCaseOperation3Output struct {
+ metadataInputService18TestShapeInputService18TestCaseOperation3Output `json:"-" xml:"-"`
+}
+
+type metadataInputService18TestShapeInputService18TestCaseOperation3Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService18TestShapeInputService18TestCaseOperation4Output struct {
+ metadataInputService18TestShapeInputService18TestCaseOperation4Output `json:"-" xml:"-"`
+}
+
+type metadataInputService18TestShapeInputService18TestCaseOperation4Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService18TestShapeInputService18TestCaseOperation5Output struct {
+ metadataInputService18TestShapeInputService18TestCaseOperation5Output `json:"-" xml:"-"`
+}
+
+type metadataInputService18TestShapeInputService18TestCaseOperation5Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService18TestShapeInputService18TestCaseOperation6Output struct {
+ metadataInputService18TestShapeInputService18TestCaseOperation6Output `json:"-" xml:"-"`
+}
+
+type metadataInputService18TestShapeInputService18TestCaseOperation6Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService18TestShapeInputShape struct {
+ RecursiveStruct *InputService18TestShapeRecursiveStructType `type:"structure"`
+
+ metadataInputService18TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService18TestShapeInputShape struct {
+ SDKShapeTraits bool `locationName:"OperationRequest" type:"structure" xmlURI:"https://foo/"`
+}
+
+type InputService18TestShapeRecursiveStructType struct {
+ NoRecurse *string `type:"string"`
+
+ RecursiveList []*InputService18TestShapeRecursiveStructType `type:"list"`
+
+ RecursiveMap map[string]*InputService18TestShapeRecursiveStructType `type:"map"`
+
+ RecursiveStruct *InputService18TestShapeRecursiveStructType `type:"structure"`
+
+ metadataInputService18TestShapeRecursiveStructType `json:"-" xml:"-"`
+}
+
+type metadataInputService18TestShapeRecursiveStructType struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// InputService19ProtocolTest is a client for InputService19ProtocolTest.
+type InputService19ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new InputService19ProtocolTest client.
+func NewInputService19ProtocolTest(config *aws.Config) *InputService19ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "inputservice19protocoltest",
+ APIVersion: "2014-01-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &InputService19ProtocolTest{service}
+}
+
+// newRequest creates a new request for a InputService19ProtocolTest operation and runs any
+// custom request initialization.
+func (c *InputService19ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opInputService19TestCaseOperation1 = "OperationName"
+
+// InputService19TestCaseOperation1Request generates a request for the InputService19TestCaseOperation1 operation.
+func (c *InputService19ProtocolTest) InputService19TestCaseOperation1Request(input *InputService19TestShapeInputShape) (req *aws.Request, output *InputService19TestShapeInputService19TestCaseOperation1Output) {
+ op := &aws.Operation{
+ Name: opInputService19TestCaseOperation1,
+ HTTPMethod: "POST",
+ HTTPPath: "/path",
+ }
+
+ if input == nil {
+ input = &InputService19TestShapeInputShape{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &InputService19TestShapeInputService19TestCaseOperation1Output{}
+ req.Data = output
+ return
+}
+
+func (c *InputService19ProtocolTest) InputService19TestCaseOperation1(input *InputService19TestShapeInputShape) (*InputService19TestShapeInputService19TestCaseOperation1Output, error) {
+ req, out := c.InputService19TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type InputService19TestShapeInputService19TestCaseOperation1Output struct {
+ metadataInputService19TestShapeInputService19TestCaseOperation1Output `json:"-" xml:"-"`
+}
+
+type metadataInputService19TestShapeInputService19TestCaseOperation1Output struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type InputService19TestShapeInputShape struct {
+ TimeArgInHeader *time.Time `location:"header" locationName:"x-amz-timearg" type:"timestamp" timestampFormat:"rfc822"`
+
+ metadataInputService19TestShapeInputShape `json:"-" xml:"-"`
+}
+
+type metadataInputService19TestShapeInputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+//
+// Tests begin here
+//
+
+func TestInputService1ProtocolTestBasicXMLSerializationCase1(t *testing.T) {
+ svc := NewInputService1ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService1TestShapeInputShape{
+ Description: aws.String("bar"),
+ Name: aws.String("foo"),
+ }
+ req, _ := svc.InputService1TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`barfoo`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService1ProtocolTestBasicXMLSerializationCase2(t *testing.T) {
+ svc := NewInputService1ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService1TestShapeInputShape{
+ Description: aws.String("bar"),
+ Name: aws.String("foo"),
+ }
+ req, _ := svc.InputService1TestCaseOperation2Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`barfoo`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService2ProtocolTestSerializeOtherScalarTypesCase1(t *testing.T) {
+ svc := NewInputService2ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService2TestShapeInputShape{
+ First: aws.Boolean(true),
+ Fourth: aws.Long(3),
+ Second: aws.Boolean(false),
+ Third: aws.Double(1.2),
+ }
+ req, _ := svc.InputService2TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`true3false1.2`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService3ProtocolTestNestedStructuresCase1(t *testing.T) {
+ svc := NewInputService3ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService3TestShapeInputShape{
+ Description: aws.String("baz"),
+ SubStructure: &InputService3TestShapeSubStructure{
+ Bar: aws.String("b"),
+ Foo: aws.String("a"),
+ },
+ }
+ req, _ := svc.InputService3TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`bazba`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService4ProtocolTestNestedStructuresCase1(t *testing.T) {
+ svc := NewInputService4ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService4TestShapeInputShape{
+ Description: aws.String("baz"),
+ SubStructure: &InputService4TestShapeSubStructure{},
+ }
+ req, _ := svc.InputService4TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`baz`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService5ProtocolTestNonFlattenedListsCase1(t *testing.T) {
+ svc := NewInputService5ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService5TestShapeInputShape{
+ ListParam: []*string{
+ aws.String("one"),
+ aws.String("two"),
+ aws.String("three"),
+ },
+ }
+ req, _ := svc.InputService5TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`onetwothree`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService6ProtocolTestNonFlattenedListsWithLocationNameCase1(t *testing.T) {
+ svc := NewInputService6ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService6TestShapeInputShape{
+ ListParam: []*string{
+ aws.String("one"),
+ aws.String("two"),
+ aws.String("three"),
+ },
+ }
+ req, _ := svc.InputService6TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`onetwothree`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService7ProtocolTestFlattenedListsCase1(t *testing.T) {
+ svc := NewInputService7ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService7TestShapeInputShape{
+ ListParam: []*string{
+ aws.String("one"),
+ aws.String("two"),
+ aws.String("three"),
+ },
+ }
+ req, _ := svc.InputService7TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`onetwothree`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService8ProtocolTestFlattenedListsWithLocationNameCase1(t *testing.T) {
+ svc := NewInputService8ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService8TestShapeInputShape{
+ ListParam: []*string{
+ aws.String("one"),
+ aws.String("two"),
+ aws.String("three"),
+ },
+ }
+ req, _ := svc.InputService8TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`- one
- two
- three
`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService9ProtocolTestListOfStructuresCase1(t *testing.T) {
+ svc := NewInputService9ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService9TestShapeInputShape{
+ ListParam: []*InputService9TestShapeSingleFieldStruct{
+ {
+ Element: aws.String("one"),
+ },
+ {
+ Element: aws.String("two"),
+ },
+ {
+ Element: aws.String("three"),
+ },
+ },
+ }
+ req, _ := svc.InputService9TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`- one
- two
- three
`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService10ProtocolTestBlobAndTimestampShapesCase1(t *testing.T) {
+ svc := NewInputService10ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService10TestShapeInputShape{
+ StructureParam: &InputService10TestShapeStructureShape{
+ B: []byte("foo"),
+ T: aws.Time(time.Unix(1422172800, 0)),
+ },
+ }
+ req, _ := svc.InputService10TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`Zm9v2015-01-25T08:00:00Z`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/2014-01-01/hostedzone", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService11ProtocolTestHeaderMapsCase1(t *testing.T) {
+ svc := NewInputService11ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService11TestShapeInputShape{
+ Foo: map[string]*string{
+ "a": aws.String("b"),
+ "c": aws.String("d"),
+ },
+ }
+ req, _ := svc.InputService11TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+ assert.Equal(t, "b", r.Header.Get("x-foo-a"))
+ assert.Equal(t, "d", r.Header.Get("x-foo-c"))
+
+}
+
+func TestInputService12ProtocolTestStringPayloadCase1(t *testing.T) {
+ svc := NewInputService12ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService12TestShapeInputShape{
+ Foo: aws.String("bar"),
+ }
+ req, _ := svc.InputService12TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`bar`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService13ProtocolTestBlobPayloadCase1(t *testing.T) {
+ svc := NewInputService13ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService13TestShapeInputShape{
+ Foo: []byte("bar"),
+ }
+ req, _ := svc.InputService13TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`bar`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService13ProtocolTestBlobPayloadCase2(t *testing.T) {
+ svc := NewInputService13ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService13TestShapeInputShape{}
+ req, _ := svc.InputService13TestCaseOperation2Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService14ProtocolTestStructurePayloadCase1(t *testing.T) {
+ svc := NewInputService14ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService14TestShapeInputShape{
+ Foo: &InputService14TestShapeFooShape{
+ Baz: aws.String("bar"),
+ },
+ }
+ req, _ := svc.InputService14TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`bar`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService14ProtocolTestStructurePayloadCase2(t *testing.T) {
+ svc := NewInputService14ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService14TestShapeInputShape{
+ Foo: &InputService14TestShapeFooShape{},
+ }
+ req, _ := svc.InputService14TestCaseOperation2Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(``), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService14ProtocolTestStructurePayloadCase3(t *testing.T) {
+ svc := NewInputService14ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService14TestShapeInputShape{}
+ req, _ := svc.InputService14TestCaseOperation3Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService15ProtocolTestXMLAttributeCase1(t *testing.T) {
+ svc := NewInputService15ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService15TestShapeInputShape{
+ Grant: &InputService15TestShapeGrant{
+ Grantee: &InputService15TestShapeGrantee{
+ EmailAddress: aws.String("foo@example.com"),
+ Type: aws.String("CanonicalUser"),
+ },
+ },
+ }
+ req, _ := svc.InputService15TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+ assert.Equal(t, util.Trim(`foo@example.com`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService16ProtocolTestGreedyKeysCase1(t *testing.T) {
+ svc := NewInputService16ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService16TestShapeInputShape{
+ Bucket: aws.String("my/bucket"),
+ Key: aws.String("testing /123"),
+ }
+ req, _ := svc.InputService16TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert URL
+ assert.Equal(t, "https://test/my%2Fbucket/testing%20/123", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService17ProtocolTestOmitsNullQueryParamsButSerializesEmptyStringsCase1(t *testing.T) {
+ svc := NewInputService17ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService17TestShapeInputShape{}
+ req, _ := svc.InputService17TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert URL
+ assert.Equal(t, "https://test/path", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService17ProtocolTestOmitsNullQueryParamsButSerializesEmptyStringsCase2(t *testing.T) {
+ svc := NewInputService17ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService17TestShapeInputShape{
+ Foo: aws.String(""),
+ }
+ req, _ := svc.InputService17TestCaseOperation2Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert URL
+	assert.Equal(t, "https://test/path?abc=mno&param-name=", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService18ProtocolTestRecursiveShapesCase1(t *testing.T) {
+ svc := NewInputService18ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService18TestShapeInputShape{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ NoRecurse: aws.String("foo"),
+ },
+ }
+ req, _ := svc.InputService18TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+	assert.Equal(t, util.Trim(`<OperationRequest xmlns="https://foo/"><RecursiveStruct xmlns="https://foo/"><NoRecurse>foo</NoRecurse></RecursiveStruct></OperationRequest>`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/path", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService18ProtocolTestRecursiveShapesCase2(t *testing.T) {
+ svc := NewInputService18ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService18TestShapeInputShape{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ NoRecurse: aws.String("foo"),
+ },
+ },
+ }
+ req, _ := svc.InputService18TestCaseOperation2Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+	assert.Equal(t, util.Trim(`<OperationRequest xmlns="https://foo/"><RecursiveStruct xmlns="https://foo/"><RecursiveStruct><NoRecurse>foo</NoRecurse></RecursiveStruct></RecursiveStruct></OperationRequest>`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/path", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService18ProtocolTestRecursiveShapesCase3(t *testing.T) {
+ svc := NewInputService18ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService18TestShapeInputShape{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ NoRecurse: aws.String("foo"),
+ },
+ },
+ },
+ },
+ }
+ req, _ := svc.InputService18TestCaseOperation3Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+	assert.Equal(t, util.Trim(`<OperationRequest xmlns="https://foo/"><RecursiveStruct xmlns="https://foo/"><RecursiveStruct><RecursiveStruct><RecursiveStruct><NoRecurse>foo</NoRecurse></RecursiveStruct></RecursiveStruct></RecursiveStruct></RecursiveStruct></OperationRequest>`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/path", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService18ProtocolTestRecursiveShapesCase4(t *testing.T) {
+ svc := NewInputService18ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService18TestShapeInputShape{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ RecursiveList: []*InputService18TestShapeRecursiveStructType{
+ {
+ NoRecurse: aws.String("foo"),
+ },
+ {
+ NoRecurse: aws.String("bar"),
+ },
+ },
+ },
+ }
+ req, _ := svc.InputService18TestCaseOperation4Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+	assert.Equal(t, util.Trim(`<OperationRequest xmlns="https://foo/"><RecursiveStruct xmlns="https://foo/"><RecursiveList><member><NoRecurse>foo</NoRecurse></member><member><NoRecurse>bar</NoRecurse></member></RecursiveList></RecursiveStruct></OperationRequest>`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/path", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService18ProtocolTestRecursiveShapesCase5(t *testing.T) {
+ svc := NewInputService18ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService18TestShapeInputShape{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ RecursiveList: []*InputService18TestShapeRecursiveStructType{
+ {
+ NoRecurse: aws.String("foo"),
+ },
+ {
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ NoRecurse: aws.String("bar"),
+ },
+ },
+ },
+ },
+ }
+ req, _ := svc.InputService18TestCaseOperation5Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+	assert.Equal(t, util.Trim(`<OperationRequest xmlns="https://foo/"><RecursiveStruct xmlns="https://foo/"><RecursiveList><member><NoRecurse>foo</NoRecurse></member><member><RecursiveStruct><NoRecurse>bar</NoRecurse></RecursiveStruct></member></RecursiveList></RecursiveStruct></OperationRequest>`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/path", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService18ProtocolTestRecursiveShapesCase6(t *testing.T) {
+ svc := NewInputService18ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService18TestShapeInputShape{
+ RecursiveStruct: &InputService18TestShapeRecursiveStructType{
+ RecursiveMap: map[string]*InputService18TestShapeRecursiveStructType{
+ "bar": {
+ NoRecurse: aws.String("bar"),
+ },
+ "foo": {
+ NoRecurse: aws.String("foo"),
+ },
+ },
+ },
+ }
+ req, _ := svc.InputService18TestCaseOperation6Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert body
+ assert.NotNil(t, r.Body)
+ body := util.SortXML(r.Body)
+	assert.Equal(t, util.Trim(`<OperationRequest xmlns="https://foo/"><RecursiveStruct xmlns="https://foo/"><RecursiveMap><entry><key>bar</key><value><NoRecurse>bar</NoRecurse></value></entry><entry><key>foo</key><value><NoRecurse>foo</NoRecurse></value></entry></RecursiveMap></RecursiveStruct></OperationRequest>`), util.Trim(string(body)))
+
+ // assert URL
+ assert.Equal(t, "https://test/path", r.URL.String())
+
+ // assert headers
+
+}
+
+func TestInputService19ProtocolTestTimestampInHeaderCase1(t *testing.T) {
+ svc := NewInputService19ProtocolTest(nil)
+ svc.Endpoint = "https://test"
+
+ input := &InputService19TestShapeInputShape{
+ TimeArgInHeader: aws.Time(time.Unix(1422172800, 0)),
+ }
+ req, _ := svc.InputService19TestCaseOperation1Request(input)
+ r := req.HTTPRequest
+
+ // build request
+ restxml.Build(req)
+ assert.NoError(t, req.Error)
+
+ // assert URL
+ assert.Equal(t, "https://test/path", r.URL.String())
+
+ // assert headers
+ assert.Equal(t, "Sun, 25 Jan 2015 08:00:00 GMT", r.Header.Get("x-amz-timearg"))
+
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/restxml/restxml.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/restxml/restxml.go
new file mode 100644
index 0000000..d6cbff4
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/restxml/restxml.go
@@ -0,0 +1,55 @@
+// Package restxml provides RESTful XML serialisation of AWS
+// requests and responses.
+package restxml
+
+//go:generate go run ../../fixtures/protocol/generate.go ../../fixtures/protocol/input/rest-xml.json build_test.go
+//go:generate go run ../../fixtures/protocol/generate.go ../../fixtures/protocol/output/rest-xml.json unmarshal_test.go
+
+import (
+ "bytes"
+ "encoding/xml"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/internal/protocol/query"
+ "github.com/aws/aws-sdk-go/internal/protocol/rest"
+ "github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil"
+)
+
+// Build builds a request payload for the REST XML protocol.
+func Build(r *aws.Request) {
+ rest.Build(r)
+
+ if t := rest.PayloadType(r.Params); t == "structure" || t == "" {
+ var buf bytes.Buffer
+ err := xmlutil.BuildXML(r.Params, xml.NewEncoder(&buf))
+ if err != nil {
+ r.Error = awserr.New("SerializationError", "failed to enode rest XML request", err)
+ return
+ }
+ r.SetBufferBody(buf.Bytes())
+ }
+}
+
+// Unmarshal unmarshals a payload response for the REST XML protocol.
+func Unmarshal(r *aws.Request) {
+ if t := rest.PayloadType(r.Data); t == "structure" || t == "" {
+ defer r.HTTPResponse.Body.Close()
+ decoder := xml.NewDecoder(r.HTTPResponse.Body)
+ err := xmlutil.UnmarshalXML(r.Data, decoder, "")
+ if err != nil {
+ r.Error = awserr.New("SerializationError", "failed to decode REST XML response", err)
+ return
+ }
+ }
+}
+
+// UnmarshalMeta unmarshals response headers for the REST XML protocol.
+func UnmarshalMeta(r *aws.Request) {
+ rest.Unmarshal(r)
+}
+
+// UnmarshalError unmarshals a response error for the REST XML protocol.
+func UnmarshalError(r *aws.Request) {
+ query.UnmarshalError(r)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/restxml/unmarshal_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/restxml/unmarshal_test.go
new file mode 100644
index 0000000..7efb93d
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/restxml/unmarshal_test.go
@@ -0,0 +1,1322 @@
+package restxml_test
+
+import (
+ "bytes"
+ "encoding/json"
+ "encoding/xml"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "testing"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/protocol/restxml"
+ "github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil"
+ "github.com/aws/aws-sdk-go/internal/signer/v4"
+ "github.com/aws/aws-sdk-go/internal/util"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ bytes.Buffer // always import bytes
+var _ http.Request
+var _ json.Marshaler
+var _ time.Time
+var _ xmlutil.XMLNode
+var _ xml.Attr
+var _ = ioutil.Discard
+var _ = util.Trim("")
+var _ = url.Values{}
+var _ = io.EOF
+
+// OutputService1ProtocolTest is a client for OutputService1ProtocolTest.
+type OutputService1ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService1ProtocolTest client.
+func NewOutputService1ProtocolTest(config *aws.Config) *OutputService1ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice1protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &OutputService1ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService1ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService1ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService1TestCaseOperation1 = "OperationName"
+
+// OutputService1TestCaseOperation1Request generates a request for the OutputService1TestCaseOperation1 operation.
+func (c *OutputService1ProtocolTest) OutputService1TestCaseOperation1Request(input *OutputService1TestShapeOutputService1TestCaseOperation1Input) (req *aws.Request, output *OutputService1TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService1TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService1TestShapeOutputService1TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService1TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService1ProtocolTest) OutputService1TestCaseOperation1(input *OutputService1TestShapeOutputService1TestCaseOperation1Input) (*OutputService1TestShapeOutputShape, error) {
+ req, out := c.OutputService1TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+const opOutputService1TestCaseOperation2 = "OperationName"
+
+// OutputService1TestCaseOperation2Request generates a request for the OutputService1TestCaseOperation2 operation.
+func (c *OutputService1ProtocolTest) OutputService1TestCaseOperation2Request(input *OutputService1TestShapeOutputService1TestCaseOperation2Input) (req *aws.Request, output *OutputService1TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService1TestCaseOperation2,
+ }
+
+ if input == nil {
+ input = &OutputService1TestShapeOutputService1TestCaseOperation2Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService1TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService1ProtocolTest) OutputService1TestCaseOperation2(input *OutputService1TestShapeOutputService1TestCaseOperation2Input) (*OutputService1TestShapeOutputShape, error) {
+ req, out := c.OutputService1TestCaseOperation2Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService1TestShapeOutputService1TestCaseOperation1Input struct {
+ metadataOutputService1TestShapeOutputService1TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService1TestShapeOutputService1TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService1TestShapeOutputService1TestCaseOperation2Input struct {
+ metadataOutputService1TestShapeOutputService1TestCaseOperation2Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService1TestShapeOutputService1TestCaseOperation2Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService1TestShapeOutputShape struct {
+ Char *string `type:"character"`
+
+ Double *float64 `type:"double"`
+
+ FalseBool *bool `type:"boolean"`
+
+ Float *float64 `type:"float"`
+
+ ImaHeader *string `location:"header" type:"string"`
+
+ ImaHeaderLocation *string `location:"header" locationName:"X-Foo" type:"string"`
+
+ Long *int64 `type:"long"`
+
+ Num *int64 `locationName:"FooNum" type:"integer"`
+
+ Str *string `type:"string"`
+
+ Timestamp *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+ TrueBool *bool `type:"boolean"`
+
+ metadataOutputService1TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService1TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService2ProtocolTest is a client for OutputService2ProtocolTest.
+type OutputService2ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService2ProtocolTest client.
+func NewOutputService2ProtocolTest(config *aws.Config) *OutputService2ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice2protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &OutputService2ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService2ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService2ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService2TestCaseOperation1 = "OperationName"
+
+// OutputService2TestCaseOperation1Request generates a request for the OutputService2TestCaseOperation1 operation.
+func (c *OutputService2ProtocolTest) OutputService2TestCaseOperation1Request(input *OutputService2TestShapeOutputService2TestCaseOperation1Input) (req *aws.Request, output *OutputService2TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService2TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService2TestShapeOutputService2TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService2TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService2ProtocolTest) OutputService2TestCaseOperation1(input *OutputService2TestShapeOutputService2TestCaseOperation1Input) (*OutputService2TestShapeOutputShape, error) {
+ req, out := c.OutputService2TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService2TestShapeOutputService2TestCaseOperation1Input struct {
+ metadataOutputService2TestShapeOutputService2TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService2TestShapeOutputService2TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService2TestShapeOutputShape struct {
+ Blob []byte `type:"blob"`
+
+ metadataOutputService2TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService2TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService3ProtocolTest is a client for OutputService3ProtocolTest.
+type OutputService3ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService3ProtocolTest client.
+func NewOutputService3ProtocolTest(config *aws.Config) *OutputService3ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice3protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &OutputService3ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService3ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService3ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService3TestCaseOperation1 = "OperationName"
+
+// OutputService3TestCaseOperation1Request generates a request for the OutputService3TestCaseOperation1 operation.
+func (c *OutputService3ProtocolTest) OutputService3TestCaseOperation1Request(input *OutputService3TestShapeOutputService3TestCaseOperation1Input) (req *aws.Request, output *OutputService3TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService3TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService3TestShapeOutputService3TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService3TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService3ProtocolTest) OutputService3TestCaseOperation1(input *OutputService3TestShapeOutputService3TestCaseOperation1Input) (*OutputService3TestShapeOutputShape, error) {
+ req, out := c.OutputService3TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService3TestShapeOutputService3TestCaseOperation1Input struct {
+ metadataOutputService3TestShapeOutputService3TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService3TestShapeOutputService3TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService3TestShapeOutputShape struct {
+ ListMember []*string `type:"list"`
+
+ metadataOutputService3TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService3TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService4ProtocolTest is a client for OutputService4ProtocolTest.
+type OutputService4ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService4ProtocolTest client.
+func NewOutputService4ProtocolTest(config *aws.Config) *OutputService4ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice4protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &OutputService4ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService4ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService4ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService4TestCaseOperation1 = "OperationName"
+
+// OutputService4TestCaseOperation1Request generates a request for the OutputService4TestCaseOperation1 operation.
+func (c *OutputService4ProtocolTest) OutputService4TestCaseOperation1Request(input *OutputService4TestShapeOutputService4TestCaseOperation1Input) (req *aws.Request, output *OutputService4TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService4TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService4TestShapeOutputService4TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService4TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService4ProtocolTest) OutputService4TestCaseOperation1(input *OutputService4TestShapeOutputService4TestCaseOperation1Input) (*OutputService4TestShapeOutputShape, error) {
+ req, out := c.OutputService4TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService4TestShapeOutputService4TestCaseOperation1Input struct {
+ metadataOutputService4TestShapeOutputService4TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService4TestShapeOutputService4TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService4TestShapeOutputShape struct {
+ ListMember []*string `locationNameList:"item" type:"list"`
+
+ metadataOutputService4TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService4TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService5ProtocolTest is a client for OutputService5ProtocolTest.
+type OutputService5ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService5ProtocolTest client.
+func NewOutputService5ProtocolTest(config *aws.Config) *OutputService5ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice5protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &OutputService5ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService5ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService5ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService5TestCaseOperation1 = "OperationName"
+
+// OutputService5TestCaseOperation1Request generates a request for the OutputService5TestCaseOperation1 operation.
+func (c *OutputService5ProtocolTest) OutputService5TestCaseOperation1Request(input *OutputService5TestShapeOutputService5TestCaseOperation1Input) (req *aws.Request, output *OutputService5TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService5TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService5TestShapeOutputService5TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService5TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService5ProtocolTest) OutputService5TestCaseOperation1(input *OutputService5TestShapeOutputService5TestCaseOperation1Input) (*OutputService5TestShapeOutputShape, error) {
+ req, out := c.OutputService5TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService5TestShapeOutputService5TestCaseOperation1Input struct {
+ metadataOutputService5TestShapeOutputService5TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService5TestShapeOutputService5TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService5TestShapeOutputShape struct {
+ ListMember []*string `type:"list" flattened:"true"`
+
+ metadataOutputService5TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService5TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService6ProtocolTest is a client for OutputService6ProtocolTest.
+type OutputService6ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService6ProtocolTest client.
+func NewOutputService6ProtocolTest(config *aws.Config) *OutputService6ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice6protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &OutputService6ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService6ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService6ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService6TestCaseOperation1 = "OperationName"
+
+// OutputService6TestCaseOperation1Request generates a request for the OutputService6TestCaseOperation1 operation.
+func (c *OutputService6ProtocolTest) OutputService6TestCaseOperation1Request(input *OutputService6TestShapeOutputService6TestCaseOperation1Input) (req *aws.Request, output *OutputService6TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService6TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService6TestShapeOutputService6TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService6TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService6ProtocolTest) OutputService6TestCaseOperation1(input *OutputService6TestShapeOutputService6TestCaseOperation1Input) (*OutputService6TestShapeOutputShape, error) {
+ req, out := c.OutputService6TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService6TestShapeOutputService6TestCaseOperation1Input struct {
+ metadataOutputService6TestShapeOutputService6TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService6TestShapeOutputService6TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService6TestShapeOutputShape struct {
+ Map map[string]*OutputService6TestShapeSingleStructure `type:"map"`
+
+ metadataOutputService6TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService6TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService6TestShapeSingleStructure struct {
+ Foo *string `locationName:"foo" type:"string"`
+
+ metadataOutputService6TestShapeSingleStructure `json:"-" xml:"-"`
+}
+
+type metadataOutputService6TestShapeSingleStructure struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService7ProtocolTest is a client for OutputService7ProtocolTest.
+type OutputService7ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService7ProtocolTest client.
+func NewOutputService7ProtocolTest(config *aws.Config) *OutputService7ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice7protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &OutputService7ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService7ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService7ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService7TestCaseOperation1 = "OperationName"
+
+// OutputService7TestCaseOperation1Request generates a request for the OutputService7TestCaseOperation1 operation.
+func (c *OutputService7ProtocolTest) OutputService7TestCaseOperation1Request(input *OutputService7TestShapeOutputService7TestCaseOperation1Input) (req *aws.Request, output *OutputService7TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService7TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService7TestShapeOutputService7TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService7TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService7ProtocolTest) OutputService7TestCaseOperation1(input *OutputService7TestShapeOutputService7TestCaseOperation1Input) (*OutputService7TestShapeOutputShape, error) {
+ req, out := c.OutputService7TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService7TestShapeOutputService7TestCaseOperation1Input struct {
+ metadataOutputService7TestShapeOutputService7TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService7TestShapeOutputService7TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService7TestShapeOutputShape struct {
+ Map map[string]*string `type:"map" flattened:"true"`
+
+ metadataOutputService7TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService7TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService8ProtocolTest is a client for OutputService8ProtocolTest.
+type OutputService8ProtocolTest struct {
+ *aws.Service
+}
+
+// New returns a new OutputService8ProtocolTest client.
+func NewOutputService8ProtocolTest(config *aws.Config) *OutputService8ProtocolTest {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "outputservice8protocoltest",
+ APIVersion: "",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ return &OutputService8ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService8ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService8ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ return req
+}
+
+const opOutputService8TestCaseOperation1 = "OperationName"
+
+// OutputService8TestCaseOperation1Request generates a request for the OutputService8TestCaseOperation1 operation.
+func (c *OutputService8ProtocolTest) OutputService8TestCaseOperation1Request(input *OutputService8TestShapeOutputService8TestCaseOperation1Input) (req *aws.Request, output *OutputService8TestShapeOutputShape) {
+ op := &aws.Operation{
+ Name: opOutputService8TestCaseOperation1,
+ }
+
+ if input == nil {
+ input = &OutputService8TestShapeOutputService8TestCaseOperation1Input{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &OutputService8TestShapeOutputShape{}
+ req.Data = output
+ return
+}
+
+func (c *OutputService8ProtocolTest) OutputService8TestCaseOperation1(input *OutputService8TestShapeOutputService8TestCaseOperation1Input) (*OutputService8TestShapeOutputShape, error) {
+ req, out := c.OutputService8TestCaseOperation1Request(input)
+ err := req.Send()
+ return out, err
+}
+
+type OutputService8TestShapeOutputService8TestCaseOperation1Input struct {
+ metadataOutputService8TestShapeOutputService8TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService8TestShapeOutputService8TestCaseOperation1Input struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type OutputService8TestShapeOutputShape struct {
+ Map map[string]*string `locationNameKey:"foo" locationNameValue:"bar" type:"map"`
+
+ metadataOutputService8TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService8TestShapeOutputShape struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService9ProtocolTest is a client for OutputService9ProtocolTest.
+type OutputService9ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new OutputService9ProtocolTest client.
+func NewOutputService9ProtocolTest(config *aws.Config) *OutputService9ProtocolTest {
+	service := &aws.Service{
+		Config: aws.DefaultConfig.Merge(config),
+		ServiceName: "outputservice9protocoltest",
+		APIVersion: "",
+	}
+	service.Initialize()
+
+	// Handlers: sign with SigV4 and (un)marshal via the restxml protocol.
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &OutputService9ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService9ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService9ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opOutputService9TestCaseOperation1 = "OperationName"
+
+// OutputService9TestCaseOperation1Request generates a request for the OutputService9TestCaseOperation1 operation.
+func (c *OutputService9ProtocolTest) OutputService9TestCaseOperation1Request(input *OutputService9TestShapeOutputService9TestCaseOperation1Input) (req *aws.Request, output *OutputService9TestShapeOutputShape) {
+	op := &aws.Operation{
+		Name: opOutputService9TestCaseOperation1,
+	}
+
+	if input == nil {
+		input = &OutputService9TestShapeOutputService9TestCaseOperation1Input{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &OutputService9TestShapeOutputShape{}
+	req.Data = output
+	return
+}
+
+// OutputService9TestCaseOperation1 sends the request and returns the
+// unmarshaled output shape together with any request error.
+func (c *OutputService9ProtocolTest) OutputService9TestCaseOperation1(input *OutputService9TestShapeOutputService9TestCaseOperation1Input) (*OutputService9TestShapeOutputShape, error) {
+	req, out := c.OutputService9TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+// OutputService9TestShapeOutputService9TestCaseOperation1Input is the
+// operation's (empty) input shape.
+type OutputService9TestShapeOutputService9TestCaseOperation1Input struct {
+	metadataOutputService9TestShapeOutputService9TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService9TestShapeOutputService9TestCaseOperation1Input struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService9TestShapeOutputShape has an XML payload member (Data) plus a
+// header-located member (X-Foo).
+type OutputService9TestShapeOutputShape struct {
+	Data *OutputService9TestShapeSingleStructure `type:"structure"`
+
+	Header *string `location:"header" locationName:"X-Foo" type:"string"`
+
+	metadataOutputService9TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+// The payload tag marks Data as the body member of the shape.
+type metadataOutputService9TestShapeOutputShape struct {
+	SDKShapeTraits bool `type:"structure" payload:"Data"`
+}
+
+// OutputService9TestShapeSingleStructure is the nested payload structure.
+type OutputService9TestShapeSingleStructure struct {
+	Foo *string `type:"string"`
+
+	metadataOutputService9TestShapeSingleStructure `json:"-" xml:"-"`
+}
+
+type metadataOutputService9TestShapeSingleStructure struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService10ProtocolTest is a client for OutputService10ProtocolTest.
+type OutputService10ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new OutputService10ProtocolTest client.
+func NewOutputService10ProtocolTest(config *aws.Config) *OutputService10ProtocolTest {
+	service := &aws.Service{
+		Config: aws.DefaultConfig.Merge(config),
+		ServiceName: "outputservice10protocoltest",
+		APIVersion: "",
+	}
+	service.Initialize()
+
+	// Handlers: sign with SigV4 and (un)marshal via the restxml protocol.
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &OutputService10ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService10ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService10ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opOutputService10TestCaseOperation1 = "OperationName"
+
+// OutputService10TestCaseOperation1Request generates a request for the OutputService10TestCaseOperation1 operation.
+func (c *OutputService10ProtocolTest) OutputService10TestCaseOperation1Request(input *OutputService10TestShapeOutputService10TestCaseOperation1Input) (req *aws.Request, output *OutputService10TestShapeOutputShape) {
+	op := &aws.Operation{
+		Name: opOutputService10TestCaseOperation1,
+	}
+
+	if input == nil {
+		input = &OutputService10TestShapeOutputService10TestCaseOperation1Input{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &OutputService10TestShapeOutputShape{}
+	req.Data = output
+	return
+}
+
+// OutputService10TestCaseOperation1 sends the request and returns the
+// unmarshaled output shape together with any request error.
+func (c *OutputService10ProtocolTest) OutputService10TestCaseOperation1(input *OutputService10TestShapeOutputService10TestCaseOperation1Input) (*OutputService10TestShapeOutputShape, error) {
+	req, out := c.OutputService10TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+// OutputService10TestShapeOutputService10TestCaseOperation1Input is the
+// operation's (empty) input shape.
+type OutputService10TestShapeOutputService10TestCaseOperation1Input struct {
+	metadataOutputService10TestShapeOutputService10TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService10TestShapeOutputService10TestCaseOperation1Input struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService10TestShapeOutputShape carries a raw blob payload (Stream).
+type OutputService10TestShapeOutputShape struct {
+	Stream []byte `type:"blob"`
+
+	metadataOutputService10TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+// The payload tag marks Stream as the body member of the shape.
+type metadataOutputService10TestShapeOutputShape struct {
+	SDKShapeTraits bool `type:"structure" payload:"Stream"`
+}
+
+// OutputService11ProtocolTest is a client for OutputService11ProtocolTest.
+type OutputService11ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new OutputService11ProtocolTest client.
+func NewOutputService11ProtocolTest(config *aws.Config) *OutputService11ProtocolTest {
+	service := &aws.Service{
+		Config: aws.DefaultConfig.Merge(config),
+		ServiceName: "outputservice11protocoltest",
+		APIVersion: "",
+	}
+	service.Initialize()
+
+	// Handlers: sign with SigV4 and (un)marshal via the restxml protocol.
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &OutputService11ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService11ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService11ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opOutputService11TestCaseOperation1 = "OperationName"
+
+// OutputService11TestCaseOperation1Request generates a request for the OutputService11TestCaseOperation1 operation.
+func (c *OutputService11ProtocolTest) OutputService11TestCaseOperation1Request(input *OutputService11TestShapeOutputService11TestCaseOperation1Input) (req *aws.Request, output *OutputService11TestShapeOutputShape) {
+	op := &aws.Operation{
+		Name: opOutputService11TestCaseOperation1,
+	}
+
+	if input == nil {
+		input = &OutputService11TestShapeOutputService11TestCaseOperation1Input{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &OutputService11TestShapeOutputShape{}
+	req.Data = output
+	return
+}
+
+// OutputService11TestCaseOperation1 sends the request and returns the
+// unmarshaled output shape together with any request error.
+func (c *OutputService11ProtocolTest) OutputService11TestCaseOperation1(input *OutputService11TestShapeOutputService11TestCaseOperation1Input) (*OutputService11TestShapeOutputShape, error) {
+	req, out := c.OutputService11TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+// OutputService11TestShapeOutputService11TestCaseOperation1Input is the
+// operation's (empty) input shape.
+type OutputService11TestShapeOutputService11TestCaseOperation1Input struct {
+	metadataOutputService11TestShapeOutputService11TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService11TestShapeOutputService11TestCaseOperation1Input struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService11TestShapeOutputShape exercises scalar members that are all
+// located in HTTP response headers (location:"header").
+type OutputService11TestShapeOutputShape struct {
+	Char *string `location:"header" locationName:"x-char" type:"character"`
+
+	Double *float64 `location:"header" locationName:"x-double" type:"double"`
+
+	FalseBool *bool `location:"header" locationName:"x-false-bool" type:"boolean"`
+
+	Float *float64 `location:"header" locationName:"x-float" type:"float"`
+
+	Integer *int64 `location:"header" locationName:"x-int" type:"integer"`
+
+	Long *int64 `location:"header" locationName:"x-long" type:"long"`
+
+	Str *string `location:"header" locationName:"x-str" type:"string"`
+
+	Timestamp *time.Time `location:"header" locationName:"x-timestamp" type:"timestamp" timestampFormat:"iso8601"`
+
+	TrueBool *bool `location:"header" locationName:"x-true-bool" type:"boolean"`
+
+	metadataOutputService11TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+type metadataOutputService11TestShapeOutputShape struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService12ProtocolTest is a client for OutputService12ProtocolTest.
+type OutputService12ProtocolTest struct {
+	*aws.Service
+}
+
+// New returns a new OutputService12ProtocolTest client.
+func NewOutputService12ProtocolTest(config *aws.Config) *OutputService12ProtocolTest {
+	service := &aws.Service{
+		Config: aws.DefaultConfig.Merge(config),
+		ServiceName: "outputservice12protocoltest",
+		APIVersion: "",
+	}
+	service.Initialize()
+
+	// Handlers: sign with SigV4 and (un)marshal via the restxml protocol.
+	service.Handlers.Sign.PushBack(v4.Sign)
+	service.Handlers.Build.PushBack(restxml.Build)
+	service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+	service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+	service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+	return &OutputService12ProtocolTest{service}
+}
+
+// newRequest creates a new request for a OutputService12ProtocolTest operation and runs any
+// custom request initialization.
+func (c *OutputService12ProtocolTest) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+	req := aws.NewRequest(c.Service, op, params, data)
+
+	return req
+}
+
+const opOutputService12TestCaseOperation1 = "OperationName"
+
+// OutputService12TestCaseOperation1Request generates a request for the OutputService12TestCaseOperation1 operation.
+func (c *OutputService12ProtocolTest) OutputService12TestCaseOperation1Request(input *OutputService12TestShapeOutputService12TestCaseOperation1Input) (req *aws.Request, output *OutputService12TestShapeOutputShape) {
+	op := &aws.Operation{
+		Name: opOutputService12TestCaseOperation1,
+	}
+
+	if input == nil {
+		input = &OutputService12TestShapeOutputService12TestCaseOperation1Input{}
+	}
+
+	req = c.newRequest(op, input, output)
+	output = &OutputService12TestShapeOutputShape{}
+	req.Data = output
+	return
+}
+
+// OutputService12TestCaseOperation1 sends the request and returns the
+// unmarshaled output shape together with any request error.
+func (c *OutputService12ProtocolTest) OutputService12TestCaseOperation1(input *OutputService12TestShapeOutputService12TestCaseOperation1Input) (*OutputService12TestShapeOutputShape, error) {
+	req, out := c.OutputService12TestCaseOperation1Request(input)
+	err := req.Send()
+	return out, err
+}
+
+// OutputService12TestShapeOutputService12TestCaseOperation1Input is the
+// operation's (empty) input shape.
+type OutputService12TestShapeOutputService12TestCaseOperation1Input struct {
+	metadataOutputService12TestShapeOutputService12TestCaseOperation1Input `json:"-" xml:"-"`
+}
+
+type metadataOutputService12TestShapeOutputService12TestCaseOperation1Input struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// OutputService12TestShapeOutputShape has a single string payload member.
+type OutputService12TestShapeOutputShape struct {
+	String *string `type:"string"`
+
+	metadataOutputService12TestShapeOutputShape `json:"-" xml:"-"`
+}
+
+// The payload tag marks String as the body member of the shape.
+type metadataOutputService12TestShapeOutputShape struct {
+	SDKShapeTraits bool `type:"structure" payload:"String"`
+}
+
+//
+// Tests begin here
+//
+
+func TestOutputService1ProtocolTestScalarMembersCase1(t *testing.T) {
+	svc := NewOutputService1ProtocolTest(nil)
+
+	// NOTE(review): this fixture looks like an XML response body whose tags
+	// were stripped in this copy (the assertions below expect individually
+	// parsed members); verify the literal against the upstream test data.
+	buf := bytes.NewReader([]byte("myname123falsetrue1.21.3200a2015-01-25T08:00:00Z"))
+	req, out := svc.OutputService1TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+	req.HTTPResponse.Header.Set("ImaHeader", "test")
+	req.HTTPResponse.Header.Set("X-Foo", "abc")
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "a", *out.Char)
+	assert.Equal(t, 1.3, *out.Double)
+	assert.Equal(t, false, *out.FalseBool)
+	assert.Equal(t, 1.2, *out.Float)
+	assert.Equal(t, "test", *out.ImaHeader)
+	assert.Equal(t, "abc", *out.ImaHeaderLocation)
+	assert.Equal(t, int64(200), *out.Long)
+	assert.Equal(t, int64(123), *out.Num)
+	assert.Equal(t, "myname", *out.Str)
+	assert.Equal(t, time.Unix(1.4221728e+09, 0).UTC().String(), out.Timestamp.String())
+	assert.Equal(t, true, *out.TrueBool)
+
+}
+
+func TestOutputService1ProtocolTestScalarMembersCase2(t *testing.T) {
+	svc := NewOutputService1ProtocolTest(nil)
+
+	// Same shape as case 1 but with an empty <Str> member (asserted as "").
+	// NOTE(review): fixture appears tag-stripped; verify against upstream.
+	buf := bytes.NewReader([]byte("123falsetrue1.21.3200a2015-01-25T08:00:00Z"))
+	req, out := svc.OutputService1TestCaseOperation2Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+	req.HTTPResponse.Header.Set("ImaHeader", "test")
+	req.HTTPResponse.Header.Set("X-Foo", "abc")
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "a", *out.Char)
+	assert.Equal(t, 1.3, *out.Double)
+	assert.Equal(t, false, *out.FalseBool)
+	assert.Equal(t, 1.2, *out.Float)
+	assert.Equal(t, "test", *out.ImaHeader)
+	assert.Equal(t, "abc", *out.ImaHeaderLocation)
+	assert.Equal(t, int64(200), *out.Long)
+	assert.Equal(t, int64(123), *out.Num)
+	assert.Equal(t, "", *out.Str)
+	assert.Equal(t, time.Unix(1.4221728e+09, 0).UTC().String(), out.Timestamp.String())
+	assert.Equal(t, true, *out.TrueBool)
+
+}
+
+func TestOutputService2ProtocolTestBlobCase1(t *testing.T) {
+	svc := NewOutputService2ProtocolTest(nil)
+
+	// "dmFsdWU=" is base64 for "value"; the blob member is decoded on
+	// unmarshal. NOTE(review): fixture appears tag-stripped in this copy.
+	buf := bytes.NewReader([]byte("dmFsdWU="))
+	req, out := svc.OutputService2TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "value", string(out.Blob))
+
+}
+
+func TestOutputService3ProtocolTestListsCase1(t *testing.T) {
+	svc := NewOutputService3ProtocolTest(nil)
+
+	// NOTE(review): fixture appears tag-stripped; verify against upstream.
+	buf := bytes.NewReader([]byte("abc123"))
+	req, out := svc.OutputService3TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "abc", *out.ListMember[0])
+	assert.Equal(t, "123", *out.ListMember[1])
+
+}
+
+func TestOutputService4ProtocolTestListWithCustomMemberNameCase1(t *testing.T) {
+ svc := NewOutputService4ProtocolTest(nil)
+
+ buf := bytes.NewReader([]byte("- abc
- 123
"))
+ req, out := svc.OutputService4TestCaseOperation1Request(nil)
+ req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+ // set headers
+
+ // unmarshal response
+ restxml.UnmarshalMeta(req)
+ restxml.Unmarshal(req)
+ assert.NoError(t, req.Error)
+
+ // assert response
+ assert.NotNil(t, out) // ensure out variable is used
+ assert.Equal(t, "abc", *out.ListMember[0])
+ assert.Equal(t, "123", *out.ListMember[1])
+
+}
+
+func TestOutputService5ProtocolTestFlattenedListCase1(t *testing.T) {
+	svc := NewOutputService5ProtocolTest(nil)
+
+	// NOTE(review): fixture appears tag-stripped; verify against upstream.
+	buf := bytes.NewReader([]byte("abc123"))
+	req, out := svc.OutputService5TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "abc", *out.ListMember[0])
+	assert.Equal(t, "123", *out.ListMember[1])
+
+}
+
+func TestOutputService6ProtocolTestNormalMapCase1(t *testing.T) {
+	svc := NewOutputService6ProtocolTest(nil)
+
+	// NOTE(review): an empty body cannot produce the map entries asserted
+	// below — the XML fixture was evidently lost in this copy; restore it
+	// from upstream before relying on this test.
+	buf := bytes.NewReader([]byte(""))
+	req, out := svc.OutputService6TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "bam", *out.Map["baz"].Foo)
+	assert.Equal(t, "bar", *out.Map["qux"].Foo)
+
+}
+
+func TestOutputService7ProtocolTestFlattenedMapCase1(t *testing.T) {
+	svc := NewOutputService7ProtocolTest(nil)
+
+	// NOTE(review): empty fixture vs. non-empty assertions — see note above
+	// in the normal-map test; likely lost XML body.
+	buf := bytes.NewReader([]byte(""))
+	req, out := svc.OutputService7TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "bam", *out.Map["baz"])
+	assert.Equal(t, "bar", *out.Map["qux"])
+
+}
+
+func TestOutputService8ProtocolTestNamedMapCase1(t *testing.T) {
+	svc := NewOutputService8ProtocolTest(nil)
+
+	// NOTE(review): empty fixture vs. non-empty assertions — likely lost
+	// XML body; restore from upstream.
+	buf := bytes.NewReader([]byte(""))
+	req, out := svc.OutputService8TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "bam", *out.Map["baz"])
+	assert.Equal(t, "bar", *out.Map["qux"])
+
+}
+
+func TestOutputService9ProtocolTestXMLPayloadCase1(t *testing.T) {
+	svc := NewOutputService9ProtocolTest(nil)
+
+	// NOTE(review): fixture appears tag-stripped; verify against upstream.
+	buf := bytes.NewReader([]byte("abc"))
+	req, out := svc.OutputService9TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+	req.HTTPResponse.Header.Set("X-Foo", "baz")
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "abc", *out.Data.Foo)
+	assert.Equal(t, "baz", *out.Header)
+
+}
+
+func TestOutputService10ProtocolTestStreamingPayloadCase1(t *testing.T) {
+	svc := NewOutputService10ProtocolTest(nil)
+
+	// A streaming blob payload is the raw body, so no XML markup is expected.
+	buf := bytes.NewReader([]byte("abc"))
+	req, out := svc.OutputService10TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "abc", string(out.Stream))
+
+}
+
+func TestOutputService11ProtocolTestScalarMembersInHeadersCase1(t *testing.T) {
+	svc := NewOutputService11ProtocolTest(nil)
+
+	// All members live in headers, so an empty body is expected here.
+	buf := bytes.NewReader([]byte(""))
+	req, out := svc.OutputService11TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+	req.HTTPResponse.Header.Set("x-char", "a")
+	req.HTTPResponse.Header.Set("x-double", "1.5")
+	req.HTTPResponse.Header.Set("x-false-bool", "false")
+	req.HTTPResponse.Header.Set("x-float", "1.5")
+	req.HTTPResponse.Header.Set("x-int", "1")
+	req.HTTPResponse.Header.Set("x-long", "100")
+	req.HTTPResponse.Header.Set("x-str", "string")
+	req.HTTPResponse.Header.Set("x-timestamp", "Sun, 25 Jan 2015 08:00:00 GMT")
+	req.HTTPResponse.Header.Set("x-true-bool", "true")
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "a", *out.Char)
+	assert.Equal(t, 1.5, *out.Double)
+	assert.Equal(t, false, *out.FalseBool)
+	assert.Equal(t, 1.5, *out.Float)
+	assert.Equal(t, int64(1), *out.Integer)
+	assert.Equal(t, int64(100), *out.Long)
+	assert.Equal(t, "string", *out.Str)
+	assert.Equal(t, time.Unix(1.4221728e+09, 0).UTC().String(), out.Timestamp.String())
+	assert.Equal(t, true, *out.TrueBool)
+
+}
+
+func TestOutputService12ProtocolTestStringCase1(t *testing.T) {
+	svc := NewOutputService12ProtocolTest(nil)
+
+	// A string payload is the raw body, so no XML markup is expected.
+	buf := bytes.NewReader([]byte("operation result string"))
+	req, out := svc.OutputService12TestCaseOperation1Request(nil)
+	req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
+
+	// set headers
+
+	// unmarshal response
+	restxml.UnmarshalMeta(req)
+	restxml.Unmarshal(req)
+	assert.NoError(t, req.Error)
+
+	// assert response
+	assert.NotNil(t, out) // ensure out variable is used
+	assert.Equal(t, "operation result string", *out.String)
+
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil/build.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil/build.go
new file mode 100644
index 0000000..d3db250
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil/build.go
@@ -0,0 +1,287 @@
+// Package xmlutil provides XML serialisation of AWS requests and responses.
+package xmlutil
+
+import (
+ "encoding/base64"
+ "encoding/xml"
+ "fmt"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// BuildXML will serialize params into an xml.Encoder.
+// Error will be returned if the serialization of any of the params or nested values fails.
+func BuildXML(params interface{}, e *xml.Encoder) error {
+	b := xmlBuilder{encoder: e, namespaces: map[string]string{}}
+	root := NewXMLElement(xml.Name{})
+	if err := b.buildValue(reflect.ValueOf(params), root, ""); err != nil {
+		return err
+	}
+	// Only the first child of the synthetic root is encoded: the nested
+	// range returns on the first element it visits.
+	for _, c := range root.Children {
+		for _, v := range c {
+			return StructToXML(e, v, false)
+		}
+	}
+	return nil
+}
+
+// elemOf dereferences value through any number of pointer indirections and
+// returns the result. A nil pointer dereferences to an invalid (zero)
+// reflect.Value, which callers detect via IsValid.
+func elemOf(value reflect.Value) reflect.Value {
+	for value.Kind() == reflect.Ptr {
+		value = value.Elem()
+	}
+	return value
+}
+
+// An xmlBuilder serializes values from Go code to XML. namespaces records
+// prefix -> URI registrations made while building struct nodes.
+type xmlBuilder struct {
+	encoder    *xml.Encoder
+	namespaces map[string]string
+}
+
+// buildValue generic XMLNode builder for any type. Will build value for their specific type
+// struct, list, map, scalar.
+//
+// Also takes a "type" tag value to set what type a value should be converted to XMLNode as. If
+// type is not provided reflect will be used to determine the value's type.
+func (b *xmlBuilder) buildValue(value reflect.Value, current *XMLNode, tag reflect.StructTag) error {
+	value = elemOf(value)
+	if !value.IsValid() { // no need to handle zero values
+		return nil
+	} else if tag.Get("location") != "" { // don't handle non-body location values
+		return nil
+	}
+
+	// Prefer the explicit "type" tag; otherwise infer from the Go kind.
+	t := tag.Get("type")
+	if t == "" {
+		switch value.Kind() {
+		case reflect.Struct:
+			t = "structure"
+		case reflect.Slice:
+			t = "list"
+		case reflect.Map:
+			t = "map"
+		}
+	}
+
+	switch t {
+	case "structure":
+		// Append the shape-level traits declared on the embedded
+		// SDKShapeTraits field (payload, xmlURI, ...) to the member tag.
+		if field, ok := value.Type().FieldByName("SDKShapeTraits"); ok {
+			tag = tag + reflect.StructTag(" ") + field.Tag
+		}
+		return b.buildStruct(value, current, tag)
+	case "list":
+		return b.buildList(value, current, tag)
+	case "map":
+		return b.buildMap(value, current, tag)
+	default:
+		return b.buildScalar(value, current, tag)
+	}
+}
+
+// buildStruct adds a struct and its fields to the current XMLNode. All fields and any nested
+// types are converted to XMLNodes also.
+func (b *xmlBuilder) buildStruct(value reflect.Value, current *XMLNode, tag reflect.StructTag) error {
+	if !value.IsValid() {
+		return nil
+	}
+
+	fieldAdded := false
+
+	// unwrap payloads: when the tag names a payload member, serialize that
+	// member in place of the whole struct (using the member's own tag)
+	if payload := tag.Get("payload"); payload != "" {
+		field, _ := value.Type().FieldByName(payload)
+		tag = field.Tag
+		value = elemOf(value.FieldByName(payload))
+
+		if !value.IsValid() { // nil payload: emit nothing
+			return nil
+		}
+	}
+
+	child := NewXMLElement(xml.Name{Local: tag.Get("locationName")})
+
+	// there is an xmlNamespace associated with this struct
+	if prefix, uri := tag.Get("xmlPrefix"), tag.Get("xmlURI"); uri != "" {
+		ns := xml.Attr{
+			Name: xml.Name{Local: "xmlns"},
+			Value: uri,
+		}
+		if prefix != "" {
+			b.namespaces[prefix] = uri // register the namespace
+			ns.Name.Local = "xmlns:" + prefix
+		}
+
+		child.Attr = append(child.Attr, ns)
+	}
+
+	t := value.Type()
+	for i := 0; i < value.NumField(); i++ {
+		if c := t.Field(i).Name[0:1]; strings.ToLower(c) == c {
+			continue // ignore unexported fields
+		}
+
+		member := elemOf(value.Field(i))
+		field := t.Field(i)
+		mTag := field.Tag
+
+		if mTag.Get("location") != "" { // skip non-body members
+			continue
+		}
+
+		// Default the element name to the Go field name when no
+		// locationName tag is present.
+		memberName := mTag.Get("locationName")
+		if memberName == "" {
+			memberName = field.Name
+			mTag = reflect.StructTag(string(mTag) + ` locationName:"` + memberName + `"`)
+		}
+		if err := b.buildValue(member, child, mTag); err != nil {
+			return err
+		}
+
+		fieldAdded = true
+	}
+
+	if fieldAdded { // only append this child if we have one or more valid members
+		current.AddChild(child)
+	}
+
+	return nil
+}
+
+// buildList adds the value's list items to the current XMLNode as children nodes. All
+// nested values in the list are converted to XMLNodes also.
+func (b *xmlBuilder) buildList(value reflect.Value, current *XMLNode, tag reflect.StructTag) error {
+	if value.IsNil() { // don't build omitted lists
+		return nil
+	}
+
+	// check for unflattened list member
+	flattened := tag.Get("flattened") != ""
+
+	xname := xml.Name{Local: tag.Get("locationName")}
+	if flattened {
+		// Flattened list: one element named after the member per item,
+		// with no wrapping container.
+		for i := 0; i < value.Len(); i++ {
+			child := NewXMLElement(xname)
+			current.AddChild(child)
+			if err := b.buildValue(value.Index(i), child, ""); err != nil {
+				return err
+			}
+		}
+	} else {
+		// Wrapped list: a container element holding one child per item,
+		// named by locationNameList or defaulting to "member".
+		list := NewXMLElement(xname)
+		current.AddChild(list)
+
+		for i := 0; i < value.Len(); i++ {
+			iname := tag.Get("locationNameList")
+			if iname == "" {
+				iname = "member"
+			}
+
+			child := NewXMLElement(xml.Name{Local: iname})
+			list.AddChild(child)
+			if err := b.buildValue(value.Index(i), child, ""); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
+// buildMap adds the value's key/value pairs to the current XMLNode as children nodes. All
+// nested values in the map are converted to XMLNodes also.
+//
+// Error will be returned if it is unable to build the map's values into XMLNodes
+func (b *xmlBuilder) buildMap(value reflect.Value, current *XMLNode, tag reflect.StructTag) error {
+	if value.IsNil() { // don't build omitted maps
+		return nil
+	}
+
+	maproot := NewXMLElement(xml.Name{Local: tag.Get("locationName")})
+	current.AddChild(maproot)
+	current = maproot
+
+	// Element names for key/value pairs default to "key"/"value" and may
+	// be overridden by the locationNameKey/locationNameValue tags.
+	kname, vname := "key", "value"
+	if n := tag.Get("locationNameKey"); n != "" {
+		kname = n
+	}
+	if n := tag.Get("locationNameValue"); n != "" {
+		vname = n
+	}
+
+	// sorting is not required for compliance, but it makes testing easier
+	keys := make([]string, value.Len())
+	for i, k := range value.MapKeys() {
+		keys[i] = k.String()
+	}
+	sort.Strings(keys)
+
+	for _, k := range keys {
+		v := value.MapIndex(reflect.ValueOf(k))
+
+		mapcur := current
+		if tag.Get("flattened") == "" { // add "entry" tag to non-flat maps
+			child := NewXMLElement(xml.Name{Local: "entry"})
+			mapcur.AddChild(child)
+			mapcur = child
+		}
+
+		kchild := NewXMLElement(xml.Name{Local: kname})
+		kchild.Text = k
+		vchild := NewXMLElement(xml.Name{Local: vname})
+		mapcur.AddChild(kchild)
+		mapcur.AddChild(vchild)
+
+		if err := b.buildValue(v, vchild, ""); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// buildScalar will convert the value into a string and append it as a attribute or child
+// of the current XMLNode.
+//
+// The value will be added as an attribute if tag contains a "xmlAttribute" attribute value.
+//
+// Error will be returned if the value type is unsupported.
+func (b *xmlBuilder) buildScalar(value reflect.Value, current *XMLNode, tag reflect.StructTag) error {
+	var str string
+	switch converted := value.Interface().(type) {
+	case string:
+		str = converted
+	case []byte:
+		// Blobs are base64-encoded; a nil slice is emitted as empty text.
+		if !value.IsNil() {
+			str = base64.StdEncoding.EncodeToString(converted)
+		}
+	case bool:
+		str = strconv.FormatBool(converted)
+	case int64:
+		str = strconv.FormatInt(converted, 10)
+	case int:
+		str = strconv.Itoa(converted)
+	case float64:
+		str = strconv.FormatFloat(converted, 'f', -1, 64)
+	case float32:
+		str = strconv.FormatFloat(float64(converted), 'f', -1, 32)
+	case time.Time:
+		// Timestamps are always rendered as ISO 8601 in UTC.
+		const ISO8601UTC = "2006-01-02T15:04:05Z"
+		str = converted.UTC().Format(ISO8601UTC)
+	default:
+		return fmt.Errorf("unsupported value for param %s: %v (%s)",
+			tag.Get("locationName"), value.Interface(), value.Type().Name())
+	}
+
+	xname := xml.Name{Local: tag.Get("locationName")}
+	if tag.Get("xmlAttribute") != "" { // put into current node's attribute list
+		attr := xml.Attr{Name: xname, Value: str}
+		current.Attr = append(current.Attr, attr)
+	} else { // regular text node
+		current.AddChild(&XMLNode{Name: xname, Text: str})
+	}
+	return nil
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil/unmarshal.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil/unmarshal.go
new file mode 100644
index 0000000..5e4fe21
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil/unmarshal.go
@@ -0,0 +1,260 @@
+package xmlutil
+
+import (
+ "encoding/base64"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// UnmarshalXML deserializes an xml.Decoder into the container v. V
+// needs to match the shape of the XML expected to be decoded.
+// If the shape doesn't match unmarshaling will fail.
+func UnmarshalXML(v interface{}, d *xml.Decoder, wrapper string) error {
+ n, _ := XMLToStruct(d, nil)
+ if n.Children != nil {
+ for _, root := range n.Children {
+ for _, c := range root {
+ if wrappedChild, ok := c.Children[wrapper]; ok {
+ c = wrappedChild[0] // pull out wrapped element
+ }
+
+ err := parse(reflect.ValueOf(v), c, "")
+ if err != nil {
+ if err == io.EOF {
+ return nil
+ }
+ return err
+ }
+ }
+ }
+ return nil
+ }
+ return nil
+}
+
+// parse deserializes any value from the XMLNode. The type tag is used to infer the type, or reflect
+// will be used to determine the type from r.
+func parse(r reflect.Value, node *XMLNode, tag reflect.StructTag) error {
+ rtype := r.Type()
+ if rtype.Kind() == reflect.Ptr {
+ rtype = rtype.Elem() // check kind of actual element type
+ }
+
+ t := tag.Get("type")
+ if t == "" {
+ switch rtype.Kind() {
+ case reflect.Struct:
+ t = "structure"
+ case reflect.Slice:
+ t = "list"
+ case reflect.Map:
+ t = "map"
+ }
+ }
+
+ switch t {
+ case "structure":
+ if field, ok := rtype.FieldByName("SDKShapeTraits"); ok {
+ tag = field.Tag
+ }
+ return parseStruct(r, node, tag)
+ case "list":
+ return parseList(r, node, tag)
+ case "map":
+ return parseMap(r, node, tag)
+ default:
+ return parseScalar(r, node, tag)
+ }
+}
+
+// parseStruct deserializes a structure and its fields from an XMLNode. Any nested
+// types in the structure will also be deserialized.
+func parseStruct(r reflect.Value, node *XMLNode, tag reflect.StructTag) error {
+ t := r.Type()
+ if r.Kind() == reflect.Ptr {
+ if r.IsNil() { // create the structure if it's nil
+ s := reflect.New(r.Type().Elem())
+ r.Set(s)
+ r = s
+ }
+
+ r = r.Elem()
+ t = t.Elem()
+ }
+
+ // unwrap any payloads
+ if payload := tag.Get("payload"); payload != "" {
+ field, _ := t.FieldByName(payload)
+ return parseStruct(r.FieldByName(payload), node, field.Tag)
+ }
+
+ for i := 0; i < t.NumField(); i++ {
+ field := t.Field(i)
+ if c := field.Name[0:1]; strings.ToLower(c) == c {
+ continue // ignore unexported fields
+ }
+
+ // figure out what this field is called
+ name := field.Name
+ if field.Tag.Get("flattened") != "" && field.Tag.Get("locationNameList") != "" {
+ name = field.Tag.Get("locationNameList")
+ } else if locName := field.Tag.Get("locationName"); locName != "" {
+ name = locName
+ }
+
+ // try to find the field by name in elements
+ elems := node.Children[name]
+
+ if elems == nil { // try to find the field in attributes
+ for _, a := range node.Attr {
+ if name == a.Name.Local {
+ // turn this into a text node for de-serializing
+ elems = []*XMLNode{{Text: a.Value}}
+ }
+ }
+ }
+
+ member := r.FieldByName(field.Name)
+ for _, elem := range elems {
+ err := parse(member, elem, field.Tag)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+// parseList deserializes a list of values from an XML node. Each list entry
+// will also be deserialized.
+func parseList(r reflect.Value, node *XMLNode, tag reflect.StructTag) error {
+ t := r.Type()
+
+ if tag.Get("flattened") == "" { // look at all item entries
+ mname := "member"
+ if name := tag.Get("locationNameList"); name != "" {
+ mname = name
+ }
+
+ if Children, ok := node.Children[mname]; ok {
+ if r.IsNil() {
+ r.Set(reflect.MakeSlice(t, len(Children), len(Children)))
+ }
+
+ for i, c := range Children {
+ err := parse(r.Index(i), c, "")
+ if err != nil {
+ return err
+ }
+ }
+ }
+ } else { // flattened list means this is a single element
+ if r.IsNil() {
+ r.Set(reflect.MakeSlice(t, 0, 0))
+ }
+
+ childR := reflect.Zero(t.Elem())
+ r.Set(reflect.Append(r, childR))
+ err := parse(r.Index(r.Len()-1), node, "")
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// parseMap deserializes a map from an XMLNode. The direct children of the XMLNode
+// will also be deserialized as map entries.
+// parseMap deserializes a map from an XMLNode, propagating any entry decode error.
+// The direct children of the XMLNode will also be deserialized as map entries.
+func parseMap(r reflect.Value, node *XMLNode, tag reflect.StructTag) error {
+	if r.IsNil() { // allocate the map before inserting entries
+		r.Set(reflect.MakeMap(r.Type()))
+	}
+
+	if tag.Get("flattened") == "" { // look at all child entries
+		for _, entry := range node.Children["entry"] {
+			if err := parseMapEntry(r, entry, tag); err != nil { // was silently discarded
+				return err
+			}
+		}
+	} else { // this element is itself an entry
+		return parseMapEntry(r, node, tag)
+	}
+
+	return nil
+}
+
+// parseMapEntry deserializes a map entry from a XML node.
+// parseMapEntry deserializes a single key/value map entry from an XML node,
+// honoring the locationNameKey / locationNameValue tag overrides.
+func parseMapEntry(r reflect.Value, node *XMLNode, tag reflect.StructTag) error {
+	kname, vname := "key", "value"
+	if n := tag.Get("locationNameKey"); n != "" {
+		kname = n
+	}
+	if n := tag.Get("locationNameValue"); n != "" {
+		vname = n
+	}
+
+	keys, ok := node.Children[kname]
+	values := node.Children[vname]
+	if ok {
+		for i, key := range keys {
+			keyR := reflect.ValueOf(key.Text)
+			value := values[i]
+			valueR := reflect.New(r.Type().Elem()).Elem()
+
+			if err := parse(valueR, value, ""); err != nil { // was silently discarded
+				return err
+			}
+			r.SetMapIndex(keyR, valueR)
+		}
+	}
+	return nil
+}
+
+// parseScalar deserializes an XMLNode value into a concrete type based on the
+// interface type of r.
+//
+// Error is returned if the deserialization fails due to invalid type conversion,
+// or unsupported interface type.
+func parseScalar(r reflect.Value, node *XMLNode, tag reflect.StructTag) error {
+ switch r.Interface().(type) {
+ case *string:
+ r.Set(reflect.ValueOf(&node.Text))
+ return nil
+ case []byte:
+ b, err := base64.StdEncoding.DecodeString(node.Text)
+ if err != nil {
+ return err
+ }
+ r.Set(reflect.ValueOf(b))
+ case *bool:
+ v, err := strconv.ParseBool(node.Text)
+ if err != nil {
+ return err
+ }
+ r.Set(reflect.ValueOf(&v))
+ case *int64:
+ v, err := strconv.ParseInt(node.Text, 10, 64)
+ if err != nil {
+ return err
+ }
+ r.Set(reflect.ValueOf(&v))
+ case *float64:
+ v, err := strconv.ParseFloat(node.Text, 64)
+ if err != nil {
+ return err
+ }
+ r.Set(reflect.ValueOf(&v))
+ case *time.Time:
+ const ISO8601UTC = "2006-01-02T15:04:05Z"
+ t, err := time.Parse(ISO8601UTC, node.Text)
+ if err != nil {
+ return err
+ }
+ r.Set(reflect.ValueOf(&t))
+ default:
+ return fmt.Errorf("unsupported value: %v (%s)", r.Interface(), r.Type())
+ }
+ return nil
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil/xml_to_struct.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil/xml_to_struct.go
new file mode 100644
index 0000000..72c198a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/protocol/xml/xmlutil/xml_to_struct.go
@@ -0,0 +1,105 @@
+package xmlutil
+
+import (
+ "encoding/xml"
+ "io"
+ "sort"
+)
+
+// A XMLNode contains the values to be encoded or decoded.
+type XMLNode struct {
+ Name xml.Name `json:",omitempty"`
+ Children map[string][]*XMLNode `json:",omitempty"`
+ Text string `json:",omitempty"`
+ Attr []xml.Attr `json:",omitempty"`
+}
+
+// NewXMLElement returns a pointer to a new XMLNode initialized to default values.
+func NewXMLElement(name xml.Name) *XMLNode {
+ return &XMLNode{
+ Name: name,
+ Children: map[string][]*XMLNode{},
+ Attr: []xml.Attr{},
+ }
+}
+
+// AddChild adds child to the XMLNode.
+func (n *XMLNode) AddChild(child *XMLNode) {
+ if _, ok := n.Children[child.Name.Local]; !ok {
+ n.Children[child.Name.Local] = []*XMLNode{}
+ }
+ n.Children[child.Name.Local] = append(n.Children[child.Name.Local], child)
+}
+
+// XMLToStruct converts an xml.Decoder stream to an XMLNode with nested values.
+func XMLToStruct(d *xml.Decoder, s *xml.StartElement) (*XMLNode, error) {
+ out := &XMLNode{}
+ for {
+ tok, err := d.Token()
+ if tok == nil || err == io.EOF {
+ break
+ }
+ if err != nil {
+ return out, err
+ }
+
+ switch typed := tok.(type) {
+ case xml.CharData:
+ out.Text = string(typed.Copy())
+ case xml.StartElement:
+ el := typed.Copy()
+ out.Attr = el.Attr
+ if out.Children == nil {
+ out.Children = map[string][]*XMLNode{}
+ }
+
+ name := typed.Name.Local
+ slice := out.Children[name]
+ if slice == nil {
+ slice = []*XMLNode{}
+ }
+ node, e := XMLToStruct(d, &el)
+ if e != nil {
+ return out, e
+ }
+ node.Name = typed.Name
+ slice = append(slice, node)
+ out.Children[name] = slice
+ case xml.EndElement:
+ if s != nil && s.Name.Local == typed.Name.Local { // matching end token
+ return out, nil
+ }
+ }
+ }
+ return out, nil
+}
+
+// StructToXML writes an XMLNode to a xml.Encoder as tokens.
+func StructToXML(e *xml.Encoder, node *XMLNode, sorted bool) error {
+ e.EncodeToken(xml.StartElement{Name: node.Name, Attr: node.Attr})
+
+ if node.Text != "" {
+ e.EncodeToken(xml.CharData([]byte(node.Text)))
+ } else if sorted {
+ sortedNames := []string{}
+ for k := range node.Children {
+ sortedNames = append(sortedNames, k)
+ }
+ sort.Strings(sortedNames)
+
+ for _, k := range sortedNames {
+ for _, v := range node.Children[k] {
+ StructToXML(e, v, sorted)
+ }
+ }
+ } else {
+ for _, c := range node.Children {
+ for _, v := range c {
+ StructToXML(e, v, sorted)
+ }
+ }
+ }
+
+ e.EncodeToken(xml.EndElement{Name: node.Name})
+ return e.Flush()
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/signer/v4/functional_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/signer/v4/functional_test.go
new file mode 100644
index 0000000..fbb0e41
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/signer/v4/functional_test.go
@@ -0,0 +1,43 @@
+package v4_test
+
+import (
+ "net/url"
+ "testing"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/test/unit"
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ = unit.Imported
+
+func TestPresignHandler(t *testing.T) {
+ svc := s3.New(nil)
+ req, _ := svc.PutObjectRequest(&s3.PutObjectInput{
+ Bucket: aws.String("bucket"),
+ Key: aws.String("key"),
+ ContentDisposition: aws.String("a+b c$d"),
+ ACL: aws.String("public-read"),
+ })
+ req.Time = time.Unix(0, 0)
+ urlstr, err := req.Presign(5 * time.Minute)
+
+ assert.NoError(t, err)
+
+ expectedDate := "19700101T000000Z"
+ expectedHeaders := "host;x-amz-acl"
+ expectedSig := "7edcb4e3a1bf12f4989018d75acbe3a7f03df24bd6f3112602d59fc551f0e4e2"
+ expectedCred := "AKID/19700101/mock-region/s3/aws4_request"
+
+ u, _ := url.Parse(urlstr)
+ urlQ := u.Query()
+ assert.Equal(t, expectedSig, urlQ.Get("X-Amz-Signature"))
+ assert.Equal(t, expectedCred, urlQ.Get("X-Amz-Credential"))
+ assert.Equal(t, expectedHeaders, urlQ.Get("X-Amz-SignedHeaders"))
+ assert.Equal(t, expectedDate, urlQ.Get("X-Amz-Date"))
+ assert.Equal(t, "300", urlQ.Get("X-Amz-Expires"))
+
+ assert.NotContains(t, urlstr, "+") // + encoded as %20
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/signer/v4/v4.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/signer/v4/v4.go
new file mode 100644
index 0000000..6fef0d6
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/signer/v4/v4.go
@@ -0,0 +1,360 @@
+// Package v4 implements request signing using the AWS Signature Version 4 algorithm.
+package v4
+
+import (
+ "crypto/hmac"
+ "crypto/sha256"
+ "encoding/hex"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/credentials"
+ "github.com/aws/aws-sdk-go/internal/protocol/rest"
+
+ "github.com/aws/aws-sdk-go/aws"
+)
+
+const (
+ authHeaderPrefix = "AWS4-HMAC-SHA256"
+ timeFormat = "20060102T150405Z"
+ shortTimeFormat = "20060102"
+)
+
+var ignoredHeaders = map[string]bool{
+ "Authorization": true,
+ "Content-Type": true,
+ "Content-Length": true,
+ "User-Agent": true,
+}
+
+type signer struct {
+ Request *http.Request
+ Time time.Time
+ ExpireTime time.Duration
+ ServiceName string
+ Region string
+ CredValues credentials.Value
+ Credentials *credentials.Credentials
+ Query url.Values
+ Body io.ReadSeeker
+ Debug uint
+ Logger io.Writer
+
+ isPresign bool
+ formattedTime string
+ formattedShortTime string
+
+ signedHeaders string
+ canonicalHeaders string
+ canonicalString string
+ credentialString string
+ stringToSign string
+ signature string
+ authorization string
+}
+
+// Sign requests with signature version 4.
+//
+// Will sign the requests with the service config's Credentials object
+// Signing is skipped if the credentials is the credentials.AnonymousCredentials
+// object.
+func Sign(req *aws.Request) {
+ // If the request does not need to be signed ignore the signing of the
+ // request if the AnonymousCredentials object is used.
+ if req.Service.Config.Credentials == credentials.AnonymousCredentials {
+ return
+ }
+
+ region := req.Service.SigningRegion
+ if region == "" {
+ region = req.Service.Config.Region
+ }
+
+ name := req.Service.SigningName
+ if name == "" {
+ name = req.Service.ServiceName
+ }
+
+ s := signer{
+ Request: req.HTTPRequest,
+ Time: req.Time,
+ ExpireTime: req.ExpireTime,
+ Query: req.HTTPRequest.URL.Query(),
+ Body: req.Body,
+ ServiceName: name,
+ Region: region,
+ Credentials: req.Service.Config.Credentials,
+ Debug: req.Service.Config.LogLevel,
+ Logger: req.Service.Config.Logger,
+ }
+
+ req.Error = s.sign()
+}
+
+func (v4 *signer) sign() error {
+ if v4.ExpireTime != 0 {
+ v4.isPresign = true
+ }
+
+ if v4.isRequestSigned() {
+ if !v4.Credentials.IsExpired() {
+ // If the request is already signed, and the credentials have not
+ // expired yet ignore the signing request.
+ return nil
+ }
+
+		// The credentials have expired for this request. The current signature
+		// is invalid and the request needs to be re-signed, or it will fail.
+ if v4.isPresign {
+ v4.removePresign()
+ // Update the request's query string to ensure the values stays in
+ // sync in the case retrieving the new credentials fails.
+ v4.Request.URL.RawQuery = v4.Query.Encode()
+ }
+ }
+
+ var err error
+ v4.CredValues, err = v4.Credentials.Get()
+ if err != nil {
+ return err
+ }
+
+ if v4.isPresign {
+ v4.Query.Set("X-Amz-Algorithm", authHeaderPrefix)
+ if v4.CredValues.SessionToken != "" {
+ v4.Query.Set("X-Amz-Security-Token", v4.CredValues.SessionToken)
+ } else {
+ v4.Query.Del("X-Amz-Security-Token")
+ }
+ } else if v4.CredValues.SessionToken != "" {
+ v4.Request.Header.Set("X-Amz-Security-Token", v4.CredValues.SessionToken)
+ }
+
+ v4.build()
+
+ if v4.Debug > 0 {
+ v4.logSigningInfo()
+ }
+
+ return nil
+}
+
+func (v4 *signer) logSigningInfo() {
+ out := v4.Logger
+ fmt.Fprintf(out, "---[ CANONICAL STRING ]-----------------------------\n")
+ fmt.Fprintln(out, v4.canonicalString)
+ fmt.Fprintf(out, "---[ STRING TO SIGN ]--------------------------------\n")
+ fmt.Fprintln(out, v4.stringToSign)
+ if v4.isPresign {
+ fmt.Fprintf(out, "---[ SIGNED URL ]--------------------------------\n")
+ fmt.Fprintln(out, v4.Request.URL)
+ }
+ fmt.Fprintf(out, "-----------------------------------------------------\n")
+}
+
+func (v4 *signer) build() {
+
+ v4.buildTime() // no depends
+ v4.buildCredentialString() // no depends
+ if v4.isPresign {
+ v4.buildQuery() // no depends
+ }
+ v4.buildCanonicalHeaders() // depends on cred string
+ v4.buildCanonicalString() // depends on canon headers / signed headers
+ v4.buildStringToSign() // depends on canon string
+ v4.buildSignature() // depends on string to sign
+
+ if v4.isPresign {
+ v4.Request.URL.RawQuery += "&X-Amz-Signature=" + v4.signature
+ } else {
+ parts := []string{
+ authHeaderPrefix + " Credential=" + v4.CredValues.AccessKeyID + "/" + v4.credentialString,
+ "SignedHeaders=" + v4.signedHeaders,
+ "Signature=" + v4.signature,
+ }
+ v4.Request.Header.Set("Authorization", strings.Join(parts, ", "))
+ }
+}
+
+func (v4 *signer) buildTime() {
+ v4.formattedTime = v4.Time.UTC().Format(timeFormat)
+ v4.formattedShortTime = v4.Time.UTC().Format(shortTimeFormat)
+
+ if v4.isPresign {
+ duration := int64(v4.ExpireTime / time.Second)
+ v4.Query.Set("X-Amz-Date", v4.formattedTime)
+ v4.Query.Set("X-Amz-Expires", strconv.FormatInt(duration, 10))
+ } else {
+ v4.Request.Header.Set("X-Amz-Date", v4.formattedTime)
+ }
+}
+
+func (v4 *signer) buildCredentialString() {
+ v4.credentialString = strings.Join([]string{
+ v4.formattedShortTime,
+ v4.Region,
+ v4.ServiceName,
+ "aws4_request",
+ }, "/")
+
+ if v4.isPresign {
+ v4.Query.Set("X-Amz-Credential", v4.CredValues.AccessKeyID+"/"+v4.credentialString)
+ }
+}
+
+func (v4 *signer) buildQuery() {
+ for k, h := range v4.Request.Header {
+ if strings.HasPrefix(http.CanonicalHeaderKey(k), "X-Amz-") {
+ continue // never hoist x-amz-* headers, they must be signed
+ }
+ if _, ok := ignoredHeaders[http.CanonicalHeaderKey(k)]; ok {
+ continue // never hoist ignored headers
+ }
+
+ v4.Request.Header.Del(k)
+ v4.Query.Del(k)
+ for _, v := range h {
+ v4.Query.Add(k, v)
+ }
+ }
+}
+
+func (v4 *signer) buildCanonicalHeaders() {
+ var headers []string
+ headers = append(headers, "host")
+ for k := range v4.Request.Header {
+ if _, ok := ignoredHeaders[http.CanonicalHeaderKey(k)]; ok {
+ continue // ignored header
+ }
+ headers = append(headers, strings.ToLower(k))
+ }
+ sort.Strings(headers)
+
+ v4.signedHeaders = strings.Join(headers, ";")
+
+ if v4.isPresign {
+ v4.Query.Set("X-Amz-SignedHeaders", v4.signedHeaders)
+ }
+
+ headerValues := make([]string, len(headers))
+ for i, k := range headers {
+ if k == "host" {
+ headerValues[i] = "host:" + v4.Request.URL.Host
+ } else {
+ headerValues[i] = k + ":" +
+ strings.Join(v4.Request.Header[http.CanonicalHeaderKey(k)], ",")
+ }
+ }
+
+ v4.canonicalHeaders = strings.Join(headerValues, "\n")
+}
+
+func (v4 *signer) buildCanonicalString() {
+ v4.Request.URL.RawQuery = strings.Replace(v4.Query.Encode(), "+", "%20", -1)
+ uri := v4.Request.URL.Opaque
+ if uri != "" {
+ uri = "/" + strings.Join(strings.Split(uri, "/")[3:], "/")
+ } else {
+ uri = v4.Request.URL.Path
+ }
+ if uri == "" {
+ uri = "/"
+ }
+
+ if v4.ServiceName != "s3" {
+ uri = rest.EscapePath(uri, false)
+ }
+
+ v4.canonicalString = strings.Join([]string{
+ v4.Request.Method,
+ uri,
+ v4.Request.URL.RawQuery,
+ v4.canonicalHeaders + "\n",
+ v4.signedHeaders,
+ v4.bodyDigest(),
+ }, "\n")
+}
+
+func (v4 *signer) buildStringToSign() {
+ v4.stringToSign = strings.Join([]string{
+ authHeaderPrefix,
+ v4.formattedTime,
+ v4.credentialString,
+ hex.EncodeToString(makeSha256([]byte(v4.canonicalString))),
+ }, "\n")
+}
+
+func (v4 *signer) buildSignature() {
+ secret := v4.CredValues.SecretAccessKey
+ date := makeHmac([]byte("AWS4"+secret), []byte(v4.formattedShortTime))
+ region := makeHmac(date, []byte(v4.Region))
+ service := makeHmac(region, []byte(v4.ServiceName))
+ credentials := makeHmac(service, []byte("aws4_request"))
+ signature := makeHmac(credentials, []byte(v4.stringToSign))
+ v4.signature = hex.EncodeToString(signature)
+}
+
+func (v4 *signer) bodyDigest() string {
+ hash := v4.Request.Header.Get("X-Amz-Content-Sha256")
+ if hash == "" {
+ if v4.isPresign && v4.ServiceName == "s3" {
+ hash = "UNSIGNED-PAYLOAD"
+ } else if v4.Body == nil {
+ hash = hex.EncodeToString(makeSha256([]byte{}))
+ } else {
+ hash = hex.EncodeToString(makeSha256Reader(v4.Body))
+ }
+ v4.Request.Header.Add("X-Amz-Content-Sha256", hash)
+ }
+ return hash
+}
+
+// isRequestSigned returns if the request is currently signed or presigned
+func (v4 *signer) isRequestSigned() bool {
+ if v4.isPresign && v4.Query.Get("X-Amz-Signature") != "" {
+ return true
+ }
+ if v4.Request.Header.Get("Authorization") != "" {
+ return true
+ }
+
+ return false
+}
+
+// removePresign strips the presign query parameters so the request can be re-signed.
+func (v4 *signer) removePresign() {
+ v4.Query.Del("X-Amz-Algorithm")
+ v4.Query.Del("X-Amz-Signature")
+ v4.Query.Del("X-Amz-Security-Token")
+ v4.Query.Del("X-Amz-Date")
+ v4.Query.Del("X-Amz-Expires")
+ v4.Query.Del("X-Amz-Credential")
+ v4.Query.Del("X-Amz-SignedHeaders")
+}
+
+func makeHmac(key []byte, data []byte) []byte {
+ hash := hmac.New(sha256.New, key)
+ hash.Write(data)
+ return hash.Sum(nil)
+}
+
+func makeSha256(data []byte) []byte {
+ hash := sha256.New()
+ hash.Write(data)
+ return hash.Sum(nil)
+}
+
+func makeSha256Reader(reader io.ReadSeeker) []byte {
+ hash := sha256.New()
+ start, _ := reader.Seek(0, 1)
+ defer reader.Seek(start, 0)
+
+ io.Copy(hash, reader)
+ return hash.Sum(nil)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/signer/v4/v4_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/signer/v4/v4_test.go
new file mode 100644
index 0000000..99966f8
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/internal/signer/v4/v4_test.go
@@ -0,0 +1,245 @@
+package v4
+
+import (
+ "net/http"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/credentials"
+ "github.com/stretchr/testify/assert"
+)
+
+// buildSigner constructs a signer populated with a canned request for the given
+// service/region/body, suitable for deterministic signing tests.
+func buildSigner(serviceName string, region string, signTime time.Time, expireTime time.Duration, body string) signer {
+	endpoint := "https://" + serviceName + "." + region + ".amazonaws.com"
+	reader := strings.NewReader(body)
+	req, _ := http.NewRequest("POST", endpoint, reader)
+	req.URL.Opaque = "//example.org/bucket/key-._~,!@#$%^&*()"
+	req.Header.Add("X-Amz-Target", "prefix.Operation")
+	req.Header.Add("Content-Type", "application/x-amz-json-1.0")
+	req.ContentLength = int64(len(body)) // BUG fix: string(len(body)) yields a rune (e.g. "\x02"), not a decimal length
+	req.Header.Add("X-Amz-Meta-Other-Header", "some-value=!@#$%^&* (+)")
+
+	return signer{
+		Request:     req,
+		Time:        signTime,
+		ExpireTime:  expireTime,
+		Query:       req.URL.Query(),
+		Body:        reader,
+		ServiceName: serviceName,
+		Region:      region,
+		Credentials: credentials.NewStaticCredentials("AKID", "SECRET", "SESSION"),
+	}
+}
+
+func removeWS(text string) string {
+ text = strings.Replace(text, " ", "", -1)
+ text = strings.Replace(text, "\n", "", -1)
+ text = strings.Replace(text, "\t", "", -1)
+ return text
+}
+
+func assertEqual(t *testing.T, expected, given string) {
+ if removeWS(expected) != removeWS(given) {
+ t.Errorf("\nExpected: %s\nGiven: %s", expected, given)
+ }
+}
+
+func TestPresignRequest(t *testing.T) {
+ signer := buildSigner("dynamodb", "us-east-1", time.Unix(0, 0), 300*time.Second, "{}")
+ signer.sign()
+
+ expectedDate := "19700101T000000Z"
+ expectedHeaders := "host;x-amz-meta-other-header;x-amz-target"
+ expectedSig := "5eeedebf6f995145ce56daa02902d10485246d3defb34f97b973c1f40ab82d36"
+ expectedCred := "AKID/19700101/us-east-1/dynamodb/aws4_request"
+
+ q := signer.Request.URL.Query()
+ assert.Equal(t, expectedSig, q.Get("X-Amz-Signature"))
+ assert.Equal(t, expectedCred, q.Get("X-Amz-Credential"))
+ assert.Equal(t, expectedHeaders, q.Get("X-Amz-SignedHeaders"))
+ assert.Equal(t, expectedDate, q.Get("X-Amz-Date"))
+}
+
+func TestSignRequest(t *testing.T) {
+ signer := buildSigner("dynamodb", "us-east-1", time.Unix(0, 0), 0, "{}")
+ signer.sign()
+
+ expectedDate := "19700101T000000Z"
+ expectedSig := "AWS4-HMAC-SHA256 Credential=AKID/19700101/us-east-1/dynamodb/aws4_request, SignedHeaders=host;x-amz-date;x-amz-meta-other-header;x-amz-security-token;x-amz-target, Signature=69ada33fec48180dab153576e4dd80c4e04124f80dda3eccfed8a67c2b91ed5e"
+
+ q := signer.Request.Header
+ assert.Equal(t, expectedSig, q.Get("Authorization"))
+ assert.Equal(t, expectedDate, q.Get("X-Amz-Date"))
+}
+
+func TestSignEmptyBody(t *testing.T) {
+ signer := buildSigner("dynamodb", "us-east-1", time.Now(), 0, "")
+ signer.Body = nil
+ signer.sign()
+ hash := signer.Request.Header.Get("X-Amz-Content-Sha256")
+ assert.Equal(t, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", hash)
+}
+
+func TestSignBody(t *testing.T) {
+ signer := buildSigner("dynamodb", "us-east-1", time.Now(), 0, "hello")
+ signer.sign()
+ hash := signer.Request.Header.Get("X-Amz-Content-Sha256")
+ assert.Equal(t, "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", hash)
+}
+
+func TestSignSeekedBody(t *testing.T) {
+ signer := buildSigner("dynamodb", "us-east-1", time.Now(), 0, " hello")
+ signer.Body.Read(make([]byte, 3)) // consume first 3 bytes so body is now "hello"
+ signer.sign()
+ hash := signer.Request.Header.Get("X-Amz-Content-Sha256")
+ assert.Equal(t, "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", hash)
+
+ start, _ := signer.Body.Seek(0, 1)
+ assert.Equal(t, int64(3), start)
+}
+
+func TestPresignEmptyBodyS3(t *testing.T) {
+ signer := buildSigner("s3", "us-east-1", time.Now(), 5*time.Minute, "hello")
+ signer.sign()
+ hash := signer.Request.Header.Get("X-Amz-Content-Sha256")
+ assert.Equal(t, "UNSIGNED-PAYLOAD", hash)
+}
+
+func TestSignPrecomputedBodyChecksum(t *testing.T) {
+ signer := buildSigner("dynamodb", "us-east-1", time.Now(), 0, "hello")
+ signer.Request.Header.Set("X-Amz-Content-Sha256", "PRECOMPUTED")
+ signer.sign()
+ hash := signer.Request.Header.Get("X-Amz-Content-Sha256")
+ assert.Equal(t, "PRECOMPUTED", hash)
+}
+
+func TestAnonymousCredentials(t *testing.T) {
+ r := aws.NewRequest(
+ aws.NewService(&aws.Config{Credentials: credentials.AnonymousCredentials}),
+ &aws.Operation{
+ Name: "BatchGetItem",
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ },
+ nil,
+ nil,
+ )
+ Sign(r)
+
+ urlQ := r.HTTPRequest.URL.Query()
+ assert.Empty(t, urlQ.Get("X-Amz-Signature"))
+ assert.Empty(t, urlQ.Get("X-Amz-Credential"))
+ assert.Empty(t, urlQ.Get("X-Amz-SignedHeaders"))
+ assert.Empty(t, urlQ.Get("X-Amz-Date"))
+
+ hQ := r.HTTPRequest.Header
+ assert.Empty(t, hQ.Get("Authorization"))
+ assert.Empty(t, hQ.Get("X-Amz-Date"))
+}
+
+func TestIgnoreResignRequestWithValidCreds(t *testing.T) {
+ r := aws.NewRequest(
+ aws.NewService(&aws.Config{
+ Credentials: credentials.NewStaticCredentials("AKID", "SECRET", "SESSION"),
+ Region: "us-west-2",
+ }),
+ &aws.Operation{
+ Name: "BatchGetItem",
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ },
+ nil,
+ nil,
+ )
+
+ Sign(r)
+ sig := r.HTTPRequest.Header.Get("Authorization")
+
+ Sign(r)
+ assert.Equal(t, sig, r.HTTPRequest.Header.Get("Authorization"))
+}
+
+func TestIgnorePreResignRequestWithValidCreds(t *testing.T) {
+ r := aws.NewRequest(
+ aws.NewService(&aws.Config{
+ Credentials: credentials.NewStaticCredentials("AKID", "SECRET", "SESSION"),
+ Region: "us-west-2",
+ }),
+ &aws.Operation{
+ Name: "BatchGetItem",
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ },
+ nil,
+ nil,
+ )
+ r.ExpireTime = time.Minute * 10
+
+ Sign(r)
+ sig := r.HTTPRequest.Header.Get("X-Amz-Signature")
+
+ Sign(r)
+ assert.Equal(t, sig, r.HTTPRequest.Header.Get("X-Amz-Signature"))
+}
+
+func TestResignRequestExpiredCreds(t *testing.T) {
+ creds := credentials.NewStaticCredentials("AKID", "SECRET", "SESSION")
+ r := aws.NewRequest(
+ aws.NewService(&aws.Config{Credentials: creds}),
+ &aws.Operation{
+ Name: "BatchGetItem",
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ },
+ nil,
+ nil,
+ )
+ Sign(r)
+ querySig := r.HTTPRequest.Header.Get("Authorization")
+
+ creds.Expire()
+
+ Sign(r)
+ assert.NotEqual(t, querySig, r.HTTPRequest.Header.Get("Authorization"))
+}
+
+func TestPreResignRequestExpiredCreds(t *testing.T) {
+ provider := &credentials.StaticProvider{credentials.Value{"AKID", "SECRET", "SESSION"}}
+ creds := credentials.NewCredentials(provider)
+ r := aws.NewRequest(
+ aws.NewService(&aws.Config{Credentials: creds}),
+ &aws.Operation{
+ Name: "BatchGetItem",
+ HTTPMethod: "POST",
+ HTTPPath: "/",
+ },
+ nil,
+ nil,
+ )
+ r.ExpireTime = time.Minute * 10
+
+ Sign(r)
+ querySig := r.HTTPRequest.URL.Query().Get("X-Amz-Signature")
+
+ creds.Expire()
+ r.Time = time.Now().Add(time.Hour * 48)
+
+ Sign(r)
+ assert.NotEqual(t, querySig, r.HTTPRequest.URL.Query().Get("X-Amz-Signature"))
+}
+
+func BenchmarkPresignRequest(b *testing.B) {
+ signer := buildSigner("dynamodb", "us-east-1", time.Now(), 300*time.Second, "{}")
+ for i := 0; i < b.N; i++ {
+ signer.sign()
+ }
+}
+
+func BenchmarkSignRequest(b *testing.B) {
+ signer := buildSigner("dynamodb", "us-east-1", time.Now(), 0, "{}")
+ for i := 0; i < b.N; i++ {
+ signer.sign()
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/api.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/api.go
new file mode 100644
index 0000000..e702583
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/api.go
@@ -0,0 +1,4804 @@
+// THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
+
+// Package s3 provides a client for Amazon Simple Storage Service.
+package s3
+
+import (
+ "io"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+)
+
+const opAbortMultipartUpload = "AbortMultipartUpload"
+
+// AbortMultipartUploadRequest generates a request for the AbortMultipartUpload operation.
+func (c *S3) AbortMultipartUploadRequest(input *AbortMultipartUploadInput) (req *aws.Request, output *AbortMultipartUploadOutput) {
+ op := &aws.Operation{
+ Name: opAbortMultipartUpload,
+ HTTPMethod: "DELETE",
+ HTTPPath: "/{Bucket}/{Key+}",
+ }
+
+ if input == nil {
+ input = &AbortMultipartUploadInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &AbortMultipartUploadOutput{}
+ req.Data = output
+ return
+}
+
+// Aborts a multipart upload.
+//
+// To verify that all parts have been removed, so you don't get charged for
+// the part storage, you should call the List Parts operation and ensure the
+// parts list is empty.
+func (c *S3) AbortMultipartUpload(input *AbortMultipartUploadInput) (*AbortMultipartUploadOutput, error) {
+ req, out := c.AbortMultipartUploadRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opCompleteMultipartUpload = "CompleteMultipartUpload"
+
+// CompleteMultipartUploadRequest generates a request for the CompleteMultipartUpload operation.
+func (c *S3) CompleteMultipartUploadRequest(input *CompleteMultipartUploadInput) (req *aws.Request, output *CompleteMultipartUploadOutput) {
+ op := &aws.Operation{
+ Name: opCompleteMultipartUpload,
+ HTTPMethod: "POST",
+ HTTPPath: "/{Bucket}/{Key+}",
+ }
+
+ if input == nil {
+ input = &CompleteMultipartUploadInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &CompleteMultipartUploadOutput{}
+ req.Data = output
+ return
+}
+
+// Completes a multipart upload by assembling previously uploaded parts.
+func (c *S3) CompleteMultipartUpload(input *CompleteMultipartUploadInput) (*CompleteMultipartUploadOutput, error) {
+ req, out := c.CompleteMultipartUploadRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opCopyObject = "CopyObject"
+
+// CopyObjectRequest generates a request for the CopyObject operation.
+func (c *S3) CopyObjectRequest(input *CopyObjectInput) (req *aws.Request, output *CopyObjectOutput) {
+ op := &aws.Operation{
+ Name: opCopyObject,
+ HTTPMethod: "PUT",
+ HTTPPath: "/{Bucket}/{Key+}",
+ }
+
+ if input == nil {
+ input = &CopyObjectInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &CopyObjectOutput{}
+ req.Data = output
+ return
+}
+
+// Creates a copy of an object that is already stored in Amazon S3.
+func (c *S3) CopyObject(input *CopyObjectInput) (*CopyObjectOutput, error) {
+ req, out := c.CopyObjectRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opCreateBucket = "CreateBucket"
+
+// CreateBucketRequest generates a request for the CreateBucket operation.
+func (c *S3) CreateBucketRequest(input *CreateBucketInput) (req *aws.Request, output *CreateBucketOutput) {
+ op := &aws.Operation{
+ Name: opCreateBucket,
+ HTTPMethod: "PUT",
+ HTTPPath: "/{Bucket}",
+ }
+
+ if input == nil {
+ input = &CreateBucketInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &CreateBucketOutput{}
+ req.Data = output
+ return
+}
+
+// Creates a new bucket.
+func (c *S3) CreateBucket(input *CreateBucketInput) (*CreateBucketOutput, error) {
+ req, out := c.CreateBucketRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opCreateMultipartUpload = "CreateMultipartUpload"
+
+// CreateMultipartUploadRequest generates a request for the CreateMultipartUpload operation.
+func (c *S3) CreateMultipartUploadRequest(input *CreateMultipartUploadInput) (req *aws.Request, output *CreateMultipartUploadOutput) {
+ op := &aws.Operation{
+ Name: opCreateMultipartUpload,
+ HTTPMethod: "POST",
+ HTTPPath: "/{Bucket}/{Key+}?uploads",
+ }
+
+ if input == nil {
+ input = &CreateMultipartUploadInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &CreateMultipartUploadOutput{}
+ req.Data = output
+ return
+}
+
+// Initiates a multipart upload and returns an upload ID.
+//
+// Note: After you initiate multipart upload and upload one or more parts,
+// you must either complete or abort multipart upload in order to stop getting
+// charged for storage of the uploaded parts. Only after you either complete
+// or abort multipart upload, Amazon S3 frees up the parts storage and stops
+// charging you for the parts storage.
+func (c *S3) CreateMultipartUpload(input *CreateMultipartUploadInput) (*CreateMultipartUploadOutput, error) {
+ req, out := c.CreateMultipartUploadRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opDeleteBucket = "DeleteBucket"
+
+// DeleteBucketRequest generates a request for the DeleteBucket operation.
+func (c *S3) DeleteBucketRequest(input *DeleteBucketInput) (req *aws.Request, output *DeleteBucketOutput) {
+ op := &aws.Operation{
+ Name: opDeleteBucket,
+ HTTPMethod: "DELETE",
+ HTTPPath: "/{Bucket}",
+ }
+
+ if input == nil {
+ input = &DeleteBucketInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &DeleteBucketOutput{}
+ req.Data = output
+ return
+}
+
+// Deletes the bucket. All objects (including all object versions and Delete
+// Markers) in the bucket must be deleted before the bucket itself can be deleted.
+func (c *S3) DeleteBucket(input *DeleteBucketInput) (*DeleteBucketOutput, error) {
+ req, out := c.DeleteBucketRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opDeleteBucketCORS = "DeleteBucketCors"
+
+// DeleteBucketCORSRequest generates a request for the DeleteBucketCORS operation.
+func (c *S3) DeleteBucketCORSRequest(input *DeleteBucketCORSInput) (req *aws.Request, output *DeleteBucketCORSOutput) {
+ op := &aws.Operation{
+ Name: opDeleteBucketCORS,
+ HTTPMethod: "DELETE",
+ HTTPPath: "/{Bucket}?cors",
+ }
+
+ if input == nil {
+ input = &DeleteBucketCORSInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &DeleteBucketCORSOutput{}
+ req.Data = output
+ return
+}
+
+// Deletes the cors configuration information set for the bucket.
+func (c *S3) DeleteBucketCORS(input *DeleteBucketCORSInput) (*DeleteBucketCORSOutput, error) {
+ req, out := c.DeleteBucketCORSRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opDeleteBucketLifecycle = "DeleteBucketLifecycle"
+
+// DeleteBucketLifecycleRequest generates a request for the DeleteBucketLifecycle operation.
+func (c *S3) DeleteBucketLifecycleRequest(input *DeleteBucketLifecycleInput) (req *aws.Request, output *DeleteBucketLifecycleOutput) {
+ op := &aws.Operation{
+ Name: opDeleteBucketLifecycle,
+ HTTPMethod: "DELETE",
+ HTTPPath: "/{Bucket}?lifecycle",
+ }
+
+ if input == nil {
+ input = &DeleteBucketLifecycleInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &DeleteBucketLifecycleOutput{}
+ req.Data = output
+ return
+}
+
+// Deletes the lifecycle configuration from the bucket.
+func (c *S3) DeleteBucketLifecycle(input *DeleteBucketLifecycleInput) (*DeleteBucketLifecycleOutput, error) {
+ req, out := c.DeleteBucketLifecycleRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opDeleteBucketPolicy = "DeleteBucketPolicy"
+
+// DeleteBucketPolicyRequest generates a request for the DeleteBucketPolicy operation.
+func (c *S3) DeleteBucketPolicyRequest(input *DeleteBucketPolicyInput) (req *aws.Request, output *DeleteBucketPolicyOutput) {
+ op := &aws.Operation{
+ Name: opDeleteBucketPolicy,
+ HTTPMethod: "DELETE",
+ HTTPPath: "/{Bucket}?policy",
+ }
+
+ if input == nil {
+ input = &DeleteBucketPolicyInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &DeleteBucketPolicyOutput{}
+ req.Data = output
+ return
+}
+
+// Deletes the policy from the bucket.
+func (c *S3) DeleteBucketPolicy(input *DeleteBucketPolicyInput) (*DeleteBucketPolicyOutput, error) {
+ req, out := c.DeleteBucketPolicyRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opDeleteBucketReplication = "DeleteBucketReplication"
+
+// DeleteBucketReplicationRequest generates a request for the DeleteBucketReplication operation.
+func (c *S3) DeleteBucketReplicationRequest(input *DeleteBucketReplicationInput) (req *aws.Request, output *DeleteBucketReplicationOutput) {
+ op := &aws.Operation{
+ Name: opDeleteBucketReplication,
+ HTTPMethod: "DELETE",
+ HTTPPath: "/{Bucket}?replication",
+ }
+
+ if input == nil {
+ input = &DeleteBucketReplicationInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &DeleteBucketReplicationOutput{}
+ req.Data = output
+ return
+}
+
+func (c *S3) DeleteBucketReplication(input *DeleteBucketReplicationInput) (*DeleteBucketReplicationOutput, error) {
+ req, out := c.DeleteBucketReplicationRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opDeleteBucketTagging = "DeleteBucketTagging"
+
+// DeleteBucketTaggingRequest generates a request for the DeleteBucketTagging operation.
+func (c *S3) DeleteBucketTaggingRequest(input *DeleteBucketTaggingInput) (req *aws.Request, output *DeleteBucketTaggingOutput) {
+ op := &aws.Operation{
+ Name: opDeleteBucketTagging,
+ HTTPMethod: "DELETE",
+ HTTPPath: "/{Bucket}?tagging",
+ }
+
+ if input == nil {
+ input = &DeleteBucketTaggingInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &DeleteBucketTaggingOutput{}
+ req.Data = output
+ return
+}
+
+// Deletes the tags from the bucket.
+func (c *S3) DeleteBucketTagging(input *DeleteBucketTaggingInput) (*DeleteBucketTaggingOutput, error) {
+ req, out := c.DeleteBucketTaggingRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opDeleteBucketWebsite = "DeleteBucketWebsite"
+
+// DeleteBucketWebsiteRequest generates a request for the DeleteBucketWebsite operation.
+func (c *S3) DeleteBucketWebsiteRequest(input *DeleteBucketWebsiteInput) (req *aws.Request, output *DeleteBucketWebsiteOutput) {
+ op := &aws.Operation{
+ Name: opDeleteBucketWebsite,
+ HTTPMethod: "DELETE",
+ HTTPPath: "/{Bucket}?website",
+ }
+
+ if input == nil {
+ input = &DeleteBucketWebsiteInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &DeleteBucketWebsiteOutput{}
+ req.Data = output
+ return
+}
+
+// This operation removes the website configuration from the bucket.
+func (c *S3) DeleteBucketWebsite(input *DeleteBucketWebsiteInput) (*DeleteBucketWebsiteOutput, error) {
+ req, out := c.DeleteBucketWebsiteRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opDeleteObject = "DeleteObject"
+
+// DeleteObjectRequest generates a request for the DeleteObject operation.
+func (c *S3) DeleteObjectRequest(input *DeleteObjectInput) (req *aws.Request, output *DeleteObjectOutput) {
+ op := &aws.Operation{
+ Name: opDeleteObject,
+ HTTPMethod: "DELETE",
+ HTTPPath: "/{Bucket}/{Key+}",
+ }
+
+ if input == nil {
+ input = &DeleteObjectInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &DeleteObjectOutput{}
+ req.Data = output
+ return
+}
+
+// Removes the null version (if there is one) of an object and inserts a delete
+// marker, which becomes the latest version of the object. If there isn't a
+// null version, Amazon S3 does not remove any objects.
+func (c *S3) DeleteObject(input *DeleteObjectInput) (*DeleteObjectOutput, error) {
+ req, out := c.DeleteObjectRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opDeleteObjects = "DeleteObjects"
+
+// DeleteObjectsRequest generates a request for the DeleteObjects operation.
+func (c *S3) DeleteObjectsRequest(input *DeleteObjectsInput) (req *aws.Request, output *DeleteObjectsOutput) {
+ op := &aws.Operation{
+ Name: opDeleteObjects,
+ HTTPMethod: "POST",
+ HTTPPath: "/{Bucket}?delete",
+ }
+
+ if input == nil {
+ input = &DeleteObjectsInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &DeleteObjectsOutput{}
+ req.Data = output
+ return
+}
+
+// This operation enables you to delete multiple objects from a bucket using
+// a single HTTP request. You may specify up to 1000 keys.
+func (c *S3) DeleteObjects(input *DeleteObjectsInput) (*DeleteObjectsOutput, error) {
+ req, out := c.DeleteObjectsRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketACL = "GetBucketAcl"
+
+// GetBucketACLRequest generates a request for the GetBucketACL operation.
+func (c *S3) GetBucketACLRequest(input *GetBucketACLInput) (req *aws.Request, output *GetBucketACLOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketACL,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?acl",
+ }
+
+ if input == nil {
+ input = &GetBucketACLInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketACLOutput{}
+ req.Data = output
+ return
+}
+
+// Gets the access control policy for the bucket.
+func (c *S3) GetBucketACL(input *GetBucketACLInput) (*GetBucketACLOutput, error) {
+ req, out := c.GetBucketACLRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketCORS = "GetBucketCors"
+
+// GetBucketCORSRequest generates a request for the GetBucketCORS operation.
+func (c *S3) GetBucketCORSRequest(input *GetBucketCORSInput) (req *aws.Request, output *GetBucketCORSOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketCORS,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?cors",
+ }
+
+ if input == nil {
+ input = &GetBucketCORSInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketCORSOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the cors configuration for the bucket.
+func (c *S3) GetBucketCORS(input *GetBucketCORSInput) (*GetBucketCORSOutput, error) {
+ req, out := c.GetBucketCORSRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketLifecycle = "GetBucketLifecycle"
+
+// GetBucketLifecycleRequest generates a request for the GetBucketLifecycle operation.
+func (c *S3) GetBucketLifecycleRequest(input *GetBucketLifecycleInput) (req *aws.Request, output *GetBucketLifecycleOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketLifecycle,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?lifecycle",
+ }
+
+ if input == nil {
+ input = &GetBucketLifecycleInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketLifecycleOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the lifecycle configuration information set on the bucket.
+func (c *S3) GetBucketLifecycle(input *GetBucketLifecycleInput) (*GetBucketLifecycleOutput, error) {
+ req, out := c.GetBucketLifecycleRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketLocation = "GetBucketLocation"
+
+// GetBucketLocationRequest generates a request for the GetBucketLocation operation.
+func (c *S3) GetBucketLocationRequest(input *GetBucketLocationInput) (req *aws.Request, output *GetBucketLocationOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketLocation,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?location",
+ }
+
+ if input == nil {
+ input = &GetBucketLocationInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketLocationOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the region the bucket resides in.
+func (c *S3) GetBucketLocation(input *GetBucketLocationInput) (*GetBucketLocationOutput, error) {
+ req, out := c.GetBucketLocationRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketLogging = "GetBucketLogging"
+
+// GetBucketLoggingRequest generates a request for the GetBucketLogging operation.
+func (c *S3) GetBucketLoggingRequest(input *GetBucketLoggingInput) (req *aws.Request, output *GetBucketLoggingOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketLogging,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?logging",
+ }
+
+ if input == nil {
+ input = &GetBucketLoggingInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketLoggingOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the logging status of a bucket and the permissions users have to
+// view and modify that status. To use GET, you must be the bucket owner.
+func (c *S3) GetBucketLogging(input *GetBucketLoggingInput) (*GetBucketLoggingOutput, error) {
+ req, out := c.GetBucketLoggingRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketNotification = "GetBucketNotification"
+
+// GetBucketNotificationRequest generates a request for the GetBucketNotification operation.
+func (c *S3) GetBucketNotificationRequest(input *GetBucketNotificationConfigurationRequest) (req *aws.Request, output *NotificationConfigurationDeprecated) {
+ op := &aws.Operation{
+ Name: opGetBucketNotification,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?notification",
+ }
+
+ if input == nil {
+ input = &GetBucketNotificationConfigurationRequest{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &NotificationConfigurationDeprecated{}
+ req.Data = output
+ return
+}
+
+// Deprecated, see the GetBucketNotificationConfiguration operation.
+func (c *S3) GetBucketNotification(input *GetBucketNotificationConfigurationRequest) (*NotificationConfigurationDeprecated, error) {
+ req, out := c.GetBucketNotificationRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketNotificationConfiguration = "GetBucketNotificationConfiguration"
+
+// GetBucketNotificationConfigurationRequest generates a request for the GetBucketNotificationConfiguration operation.
+func (c *S3) GetBucketNotificationConfigurationRequest(input *GetBucketNotificationConfigurationRequest) (req *aws.Request, output *NotificationConfiguration) {
+ op := &aws.Operation{
+ Name: opGetBucketNotificationConfiguration,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?notification",
+ }
+
+ if input == nil {
+ input = &GetBucketNotificationConfigurationRequest{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &NotificationConfiguration{}
+ req.Data = output
+ return
+}
+
+// Returns the notification configuration of a bucket.
+func (c *S3) GetBucketNotificationConfiguration(input *GetBucketNotificationConfigurationRequest) (*NotificationConfiguration, error) {
+ req, out := c.GetBucketNotificationConfigurationRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketPolicy = "GetBucketPolicy"
+
+// GetBucketPolicyRequest generates a request for the GetBucketPolicy operation.
+func (c *S3) GetBucketPolicyRequest(input *GetBucketPolicyInput) (req *aws.Request, output *GetBucketPolicyOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketPolicy,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?policy",
+ }
+
+ if input == nil {
+ input = &GetBucketPolicyInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketPolicyOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the policy of a specified bucket.
+func (c *S3) GetBucketPolicy(input *GetBucketPolicyInput) (*GetBucketPolicyOutput, error) {
+ req, out := c.GetBucketPolicyRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketReplication = "GetBucketReplication"
+
+// GetBucketReplicationRequest generates a request for the GetBucketReplication operation.
+func (c *S3) GetBucketReplicationRequest(input *GetBucketReplicationInput) (req *aws.Request, output *GetBucketReplicationOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketReplication,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?replication",
+ }
+
+ if input == nil {
+ input = &GetBucketReplicationInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketReplicationOutput{}
+ req.Data = output
+ return
+}
+
+func (c *S3) GetBucketReplication(input *GetBucketReplicationInput) (*GetBucketReplicationOutput, error) {
+ req, out := c.GetBucketReplicationRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketRequestPayment = "GetBucketRequestPayment"
+
+// GetBucketRequestPaymentRequest generates a request for the GetBucketRequestPayment operation.
+func (c *S3) GetBucketRequestPaymentRequest(input *GetBucketRequestPaymentInput) (req *aws.Request, output *GetBucketRequestPaymentOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketRequestPayment,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?requestPayment",
+ }
+
+ if input == nil {
+ input = &GetBucketRequestPaymentInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketRequestPaymentOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the request payment configuration of a bucket.
+func (c *S3) GetBucketRequestPayment(input *GetBucketRequestPaymentInput) (*GetBucketRequestPaymentOutput, error) {
+ req, out := c.GetBucketRequestPaymentRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketTagging = "GetBucketTagging"
+
+// GetBucketTaggingRequest generates a request for the GetBucketTagging operation.
+func (c *S3) GetBucketTaggingRequest(input *GetBucketTaggingInput) (req *aws.Request, output *GetBucketTaggingOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketTagging,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?tagging",
+ }
+
+ if input == nil {
+ input = &GetBucketTaggingInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketTaggingOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the tag set associated with the bucket.
+func (c *S3) GetBucketTagging(input *GetBucketTaggingInput) (*GetBucketTaggingOutput, error) {
+ req, out := c.GetBucketTaggingRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketVersioning = "GetBucketVersioning"
+
+// GetBucketVersioningRequest generates a request for the GetBucketVersioning operation.
+func (c *S3) GetBucketVersioningRequest(input *GetBucketVersioningInput) (req *aws.Request, output *GetBucketVersioningOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketVersioning,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?versioning",
+ }
+
+ if input == nil {
+ input = &GetBucketVersioningInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketVersioningOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the versioning state of a bucket.
+func (c *S3) GetBucketVersioning(input *GetBucketVersioningInput) (*GetBucketVersioningOutput, error) {
+ req, out := c.GetBucketVersioningRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetBucketWebsite = "GetBucketWebsite"
+
+// GetBucketWebsiteRequest generates a request for the GetBucketWebsite operation.
+func (c *S3) GetBucketWebsiteRequest(input *GetBucketWebsiteInput) (req *aws.Request, output *GetBucketWebsiteOutput) {
+ op := &aws.Operation{
+ Name: opGetBucketWebsite,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?website",
+ }
+
+ if input == nil {
+ input = &GetBucketWebsiteInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetBucketWebsiteOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the website configuration for a bucket.
+func (c *S3) GetBucketWebsite(input *GetBucketWebsiteInput) (*GetBucketWebsiteOutput, error) {
+ req, out := c.GetBucketWebsiteRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetObject = "GetObject"
+
+// GetObjectRequest generates a request for the GetObject operation.
+func (c *S3) GetObjectRequest(input *GetObjectInput) (req *aws.Request, output *GetObjectOutput) {
+ op := &aws.Operation{
+ Name: opGetObject,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}/{Key+}",
+ }
+
+ if input == nil {
+ input = &GetObjectInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetObjectOutput{}
+ req.Data = output
+ return
+}
+
+// Retrieves objects from Amazon S3.
+func (c *S3) GetObject(input *GetObjectInput) (*GetObjectOutput, error) {
+ req, out := c.GetObjectRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetObjectACL = "GetObjectAcl"
+
+// GetObjectACLRequest generates a request for the GetObjectACL operation.
+func (c *S3) GetObjectACLRequest(input *GetObjectACLInput) (req *aws.Request, output *GetObjectACLOutput) {
+ op := &aws.Operation{
+ Name: opGetObjectACL,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}/{Key+}?acl",
+ }
+
+ if input == nil {
+ input = &GetObjectACLInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetObjectACLOutput{}
+ req.Data = output
+ return
+}
+
+// Returns the access control list (ACL) of an object.
+func (c *S3) GetObjectACL(input *GetObjectACLInput) (*GetObjectACLOutput, error) {
+ req, out := c.GetObjectACLRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opGetObjectTorrent = "GetObjectTorrent"
+
+// GetObjectTorrentRequest generates a request for the GetObjectTorrent operation.
+func (c *S3) GetObjectTorrentRequest(input *GetObjectTorrentInput) (req *aws.Request, output *GetObjectTorrentOutput) {
+ op := &aws.Operation{
+ Name: opGetObjectTorrent,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}/{Key+}?torrent",
+ }
+
+ if input == nil {
+ input = &GetObjectTorrentInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &GetObjectTorrentOutput{}
+ req.Data = output
+ return
+}
+
+// Return torrent files from a bucket.
+func (c *S3) GetObjectTorrent(input *GetObjectTorrentInput) (*GetObjectTorrentOutput, error) {
+ req, out := c.GetObjectTorrentRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opHeadBucket = "HeadBucket"
+
+// HeadBucketRequest generates a request for the HeadBucket operation.
+func (c *S3) HeadBucketRequest(input *HeadBucketInput) (req *aws.Request, output *HeadBucketOutput) {
+ op := &aws.Operation{
+ Name: opHeadBucket,
+ HTTPMethod: "HEAD",
+ HTTPPath: "/{Bucket}",
+ }
+
+ if input == nil {
+ input = &HeadBucketInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &HeadBucketOutput{}
+ req.Data = output
+ return
+}
+
+// This operation is useful to determine if a bucket exists and you have permission
+// to access it.
+func (c *S3) HeadBucket(input *HeadBucketInput) (*HeadBucketOutput, error) {
+ req, out := c.HeadBucketRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opHeadObject = "HeadObject"
+
+// HeadObjectRequest generates a request for the HeadObject operation.
+func (c *S3) HeadObjectRequest(input *HeadObjectInput) (req *aws.Request, output *HeadObjectOutput) {
+ op := &aws.Operation{
+ Name: opHeadObject,
+ HTTPMethod: "HEAD",
+ HTTPPath: "/{Bucket}/{Key+}",
+ }
+
+ if input == nil {
+ input = &HeadObjectInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &HeadObjectOutput{}
+ req.Data = output
+ return
+}
+
+// The HEAD operation retrieves metadata from an object without returning the
+// object itself. This operation is useful if you're only interested in an object's
+// metadata. To use HEAD, you must have READ access to the object.
+func (c *S3) HeadObject(input *HeadObjectInput) (*HeadObjectOutput, error) {
+ req, out := c.HeadObjectRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opListBuckets = "ListBuckets"
+
+// ListBucketsRequest generates a request for the ListBuckets operation.
+func (c *S3) ListBucketsRequest(input *ListBucketsInput) (req *aws.Request, output *ListBucketsOutput) {
+ op := &aws.Operation{
+ Name: opListBuckets,
+ HTTPMethod: "GET",
+ HTTPPath: "/",
+ }
+
+ if input == nil {
+ input = &ListBucketsInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &ListBucketsOutput{}
+ req.Data = output
+ return
+}
+
+// Returns a list of all buckets owned by the authenticated sender of the request.
+func (c *S3) ListBuckets(input *ListBucketsInput) (*ListBucketsOutput, error) {
+ req, out := c.ListBucketsRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+const opListMultipartUploads = "ListMultipartUploads"
+
+// ListMultipartUploadsRequest generates a request for the ListMultipartUploads operation.
+func (c *S3) ListMultipartUploadsRequest(input *ListMultipartUploadsInput) (req *aws.Request, output *ListMultipartUploadsOutput) {
+ op := &aws.Operation{
+ Name: opListMultipartUploads,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?uploads",
+ Paginator: &aws.Paginator{
+ InputTokens: []string{"KeyMarker", "UploadIdMarker"},
+ OutputTokens: []string{"NextKeyMarker", "NextUploadIdMarker"},
+ LimitToken: "MaxUploads",
+ TruncationToken: "IsTruncated",
+ },
+ }
+
+ if input == nil {
+ input = &ListMultipartUploadsInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &ListMultipartUploadsOutput{}
+ req.Data = output
+ return
+}
+
+// This operation lists in-progress multipart uploads.
+func (c *S3) ListMultipartUploads(input *ListMultipartUploadsInput) (*ListMultipartUploadsOutput, error) {
+ req, out := c.ListMultipartUploadsRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+func (c *S3) ListMultipartUploadsPages(input *ListMultipartUploadsInput, fn func(p *ListMultipartUploadsOutput, lastPage bool) (shouldContinue bool)) error {
+ page, _ := c.ListMultipartUploadsRequest(input)
+ return page.EachPage(func(p interface{}, lastPage bool) bool {
+ return fn(p.(*ListMultipartUploadsOutput), lastPage)
+ })
+}
+
+const opListObjectVersions = "ListObjectVersions"
+
+// ListObjectVersionsRequest generates a request for the ListObjectVersions operation.
+func (c *S3) ListObjectVersionsRequest(input *ListObjectVersionsInput) (req *aws.Request, output *ListObjectVersionsOutput) {
+ op := &aws.Operation{
+ Name: opListObjectVersions,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}?versions",
+ Paginator: &aws.Paginator{
+ InputTokens: []string{"KeyMarker", "VersionIdMarker"},
+ OutputTokens: []string{"NextKeyMarker", "NextVersionIdMarker"},
+ LimitToken: "MaxKeys",
+ TruncationToken: "IsTruncated",
+ },
+ }
+
+ if input == nil {
+ input = &ListObjectVersionsInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &ListObjectVersionsOutput{}
+ req.Data = output
+ return
+}
+
+// Returns metadata about all of the versions of objects in a bucket.
+func (c *S3) ListObjectVersions(input *ListObjectVersionsInput) (*ListObjectVersionsOutput, error) {
+ req, out := c.ListObjectVersionsRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+func (c *S3) ListObjectVersionsPages(input *ListObjectVersionsInput, fn func(p *ListObjectVersionsOutput, lastPage bool) (shouldContinue bool)) error {
+ page, _ := c.ListObjectVersionsRequest(input)
+ return page.EachPage(func(p interface{}, lastPage bool) bool {
+ return fn(p.(*ListObjectVersionsOutput), lastPage)
+ })
+}
+
+const opListObjects = "ListObjects"
+
+// ListObjectsRequest generates a request for the ListObjects operation.
+func (c *S3) ListObjectsRequest(input *ListObjectsInput) (req *aws.Request, output *ListObjectsOutput) {
+ op := &aws.Operation{
+ Name: opListObjects,
+ HTTPMethod: "GET",
+ HTTPPath: "/{Bucket}",
+ Paginator: &aws.Paginator{
+ InputTokens: []string{"Marker"},
+ OutputTokens: []string{"NextMarker || Contents[-1].Key"},
+ LimitToken: "MaxKeys",
+ TruncationToken: "IsTruncated",
+ },
+ }
+
+ if input == nil {
+ input = &ListObjectsInput{}
+ }
+
+ req = c.newRequest(op, input, output)
+ output = &ListObjectsOutput{}
+ req.Data = output
+ return
+}
+
+// Returns some or all (up to 1000) of the objects in a bucket. You can use
+// the request parameters as selection criteria to return a subset of the objects
+// in a bucket.
+func (c *S3) ListObjects(input *ListObjectsInput) (*ListObjectsOutput, error) {
+ req, out := c.ListObjectsRequest(input)
+ err := req.Send()
+ return out, err
+}
+
+func (c *S3) ListObjectsPages(input *ListObjectsInput, fn func(p *ListObjectsOutput, lastPage bool) (shouldContinue bool)) error {
+ page, _ := c.ListObjectsRequest(input)
+ return page.EachPage(func(p interface{}, lastPage bool) bool {
+ return fn(p.(*ListObjectsOutput), lastPage)
+ })
+}
+
+const opListParts = "ListParts"
+
+// ListPartsRequest generates a request for the ListParts operation.
+func (c *S3) ListPartsRequest(input *ListPartsInput) (req *aws.Request, output *ListPartsOutput) {
+	op := &aws.Operation{
+		Name:       opListParts,
+		HTTPMethod: "GET",
+		HTTPPath:   "/{Bucket}/{Key+}",
+		Paginator: &aws.Paginator{
+			InputTokens:     []string{"PartNumberMarker"},
+			OutputTokens:    []string{"NextPartNumberMarker"},
+			LimitToken:      "MaxParts",
+			TruncationToken: "IsTruncated",
+		},
+	}
+
+	if input == nil {
+		input = &ListPartsInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &ListPartsOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Lists the parts that have been uploaded for a specific multipart upload.
+func (c *S3) ListParts(input *ListPartsInput) (*ListPartsOutput, error) {
+	req, out := c.ListPartsRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+// ListPartsPages iterates over the pages of a ListParts request, calling fn
+// with each page until fn returns false or no pages remain.
+func (c *S3) ListPartsPages(input *ListPartsInput, fn func(p *ListPartsOutput, lastPage bool) (shouldContinue bool)) error {
+	// The output shape returned here is unused; each page is delivered
+	// through the EachPage callback instead.
+	page, _ := c.ListPartsRequest(input)
+	return page.EachPage(func(p interface{}, lastPage bool) bool {
+		return fn(p.(*ListPartsOutput), lastPage)
+	})
+}
+
+const opPutBucketACL = "PutBucketAcl"
+
+// PutBucketACLRequest generates a request for the PutBucketACL operation.
+func (c *S3) PutBucketACLRequest(input *PutBucketACLInput) (req *aws.Request, output *PutBucketACLOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketACL,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?acl",
+	}
+
+	if input == nil {
+		input = &PutBucketACLInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketACLOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Sets the permissions on a bucket using access control lists (ACL).
+func (c *S3) PutBucketACL(input *PutBucketACLInput) (*PutBucketACLOutput, error) {
+	req, out := c.PutBucketACLRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketCORS = "PutBucketCors"
+
+// PutBucketCORSRequest generates a request for the PutBucketCORS operation.
+func (c *S3) PutBucketCORSRequest(input *PutBucketCORSInput) (req *aws.Request, output *PutBucketCORSOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketCORS,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?cors",
+	}
+
+	if input == nil {
+		input = &PutBucketCORSInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketCORSOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Sets the cors configuration for a bucket.
+func (c *S3) PutBucketCORS(input *PutBucketCORSInput) (*PutBucketCORSOutput, error) {
+	req, out := c.PutBucketCORSRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketLifecycle = "PutBucketLifecycle"
+
+// PutBucketLifecycleRequest generates a request for the PutBucketLifecycle operation.
+func (c *S3) PutBucketLifecycleRequest(input *PutBucketLifecycleInput) (req *aws.Request, output *PutBucketLifecycleOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketLifecycle,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?lifecycle",
+	}
+
+	if input == nil {
+		input = &PutBucketLifecycleInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketLifecycleOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Sets lifecycle configuration for your bucket. If a lifecycle configuration
+// exists, it replaces it.
+func (c *S3) PutBucketLifecycle(input *PutBucketLifecycleInput) (*PutBucketLifecycleOutput, error) {
+	req, out := c.PutBucketLifecycleRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketLogging = "PutBucketLogging"
+
+// PutBucketLoggingRequest generates a request for the PutBucketLogging operation.
+func (c *S3) PutBucketLoggingRequest(input *PutBucketLoggingInput) (req *aws.Request, output *PutBucketLoggingOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketLogging,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?logging",
+	}
+
+	if input == nil {
+		input = &PutBucketLoggingInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketLoggingOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Set the logging parameters for a bucket and to specify permissions for who
+// can view and modify the logging parameters. To set the logging status of
+// a bucket, you must be the bucket owner.
+func (c *S3) PutBucketLogging(input *PutBucketLoggingInput) (*PutBucketLoggingOutput, error) {
+	req, out := c.PutBucketLoggingRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketNotification = "PutBucketNotification"
+
+// PutBucketNotificationRequest generates a request for the PutBucketNotification operation.
+func (c *S3) PutBucketNotificationRequest(input *PutBucketNotificationInput) (req *aws.Request, output *PutBucketNotificationOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketNotification,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?notification",
+	}
+
+	if input == nil {
+		input = &PutBucketNotificationInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketNotificationOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// PutBucketNotification is deprecated; see the PutBucketNotificationConfiguration
+// operation.
+//
+// Deprecated: Use PutBucketNotificationConfiguration instead.
+func (c *S3) PutBucketNotification(input *PutBucketNotificationInput) (*PutBucketNotificationOutput, error) {
+	req, out := c.PutBucketNotificationRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketNotificationConfiguration = "PutBucketNotificationConfiguration"
+
+// PutBucketNotificationConfigurationRequest generates a request for the PutBucketNotificationConfiguration operation.
+func (c *S3) PutBucketNotificationConfigurationRequest(input *PutBucketNotificationConfigurationInput) (req *aws.Request, output *PutBucketNotificationConfigurationOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketNotificationConfiguration,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?notification",
+	}
+
+	if input == nil {
+		input = &PutBucketNotificationConfigurationInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketNotificationConfigurationOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Enables notifications of specified events for a bucket.
+func (c *S3) PutBucketNotificationConfiguration(input *PutBucketNotificationConfigurationInput) (*PutBucketNotificationConfigurationOutput, error) {
+	req, out := c.PutBucketNotificationConfigurationRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketPolicy = "PutBucketPolicy"
+
+// PutBucketPolicyRequest generates a request for the PutBucketPolicy operation.
+func (c *S3) PutBucketPolicyRequest(input *PutBucketPolicyInput) (req *aws.Request, output *PutBucketPolicyOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketPolicy,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?policy",
+	}
+
+	if input == nil {
+		input = &PutBucketPolicyInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketPolicyOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Replaces a policy on a bucket. If the bucket already has a policy, the one
+// in this request completely replaces it.
+func (c *S3) PutBucketPolicy(input *PutBucketPolicyInput) (*PutBucketPolicyOutput, error) {
+	req, out := c.PutBucketPolicyRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketReplication = "PutBucketReplication"
+
+// PutBucketReplicationRequest generates a request for the PutBucketReplication operation.
+func (c *S3) PutBucketReplicationRequest(input *PutBucketReplicationInput) (req *aws.Request, output *PutBucketReplicationOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketReplication,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?replication",
+	}
+
+	if input == nil {
+		input = &PutBucketReplicationInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketReplicationOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Creates a new replication configuration (or replaces an existing one, if
+// present).
+func (c *S3) PutBucketReplication(input *PutBucketReplicationInput) (*PutBucketReplicationOutput, error) {
+	req, out := c.PutBucketReplicationRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketRequestPayment = "PutBucketRequestPayment"
+
+// PutBucketRequestPaymentRequest generates a request for the PutBucketRequestPayment operation.
+func (c *S3) PutBucketRequestPaymentRequest(input *PutBucketRequestPaymentInput) (req *aws.Request, output *PutBucketRequestPaymentOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketRequestPayment,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?requestPayment",
+	}
+
+	if input == nil {
+		input = &PutBucketRequestPaymentInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketRequestPaymentOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Sets the request payment configuration for a bucket. By default, the bucket
+// owner pays for downloads from the bucket. This configuration parameter enables
+// the bucket owner (only) to specify that the person requesting the download
+// will be charged for the download. Documentation on requester pays buckets
+// can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html
+func (c *S3) PutBucketRequestPayment(input *PutBucketRequestPaymentInput) (*PutBucketRequestPaymentOutput, error) {
+	req, out := c.PutBucketRequestPaymentRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketTagging = "PutBucketTagging"
+
+// PutBucketTaggingRequest generates a request for the PutBucketTagging operation.
+func (c *S3) PutBucketTaggingRequest(input *PutBucketTaggingInput) (req *aws.Request, output *PutBucketTaggingOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketTagging,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?tagging",
+	}
+
+	if input == nil {
+		input = &PutBucketTaggingInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketTaggingOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Sets the tags for a bucket.
+func (c *S3) PutBucketTagging(input *PutBucketTaggingInput) (*PutBucketTaggingOutput, error) {
+	req, out := c.PutBucketTaggingRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketVersioning = "PutBucketVersioning"
+
+// PutBucketVersioningRequest generates a request for the PutBucketVersioning operation.
+func (c *S3) PutBucketVersioningRequest(input *PutBucketVersioningInput) (req *aws.Request, output *PutBucketVersioningOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketVersioning,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?versioning",
+	}
+
+	if input == nil {
+		input = &PutBucketVersioningInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketVersioningOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Sets the versioning state of an existing bucket. To set the versioning state,
+// you must be the bucket owner.
+func (c *S3) PutBucketVersioning(input *PutBucketVersioningInput) (*PutBucketVersioningOutput, error) {
+	req, out := c.PutBucketVersioningRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutBucketWebsite = "PutBucketWebsite"
+
+// PutBucketWebsiteRequest generates a request for the PutBucketWebsite operation.
+func (c *S3) PutBucketWebsiteRequest(input *PutBucketWebsiteInput) (req *aws.Request, output *PutBucketWebsiteOutput) {
+	op := &aws.Operation{
+		Name:       opPutBucketWebsite,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}?website",
+	}
+
+	if input == nil {
+		input = &PutBucketWebsiteInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutBucketWebsiteOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Set the website configuration for a bucket.
+func (c *S3) PutBucketWebsite(input *PutBucketWebsiteInput) (*PutBucketWebsiteOutput, error) {
+	req, out := c.PutBucketWebsiteRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutObject = "PutObject"
+
+// PutObjectRequest generates a request for the PutObject operation.
+func (c *S3) PutObjectRequest(input *PutObjectInput) (req *aws.Request, output *PutObjectOutput) {
+	op := &aws.Operation{
+		Name:       opPutObject,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}/{Key+}",
+	}
+
+	if input == nil {
+		input = &PutObjectInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutObjectOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Adds an object to a bucket.
+func (c *S3) PutObject(input *PutObjectInput) (*PutObjectOutput, error) {
+	req, out := c.PutObjectRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opPutObjectACL = "PutObjectAcl"
+
+// PutObjectACLRequest generates a request for the PutObjectACL operation.
+func (c *S3) PutObjectACLRequest(input *PutObjectACLInput) (req *aws.Request, output *PutObjectACLOutput) {
+	op := &aws.Operation{
+		Name:       opPutObjectACL,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}/{Key+}?acl",
+	}
+
+	if input == nil {
+		input = &PutObjectACLInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &PutObjectACLOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Uses the acl subresource to set the access control list (ACL) permissions
+// for an object that already exists in a bucket.
+func (c *S3) PutObjectACL(input *PutObjectACLInput) (*PutObjectACLOutput, error) {
+	req, out := c.PutObjectACLRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opRestoreObject = "RestoreObject"
+
+// RestoreObjectRequest generates a request for the RestoreObject operation.
+func (c *S3) RestoreObjectRequest(input *RestoreObjectInput) (req *aws.Request, output *RestoreObjectOutput) {
+	op := &aws.Operation{
+		Name:       opRestoreObject,
+		HTTPMethod: "POST",
+		HTTPPath:   "/{Bucket}/{Key+}?restore",
+	}
+
+	if input == nil {
+		input = &RestoreObjectInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &RestoreObjectOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Restores an archived copy of an object back into Amazon S3.
+func (c *S3) RestoreObject(input *RestoreObjectInput) (*RestoreObjectOutput, error) {
+	req, out := c.RestoreObjectRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opUploadPart = "UploadPart"
+
+// UploadPartRequest generates a request for the UploadPart operation.
+func (c *S3) UploadPartRequest(input *UploadPartInput) (req *aws.Request, output *UploadPartOutput) {
+	op := &aws.Operation{
+		Name:       opUploadPart,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}/{Key+}",
+	}
+
+	if input == nil {
+		input = &UploadPartInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &UploadPartOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Uploads a part in a multipart upload.
+//
+// Note: After you initiate multipart upload and upload one or more parts,
+// you must either complete or abort multipart upload in order to stop getting
+// charged for storage of the uploaded parts. Only after you either complete
+// or abort multipart upload, Amazon S3 frees up the parts storage and stops
+// charging you for the parts storage.
+func (c *S3) UploadPart(input *UploadPartInput) (*UploadPartOutput, error) {
+	req, out := c.UploadPartRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+const opUploadPartCopy = "UploadPartCopy"
+
+// UploadPartCopyRequest generates a request for the UploadPartCopy operation.
+func (c *S3) UploadPartCopyRequest(input *UploadPartCopyInput) (req *aws.Request, output *UploadPartCopyOutput) {
+	op := &aws.Operation{
+		Name:       opUploadPartCopy,
+		HTTPMethod: "PUT",
+		HTTPPath:   "/{Bucket}/{Key+}",
+	}
+
+	if input == nil {
+		input = &UploadPartCopyInput{}
+	}
+
+	// Allocate the output before building the request so newRequest never
+	// receives a nil output value; req.Data hands the shape back to the caller.
+	output = &UploadPartCopyOutput{}
+	req = c.newRequest(op, input, output)
+	req.Data = output
+	return
+}
+
+// Uploads a part by copying data from an existing object as data source.
+func (c *S3) UploadPartCopy(input *UploadPartCopyInput) (*UploadPartCopyOutput, error) {
+	req, out := c.UploadPartCopyRequest(input)
+	err := req.Send()
+	return out, err
+}
+
+// AbortMultipartUploadInput contains the parameters for the AbortMultipartUpload
+// operation.
+type AbortMultipartUploadInput struct {
+	// Name of the bucket; marshaled into the request URI (required).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	// Key of the object; marshaled into the request URI (required).
+	Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+	// Confirms that the requester knows that she or he will be charged for the
+	// request. Bucket owners need not specify this parameter in their requests.
+	// Documentation on downloading objects from requester pays buckets can be found
+	// at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+	RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+	// ID of the multipart upload to abort; sent as the "uploadId" query string
+	// parameter (required).
+	UploadID *string `location:"querystring" locationName:"uploadId" type:"string" required:"true"`
+
+	metadataAbortMultipartUploadInput `json:"-" xml:"-"`
+}
+
+// metadataAbortMultipartUploadInput carries the SDK serialization traits of the
+// enclosing shape.
+type metadataAbortMultipartUploadInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// AbortMultipartUploadOutput is the response shape of the AbortMultipartUpload
+// operation.
+type AbortMultipartUploadOutput struct {
+	// If present, indicates that the requester was successfully charged for the
+	// request.
+	RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+	metadataAbortMultipartUploadOutput `json:"-" xml:"-"`
+}
+
+// metadataAbortMultipartUploadOutput carries the SDK serialization traits of the
+// enclosing shape.
+type metadataAbortMultipartUploadOutput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// AccessControlPolicy describes an access control list together with its owner.
+type AccessControlPolicy struct {
+	// A list of grants.
+	Grants []*Grant `locationName:"AccessControlList" locationNameList:"Grant" type:"list"`
+
+	// Owner of the resource the policy applies to.
+	Owner *Owner `type:"structure"`
+
+	metadataAccessControlPolicy `json:"-" xml:"-"`
+}
+
+// metadataAccessControlPolicy carries the SDK serialization traits of the
+// enclosing shape.
+type metadataAccessControlPolicy struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// Bucket describes a single S3 bucket (name and creation time).
+type Bucket struct {
+	// Date the bucket was created.
+	CreationDate *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+	// The name of the bucket.
+	Name *string `type:"string"`
+
+	metadataBucket `json:"-" xml:"-"`
+}
+
+// metadataBucket carries the SDK serialization traits of the enclosing shape.
+type metadataBucket struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// BucketLoggingStatus wraps the logging configuration of a bucket.
+type BucketLoggingStatus struct {
+	// Logging settings; nil presumably means logging is disabled — TODO confirm
+	// against the service documentation.
+	LoggingEnabled *LoggingEnabled `type:"structure"`
+
+	metadataBucketLoggingStatus `json:"-" xml:"-"`
+}
+
+// metadataBucketLoggingStatus carries the SDK serialization traits of the
+// enclosing shape.
+type metadataBucketLoggingStatus struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CORSConfiguration holds the set of CORS rules applied to a bucket.
+type CORSConfiguration struct {
+	// The CORS rules; serialized as repeated flattened <CORSRule> elements.
+	CORSRules []*CORSRule `locationName:"CORSRule" type:"list" flattened:"true"`
+
+	metadataCORSConfiguration `json:"-" xml:"-"`
+}
+
+// metadataCORSConfiguration carries the SDK serialization traits of the
+// enclosing shape.
+type metadataCORSConfiguration struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CORSRule describes one cross-origin resource sharing rule.
+type CORSRule struct {
+	// Specifies which headers are allowed in a pre-flight OPTIONS request.
+	AllowedHeaders []*string `locationName:"AllowedHeader" type:"list" flattened:"true"`
+
+	// Identifies HTTP methods that the domain/origin specified in the rule is allowed
+	// to execute.
+	AllowedMethods []*string `locationName:"AllowedMethod" type:"list" flattened:"true"`
+
+	// One or more origins you want customers to be able to access the bucket from.
+	AllowedOrigins []*string `locationName:"AllowedOrigin" type:"list" flattened:"true"`
+
+	// One or more headers in the response that you want customers to be able to
+	// access from their applications (for example, from a JavaScript XMLHttpRequest
+	// object).
+	ExposeHeaders []*string `locationName:"ExposeHeader" type:"list" flattened:"true"`
+
+	// The time in seconds that your browser is to cache the preflight response
+	// for the specified resource.
+	MaxAgeSeconds *int64 `type:"integer"`
+
+	metadataCORSRule `json:"-" xml:"-"`
+}
+
+// metadataCORSRule carries the SDK serialization traits of the enclosing shape.
+type metadataCORSRule struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CloudFunctionConfiguration describes a cloud-function notification target
+// for bucket events.
+type CloudFunctionConfiguration struct {
+	// Identifier of the cloud function to invoke — presumably an ARN; TODO confirm.
+	CloudFunction *string `type:"string"`
+
+	// Bucket event for which to send notifications.
+	Event *string `type:"string"`
+
+	// Bucket events for which to send notifications; serialized as repeated
+	// flattened <Event> elements.
+	Events []*string `locationName:"Event" type:"list" flattened:"true"`
+
+	// Optional unique identifier for configurations in a notification configuration.
+	// If you don't provide one, Amazon S3 will assign an ID.
+	ID *string `locationName:"Id" type:"string"`
+
+	// Role used for the invocation — presumably an IAM role ARN; TODO confirm.
+	InvocationRole *string `type:"string"`
+
+	metadataCloudFunctionConfiguration `json:"-" xml:"-"`
+}
+
+// metadataCloudFunctionConfiguration carries the SDK serialization traits of
+// the enclosing shape.
+type metadataCloudFunctionConfiguration struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CommonPrefix is a single common key prefix returned by list operations.
+type CommonPrefix struct {
+	// The shared key prefix.
+	Prefix *string `type:"string"`
+
+	metadataCommonPrefix `json:"-" xml:"-"`
+}
+
+// metadataCommonPrefix carries the SDK serialization traits of the enclosing
+// shape.
+type metadataCommonPrefix struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CompleteMultipartUploadInput contains the parameters for the
+// CompleteMultipartUpload operation.
+type CompleteMultipartUploadInput struct {
+	// Name of the bucket; marshaled into the request URI (required).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	// Key of the object; marshaled into the request URI (required).
+	Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+	// The list of completed parts; serves as the request payload (see the
+	// payload trait on the metadata struct below).
+	MultipartUpload *CompletedMultipartUpload `locationName:"CompleteMultipartUpload" type:"structure"`
+
+	// Confirms that the requester knows that she or he will be charged for the
+	// request. Bucket owners need not specify this parameter in their requests.
+	// Documentation on downloading objects from requester pays buckets can be found
+	// at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+	RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+	// ID of the multipart upload; sent as the "uploadId" query string parameter
+	// (required).
+	UploadID *string `location:"querystring" locationName:"uploadId" type:"string" required:"true"`
+
+	metadataCompleteMultipartUploadInput `json:"-" xml:"-"`
+}
+
+// metadataCompleteMultipartUploadInput carries the SDK serialization traits of
+// the enclosing shape; MultipartUpload is marked as the request payload.
+type metadataCompleteMultipartUploadInput struct {
+	SDKShapeTraits bool `type:"structure" payload:"MultipartUpload"`
+}
+
+// CompleteMultipartUploadOutput is the response shape of the
+// CompleteMultipartUpload operation.
+type CompleteMultipartUploadOutput struct {
+	// Name of the bucket the object was assembled in.
+	Bucket *string `type:"string"`
+
+	// Entity tag of the object.
+	ETag *string `type:"string"`
+
+	// If the object expiration is configured, this will contain the expiration
+	// date (expiry-date) and rule ID (rule-id). The value of rule-id is URL encoded.
+	Expiration *string `location:"header" locationName:"x-amz-expiration" type:"string"`
+
+	// Key of the assembled object.
+	Key *string `type:"string"`
+
+	// Location of the assembled object — presumably its URL; TODO confirm.
+	Location *string `type:"string"`
+
+	// If present, indicates that the requester was successfully charged for the
+	// request.
+	RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+	// If present, specifies the ID of the AWS Key Management Service (KMS) master
+	// encryption key that was used for the object.
+	SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+	// The Server-side encryption algorithm used when storing this object in S3
+	// (e.g., AES256, aws:kms).
+	ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+	// Version of the object.
+	VersionID *string `location:"header" locationName:"x-amz-version-id" type:"string"`
+
+	metadataCompleteMultipartUploadOutput `json:"-" xml:"-"`
+}
+
+// metadataCompleteMultipartUploadOutput carries the SDK serialization traits of
+// the enclosing shape.
+type metadataCompleteMultipartUploadOutput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CompletedMultipartUpload is the list of parts sent when completing a
+// multipart upload.
+type CompletedMultipartUpload struct {
+	// The completed parts; serialized as repeated flattened <Part> elements.
+	Parts []*CompletedPart `locationName:"Part" type:"list" flattened:"true"`
+
+	metadataCompletedMultipartUpload `json:"-" xml:"-"`
+}
+
+// metadataCompletedMultipartUpload carries the SDK serialization traits of the
+// enclosing shape.
+type metadataCompletedMultipartUpload struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CompletedPart identifies one uploaded part by number and ETag.
+type CompletedPart struct {
+	// Entity tag returned when the part was uploaded.
+	ETag *string `type:"string"`
+
+	// Part number that identifies the part.
+	PartNumber *int64 `type:"integer"`
+
+	metadataCompletedPart `json:"-" xml:"-"`
+}
+
+// metadataCompletedPart carries the SDK serialization traits of the enclosing
+// shape.
+type metadataCompletedPart struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// Condition specifies when a website routing-rule redirect is applied.
+type Condition struct {
+	// The HTTP error code when the redirect is applied. In the event of an error,
+	// if the error code equals this value, then the specified redirect is applied.
+	// Required when parent element Condition is specified and sibling KeyPrefixEquals
+	// is not specified. If both are specified, then both must be true for the redirect
+	// to be applied.
+	HTTPErrorCodeReturnedEquals *string `locationName:"HttpErrorCodeReturnedEquals" type:"string"`
+
+	// The object key name prefix when the redirect is applied. For example, to
+	// redirect requests for ExamplePage.html, the key prefix will be ExamplePage.html.
+	// To redirect request for all pages with the prefix docs/, the key prefix will
+	// be /docs, which identifies all objects in the docs/ folder. Required when
+	// the parent element Condition is specified and sibling HttpErrorCodeReturnedEquals
+	// is not specified. If both conditions are specified, both must be true for
+	// the redirect to be applied.
+	KeyPrefixEquals *string `type:"string"`
+
+	metadataCondition `json:"-" xml:"-"`
+}
+
+// metadataCondition carries the SDK serialization traits of the enclosing shape.
+type metadataCondition struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CopyObjectInput contains the parameters for the CopyObject operation.
+type CopyObjectInput struct {
+	// The canned ACL to apply to the object.
+	ACL *string `location:"header" locationName:"x-amz-acl" type:"string"`
+
+	// Name of the destination bucket; marshaled into the request URI (required).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	// Specifies caching behavior along the request/reply chain.
+	CacheControl *string `location:"header" locationName:"Cache-Control" type:"string"`
+
+	// Specifies presentational information for the object.
+	ContentDisposition *string `location:"header" locationName:"Content-Disposition" type:"string"`
+
+	// Specifies what content encodings have been applied to the object and thus
+	// what decoding mechanisms must be applied to obtain the media-type referenced
+	// by the Content-Type header field.
+	ContentEncoding *string `location:"header" locationName:"Content-Encoding" type:"string"`
+
+	// The language the content is in.
+	ContentLanguage *string `location:"header" locationName:"Content-Language" type:"string"`
+
+	// A standard MIME type describing the format of the object data.
+	ContentType *string `location:"header" locationName:"Content-Type" type:"string"`
+
+	// The name of the source bucket and key name of the source object, separated
+	// by a slash (/). Must be URL-encoded.
+	CopySource *string `location:"header" locationName:"x-amz-copy-source" type:"string" required:"true"`
+
+	// Copies the object if its entity tag (ETag) matches the specified tag.
+	CopySourceIfMatch *string `location:"header" locationName:"x-amz-copy-source-if-match" type:"string"`
+
+	// Copies the object if it has been modified since the specified time.
+	CopySourceIfModifiedSince *time.Time `location:"header" locationName:"x-amz-copy-source-if-modified-since" type:"timestamp" timestampFormat:"rfc822"`
+
+	// Copies the object if its entity tag (ETag) is different than the specified
+	// ETag.
+	CopySourceIfNoneMatch *string `location:"header" locationName:"x-amz-copy-source-if-none-match" type:"string"`
+
+	// Copies the object if it hasn't been modified since the specified time.
+	CopySourceIfUnmodifiedSince *time.Time `location:"header" locationName:"x-amz-copy-source-if-unmodified-since" type:"timestamp" timestampFormat:"rfc822"`
+
+	// Specifies the algorithm to use when decrypting the source object (e.g., AES256).
+	CopySourceSSECustomerAlgorithm *string `location:"header" locationName:"x-amz-copy-source-server-side-encryption-customer-algorithm" type:"string"`
+
+	// Specifies the customer-provided encryption key for Amazon S3 to use to decrypt
+	// the source object. The encryption key provided in this header must be one
+	// that was used when the source object was created.
+	CopySourceSSECustomerKey *string `location:"header" locationName:"x-amz-copy-source-server-side-encryption-customer-key" type:"string"`
+
+	// Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+	// Amazon S3 uses this header for a message integrity check to ensure the encryption
+	// key was transmitted without error.
+	CopySourceSSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-copy-source-server-side-encryption-customer-key-MD5" type:"string"`
+
+	// The date and time at which the object is no longer cacheable.
+	Expires *time.Time `location:"header" locationName:"Expires" type:"timestamp" timestampFormat:"rfc822"`
+
+	// Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.
+	GrantFullControl *string `location:"header" locationName:"x-amz-grant-full-control" type:"string"`
+
+	// Allows grantee to read the object data and its metadata.
+	GrantRead *string `location:"header" locationName:"x-amz-grant-read" type:"string"`
+
+	// Allows grantee to read the object ACL.
+	GrantReadACP *string `location:"header" locationName:"x-amz-grant-read-acp" type:"string"`
+
+	// Allows grantee to write the ACL for the applicable object.
+	GrantWriteACP *string `location:"header" locationName:"x-amz-grant-write-acp" type:"string"`
+
+	// Key of the destination object; marshaled into the request URI (required).
+	Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+	// A map of metadata to store with the object in S3.
+	Metadata map[string]*string `location:"headers" locationName:"x-amz-meta-" type:"map"`
+
+	// Specifies whether the metadata is copied from the source object or replaced
+	// with metadata provided in the request.
+	MetadataDirective *string `location:"header" locationName:"x-amz-metadata-directive" type:"string"`
+
+	// Confirms that the requester knows that she or he will be charged for the
+	// request. Bucket owners need not specify this parameter in their requests.
+	// Documentation on downloading objects from requester pays buckets can be found
+	// at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+	RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+	// Specifies the algorithm to use to when encrypting the object (e.g., AES256,
+	// aws:kms).
+	SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+	// Specifies the customer-provided encryption key for Amazon S3 to use in encrypting
+	// data. This value is used to store the object and then it is discarded; Amazon
+	// does not store the encryption key. The key must be appropriate for use with
+	// the algorithm specified in the x-amz-server-side-encryption-customer-algorithm
+	// header.
+	SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"`
+
+	// Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+	// Amazon S3 uses this header for a message integrity check to ensure the encryption
+	// key was transmitted without error.
+	SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+	// Specifies the AWS KMS key ID to use for object encryption. All GET and PUT
+	// requests for an object protected by AWS KMS will fail if not made via SSL
+	// or using SigV4. Documentation on configuring any of the officially supported
+	// AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version
+	SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+	// The Server-side encryption algorithm used when storing this object in S3
+	// (e.g., AES256, aws:kms).
+	ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+	// The type of storage to use for the object. Defaults to 'STANDARD'.
+	StorageClass *string `location:"header" locationName:"x-amz-storage-class" type:"string"`
+
+	// If the bucket is configured as a website, redirects requests for this object
+	// to another object in the same bucket or to an external URL. Amazon S3 stores
+	// the value of this header in the object metadata.
+	WebsiteRedirectLocation *string `location:"header" locationName:"x-amz-website-redirect-location" type:"string"`
+
+	metadataCopyObjectInput `json:"-" xml:"-"`
+}
+
+// metadataCopyObjectInput carries the SDK serialization traits of the enclosing
+// shape.
+type metadataCopyObjectInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CopyObjectOutput is the response shape of the CopyObject operation.
+type CopyObjectOutput struct {
+	// Result of the copy; serves as the response payload (see the payload trait
+	// on the metadata struct below).
+	CopyObjectResult *CopyObjectResult `type:"structure"`
+
+	// Version ID of the source object.
+	CopySourceVersionID *string `location:"header" locationName:"x-amz-copy-source-version-id" type:"string"`
+
+	// If the object expiration is configured, the response includes this header.
+	Expiration *string `location:"header" locationName:"x-amz-expiration" type:"string"`
+
+	// If present, indicates that the requester was successfully charged for the
+	// request.
+	RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+	// If server-side encryption with a customer-provided encryption key was requested,
+	// the response will include this header confirming the encryption algorithm
+	// used.
+	SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+	// If server-side encryption with a customer-provided encryption key was requested,
+	// the response will include this header to provide round trip message integrity
+	// verification of the customer-provided encryption key.
+	SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+	// If present, specifies the ID of the AWS Key Management Service (KMS) master
+	// encryption key that was used for the object.
+	SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+	// The Server-side encryption algorithm used when storing this object in S3
+	// (e.g., AES256, aws:kms).
+	ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+	metadataCopyObjectOutput `json:"-" xml:"-"`
+}
+
+// metadataCopyObjectOutput carries the SDK serialization traits of the enclosing
+// shape; CopyObjectResult is marked as the response payload.
+type metadataCopyObjectOutput struct {
+	SDKShapeTraits bool `type:"structure" payload:"CopyObjectResult"`
+}
+
+// CopyObjectResult reports the ETag and modification time of the new copy.
+type CopyObjectResult struct {
+	// Entity tag of the new object.
+	ETag *string `type:"string"`
+
+	// Time the object was last modified.
+	LastModified *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+	metadataCopyObjectResult `json:"-" xml:"-"`
+}
+
+// metadataCopyObjectResult carries the SDK serialization traits of the enclosing
+// shape.
+type metadataCopyObjectResult struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// CopyPartResult reports the ETag and upload time of a part created by
+// UploadPartCopy.
+type CopyPartResult struct {
+	// Entity tag of the object.
+	ETag *string `type:"string"`
+
+	// Date and time at which the object was uploaded.
+	LastModified *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+	metadataCopyPartResult `json:"-" xml:"-"`
+}
+
+// metadataCopyPartResult carries the SDK serialization traits of the enclosing
+// shape.
+type metadataCopyPartResult struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type CreateBucketConfiguration struct {
+ // Specifies the region where the bucket will be created. If you don't specify
+ // a region, the bucket will be created in US Standard.
+ LocationConstraint *string `type:"string"`
+
+ metadataCreateBucketConfiguration `json:"-" xml:"-"`
+}
+
+type metadataCreateBucketConfiguration struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type CreateBucketInput struct {
+ // The canned ACL to apply to the bucket.
+ ACL *string `location:"header" locationName:"x-amz-acl" type:"string"`
+
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ CreateBucketConfiguration *CreateBucketConfiguration `locationName:"CreateBucketConfiguration" type:"structure"`
+
+ // Allows grantee the read, write, read ACP, and write ACP permissions on the
+ // bucket.
+ GrantFullControl *string `location:"header" locationName:"x-amz-grant-full-control" type:"string"`
+
+ // Allows grantee to list the objects in the bucket.
+ GrantRead *string `location:"header" locationName:"x-amz-grant-read" type:"string"`
+
+ // Allows grantee to read the bucket ACL.
+ GrantReadACP *string `location:"header" locationName:"x-amz-grant-read-acp" type:"string"`
+
+ // Allows grantee to create, overwrite, and delete any object in the bucket.
+ GrantWrite *string `location:"header" locationName:"x-amz-grant-write" type:"string"`
+
+ // Allows grantee to write the ACL for the applicable bucket.
+ GrantWriteACP *string `location:"header" locationName:"x-amz-grant-write-acp" type:"string"`
+
+ metadataCreateBucketInput `json:"-" xml:"-"`
+}
+
+type metadataCreateBucketInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"CreateBucketConfiguration"`
+}
+
+type CreateBucketOutput struct {
+ Location *string `location:"header" locationName:"Location" type:"string"`
+
+ metadataCreateBucketOutput `json:"-" xml:"-"`
+}
+
+type metadataCreateBucketOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type CreateMultipartUploadInput struct {
+ // The canned ACL to apply to the object.
+ ACL *string `location:"header" locationName:"x-amz-acl" type:"string"`
+
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // Specifies caching behavior along the request/reply chain.
+ CacheControl *string `location:"header" locationName:"Cache-Control" type:"string"`
+
+ // Specifies presentational information for the object.
+ ContentDisposition *string `location:"header" locationName:"Content-Disposition" type:"string"`
+
+ // Specifies what content encodings have been applied to the object and thus
+ // what decoding mechanisms must be applied to obtain the media-type referenced
+ // by the Content-Type header field.
+ ContentEncoding *string `location:"header" locationName:"Content-Encoding" type:"string"`
+
+ // The language the content is in.
+ ContentLanguage *string `location:"header" locationName:"Content-Language" type:"string"`
+
+ // A standard MIME type describing the format of the object data.
+ ContentType *string `location:"header" locationName:"Content-Type" type:"string"`
+
+ // The date and time at which the object is no longer cacheable.
+ Expires *time.Time `location:"header" locationName:"Expires" type:"timestamp" timestampFormat:"rfc822"`
+
+ // Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.
+ GrantFullControl *string `location:"header" locationName:"x-amz-grant-full-control" type:"string"`
+
+ // Allows grantee to read the object data and its metadata.
+ GrantRead *string `location:"header" locationName:"x-amz-grant-read" type:"string"`
+
+ // Allows grantee to read the object ACL.
+ GrantReadACP *string `location:"header" locationName:"x-amz-grant-read-acp" type:"string"`
+
+ // Allows grantee to write the ACL for the applicable object.
+ GrantWriteACP *string `location:"header" locationName:"x-amz-grant-write-acp" type:"string"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // A map of metadata to store with the object in S3.
+ Metadata map[string]*string `location:"headers" locationName:"x-amz-meta-" type:"map"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+	// Specifies the algorithm to use when encrypting the object (e.g., AES256,
+ // aws:kms).
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // Specifies the customer-provided encryption key for Amazon S3 to use in encrypting
+ // data. This value is used to store the object and then it is discarded; Amazon
+ // does not store the encryption key. The key must be appropriate for use with
+ // the algorithm specified in the x-amz-server-side-encryption-customer-algorithm
+ // header.
+ SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"`
+
+ // Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+ // Amazon S3 uses this header for a message integrity check to ensure the encryption
+ // key was transmitted without error.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // Specifies the AWS KMS key ID to use for object encryption. All GET and PUT
+ // requests for an object protected by AWS KMS will fail if not made via SSL
+ // or using SigV4. Documentation on configuring any of the officially supported
+ // AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version
+ SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+ // The Server-side encryption algorithm used when storing this object in S3
+ // (e.g., AES256, aws:kms).
+ ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+ // The type of storage to use for the object. Defaults to 'STANDARD'.
+ StorageClass *string `location:"header" locationName:"x-amz-storage-class" type:"string"`
+
+ // If the bucket is configured as a website, redirects requests for this object
+ // to another object in the same bucket or to an external URL. Amazon S3 stores
+ // the value of this header in the object metadata.
+ WebsiteRedirectLocation *string `location:"header" locationName:"x-amz-website-redirect-location" type:"string"`
+
+ metadataCreateMultipartUploadInput `json:"-" xml:"-"`
+}
+
+type metadataCreateMultipartUploadInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type CreateMultipartUploadOutput struct {
+ // Name of the bucket to which the multipart upload was initiated.
+ Bucket *string `locationName:"Bucket" type:"string"`
+
+ // Object key for which the multipart upload was initiated.
+ Key *string `type:"string"`
+
+ // If present, indicates that the requester was successfully charged for the
+ // request.
+ RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+ // If server-side encryption with a customer-provided encryption key was requested,
+ // the response will include this header confirming the encryption algorithm
+ // used.
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // If server-side encryption with a customer-provided encryption key was requested,
+ // the response will include this header to provide round trip message integrity
+ // verification of the customer-provided encryption key.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // If present, specifies the ID of the AWS Key Management Service (KMS) master
+ // encryption key that was used for the object.
+ SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+ // The Server-side encryption algorithm used when storing this object in S3
+ // (e.g., AES256, aws:kms).
+ ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+ // ID for the initiated multipart upload.
+ UploadID *string `locationName:"UploadId" type:"string"`
+
+ metadataCreateMultipartUploadOutput `json:"-" xml:"-"`
+}
+
+type metadataCreateMultipartUploadOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Delete struct {
+ Objects []*ObjectIdentifier `locationName:"Object" type:"list" flattened:"true" required:"true"`
+
+ // Element to enable quiet mode for the request. When you add this element,
+ // you must set its value to true.
+ Quiet *bool `type:"boolean"`
+
+ metadataDelete `json:"-" xml:"-"`
+}
+
+type metadataDelete struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketCORSInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataDeleteBucketCORSInput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketCORSInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketCORSOutput struct {
+ metadataDeleteBucketCORSOutput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketCORSOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataDeleteBucketInput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketLifecycleInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataDeleteBucketLifecycleInput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketLifecycleInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketLifecycleOutput struct {
+ metadataDeleteBucketLifecycleOutput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketLifecycleOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketOutput struct {
+ metadataDeleteBucketOutput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketPolicyInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataDeleteBucketPolicyInput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketPolicyInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketPolicyOutput struct {
+ metadataDeleteBucketPolicyOutput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketPolicyOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketReplicationInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataDeleteBucketReplicationInput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketReplicationInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketReplicationOutput struct {
+ metadataDeleteBucketReplicationOutput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketReplicationOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketTaggingInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataDeleteBucketTaggingInput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketTaggingInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketTaggingOutput struct {
+ metadataDeleteBucketTaggingOutput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketTaggingOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketWebsiteInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataDeleteBucketWebsiteInput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketWebsiteInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteBucketWebsiteOutput struct {
+ metadataDeleteBucketWebsiteOutput `json:"-" xml:"-"`
+}
+
+type metadataDeleteBucketWebsiteOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteMarkerEntry struct {
+ // Specifies whether the object is (true) or is not (false) the latest version
+ // of an object.
+ IsLatest *bool `type:"boolean"`
+
+ // The object key.
+ Key *string `type:"string"`
+
+ // Date and time the object was last modified.
+ LastModified *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+ Owner *Owner `type:"structure"`
+
+ // Version ID of an object.
+ VersionID *string `locationName:"VersionId" type:"string"`
+
+ metadataDeleteMarkerEntry `json:"-" xml:"-"`
+}
+
+type metadataDeleteMarkerEntry struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteObjectInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // The concatenation of the authentication device's serial number, a space,
+ // and the value that is displayed on your authentication device.
+ MFA *string `location:"header" locationName:"x-amz-mfa" type:"string"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ // VersionId used to reference a specific version of the object.
+ VersionID *string `location:"querystring" locationName:"versionId" type:"string"`
+
+ metadataDeleteObjectInput `json:"-" xml:"-"`
+}
+
+type metadataDeleteObjectInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteObjectOutput struct {
+ // Specifies whether the versioned object that was permanently deleted was (true)
+ // or was not (false) a delete marker.
+ DeleteMarker *bool `location:"header" locationName:"x-amz-delete-marker" type:"boolean"`
+
+ // If present, indicates that the requester was successfully charged for the
+ // request.
+ RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+ // Returns the version ID of the delete marker created as a result of the DELETE
+ // operation.
+ VersionID *string `location:"header" locationName:"x-amz-version-id" type:"string"`
+
+ metadataDeleteObjectOutput `json:"-" xml:"-"`
+}
+
+type metadataDeleteObjectOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeleteObjectsInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ Delete *Delete `locationName:"Delete" type:"structure" required:"true"`
+
+ // The concatenation of the authentication device's serial number, a space,
+ // and the value that is displayed on your authentication device.
+ MFA *string `location:"header" locationName:"x-amz-mfa" type:"string"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ metadataDeleteObjectsInput `json:"-" xml:"-"`
+}
+
+type metadataDeleteObjectsInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"Delete"`
+}
+
+type DeleteObjectsOutput struct {
+ Deleted []*DeletedObject `type:"list" flattened:"true"`
+
+ Errors []*Error `locationName:"Error" type:"list" flattened:"true"`
+
+ // If present, indicates that the requester was successfully charged for the
+ // request.
+ RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+ metadataDeleteObjectsOutput `json:"-" xml:"-"`
+}
+
+type metadataDeleteObjectsOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type DeletedObject struct {
+ DeleteMarker *bool `type:"boolean"`
+
+ DeleteMarkerVersionID *string `locationName:"DeleteMarkerVersionId" type:"string"`
+
+ Key *string `type:"string"`
+
+ VersionID *string `locationName:"VersionId" type:"string"`
+
+ metadataDeletedObject `json:"-" xml:"-"`
+}
+
+type metadataDeletedObject struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Destination struct {
+ // Amazon resource name (ARN) of the bucket where you want Amazon S3 to store
+ // replicas of the object identified by the rule.
+ Bucket *string `type:"string" required:"true"`
+
+ metadataDestination `json:"-" xml:"-"`
+}
+
+type metadataDestination struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Error struct {
+ Code *string `type:"string"`
+
+ Key *string `type:"string"`
+
+ Message *string `type:"string"`
+
+ VersionID *string `locationName:"VersionId" type:"string"`
+
+ metadataError `json:"-" xml:"-"`
+}
+
+type metadataError struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type ErrorDocument struct {
+ // The object key name to use when a 4XX class error occurs.
+ Key *string `type:"string" required:"true"`
+
+ metadataErrorDocument `json:"-" xml:"-"`
+}
+
+type metadataErrorDocument struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketACLInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketACLInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketACLInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketACLOutput struct {
+ // A list of grants.
+ Grants []*Grant `locationName:"AccessControlList" locationNameList:"Grant" type:"list"`
+
+ Owner *Owner `type:"structure"`
+
+ metadataGetBucketACLOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketACLOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketCORSInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketCORSInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketCORSInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketCORSOutput struct {
+ CORSRules []*CORSRule `locationName:"CORSRule" type:"list" flattened:"true"`
+
+ metadataGetBucketCORSOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketCORSOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketLifecycleInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketLifecycleInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketLifecycleInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketLifecycleOutput struct {
+ Rules []*LifecycleRule `locationName:"Rule" type:"list" flattened:"true"`
+
+ metadataGetBucketLifecycleOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketLifecycleOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketLocationInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketLocationInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketLocationInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketLocationOutput struct {
+ LocationConstraint *string `type:"string"`
+
+ metadataGetBucketLocationOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketLocationOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketLoggingInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketLoggingInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketLoggingInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketLoggingOutput struct {
+ LoggingEnabled *LoggingEnabled `type:"structure"`
+
+ metadataGetBucketLoggingOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketLoggingOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketNotificationConfigurationRequest struct {
+	// Name of the bucket to get the notification configuration for.
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketNotificationConfigurationRequest `json:"-" xml:"-"`
+}
+
+type metadataGetBucketNotificationConfigurationRequest struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketPolicyInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketPolicyInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketPolicyInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketPolicyOutput struct {
+ // The bucket policy as a JSON document.
+ Policy *string `type:"string"`
+
+ metadataGetBucketPolicyOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketPolicyOutput struct {
+ SDKShapeTraits bool `type:"structure" payload:"Policy"`
+}
+
+type GetBucketReplicationInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketReplicationInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketReplicationInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketReplicationOutput struct {
+ // Container for replication rules. You can add as many as 1,000 rules. Total
+ // replication configuration size can be up to 2 MB.
+ ReplicationConfiguration *ReplicationConfiguration `type:"structure"`
+
+ metadataGetBucketReplicationOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketReplicationOutput struct {
+ SDKShapeTraits bool `type:"structure" payload:"ReplicationConfiguration"`
+}
+
+type GetBucketRequestPaymentInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketRequestPaymentInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketRequestPaymentInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketRequestPaymentOutput struct {
+ // Specifies who pays for the download and request fees.
+ Payer *string `type:"string"`
+
+ metadataGetBucketRequestPaymentOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketRequestPaymentOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketTaggingInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketTaggingInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketTaggingInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketTaggingOutput struct {
+ TagSet []*Tag `locationNameList:"Tag" type:"list" required:"true"`
+
+ metadataGetBucketTaggingOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketTaggingOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketVersioningInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketVersioningInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketVersioningInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketVersioningOutput struct {
+ // Specifies whether MFA delete is enabled in the bucket versioning configuration.
+ // This element is only returned if the bucket has been configured with MFA
+ // delete. If the bucket has never been so configured, this element is not returned.
+ MFADelete *string `locationName:"MfaDelete" type:"string"`
+
+ // The versioning state of the bucket.
+ Status *string `type:"string"`
+
+ metadataGetBucketVersioningOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketVersioningOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketWebsiteInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ metadataGetBucketWebsiteInput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketWebsiteInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetBucketWebsiteOutput struct {
+ ErrorDocument *ErrorDocument `type:"structure"`
+
+ IndexDocument *IndexDocument `type:"structure"`
+
+ RedirectAllRequestsTo *RedirectAllRequestsTo `type:"structure"`
+
+ RoutingRules []*RoutingRule `locationNameList:"RoutingRule" type:"list"`
+
+ metadataGetBucketWebsiteOutput `json:"-" xml:"-"`
+}
+
+type metadataGetBucketWebsiteOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetObjectACLInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ // VersionId used to reference a specific version of the object.
+ VersionID *string `location:"querystring" locationName:"versionId" type:"string"`
+
+ metadataGetObjectACLInput `json:"-" xml:"-"`
+}
+
+type metadataGetObjectACLInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetObjectACLOutput struct {
+ // A list of grants.
+ Grants []*Grant `locationName:"AccessControlList" locationNameList:"Grant" type:"list"`
+
+ Owner *Owner `type:"structure"`
+
+ // If present, indicates that the requester was successfully charged for the
+ // request.
+ RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+ metadataGetObjectACLOutput `json:"-" xml:"-"`
+}
+
+type metadataGetObjectACLOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type GetObjectInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // Return the object only if its entity tag (ETag) is the same as the one specified,
+ // otherwise return a 412 (precondition failed).
+ IfMatch *string `location:"header" locationName:"If-Match" type:"string"`
+
+ // Return the object only if it has been modified since the specified time,
+ // otherwise return a 304 (not modified).
+ IfModifiedSince *time.Time `location:"header" locationName:"If-Modified-Since" type:"timestamp" timestampFormat:"rfc822"`
+
+ // Return the object only if its entity tag (ETag) is different from the one
+ // specified, otherwise return a 304 (not modified).
+ IfNoneMatch *string `location:"header" locationName:"If-None-Match" type:"string"`
+
+ // Return the object only if it has not been modified since the specified time,
+ // otherwise return a 412 (precondition failed).
+ IfUnmodifiedSince *time.Time `location:"header" locationName:"If-Unmodified-Since" type:"timestamp" timestampFormat:"rfc822"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // Downloads the specified range bytes of an object. For more information about
+ // the HTTP Range header, go to http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.
+ Range *string `location:"header" locationName:"Range" type:"string"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ // Sets the Cache-Control header of the response.
+ ResponseCacheControl *string `location:"querystring" locationName:"response-cache-control" type:"string"`
+
+ // Sets the Content-Disposition header of the response
+ ResponseContentDisposition *string `location:"querystring" locationName:"response-content-disposition" type:"string"`
+
+ // Sets the Content-Encoding header of the response.
+ ResponseContentEncoding *string `location:"querystring" locationName:"response-content-encoding" type:"string"`
+
+ // Sets the Content-Language header of the response.
+ ResponseContentLanguage *string `location:"querystring" locationName:"response-content-language" type:"string"`
+
+ // Sets the Content-Type header of the response.
+ ResponseContentType *string `location:"querystring" locationName:"response-content-type" type:"string"`
+
+ // Sets the Expires header of the response.
+ ResponseExpires *time.Time `location:"querystring" locationName:"response-expires" type:"timestamp" timestampFormat:"iso8601"`
+
+	// Specifies the algorithm to use when encrypting the object (e.g., AES256,
+ // aws:kms).
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // Specifies the customer-provided encryption key for Amazon S3 to use in encrypting
+ // data. This value is used to store the object and then it is discarded; Amazon
+ // does not store the encryption key. The key must be appropriate for use with
+ // the algorithm specified in the x-amz-server-side-encryption-customer-algorithm
+ // header.
+ SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"`
+
+ // Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+ // Amazon S3 uses this header for a message integrity check to ensure the encryption
+ // key was transmitted without error.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // VersionId used to reference a specific version of the object.
+ VersionID *string `location:"querystring" locationName:"versionId" type:"string"`
+
+ metadataGetObjectInput `json:"-" xml:"-"`
+}
+
+// metadataGetObjectInput carries shape-level traits for GetObjectInput
+// via the SDKShapeTraits tag.
+type metadataGetObjectInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// GetObjectOutput is the output shape of the GetObject operation. Most
+// fields are decoded from response headers (see the location/locationName
+// tags); Body streams the object data itself.
+type GetObjectOutput struct {
+	// Range units the server accepts, taken from the accept-ranges response
+	// header.
+	AcceptRanges *string `location:"header" locationName:"accept-ranges" type:"string"`
+
+	// Object data.
+	Body io.ReadCloser `type:"blob"`
+
+	// Specifies caching behavior along the request/reply chain.
+	CacheControl *string `location:"header" locationName:"Cache-Control" type:"string"`
+
+	// Specifies presentational information for the object.
+	ContentDisposition *string `location:"header" locationName:"Content-Disposition" type:"string"`
+
+	// Specifies what content encodings have been applied to the object and thus
+	// what decoding mechanisms must be applied to obtain the media-type referenced
+	// by the Content-Type header field.
+	ContentEncoding *string `location:"header" locationName:"Content-Encoding" type:"string"`
+
+	// The language the content is in.
+	ContentLanguage *string `location:"header" locationName:"Content-Language" type:"string"`
+
+	// Size of the body in bytes.
+	ContentLength *int64 `location:"header" locationName:"Content-Length" type:"integer"`
+
+	// The portion of the object returned in the response.
+	ContentRange *string `location:"header" locationName:"Content-Range" type:"string"`
+
+	// A standard MIME type describing the format of the object data.
+	ContentType *string `location:"header" locationName:"Content-Type" type:"string"`
+
+	// Specifies whether the object retrieved was (true) or was not (false) a Delete
+	// Marker. If false, this response header does not appear in the response.
+	DeleteMarker *bool `location:"header" locationName:"x-amz-delete-marker" type:"boolean"`
+
+	// An ETag is an opaque identifier assigned by a web server to a specific version
+	// of a resource found at a URL
+	ETag *string `location:"header" locationName:"ETag" type:"string"`
+
+	// If the object expiration is configured (see PUT Bucket lifecycle), the response
+	// includes this header. It includes the expiry-date and rule-id key value pairs
+	// providing object expiration information. The value of the rule-id is URL
+	// encoded.
+	Expiration *string `location:"header" locationName:"x-amz-expiration" type:"string"`
+
+	// The date and time at which the object is no longer cacheable.
+	Expires *time.Time `location:"header" locationName:"Expires" type:"timestamp" timestampFormat:"rfc822"`
+
+	// Last modified date of the object
+	LastModified *time.Time `location:"header" locationName:"Last-Modified" type:"timestamp" timestampFormat:"rfc822"`
+
+	// A map of metadata to store with the object in S3.
+	Metadata map[string]*string `location:"headers" locationName:"x-amz-meta-" type:"map"`
+
+	// This is set to the number of metadata entries not returned in x-amz-meta
+	// headers. This can happen if you create metadata using an API like SOAP that
+	// supports more flexible metadata than the REST API. For example, using SOAP,
+	// you can create metadata whose values are not legal HTTP headers.
+	MissingMeta *int64 `location:"header" locationName:"x-amz-missing-meta" type:"integer"`
+
+	// Replication status of the object, taken from the x-amz-replication-status
+	// response header.
+	ReplicationStatus *string `location:"header" locationName:"x-amz-replication-status" type:"string"`
+
+	// If present, indicates that the requester was successfully charged for the
+	// request.
+	RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+	// Provides information about object restoration operation and expiration time
+	// of the restored object copy.
+	Restore *string `location:"header" locationName:"x-amz-restore" type:"string"`
+
+	// If server-side encryption with a customer-provided encryption key was requested,
+	// the response will include this header confirming the encryption algorithm
+	// used.
+	SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+	// If server-side encryption with a customer-provided encryption key was requested,
+	// the response will include this header to provide round trip message integrity
+	// verification of the customer-provided encryption key.
+	SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+	// If present, specifies the ID of the AWS Key Management Service (KMS) master
+	// encryption key that was used for the object.
+	SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+	// The Server-side encryption algorithm used when storing this object in S3
+	// (e.g., AES256, aws:kms).
+	ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+	// Version of the object.
+	VersionID *string `location:"header" locationName:"x-amz-version-id" type:"string"`
+
+	// If the bucket is configured as a website, redirects requests for this object
+	// to another object in the same bucket or to an external URL. Amazon S3 stores
+	// the value of this header in the object metadata.
+	WebsiteRedirectLocation *string `location:"header" locationName:"x-amz-website-redirect-location" type:"string"`
+
+	metadataGetObjectOutput `json:"-" xml:"-"`
+}
+
+// metadataGetObjectOutput marks Body as the payload member of the
+// response shape (payload:"Body").
+type metadataGetObjectOutput struct {
+	SDKShapeTraits bool `type:"structure" payload:"Body"`
+}
+
+// GetObjectTorrentInput is the input shape of the GetObjectTorrent operation.
+type GetObjectTorrentInput struct {
+	// Name of the bucket containing the object (URI path element).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	// Key of the object for which the torrent file is requested.
+	Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+	// Confirms that the requester knows that she or he will be charged for the
+	// request. Bucket owners need not specify this parameter in their requests.
+	// Documentation on downloading objects from requester pays buckets can be found
+	// at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+	RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+	metadataGetObjectTorrentInput `json:"-" xml:"-"`
+}
+
+// metadataGetObjectTorrentInput carries shape-level traits for
+// GetObjectTorrentInput.
+type metadataGetObjectTorrentInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// GetObjectTorrentOutput is the output shape of the GetObjectTorrent operation.
+type GetObjectTorrentOutput struct {
+	// The torrent file contents.
+	Body io.ReadCloser `type:"blob"`
+
+	// If present, indicates that the requester was successfully charged for the
+	// request.
+	RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+	metadataGetObjectTorrentOutput `json:"-" xml:"-"`
+}
+
+// metadataGetObjectTorrentOutput marks Body as the payload member of the
+// response shape (payload:"Body").
+type metadataGetObjectTorrentOutput struct {
+	SDKShapeTraits bool `type:"structure" payload:"Body"`
+}
+
+// Grant pairs a grantee with the permission granted to it in an access
+// control list.
+type Grant struct {
+	// The grantee the permission applies to.
+	Grantee *Grantee `type:"structure"`
+
+	// Specifies the permission given to the grantee.
+	Permission *string `type:"string"`
+
+	metadataGrant `json:"-" xml:"-"`
+}
+
+// metadataGrant carries shape-level traits for Grant.
+type metadataGrant struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// Grantee identifies who a permission is granted to; exactly which fields
+// are populated depends on the grantee Type (serialized as the xsi:type
+// XML attribute).
+type Grantee struct {
+	// Screen name of the grantee.
+	DisplayName *string `type:"string"`
+
+	// Email address of the grantee.
+	EmailAddress *string `type:"string"`
+
+	// The canonical user ID of the grantee.
+	ID *string `type:"string"`
+
+	// Type of grantee
+	Type *string `locationName:"xsi:type" type:"string" xmlAttribute:"true" required:"true"`
+
+	// URI of the grantee group.
+	URI *string `type:"string"`
+
+	metadataGrantee `json:"-" xml:"-"`
+}
+
+// metadataGrantee declares the xsi XML namespace prefix used by the
+// Type attribute above.
+type metadataGrantee struct {
+	SDKShapeTraits bool `type:"structure" xmlPrefix:"xsi" xmlURI:"http://www.w3.org/2001/XMLSchema-instance"`
+}
+
+// HeadBucketInput is the input shape of the HeadBucket operation.
+type HeadBucketInput struct {
+	// Name of the bucket to check (URI path element).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	metadataHeadBucketInput `json:"-" xml:"-"`
+}
+
+// metadataHeadBucketInput carries shape-level traits for HeadBucketInput.
+type metadataHeadBucketInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// HeadBucketOutput is the (empty) output shape of the HeadBucket operation.
+type HeadBucketOutput struct {
+	metadataHeadBucketOutput `json:"-" xml:"-"`
+}
+
+// metadataHeadBucketOutput carries shape-level traits for HeadBucketOutput.
+type metadataHeadBucketOutput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// HeadObjectInput is the input shape of the HeadObject operation.
+type HeadObjectInput struct {
+	// Name of the bucket containing the object (URI path element).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	// Return the object only if its entity tag (ETag) is the same as the one specified,
+	// otherwise return a 412 (precondition failed).
+	IfMatch *string `location:"header" locationName:"If-Match" type:"string"`
+
+	// Return the object only if it has been modified since the specified time,
+	// otherwise return a 304 (not modified).
+	IfModifiedSince *time.Time `location:"header" locationName:"If-Modified-Since" type:"timestamp" timestampFormat:"rfc822"`
+
+	// Return the object only if its entity tag (ETag) is different from the one
+	// specified, otherwise return a 304 (not modified).
+	IfNoneMatch *string `location:"header" locationName:"If-None-Match" type:"string"`
+
+	// Return the object only if it has not been modified since the specified time,
+	// otherwise return a 412 (precondition failed).
+	IfUnmodifiedSince *time.Time `location:"header" locationName:"If-Unmodified-Since" type:"timestamp" timestampFormat:"rfc822"`
+
+	// Key of the object to query (URI path element).
+	Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+	// Downloads the specified range bytes of an object. For more information about
+	// the HTTP Range header, go to http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.
+	Range *string `location:"header" locationName:"Range" type:"string"`
+
+	// Confirms that the requester knows that she or he will be charged for the
+	// request. Bucket owners need not specify this parameter in their requests.
+	// Documentation on downloading objects from requester pays buckets can be found
+	// at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+	RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+	// Specifies the algorithm to use when encrypting the object (e.g., AES256,
+	// aws:kms).
+	SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+	// Specifies the customer-provided encryption key for Amazon S3 to use in encrypting
+	// data. This value is used to store the object and then it is discarded; Amazon
+	// does not store the encryption key. The key must be appropriate for use with
+	// the algorithm specified in the x-amz-server-side-encryption-customer-algorithm
+	// header.
+	SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"`
+
+	// Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+	// Amazon S3 uses this header for a message integrity check to ensure the encryption
+	// key was transmitted without error.
+	SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+	// VersionId used to reference a specific version of the object.
+	VersionID *string `location:"querystring" locationName:"versionId" type:"string"`
+
+	metadataHeadObjectInput `json:"-" xml:"-"`
+}
+
+// metadataHeadObjectInput carries shape-level traits for HeadObjectInput.
+type metadataHeadObjectInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// HeadObjectOutput is the output shape of the HeadObject operation. All
+// fields are decoded from response headers (see the location/locationName
+// tags); there is no body payload.
+type HeadObjectOutput struct {
+	// Range units the server accepts, taken from the accept-ranges response
+	// header.
+	AcceptRanges *string `location:"header" locationName:"accept-ranges" type:"string"`
+
+	// Specifies caching behavior along the request/reply chain.
+	CacheControl *string `location:"header" locationName:"Cache-Control" type:"string"`
+
+	// Specifies presentational information for the object.
+	ContentDisposition *string `location:"header" locationName:"Content-Disposition" type:"string"`
+
+	// Specifies what content encodings have been applied to the object and thus
+	// what decoding mechanisms must be applied to obtain the media-type referenced
+	// by the Content-Type header field.
+	ContentEncoding *string `location:"header" locationName:"Content-Encoding" type:"string"`
+
+	// The language the content is in.
+	ContentLanguage *string `location:"header" locationName:"Content-Language" type:"string"`
+
+	// Size of the body in bytes.
+	ContentLength *int64 `location:"header" locationName:"Content-Length" type:"integer"`
+
+	// A standard MIME type describing the format of the object data.
+	ContentType *string `location:"header" locationName:"Content-Type" type:"string"`
+
+	// Specifies whether the object retrieved was (true) or was not (false) a Delete
+	// Marker. If false, this response header does not appear in the response.
+	DeleteMarker *bool `location:"header" locationName:"x-amz-delete-marker" type:"boolean"`
+
+	// An ETag is an opaque identifier assigned by a web server to a specific version
+	// of a resource found at a URL
+	ETag *string `location:"header" locationName:"ETag" type:"string"`
+
+	// If the object expiration is configured (see PUT Bucket lifecycle), the response
+	// includes this header. It includes the expiry-date and rule-id key value pairs
+	// providing object expiration information. The value of the rule-id is URL
+	// encoded.
+	Expiration *string `location:"header" locationName:"x-amz-expiration" type:"string"`
+
+	// The date and time at which the object is no longer cacheable.
+	Expires *time.Time `location:"header" locationName:"Expires" type:"timestamp" timestampFormat:"rfc822"`
+
+	// Last modified date of the object
+	LastModified *time.Time `location:"header" locationName:"Last-Modified" type:"timestamp" timestampFormat:"rfc822"`
+
+	// A map of metadata to store with the object in S3.
+	Metadata map[string]*string `location:"headers" locationName:"x-amz-meta-" type:"map"`
+
+	// This is set to the number of metadata entries not returned in x-amz-meta
+	// headers. This can happen if you create metadata using an API like SOAP that
+	// supports more flexible metadata than the REST API. For example, using SOAP,
+	// you can create metadata whose values are not legal HTTP headers.
+	MissingMeta *int64 `location:"header" locationName:"x-amz-missing-meta" type:"integer"`
+
+	// Replication status of the object, taken from the x-amz-replication-status
+	// response header.
+	ReplicationStatus *string `location:"header" locationName:"x-amz-replication-status" type:"string"`
+
+	// If present, indicates that the requester was successfully charged for the
+	// request.
+	RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+	// Provides information about object restoration operation and expiration time
+	// of the restored object copy.
+	Restore *string `location:"header" locationName:"x-amz-restore" type:"string"`
+
+	// If server-side encryption with a customer-provided encryption key was requested,
+	// the response will include this header confirming the encryption algorithm
+	// used.
+	SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+	// If server-side encryption with a customer-provided encryption key was requested,
+	// the response will include this header to provide round trip message integrity
+	// verification of the customer-provided encryption key.
+	SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+	// If present, specifies the ID of the AWS Key Management Service (KMS) master
+	// encryption key that was used for the object.
+	SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+	// The Server-side encryption algorithm used when storing this object in S3
+	// (e.g., AES256, aws:kms).
+	ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+	// Version of the object.
+	VersionID *string `location:"header" locationName:"x-amz-version-id" type:"string"`
+
+	// If the bucket is configured as a website, redirects requests for this object
+	// to another object in the same bucket or to an external URL. Amazon S3 stores
+	// the value of this header in the object metadata.
+	WebsiteRedirectLocation *string `location:"header" locationName:"x-amz-website-redirect-location" type:"string"`
+
+	metadataHeadObjectOutput `json:"-" xml:"-"`
+}
+
+// metadataHeadObjectOutput carries shape-level traits for HeadObjectOutput.
+type metadataHeadObjectOutput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// IndexDocument specifies the index document suffix for a bucket
+// configured as a website.
+type IndexDocument struct {
+	// A suffix that is appended to a request that is for a directory on the website
+	// endpoint (e.g. if the suffix is index.html and you make a request to samplebucket/images/
+	// the data that is returned will be for the object with the key name images/index.html)
+	// The suffix must not be empty and must not include a slash character.
+	Suffix *string `type:"string" required:"true"`
+
+	metadataIndexDocument `json:"-" xml:"-"`
+}
+
+// metadataIndexDocument carries shape-level traits for IndexDocument.
+type metadataIndexDocument struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// Initiator identifies the principal that initiated a multipart upload.
+type Initiator struct {
+	// Name of the Principal.
+	DisplayName *string `type:"string"`
+
+	// If the principal is an AWS account, it provides the Canonical User ID. If
+	// the principal is an IAM User, it provides a user ARN value.
+	ID *string `type:"string"`
+
+	metadataInitiator `json:"-" xml:"-"`
+}
+
+// metadataInitiator carries shape-level traits for Initiator.
+type metadataInitiator struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// Container for specifying the AWS Lambda notification configuration.
+type LambdaFunctionConfiguration struct {
+	// Bucket event types that trigger the notification; serialized as
+	// repeated Event elements (flattened list).
+	Events []*string `locationName:"Event" type:"list" flattened:"true" required:"true"`
+
+	// Optional unique identifier for configurations in a notification configuration.
+	// If you don't provide one, Amazon S3 will assign an ID.
+	ID *string `locationName:"Id" type:"string"`
+
+	// Lambda cloud function ARN that Amazon S3 can invoke when it detects events
+	// of the specified type.
+	LambdaFunctionARN *string `locationName:"CloudFunction" type:"string" required:"true"`
+
+	metadataLambdaFunctionConfiguration `json:"-" xml:"-"`
+}
+
+// metadataLambdaFunctionConfiguration carries shape-level traits for
+// LambdaFunctionConfiguration.
+type metadataLambdaFunctionConfiguration struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// LifecycleConfiguration is a container for a bucket's lifecycle rules.
+type LifecycleConfiguration struct {
+	// The lifecycle rules; serialized as repeated Rule elements (flattened list).
+	Rules []*LifecycleRule `locationName:"Rule" type:"list" flattened:"true" required:"true"`
+
+	metadataLifecycleConfiguration `json:"-" xml:"-"`
+}
+
+// metadataLifecycleConfiguration carries shape-level traits for
+// LifecycleConfiguration.
+type metadataLifecycleConfiguration struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// LifecycleExpiration describes when objects subject to a lifecycle rule
+// expire, either at an absolute date or after a number of days.
+type LifecycleExpiration struct {
+	// Indicates at what date the object is to be moved or deleted. Should be in
+	// GMT ISO 8601 Format.
+	Date *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+	// Indicates the lifetime, in days, of the objects that are subject to the rule.
+	// The value must be a non-zero positive integer.
+	Days *int64 `type:"integer"`
+
+	metadataLifecycleExpiration `json:"-" xml:"-"`
+}
+
+// metadataLifecycleExpiration carries shape-level traits for
+// LifecycleExpiration.
+type metadataLifecycleExpiration struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// LifecycleRule is a single rule within a bucket lifecycle configuration,
+// selecting objects by prefix and describing expiration/transition actions.
+type LifecycleRule struct {
+	// The expiration action applied to objects matching this rule.
+	Expiration *LifecycleExpiration `type:"structure"`
+
+	// Unique identifier for the rule. The value cannot be longer than 255 characters.
+	ID *string `type:"string"`
+
+	// Specifies when noncurrent object versions expire. Upon expiration, Amazon
+	// S3 permanently deletes the noncurrent object versions. You set this lifecycle
+	// configuration action on a bucket that has versioning enabled (or suspended)
+	// to request that Amazon S3 delete noncurrent object versions at a specific
+	// period in the object's lifetime.
+	NoncurrentVersionExpiration *NoncurrentVersionExpiration `type:"structure"`
+
+	// Container for the transition rule that describes when noncurrent objects
+	// transition to the GLACIER storage class. If your bucket is versioning-enabled
+	// (or versioning is suspended), you can set this action to request that Amazon
+	// S3 transition noncurrent object versions to the GLACIER storage class at
+	// a specific period in the object's lifetime.
+	NoncurrentVersionTransition *NoncurrentVersionTransition `type:"structure"`
+
+	// Prefix identifying one or more objects to which the rule applies.
+	Prefix *string `type:"string" required:"true"`
+
+	// If 'Enabled', the rule is currently being applied. If 'Disabled', the rule
+	// is not currently being applied.
+	Status *string `type:"string" required:"true"`
+
+	// The storage-class transition action applied to objects matching this rule.
+	Transition *Transition `type:"structure"`
+
+	metadataLifecycleRule `json:"-" xml:"-"`
+}
+
+// metadataLifecycleRule carries shape-level traits for LifecycleRule.
+type metadataLifecycleRule struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListBucketsInput is the (empty) input shape of the ListBuckets operation.
+type ListBucketsInput struct {
+	metadataListBucketsInput `json:"-" xml:"-"`
+}
+
+// metadataListBucketsInput carries shape-level traits for ListBucketsInput.
+type metadataListBucketsInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListBucketsOutput is the output shape of the ListBuckets operation.
+type ListBucketsOutput struct {
+	// The buckets in the listing; each serialized as a Bucket element.
+	Buckets []*Bucket `locationNameList:"Bucket" type:"list"`
+
+	// The owner of the buckets.
+	Owner *Owner `type:"structure"`
+
+	metadataListBucketsOutput `json:"-" xml:"-"`
+}
+
+// metadataListBucketsOutput carries shape-level traits for ListBucketsOutput.
+type metadataListBucketsOutput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListMultipartUploadsInput is the input shape of the ListMultipartUploads
+// operation; all options are sent as query-string parameters.
+type ListMultipartUploadsInput struct {
+	// Name of the bucket to list uploads for (URI path element).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	// Character you use to group keys.
+	Delimiter *string `location:"querystring" locationName:"delimiter" type:"string"`
+
+	// Requests Amazon S3 to encode the object keys in the response and specifies
+	// the encoding method to use. An object key may contain any Unicode character;
+	// however, XML 1.0 parser cannot parse some characters, such as characters
+	// with an ASCII value from 0 to 10. For characters that are not supported in
+	// XML 1.0, you can add this parameter to request that Amazon S3 encode the
+	// keys in the response.
+	EncodingType *string `location:"querystring" locationName:"encoding-type" type:"string"`
+
+	// Together with upload-id-marker, this parameter specifies the multipart upload
+	// after which listing should begin.
+	KeyMarker *string `location:"querystring" locationName:"key-marker" type:"string"`
+
+	// Sets the maximum number of multipart uploads, from 1 to 1,000, to return
+	// in the response body. 1,000 is the maximum number of uploads that can be
+	// returned in a response.
+	MaxUploads *int64 `location:"querystring" locationName:"max-uploads" type:"integer"`
+
+	// Lists in-progress uploads only for those keys that begin with the specified
+	// prefix.
+	Prefix *string `location:"querystring" locationName:"prefix" type:"string"`
+
+	// Together with key-marker, specifies the multipart upload after which listing
+	// should begin. If key-marker is not specified, the upload-id-marker parameter
+	// is ignored.
+	UploadIDMarker *string `location:"querystring" locationName:"upload-id-marker" type:"string"`
+
+	metadataListMultipartUploadsInput `json:"-" xml:"-"`
+}
+
+// metadataListMultipartUploadsInput carries shape-level traits for
+// ListMultipartUploadsInput.
+type metadataListMultipartUploadsInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListMultipartUploadsOutput is the output shape of the
+// ListMultipartUploads operation.
+type ListMultipartUploadsOutput struct {
+	// Name of the bucket to which the multipart upload was initiated.
+	Bucket *string `type:"string"`
+
+	// Key prefixes grouped by the request delimiter, if one was specified.
+	CommonPrefixes []*CommonPrefix `type:"list" flattened:"true"`
+
+	// The delimiter echoed back from the request, if any.
+	Delimiter *string `type:"string"`
+
+	// Encoding type used by Amazon S3 to encode object keys in the response.
+	EncodingType *string `type:"string"`
+
+	// Indicates whether the returned list of multipart uploads is truncated. A
+	// value of true indicates that the list was truncated. The list can be truncated
+	// if the number of multipart uploads exceeds the limit allowed or specified
+	// by max uploads.
+	IsTruncated *bool `type:"boolean"`
+
+	// The key at or after which the listing began.
+	KeyMarker *string `type:"string"`
+
+	// Maximum number of multipart uploads that could have been included in the
+	// response.
+	MaxUploads *int64 `type:"integer"`
+
+	// When a list is truncated, this element specifies the value that should be
+	// used for the key-marker request parameter in a subsequent request.
+	NextKeyMarker *string `type:"string"`
+
+	// When a list is truncated, this element specifies the value that should be
+	// used for the upload-id-marker request parameter in a subsequent request.
+	NextUploadIDMarker *string `locationName:"NextUploadIdMarker" type:"string"`
+
+	// When a prefix is provided in the request, this field contains the specified
+	// prefix. The result contains only keys starting with the specified prefix.
+	Prefix *string `type:"string"`
+
+	// Upload ID after which listing began.
+	UploadIDMarker *string `locationName:"UploadIdMarker" type:"string"`
+
+	// The in-progress multipart uploads; each serialized as an Upload element.
+	Uploads []*MultipartUpload `locationName:"Upload" type:"list" flattened:"true"`
+
+	metadataListMultipartUploadsOutput `json:"-" xml:"-"`
+}
+
+// metadataListMultipartUploadsOutput carries shape-level traits for
+// ListMultipartUploadsOutput.
+type metadataListMultipartUploadsOutput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListObjectVersionsInput is the input shape of the ListObjectVersions
+// operation; all options are sent as query-string parameters.
+type ListObjectVersionsInput struct {
+	// Name of the bucket to list versions for (URI path element).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	// A delimiter is a character you use to group keys.
+	Delimiter *string `location:"querystring" locationName:"delimiter" type:"string"`
+
+	// Requests Amazon S3 to encode the object keys in the response and specifies
+	// the encoding method to use. An object key may contain any Unicode character;
+	// however, XML 1.0 parser cannot parse some characters, such as characters
+	// with an ASCII value from 0 to 10. For characters that are not supported in
+	// XML 1.0, you can add this parameter to request that Amazon S3 encode the
+	// keys in the response.
+	EncodingType *string `location:"querystring" locationName:"encoding-type" type:"string"`
+
+	// Specifies the key to start with when listing objects in a bucket.
+	KeyMarker *string `location:"querystring" locationName:"key-marker" type:"string"`
+
+	// Sets the maximum number of keys returned in the response. The response might
+	// contain fewer keys but will never contain more.
+	MaxKeys *int64 `location:"querystring" locationName:"max-keys" type:"integer"`
+
+	// Limits the response to keys that begin with the specified prefix.
+	Prefix *string `location:"querystring" locationName:"prefix" type:"string"`
+
+	// Specifies the object version you want to start listing from.
+	VersionIDMarker *string `location:"querystring" locationName:"version-id-marker" type:"string"`
+
+	metadataListObjectVersionsInput `json:"-" xml:"-"`
+}
+
+// metadataListObjectVersionsInput carries shape-level traits for
+// ListObjectVersionsInput.
+type metadataListObjectVersionsInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListObjectVersionsOutput is the output shape of the ListObjectVersions
+// operation.
+type ListObjectVersionsOutput struct {
+	// Key prefixes grouped by the request delimiter, if one was specified.
+	CommonPrefixes []*CommonPrefix `type:"list" flattened:"true"`
+
+	// Delete markers in the listing; each serialized as a DeleteMarker element.
+	DeleteMarkers []*DeleteMarkerEntry `locationName:"DeleteMarker" type:"list" flattened:"true"`
+
+	// The delimiter echoed back from the request, if any.
+	Delimiter *string `type:"string"`
+
+	// Encoding type used by Amazon S3 to encode object keys in the response.
+	EncodingType *string `type:"string"`
+
+	// A flag that indicates whether or not Amazon S3 returned all of the results
+	// that satisfied the search criteria. If your results were truncated, you can
+	// make a follow-up paginated request using the NextKeyMarker and NextVersionIdMarker
+	// response parameters as a starting place in another request to return the
+	// rest of the results.
+	IsTruncated *bool `type:"boolean"`
+
+	// Marks the last Key returned in a truncated response.
+	KeyMarker *string `type:"string"`
+
+	// The max-keys value echoed back from the request.
+	MaxKeys *int64 `type:"integer"`
+
+	// Name of the bucket that was listed.
+	Name *string `type:"string"`
+
+	// Use this value for the key marker request parameter in a subsequent request.
+	NextKeyMarker *string `type:"string"`
+
+	// Use this value for the next version id marker parameter in a subsequent request.
+	NextVersionIDMarker *string `locationName:"NextVersionIdMarker" type:"string"`
+
+	// The prefix echoed back from the request, if any.
+	Prefix *string `type:"string"`
+
+	// The version-id-marker echoed back from the request, if any.
+	VersionIDMarker *string `locationName:"VersionIdMarker" type:"string"`
+
+	// Object versions in the listing; each serialized as a Version element.
+	Versions []*ObjectVersion `locationName:"Version" type:"list" flattened:"true"`
+
+	metadataListObjectVersionsOutput `json:"-" xml:"-"`
+}
+
+// metadataListObjectVersionsOutput carries shape-level traits for
+// ListObjectVersionsOutput.
+type metadataListObjectVersionsOutput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListObjectsInput is the input shape of the ListObjects operation; all
+// options are sent as query-string parameters.
+type ListObjectsInput struct {
+	// Name of the bucket to list (URI path element).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	// A delimiter is a character you use to group keys.
+	Delimiter *string `location:"querystring" locationName:"delimiter" type:"string"`
+
+	// Requests Amazon S3 to encode the object keys in the response and specifies
+	// the encoding method to use. An object key may contain any Unicode character;
+	// however, XML 1.0 parser cannot parse some characters, such as characters
+	// with an ASCII value from 0 to 10. For characters that are not supported in
+	// XML 1.0, you can add this parameter to request that Amazon S3 encode the
+	// keys in the response.
+	EncodingType *string `location:"querystring" locationName:"encoding-type" type:"string"`
+
+	// Specifies the key to start with when listing objects in a bucket.
+	Marker *string `location:"querystring" locationName:"marker" type:"string"`
+
+	// Sets the maximum number of keys returned in the response. The response might
+	// contain fewer keys but will never contain more.
+	MaxKeys *int64 `location:"querystring" locationName:"max-keys" type:"integer"`
+
+	// Limits the response to keys that begin with the specified prefix.
+	Prefix *string `location:"querystring" locationName:"prefix" type:"string"`
+
+	metadataListObjectsInput `json:"-" xml:"-"`
+}
+
+// metadataListObjectsInput carries shape-level traits for ListObjectsInput.
+type metadataListObjectsInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListObjectsOutput is the output shape of the ListObjects operation.
+type ListObjectsOutput struct {
+	// Key prefixes grouped by the request delimiter, if one was specified.
+	CommonPrefixes []*CommonPrefix `type:"list" flattened:"true"`
+
+	// The objects in the listing; each serialized as a Contents element.
+	Contents []*Object `type:"list" flattened:"true"`
+
+	// The delimiter echoed back from the request, if any.
+	Delimiter *string `type:"string"`
+
+	// Encoding type used by Amazon S3 to encode object keys in the response.
+	EncodingType *string `type:"string"`
+
+	// A flag that indicates whether or not Amazon S3 returned all of the results
+	// that satisfied the search criteria.
+	IsTruncated *bool `type:"boolean"`
+
+	// The marker echoed back from the request, if any.
+	Marker *string `type:"string"`
+
+	// The max-keys value echoed back from the request.
+	MaxKeys *int64 `type:"integer"`
+
+	// Name of the bucket that was listed.
+	Name *string `type:"string"`
+
+	// When response is truncated (the IsTruncated element value in the response
+	// is true), you can use the key name in this field as marker in the subsequent
+	// request to get next set of objects. Amazon S3 lists objects in alphabetical
+	// order Note: This element is returned only if you have delimiter request parameter
+	// specified. If response does not include the NextMarker and it is truncated,
+	// you can use the value of the last Key in the response as the marker in the
+	// subsequent request to get the next set of object keys.
+	NextMarker *string `type:"string"`
+
+	// The prefix echoed back from the request, if any.
+	Prefix *string `type:"string"`
+
+	metadataListObjectsOutput `json:"-" xml:"-"`
+}
+
+// metadataListObjectsOutput carries shape-level traits for ListObjectsOutput.
+type metadataListObjectsOutput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListPartsInput is the input shape of the ListParts operation.
+type ListPartsInput struct {
+	// Name of the bucket containing the multipart upload (URI path element).
+	Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+	// Key of the object whose multipart upload parts are being listed.
+	Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+	// Sets the maximum number of parts to return.
+	MaxParts *int64 `location:"querystring" locationName:"max-parts" type:"integer"`
+
+	// Specifies the part after which listing should begin. Only parts with higher
+	// part numbers will be listed.
+	PartNumberMarker *int64 `location:"querystring" locationName:"part-number-marker" type:"integer"`
+
+	// Confirms that the requester knows that she or he will be charged for the
+	// request. Bucket owners need not specify this parameter in their requests.
+	// Documentation on downloading objects from requester pays buckets can be found
+	// at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+	RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+	// Upload ID identifying the multipart upload whose parts are being listed.
+	UploadID *string `location:"querystring" locationName:"uploadId" type:"string" required:"true"`
+
+	metadataListPartsInput `json:"-" xml:"-"`
+}
+
+// metadataListPartsInput carries shape-level traits for ListPartsInput.
+type metadataListPartsInput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+// ListPartsOutput is the output shape of the ListParts operation.
+type ListPartsOutput struct {
+	// Name of the bucket to which the multipart upload was initiated.
+	Bucket *string `type:"string"`
+
+	// Identifies who initiated the multipart upload.
+	Initiator *Initiator `type:"structure"`
+
+	// Indicates whether the returned list of parts is truncated.
+	IsTruncated *bool `type:"boolean"`
+
+	// Object key for which the multipart upload was initiated.
+	Key *string `type:"string"`
+
+	// Maximum number of parts that were allowed in the response.
+	MaxParts *int64 `type:"integer"`
+
+	// When a list is truncated, this element specifies the last part in the list,
+	// as well as the value to use for the part-number-marker request parameter
+	// in a subsequent request.
+	NextPartNumberMarker *int64 `type:"integer"`
+
+	// Owner of the object the multipart upload was initiated for.
+	Owner *Owner `type:"structure"`
+
+	// Part number after which listing begins.
+	PartNumberMarker *int64 `type:"integer"`
+
+	// The uploaded parts; each serialized as a Part element.
+	Parts []*Part `locationName:"Part" type:"list" flattened:"true"`
+
+	// If present, indicates that the requester was successfully charged for the
+	// request.
+	RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+	// The class of storage used to store the object.
+	StorageClass *string `type:"string"`
+
+	// Upload ID identifying the multipart upload whose parts are being listed.
+	UploadID *string `locationName:"UploadId" type:"string"`
+
+	metadataListPartsOutput `json:"-" xml:"-"`
+}
+
+// metadataListPartsOutput carries shape-level traits for ListPartsOutput.
+type metadataListPartsOutput struct {
+	SDKShapeTraits bool `type:"structure"`
+}
+
+type LoggingEnabled struct {
+ // Specifies the bucket where you want Amazon S3 to store server access logs.
+ // You can have your logs delivered to any bucket that you own, including the
+ // same bucket that is being logged. You can also configure multiple buckets
+ // to deliver their logs to the same target bucket. In this case you should
+ // choose a different TargetPrefix for each source bucket so that the delivered
+ // log files can be distinguished by key.
+ TargetBucket *string `type:"string"`
+
+ TargetGrants []*TargetGrant `locationNameList:"Grant" type:"list"`
+
+ // This element lets you specify a prefix for the keys that the log files will
+ // be stored under.
+ TargetPrefix *string `type:"string"`
+
+ metadataLoggingEnabled `json:"-" xml:"-"`
+}
+
+type metadataLoggingEnabled struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type MultipartUpload struct {
+ // Date and time at which the multipart upload was initiated.
+ Initiated *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+ // Identifies who initiated the multipart upload.
+ Initiator *Initiator `type:"structure"`
+
+ // Key of the object for which the multipart upload was initiated.
+ Key *string `type:"string"`
+
+ Owner *Owner `type:"structure"`
+
+ // The class of storage used to store the object.
+ StorageClass *string `type:"string"`
+
+ // Upload ID that identifies the multipart upload.
+ UploadID *string `locationName:"UploadId" type:"string"`
+
+ metadataMultipartUpload `json:"-" xml:"-"`
+}
+
+type metadataMultipartUpload struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// Specifies when noncurrent object versions expire. Upon expiration, Amazon
+// S3 permanently deletes the noncurrent object versions. You set this lifecycle
+// configuration action on a bucket that has versioning enabled (or suspended)
+// to request that Amazon S3 delete noncurrent object versions at a specific
+// period in the object's lifetime.
+type NoncurrentVersionExpiration struct {
+ // Specifies the number of days an object is noncurrent before Amazon S3 can
+ // perform the associated action. For information about the noncurrent days
+ // calculations, see How Amazon S3 Calculates When an Object Became Noncurrent
+ // (/AmazonS3/latest/dev/s3-access-control.html) in the Amazon Simple Storage
+ // Service Developer Guide.
+ NoncurrentDays *int64 `type:"integer"`
+
+ metadataNoncurrentVersionExpiration `json:"-" xml:"-"`
+}
+
+type metadataNoncurrentVersionExpiration struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// Container for the transition rule that describes when noncurrent objects
+// transition to the GLACIER storage class. If your bucket is versioning-enabled
+// (or versioning is suspended), you can set this action to request that Amazon
+// S3 transition noncurrent object versions to the GLACIER storage class at
+// a specific period in the object's lifetime.
+type NoncurrentVersionTransition struct {
+ // Specifies the number of days an object is noncurrent before Amazon S3 can
+ // perform the associated action. For information about the noncurrent days
+ // calculations, see How Amazon S3 Calculates When an Object Became Noncurrent
+ // (/AmazonS3/latest/dev/s3-access-control.html) in the Amazon Simple Storage
+ // Service Developer Guide.
+ NoncurrentDays *int64 `type:"integer"`
+
+ // The class of storage used to store the object.
+ StorageClass *string `type:"string"`
+
+ metadataNoncurrentVersionTransition `json:"-" xml:"-"`
+}
+
+type metadataNoncurrentVersionTransition struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// Container for specifying the notification configuration of the bucket. If
+// this element is empty, notifications are turned off on the bucket.
+type NotificationConfiguration struct {
+ LambdaFunctionConfigurations []*LambdaFunctionConfiguration `locationName:"CloudFunctionConfiguration" type:"list" flattened:"true"`
+
+ QueueConfigurations []*QueueConfiguration `locationName:"QueueConfiguration" type:"list" flattened:"true"`
+
+ TopicConfigurations []*TopicConfiguration `locationName:"TopicConfiguration" type:"list" flattened:"true"`
+
+ metadataNotificationConfiguration `json:"-" xml:"-"`
+}
+
+type metadataNotificationConfiguration struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type NotificationConfigurationDeprecated struct {
+ CloudFunctionConfiguration *CloudFunctionConfiguration `type:"structure"`
+
+ QueueConfiguration *QueueConfigurationDeprecated `type:"structure"`
+
+ TopicConfiguration *TopicConfigurationDeprecated `type:"structure"`
+
+ metadataNotificationConfigurationDeprecated `json:"-" xml:"-"`
+}
+
+type metadataNotificationConfigurationDeprecated struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Object struct {
+ ETag *string `type:"string"`
+
+ Key *string `type:"string"`
+
+ LastModified *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+ Owner *Owner `type:"structure"`
+
+ Size *int64 `type:"integer"`
+
+ // The class of storage used to store the object.
+ StorageClass *string `type:"string"`
+
+ metadataObject `json:"-" xml:"-"`
+}
+
+type metadataObject struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type ObjectIdentifier struct {
+ // Key name of the object to delete.
+ Key *string `type:"string" required:"true"`
+
+ // VersionId for the specific version of the object to delete.
+ VersionID *string `locationName:"VersionId" type:"string"`
+
+ metadataObjectIdentifier `json:"-" xml:"-"`
+}
+
+type metadataObjectIdentifier struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type ObjectVersion struct {
+ ETag *string `type:"string"`
+
+ // Specifies whether the object is (true) or is not (false) the latest version
+ // of an object.
+ IsLatest *bool `type:"boolean"`
+
+ // The object key.
+ Key *string `type:"string"`
+
+ // Date and time the object was last modified.
+ LastModified *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+ Owner *Owner `type:"structure"`
+
+ // Size in bytes of the object.
+ Size *int64 `type:"integer"`
+
+ // The class of storage used to store the object.
+ StorageClass *string `type:"string"`
+
+ // Version ID of an object.
+ VersionID *string `locationName:"VersionId" type:"string"`
+
+ metadataObjectVersion `json:"-" xml:"-"`
+}
+
+type metadataObjectVersion struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Owner struct {
+ DisplayName *string `type:"string"`
+
+ ID *string `type:"string"`
+
+ metadataOwner `json:"-" xml:"-"`
+}
+
+type metadataOwner struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Part struct {
+ // Entity tag returned when the part was uploaded.
+ ETag *string `type:"string"`
+
+ // Date and time at which the part was uploaded.
+ LastModified *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+ // Part number identifying the part.
+ PartNumber *int64 `type:"integer"`
+
+ // Size of the uploaded part data.
+ Size *int64 `type:"integer"`
+
+ metadataPart `json:"-" xml:"-"`
+}
+
+type metadataPart struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketACLInput struct {
+ // The canned ACL to apply to the bucket.
+ ACL *string `location:"header" locationName:"x-amz-acl" type:"string"`
+
+ AccessControlPolicy *AccessControlPolicy `locationName:"AccessControlPolicy" type:"structure"`
+
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // Allows grantee the read, write, read ACP, and write ACP permissions on the
+ // bucket.
+ GrantFullControl *string `location:"header" locationName:"x-amz-grant-full-control" type:"string"`
+
+ // Allows grantee to list the objects in the bucket.
+ GrantRead *string `location:"header" locationName:"x-amz-grant-read" type:"string"`
+
+ // Allows grantee to read the bucket ACL.
+ GrantReadACP *string `location:"header" locationName:"x-amz-grant-read-acp" type:"string"`
+
+ // Allows grantee to create, overwrite, and delete any object in the bucket.
+ GrantWrite *string `location:"header" locationName:"x-amz-grant-write" type:"string"`
+
+ // Allows grantee to write the ACL for the applicable bucket.
+ GrantWriteACP *string `location:"header" locationName:"x-amz-grant-write-acp" type:"string"`
+
+ metadataPutBucketACLInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketACLInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"AccessControlPolicy"`
+}
+
+type PutBucketACLOutput struct {
+ metadataPutBucketACLOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketACLOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketCORSInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ CORSConfiguration *CORSConfiguration `locationName:"CORSConfiguration" type:"structure"`
+
+ metadataPutBucketCORSInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketCORSInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"CORSConfiguration"`
+}
+
+type PutBucketCORSOutput struct {
+ metadataPutBucketCORSOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketCORSOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketLifecycleInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ LifecycleConfiguration *LifecycleConfiguration `locationName:"LifecycleConfiguration" type:"structure"`
+
+ metadataPutBucketLifecycleInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketLifecycleInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"LifecycleConfiguration"`
+}
+
+type PutBucketLifecycleOutput struct {
+ metadataPutBucketLifecycleOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketLifecycleOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketLoggingInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ BucketLoggingStatus *BucketLoggingStatus `locationName:"BucketLoggingStatus" type:"structure" required:"true"`
+
+ metadataPutBucketLoggingInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketLoggingInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"BucketLoggingStatus"`
+}
+
+type PutBucketLoggingOutput struct {
+ metadataPutBucketLoggingOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketLoggingOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketNotificationConfigurationInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // Container for specifying the notification configuration of the bucket. If
+ // this element is empty, notifications are turned off on the bucket.
+ NotificationConfiguration *NotificationConfiguration `locationName:"NotificationConfiguration" type:"structure" required:"true"`
+
+ metadataPutBucketNotificationConfigurationInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketNotificationConfigurationInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"NotificationConfiguration"`
+}
+
+type PutBucketNotificationConfigurationOutput struct {
+ metadataPutBucketNotificationConfigurationOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketNotificationConfigurationOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketNotificationInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ NotificationConfiguration *NotificationConfigurationDeprecated `locationName:"NotificationConfiguration" type:"structure" required:"true"`
+
+ metadataPutBucketNotificationInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketNotificationInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"NotificationConfiguration"`
+}
+
+type PutBucketNotificationOutput struct {
+ metadataPutBucketNotificationOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketNotificationOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketPolicyInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // The bucket policy as a JSON document.
+ Policy *string `type:"string" required:"true"`
+
+ metadataPutBucketPolicyInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketPolicyInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"Policy"`
+}
+
+type PutBucketPolicyOutput struct {
+ metadataPutBucketPolicyOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketPolicyOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketReplicationInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // Container for replication rules. You can add as many as 1,000 rules. Total
+ // replication configuration size can be up to 2 MB.
+ ReplicationConfiguration *ReplicationConfiguration `locationName:"ReplicationConfiguration" type:"structure" required:"true"`
+
+ metadataPutBucketReplicationInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketReplicationInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"ReplicationConfiguration"`
+}
+
+type PutBucketReplicationOutput struct {
+ metadataPutBucketReplicationOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketReplicationOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketRequestPaymentInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ RequestPaymentConfiguration *RequestPaymentConfiguration `locationName:"RequestPaymentConfiguration" type:"structure" required:"true"`
+
+ metadataPutBucketRequestPaymentInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketRequestPaymentInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"RequestPaymentConfiguration"`
+}
+
+type PutBucketRequestPaymentOutput struct {
+ metadataPutBucketRequestPaymentOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketRequestPaymentOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketTaggingInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ Tagging *Tagging `locationName:"Tagging" type:"structure" required:"true"`
+
+ metadataPutBucketTaggingInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketTaggingInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"Tagging"`
+}
+
+type PutBucketTaggingOutput struct {
+ metadataPutBucketTaggingOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketTaggingOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketVersioningInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // The concatenation of the authentication device's serial number, a space,
+ // and the value that is displayed on your authentication device.
+ MFA *string `location:"header" locationName:"x-amz-mfa" type:"string"`
+
+ VersioningConfiguration *VersioningConfiguration `locationName:"VersioningConfiguration" type:"structure" required:"true"`
+
+ metadataPutBucketVersioningInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketVersioningInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"VersioningConfiguration"`
+}
+
+type PutBucketVersioningOutput struct {
+ metadataPutBucketVersioningOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketVersioningOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutBucketWebsiteInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ WebsiteConfiguration *WebsiteConfiguration `locationName:"WebsiteConfiguration" type:"structure" required:"true"`
+
+ metadataPutBucketWebsiteInput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketWebsiteInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"WebsiteConfiguration"`
+}
+
+type PutBucketWebsiteOutput struct {
+ metadataPutBucketWebsiteOutput `json:"-" xml:"-"`
+}
+
+type metadataPutBucketWebsiteOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutObjectACLInput struct {
+ // The canned ACL to apply to the object.
+ ACL *string `location:"header" locationName:"x-amz-acl" type:"string"`
+
+ AccessControlPolicy *AccessControlPolicy `locationName:"AccessControlPolicy" type:"structure"`
+
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // Allows grantee the read, write, read ACP, and write ACP permissions on the
+ // bucket.
+ GrantFullControl *string `location:"header" locationName:"x-amz-grant-full-control" type:"string"`
+
+ // Allows grantee to list the objects in the bucket.
+ GrantRead *string `location:"header" locationName:"x-amz-grant-read" type:"string"`
+
+ // Allows grantee to read the bucket ACL.
+ GrantReadACP *string `location:"header" locationName:"x-amz-grant-read-acp" type:"string"`
+
+ // Allows grantee to create, overwrite, and delete any object in the bucket.
+ GrantWrite *string `location:"header" locationName:"x-amz-grant-write" type:"string"`
+
+ // Allows grantee to write the ACL for the applicable bucket.
+ GrantWriteACP *string `location:"header" locationName:"x-amz-grant-write-acp" type:"string"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ metadataPutObjectACLInput `json:"-" xml:"-"`
+}
+
+type metadataPutObjectACLInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"AccessControlPolicy"`
+}
+
+type PutObjectACLOutput struct {
+ // If present, indicates that the requester was successfully charged for the
+ // request.
+ RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+ metadataPutObjectACLOutput `json:"-" xml:"-"`
+}
+
+type metadataPutObjectACLOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type PutObjectInput struct {
+ // The canned ACL to apply to the object.
+ ACL *string `location:"header" locationName:"x-amz-acl" type:"string"`
+
+ // Object data.
+ Body io.ReadSeeker `type:"blob"`
+
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // Specifies caching behavior along the request/reply chain.
+ CacheControl *string `location:"header" locationName:"Cache-Control" type:"string"`
+
+ // Specifies presentational information for the object.
+ ContentDisposition *string `location:"header" locationName:"Content-Disposition" type:"string"`
+
+ // Specifies what content encodings have been applied to the object and thus
+ // what decoding mechanisms must be applied to obtain the media-type referenced
+ // by the Content-Type header field.
+ ContentEncoding *string `location:"header" locationName:"Content-Encoding" type:"string"`
+
+ // The language the content is in.
+ ContentLanguage *string `location:"header" locationName:"Content-Language" type:"string"`
+
+ // Size of the body in bytes. This parameter is useful when the size of the
+ // body cannot be determined automatically.
+ ContentLength *int64 `location:"header" locationName:"Content-Length" type:"integer"`
+
+ // A standard MIME type describing the format of the object data.
+ ContentType *string `location:"header" locationName:"Content-Type" type:"string"`
+
+ // The date and time at which the object is no longer cacheable.
+ Expires *time.Time `location:"header" locationName:"Expires" type:"timestamp" timestampFormat:"rfc822"`
+
+ // Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.
+ GrantFullControl *string `location:"header" locationName:"x-amz-grant-full-control" type:"string"`
+
+ // Allows grantee to read the object data and its metadata.
+ GrantRead *string `location:"header" locationName:"x-amz-grant-read" type:"string"`
+
+ // Allows grantee to read the object ACL.
+ GrantReadACP *string `location:"header" locationName:"x-amz-grant-read-acp" type:"string"`
+
+ // Allows grantee to write the ACL for the applicable object.
+ GrantWriteACP *string `location:"header" locationName:"x-amz-grant-write-acp" type:"string"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // A map of metadata to store with the object in S3.
+ Metadata map[string]*string `location:"headers" locationName:"x-amz-meta-" type:"map"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ // Specifies the algorithm to use to when encrypting the object (e.g., AES256,
+ // aws:kms).
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // Specifies the customer-provided encryption key for Amazon S3 to use in encrypting
+ // data. This value is used to store the object and then it is discarded; Amazon
+ // does not store the encryption key. The key must be appropriate for use with
+ // the algorithm specified in the x-amz-server-side-encryption-customer-algorithm
+ // header.
+ SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"`
+
+ // Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+ // Amazon S3 uses this header for a message integrity check to ensure the encryption
+ // key was transmitted without error.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // Specifies the AWS KMS key ID to use for object encryption. All GET and PUT
+ // requests for an object protected by AWS KMS will fail if not made via SSL
+ // or using SigV4. Documentation on configuring any of the officially supported
+ // AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version
+ SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+ // The Server-side encryption algorithm used when storing this object in S3
+ // (e.g., AES256, aws:kms).
+ ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+ // The type of storage to use for the object. Defaults to 'STANDARD'.
+ StorageClass *string `location:"header" locationName:"x-amz-storage-class" type:"string"`
+
+ // If the bucket is configured as a website, redirects requests for this object
+ // to another object in the same bucket or to an external URL. Amazon S3 stores
+ // the value of this header in the object metadata.
+ WebsiteRedirectLocation *string `location:"header" locationName:"x-amz-website-redirect-location" type:"string"`
+
+ metadataPutObjectInput `json:"-" xml:"-"`
+}
+
+type metadataPutObjectInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"Body"`
+}
+
+type PutObjectOutput struct {
+ // Entity tag for the uploaded object.
+ ETag *string `location:"header" locationName:"ETag" type:"string"`
+
+ // If the object expiration is configured, this will contain the expiration
+ // date (expiry-date) and rule ID (rule-id). The value of rule-id is URL encoded.
+ Expiration *string `location:"header" locationName:"x-amz-expiration" type:"string"`
+
+ // If present, indicates that the requester was successfully charged for the
+ // request.
+ RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+ // If server-side encryption with a customer-provided encryption key was requested,
+ // the response will include this header confirming the encryption algorithm
+ // used.
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // If server-side encryption with a customer-provided encryption key was requested,
+ // the response will include this header to provide round trip message integrity
+ // verification of the customer-provided encryption key.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // If present, specifies the ID of the AWS Key Management Service (KMS) master
+ // encryption key that was used for the object.
+ SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+ // The Server-side encryption algorithm used when storing this object in S3
+ // (e.g., AES256, aws:kms).
+ ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+ // Version of the object.
+ VersionID *string `location:"header" locationName:"x-amz-version-id" type:"string"`
+
+ metadataPutObjectOutput `json:"-" xml:"-"`
+}
+
+type metadataPutObjectOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// Container for specifying a configuration when you want Amazon S3 to publish
+// events to an Amazon Simple Queue Service (Amazon SQS) queue.
+type QueueConfiguration struct {
+ Events []*string `locationName:"Event" type:"list" flattened:"true" required:"true"`
+
+ // Optional unique identifier for configurations in a notification configuration.
+ // If you don't provide one, Amazon S3 will assign an ID.
+ ID *string `locationName:"Id" type:"string"`
+
+ // Amazon SQS queue ARN to which Amazon S3 will publish a message when it detects
+ // events of specified type.
+ QueueARN *string `locationName:"Queue" type:"string" required:"true"`
+
+ metadataQueueConfiguration `json:"-" xml:"-"`
+}
+
+type metadataQueueConfiguration struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type QueueConfigurationDeprecated struct {
+ // Bucket event for which to send notifications.
+ Event *string `type:"string"`
+
+ Events []*string `locationName:"Event" type:"list" flattened:"true"`
+
+ // Optional unique identifier for configurations in a notification configuration.
+ // If you don't provide one, Amazon S3 will assign an ID.
+ ID *string `locationName:"Id" type:"string"`
+
+ Queue *string `type:"string"`
+
+ metadataQueueConfigurationDeprecated `json:"-" xml:"-"`
+}
+
+type metadataQueueConfigurationDeprecated struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Redirect struct {
+ // The HTTP redirect code to use on the response. Not required if one of the
+ // siblings is present.
+ HTTPRedirectCode *string `locationName:"HttpRedirectCode" type:"string"`
+
+ // The host name to use in the redirect request.
+ HostName *string `type:"string"`
+
+ // Protocol to use (http, https) when redirecting requests. The default is the
+ // protocol that is used in the original request.
+ Protocol *string `type:"string"`
+
+ // The object key prefix to use in the redirect request. For example, to redirect
+ // requests for all pages with prefix docs/ (objects in the docs/ folder) to
+ // documents/, you can set a condition block with KeyPrefixEquals set to docs/
+ // and in the Redirect set ReplaceKeyPrefixWith to /documents. Not required
+ // if one of the siblings is present. Can be present only if ReplaceKeyWith
+ // is not provided.
+ ReplaceKeyPrefixWith *string `type:"string"`
+
+ // The specific object key to use in the redirect request. For example, redirect
+ // request to error.html. Not required if one of the siblings is present. Can
+ // be present only if ReplaceKeyPrefixWith is not provided.
+ ReplaceKeyWith *string `type:"string"`
+
+ metadataRedirect `json:"-" xml:"-"`
+}
+
+type metadataRedirect struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type RedirectAllRequestsTo struct {
+ // Name of the host where requests will be redirected.
+ HostName *string `type:"string" required:"true"`
+
+ // Protocol to use (http, https) when redirecting requests. The default is the
+ // protocol that is used in the original request.
+ Protocol *string `type:"string"`
+
+ metadataRedirectAllRequestsTo `json:"-" xml:"-"`
+}
+
+type metadataRedirectAllRequestsTo struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// Container for replication rules. You can add as many as 1,000 rules. Total
+// replication configuration size can be up to 2 MB.
+type ReplicationConfiguration struct {
+ // Amazon Resource Name (ARN) of an IAM role for Amazon S3 to assume when replicating
+ // the objects.
+ Role *string `type:"string" required:"true"`
+
+ // Container for information about a particular replication rule. Replication
+ // configuration must have at least one rule and can contain up to 1,000 rules.
+ Rules []*ReplicationRule `locationName:"Rule" type:"list" flattened:"true" required:"true"`
+
+ metadataReplicationConfiguration `json:"-" xml:"-"`
+}
+
+type metadataReplicationConfiguration struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type ReplicationRule struct {
+ Destination *Destination `type:"structure" required:"true"`
+
+ // Unique identifier for the rule. The value cannot be longer than 255 characters.
+ ID *string `type:"string"`
+
+ // Object keyname prefix identifying one or more objects to which the rule applies.
+ // Maximum prefix length can be up to 1,024 characters. Overlapping prefixes
+ // are not supported.
+ Prefix *string `type:"string" required:"true"`
+
+ // The rule is ignored if status is not Enabled.
+ Status *string `type:"string" required:"true"`
+
+ metadataReplicationRule `json:"-" xml:"-"`
+}
+
+type metadataReplicationRule struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type RequestPaymentConfiguration struct {
+ // Specifies who pays for the download and request fees.
+ Payer *string `type:"string" required:"true"`
+
+ metadataRequestPaymentConfiguration `json:"-" xml:"-"`
+}
+
+type metadataRequestPaymentConfiguration struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type RestoreObjectInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ RestoreRequest *RestoreRequest `locationName:"RestoreRequest" type:"structure"`
+
+ VersionID *string `location:"querystring" locationName:"versionId" type:"string"`
+
+ metadataRestoreObjectInput `json:"-" xml:"-"`
+}
+
+type metadataRestoreObjectInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"RestoreRequest"`
+}
+
+type RestoreObjectOutput struct {
+ // If present, indicates that the requester was successfully charged for the
+ // request.
+ RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+ metadataRestoreObjectOutput `json:"-" xml:"-"`
+}
+
+type metadataRestoreObjectOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type RestoreRequest struct {
+ // Lifetime of the active copy in days
+ Days *int64 `type:"integer" required:"true"`
+
+ metadataRestoreRequest `json:"-" xml:"-"`
+}
+
+type metadataRestoreRequest struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type RoutingRule struct {
+ // A container for describing a condition that must be met for the specified
+ // redirect to apply. For example, 1. If request is for pages in the /docs folder,
+ // redirect to the /documents folder. 2. If request results in HTTP error 4xx,
+ // redirect request to another host where you might process the error.
+ Condition *Condition `type:"structure"`
+
+ // Container for redirect information. You can redirect requests to another
+ // host, to another page, or with another protocol. In the event of an error,
+ // you can can specify a different error code to return.
+ Redirect *Redirect `type:"structure" required:"true"`
+
+ metadataRoutingRule `json:"-" xml:"-"`
+}
+
+type metadataRoutingRule struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Tag struct {
+ // Name of the tag.
+ Key *string `type:"string" required:"true"`
+
+ // Value of the tag.
+ Value *string `type:"string" required:"true"`
+
+ metadataTag `json:"-" xml:"-"`
+}
+
+type metadataTag struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Tagging struct {
+ TagSet []*Tag `locationNameList:"Tag" type:"list" required:"true"`
+
+ metadataTagging `json:"-" xml:"-"`
+}
+
+type metadataTagging struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type TargetGrant struct {
+ Grantee *Grantee `type:"structure"`
+
+ // Logging permissions assigned to the Grantee for the bucket.
+ Permission *string `type:"string"`
+
+ metadataTargetGrant `json:"-" xml:"-"`
+}
+
+type metadataTargetGrant struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+// Container for specifying the configuration when you want Amazon S3 to publish
+// events to an Amazon Simple Notification Service (Amazon SNS) topic.
+type TopicConfiguration struct {
+ Events []*string `locationName:"Event" type:"list" flattened:"true" required:"true"`
+
+ // Optional unique identifier for configurations in a notification configuration.
+ // If you don't provide one, Amazon S3 will assign an ID.
+ ID *string `locationName:"Id" type:"string"`
+
+ // Amazon SNS topic ARN to which Amazon S3 will publish a message when it detects
+ // events of specified type.
+ TopicARN *string `locationName:"Topic" type:"string" required:"true"`
+
+ metadataTopicConfiguration `json:"-" xml:"-"`
+}
+
+type metadataTopicConfiguration struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type TopicConfigurationDeprecated struct {
+ // Bucket event for which to send notifications.
+ Event *string `type:"string"`
+
+ Events []*string `locationName:"Event" type:"list" flattened:"true"`
+
+ // Optional unique identifier for configurations in a notification configuration.
+ // If you don't provide one, Amazon S3 will assign an ID.
+ ID *string `locationName:"Id" type:"string"`
+
+ // Amazon SNS topic to which Amazon S3 will publish a message to report the
+ // specified events for the bucket.
+ Topic *string `type:"string"`
+
+ metadataTopicConfigurationDeprecated `json:"-" xml:"-"`
+}
+
+type metadataTopicConfigurationDeprecated struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type Transition struct {
+ // Indicates at what date the object is to be moved or deleted. Should be in
+ // GMT ISO 8601 Format.
+ Date *time.Time `type:"timestamp" timestampFormat:"iso8601"`
+
+ // Indicates the lifetime, in days, of the objects that are subject to the rule.
+ // The value must be a non-zero positive integer.
+ Days *int64 `type:"integer"`
+
+ // The class of storage used to store the object.
+ StorageClass *string `type:"string"`
+
+ metadataTransition `json:"-" xml:"-"`
+}
+
+type metadataTransition struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type UploadPartCopyInput struct {
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // The name of the source bucket and key name of the source object, separated
+ // by a slash (/). Must be URL-encoded.
+ CopySource *string `location:"header" locationName:"x-amz-copy-source" type:"string" required:"true"`
+
+ // Copies the object if its entity tag (ETag) matches the specified tag.
+ CopySourceIfMatch *string `location:"header" locationName:"x-amz-copy-source-if-match" type:"string"`
+
+ // Copies the object if it has been modified since the specified time.
+ CopySourceIfModifiedSince *time.Time `location:"header" locationName:"x-amz-copy-source-if-modified-since" type:"timestamp" timestampFormat:"rfc822"`
+
+ // Copies the object if its entity tag (ETag) is different than the specified
+ // ETag.
+ CopySourceIfNoneMatch *string `location:"header" locationName:"x-amz-copy-source-if-none-match" type:"string"`
+
+ // Copies the object if it hasn't been modified since the specified time.
+ CopySourceIfUnmodifiedSince *time.Time `location:"header" locationName:"x-amz-copy-source-if-unmodified-since" type:"timestamp" timestampFormat:"rfc822"`
+
+ // The range of bytes to copy from the source object. The range value must use
+ // the form bytes=first-last, where the first and last are the zero-based byte
+ // offsets to copy. For example, bytes=0-9 indicates that you want to copy the
+ // first ten bytes of the source. You can copy a range only if the source object
+ // is greater than 5 GB.
+ CopySourceRange *string `location:"header" locationName:"x-amz-copy-source-range" type:"string"`
+
+ // Specifies the algorithm to use when decrypting the source object (e.g., AES256).
+ CopySourceSSECustomerAlgorithm *string `location:"header" locationName:"x-amz-copy-source-server-side-encryption-customer-algorithm" type:"string"`
+
+ // Specifies the customer-provided encryption key for Amazon S3 to use to decrypt
+ // the source object. The encryption key provided in this header must be one
+ // that was used when the source object was created.
+ CopySourceSSECustomerKey *string `location:"header" locationName:"x-amz-copy-source-server-side-encryption-customer-key" type:"string"`
+
+ // Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+ // Amazon S3 uses this header for a message integrity check to ensure the encryption
+ // key was transmitted without error.
+ CopySourceSSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-copy-source-server-side-encryption-customer-key-MD5" type:"string"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // Part number of part being copied.
+ PartNumber *int64 `location:"querystring" locationName:"partNumber" type:"integer" required:"true"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ // Specifies the algorithm to use to when encrypting the object (e.g., AES256,
+ // aws:kms).
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // Specifies the customer-provided encryption key for Amazon S3 to use in encrypting
+ // data. This value is used to store the object and then it is discarded; Amazon
+ // does not store the encryption key. The key must be appropriate for use with
+ // the algorithm specified in the x-amz-server-side-encryption-customer-algorithm
+ // header. This must be the same encryption key specified in the initiate multipart
+ // upload request.
+ SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"`
+
+ // Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+ // Amazon S3 uses this header for a message integrity check to ensure the encryption
+ // key was transmitted without error.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // Upload ID identifying the multipart upload whose part is being copied.
+ UploadID *string `location:"querystring" locationName:"uploadId" type:"string" required:"true"`
+
+ metadataUploadPartCopyInput `json:"-" xml:"-"`
+}
+
+type metadataUploadPartCopyInput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type UploadPartCopyOutput struct {
+ CopyPartResult *CopyPartResult `type:"structure"`
+
+ // The version of the source object that was copied, if you have enabled versioning
+ // on the source bucket.
+ CopySourceVersionID *string `location:"header" locationName:"x-amz-copy-source-version-id" type:"string"`
+
+ // If present, indicates that the requester was successfully charged for the
+ // request.
+ RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+ // If server-side encryption with a customer-provided encryption key was requested,
+ // the response will include this header confirming the encryption algorithm
+ // used.
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // If server-side encryption with a customer-provided encryption key was requested,
+ // the response will include this header to provide round trip message integrity
+ // verification of the customer-provided encryption key.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // If present, specifies the ID of the AWS Key Management Service (KMS) master
+ // encryption key that was used for the object.
+ SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+ // The Server-side encryption algorithm used when storing this object in S3
+ // (e.g., AES256, aws:kms).
+ ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+ metadataUploadPartCopyOutput `json:"-" xml:"-"`
+}
+
+type metadataUploadPartCopyOutput struct {
+ SDKShapeTraits bool `type:"structure" payload:"CopyPartResult"`
+}
+
+type UploadPartInput struct {
+ Body io.ReadSeeker `type:"blob"`
+
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // Size of the body in bytes. This parameter is useful when the size of the
+ // body cannot be determined automatically.
+ ContentLength *int64 `location:"header" locationName:"Content-Length" type:"integer"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // Part number of part being uploaded.
+ PartNumber *int64 `location:"querystring" locationName:"partNumber" type:"integer" required:"true"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ // Specifies the algorithm to use to when encrypting the object (e.g., AES256,
+ // aws:kms).
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // Specifies the customer-provided encryption key for Amazon S3 to use in encrypting
+ // data. This value is used to store the object and then it is discarded; Amazon
+ // does not store the encryption key. The key must be appropriate for use with
+ // the algorithm specified in the x-amz-server-side-encryption-customer-algorithm
+ // header. This must be the same encryption key specified in the initiate multipart
+ // upload request.
+ SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"`
+
+ // Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+ // Amazon S3 uses this header for a message integrity check to ensure the encryption
+ // key was transmitted without error.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // Upload ID identifying the multipart upload whose part is being uploaded.
+ UploadID *string `location:"querystring" locationName:"uploadId" type:"string" required:"true"`
+
+ metadataUploadPartInput `json:"-" xml:"-"`
+}
+
+type metadataUploadPartInput struct {
+ SDKShapeTraits bool `type:"structure" payload:"Body"`
+}
+
+type UploadPartOutput struct {
+ // Entity tag for the uploaded object.
+ ETag *string `location:"header" locationName:"ETag" type:"string"`
+
+ // If present, indicates that the requester was successfully charged for the
+ // request.
+ RequestCharged *string `location:"header" locationName:"x-amz-request-charged" type:"string"`
+
+ // If server-side encryption with a customer-provided encryption key was requested,
+ // the response will include this header confirming the encryption algorithm
+ // used.
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // If server-side encryption with a customer-provided encryption key was requested,
+ // the response will include this header to provide round trip message integrity
+ // verification of the customer-provided encryption key.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // If present, specifies the ID of the AWS Key Management Service (KMS) master
+ // encryption key that was used for the object.
+ SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+ // The Server-side encryption algorithm used when storing this object in S3
+ // (e.g., AES256, aws:kms).
+ ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+ metadataUploadPartOutput `json:"-" xml:"-"`
+}
+
+type metadataUploadPartOutput struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type VersioningConfiguration struct {
+ // Specifies whether MFA delete is enabled in the bucket versioning configuration.
+ // This element is only returned if the bucket has been configured with MFA
+ // delete. If the bucket has never been so configured, this element is not returned.
+ MFADelete *string `locationName:"MfaDelete" type:"string"`
+
+ // The versioning state of the bucket.
+ Status *string `type:"string"`
+
+ metadataVersioningConfiguration `json:"-" xml:"-"`
+}
+
+type metadataVersioningConfiguration struct {
+ SDKShapeTraits bool `type:"structure"`
+}
+
+type WebsiteConfiguration struct {
+ ErrorDocument *ErrorDocument `type:"structure"`
+
+ IndexDocument *IndexDocument `type:"structure"`
+
+ RedirectAllRequestsTo *RedirectAllRequestsTo `type:"structure"`
+
+ RoutingRules []*RoutingRule `locationNameList:"RoutingRule" type:"list"`
+
+ metadataWebsiteConfiguration `json:"-" xml:"-"`
+}
+
+type metadataWebsiteConfiguration struct {
+ SDKShapeTraits bool `type:"structure"`
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/bucket_location.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/bucket_location.go
new file mode 100644
index 0000000..59eb845
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/bucket_location.go
@@ -0,0 +1,42 @@
+package s3
+
+import (
+ "io/ioutil"
+ "regexp"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+)
+
+var reBucketLocation = regexp.MustCompile(`>([^<>]+)<\/Location`)
+
+func buildGetBucketLocation(r *aws.Request) {
+ if r.DataFilled() {
+ out := r.Data.(*GetBucketLocationOutput)
+ b, err := ioutil.ReadAll(r.HTTPResponse.Body)
+ if err != nil {
+ r.Error = awserr.New("SerializationError", "failed reading response body", err)
+ return
+ }
+
+ match := reBucketLocation.FindSubmatch(b)
+ if len(match) > 1 {
+ loc := string(match[1])
+ out.LocationConstraint = &loc
+ }
+ }
+}
+
+func populateLocationConstraint(r *aws.Request) {
+ if r.ParamsFilled() && r.Config.Region != "us-east-1" {
+ in := r.Params.(*CreateBucketInput)
+ if in.CreateBucketConfiguration == nil {
+ r.Params = awsutil.CopyOf(r.Params)
+ in = r.Params.(*CreateBucketInput)
+ in.CreateBucketConfiguration = &CreateBucketConfiguration{
+ LocationConstraint: &r.Config.Region,
+ }
+ }
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/bucket_location_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/bucket_location_test.go
new file mode 100644
index 0000000..f168854
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/bucket_location_test.go
@@ -0,0 +1,75 @@
+package s3_test
+
+import (
+ "bytes"
+ "io/ioutil"
+ "net/http"
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+ "github.com/aws/aws-sdk-go/internal/test/unit"
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ = unit.Imported
+var s3LocationTests = []struct {
+ body string
+ loc string
+}{
+ {``, ``},
+ {`EU`, `EU`},
+}
+
+func TestGetBucketLocation(t *testing.T) {
+ for _, test := range s3LocationTests {
+ s := s3.New(nil)
+ s.Handlers.Send.Clear()
+ s.Handlers.Send.PushBack(func(r *aws.Request) {
+ reader := ioutil.NopCloser(bytes.NewReader([]byte(test.body)))
+ r.HTTPResponse = &http.Response{StatusCode: 200, Body: reader}
+ })
+
+ resp, err := s.GetBucketLocation(&s3.GetBucketLocationInput{Bucket: aws.String("bucket")})
+ assert.NoError(t, err)
+ if test.loc == "" {
+ assert.Nil(t, resp.LocationConstraint)
+ } else {
+ assert.Equal(t, test.loc, *resp.LocationConstraint)
+ }
+ }
+}
+
+func TestPopulateLocationConstraint(t *testing.T) {
+ s := s3.New(nil)
+ in := &s3.CreateBucketInput{
+ Bucket: aws.String("bucket"),
+ }
+ req, _ := s.CreateBucketRequest(in)
+ err := req.Build()
+ assert.NoError(t, err)
+ assert.Equal(t, "mock-region", awsutil.ValuesAtPath(req.Params, "CreateBucketConfiguration.LocationConstraint")[0])
+ assert.Nil(t, in.CreateBucketConfiguration) // don't modify original params
+}
+
+func TestNoPopulateLocationConstraintIfProvided(t *testing.T) {
+ s := s3.New(nil)
+ req, _ := s.CreateBucketRequest(&s3.CreateBucketInput{
+ Bucket: aws.String("bucket"),
+ CreateBucketConfiguration: &s3.CreateBucketConfiguration{},
+ })
+ err := req.Build()
+ assert.NoError(t, err)
+ assert.Equal(t, 0, len(awsutil.ValuesAtPath(req.Params, "CreateBucketConfiguration.LocationConstraint")))
+}
+
+func TestNoPopulateLocationConstraintIfClassic(t *testing.T) {
+ s := s3.New(&aws.Config{Region: "us-east-1"})
+ req, _ := s.CreateBucketRequest(&s3.CreateBucketInput{
+ Bucket: aws.String("bucket"),
+ })
+ err := req.Build()
+ assert.NoError(t, err)
+ assert.Equal(t, 0, len(awsutil.ValuesAtPath(req.Params, "CreateBucketConfiguration.LocationConstraint")))
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/content_md5.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/content_md5.go
new file mode 100644
index 0000000..386f09a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/content_md5.go
@@ -0,0 +1,36 @@
+package s3
+
+import (
+ "crypto/md5"
+ "encoding/base64"
+ "io"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+// contentMD5 computes and sets the HTTP Content-MD5 header for requests that
+// require it.
+func contentMD5(r *aws.Request) {
+ h := md5.New()
+
+ // hash the body. seek back to the first position after reading to reset
+ // the body for transmission. copy errors may be assumed to be from the
+ // body.
+ _, err := io.Copy(h, r.Body)
+ if err != nil {
+ r.Error = awserr.New("ContentMD5", "failed to read body", err)
+ return
+ }
+ _, err = r.Body.Seek(0, 0)
+ if err != nil {
+ r.Error = awserr.New("ContentMD5", "failed to seek body", err)
+ return
+ }
+
+ // encode the md5 checksum in base64 and set the request header.
+ sum := h.Sum(nil)
+ sum64 := make([]byte, base64.StdEncoding.EncodedLen(len(sum)))
+ base64.StdEncoding.Encode(sum64, sum)
+ r.HTTPRequest.Header.Set("Content-MD5", string(sum64))
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/customizations.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/customizations.go
new file mode 100644
index 0000000..490ff2d
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/customizations.go
@@ -0,0 +1,32 @@
+package s3
+
+import "github.com/aws/aws-sdk-go/aws"
+
+func init() {
+ initService = func(s *aws.Service) {
+ // Support building custom host-style bucket endpoints
+ s.Handlers.Build.PushFront(updateHostWithBucket)
+
+ // Require SSL when using SSE keys
+ s.Handlers.Validate.PushBack(validateSSERequiresSSL)
+ s.Handlers.Build.PushBack(computeSSEKeys)
+
+ // S3 uses custom error unmarshaling logic
+ s.Handlers.UnmarshalError.Clear()
+ s.Handlers.UnmarshalError.PushBack(unmarshalError)
+ }
+
+ initRequest = func(r *aws.Request) {
+ switch r.Operation.Name {
+ case opPutBucketCORS, opPutBucketLifecycle, opPutBucketPolicy, opPutBucketTagging, opDeleteObjects:
+ // These S3 operations require Content-MD5 to be set
+ r.Handlers.Build.PushBack(contentMD5)
+ case opGetBucketLocation:
+ // GetBucketLocation has custom parsing logic
+ r.Handlers.Unmarshal.PushFront(buildGetBucketLocation)
+ case opCreateBucket:
+ // Auto-populate LocationConstraint with current region
+ r.Handlers.Validate.PushFront(populateLocationConstraint)
+ }
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/customizations_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/customizations_test.go
new file mode 100644
index 0000000..523b487
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/customizations_test.go
@@ -0,0 +1,90 @@
+package s3_test
+
+import (
+ "crypto/md5"
+ "encoding/base64"
+ "io/ioutil"
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/test/unit"
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ = unit.Imported
+
+func assertMD5(t *testing.T, req *aws.Request) {
+ err := req.Build()
+ assert.NoError(t, err)
+
+ b, _ := ioutil.ReadAll(req.HTTPRequest.Body)
+ out := md5.Sum(b)
+ assert.NotEmpty(t, b)
+ assert.Equal(t, base64.StdEncoding.EncodeToString(out[:]), req.HTTPRequest.Header.Get("Content-MD5"))
+}
+
+func TestMD5InPutBucketCORS(t *testing.T) {
+ svc := s3.New(nil)
+ req, _ := svc.PutBucketCORSRequest(&s3.PutBucketCORSInput{
+ Bucket: aws.String("bucketname"),
+ CORSConfiguration: &s3.CORSConfiguration{
+ CORSRules: []*s3.CORSRule{
+ {AllowedMethods: []*string{aws.String("GET")}},
+ },
+ },
+ })
+ assertMD5(t, req)
+}
+
+func TestMD5InPutBucketLifecycle(t *testing.T) {
+ svc := s3.New(nil)
+ req, _ := svc.PutBucketLifecycleRequest(&s3.PutBucketLifecycleInput{
+ Bucket: aws.String("bucketname"),
+ LifecycleConfiguration: &s3.LifecycleConfiguration{
+ Rules: []*s3.LifecycleRule{
+ {
+ ID: aws.String("ID"),
+ Prefix: aws.String("Prefix"),
+ Status: aws.String("Enabled"),
+ },
+ },
+ },
+ })
+ assertMD5(t, req)
+}
+
+func TestMD5InPutBucketPolicy(t *testing.T) {
+ svc := s3.New(nil)
+ req, _ := svc.PutBucketPolicyRequest(&s3.PutBucketPolicyInput{
+ Bucket: aws.String("bucketname"),
+ Policy: aws.String("{}"),
+ })
+ assertMD5(t, req)
+}
+
+func TestMD5InPutBucketTagging(t *testing.T) {
+ svc := s3.New(nil)
+ req, _ := svc.PutBucketTaggingRequest(&s3.PutBucketTaggingInput{
+ Bucket: aws.String("bucketname"),
+ Tagging: &s3.Tagging{
+ TagSet: []*s3.Tag{
+ {Key: aws.String("KEY"), Value: aws.String("VALUE")},
+ },
+ },
+ })
+ assertMD5(t, req)
+}
+
+func TestMD5InDeleteObjects(t *testing.T) {
+ svc := s3.New(nil)
+ req, _ := svc.DeleteObjectsRequest(&s3.DeleteObjectsInput{
+ Bucket: aws.String("bucketname"),
+ Delete: &s3.Delete{
+ Objects: []*s3.ObjectIdentifier{
+ {Key: aws.String("key")},
+ },
+ },
+ })
+ assertMD5(t, req)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/examples_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/examples_test.go
new file mode 100644
index 0000000..7dde748
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/examples_test.go
@@ -0,0 +1,1928 @@
+// THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
+
+package s3_test
+
+import (
+ "bytes"
+ "fmt"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+ "github.com/aws/aws-sdk-go/service/s3"
+)
+
+var _ time.Duration
+var _ bytes.Buffer
+
+func ExampleS3_AbortMultipartUpload() {
+ svc := s3.New(nil)
+
+ params := &s3.AbortMultipartUploadInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ UploadID: aws.String("MultipartUploadId"), // Required
+ RequestPayer: aws.String("RequestPayer"),
+ }
+ resp, err := svc.AbortMultipartUpload(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_CompleteMultipartUpload() {
+ svc := s3.New(nil)
+
+ params := &s3.CompleteMultipartUploadInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ UploadID: aws.String("MultipartUploadId"), // Required
+ MultipartUpload: &s3.CompletedMultipartUpload{
+ Parts: []*s3.CompletedPart{
+ { // Required
+ ETag: aws.String("ETag"),
+ PartNumber: aws.Long(1),
+ },
+ // More values...
+ },
+ },
+ RequestPayer: aws.String("RequestPayer"),
+ }
+ resp, err := svc.CompleteMultipartUpload(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_CopyObject() {
+ svc := s3.New(nil)
+
+ params := &s3.CopyObjectInput{
+ Bucket: aws.String("BucketName"), // Required
+ CopySource: aws.String("CopySource"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ ACL: aws.String("ObjectCannedACL"),
+ CacheControl: aws.String("CacheControl"),
+ ContentDisposition: aws.String("ContentDisposition"),
+ ContentEncoding: aws.String("ContentEncoding"),
+ ContentLanguage: aws.String("ContentLanguage"),
+ ContentType: aws.String("ContentType"),
+ CopySourceIfMatch: aws.String("CopySourceIfMatch"),
+ CopySourceIfModifiedSince: aws.Time(time.Now()),
+ CopySourceIfNoneMatch: aws.String("CopySourceIfNoneMatch"),
+ CopySourceIfUnmodifiedSince: aws.Time(time.Now()),
+ CopySourceSSECustomerAlgorithm: aws.String("CopySourceSSECustomerAlgorithm"),
+ CopySourceSSECustomerKey: aws.String("CopySourceSSECustomerKey"),
+ CopySourceSSECustomerKeyMD5: aws.String("CopySourceSSECustomerKeyMD5"),
+ Expires: aws.Time(time.Now()),
+ GrantFullControl: aws.String("GrantFullControl"),
+ GrantRead: aws.String("GrantRead"),
+ GrantReadACP: aws.String("GrantReadACP"),
+ GrantWriteACP: aws.String("GrantWriteACP"),
+ Metadata: map[string]*string{
+ "Key": aws.String("MetadataValue"), // Required
+ // More values...
+ },
+ MetadataDirective: aws.String("MetadataDirective"),
+ RequestPayer: aws.String("RequestPayer"),
+ SSECustomerAlgorithm: aws.String("SSECustomerAlgorithm"),
+ SSECustomerKey: aws.String("SSECustomerKey"),
+ SSECustomerKeyMD5: aws.String("SSECustomerKeyMD5"),
+ SSEKMSKeyID: aws.String("SSEKMSKeyId"),
+ ServerSideEncryption: aws.String("ServerSideEncryption"),
+ StorageClass: aws.String("StorageClass"),
+ WebsiteRedirectLocation: aws.String("WebsiteRedirectLocation"),
+ }
+ resp, err := svc.CopyObject(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_CreateBucket() {
+ svc := s3.New(nil)
+
+ params := &s3.CreateBucketInput{
+ Bucket: aws.String("BucketName"), // Required
+ ACL: aws.String("BucketCannedACL"),
+ CreateBucketConfiguration: &s3.CreateBucketConfiguration{
+ LocationConstraint: aws.String("BucketLocationConstraint"),
+ },
+ GrantFullControl: aws.String("GrantFullControl"),
+ GrantRead: aws.String("GrantRead"),
+ GrantReadACP: aws.String("GrantReadACP"),
+ GrantWrite: aws.String("GrantWrite"),
+ GrantWriteACP: aws.String("GrantWriteACP"),
+ }
+ resp, err := svc.CreateBucket(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_CreateMultipartUpload builds a CreateMultipartUploadInput with placeholder
+// values for every field, calls CreateMultipartUpload, and prints either the AWS error
+// details or the pretty-printed response.
+func ExampleS3_CreateMultipartUpload() {
+ svc := s3.New(nil)
+
+ params := &s3.CreateMultipartUploadInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ ACL: aws.String("ObjectCannedACL"),
+ CacheControl: aws.String("CacheControl"),
+ ContentDisposition: aws.String("ContentDisposition"),
+ ContentEncoding: aws.String("ContentEncoding"),
+ ContentLanguage: aws.String("ContentLanguage"),
+ ContentType: aws.String("ContentType"),
+ Expires: aws.Time(time.Now()),
+ GrantFullControl: aws.String("GrantFullControl"),
+ GrantRead: aws.String("GrantRead"),
+ GrantReadACP: aws.String("GrantReadACP"),
+ GrantWriteACP: aws.String("GrantWriteACP"),
+ Metadata: map[string]*string{
+ "Key": aws.String("MetadataValue"), // Required
+ // More values...
+ },
+ RequestPayer: aws.String("RequestPayer"),
+ SSECustomerAlgorithm: aws.String("SSECustomerAlgorithm"),
+ SSECustomerKey: aws.String("SSECustomerKey"),
+ SSECustomerKeyMD5: aws.String("SSECustomerKeyMD5"),
+ SSEKMSKeyID: aws.String("SSEKMSKeyId"),
+ ServerSideEncryption: aws.String("ServerSideEncryption"),
+ StorageClass: aws.String("StorageClass"),
+ WebsiteRedirectLocation: aws.String("WebsiteRedirectLocation"),
+ }
+ resp, err := svc.CreateMultipartUpload(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_DeleteBucket calls DeleteBucket with a placeholder bucket name and
+// prints either the AWS error details or the pretty-printed response.
+func ExampleS3_DeleteBucket() {
+ svc := s3.New(nil)
+
+ params := &s3.DeleteBucketInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.DeleteBucket(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_DeleteBucketCORS calls DeleteBucketCORS with a placeholder bucket name
+// and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_DeleteBucketCORS() {
+ svc := s3.New(nil)
+
+ params := &s3.DeleteBucketCORSInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.DeleteBucketCORS(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_DeleteBucketLifecycle calls DeleteBucketLifecycle with a placeholder
+// bucket name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_DeleteBucketLifecycle() {
+ svc := s3.New(nil)
+
+ params := &s3.DeleteBucketLifecycleInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.DeleteBucketLifecycle(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_DeleteBucketPolicy calls DeleteBucketPolicy with a placeholder bucket
+// name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_DeleteBucketPolicy() {
+ svc := s3.New(nil)
+
+ params := &s3.DeleteBucketPolicyInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.DeleteBucketPolicy(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_DeleteBucketReplication calls DeleteBucketReplication with a placeholder
+// bucket name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_DeleteBucketReplication() {
+ svc := s3.New(nil)
+
+ params := &s3.DeleteBucketReplicationInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.DeleteBucketReplication(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_DeleteBucketTagging calls DeleteBucketTagging with a placeholder bucket
+// name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_DeleteBucketTagging() {
+ svc := s3.New(nil)
+
+ params := &s3.DeleteBucketTaggingInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.DeleteBucketTagging(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_DeleteBucketWebsite calls DeleteBucketWebsite with a placeholder bucket
+// name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_DeleteBucketWebsite() {
+ svc := s3.New(nil)
+
+ params := &s3.DeleteBucketWebsiteInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.DeleteBucketWebsite(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_DeleteObject calls DeleteObject with placeholder bucket, key, MFA,
+// request-payer, and version-ID values, then prints either the AWS error details
+// or the pretty-printed response.
+func ExampleS3_DeleteObject() {
+ svc := s3.New(nil)
+
+ params := &s3.DeleteObjectInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ MFA: aws.String("MFA"),
+ RequestPayer: aws.String("RequestPayer"),
+ VersionID: aws.String("ObjectVersionId"),
+ }
+ resp, err := svc.DeleteObject(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_DeleteObjects calls DeleteObjects with a nested Delete structure listing
+// placeholder object identifiers, then prints either the AWS error details or the
+// pretty-printed response.
+func ExampleS3_DeleteObjects() {
+ svc := s3.New(nil)
+
+ params := &s3.DeleteObjectsInput{
+ Bucket: aws.String("BucketName"), // Required
+ Delete: &s3.Delete{ // Required
+ Objects: []*s3.ObjectIdentifier{ // Required
+ { // Required
+ Key: aws.String("ObjectKey"), // Required
+ VersionID: aws.String("ObjectVersionId"),
+ },
+ // More values...
+ },
+ Quiet: aws.Boolean(true),
+ },
+ MFA: aws.String("MFA"),
+ RequestPayer: aws.String("RequestPayer"),
+ }
+ resp, err := svc.DeleteObjects(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketACL calls GetBucketACL with a placeholder bucket name and
+// prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketACL() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketACLInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketACL(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketCORS calls GetBucketCORS with a placeholder bucket name and
+// prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketCORS() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketCORSInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketCORS(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketLifecycle calls GetBucketLifecycle with a placeholder bucket
+// name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketLifecycle() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketLifecycleInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketLifecycle(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketLocation calls GetBucketLocation with a placeholder bucket
+// name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketLocation() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketLocationInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketLocation(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketLogging calls GetBucketLogging with a placeholder bucket name
+// and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketLogging() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketLoggingInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketLogging(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketNotification calls GetBucketNotification; note that this
+// operation shares GetBucketNotificationConfigurationRequest as its input type.
+// It prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketNotification() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketNotificationConfigurationRequest{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketNotification(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketNotificationConfiguration calls GetBucketNotificationConfiguration
+// with a placeholder bucket name and prints either the AWS error details or the
+// pretty-printed response.
+func ExampleS3_GetBucketNotificationConfiguration() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketNotificationConfigurationRequest{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketNotificationConfiguration(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketPolicy calls GetBucketPolicy with a placeholder bucket name
+// and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketPolicy() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketPolicyInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketPolicy(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketReplication calls GetBucketReplication with a placeholder
+// bucket name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketReplication() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketReplicationInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketReplication(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketRequestPayment calls GetBucketRequestPayment with a placeholder
+// bucket name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketRequestPayment() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketRequestPaymentInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketRequestPayment(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketTagging calls GetBucketTagging with a placeholder bucket name
+// and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketTagging() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketTaggingInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketTagging(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketVersioning calls GetBucketVersioning with a placeholder bucket
+// name and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketVersioning() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketVersioningInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketVersioning(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetBucketWebsite calls GetBucketWebsite with a placeholder bucket name
+// and prints either the AWS error details or the pretty-printed response.
+func ExampleS3_GetBucketWebsite() {
+ svc := s3.New(nil)
+
+ params := &s3.GetBucketWebsiteInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.GetBucketWebsite(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetObject builds a GetObjectInput exercising every optional field
+// (conditional headers, range, response-header overrides, SSE-C fields, version ID)
+// with placeholder values, calls GetObject, and prints either the AWS error details
+// or the pretty-printed response.
+func ExampleS3_GetObject() {
+ svc := s3.New(nil)
+
+ params := &s3.GetObjectInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ IfMatch: aws.String("IfMatch"),
+ IfModifiedSince: aws.Time(time.Now()),
+ IfNoneMatch: aws.String("IfNoneMatch"),
+ IfUnmodifiedSince: aws.Time(time.Now()),
+ Range: aws.String("Range"),
+ RequestPayer: aws.String("RequestPayer"),
+ ResponseCacheControl: aws.String("ResponseCacheControl"),
+ ResponseContentDisposition: aws.String("ResponseContentDisposition"),
+ ResponseContentEncoding: aws.String("ResponseContentEncoding"),
+ ResponseContentLanguage: aws.String("ResponseContentLanguage"),
+ ResponseContentType: aws.String("ResponseContentType"),
+ ResponseExpires: aws.Time(time.Now()),
+ SSECustomerAlgorithm: aws.String("SSECustomerAlgorithm"),
+ SSECustomerKey: aws.String("SSECustomerKey"),
+ SSECustomerKeyMD5: aws.String("SSECustomerKeyMD5"),
+ VersionID: aws.String("ObjectVersionId"),
+ }
+ resp, err := svc.GetObject(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetObjectACL calls GetObjectACL with placeholder bucket, key,
+// request-payer, and version-ID values, then prints either the AWS error details
+// or the pretty-printed response.
+func ExampleS3_GetObjectACL() {
+ svc := s3.New(nil)
+
+ params := &s3.GetObjectACLInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ RequestPayer: aws.String("RequestPayer"),
+ VersionID: aws.String("ObjectVersionId"),
+ }
+ resp, err := svc.GetObjectACL(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_GetObjectTorrent calls GetObjectTorrent with placeholder bucket, key,
+// and request-payer values, then prints either the AWS error details or the
+// pretty-printed response.
+func ExampleS3_GetObjectTorrent() {
+ svc := s3.New(nil)
+
+ params := &s3.GetObjectTorrentInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ RequestPayer: aws.String("RequestPayer"),
+ }
+ resp, err := svc.GetObjectTorrent(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_HeadBucket calls HeadBucket with a placeholder bucket name and prints
+// either the AWS error details or the pretty-printed response.
+func ExampleS3_HeadBucket() {
+ svc := s3.New(nil)
+
+ params := &s3.HeadBucketInput{
+ Bucket: aws.String("BucketName"), // Required
+ }
+ resp, err := svc.HeadBucket(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_HeadObject builds a HeadObjectInput with placeholder values for the
+// conditional headers, range, SSE-C fields, and version ID, calls HeadObject, and
+// prints either the AWS error details or the pretty-printed response.
+func ExampleS3_HeadObject() {
+ svc := s3.New(nil)
+
+ params := &s3.HeadObjectInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ IfMatch: aws.String("IfMatch"),
+ IfModifiedSince: aws.Time(time.Now()),
+ IfNoneMatch: aws.String("IfNoneMatch"),
+ IfUnmodifiedSince: aws.Time(time.Now()),
+ Range: aws.String("Range"),
+ RequestPayer: aws.String("RequestPayer"),
+ SSECustomerAlgorithm: aws.String("SSECustomerAlgorithm"),
+ SSECustomerKey: aws.String("SSECustomerKey"),
+ SSECustomerKeyMD5: aws.String("SSECustomerKeyMD5"),
+ VersionID: aws.String("ObjectVersionId"),
+ }
+ resp, err := svc.HeadObject(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_ListBuckets calls ListBuckets with a nil *ListBucketsInput (the
+// operation takes no parameters) and prints either the AWS error details or the
+// pretty-printed response.
+func ExampleS3_ListBuckets() {
+ svc := s3.New(nil)
+
+ var params *s3.ListBucketsInput
+ resp, err := svc.ListBuckets(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_ListMultipartUploads calls ListMultipartUploads with placeholder
+// pagination/filter fields (delimiter, markers, max uploads, prefix) and prints
+// either the AWS error details or the pretty-printed response.
+func ExampleS3_ListMultipartUploads() {
+ svc := s3.New(nil)
+
+ params := &s3.ListMultipartUploadsInput{
+ Bucket: aws.String("BucketName"), // Required
+ Delimiter: aws.String("Delimiter"),
+ EncodingType: aws.String("EncodingType"),
+ KeyMarker: aws.String("KeyMarker"),
+ MaxUploads: aws.Long(1),
+ Prefix: aws.String("Prefix"),
+ UploadIDMarker: aws.String("UploadIdMarker"),
+ }
+ resp, err := svc.ListMultipartUploads(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_ListObjectVersions calls ListObjectVersions with placeholder
+// pagination/filter fields (delimiter, key/version markers, max keys, prefix) and
+// prints either the AWS error details or the pretty-printed response.
+func ExampleS3_ListObjectVersions() {
+ svc := s3.New(nil)
+
+ params := &s3.ListObjectVersionsInput{
+ Bucket: aws.String("BucketName"), // Required
+ Delimiter: aws.String("Delimiter"),
+ EncodingType: aws.String("EncodingType"),
+ KeyMarker: aws.String("KeyMarker"),
+ MaxKeys: aws.Long(1),
+ Prefix: aws.String("Prefix"),
+ VersionIDMarker: aws.String("VersionIdMarker"),
+ }
+ resp, err := svc.ListObjectVersions(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_ListObjects calls ListObjects with placeholder pagination/filter
+// fields (delimiter, marker, max keys, prefix) and prints either the AWS error
+// details or the pretty-printed response.
+func ExampleS3_ListObjects() {
+ svc := s3.New(nil)
+
+ params := &s3.ListObjectsInput{
+ Bucket: aws.String("BucketName"), // Required
+ Delimiter: aws.String("Delimiter"),
+ EncodingType: aws.String("EncodingType"),
+ Marker: aws.String("Marker"),
+ MaxKeys: aws.Long(1),
+ Prefix: aws.String("Prefix"),
+ }
+ resp, err := svc.ListObjects(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_ListParts calls ListParts for a placeholder multipart upload ID with
+// placeholder pagination fields, and prints either the AWS error details or the
+// pretty-printed response.
+func ExampleS3_ListParts() {
+ svc := s3.New(nil)
+
+ params := &s3.ListPartsInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ UploadID: aws.String("MultipartUploadId"), // Required
+ MaxParts: aws.Long(1),
+ PartNumberMarker: aws.Long(1),
+ RequestPayer: aws.String("RequestPayer"),
+ }
+ resp, err := svc.ListParts(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_PutBucketACL builds a PutBucketACLInput with a placeholder
+// AccessControlPolicy (grants plus owner) and grant header fields, calls
+// PutBucketACL, and prints either the AWS error details or the pretty-printed
+// response.
+func ExampleS3_PutBucketACL() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketACLInput{
+ Bucket: aws.String("BucketName"), // Required
+ ACL: aws.String("BucketCannedACL"),
+ AccessControlPolicy: &s3.AccessControlPolicy{
+ Grants: []*s3.Grant{
+ { // Required
+ Grantee: &s3.Grantee{
+ Type: aws.String("Type"), // Required
+ DisplayName: aws.String("DisplayName"),
+ EmailAddress: aws.String("EmailAddress"),
+ ID: aws.String("ID"),
+ URI: aws.String("URI"),
+ },
+ Permission: aws.String("Permission"),
+ },
+ // More values...
+ },
+ Owner: &s3.Owner{
+ DisplayName: aws.String("DisplayName"),
+ ID: aws.String("ID"),
+ },
+ },
+ GrantFullControl: aws.String("GrantFullControl"),
+ GrantRead: aws.String("GrantRead"),
+ GrantReadACP: aws.String("GrantReadACP"),
+ GrantWrite: aws.String("GrantWrite"),
+ GrantWriteACP: aws.String("GrantWriteACP"),
+ }
+ resp, err := svc.PutBucketACL(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+// ExampleS3_PutBucketCORS builds a PutBucketCORSInput containing one placeholder
+// CORS rule (allowed headers/methods/origins, exposed headers, max age), calls
+// PutBucketCORS, and prints either the AWS error details or the pretty-printed
+// response.
+func ExampleS3_PutBucketCORS() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketCORSInput{
+ Bucket: aws.String("BucketName"), // Required
+ CORSConfiguration: &s3.CORSConfiguration{
+ CORSRules: []*s3.CORSRule{
+ { // Required
+ AllowedHeaders: []*string{
+ aws.String("AllowedHeader"), // Required
+ // More values...
+ },
+ AllowedMethods: []*string{
+ aws.String("AllowedMethod"), // Required
+ // More values...
+ },
+ AllowedOrigins: []*string{
+ aws.String("AllowedOrigin"), // Required
+ // More values...
+ },
+ ExposeHeaders: []*string{
+ aws.String("ExposeHeader"), // Required
+ // More values...
+ },
+ MaxAgeSeconds: aws.Long(1),
+ },
+ // More values...
+ },
+ },
+ }
+ resp, err := svc.PutBucketCORS(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketLifecycle() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketLifecycleInput{
+ Bucket: aws.String("BucketName"), // Required
+ LifecycleConfiguration: &s3.LifecycleConfiguration{
+ Rules: []*s3.LifecycleRule{ // Required
+ { // Required
+ Prefix: aws.String("Prefix"), // Required
+ Status: aws.String("ExpirationStatus"), // Required
+ Expiration: &s3.LifecycleExpiration{
+ Date: aws.Time(time.Now()),
+ Days: aws.Long(1),
+ },
+ ID: aws.String("ID"),
+ NoncurrentVersionExpiration: &s3.NoncurrentVersionExpiration{
+ NoncurrentDays: aws.Long(1),
+ },
+ NoncurrentVersionTransition: &s3.NoncurrentVersionTransition{
+ NoncurrentDays: aws.Long(1),
+ StorageClass: aws.String("TransitionStorageClass"),
+ },
+ Transition: &s3.Transition{
+ Date: aws.Time(time.Now()),
+ Days: aws.Long(1),
+ StorageClass: aws.String("TransitionStorageClass"),
+ },
+ },
+ // More values...
+ },
+ },
+ }
+ resp, err := svc.PutBucketLifecycle(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketLogging() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketLoggingInput{
+ Bucket: aws.String("BucketName"), // Required
+ BucketLoggingStatus: &s3.BucketLoggingStatus{ // Required
+ LoggingEnabled: &s3.LoggingEnabled{
+ TargetBucket: aws.String("TargetBucket"),
+ TargetGrants: []*s3.TargetGrant{
+ { // Required
+ Grantee: &s3.Grantee{
+ Type: aws.String("Type"), // Required
+ DisplayName: aws.String("DisplayName"),
+ EmailAddress: aws.String("EmailAddress"),
+ ID: aws.String("ID"),
+ URI: aws.String("URI"),
+ },
+ Permission: aws.String("BucketLogsPermission"),
+ },
+ // More values...
+ },
+ TargetPrefix: aws.String("TargetPrefix"),
+ },
+ },
+ }
+ resp, err := svc.PutBucketLogging(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketNotification() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketNotificationInput{
+ Bucket: aws.String("BucketName"), // Required
+ NotificationConfiguration: &s3.NotificationConfigurationDeprecated{ // Required
+ CloudFunctionConfiguration: &s3.CloudFunctionConfiguration{
+ CloudFunction: aws.String("CloudFunction"),
+ Event: aws.String("Event"),
+ Events: []*string{
+ aws.String("Event"), // Required
+ // More values...
+ },
+ ID: aws.String("NotificationId"),
+ InvocationRole: aws.String("CloudFunctionInvocationRole"),
+ },
+ QueueConfiguration: &s3.QueueConfigurationDeprecated{
+ Event: aws.String("Event"),
+ Events: []*string{
+ aws.String("Event"), // Required
+ // More values...
+ },
+ ID: aws.String("NotificationId"),
+ Queue: aws.String("QueueArn"),
+ },
+ TopicConfiguration: &s3.TopicConfigurationDeprecated{
+ Event: aws.String("Event"),
+ Events: []*string{
+ aws.String("Event"), // Required
+ // More values...
+ },
+ ID: aws.String("NotificationId"),
+ Topic: aws.String("TopicArn"),
+ },
+ },
+ }
+ resp, err := svc.PutBucketNotification(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketNotificationConfiguration() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketNotificationConfigurationInput{
+ Bucket: aws.String("BucketName"), // Required
+ NotificationConfiguration: &s3.NotificationConfiguration{ // Required
+ LambdaFunctionConfigurations: []*s3.LambdaFunctionConfiguration{
+ { // Required
+ Events: []*string{ // Required
+ aws.String("Event"), // Required
+ // More values...
+ },
+ LambdaFunctionARN: aws.String("LambdaFunctionArn"), // Required
+ ID: aws.String("NotificationId"),
+ },
+ // More values...
+ },
+ QueueConfigurations: []*s3.QueueConfiguration{
+ { // Required
+ Events: []*string{ // Required
+ aws.String("Event"), // Required
+ // More values...
+ },
+ QueueARN: aws.String("QueueArn"), // Required
+ ID: aws.String("NotificationId"),
+ },
+ // More values...
+ },
+ TopicConfigurations: []*s3.TopicConfiguration{
+ { // Required
+ Events: []*string{ // Required
+ aws.String("Event"), // Required
+ // More values...
+ },
+ TopicARN: aws.String("TopicArn"), // Required
+ ID: aws.String("NotificationId"),
+ },
+ // More values...
+ },
+ },
+ }
+ resp, err := svc.PutBucketNotificationConfiguration(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketPolicy() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketPolicyInput{
+ Bucket: aws.String("BucketName"), // Required
+ Policy: aws.String("Policy"), // Required
+ }
+ resp, err := svc.PutBucketPolicy(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketReplication() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketReplicationInput{
+ Bucket: aws.String("BucketName"), // Required
+ ReplicationConfiguration: &s3.ReplicationConfiguration{ // Required
+ Role: aws.String("Role"), // Required
+ Rules: []*s3.ReplicationRule{ // Required
+ { // Required
+ Destination: &s3.Destination{ // Required
+ Bucket: aws.String("BucketName"), // Required
+ },
+ Prefix: aws.String("Prefix"), // Required
+ Status: aws.String("ReplicationRuleStatus"), // Required
+ ID: aws.String("ID"),
+ },
+ // More values...
+ },
+ },
+ }
+ resp, err := svc.PutBucketReplication(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketRequestPayment() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketRequestPaymentInput{
+ Bucket: aws.String("BucketName"), // Required
+ RequestPaymentConfiguration: &s3.RequestPaymentConfiguration{ // Required
+ Payer: aws.String("Payer"), // Required
+ },
+ }
+ resp, err := svc.PutBucketRequestPayment(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketTagging() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketTaggingInput{
+ Bucket: aws.String("BucketName"), // Required
+ Tagging: &s3.Tagging{ // Required
+ TagSet: []*s3.Tag{ // Required
+ { // Required
+ Key: aws.String("ObjectKey"), // Required
+ Value: aws.String("Value"), // Required
+ },
+ // More values...
+ },
+ },
+ }
+ resp, err := svc.PutBucketTagging(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketVersioning() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketVersioningInput{
+ Bucket: aws.String("BucketName"), // Required
+ VersioningConfiguration: &s3.VersioningConfiguration{ // Required
+ MFADelete: aws.String("MFADelete"),
+ Status: aws.String("BucketVersioningStatus"),
+ },
+ MFA: aws.String("MFA"),
+ }
+ resp, err := svc.PutBucketVersioning(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutBucketWebsite() {
+ svc := s3.New(nil)
+
+ params := &s3.PutBucketWebsiteInput{
+ Bucket: aws.String("BucketName"), // Required
+ WebsiteConfiguration: &s3.WebsiteConfiguration{ // Required
+ ErrorDocument: &s3.ErrorDocument{
+ Key: aws.String("ObjectKey"), // Required
+ },
+ IndexDocument: &s3.IndexDocument{
+ Suffix: aws.String("Suffix"), // Required
+ },
+ RedirectAllRequestsTo: &s3.RedirectAllRequestsTo{
+ HostName: aws.String("HostName"), // Required
+ Protocol: aws.String("Protocol"),
+ },
+ RoutingRules: []*s3.RoutingRule{
+ { // Required
+ Redirect: &s3.Redirect{ // Required
+ HTTPRedirectCode: aws.String("HttpRedirectCode"),
+ HostName: aws.String("HostName"),
+ Protocol: aws.String("Protocol"),
+ ReplaceKeyPrefixWith: aws.String("ReplaceKeyPrefixWith"),
+ ReplaceKeyWith: aws.String("ReplaceKeyWith"),
+ },
+ Condition: &s3.Condition{
+ HTTPErrorCodeReturnedEquals: aws.String("HttpErrorCodeReturnedEquals"),
+ KeyPrefixEquals: aws.String("KeyPrefixEquals"),
+ },
+ },
+ // More values...
+ },
+ },
+ }
+ resp, err := svc.PutBucketWebsite(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutObject() {
+ svc := s3.New(nil)
+
+ params := &s3.PutObjectInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ ACL: aws.String("ObjectCannedACL"),
+ Body: bytes.NewReader([]byte("PAYLOAD")),
+ CacheControl: aws.String("CacheControl"),
+ ContentDisposition: aws.String("ContentDisposition"),
+ ContentEncoding: aws.String("ContentEncoding"),
+ ContentLanguage: aws.String("ContentLanguage"),
+ ContentLength: aws.Long(1),
+ ContentType: aws.String("ContentType"),
+ Expires: aws.Time(time.Now()),
+ GrantFullControl: aws.String("GrantFullControl"),
+ GrantRead: aws.String("GrantRead"),
+ GrantReadACP: aws.String("GrantReadACP"),
+ GrantWriteACP: aws.String("GrantWriteACP"),
+ Metadata: map[string]*string{
+ "Key": aws.String("MetadataValue"), // Required
+ // More values...
+ },
+ RequestPayer: aws.String("RequestPayer"),
+ SSECustomerAlgorithm: aws.String("SSECustomerAlgorithm"),
+ SSECustomerKey: aws.String("SSECustomerKey"),
+ SSECustomerKeyMD5: aws.String("SSECustomerKeyMD5"),
+ SSEKMSKeyID: aws.String("SSEKMSKeyId"),
+ ServerSideEncryption: aws.String("ServerSideEncryption"),
+ StorageClass: aws.String("StorageClass"),
+ WebsiteRedirectLocation: aws.String("WebsiteRedirectLocation"),
+ }
+ resp, err := svc.PutObject(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_PutObjectACL() {
+ svc := s3.New(nil)
+
+ params := &s3.PutObjectACLInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ ACL: aws.String("ObjectCannedACL"),
+ AccessControlPolicy: &s3.AccessControlPolicy{
+ Grants: []*s3.Grant{
+ { // Required
+ Grantee: &s3.Grantee{
+ Type: aws.String("Type"), // Required
+ DisplayName: aws.String("DisplayName"),
+ EmailAddress: aws.String("EmailAddress"),
+ ID: aws.String("ID"),
+ URI: aws.String("URI"),
+ },
+ Permission: aws.String("Permission"),
+ },
+ // More values...
+ },
+ Owner: &s3.Owner{
+ DisplayName: aws.String("DisplayName"),
+ ID: aws.String("ID"),
+ },
+ },
+ GrantFullControl: aws.String("GrantFullControl"),
+ GrantRead: aws.String("GrantRead"),
+ GrantReadACP: aws.String("GrantReadACP"),
+ GrantWrite: aws.String("GrantWrite"),
+ GrantWriteACP: aws.String("GrantWriteACP"),
+ RequestPayer: aws.String("RequestPayer"),
+ }
+ resp, err := svc.PutObjectACL(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_RestoreObject() {
+ svc := s3.New(nil)
+
+ params := &s3.RestoreObjectInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ RequestPayer: aws.String("RequestPayer"),
+ RestoreRequest: &s3.RestoreRequest{
+ Days: aws.Long(1), // Required
+ },
+ VersionID: aws.String("ObjectVersionId"),
+ }
+ resp, err := svc.RestoreObject(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_UploadPart() {
+ svc := s3.New(nil)
+
+ params := &s3.UploadPartInput{
+ Bucket: aws.String("BucketName"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ PartNumber: aws.Long(1), // Required
+ UploadID: aws.String("MultipartUploadId"), // Required
+ Body: bytes.NewReader([]byte("PAYLOAD")),
+ ContentLength: aws.Long(1),
+ RequestPayer: aws.String("RequestPayer"),
+ SSECustomerAlgorithm: aws.String("SSECustomerAlgorithm"),
+ SSECustomerKey: aws.String("SSECustomerKey"),
+ SSECustomerKeyMD5: aws.String("SSECustomerKeyMD5"),
+ }
+ resp, err := svc.UploadPart(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
+
+func ExampleS3_UploadPartCopy() {
+ svc := s3.New(nil)
+
+ params := &s3.UploadPartCopyInput{
+ Bucket: aws.String("BucketName"), // Required
+ CopySource: aws.String("CopySource"), // Required
+ Key: aws.String("ObjectKey"), // Required
+ PartNumber: aws.Long(1), // Required
+ UploadID: aws.String("MultipartUploadId"), // Required
+ CopySourceIfMatch: aws.String("CopySourceIfMatch"),
+ CopySourceIfModifiedSince: aws.Time(time.Now()),
+ CopySourceIfNoneMatch: aws.String("CopySourceIfNoneMatch"),
+ CopySourceIfUnmodifiedSince: aws.Time(time.Now()),
+ CopySourceRange: aws.String("CopySourceRange"),
+ CopySourceSSECustomerAlgorithm: aws.String("CopySourceSSECustomerAlgorithm"),
+ CopySourceSSECustomerKey: aws.String("CopySourceSSECustomerKey"),
+ CopySourceSSECustomerKeyMD5: aws.String("CopySourceSSECustomerKeyMD5"),
+ RequestPayer: aws.String("RequestPayer"),
+ SSECustomerAlgorithm: aws.String("SSECustomerAlgorithm"),
+ SSECustomerKey: aws.String("SSECustomerKey"),
+ SSECustomerKeyMD5: aws.String("SSECustomerKeyMD5"),
+ }
+ resp, err := svc.UploadPartCopy(params)
+
+ if err != nil {
+ if awsErr, ok := err.(awserr.Error); ok {
+ // Generic AWS Error with Code, Message, and original error (if any)
+ fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
+ if reqErr, ok := err.(awserr.RequestFailure); ok {
+ // A service error occurred
+ fmt.Println(reqErr.Code(), reqErr.Message(), reqErr.StatusCode(), reqErr.RequestID())
+ }
+ } else {
+ // This case should never be hit, the SDK should always return an
+ // error which satisfies the awserr.Error interface.
+ fmt.Println(err.Error())
+ }
+ }
+
+ // Pretty-print the response data.
+ fmt.Println(awsutil.StringValue(resp))
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/host_style_bucket.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/host_style_bucket.go
new file mode 100644
index 0000000..07c62dc
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/host_style_bucket.go
@@ -0,0 +1,53 @@
+package s3
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+)
+
+var reDomain = regexp.MustCompile(`^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$`)
+var reIPAddress = regexp.MustCompile(`^(\d+\.){3}\d+$`)
+
+// dnsCompatibleBucketName returns true if the bucket name is DNS compatible.
+// Buckets created outside of the classic region MUST be DNS compatible.
+func dnsCompatibleBucketName(bucket string) bool {
+ return reDomain.MatchString(bucket) &&
+ !reIPAddress.MatchString(bucket) &&
+ !strings.Contains(bucket, "..")
+}
+
+// hostStyleBucketName returns true if the request should put the bucket in
+// the host. This is false if S3ForcePathStyle is explicitly set or if the
+// bucket is not DNS compatible.
+func hostStyleBucketName(r *aws.Request, bucket string) bool {
+ if r.Config.S3ForcePathStyle {
+ return false
+ }
+
+ // Bucket might be DNS compatible but dots in the hostname will fail
+ // certificate validation, so do not use host-style.
+ if r.HTTPRequest.URL.Scheme == "https" && strings.Contains(bucket, ".") {
+ return false
+ }
+
+ // Use host-style if the bucket is DNS compatible
+ return dnsCompatibleBucketName(bucket)
+}
+
+func updateHostWithBucket(r *aws.Request) {
+ b := awsutil.ValuesAtPath(r.Params, "Bucket")
+ if len(b) == 0 {
+ return
+ }
+
+ if bucket := b[0].(string); bucket != "" && hostStyleBucketName(r, bucket) {
+ r.HTTPRequest.URL.Host = bucket + "." + r.HTTPRequest.URL.Host
+ r.HTTPRequest.URL.Path = strings.Replace(r.HTTPRequest.URL.Path, "/{Bucket}", "", -1)
+ if r.HTTPRequest.URL.Path == "" {
+ r.HTTPRequest.URL.Path = "/"
+ }
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/host_style_bucket_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/host_style_bucket_test.go
new file mode 100644
index 0000000..23cdf7f
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/host_style_bucket_test.go
@@ -0,0 +1,61 @@
+package s3_test
+
+import (
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/test/unit"
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/stretchr/testify/assert"
+)
+
+type s3BucketTest struct {
+ bucket string
+ url string
+}
+
+var (
+ _ = unit.Imported
+
+ sslTests = []s3BucketTest{
+ {"abc", "https://abc.s3.mock-region.amazonaws.com/"},
+ {"a$b$c", "https://s3.mock-region.amazonaws.com/a%24b%24c"},
+ {"a.b.c", "https://s3.mock-region.amazonaws.com/a.b.c"},
+ {"a..bc", "https://s3.mock-region.amazonaws.com/a..bc"},
+ }
+
+ nosslTests = []s3BucketTest{
+ {"a.b.c", "http://a.b.c.s3.mock-region.amazonaws.com/"},
+ {"a..bc", "http://s3.mock-region.amazonaws.com/a..bc"},
+ }
+
+ forcepathTests = []s3BucketTest{
+ {"abc", "https://s3.mock-region.amazonaws.com/abc"},
+ {"a$b$c", "https://s3.mock-region.amazonaws.com/a%24b%24c"},
+ {"a.b.c", "https://s3.mock-region.amazonaws.com/a.b.c"},
+ {"a..bc", "https://s3.mock-region.amazonaws.com/a..bc"},
+ }
+)
+
+func runTests(t *testing.T, svc *s3.S3, tests []s3BucketTest) {
+ for _, test := range tests {
+ req, _ := svc.ListObjectsRequest(&s3.ListObjectsInput{Bucket: &test.bucket})
+ req.Build()
+ assert.Equal(t, test.url, req.HTTPRequest.URL.String())
+ }
+}
+
+func TestHostStyleBucketBuild(t *testing.T) {
+ s := s3.New(nil)
+ runTests(t, s, sslTests)
+}
+
+func TestHostStyleBucketBuildNoSSL(t *testing.T) {
+ s := s3.New(&aws.Config{DisableSSL: true})
+ runTests(t, s, nosslTests)
+}
+
+func TestPathStyleBucketBuild(t *testing.T) {
+ s := s3.New(&aws.Config{S3ForcePathStyle: true})
+ runTests(t, s, forcepathTests)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3iface/interface.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3iface/interface.go
new file mode 100644
index 0000000..d799e25
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3iface/interface.go
@@ -0,0 +1,119 @@
+// THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
+
+// Package s3iface provides an interface for the Amazon Simple Storage Service.
+package s3iface
+
+import (
+ "github.com/aws/aws-sdk-go/service/s3"
+)
+
+// S3API is the interface type for s3.S3.
+type S3API interface {
+ AbortMultipartUpload(*s3.AbortMultipartUploadInput) (*s3.AbortMultipartUploadOutput, error)
+
+ CompleteMultipartUpload(*s3.CompleteMultipartUploadInput) (*s3.CompleteMultipartUploadOutput, error)
+
+ CopyObject(*s3.CopyObjectInput) (*s3.CopyObjectOutput, error)
+
+ CreateBucket(*s3.CreateBucketInput) (*s3.CreateBucketOutput, error)
+
+ CreateMultipartUpload(*s3.CreateMultipartUploadInput) (*s3.CreateMultipartUploadOutput, error)
+
+ DeleteBucket(*s3.DeleteBucketInput) (*s3.DeleteBucketOutput, error)
+
+ DeleteBucketCORS(*s3.DeleteBucketCORSInput) (*s3.DeleteBucketCORSOutput, error)
+
+ DeleteBucketLifecycle(*s3.DeleteBucketLifecycleInput) (*s3.DeleteBucketLifecycleOutput, error)
+
+ DeleteBucketPolicy(*s3.DeleteBucketPolicyInput) (*s3.DeleteBucketPolicyOutput, error)
+
+ DeleteBucketReplication(*s3.DeleteBucketReplicationInput) (*s3.DeleteBucketReplicationOutput, error)
+
+ DeleteBucketTagging(*s3.DeleteBucketTaggingInput) (*s3.DeleteBucketTaggingOutput, error)
+
+ DeleteBucketWebsite(*s3.DeleteBucketWebsiteInput) (*s3.DeleteBucketWebsiteOutput, error)
+
+ DeleteObject(*s3.DeleteObjectInput) (*s3.DeleteObjectOutput, error)
+
+ DeleteObjects(*s3.DeleteObjectsInput) (*s3.DeleteObjectsOutput, error)
+
+ GetBucketACL(*s3.GetBucketACLInput) (*s3.GetBucketACLOutput, error)
+
+ GetBucketCORS(*s3.GetBucketCORSInput) (*s3.GetBucketCORSOutput, error)
+
+ GetBucketLifecycle(*s3.GetBucketLifecycleInput) (*s3.GetBucketLifecycleOutput, error)
+
+ GetBucketLocation(*s3.GetBucketLocationInput) (*s3.GetBucketLocationOutput, error)
+
+ GetBucketLogging(*s3.GetBucketLoggingInput) (*s3.GetBucketLoggingOutput, error)
+
+ GetBucketNotification(*s3.GetBucketNotificationConfigurationRequest) (*s3.NotificationConfigurationDeprecated, error)
+
+ GetBucketNotificationConfiguration(*s3.GetBucketNotificationConfigurationRequest) (*s3.NotificationConfiguration, error)
+
+ GetBucketPolicy(*s3.GetBucketPolicyInput) (*s3.GetBucketPolicyOutput, error)
+
+ GetBucketReplication(*s3.GetBucketReplicationInput) (*s3.GetBucketReplicationOutput, error)
+
+ GetBucketRequestPayment(*s3.GetBucketRequestPaymentInput) (*s3.GetBucketRequestPaymentOutput, error)
+
+ GetBucketTagging(*s3.GetBucketTaggingInput) (*s3.GetBucketTaggingOutput, error)
+
+ GetBucketVersioning(*s3.GetBucketVersioningInput) (*s3.GetBucketVersioningOutput, error)
+
+ GetBucketWebsite(*s3.GetBucketWebsiteInput) (*s3.GetBucketWebsiteOutput, error)
+
+ GetObject(*s3.GetObjectInput) (*s3.GetObjectOutput, error)
+
+ GetObjectACL(*s3.GetObjectACLInput) (*s3.GetObjectACLOutput, error)
+
+ GetObjectTorrent(*s3.GetObjectTorrentInput) (*s3.GetObjectTorrentOutput, error)
+
+ HeadBucket(*s3.HeadBucketInput) (*s3.HeadBucketOutput, error)
+
+ HeadObject(*s3.HeadObjectInput) (*s3.HeadObjectOutput, error)
+
+ ListBuckets(*s3.ListBucketsInput) (*s3.ListBucketsOutput, error)
+
+ ListMultipartUploads(*s3.ListMultipartUploadsInput) (*s3.ListMultipartUploadsOutput, error)
+
+ ListObjectVersions(*s3.ListObjectVersionsInput) (*s3.ListObjectVersionsOutput, error)
+
+ ListObjects(*s3.ListObjectsInput) (*s3.ListObjectsOutput, error)
+
+ ListParts(*s3.ListPartsInput) (*s3.ListPartsOutput, error)
+
+ PutBucketACL(*s3.PutBucketACLInput) (*s3.PutBucketACLOutput, error)
+
+ PutBucketCORS(*s3.PutBucketCORSInput) (*s3.PutBucketCORSOutput, error)
+
+ PutBucketLifecycle(*s3.PutBucketLifecycleInput) (*s3.PutBucketLifecycleOutput, error)
+
+ PutBucketLogging(*s3.PutBucketLoggingInput) (*s3.PutBucketLoggingOutput, error)
+
+ PutBucketNotification(*s3.PutBucketNotificationInput) (*s3.PutBucketNotificationOutput, error)
+
+ PutBucketNotificationConfiguration(*s3.PutBucketNotificationConfigurationInput) (*s3.PutBucketNotificationConfigurationOutput, error)
+
+ PutBucketPolicy(*s3.PutBucketPolicyInput) (*s3.PutBucketPolicyOutput, error)
+
+ PutBucketReplication(*s3.PutBucketReplicationInput) (*s3.PutBucketReplicationOutput, error)
+
+ PutBucketRequestPayment(*s3.PutBucketRequestPaymentInput) (*s3.PutBucketRequestPaymentOutput, error)
+
+ PutBucketTagging(*s3.PutBucketTaggingInput) (*s3.PutBucketTaggingOutput, error)
+
+ PutBucketVersioning(*s3.PutBucketVersioningInput) (*s3.PutBucketVersioningOutput, error)
+
+ PutBucketWebsite(*s3.PutBucketWebsiteInput) (*s3.PutBucketWebsiteOutput, error)
+
+ PutObject(*s3.PutObjectInput) (*s3.PutObjectOutput, error)
+
+ PutObjectACL(*s3.PutObjectACLInput) (*s3.PutObjectACLOutput, error)
+
+ RestoreObject(*s3.RestoreObjectInput) (*s3.RestoreObjectOutput, error)
+
+ UploadPart(*s3.UploadPartInput) (*s3.UploadPartOutput, error)
+
+ UploadPartCopy(*s3.UploadPartCopyInput) (*s3.UploadPartCopyOutput, error)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3iface/interface_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3iface/interface_test.go
new file mode 100644
index 0000000..cd67215
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3iface/interface_test.go
@@ -0,0 +1,15 @@
+// THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
+
+package s3iface_test
+
+import (
+ "testing"
+
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/aws/aws-sdk-go/service/s3/s3iface"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestInterface(t *testing.T) {
+ assert.Implements(t, (*s3iface.S3API)(nil), s3.New(nil))
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/download.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/download.go
new file mode 100644
index 0000000..d6ebe02
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/download.go
@@ -0,0 +1,257 @@
+package s3manager
+
+import (
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+ "github.com/aws/aws-sdk-go/service/s3"
+)
+
+// The default range of bytes to get at a time when using Download().
+var DefaultDownloadPartSize int64 = 1024 * 1024 * 5
+
+// The default number of goroutines to spin up when using Download().
+var DefaultDownloadConcurrency = 5
+
+// The default set of options used when opts is nil in Download().
+var DefaultDownloadOptions = &DownloadOptions{
+ PartSize: DefaultDownloadPartSize,
+ Concurrency: DefaultDownloadConcurrency,
+}
+
+// DownloadOptions keeps tracks of extra options to pass to an Download() call.
+type DownloadOptions struct {
+ // The buffer size (in bytes) to use when buffering data into chunks and
+ // sending them as parts to S3. The minimum allowed part size is 5MB, and
+ // if this value is set to zero, the DefaultPartSize value will be used.
+ PartSize int64
+
+ // The number of goroutines to spin up in parallel when sending parts.
+ // If this is set to zero, the DefaultConcurrency value will be used.
+ Concurrency int
+
+ // An S3 client to use when performing downloads. Leave this as nil to use
+ // a default client.
+ S3 *s3.S3
+}
+
+// NewDownloader creates a new Downloader structure that downloads an object
+// from S3 in concurrent chunks. Pass in an optional DownloadOptions struct
+// to customize the downloader behavior.
+func NewDownloader(opts *DownloadOptions) *Downloader {
+ if opts == nil {
+ opts = DefaultDownloadOptions
+ }
+ return &Downloader{opts: opts}
+}
+
+// The Downloader structure that calls Download(). It is safe to call Download()
+// on this structure for multiple objects and across concurrent goroutines.
+type Downloader struct {
+ opts *DownloadOptions
+}
+
+// Download downloads an object in S3 and writes the payload into w using
+// concurrent GET requests.
+//
+// It is safe to call this method for multiple objects and across concurrent
+// goroutines.
+func (d *Downloader) Download(w io.WriterAt, input *s3.GetObjectInput) (n int64, err error) {
+ impl := downloader{w: w, in: input, opts: *d.opts}
+ return impl.download()
+}
+
+// downloader is the implementation structure used internally by Downloader.
+type downloader struct {
+ opts DownloadOptions
+ in *s3.GetObjectInput
+ w io.WriterAt
+
+ wg sync.WaitGroup
+ m sync.Mutex
+
+ pos int64
+ totalBytes int64
+ written int64
+ err error
+}
+
+// init initializes the downloader with default options.
+func (d *downloader) init() {
+ d.totalBytes = -1
+
+ if d.opts.Concurrency == 0 {
+ d.opts.Concurrency = DefaultDownloadConcurrency
+ }
+
+ if d.opts.PartSize == 0 {
+ d.opts.PartSize = DefaultDownloadPartSize
+ }
+
+ if d.opts.S3 == nil {
+ d.opts.S3 = s3.New(nil)
+ }
+}
+
+// download performs the implementation of the object download across ranged
+// GETs.
+func (d *downloader) download() (n int64, err error) {
+ d.init()
+
+ // Spin up workers
+ ch := make(chan dlchunk, d.opts.Concurrency)
+ for i := 0; i < d.opts.Concurrency; i++ {
+ d.wg.Add(1)
+ go d.downloadPart(ch)
+ }
+
+ // Assign work
+ for d.geterr() == nil {
+ if d.pos != 0 {
+ // This is not the first chunk, let's wait until we know the total
+ // size of the payload so we can see if we have read the entire
+ // object.
+ total := d.getTotalBytes()
+
+ if total < 0 {
+ // Total has not yet been set, so sleep and loop around while
+ // waiting for our first worker to resolve this value.
+ time.Sleep(10 * time.Millisecond)
+ continue
+ } else if d.pos >= total {
+ break // We're finished queueing chunks
+ }
+ }
+
+ // Queue the next range of bytes to read.
+ ch <- dlchunk{w: d.w, start: d.pos, size: d.opts.PartSize}
+ d.pos += d.opts.PartSize
+ }
+
+ // Wait for completion
+ close(ch)
+ d.wg.Wait()
+
+ // Return error
+ return d.written, d.err
+}
+
+// downloadPart is an individual goroutine worker reading from the ch channel
+// and performing a GetObject request on the data with a given byte range.
+//
+// If this is the first worker, this operation also resolves the total number
+// of bytes to be read so that the worker manager knows when it is finished.
+func (d *downloader) downloadPart(ch chan dlchunk) {
+ defer d.wg.Done()
+
+ for {
+ chunk, ok := <-ch
+
+ if !ok {
+ break
+ }
+
+ if d.geterr() == nil {
+ // Get the next byte range of data
+ in := &s3.GetObjectInput{}
+ awsutil.Copy(in, d.in)
+ rng := fmt.Sprintf("bytes=%d-%d",
+ chunk.start, chunk.start+chunk.size-1)
+ in.Range = &rng
+
+ resp, err := d.opts.S3.GetObject(in)
+ if err != nil {
+ d.seterr(err)
+ } else {
+ d.setTotalBytes(resp) // Set total if not yet set.
+
+ n, err := io.Copy(&chunk, resp.Body)
+ resp.Body.Close()
+
+ if err != nil {
+ d.seterr(err)
+ }
+ d.incrwritten(n)
+ }
+ }
+ }
+}
+
+// getTotalBytes is a thread-safe getter for retrieving the total byte status.
+func (d *downloader) getTotalBytes() int64 {
+ d.m.Lock()
+ defer d.m.Unlock()
+
+ return d.totalBytes
+}
+
+// getTotalBytes is a thread-safe setter for setting the total byte status.
+func (d *downloader) setTotalBytes(resp *s3.GetObjectOutput) {
+ d.m.Lock()
+ defer d.m.Unlock()
+
+ if d.totalBytes >= 0 {
+ return
+ }
+
+ parts := strings.Split(*resp.ContentRange, "/")
+ total, err := strconv.ParseInt(parts[len(parts)-1], 10, 64)
+ if err != nil {
+ d.err = err
+ return
+ }
+
+ d.totalBytes = total
+}
+
+func (d *downloader) incrwritten(n int64) {
+ d.m.Lock()
+ defer d.m.Unlock()
+
+ d.written += n
+}
+
+// geterr is a thread-safe getter for the error object
+func (d *downloader) geterr() error {
+ d.m.Lock()
+ defer d.m.Unlock()
+
+ return d.err
+}
+
+// seterr is a thread-safe setter for the error object
+func (d *downloader) seterr(e error) {
+ d.m.Lock()
+ defer d.m.Unlock()
+
+ d.err = e
+}
+
+// dlchunk represents a single chunk of data to write by the worker routine.
+// This structure also implements an io.SectionReader style interface for
+// io.WriterAt, effectively making it an io.SectionWriter (which does not
+// exist).
+type dlchunk struct {
+ w io.WriterAt
+ start int64
+ size int64
+ cur int64
+}
+
+// Write wraps io.WriterAt for the dlchunk, writing from the dlchunk's start
+// position to its end (or EOF).
+func (c *dlchunk) Write(p []byte) (n int, err error) {
+ if c.cur >= c.size {
+ return 0, io.EOF
+ }
+
+ n, err = c.w.WriteAt(p, c.start+c.cur)
+ c.cur += int64(n)
+
+ return
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/download_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/download_test.go
new file mode 100644
index 0000000..2c9c3b4
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/download_test.go
@@ -0,0 +1,165 @@
+package s3manager_test
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "regexp"
+ "strconv"
+ "sync"
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/test/unit"
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/aws/aws-sdk-go/service/s3/s3manager"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ = unit.Imported
+
+func dlLoggingSvc(data []byte) (*s3.S3, *[]string, *[]string) {
+ var m sync.Mutex
+ names := []string{}
+ ranges := []string{}
+
+ svc := s3.New(nil)
+ svc.Handlers.Send.Clear()
+ svc.Handlers.Send.PushBack(func(r *aws.Request) {
+ m.Lock()
+ defer m.Unlock()
+
+ names = append(names, r.Operation.Name)
+ ranges = append(ranges, *r.Params.(*s3.GetObjectInput).Range)
+
+ rerng := regexp.MustCompile(`bytes=(\d+)-(\d+)`)
+ rng := rerng.FindStringSubmatch(r.HTTPRequest.Header.Get("Range"))
+ start, _ := strconv.ParseInt(rng[1], 10, 64)
+ fin, _ := strconv.ParseInt(rng[2], 10, 64)
+ fin++
+
+ if fin > int64(len(data)) {
+ fin = int64(len(data))
+ }
+
+ r.HTTPResponse = &http.Response{
+ StatusCode: 200,
+ Body: ioutil.NopCloser(bytes.NewReader(data[start:fin])),
+ Header: http.Header{},
+ }
+ r.HTTPResponse.Header.Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d",
+ start, fin, len(data)))
+ })
+
+ return svc, &names, &ranges
+}
+
+type dlwriter struct {
+ buf []byte
+}
+
+func newDLWriter(size int) *dlwriter {
+ return &dlwriter{buf: make([]byte, size)}
+}
+
+func (d dlwriter) WriteAt(p []byte, pos int64) (n int, err error) {
+ if pos > int64(len(d.buf)) {
+ return 0, io.EOF
+ }
+
+ written := 0
+ for i, b := range p {
+ if i >= len(d.buf) {
+ break
+ }
+ d.buf[pos+int64(i)] = b
+ written++
+ }
+ return written, nil
+}
+
+func TestDownloadOrder(t *testing.T) {
+ s, names, ranges := dlLoggingSvc(buf12MB)
+
+ opts := &s3manager.DownloadOptions{S3: s, Concurrency: 1}
+ d := s3manager.NewDownloader(opts)
+ w := newDLWriter(len(buf12MB))
+ n, err := d.Download(w, &s3.GetObjectInput{
+ Bucket: aws.String("bucket"),
+ Key: aws.String("key"),
+ })
+
+ assert.Nil(t, err)
+ assert.Equal(t, int64(len(buf12MB)), n)
+ assert.Equal(t, []string{"GetObject", "GetObject", "GetObject"}, *names)
+ assert.Equal(t, []string{"bytes=0-5242879", "bytes=5242880-10485759", "bytes=10485760-15728639"}, *ranges)
+
+ count := 0
+ for _, b := range w.buf {
+ count += int(b)
+ }
+ assert.Equal(t, 0, count)
+}
+
+func TestDownloadZero(t *testing.T) {
+ s, names, ranges := dlLoggingSvc([]byte{})
+
+ opts := &s3manager.DownloadOptions{S3: s}
+ d := s3manager.NewDownloader(opts)
+ w := newDLWriter(0)
+ n, err := d.Download(w, &s3.GetObjectInput{
+ Bucket: aws.String("bucket"),
+ Key: aws.String("key"),
+ })
+
+ assert.Nil(t, err)
+ assert.Equal(t, int64(0), n)
+ assert.Equal(t, []string{"GetObject"}, *names)
+ assert.Equal(t, []string{"bytes=0-5242879"}, *ranges)
+}
+
+func TestDownloadSetPartSize(t *testing.T) {
+ s, names, ranges := dlLoggingSvc([]byte{1, 2, 3})
+
+ opts := &s3manager.DownloadOptions{S3: s, PartSize: 1, Concurrency: 1}
+ d := s3manager.NewDownloader(opts)
+ w := newDLWriter(3)
+ n, err := d.Download(w, &s3.GetObjectInput{
+ Bucket: aws.String("bucket"),
+ Key: aws.String("key"),
+ })
+
+ assert.Nil(t, err)
+ assert.Equal(t, int64(3), n)
+ assert.Equal(t, []string{"GetObject", "GetObject", "GetObject"}, *names)
+ assert.Equal(t, []string{"bytes=0-0", "bytes=1-1", "bytes=2-2"}, *ranges)
+ assert.Equal(t, []byte{1, 2, 3}, w.buf)
+}
+
+func TestDownloadError(t *testing.T) {
+ s, names, _ := dlLoggingSvc([]byte{1, 2, 3})
+ opts := &s3manager.DownloadOptions{S3: s, PartSize: 1, Concurrency: 1}
+
+ num := 0
+ s.Handlers.Send.PushBack(func(r *aws.Request) {
+ num++
+ if num > 1 {
+ r.HTTPResponse.StatusCode = 400
+ r.HTTPResponse.Body = ioutil.NopCloser(bytes.NewReader([]byte{}))
+ }
+ })
+
+ d := s3manager.NewDownloader(opts)
+ w := newDLWriter(3)
+ n, err := d.Download(w, &s3.GetObjectInput{
+ Bucket: aws.String("bucket"),
+ Key: aws.String("key"),
+ })
+
+ assert.NotNil(t, err)
+ assert.Equal(t, int64(1), n)
+ assert.Equal(t, []string{"GetObject", "GetObject"}, *names)
+ assert.Equal(t, []byte{1, 0, 0}, w.buf)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/upload.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/upload.go
new file mode 100644
index 0000000..db55680
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/upload.go
@@ -0,0 +1,562 @@
+package s3manager
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "sort"
+ "sync"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+ "github.com/aws/aws-sdk-go/service/s3"
+)
+
+// The maximum allowed number of parts in a multi-part upload on Amazon S3.
+var MaxUploadParts = 10000
+
+// The minimum allowed part size when uploading a part to Amazon S3.
+var MinUploadPartSize int64 = 1024 * 1024 * 5
+
+// The default part size to buffer chunks of a payload into.
+var DefaultUploadPartSize = MinUploadPartSize
+
+// The default number of goroutines to spin up when using Upload().
+var DefaultUploadConcurrency = 5
+
+// The default set of options used when opts is nil in Upload().
+var DefaultUploadOptions = &UploadOptions{
+ PartSize: DefaultUploadPartSize,
+ Concurrency: DefaultUploadConcurrency,
+ LeavePartsOnError: false,
+ S3: nil,
+}
+
+// A MultiUploadFailure wraps a failed S3 multipart upload. An error returned
+// will satisfy this interface when a multi part upload failed to upload all
+// chucks to S3. In the case of a failure the UploadID is needed to operate on
+// the chunks, if any, which were uploaded.
+//
+// Example:
+//
+// u := s3manager.NewUploader(opts)
+// output, err := u.upload(input)
+// if err != nil {
+// if multierr, ok := err.(MultiUploadFailure); ok {
+// // Process error and its associated uploadID
+// fmt.Println("Error:", multierr.Code(), multierr.Message(), multierr.UploadID())
+// } else {
+// // Process error generically
+// fmt.Println("Error:", err.Error())
+// }
+// }
+//
+type MultiUploadFailure interface {
+ awserr.Error
+
+ // Returns the upload id for the S3 multipart upload that failed.
+ UploadID() string
+}
+
+// So that the Error interface type can be included as an anonymous field
+// in the multiUploadError struct and not conflict with the error.Error() method.
+type awsError awserr.Error
+
+// A multiUploadError wraps the upload ID of a failed s3 multipart upload.
+// Composed of BaseError for code, message, and original error
+//
+// Should be used for an error that occurred failing a S3 multipart upload,
+// and a upload ID is available. If an uploadID is not available a more relevant
+type multiUploadError struct {
+ awsError
+
+ // ID for multipart upload which failed.
+ uploadID string
+}
+
+// Error returns the string representation of the error.
+//
+// See apierr.BaseError ErrorWithExtra for output format
+//
+// Satisfies the error interface.
+func (m multiUploadError) Error() string {
+ extra := fmt.Sprintf("upload id: %s", m.uploadID)
+ return awserr.SprintError(m.Code(), m.Message(), extra, m.OrigErr())
+}
+
+// String returns the string representation of the error.
+// Alias for Error to satisfy the stringer interface.
+func (m multiUploadError) String() string {
+ return m.Error()
+}
+
+// UploadID returns the id of the S3 upload which failed.
+func (m multiUploadError) UploadID() string {
+ return m.uploadID
+}
+
+// UploadInput contains all input for upload requests to Amazon S3.
+type UploadInput struct {
+ // The canned ACL to apply to the object.
+ ACL *string `location:"header" locationName:"x-amz-acl" type:"string"`
+
+ Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"`
+
+ // Specifies caching behavior along the request/reply chain.
+ CacheControl *string `location:"header" locationName:"Cache-Control" type:"string"`
+
+ // Specifies presentational information for the object.
+ ContentDisposition *string `location:"header" locationName:"Content-Disposition" type:"string"`
+
+ // Specifies what content encodings have been applied to the object and thus
+ // what decoding mechanisms must be applied to obtain the media-type referenced
+ // by the Content-Type header field.
+ ContentEncoding *string `location:"header" locationName:"Content-Encoding" type:"string"`
+
+ // The language the content is in.
+ ContentLanguage *string `location:"header" locationName:"Content-Language" type:"string"`
+
+ // A standard MIME type describing the format of the object data.
+ ContentType *string `location:"header" locationName:"Content-Type" type:"string"`
+
+ // The date and time at which the object is no longer cacheable.
+ Expires *time.Time `location:"header" locationName:"Expires" type:"timestamp" timestampFormat:"rfc822"`
+
+ // Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.
+ GrantFullControl *string `location:"header" locationName:"x-amz-grant-full-control" type:"string"`
+
+ // Allows grantee to read the object data and its metadata.
+ GrantRead *string `location:"header" locationName:"x-amz-grant-read" type:"string"`
+
+ // Allows grantee to read the object ACL.
+ GrantReadACP *string `location:"header" locationName:"x-amz-grant-read-acp" type:"string"`
+
+ // Allows grantee to write the ACL for the applicable object.
+ GrantWriteACP *string `location:"header" locationName:"x-amz-grant-write-acp" type:"string"`
+
+ Key *string `location:"uri" locationName:"Key" type:"string" required:"true"`
+
+ // A map of metadata to store with the object in S3.
+ Metadata map[string]*string `location:"headers" locationName:"x-amz-meta-" type:"map"`
+
+ // Confirms that the requester knows that she or he will be charged for the
+ // request. Bucket owners need not specify this parameter in their requests.
+ // Documentation on downloading objects from requester pays buckets can be found
+ // at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
+ RequestPayer *string `location:"header" locationName:"x-amz-request-payer" type:"string"`
+
+ // Specifies the algorithm to use to when encrypting the object (e.g., AES256,
+ // aws:kms).
+ SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"`
+
+ // Specifies the customer-provided encryption key for Amazon S3 to use in encrypting
+ // data. This value is used to store the object and then it is discarded; Amazon
+ // does not store the encryption key. The key must be appropriate for use with
+ // the algorithm specified in the x-amz-server-side-encryption-customer-algorithm
+ // header.
+ SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"`
+
+ // Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321.
+ // Amazon S3 uses this header for a message integrity check to ensure the encryption
+ // key was transmitted without error.
+ SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"`
+
+ // Specifies the AWS KMS key ID to use for object encryption. All GET and PUT
+ // requests for an object protected by AWS KMS will fail if not made via SSL
+ // or using SigV4. Documentation on configuring any of the officially supported
+ // AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version
+ SSEKMSKeyID *string `location:"header" locationName:"x-amz-server-side-encryption-aws-kms-key-id" type:"string"`
+
+ // The Server-side encryption algorithm used when storing this object in S3
+ // (e.g., AES256, aws:kms).
+ ServerSideEncryption *string `location:"header" locationName:"x-amz-server-side-encryption" type:"string"`
+
+ // The type of storage to use for the object. Defaults to 'STANDARD'.
+ StorageClass *string `location:"header" locationName:"x-amz-storage-class" type:"string"`
+
+ // If the bucket is configured as a website, redirects requests for this object
+ // to another object in the same bucket or to an external URL. Amazon S3 stores
+ // the value of this header in the object metadata.
+ WebsiteRedirectLocation *string `location:"header" locationName:"x-amz-website-redirect-location" type:"string"`
+
+ // The readable body payload to send to S3.
+ Body io.Reader
+}
+
+// UploadOutput represents a response from the Upload() call.
+type UploadOutput struct {
+ // The URL where the object was uploaded to.
+ Location string
+
+ // The ID for a multipart upload to S3. In the case of an error the error
+ // can be cast to the MultiUploadFailure interface to extract the upload ID.
+ UploadID string
+}
+
+// UploadOptions keeps tracks of extra options to pass to an Upload() call.
+type UploadOptions struct {
+ // The buffer size (in bytes) to use when buffering data into chunks and
+ // sending them as parts to S3. The minimum allowed part size is 5MB, and
+ // if this value is set to zero, the DefaultPartSize value will be used.
+ PartSize int64
+
+ // The number of goroutines to spin up in parallel when sending parts.
+ // If this is set to zero, the DefaultConcurrency value will be used.
+ Concurrency int
+
+ // Setting this value to true will cause the SDK to avoid calling
+ // AbortMultipartUpload on a failure, leaving all successfully uploaded
+ // parts on S3 for manual recovery.
+ //
+ // Note that storing parts of an incomplete multipart upload counts towards
+ // space usage on S3 and will add additional costs if not cleaned up.
+ LeavePartsOnError bool
+
+ // The client to use when uploading to S3. Leave this as nil to use the
+ // default S3 client.
+ S3 *s3.S3
+}
+
+// NewUploader creates a new Uploader object to upload data to S3. Pass in
+// an optional opts structure to customize the uploader behavior.
+func NewUploader(opts *UploadOptions) *Uploader {
+ if opts == nil {
+ opts = DefaultUploadOptions
+ }
+ return &Uploader{opts: opts}
+}
+
+// The Uploader structure that calls Upload(). It is safe to call Upload()
+// on this structure for multiple objects and across concurrent goroutines.
+type Uploader struct {
+ opts *UploadOptions
+}
+
+// Upload uploads an object to S3, intelligently buffering large files into
+// smaller chunks and sending them in parallel across multiple goroutines. You
+// can configure the buffer size and concurrency through the opts parameter.
+//
+// If opts is set to nil, DefaultUploadOptions will be used.
+//
+// It is safe to call this method for multiple objects and across concurrent
+// goroutines.
+func (u *Uploader) Upload(input *UploadInput) (*UploadOutput, error) {
+ i := uploader{in: input, opts: *u.opts}
+ return i.upload()
+}
+
+// internal structure to manage an upload to S3.
+type uploader struct {
+ in *UploadInput
+ opts UploadOptions
+
+ readerPos int64 // current reader position
+ totalSize int64 // set to -1 if the size is not known
+}
+
+// internal logic for deciding whether to upload a single part or use a
+// multipart upload.
+func (u *uploader) upload() (*UploadOutput, error) {
+ u.init()
+
+ if u.opts.PartSize < MinUploadPartSize {
+ msg := fmt.Sprintf("part size must be at least %d bytes", MinUploadPartSize)
+ return nil, awserr.New("ConfigError", msg, nil)
+ }
+
+ // Do one read to determine if we have more than one part
+ buf, err := u.nextReader()
+ if err == io.EOF || err == io.ErrUnexpectedEOF { // single part
+ return u.singlePart(buf)
+ } else if err != nil {
+ return nil, awserr.New("ReadRequestBody", "read upload data failed", err)
+ }
+
+ mu := multiuploader{uploader: u}
+ return mu.upload(buf)
+}
+
+// init will initialize all default options.
+func (u *uploader) init() {
+ if u.opts.S3 == nil {
+ u.opts.S3 = s3.New(nil)
+ }
+ if u.opts.Concurrency == 0 {
+ u.opts.Concurrency = DefaultUploadConcurrency
+ }
+ if u.opts.PartSize == 0 {
+ u.opts.PartSize = DefaultUploadPartSize
+ }
+
+ // Try to get the total size for some optimizations
+ u.initSize()
+}
+
+// initSize tries to detect the total stream size, setting u.totalSize. If
+// the size is not known, totalSize is set to -1.
+func (u *uploader) initSize() {
+ u.totalSize = -1
+
+ switch r := u.in.Body.(type) {
+ case io.Seeker:
+ pos, _ := r.Seek(0, 1)
+ defer r.Seek(pos, 0)
+
+ n, err := r.Seek(0, 2)
+ if err != nil {
+ return
+ }
+ u.totalSize = n
+
+ // try to adjust partSize if it is too small
+ if u.totalSize/u.opts.PartSize >= int64(MaxUploadParts) {
+ u.opts.PartSize = u.totalSize / int64(MaxUploadParts)
+ }
+ }
+}
+
+// nextReader returns a seekable reader representing the next packet of data.
+// This operation increases the shared u.readerPos counter, but note that it
+// does not need to be wrapped in a mutex because nextReader is only called
+// from the main thread.
+func (u *uploader) nextReader() (io.ReadSeeker, error) {
+ switch r := u.in.Body.(type) {
+ case io.ReaderAt:
+ var err error
+
+ n := u.opts.PartSize
+ if u.totalSize >= 0 {
+ bytesLeft := u.totalSize - u.readerPos
+
+ if bytesLeft == 0 {
+ err = io.EOF
+ } else if bytesLeft <= u.opts.PartSize {
+ err = io.ErrUnexpectedEOF
+ n = bytesLeft
+ }
+ }
+
+ buf := io.NewSectionReader(r, u.readerPos, n)
+ u.readerPos += n
+
+ return buf, err
+
+ default:
+ packet := make([]byte, u.opts.PartSize)
+ n, err := io.ReadFull(u.in.Body, packet)
+ u.readerPos += int64(n)
+
+ return bytes.NewReader(packet[0:n]), err
+ }
+}
+
+// singlePart contains upload logic for uploading a single chunk via
+// a regular PutObject request. Multipart requests require at least two
+// parts, or at least 5MB of data.
+func (u *uploader) singlePart(buf io.ReadSeeker) (*UploadOutput, error) {
+ params := &s3.PutObjectInput{}
+ awsutil.Copy(params, u.in)
+ params.Body = buf
+
+ req, _ := u.opts.S3.PutObjectRequest(params)
+ if err := req.Send(); err != nil {
+ return nil, err
+ }
+
+ url := req.HTTPRequest.URL.String()
+ return &UploadOutput{Location: url}, nil
+}
+
+// internal structure to manage a specific multipart upload to S3.
+type multiuploader struct {
+ *uploader
+ wg sync.WaitGroup
+ m sync.Mutex
+ err error
+ uploadID string
+ parts completedParts
+}
+
+// keeps track of a single chunk of data being sent to S3.
+type chunk struct {
+ buf io.ReadSeeker
+ num int64
+}
+
+// completedParts is a wrapper to make parts sortable by their part number,
+// since S3 required this list to be sent in sorted order.
+type completedParts []*s3.CompletedPart
+
+func (a completedParts) Len() int { return len(a) }
+func (a completedParts) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+func (a completedParts) Less(i, j int) bool { return *a[i].PartNumber < *a[j].PartNumber }
+
+// upload will perform a multipart upload using the firstBuf buffer containing
+// the first chunk of data.
+func (u *multiuploader) upload(firstBuf io.ReadSeeker) (*UploadOutput, error) {
+ params := &s3.CreateMultipartUploadInput{}
+ awsutil.Copy(params, u.in)
+
+ // Create the multipart
+ resp, err := u.opts.S3.CreateMultipartUpload(params)
+ if err != nil {
+ return nil, err
+ }
+ u.uploadID = *resp.UploadID
+
+ // Create the workers
+ ch := make(chan chunk, u.opts.Concurrency)
+ for i := 0; i < u.opts.Concurrency; i++ {
+ u.wg.Add(1)
+ go u.readChunk(ch)
+ }
+
+ // Send part 1 to the workers
+ var num int64 = 1
+ ch <- chunk{buf: firstBuf, num: num}
+
+ // Read and queue the rest of the parts
+ for u.geterr() == nil {
+ // This upload exceeded maximum number of supported parts, error now.
+ if num > int64(MaxUploadParts) {
+ msg := fmt.Sprintf("exceeded total allowed parts (%d). "+
+ "Adjust PartSize to fit in this limit", MaxUploadParts)
+ u.seterr(awserr.New("TotalPartsExceeded", msg, nil))
+ break
+ }
+
+ num++
+
+ buf, err := u.nextReader()
+ if err == io.EOF {
+ break
+ }
+
+ ch <- chunk{buf: buf, num: num}
+
+ if err != nil && err != io.ErrUnexpectedEOF {
+ u.seterr(awserr.New(
+ "ReadRequestBody",
+ "read multipart upload data failed",
+ err))
+ break
+ }
+ }
+
+ // Close the channel, wait for workers, and complete upload
+ close(ch)
+ u.wg.Wait()
+ complete := u.complete()
+
+ if err := u.geterr(); err != nil {
+ return nil, &multiUploadError{
+ awsError: awserr.New(
+ "MultipartUpload",
+ "upload multipart failed",
+ err),
+ uploadID: u.uploadID,
+ }
+ }
+ return &UploadOutput{
+ Location: *complete.Location,
+ UploadID: u.uploadID,
+ }, nil
+}
+
+// readChunk runs in worker goroutines to pull chunks off of the ch channel
+// and send() them as UploadPart requests.
+func (u *multiuploader) readChunk(ch chan chunk) {
+ defer u.wg.Done()
+ for {
+ data, ok := <-ch
+
+ if !ok {
+ break
+ }
+
+ if u.geterr() == nil {
+ if err := u.send(data); err != nil {
+ u.seterr(err)
+ }
+ }
+ }
+}
+
+// send performs an UploadPart request and keeps track of the completed
+// part information.
+func (u *multiuploader) send(c chunk) error {
+ resp, err := u.opts.S3.UploadPart(&s3.UploadPartInput{
+ Bucket: u.in.Bucket,
+ Key: u.in.Key,
+ Body: c.buf,
+ UploadID: &u.uploadID,
+ PartNumber: &c.num,
+ })
+
+ if err != nil {
+ return err
+ }
+
+ n := c.num
+ completed := &s3.CompletedPart{ETag: resp.ETag, PartNumber: &n}
+
+ u.m.Lock()
+ u.parts = append(u.parts, completed)
+ u.m.Unlock()
+
+ return nil
+}
+
+// geterr is a thread-safe getter for the error object
+func (u *multiuploader) geterr() error {
+ u.m.Lock()
+ defer u.m.Unlock()
+
+ return u.err
+}
+
+// seterr is a thread-safe setter for the error object
+func (u *multiuploader) seterr(e error) {
+ u.m.Lock()
+ defer u.m.Unlock()
+
+ u.err = e
+}
+
+// fail will abort the multipart unless LeavePartsOnError is set to true.
+func (u *multiuploader) fail() {
+ if u.opts.LeavePartsOnError {
+ return
+ }
+
+ u.opts.S3.AbortMultipartUpload(&s3.AbortMultipartUploadInput{
+ Bucket: u.in.Bucket,
+ Key: u.in.Key,
+ UploadID: &u.uploadID,
+ })
+}
+
+// complete successfully completes a multipart upload and returns the response.
+func (u *multiuploader) complete() *s3.CompleteMultipartUploadOutput {
+ if u.geterr() != nil {
+ u.fail()
+ return nil
+ }
+
+ // Parts must be sorted in PartNumber order.
+ sort.Sort(u.parts)
+
+ resp, err := u.opts.S3.CompleteMultipartUpload(&s3.CompleteMultipartUploadInput{
+ Bucket: u.in.Bucket,
+ Key: u.in.Key,
+ UploadID: &u.uploadID,
+ MultipartUpload: &s3.CompletedMultipartUpload{Parts: u.parts},
+ })
+ if err != nil {
+ u.seterr(err)
+ u.fail()
+ }
+
+ return resp
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/upload_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/upload_test.go
new file mode 100644
index 0000000..9016419
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/s3manager/upload_test.go
@@ -0,0 +1,438 @@
+package s3manager_test
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "sort"
+ "sync"
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+ "github.com/aws/aws-sdk-go/internal/test/unit"
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/aws/aws-sdk-go/service/s3/s3manager"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ = unit.Imported
+var buf12MB = make([]byte, 1024*1024*12)
+var buf2MB = make([]byte, 1024*1024*2)
+
+var emptyList = []string{}
+
+func val(i interface{}, s string) interface{} {
+ return awsutil.ValuesAtPath(i, s)[0]
+}
+
+func contains(src []string, s string) bool {
+ for _, v := range src {
+ if s == v {
+ return true
+ }
+ }
+ return false
+}
+
+func loggingSvc(ignoreOps []string) (*s3.S3, *[]string, *[]interface{}) {
+ var m sync.Mutex
+ partNum := 0
+ names := []string{}
+ params := []interface{}{}
+ svc := s3.New(nil)
+ svc.Handlers.Unmarshal.Clear()
+ svc.Handlers.UnmarshalMeta.Clear()
+ svc.Handlers.UnmarshalError.Clear()
+ svc.Handlers.Send.Clear()
+ svc.Handlers.Send.PushBack(func(r *aws.Request) {
+ m.Lock()
+ defer m.Unlock()
+
+ if !contains(ignoreOps, r.Operation.Name) {
+ names = append(names, r.Operation.Name)
+ params = append(params, r.Params)
+ }
+
+ r.HTTPResponse = &http.Response{
+ StatusCode: 200,
+ Body: ioutil.NopCloser(bytes.NewReader([]byte{})),
+ }
+
+ switch data := r.Data.(type) {
+ case *s3.CreateMultipartUploadOutput:
+ data.UploadID = aws.String("UPLOAD-ID")
+ case *s3.UploadPartOutput:
+ partNum++
+ data.ETag = aws.String(fmt.Sprintf("ETAG%d", partNum))
+ case *s3.CompleteMultipartUploadOutput:
+ data.Location = aws.String("https://location")
+ }
+ })
+
+	return svc, &names, &params
+}
+
+func buflen(i interface{}) int {
+ r := i.(io.Reader)
+ b, _ := ioutil.ReadAll(r)
+ return len(b)
+}
+
+func TestUploadOrderMulti(t *testing.T) {
+ s, ops, args := loggingSvc(emptyList)
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s})
+ resp, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(buf12MB),
+ ServerSideEncryption: aws.String("AES256"),
+ ContentType: aws.String("content/type"),
+ })
+
+ assert.NoError(t, err)
+ assert.Equal(t, []string{"CreateMultipartUpload", "UploadPart", "UploadPart", "UploadPart", "CompleteMultipartUpload"}, *ops)
+ assert.Equal(t, "https://location", resp.Location)
+ assert.Equal(t, "UPLOAD-ID", resp.UploadID)
+
+ // Validate input values
+
+ // UploadPart
+ assert.Equal(t, "UPLOAD-ID", val((*args)[1], "UploadID"))
+ assert.Equal(t, "UPLOAD-ID", val((*args)[2], "UploadID"))
+ assert.Equal(t, "UPLOAD-ID", val((*args)[3], "UploadID"))
+
+ // CompleteMultipartUpload
+ assert.Equal(t, "UPLOAD-ID", val((*args)[4], "UploadID"))
+ assert.Equal(t, int64(1), val((*args)[4], "MultipartUpload.Parts[0].PartNumber"))
+ assert.Equal(t, int64(2), val((*args)[4], "MultipartUpload.Parts[1].PartNumber"))
+ assert.Equal(t, int64(3), val((*args)[4], "MultipartUpload.Parts[2].PartNumber"))
+ assert.Regexp(t, `^ETAG\d+$`, val((*args)[4], "MultipartUpload.Parts[0].ETag"))
+ assert.Regexp(t, `^ETAG\d+$`, val((*args)[4], "MultipartUpload.Parts[1].ETag"))
+ assert.Regexp(t, `^ETAG\d+$`, val((*args)[4], "MultipartUpload.Parts[2].ETag"))
+
+ // Custom headers
+ assert.Equal(t, "AES256", val((*args)[0], "ServerSideEncryption"))
+ assert.Equal(t, "content/type", val((*args)[0], "ContentType"))
+}
+
+func TestUploadOrderMultiDifferentPartSize(t *testing.T) {
+ s, ops, args := loggingSvc(emptyList)
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{
+ S3: s,
+ PartSize: 1024 * 1024 * 7,
+ Concurrency: 1,
+ })
+ _, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(buf12MB),
+ })
+
+ assert.NoError(t, err)
+ assert.Equal(t, []string{"CreateMultipartUpload", "UploadPart", "UploadPart", "CompleteMultipartUpload"}, *ops)
+
+ // Part lengths
+ assert.Equal(t, 1024*1024*7, buflen(val((*args)[1], "Body")))
+ assert.Equal(t, 1024*1024*5, buflen(val((*args)[2], "Body")))
+}
+
+func TestUploadIncreasePartSize(t *testing.T) {
+ s3manager.MaxUploadParts = 2
+ defer func() { s3manager.MaxUploadParts = 10000 }()
+
+ s, ops, args := loggingSvc(emptyList)
+ opts := &s3manager.UploadOptions{S3: s, Concurrency: 1}
+ mgr := s3manager.NewUploader(opts)
+ _, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(buf12MB),
+ })
+
+ assert.NoError(t, err)
+ assert.Equal(t, int64(0), opts.PartSize) // don't modify orig options
+ assert.Equal(t, []string{"CreateMultipartUpload", "UploadPart", "UploadPart", "CompleteMultipartUpload"}, *ops)
+
+ // Part lengths
+ assert.Equal(t, 1024*1024*6, buflen(val((*args)[1], "Body")))
+ assert.Equal(t, 1024*1024*6, buflen(val((*args)[2], "Body")))
+}
+
+func TestUploadFailIfPartSizeTooSmall(t *testing.T) {
+ opts := &s3manager.UploadOptions{PartSize: 5}
+ mgr := s3manager.NewUploader(opts)
+ resp, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(buf12MB),
+ })
+
+ assert.Nil(t, resp)
+ assert.NotNil(t, err)
+
+ aerr := err.(awserr.Error)
+ assert.Equal(t, "ConfigError", aerr.Code())
+ assert.Contains(t, aerr.Message(), "part size must be at least")
+}
+
+func TestUploadOrderSingle(t *testing.T) {
+ s, ops, args := loggingSvc(emptyList)
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s})
+ resp, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(buf2MB),
+ ServerSideEncryption: aws.String("AES256"),
+ ContentType: aws.String("content/type"),
+ })
+
+ assert.NoError(t, err)
+ assert.Equal(t, []string{"PutObject"}, *ops)
+ assert.NotEqual(t, "", resp.Location)
+ assert.Equal(t, "", resp.UploadID)
+ assert.Equal(t, "AES256", val((*args)[0], "ServerSideEncryption"))
+ assert.Equal(t, "content/type", val((*args)[0], "ContentType"))
+}
+
+func TestUploadOrderSingleFailure(t *testing.T) {
+ s, ops, _ := loggingSvc(emptyList)
+ s.Handlers.Send.PushBack(func(r *aws.Request) {
+ r.HTTPResponse.StatusCode = 400
+ })
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s})
+ resp, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(buf2MB),
+ })
+
+ assert.Error(t, err)
+ assert.Equal(t, []string{"PutObject"}, *ops)
+ assert.Nil(t, resp)
+}
+
+func TestUploadOrderZero(t *testing.T) {
+ s, ops, args := loggingSvc(emptyList)
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s})
+ resp, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(make([]byte, 0)),
+ })
+
+ assert.NoError(t, err)
+ assert.Equal(t, []string{"PutObject"}, *ops)
+ assert.NotEqual(t, "", resp.Location)
+ assert.Equal(t, "", resp.UploadID)
+ assert.Equal(t, 0, buflen(val((*args)[0], "Body")))
+}
+
+func TestUploadOrderMultiFailure(t *testing.T) {
+ s, ops, _ := loggingSvc(emptyList)
+ s.Handlers.Send.PushBack(func(r *aws.Request) {
+ switch t := r.Data.(type) {
+ case *s3.UploadPartOutput:
+ if *t.ETag == "ETAG2" {
+ r.HTTPResponse.StatusCode = 400
+ }
+ }
+ })
+
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s, Concurrency: 1})
+ _, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(buf12MB),
+ })
+
+ assert.Error(t, err)
+ assert.Equal(t, []string{"CreateMultipartUpload", "UploadPart", "UploadPart", "AbortMultipartUpload"}, *ops)
+}
+
+func TestUploadOrderMultiFailureOnComplete(t *testing.T) {
+ s, ops, _ := loggingSvc(emptyList)
+ s.Handlers.Send.PushBack(func(r *aws.Request) {
+ switch r.Data.(type) {
+ case *s3.CompleteMultipartUploadOutput:
+ r.HTTPResponse.StatusCode = 400
+ }
+ })
+
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s, Concurrency: 1})
+ _, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(buf12MB),
+ })
+
+ assert.Error(t, err)
+ assert.Equal(t, []string{"CreateMultipartUpload", "UploadPart", "UploadPart",
+ "UploadPart", "CompleteMultipartUpload", "AbortMultipartUpload"}, *ops)
+}
+
+func TestUploadOrderMultiFailureOnCreate(t *testing.T) {
+ s, ops, _ := loggingSvc(emptyList)
+ s.Handlers.Send.PushBack(func(r *aws.Request) {
+ switch r.Data.(type) {
+ case *s3.CreateMultipartUploadOutput:
+ r.HTTPResponse.StatusCode = 400
+ }
+ })
+
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s})
+ _, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(make([]byte, 1024*1024*12)),
+ })
+
+ assert.Error(t, err)
+ assert.Equal(t, []string{"CreateMultipartUpload"}, *ops)
+}
+
+func TestUploadOrderMultiFailureLeaveParts(t *testing.T) {
+ s, ops, _ := loggingSvc(emptyList)
+ s.Handlers.Send.PushBack(func(r *aws.Request) {
+ switch data := r.Data.(type) {
+ case *s3.UploadPartOutput:
+ if *data.ETag == "ETAG2" {
+ r.HTTPResponse.StatusCode = 400
+ }
+ }
+ })
+
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{
+ S3: s,
+ Concurrency: 1,
+ LeavePartsOnError: true,
+ })
+ _, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: bytes.NewReader(make([]byte, 1024*1024*12)),
+ })
+
+ assert.Error(t, err)
+ assert.Equal(t, []string{"CreateMultipartUpload", "UploadPart", "UploadPart"}, *ops)
+}
+
+type failreader struct {
+ times int
+ failCount int
+}
+
+func (f *failreader) Read(b []byte) (int, error) {
+ f.failCount++
+ if f.failCount >= f.times {
+ return 0, fmt.Errorf("random failure")
+ }
+ return len(b), nil
+}
+
+func TestUploadOrderReadFail1(t *testing.T) {
+ s, ops, _ := loggingSvc(emptyList)
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s})
+ _, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: &failreader{times: 1},
+ })
+
+ assert.Equal(t, "ReadRequestBody", err.(awserr.Error).Code())
+ assert.EqualError(t, err.(awserr.Error).OrigErr(), "random failure")
+ assert.Equal(t, []string{}, *ops)
+}
+
+func TestUploadOrderReadFail2(t *testing.T) {
+ s, ops, _ := loggingSvc([]string{"UploadPart"})
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s, Concurrency: 1})
+ _, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: &failreader{times: 2},
+ })
+
+ assert.Equal(t, "ReadRequestBody", err.(awserr.Error).Code())
+ assert.EqualError(t, err.(awserr.Error).OrigErr(), "random failure")
+ assert.Equal(t, []string{"CreateMultipartUpload", "AbortMultipartUpload"}, *ops)
+}
+
+type sizedReader struct {
+ size int
+ cur int
+}
+
+func (s *sizedReader) Read(p []byte) (n int, err error) {
+ if s.cur >= s.size {
+ return 0, io.EOF
+ }
+
+ n = len(p)
+ s.cur += len(p)
+ if s.cur > s.size {
+ n -= s.cur - s.size
+ }
+
+ return
+}
+
+func TestUploadOrderMultiBufferedReader(t *testing.T) {
+ s, ops, args := loggingSvc(emptyList)
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s})
+ _, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: &sizedReader{size: 1024 * 1024 * 12},
+ })
+
+ assert.NoError(t, err)
+ assert.Equal(t, []string{"CreateMultipartUpload", "UploadPart", "UploadPart", "UploadPart", "CompleteMultipartUpload"}, *ops)
+
+ // Part lengths
+ parts := []int{
+ buflen(val((*args)[1], "Body")),
+ buflen(val((*args)[2], "Body")),
+ buflen(val((*args)[3], "Body")),
+ }
+ sort.Ints(parts)
+ assert.Equal(t, []int{1024 * 1024 * 2, 1024 * 1024 * 5, 1024 * 1024 * 5}, parts)
+}
+
+func TestUploadOrderMultiBufferedReaderExceedTotalParts(t *testing.T) {
+ s3manager.MaxUploadParts = 2
+ defer func() { s3manager.MaxUploadParts = 10000 }()
+ s, ops, _ := loggingSvc([]string{"UploadPart"})
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s, Concurrency: 1})
+ resp, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: &sizedReader{size: 1024 * 1024 * 12},
+ })
+
+ assert.Error(t, err)
+ assert.Nil(t, resp)
+ assert.Equal(t, []string{"CreateMultipartUpload", "AbortMultipartUpload"}, *ops)
+
+ aerr := err.(awserr.Error)
+ assert.Equal(t, "TotalPartsExceeded", aerr.Code())
+ assert.Contains(t, aerr.Message(), "exceeded total allowed parts (2)")
+}
+
+func TestUploadOrderSingleBufferedReader(t *testing.T) {
+ s, ops, _ := loggingSvc(emptyList)
+ mgr := s3manager.NewUploader(&s3manager.UploadOptions{S3: s})
+ resp, err := mgr.Upload(&s3manager.UploadInput{
+ Bucket: aws.String("Bucket"),
+ Key: aws.String("Key"),
+ Body: &sizedReader{size: 1024 * 1024 * 2},
+ })
+
+ assert.NoError(t, err)
+ assert.Equal(t, []string{"PutObject"}, *ops)
+ assert.NotEqual(t, "", resp.Location)
+ assert.Equal(t, "", resp.UploadID)
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/service.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/service.go
new file mode 100644
index 0000000..0d25fd2
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/service.go
@@ -0,0 +1,57 @@
+// THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
+
+package s3
+
+import (
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/internal/protocol/restxml"
+ "github.com/aws/aws-sdk-go/internal/signer/v4"
+)
+
+// S3 is a client for Amazon S3.
+type S3 struct {
+ *aws.Service
+}
+
+// Used for custom service initialization logic
+var initService func(*aws.Service)
+
+// Used for custom request initialization logic
+var initRequest func(*aws.Request)
+
+// New returns a new S3 client.
+func New(config *aws.Config) *S3 {
+ service := &aws.Service{
+ Config: aws.DefaultConfig.Merge(config),
+ ServiceName: "s3",
+ APIVersion: "2006-03-01",
+ }
+ service.Initialize()
+
+ // Handlers
+ service.Handlers.Sign.PushBack(v4.Sign)
+ service.Handlers.Build.PushBack(restxml.Build)
+ service.Handlers.Unmarshal.PushBack(restxml.Unmarshal)
+ service.Handlers.UnmarshalMeta.PushBack(restxml.UnmarshalMeta)
+ service.Handlers.UnmarshalError.PushBack(restxml.UnmarshalError)
+
+ // Run custom service initialization if present
+ if initService != nil {
+ initService(service)
+ }
+
+ return &S3{service}
+}
+
+// newRequest creates a new request for a S3 operation and runs any
+// custom request initialization.
+func (c *S3) newRequest(op *aws.Operation, params, data interface{}) *aws.Request {
+ req := aws.NewRequest(c.Service, op, params, data)
+
+ // Run custom request initialization if present
+ if initRequest != nil {
+ initRequest(req)
+ }
+
+ return req
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/sse.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/sse.go
new file mode 100644
index 0000000..01350f7
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/sse.go
@@ -0,0 +1,44 @@
+package s3
+
+import (
+ "crypto/md5"
+ "encoding/base64"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/aws/awsutil"
+)
+
+var errSSERequiresSSL = awserr.New("ConfigError", "cannot send SSE keys over HTTP.", nil)
+
+func validateSSERequiresSSL(r *aws.Request) {
+ if r.HTTPRequest.URL.Scheme != "https" {
+ p := awsutil.ValuesAtPath(r.Params, "SSECustomerKey||CopySourceSSECustomerKey")
+ if len(p) > 0 {
+ r.Error = errSSERequiresSSL
+ }
+ }
+}
+
+func computeSSEKeys(r *aws.Request) {
+ headers := []string{
+ "x-amz-server-side-encryption-customer-key",
+ "x-amz-copy-source-server-side-encryption-customer-key",
+ }
+
+ for _, h := range headers {
+ md5h := h + "-md5"
+ if key := r.HTTPRequest.Header.Get(h); key != "" {
+ // Base64-encode the value
+ b64v := base64.StdEncoding.EncodeToString([]byte(key))
+ r.HTTPRequest.Header.Set(h, b64v)
+
+ // Add MD5 if it wasn't computed
+ if r.HTTPRequest.Header.Get(md5h) == "" {
+ sum := md5.Sum([]byte(key))
+ b64sum := base64.StdEncoding.EncodeToString(sum[:])
+ r.HTTPRequest.Header.Set(md5h, b64sum)
+ }
+ }
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/sse_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/sse_test.go
new file mode 100644
index 0000000..e665188
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/sse_test.go
@@ -0,0 +1,81 @@
+package s3_test
+
+import (
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/internal/test/unit"
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ = unit.Imported
+
+func TestSSECustomerKeyOverHTTPError(t *testing.T) {
+ s := s3.New(&aws.Config{DisableSSL: true})
+ req, _ := s.CopyObjectRequest(&s3.CopyObjectInput{
+ Bucket: aws.String("bucket"),
+ CopySource: aws.String("bucket/source"),
+ Key: aws.String("dest"),
+ SSECustomerKey: aws.String("key"),
+ })
+ err := req.Build()
+
+ assert.Error(t, err)
+ assert.Equal(t, "ConfigError", err.(awserr.Error).Code())
+ assert.Contains(t, err.(awserr.Error).Message(), "cannot send SSE keys over HTTP")
+}
+
+func TestCopySourceSSECustomerKeyOverHTTPError(t *testing.T) {
+ s := s3.New(&aws.Config{DisableSSL: true})
+ req, _ := s.CopyObjectRequest(&s3.CopyObjectInput{
+ Bucket: aws.String("bucket"),
+ CopySource: aws.String("bucket/source"),
+ Key: aws.String("dest"),
+ CopySourceSSECustomerKey: aws.String("key"),
+ })
+ err := req.Build()
+
+ assert.Error(t, err)
+ assert.Equal(t, "ConfigError", err.(awserr.Error).Code())
+ assert.Contains(t, err.(awserr.Error).Message(), "cannot send SSE keys over HTTP")
+}
+
+func TestComputeSSEKeys(t *testing.T) {
+ s := s3.New(nil)
+ req, _ := s.CopyObjectRequest(&s3.CopyObjectInput{
+ Bucket: aws.String("bucket"),
+ CopySource: aws.String("bucket/source"),
+ Key: aws.String("dest"),
+ SSECustomerKey: aws.String("key"),
+ CopySourceSSECustomerKey: aws.String("key"),
+ })
+ err := req.Build()
+
+ assert.NoError(t, err)
+ assert.Equal(t, "a2V5", req.HTTPRequest.Header.Get("x-amz-server-side-encryption-customer-key"))
+ assert.Equal(t, "a2V5", req.HTTPRequest.Header.Get("x-amz-copy-source-server-side-encryption-customer-key"))
+ assert.Equal(t, "PG4LipwVIkqCKLmpjKFTHQ==", req.HTTPRequest.Header.Get("x-amz-server-side-encryption-customer-key-md5"))
+ assert.Equal(t, "PG4LipwVIkqCKLmpjKFTHQ==", req.HTTPRequest.Header.Get("x-amz-copy-source-server-side-encryption-customer-key-md5"))
+}
+
+func TestComputeSSEKeysShortcircuit(t *testing.T) {
+ s := s3.New(nil)
+ req, _ := s.CopyObjectRequest(&s3.CopyObjectInput{
+ Bucket: aws.String("bucket"),
+ CopySource: aws.String("bucket/source"),
+ Key: aws.String("dest"),
+ SSECustomerKey: aws.String("key"),
+ CopySourceSSECustomerKey: aws.String("key"),
+ SSECustomerKeyMD5: aws.String("MD5"),
+ CopySourceSSECustomerKeyMD5: aws.String("MD5"),
+ })
+ err := req.Build()
+
+ assert.NoError(t, err)
+ assert.Equal(t, "a2V5", req.HTTPRequest.Header.Get("x-amz-server-side-encryption-customer-key"))
+ assert.Equal(t, "a2V5", req.HTTPRequest.Header.Get("x-amz-copy-source-server-side-encryption-customer-key"))
+ assert.Equal(t, "MD5", req.HTTPRequest.Header.Get("x-amz-server-side-encryption-customer-key-md5"))
+ assert.Equal(t, "MD5", req.HTTPRequest.Header.Get("x-amz-copy-source-server-side-encryption-customer-key-md5"))
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/unmarshal_error.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/unmarshal_error.go
new file mode 100644
index 0000000..c27d434
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/unmarshal_error.go
@@ -0,0 +1,42 @@
+package s3
+
+import (
+ "encoding/xml"
+ "io"
+ "strings"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+)
+
+type xmlErrorResponse struct {
+ XMLName xml.Name `xml:"Error"`
+ Code string `xml:"Code"`
+ Message string `xml:"Message"`
+}
+
+func unmarshalError(r *aws.Request) {
+ defer r.HTTPResponse.Body.Close()
+
+ if r.HTTPResponse.ContentLength == int64(0) {
+ // No body, use status code to generate an awserr.Error
+ r.Error = awserr.NewRequestFailure(
+ awserr.New(strings.Replace(r.HTTPResponse.Status, " ", "", -1), r.HTTPResponse.Status, nil),
+ r.HTTPResponse.StatusCode,
+ "",
+ )
+ return
+ }
+
+ resp := &xmlErrorResponse{}
+ err := xml.NewDecoder(r.HTTPResponse.Body).Decode(resp)
+ if err != nil && err != io.EOF {
+ r.Error = awserr.New("SerializationError", "failed to decode S3 XML error response", nil)
+ } else {
+ r.Error = awserr.NewRequestFailure(
+ awserr.New(resp.Code, resp.Message, nil),
+ r.HTTPResponse.StatusCode,
+ "",
+ )
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/unmarshal_error_test.go b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/unmarshal_error_test.go
new file mode 100644
index 0000000..ee08d62
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/aws/aws-sdk-go/service/s3/unmarshal_error_test.go
@@ -0,0 +1,53 @@
+package s3_test
+
+import (
+ "bytes"
+ "io/ioutil"
+ "net/http"
+ "testing"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
+ "github.com/aws/aws-sdk-go/internal/test/unit"
+ "github.com/aws/aws-sdk-go/service/s3"
+ "github.com/stretchr/testify/assert"
+)
+
+var _ = unit.Imported
+
+var s3StatusCodeErrorTests = []struct {
+ scode int
+ status string
+ body string
+ code string
+ message string
+}{
+ {301, "Moved Permanently", "", "MovedPermanently", "Moved Permanently"},
+ {403, "Forbidden", "", "Forbidden", "Forbidden"},
+ {400, "Bad Request", "", "BadRequest", "Bad Request"},
+ {404, "Not Found", "", "NotFound", "Not Found"},
+ {500, "Internal Error", "", "InternalError", "Internal Error"},
+}
+
+func TestStatusCodeError(t *testing.T) {
+ for _, test := range s3StatusCodeErrorTests {
+ s := s3.New(nil)
+ s.Handlers.Send.Clear()
+ s.Handlers.Send.PushBack(func(r *aws.Request) {
+ body := ioutil.NopCloser(bytes.NewReader([]byte(test.body)))
+ r.HTTPResponse = &http.Response{
+ ContentLength: int64(len(test.body)),
+ StatusCode: test.scode,
+ Status: test.status,
+ Body: body,
+ }
+ })
+ _, err := s.PutBucketACL(&s3.PutBucketACLInput{
+ Bucket: aws.String("bucket"), ACL: aws.String("public-read"),
+ })
+
+ assert.Error(t, err)
+ assert.Equal(t, test.code, err.(awserr.Error).Code())
+ assert.Equal(t, test.message, err.(awserr.Error).Message())
+ }
+}
diff --git a/Godeps/_workspace/src/github.com/extemporalgenome/slug/LICENSE b/Godeps/_workspace/src/github.com/extemporalgenome/slug/LICENSE
new file mode 100644
index 0000000..7448756
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/extemporalgenome/slug/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2012 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Godeps/_workspace/src/github.com/extemporalgenome/slug/README.md b/Godeps/_workspace/src/github.com/extemporalgenome/slug/README.md
new file mode 100644
index 0000000..50a88bb
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/extemporalgenome/slug/README.md
@@ -0,0 +1,9 @@
+# slug
+
+See the [API docs](http://go.pkgdoc.org/github.com/extemporalgenome/slug).
+
+Latin-ish inputs should have very stable output. All inputs are passed through
+an NFKD transform, and anything still in the unicode Letter and Number
+categories are passed through intact. Anything in the Mark or Lm/Sk categories
+(modifiers) are skipped, and runs of characters from any other categories are
+collapsed to a single hyphen.
diff --git a/Godeps/_workspace/src/github.com/extemporalgenome/slug/slug.go b/Godeps/_workspace/src/github.com/extemporalgenome/slug/slug.go
new file mode 100644
index 0000000..d21833c
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/extemporalgenome/slug/slug.go
@@ -0,0 +1,110 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package slug transforms strings into a normalized form well suited for use in URLs.
+package slug
+
+import (
+ "code.google.com/p/go.text/unicode/norm"
+ "encoding/hex"
+ "unicode"
+ "unicode/utf8"
+)
+
+var lat = []*unicode.RangeTable{unicode.Letter, unicode.Number}
+var nop = []*unicode.RangeTable{unicode.Mark, unicode.Sk, unicode.Lm}
+
+// Slug replaces each run of characters which are not unicode letters or
+// numbers with a single hyphen, except for leading or trailing runs. Letters
+// will be stripped of diacritical marks and lowercased. Letter or number
+// codepoints that do not have combining marks or a lower-cased variant will
+// be passed through unaltered.
+func Slug(s string) string {
+ buf := make([]rune, 0, len(s))
+ dash := false
+ for _, r := range norm.NFKD.String(s) {
+ switch {
+ // unicode 'letters' like mandarin characters pass through
+ case unicode.IsOneOf(lat, r):
+ buf = append(buf, unicode.ToLower(r))
+ dash = true
+ case unicode.IsOneOf(nop, r):
+ // skip
+ case dash:
+ buf = append(buf, '-')
+ dash = false
+ }
+ }
+ if i := len(buf) - 1; i >= 0 && buf[i] == '-' {
+ buf = buf[:i]
+ }
+ return string(buf)
+}
+
+// SlugAscii is identical to Slug, except that runs of one or more unicode
+// letters or numbers that still fall outside the ASCII range will have their
+// UTF-8 representation hex encoded and delimited by hyphens. As with Slug, in
+// no case will hyphens appear at either end of the returned string.
+func SlugAscii(s string) string {
+ const m = utf8.UTFMax
+ var (
+ ib [m * 3]byte
+ ob []byte
+ buf = make([]byte, 0, len(s))
+ dash = false
+ latin = true
+ )
+ for _, r := range norm.NFKD.String(s) {
+ switch {
+ case unicode.IsOneOf(lat, r):
+ r = unicode.ToLower(r)
+ n := utf8.EncodeRune(ib[:m], r)
+ if r >= 128 {
+ if latin && dash {
+ buf = append(buf, '-')
+ }
+ n = hex.Encode(ib[m:], ib[:n])
+ ob = ib[m : m+n]
+ latin = false
+ } else {
+ if !latin {
+ buf = append(buf, '-')
+ }
+ ob = ib[:n]
+ latin = true
+ }
+ dash = true
+ buf = append(buf, ob...)
+ case unicode.IsOneOf(nop, r):
+ // skip
+ case dash:
+ buf = append(buf, '-')
+ dash = false
+ latin = true
+ }
+ }
+ if i := len(buf) - 1; i >= 0 && buf[i] == '-' {
+ buf = buf[:i]
+ }
+ return string(buf)
+}
+
+// IsSlugAscii returns true only if SlugAscii(s) == s.
+func IsSlugAscii(s string) bool {
+ dash := true
+ for _, r := range s {
+ switch {
+ case r == '-':
+ if dash {
+ return false
+ }
+ dash = true
+ case 'a' <= r && r <= 'z', '0' <= r && r <= '9':
+ dash = false
+ default:
+ return false
+ }
+ }
+ return !dash
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/.gitignore b/Godeps/_workspace/src/github.com/flosch/go-humanize/.gitignore
new file mode 100644
index 0000000..05b4051
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/.gitignore
@@ -0,0 +1,6 @@
+#*
+*.[568]
+*.a
+*~
+[568].out
+_*
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/LICENSE b/Godeps/_workspace/src/github.com/flosch/go-humanize/LICENSE
new file mode 100644
index 0000000..8d9a94a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/LICENSE
@@ -0,0 +1,21 @@
+Copyright (c) 2005-2008 Dustin Sallings
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/README.markdown b/Godeps/_workspace/src/github.com/flosch/go-humanize/README.markdown
new file mode 100644
index 0000000..079bc89
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/README.markdown
@@ -0,0 +1,78 @@
+# Humane Units
+
+Just a few functions for helping humanize times and sizes.
+
+`go get` it as `github.com/dustin/go-humanize`, import it as
+`"github.com/dustin/go-humanize"`, use it as `humanize`
+
+## Sizes
+
+This lets you take numbers like `82854982` and convert them to useful
+strings like, `83MB` or `79MiB` (whichever you prefer).
+
+Example:
+
+ fmt.Printf("That file is %s.", humanize.Bytes(82854982))
+
+## Times
+
+This lets you take a `time.Time` and spit it out in relative terms.
+For example, `12 seconds ago` or `3 days from now`.
+
+Example:
+
+ fmt.Printf("This was touched %s", humanize.Time(someTimeInstance))
+
+Thanks to Kyle Lemons for the time implementation from an IRC
+conversation one day. It's pretty neat.
+
+## Ordinals
+
+From a [mailing list discussion][odisc] where a user wanted to be able
+to label ordinals.
+
+ 0 -> 0th
+ 1 -> 1st
+ 2 -> 2nd
+ 3 -> 3rd
+ 4 -> 4th
+ [...]
+
+Example:
+
+ fmt.Printf("You're my %s best friend.", humanize.Ordinal(193))
+
+## Commas
+
+Want to shove commas into numbers? Be my guest.
+
+ 0 -> 0
+ 100 -> 100
+ 1000 -> 1,000
+ 1000000000 -> 1,000,000,000
+ -100000 -> -100,000
+
+Example:
+
+ fmt.Printf("You owe $%s.\n", humanize.Comma(6582491))
+
+## Ftoa
+
+Nicer float64 formatter that removes trailing zeros.
+
+ fmt.Printf("%f", 2.24) // 2.240000
+ fmt.Printf("%s", humanize.Ftoa(2.24)) // 2.24
+ fmt.Printf("%f", 2.0) // 2.000000
+ fmt.Printf("%s", humanize.Ftoa(2.0)) // 2
+
+## SI notation
+
+Format numbers with [SI notation][sinotation].
+
+Example:
+
+ humanize.SI(0.00000000223, "M") // 2.23nM
+
+
+[odisc]: https://groups.google.com/d/topic/golang-nuts/l8NhI74jl-4/discussion
+[sinotation]: http://en.wikipedia.org/wiki/Metric_prefix
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/big.go b/Godeps/_workspace/src/github.com/flosch/go-humanize/big.go
new file mode 100644
index 0000000..f49dc33
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/big.go
@@ -0,0 +1,31 @@
+package humanize
+
+import (
+ "math/big"
+)
+
+// order of magnitude (to a max order)
+func oomm(n, b *big.Int, maxmag int) (float64, int) {
+ mag := 0
+ m := &big.Int{}
+ for n.Cmp(b) >= 0 {
+ n.DivMod(n, b, m)
+ mag++
+ if mag == maxmag && maxmag >= 0 {
+ break
+ }
+ }
+ return float64(n.Int64()) + (float64(m.Int64()) / float64(b.Int64())), mag
+}
+
+// total order of magnitude
+// (same as above, but with no upper limit)
+func oom(n, b *big.Int) (float64, int) {
+ mag := 0
+ m := &big.Int{}
+ for n.Cmp(b) >= 0 {
+ n.DivMod(n, b, m)
+ mag++
+ }
+ return float64(n.Int64()) + (float64(m.Int64()) / float64(b.Int64())), mag
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/bigbytes.go b/Godeps/_workspace/src/github.com/flosch/go-humanize/bigbytes.go
new file mode 100644
index 0000000..02c71ce
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/bigbytes.go
@@ -0,0 +1,158 @@
+package humanize
+
+import (
+ "fmt"
+ "math/big"
+ "strings"
+ "unicode"
+)
+
+var (
+ bigIECExp = big.NewInt(1024)
+
+ // BigByte is one byte in bit.Ints
+ BigByte = big.NewInt(1)
+ // BigKiByte is 1,024 bytes in bit.Ints
+ BigKiByte = (&big.Int{}).Mul(BigByte, bigIECExp)
+ // BigMiByte is 1,024 k bytes in bit.Ints
+ BigMiByte = (&big.Int{}).Mul(BigKiByte, bigIECExp)
+ // BigGiByte is 1,024 m bytes in bit.Ints
+ BigGiByte = (&big.Int{}).Mul(BigMiByte, bigIECExp)
+ // BigTiByte is 1,024 g bytes in bit.Ints
+ BigTiByte = (&big.Int{}).Mul(BigGiByte, bigIECExp)
+ // BigPiByte is 1,024 t bytes in bit.Ints
+ BigPiByte = (&big.Int{}).Mul(BigTiByte, bigIECExp)
+ // BigEiByte is 1,024 p bytes in bit.Ints
+ BigEiByte = (&big.Int{}).Mul(BigPiByte, bigIECExp)
+ // BigZiByte is 1,024 e bytes in bit.Ints
+ BigZiByte = (&big.Int{}).Mul(BigEiByte, bigIECExp)
+ // BigYiByte is 1,024 z bytes in bit.Ints
+ BigYiByte = (&big.Int{}).Mul(BigZiByte, bigIECExp)
+)
+
+var (
+ bigSIExp = big.NewInt(1000)
+
+ // BigSIByte is one SI byte in big.Ints
+ BigSIByte = big.NewInt(1)
+ // BigKByte is 1,000 SI bytes in big.Ints
+ BigKByte = (&big.Int{}).Mul(BigSIByte, bigSIExp)
+ // BigMByte is 1,000 SI k bytes in big.Ints
+ BigMByte = (&big.Int{}).Mul(BigKByte, bigSIExp)
+ // BigGByte is 1,000 SI m bytes in big.Ints
+ BigGByte = (&big.Int{}).Mul(BigMByte, bigSIExp)
+ // BigTByte is 1,000 SI g bytes in big.Ints
+ BigTByte = (&big.Int{}).Mul(BigGByte, bigSIExp)
+ // BigPByte is 1,000 SI t bytes in big.Ints
+ BigPByte = (&big.Int{}).Mul(BigTByte, bigSIExp)
+ // BigEByte is 1,000 SI p bytes in big.Ints
+ BigEByte = (&big.Int{}).Mul(BigPByte, bigSIExp)
+ // BigZByte is 1,000 SI e bytes in big.Ints
+ BigZByte = (&big.Int{}).Mul(BigEByte, bigSIExp)
+ // BigYByte is 1,000 SI z bytes in big.Ints
+ BigYByte = (&big.Int{}).Mul(BigZByte, bigSIExp)
+)
+
+var bigBytesSizeTable = map[string]*big.Int{
+ "b": BigByte,
+ "kib": BigKiByte,
+ "kb": BigKByte,
+ "mib": BigMiByte,
+ "mb": BigMByte,
+ "gib": BigGiByte,
+ "gb": BigGByte,
+ "tib": BigTiByte,
+ "tb": BigTByte,
+ "pib": BigPiByte,
+ "pb": BigPByte,
+ "eib": BigEiByte,
+ "eb": BigEByte,
+ "zib": BigZiByte,
+ "zb": BigZByte,
+ "yib": BigYiByte,
+ "yb": BigYByte,
+ // Without suffix
+ "": BigByte,
+ "ki": BigKiByte,
+ "k": BigKByte,
+ "mi": BigMiByte,
+ "m": BigMByte,
+ "gi": BigGiByte,
+ "g": BigGByte,
+ "ti": BigTiByte,
+ "t": BigTByte,
+ "pi": BigPiByte,
+ "p": BigPByte,
+ "ei": BigEiByte,
+ "e": BigEByte,
+ "z": BigZByte,
+ "zi": BigZiByte,
+ "y": BigYByte,
+ "yi": BigYiByte,
+}
+
+var ten = big.NewInt(10)
+
+func humanateBigBytes(s, base *big.Int, sizes []string) string {
+ if s.Cmp(ten) < 0 {
+ return fmt.Sprintf("%dB", s)
+ }
+ c := (&big.Int{}).Set(s)
+ val, mag := oomm(c, base, len(sizes)-1)
+ suffix := sizes[mag]
+ f := "%.0f"
+ if val < 10 {
+ f = "%.1f"
+ }
+
+ return fmt.Sprintf(f+"%s", val, suffix)
+
+}
+
+// BigBytes produces a human readable representation of an SI size.
+//
+// BigBytes(82854982) -> 83MB
+func BigBytes(s *big.Int) string {
+ sizes := []string{"B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"}
+ return humanateBigBytes(s, bigSIExp, sizes)
+}
+
+// BigIBytes produces a human readable representation of an IEC size.
+//
+// BigIBytes(82854982) -> 79MiB
+func BigIBytes(s *big.Int) string {
+ sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"}
+ return humanateBigBytes(s, bigIECExp, sizes)
+}
+
+// ParseBigBytes parses a string representation of bytes into the number
+// of bytes it represents.
+//
+// ParseBigBytes("42MB") -> 42000000, nil
+// ParseBigBytes("42mib") -> 44040192, nil
+func ParseBigBytes(s string) (*big.Int, error) {
+ lastDigit := 0
+ for _, r := range s {
+ if !(unicode.IsDigit(r) || r == '.') {
+ break
+ }
+ lastDigit++
+ }
+
+ val := &big.Rat{}
+ _, err := fmt.Sscanf(s[:lastDigit], "%f", val)
+ if err != nil {
+ return nil, err
+ }
+
+ extra := strings.ToLower(strings.TrimSpace(s[lastDigit:]))
+ if m, ok := bigBytesSizeTable[extra]; ok {
+ mv := (&big.Rat{}).SetInt(m)
+ val.Mul(val, mv)
+ rv := &big.Int{}
+ rv.Div(val.Num(), val.Denom())
+ return rv, nil
+ }
+
+ return nil, fmt.Errorf("unhandled size name: %v", extra)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/bytes.go b/Godeps/_workspace/src/github.com/flosch/go-humanize/bytes.go
new file mode 100644
index 0000000..e4b8040
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/bytes.go
@@ -0,0 +1,129 @@
+package humanize
+
+import (
+ "fmt"
+ "math"
+ "strconv"
+ "strings"
+ "unicode"
+)
+
+// IEC Sizes.
+// kibis of bits
+const (
+ Byte = 1
+ KiByte = Byte * 1024
+ MiByte = KiByte * 1024
+ GiByte = MiByte * 1024
+ TiByte = GiByte * 1024
+ PiByte = TiByte * 1024
+ EiByte = PiByte * 1024
+)
+
+// SI Sizes.
+const (
+ IByte = 1
+ KByte = IByte * 1000
+ MByte = KByte * 1000
+ GByte = MByte * 1000
+ TByte = GByte * 1000
+ PByte = TByte * 1000
+ EByte = PByte * 1000
+)
+
+var bytesSizeTable = map[string]uint64{
+ "b": Byte,
+ "kib": KiByte,
+ "kb": KByte,
+ "mib": MiByte,
+ "mb": MByte,
+ "gib": GiByte,
+ "gb": GByte,
+ "tib": TiByte,
+ "tb": TByte,
+ "pib": PiByte,
+ "pb": PByte,
+ "eib": EiByte,
+ "eb": EByte,
+ // Without suffix
+ "": Byte,
+ "ki": KiByte,
+ "k": KByte,
+ "mi": MiByte,
+ "m": MByte,
+ "gi": GiByte,
+ "g": GByte,
+ "ti": TiByte,
+ "t": TByte,
+ "pi": PiByte,
+ "p": PByte,
+ "ei": EiByte,
+ "e": EByte,
+}
+
+func logn(n, b float64) float64 {
+ return math.Log(n) / math.Log(b)
+}
+
+func humanateBytes(s uint64, base float64, sizes []string) string {
+ if s < 10 {
+ return fmt.Sprintf("%dB", s)
+ }
+ e := math.Floor(logn(float64(s), base))
+ suffix := sizes[int(e)]
+ val := float64(s) / math.Pow(base, math.Floor(e))
+ f := "%.0f"
+ if val < 10 {
+ f = "%.1f"
+ }
+
+ return fmt.Sprintf(f+"%s", val, suffix)
+
+}
+
+// Bytes produces a human readable representation of an SI size.
+//
+// Bytes(82854982) -> 83MB
+func Bytes(s uint64) string {
+ sizes := []string{"B", "KB", "MB", "GB", "TB", "PB", "EB"}
+ return humanateBytes(uint64(s), 1000, sizes)
+}
+
+// IBytes produces a human readable representation of an IEC size.
+//
+// IBytes(82854982) -> 79MiB
+func IBytes(s uint64) string {
+ sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"}
+ return humanateBytes(uint64(s), 1024, sizes)
+}
+
+// ParseBytes parses a string representation of bytes into the number
+// of bytes it represents.
+//
+// ParseBytes("42MB") -> 42000000, nil
+// ParseBytes("42mib") -> 44040192, nil
+func ParseBytes(s string) (uint64, error) {
+ lastDigit := 0
+ for _, r := range s {
+ if !(unicode.IsDigit(r) || r == '.') {
+ break
+ }
+ lastDigit++
+ }
+
+ f, err := strconv.ParseFloat(s[:lastDigit], 64)
+ if err != nil {
+ return 0, err
+ }
+
+ extra := strings.ToLower(strings.TrimSpace(s[lastDigit:]))
+ if m, ok := bytesSizeTable[extra]; ok {
+ f *= float64(m)
+ if f >= math.MaxUint64 {
+ return 0, fmt.Errorf("too large: %v", s)
+ }
+ return uint64(f), nil
+ }
+
+ return 0, fmt.Errorf("unhandled size name: %v", extra)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/comma.go b/Godeps/_workspace/src/github.com/flosch/go-humanize/comma.go
new file mode 100644
index 0000000..9efc682
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/comma.go
@@ -0,0 +1,67 @@
+package humanize
+
+import (
+ "math/big"
+ "strconv"
+ "strings"
+)
+
+// Comma produces a string form of the given number in base 10 with
+// commas after every three orders of magnitude.
+//
+// e.g. Comma(834142) -> 834,142
+func Comma(v int64) string {
+	// math.MinInt64 cannot be negated (0 - v overflows back to itself),
+	// so spell its grouped form out explicitly.
+	if v == -9223372036854775808 {
+		return "-9,223,372,036,854,775,808"
+	}
+	sign := ""
+	if v < 0 {
+		sign = "-"
+		v = -v
+	}
+	parts := []string{"", "", "", "", "", "", "", ""}
+	j := len(parts) - 1
+
+	for v > 999 {
+		// v%1000+1000 is always four digits; dropping the leading "1"
+		// yields the zero-padded three-digit group.
+		parts[j] = strconv.FormatInt(v%1000+1000, 10)[1:]
+		v = v / 1000
+		j--
+	}
+	parts[j] = strconv.FormatInt(v, 10)
+	return sign + strings.Join(parts[j:len(parts)], ",")
+}
+
+// BigComma produces a string form of the given big.Int in base 10
+// with commas after every three orders of magnitude.
+func BigComma(b *big.Int) string {
+ sign := ""
+ if b.Sign() < 0 {
+ sign = "-"
+ b.Abs(b)
+ }
+
+ athousand := big.NewInt(1000)
+ c := (&big.Int{}).Set(b)
+ _, m := oom(c, athousand)
+ parts := make([]string, m+1)
+ j := len(parts) - 1
+
+ mod := &big.Int{}
+ for b.Cmp(athousand) >= 0 {
+ b.DivMod(b, athousand, mod)
+ parts[j] = strconv.FormatInt(mod.Int64(), 10)
+ switch len(parts[j]) {
+ case 2:
+ parts[j] = "0" + parts[j]
+ case 1:
+ parts[j] = "00" + parts[j]
+ }
+ j--
+ }
+ parts[j] = strconv.Itoa(int(b.Int64()))
+ return sign + strings.Join(parts[j:len(parts)], ",")
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/ftoa.go b/Godeps/_workspace/src/github.com/flosch/go-humanize/ftoa.go
new file mode 100644
index 0000000..c76190b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/ftoa.go
@@ -0,0 +1,23 @@
+package humanize
+
+import "strconv"
+
+func stripTrailingZeros(s string) string {
+ offset := len(s) - 1
+ for offset > 0 {
+ if s[offset] == '.' {
+ offset--
+ break
+ }
+ if s[offset] != '0' {
+ break
+ }
+ offset--
+ }
+ return s[:offset+1]
+}
+
+// Ftoa converts a float to a string with no trailing zeros.
+func Ftoa(num float64) string {
+ return stripTrailingZeros(strconv.FormatFloat(num, 'f', 6, 64))
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/humanize.go b/Godeps/_workspace/src/github.com/flosch/go-humanize/humanize.go
new file mode 100644
index 0000000..74142c2
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/humanize.go
@@ -0,0 +1,8 @@
+/*
+Package humanize converts boring ugly numbers to human-friendly strings.
+
+Durations can be turned into strings such as "3 days ago", numbers
+representing sizes like 82854982 into useful strings like, "83MB" or
+"79MiB" (whichever you prefer).
+*/
+package humanize
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/ordinals.go b/Godeps/_workspace/src/github.com/flosch/go-humanize/ordinals.go
new file mode 100644
index 0000000..43d88a8
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/ordinals.go
@@ -0,0 +1,25 @@
+package humanize
+
+import "strconv"
+
+// Ordinal gives you the input number in a rank/ordinal format.
+//
+// Ordinal(3) -> 3rd
+func Ordinal(x int) string {
+ suffix := "th"
+ switch x % 10 {
+ case 1:
+ if x%100 != 11 {
+ suffix = "st"
+ }
+ case 2:
+ if x%100 != 12 {
+ suffix = "nd"
+ }
+ case 3:
+ if x%100 != 13 {
+ suffix = "rd"
+ }
+ }
+ return strconv.Itoa(x) + suffix
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/si.go b/Godeps/_workspace/src/github.com/flosch/go-humanize/si.go
new file mode 100644
index 0000000..e4cdcea
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/si.go
@@ -0,0 +1,104 @@
+package humanize
+
+import (
+ "errors"
+ "math"
+ "regexp"
+ "strconv"
+)
+
+var siPrefixTable = map[float64]string{
+ -24: "y", // yocto
+ -21: "z", // zepto
+ -18: "a", // atto
+ -15: "f", // femto
+ -12: "p", // pico
+ -9: "n", // nano
+ -6: "µ", // micro
+ -3: "m", // milli
+ 0: "",
+ 3: "k", // kilo
+ 6: "M", // mega
+ 9: "G", // giga
+ 12: "T", // tera
+ 15: "P", // peta
+ 18: "E", // exa
+ 21: "Z", // zetta
+ 24: "Y", // yotta
+}
+
+var revSIPrefixTable = revfmap(siPrefixTable)
+
+// revfmap reverses the map and precomputes the power multiplier
+func revfmap(in map[float64]string) map[string]float64 {
+ rv := map[string]float64{}
+ for k, v := range in {
+ rv[v] = math.Pow(10, k)
+ }
+ return rv
+}
+
+var riParseRegex *regexp.Regexp
+
+func init() {
+ ri := `^([0-9.]+)([`
+ for _, v := range siPrefixTable {
+ ri += v
+ }
+ ri += `]?)(.*)`
+
+ riParseRegex = regexp.MustCompile(ri)
+}
+
+// ComputeSI finds the most appropriate SI prefix for the given number
+// and returns the prefix along with the value adjusted to be within
+// that prefix.
+//
+// e.g. ComputeSI(2.2345e-12) -> (2.2345, "p")
+func ComputeSI(input float64) (float64, string) {
+	if input == 0 {
+		return 0, ""
+	}
+	// Work on the magnitude so negative inputs don't feed math.Log a
+	// negative value (which yields NaN); the sign is restored below.
+	mag := math.Abs(input)
+	exponent := math.Floor(logn(mag, 10))
+	exponent = math.Floor(exponent/3) * 3
+	value := mag / math.Pow(10, exponent)
+	// Handle special case where value is exactly 1000.0
+	// Should return 1M instead of 1000k
+	if value == 1000.0 {
+		exponent += 3
+		value = mag / math.Pow(10, exponent)
+	}
+	value = math.Copysign(value, input)
+	return value, siPrefixTable[exponent]
+}
+
+// SI returns a string with default formatting.
+//
+// SI uses Ftoa to format float value, removing trailing zeros.
+//
+// e.g. SI(1000000, B) -> 1MB
+// e.g. SI(2.2345e-12, "F") -> 2.2345pF
+func SI(input float64, unit string) string {
+ value, prefix := ComputeSI(input)
+ return Ftoa(value) + prefix + unit
+}
+
+var errInvalid = errors.New("invalid input")
+
+// ParseSI parses an SI string back into the number and unit.
+//
+// e.g. ParseSI(2.2345pF) -> (2.2345e-12, "F", nil)
+func ParseSI(input string) (float64, string, error) {
+ found := riParseRegex.FindStringSubmatch(input)
+ if len(found) != 4 {
+ return 0, "", errInvalid
+ }
+ mag := revSIPrefixTable[found[2]]
+ unit := found[3]
+
+ base, err := strconv.ParseFloat(found[1], 64)
+ return base * mag, unit, err
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/go-humanize/times.go b/Godeps/_workspace/src/github.com/flosch/go-humanize/times.go
new file mode 100644
index 0000000..65788f6
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/go-humanize/times.go
@@ -0,0 +1,87 @@
+package humanize
+
+import (
+ "fmt"
+ "time"
+)
+
+// Seconds-based time units
+const (
+ Minute = 60
+ Hour = 60 * Minute
+ Day = 24 * Hour
+ Week = 7 * Day
+ Month = 30 * Day
+ Year = 12 * Month
+ LongTime = 37 * Year
+)
+
+// Time formats a time into a relative string.
+//
+// Time(someT) -> "3 weeks ago"
+func Time(then time.Time) string {
+ diff := then.Sub(time.Now())
+
+ return TimeDuration(diff)
+}
+
+func TimeDuration(diff time.Duration) string {
+ diff /= time.Second
+
+ lbl := "ago"
+
+ after := diff > 0
+
+ if after {
+ lbl = "from now"
+ diff += 1
+ } else {
+ diff *= -1
+ }
+
+ switch {
+ case diff <= 0:
+ return "now"
+ case diff <= 2:
+ return fmt.Sprintf("1 second %s", lbl)
+ case diff < 1*Minute:
+ return fmt.Sprintf("%d seconds %s", diff, lbl)
+
+ case diff < 2*Minute:
+ return fmt.Sprintf("1 minute %s", lbl)
+ case diff < 1*Hour:
+ return fmt.Sprintf("%d minutes %s", diff/Minute, lbl)
+
+ case diff < 2*Hour:
+ return fmt.Sprintf("1 hour %s", lbl)
+ case diff < 1*Day:
+ return fmt.Sprintf("%d hours %s", diff/Hour, lbl)
+
+ case diff < 2*Day:
+ return fmt.Sprintf("1 day %s", lbl)
+ case diff < 1*Week:
+ return fmt.Sprintf("%d days %s", diff/Day, lbl)
+
+ case diff < 2*Week:
+ return fmt.Sprintf("1 week %s", lbl)
+ case diff < 1*Month:
+ return fmt.Sprintf("%d weeks %s", diff/Week, lbl)
+
+ case diff < 2*Month:
+ return fmt.Sprintf("1 month %s", lbl)
+ case diff < 1*Year:
+ return fmt.Sprintf("%d months %s", diff/Month, lbl)
+
+ case diff < 18*Month:
+ return fmt.Sprintf("1 year %s", lbl)
+ case diff < 2*Year:
+ return fmt.Sprintf("2 years %s", lbl)
+ case diff < LongTime:
+ return fmt.Sprintf("%d years %s", diff/Year, lbl)
+ }
+
+ if after {
+ return "a while from now"
+ }
+ return "long ago"
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2-addons/.gitignore b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/.gitignore
new file mode 100644
index 0000000..5738814
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/.gitignore
@@ -0,0 +1,24 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+.idea
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2-addons/.travis.yml b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/.travis.yml
new file mode 100644
index 0000000..746ebb9
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+
+go:
+ - 1.3
+ - tip
+install:
+ - go get code.google.com/p/go.tools/cmd/cover
+ - go get gopkg.in/check.v1
+ - go get github.com/flosch/pongo2
+ - go get github.com/russross/blackfriday
+ - go get github.com/extemporalgenome/slug
+ - go get github.com/flosch/go-humanize
+script:
+ - go test -v -covermode=count -coverprofile=coverage.out
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2-addons/LICENSE b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/LICENSE
new file mode 100644
index 0000000..c8f3875
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Florian Schlachter
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2-addons/README.md b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/README.md
new file mode 100644
index 0000000..c975351
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/README.md
@@ -0,0 +1,51 @@
+# pongo2-addons
+
+[![Build Status](https://travis-ci.org/flosch/pongo2-addons.svg?branch=master)](https://travis-ci.org/flosch/pongo2-addons)
+[![Gratipay](http://img.shields.io/badge/gratipay-support%20pongo-brightgreen.svg)](https://gratipay.com/flosch/)
+
+Official filter and tag add-ons for [pongo2](https://github.com/flosch/pongo2). Since this package uses
+3rd-party-libraries, it's in its own package.
+
+## How to install and use
+
+Install via `go get -u github.com/flosch/pongo2-addons`. All dependencies will be automatically fetched and installed.
+
+Simply add the following import line **after** importing pongo2:
+
+ _ "github.com/flosch/pongo2-addons"
+
+All additional filters/tags will be registered automatically.
+
+## Addons
+
+### Filters
+
+ - Regulars
+ - **[filesizeformat](https://docs.djangoproject.com/en/dev/ref/templates/builtins/#filesizeformat)** (human-readable filesize; takes bytes as input)
+ - **[slugify](https://docs.djangoproject.com/en/dev/ref/templates/builtins/#slugify)** (creates a slug for a given input)
+ - **truncatesentences** / **truncatesentences_html** (returns the first X sentences [like truncatechars/truncatewords]; please provide X as a parameter)
+
+ - Markup
+ - **markdown**
+
+ - Humanize
+ - **[intcomma](https://docs.djangoproject.com/en/dev/ref/contrib/humanize/#intcomma)** (put decimal marks into the number)
+ - **[ordinal](https://docs.djangoproject.com/en/dev/ref/contrib/humanize/#ordinal)** (convert integer to its ordinal as string)
+ - **[naturalday](https://docs.djangoproject.com/en/dev/ref/contrib/humanize/#naturalday)** (converts `time.Time`-object into today/yesterday/tomorrow if possible; otherwise it will use `naturaltime`)
+ - **[timesince](https://docs.djangoproject.com/en/dev/ref/templates/builtins/#timesince)/[timeuntil](https://docs.djangoproject.com/en/1.6/ref/templates/builtins/#timeuntil)/[naturaltime](https://docs.djangoproject.com/en/dev/ref/contrib/humanize/#naturaltime)** (human-readable time [duration] indicator)
+
+### Tags
+
+(nothing yet)
+
+## TODO
+
+ - Support i18n/i10n
+
+## Used libraries
+
+I want to thank the authors of these libraries (which are being used in `pongo2-addons`):
+
+ * [github.com/extemporalgenome/slug](https://github.com/extemporalgenome/slug)
+ * [github.com/dustin/go-humanize](https://github.com/dustin/go-humanize)
+ * [github.com/russross/blackfriday](https://github.com/russross/blackfriday)
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2-addons/filters.go b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/filters.go
new file mode 100644
index 0000000..af1a5d1
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/filters.go
@@ -0,0 +1,288 @@
+package pongo2addons
+
+import (
+ "bytes"
+ "fmt"
+ "regexp"
+ "strings"
+ "time"
+ "unicode/utf8"
+
+ "github.com/flosch/pongo2"
+
+ "github.com/extemporalgenome/slug"
+ "github.com/flosch/go-humanize"
+ "github.com/russross/blackfriday"
+)
+
+func init() {
+ // Regulars
+ pongo2.RegisterFilter("slugify", filterSlugify)
+ pongo2.RegisterFilter("filesizeformat", filterFilesizeformat)
+ pongo2.RegisterFilter("truncatesentences", filterTruncatesentences)
+ pongo2.RegisterFilter("truncatesentences_html", filterTruncatesentencesHtml)
+
+ // Markup
+ pongo2.RegisterFilter("markdown", filterMarkdown)
+
+ // Humanize
+ pongo2.RegisterFilter("timeuntil", filterTimeuntilTimesince)
+ pongo2.RegisterFilter("timesince", filterTimeuntilTimesince)
+ pongo2.RegisterFilter("naturaltime", filterTimeuntilTimesince)
+ pongo2.RegisterFilter("naturalday", filterNaturalday)
+ pongo2.RegisterFilter("intcomma", filterIntcomma)
+ pongo2.RegisterFilter("ordinal", filterOrdinal)
+}
+
+func filterMarkdown(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
+ return pongo2.AsSafeValue(string(blackfriday.MarkdownCommon([]byte(in.String())))), nil
+}
+
+func filterSlugify(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
+ return pongo2.AsValue(slug.Slug(in.String())), nil
+}
+
+func filterFilesizeformat(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
+ return pongo2.AsValue(humanize.IBytes(uint64(in.Integer()))), nil
+}
+
+var filterTruncatesentencesRe = regexp.MustCompile(`(?U:.*[\w]{3,}.*([\d][\.!?][\D]|[\D][\.!?][\s]|[\n$]))`)
+
+func filterTruncatesentences(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
+ count := param.Integer()
+ if count <= 0 {
+ return pongo2.AsValue(""), nil
+ }
+ sentencens := filterTruncatesentencesRe.FindAllString(strings.TrimSpace(in.String()), -1)
+ return pongo2.AsValue(strings.TrimSpace(strings.Join(sentencens[:min(count, len(sentencens))], ""))), nil
+}
+
+// Taken from pongo2/filters_builtin.go
+func filterTruncateHtmlHelper(value string, new_output *bytes.Buffer, cond func() bool, fn func(c rune, s int, idx int) int, finalize func()) {
+ vLen := len(value)
+ tag_stack := make([]string, 0)
+ idx := 0
+
+ for idx < vLen && !cond() {
+ c, s := utf8.DecodeRuneInString(value[idx:])
+ if c == utf8.RuneError {
+ idx += s
+ continue
+ }
+
+ if c == '<' {
+ new_output.WriteRune(c)
+ idx += s // consume "<"
+
+ if idx+1 < vLen {
+ if value[idx] == '/' {
+ // Close tag
+
+ new_output.WriteString("/")
+
+ tag := ""
+ idx += 1 // consume "/"
+
+ for idx < vLen {
+ c2, size2 := utf8.DecodeRuneInString(value[idx:])
+ if c2 == utf8.RuneError {
+ idx += size2
+ continue
+ }
+
+ // End of tag found
+ if c2 == '>' {
+ idx++ // consume ">"
+ break
+ }
+ tag += string(c2)
+ idx += size2
+ }
+
+ if len(tag_stack) > 0 {
+ // Ideally, the close tag is TOP of tag stack
+ // In malformed HTML, it must not be, so iterate through the stack and remove the tag
+ for i := len(tag_stack) - 1; i >= 0; i-- {
+ if tag_stack[i] == tag {
+ // Found the tag
+ tag_stack[i] = tag_stack[len(tag_stack)-1]
+ tag_stack = tag_stack[:len(tag_stack)-1]
+ break
+ }
+ }
+ }
+
+ new_output.WriteString(tag)
+ new_output.WriteString(">")
+ } else {
+ // Open tag
+
+ tag := ""
+
+ params := false
+ for idx < vLen {
+ c2, size2 := utf8.DecodeRuneInString(value[idx:])
+ if c2 == utf8.RuneError {
+ idx += size2
+ continue
+ }
+
+ new_output.WriteRune(c2)
+
+ // End of tag found
+ if c2 == '>' {
+ idx++ // consume ">"
+ break
+ }
+
+ if !params {
+ if c2 == ' ' {
+ params = true
+ } else {
+ tag += string(c2)
+ }
+ }
+
+ idx += size2
+ }
+
+ // Add tag to stack
+ tag_stack = append(tag_stack, tag)
+ }
+ }
+ } else {
+ idx = fn(c, s, idx)
+ }
+ }
+
+ finalize()
+
+ for i := len(tag_stack) - 1; i >= 0; i-- {
+ tag := tag_stack[i]
+ // Close everything from the regular tag stack
+ new_output.WriteString(fmt.Sprintf("%s>", tag))
+ }
+}
+
+func filterTruncatesentencesHtml(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
+ count := param.Integer()
+ if count <= 0 {
+ return pongo2.AsValue(""), nil
+ }
+
+ value := in.String()
+ newLen := max(param.Integer(), 0)
+
+ new_output := bytes.NewBuffer(nil)
+
+ sentencefilter := 0
+
+ filterTruncateHtmlHelper(value, new_output, func() bool {
+ return sentencefilter >= newLen
+ }, func(_ rune, _ int, idx int) int {
+ // Get next word
+ word_found := false
+
+ for idx < len(value) {
+ c2, size2 := utf8.DecodeRuneInString(value[idx:])
+ if c2 == utf8.RuneError {
+ idx += size2
+ continue
+ }
+
+ if c2 == '<' {
+ // HTML tag start, don't consume it
+ return idx
+ }
+
+ new_output.WriteRune(c2)
+ idx += size2
+
+ if (c2 == '.' && !(idx+1 < len(value) && value[idx+1] >= '0' && value[idx+1] <= '9')) ||
+ c2 == '!' || c2 == '?' || c2 == '\n' {
+ // Sentence ends here, stop capturing it now
+ break
+ } else {
+ word_found = true
+ }
+ }
+
+ if word_found {
+ sentencefilter++
+ }
+
+ return idx
+ }, func() {})
+
+ return pongo2.AsSafeValue(new_output.String()), nil
+}
+
+func filterTimeuntilTimesince(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
+ basetime, is_time := in.Interface().(time.Time)
+ if !is_time {
+ return nil, &pongo2.Error{
+ Sender: "filter:timeuntil/timesince",
+ ErrorMsg: "time-value is not a time.Time-instance.",
+ }
+ }
+ var paramtime time.Time
+ if !param.IsNil() {
+ paramtime, is_time = param.Interface().(time.Time)
+ if !is_time {
+ return nil, &pongo2.Error{
+ Sender: "filter:timeuntil/timesince",
+ ErrorMsg: "time-parameter is not a time.Time-instance.",
+ }
+ }
+ } else {
+ paramtime = time.Now()
+ }
+
+ return pongo2.AsValue(humanize.TimeDuration(basetime.Sub(paramtime))), nil
+}
+
+func filterIntcomma(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
+ return pongo2.AsValue(humanize.Comma(int64(in.Integer()))), nil
+}
+
+func filterOrdinal(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
+ return pongo2.AsValue(humanize.Ordinal(in.Integer())), nil
+}
+
+func filterNaturalday(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
+	basetime, is_time := in.Interface().(time.Time)
+	if !is_time {
+		return nil, &pongo2.Error{
+			Sender:   "filter:naturalday",
+			ErrorMsg: "naturalday-value is not a time.Time-instance.",
+		}
+	}
+
+	var reference_time time.Time
+	if !param.IsNil() {
+		reference_time, is_time = param.Interface().(time.Time)
+		if !is_time {
+			return nil, &pongo2.Error{
+				Sender:   "filter:naturalday",
+				ErrorMsg: "naturalday-parameter is not a time.Time-instance.",
+			}
+		}
+	} else {
+		reference_time = time.Now()
+	}
+
+	d := reference_time.Sub(basetime) / time.Hour
+
+	switch {
+	case d >= 0 && d < 24:
+		// Today
+		return pongo2.AsValue("today"), nil
+	case d >= 24 && d < 48:
+		return pongo2.AsValue("yesterday"), nil
+	case d < 0 && d >= -24:
+		return pongo2.AsValue("tomorrow"), nil
+	}
+
+	// Default behaviour
+	return pongo2.ApplyFilter("naturaltime", in, param)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2-addons/helpers.go b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/helpers.go
new file mode 100644
index 0000000..008052c
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2-addons/helpers.go
@@ -0,0 +1,15 @@
+package pongo2addons
+
+func max(a, b int) int {
+ if a > b {
+ return a
+ }
+ return b
+}
+
+func min(a, b int) int {
+ if a < b {
+ return a
+ }
+ return b
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/.gitignore b/Godeps/_workspace/src/github.com/flosch/pongo2/.gitignore
new file mode 100644
index 0000000..37eaf44
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/.gitignore
@@ -0,0 +1,40 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+.idea
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+
+.project
+EBNF.txt
+test1.tpl
+pongo2_internal_test.go
+tpl-error.out
+/count.out
+/cover.out
+*.swp
+*.iml
+/cpu.out
+/mem.out
+/pongo2.test
+*.error
+/profile
+/coverage.out
+/pongo2_internal_test.ignore
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/.travis.yml b/Godeps/_workspace/src/github.com/flosch/pongo2/.travis.yml
new file mode 100644
index 0000000..18971e1
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/.travis.yml
@@ -0,0 +1,12 @@
+language: go
+
+go:
+ - 1.4
+ - tip
+install:
+ - go get golang.org/x/tools/cmd/cover
+ - go get github.com/mattn/goveralls
+ - go get gopkg.in/check.v1
+script:
+ - go test -v -covermode=count -coverprofile=coverage.out -bench . -cpu 1,4
+ - '[ "${TRAVIS_PULL_REQUEST}" = "false" ] && $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci -repotoken $COVERALLS_TOKEN || true'
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/AUTHORS b/Godeps/_workspace/src/github.com/flosch/pongo2/AUTHORS
new file mode 100644
index 0000000..b552df4
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/AUTHORS
@@ -0,0 +1,10 @@
+Main author and maintainer of pongo2:
+
+* Florian Schlachter
+
+Contributors (in no specific order):
+
+* @romanoaugusto88
+* @vitalbh
+
+Feel free to add yourself to the list or to modify your entry if you did a contribution.
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/LICENSE b/Godeps/_workspace/src/github.com/flosch/pongo2/LICENSE
new file mode 100644
index 0000000..e876f86
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2013-2014 Florian Schlachter
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/README.md b/Godeps/_workspace/src/github.com/flosch/pongo2/README.md
new file mode 100644
index 0000000..9c26041
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/README.md
@@ -0,0 +1,252 @@
+# [pongo](https://en.wikipedia.org/wiki/Pongo_%28genus%29)2
+
+[![Join the chat at https://gitter.im/flosch/pongo2](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/flosch/pongo2)
+[![GoDoc](https://godoc.org/github.com/flosch/pongo2?status.png)](https://godoc.org/github.com/flosch/pongo2)
+[![Build Status](https://travis-ci.org/flosch/pongo2.svg?branch=master)](https://travis-ci.org/flosch/pongo2)
+[![Coverage Status](https://coveralls.io/repos/flosch/pongo2/badge.png?branch=master)](https://coveralls.io/r/flosch/pongo2?branch=master)
+[![gratipay](http://img.shields.io/badge/gratipay-support%20pongo-brightgreen.svg)](https://gratipay.com/flosch/)
+[![Bountysource](https://www.bountysource.com/badge/tracker?tracker_id=3654947)](https://www.bountysource.com/trackers/3654947-pongo2?utm_source=3654947&utm_medium=shield&utm_campaign=TRACKER_BADGE)
+
+pongo2 is the successor of [pongo](https://github.com/flosch/pongo), a Django-syntax like templating-language.
+
+Install/update using `go get` (no dependencies required by pongo2):
+```
+go get -u github.com/flosch/pongo2
+```
+
+Please use the [issue tracker](https://github.com/flosch/pongo2/issues) if you're encountering any problems with pongo2 or if you need help with implementing tags or filters ([create a ticket!](https://github.com/flosch/pongo2/issues/new)). If possible, please use [playground](https://www.florian-schlachter.de/pongo2/) to create a short test case on what's wrong and include the link to the snippet in your issue.
+
+**New**: [Try pongo2 out in the pongo2 playground.](https://www.florian-schlachter.de/pongo2/)
+
+## First impression of a template
+
+```HTML+Django
+Our admins and users
+{# This is a short example to give you a quick overview of pongo2's syntax. #}
+
+{% macro user_details(user, is_admin=false) %}
+
+
+
= 40) || (user.karma > calc_avg_karma(userlist)+5) %}
+ class="karma-good"{% endif %}>
+
+
+ {{ user }}
+
+
+
+
This user registered {{ user.register_date|naturaltime }}.
+
+
+
The user's biography:
+
{{ user.biography|markdown|truncatewords_html:15 }}
+ read more
+
+ {% if is_admin %}
This user is an admin!
{% endif %}
+
+{% endmacro %}
+
+
+
+
+ Our admins
+ {% for admin in adminlist %}
+ {{ user_details(admin, true) }}
+ {% endfor %}
+
+ Our members
+ {% for user in userlist %}
+ {{ user_details(user) }}
+ {% endfor %}
+
+
+```
+
+## Development status
+
+**Latest stable release**: v3.0 (`go get -u gopkg.in/flosch/pongo2.v3` / [`v3`](https://github.com/flosch/pongo2/tree/v3)-branch) [[read the announcement](https://www.florian-schlachter.de/post/pongo2-v3/)]
+
+**Current development**: v4 (`master`-branch)
+
+*Note*: With the release of pongo v4 the branch v2 will be deprecated.
+
+**Deprecated versions** (not supported anymore): v1
+
+| Topic | Status |
+| ------------------------------------ | -------------------------------------------------------------------------------------- |
+| Django version compatibility: | [1.7](https://docs.djangoproject.com/en/1.7/ref/templates/builtins/) |
+| *Missing* (planned) **filters**: | none ([hints](https://github.com/flosch/pongo2/blob/master/filters_builtin.go#L3)) |
+| *Missing* (planned) **tags**: | none ([hints](https://github.com/flosch/pongo2/blob/master/tags.go#L3)) |
+
+Please also have a look on the [caveats](https://github.com/flosch/pongo2#caveats) and on the [official add-ons](https://github.com/flosch/pongo2#official).
+
+## Features (and new in pongo2)
+
+ * Entirely rewritten from the ground-up.
+ * [Advanced C-like expressions](https://github.com/flosch/pongo2/blob/master/template_tests/expressions.tpl).
+ * [Complex function calls within expressions](https://github.com/flosch/pongo2/blob/master/template_tests/function_calls_wrapper.tpl).
+ * [Easy API to create new filters and tags](http://godoc.org/github.com/flosch/pongo2#RegisterFilter) ([including parsing arguments](http://godoc.org/github.com/flosch/pongo2#Parser))
+ * Additional features:
+ * Macros including importing macros from other files (see [template_tests/macro.tpl](https://github.com/flosch/pongo2/blob/master/template_tests/macro.tpl))
+ * [Template sandboxing](https://godoc.org/github.com/flosch/pongo2#TemplateSet) ([directory patterns](http://golang.org/pkg/path/filepath/#Match), banned tags/filters)
+
+## Recent API changes within pongo2
+
+If you're using the `master`-branch of pongo2, you might be interested in this section. Since pongo2 is still in development (even though there is a first stable release!), there could be (backwards-incompatible) API changes over time. To keep track of these and therefore make it painless for you to adapt your codebase, I'll list them here.
+
+ * Function signature for tag execution changed: not taking a `bytes.Buffer` anymore; instead `Execute()`-functions are now taking a `TemplateWriter` interface.
+ * Function signature for tag and filter parsing/execution changed (`error` return type changed to `*Error`).
+ * `INodeEvaluator` has been removed and got replaced by `IEvaluator`. You can change your existing tags/filters by simply replacing the interface.
+ * Two new helper functions: [`RenderTemplateFile()`](https://godoc.org/github.com/flosch/pongo2#RenderTemplateFile) and [`RenderTemplateString()`](https://godoc.org/github.com/flosch/pongo2#RenderTemplateString).
+ * `Template.ExecuteRW()` is now [`Template.ExecuteWriter()`](https://godoc.org/github.com/flosch/pongo2#Template.ExecuteWriter)
+ * `Template.Execute*()` functions do now take a `pongo2.Context` directly (no pointer anymore).
+
+## How you can help
+
+ * Write [filters](https://github.com/flosch/pongo2/blob/master/filters_builtin.go#L3) / [tags](https://github.com/flosch/pongo2/blob/master/tags.go#L4) (see [tutorial](https://www.florian-schlachter.de/post/pongo2/)) by forking pongo2 and sending pull requests
+ * Write/improve code tests (use the following command to see what tests are missing: `go test -v -cover -covermode=count -coverprofile=cover.out && go tool cover -html=cover.out` or have a look on [gocover.io/github.com/flosch/pongo2](http://gocover.io/github.com/flosch/pongo2))
+ * Write/improve template tests (see the `template_tests/` directory)
+ * Write middleware, libraries and websites using pongo2. :-)
+
+# Documentation
+
+For a documentation on how the templating language works you can [head over to the Django documentation](https://docs.djangoproject.com/en/dev/topics/templates/). pongo2 aims to be compatible with it.
+
+You can access pongo2's API documentation on [godoc](https://godoc.org/github.com/flosch/pongo2).
+
+## Blog post series
+
+ * [pongo2 v3 released](https://www.florian-schlachter.de/post/pongo2-v3/)
+ * [pongo2 v2 released](https://www.florian-schlachter.de/post/pongo2-v2/)
+ * [pongo2 1.0 released](https://www.florian-schlachter.de/post/pongo2-10/) [August 8th 2014]
+ * [pongo2 playground](https://www.florian-schlachter.de/post/pongo2-playground/) [August 1st 2014]
+ * [Release of pongo2 1.0-rc1 + pongo2-addons](https://www.florian-schlachter.de/post/pongo2-10-rc1/) [July 30th 2014]
+ * [Introduction to pongo2 + migration- and "how to write tags/filters"-tutorial.](https://www.florian-schlachter.de/post/pongo2/) [June 29th 2014]
+
+## Caveats
+
+### Filters
+
+ * **date** / **time**: The `date` and `time` filter are taking the Golang specific time- and date-format (not Django's one) currently. [Take a look on the format here](http://golang.org/pkg/time/#Time.Format).
+ * **stringformat**: `stringformat` does **not** take Python's string format syntax as a parameter, instead it takes Go's. Essentially `{{ 3.14|stringformat:"pi is %.2f" }}` is `fmt.Sprintf("pi is %.2f", 3.14)`.
+ * **escape** / **force_escape**: Unlike Django's behaviour, the `escape`-filter is applied immediately. Therefore there is no need for a `force_escape`-filter yet.
+
+### Tags
+
+ * **for**: All the `forloop` fields (like `forloop.counter`) are written with a capital letter at the beginning. For example, the `counter` can be accessed by `forloop.Counter` and the parentloop by `forloop.Parentloop`.
+ * **now**: takes Go's time format (see **date** and **time**-filter).
+
+### Misc
+
+ * **not in-operator**: You can check whether a map/struct/string contains a key/field/substring by using the in-operator (or the negation of it):
+ `{% if key in map %}Key is in map{% else %}Key not in map{% endif %}` or `{% if !(key in map) %}Key is NOT in map{% else %}Key is in map{% endif %}`.
+
+# Add-ons, libraries and helpers
+
+## Official
+
+ * [ponginae](https://github.com/flosch/ponginae) - A web-framework for Go (using pongo2).
+ * [pongo2-tools](https://github.com/flosch/pongo2-tools) - Official tools and helpers for pongo2
+ * [pongo2-addons](https://github.com/flosch/pongo2-addons) - Official additional filters/tags for pongo2 (for example a **markdown**-filter). They are in their own repository because they're relying on 3rd-party-libraries.
+
+## 3rd-party
+
+ * [beego-pongo2](https://github.com/oal/beego-pongo2) - A tiny little helper for using Pongo2 with [Beego](https://github.com/astaxie/beego).
+ * [beego-pongo2.v2](https://github.com/ipfans/beego-pongo2.v2) - Same as `beego-pongo2`, but for pongo2 v2.
+ * [macaron-pongo2](https://github.com/macaron-contrib/pongo2) - pongo2 support for [Macaron](https://github.com/Unknwon/macaron), a modular web framework.
+ * [ginpongo2](https://github.com/ngerakines/ginpongo2) - middleware for [gin](github.com/gin-gonic/gin) to use pongo2 templates
+ * [pongo2-trans](https://github.com/fromYukki/pongo2trans) - `trans`-tag implementation for internationalization
+ * [tpongo2](https://github.com/tango-contrib/tpongo2) - pongo2 support for [Tango](https://github.com/lunny/tango), a micro-kernel & pluggable web framework.
+
+Please add your project to this list and send me a pull request when you've developed something nice for pongo2.
+
+# API-usage examples
+
+Please see the documentation for a full list of provided API methods.
+
+## A tiny example (template string)
+
+```Go
+// Compile the template first (i. e. creating the AST)
+tpl, err := pongo2.FromString("Hello {{ name|capfirst }}!")
+if err != nil {
+ panic(err)
+}
+// Now you can render the template with the given
+// pongo2.Context how often you want to.
+out, err := tpl.Execute(pongo2.Context{"name": "florian"})
+if err != nil {
+ panic(err)
+}
+fmt.Println(out) // Output: Hello Florian!
+```
+
+## Example server-usage (template file)
+
+```Go
+package main
+
+import (
+ "github.com/flosch/pongo2"
+ "net/http"
+)
+
+// Pre-compiling the templates at application startup using the
+// little Must()-helper function (Must() will panic if FromFile()
+// or FromString() will return with an error - that's it).
+// It's faster to pre-compile it anywhere at startup and only
+// execute the template later.
+var tplExample = pongo2.Must(pongo2.FromFile("example.html"))
+
+func examplePage(w http.ResponseWriter, r *http.Request) {
+ // Execute the template per HTTP request
+ err := tplExample.ExecuteWriter(pongo2.Context{"query": r.FormValue("query")}, w)
+ if err != nil {
+ http.Error(w, err.Error(), http.StatusInternalServerError)
+ }
+}
+
+func main() {
+ http.HandleFunc("/", examplePage)
+ http.ListenAndServe(":8080", nil)
+}
+```
+
+# Benchmark
+
+The benchmarks have been run on the my machine (`Intel(R) Core(TM) i7-2600 CPU @ 3.40GHz`) using the command:
+
+ go test -bench . -cpu 1,2,4,8
+
+All benchmarks are compiling (depends on the benchmark) and executing the `template_tests/complex.tpl` template.
+
+The results are:
+
+ BenchmarkExecuteComplexWithSandboxActive 50000 60450 ns/op
+ BenchmarkExecuteComplexWithSandboxActive-2 50000 56998 ns/op
+ BenchmarkExecuteComplexWithSandboxActive-4 50000 60343 ns/op
+ BenchmarkExecuteComplexWithSandboxActive-8 50000 64229 ns/op
+ BenchmarkCompileAndExecuteComplexWithSandboxActive 10000 164410 ns/op
+ BenchmarkCompileAndExecuteComplexWithSandboxActive-2 10000 156682 ns/op
+ BenchmarkCompileAndExecuteComplexWithSandboxActive-4 10000 164821 ns/op
+ BenchmarkCompileAndExecuteComplexWithSandboxActive-8 10000 171806 ns/op
+ BenchmarkParallelExecuteComplexWithSandboxActive 50000 60428 ns/op
+ BenchmarkParallelExecuteComplexWithSandboxActive-2 50000 31887 ns/op
+ BenchmarkParallelExecuteComplexWithSandboxActive-4 100000 22810 ns/op
+ BenchmarkParallelExecuteComplexWithSandboxActive-8 100000 18820 ns/op
+ BenchmarkExecuteComplexWithoutSandbox 50000 56942 ns/op
+ BenchmarkExecuteComplexWithoutSandbox-2 50000 56168 ns/op
+ BenchmarkExecuteComplexWithoutSandbox-4 50000 57838 ns/op
+ BenchmarkExecuteComplexWithoutSandbox-8 50000 60539 ns/op
+ BenchmarkCompileAndExecuteComplexWithoutSandbox 10000 162086 ns/op
+ BenchmarkCompileAndExecuteComplexWithoutSandbox-2 10000 159771 ns/op
+ BenchmarkCompileAndExecuteComplexWithoutSandbox-4 10000 163826 ns/op
+ BenchmarkCompileAndExecuteComplexWithoutSandbox-8 10000 169062 ns/op
+ BenchmarkParallelExecuteComplexWithoutSandbox 50000 57152 ns/op
+ BenchmarkParallelExecuteComplexWithoutSandbox-2 50000 30276 ns/op
+ BenchmarkParallelExecuteComplexWithoutSandbox-4 100000 22065 ns/op
+ BenchmarkParallelExecuteComplexWithoutSandbox-8 100000 18034 ns/op
+
+Benchmarked on October 2nd 2014.
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/context.go b/Godeps/_workspace/src/github.com/flosch/pongo2/context.go
new file mode 100644
index 0000000..7b728ec
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/context.go
@@ -0,0 +1,125 @@
+package pongo2
+
+import (
+ "fmt"
+ "regexp"
+)
+
+var reIdentifiers = regexp.MustCompile("^[a-zA-Z0-9_]+$")
+
+// A Context type provides constants, variables, instances or functions to a template.
+//
+// pongo2 automatically provides meta-information or functions through the "pongo2"-key.
+// Currently, context["pongo2"] contains the following keys:
+// 1. version: returns the version string
+//
+// Template examples for accessing items from your context:
+// {{ myconstant }}
+// {{ myfunc("test", 42) }}
+// {{ user.name }}
+// {{ pongo2.version }}
+type Context map[string]interface{}
+
+func (c Context) checkForValidIdentifiers() *Error {
+ for k, v := range c {
+ if !reIdentifiers.MatchString(k) {
+ return &Error{
+ Sender: "checkForValidIdentifiers",
+ ErrorMsg: fmt.Sprintf("Context-key '%s' (value: '%+v') is not a valid identifier.", k, v),
+ }
+ }
+ }
+ return nil
+}
+
+// Update updates this context with the key/value-pairs from another context.
+func (c Context) Update(other Context) Context {
+ for k, v := range other {
+ c[k] = v
+ }
+ return c
+}
+
+// ExecutionContext contains all data important for the current rendering state.
+//
+// If you're writing a custom tag, your tag's Execute()-function will
+// have access to the ExecutionContext. This struct stores anything
+// about the current rendering process's Context including
+// the Context provided by the user (field Public).
+// You can safely use the Private context to provide data to the user's
+// template (like a 'forloop'-information). The Shared-context is used
+// to share data between tags. All ExecutionContexts share this context.
+//
+// Please be careful when accessing the Public data.
+// PLEASE DO NOT MODIFY THE PUBLIC CONTEXT (read-only).
+//
+// To create your own execution context within tags, use the
+// NewChildExecutionContext(parent) function.
+type ExecutionContext struct {
+ template *Template
+
+ Autoescape bool
+ Public Context
+ Private Context
+ Shared Context
+}
+
+var pongo2MetaContext = Context{
+ "version": Version,
+}
+
+func newExecutionContext(tpl *Template, ctx Context) *ExecutionContext {
+ privateCtx := make(Context)
+
+ // Make the pongo2-related funcs/vars available to the context
+ privateCtx["pongo2"] = pongo2MetaContext
+
+ return &ExecutionContext{
+ template: tpl,
+
+ Public: ctx,
+ Private: privateCtx,
+ Autoescape: true,
+ }
+}
+
+func NewChildExecutionContext(parent *ExecutionContext) *ExecutionContext {
+ newctx := &ExecutionContext{
+ template: parent.template,
+
+ Public: parent.Public,
+ Private: make(Context),
+ Autoescape: parent.Autoescape,
+ }
+ newctx.Shared = parent.Shared
+
+ // Copy all existing private items
+ newctx.Private.Update(parent.Private)
+
+ return newctx
+}
+
+func (ctx *ExecutionContext) Error(msg string, token *Token) *Error {
+ filename := ctx.template.name
+ var line, col int
+ if token != nil {
+ // No tokens available
+ // TODO: Add location (from where?)
+ filename = token.Filename
+ line = token.Line
+ col = token.Col
+ }
+ return &Error{
+ Template: ctx.template,
+ Filename: filename,
+ Line: line,
+ Column: col,
+ Token: token,
+ Sender: "execution",
+ ErrorMsg: msg,
+ }
+}
+
+func (ctx *ExecutionContext) Logf(format string, args ...interface{}) {
+ ctx.template.set.logf(format, args...)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/doc.go b/Godeps/_workspace/src/github.com/flosch/pongo2/doc.go
new file mode 100644
index 0000000..5a23e2b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/doc.go
@@ -0,0 +1,31 @@
+// A Django-syntax like template-engine
+//
+// Blog posts about pongo2 (including introduction and migration):
+// https://www.florian-schlachter.de/?tag=pongo2
+//
+// Complete documentation on the template language:
+// https://docs.djangoproject.com/en/dev/topics/templates/
+//
+// Try out pongo2 live in the pongo2 playground:
+// https://www.florian-schlachter.de/pongo2/
+//
+// Make sure to read README.md in the repository as well.
+//
+// A tiny example with template strings:
+//
+// (Snippet on playground: https://www.florian-schlachter.de/pongo2/?id=1206546277)
+//
+// // Compile the template first (i. e. creating the AST)
+// tpl, err := pongo2.FromString("Hello {{ name|capfirst }}!")
+// if err != nil {
+// panic(err)
+// }
+// // Now you can render the template with the given
+// // pongo2.Context how often you want to.
+// out, err := tpl.Execute(pongo2.Context{"name": "fred"})
+// if err != nil {
+// panic(err)
+// }
+// fmt.Println(out) // Output: Hello Fred!
+//
+package pongo2
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/docs/examples.md b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/examples.md
new file mode 100644
index 0000000..a98bb3a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/examples.md
@@ -0,0 +1 @@
+(Stub, TBA)
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/docs/filters.md b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/filters.md
new file mode 100644
index 0000000..40a3253
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/filters.md
@@ -0,0 +1,68 @@
+TODO:
+
+* What are filters?
+* List+explain all existing filters (pongo2 + pongo2-addons)
+
+Implemented filters so far which needs documentation:
+
+* escape
+* safe
+* escapejs
+* add
+* addslashes
+* capfirst
+* center
+* cut
+* date
+* default
+* default_if_none
+* divisibleby
+* first
+* floatformat
+* get_digit
+* iriencode
+* join
+* last
+* length
+* length_is
+* linebreaks
+* linebreaksbr
+* linenumbers
+* ljust
+* lower
+* make_list
+* phone2numeric
+* pluralize
+* random
+* removetags
+* rjust
+* slice
+* stringformat
+* striptags
+* time
+* title
+* truncatechars
+* truncatechars_html
+* truncatewords
+* truncatewords_html
+* upper
+* urlencode
+* urlize
+* urlizetrunc
+* wordcount
+* wordwrap
+* yesno
+
+* filesizeformat*
+* slugify*
+* truncatesentences*
+* truncatesentences_html*
+* markdown*
+* intcomma*
+* ordinal*
+* naturalday*
+* timesince*
+* timeuntil*
+* naturaltime*
+
+Filters marked with * are available through [pongo2-addons](https://github.com/flosch/pongo2-addons).
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/docs/index.md b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/index.md
new file mode 100644
index 0000000..a98bb3a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/index.md
@@ -0,0 +1 @@
+(Stub, TBA)
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/docs/macros.md b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/macros.md
new file mode 100644
index 0000000..2b27069
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/macros.md
@@ -0,0 +1 @@
+(Stub, TBA)
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/docs/tags.md b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/tags.md
new file mode 100644
index 0000000..dae4566
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/tags.md
@@ -0,0 +1,31 @@
+TODO:
+
+* What are tags?
+* List+explain all existing tags (pongo2 + pongo2-addons)
+
+Implemented tags so far which needs documentation:
+
+* autoescape
+* block
+* comment
+* cycle
+* extends
+* filter
+* firstof
+* for
+* if
+* ifchanged
+* ifequal
+* ifnotequal
+* import
+* include
+* lorem
+* macro
+* now
+* set
+* spaceless
+* ssi
+* templatetag
+* verbatim
+* widthratio
+* with
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/docs/template_sets.md b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/template_sets.md
new file mode 100644
index 0000000..a98bb3a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/template_sets.md
@@ -0,0 +1 @@
+(Stub, TBA)
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/docs/write_filters.md b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/write_filters.md
new file mode 100644
index 0000000..e69de29
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/docs/write_tags.md b/Godeps/_workspace/src/github.com/flosch/pongo2/docs/write_tags.md
new file mode 100644
index 0000000..e69de29
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/error.go b/Godeps/_workspace/src/github.com/flosch/pongo2/error.go
new file mode 100644
index 0000000..80d1147
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/error.go
@@ -0,0 +1,91 @@
+package pongo2
+
+import (
+ "bufio"
+ "fmt"
+ "os"
+)
+
+// The Error type is being used to address an error during lexing, parsing or
+// execution. If you want to return an error object (for example in your own
+// tag or filter) fill this object with as much information as you have.
+// Make sure "Sender" is always given (if you're returning an error within
+// a filter, make Sender equals 'filter:yourfilter'; same goes for tags: 'tag:mytag').
+// It's okay if you only fill in ErrorMsg if you don't have any other details at hand.
+type Error struct {
+ Template *Template
+ Filename string
+ Line int
+ Column int
+ Token *Token
+ Sender string
+ ErrorMsg string
+}
+
+func (e *Error) updateFromTokenIfNeeded(template *Template, t *Token) *Error {
+ if e.Template == nil {
+ e.Template = template
+ }
+
+ if e.Token == nil {
+ e.Token = t
+ if e.Line <= 0 {
+ e.Line = t.Line
+ e.Column = t.Col
+ }
+ }
+
+ return e
+}
+
+// Returns a nice formatted error string.
+func (e *Error) Error() string {
+ s := "[Error"
+ if e.Sender != "" {
+ s += " (where: " + e.Sender + ")"
+ }
+ if e.Filename != "" {
+ s += " in " + e.Filename
+ }
+ if e.Line > 0 {
+ s += fmt.Sprintf(" | Line %d Col %d", e.Line, e.Column)
+ if e.Token != nil {
+ s += fmt.Sprintf(" near '%s'", e.Token.Val)
+ }
+ }
+ s += "] "
+ s += e.ErrorMsg
+ return s
+}
+
+// RawLine returns the affected line from the original template, if available.
+func (e *Error) RawLine() (line string, available bool) {
+ if e.Line <= 0 || e.Filename == "" {
+ return "", false
+ }
+
+ filename := e.Filename
+ if e.Template != nil {
+ filename = e.Template.set.resolveFilename(e.Template, e.Filename)
+ }
+ file, err := os.Open(filename)
+ if err != nil {
+ panic(err)
+ }
+ defer func() {
+ err := file.Close()
+ if err != nil {
+ panic(err)
+ }
+ }()
+
+ scanner := bufio.NewScanner(file)
+ l := 0
+ for scanner.Scan() {
+ l++
+ if l == e.Line {
+ return scanner.Text(), true
+ }
+ }
+ return "", false
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/filters.go b/Godeps/_workspace/src/github.com/flosch/pongo2/filters.go
new file mode 100644
index 0000000..13061f3
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/filters.go
@@ -0,0 +1,133 @@
+package pongo2
+
+import (
+ "fmt"
+)
+
+type FilterFunction func(in *Value, param *Value) (out *Value, err *Error)
+
+var filters map[string]FilterFunction
+
+func init() {
+ filters = make(map[string]FilterFunction)
+}
+
+// Registers a new filter. If there's already a filter with the same
+// name, RegisterFilter will panic. You usually want to call this
+// function in the filter's init() function:
+// http://golang.org/doc/effective_go.html#init
+//
+// See http://www.florian-schlachter.de/post/pongo2/ for more about
+// writing filters and tags.
+func RegisterFilter(name string, fn FilterFunction) {
+ _, existing := filters[name]
+ if existing {
+ panic(fmt.Sprintf("Filter with name '%s' is already registered.", name))
+ }
+ filters[name] = fn
+}
+
+// Replaces an already registered filter with a new implementation. Use this
+// function with caution since it allows you to change existing filter behaviour.
+func ReplaceFilter(name string, fn FilterFunction) {
+ _, existing := filters[name]
+ if !existing {
+ panic(fmt.Sprintf("Filter with name '%s' does not exist (therefore cannot be overridden).", name))
+ }
+ filters[name] = fn
+}
+
+// Like ApplyFilter, but panics on an error
+func MustApplyFilter(name string, value *Value, param *Value) *Value {
+ val, err := ApplyFilter(name, value, param)
+ if err != nil {
+ panic(err)
+ }
+ return val
+}
+
+// Applies a filter to a given value using the given parameters. Returns a *pongo2.Value or an error.
+func ApplyFilter(name string, value *Value, param *Value) (*Value, *Error) {
+ fn, existing := filters[name]
+ if !existing {
+ return nil, &Error{
+ Sender: "applyfilter",
+ ErrorMsg: fmt.Sprintf("Filter with name '%s' not found.", name),
+ }
+ }
+
+ // Make sure param is a *Value
+ if param == nil {
+ param = AsValue(nil)
+ }
+
+ return fn(value, param)
+}
+
+type filterCall struct {
+ token *Token
+
+ name string
+ parameter IEvaluator
+
+ filterFunc FilterFunction
+}
+
+func (fc *filterCall) Execute(v *Value, ctx *ExecutionContext) (*Value, *Error) {
+ var param *Value
+ var err *Error
+
+ if fc.parameter != nil {
+ param, err = fc.parameter.Evaluate(ctx)
+ if err != nil {
+ return nil, err
+ }
+ } else {
+ param = AsValue(nil)
+ }
+
+ filteredValue, err := fc.filterFunc(v, param)
+ if err != nil {
+ return nil, err.updateFromTokenIfNeeded(ctx.template, fc.token)
+ }
+ return filteredValue, nil
+}
+
+// Filter = IDENT | IDENT ":" FilterArg | IDENT "|" Filter
+func (p *Parser) parseFilter() (*filterCall, *Error) {
+ identToken := p.MatchType(TokenIdentifier)
+
+ // Check filter ident
+ if identToken == nil {
+ return nil, p.Error("Filter name must be an identifier.", nil)
+ }
+
+ filter := &filterCall{
+ token: identToken,
+ name: identToken.Val,
+ }
+
+ // Get the appropriate filter function and bind it
+ filterFn, exists := filters[identToken.Val]
+ if !exists {
+ return nil, p.Error(fmt.Sprintf("Filter '%s' does not exist.", identToken.Val), identToken)
+ }
+
+ filter.filterFunc = filterFn
+
+ // Check for filter-argument (2 tokens needed: ':' ARG)
+ if p.Match(TokenSymbol, ":") != nil {
+ if p.Peek(TokenSymbol, "}}") != nil {
+ return nil, p.Error("Filter parameter required after ':'.", nil)
+ }
+
+ // Get filter argument expression
+ v, err := p.parseVariableOrLiteral()
+ if err != nil {
+ return nil, err
+ }
+ filter.parameter = v
+ }
+
+ return filter, nil
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/filters_builtin.go b/Godeps/_workspace/src/github.com/flosch/pongo2/filters_builtin.go
new file mode 100644
index 0000000..a267fa2
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/filters_builtin.go
@@ -0,0 +1,901 @@
+package pongo2
+
+/* Filters that are provided through github.com/flosch/pongo2-addons:
+ ------------------------------------------------------------------
+
+ filesizeformat
+ slugify
+ timesince
+ timeuntil
+
+ Filters that won't be added:
+ ----------------------------
+
+ get_static_prefix (reason: web-framework specific)
+ pprint (reason: python-specific)
+ static (reason: web-framework specific)
+
+ Reconsideration (not implemented yet):
+ --------------------------------------
+
+ force_escape (reason: not yet needed since this is the behaviour of pongo2's escape filter)
+ safeseq (reason: same reason as `force_escape`)
+ unordered_list (python-specific; not sure whether needed or not)
+ dictsort (python-specific; maybe one could add a filter to sort a list of structs by a specific field name)
+ dictsortreversed (see dictsort)
+*/
+
+import (
+ "bytes"
+ "fmt"
+ "math/rand"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+ "unicode/utf8"
+)
+
+func init() {
+ rand.Seed(time.Now().Unix())
+
+ RegisterFilter("escape", filterEscape)
+ RegisterFilter("safe", filterSafe)
+ RegisterFilter("escapejs", filterEscapejs)
+
+ RegisterFilter("add", filterAdd)
+ RegisterFilter("addslashes", filterAddslashes)
+ RegisterFilter("capfirst", filterCapfirst)
+ RegisterFilter("center", filterCenter)
+ RegisterFilter("cut", filterCut)
+ RegisterFilter("date", filterDate)
+ RegisterFilter("default", filterDefault)
+ RegisterFilter("default_if_none", filterDefaultIfNone)
+ RegisterFilter("divisibleby", filterDivisibleby)
+ RegisterFilter("first", filterFirst)
+ RegisterFilter("floatformat", filterFloatformat)
+ RegisterFilter("get_digit", filterGetdigit)
+ RegisterFilter("iriencode", filterIriencode)
+ RegisterFilter("join", filterJoin)
+ RegisterFilter("last", filterLast)
+ RegisterFilter("length", filterLength)
+ RegisterFilter("length_is", filterLengthis)
+ RegisterFilter("linebreaks", filterLinebreaks)
+ RegisterFilter("linebreaksbr", filterLinebreaksbr)
+ RegisterFilter("linenumbers", filterLinenumbers)
+ RegisterFilter("ljust", filterLjust)
+ RegisterFilter("lower", filterLower)
+ RegisterFilter("make_list", filterMakelist)
+ RegisterFilter("phone2numeric", filterPhone2numeric)
+ RegisterFilter("pluralize", filterPluralize)
+ RegisterFilter("random", filterRandom)
+ RegisterFilter("removetags", filterRemovetags)
+ RegisterFilter("rjust", filterRjust)
+ RegisterFilter("slice", filterSlice)
+ RegisterFilter("stringformat", filterStringformat)
+ RegisterFilter("striptags", filterStriptags)
+ RegisterFilter("time", filterDate) // time uses filterDate (same golang-format)
+ RegisterFilter("title", filterTitle)
+ RegisterFilter("truncatechars", filterTruncatechars)
+ RegisterFilter("truncatechars_html", filterTruncatecharsHTML)
+ RegisterFilter("truncatewords", filterTruncatewords)
+ RegisterFilter("truncatewords_html", filterTruncatewordsHTML)
+ RegisterFilter("upper", filterUpper)
+ RegisterFilter("urlencode", filterUrlencode)
+ RegisterFilter("urlize", filterUrlize)
+ RegisterFilter("urlizetrunc", filterUrlizetrunc)
+ RegisterFilter("wordcount", filterWordcount)
+ RegisterFilter("wordwrap", filterWordwrap)
+ RegisterFilter("yesno", filterYesno)
+
+ RegisterFilter("float", filterFloat) // pongo-specific
+ RegisterFilter("integer", filterInteger) // pongo-specific
+}
+
// filterTruncatecharsHelper shortens s to at most newLen runes. When the
// string is actually truncated and at least three runes fit, the tail of
// the shortened text is replaced by "...".
func filterTruncatecharsHelper(s string, newLen int) string {
	runes := []rune(s)
	if newLen >= len(runes) {
		// Nothing to truncate.
		return string(runes)
	}
	if newLen < 3 {
		// Not enough space for the ellipsis.
		return string(runes[:newLen])
	}
	return string(runes[:newLen-3]) + "..."
}
+
+func filterTruncateHTMLHelper(value string, newOutput *bytes.Buffer, cond func() bool, fn func(c rune, s int, idx int) int, finalize func()) {
+ vLen := len(value)
+ var tagStack []string
+ idx := 0
+
+ for idx < vLen && !cond() {
+ c, s := utf8.DecodeRuneInString(value[idx:])
+ if c == utf8.RuneError {
+ idx += s
+ continue
+ }
+
+ if c == '<' {
+ newOutput.WriteRune(c)
+ idx += s // consume "<"
+
+ if idx+1 < vLen {
+ if value[idx] == '/' {
+ // Close tag
+
+ newOutput.WriteString("/")
+
+ tag := ""
+ idx++ // consume "/"
+
+ for idx < vLen {
+ c2, size2 := utf8.DecodeRuneInString(value[idx:])
+ if c2 == utf8.RuneError {
+ idx += size2
+ continue
+ }
+
+ // End of tag found
+ if c2 == '>' {
+ idx++ // consume ">"
+ break
+ }
+ tag += string(c2)
+ idx += size2
+ }
+
+ if len(tagStack) > 0 {
+ // Ideally, the close tag is TOP of tag stack
+ // In malformed HTML, it must not be, so iterate through the stack and remove the tag
+ for i := len(tagStack) - 1; i >= 0; i-- {
+ if tagStack[i] == tag {
+ // Found the tag
+ tagStack[i] = tagStack[len(tagStack)-1]
+ tagStack = tagStack[:len(tagStack)-1]
+ break
+ }
+ }
+ }
+
+ newOutput.WriteString(tag)
+ newOutput.WriteString(">")
+ } else {
+ // Open tag
+
+ tag := ""
+
+ params := false
+ for idx < vLen {
+ c2, size2 := utf8.DecodeRuneInString(value[idx:])
+ if c2 == utf8.RuneError {
+ idx += size2
+ continue
+ }
+
+ newOutput.WriteRune(c2)
+
+ // End of tag found
+ if c2 == '>' {
+ idx++ // consume ">"
+ break
+ }
+
+ if !params {
+ if c2 == ' ' {
+ params = true
+ } else {
+ tag += string(c2)
+ }
+ }
+
+ idx += size2
+ }
+
+ // Add tag to stack
+ tagStack = append(tagStack, tag)
+ }
+ }
+ } else {
+ idx = fn(c, s, idx)
+ }
+ }
+
+ finalize()
+
+ for i := len(tagStack) - 1; i >= 0; i-- {
+ tag := tagStack[i]
+ // Close everything from the regular tag stack
+ newOutput.WriteString(fmt.Sprintf("%s>", tag))
+ }
+}
+
+func filterTruncatechars(in *Value, param *Value) (*Value, *Error) {
+ s := in.String()
+ newLen := param.Integer()
+ return AsValue(filterTruncatecharsHelper(s, newLen)), nil
+}
+
+func filterTruncatecharsHTML(in *Value, param *Value) (*Value, *Error) {
+ value := in.String()
+ newLen := max(param.Integer()-3, 0)
+
+ newOutput := bytes.NewBuffer(nil)
+
+ textcounter := 0
+
+ filterTruncateHTMLHelper(value, newOutput, func() bool {
+ return textcounter >= newLen
+ }, func(c rune, s int, idx int) int {
+ textcounter++
+ newOutput.WriteRune(c)
+
+ return idx + s
+ }, func() {
+ if textcounter >= newLen && textcounter < len(value) {
+ newOutput.WriteString("...")
+ }
+ })
+
+ return AsSafeValue(newOutput.String()), nil
+}
+
+func filterTruncatewords(in *Value, param *Value) (*Value, *Error) {
+ words := strings.Fields(in.String())
+ n := param.Integer()
+ if n <= 0 {
+ return AsValue(""), nil
+ }
+ nlen := min(len(words), n)
+ out := make([]string, 0, nlen)
+ for i := 0; i < nlen; i++ {
+ out = append(out, words[i])
+ }
+
+ if n < len(words) {
+ out = append(out, "...")
+ }
+
+ return AsValue(strings.Join(out, " ")), nil
+}
+
+func filterTruncatewordsHTML(in *Value, param *Value) (*Value, *Error) {
+ value := in.String()
+ newLen := max(param.Integer(), 0)
+
+ newOutput := bytes.NewBuffer(nil)
+
+ wordcounter := 0
+
+ filterTruncateHTMLHelper(value, newOutput, func() bool {
+ return wordcounter >= newLen
+ }, func(_ rune, _ int, idx int) int {
+ // Get next word
+ wordFound := false
+
+ for idx < len(value) {
+ c2, size2 := utf8.DecodeRuneInString(value[idx:])
+ if c2 == utf8.RuneError {
+ idx += size2
+ continue
+ }
+
+ if c2 == '<' {
+ // HTML tag start, don't consume it
+ return idx
+ }
+
+ newOutput.WriteRune(c2)
+ idx += size2
+
+ if c2 == ' ' || c2 == '.' || c2 == ',' || c2 == ';' {
+ // Word ends here, stop capturing it now
+ break
+ } else {
+ wordFound = true
+ }
+ }
+
+ if wordFound {
+ wordcounter++
+ }
+
+ return idx
+ }, func() {
+ if wordcounter >= newLen {
+ newOutput.WriteString("...")
+ }
+ })
+
+ return AsSafeValue(newOutput.String()), nil
+}
+
+func filterEscape(in *Value, param *Value) (*Value, *Error) {
+ output := strings.Replace(in.String(), "&", "&", -1)
+ output = strings.Replace(output, ">", ">", -1)
+ output = strings.Replace(output, "<", "<", -1)
+ output = strings.Replace(output, "\"", """, -1)
+ output = strings.Replace(output, "'", "'", -1)
+ return AsValue(output), nil
+}
+
+func filterSafe(in *Value, param *Value) (*Value, *Error) {
+ return in, nil // nothing to do here, just to keep track of the safe application
+}
+
+func filterEscapejs(in *Value, param *Value) (*Value, *Error) {
+ sin := in.String()
+
+ var b bytes.Buffer
+
+ idx := 0
+ for idx < len(sin) {
+ c, size := utf8.DecodeRuneInString(sin[idx:])
+ if c == utf8.RuneError {
+ idx += size
+ continue
+ }
+
+ if c == '\\' {
+ // Escape seq?
+ if idx+1 < len(sin) {
+ switch sin[idx+1] {
+ case 'r':
+ b.WriteString(fmt.Sprintf(`\u%04X`, '\r'))
+ idx += 2
+ continue
+ case 'n':
+ b.WriteString(fmt.Sprintf(`\u%04X`, '\n'))
+ idx += 2
+ continue
+ /*case '\'':
+ b.WriteString(fmt.Sprintf(`\u%04X`, '\''))
+ idx += 2
+ continue
+ case '"':
+ b.WriteString(fmt.Sprintf(`\u%04X`, '"'))
+ idx += 2
+ continue*/
+ }
+ }
+ }
+
+ if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == ' ' || c == '/' {
+ b.WriteRune(c)
+ } else {
+ b.WriteString(fmt.Sprintf(`\u%04X`, c))
+ }
+
+ idx += size
+ }
+
+ return AsValue(b.String()), nil
+}
+
+func filterAdd(in *Value, param *Value) (*Value, *Error) {
+ if in.IsNumber() && param.IsNumber() {
+ if in.IsFloat() || param.IsFloat() {
+ return AsValue(in.Float() + param.Float()), nil
+ }
+ return AsValue(in.Integer() + param.Integer()), nil
+ }
+ // If in/param is not a number, we're relying on the
+ // Value's String() convertion and just add them both together
+ return AsValue(in.String() + param.String()), nil
+}
+
+func filterAddslashes(in *Value, param *Value) (*Value, *Error) {
+ output := strings.Replace(in.String(), "\\", "\\\\", -1)
+ output = strings.Replace(output, "\"", "\\\"", -1)
+ output = strings.Replace(output, "'", "\\'", -1)
+ return AsValue(output), nil
+}
+
+func filterCut(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(strings.Replace(in.String(), param.String(), "", -1)), nil
+}
+
+func filterLength(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(in.Len()), nil
+}
+
+func filterLengthis(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(in.Len() == param.Integer()), nil
+}
+
+func filterDefault(in *Value, param *Value) (*Value, *Error) {
+ if !in.IsTrue() {
+ return param, nil
+ }
+ return in, nil
+}
+
+func filterDefaultIfNone(in *Value, param *Value) (*Value, *Error) {
+ if in.IsNil() {
+ return param, nil
+ }
+ return in, nil
+}
+
+func filterDivisibleby(in *Value, param *Value) (*Value, *Error) {
+ if param.Integer() == 0 {
+ return AsValue(false), nil
+ }
+ return AsValue(in.Integer()%param.Integer() == 0), nil
+}
+
+func filterFirst(in *Value, param *Value) (*Value, *Error) {
+ if in.CanSlice() && in.Len() > 0 {
+ return in.Index(0), nil
+ }
+ return AsValue(""), nil
+}
+
// filterFloatformat implements the 'floatformat' filter: it renders the
// input as a float with a controlled number of decimal places.
//
// Parameter semantics:
//   - missing/non-numeric parameter: one decimal place, but whole numbers
//     are printed without any decimals ("trimmed")
//   - positive integer n: always exactly n decimal places
//   - negative integer -n (or 0): n decimal places, except whole numbers,
//     which are printed without decimals
func filterFloatformat(in *Value, param *Value) (*Value, *Error) {
	val := in.Float()

	decimals := -1
	if !param.IsNil() {
		// Any argument provided?
		decimals = param.Integer()
	}

	// if the argument is not a number (e. g. empty), the default
	// behaviour is trim the result
	trim := !param.IsNumber()

	if decimals <= 0 {
		// argument is negative or zero, so we
		// want the output being trimmed
		decimals = -decimals
		trim = true
	}

	if trim {
		// Remove zeroes
		if float64(int(val)) == val {
			// Whole number: print as an integer, no decimal point at all.
			return AsValue(in.Integer()), nil
		}
	}

	return AsValue(strconv.FormatFloat(val, 'f', decimals, 64)), nil
}
+
+func filterGetdigit(in *Value, param *Value) (*Value, *Error) {
+ i := param.Integer()
+ l := len(in.String()) // do NOT use in.Len() here!
+ if i <= 0 || i > l {
+ return in, nil
+ }
+ return AsValue(in.String()[l-i] - 48), nil
+}
+
+const filterIRIChars = "/#%[]=:;$&()+,!?*@'~"
+
+func filterIriencode(in *Value, param *Value) (*Value, *Error) {
+ var b bytes.Buffer
+
+ sin := in.String()
+ for _, r := range sin {
+ if strings.IndexRune(filterIRIChars, r) >= 0 {
+ b.WriteRune(r)
+ } else {
+ b.WriteString(url.QueryEscape(string(r)))
+ }
+ }
+
+ return AsValue(b.String()), nil
+}
+
+func filterJoin(in *Value, param *Value) (*Value, *Error) {
+ if !in.CanSlice() {
+ return in, nil
+ }
+ sep := param.String()
+ sl := make([]string, 0, in.Len())
+ for i := 0; i < in.Len(); i++ {
+ sl = append(sl, in.Index(i).String())
+ }
+ return AsValue(strings.Join(sl, sep)), nil
+}
+
+func filterLast(in *Value, param *Value) (*Value, *Error) {
+ if in.CanSlice() && in.Len() > 0 {
+ return in.Index(in.Len() - 1), nil
+ }
+ return AsValue(""), nil
+}
+
+func filterUpper(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(strings.ToUpper(in.String())), nil
+}
+
+func filterLower(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(strings.ToLower(in.String())), nil
+}
+
+func filterMakelist(in *Value, param *Value) (*Value, *Error) {
+ s := in.String()
+ result := make([]string, 0, len(s))
+ for _, c := range s {
+ result = append(result, string(c))
+ }
+ return AsValue(result), nil
+}
+
+func filterCapfirst(in *Value, param *Value) (*Value, *Error) {
+ if in.Len() <= 0 {
+ return AsValue(""), nil
+ }
+ t := in.String()
+ r, size := utf8.DecodeRuneInString(t)
+ return AsValue(strings.ToUpper(string(r)) + t[size:]), nil
+}
+
+func filterCenter(in *Value, param *Value) (*Value, *Error) {
+ width := param.Integer()
+ slen := in.Len()
+ if width <= slen {
+ return in, nil
+ }
+
+ spaces := width - slen
+ left := spaces/2 + spaces%2
+ right := spaces / 2
+
+ return AsValue(fmt.Sprintf("%s%s%s", strings.Repeat(" ", left),
+ in.String(), strings.Repeat(" ", right))), nil
+}
+
+func filterDate(in *Value, param *Value) (*Value, *Error) {
+ t, isTime := in.Interface().(time.Time)
+ if !isTime {
+ return nil, &Error{
+ Sender: "filter:date",
+ ErrorMsg: "Filter input argument must be of type 'time.Time'.",
+ }
+ }
+ return AsValue(t.Format(param.String())), nil
+}
+
+func filterFloat(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(in.Float()), nil
+}
+
+func filterInteger(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(in.Integer()), nil
+}
+
+func filterLinebreaks(in *Value, param *Value) (*Value, *Error) {
+ if in.Len() == 0 {
+ return in, nil
+ }
+
+ var b bytes.Buffer
+
+ // Newline =
+ // Double newline = ...
+ lines := strings.Split(in.String(), "\n")
+ lenlines := len(lines)
+
+ opened := false
+
+ for idx, line := range lines {
+
+ if !opened {
+ b.WriteString("")
+ opened = true
+ }
+
+ b.WriteString(line)
+
+ if idx < lenlines-1 && strings.TrimSpace(lines[idx]) != "" {
+ // We've not reached the end
+ if strings.TrimSpace(lines[idx+1]) == "" {
+ // Next line is empty
+ if opened {
+ b.WriteString("
")
+ opened = false
+ }
+ } else {
+ b.WriteString("
")
+ }
+ }
+ }
+
+ if opened {
+ b.WriteString("
")
+ }
+
+ return AsValue(b.String()), nil
+}
+
+func filterLinebreaksbr(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(strings.Replace(in.String(), "\n", "
", -1)), nil
+}
+
+func filterLinenumbers(in *Value, param *Value) (*Value, *Error) {
+ lines := strings.Split(in.String(), "\n")
+ output := make([]string, 0, len(lines))
+ for idx, line := range lines {
+ output = append(output, fmt.Sprintf("%d. %s", idx+1, line))
+ }
+ return AsValue(strings.Join(output, "\n")), nil
+}
+
+func filterLjust(in *Value, param *Value) (*Value, *Error) {
+ times := param.Integer() - in.Len()
+ if times < 0 {
+ times = 0
+ }
+ return AsValue(fmt.Sprintf("%s%s", in.String(), strings.Repeat(" ", times))), nil
+}
+
+func filterUrlencode(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(url.QueryEscape(in.String())), nil
+}
+
+// TODO: This regexp could do some work
+var filterUrlizeURLRegexp = regexp.MustCompile(`((((http|https)://)|www\.|((^|[ ])[0-9A-Za-z_\-]+(\.com|\.net|\.org|\.info|\.biz|\.de))))(?U:.*)([ ]+|$)`)
+var filterUrlizeEmailRegexp = regexp.MustCompile(`(\w+@\w+\.\w{2,4})`)
+
+func filterUrlizeHelper(input string, autoescape bool, trunc int) string {
+ sout := filterUrlizeURLRegexp.ReplaceAllStringFunc(input, func(raw_url string) string {
+ var prefix string
+ var suffix string
+ if strings.HasPrefix(raw_url, " ") {
+ prefix = " "
+ }
+ if strings.HasSuffix(raw_url, " ") {
+ suffix = " "
+ }
+
+ raw_url = strings.TrimSpace(raw_url)
+
+ t, err := ApplyFilter("iriencode", AsValue(raw_url), nil)
+ if err != nil {
+ panic(err)
+ }
+ url := t.String()
+
+ if !strings.HasPrefix(url, "http") {
+ url = fmt.Sprintf("http://%s", url)
+ }
+
+ title := raw_url
+
+ if trunc > 3 && len(title) > trunc {
+ title = fmt.Sprintf("%s...", title[:trunc-3])
+ }
+
+ if autoescape {
+ t, err := ApplyFilter("escape", AsValue(title), nil)
+ if err != nil {
+ panic(err)
+ }
+ title = t.String()
+ }
+
+ return fmt.Sprintf(`%s%s%s`, prefix, url, title, suffix)
+ })
+
+ sout = filterUrlizeEmailRegexp.ReplaceAllStringFunc(sout, func(mail string) string {
+
+ title := mail
+
+ if trunc > 3 && len(title) > trunc {
+ title = fmt.Sprintf("%s...", title[:trunc-3])
+ }
+
+ return fmt.Sprintf(`%s`, mail, title)
+ })
+
+ return sout
+}
+
+func filterUrlize(in *Value, param *Value) (*Value, *Error) {
+ autoescape := true
+ if param.IsBool() {
+ autoescape = param.Bool()
+ }
+
+ return AsValue(filterUrlizeHelper(in.String(), autoescape, -1)), nil
+}
+
+func filterUrlizetrunc(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(filterUrlizeHelper(in.String(), true, param.Integer())), nil
+}
+
+func filterStringformat(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(fmt.Sprintf(param.String(), in.Interface())), nil
+}
+
+var reStriptags = regexp.MustCompile("<[^>]*?>")
+
+func filterStriptags(in *Value, param *Value) (*Value, *Error) {
+ s := in.String()
+
+ // Strip all tags
+ s = reStriptags.ReplaceAllString(s, "")
+
+ return AsValue(strings.TrimSpace(s)), nil
+}
+
+// https://en.wikipedia.org/wiki/Phoneword
+var filterPhone2numericMap = map[string]string{
+ "a": "2", "b": "2", "c": "2", "d": "3", "e": "3", "f": "3", "g": "4", "h": "4", "i": "4", "j": "5", "k": "5",
+ "l": "5", "m": "6", "n": "6", "o": "6", "p": "7", "q": "7", "r": "7", "s": "7", "t": "8", "u": "8", "v": "8",
+ "w": "9", "x": "9", "y": "9", "z": "9",
+}
+
+func filterPhone2numeric(in *Value, param *Value) (*Value, *Error) {
+ sin := in.String()
+ for k, v := range filterPhone2numericMap {
+ sin = strings.Replace(sin, k, v, -1)
+ sin = strings.Replace(sin, strings.ToUpper(k), v, -1)
+ }
+ return AsValue(sin), nil
+}
+
+func filterPluralize(in *Value, param *Value) (*Value, *Error) {
+ if in.IsNumber() {
+ // Works only on numbers
+ if param.Len() > 0 {
+ endings := strings.Split(param.String(), ",")
+ if len(endings) > 2 {
+ return nil, &Error{
+ Sender: "filter:pluralize",
+ ErrorMsg: "You cannot pass more than 2 arguments to filter 'pluralize'.",
+ }
+ }
+ if len(endings) == 1 {
+ // 1 argument
+ if in.Integer() != 1 {
+ return AsValue(endings[0]), nil
+ }
+ } else {
+ if in.Integer() != 1 {
+ // 2 arguments
+ return AsValue(endings[1]), nil
+ }
+ return AsValue(endings[0]), nil
+ }
+ } else {
+ if in.Integer() != 1 {
+ // return default 's'
+ return AsValue("s"), nil
+ }
+ }
+
+ return AsValue(""), nil
+ }
+ return nil, &Error{
+ Sender: "filter:pluralize",
+ ErrorMsg: "Filter 'pluralize' does only work on numbers.",
+ }
+}
+
+func filterRandom(in *Value, param *Value) (*Value, *Error) {
+ if !in.CanSlice() || in.Len() <= 0 {
+ return in, nil
+ }
+ i := rand.Intn(in.Len())
+ return in.Index(i), nil
+}
+
+func filterRemovetags(in *Value, param *Value) (*Value, *Error) {
+ s := in.String()
+ tags := strings.Split(param.String(), ",")
+
+ // Strip only specific tags
+ for _, tag := range tags {
+ re := regexp.MustCompile(fmt.Sprintf("?%s/?>", tag))
+ s = re.ReplaceAllString(s, "")
+ }
+
+ return AsValue(strings.TrimSpace(s)), nil
+}
+
+func filterRjust(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(fmt.Sprintf(fmt.Sprintf("%%%ds", param.Integer()), in.String())), nil
+}
+
+func filterSlice(in *Value, param *Value) (*Value, *Error) {
+ comp := strings.Split(param.String(), ":")
+ if len(comp) != 2 {
+ return nil, &Error{
+ Sender: "filter:slice",
+ ErrorMsg: "Slice string must have the format 'from:to' [from/to can be omitted, but the ':' is required]",
+ }
+ }
+
+ if !in.CanSlice() {
+ return in, nil
+ }
+
+ from := AsValue(comp[0]).Integer()
+ to := in.Len()
+
+ if from > to {
+ from = to
+ }
+
+ vto := AsValue(comp[1]).Integer()
+ if vto >= from && vto <= in.Len() {
+ to = vto
+ }
+
+ return in.Slice(from, to), nil
+}
+
+func filterTitle(in *Value, param *Value) (*Value, *Error) {
+ if !in.IsString() {
+ return AsValue(""), nil
+ }
+ return AsValue(strings.Title(strings.ToLower(in.String()))), nil
+}
+
+func filterWordcount(in *Value, param *Value) (*Value, *Error) {
+ return AsValue(len(strings.Fields(in.String()))), nil
+}
+
+func filterWordwrap(in *Value, param *Value) (*Value, *Error) {
+ words := strings.Fields(in.String())
+ wordsLen := len(words)
+ wrapAt := param.Integer()
+ if wrapAt <= 0 {
+ return in, nil
+ }
+
+ linecount := wordsLen/wrapAt + wordsLen%wrapAt
+ lines := make([]string, 0, linecount)
+ for i := 0; i < linecount; i++ {
+ lines = append(lines, strings.Join(words[wrapAt*i:min(wrapAt*(i+1), wordsLen)], " "))
+ }
+ return AsValue(strings.Join(lines, "\n")), nil
+}
+
+func filterYesno(in *Value, param *Value) (*Value, *Error) {
+ choices := map[int]string{
+ 0: "yes",
+ 1: "no",
+ 2: "maybe",
+ }
+ paramString := param.String()
+ customChoices := strings.Split(paramString, ",")
+ if len(paramString) > 0 {
+ if len(customChoices) > 3 {
+ return nil, &Error{
+ Sender: "filter:yesno",
+ ErrorMsg: fmt.Sprintf("You cannot pass more than 3 options to the 'yesno'-filter (got: '%s').", paramString),
+ }
+ }
+ if len(customChoices) < 2 {
+ return nil, &Error{
+ Sender: "filter:yesno",
+ ErrorMsg: fmt.Sprintf("You must pass either no or at least 2 arguments to the 'yesno'-filter (got: '%s').", paramString),
+ }
+ }
+
+ // Map to the options now
+ choices[0] = customChoices[0]
+ choices[1] = customChoices[1]
+ if len(customChoices) == 3 {
+ choices[2] = customChoices[2]
+ }
+ }
+
+ // maybe
+ if in.IsNil() {
+ return AsValue(choices[2]), nil
+ }
+
+ // yes
+ if in.IsTrue() {
+ return AsValue(choices[0]), nil
+ }
+
+ // no
+ return AsValue(choices[1]), nil
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/helpers.go b/Godeps/_workspace/src/github.com/flosch/pongo2/helpers.go
new file mode 100644
index 0000000..880dbc0
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/helpers.go
@@ -0,0 +1,15 @@
+package pongo2
+
// max returns the larger of a and b.
func max(a, b int) int {
	if a < b {
		return b
	}
	return a
}

// min returns the smaller of a and b.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/lexer.go b/Godeps/_workspace/src/github.com/flosch/pongo2/lexer.go
new file mode 100644
index 0000000..1698e41
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/lexer.go
@@ -0,0 +1,421 @@
+package pongo2
+
+import (
+ "fmt"
+ "strings"
+ "unicode/utf8"
+)
+
+const (
+ TokenError = iota
+ EOF
+
+ TokenHTML
+
+ TokenKeyword
+ TokenIdentifier
+ TokenString
+ TokenNumber
+ TokenSymbol
+)
+
+var (
+ tokenSpaceChars = " \n\r\t"
+ tokenIdentifierChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"
+ tokenIdentifierCharsWithDigits = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789"
+ tokenDigits = "0123456789"
+
+ // Available symbols in pongo2 (within filters/tag)
+ TokenSymbols = []string{
+ // 3-Char symbols
+
+ // 2-Char symbols
+ "==", ">=", "<=", "&&", "||", "{{", "}}", "{%", "%}", "!=", "<>",
+
+ // 1-Char symbol
+ "(", ")", "+", "-", "*", "<", ">", "/", "^", ",", ".", "!", "|", ":", "=", "%",
+ }
+
+ // Available keywords in pongo2
+ TokenKeywords = []string{"in", "and", "or", "not", "true", "false", "as", "export"}
+)
+
+type TokenType int
+type Token struct {
+ Filename string
+ Typ TokenType
+ Val string
+ Line int
+ Col int
+}
+
+type lexerStateFn func() lexerStateFn
+type lexer struct {
+ name string
+ input string
+ start int // start pos of the item
+ pos int // current pos
+ width int // width of last rune
+ tokens []*Token
+ errored bool
+ startline int
+ startcol int
+ line int
+ col int
+
+ inVerbatim bool
+ verbatimName string
+}
+
+func (t *Token) String() string {
+ val := t.Val
+ if len(val) > 1000 {
+ val = fmt.Sprintf("%s...%s", val[:10], val[len(val)-5:len(val)])
+ }
+
+ typ := ""
+ switch t.Typ {
+ case TokenHTML:
+ typ = "HTML"
+ case TokenError:
+ typ = "Error"
+ case TokenIdentifier:
+ typ = "Identifier"
+ case TokenKeyword:
+ typ = "Keyword"
+ case TokenNumber:
+ typ = "Number"
+ case TokenString:
+ typ = "String"
+ case TokenSymbol:
+ typ = "Symbol"
+ default:
+ typ = "Unknown"
+ }
+
+ return fmt.Sprintf("",
+ typ, t.Typ, val, t.Line, t.Col)
+}
+
+func lex(name string, input string) ([]*Token, *Error) {
+ l := &lexer{
+ name: name,
+ input: input,
+ tokens: make([]*Token, 0, 100),
+ line: 1,
+ col: 1,
+ startline: 1,
+ startcol: 1,
+ }
+ l.run()
+ if l.errored {
+ errtoken := l.tokens[len(l.tokens)-1]
+ return nil, &Error{
+ Filename: name,
+ Line: errtoken.Line,
+ Column: errtoken.Col,
+ Sender: "lexer",
+ ErrorMsg: errtoken.Val,
+ }
+ }
+ return l.tokens, nil
+}
+
+func (l *lexer) value() string {
+ return l.input[l.start:l.pos]
+}
+
+func (l *lexer) length() int {
+ return l.pos - l.start
+}
+
// emit appends the text between l.start and l.pos as a token of type t to
// the token list, then advances the start markers (offset, line, column)
// past the emitted text so the next token begins here.
func (l *lexer) emit(t TokenType) {
	tok := &Token{
		Filename: l.name,
		Typ: t,
		Val: l.value(),
		Line: l.startline,
		Col: l.startcol,
	}

	if t == TokenString {
		// Escape sequence \" in strings
		tok.Val = strings.Replace(tok.Val, `\"`, `"`, -1)
		tok.Val = strings.Replace(tok.Val, `\\`, `\`, -1)
	}

	l.tokens = append(l.tokens, tok)
	l.start = l.pos
	l.startline = l.line
	l.startcol = l.col
}
+
+func (l *lexer) next() rune {
+ if l.pos >= len(l.input) {
+ l.width = 0
+ return EOF
+ }
+ r, w := utf8.DecodeRuneInString(l.input[l.pos:])
+ l.width = w
+ l.pos += l.width
+ l.col += l.width
+ return r
+}
+
+func (l *lexer) backup() {
+ l.pos -= l.width
+ l.col -= l.width
+}
+
+func (l *lexer) peek() rune {
+ r := l.next()
+ l.backup()
+ return r
+}
+
+func (l *lexer) ignore() {
+ l.start = l.pos
+ l.startline = l.line
+ l.startcol = l.col
+}
+
+func (l *lexer) accept(what string) bool {
+ if strings.IndexRune(what, l.next()) >= 0 {
+ return true
+ }
+ l.backup()
+ return false
+}
+
+func (l *lexer) acceptRun(what string) {
+ for strings.IndexRune(what, l.next()) >= 0 {
+ }
+ l.backup()
+}
+
+func (l *lexer) errorf(format string, args ...interface{}) lexerStateFn {
+ t := &Token{
+ Filename: l.name,
+ Typ: TokenError,
+ Val: fmt.Sprintf(format, args...),
+ Line: l.startline,
+ Col: l.startcol,
+ }
+ l.tokens = append(l.tokens, t)
+ l.errored = true
+ l.startline = l.line
+ l.startcol = l.col
+ return nil
+}
+
+func (l *lexer) eof() bool {
+ return l.start >= len(l.input)-1
+}
+
+func (l *lexer) run() {
+ for {
+ // TODO: Support verbatim tag names
+ // https://docs.djangoproject.com/en/dev/ref/templates/builtins/#verbatim
+ if l.inVerbatim {
+ name := l.verbatimName
+ if name != "" {
+ name += " "
+ }
+ if strings.HasPrefix(l.input[l.pos:], fmt.Sprintf("{%% endverbatim %s%%}", name)) { // end verbatim
+ if l.pos > l.start {
+ l.emit(TokenHTML)
+ }
+ w := len("{% endverbatim %}")
+ l.pos += w
+ l.col += w
+ l.ignore()
+ l.inVerbatim = false
+ }
+ } else if strings.HasPrefix(l.input[l.pos:], "{% verbatim %}") { // tag
+ if l.pos > l.start {
+ l.emit(TokenHTML)
+ }
+ l.inVerbatim = true
+ w := len("{% verbatim %}")
+ l.pos += w
+ l.col += w
+ l.ignore()
+ }
+
+ if !l.inVerbatim {
+ // Ignore single-line comments {# ... #}
+ if strings.HasPrefix(l.input[l.pos:], "{#") {
+ if l.pos > l.start {
+ l.emit(TokenHTML)
+ }
+
+ l.pos += 2 // pass '{#'
+ l.col += 2
+
+ for {
+ switch l.peek() {
+ case EOF:
+ l.errorf("Single-line comment not closed.")
+ return
+ case '\n':
+ l.errorf("Newline not permitted in a single-line comment.")
+ return
+ }
+
+ if strings.HasPrefix(l.input[l.pos:], "#}") {
+ l.pos += 2 // pass '#}'
+ l.col += 2
+ break
+ }
+
+ l.next()
+ }
+ l.ignore() // ignore whole comment
+
+ // Comment skipped
+ continue // next token
+ }
+
+ if strings.HasPrefix(l.input[l.pos:], "{{") || // variable
+ strings.HasPrefix(l.input[l.pos:], "{%") { // tag
+ if l.pos > l.start {
+ l.emit(TokenHTML)
+ }
+ l.tokenize()
+ if l.errored {
+ return
+ }
+ continue
+ }
+ }
+
+ switch l.peek() {
+ case '\n':
+ l.line++
+ l.col = 0
+ }
+ if l.next() == EOF {
+ break
+ }
+ }
+
+ if l.pos > l.start {
+ l.emit(TokenHTML)
+ }
+
+ if l.inVerbatim {
+ l.errorf("verbatim-tag not closed, got EOF.")
+ }
+}
+
+func (l *lexer) tokenize() {
+ for state := l.stateCode; state != nil; {
+ state = state()
+ }
+}
+
+func (l *lexer) stateCode() lexerStateFn {
+outer_loop:
+ for {
+ switch {
+ case l.accept(tokenSpaceChars):
+ if l.value() == "\n" {
+ return l.errorf("Newline not allowed within tag/variable.")
+ }
+ l.ignore()
+ continue
+ case l.accept(tokenIdentifierChars):
+ return l.stateIdentifier
+ case l.accept(tokenDigits):
+ return l.stateNumber
+ case l.accept(`"`):
+ return l.stateString
+ }
+
+ // Check for symbol
+ for _, sym := range TokenSymbols {
+ if strings.HasPrefix(l.input[l.start:], sym) {
+ l.pos += len(sym)
+ l.col += l.length()
+ l.emit(TokenSymbol)
+
+ if sym == "%}" || sym == "}}" {
+ // Tag/variable end, return after emit
+ return nil
+ }
+
+ continue outer_loop
+ }
+ }
+
+ if l.pos < len(l.input) {
+ return l.errorf("Unknown character: %q (%d)", l.peek(), l.peek())
+ }
+
+ break
+ }
+
+ // Normal shut down
+ return nil
+}
+
+func (l *lexer) stateIdentifier() lexerStateFn {
+ l.acceptRun(tokenIdentifierChars)
+ l.acceptRun(tokenIdentifierCharsWithDigits)
+ for _, kw := range TokenKeywords {
+ if kw == l.value() {
+ l.emit(TokenKeyword)
+ return l.stateCode
+ }
+ }
+ l.emit(TokenIdentifier)
+ return l.stateCode
+}
+
+func (l *lexer) stateNumber() lexerStateFn {
+ l.acceptRun(tokenDigits)
+ /*
+ Maybe context-sensitive number lexing?
+ * comments.0.Text // first comment
+ * usercomments.1.0 // second user, first comment
+ * if (score >= 8.5) // 8.5 as a number
+
+ if l.peek() == '.' {
+ l.accept(".")
+ if !l.accept(tokenDigits) {
+ return l.errorf("Malformed number.")
+ }
+ l.acceptRun(tokenDigits)
+ }
+ */
+ l.emit(TokenNumber)
+ return l.stateCode
+}
+
// stateString lexes a double-quoted string literal. The opening quote has
// already been consumed by stateCode. The emitted token value excludes both
// quotes; \" and \\ escape sequences are accepted here and unescaped later
// in emit().
func (l *lexer) stateString() lexerStateFn {
	l.ignore()
	l.startcol-- // we're starting the position at the first "
	for !l.accept(`"`) {
		switch l.next() {
		case '\\':
			// escape sequence
			switch l.peek() {
			case '"', '\\':
				l.next()
			default:
				return l.errorf("Unknown escape sequence: \\%c", l.peek())
			}
		case EOF:
			return l.errorf("Unexpected EOF, string not closed.")
		case '\n':
			return l.errorf("Newline in string is not allowed.")
		}
	}
	// Un-read the closing quote so it is not part of the token value.
	l.backup()
	l.emit(TokenString)

	// Skip past the closing quote before resuming normal lexing.
	l.next()
	l.ignore()

	return l.stateCode
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/nodes.go b/Godeps/_workspace/src/github.com/flosch/pongo2/nodes.go
new file mode 100644
index 0000000..5b039cd
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/nodes.go
@@ -0,0 +1,16 @@
+package pongo2
+
+// The root document
+type nodeDocument struct {
+ Nodes []INode
+}
+
+func (doc *nodeDocument) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ for _, n := range doc.Nodes {
+ err := n.Execute(ctx, writer)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/nodes_html.go b/Godeps/_workspace/src/github.com/flosch/pongo2/nodes_html.go
new file mode 100644
index 0000000..9680285
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/nodes_html.go
@@ -0,0 +1,10 @@
+package pongo2
+
+type nodeHTML struct {
+ token *Token
+}
+
+func (n *nodeHTML) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ writer.WriteString(n.token.Val)
+ return nil
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/nodes_wrapper.go b/Godeps/_workspace/src/github.com/flosch/pongo2/nodes_wrapper.go
new file mode 100644
index 0000000..d1bcb8d
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/nodes_wrapper.go
@@ -0,0 +1,16 @@
+package pongo2
+
+type NodeWrapper struct {
+ Endtag string
+ nodes []INode
+}
+
+func (wrapper *NodeWrapper) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ for _, n := range wrapper.nodes {
+ err := n.Execute(ctx, writer)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/parser.go b/Godeps/_workspace/src/github.com/flosch/pongo2/parser.go
new file mode 100644
index 0000000..bed5061
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/parser.go
@@ -0,0 +1,265 @@
+package pongo2
+
+import (
+ "fmt"
+ "strings"
+)
+
// INode is implemented by every node of the parsed template tree;
// Execute renders the node into the given writer.
type INode interface {
	Execute(*ExecutionContext, TemplateWriter) *Error
}

// IEvaluator is an INode that additionally yields a *Value when
// evaluated (variables, literals and expressions).
type IEvaluator interface {
	INode
	GetPositionToken() *Token
	Evaluate(*ExecutionContext) (*Value, *Error)
	FilterApplied(name string) bool
}

// The parser provides you a comprehensive and easy tool to
// work with the template document and arguments provided by
// the user for your custom tag.
//
// The parser works on a token list which will be provided by pongo2.
// A token is a unit you can work with. Tokens are either of type identifier,
// string, number, keyword, HTML or symbol.
//
// (See Token's documentation for more about tokens)
type Parser struct {
	name      string   // origin name used in error messages
	idx       int      // index of the next unconsumed token
	tokens    []*Token
	lastToken *Token   // kept for position info in EOF error messages

	// if the parser parses a template document, here will be
	// a reference to it (needed to access the template through Tags)
	template *Template
}

// Creates a new parser to parse tokens.
// Used inside pongo2 to parse documents and to provide an easy-to-use
// parser for tag authors
func newParser(name string, tokens []*Token, template *Template) *Parser {
	p := &Parser{
		name:     name,
		tokens:   tokens,
		template: template,
	}
	if len(tokens) > 0 {
		// Remember the last token so errors at EOF can still point somewhere.
		p.lastToken = tokens[len(tokens)-1]
	}
	return p
}
+
// Consume one token. It will be gone forever.
func (p *Parser) Consume() {
	p.ConsumeN(1)
}

// Consume N tokens. They will be gone forever.
func (p *Parser) ConsumeN(count int) {
	p.idx += count
}

// Returns the current token, or nil when all tokens are consumed.
func (p *Parser) Current() *Token {
	return p.Get(p.idx)
}
+
+// Returns the CURRENT token if the given type matches.
+// Consumes this token on success.
+func (p *Parser) MatchType(typ TokenType) *Token {
+ if t := p.PeekType(typ); t != nil {
+ p.Consume()
+ return t
+ }
+ return nil
+}
+
+// Returns the CURRENT token if the given type AND value matches.
+// Consumes this token on success.
+func (p *Parser) Match(typ TokenType, val string) *Token {
+ if t := p.Peek(typ, val); t != nil {
+ p.Consume()
+ return t
+ }
+ return nil
+}
+
+// Returns the CURRENT token if the given type AND *one* of
+// the given values matches.
+// Consumes this token on success.
+func (p *Parser) MatchOne(typ TokenType, vals ...string) *Token {
+ for _, val := range vals {
+ if t := p.Peek(typ, val); t != nil {
+ p.Consume()
+ return t
+ }
+ }
+ return nil
+}
+
+// Returns the CURRENT token if the given type matches.
+// It DOES NOT consume the token.
+func (p *Parser) PeekType(typ TokenType) *Token {
+ return p.PeekTypeN(0, typ)
+}
+
+// Returns the CURRENT token if the given type AND value matches.
+// It DOES NOT consume the token.
+func (p *Parser) Peek(typ TokenType, val string) *Token {
+ return p.PeekN(0, typ, val)
+}
+
+// Returns the CURRENT token if the given type AND *one* of
+// the given values matches.
+// It DOES NOT consume the token.
+func (p *Parser) PeekOne(typ TokenType, vals ...string) *Token {
+ for _, v := range vals {
+ t := p.PeekN(0, typ, v)
+ if t != nil {
+ return t
+ }
+ }
+ return nil
+}
+
+// Returns the tokens[current position + shift] token if the
+// given type AND value matches for that token.
+// DOES NOT consume the token.
+func (p *Parser) PeekN(shift int, typ TokenType, val string) *Token {
+ t := p.Get(p.idx + shift)
+ if t != nil {
+ if t.Typ == typ && t.Val == val {
+ return t
+ }
+ }
+ return nil
+}
+
+// Returns the tokens[current position + shift] token if the given type matches.
+// DOES NOT consume the token for that token.
+func (p *Parser) PeekTypeN(shift int, typ TokenType) *Token {
+ t := p.Get(p.idx + shift)
+ if t != nil {
+ if t.Typ == typ {
+ return t
+ }
+ }
+ return nil
+}
+
+// Returns the UNCONSUMED token count.
+func (p *Parser) Remaining() int {
+ return len(p.tokens) - p.idx
+}
+
+// Returns the total token count.
+func (p *Parser) Count() int {
+ return len(p.tokens)
+}
+
+// Returns tokens[i] or NIL (if i >= len(tokens))
+func (p *Parser) Get(i int) *Token {
+ if i < len(p.tokens) {
+ return p.tokens[i]
+ }
+ return nil
+}
+
+// Returns tokens[current-position + shift] or NIL
+// (if (current-position + i) >= len(tokens))
+func (p *Parser) GetR(shift int) *Token {
+ i := p.idx + shift
+ return p.Get(i)
+}
+
// Produces a nice error message and returns an error-object.
// The 'token'-argument is optional. If provided, it will take
// the token's position information. If not provided, it will
// automatically use the CURRENT token's position information.
func (p *Parser) Error(msg string, token *Token) *Error {
	if token == nil {
		// Set current token
		token = p.Current()
		if token == nil {
			// Set to last token (we're probably at EOF)
			if len(p.tokens) > 0 {
				token = p.tokens[len(p.tokens)-1]
			}
		}
	}
	// token may still be nil here (empty token list); line/col fall back to 0.
	var line, col int
	if token != nil {
		line = token.Line
		col = token.Col
	}
	return &Error{
		Template: p.template,
		Filename: p.name,
		Sender:   "parser",
		Line:     line,
		Column:   col,
		Token:    token,
		ErrorMsg: msg,
	}
}
+
// Wraps all nodes between starting tag and "{% endtag %}" and provides
// one simple interface to execute the wrapped nodes.
// It returns a parser to process provided arguments to the tag.
func (p *Parser) WrapUntilTag(names ...string) (*NodeWrapper, *Parser, *Error) {
	wrapper := &NodeWrapper{}

	// Tokens between the end tag's name and its closing "%}"; handed
	// back to the caller wrapped in a fresh argument parser.
	var tagArgs []*Token

	for p.Remaining() > 0 {
		// New tag, check whether we have to stop wrapping here
		if p.Peek(TokenSymbol, "{%") != nil {
			tagIdent := p.PeekTypeN(1, TokenIdentifier)

			if tagIdent != nil {
				// We've found a (!) end-tag

				found := false
				for _, n := range names {
					if tagIdent.Val == n {
						found = true
						break
					}
				}

				// We only process the tag if we've found an end tag
				if found {
					// Okay, endtag found.
					p.ConsumeN(2) // '{%' tagname

					// Collect every argument token up to (excluding) "%}".
					for {
						if p.Match(TokenSymbol, "%}") != nil {
							// Okay, end the wrapping here
							wrapper.Endtag = tagIdent.Val
							return wrapper, newParser(p.template.name, tagArgs, p.template), nil
						}
						t := p.Current()
						p.Consume()
						if t == nil {
							return nil, nil, p.Error("Unexpected EOF.", p.lastToken)
						}
						tagArgs = append(tagArgs, t)
					}
				}
			}

		}

		// Otherwise process next element to be wrapped
		node, err := p.parseDocElement()
		if err != nil {
			return nil, nil, err
		}
		wrapper.nodes = append(wrapper.nodes, node)
	}

	return nil, nil, p.Error(fmt.Sprintf("Unexpected EOF, expected tag %s.", strings.Join(names, " or ")),
		p.lastToken)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/parser_document.go b/Godeps/_workspace/src/github.com/flosch/pongo2/parser_document.go
new file mode 100644
index 0000000..4ab8b93
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/parser_document.go
@@ -0,0 +1,54 @@
+package pongo2
+
// Doc = { ( Filter | Tag | HTML ) }
//
// parseDocElement parses exactly one top-level element: a raw HTML
// chunk, a "{{ ... }}" variable/filter expression or a "{% ... %}"
// tag. Callers must guarantee at least one token remains (Current()
// must not return nil).
func (p *Parser) parseDocElement() (INode, *Error) {
	t := p.Current()

	switch t.Typ {
	case TokenHTML:
		p.Consume() // consume HTML element
		return &nodeHTML{token: t}, nil
	case TokenSymbol:
		switch t.Val {
		case "{{":
			// parse variable
			variable, err := p.parseVariableElement()
			if err != nil {
				return nil, err
			}
			return variable, nil
		case "{%":
			// parse tag
			tag, err := p.parseTagElement()
			if err != nil {
				return nil, err
			}
			return tag, nil
		}
	}
	return nil, p.Error("Unexpected token (only HTML/tags/filters in templates allowed)", t)
}
+
+func (tpl *Template) parse() *Error {
+ tpl.parser = newParser(tpl.name, tpl.tokens, tpl)
+ doc, err := tpl.parser.parseDocument()
+ if err != nil {
+ return err
+ }
+ tpl.root = doc
+ return nil
+}
+
+func (p *Parser) parseDocument() (*nodeDocument, *Error) {
+ doc := &nodeDocument{}
+
+ for p.Remaining() > 0 {
+ node, err := p.parseDocElement()
+ if err != nil {
+ return nil, err
+ }
+ doc.Nodes = append(doc.Nodes, node)
+ }
+
+ return doc, nil
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/parser_expression.go b/Godeps/_workspace/src/github.com/flosch/pongo2/parser_expression.go
new file mode 100644
index 0000000..98c6580
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/parser_expression.go
@@ -0,0 +1,491 @@
+package pongo2
+
+import (
+ "fmt"
+ "math"
+)
+
// Expression is the top-level boolean combination: expr1 [and/or expr2].
type Expression struct {
	// TODO: Add location token?
	expr1   IEvaluator
	expr2   IEvaluator
	opToken *Token
}

// relationalExpression compares two simple expressions:
// expr1 [ ==|<=|>=|!=|<>|>|<|in  expr2 ].
type relationalExpression struct {
	// TODO: Add location token?
	expr1   IEvaluator
	expr2   IEvaluator
	opToken *Token
}

// simpleExpression is an additive expression with optional logical
// negation ("!"/"not") and an optional leading minus:
// [-][!] term1 [ +|- term2 ].
type simpleExpression struct {
	negate       bool
	negativeSign bool
	term1        IEvaluator
	term2        IEvaluator
	opToken      *Token
}

// term is a multiplicative expression: factor1 [ *|/|% factor2 ].
type term struct {
	// TODO: Add location token?
	factor1 IEvaluator
	factor2 IEvaluator
	opToken *Token
}

// power is an exponentiation: power1 [ ^ power2 ].
type power struct {
	// TODO: Add location token?
	power1 IEvaluator
	power2 IEvaluator
}
+
+func (expr *Expression) FilterApplied(name string) bool {
+ return expr.expr1.FilterApplied(name) && (expr.expr2 == nil ||
+ (expr.expr2 != nil && expr.expr2.FilterApplied(name)))
+}
+
+func (expr *relationalExpression) FilterApplied(name string) bool {
+ return expr.expr1.FilterApplied(name) && (expr.expr2 == nil ||
+ (expr.expr2 != nil && expr.expr2.FilterApplied(name)))
+}
+
+func (expr *simpleExpression) FilterApplied(name string) bool {
+ return expr.term1.FilterApplied(name) && (expr.term2 == nil ||
+ (expr.term2 != nil && expr.term2.FilterApplied(name)))
+}
+
+func (expr *term) FilterApplied(name string) bool {
+ return expr.factor1.FilterApplied(name) && (expr.factor2 == nil ||
+ (expr.factor2 != nil && expr.factor2.FilterApplied(name)))
+}
+
+func (expr *power) FilterApplied(name string) bool {
+ return expr.power1.FilterApplied(name) && (expr.power2 == nil ||
+ (expr.power2 != nil && expr.power2.FilterApplied(name)))
+}
+
// GetPositionToken returns the position token of the leftmost operand —
// the best location to attribute errors for the whole expression.
func (expr *Expression) GetPositionToken() *Token {
	return expr.expr1.GetPositionToken()
}

// GetPositionToken delegates to the left operand.
func (expr *relationalExpression) GetPositionToken() *Token {
	return expr.expr1.GetPositionToken()
}

// GetPositionToken delegates to the first term.
func (expr *simpleExpression) GetPositionToken() *Token {
	return expr.term1.GetPositionToken()
}

// GetPositionToken delegates to the first factor.
func (expr *term) GetPositionToken() *Token {
	return expr.factor1.GetPositionToken()
}

// GetPositionToken delegates to the base of the exponentiation.
func (expr *power) GetPositionToken() *Token {
	return expr.power1.GetPositionToken()
}
+
// Execute evaluates the expression and writes its string
// representation to the output.
func (expr *Expression) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	value, err := expr.Evaluate(ctx)
	if err != nil {
		return err
	}
	writer.WriteString(value.String())
	return nil
}

// Execute evaluates the comparison and writes its string
// representation to the output.
func (expr *relationalExpression) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	value, err := expr.Evaluate(ctx)
	if err != nil {
		return err
	}
	writer.WriteString(value.String())
	return nil
}

// Execute evaluates the expression and writes its string
// representation to the output.
func (expr *simpleExpression) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	value, err := expr.Evaluate(ctx)
	if err != nil {
		return err
	}
	writer.WriteString(value.String())
	return nil
}

// Execute evaluates the term and writes its string representation to
// the output.
func (expr *term) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	value, err := expr.Evaluate(ctx)
	if err != nil {
		return err
	}
	writer.WriteString(value.String())
	return nil
}

// Execute evaluates the exponentiation and writes its string
// representation to the output.
func (expr *power) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	value, err := expr.Evaluate(ctx)
	if err != nil {
		return err
	}
	writer.WriteString(value.String())
	return nil
}
+
+func (expr *Expression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
+ v1, err := expr.expr1.Evaluate(ctx)
+ if err != nil {
+ return nil, err
+ }
+ if expr.expr2 != nil {
+ v2, err := expr.expr2.Evaluate(ctx)
+ if err != nil {
+ return nil, err
+ }
+ switch expr.opToken.Val {
+ case "and", "&&":
+ return AsValue(v1.IsTrue() && v2.IsTrue()), nil
+ case "or", "||":
+ return AsValue(v1.IsTrue() || v2.IsTrue()), nil
+ default:
+ panic(fmt.Sprintf("unimplemented: %s", expr.opToken.Val))
+ }
+ } else {
+ return v1, nil
+ }
+}
+
// Evaluate computes the comparison. Numeric comparisons switch to
// float mode as soon as either side is a float, otherwise they run in
// integer mode. With no second operand, the first operand's value is
// passed through unchanged.
func (expr *relationalExpression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
	v1, err := expr.expr1.Evaluate(ctx)
	if err != nil {
		return nil, err
	}
	if expr.expr2 != nil {
		v2, err := expr.expr2.Evaluate(ctx)
		if err != nil {
			return nil, err
		}
		switch expr.opToken.Val {
		case "<=":
			if v1.IsFloat() || v2.IsFloat() {
				return AsValue(v1.Float() <= v2.Float()), nil
			}
			return AsValue(v1.Integer() <= v2.Integer()), nil
		case ">=":
			if v1.IsFloat() || v2.IsFloat() {
				return AsValue(v1.Float() >= v2.Float()), nil
			}
			return AsValue(v1.Integer() >= v2.Integer()), nil
		case "==":
			return AsValue(v1.EqualValueTo(v2)), nil
		case ">":
			if v1.IsFloat() || v2.IsFloat() {
				return AsValue(v1.Float() > v2.Float()), nil
			}
			return AsValue(v1.Integer() > v2.Integer()), nil
		case "<":
			if v1.IsFloat() || v2.IsFloat() {
				return AsValue(v1.Float() < v2.Float()), nil
			}
			return AsValue(v1.Integer() < v2.Integer()), nil
		case "!=", "<>":
			return AsValue(!v1.EqualValueTo(v2)), nil
		case "in":
			// membership test; exact semantics defined by Value.Contains
			return AsValue(v2.Contains(v1)), nil
		default:
			panic(fmt.Sprintf("unimplemented: %s", expr.opToken.Val))
		}
	} else {
		return v1, nil
	}
}

// Evaluate applies, in order: logical negation ("!"/"not"), the
// negative sign, and finally the additive operator with the second
// term (float math when either side is a float, integer otherwise).
func (expr *simpleExpression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
	t1, err := expr.term1.Evaluate(ctx)
	if err != nil {
		return nil, err
	}
	result := t1

	if expr.negate {
		result = result.Negate()
	}

	if expr.negativeSign {
		if result.IsNumber() {
			switch {
			case result.IsFloat():
				result = AsValue(-1 * result.Float())
			case result.IsInteger():
				result = AsValue(-1 * result.Integer())
			default:
				panic("not possible")
			}
		} else {
			return nil, ctx.Error("Negative sign on a non-number expression", expr.GetPositionToken())
		}
	}

	if expr.term2 != nil {
		t2, err := expr.term2.Evaluate(ctx)
		if err != nil {
			return nil, err
		}
		switch expr.opToken.Val {
		case "+":
			if result.IsFloat() || t2.IsFloat() {
				// Result will be a float
				return AsValue(result.Float() + t2.Float()), nil
			}
			// Result will be an integer
			return AsValue(result.Integer() + t2.Integer()), nil
		case "-":
			if result.IsFloat() || t2.IsFloat() {
				// Result will be a float
				return AsValue(result.Float() - t2.Float()), nil
			}
			// Result will be an integer
			return AsValue(result.Integer() - t2.Integer()), nil
		default:
			panic("unimplemented")
		}
	}

	return result, nil
}
+
// Evaluate computes factor1 (*|/|%) factor2. Mixed float/integer
// operands promote "*" and "/" to float math; "%" is always integer.
// Without a second factor, factor1's value is returned unchanged.
// NOTE(review): integer "/" and "%" panic on a zero divisor — confirm
// this matches upstream behavior before changing it.
func (expr *term) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
	f1, err := expr.factor1.Evaluate(ctx)
	if err != nil {
		return nil, err
	}
	if expr.factor2 != nil {
		f2, err := expr.factor2.Evaluate(ctx)
		if err != nil {
			return nil, err
		}
		switch expr.opToken.Val {
		case "*":
			if f1.IsFloat() || f2.IsFloat() {
				// Result will be float
				return AsValue(f1.Float() * f2.Float()), nil
			}
			// Result will be int
			return AsValue(f1.Integer() * f2.Integer()), nil
		case "/":
			if f1.IsFloat() || f2.IsFloat() {
				// Result will be float
				return AsValue(f1.Float() / f2.Float()), nil
			}
			// Result will be int
			return AsValue(f1.Integer() / f2.Integer()), nil
		case "%":
			// Result will be int
			return AsValue(f1.Integer() % f2.Integer()), nil
		default:
			panic("unimplemented")
		}
	} else {
		return f1, nil
	}
}

// Evaluate computes power1 ^ power2 using float math (math.Pow).
// Without an exponent, power1's value is returned unchanged.
func (expr *power) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
	p1, err := expr.power1.Evaluate(ctx)
	if err != nil {
		return nil, err
	}
	if expr.power2 != nil {
		p2, err := expr.power2.Evaluate(ctx)
		if err != nil {
			return nil, err
		}
		return AsValue(math.Pow(p1.Float(), p2.Float())), nil
	}
	return p1, nil
}
+
// parseFactor parses either a parenthesized sub-expression or a
// variable/literal (optionally with filters applied).
func (p *Parser) parseFactor() (IEvaluator, *Error) {
	if p.Match(TokenSymbol, "(") != nil {
		expr, err := p.ParseExpression()
		if err != nil {
			return nil, err
		}
		if p.Match(TokenSymbol, ")") == nil {
			return nil, p.Error("Closing bracket expected after expression", nil)
		}
		return expr, nil
	}

	return p.parseVariableOrLiteralWithFilter()
}

// parsePower parses factor [^ power]. Recursing on the right-hand
// side makes "^" right-associative.
func (p *Parser) parsePower() (IEvaluator, *Error) {
	pw := new(power)

	power1, err := p.parseFactor()
	if err != nil {
		return nil, err
	}
	pw.power1 = power1

	if p.Match(TokenSymbol, "^") != nil {
		power2, err := p.parsePower()
		if err != nil {
			return nil, err
		}
		pw.power2 = power2
	}

	if pw.power2 == nil {
		// Shortcut for faster evaluation
		return pw.power1, nil
	}

	return pw, nil
}
+
// parseTerm parses power { (*|/|%) power } and builds a
// left-associative chain of term nodes.
func (p *Parser) parseTerm() (IEvaluator, *Error) {
	returnTerm := new(term)

	factor1, err := p.parsePower()
	if err != nil {
		return nil, err
	}
	returnTerm.factor1 = factor1

	for p.PeekOne(TokenSymbol, "*", "/", "%") != nil {
		if returnTerm.opToken != nil {
			// Create new sub-term: the term built so far becomes the
			// left factor of the next one (left associativity).
			returnTerm = &term{
				factor1: returnTerm,
			}
		}

		op := p.Current()
		p.Consume()

		factor2, err := p.parsePower()
		if err != nil {
			return nil, err
		}

		returnTerm.opToken = op
		returnTerm.factor2 = factor2
	}

	if returnTerm.opToken == nil {
		// Shortcut for faster evaluation
		return returnTerm.factor1, nil
	}

	return returnTerm, nil
}
+
+func (p *Parser) parseSimpleExpression() (IEvaluator, *Error) {
+ expr := new(simpleExpression)
+
+ if sign := p.MatchOne(TokenSymbol, "+", "-"); sign != nil {
+ if sign.Val == "-" {
+ expr.negativeSign = true
+ }
+ }
+
+ if p.Match(TokenSymbol, "!") != nil || p.Match(TokenKeyword, "not") != nil {
+ expr.negate = true
+ }
+
+ term1, err := p.parseTerm()
+ if err != nil {
+ return nil, err
+ }
+ expr.term1 = term1
+
+ for p.PeekOne(TokenSymbol, "+", "-") != nil {
+ if expr.opToken != nil {
+ // New sub expr
+ expr = &simpleExpression{
+ term1: expr,
+ }
+ }
+
+ op := p.Current()
+ p.Consume()
+
+ term2, err := p.parseTerm()
+ if err != nil {
+ return nil, err
+ }
+
+ expr.term2 = term2
+ expr.opToken = op
+ }
+
+ if expr.negate == false && expr.negativeSign == false && expr.term2 == nil {
+ // Shortcut for faster evaluation
+ return expr.term1, nil
+ }
+
+ return expr, nil
+}
+
// parseRelationalExpression parses simpleExpr [cmp-op relationalExpr]
// or simpleExpr ["in" simpleExpr]. Chained comparisons recurse on the
// right-hand side, making them right-associative.
func (p *Parser) parseRelationalExpression() (IEvaluator, *Error) {
	expr1, err := p.parseSimpleExpression()
	if err != nil {
		return nil, err
	}

	expr := &relationalExpression{
		expr1: expr1,
	}

	if t := p.MatchOne(TokenSymbol, "==", "<=", ">=", "!=", "<>", ">", "<"); t != nil {
		expr2, err := p.parseRelationalExpression()
		if err != nil {
			return nil, err
		}
		expr.opToken = t
		expr.expr2 = expr2
	} else if t := p.MatchOne(TokenKeyword, "in"); t != nil {
		expr2, err := p.parseSimpleExpression()
		if err != nil {
			return nil, err
		}
		expr.opToken = t
		expr.expr2 = expr2
	}

	if expr.expr2 == nil {
		// Shortcut for faster evaluation
		return expr.expr1, nil
	}

	return expr, nil
}

// ParseExpression is the public entry point for expression parsing:
// relationalExpr [ (and|or|&&|"||") expression ].
func (p *Parser) ParseExpression() (IEvaluator, *Error) {
	rexpr1, err := p.parseRelationalExpression()
	if err != nil {
		return nil, err
	}

	exp := &Expression{
		expr1: rexpr1,
	}

	if p.PeekOne(TokenSymbol, "&&", "||") != nil || p.PeekOne(TokenKeyword, "and", "or") != nil {
		op := p.Current()
		p.Consume()
		expr2, err := p.ParseExpression()
		if err != nil {
			return nil, err
		}
		exp.expr2 = expr2
		exp.opToken = op
	}

	if exp.expr2 == nil {
		// Shortcut for faster evaluation
		return exp.expr1, nil
	}

	return exp, nil
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/pongo2.go b/Godeps/_workspace/src/github.com/flosch/pongo2/pongo2.go
new file mode 100644
index 0000000..eda3aa0
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/pongo2.go
@@ -0,0 +1,14 @@
+package pongo2
+
// Version string of this pongo2 build.
const Version = "dev"

// Must panics, if a Template couldn't successfully parsed. This is how you
// would use it:
//
//	var baseTemplate = pongo2.Must(pongo2.FromFile("templates/base.html"))
func Must(tpl *Template, err error) *Template {
	if err != nil {
		panic(err)
	}
	return tpl
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags.go
new file mode 100644
index 0000000..5168d17
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags.go
@@ -0,0 +1,132 @@
+package pongo2
+
+/* Incomplete:
+ -----------
+
+ verbatim (only the "name" argument is missing for verbatim)
+
+ Reconsideration:
+ ----------------
+
+ debug (reason: not sure what to output yet)
+ regroup / Grouping on other properties (reason: maybe too python-specific; not sure how useful this would be in Go)
+
+ Following built-in tags wont be added:
+ --------------------------------------
+
+ csrf_token (reason: web-framework specific)
+ load (reason: python-specific)
+ url (reason: web-framework specific)
+*/
+
+import (
+ "fmt"
+)
+
// INodeTag is the node interface a tag implementation must satisfy;
// it is currently identical to INode.
type INodeTag interface {
	INode
}

// This is the function signature of the tag's parser you will have
// to implement in order to create a new tag.
//
// 'doc' is providing access to the whole document while 'arguments'
// is providing access to the user's arguments to the tag:
//
//	{% your_tag_name some "arguments" 123 %}
//
// start_token will be the *Token with the tag's name in it (here: your_tag_name).
//
// Please see the Parser documentation on how to use the parser.
// See RegisterTag()'s documentation for more information about
// writing a tag as well.
type TagParser func(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error)

// tag couples a registered tag name with its parser function.
type tag struct {
	name   string
	parser TagParser
}

// Global registry of all known tags, filled by the init() functions of
// the tags_*.go files via RegisterTag.
var tags map[string]*tag

func init() {
	tags = make(map[string]*tag)
}
+
+// Registers a new tag. If there's already a tag with the same
+// name, RegisterTag will panic. You usually want to call this
+// function in the tag's init() function:
+// http://golang.org/doc/effective_go.html#init
+//
+// See http://www.florian-schlachter.de/post/pongo2/ for more about
+// writing filters and tags.
+func RegisterTag(name string, parserFn TagParser) {
+ _, existing := tags[name]
+ if existing {
+ panic(fmt.Sprintf("Tag with name '%s' is already registered.", name))
+ }
+ tags[name] = &tag{
+ name: name,
+ parser: parserFn,
+ }
+}
+
+// Replaces an already registered tag with a new implementation. Use this
+// function with caution since it allows you to change existing tag behaviour.
+func ReplaceTag(name string, parserFn TagParser) {
+ _, existing := tags[name]
+ if !existing {
+ panic(fmt.Sprintf("Tag with name '%s' does not exist (therefore cannot be overridden).", name))
+ }
+ tags[name] = &tag{
+ name: name,
+ parser: parserFn,
+ }
+}
+
+// Tag = "{%" IDENT ARGS "%}"
+func (p *Parser) parseTagElement() (INodeTag, *Error) {
+ p.Consume() // consume "{%"
+ tokenName := p.MatchType(TokenIdentifier)
+
+ // Check for identifier
+ if tokenName == nil {
+ return nil, p.Error("Tag name must be an identifier.", nil)
+ }
+
+ // Check for the existing tag
+ tag, exists := tags[tokenName.Val]
+ if !exists {
+ // Does not exists
+ return nil, p.Error(fmt.Sprintf("Tag '%s' not found (or beginning tag not provided)", tokenName.Val), tokenName)
+ }
+
+ // Check sandbox tag restriction
+ if _, isBanned := p.template.set.bannedTags[tokenName.Val]; isBanned {
+ return nil, p.Error(fmt.Sprintf("Usage of tag '%s' is not allowed (sandbox restriction active).", tokenName.Val), tokenName)
+ }
+
+ var argsToken []*Token
+ for p.Peek(TokenSymbol, "%}") == nil && p.Remaining() > 0 {
+ // Add token to args
+ argsToken = append(argsToken, p.Current())
+ p.Consume() // next token
+ }
+
+ // EOF?
+ if p.Remaining() == 0 {
+ return nil, p.Error("Unexpectedly reached EOF, no tag end found.", p.lastToken)
+ }
+
+ p.Match(TokenSymbol, "%}")
+
+ argParser := newParser(p.name, argsToken, p.template)
+ if len(argsToken) == 0 {
+ // This is done to have nice EOF error messages
+ argParser.lastToken = tokenName
+ }
+
+ p.template.level++
+ defer func() { p.template.level-- }()
+ return tag.parser(p, tokenName, argParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_autoescape.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_autoescape.go
new file mode 100644
index 0000000..590a1db
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_autoescape.go
@@ -0,0 +1,52 @@
+package pongo2
+
// tagAutoescapeNode switches the context's auto-escaping mode for the
// duration of its wrapped body.
type tagAutoescapeNode struct {
	wrapper    *NodeWrapper
	autoescape bool
}

// Execute temporarily overrides ctx.Autoescape while rendering the body.
// NOTE(review): if the body errors out, the previous autoescape mode is
// NOT restored — confirm whether the context is reused after an error.
func (node *tagAutoescapeNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	old := ctx.Autoescape
	ctx.Autoescape = node.autoescape

	err := node.wrapper.Execute(ctx, writer)
	if err != nil {
		return err
	}

	ctx.Autoescape = old

	return nil
}

// tagAutoescapeParser parses {% autoescape on|off %} ... {% endautoescape %}.
// The body is wrapped first; the mode argument is validated afterwards.
func tagAutoescapeParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	autoescapeNode := &tagAutoescapeNode{}

	wrapper, _, err := doc.WrapUntilTag("endautoescape")
	if err != nil {
		return nil, err
	}
	autoescapeNode.wrapper = wrapper

	modeToken := arguments.MatchType(TokenIdentifier)
	if modeToken == nil {
		return nil, arguments.Error("A mode is required for autoescape-tag.", nil)
	}
	if modeToken.Val == "on" {
		autoescapeNode.autoescape = true
	} else if modeToken.Val == "off" {
		autoescapeNode.autoescape = false
	} else {
		return nil, arguments.Error("Only 'on' or 'off' is valid as an autoescape-mode.", nil)
	}

	if arguments.Remaining() > 0 {
		return nil, arguments.Error("Malformed autoescape-tag arguments.", nil)
	}

	return autoescapeNode, nil
}

func init() {
	RegisterTag("autoescape", tagAutoescapeParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_block.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_block.go
new file mode 100644
index 0000000..b558930
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_block.go
@@ -0,0 +1,93 @@
+package pongo2
+
+import (
+ "fmt"
+)
+
// tagBlockNode references a named block; the content that actually
// renders comes from the most-derived child template that defines it.
type tagBlockNode struct {
	name string
}

// getBlockWrapperByName resolves the block's content: it asks the
// child-template chain first (so child overrides win) and falls back
// to the blocks registered on tpl itself.
func (node *tagBlockNode) getBlockWrapperByName(tpl *Template) *NodeWrapper {
	var t *NodeWrapper
	if tpl.child != nil {
		// First ask the child for the block
		t = node.getBlockWrapperByName(tpl.child)
	}
	if t == nil {
		// Child has no block, lets look up here at parent
		t = tpl.blocks[node.name]
	}
	return t
}

// Execute renders the resolved block content into the writer.
func (node *tagBlockNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	tpl := ctx.template
	if tpl == nil {
		panic("internal error: tpl == nil")
	}
	// Determine the block to execute
	blockWrapper := node.getBlockWrapperByName(tpl)
	if blockWrapper == nil {
		// fmt.Printf("could not find: %s\n", node.name)
		return ctx.Error("internal error: block_wrapper == nil in tagBlockNode.Execute()", nil)
	}
	err := blockWrapper.Execute(ctx, writer)
	if err != nil {
		return err
	}

	// TODO: Add support for {{ block.super }}

	return nil
}

// tagBlockParser parses {% block name %} ... {% endblock [name] %} and
// registers the wrapped content under `name` on the current template.
func tagBlockParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	if arguments.Count() == 0 {
		return nil, arguments.Error("Tag 'block' requires an identifier.", nil)
	}

	nameToken := arguments.MatchType(TokenIdentifier)
	if nameToken == nil {
		return nil, arguments.Error("First argument for tag 'block' must be an identifier.", nil)
	}

	if arguments.Remaining() != 0 {
		return nil, arguments.Error("Tag 'block' takes exactly 1 argument (an identifier).", nil)
	}

	wrapper, endtagargs, err := doc.WrapUntilTag("endblock")
	if err != nil {
		return nil, err
	}
	// An optional identifier on 'endblock' must repeat the block's name.
	if endtagargs.Remaining() > 0 {
		endtagnameToken := endtagargs.MatchType(TokenIdentifier)
		if endtagnameToken != nil {
			if endtagnameToken.Val != nameToken.Val {
				return nil, endtagargs.Error(fmt.Sprintf("Name for 'endblock' must equal to 'block'-tag's name ('%s' != '%s').",
					nameToken.Val, endtagnameToken.Val), nil)
			}
		}

		if endtagnameToken == nil || endtagargs.Remaining() > 0 {
			return nil, endtagargs.Error("Either no or only one argument (identifier) allowed for 'endblock'.", nil)
		}
	}

	tpl := doc.template
	if tpl == nil {
		panic("internal error: tpl == nil")
	}
	// Reject duplicate block names within one template.
	_, hasBlock := tpl.blocks[nameToken.Val]
	if !hasBlock {
		tpl.blocks[nameToken.Val] = wrapper
	} else {
		return nil, arguments.Error(fmt.Sprintf("Block named '%s' already defined", nameToken.Val), nil)
	}

	return &tagBlockNode{name: nameToken.Val}, nil
}

func init() {
	RegisterTag("block", tagBlockParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_comment.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_comment.go
new file mode 100644
index 0000000..a66a973
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_comment.go
@@ -0,0 +1,27 @@
+package pongo2
+
// tagCommentNode is the parsed {% comment %} tag; it renders nothing.
type tagCommentNode struct{}

// Execute is a no-op: the wrapped content was discarded at parse time.
func (node *tagCommentNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	return nil
}

// tagCommentParser parses {% comment %} ... {% endcomment %}, throwing
// the wrapped nodes away.
func tagCommentParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	commentNode := &tagCommentNode{}

	// TODO: Process the endtag's arguments (see django 'comment'-tag documentation)
	_, _, err := doc.WrapUntilTag("endcomment")
	if err != nil {
		return nil, err
	}

	if arguments.Count() != 0 {
		return nil, arguments.Error("Tag 'comment' does not take any argument.", nil)
	}

	return commentNode, nil
}

func init() {
	RegisterTag("comment", tagCommentParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_cycle.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_cycle.go
new file mode 100644
index 0000000..9b83b9b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_cycle.go
@@ -0,0 +1,106 @@
+package pongo2
+
// tagCycleValue is placed in the context when a cycle is bound via
// "as"; referencing it from another {% cycle %} advances that cycle.
type tagCycleValue struct {
	node  *tagCycleNode
	value *Value
}

// tagCycleNode cycles through its argument expressions, advancing by
// one on every Execute call.
type tagCycleNode struct {
	position *Token
	args     []IEvaluator
	idx      int
	asName   string
	silent   bool
}

// String renders the cycle's current value.
func (cv *tagCycleValue) String() string {
	return cv.value.String()
}

// Execute emits the next value of the cycle. When the evaluated item
// is itself a bound cycle (tagCycleValue), that referenced cycle is
// advanced and emitted instead.
func (node *tagCycleNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	item := node.args[node.idx%len(node.args)]
	node.idx++

	val, err := item.Evaluate(ctx)
	if err != nil {
		return err
	}

	if t, ok := val.Interface().(*tagCycleValue); ok {
		// {% cycle "test1" "test2"
		// {% cycle cycleitem %}

		// Update the cycle value with next value
		item := t.node.args[t.node.idx%len(t.node.args)]
		t.node.idx++

		val, err := item.Evaluate(ctx)
		if err != nil {
			return err
		}

		t.value = val

		if !t.node.silent {
			writer.WriteString(val.String())
		}
	} else {
		// Regular call

		cycleValue := &tagCycleValue{
			node:  node,
			value: val,
		}

		if node.asName != "" {
			// Make the cycle addressable from other {% cycle %} tags.
			ctx.Private[node.asName] = cycleValue
		}
		if !node.silent {
			writer.WriteString(val.String())
		}
	}

	return nil
}

// HINT: We're not supporting the old comma-seperated list of expresions argument-style
//
// tagCycleParser parses {% cycle expr expr ... [as name [silent]] %}.
func tagCycleParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	cycleNode := &tagCycleNode{
		position: start,
	}

	for arguments.Remaining() > 0 {
		node, err := arguments.ParseExpression()
		if err != nil {
			return nil, err
		}
		cycleNode.args = append(cycleNode.args, node)

		if arguments.MatchOne(TokenKeyword, "as") != nil {
			// as

			nameToken := arguments.MatchType(TokenIdentifier)
			if nameToken == nil {
				return nil, arguments.Error("Name (identifier) expected after 'as'.", nil)
			}
			cycleNode.asName = nameToken.Val

			if arguments.MatchOne(TokenIdentifier, "silent") != nil {
				cycleNode.silent = true
			}

			// Now we're finished
			break
		}
	}

	if arguments.Remaining() > 0 {
		return nil, arguments.Error("Malformed cycle-tag.", nil)
	}

	return cycleNode, nil
}

func init() {
	RegisterTag("cycle", tagCycleParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_extends.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_extends.go
new file mode 100644
index 0000000..5771020
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_extends.go
@@ -0,0 +1,52 @@
+package pongo2
+
+type tagExtendsNode struct {
+ filename string
+}
+
+func (node *tagExtendsNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ return nil
+}
+
+func tagExtendsParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ extendsNode := &tagExtendsNode{}
+
+ if doc.template.level > 1 {
+ return nil, arguments.Error("The 'extends' tag can only defined on root level.", start)
+ }
+
+ if doc.template.parent != nil {
+ // Already one parent
+ return nil, arguments.Error("This template has already one parent.", start)
+ }
+
+ if filenameToken := arguments.MatchType(TokenString); filenameToken != nil {
+ // prepared, static template
+
+ // Get parent's filename
+ parentFilename := doc.template.set.resolveFilename(doc.template, filenameToken.Val)
+
+ // Parse the parent
+ parentTemplate, err := doc.template.set.FromFile(parentFilename)
+ if err != nil {
+ return nil, err.(*Error)
+ }
+
+ // Keep track of things
+ parentTemplate.child = doc.template
+ doc.template.parent = parentTemplate
+ extendsNode.filename = parentFilename
+ } else {
+ return nil, arguments.Error("Tag 'extends' requires a template filename as string.", nil)
+ }
+
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("Tag 'extends' does only take 1 argument.", nil)
+ }
+
+ return extendsNode, nil
+}
+
+func init() {
+ RegisterTag("extends", tagExtendsParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_filter.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_filter.go
new file mode 100644
index 0000000..b38fd92
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_filter.go
@@ -0,0 +1,95 @@
+package pongo2
+
+import (
+ "bytes"
+)
+
// nodeFilterCall is one element of a filter chain: a filter name plus an
// optional parameter expression (nil when the filter takes no parameter).
type nodeFilterCall struct {
	name      string
	paramExpr IEvaluator
}

// tagFilterNode implements {% filter ... %}...{% endfilter %}: the body is
// rendered into a buffer first and then piped through the filter chain.
type tagFilterNode struct {
	position    *Token
	bodyWrapper *NodeWrapper
	filterChain []*nodeFilterCall
}

// Execute renders the wrapped body, applies every filter of the chain in
// order and writes the final result to writer.
func (node *tagFilterNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	temp := bytes.NewBuffer(make([]byte, 0, 1024)) // 1 KiB size

	err := node.bodyWrapper.Execute(ctx, temp)
	if err != nil {
		return err
	}

	value := AsValue(temp.String())

	for _, call := range node.filterChain {
		var param *Value
		if call.paramExpr != nil {
			param, err = call.paramExpr.Evaluate(ctx)
			if err != nil {
				return err
			}
		} else {
			param = AsValue(nil)
		}
		value, err = ApplyFilter(call.name, value, param)
		if err != nil {
			return ctx.Error(err.Error(), node.position)
		}
	}

	writer.WriteString(value.String())

	return nil
}

// tagFilterParser parses {% filter name[:param]|name2[:param2]|... %}.
func tagFilterParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	filterNode := &tagFilterNode{
		position: start,
	}

	wrapper, _, err := doc.WrapUntilTag("endfilter")
	if err != nil {
		return nil, err
	}
	filterNode.bodyWrapper = wrapper

	for arguments.Remaining() > 0 {
		filterCall := &nodeFilterCall{}

		nameToken := arguments.MatchType(TokenIdentifier)
		if nameToken == nil {
			return nil, arguments.Error("Expected a filter name (identifier).", nil)
		}
		filterCall.name = nameToken.Val

		if arguments.MatchOne(TokenSymbol, ":") != nil {
			// Filter parameter
			// NOTICE: we can't use ParseExpression() here, because it would parse the next filter "|..." as well in the argument list
			expr, err := arguments.parseVariableOrLiteral()
			if err != nil {
				return nil, err
			}
			filterCall.paramExpr = expr
		}

		filterNode.filterChain = append(filterNode.filterChain, filterCall)

		// A missing '|' ends the chain.
		if arguments.MatchOne(TokenSymbol, "|") == nil {
			break
		}
	}

	if arguments.Remaining() > 0 {
		return nil, arguments.Error("Malformed filter-tag arguments.", nil)
	}

	return filterNode, nil
}

func init() {
	RegisterTag("filter", tagFilterParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_firstof.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_firstof.go
new file mode 100644
index 0000000..5b2888e
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_firstof.go
@@ -0,0 +1,49 @@
+package pongo2
+
// tagFirstofNode implements {% firstof arg1 arg2 ... %}: it outputs the
// first argument that evaluates to a true value, or nothing at all.
type tagFirstofNode struct {
	position *Token
	args     []IEvaluator
}

// Execute evaluates the arguments in order and writes the first truthy one.
func (node *tagFirstofNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	for _, arg := range node.args {
		val, err := arg.Evaluate(ctx)
		if err != nil {
			return err
		}

		if val.IsTrue() {
			// Honor autoescaping unless the argument was piped through 'safe'.
			if ctx.Autoescape && !arg.FilterApplied("safe") {
				val, err = ApplyFilter("escape", val, nil)
				if err != nil {
					return err
				}
			}

			writer.WriteString(val.String())
			return nil
		}
	}

	return nil
}

// tagFirstofParser parses the argument expressions of a firstof-tag.
func tagFirstofParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	firstofNode := &tagFirstofNode{
		position: start,
	}

	for arguments.Remaining() > 0 {
		node, err := arguments.ParseExpression()
		if err != nil {
			return nil, err
		}
		firstofNode.args = append(firstofNode.args, node)
	}

	return firstofNode, nil
}

func init() {
	RegisterTag("firstof", tagFirstofParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_for.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_for.go
new file mode 100644
index 0000000..5b0b555
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_for.go
@@ -0,0 +1,159 @@
+package pongo2
+
// tagForNode implements {% for %}...[{% empty %}...]{% endfor %}.
type tagForNode struct {
	key             string
	value           string // only for maps: for key, value in map
	objectEvaluator IEvaluator
	reversed        bool
	sorted          bool

	bodyWrapper  *NodeWrapper
	emptyWrapper *NodeWrapper
}

// tagForLoopInformation is exposed to the template body as 'forloop'.
type tagForLoopInformation struct {
	Counter     int // 1-based iteration counter
	Counter0    int // 0-based iteration counter
	Revcounter  int
	Revcounter0 int
	First       bool
	Last        bool
	Parentloop  *tagForLoopInformation // set for nested loops
}

// Execute iterates over the evaluated object, rendering the body once per
// element; the empty-wrapper (if present) is rendered when there is nothing
// to iterate over. Errors from the callbacks are carried out through the
// named return value forError.
func (node *tagForNode) Execute(ctx *ExecutionContext, writer TemplateWriter) (forError *Error) {
	// Backup forloop (as parentloop in public context), key-name and value-name
	forCtx := NewChildExecutionContext(ctx)
	parentloop := forCtx.Private["forloop"]

	// Create loop struct
	loopInfo := &tagForLoopInformation{
		First: true,
	}

	// Is it a loop in a loop?
	if parentloop != nil {
		loopInfo.Parentloop = parentloop.(*tagForLoopInformation)
	}

	// Register loopInfo in public context
	forCtx.Private["forloop"] = loopInfo

	obj, err := node.objectEvaluator.Evaluate(forCtx)
	if err != nil {
		return err
	}

	obj.IterateOrder(func(idx, count int, key, value *Value) bool {
		// There's something to iterate over (correct type and at least 1 item)

		// Update loop infos and public context
		forCtx.Private[node.key] = key
		if value != nil {
			forCtx.Private[node.value] = value
		}
		loopInfo.Counter = idx + 1
		loopInfo.Counter0 = idx
		// First was initialized to true; clear it once we passed index 0.
		if idx == 1 {
			loopInfo.First = false
		}
		if idx+1 == count {
			loopInfo.Last = true
		}
		loopInfo.Revcounter = count - idx        // TODO: Not sure about this, have to look it up
		loopInfo.Revcounter0 = count - (idx + 1) // TODO: Not sure about this, have to look it up

		// Render elements with updated context
		err := node.bodyWrapper.Execute(forCtx, writer)
		if err != nil {
			forError = err
			return false
		}
		return true
	}, func() {
		// Nothing to iterate over (maybe wrong type or no items)
		if node.emptyWrapper != nil {
			err := node.emptyWrapper.Execute(forCtx, writer)
			if err != nil {
				forError = err
			}
		}
	}, node.reversed, node.sorted)

	return forError
}

// tagForParser parses {% for key[, value] in expr [reversed] [sorted] %}.
func tagForParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	forNode := &tagForNode{}

	// Arguments parsing
	var valueToken *Token
	keyToken := arguments.MatchType(TokenIdentifier)
	if keyToken == nil {
		return nil, arguments.Error("Expected an key identifier as first argument for 'for'-tag", nil)
	}

	if arguments.Match(TokenSymbol, ",") != nil {
		// Value name is provided
		valueToken = arguments.MatchType(TokenIdentifier)
		if valueToken == nil {
			return nil, arguments.Error("Value name must be an identifier.", nil)
		}
	}

	if arguments.Match(TokenKeyword, "in") == nil {
		return nil, arguments.Error("Expected keyword 'in'.", nil)
	}

	objectEvaluator, err := arguments.ParseExpression()
	if err != nil {
		return nil, err
	}
	forNode.objectEvaluator = objectEvaluator
	forNode.key = keyToken.Val
	if valueToken != nil {
		forNode.value = valueToken.Val
	}

	if arguments.MatchOne(TokenIdentifier, "reversed") != nil {
		forNode.reversed = true
	}

	if arguments.MatchOne(TokenIdentifier, "sorted") != nil {
		forNode.sorted = true
	}

	if arguments.Remaining() > 0 {
		return nil, arguments.Error("Malformed for-loop arguments.", nil)
	}

	// Body wrapping
	wrapper, endargs, err := doc.WrapUntilTag("empty", "endfor")
	if err != nil {
		return nil, err
	}
	forNode.bodyWrapper = wrapper

	if endargs.Count() > 0 {
		return nil, endargs.Error("Arguments not allowed here.", nil)
	}

	if wrapper.Endtag == "empty" {
		// An {% empty %} block follows; wrap it up to {% endfor %}.
		wrapper, endargs, err = doc.WrapUntilTag("endfor")
		if err != nil {
			return nil, err
		}
		forNode.emptyWrapper = wrapper

		if endargs.Count() > 0 {
			return nil, endargs.Error("Arguments not allowed here.", nil)
		}
	}

	return forNode, nil
}

func init() {
	RegisterTag("for", tagForParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_if.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_if.go
new file mode 100644
index 0000000..3eeaf3b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_if.go
@@ -0,0 +1,76 @@
+package pongo2
+
// tagIfNode implements {% if %} / {% elif %} / {% else %} / {% endif %}.
// conditions[i] guards wrappers[i]; when there is one more wrapper than
// conditions, the extra trailing wrapper is the else-block.
type tagIfNode struct {
	conditions []IEvaluator
	wrappers   []*NodeWrapper
}

// Execute renders the first wrapper whose condition evaluates to true, or
// the else-wrapper when every condition evaluated to false.
func (node *tagIfNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	for i, condition := range node.conditions {
		result, err := condition.Evaluate(ctx)
		if err != nil {
			return err
		}

		if result.IsTrue() {
			return node.wrappers[i].Execute(ctx, writer)
		}
		// Last condition?
		if len(node.conditions) == i+1 && len(node.wrappers) > i+1 {
			// Extra wrapper without a condition: that's the else-block.
			return node.wrappers[i+1].Execute(ctx, writer)
		}
	}
	return nil
}

// tagIfParser parses the whole if/elif/else/endif construct.
func tagIfParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	ifNode := &tagIfNode{}

	// Parse first and main IF condition
	condition, err := arguments.ParseExpression()
	if err != nil {
		return nil, err
	}
	ifNode.conditions = append(ifNode.conditions, condition)

	if arguments.Remaining() > 0 {
		return nil, arguments.Error("If-condition is malformed.", nil)
	}

	// Check the rest
	for {
		wrapper, tagArgs, err := doc.WrapUntilTag("elif", "else", "endif")
		if err != nil {
			return nil, err
		}
		ifNode.wrappers = append(ifNode.wrappers, wrapper)

		if wrapper.Endtag == "elif" {
			// elif can take a condition
			condition, err = tagArgs.ParseExpression()
			if err != nil {
				return nil, err
			}
			ifNode.conditions = append(ifNode.conditions, condition)

			if tagArgs.Remaining() > 0 {
				return nil, tagArgs.Error("Elif-condition is malformed.", nil)
			}
		} else {
			if tagArgs.Count() > 0 {
				// else/endif can't take any conditions
				return nil, tagArgs.Error("Arguments not allowed here.", nil)
			}
		}

		if wrapper.Endtag == "endif" {
			break
		}
	}

	return ifNode, nil
}

func init() {
	RegisterTag("if", tagIfParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ifchanged.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ifchanged.go
new file mode 100644
index 0000000..45296a0
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ifchanged.go
@@ -0,0 +1,116 @@
+package pongo2
+
+import (
+ "bytes"
+)
+
+type tagIfchangedNode struct {
+ watchedExpr []IEvaluator
+ lastValues []*Value
+ lastContent []byte
+ thenWrapper *NodeWrapper
+ elseWrapper *NodeWrapper
+}
+
+func (node *tagIfchangedNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ if len(node.watchedExpr) == 0 {
+ // Check against own rendered body
+
+ buf := bytes.NewBuffer(make([]byte, 0, 1024)) // 1 KiB
+ err := node.thenWrapper.Execute(ctx, buf)
+ if err != nil {
+ return err
+ }
+
+ bufBytes := buf.Bytes()
+ if !bytes.Equal(node.lastContent, bufBytes) {
+ // Rendered content changed, output it
+ writer.Write(bufBytes)
+ node.lastContent = bufBytes
+ }
+ } else {
+ nowValues := make([]*Value, 0, len(node.watchedExpr))
+ for _, expr := range node.watchedExpr {
+ val, err := expr.Evaluate(ctx)
+ if err != nil {
+ return err
+ }
+ nowValues = append(nowValues, val)
+ }
+
+ // Compare old to new values now
+ changed := len(node.lastValues) == 0
+
+ for idx, oldVal := range node.lastValues {
+ if !oldVal.EqualValueTo(nowValues[idx]) {
+ changed = true
+ break // we can stop here because ONE value changed
+ }
+ }
+
+ node.lastValues = nowValues
+
+ if changed {
+ // Render thenWrapper
+ err := node.thenWrapper.Execute(ctx, writer)
+ if err != nil {
+ return err
+ }
+ } else {
+ // Render elseWrapper
+ err := node.elseWrapper.Execute(ctx, writer)
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func tagIfchangedParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ ifchangedNode := &tagIfchangedNode{}
+
+ for arguments.Remaining() > 0 {
+ // Parse condition
+ expr, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ ifchangedNode.watchedExpr = append(ifchangedNode.watchedExpr, expr)
+ }
+
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("Ifchanged-arguments are malformed.", nil)
+ }
+
+ // Wrap then/else-blocks
+ wrapper, endargs, err := doc.WrapUntilTag("else", "endifchanged")
+ if err != nil {
+ return nil, err
+ }
+ ifchangedNode.thenWrapper = wrapper
+
+ if endargs.Count() > 0 {
+ return nil, endargs.Error("Arguments not allowed here.", nil)
+ }
+
+ if wrapper.Endtag == "else" {
+ // if there's an else in the if-statement, we need the else-Block as well
+ wrapper, endargs, err = doc.WrapUntilTag("endifchanged")
+ if err != nil {
+ return nil, err
+ }
+ ifchangedNode.elseWrapper = wrapper
+
+ if endargs.Count() > 0 {
+ return nil, endargs.Error("Arguments not allowed here.", nil)
+ }
+ }
+
+ return ifchangedNode, nil
+}
+
+func init() {
+ RegisterTag("ifchanged", tagIfchangedParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ifequal.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ifequal.go
new file mode 100644
index 0000000..103f1c7
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ifequal.go
@@ -0,0 +1,78 @@
+package pongo2
+
// tagIfEqualNode implements {% ifequal a b %}...[{% else %}...]{% endifequal %}.
type tagIfEqualNode struct {
	var1, var2  IEvaluator
	thenWrapper *NodeWrapper
	elseWrapper *NodeWrapper
}

// Execute renders the then-block when both expressions evaluate to equal
// values, otherwise the optional else-block.
func (node *tagIfEqualNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	r1, err := node.var1.Evaluate(ctx)
	if err != nil {
		return err
	}
	r2, err := node.var2.Evaluate(ctx)
	if err != nil {
		return err
	}

	result := r1.EqualValueTo(r2)

	if result {
		return node.thenWrapper.Execute(ctx, writer)
	}
	if node.elseWrapper != nil {
		return node.elseWrapper.Execute(ctx, writer)
	}
	return nil
}

// tagIfEqualParser parses exactly two expressions plus then/else blocks.
func tagIfEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	ifequalNode := &tagIfEqualNode{}

	// Parse two expressions
	var1, err := arguments.ParseExpression()
	if err != nil {
		return nil, err
	}
	var2, err := arguments.ParseExpression()
	if err != nil {
		return nil, err
	}
	ifequalNode.var1 = var1
	ifequalNode.var2 = var2

	if arguments.Remaining() > 0 {
		return nil, arguments.Error("ifequal only takes 2 arguments.", nil)
	}

	// Wrap then/else-blocks
	wrapper, endargs, err := doc.WrapUntilTag("else", "endifequal")
	if err != nil {
		return nil, err
	}
	ifequalNode.thenWrapper = wrapper

	if endargs.Count() > 0 {
		return nil, endargs.Error("Arguments not allowed here.", nil)
	}

	if wrapper.Endtag == "else" {
		// An {% else %} block follows; wrap it up to {% endifequal %}.
		wrapper, endargs, err = doc.WrapUntilTag("endifequal")
		if err != nil {
			return nil, err
		}
		ifequalNode.elseWrapper = wrapper

		if endargs.Count() > 0 {
			return nil, endargs.Error("Arguments not allowed here.", nil)
		}
	}

	return ifequalNode, nil
}

func init() {
	RegisterTag("ifequal", tagIfEqualParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ifnotequal.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ifnotequal.go
new file mode 100644
index 0000000..0d287d3
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ifnotequal.go
@@ -0,0 +1,78 @@
+package pongo2
+
+type tagIfNotEqualNode struct {
+ var1, var2 IEvaluator
+ thenWrapper *NodeWrapper
+ elseWrapper *NodeWrapper
+}
+
+func (node *tagIfNotEqualNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ r1, err := node.var1.Evaluate(ctx)
+ if err != nil {
+ return err
+ }
+ r2, err := node.var2.Evaluate(ctx)
+ if err != nil {
+ return err
+ }
+
+ result := !r1.EqualValueTo(r2)
+
+ if result {
+ return node.thenWrapper.Execute(ctx, writer)
+ }
+ if node.elseWrapper != nil {
+ return node.elseWrapper.Execute(ctx, writer)
+ }
+ return nil
+}
+
+func tagIfNotEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ ifnotequalNode := &tagIfNotEqualNode{}
+
+ // Parse two expressions
+ var1, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ var2, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ ifnotequalNode.var1 = var1
+ ifnotequalNode.var2 = var2
+
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("ifequal only takes 2 arguments.", nil)
+ }
+
+ // Wrap then/else-blocks
+ wrapper, endargs, err := doc.WrapUntilTag("else", "endifnotequal")
+ if err != nil {
+ return nil, err
+ }
+ ifnotequalNode.thenWrapper = wrapper
+
+ if endargs.Count() > 0 {
+ return nil, endargs.Error("Arguments not allowed here.", nil)
+ }
+
+ if wrapper.Endtag == "else" {
+ // if there's an else in the if-statement, we need the else-Block as well
+ wrapper, endargs, err = doc.WrapUntilTag("endifnotequal")
+ if err != nil {
+ return nil, err
+ }
+ ifnotequalNode.elseWrapper = wrapper
+
+ if endargs.Count() > 0 {
+ return nil, endargs.Error("Arguments not allowed here.", nil)
+ }
+ }
+
+ return ifnotequalNode, nil
+}
+
+func init() {
+ RegisterTag("ifnotequal", tagIfNotEqualParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_import.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_import.go
new file mode 100644
index 0000000..7e0d6a0
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_import.go
@@ -0,0 +1,84 @@
+package pongo2
+
+import (
+ "fmt"
+)
+
// tagImportNode implements {% import "file" macro [as alias][, ...] %}.
type tagImportNode struct {
	position *Token
	filename string
	macros   map[string]*tagMacroNode // alias/name -> macro instance
}

// Execute registers every imported macro as a callable in the private
// context under its (alias) name. It renders nothing itself.
func (node *tagImportNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	for name, macro := range node.macros {
		// Immediately-invoked func binds name/macro per iteration (loop
		// variables are shared across iterations in this Go version).
		func(name string, macro *tagMacroNode) {
			ctx.Private[name] = func(args ...*Value) *Value {
				return macro.call(ctx, args...)
			}
		}(name, macro)
	}
	return nil
}

// tagImportParser compiles the referenced template at parse time and
// resolves all requested (exported) macros from it.
func tagImportParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	importNode := &tagImportNode{
		position: start,
		macros:   make(map[string]*tagMacroNode),
	}

	filenameToken := arguments.MatchType(TokenString)
	if filenameToken == nil {
		return nil, arguments.Error("Import-tag needs a filename as string.", nil)
	}

	importNode.filename = doc.template.set.resolveFilename(doc.template, filenameToken.Val)

	if arguments.Remaining() == 0 {
		return nil, arguments.Error("You must at least specify one macro to import.", nil)
	}

	// Compile the given template
	tpl, err := doc.template.set.FromFile(importNode.filename)
	if err != nil {
		return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, start)
	}

	for arguments.Remaining() > 0 {
		macroNameToken := arguments.MatchType(TokenIdentifier)
		if macroNameToken == nil {
			return nil, arguments.Error("Expected macro name (identifier).", nil)
		}

		// Optional 'as alias' renames the macro in the importing template.
		asName := macroNameToken.Val
		if arguments.Match(TokenKeyword, "as") != nil {
			aliasToken := arguments.MatchType(TokenIdentifier)
			if aliasToken == nil {
				return nil, arguments.Error("Expected macro alias name (identifier).", nil)
			}
			asName = aliasToken.Val
		}

		macroInstance, has := tpl.exportedMacros[macroNameToken.Val]
		if !has {
			return nil, arguments.Error(fmt.Sprintf("Macro '%s' not found (or not exported) in '%s'.", macroNameToken.Val,
				importNode.filename), macroNameToken)
		}

		importNode.macros[asName] = macroInstance

		if arguments.Remaining() == 0 {
			break
		}

		if arguments.Match(TokenSymbol, ",") == nil {
			return nil, arguments.Error("Expected ','.", nil)
		}
	}

	return importNode, nil
}

func init() {
	RegisterTag("import", tagImportParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_include.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_include.go
new file mode 100644
index 0000000..6d619fd
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_include.go
@@ -0,0 +1,146 @@
+package pongo2
+
// tagIncludeNode implements {% include "file" [if_exists] [with k=v ...] [only] %}.
// With a static (string) filename the included template is compiled at
// parse time; otherwise it is resolved lazily on every execution.
type tagIncludeNode struct {
	tpl               *Template
	filenameEvaluator IEvaluator
	lazy              bool
	only              bool
	filename          string
	withPairs         map[string]IEvaluator
	ifExists          bool
}

// Execute builds the context for the included template (parent data unless
// 'only' was given, plus all with-pairs) and renders it into writer.
func (node *tagIncludeNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	// Building the context for the template
	includeCtx := make(Context)

	// Fill the context with all data from the parent
	if !node.only {
		includeCtx.Update(ctx.Public)
		includeCtx.Update(ctx.Private)
	}

	// Put all custom with-pairs into the context
	for key, value := range node.withPairs {
		val, err := value.Evaluate(ctx)
		if err != nil {
			return err
		}
		includeCtx[key] = val
	}

	// Execute the template
	if node.lazy {
		// Evaluate the filename
		filename, err := node.filenameEvaluator.Evaluate(ctx)
		if err != nil {
			return err
		}

		if filename.String() == "" {
			return ctx.Error("Filename for 'include'-tag evaluated to an empty string.", nil)
		}

		// Get include-filename
		includedFilename := ctx.template.set.resolveFilename(ctx.template, filename.String())

		includedTpl, err2 := ctx.template.set.FromFile(includedFilename)
		if err2 != nil {
			// if this is ReadFile error, and "if_exists" flag is enabled
			if node.ifExists && err2.(*Error).Sender == "fromfile" {
				return nil
			}
			return err2.(*Error)
		}
		err2 = includedTpl.ExecuteWriter(includeCtx, writer)
		if err2 != nil {
			return err2.(*Error)
		}
		return nil
	}
	// Template is already parsed with static filename
	err := node.tpl.ExecuteWriter(includeCtx, writer)
	if err != nil {
		return err.(*Error)
	}
	return nil
}

// tagIncludeEmptyNode replaces an include whose target does not exist but
// was marked with 'if_exists'; it renders nothing.
type tagIncludeEmptyNode struct{}

func (node *tagIncludeEmptyNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
	return nil
}

// tagIncludeParser parses the include-tag arguments (filename, if_exists,
// with-pairs and the 'only' flag).
func tagIncludeParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	includeNode := &tagIncludeNode{
		withPairs: make(map[string]IEvaluator),
	}

	if filenameToken := arguments.MatchType(TokenString); filenameToken != nil {
		// prepared, static template

		// "if_exists" flag
		ifExists := arguments.Match(TokenIdentifier, "if_exists") != nil

		// Get include-filename
		includedFilename := doc.template.set.resolveFilename(doc.template, filenameToken.Val)

		// Parse the parent
		includeNode.filename = includedFilename
		includedTpl, err := doc.template.set.FromFile(includedFilename)
		if err != nil {
			// if this is ReadFile error, and "if_exists" token presents we should create and empty node
			if err.(*Error).Sender == "fromfile" && ifExists {
				return &tagIncludeEmptyNode{}, nil
			}
			return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, filenameToken)
		}
		includeNode.tpl = includedTpl
	} else {
		// No String, then the user wants to use lazy-evaluation (slower, but possible)
		filenameEvaluator, err := arguments.ParseExpression()
		if err != nil {
			// NOTE(review): filenameToken is always nil in this branch;
			// presumably updateFromTokenIfNeeded tolerates a nil token — confirm.
			return nil, err.updateFromTokenIfNeeded(doc.template, filenameToken)
		}
		includeNode.filenameEvaluator = filenameEvaluator
		includeNode.lazy = true
		includeNode.ifExists = arguments.Match(TokenIdentifier, "if_exists") != nil // "if_exists" flag
	}

	// After having parsed the filename we're gonna parse the with+only options
	if arguments.Match(TokenIdentifier, "with") != nil {
		for arguments.Remaining() > 0 {
			// We have at least one key=expr pair (because of starting "with")
			keyToken := arguments.MatchType(TokenIdentifier)
			if keyToken == nil {
				return nil, arguments.Error("Expected an identifier", nil)
			}
			if arguments.Match(TokenSymbol, "=") == nil {
				return nil, arguments.Error("Expected '='.", nil)
			}
			valueExpr, err := arguments.ParseExpression()
			if err != nil {
				return nil, err.updateFromTokenIfNeeded(doc.template, keyToken)
			}

			includeNode.withPairs[keyToken.Val] = valueExpr

			// Only?
			if arguments.Match(TokenIdentifier, "only") != nil {
				includeNode.only = true
				break // stop parsing arguments because it's the last option
			}
		}
	}

	if arguments.Remaining() > 0 {
		return nil, arguments.Error("Malformed 'include'-tag arguments.", nil)
	}

	return includeNode, nil
}

func init() {
	RegisterTag("include", tagIncludeParser)
}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_lorem.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_lorem.go
new file mode 100644
index 0000000..8a152b3
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_lorem.go
@@ -0,0 +1,131 @@
+package pongo2
+
+import (
+ "math/rand"
+ "strings"
+ "time"
+)
+
// Paragraph and word pools derived from the canonical tagLoremText below.
var (
	tagLoremParagraphs = strings.Split(tagLoremText, "\n")
	tagLoremWords      = strings.Fields(tagLoremText)
)

// tagLoremNode implements the {% lorem [count] [method] [random] %} tag.
type tagLoremNode struct {
	position *Token
	count    int    // number of paragraphs
	method   string // w = words, p = HTML paragraphs, b = plain-text (default is b)
	random   bool   // does not use the default paragraph "Lorem ipsum dolor sit amet, ..."
}
+
+func (node *tagLoremNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ switch node.method {
+ case "b":
+ if node.random {
+ for i := 0; i < node.count; i++ {
+ if i > 0 {
+ writer.WriteString("\n")
+ }
+ par := tagLoremParagraphs[rand.Intn(len(tagLoremParagraphs))]
+ writer.WriteString(par)
+ }
+ } else {
+ for i := 0; i < node.count; i++ {
+ if i > 0 {
+ writer.WriteString("\n")
+ }
+ par := tagLoremParagraphs[i%len(tagLoremParagraphs)]
+ writer.WriteString(par)
+ }
+ }
+ case "w":
+ if node.random {
+ for i := 0; i < node.count; i++ {
+ if i > 0 {
+ writer.WriteString(" ")
+ }
+ word := tagLoremWords[rand.Intn(len(tagLoremWords))]
+ writer.WriteString(word)
+ }
+ } else {
+ for i := 0; i < node.count; i++ {
+ if i > 0 {
+ writer.WriteString(" ")
+ }
+ word := tagLoremWords[i%len(tagLoremWords)]
+ writer.WriteString(word)
+ }
+ }
+ case "p":
+ if node.random {
+ for i := 0; i < node.count; i++ {
+ if i > 0 {
+ writer.WriteString("\n")
+ }
+ writer.WriteString("")
+ par := tagLoremParagraphs[rand.Intn(len(tagLoremParagraphs))]
+ writer.WriteString(par)
+ writer.WriteString("
")
+ }
+ } else {
+ for i := 0; i < node.count; i++ {
+ if i > 0 {
+ writer.WriteString("\n")
+ }
+ writer.WriteString("")
+ par := tagLoremParagraphs[i%len(tagLoremParagraphs)]
+ writer.WriteString(par)
+ writer.WriteString("
")
+
+ }
+ }
+ default:
+ panic("unsupported method")
+ }
+
+ return nil
+}
+
// tagLoremParser parses {% lorem [count] [w|p|b] [random] %}.
func tagLoremParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
	loremNode := &tagLoremNode{
		position: start,
		count:    1,
		method:   "b",
	}

	// Optional count (defaults to 1)
	if countToken := arguments.MatchType(TokenNumber); countToken != nil {
		loremNode.count = AsValue(countToken.Val).Integer()
	}

	// Optional output method (defaults to "b")
	if methodToken := arguments.MatchType(TokenIdentifier); methodToken != nil {
		if methodToken.Val != "w" && methodToken.Val != "p" && methodToken.Val != "b" {
			return nil, arguments.Error("lorem-method must be either 'w', 'p' or 'b'.", nil)
		}

		loremNode.method = methodToken.Val
	}

	if arguments.MatchOne(TokenIdentifier, "random") != nil {
		loremNode.random = true
	}

	if arguments.Remaining() > 0 {
		return nil, arguments.Error("Malformed lorem-tag arguments.", nil)
	}

	return loremNode, nil
}
+
func init() {
	// Seed the PRNG once for the 'random' flavour of the lorem-tag.
	rand.Seed(time.Now().Unix())

	RegisterTag("lorem", tagLoremParser)
}
+
// tagLoremText is the source text pool for the lorem-tag; paragraphs are
// separated by newlines and split into tagLoremParagraphs/tagLoremWords.
const tagLoremText = `Lorem ipsum dolor sit amet, consectetur adipisici elit, sed eiusmod tempor incidunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquid ex ea commodi consequat. Quis aute iure reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint obcaecat cupiditat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.
Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi.
Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat.
Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis.
At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, At accusam aliquyam diam diam dolore dolores duo eirmod eos erat, et nonumy sed tempor et et invidunt justo labore Stet clita ea et gubergren, kasd magna no rebum. sanctus sea sed takimata ut vero voluptua. est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat.
Consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.`
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_macro.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_macro.go
new file mode 100644
index 0000000..18a2c3c
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_macro.go
@@ -0,0 +1,149 @@
+package pongo2
+
+import (
+ "bytes"
+ "fmt"
+)
+
+type tagMacroNode struct {
+ position *Token
+ name string
+ argsOrder []string
+ args map[string]IEvaluator
+ exported bool
+
+ wrapper *NodeWrapper
+}
+
+func (node *tagMacroNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ ctx.Private[node.name] = func(args ...*Value) *Value {
+ return node.call(ctx, args...)
+ }
+
+ return nil
+}
+
+func (node *tagMacroNode) call(ctx *ExecutionContext, args ...*Value) *Value {
+ argsCtx := make(Context)
+
+ for k, v := range node.args {
+ if v == nil {
+ // User did not provided a default value
+ argsCtx[k] = nil
+ } else {
+ // Evaluate the default value
+ valueExpr, err := v.Evaluate(ctx)
+ if err != nil {
+ ctx.Logf(err.Error())
+ return AsSafeValue(err.Error())
+ }
+
+ argsCtx[k] = valueExpr
+ }
+ }
+
+ if len(args) > len(node.argsOrder) {
+ // Too many arguments, we're ignoring them and just logging into debug mode.
+ err := ctx.Error(fmt.Sprintf("Macro '%s' called with too many arguments (%d instead of %d).",
+ node.name, len(args), len(node.argsOrder)), nil).updateFromTokenIfNeeded(ctx.template, node.position)
+
+ ctx.Logf(err.Error()) // TODO: This is a workaround, because the error is not returned yet to the Execution()-methods
+ return AsSafeValue(err.Error())
+ }
+
+ // Make a context for the macro execution
+ macroCtx := NewChildExecutionContext(ctx)
+
+ // Register all arguments in the private context
+ macroCtx.Private.Update(argsCtx)
+
+ for idx, argValue := range args {
+ macroCtx.Private[node.argsOrder[idx]] = argValue.Interface()
+ }
+
+ var b bytes.Buffer
+ err := node.wrapper.Execute(macroCtx, &b)
+ if err != nil {
+ return AsSafeValue(err.updateFromTokenIfNeeded(ctx.template, node.position).Error())
+ }
+
+ return AsSafeValue(b.String())
+}
+
+func tagMacroParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ macroNode := &tagMacroNode{
+ position: start,
+ args: make(map[string]IEvaluator),
+ }
+
+ nameToken := arguments.MatchType(TokenIdentifier)
+ if nameToken == nil {
+ return nil, arguments.Error("Macro-tag needs at least an identifier as name.", nil)
+ }
+ macroNode.name = nameToken.Val
+
+ if arguments.MatchOne(TokenSymbol, "(") == nil {
+ return nil, arguments.Error("Expected '('.", nil)
+ }
+
+ for arguments.Match(TokenSymbol, ")") == nil {
+ argNameToken := arguments.MatchType(TokenIdentifier)
+ if argNameToken == nil {
+ return nil, arguments.Error("Expected argument name as identifier.", nil)
+ }
+ macroNode.argsOrder = append(macroNode.argsOrder, argNameToken.Val)
+
+ if arguments.Match(TokenSymbol, "=") != nil {
+ // Default expression follows
+ argDefaultExpr, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ macroNode.args[argNameToken.Val] = argDefaultExpr
+ } else {
+ // No default expression
+ macroNode.args[argNameToken.Val] = nil
+ }
+
+ if arguments.Match(TokenSymbol, ")") != nil {
+ break
+ }
+ if arguments.Match(TokenSymbol, ",") == nil {
+ return nil, arguments.Error("Expected ',' or ')'.", nil)
+ }
+ }
+
+ if arguments.Match(TokenKeyword, "export") != nil {
+ macroNode.exported = true
+ }
+
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("Malformed macro-tag.", nil)
+ }
+
+ // Body wrapping
+ wrapper, endargs, err := doc.WrapUntilTag("endmacro")
+ if err != nil {
+ return nil, err
+ }
+ macroNode.wrapper = wrapper
+
+ if endargs.Count() > 0 {
+ return nil, endargs.Error("Arguments not allowed here.", nil)
+ }
+
+ if macroNode.exported {
+ // Now register the macro if it wants to be exported
+ _, has := doc.template.exportedMacros[macroNode.name]
+ if has {
+ return nil, doc.Error(fmt.Sprintf("Another macro with name '%s' already exported.", macroNode.name), start)
+ }
+ doc.template.exportedMacros[macroNode.name] = macroNode
+ }
+
+ return macroNode, nil
+}
+
+func init() {
+ RegisterTag("macro", tagMacroParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_now.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_now.go
new file mode 100644
index 0000000..d9fa4a3
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_now.go
@@ -0,0 +1,50 @@
+package pongo2
+
+import (
+ "time"
+)
+
+type tagNowNode struct {
+ position *Token
+ format string
+ fake bool
+}
+
+func (node *tagNowNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ var t time.Time
+ if node.fake {
+ t = time.Date(2014, time.February, 05, 18, 31, 45, 00, time.UTC)
+ } else {
+ t = time.Now()
+ }
+
+ writer.WriteString(t.Format(node.format))
+
+ return nil
+}
+
+func tagNowParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ nowNode := &tagNowNode{
+ position: start,
+ }
+
+ formatToken := arguments.MatchType(TokenString)
+ if formatToken == nil {
+ return nil, arguments.Error("Expected a format string.", nil)
+ }
+ nowNode.format = formatToken.Val
+
+ if arguments.MatchOne(TokenIdentifier, "fake") != nil {
+ nowNode.fake = true
+ }
+
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("Malformed now-tag arguments.", nil)
+ }
+
+ return nowNode, nil
+}
+
+func init() {
+ RegisterTag("now", tagNowParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_set.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_set.go
new file mode 100644
index 0000000..be121c1
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_set.go
@@ -0,0 +1,50 @@
+package pongo2
+
+type tagSetNode struct {
+ name string
+ expression IEvaluator
+}
+
+func (node *tagSetNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ // Evaluate expression
+ value, err := node.expression.Evaluate(ctx)
+ if err != nil {
+ return err
+ }
+
+ ctx.Private[node.name] = value
+ return nil
+}
+
+func tagSetParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ node := &tagSetNode{}
+
+ // Parse variable name
+ typeToken := arguments.MatchType(TokenIdentifier)
+ if typeToken == nil {
+ return nil, arguments.Error("Expected an identifier.", nil)
+ }
+ node.name = typeToken.Val
+
+ if arguments.Match(TokenSymbol, "=") == nil {
+ return nil, arguments.Error("Expected '='.", nil)
+ }
+
+ // Variable expression
+ keyExpression, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ node.expression = keyExpression
+
+ // Remaining arguments
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("Malformed 'set'-tag arguments.", nil)
+ }
+
+ return node, nil
+}
+
+func init() {
+ RegisterTag("set", tagSetParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_spaceless.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_spaceless.go
new file mode 100644
index 0000000..4fa851b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_spaceless.go
@@ -0,0 +1,54 @@
+package pongo2
+
+import (
+ "bytes"
+ "regexp"
+)
+
+type tagSpacelessNode struct {
+ wrapper *NodeWrapper
+}
+
+var tagSpacelessRegexp = regexp.MustCompile(`(?U:(<.*>))([\t\n\v\f\r ]+)(?U:(<.*>))`)
+
+func (node *tagSpacelessNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ b := bytes.NewBuffer(make([]byte, 0, 1024)) // 1 KiB
+
+ err := node.wrapper.Execute(ctx, b)
+ if err != nil {
+ return err
+ }
+
+ s := b.String()
+ // Repeat this recursively
+ changed := true
+ for changed {
+ s2 := tagSpacelessRegexp.ReplaceAllString(s, "$1$3")
+ changed = s != s2
+ s = s2
+ }
+
+ writer.WriteString(s)
+
+ return nil
+}
+
+func tagSpacelessParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ spacelessNode := &tagSpacelessNode{}
+
+ wrapper, _, err := doc.WrapUntilTag("endspaceless")
+ if err != nil {
+ return nil, err
+ }
+ spacelessNode.wrapper = wrapper
+
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("Malformed spaceless-tag arguments.", nil)
+ }
+
+ return spacelessNode, nil
+}
+
+func init() {
+ RegisterTag("spaceless", tagSpacelessParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ssi.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ssi.go
new file mode 100644
index 0000000..09c2325
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_ssi.go
@@ -0,0 +1,68 @@
+package pongo2
+
+import (
+ "io/ioutil"
+)
+
+type tagSSINode struct {
+ filename string
+ content string
+ template *Template
+}
+
+func (node *tagSSINode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ if node.template != nil {
+ // Execute the template within the current context
+ includeCtx := make(Context)
+ includeCtx.Update(ctx.Public)
+ includeCtx.Update(ctx.Private)
+
+ err := node.template.execute(includeCtx, writer)
+ if err != nil {
+ return err.(*Error)
+ }
+ } else {
+ // Just print out the content
+ writer.WriteString(node.content)
+ }
+ return nil
+}
+
+func tagSSIParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ SSINode := &tagSSINode{}
+
+ if fileToken := arguments.MatchType(TokenString); fileToken != nil {
+ SSINode.filename = fileToken.Val
+
+ if arguments.Match(TokenIdentifier, "parsed") != nil {
+ // parsed
+ temporaryTpl, err := doc.template.set.FromFile(doc.template.set.resolveFilename(doc.template, fileToken.Val))
+ if err != nil {
+ return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, fileToken)
+ }
+ SSINode.template = temporaryTpl
+ } else {
+ // plaintext
+ buf, err := ioutil.ReadFile(doc.template.set.resolveFilename(doc.template, fileToken.Val))
+ if err != nil {
+ return nil, (&Error{
+ Sender: "tag:ssi",
+ ErrorMsg: err.Error(),
+ }).updateFromTokenIfNeeded(doc.template, fileToken)
+ }
+ SSINode.content = string(buf)
+ }
+ } else {
+ return nil, arguments.Error("First argument must be a string.", nil)
+ }
+
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("Malformed SSI-tag argument.", nil)
+ }
+
+ return SSINode, nil
+}
+
+func init() {
+ RegisterTag("ssi", tagSSIParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_templatetag.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_templatetag.go
new file mode 100644
index 0000000..164b4dc
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_templatetag.go
@@ -0,0 +1,45 @@
+package pongo2
+
+type tagTemplateTagNode struct {
+ content string
+}
+
+var templateTagMapping = map[string]string{
+ "openblock": "{%",
+ "closeblock": "%}",
+ "openvariable": "{{",
+ "closevariable": "}}",
+ "openbrace": "{",
+ "closebrace": "}",
+ "opencomment": "{#",
+ "closecomment": "#}",
+}
+
+func (node *tagTemplateTagNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ writer.WriteString(node.content)
+ return nil
+}
+
+func tagTemplateTagParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ ttNode := &tagTemplateTagNode{}
+
+ if argToken := arguments.MatchType(TokenIdentifier); argToken != nil {
+ output, found := templateTagMapping[argToken.Val]
+ if !found {
+ return nil, arguments.Error("Argument not found", argToken)
+ }
+ ttNode.content = output
+ } else {
+ return nil, arguments.Error("Identifier expected.", nil)
+ }
+
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("Malformed templatetag-tag argument.", nil)
+ }
+
+ return ttNode, nil
+}
+
+func init() {
+ RegisterTag("templatetag", tagTemplateTagParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_widthratio.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_widthratio.go
new file mode 100644
index 0000000..70c9c3e
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_widthratio.go
@@ -0,0 +1,83 @@
+package pongo2
+
+import (
+ "fmt"
+ "math"
+)
+
+type tagWidthratioNode struct {
+ position *Token
+ current, max IEvaluator
+ width IEvaluator
+ ctxName string
+}
+
+func (node *tagWidthratioNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ current, err := node.current.Evaluate(ctx)
+ if err != nil {
+ return err
+ }
+
+ max, err := node.max.Evaluate(ctx)
+ if err != nil {
+ return err
+ }
+
+ width, err := node.width.Evaluate(ctx)
+ if err != nil {
+ return err
+ }
+
+ value := int(math.Ceil(current.Float()/max.Float()*width.Float() + 0.5))
+
+ if node.ctxName == "" {
+ writer.WriteString(fmt.Sprintf("%d", value))
+ } else {
+ ctx.Private[node.ctxName] = value
+ }
+
+ return nil
+}
+
+func tagWidthratioParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ widthratioNode := &tagWidthratioNode{
+ position: start,
+ }
+
+ current, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ widthratioNode.current = current
+
+ max, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ widthratioNode.max = max
+
+ width, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ widthratioNode.width = width
+
+ if arguments.MatchOne(TokenKeyword, "as") != nil {
+ // Name follows
+ nameToken := arguments.MatchType(TokenIdentifier)
+ if nameToken == nil {
+ return nil, arguments.Error("Expected name (identifier).", nil)
+ }
+ widthratioNode.ctxName = nameToken.Val
+ }
+
+ if arguments.Remaining() > 0 {
+ return nil, arguments.Error("Malformed widthratio-tag arguments.", nil)
+ }
+
+ return widthratioNode, nil
+}
+
+func init() {
+ RegisterTag("widthratio", tagWidthratioParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/tags_with.go b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_with.go
new file mode 100644
index 0000000..32b3c1c
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/tags_with.go
@@ -0,0 +1,88 @@
+package pongo2
+
+type tagWithNode struct {
+ withPairs map[string]IEvaluator
+ wrapper *NodeWrapper
+}
+
+func (node *tagWithNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
+ //new context for block
+ withctx := NewChildExecutionContext(ctx)
+
+ // Put all custom with-pairs into the context
+ for key, value := range node.withPairs {
+ val, err := value.Evaluate(ctx)
+ if err != nil {
+ return err
+ }
+ withctx.Private[key] = val
+ }
+
+ return node.wrapper.Execute(withctx, writer)
+}
+
+func tagWithParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
+ withNode := &tagWithNode{
+ withPairs: make(map[string]IEvaluator),
+ }
+
+ if arguments.Count() == 0 {
+ return nil, arguments.Error("Tag 'with' requires at least one argument.", nil)
+ }
+
+ wrapper, endargs, err := doc.WrapUntilTag("endwith")
+ if err != nil {
+ return nil, err
+ }
+ withNode.wrapper = wrapper
+
+ if endargs.Count() > 0 {
+ return nil, endargs.Error("Arguments not allowed here.", nil)
+ }
+
+ // Scan through all arguments to see which style the user uses (old or new style).
+ // If we find any "as" keyword we will enforce old style; otherwise we will use new style.
+ oldStyle := false // by default we're using the new_style
+ for i := 0; i < arguments.Count(); i++ {
+ if arguments.PeekN(i, TokenKeyword, "as") != nil {
+ oldStyle = true
+ break
+ }
+ }
+
+ for arguments.Remaining() > 0 {
+ if oldStyle {
+ valueExpr, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ if arguments.Match(TokenKeyword, "as") == nil {
+ return nil, arguments.Error("Expected 'as' keyword.", nil)
+ }
+ keyToken := arguments.MatchType(TokenIdentifier)
+ if keyToken == nil {
+ return nil, arguments.Error("Expected an identifier", nil)
+ }
+ withNode.withPairs[keyToken.Val] = valueExpr
+ } else {
+ keyToken := arguments.MatchType(TokenIdentifier)
+ if keyToken == nil {
+ return nil, arguments.Error("Expected an identifier", nil)
+ }
+ if arguments.Match(TokenSymbol, "=") == nil {
+ return nil, arguments.Error("Expected '='.", nil)
+ }
+ valueExpr, err := arguments.ParseExpression()
+ if err != nil {
+ return nil, err
+ }
+ withNode.withPairs[keyToken.Val] = valueExpr
+ }
+ }
+
+ return withNode, nil
+}
+
+func init() {
+ RegisterTag("with", tagWithParser)
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template.go b/Godeps/_workspace/src/github.com/flosch/pongo2/template.go
new file mode 100644
index 0000000..74bd30b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template.go
@@ -0,0 +1,193 @@
+package pongo2
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+)
+
+type TemplateWriter interface {
+ io.Writer
+ WriteString(string) (int, error)
+}
+
+type templateWriter struct {
+ w io.Writer
+}
+
+func (tw *templateWriter) WriteString(s string) (int, error) {
+ return tw.w.Write([]byte(s))
+}
+
+func (tw *templateWriter) Write(b []byte) (int, error) {
+ return tw.w.Write(b)
+}
+
+type Template struct {
+ set *TemplateSet
+
+ // Input
+ isTplString bool
+ name string
+ tpl string
+ size int
+
+ // Calculation
+ tokens []*Token
+ parser *Parser
+
+ // first come, first serve (it's important to not override existing entries in here)
+ level int
+ parent *Template
+ child *Template
+ blocks map[string]*NodeWrapper
+ exportedMacros map[string]*tagMacroNode
+
+ // Output
+ root *nodeDocument
+}
+
+func newTemplateString(set *TemplateSet, tpl []byte) (*Template, error) {
+ return newTemplate(set, "", true, tpl)
+}
+
+func newTemplate(set *TemplateSet, name string, isTplString bool, tpl []byte) (*Template, error) {
+ strTpl := string(tpl)
+
+ // Create the template
+ t := &Template{
+ set: set,
+ isTplString: isTplString,
+ name: name,
+ tpl: strTpl,
+ size: len(strTpl),
+ blocks: make(map[string]*NodeWrapper),
+ exportedMacros: make(map[string]*tagMacroNode),
+ }
+
+ // Tokenize it
+ tokens, err := lex(name, strTpl)
+ if err != nil {
+ return nil, err
+ }
+ t.tokens = tokens
+
+ // For debugging purposes, show all tokens:
+ /*for i, t := range tokens {
+ fmt.Printf("%3d. %s\n", i, t)
+ }*/
+
+ // Parse it
+ err = t.parse()
+ if err != nil {
+ return nil, err
+ }
+
+ return t, nil
+}
+
+func (tpl *Template) execute(context Context, writer TemplateWriter) error {
+ // Determine the parent to be executed (for template inheritance)
+ parent := tpl
+ for parent.parent != nil {
+ parent = parent.parent
+ }
+
+ // Create context if none is given
+ newContext := make(Context)
+ newContext.Update(tpl.set.Globals)
+
+ if context != nil {
+ newContext.Update(context)
+
+ if len(newContext) > 0 {
+ // Check for context name syntax
+ err := newContext.checkForValidIdentifiers()
+ if err != nil {
+ return err
+ }
+
+ // Check for clashes with macro names
+ for k := range newContext {
+ _, has := tpl.exportedMacros[k]
+ if has {
+ return &Error{
+ Filename: tpl.name,
+ Sender: "execution",
+ ErrorMsg: fmt.Sprintf("Context key name '%s' clashes with macro '%s'.", k, k),
+ }
+ }
+ }
+ }
+ }
+
+ // Create operational context
+ ctx := newExecutionContext(parent, newContext)
+
+ // Run the selected document
+ if err := parent.root.Execute(ctx, writer); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (tpl *Template) newTemplateWriterAndExecute(context Context, writer io.Writer) error {
+ return tpl.execute(context, &templateWriter{w: writer})
+}
+
+func (tpl *Template) newBufferAndExecute(context Context) (*bytes.Buffer, error) {
+ // Create output buffer
+ // We assume that the rendered template will be 30% larger
+ buffer := bytes.NewBuffer(make([]byte, 0, int(float64(tpl.size)*1.3)))
+ if err := tpl.execute(context, buffer); err != nil {
+ return nil, err
+ }
+ return buffer, nil
+}
+
+// Executes the template with the given context and writes to writer (io.Writer)
+// on success. Context can be nil. Nothing is written on error; instead the error
+// is being returned.
+func (tpl *Template) ExecuteWriter(context Context, writer io.Writer) error {
+ buf, err := tpl.newBufferAndExecute(context)
+ if err != nil {
+ return err
+ }
+ _, err = buf.WriteTo(writer)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+// Same as ExecuteWriter. The only difference between both functions is that
+// this function might already have written parts of the generated template in the
+// case of an execution error because there's no intermediate buffer involved for
+// performance reasons. This is handy if you need high performance template
+// generation or if you want to manage your own pool of buffers.
+func (tpl *Template) ExecuteWriterUnbuffered(context Context, writer io.Writer) error {
+ return tpl.newTemplateWriterAndExecute(context, writer)
+}
+
+// Executes the template and returns the rendered template as a []byte
+func (tpl *Template) ExecuteBytes(context Context) ([]byte, error) {
+ // Execute template
+ buffer, err := tpl.newBufferAndExecute(context)
+ if err != nil {
+ return nil, err
+ }
+ return buffer.Bytes(), nil
+}
+
+// Executes the template and returns the rendered template as a string
+func (tpl *Template) Execute(context Context) (string, error) {
+ // Execute template
+ buffer, err := tpl.newBufferAndExecute(context)
+ if err != nil {
+ return "", err
+ }
+
+ return buffer.String(), nil
+
+}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_loader.go b/Godeps/_workspace/src/github.com/flosch/pongo2/template_loader.go
new file mode 100644
index 0000000..abd2340
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_loader.go
@@ -0,0 +1,156 @@
+package pongo2
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+)
+
+// LocalFilesystemLoader represents a local filesystem loader with basic
+// BaseDirectory capabilities. The access to the local filesystem is unrestricted.
+type LocalFilesystemLoader struct {
+ baseDir string
+}
+
+// MustNewLocalFileSystemLoader creates a new LocalFilesystemLoader instance
+// and panics if there's any error during instantiation. The parameters
+// are the same like NewLocalFileSystemLoader.
+func MustNewLocalFileSystemLoader(baseDir string) *LocalFilesystemLoader {
+ fs, err := NewLocalFileSystemLoader(baseDir)
+ if err != nil {
+ log.Panic(err)
+ }
+ return fs
+}
+
+// NewLocalFileSystemLoader creates a new LocalFilesystemLoader and allows
+// templatesto be loaded from disk (unrestricted). If any base directory
+// is given (or being set using SetBaseDir), this base directory is being used
+// for path calculation in template inclusions/imports. Otherwise the path
+// is calculated based relatively to the including template's path.
+func NewLocalFileSystemLoader(baseDir string) (*LocalFilesystemLoader, error) {
+ fs := &LocalFilesystemLoader{}
+ if baseDir != "" {
+ if err := fs.SetBaseDir(baseDir); err != nil {
+ return nil, err
+ }
+ }
+ return fs, nil
+}
+
+// SetBaseDir sets the template's base directory. This directory will
+// be used for any relative path in filters, tags and From*-functions to determine
+// your template. See the comment for NewLocalFileSystemLoader as well.
+func (fs *LocalFilesystemLoader) SetBaseDir(path string) error {
+ // Make the path absolute
+ if !filepath.IsAbs(path) {
+ abs, err := filepath.Abs(path)
+ if err != nil {
+ return err
+ }
+ path = abs
+ }
+
+ // Check for existence
+ fi, err := os.Stat(path)
+ if err != nil {
+ return err
+ }
+ if !fi.IsDir() {
+ return fmt.Errorf("The given path '%s' is not a directory.", path)
+ }
+
+ fs.baseDir = path
+ return nil
+}
+
+// Get reads the path's content from your local filesystem.
+func (fs *LocalFilesystemLoader) Get(path string) (io.Reader, error) {
+ buf, err := ioutil.ReadFile(path)
+ if err != nil {
+ return nil, err
+ }
+ return bytes.NewReader(buf), nil
+}
+
+// Abs resolves a filename relative to the base directory. Absolute paths are allowed.
+// When there's no base dir set, the absolute path to the filename
+// will be calculated based on either the provided base directory (which
+// might be a path of a template which includes another template) or
+// the current working directory.
+func (fs *LocalFilesystemLoader) Abs(base, name string) string {
+ if filepath.IsAbs(name) {
+ return name
+ }
+
+ // Our own base dir has always priority; if there's none
+ // we use the path provided in base.
+ var err error
+ if fs.baseDir == "" {
+ if base == "" {
+ base, err = os.Getwd()
+ if err != nil {
+ panic(err)
+ }
+ return filepath.Join(base, name)
+ }
+
+ return filepath.Join(filepath.Dir(base), name)
+ }
+
+ return filepath.Join(fs.baseDir, name)
+}
+
+// SandboxedFilesystemLoader is still WIP.
+type SandboxedFilesystemLoader struct {
+ *LocalFilesystemLoader
+}
+
+// NewSandboxedFilesystemLoader creates a new sandboxed local file system instance.
+func NewSandboxedFilesystemLoader(baseDir string) (*SandboxedFilesystemLoader, error) {
+ fs, err := NewLocalFileSystemLoader(baseDir)
+ if err != nil {
+ return nil, err
+ }
+ return &SandboxedFilesystemLoader{
+ LocalFilesystemLoader: fs,
+ }, nil
+}
+
+// Move sandbox to a virtual fs
+
+/*
+if len(set.SandboxDirectories) > 0 {
+ defer func() {
+ // Remove any ".." or other crap
+ resolvedPath = filepath.Clean(resolvedPath)
+
+ // Make the path absolute
+ absPath, err := filepath.Abs(resolvedPath)
+ if err != nil {
+ panic(err)
+ }
+ resolvedPath = absPath
+
+ // Check against the sandbox directories (once one pattern matches, we're done and can allow it)
+ for _, pattern := range set.SandboxDirectories {
+ matched, err := filepath.Match(pattern, resolvedPath)
+ if err != nil {
+ panic("Wrong sandbox directory match pattern (see http://golang.org/pkg/path/filepath/#Match).")
+ }
+ if matched {
+ // OK!
+ return
+ }
+ }
+
+ // No pattern matched, we have to log+deny the request
+ set.logf("Access attempt outside of the sandbox directories (blocked): '%s'", resolvedPath)
+ resolvedPath = ""
+ }()
+}
+*/
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_sets.go b/Godeps/_workspace/src/github.com/flosch/pongo2/template_sets.go
new file mode 100644
index 0000000..bcd8a63
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_sets.go
@@ -0,0 +1,239 @@
+package pongo2
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "sync"
+)
+
+// TemplateLoader allows to implement a virtual file system.
+type TemplateLoader interface {
+ // Abs calculates the path to a given template. Whenever a path must be resolved
+ // due to an import from another template, the base equals the parent template's path.
+ Abs(base, name string) string
+
+ // Get returns an io.Reader where the template's content can be read from.
+ Get(path string) (io.Reader, error)
+}
+
+// TemplateSet allows you to create your own group of templates with their own
+// global context (which is shared among all members of the set) and their own
+// configuration.
+// It's useful for a separation of different kind of templates
+// (e. g. web templates vs. mail templates).
+type TemplateSet struct {
+ name string
+ loader TemplateLoader
+
+ // Globals will be provided to all templates created within this template set
+ Globals Context
+
+ // If debug is true (default false), ExecutionContext.Logf() will work and output
+ // to STDOUT. Furthermore, FromCache() won't cache the templates.
+ // Make sure to synchronize the access to it in case you're changing this
+ // variable during program execution (and template compilation/execution).
+ Debug bool
+
+ // Sandbox features
+ // - Disallow access to specific tags and/or filters (using BanTag() and BanFilter())
+ //
+ // For efficiency reasons you can ban tags/filters only *before* you have
+ // added your first template to the set (restrictions are statically checked).
+ // After you added one, it's not possible anymore (for your personal security).
+ firstTemplateCreated bool
+ bannedTags map[string]bool
+ bannedFilters map[string]bool
+
+ // Template cache (for FromCache())
+ templateCache map[string]*Template
+ templateCacheMutex sync.Mutex
+}
+
+// NewSet can be used to create sets with different kind of templates
+// (e. g. web from mail templates), with different globals or
+// other configurations.
+func NewSet(name string, loader TemplateLoader) *TemplateSet {
+ return &TemplateSet{
+ name: name,
+ loader: loader,
+ Globals: make(Context),
+ bannedTags: make(map[string]bool),
+ bannedFilters: make(map[string]bool),
+ templateCache: make(map[string]*Template),
+ }
+}
+
+func (set *TemplateSet) resolveFilename(tpl *Template, path string) string {
+ name := ""
+ if tpl != nil && tpl.isTplString {
+ return path
+ }
+ if tpl != nil {
+ name = tpl.name
+ }
+ return set.loader.Abs(name, path)
+}
+
+// BanTag bans a specific tag for this template set. See more in the documentation for TemplateSet.
+func (set *TemplateSet) BanTag(name string) error {
+ _, has := tags[name]
+ if !has {
+ return fmt.Errorf("Tag '%s' not found.", name)
+ }
+ if set.firstTemplateCreated {
+ return errors.New("You cannot ban any tags after you've added your first template to your template set.")
+ }
+ _, has = set.bannedTags[name]
+ if has {
+ return fmt.Errorf("Tag '%s' is already banned.", name)
+ }
+ set.bannedTags[name] = true
+
+ return nil
+}
+
+// BanFilter bans a specific filter for this template set. See more in the documentation for TemplateSet.
+func (set *TemplateSet) BanFilter(name string) error {
+ _, has := filters[name]
+ if !has {
+ return fmt.Errorf("Filter '%s' not found.", name)
+ }
+ if set.firstTemplateCreated {
+ return errors.New("You cannot ban any filters after you've added your first template to your template set.")
+ }
+ _, has = set.bannedFilters[name]
+ if has {
+ return fmt.Errorf("Filter '%s' is already banned.", name)
+ }
+ set.bannedFilters[name] = true
+
+ return nil
+}
+
+// FromCache is a convenient method to cache templates. It is thread-safe
+// and will only compile the template associated with a filename once.
+// If TemplateSet.Debug is true (for example during development phase),
+// FromCache() will not cache the template and instead recompile it on any
+// call (to make changes to a template live instantaneously).
+func (set *TemplateSet) FromCache(filename string) (*Template, error) {
+ if set.Debug {
+ // Recompile on any request
+ return set.FromFile(filename)
+ }
+ // Cache the template
+ cleanedFilename := set.resolveFilename(nil, filename)
+
+ set.templateCacheMutex.Lock()
+ defer set.templateCacheMutex.Unlock()
+
+ tpl, has := set.templateCache[cleanedFilename]
+
+ // Cache miss
+ if !has {
+ tpl, err := set.FromFile(cleanedFilename)
+ if err != nil {
+ return nil, err
+ }
+ set.templateCache[cleanedFilename] = tpl
+ return tpl, nil
+ }
+
+ // Cache hit
+ return tpl, nil
+}
+
+// FromString loads a template from string and returns a Template instance.
+func (set *TemplateSet) FromString(tpl string) (*Template, error) {
+ set.firstTemplateCreated = true
+
+ return newTemplateString(set, []byte(tpl))
+}
+
+// FromFile loads a template from a filename and returns a Template instance.
+func (set *TemplateSet) FromFile(filename string) (*Template, error) {
+ set.firstTemplateCreated = true
+
+ fd, err := set.loader.Get(set.resolveFilename(nil, filename))
+ if err != nil {
+ return nil, &Error{
+ Filename: filename,
+ Sender: "fromfile",
+ ErrorMsg: err.Error(),
+ }
+ }
+ buf, err := ioutil.ReadAll(fd)
+ if err != nil {
+ return nil, &Error{
+ Filename: filename,
+ Sender: "fromfile",
+ ErrorMsg: err.Error(),
+ }
+ }
+
+ return newTemplate(set, filename, false, buf)
+}
+
+// RenderTemplateString is a shortcut and renders a template string directly.
+// Panics when providing a malformed template or an error occurs during execution.
+func (set *TemplateSet) RenderTemplateString(s string, ctx Context) string {
+ set.firstTemplateCreated = true
+
+ tpl := Must(set.FromString(s))
+ result, err := tpl.Execute(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return result
+}
+
+// RenderTemplateFile is a shortcut and renders a template file directly.
+// Panics when providing a malformed template or an error occurs during execution.
+func (set *TemplateSet) RenderTemplateFile(fn string, ctx Context) string {
+ set.firstTemplateCreated = true
+
+ tpl := Must(set.FromFile(fn))
+ result, err := tpl.Execute(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return result
+}
+
+func (set *TemplateSet) logf(format string, args ...interface{}) {
+ if set.Debug {
+ logger.Printf(fmt.Sprintf("[template set: %s] %s", set.name, format), args...)
+ }
+}
+
+// Logging function (internally used)
+func logf(format string, items ...interface{}) {
+ if debug {
+ logger.Printf(format, items...)
+ }
+}
+
+var (
+ debug bool // internal debugging
+ logger = log.New(os.Stdout, "[pongo2] ", log.LstdFlags|log.Lshortfile)
+
+ // DefaultLoader allows the default un-sandboxed access to the local file
+ // system and is being used by the DefaultSet.
+ DefaultLoader = MustNewLocalFileSystemLoader("")
+
+ // DefaultSet is a set created for you for convinience reasons.
+ DefaultSet = NewSet("default", DefaultLoader)
+
+ // Methods on the default set
+ FromString = DefaultSet.FromString
+ FromFile = DefaultSet.FromFile
+ FromCache = DefaultSet.FromCache
+ RenderTemplateString = DefaultSet.RenderTemplateString
+ RenderTemplateFile = DefaultSet.RenderTemplateFile
+
+ // Globals for the default set
+ Globals = DefaultSet.Globals
+)
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/autoescape.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/autoescape.tpl
new file mode 100644
index 0000000..f02fe9d
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/autoescape.tpl
@@ -0,0 +1,10 @@
+{{ "" }}
+{% autoescape off %}
+{{ "" }}
+{% endautoescape %}
+{% autoescape on %}
+{{ "" }}
+{% endautoescape %}
+{% autoescape off %}
+{{ ""|escape }}
+{% endautoescape %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/autoescape.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/autoescape.tpl.out
new file mode 100644
index 0000000..5955cc6
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/autoescape.tpl.out
@@ -0,0 +1,9 @@
+<script>alert('xss');</script>
+
+
+
+
+<script>alert('xss');</script>
+
+
+<script>alert('xss');</script>
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/base.html b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/base.html
new file mode 100644
index 0000000..17044cf
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/base.html
@@ -0,0 +1 @@
+Hello from {{ base_directory }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/subdir/include.html b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/subdir/include.html
new file mode 100644
index 0000000..ad17c3b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/subdir/include.html
@@ -0,0 +1 @@
+{% include "base.html" %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/subdir/index.html b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/subdir/index.html
new file mode 100644
index 0000000..94d9808
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/subdir/index.html
@@ -0,0 +1 @@
+{% extends "base.html" %}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/subdir/ssi.html b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/subdir/ssi.html
new file mode 100644
index 0000000..584026f
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/base_dir_test/subdir/ssi.html
@@ -0,0 +1 @@
+{% ssi "base.html" parsed %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/complex.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/complex.tpl
new file mode 100644
index 0000000..c707fb1
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/complex.tpl
@@ -0,0 +1,32 @@
+{# A more complex template using pongo2 (fully django-compatible template) #}
+
+
+
+
+ My blog page
+
+
+
+ Blogpost
+
+ {{ complex.post.Text|safe }}
+
+
+ Comments
+
+ {% for comment in complex.comments %}
+ {{ forloop.Counter }}. Comment ({{ forloop.Revcounter}} comment{{ forloop.Revcounter|pluralize:"s" }} left)
+ From: {{ comment.Author.Name }} ({{ comment.Author.Validated|yesno:"validated,not validated,unknown validation status" }})
+
+ {% if complex.is_admin(comment.Author) %}
+ This user is an admin (verify: {{ comment.Author.Is_admin }})!
+ {% else %}
+ This user is not admin!
+ {% endif %}
+
+ Written {{ comment.Date }}
+ {{ comment.Text|striptags }}
+ {% endfor %}
+
+
+
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/complex.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/complex.tpl.out
new file mode 100644
index 0000000..7fa3e1d
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/complex.tpl.out
@@ -0,0 +1,50 @@
+
+
+
+
+
+ My blog page
+
+
+
+ Blogpost
+
+
Hello!
Welcome to my new blog page. I'm using pongo2 which supports {{ variables }} and {% tags %}.
+
+
+ Comments
+
+
+ 1. Comment (3 comments left)
+ From: user1 (validated)
+
+
+ This user is not admin!
+
+
+ Written 2014-06-10 15:30:15 +0000 UTC
+ "pongo2 is nice!"
+
+ 2. Comment (2 comments left)
+ From: user2 (validated)
+
+
+ This user is an admin (verify: True)!
+
+
+ Written 2011-03-21 08:37:56.000000012 +0000 UTC
+ comment2 with unsafe tags in it
+
+ 3. Comment (1 comment left)
+ From: user3 (not validated)
+
+
+ This user is not admin!
+
+
+ Written 2014-06-10 15:30:15 +0000 UTC
+ hello! there
+
+
+
+
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/cycle.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/cycle.tpl
new file mode 100644
index 0000000..967d82f
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/cycle.tpl
@@ -0,0 +1,22 @@
+{% for item in simple.multiple_item_list %}
+ '{% cycle "item1" simple.name simple.number %}'
+{% endfor %}
+{% for item in simple.multiple_item_list %}
+ '{% cycle "item1" simple.name simple.number as cycleitem %}'
+ May I present the cycle item again: '{{ cycleitem }}'
+{% endfor %}
+{% for item in simple.multiple_item_list %}
+ '{% cycle "item1" simple.name simple.number as cycleitem silent %}'
+ May I present the cycle item: '{{ cycleitem }}'
+{% endfor %}
+{% for item in simple.multiple_item_list %}
+ '{% cycle "item1" simple.name simple.number as cycleitem silent %}'
+ May I present the cycle item: '{{ cycleitem }}'
+ {% include "inheritance/cycle_include.tpl" %}
+{% endfor %}
+'{% cycle "item1" simple.name simple.number as cycleitem %}'
+'{% cycle cycleitem %}'
+'{% cycle "item1" simple.name simple.number as cycleitem silent %}'
+'{{ cycleitem }}'
+'{% cycle cycleitem %}'
+'{{ cycleitem }}'
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/cycle.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/cycle.tpl.out
new file mode 100644
index 0000000..b966fb3
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/cycle.tpl.out
@@ -0,0 +1,130 @@
+
+ 'item1'
+
+ 'john doe'
+
+ '42'
+
+ 'item1'
+
+ 'john doe'
+
+ '42'
+
+ 'item1'
+
+ 'john doe'
+
+ '42'
+
+ 'item1'
+
+
+ 'item1'
+ May I present the cycle item again: 'item1'
+
+ 'john doe'
+ May I present the cycle item again: 'john doe'
+
+ '42'
+ May I present the cycle item again: '42'
+
+ 'item1'
+ May I present the cycle item again: 'item1'
+
+ 'john doe'
+ May I present the cycle item again: 'john doe'
+
+ '42'
+ May I present the cycle item again: '42'
+
+ 'item1'
+ May I present the cycle item again: 'item1'
+
+ 'john doe'
+ May I present the cycle item again: 'john doe'
+
+ '42'
+ May I present the cycle item again: '42'
+
+ 'item1'
+ May I present the cycle item again: 'item1'
+
+
+ ''
+ May I present the cycle item: 'item1'
+
+ ''
+ May I present the cycle item: 'john doe'
+
+ ''
+ May I present the cycle item: '42'
+
+ ''
+ May I present the cycle item: 'item1'
+
+ ''
+ May I present the cycle item: 'john doe'
+
+ ''
+ May I present the cycle item: '42'
+
+ ''
+ May I present the cycle item: 'item1'
+
+ ''
+ May I present the cycle item: 'john doe'
+
+ ''
+ May I present the cycle item: '42'
+
+ ''
+ May I present the cycle item: 'item1'
+
+
+ ''
+ May I present the cycle item: 'item1'
+ Included 'item1'.
+
+ ''
+ May I present the cycle item: 'john doe'
+ Included 'john doe'.
+
+ ''
+ May I present the cycle item: '42'
+ Included '42'.
+
+ ''
+ May I present the cycle item: 'item1'
+ Included 'item1'.
+
+ ''
+ May I present the cycle item: 'john doe'
+ Included 'john doe'.
+
+ ''
+ May I present the cycle item: '42'
+ Included '42'.
+
+ ''
+ May I present the cycle item: 'item1'
+ Included 'item1'.
+
+ ''
+ May I present the cycle item: 'john doe'
+ Included 'john doe'.
+
+ ''
+ May I present the cycle item: '42'
+ Included '42'.
+
+ ''
+ May I present the cycle item: 'item1'
+ Included 'item1'.
+
+'item1'
+'john doe'
+''
+'item1'
+''
+'john doe'
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/empty.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/empty.tpl
new file mode 100644
index 0000000..e69de29
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/empty.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/empty.tpl.out
new file mode 100644
index 0000000..e69de29
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/expressions.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/expressions.tpl
new file mode 100644
index 0000000..caada14
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/expressions.tpl
@@ -0,0 +1,69 @@
+integers and complex expressions
+{{ 10-100 }}
+{{ -(10-100) }}
+{{ -(-(10-100)) }}
+{{ -1 * (-(-(10-100))) }}
+{{ -1 * (-(-(10-100)) ^ 2) ^ 3 + 3 * (5 - 17) + 1 + 2 }}
+
+floats
+{{ 5.5 }}
+{{ 5.172841 }}
+{{ 5.5 - 1.5 == 4 }}
+{{ 5.5 - 1.5 == 4.0 }}
+
+mul/div
+{{ 2 * 5 }}
+{{ 2 * 5.0 }}
+{{ 2 * 0 }}
+{{ 2.5 * 5.3 }}
+{{ 1/2 }}
+{{ 1/2.0 }}
+{{ 1/0.000001 }}
+
+logic expressions
+{{ !true }}
+{{ !(true || false) }}
+{{ true || false }}
+{{ true or false }}
+{{ false or false }}
+{{ false || false }}
+{{ true && (true && (true && (true && (1 == 1 || false)))) }}
+
+float comparison
+{{ 5.5 <= 5.5 }}
+{{ 5.5 < 5.5 }}
+{{ 5.5 > 5.5 }}
+{{ 5.5 >= 5.5 }}
+
+remainders
+{{ (simple.number+7)%7 }}
+{{ (simple.number+7)%7 == 0 }}
+{{ (simple.number+7)%6 }}
+
+in/not in
+{{ 5 in simple.intmap }}
+{{ 2 in simple.intmap }}
+{{ 7 in simple.intmap }}
+{{ !(5 in simple.intmap) }}
+{{ not(7 in simple.intmap) }}
+{{ 1 in simple.multiple_item_list }}
+{{ 4 in simple.multiple_item_list }}
+{{ !(4 in simple.multiple_item_list) }}
+{{ "Hello" in simple.misc_list }}
+{{ "Hello2" in simple.misc_list }}
+{{ 99 in simple.misc_list }}
+{{ False in simple.misc_list }}
+
+issue #48 (associativity for infix operators)
+{{ 34/3*3 }}
+{{ 10 + 24 / 6 / 2 }}
+{{ 6 - 4 - 2 }}
+
+issue #64 (uint comparison with int const)
+{{ simple.uint }}
+{{ simple.uint == 8 }}
+{{ simple.uint == 9 }}
+{{ simple.uint >= 8 }}
+{{ simple.uint <= 8 }}
+{{ simple.uint < 8 }}
+{{ simple.uint > 8 }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/expressions.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/expressions.tpl.out
new file mode 100644
index 0000000..d710fc8
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/expressions.tpl.out
@@ -0,0 +1,69 @@
+integers and complex expressions
+-90
+90
+-90
+90
+531440999967.000000
+
+floats
+5.500000
+5.172841
+False
+True
+
+mul/div
+10
+10.000000
+0
+13.250000
+0
+0.500000
+1000000.000000
+
+logic expressions
+False
+False
+True
+True
+False
+False
+True
+
+float comparison
+True
+False
+False
+True
+
+remainders
+0
+True
+1
+
+in/not in
+True
+True
+False
+False
+True
+True
+False
+True
+True
+False
+True
+False
+
+issue #48 (associativity for infix operators)
+33
+12
+0
+
+issue #64 (uint comparison with int const)
+8
+True
+False
+True
+True
+False
+False
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/extends.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/extends.tpl
new file mode 100644
index 0000000..7216d05
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/extends.tpl
@@ -0,0 +1,3 @@
+{% extends "inheritance/base.tpl" %}
+
+{% block content %}Extends' content{% endblock %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/extends.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/extends.tpl.out
new file mode 100644
index 0000000..4c535c7
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/extends.tpl.out
@@ -0,0 +1 @@
+Start#This is base's bodyExtends' content#End
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-compilation.err b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-compilation.err
new file mode 100644
index 0000000..cc5c8cb
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-compilation.err
@@ -0,0 +1,5 @@
+{{ }}
+{{ (1 - 1 }}
+{{ 1|float: }}
+{{ "test"|non_existent_filter }}
+{{ "test"|"test" }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-compilation.err.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-compilation.err.out
new file mode 100644
index 0000000..3562fae
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-compilation.err.out
@@ -0,0 +1,5 @@
+.*Expected either a number, string, keyword or identifier\.
+.*Closing bracket expected after expression
+.*Filter parameter required after ':'.*
+.*Filter 'non_existent_filter' does not exist\.
+.*Filter name must be an identifier\.
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-execution.err b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-execution.err
new file mode 100644
index 0000000..1fc53f2
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-execution.err
@@ -0,0 +1,4 @@
+{{ -(true || false) }}
+{{ simple.func_add("test", 5) }}
+{% for item in simple.multiple_item_list %} {{ simple.func_add("test", 5) }} {% endfor %}
+{{ simple.func_variadic_sum_int("foo") }}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-execution.err.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-execution.err.out
new file mode 100644
index 0000000..a960607
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters-execution.err.out
@@ -0,0 +1,4 @@
+.*where: execution.*Negative sign on a non\-number expression
+.*Function input argument 0 of 'simple.func_add' must be of type int or \*pongo2.Value \(not string\).
+.*Function input argument 0 of 'simple.func_add' must be of type int or \*pongo2.Value \(not string\).
+.*Function variadic input argument of 'simple.func_variadic_sum_int' must be of type int or \*pongo2.Value \(not string\).
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters.tpl
new file mode 100644
index 0000000..c15468a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/filters.tpl
@@ -0,0 +1,304 @@
+add
+{{ 5|add:2 }}
+{{ 5|add:simple.number }}
+{{ 5|add:nothing }}
+{{ 5|add:"test" }}
+{{ "hello "|add:"john doe" }}
+{{ "hello "|add:simple.name }}
+
+addslashes
+{{ "plain text"|addslashes|safe }}
+{{ simple.escape_text|addslashes|safe }}
+
+capfirst
+{{ ""|capfirst }}
+{{ 5|capfirst }}
+{{ "h"|capfirst }}
+{{ "hello there!"|capfirst }}
+{{ simple.chinese_hello_world|capfirst }}
+
+cut
+{{ 15|cut:"5" }}
+{{ "Hello world"|cut: " " }}
+
+default
+{{ simple.nothing|default:"n/a" }}
+{{ nothing|default:simple.number }}
+{{ simple.number|default:"n/a" }}
+{{ 5|default:"n/a" }}
+
+default_if_none
+{{ simple.nothing|default_if_none:"n/a" }}
+{{ ""|default_if_none:"n/a" }}
+{{ nil|default_if_none:"n/a" }}
+
+get_digit
+{{ 1234567890|get_digit:0 }}
+{{ 1234567890|get_digit }}
+{{ 1234567890|get_digit:2 }}
+{{ 1234567890|get_digit:"4" }}
+{{ 1234567890|get_digit:10 }}
+{{ 1234567890|get_digit:15 }}
+
+safe
+{{ "" %}
+{% firstof doesnotexist ""|safe %}
+{% firstof doesnotexist simple.uint 42 %}
+{% firstof doesnotexist "test" simple.number 42 %}
+{% firstof %}
+{% firstof "test" "test2" %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/firstof.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/firstof.tpl.out
new file mode 100644
index 0000000..5ae55ad
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/firstof.tpl.out
@@ -0,0 +1,7 @@
+42
+<script>alert('xss');</script>
+
+8
+test
+
+test
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/for.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/for.tpl
new file mode 100644
index 0000000..d14e632
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/for.tpl
@@ -0,0 +1,27 @@
+{% for comment in complex.comments %}[{{ forloop.Counter }} {{ forloop.Counter0 }} {{ forloop.First }} {{ forloop.Last }} {{ forloop.Revcounter }} {{ forloop.Revcounter0 }}] {{ comment.Author.Name }}
+
+{# nested loop #}
+{% for char in comment.Text %}{{forloop.Parentloop.Counter0}}.{{forloop.Counter0}}:{{ char|safe }} {% endfor %}
+
+{% endfor %}
+
+reversed
+'{% for item in simple.multiple_item_list reversed %}{{ item }} {% endfor %}'
+
+sorted string map
+'{% for key in simple.strmap sorted %}{{ key }} {% endfor %}'
+
+sorted int map
+'{% for key in simple.intmap sorted %}{{ key }} {% endfor %}'
+
+sorted int list
+'{% for key in simple.unsorted_int_list sorted %}{{ key }} {% endfor %}'
+
+reversed sorted int list
+'{% for key in simple.unsorted_int_list reversed sorted %}{{ key }} {% endfor %}'
+
+reversed sorted string map
+'{% for key in simple.strmap reversed sorted %}{{ key }} {% endfor %}'
+
+reversed sorted int map
+'{% for key in simple.intmap reversed sorted %}{{ key }} {% endfor %}'
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/for.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/for.tpl.out
new file mode 100644
index 0000000..27c7120
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/for.tpl.out
@@ -0,0 +1,37 @@
+[1 0 True False 3 2] user1
+
+
+0.0:" 0.1:p 0.2:o 0.3:n 0.4:g 0.5:o 0.6:2 0.7: 0.8:i 0.9:s 0.10: 0.11:n 0.12:i 0.13:c 0.14:e 0.15:! 0.16:"
+
+[2 1 False False 2 1] user2
+
+
+1.0:c 1.1:o 1.2:m 1.3:m 1.4:e 1.5:n 1.6:t 1.7:2 1.8: 1.9:w 1.10:i 1.11:t 1.12:h 1.13: 1.14:< 1.15:s 1.16:c 1.17:r 1.18:i 1.19:p 1.20:t 1.21:> 1.22:u 1.23:n 1.24:s 1.25:a 1.26:f 1.27:e 1.28:< 1.29:/ 1.30:s 1.31:c 1.32:r 1.33:i 1.34:p 1.35:t 1.36:> 1.37: 1.38:t 1.39:a 1.40:g 1.41:s 1.42: 1.43:i 1.44:n 1.45: 1.46:i 1.47:t
+
+[3 2 False True 1 0] user3
+
+
+2.0:< 2.1:b 2.2:> 2.3:h 2.4:e 2.5:l 2.6:l 2.7:o 2.8:! 2.9:< 2.10:/ 2.11:b 2.12:> 2.13: 2.14:t 2.15:h 2.16:e 2.17:r 2.18:e
+
+
+
+reversed
+'55 34 21 13 8 5 3 2 1 1 '
+
+sorted string map
+'aab abc bcd gh ukq zab '
+
+sorted int map
+'1 2 5 '
+
+sorted int list
+'1 22 192 249 581 8271 9999 1828591 '
+
+reversed sorted int list
+'1828591 9999 8271 581 249 192 22 1 '
+
+reversed sorted string map
+'zab ukq gh bcd abc aab '
+
+reversed sorted int map
+'5 2 1 '
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/function_calls_wrapper.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/function_calls_wrapper.tpl
new file mode 100644
index 0000000..85b870a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/function_calls_wrapper.tpl
@@ -0,0 +1,11 @@
+{{ simple.func_add(simple.func_add(5, 15), simple.number) + 17 }}
+{{ simple.func_add_iface(simple.func_add_iface(5, 15), simple.number) + 17 }}
+{{ simple.func_variadic("hello") }}
+{{ simple.func_variadic("hello, %s", simple.name) }}
+{{ simple.func_variadic("%d + %d %s %d", 5, simple.number, "is", 49) }}
+{{ simple.func_variadic_sum_int() }}
+{{ simple.func_variadic_sum_int(1) }}
+{{ simple.func_variadic_sum_int(1, 19, 185) }}
+{{ simple.func_variadic_sum_int2() }}
+{{ simple.func_variadic_sum_int2(2) }}
+{{ simple.func_variadic_sum_int2(1, 7, 100) }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/function_calls_wrapper.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/function_calls_wrapper.tpl.out
new file mode 100644
index 0000000..924e466
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/function_calls_wrapper.tpl.out
@@ -0,0 +1,11 @@
+79
+79
+hello
+hello, john doe
+5 + 42 is 49
+0
+1
+205
+0
+2
+108
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/if.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/if.tpl
new file mode 100644
index 0000000..c434c3f
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/if.tpl
@@ -0,0 +1,17 @@
+{% if nothing %}false{% else %}true{% endif %}
+{% if simple %}simple != nil{% endif %}
+{% if simple.uint %}uint != 0{% endif %}
+{% if simple.float %}float != 0.0{% endif %}
+{% if !simple %}false{% else %}!simple{% endif %}
+{% if !simple.uint %}false{% else %}!simple.uint{% endif %}
+{% if !simple.float %}false{% else %}!simple.float{% endif %}
+{% if "Text" in complex.post %}text field in complex.post{% endif %}
+{% if 5 in simple.intmap %}5 in simple.intmap{% endif %}
+{% if !0.0 %}!0.0{% endif %}
+{% if !0 %}!0{% endif %}
+{% if simple.number == 43 %}no{% else %}42{% endif %}
+{% if simple.number < 42 %}false{% elif simple.number > 42 %}no{% elif simple.number >= 42 %}yes{% else %}no{% endif %}
+{% if simple.number < 42 %}false{% elif simple.number > 42 %}no{% elif simple.number != 42 %}no{% else %}yes{% endif %}
+{% if 0 %}!0{% elif nothing %}nothing{% else %}true{% endif %}
+{% if 0 %}!0{% elif simple.float %}simple.float{% else %}false{% endif %}
+{% if 0 %}!0{% elif !simple.float %}false{% elif "Text" in complex.post%}Elseif with no else{% endif %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/if.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/if.tpl.out
new file mode 100644
index 0000000..bf931be
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/if.tpl.out
@@ -0,0 +1,17 @@
+true
+simple != nil
+uint != 0
+float != 0.0
+!simple
+!simple.uint
+!simple.float
+text field in complex.post
+5 in simple.intmap
+!0.0
+!0
+42
+yes
+yes
+true
+simple.float
+Elseif with no else
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ifchanged.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ifchanged.tpl
new file mode 100644
index 0000000..0282925
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ifchanged.tpl
@@ -0,0 +1,9 @@
+{% for comment in complex.comments2 %}
+ {% ifchanged %}New comment from another user {{ comment.Author.Name }}{% endifchanged %}
+ {% ifchanged comment.Author.Validated %}
+ Validated changed to {{ comment.Author.Validated }}
+ {% else %}
+ Validated value not changed
+ {% endifchanged %}
+ {% ifchanged comment.Author.Name comment.Date %}Comment's author name or date changed{% endifchanged %}
+{% endfor %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ifchanged.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ifchanged.tpl.out
new file mode 100644
index 0000000..3f186cb
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ifchanged.tpl.out
@@ -0,0 +1,18 @@
+
+ New comment from another user user1
+
+ Validated changed to True
+
+ Comment's author name or date changed
+
+
+
+ Validated value not changed
+
+ Comment's author name or date changed
+
+ New comment from another user user3
+
+ Validated changed to False
+
+ Comment's author name or date changed
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/includes.helper b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/includes.helper
new file mode 100644
index 0000000..b66db23
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/includes.helper
@@ -0,0 +1 @@
+I'm {{ what_am_i }}{{ number }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/includes.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/includes.tpl
new file mode 100644
index 0000000..2394ee9
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/includes.tpl
@@ -0,0 +1,7 @@
+Start '{% include "includes.helper" %}' End
+Start '{% include "includes.helper" if_exists %}' End
+Start '{% include "includes.helper" with what_am_i=simple.name only %}' End
+Start '{% include "includes.helper" with what_am_i=simple.name %}' End
+Start '{% include simple.included_file|lower with number=7 what_am_i="guest" %}' End
+Start '{% include "includes.helper.not_exists" if_exists %}' End
+Start '{% include simple.included_file_not_exists if_exists with number=7 what_am_i="guest" %}' End
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/includes.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/includes.tpl.out
new file mode 100644
index 0000000..61d9318
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/includes.tpl.out
@@ -0,0 +1,7 @@
+Start 'I'm 11' End
+Start 'I'm 11' End
+Start 'I'm john doe' End
+Start 'I'm john doe11' End
+Start 'I'm guest7' End
+Start '' End
+Start '' End
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/base.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/base.tpl
new file mode 100644
index 0000000..2b06d32
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/base.tpl
@@ -0,0 +1,3 @@
+{% extends "inheritance2/skeleton.tpl" %}
+
+{% block body %}This is base's body{% block content %}Default content{% endblock %}{% endblock %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/base2.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/base2.tpl
new file mode 100644
index 0000000..5ebad5f
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/base2.tpl
@@ -0,0 +1 @@
+{% include "doesnotexist.tpl" %}
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/cycle_include.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/cycle_include.tpl
new file mode 100644
index 0000000..4b5d7b9
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/cycle_include.tpl
@@ -0,0 +1 @@
+Included '{{ cycleitem }}'.
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/inheritance2/skeleton.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/inheritance2/skeleton.tpl
new file mode 100644
index 0000000..c07cde6
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/inheritance/inheritance2/skeleton.tpl
@@ -0,0 +1 @@
+Start#{% block body %}Default body{% endblock %}#End
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/issues.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/issues.tpl
new file mode 100644
index 0000000..e69de29
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/issues.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/issues.tpl.out
new file mode 100644
index 0000000..e69de29
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/lorem.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/lorem.tpl
new file mode 100644
index 0000000..f6b52dd
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/lorem.tpl
@@ -0,0 +1,9 @@
+-----
+{% lorem %}
+-----
+{% lorem 10 %}
+-----
+{% lorem 3 p %}
+-----
+{% lorem 100 w %}
+-----
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/lorem.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/lorem.tpl.out
new file mode 100644
index 0000000..286a148
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/lorem.tpl.out
@@ -0,0 +1,20 @@
+-----
+Lorem ipsum dolor sit amet, consectetur adipisici elit, sed eiusmod tempor incidunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquid ex ea commodi consequat. Quis aute iure reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint obcaecat cupiditat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+-----
+Lorem ipsum dolor sit amet, consectetur adipisici elit, sed eiusmod tempor incidunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquid ex ea commodi consequat. Quis aute iure reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint obcaecat cupiditat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.
+Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi.
+Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat.
+Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis.
+At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, At accusam aliquyam diam diam dolore dolores duo eirmod eos erat, et nonumy sed tempor et et invidunt justo labore Stet clita ea et gubergren, kasd magna no rebum. sanctus sea sed takimata ut vero voluptua. est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat.
+Consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.
+Lorem ipsum dolor sit amet, consectetur adipisici elit, sed eiusmod tempor incidunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquid ex ea commodi consequat. Quis aute iure reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint obcaecat cupiditat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.
+Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi.
+-----
+Lorem ipsum dolor sit amet, consectetur adipisici elit, sed eiusmod tempor incidunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquid ex ea commodi consequat. Quis aute iure reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint obcaecat cupiditat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.
+Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi.
+-----
+Lorem ipsum dolor sit amet, consectetur adipisici elit, sed eiusmod tempor incidunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquid ex ea commodi consequat. Quis aute iure reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint obcaecat cupiditat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum
+-----
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-compilation.err b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-compilation.err
new file mode 100644
index 0000000..baf3e6e
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-compilation.err
@@ -0,0 +1 @@
+{% macro test_override() export %}{% endmacro %}{% macro test_override() export %}{% endmacro %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-compilation.err.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-compilation.err.out
new file mode 100644
index 0000000..a9ad8f1
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-compilation.err.out
@@ -0,0 +1 @@
+.*Another macro with name 'test_override' already exported.
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-execution.err b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-execution.err
new file mode 100644
index 0000000..ef7872c
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-execution.err
@@ -0,0 +1 @@
+{% macro number() export %}No number here.{% endmacro %}{{ number() }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-execution.err.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-execution.err.out
new file mode 100644
index 0000000..4d4067b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro-execution.err.out
@@ -0,0 +1 @@
+.*Context key name 'number' clashes with macro 'number'.
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro.helper b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro.helper
new file mode 100644
index 0000000..d9809fd
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro.helper
@@ -0,0 +1,2 @@
+{% macro imported_macro(foo) export %}Hey {{ foo }}!
{% endmacro %}
+{% macro imported_macro_void() export %}Hello mate!
{% endmacro %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro.tpl
new file mode 100644
index 0000000..3c4d931
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro.tpl
@@ -0,0 +1,30 @@
+Begin
+{% macro greetings(to, from=simple.name, name2="guest") %}
+Greetings to {{ to }} from {{ from }}. Howdy, {% if name2 == "guest" %}anonymous guest{% else %}{{ name2 }}{% endif %}!
+{% endmacro %}
+{{ greetings() }}
+{{ greetings(10) }}
+{{ greetings("john") }}
+{{ greetings("john", "michelle") }}
+{{ greetings("john", "michelle", "johann") }}
+{{ greetings("john", "michelle", "johann", "foobar") }}
+
+{% macro test2(loop, value) %}map[{{ loop.Counter0 }}] = {{ value }}{% endmacro %}
+{% for item in simple.misc_list %}
+{{ test2(forloop, item) }}{% endfor %}
+
+issue #39 (deactivate auto-escape of macros)
+{% macro html_test(name) %}
+Hello {{ name }}.
+{% endmacro %}
+{{ html_test("Max") }}
+
+Importing macros
+{% import "macro.helper" imported_macro, imported_macro as renamed_macro, imported_macro as html_test %}
+{{ imported_macro("User1") }}
+{{ renamed_macro("User2") }}
+{{ html_test("Max") }}
+
+Chaining macros{% import "macro2.helper" greeter_macro %}
+{{ greeter_macro() }}
+End
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro.tpl.out
new file mode 100644
index 0000000..1bb9274
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro.tpl.out
@@ -0,0 +1,44 @@
+Begin
+
+
+Greetings to from john doe. Howdy, anonymous guest!
+
+
+Greetings to 10 from john doe. Howdy, anonymous guest!
+
+
+Greetings to john from john doe. Howdy, anonymous guest!
+
+
+Greetings to john from michelle. Howdy, anonymous guest!
+
+
+Greetings to john from michelle. Howdy, johann!
+
+[Error (where: execution) in template_tests/macro.tpl | Line 2 Col 4 near 'macro'] Macro 'greetings' called with too many arguments (4 instead of 3).
+
+
+
+map[0] = Hello
+map[1] = 99
+map[2] = 3.140000
+map[3] = good
+
+issue #39 (deactivate auto-escape of macros)
+
+
+Hello Max.
+
+
+Importing macros
+
+Hey User1!
+Hey User2!
+Hey Max!
+
+Chaining macros
+
+
+One greeting: Hey Dirk!
- Hello mate!
+
+End
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro2.helper b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro2.helper
new file mode 100644
index 0000000..faa89c3
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/macro2.helper
@@ -0,0 +1,4 @@
+{% macro greeter_macro() export %}
+{% import "macro.helper" imported_macro, imported_macro_void %}
+One greeting: {{ imported_macro("Dirk") }} - {{ imported_macro_void() }}
+{% endmacro %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/now.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/now.tpl
new file mode 100644
index 0000000..99ddae8
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/now.tpl
@@ -0,0 +1,2 @@
+{# The 'fake' argument exists to have tests for the now-tag; it will set the time to a specific date instead of now #}
+{% now "Mon Jan 2 15:04:05 -0700 MST 2006" fake %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/now.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/now.tpl.out
new file mode 100644
index 0000000..4a8a624
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/now.tpl.out
@@ -0,0 +1,2 @@
+
+Wed Feb 5 18:31:45 +0000 UTC 2014
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/pongo2ctx.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/pongo2ctx.tpl
new file mode 100644
index 0000000..a3715f7
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/pongo2ctx.tpl
@@ -0,0 +1 @@
+{{ pongo2.version }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/pongo2ctx.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/pongo2ctx.tpl.out
new file mode 100644
index 0000000..9001211
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/pongo2ctx.tpl.out
@@ -0,0 +1 @@
+dev
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox-compilation.err b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox-compilation.err
new file mode 100644
index 0000000..fd59c3b
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox-compilation.err
@@ -0,0 +1,3 @@
+{{ "hello"|banned_filter }}
+{% banned_tag %}
+{% include "../../test_not_existent" %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox-compilation.err.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox-compilation.err.out
new file mode 100644
index 0000000..cbc56ef
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox-compilation.err.out
@@ -0,0 +1,3 @@
+.*Usage of filter 'banned_filter' is not allowed \(sandbox restriction active\).
+.*Usage of tag 'banned_tag' is not allowed \(sandbox restriction active\).
+\[Error \(where: fromfile\) | Line 1 Col 12 near '../../test_not_existent'\] open : no such file or directory
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox.tpl
new file mode 100644
index 0000000..5a58c75
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox.tpl
@@ -0,0 +1,3 @@
+{{ "hello"|unbanned_filter }}
+{% unbanned_tag %}
+{% include temp_file %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox.tpl.out
new file mode 100644
index 0000000..a60ed32
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/sandbox.tpl.out
@@ -0,0 +1,3 @@
+hello
+hello
+Hello from pongo2
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/set.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/set.tpl
new file mode 100644
index 0000000..09e7b2d
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/set.tpl
@@ -0,0 +1,5 @@
+{% set new_var = "hello" %}{{ new_var }}
+{% block content %}{% set new_var = "world" %}{{ new_var }}{% endblock %}
+{{ new_var }}{% for item in simple.misc_list %}
+{% set new_var = item %}{{ new_var }}{% endfor %}
+{{ new_var }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/set.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/set.tpl.out
new file mode 100644
index 0000000..bede53d
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/set.tpl.out
@@ -0,0 +1,8 @@
+hello
+world
+world
+Hello
+99
+3.140000
+good
+world
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/spaceless.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/spaceless.tpl
new file mode 100644
index 0000000..5659e81
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/spaceless.tpl
@@ -0,0 +1,18 @@
+{% spaceless %}
+
+
+ This is a test! Mail me at
+
+
+ mail@example.tld
+
+
+
+
+
+ Yep!
+
+
+
+
+{% endspaceless %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/spaceless.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/spaceless.tpl.out
new file mode 100644
index 0000000..5b33e2e
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/spaceless.tpl.out
@@ -0,0 +1,11 @@
+
+
+ This is a test! Mail me at
+
+
+ mail@example.tld
+
+
+ Yep!
+
+
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ssi.helper b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ssi.helper
new file mode 100644
index 0000000..aa481d9
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ssi.helper
@@ -0,0 +1,2 @@
+{{ number }}
+{{ "hello" }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ssi.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ssi.tpl
new file mode 100644
index 0000000..0159bf2
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ssi.tpl
@@ -0,0 +1,2 @@
+{% ssi "ssi.helper" %}
+{% ssi "ssi.helper" parsed %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ssi.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ssi.tpl.out
new file mode 100644
index 0000000..bc9ede0
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/ssi.tpl.out
@@ -0,0 +1,4 @@
+{{ number }}
+{{ "hello" }}
+11
+hello
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tag_filter.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tag_filter.tpl
new file mode 100644
index 0000000..2b11823
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tag_filter.tpl
@@ -0,0 +1,3 @@
+{% filter lower %}This is a nice test; let's see whether it works. Foobar. {{ simple.xss }}{% endfilter %}
+
+{% filter truncatechars:10|lower|length %}This is a nice test; let's see whether it works. Foobar. {{ simple.number }}{% endfilter %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tag_filter.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tag_filter.tpl.out
new file mode 100644
index 0000000..1e0bc0e
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tag_filter.tpl.out
@@ -0,0 +1,3 @@
+this is a nice test; let's see whether it works. foobar. <script>alert("uh oh");</script>
+
+10
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tags-compilation.err b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tags-compilation.err
new file mode 100644
index 0000000..3725c16
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tags-compilation.err
@@ -0,0 +1,6 @@
+{% if true %}
+{% if (1) . %}{% endif %}
+{% block test %}{% block test %}{% endblock %}{% endblock %}
+{% block test %}{% block test %}{% endblock %}{% endblock test2 %}
+{% block test %}{% block test2 %}{% endblock xy %}{% endblock test %}
+{% block test %}{% block test2 %}{% endblock test2 test3 %}{% endblock test %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tags-compilation.err.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tags-compilation.err.out
new file mode 100644
index 0000000..c6bb035
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/tags-compilation.err.out
@@ -0,0 +1,6 @@
+.*Unexpected EOF, expected tag elif or else or endif.
+.*If-condition is malformed.
+.*Block named 'test' already defined.*
+.*Name for 'endblock' must equal to 'block'\-tag's name \('test' != 'test2'\).
+.*Name for 'endblock' must equal to 'block'-tag's name \('test2' != 'xy'\).
+.*Either no or only one argument \(identifier\) allowed for 'endblock'.
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/template_sets.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/template_sets.tpl
new file mode 100644
index 0000000..60e195a
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/template_sets.tpl
@@ -0,0 +1,2 @@
+Globals
+{{ this_is_a_global_variable }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/template_sets.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/template_sets.tpl.out
new file mode 100644
index 0000000..f20279c
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/template_sets.tpl.out
@@ -0,0 +1,2 @@
+Globals
+this is a global text
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/templatetag.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/templatetag.tpl
new file mode 100644
index 0000000..ef23991
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/templatetag.tpl
@@ -0,0 +1,4 @@
+{% templatetag openblock %} url 'entry_list' {% templatetag closeblock %}
+{% templatetag openvariable %}{% templatetag closevariable %}
+{% templatetag openbrace %}{% templatetag closebrace %}
+{% templatetag opencomment %}{% templatetag closecomment %}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/templatetag.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/templatetag.tpl.out
new file mode 100644
index 0000000..5c46ef8
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/templatetag.tpl.out
@@ -0,0 +1,4 @@
+{% url 'entry_list' %}
+{{}}
+{}
+{##}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/variables.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/variables.tpl
new file mode 100644
index 0000000..2f78cae
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/variables.tpl
@@ -0,0 +1,13 @@
+{{ 1 }}
+{{ -5 }}
+{{ "hallo" }}
+{{ true }}
+{{ false }}
+{{ simple.uint }}
+{{ simple.nil }}
+{{ simple.str }}
+{{ simple.bool_false }}
+{{ simple.bool_true }}
+{{ simple.uint }}
+{{ simple.uint|integer }}
+{{ simple.uint|float }}
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/variables.tpl.out b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/variables.tpl.out
new file mode 100644
index 0000000..bdc9569
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/variables.tpl.out
@@ -0,0 +1,13 @@
+1
+-5
+hallo
+True
+False
+8
+
+string
+False
+True
+8
+8
+8.000000
\ No newline at end of file
diff --git a/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/verbatim.tpl b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/verbatim.tpl
new file mode 100644
index 0000000..1d9cc88
--- /dev/null
+++ b/Godeps/_workspace/src/github.com/flosch/pongo2/template_tests/verbatim.tpl
@@ -0,0 +1,7 @@
+.{{ simple.number }}{% verbatim %}
+