
Switch to dep for dependency management, update deps

Signed-off-by: Knut Ahlers <knut@ahlers.me>
Knut Ahlers 2017-11-05 16:06:22 +01:00
parent d686bf1816
commit 328cc40d38
Signed by: luzifer
GPG Key ID: DC2729FDD34BE99E
1158 changed files with 299365 additions and 5465 deletions
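
The migration below was presumably produced with dep's standard workflow (a sketch under that assumption; the exact commands are not recorded in this commit):

    dep init      # reads the existing Godeps/Godeps.json, writes Gopkg.toml and Gopkg.lock, and populates vendor/
    dep ensure    # syncs vendor/ with Gopkg.lock

The old Godeps/ directory was then removed by hand, which accounts for the deletions that follow.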

83
Godeps/Godeps.json generated
View File

@ -1,83 +0,0 @@
{
"ImportPath": "github.com/Luzifer/promcertcheck",
"GoVersion": "go1.7",
"GodepVersion": "v79",
"Deps": [
{
"ImportPath": "bitbucket.org/ww/goautoneg",
"Comment": "null-5",
"Rev": "75cd24fc2f2c2a2088577d12123ddee5f54e0675"
},
{
"ImportPath": "github.com/Luzifer/rconfig",
"Comment": "v1.2.0",
"Rev": "7aef1d393c1e2d0758901853b59981c7adc67c7e"
},
{
"ImportPath": "github.com/beorn7/perks/quantile",
"Rev": "b965b613227fddccbfffe13eae360ed3fa822f8d"
},
{
"ImportPath": "github.com/flosch/pongo2",
"Comment": "v1.0-rc1-174-gf5d79aa",
"Rev": "f5d79aa0a914c08eb7f51a96cd7b2dbbe46fca46"
},
{
"ImportPath": "github.com/golang/protobuf/proto",
"Rev": "aece6fb931241ad332956db4f62798dfbea944b3"
},
{
"ImportPath": "github.com/gorilla/context",
"Rev": "215affda49addc4c8ef7e2534915df2c8c35c6cd"
},
{
"ImportPath": "github.com/gorilla/mux",
"Rev": "8096f47503459bcc74d1f4c487b7e6e42e5746b5"
},
{
"ImportPath": "github.com/matttproud/golang_protobuf_extensions/pbutil",
"Rev": "fc2b8d3a73c4867e51861bbdd5ae3c1f0869dd6a"
},
{
"ImportPath": "github.com/prometheus/client_golang/model",
"Comment": "0.6.0-17-g3b16b46",
"Rev": "3b16b46a713f181888e5e9a1205ccc34d6917fb9"
},
{
"ImportPath": "github.com/prometheus/client_golang/prometheus",
"Comment": "0.6.0-17-g3b16b46",
"Rev": "3b16b46a713f181888e5e9a1205ccc34d6917fb9"
},
{
"ImportPath": "github.com/prometheus/client_golang/text",
"Comment": "0.6.0-17-g3b16b46",
"Rev": "3b16b46a713f181888e5e9a1205ccc34d6917fb9"
},
{
"ImportPath": "github.com/prometheus/client_model/go",
"Comment": "model-0.0.2-12-gfa8ad6f",
"Rev": "fa8ad6fec33561be4280a8f0514318c79d7f6cb6"
},
{
"ImportPath": "github.com/prometheus/procfs",
"Rev": "c91d8eefde16bd047416409eb56353ea84a186e4"
},
{
"ImportPath": "github.com/robfig/cron",
"Comment": "v1-2-g67823cd",
"Rev": "67823cd24dece1b04cced3a0a0b3ca2bc84d875e"
},
{
"ImportPath": "github.com/spf13/pflag",
"Rev": "8e7dc108ab3a1ab6ce6d922bbaff5657b88e8e49"
},
{
"ImportPath": "gopkg.in/validator.v2",
"Rev": "07ffaad256c8e957050ad83d6472eb97d785013d"
},
{
"ImportPath": "gopkg.in/yaml.v2",
"Rev": "31c299268d302dd0aa9a0dcf765a3d58971ac83f"
}
]
}

5
Godeps/Readme generated
View File

@ -1,5 +0,0 @@
This directory tree is generated automatically by godep.
Please do not edit.
See https://github.com/tools/godep for more information.

117
Gopkg.lock generated Normal file
View File

@ -0,0 +1,117 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
name = "github.com/Luzifer/rconfig"
packages = ["."]
revision = "7aef1d393c1e2d0758901853b59981c7adc67c7e"
version = "v1.2.0"
[[projects]]
branch = "master"
name = "github.com/beorn7/perks"
packages = ["quantile"]
revision = "4c0e84591b9aa9e6dcfdf3e020114cd81f89d5f9"
[[projects]]
name = "github.com/flosch/pongo2"
packages = ["."]
revision = "5e81b817a0c48c1c57cdf1a9056cf76bdee02ca9"
version = "v3.0"
[[projects]]
branch = "master"
name = "github.com/golang/protobuf"
packages = ["proto"]
revision = "1643683e1b54a9e88ad26d98f81400c8c9d9f4f9"
[[projects]]
name = "github.com/gorilla/context"
packages = ["."]
revision = "1ea25387ff6f684839d82767c1733ff4d4d15d0a"
version = "v1.1"
[[projects]]
name = "github.com/gorilla/mux"
packages = ["."]
revision = "24fca303ac6da784b9e8269f724ddeb0b2eea5e7"
version = "v1.5.0"
[[projects]]
name = "github.com/matttproud/golang_protobuf_extensions"
packages = ["pbutil"]
revision = "3247c84500bff8d9fb6d579d800f20b3e091582c"
version = "v1.0.0"
[[projects]]
name = "github.com/prometheus/client_golang"
packages = ["prometheus"]
revision = "c5b7fccd204277076155f10851dad72b76a49317"
version = "v0.8.0"
[[projects]]
branch = "master"
name = "github.com/prometheus/client_model"
packages = ["go"]
revision = "6f3806018612930941127f2a7c6c453ba2c527d2"
[[projects]]
branch = "master"
name = "github.com/prometheus/common"
packages = ["expfmt","internal/bitbucket.org/ww/goautoneg","model"]
revision = "e3fb1a1acd7605367a2b378bc2e2f893c05174b7"
[[projects]]
branch = "master"
name = "github.com/prometheus/procfs"
packages = [".","xfs"]
revision = "a6e9df898b1336106c743392c48ee0b71f5c4efa"
[[projects]]
name = "github.com/robfig/cron"
packages = ["."]
revision = "b024fc5ea0e34bc3f83d9941c8d60b0622bfaca4"
version = "v1"
[[projects]]
name = "github.com/sirupsen/logrus"
packages = ["."]
revision = "f006c2ac4710855cf0f916dd6b77acf6b048dc6e"
version = "v1.0.3"
[[projects]]
name = "github.com/spf13/pflag"
packages = ["."]
revision = "e57e3eeb33f795204c1ca35f56c44f83227c6e66"
version = "v1.0.0"
[[projects]]
branch = "master"
name = "golang.org/x/crypto"
packages = ["ssh/terminal"]
revision = "bd6f299fb381e4c3393d1c4b1f0b94f5e77650c8"
[[projects]]
branch = "master"
name = "golang.org/x/sys"
packages = ["unix","windows"]
revision = "8eb05f94d449fdf134ec24630ce69ada5b469c1c"
[[projects]]
branch = "v2"
name = "gopkg.in/validator.v2"
packages = ["."]
revision = "460c83432a98c35224a6fe352acf8b23e067ad06"
[[projects]]
branch = "v2"
name = "gopkg.in/yaml.v2"
packages = ["."]
revision = "eb3733d160e74a9c7e442f435eb3bea458e1d19f"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "d0a90698569f49d4e7ef58586b857a1c7c60e48eaca07f1585d02e7ce91ca092"
solver-name = "gps-cdcl"
solver-version = 1

42
Gopkg.toml Normal file
View File

@ -0,0 +1,42 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
[[constraint]]
name = "github.com/Luzifer/rconfig"
version = "1.2.0"
[[constraint]]
name = "github.com/flosch/pongo2"
[[constraint]]
name = "github.com/gorilla/mux"
[[constraint]]
name = "github.com/prometheus/client_golang"
[[constraint]]
name = "github.com/robfig/cron"
[[constraint]]
name = "github.com/sirupsen/logrus"
version = "1.0.3"

Binary file not shown.

View File

@ -1,13 +0,0 @@
include $(GOROOT)/src/Make.inc
TARG=bitbucket.org/ww/goautoneg
GOFILES=autoneg.go
include $(GOROOT)/src/Make.pkg
format:
gofmt -w *.go
docs:
gomake clean
godoc ${TARG} > README.txt

70
vendor/github.com/Luzifer/rconfig/bool_test.go generated vendored Normal file
View File

@ -0,0 +1,70 @@
package rconfig
import (
"os"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing bool parsing", func() {
type t struct {
Test1 bool `default:"true"`
Test2 bool `default:"false" flag:"test2"`
Test3 bool `default:"true" flag:"test3,t"`
Test4 bool `flag:"test4"`
}
var (
err error
args []string
cfg t
)
BeforeEach(func() {
cfg = t{}
args = []string{
"--test2",
"-t",
}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.Test1).To(Equal(true))
Expect(cfg.Test2).To(Equal(true))
Expect(cfg.Test3).To(Equal(true))
Expect(cfg.Test4).To(Equal(false))
})
})
var _ = Describe("Testing to set bool from ENV with default", func() {
type t struct {
Test1 bool `default:"true" env:"TEST1"`
}
var (
err error
args []string
cfg t
)
BeforeEach(func() {
cfg = t{}
args = []string{}
})
JustBeforeEach(func() {
os.Unsetenv("TEST1")
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.Test1).To(Equal(true))
})
})

41
vendor/github.com/Luzifer/rconfig/duration_test.go generated vendored Normal file
View File

@ -0,0 +1,41 @@
package rconfig
import (
"time"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Duration", func() {
type t struct {
Test time.Duration `flag:"duration"`
TestS time.Duration `flag:"other-duration,o"`
TestDef time.Duration `default:"30h"`
}
var (
err error
args []string
cfg t
)
BeforeEach(func() {
cfg = t{}
args = []string{
"--duration=23s", "-o", "45m",
}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.Test).To(Equal(23 * time.Second))
Expect(cfg.TestS).To(Equal(45 * time.Minute))
Expect(cfg.TestDef).To(Equal(30 * time.Hour))
})
})

56
vendor/github.com/Luzifer/rconfig/errors_test.go generated vendored Normal file
View File

@ -0,0 +1,56 @@
package rconfig
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing errors", func() {
It("should not accept string as int", func() {
Expect(parse(&struct {
A int `default:"a"`
}{}, []string{})).To(HaveOccurred())
})
It("should not accept string as float", func() {
Expect(parse(&struct {
A float32 `default:"a"`
}{}, []string{})).To(HaveOccurred())
})
It("should not accept string as uint", func() {
Expect(parse(&struct {
A uint `default:"a"`
}{}, []string{})).To(HaveOccurred())
})
It("should not accept string as uint in sub-struct", func() {
Expect(parse(&struct {
B struct {
A uint `default:"a"`
}
}{}, []string{})).To(HaveOccurred())
})
It("should not accept string slice as int slice", func() {
Expect(parse(&struct {
A []int `default:"a,bn"`
}{}, []string{})).To(HaveOccurred())
})
It("should not accept variables not being pointers", func() {
cfg := struct {
A string `default:"a"`
}{}
Expect(parse(cfg, []string{})).To(HaveOccurred())
})
It("should not accept variables not being pointers to structs", func() {
cfg := "test"
Expect(parse(cfg, []string{})).To(HaveOccurred())
})
})

37
vendor/github.com/Luzifer/rconfig/example_test.go generated vendored Normal file
View File

@ -0,0 +1,37 @@
package rconfig
import (
"fmt"
"os"
)
func ExampleParse() {
// We're building an example configuration with a sub-struct to be filled
// by the Parse command.
config := struct {
Username string `default:"unknown" flag:"user,u" description:"Your name"`
Details struct {
Age int `default:"25" flag:"age" description:"Your age"`
}
}{}
// To have more reliable results we're setting os.Args to a known value.
// In real-life use cases you wouldn't do this but parse the original
// commandline arguments.
os.Args = []string{
"example",
"--user=Luzifer",
}
Parse(&config)
fmt.Printf("Hello %s, happy birthday for your %dth birthday.",
config.Username,
config.Details.Age)
// You can also show a usage message for your user
Usage()
// Output:
// Hello Luzifer, happy birthday for your 25th birthday.
}

44
vendor/github.com/Luzifer/rconfig/float_test.go generated vendored Normal file
View File

@ -0,0 +1,44 @@
package rconfig
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing float parsing", func() {
type t struct {
Test32 float32 `flag:"float32"`
Test32P float32 `flag:"float32p,3"`
Test64 float64 `flag:"float64"`
Test64P float64 `flag:"float64p,6"`
TestDef float32 `default:"66.256"`
}
var (
err error
args []string
cfg t
)
BeforeEach(func() {
cfg = t{}
args = []string{
"--float32=5.5", "-3", "6.6",
"--float64=7.7", "-6", "8.8",
}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.Test32).To(Equal(float32(5.5)))
Expect(cfg.Test32P).To(Equal(float32(6.6)))
Expect(cfg.Test64).To(Equal(float64(7.7)))
Expect(cfg.Test64P).To(Equal(float64(8.8)))
Expect(cfg.TestDef).To(Equal(float32(66.256)))
})
})

128
vendor/github.com/Luzifer/rconfig/general_test.go generated vendored Normal file
View File

@ -0,0 +1,128 @@
package rconfig
import (
"os"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing general parsing", func() {
type t struct {
Test string `default:"foo" env:"shell" flag:"shell" description:"Test"`
Test2 string `default:"blub" env:"testvar" flag:"testvar,t" description:"Test"`
DefaultFlag string `default:"goo"`
SadFlag string
}
type tValidated struct {
Test string `flag:"test" default:"" validate:"nonzero"`
}
var (
err error
args []string
cfg t
)
Context("with defined arguments", func() {
BeforeEach(func() {
cfg = t{}
args = []string{
"--shell=test23",
"-t", "bla",
}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have parsed the expected values", func() {
Expect(cfg.Test).To(Equal("test23"))
Expect(cfg.Test2).To(Equal("bla"))
Expect(cfg.SadFlag).To(Equal(""))
Expect(cfg.DefaultFlag).To(Equal("goo"))
})
})
Context("with no arguments", func() {
BeforeEach(func() {
cfg = t{}
args = []string{}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have used the default value", func() {
Expect(cfg.Test).To(Equal("foo"))
})
})
Context("with no arguments and set env", func() {
BeforeEach(func() {
cfg = t{}
args = []string{}
os.Setenv("shell", "test546")
})
AfterEach(func() {
os.Unsetenv("shell")
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have used the value from env", func() {
Expect(cfg.Test).To(Equal("test546"))
})
})
Context("with additional arguments", func() {
BeforeEach(func() {
cfg = t{}
args = []string{
"--shell=test23",
"-t", "bla",
"positional1", "positional2",
}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have parsed the expected values", func() {
Expect(cfg.Test).To(Equal("test23"))
Expect(cfg.Test2).To(Equal("bla"))
Expect(cfg.SadFlag).To(Equal(""))
Expect(cfg.DefaultFlag).To(Equal("goo"))
})
It("should have detected the positional arguments", func() {
Expect(Args()).To(Equal([]string{"positional1", "positional2"}))
})
})
Context("making use of the validator package", func() {
var cfgValidated tValidated
BeforeEach(func() {
cfgValidated = tValidated{}
args = []string{}
})
JustBeforeEach(func() {
err = parseAndValidate(&cfgValidated, args)
})
It("should have errored", func() { Expect(err).To(HaveOccurred()) })
})
})

54
vendor/github.com/Luzifer/rconfig/int_test.go generated vendored Normal file
View File

@ -0,0 +1,54 @@
package rconfig
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing int parsing", func() {
type t struct {
Test int `flag:"int"`
TestP int `flag:"intp,i"`
Test8 int8 `flag:"int8"`
Test8P int8 `flag:"int8p,8"`
Test32 int32 `flag:"int32"`
Test32P int32 `flag:"int32p,3"`
Test64 int64 `flag:"int64"`
Test64P int64 `flag:"int64p,6"`
TestDef int8 `default:"66"`
}
var (
err error
args []string
cfg t
)
BeforeEach(func() {
cfg = t{}
args = []string{
"--int=1", "-i", "2",
"--int8=3", "-8", "4",
"--int32=5", "-3", "6",
"--int64=7", "-6", "8",
}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.Test).To(Equal(1))
Expect(cfg.TestP).To(Equal(2))
Expect(cfg.Test8).To(Equal(int8(3)))
Expect(cfg.Test8P).To(Equal(int8(4)))
Expect(cfg.Test32).To(Equal(int32(5)))
Expect(cfg.Test32P).To(Equal(int32(6)))
Expect(cfg.Test64).To(Equal(int64(7)))
Expect(cfg.Test64P).To(Equal(int64(8)))
Expect(cfg.TestDef).To(Equal(int8(66)))
})
})

40
vendor/github.com/Luzifer/rconfig/os-args_test.go generated vendored Normal file
View File

@ -0,0 +1,40 @@
package rconfig_test
import (
"os"
. "github.com/Luzifer/rconfig"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing os.Args", func() {
type t struct {
A string `default:"a" flag:"a"`
}
var (
err error
cfg t
)
JustBeforeEach(func() {
err = Parse(&cfg)
})
Context("With only valid arguments", func() {
BeforeEach(func() {
cfg = t{}
os.Args = []string{"--a=bar"}
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.A).To(Equal("bar"))
})
})
})

87
vendor/github.com/Luzifer/rconfig/precedence_test.go generated vendored Normal file
View File

@ -0,0 +1,87 @@
package rconfig
import (
"os"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Precedence", func() {
type t struct {
A int `default:"1" vardefault:"a" env:"a" flag:"avar,a" description:"a"`
}
var (
err error
cfg t
args []string
vardefaults map[string]string
)
JustBeforeEach(func() {
cfg = t{}
SetVariableDefaults(vardefaults)
err = parse(&cfg, args)
})
Context("Provided: Flag, Env, Default, VarDefault", func() {
BeforeEach(func() {
args = []string{"-a", "5"}
os.Setenv("a", "8")
vardefaults = map[string]string{
"a": "3",
}
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have used the flag value", func() {
Expect(cfg.A).To(Equal(5))
})
})
Context("Provided: Env, Default, VarDefault", func() {
BeforeEach(func() {
args = []string{}
os.Setenv("a", "8")
vardefaults = map[string]string{
"a": "3",
}
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have used the env value", func() {
Expect(cfg.A).To(Equal(8))
})
})
Context("Provided: Default, VarDefault", func() {
BeforeEach(func() {
args = []string{}
os.Unsetenv("a")
vardefaults = map[string]string{
"a": "3",
}
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have used the vardefault value", func() {
Expect(cfg.A).To(Equal(3))
})
})
Context("Provided: Default", func() {
BeforeEach(func() {
args = []string{}
os.Unsetenv("a")
vardefaults = map[string]string{}
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have used the default value", func() {
Expect(cfg.A).To(Equal(1))
})
})
})

View File

@ -0,0 +1,13 @@
package rconfig_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"testing"
)
func TestRconfig(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Rconfig Suite")
}

51
vendor/github.com/Luzifer/rconfig/slice_test.go generated vendored Normal file
View File

@ -0,0 +1,51 @@
package rconfig
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing slices", func() {
type t struct {
Int []int `default:"1,2,3" flag:"int"`
String []string `default:"a,b,c" flag:"string"`
IntP []int `default:"1,2,3" flag:"intp,i"`
StringP []string `default:"a,b,c" flag:"stringp,s"`
}
var (
err error
args []string
cfg t
)
BeforeEach(func() {
cfg = t{}
args = []string{
"--int=4,5", "-s", "hallo,welt",
}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values for int-slice", func() {
Expect(len(cfg.Int)).To(Equal(2))
Expect(cfg.Int).To(Equal([]int{4, 5}))
Expect(cfg.Int).NotTo(Equal([]int{5, 4}))
})
It("should have the expected values for int-shorthand-slice", func() {
Expect(len(cfg.IntP)).To(Equal(3))
Expect(cfg.IntP).To(Equal([]int{1, 2, 3}))
})
It("should have the expected values for string-slice", func() {
Expect(len(cfg.String)).To(Equal(3))
Expect(cfg.String).To(Equal([]string{"a", "b", "c"}))
})
It("should have the expected values for string-shorthand-slice", func() {
Expect(len(cfg.StringP)).To(Equal(2))
Expect(cfg.StringP).To(Equal([]string{"hallo", "welt"}))
})
})

36
vendor/github.com/Luzifer/rconfig/sub-struct_test.go generated vendored Normal file
View File

@ -0,0 +1,36 @@
package rconfig
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing sub-structs", func() {
type t struct {
Test string `default:"blubb"`
Sub struct {
Test string `default:"Hallo"`
}
}
var (
err error
args []string
cfg t
)
BeforeEach(func() {
cfg = t{}
args = []string{}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.Test).To(Equal("blubb"))
Expect(cfg.Sub.Test).To(Equal("Hallo"))
})
})

59
vendor/github.com/Luzifer/rconfig/uint_test.go generated vendored Normal file
View File

@ -0,0 +1,59 @@
package rconfig
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing uint parsing", func() {
type t struct {
Test uint `flag:"int"`
TestP uint `flag:"intp,i"`
Test8 uint8 `flag:"int8"`
Test8P uint8 `flag:"int8p,8"`
Test16 uint16 `flag:"int16"`
Test16P uint16 `flag:"int16p,1"`
Test32 uint32 `flag:"int32"`
Test32P uint32 `flag:"int32p,3"`
Test64 uint64 `flag:"int64"`
Test64P uint64 `flag:"int64p,6"`
TestDef uint8 `default:"66"`
}
var (
err error
args []string
cfg t
)
BeforeEach(func() {
cfg = t{}
args = []string{
"--int=1", "-i", "2",
"--int8=3", "-8", "4",
"--int32=5", "-3", "6",
"--int64=7", "-6", "8",
"--int16=9", "-1", "10",
}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.Test).To(Equal(uint(1)))
Expect(cfg.TestP).To(Equal(uint(2)))
Expect(cfg.Test8).To(Equal(uint8(3)))
Expect(cfg.Test8P).To(Equal(uint8(4)))
Expect(cfg.Test32).To(Equal(uint32(5)))
Expect(cfg.Test32P).To(Equal(uint32(6)))
Expect(cfg.Test64).To(Equal(uint64(7)))
Expect(cfg.Test64P).To(Equal(uint64(8)))
Expect(cfg.Test16).To(Equal(uint16(9)))
Expect(cfg.Test16P).To(Equal(uint16(10)))
Expect(cfg.TestDef).To(Equal(uint8(66)))
})
})

122
vendor/github.com/Luzifer/rconfig/vardefault_test.go generated vendored Normal file
View File

@ -0,0 +1,122 @@
package rconfig
import (
"io/ioutil"
"os"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Testing variable defaults", func() {
type t struct {
MySecretValue string `default:"secret" env:"foo" vardefault:"my_secret_value"`
MyUsername string `default:"luzifer" vardefault:"username"`
SomeVar string `flag:"var" description:"some variable"`
IntVar int64 `vardefault:"int_var" default:"23"`
}
var (
err error
cfg t
args = []string{}
vardefaults = map[string]string{
"my_secret_value": "veryverysecretkey",
"unkownkey": "hi there",
"int_var": "42",
}
)
BeforeEach(func() {
cfg = t{}
})
JustBeforeEach(func() {
err = parse(&cfg, args)
})
Context("With manually provided variables", func() {
BeforeEach(func() {
SetVariableDefaults(vardefaults)
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.IntVar).To(Equal(int64(42)))
Expect(cfg.MySecretValue).To(Equal("veryverysecretkey"))
Expect(cfg.MyUsername).To(Equal("luzifer"))
Expect(cfg.SomeVar).To(Equal(""))
})
})
Context("With defaults from YAML data", func() {
BeforeEach(func() {
yamlData := []byte("---\nmy_secret_value: veryverysecretkey\nunknownkey: hi there\nint_var: 42\n")
SetVariableDefaults(VarDefaultsFromYAML(yamlData))
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.IntVar).To(Equal(int64(42)))
Expect(cfg.MySecretValue).To(Equal("veryverysecretkey"))
Expect(cfg.MyUsername).To(Equal("luzifer"))
Expect(cfg.SomeVar).To(Equal(""))
})
})
Context("With defaults from YAML file", func() {
var tmp *os.File
BeforeEach(func() {
tmp, _ = ioutil.TempFile("", "")
yamlData := "---\nmy_secret_value: veryverysecretkey\nunknownkey: hi there\nint_var: 42\n"
tmp.WriteString(yamlData)
SetVariableDefaults(VarDefaultsFromYAMLFile(tmp.Name()))
})
AfterEach(func() {
tmp.Close()
os.Remove(tmp.Name())
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.IntVar).To(Equal(int64(42)))
Expect(cfg.MySecretValue).To(Equal("veryverysecretkey"))
Expect(cfg.MyUsername).To(Equal("luzifer"))
Expect(cfg.SomeVar).To(Equal(""))
})
})
Context("With defaults from invalid YAML data", func() {
BeforeEach(func() {
yamlData := []byte("---\nmy_secret_value = veryverysecretkey\nunknownkey = hi there\nint_var = 42\n")
SetVariableDefaults(VarDefaultsFromYAML(yamlData))
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.IntVar).To(Equal(int64(23)))
Expect(cfg.MySecretValue).To(Equal("secret"))
Expect(cfg.MyUsername).To(Equal("luzifer"))
Expect(cfg.SomeVar).To(Equal(""))
})
})
Context("With defaults from non existent YAML file", func() {
BeforeEach(func() {
file := "/tmp/this_file_should_not_exist_146e26723r"
SetVariableDefaults(VarDefaultsFromYAMLFile(file))
})
It("should not have errored", func() { Expect(err).NotTo(HaveOccurred()) })
It("should have the expected values", func() {
Expect(cfg.IntVar).To(Equal(int64(23)))
Expect(cfg.MySecretValue).To(Equal("secret"))
Expect(cfg.MyUsername).To(Equal("luzifer"))
Expect(cfg.SomeVar).To(Equal(""))
})
})
})

2
vendor/github.com/beorn7/perks/.gitignore generated vendored Normal file
View File

@ -0,0 +1,2 @@
*.test
*.prof

20
vendor/github.com/beorn7/perks/LICENSE generated vendored Normal file
View File

@ -0,0 +1,20 @@
Copyright (C) 2013 Blake Mizerany
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

31
vendor/github.com/beorn7/perks/README.md generated vendored Normal file
View File

@ -0,0 +1,31 @@
# Perks for Go (golang.org)
Perks contains the Go package quantile that computes approximate quantiles over
an unbounded data stream within low memory and CPU bounds.
For more information and examples, see:
http://godoc.org/github.com/bmizerany/perks
A very special thank you and shout out to Graham Cormode (Rutgers University),
Flip Korn (AT&T Labs–Research), S. Muthukrishnan (Rutgers University), and
Divesh Srivastava (AT&T Labs–Research) for their research and publication of
[Effective Computation of Biased Quantiles over Data Streams](http://www.cs.rutgers.edu/~muthu/bquant.pdf)
Thank you, also:
* Armon Dadgar (@armon)
* Andrew Gerrand (@nf)
* Brad Fitzpatrick (@bradfitz)
* Keith Rarick (@kr)
FAQ:
Q: Why not move the quantile package into the project root?
A: I want to add more packages to perks later.
Copyright (C) 2013 Blake Mizerany
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

26
vendor/github.com/beorn7/perks/histogram/bench_test.go generated vendored Normal file
View File

@ -0,0 +1,26 @@
package histogram
import (
"math/rand"
"testing"
)
func BenchmarkInsert10Bins(b *testing.B) {
b.StopTimer()
h := New(10)
b.StartTimer()
for i := 0; i < b.N; i++ {
f := rand.ExpFloat64()
h.Insert(f)
}
}
func BenchmarkInsert100Bins(b *testing.B) {
b.StopTimer()
h := New(100)
b.StartTimer()
for i := 0; i < b.N; i++ {
f := rand.ExpFloat64()
h.Insert(f)
}
}

108
vendor/github.com/beorn7/perks/histogram/histogram.go generated vendored Normal file
View File

@ -0,0 +1,108 @@
// Package histogram provides a Go implementation of BigML's histogram package
// for Clojure/Java. It is currently experimental.
package histogram
import (
"container/heap"
"math"
"sort"
)
type Bin struct {
Count int
Sum float64
}
func (b *Bin) Update(x *Bin) {
b.Count += x.Count
b.Sum += x.Sum
}
func (b *Bin) Mean() float64 {
return b.Sum / float64(b.Count)
}
type Bins []*Bin
func (bs Bins) Len() int { return len(bs) }
func (bs Bins) Less(i, j int) bool { return bs[i].Mean() < bs[j].Mean() }
func (bs Bins) Swap(i, j int) { bs[i], bs[j] = bs[j], bs[i] }
func (bs *Bins) Push(x interface{}) {
*bs = append(*bs, x.(*Bin))
}
func (bs *Bins) Pop() interface{} {
return bs.remove(len(*bs) - 1)
}
func (bs *Bins) remove(n int) *Bin {
if n < 0 || len(*bs) < n {
return nil
}
x := (*bs)[n]
*bs = append((*bs)[:n], (*bs)[n+1:]...)
return x
}
type Histogram struct {
res *reservoir
}
func New(maxBins int) *Histogram {
return &Histogram{res: newReservoir(maxBins)}
}
func (h *Histogram) Insert(f float64) {
h.res.insert(&Bin{1, f})
h.res.compress()
}
func (h *Histogram) Bins() Bins {
return h.res.bins
}
type reservoir struct {
n int
maxBins int
bins Bins
}
func newReservoir(maxBins int) *reservoir {
return &reservoir{maxBins: maxBins}
}
func (r *reservoir) insert(bin *Bin) {
r.n += bin.Count
i := sort.Search(len(r.bins), func(i int) bool {
return r.bins[i].Mean() >= bin.Mean()
})
if i < 0 || i == r.bins.Len() {
// TODO(blake): Maybe use an .insert(i, bin) instead of
// performing the extra work of a heap.Push.
heap.Push(&r.bins, bin)
return
}
r.bins[i].Update(bin)
}
func (r *reservoir) compress() {
for r.bins.Len() > r.maxBins {
minGapIndex := -1
minGap := math.MaxFloat64
for i := 0; i < r.bins.Len()-1; i++ {
gap := gapWeight(r.bins[i], r.bins[i+1])
if minGap > gap {
minGap = gap
minGapIndex = i
}
}
prev := r.bins[minGapIndex]
next := r.bins.remove(minGapIndex + 1)
prev.Update(next)
}
}
func gapWeight(prev, next *Bin) float64 {
return next.Mean() - prev.Mean()
}

View File

@ -0,0 +1,38 @@
package histogram
import (
"math/rand"
"testing"
)
func TestHistogram(t *testing.T) {
const numPoints = 1e6
const maxBins = 3
h := New(maxBins)
for i := 0; i < numPoints; i++ {
f := rand.ExpFloat64()
h.Insert(f)
}
bins := h.Bins()
if g := len(bins); g > maxBins {
t.Fatalf("got %d bins, wanted <= %d", g, maxBins)
}
for _, b := range bins {
t.Logf("%+v", b)
}
if g := count(h.Bins()); g != numPoints {
t.Fatalf("binned %d points, wanted %d", g, numPoints)
}
}
func count(bins Bins) int {
binCounts := 0
for _, b := range bins {
binCounts += b.Count
}
return binCounts
}

View File

@ -133,7 +133,7 @@ func (s *Stream) Query(q float64) float64 {
if l == 0 {
return 0
}
i := int(float64(l) * q)
i := int(math.Ceil(float64(l) * q))
if i > 0 {
i -= 1
}

View File

@ -33,6 +33,9 @@ func verifyPercsWithAbsoluteEpsilon(t *testing.T, a []float64, s *Stream) {
for quantile, epsilon := range Targets {
n := float64(len(a))
k := int(quantile * n)
if k < 1 {
k = 1
}
lower := int((quantile - epsilon) * n)
if lower < 1 {
lower = 1
@ -99,6 +102,30 @@ func TestTargetedQuery(t *testing.T) {
verifyPercsWithAbsoluteEpsilon(t, a, s)
}
func TestTargetedQuerySmallSampleSize(t *testing.T) {
rand.Seed(42)
s := NewTargeted(TargetsSmallEpsilon)
a := []float64{1, 2, 3, 4, 5}
for _, v := range a {
s.Insert(v)
}
verifyPercsWithAbsoluteEpsilon(t, a, s)
// If not yet flushed, results should be precise:
if !s.flushed() {
for φ, want := range map[float64]float64{
0.01: 1,
0.10: 1,
0.50: 3,
0.90: 5,
0.99: 5,
} {
if got := s.Query(φ); got != want {
t.Errorf("want %f for φ=%f, got %f", want, φ, got)
}
}
}
}
func TestLowBiasedQuery(t *testing.T) {
rand.Seed(42)
s := NewLowBiased(RelativeEpsilon)

90
vendor/github.com/beorn7/perks/topk/topk.go generated vendored Normal file
View File

@ -0,0 +1,90 @@
package topk
import (
"sort"
)
// http://www.cs.ucsb.edu/research/tech_reports/reports/2005-23.pdf
type Element struct {
Value string
Count int
}
type Samples []*Element
func (sm Samples) Len() int {
return len(sm)
}
func (sm Samples) Less(i, j int) bool {
return sm[i].Count < sm[j].Count
}
func (sm Samples) Swap(i, j int) {
sm[i], sm[j] = sm[j], sm[i]
}
type Stream struct {
k int
mon map[string]*Element
// the minimum Element
min *Element
}
func New(k int) *Stream {
s := new(Stream)
s.k = k
s.mon = make(map[string]*Element)
s.min = &Element{}
// Track k+1 so that less frequent items contend for that spot,
// resulting in k being more accurate.
return s
}
func (s *Stream) Insert(x string) {
s.insert(&Element{x, 1})
}
func (s *Stream) Merge(sm Samples) {
for _, e := range sm {
s.insert(e)
}
}
func (s *Stream) insert(in *Element) {
e := s.mon[in.Value]
if e != nil {
e.Count++
} else {
if len(s.mon) < s.k+1 {
e = &Element{in.Value, in.Count}
s.mon[in.Value] = e
} else {
e = s.min
delete(s.mon, e.Value)
e.Value = in.Value
e.Count += in.Count
s.min = e
}
}
if e.Count < s.min.Count {
s.min = e
}
}
func (s *Stream) Query() Samples {
var sm Samples
for _, e := range s.mon {
sm = append(sm, e)
}
sort.Sort(sort.Reverse(sm))
if len(sm) < s.k {
return sm
}
return sm[:s.k]
}

57
vendor/github.com/beorn7/perks/topk/topk_test.go generated vendored Normal file
View File

@ -0,0 +1,57 @@
package topk
import (
"fmt"
"math/rand"
"sort"
"testing"
)
func TestTopK(t *testing.T) {
stream := New(10)
ss := []*Stream{New(10), New(10), New(10)}
m := make(map[string]int)
for _, s := range ss {
for i := 0; i < 1e6; i++ {
v := fmt.Sprintf("%x", int8(rand.ExpFloat64()))
s.Insert(v)
m[v]++
}
stream.Merge(s.Query())
}
var sm Samples
for x, s := range m {
sm = append(sm, &Element{x, s})
}
sort.Sort(sort.Reverse(sm))
g := stream.Query()
if len(g) != 10 {
t.Fatalf("got %d, want 10", len(g))
}
for i, e := range g {
if sm[i].Value != e.Value {
t.Errorf("at %d: want %q, got %q", i, sm[i].Value, e.Value)
}
}
}
func TestQuery(t *testing.T) {
queryTests := []struct {
value string
expected int
}{
{"a", 1},
{"b", 2},
{"c", 2},
}
stream := New(2)
for _, tt := range queryTests {
stream.Insert(tt.value)
if n := len(stream.Query()); n != tt.expected {
t.Errorf("want %d, got %d", tt.expected, n)
}
}
}

View File

@ -1,10 +1,10 @@
language: go
go:
- 1.4
- 1.3
- tip
install:
- go get golang.org/x/tools/cmd/cover
- go get code.google.com/p/go.tools/cmd/cover
- go get github.com/mattn/goveralls
- go get gopkg.in/check.v1
script:

View File

@ -1,6 +1,5 @@
# [pongo](https://en.wikipedia.org/wiki/Pongo_%28genus%29)2
[![Join the chat at https://gitter.im/flosch/pongo2](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/flosch/pongo2)
[![GoDoc](https://godoc.org/github.com/flosch/pongo2?status.png)](https://godoc.org/github.com/flosch/pongo2)
[![Build Status](https://travis-ci.org/flosch/pongo2.svg?branch=master)](https://travis-ci.org/flosch/pongo2)
[![Coverage Status](https://coveralls.io/repos/flosch/pongo2/badge.png?branch=master)](https://coveralls.io/r/flosch/pongo2?branch=master)
@ -96,7 +95,6 @@ Please also have a look on the [caveats](https://github.com/flosch/pongo2#caveat
If you're using the `master`-branch of pongo2, you might be interested in this section. Since pongo2 is still in development (even though there is a first stable release!), there could be (backwards-incompatible) API changes over time. To keep track of these and therefore make it painless for you to adapt your codebase, I'll list them here.
* Function signature for tag execution changed: not taking a `bytes.Buffer` anymore; instead `Execute()`-functions are now taking a `TemplateWriter` interface.
* Function signature for tag and filter parsing/execution changed (`error` return type changed to `*Error`).
* `INodeEvaluator` has been removed and got replaced by `IEvaluator`. You can change your existing tags/filters by simply replacing the interface.
* Two new helper functions: [`RenderTemplateFile()`](https://godoc.org/github.com/flosch/pongo2#RenderTemplateFile) and [`RenderTemplateString()`](https://godoc.org/github.com/flosch/pongo2#RenderTemplateString).
@ -106,7 +104,7 @@ If you're using the `master`-branch of pongo2, you might be interested in this s
## How you can help
* Write [filters](https://github.com/flosch/pongo2/blob/master/filters_builtin.go#L3) / [tags](https://github.com/flosch/pongo2/blob/master/tags.go#L4) (see [tutorial](https://www.florian-schlachter.de/post/pongo2/)) by forking pongo2 and sending pull requests
* Write/improve code tests (use the following command to see what tests are missing: `go test -v -cover -covermode=count -coverprofile=cover.out && go tool cover -html=cover.out` or have a look on [gocover.io/github.com/flosch/pongo2](http://gocover.io/github.com/flosch/pongo2))
* Write/improve code tests (use the following command to see what tests are missing: `go test -v -cover -covermode=count -coverprofile=cover.out && go tool cover -html=cover.out`)
* Write/improve template tests (see the `template_tests/` directory)
* Write middleware, libraries and websites using pongo2. :-)
@ -117,8 +115,7 @@ For a documentation on how the templating language works you can [head over to t
You can access pongo2's API documentation on [godoc](https://godoc.org/github.com/flosch/pongo2).
## Blog post series
* [pongo2 v3 released](https://www.florian-schlachter.de/post/pongo2-v3/)
* [pongo2 v2 released](https://www.florian-schlachter.de/post/pongo2-v2/)
* [pongo2 1.0 released](https://www.florian-schlachter.de/post/pongo2-10/) [August 8th 2014]
* [pongo2 playground](https://www.florian-schlachter.de/post/pongo2-playground/) [August 1st 2014]
@ -158,7 +155,6 @@ You can access pongo2's API documentation on [godoc](https://godoc.org/github.co
* [macaron-pongo2](https://github.com/macaron-contrib/pongo2) - pongo2 support for [Macaron](https://github.com/Unknwon/macaron), a modular web framework.
* [ginpongo2](https://github.com/ngerakines/ginpongo2) - middleware for [gin](github.com/gin-gonic/gin) to use pongo2 templates
* [pongo2-trans](https://github.com/fromYukki/pongo2trans) - `trans`-tag implementation for internationalization
* [tpongo2](https://github.com/tango-contrib/tpongo2) - pongo2 support for [Tango](https://github.com/lunny/tango), a micro-kernel & pluggable web framework.
Please add your project to this list and send me a pull request when you've developed something nice for pongo2.

View File

@ -7,7 +7,7 @@ import (
var reIdentifiers = regexp.MustCompile("^[a-zA-Z0-9_]+$")
// A Context type provides constants, variables, instances or functions to a template.
// Use this Context type to provide constants, variables, instances or functions to your template.
//
// pongo2 automatically provides meta-information or functions through the "pongo2"-key.
// Currently, context["pongo2"] contains the following keys:
@ -32,7 +32,6 @@ func (c Context) checkForValidIdentifiers() *Error {
return nil
}
// Update updates this context with the key/value-pairs from another context.
func (c Context) Update(other Context) Context {
for k, v := range other {
c[k] = v
@ -40,8 +39,6 @@ func (c Context) Update(other Context) Context {
return c
}
// ExecutionContext contains all data important for the current rendering state.
//
// If you're writing a custom tag, your tag's Execute()-function will
// have access to the ExecutionContext. This struct stores anything
// about the current rendering process's Context including

View File

@ -6,7 +6,7 @@ import (
"os"
)
// The Error type is being used to address an error during lexing, parsing or
// This Error type is being used to address an error during lexing, parsing or
// execution. If you want to return an error object (for example in your own
// tag or filter) fill this object with as much information as you have.
// Make sure "Sender" is always given (if you're returning an error within
@ -58,7 +58,7 @@ func (e *Error) Error() string {
return s
}
// RawLine returns the affected line from the original template, if available.
// Returns the affected line from the original template, if available.
func (e *Error) RawLine() (line string, available bool) {
if e.Line <= 0 || e.Filename == "<string>" {
return "", false
@ -72,12 +72,7 @@ func (e *Error) RawLine() (line string, available bool) {
if err != nil {
panic(err)
}
defer func() {
err := file.Close()
if err != nil {
panic(err)
}
}()
defer file.Close()
scanner := bufio.NewScanner(file)
l := 0

View File

@ -86,31 +86,31 @@ func (fc *filterCall) Execute(v *Value, ctx *ExecutionContext) (*Value, *Error)
param = AsValue(nil)
}
filteredValue, err := fc.filterFunc(v, param)
filtered_value, err := fc.filterFunc(v, param)
if err != nil {
return nil, err.updateFromTokenIfNeeded(ctx.template, fc.token)
}
return filteredValue, nil
return filtered_value, nil
}
// Filter = IDENT | IDENT ":" FilterArg | IDENT "|" Filter
func (p *Parser) parseFilter() (*filterCall, *Error) {
identToken := p.MatchType(TokenIdentifier)
ident_token := p.MatchType(TokenIdentifier)
// Check filter ident
if identToken == nil {
if ident_token == nil {
return nil, p.Error("Filter name must be an identifier.", nil)
}
filter := &filterCall{
token: identToken,
name: identToken.Val,
token: ident_token,
name: ident_token.Val,
}
// Get the appropriate filter function and bind it
filterFn, exists := filters[identToken.Val]
filterFn, exists := filters[ident_token.Val]
if !exists {
return nil, p.Error(fmt.Sprintf("Filter '%s' does not exist.", identToken.Val), identToken)
return nil, p.Error(fmt.Sprintf("Filter '%s' does not exist.", ident_token.Val), ident_token)
}
filter.filterFunc = filterFn

View File

@ -78,9 +78,9 @@ func init() {
RegisterFilter("time", filterDate) // time uses filterDate (same golang-format)
RegisterFilter("title", filterTitle)
RegisterFilter("truncatechars", filterTruncatechars)
RegisterFilter("truncatechars_html", filterTruncatecharsHTML)
RegisterFilter("truncatechars_html", filterTruncatecharsHtml)
RegisterFilter("truncatewords", filterTruncatewords)
RegisterFilter("truncatewords_html", filterTruncatewordsHTML)
RegisterFilter("truncatewords_html", filterTruncatewordsHtml)
RegisterFilter("upper", filterUpper)
RegisterFilter("urlencode", filterUrlencode)
RegisterFilter("urlize", filterUrlize)
@ -105,9 +105,9 @@ func filterTruncatecharsHelper(s string, newLen int) string {
return string(runes)
}
func filterTruncateHTMLHelper(value string, newOutput *bytes.Buffer, cond func() bool, fn func(c rune, s int, idx int) int, finalize func()) {
func filterTruncateHtmlHelper(value string, new_output *bytes.Buffer, cond func() bool, fn func(c rune, s int, idx int) int, finalize func()) {
vLen := len(value)
var tagStack []string
tag_stack := make([]string, 0)
idx := 0
for idx < vLen && !cond() {
@ -118,17 +118,17 @@ func filterTruncateHTMLHelper(value string, newOutput *bytes.Buffer, cond func()
}
if c == '<' {
newOutput.WriteRune(c)
new_output.WriteRune(c)
idx += s // consume "<"
if idx+1 < vLen {
if value[idx] == '/' {
// Close tag
newOutput.WriteString("/")
new_output.WriteString("/")
tag := ""
idx++ // consume "/"
idx += 1 // consume "/"
for idx < vLen {
c2, size2 := utf8.DecodeRuneInString(value[idx:])
@ -146,21 +146,21 @@ func filterTruncateHTMLHelper(value string, newOutput *bytes.Buffer, cond func()
idx += size2
}
if len(tagStack) > 0 {
if len(tag_stack) > 0 {
// Ideally, the close tag is TOP of tag stack
// In malformed HTML, it must not be, so iterate through the stack and remove the tag
for i := len(tagStack) - 1; i >= 0; i-- {
if tagStack[i] == tag {
for i := len(tag_stack) - 1; i >= 0; i-- {
if tag_stack[i] == tag {
// Found the tag
tagStack[i] = tagStack[len(tagStack)-1]
tagStack = tagStack[:len(tagStack)-1]
tag_stack[i] = tag_stack[len(tag_stack)-1]
tag_stack = tag_stack[:len(tag_stack)-1]
break
}
}
}
newOutput.WriteString(tag)
newOutput.WriteString(">")
new_output.WriteString(tag)
new_output.WriteString(">")
} else {
// Open tag
@ -174,7 +174,7 @@ func filterTruncateHTMLHelper(value string, newOutput *bytes.Buffer, cond func()
continue
}
newOutput.WriteRune(c2)
new_output.WriteRune(c2)
// End of tag found
if c2 == '>' {
@ -194,7 +194,7 @@ func filterTruncateHTMLHelper(value string, newOutput *bytes.Buffer, cond func()
}
// Add tag to stack
tagStack = append(tagStack, tag)
tag_stack = append(tag_stack, tag)
}
}
} else {
@ -204,10 +204,10 @@ func filterTruncateHTMLHelper(value string, newOutput *bytes.Buffer, cond func()
finalize()
for i := len(tagStack) - 1; i >= 0; i-- {
tag := tagStack[i]
for i := len(tag_stack) - 1; i >= 0; i-- {
tag := tag_stack[i]
// Close everything from the regular tag stack
newOutput.WriteString(fmt.Sprintf("</%s>", tag))
new_output.WriteString(fmt.Sprintf("</%s>", tag))
}
}
@ -217,28 +217,28 @@ func filterTruncatechars(in *Value, param *Value) (*Value, *Error) {
return AsValue(filterTruncatecharsHelper(s, newLen)), nil
}
func filterTruncatecharsHTML(in *Value, param *Value) (*Value, *Error) {
func filterTruncatecharsHtml(in *Value, param *Value) (*Value, *Error) {
value := in.String()
newLen := max(param.Integer()-3, 0)
newOutput := bytes.NewBuffer(nil)
new_output := bytes.NewBuffer(nil)
textcounter := 0
filterTruncateHTMLHelper(value, newOutput, func() bool {
filterTruncateHtmlHelper(value, new_output, func() bool {
return textcounter >= newLen
}, func(c rune, s int, idx int) int {
textcounter++
newOutput.WriteRune(c)
new_output.WriteRune(c)
return idx + s
}, func() {
if textcounter >= newLen && textcounter < len(value) {
newOutput.WriteString("...")
new_output.WriteString("...")
}
})
return AsSafeValue(newOutput.String()), nil
return AsSafeValue(new_output.String()), nil
}
func filterTruncatewords(in *Value, param *Value) (*Value, *Error) {
@ -260,19 +260,19 @@ func filterTruncatewords(in *Value, param *Value) (*Value, *Error) {
return AsValue(strings.Join(out, " ")), nil
}
func filterTruncatewordsHTML(in *Value, param *Value) (*Value, *Error) {
func filterTruncatewordsHtml(in *Value, param *Value) (*Value, *Error) {
value := in.String()
newLen := max(param.Integer(), 0)
newOutput := bytes.NewBuffer(nil)
new_output := bytes.NewBuffer(nil)
wordcounter := 0
filterTruncateHTMLHelper(value, newOutput, func() bool {
filterTruncateHtmlHelper(value, new_output, func() bool {
return wordcounter >= newLen
}, func(_ rune, _ int, idx int) int {
// Get next word
wordFound := false
word_found := false
for idx < len(value) {
c2, size2 := utf8.DecodeRuneInString(value[idx:])
@ -286,29 +286,29 @@ func filterTruncatewordsHTML(in *Value, param *Value) (*Value, *Error) {
return idx
}
newOutput.WriteRune(c2)
new_output.WriteRune(c2)
idx += size2
if c2 == ' ' || c2 == '.' || c2 == ',' || c2 == ';' {
// Word ends here, stop capturing it now
break
} else {
wordFound = true
word_found = true
}
}
if wordFound {
if word_found {
wordcounter++
}
return idx
}, func() {
if wordcounter >= newLen {
newOutput.WriteString("...")
new_output.WriteString("...")
}
})
return AsSafeValue(newOutput.String()), nil
return AsSafeValue(new_output.String()), nil
}
func filterEscape(in *Value, param *Value) (*Value, *Error) {
@ -377,8 +377,9 @@ func filterAdd(in *Value, param *Value) (*Value, *Error) {
if in.IsNumber() && param.IsNumber() {
if in.IsFloat() || param.IsFloat() {
return AsValue(in.Float() + param.Float()), nil
} else {
return AsValue(in.Integer() + param.Integer()), nil
}
return AsValue(in.Integer() + param.Integer()), nil
}
// If in/param is not a number, we're relying on the
// Value's String() conversion and just add them both together
@ -549,8 +550,8 @@ func filterCenter(in *Value, param *Value) (*Value, *Error) {
}
func filterDate(in *Value, param *Value) (*Value, *Error) {
t, isTime := in.Interface().(time.Time)
if !isTime {
t, is_time := in.Interface().(time.Time)
if !is_time {
return nil, &Error{
Sender: "filter:date",
ErrorMsg: "Filter input argument must be of type 'time.Time'.",
@ -711,13 +712,13 @@ func filterStringformat(in *Value, param *Value) (*Value, *Error) {
return AsValue(fmt.Sprintf(param.String(), in.Interface())), nil
}
var reStriptags = regexp.MustCompile("<[^>]*?>")
var re_striptags = regexp.MustCompile("<[^>]*?>")
func filterStriptags(in *Value, param *Value) (*Value, *Error) {
s := in.String()
// Strip all tags
s = reStriptags.ReplaceAllString(s, "")
s = re_striptags.ReplaceAllString(s, "")
return AsValue(strings.TrimSpace(s)), nil
}
@ -769,10 +770,11 @@ func filterPluralize(in *Value, param *Value) (*Value, *Error) {
}
return AsValue(""), nil
}
return nil, &Error{
Sender: "filter:pluralize",
ErrorMsg: "Filter 'pluralize' does only work on numbers.",
} else {
return nil, &Error{
Sender: "filter:pluralize",
ErrorMsg: "Filter 'pluralize' does only work on numbers.",
}
}
}
@ -842,16 +844,16 @@ func filterWordcount(in *Value, param *Value) (*Value, *Error) {
func filterWordwrap(in *Value, param *Value) (*Value, *Error) {
words := strings.Fields(in.String())
wordsLen := len(words)
wrapAt := param.Integer()
if wrapAt <= 0 {
words_len := len(words)
wrap_at := param.Integer()
if wrap_at <= 0 {
return in, nil
}
linecount := wordsLen/wrapAt + wordsLen%wrapAt
linecount := words_len/wrap_at + words_len%wrap_at
lines := make([]string, 0, linecount)
for i := 0; i < linecount; i++ {
lines = append(lines, strings.Join(words[wrapAt*i:min(wrapAt*(i+1), wordsLen)], " "))
lines = append(lines, strings.Join(words[wrap_at*i:min(wrap_at*(i+1), words_len)], " "))
}
return AsValue(strings.Join(lines, "\n")), nil
}
@ -862,27 +864,27 @@ func filterYesno(in *Value, param *Value) (*Value, *Error) {
1: "no",
2: "maybe",
}
paramString := param.String()
customChoices := strings.Split(paramString, ",")
if len(paramString) > 0 {
if len(customChoices) > 3 {
param_string := param.String()
custom_choices := strings.Split(param_string, ",")
if len(param_string) > 0 {
if len(custom_choices) > 3 {
return nil, &Error{
Sender: "filter:yesno",
ErrorMsg: fmt.Sprintf("You cannot pass more than 3 options to the 'yesno'-filter (got: '%s').", paramString),
ErrorMsg: fmt.Sprintf("You cannot pass more than 3 options to the 'yesno'-filter (got: '%s').", param_string),
}
}
if len(customChoices) < 2 {
if len(custom_choices) < 2 {
return nil, &Error{
Sender: "filter:yesno",
ErrorMsg: fmt.Sprintf("You must pass either no or at least 2 arguments to the 'yesno'-filter (got: '%s').", paramString),
ErrorMsg: fmt.Sprintf("You must pass either no or at least 2 arguments to the 'yesno'-filter (got: '%s').", param_string),
}
}
// Map to the options now
choices[0] = customChoices[0]
choices[1] = customChoices[1]
if len(customChoices) == 3 {
choices[2] = customChoices[2]
choices[0] = custom_choices[0]
choices[1] = custom_choices[1]
if len(custom_choices) == 3 {
choices[2] = custom_choices[2]
}
}

View File

@ -63,8 +63,8 @@ type lexer struct {
line int
col int
inVerbatim bool
verbatimName string
in_verbatim bool
verbatim_name string
}
func (t *Token) String() string {
@ -216,8 +216,8 @@ func (l *lexer) run() {
for {
// TODO: Support verbatim tag names
// https://docs.djangoproject.com/en/dev/ref/templates/builtins/#verbatim
if l.inVerbatim {
name := l.verbatimName
if l.in_verbatim {
name := l.verbatim_name
if name != "" {
name += " "
}
@ -229,20 +229,20 @@ func (l *lexer) run() {
l.pos += w
l.col += w
l.ignore()
l.inVerbatim = false
l.in_verbatim = false
}
} else if strings.HasPrefix(l.input[l.pos:], "{% verbatim %}") { // tag
if l.pos > l.start {
l.emit(TokenHTML)
}
l.inVerbatim = true
l.in_verbatim = true
w := len("{% verbatim %}")
l.pos += w
l.col += w
l.ignore()
}
if !l.inVerbatim {
if !l.in_verbatim {
// Ignore single-line comments {# ... #}
if strings.HasPrefix(l.input[l.pos:], "{#") {
if l.pos > l.start {
@ -303,7 +303,7 @@ func (l *lexer) run() {
l.emit(TokenHTML)
}
if l.inVerbatim {
if l.in_verbatim {
l.errorf("verbatim-tag not closed, got EOF.")
}
}
@ -394,7 +394,7 @@ func (l *lexer) stateNumber() lexerStateFn {
func (l *lexer) stateString() lexerStateFn {
l.ignore()
l.startcol-- // we're starting the position at the first "
l.startcol -= 1 // we're starting the position at the first "
for !l.accept(`"`) {
switch l.next() {
case '\\':

View File

@ -1,13 +1,17 @@
package pongo2
import (
"bytes"
)
// The root document
type nodeDocument struct {
Nodes []INode
}
func (doc *nodeDocument) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (doc *nodeDocument) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
for _, n := range doc.Nodes {
err := n.Execute(ctx, writer)
err := n.Execute(ctx, buffer)
if err != nil {
return err
}

View File

@ -1,10 +1,14 @@
package pongo2
import (
"bytes"
)
type nodeHTML struct {
token *Token
}
func (n *nodeHTML) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
writer.WriteString(n.token.Val)
func (n *nodeHTML) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
buffer.WriteString(n.token.Val)
return nil
}

View File

@ -1,13 +1,17 @@
package pongo2
import (
"bytes"
)
type NodeWrapper struct {
Endtag string
nodes []INode
}
func (wrapper *NodeWrapper) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (wrapper *NodeWrapper) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
for _, n := range wrapper.nodes {
err := n.Execute(ctx, writer)
err := n.Execute(ctx, buffer)
if err != nil {
return err
}

View File

@ -1,12 +1,13 @@
package pongo2
import (
"bytes"
"fmt"
"strings"
)
type INode interface {
Execute(*ExecutionContext, TemplateWriter) *Error
Execute(*ExecutionContext, *bytes.Buffer) *Error
}
type IEvaluator interface {
@ -26,10 +27,10 @@ type IEvaluator interface {
//
// (See Token's documentation for more about tokens)
type Parser struct {
name string
idx int
tokens []*Token
lastToken *Token
name string
idx int
tokens []*Token
last_token *Token
// if the parser parses a template document, here will be
// a reference to it (needed to access the template through Tags)
@ -46,7 +47,7 @@ func newParser(name string, tokens []*Token, template *Template) *Parser {
template: template,
}
if len(tokens) > 0 {
p.lastToken = tokens[len(tokens)-1]
p.last_token = tokens[len(tokens)-1]
}
return p
}
@ -211,19 +212,19 @@ func (p *Parser) Error(msg string, token *Token) *Error {
func (p *Parser) WrapUntilTag(names ...string) (*NodeWrapper, *Parser, *Error) {
wrapper := &NodeWrapper{}
var tagArgs []*Token
tagArgs := make([]*Token, 0)
for p.Remaining() > 0 {
// New tag, check whether we have to stop wrapping here
if p.Peek(TokenSymbol, "{%") != nil {
tagIdent := p.PeekTypeN(1, TokenIdentifier)
tag_ident := p.PeekTypeN(1, TokenIdentifier)
if tagIdent != nil {
if tag_ident != nil {
// We've found a (!) end-tag
found := false
for _, n := range names {
if tagIdent.Val == n {
if tag_ident.Val == n {
found = true
break
}
@ -237,15 +238,16 @@ func (p *Parser) WrapUntilTag(names ...string) (*NodeWrapper, *Parser, *Error) {
for {
if p.Match(TokenSymbol, "%}") != nil {
// Okay, end the wrapping here
wrapper.Endtag = tagIdent.Val
wrapper.Endtag = tag_ident.Val
return wrapper, newParser(p.template.name, tagArgs, p.template), nil
} else {
t := p.Current()
p.Consume()
if t == nil {
return nil, nil, p.Error("Unexpected EOF.", p.last_token)
}
tagArgs = append(tagArgs, t)
}
t := p.Current()
p.Consume()
if t == nil {
return nil, nil, p.Error("Unexpected EOF.", p.lastToken)
}
tagArgs = append(tagArgs, t)
}
}
}
@ -261,5 +263,5 @@ func (p *Parser) WrapUntilTag(names ...string) (*NodeWrapper, *Parser, *Error) {
}
return nil, nil, p.Error(fmt.Sprintf("Unexpected EOF, expected tag %s.", strings.Join(names, " or ")),
p.lastToken)
p.last_token)
}

View File

@ -1,37 +1,38 @@
package pongo2
import (
"bytes"
"fmt"
"math"
)
type Expression struct {
// TODO: Add location token?
expr1 IEvaluator
expr2 IEvaluator
opToken *Token
expr1 IEvaluator
expr2 IEvaluator
op_token *Token
}
type relationalExpression struct {
// TODO: Add location token?
expr1 IEvaluator
expr2 IEvaluator
opToken *Token
expr1 IEvaluator
expr2 IEvaluator
op_token *Token
}
type simpleExpression struct {
negate bool
negativeSign bool
term1 IEvaluator
term2 IEvaluator
opToken *Token
negate bool
negative_sign bool
term1 IEvaluator
term2 IEvaluator
op_token *Token
}
type term struct {
// TODO: Add location token?
factor1 IEvaluator
factor2 IEvaluator
opToken *Token
factor1 IEvaluator
factor2 IEvaluator
op_token *Token
}
type power struct {
@ -55,14 +56,14 @@ func (expr *simpleExpression) FilterApplied(name string) bool {
(expr.term2 != nil && expr.term2.FilterApplied(name)))
}
func (expr *term) FilterApplied(name string) bool {
return expr.factor1.FilterApplied(name) && (expr.factor2 == nil ||
(expr.factor2 != nil && expr.factor2.FilterApplied(name)))
func (t *term) FilterApplied(name string) bool {
return t.factor1.FilterApplied(name) && (t.factor2 == nil ||
(t.factor2 != nil && t.factor2.FilterApplied(name)))
}
func (expr *power) FilterApplied(name string) bool {
return expr.power1.FilterApplied(name) && (expr.power2 == nil ||
(expr.power2 != nil && expr.power2.FilterApplied(name)))
func (p *power) FilterApplied(name string) bool {
return p.power1.FilterApplied(name) && (p.power2 == nil ||
(p.power2 != nil && p.power2.FilterApplied(name)))
}
func (expr *Expression) GetPositionToken() *Token {
@ -85,48 +86,48 @@ func (expr *power) GetPositionToken() *Token {
return expr.power1.GetPositionToken()
}
func (expr *Expression) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (expr *Expression) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (expr *relationalExpression) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (expr *relationalExpression) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (expr *simpleExpression) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (expr *simpleExpression) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (expr *term) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (expr *term) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (expr *power) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (expr *power) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
@ -140,13 +141,13 @@ func (expr *Expression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
if err != nil {
return nil, err
}
switch expr.opToken.Val {
switch expr.op_token.Val {
case "and", "&&":
return AsValue(v1.IsTrue() && v2.IsTrue()), nil
case "or", "||":
return AsValue(v1.IsTrue() || v2.IsTrue()), nil
default:
panic(fmt.Sprintf("unimplemented: %s", expr.opToken.Val))
panic(fmt.Sprintf("unimplemented: %s", expr.op_token.Val))
}
} else {
return v1, nil
@ -163,35 +164,39 @@ func (expr *relationalExpression) Evaluate(ctx *ExecutionContext) (*Value, *Erro
if err != nil {
return nil, err
}
switch expr.opToken.Val {
switch expr.op_token.Val {
case "<=":
if v1.IsFloat() || v2.IsFloat() {
return AsValue(v1.Float() <= v2.Float()), nil
} else {
return AsValue(v1.Integer() <= v2.Integer()), nil
}
return AsValue(v1.Integer() <= v2.Integer()), nil
case ">=":
if v1.IsFloat() || v2.IsFloat() {
return AsValue(v1.Float() >= v2.Float()), nil
} else {
return AsValue(v1.Integer() >= v2.Integer()), nil
}
return AsValue(v1.Integer() >= v2.Integer()), nil
case "==":
return AsValue(v1.EqualValueTo(v2)), nil
case ">":
if v1.IsFloat() || v2.IsFloat() {
return AsValue(v1.Float() > v2.Float()), nil
} else {
return AsValue(v1.Integer() > v2.Integer()), nil
}
return AsValue(v1.Integer() > v2.Integer()), nil
case "<":
if v1.IsFloat() || v2.IsFloat() {
return AsValue(v1.Float() < v2.Float()), nil
} else {
return AsValue(v1.Integer() < v2.Integer()), nil
}
return AsValue(v1.Integer() < v2.Integer()), nil
case "!=", "<>":
return AsValue(!v1.EqualValueTo(v2)), nil
case "in":
return AsValue(v2.Contains(v1)), nil
default:
panic(fmt.Sprintf("unimplemented: %s", expr.opToken.Val))
panic(fmt.Sprintf("unimplemented: %s", expr.op_token.Val))
}
} else {
return v1, nil
@ -209,7 +214,7 @@ func (expr *simpleExpression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
result = result.Negate()
}
if expr.negativeSign {
if expr.negative_sign {
if result.IsNumber() {
switch {
case result.IsFloat():
@ -229,21 +234,23 @@ func (expr *simpleExpression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
if err != nil {
return nil, err
}
switch expr.opToken.Val {
switch expr.op_token.Val {
case "+":
if result.IsFloat() || t2.IsFloat() {
// Result will be a float
return AsValue(result.Float() + t2.Float()), nil
} else {
// Result will be an integer
return AsValue(result.Integer() + t2.Integer()), nil
}
// Result will be an integer
return AsValue(result.Integer() + t2.Integer()), nil
case "-":
if result.IsFloat() || t2.IsFloat() {
// Result will be a float
return AsValue(result.Float() - t2.Float()), nil
} else {
// Result will be an integer
return AsValue(result.Integer() - t2.Integer()), nil
}
// Result will be an integer
return AsValue(result.Integer() - t2.Integer()), nil
default:
panic("unimplemented")
}
@ -252,17 +259,17 @@ func (expr *simpleExpression) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
return result, nil
}
func (expr *term) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
f1, err := expr.factor1.Evaluate(ctx)
func (t *term) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
f1, err := t.factor1.Evaluate(ctx)
if err != nil {
return nil, err
}
if expr.factor2 != nil {
f2, err := expr.factor2.Evaluate(ctx)
if t.factor2 != nil {
f2, err := t.factor2.Evaluate(ctx)
if err != nil {
return nil, err
}
switch expr.opToken.Val {
switch t.op_token.Val {
case "*":
if f1.IsFloat() || f2.IsFloat() {
// Result will be float
@ -288,19 +295,20 @@ func (expr *term) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
}
}
func (expr *power) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
p1, err := expr.power1.Evaluate(ctx)
func (pw *power) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
p1, err := pw.power1.Evaluate(ctx)
if err != nil {
return nil, err
}
if expr.power2 != nil {
p2, err := expr.power2.Evaluate(ctx)
if pw.power2 != nil {
p2, err := pw.power2.Evaluate(ctx)
if err != nil {
return nil, err
}
return AsValue(math.Pow(p1.Float(), p2.Float())), nil
} else {
return p1, nil
}
return p1, nil
}
func (p *Parser) parseFactor() (IEvaluator, *Error) {
@ -344,19 +352,19 @@ func (p *Parser) parsePower() (IEvaluator, *Error) {
}
func (p *Parser) parseTerm() (IEvaluator, *Error) {
returnTerm := new(term)
return_term := new(term)
factor1, err := p.parsePower()
if err != nil {
return nil, err
}
returnTerm.factor1 = factor1
return_term.factor1 = factor1
for p.PeekOne(TokenSymbol, "*", "/", "%") != nil {
if returnTerm.opToken != nil {
if return_term.op_token != nil {
// Create new sub-term
returnTerm = &term{
factor1: returnTerm,
return_term = &term{
factor1: return_term,
}
}
@ -368,16 +376,16 @@ func (p *Parser) parseTerm() (IEvaluator, *Error) {
return nil, err
}
returnTerm.opToken = op
returnTerm.factor2 = factor2
return_term.op_token = op
return_term.factor2 = factor2
}
if returnTerm.opToken == nil {
if return_term.op_token == nil {
// Shortcut for faster evaluation
return returnTerm.factor1, nil
return return_term.factor1, nil
}
return returnTerm, nil
return return_term, nil
}
func (p *Parser) parseSimpleExpression() (IEvaluator, *Error) {
@ -385,7 +393,7 @@ func (p *Parser) parseSimpleExpression() (IEvaluator, *Error) {
if sign := p.MatchOne(TokenSymbol, "+", "-"); sign != nil {
if sign.Val == "-" {
expr.negativeSign = true
expr.negative_sign = true
}
}
@ -400,7 +408,7 @@ func (p *Parser) parseSimpleExpression() (IEvaluator, *Error) {
expr.term1 = term1
for p.PeekOne(TokenSymbol, "+", "-") != nil {
if expr.opToken != nil {
if expr.op_token != nil {
// New sub expr
expr = &simpleExpression{
term1: expr,
@ -416,10 +424,10 @@ func (p *Parser) parseSimpleExpression() (IEvaluator, *Error) {
}
expr.term2 = term2
expr.opToken = op
expr.op_token = op
}
if expr.negate == false && expr.negativeSign == false && expr.term2 == nil {
if expr.negate == false && expr.negative_sign == false && expr.term2 == nil {
// Shortcut for faster evaluation
return expr.term1, nil
}
@ -442,14 +450,14 @@ func (p *Parser) parseRelationalExpression() (IEvaluator, *Error) {
if err != nil {
return nil, err
}
expr.opToken = t
expr.op_token = t
expr.expr2 = expr2
} else if t := p.MatchOne(TokenKeyword, "in"); t != nil {
expr2, err := p.parseSimpleExpression()
if err != nil {
return nil, err
}
expr.opToken = t
expr.op_token = t
expr.expr2 = expr2
}
@ -479,7 +487,7 @@ func (p *Parser) ParseExpression() (IEvaluator, *Error) {
return nil, err
}
exp.expr2 = expr2
exp.opToken = op
exp.op_token = op
}
if exp.expr2 == nil {
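The evaluators in this file fall back to integer arithmetic unless one operand is a float, and the "in" operator delegates to Value.Contains. As a point of reference only, a minimal sketch of what that looks like from the template side; it is not part of this commit and assumes pongo2's usual "True"/"False" rendering of booleans:

package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

func main() {
	// 2 + 3 stays integer, 7 > 2.5 takes the float branch of the
	// relational evaluator, and "in" checks membership via Contains.
	tpl := pongo2.Must(pongo2.FromString(
		`{{ 2 + 3 }} {{ 7 > 2.5 }} {{ "b" in items }}`))

	out, err := tpl.Execute(pongo2.Context{"items": []string{"a", "b"}})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // 5 True True
}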


@ -1,10 +1,10 @@
package pongo2
// Version string
const Version = "dev"
const Version = "v3"
// Must panics, if a Template couldn't successfully parsed. This is how you
// would use it:
// Helper function which panics, if a Template couldn't
// successfully parsed. This is how you would use it:
// var baseTemplate = pongo2.Must(pongo2.FromFile("templates/base.html"))
func Must(tpl *Template, err error) *Template {
if err != nil {
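The hunk above touches pongo2's Must helper, whose doc comment already hints at the intended usage. A small, hedged usage sketch, not taken from this repository; the template string and context key are invented, and the "title" filter is the one exercised by the test changes further down:

package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

// Must panics if FromString fails, so the template can live in a package-level var.
var greetTpl = pongo2.Must(pongo2.FromString("Hello {{ name|title }}!"))

func main() {
	// Execute renders the template against a Context (a map[string]interface{}).
	out, err := greetTpl.Execute(pongo2.Context{"name": "john doe"})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // Hello John Doe!
}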

20
vendor/github.com/flosch/pongo2/pongo2_issues_test.go generated vendored Normal file

@ -0,0 +1,20 @@
package pongo2
import (
"testing"
. "gopkg.in/check.v1"
)
// Hook up gocheck into the "go test" runner.
func TestIssues(t *testing.T) { TestingT(t) }
type IssueTestSuite struct{}
var _ = Suite(&IssueTestSuite{})
func (s *TestSuite) TestIssues(c *C) {
// Add a test for any issue
c.Check(42, Equals, 42)
}


@ -1,4 +1,4 @@
package pongo2_test
package pongo2
import (
"bytes"
@ -9,11 +9,9 @@ import (
"strings"
"testing"
"time"
"github.com/flosch/pongo2"
)
var adminList = []string{"user2"}
var admin_list = []string{"user2"}
var time1 = time.Date(2014, 06, 10, 15, 30, 15, 0, time.UTC)
var time2 = time.Date(2011, 03, 21, 8, 37, 56, 12, time.UTC)
@ -34,8 +32,8 @@ type comment struct {
Text string
}
func isAdmin(u *user) bool {
for _, a := range adminList {
func is_admin(u *user) bool {
for _, a := range admin_list {
if a == u.Name {
return true
}
@ -43,12 +41,12 @@ func isAdmin(u *user) bool {
return false
}
func (u *user) Is_admin() *pongo2.Value {
return pongo2.AsValue(isAdmin(u))
func (u *user) Is_admin() *Value {
return AsValue(is_admin(u))
}
func (u *user) Is_admin2() bool {
return isAdmin(u)
return is_admin(u)
}
func (p *post) String() string {
@ -62,53 +60,74 @@ func (p *post) String() string {
type tagSandboxDemoTag struct {
}
func (node *tagSandboxDemoTag) Execute(ctx *pongo2.ExecutionContext, writer pongo2.TemplateWriter) *pongo2.Error {
writer.WriteString("hello")
func (node *tagSandboxDemoTag) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
buffer.WriteString("hello")
return nil
}
func tagSandboxDemoTagParser(doc *pongo2.Parser, start *pongo2.Token, arguments *pongo2.Parser) (pongo2.INodeTag, *pongo2.Error) {
func tagSandboxDemoTagParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
return &tagSandboxDemoTag{}, nil
}
func BannedFilterFn(in *pongo2.Value, params *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
func BannedFilterFn(in *Value, params *Value) (*Value, *Error) {
return in, nil
}
func init() {
pongo2.DefaultSet.Debug = true
DefaultSet.Debug = true
pongo2.RegisterFilter("banned_filter", BannedFilterFn)
pongo2.RegisterFilter("unbanned_filter", BannedFilterFn)
pongo2.RegisterTag("banned_tag", tagSandboxDemoTagParser)
pongo2.RegisterTag("unbanned_tag", tagSandboxDemoTagParser)
RegisterFilter("banned_filter", BannedFilterFn)
RegisterFilter("unbanned_filter", BannedFilterFn)
RegisterTag("banned_tag", tagSandboxDemoTagParser)
RegisterTag("unbanned_tag", tagSandboxDemoTagParser)
pongo2.DefaultSet.BanFilter("banned_filter")
pongo2.DefaultSet.BanTag("banned_tag")
DefaultSet.BanFilter("banned_filter")
DefaultSet.BanTag("banned_tag")
// Allow different kind of levels inside template_tests/
abs_path, err := filepath.Abs("./template_tests/*")
if err != nil {
panic(err)
}
DefaultSet.SandboxDirectories = append(DefaultSet.SandboxDirectories, abs_path)
abs_path, err = filepath.Abs("./template_tests/*/*")
if err != nil {
panic(err)
}
DefaultSet.SandboxDirectories = append(DefaultSet.SandboxDirectories, abs_path)
abs_path, err = filepath.Abs("./template_tests/*/*/*")
if err != nil {
panic(err)
}
DefaultSet.SandboxDirectories = append(DefaultSet.SandboxDirectories, abs_path)
// Allow pongo2 temp files
DefaultSet.SandboxDirectories = append(DefaultSet.SandboxDirectories, "/tmp/pongo2_*")
f, err := ioutil.TempFile("/tmp/", "pongo2_")
if err != nil {
panic("cannot write to /tmp/")
}
f.Write([]byte("Hello from pongo2"))
pongo2.DefaultSet.Globals["temp_file"] = f.Name()
DefaultSet.Globals["temp_file"] = f.Name()
}
/*
* End setup sandbox
*/
var tplContext = pongo2.Context{
var tplContext = Context{
"number": 11,
"simple": map[string]interface{}{
"number": 42,
"name": "john doe",
"included_file": "INCLUDES.helper",
"included_file_not_exists": "INCLUDES.helper.not_exists",
"nil": nil,
"uint": uint(8),
"float": float64(3.1415),
"str": "string",
"number": 42,
"name": "john doe",
"included_file": "INCLUDES.helper",
"nil": nil,
"uint": uint(8),
"float": float64(3.1415),
"str": "string",
"chinese_hello_world": "你好世界",
"bool_true": true,
"bool_false": false,
@ -123,22 +142,13 @@ Yep!`,
"escape_js_test": `escape sequences \r\n\'\" special chars "?!=$<>`,
"one_item_list": []int{99},
"multiple_item_list": []int{1, 1, 2, 3, 5, 8, 13, 21, 34, 55},
"unsorted_int_list": []int{192, 581, 22, 1, 249, 9999, 1828591, 8271},
"misc_list": []interface{}{"Hello", 99, 3.14, "good"},
"escape_text": "This is \\a Test. \"Yep\". 'Yep'.",
"xss": "<script>alert(\"uh oh\");</script>",
"intmap": map[int]string{
1: "one",
5: "five",
2: "two",
},
"strmap": map[string]string{
"abc": "def",
"bcd": "efg",
"zab": "cde",
"gh": "kqm",
"ukq": "qqa",
"aab": "aba",
5: "five",
},
"func_add": func(a, b int) int {
return a + b
@ -157,17 +167,17 @@ Yep!`,
}
return s
},
"func_variadic_sum_int2": func(args ...*pongo2.Value) *pongo2.Value {
"func_variadic_sum_int2": func(args ...*Value) *Value {
// Create a sum
s := 0
for _, i := range args {
s += i.Integer()
}
return pongo2.AsValue(s)
return AsValue(s)
},
},
"complex": map[string]interface{}{
"is_admin": isAdmin,
"is_admin": is_admin,
"post": post{
Text: "<h2>Hello!</h2><p>Welcome to my new blog page. I'm using pongo2 which supports {{ variables }} and {% tags %}.</p>",
Created: time2,
@ -228,8 +238,10 @@ Yep!`,
}
func TestTemplates(t *testing.T) {
debug = true
// Add a global to the default set
pongo2.Globals["this_is_a_global_variable"] = "this is a global text"
Globals["this_is_a_global_variable"] = "this is a global text"
matches, err := filepath.Glob("./template_tests/*.tpl")
if err != nil {
@ -237,34 +249,34 @@ func TestTemplates(t *testing.T) {
}
for idx, match := range matches {
t.Logf("[Template %3d] Testing '%s'", idx+1, match)
tpl, err := pongo2.FromFile(match)
tpl, err := FromFile(match)
if err != nil {
t.Fatalf("Error on FromFile('%s'): %s", match, err.Error())
}
testFilename := fmt.Sprintf("%s.out", match)
testOut, rerr := ioutil.ReadFile(testFilename)
test_filename := fmt.Sprintf("%s.out", match)
test_out, rerr := ioutil.ReadFile(test_filename)
if rerr != nil {
t.Fatalf("Error on ReadFile('%s'): %s", testFilename, rerr.Error())
t.Fatalf("Error on ReadFile('%s'): %s", test_filename, rerr.Error())
}
tplOut, err := tpl.ExecuteBytes(tplContext)
tpl_out, err := tpl.ExecuteBytes(tplContext)
if err != nil {
t.Fatalf("Error on Execute('%s'): %s", match, err.Error())
}
if bytes.Compare(testOut, tplOut) != 0 {
t.Logf("Template (rendered) '%s': '%s'", match, tplOut)
errFilename := filepath.Base(fmt.Sprintf("%s.error", match))
err := ioutil.WriteFile(errFilename, []byte(tplOut), 0600)
if bytes.Compare(test_out, tpl_out) != 0 {
t.Logf("Template (rendered) '%s': '%s'", match, tpl_out)
err_filename := filepath.Base(fmt.Sprintf("%s.error", match))
err := ioutil.WriteFile(err_filename, []byte(tpl_out), 0600)
if err != nil {
t.Fatalf(err.Error())
}
t.Logf("get a complete diff with command: 'diff -ya %s %s'", testFilename, errFilename)
t.Logf("get a complete diff with command: 'diff -ya %s %s'", test_filename, err_filename)
t.Errorf("Failed: test_out != tpl_out for %s", match)
}
}
}
func TestExecutionErrors(t *testing.T) {
//debug = true
debug = true
matches, err := filepath.Glob("./template_tests/*-execution.err")
if err != nil {
@ -273,15 +285,15 @@ func TestExecutionErrors(t *testing.T) {
for idx, match := range matches {
t.Logf("[Errors %3d] Testing '%s'", idx+1, match)
testData, err := ioutil.ReadFile(match)
tests := strings.Split(string(testData), "\n")
test_data, err := ioutil.ReadFile(match)
tests := strings.Split(string(test_data), "\n")
checkFilename := fmt.Sprintf("%s.out", match)
checkData, err := ioutil.ReadFile(checkFilename)
check_filename := fmt.Sprintf("%s.out", match)
check_data, err := ioutil.ReadFile(check_filename)
if err != nil {
t.Fatalf("Error on ReadFile('%s'): %s", checkFilename, err.Error())
t.Fatalf("Error on ReadFile('%s'): %s", check_filename, err.Error())
}
checks := strings.Split(string(checkData), "\n")
checks := strings.Split(string(check_data), "\n")
if len(checks) != len(tests) {
t.Fatal("Template lines != Checks lines")
@ -296,7 +308,7 @@ func TestExecutionErrors(t *testing.T) {
match, idx+1)
}
tpl, err := pongo2.FromString(test)
tpl, err := FromString(test)
if err != nil {
t.Fatalf("Error on FromString('%s'): %s", test, err.Error())
}
@ -317,7 +329,7 @@ func TestExecutionErrors(t *testing.T) {
}
func TestCompilationErrors(t *testing.T) {
//debug = true
debug = true
matches, err := filepath.Glob("./template_tests/*-compilation.err")
if err != nil {
@ -326,15 +338,15 @@ func TestCompilationErrors(t *testing.T) {
for idx, match := range matches {
t.Logf("[Errors %3d] Testing '%s'", idx+1, match)
testData, err := ioutil.ReadFile(match)
tests := strings.Split(string(testData), "\n")
test_data, err := ioutil.ReadFile(match)
tests := strings.Split(string(test_data), "\n")
checkFilename := fmt.Sprintf("%s.out", match)
checkData, err := ioutil.ReadFile(checkFilename)
check_filename := fmt.Sprintf("%s.out", match)
check_data, err := ioutil.ReadFile(check_filename)
if err != nil {
t.Fatalf("Error on ReadFile('%s'): %s", checkFilename, err.Error())
t.Fatalf("Error on ReadFile('%s'): %s", check_filename, err.Error())
}
checks := strings.Split(string(checkData), "\n")
checks := strings.Split(string(check_data), "\n")
if len(checks) != len(tests) {
t.Fatal("Template lines != Checks lines")
@ -349,7 +361,7 @@ func TestCompilationErrors(t *testing.T) {
match, idx+1)
}
_, err = pongo2.FromString(test)
_, err = FromString(test)
if err == nil {
t.Fatalf("[%s | Line %d] Expected error for (got none): %s", match, idx+1, tests[idx])
}
@ -365,10 +377,9 @@ func TestCompilationErrors(t *testing.T) {
func TestBaseDirectory(t *testing.T) {
mustStr := "Hello from template_tests/base_dir_test/"
fs := pongo2.MustNewLocalFileSystemLoader("")
s := pongo2.NewSet("test set with base directory", fs)
s := NewSet("test set with base directory")
s.Globals["base_directory"] = "template_tests/base_dir_test/"
if err := fs.SetBaseDir(s.Globals["base_directory"].(string)); err != nil {
if err := s.SetBaseDirectory(s.Globals["base_directory"].(string)); err != nil {
t.Fatal(err)
}
@ -394,13 +405,13 @@ func TestBaseDirectory(t *testing.T) {
}
func BenchmarkCache(b *testing.B) {
cacheSet := pongo2.NewSet("cache set", pongo2.MustNewLocalFileSystemLoader(""))
cache_set := NewSet("cache set")
for i := 0; i < b.N; i++ {
tpl, err := cacheSet.FromCache("template_tests/complex.tpl")
tpl, err := cache_set.FromCache("template_tests/complex.tpl")
if err != nil {
b.Fatal(err)
}
err = tpl.ExecuteWriterUnbuffered(tplContext, ioutil.Discard)
_, err = tpl.ExecuteBytes(tplContext)
if err != nil {
b.Fatal(err)
}
@ -408,14 +419,14 @@ func BenchmarkCache(b *testing.B) {
}
func BenchmarkCacheDebugOn(b *testing.B) {
cacheDebugSet := pongo2.NewSet("cache set", pongo2.MustNewLocalFileSystemLoader(""))
cacheDebugSet.Debug = true
cache_debug_set := NewSet("cache set")
cache_debug_set.Debug = true
for i := 0; i < b.N; i++ {
tpl, err := cacheDebugSet.FromFile("template_tests/complex.tpl")
tpl, err := cache_debug_set.FromFile("template_tests/complex.tpl")
if err != nil {
b.Fatal(err)
}
err = tpl.ExecuteWriterUnbuffered(tplContext, ioutil.Discard)
_, err = tpl.ExecuteBytes(tplContext)
if err != nil {
b.Fatal(err)
}
@ -423,13 +434,13 @@ func BenchmarkCacheDebugOn(b *testing.B) {
}
func BenchmarkExecuteComplexWithSandboxActive(b *testing.B) {
tpl, err := pongo2.FromFile("template_tests/complex.tpl")
tpl, err := FromFile("template_tests/complex.tpl")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
err = tpl.ExecuteWriterUnbuffered(tplContext, ioutil.Discard)
_, err = tpl.ExecuteBytes(tplContext)
if err != nil {
b.Fatal(err)
}
@ -444,12 +455,12 @@ func BenchmarkCompileAndExecuteComplexWithSandboxActive(b *testing.B) {
preloadedTpl := string(buf)
b.ResetTimer()
for i := 0; i < b.N; i++ {
tpl, err := pongo2.FromString(preloadedTpl)
tpl, err := FromString(preloadedTpl)
if err != nil {
b.Fatal(err)
}
err = tpl.ExecuteWriterUnbuffered(tplContext, ioutil.Discard)
_, err = tpl.ExecuteBytes(tplContext)
if err != nil {
b.Fatal(err)
}
@ -457,14 +468,14 @@ func BenchmarkCompileAndExecuteComplexWithSandboxActive(b *testing.B) {
}
func BenchmarkParallelExecuteComplexWithSandboxActive(b *testing.B) {
tpl, err := pongo2.FromFile("template_tests/complex.tpl")
tpl, err := FromFile("template_tests/complex.tpl")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
err := tpl.ExecuteWriterUnbuffered(tplContext, ioutil.Discard)
_, err := tpl.ExecuteBytes(tplContext)
if err != nil {
b.Fatal(err)
}
@ -473,14 +484,14 @@ func BenchmarkParallelExecuteComplexWithSandboxActive(b *testing.B) {
}
func BenchmarkExecuteComplexWithoutSandbox(b *testing.B) {
s := pongo2.NewSet("set without sandbox", pongo2.MustNewLocalFileSystemLoader(""))
s := NewSet("set without sandbox")
tpl, err := s.FromFile("template_tests/complex.tpl")
if err != nil {
b.Fatal(err)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
err = tpl.ExecuteWriterUnbuffered(tplContext, ioutil.Discard)
_, err = tpl.ExecuteBytes(tplContext)
if err != nil {
b.Fatal(err)
}
@ -494,7 +505,7 @@ func BenchmarkCompileAndExecuteComplexWithoutSandbox(b *testing.B) {
}
preloadedTpl := string(buf)
s := pongo2.NewSet("set without sandbox", pongo2.MustNewLocalFileSystemLoader(""))
s := NewSet("set without sandbox")
b.ResetTimer()
for i := 0; i < b.N; i++ {
@ -503,7 +514,7 @@ func BenchmarkCompileAndExecuteComplexWithoutSandbox(b *testing.B) {
b.Fatal(err)
}
err = tpl.ExecuteWriterUnbuffered(tplContext, ioutil.Discard)
_, err = tpl.ExecuteBytes(tplContext)
if err != nil {
b.Fatal(err)
}
@ -511,7 +522,7 @@ func BenchmarkCompileAndExecuteComplexWithoutSandbox(b *testing.B) {
}
func BenchmarkParallelExecuteComplexWithoutSandbox(b *testing.B) {
s := pongo2.NewSet("set without sandbox", pongo2.MustNewLocalFileSystemLoader(""))
s := NewSet("set without sandbox")
tpl, err := s.FromFile("template_tests/complex.tpl")
if err != nil {
b.Fatal(err)
@ -519,7 +530,7 @@ func BenchmarkParallelExecuteComplexWithoutSandbox(b *testing.B) {
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
err := tpl.ExecuteWriterUnbuffered(tplContext, ioutil.Discard)
_, err := tpl.ExecuteBytes(tplContext)
if err != nil {
b.Fatal(err)
}
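The test changes above revolve around RegisterFilter/RegisterTag, the Value helpers and ExecuteBytes. A hedged sketch of a user-defined filter written against that FilterFunction shape; the filter name "shout" and its behaviour are made up for illustration and are not part of this commit:

package main

import (
	"fmt"
	"strings"

	"github.com/flosch/pongo2"
)

// shoutFilter upper-cases its input; the optional parameter is ignored here.
func shoutFilter(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, *pongo2.Error) {
	return pongo2.AsValue(strings.ToUpper(in.String())), nil
}

func main() {
	// Registering the same name twice panics, as the tests above assert.
	pongo2.RegisterFilter("shout", shoutFilter)

	tpl := pongo2.Must(pongo2.FromString("{{ greeting|shout }}"))
	out, err := tpl.Execute(pongo2.Context{"greeting": "hello"})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // HELLO
}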


@ -1,10 +1,8 @@
package pongo2_test
package pongo2
import (
"testing"
"github.com/flosch/pongo2"
. "gopkg.in/check.v1"
)
@ -13,16 +11,16 @@ import (
func Test(t *testing.T) { TestingT(t) }
type TestSuite struct {
tpl *pongo2.Template
tpl *Template
}
var (
_ = Suite(&TestSuite{})
testSuite2 = pongo2.NewSet("test suite 2", pongo2.MustNewLocalFileSystemLoader(""))
_ = Suite(&TestSuite{})
test_suite2 = NewSet("test suite 2")
)
func parseTemplate(s string, c pongo2.Context) string {
t, err := testSuite2.FromString(s)
func parseTemplate(s string, c Context) string {
t, err := test_suite2.FromString(s)
if err != nil {
panic(err)
}
@ -33,7 +31,7 @@ func parseTemplate(s string, c pongo2.Context) string {
return out
}
func parseTemplateFn(s string, c pongo2.Context) func() {
func parseTemplateFn(s string, c Context) func() {
return func() {
parseTemplate(s, c)
}
@ -42,27 +40,25 @@ func parseTemplateFn(s string, c pongo2.Context) func() {
func (s *TestSuite) TestMisc(c *C) {
// Must
// TODO: Add better error message (see issue #18)
c.Check(
func() { pongo2.Must(testSuite2.FromFile("template_tests/inheritance/base2.tpl")) },
c.Check(func() { Must(test_suite2.FromFile("template_tests/inheritance/base2.tpl")) },
PanicMatches,
`\[Error \(where: fromfile\) in .*template_tests/inheritance/doesnotexist.tpl | Line 1 Col 12 near 'doesnotexist.tpl'\] open .*template_tests/inheritance/doesnotexist.tpl: no such file or directory`,
)
`\[Error \(where: fromfile\) in template_tests/inheritance/doesnotexist.tpl | Line 1 Col 12 near 'doesnotexist.tpl'\] open template_tests/inheritance/doesnotexist.tpl: no such file or directory`)
// Context
c.Check(parseTemplateFn("", pongo2.Context{"'illegal": nil}), PanicMatches, ".*not a valid identifier.*")
c.Check(parseTemplateFn("", Context{"'illegal": nil}), PanicMatches, ".*not a valid identifier.*")
// Registers
c.Check(func() { pongo2.RegisterFilter("escape", nil) }, PanicMatches, ".*is already registered.*")
c.Check(func() { pongo2.RegisterTag("for", nil) }, PanicMatches, ".*is already registered.*")
c.Check(func() { RegisterFilter("escape", nil) }, PanicMatches, ".*is already registered.*")
c.Check(func() { RegisterTag("for", nil) }, PanicMatches, ".*is already registered.*")
// ApplyFilter
v, err := pongo2.ApplyFilter("title", pongo2.AsValue("this is a title"), nil)
v, err := ApplyFilter("title", AsValue("this is a title"), nil)
if err != nil {
c.Fatal(err)
}
c.Check(v.String(), Equals, "This Is A Title")
c.Check(func() {
_, err := pongo2.ApplyFilter("doesnotexist", nil, nil)
_, err := ApplyFilter("doesnotexist", nil, nil)
if err != nil {
panic(err)
}


@ -87,46 +87,46 @@ func ReplaceTag(name string, parserFn TagParser) {
// Tag = "{%" IDENT ARGS "%}"
func (p *Parser) parseTagElement() (INodeTag, *Error) {
p.Consume() // consume "{%"
tokenName := p.MatchType(TokenIdentifier)
token_name := p.MatchType(TokenIdentifier)
// Check for identifier
if tokenName == nil {
if token_name == nil {
return nil, p.Error("Tag name must be an identifier.", nil)
}
// Check for the existing tag
tag, exists := tags[tokenName.Val]
tag, exists := tags[token_name.Val]
if !exists {
// Does not exists
return nil, p.Error(fmt.Sprintf("Tag '%s' not found (or beginning tag not provided)", tokenName.Val), tokenName)
return nil, p.Error(fmt.Sprintf("Tag '%s' not found (or beginning tag not provided)", token_name.Val), token_name)
}
// Check sandbox tag restriction
if _, isBanned := p.template.set.bannedTags[tokenName.Val]; isBanned {
return nil, p.Error(fmt.Sprintf("Usage of tag '%s' is not allowed (sandbox restriction active).", tokenName.Val), tokenName)
if _, is_banned := p.template.set.bannedTags[token_name.Val]; is_banned {
return nil, p.Error(fmt.Sprintf("Usage of tag '%s' is not allowed (sandbox restriction active).", token_name.Val), token_name)
}
var argsToken []*Token
args_token := make([]*Token, 0)
for p.Peek(TokenSymbol, "%}") == nil && p.Remaining() > 0 {
// Add token to args
argsToken = append(argsToken, p.Current())
args_token = append(args_token, p.Current())
p.Consume() // next token
}
// EOF?
if p.Remaining() == 0 {
return nil, p.Error("Unexpectedly reached EOF, no tag end found.", p.lastToken)
return nil, p.Error("Unexpectedly reached EOF, no tag end found.", p.last_token)
}
p.Match(TokenSymbol, "%}")
argParser := newParser(p.name, argsToken, p.template)
if len(argsToken) == 0 {
arg_parser := newParser(p.name, args_token, p.template)
if len(args_token) == 0 {
// This is done to have nice EOF error messages
argParser.lastToken = tokenName
arg_parser.last_token = token_name
}
p.template.level++
defer func() { p.template.level-- }()
return tag.parser(p, tokenName, argParser)
return tag.parser(p, token_name, arg_parser)
}
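parseTagElement above looks the tag name up in the registry and hands the collected argument tokens to its TagParser. A hedged sketch of a trivial custom tag, written against the *bytes.Buffer-based Execute signature on the incoming side of these hunks; the package and tag name are invented for illustration:

package tagsketch // hypothetical package, for illustration only

import (
	"bytes"
	"time"

	"github.com/flosch/pongo2"
)

// tagYearMarkerNode writes the current year when the template is executed.
type tagYearMarkerNode struct{}

func (node *tagYearMarkerNode) Execute(ctx *pongo2.ExecutionContext, buffer *bytes.Buffer) *pongo2.Error {
	buffer.WriteString(time.Now().Format("2006"))
	return nil
}

// tagYearMarkerParser rejects arguments and returns the node, mirroring the
// tag/argument split that parseTagElement sets up above.
func tagYearMarkerParser(doc *pongo2.Parser, start *pongo2.Token, arguments *pongo2.Parser) (pongo2.INodeTag, *pongo2.Error) {
	if arguments.Remaining() > 0 {
		return nil, arguments.Error("year_marker takes no arguments.", nil)
	}
	return &tagYearMarkerNode{}, nil
}

func init() {
	// Afterwards templates can use {% year_marker %}.
	pongo2.RegisterTag("year_marker", tagYearMarkerParser)
}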


@ -1,15 +1,19 @@
package pongo2
import (
"bytes"
)
type tagAutoescapeNode struct {
wrapper *NodeWrapper
autoescape bool
}
func (node *tagAutoescapeNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagAutoescapeNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
old := ctx.Autoescape
ctx.Autoescape = node.autoescape
err := node.wrapper.Execute(ctx, writer)
err := node.wrapper.Execute(ctx, buffer)
if err != nil {
return err
}
@ -20,22 +24,22 @@ func (node *tagAutoescapeNode) Execute(ctx *ExecutionContext, writer TemplateWri
}
func tagAutoescapeParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
autoescapeNode := &tagAutoescapeNode{}
autoescape_node := &tagAutoescapeNode{}
wrapper, _, err := doc.WrapUntilTag("endautoescape")
if err != nil {
return nil, err
}
autoescapeNode.wrapper = wrapper
autoescape_node.wrapper = wrapper
modeToken := arguments.MatchType(TokenIdentifier)
if modeToken == nil {
mode_token := arguments.MatchType(TokenIdentifier)
if mode_token == nil {
return nil, arguments.Error("A mode is required for autoescape-tag.", nil)
}
if modeToken.Val == "on" {
autoescapeNode.autoescape = true
} else if modeToken.Val == "off" {
autoescapeNode.autoescape = false
if mode_token.Val == "on" {
autoescape_node.autoescape = true
} else if mode_token.Val == "off" {
autoescape_node.autoescape = false
} else {
return nil, arguments.Error("Only 'on' or 'off' is valid as an autoescape-mode.", nil)
}
@ -44,7 +48,7 @@ func tagAutoescapeParser(doc *Parser, start *Token, arguments *Parser) (INodeTag
return nil, arguments.Error("Malformed autoescape-tag arguments.", nil)
}
return autoescapeNode, nil
return autoescape_node, nil
}
func init() {
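The autoescape tag above simply toggles ctx.Autoescape around its wrapped body. A hedged template-side sketch, not part of this commit; "raw_html" is an invented context key and the exact escaped output is left out:

package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

func main() {
	tpl := pongo2.Must(pongo2.FromString(
		`{{ raw_html }} | {% autoescape off %}{{ raw_html }}{% endautoescape %}`))

	out, err := tpl.Execute(pongo2.Context{"raw_html": "<b>hi</b>"})
	if err != nil {
		panic(err)
	}
	// The first interpolation is HTML-escaped (autoescape defaults to on);
	// the second prints <b>hi</b> verbatim.
	fmt.Println(out)
}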


@ -1,6 +1,7 @@
package pongo2
import (
"bytes"
"fmt"
)
@ -21,18 +22,18 @@ func (node *tagBlockNode) getBlockWrapperByName(tpl *Template) *NodeWrapper {
return t
}
func (node *tagBlockNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagBlockNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
tpl := ctx.template
if tpl == nil {
panic("internal error: tpl == nil")
}
// Determine the block to execute
blockWrapper := node.getBlockWrapperByName(tpl)
if blockWrapper == nil {
block_wrapper := node.getBlockWrapperByName(tpl)
if block_wrapper == nil {
// fmt.Printf("could not find: %s\n", node.name)
return ctx.Error("internal error: block_wrapper == nil in tagBlockNode.Execute()", nil)
}
err := blockWrapper.Execute(ctx, writer)
err := block_wrapper.Execute(ctx, buffer)
if err != nil {
return err
}
@ -47,8 +48,8 @@ func tagBlockParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Er
return nil, arguments.Error("Tag 'block' requires an identifier.", nil)
}
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
name_token := arguments.MatchType(TokenIdentifier)
if name_token == nil {
return nil, arguments.Error("First argument for tag 'block' must be an identifier.", nil)
}
@ -61,15 +62,15 @@ func tagBlockParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Er
return nil, err
}
if endtagargs.Remaining() > 0 {
endtagnameToken := endtagargs.MatchType(TokenIdentifier)
if endtagnameToken != nil {
if endtagnameToken.Val != nameToken.Val {
endtagname_token := endtagargs.MatchType(TokenIdentifier)
if endtagname_token != nil {
if endtagname_token.Val != name_token.Val {
return nil, endtagargs.Error(fmt.Sprintf("Name for 'endblock' must equal to 'block'-tag's name ('%s' != '%s').",
nameToken.Val, endtagnameToken.Val), nil)
name_token.Val, endtagname_token.Val), nil)
}
}
if endtagnameToken == nil || endtagargs.Remaining() > 0 {
if endtagname_token == nil || endtagargs.Remaining() > 0 {
return nil, endtagargs.Error("Either no or only one argument (identifier) allowed for 'endblock'.", nil)
}
}
@ -78,14 +79,14 @@ func tagBlockParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Er
if tpl == nil {
panic("internal error: tpl == nil")
}
_, hasBlock := tpl.blocks[nameToken.Val]
if !hasBlock {
tpl.blocks[nameToken.Val] = wrapper
_, has_block := tpl.blocks[name_token.Val]
if !has_block {
tpl.blocks[name_token.Val] = wrapper
} else {
return nil, arguments.Error(fmt.Sprintf("Block named '%s' already defined", nameToken.Val), nil)
return nil, arguments.Error(fmt.Sprintf("Block named '%s' already defined", name_token.Val), nil)
}
return &tagBlockNode{name: nameToken.Val}, nil
return &tagBlockNode{name: name_token.Val}, nil
}
func init() {


@ -1,13 +1,17 @@
package pongo2
import (
"bytes"
)
type tagCommentNode struct{}
func (node *tagCommentNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagCommentNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
return nil
}
func tagCommentParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
commentNode := &tagCommentNode{}
comment_node := &tagCommentNode{}
// TODO: Process the endtag's arguments (see django 'comment'-tag documentation)
_, _, err := doc.WrapUntilTag("endcomment")
@ -19,7 +23,7 @@ func tagCommentParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *
return nil, arguments.Error("Tag 'comment' does not take any argument.", nil)
}
return commentNode, nil
return comment_node, nil
}
func init() {


@ -1,5 +1,9 @@
package pongo2
import (
"bytes"
)
type tagCycleValue struct {
node *tagCycleNode
value *Value
@ -9,7 +13,7 @@ type tagCycleNode struct {
position *Token
args []IEvaluator
idx int
asName string
as_name string
silent bool
}
@ -17,7 +21,7 @@ func (cv *tagCycleValue) String() string {
return cv.value.String()
}
func (node *tagCycleNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagCycleNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
item := node.args[node.idx%len(node.args)]
node.idx++
@ -42,21 +46,21 @@ func (node *tagCycleNode) Execute(ctx *ExecutionContext, writer TemplateWriter)
t.value = val
if !t.node.silent {
writer.WriteString(val.String())
buffer.WriteString(val.String())
}
} else {
// Regular call
cycleValue := &tagCycleValue{
cycle_value := &tagCycleValue{
node: node,
value: val,
}
if node.asName != "" {
ctx.Private[node.asName] = cycleValue
if node.as_name != "" {
ctx.Private[node.as_name] = cycle_value
}
if !node.silent {
writer.WriteString(val.String())
buffer.WriteString(val.String())
}
}
@ -65,7 +69,7 @@ func (node *tagCycleNode) Execute(ctx *ExecutionContext, writer TemplateWriter)
// HINT: We're not supporting the old comma-seperated list of expresions argument-style
func tagCycleParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
cycleNode := &tagCycleNode{
cycle_node := &tagCycleNode{
position: start,
}
@ -74,19 +78,19 @@ func tagCycleParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Er
if err != nil {
return nil, err
}
cycleNode.args = append(cycleNode.args, node)
cycle_node.args = append(cycle_node.args, node)
if arguments.MatchOne(TokenKeyword, "as") != nil {
// as
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
name_token := arguments.MatchType(TokenIdentifier)
if name_token == nil {
return nil, arguments.Error("Name (identifier) expected after 'as'.", nil)
}
cycleNode.asName = nameToken.Val
cycle_node.as_name = name_token.Val
if arguments.MatchOne(TokenIdentifier, "silent") != nil {
cycleNode.silent = true
cycle_node.silent = true
}
// Now we're finished
@ -98,7 +102,7 @@ func tagCycleParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Er
return nil, arguments.Error("Malformed cycle-tag.", nil)
}
return cycleNode, nil
return cycle_node, nil
}
func init() {


@ -1,15 +1,19 @@
package pongo2
import (
"bytes"
)
type tagExtendsNode struct {
filename string
}
func (node *tagExtendsNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagExtendsNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
return nil
}
func tagExtendsParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
extendsNode := &tagExtendsNode{}
extends_node := &tagExtendsNode{}
if doc.template.level > 1 {
return nil, arguments.Error("The 'extends' tag can only defined on root level.", start)
@ -20,22 +24,22 @@ func tagExtendsParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *
return nil, arguments.Error("This template has already one parent.", start)
}
if filenameToken := arguments.MatchType(TokenString); filenameToken != nil {
if filename_token := arguments.MatchType(TokenString); filename_token != nil {
// prepared, static template
// Get parent's filename
parentFilename := doc.template.set.resolveFilename(doc.template, filenameToken.Val)
parent_filename := doc.template.set.resolveFilename(doc.template, filename_token.Val)
// Parse the parent
parentTemplate, err := doc.template.set.FromFile(parentFilename)
parent_template, err := doc.template.set.FromFile(parent_filename)
if err != nil {
return nil, err.(*Error)
}
// Keep track of things
parentTemplate.child = doc.template
doc.template.parent = parentTemplate
extendsNode.filename = parentFilename
parent_template.child = doc.template
doc.template.parent = parent_template
extends_node.filename = parent_filename
} else {
return nil, arguments.Error("Tag 'extends' requires a template filename as string.", nil)
}
@ -44,7 +48,7 @@ func tagExtendsParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *
return nil, arguments.Error("Tag 'extends' does only take 1 argument.", nil)
}
return extendsNode, nil
return extends_node, nil
}
func init() {
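The block and extends tags above are the two halves of template inheritance: extends records the parent template, block registers an overridable wrapper. A hedged, illustration-only sketch of the template side; the file names, set name and temp-directory handling are invented and assume the one-argument NewSet/SetBaseDirectory API visible on the incoming side of these hunks:

package main

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"

	"github.com/flosch/pongo2"
)

const baseTpl = `<title>{% block title %}Default title{% endblock %}</title>`
const childTpl = `{% extends "base.tpl" %}{% block title %}{{ page }} (overridden){% endblock %}`

func main() {
	// Write both templates to a temp dir so the extends lookup has real files.
	dir, err := ioutil.TempDir("", "pongo2_inheritance")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(dir)
	if err := ioutil.WriteFile(filepath.Join(dir, "base.tpl"), []byte(baseTpl), 0600); err != nil {
		panic(err)
	}
	if err := ioutil.WriteFile(filepath.Join(dir, "child.tpl"), []byte(childTpl), 0600); err != nil {
		panic(err)
	}

	set := pongo2.NewSet("inheritance sketch")
	if err := set.SetBaseDirectory(dir); err != nil {
		panic(err)
	}

	tpl, err := set.FromFile("child.tpl")
	if err != nil {
		panic(err)
	}
	out, err := tpl.Execute(pongo2.Context{"page": "About"})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // <title>About (overridden)</title>
}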


@ -5,8 +5,8 @@ import (
)
type nodeFilterCall struct {
name string
paramExpr IEvaluator
name string
param_expr IEvaluator
}
type tagFilterNode struct {
@ -15,7 +15,7 @@ type tagFilterNode struct {
filterChain []*nodeFilterCall
}
func (node *tagFilterNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagFilterNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
temp := bytes.NewBuffer(make([]byte, 0, 1024)) // 1 KiB size
err := node.bodyWrapper.Execute(ctx, temp)
@ -27,8 +27,8 @@ func (node *tagFilterNode) Execute(ctx *ExecutionContext, writer TemplateWriter)
for _, call := range node.filterChain {
var param *Value
if call.paramExpr != nil {
param, err = call.paramExpr.Evaluate(ctx)
if call.param_expr != nil {
param, err = call.param_expr.Evaluate(ctx)
if err != nil {
return err
}
@ -41,13 +41,13 @@ func (node *tagFilterNode) Execute(ctx *ExecutionContext, writer TemplateWriter)
}
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func tagFilterParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
filterNode := &tagFilterNode{
filter_node := &tagFilterNode{
position: start,
}
@ -55,16 +55,16 @@ func tagFilterParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *E
if err != nil {
return nil, err
}
filterNode.bodyWrapper = wrapper
filter_node.bodyWrapper = wrapper
for arguments.Remaining() > 0 {
filterCall := &nodeFilterCall{}
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
name_token := arguments.MatchType(TokenIdentifier)
if name_token == nil {
return nil, arguments.Error("Expected a filter name (identifier).", nil)
}
filterCall.name = nameToken.Val
filterCall.name = name_token.Val
if arguments.MatchOne(TokenSymbol, ":") != nil {
// Filter parameter
@ -73,10 +73,10 @@ func tagFilterParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *E
if err != nil {
return nil, err
}
filterCall.paramExpr = expr
filterCall.param_expr = expr
}
filterNode.filterChain = append(filterNode.filterChain, filterCall)
filter_node.filterChain = append(filter_node.filterChain, filterCall)
if arguments.MatchOne(TokenSymbol, "|") == nil {
break
@ -87,7 +87,7 @@ func tagFilterParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *E
return nil, arguments.Error("Malformed filter-tag arguments.", nil)
}
return filterNode, nil
return filter_node, nil
}
func init() {


@ -1,11 +1,15 @@
package pongo2
import (
"bytes"
)
type tagFirstofNode struct {
position *Token
args []IEvaluator
}
func (node *tagFirstofNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagFirstofNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
for _, arg := range node.args {
val, err := arg.Evaluate(ctx)
if err != nil {
@ -20,7 +24,7 @@ func (node *tagFirstofNode) Execute(ctx *ExecutionContext, writer TemplateWriter
}
}
writer.WriteString(val.String())
buffer.WriteString(val.String())
return nil
}
}
@ -29,7 +33,7 @@ func (node *tagFirstofNode) Execute(ctx *ExecutionContext, writer TemplateWriter
}
func tagFirstofParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
firstofNode := &tagFirstofNode{
firstof_node := &tagFirstofNode{
position: start,
}
@ -38,10 +42,10 @@ func tagFirstofParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *
if err != nil {
return nil, err
}
firstofNode.args = append(firstofNode.args, node)
firstof_node.args = append(firstof_node.args, node)
}
return firstofNode, nil
return firstof_node, nil
}
func init() {


@ -1,11 +1,14 @@
package pongo2
import (
"bytes"
)
type tagForNode struct {
key string
value string // only for maps: for key, value in map
objectEvaluator IEvaluator
reversed bool
sorted bool
key string
value string // only for maps: for key, value in map
object_evaluator IEvaluator
reversed bool
bodyWrapper *NodeWrapper
emptyWrapper *NodeWrapper
@ -21,7 +24,7 @@ type tagForLoopInformation struct {
Parentloop *tagForLoopInformation
}
func (node *tagForNode) Execute(ctx *ExecutionContext, writer TemplateWriter) (forError *Error) {
func (node *tagForNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) (forError *Error) {
// Backup forloop (as parentloop in public context), key-name and value-name
forCtx := NewChildExecutionContext(ctx)
parentloop := forCtx.Private["forloop"]
@ -39,7 +42,7 @@ func (node *tagForNode) Execute(ctx *ExecutionContext, writer TemplateWriter) (f
// Register loopInfo in public context
forCtx.Private["forloop"] = loopInfo
obj, err := node.objectEvaluator.Evaluate(forCtx)
obj, err := node.object_evaluator.Evaluate(forCtx)
if err != nil {
return err
}
@ -64,7 +67,7 @@ func (node *tagForNode) Execute(ctx *ExecutionContext, writer TemplateWriter) (f
loopInfo.Revcounter0 = count - (idx + 1) // TODO: Not sure about this, have to look it up
// Render elements with updated context
err := node.bodyWrapper.Execute(forCtx, writer)
err := node.bodyWrapper.Execute(forCtx, buffer)
if err != nil {
forError = err
return false
@ -73,30 +76,30 @@ func (node *tagForNode) Execute(ctx *ExecutionContext, writer TemplateWriter) (f
}, func() {
// Nothing to iterate over (maybe wrong type or no items)
if node.emptyWrapper != nil {
err := node.emptyWrapper.Execute(forCtx, writer)
err := node.emptyWrapper.Execute(forCtx, buffer)
if err != nil {
forError = err
}
}
}, node.reversed, node.sorted)
}, node.reversed)
return forError
return nil
}
func tagForParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
forNode := &tagForNode{}
for_node := &tagForNode{}
// Arguments parsing
var valueToken *Token
keyToken := arguments.MatchType(TokenIdentifier)
if keyToken == nil {
var value_token *Token
key_token := arguments.MatchType(TokenIdentifier)
if key_token == nil {
return nil, arguments.Error("Expected an key identifier as first argument for 'for'-tag", nil)
}
if arguments.Match(TokenSymbol, ",") != nil {
// Value name is provided
valueToken = arguments.MatchType(TokenIdentifier)
if valueToken == nil {
value_token = arguments.MatchType(TokenIdentifier)
if value_token == nil {
return nil, arguments.Error("Value name must be an identifier.", nil)
}
}
@ -105,22 +108,18 @@ func tagForParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Erro
return nil, arguments.Error("Expected keyword 'in'.", nil)
}
objectEvaluator, err := arguments.ParseExpression()
object_evaluator, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
forNode.objectEvaluator = objectEvaluator
forNode.key = keyToken.Val
if valueToken != nil {
forNode.value = valueToken.Val
for_node.object_evaluator = object_evaluator
for_node.key = key_token.Val
if value_token != nil {
for_node.value = value_token.Val
}
if arguments.MatchOne(TokenIdentifier, "reversed") != nil {
forNode.reversed = true
}
if arguments.MatchOne(TokenIdentifier, "sorted") != nil {
forNode.sorted = true
for_node.reversed = true
}
if arguments.Remaining() > 0 {
@ -132,7 +131,7 @@ func tagForParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Erro
if err != nil {
return nil, err
}
forNode.bodyWrapper = wrapper
for_node.bodyWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
@ -144,14 +143,14 @@ func tagForParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Erro
if err != nil {
return nil, err
}
forNode.emptyWrapper = wrapper
for_node.emptyWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
}
return forNode, nil
return for_node, nil
}
func init() {
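tagForNode above maintains the forloop bookkeeping (Counter, Revcounter, Parentloop), honours a "reversed" flag and supports an optional empty branch. A hedged template-side sketch; the context key "items" is invented and the example is not part of this commit:

package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

func main() {
	// forloop.Counter comes from the loop information struct above;
	// {% empty %} renders only when there is nothing to iterate over.
	tpl := pongo2.Must(pongo2.FromString(
		`{% for item in items reversed %}{{ forloop.Counter }}:{{ item }} {% empty %}nothing{% endfor %}`))

	out, err := tpl.Execute(pongo2.Context{"items": []string{"a", "b", "c"}})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // prints "1:c 2:b 3:a "
}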


@ -1,11 +1,15 @@
package pongo2
import (
"bytes"
)
type tagIfNode struct {
conditions []IEvaluator
wrappers []*NodeWrapper
}
func (node *tagIfNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagIfNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
for i, condition := range node.conditions {
result, err := condition.Evaluate(ctx)
if err != nil {
@ -13,25 +17,26 @@ func (node *tagIfNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Er
}
if result.IsTrue() {
return node.wrappers[i].Execute(ctx, writer)
}
// Last condition?
if len(node.conditions) == i+1 && len(node.wrappers) > i+1 {
return node.wrappers[i+1].Execute(ctx, writer)
return node.wrappers[i].Execute(ctx, buffer)
} else {
// Last condition?
if len(node.conditions) == i+1 && len(node.wrappers) > i+1 {
return node.wrappers[i+1].Execute(ctx, buffer)
}
}
}
return nil
}
func tagIfParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ifNode := &tagIfNode{}
if_node := &tagIfNode{}
// Parse first and main IF condition
condition, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
ifNode.conditions = append(ifNode.conditions, condition)
if_node.conditions = append(if_node.conditions, condition)
if arguments.Remaining() > 0 {
return nil, arguments.Error("If-condition is malformed.", nil)
@ -39,27 +44,27 @@ func tagIfParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error
// Check the rest
for {
wrapper, tagArgs, err := doc.WrapUntilTag("elif", "else", "endif")
wrapper, tag_args, err := doc.WrapUntilTag("elif", "else", "endif")
if err != nil {
return nil, err
}
ifNode.wrappers = append(ifNode.wrappers, wrapper)
if_node.wrappers = append(if_node.wrappers, wrapper)
if wrapper.Endtag == "elif" {
// elif can take a condition
condition, err = tagArgs.ParseExpression()
condition, err := tag_args.ParseExpression()
if err != nil {
return nil, err
}
ifNode.conditions = append(ifNode.conditions, condition)
if_node.conditions = append(if_node.conditions, condition)
if tagArgs.Remaining() > 0 {
return nil, tagArgs.Error("Elif-condition is malformed.", nil)
if tag_args.Remaining() > 0 {
return nil, tag_args.Error("Elif-condition is malformed.", nil)
}
} else {
if tagArgs.Count() > 0 {
if tag_args.Count() > 0 {
// else/endif can't take any conditions
return nil, tagArgs.Error("Arguments not allowed here.", nil)
return nil, tag_args.Error("Arguments not allowed here.", nil)
}
}
@ -68,7 +73,7 @@ func tagIfParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error
}
}
return ifNode, nil
return if_node, nil
}
func init() {


@ -5,15 +5,16 @@ import (
)
type tagIfchangedNode struct {
watchedExpr []IEvaluator
lastValues []*Value
lastContent []byte
thenWrapper *NodeWrapper
elseWrapper *NodeWrapper
watched_expr []IEvaluator
last_values []*Value
last_content []byte
thenWrapper *NodeWrapper
elseWrapper *NodeWrapper
}
func (node *tagIfchangedNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
if len(node.watchedExpr) == 0 {
func (node *tagIfchangedNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
if len(node.watched_expr) == 0 {
// Check against own rendered body
buf := bytes.NewBuffer(make([]byte, 0, 1024)) // 1 KiB
@ -22,43 +23,43 @@ func (node *tagIfchangedNode) Execute(ctx *ExecutionContext, writer TemplateWrit
return err
}
bufBytes := buf.Bytes()
if !bytes.Equal(node.lastContent, bufBytes) {
buf_bytes := buf.Bytes()
if !bytes.Equal(node.last_content, buf_bytes) {
// Rendered content changed, output it
writer.Write(bufBytes)
node.lastContent = bufBytes
buffer.Write(buf_bytes)
node.last_content = buf_bytes
}
} else {
nowValues := make([]*Value, 0, len(node.watchedExpr))
for _, expr := range node.watchedExpr {
now_values := make([]*Value, 0, len(node.watched_expr))
for _, expr := range node.watched_expr {
val, err := expr.Evaluate(ctx)
if err != nil {
return err
}
nowValues = append(nowValues, val)
now_values = append(now_values, val)
}
// Compare old to new values now
changed := len(node.lastValues) == 0
changed := len(node.last_values) == 0
for idx, oldVal := range node.lastValues {
if !oldVal.EqualValueTo(nowValues[idx]) {
for idx, old_val := range node.last_values {
if !old_val.EqualValueTo(now_values[idx]) {
changed = true
break // we can stop here because ONE value changed
}
}
node.lastValues = nowValues
node.last_values = now_values
if changed {
// Render thenWrapper
err := node.thenWrapper.Execute(ctx, writer)
err := node.thenWrapper.Execute(ctx, buffer)
if err != nil {
return err
}
} else {
// Render elseWrapper
err := node.elseWrapper.Execute(ctx, writer)
err := node.elseWrapper.Execute(ctx, buffer)
if err != nil {
return err
}
@ -69,7 +70,7 @@ func (node *tagIfchangedNode) Execute(ctx *ExecutionContext, writer TemplateWrit
}
func tagIfchangedParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ifchangedNode := &tagIfchangedNode{}
ifchanged_node := &tagIfchangedNode{}
for arguments.Remaining() > 0 {
// Parse condition
@ -77,7 +78,7 @@ func tagIfchangedParser(doc *Parser, start *Token, arguments *Parser) (INodeTag,
if err != nil {
return nil, err
}
ifchangedNode.watchedExpr = append(ifchangedNode.watchedExpr, expr)
ifchanged_node.watched_expr = append(ifchanged_node.watched_expr, expr)
}
if arguments.Remaining() > 0 {
@ -89,7 +90,7 @@ func tagIfchangedParser(doc *Parser, start *Token, arguments *Parser) (INodeTag,
if err != nil {
return nil, err
}
ifchangedNode.thenWrapper = wrapper
ifchanged_node.thenWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
@ -101,14 +102,14 @@ func tagIfchangedParser(doc *Parser, start *Token, arguments *Parser) (INodeTag,
if err != nil {
return nil, err
}
ifchangedNode.elseWrapper = wrapper
ifchanged_node.elseWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
}
return ifchangedNode, nil
return ifchanged_node, nil
}
func init() {


@ -1,12 +1,16 @@
package pongo2
import (
"bytes"
)
type tagIfEqualNode struct {
var1, var2 IEvaluator
thenWrapper *NodeWrapper
elseWrapper *NodeWrapper
}
func (node *tagIfEqualNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagIfEqualNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
r1, err := node.var1.Evaluate(ctx)
if err != nil {
return err
@ -19,16 +23,17 @@ func (node *tagIfEqualNode) Execute(ctx *ExecutionContext, writer TemplateWriter
result := r1.EqualValueTo(r2)
if result {
return node.thenWrapper.Execute(ctx, writer)
}
if node.elseWrapper != nil {
return node.elseWrapper.Execute(ctx, writer)
return node.thenWrapper.Execute(ctx, buffer)
} else {
if node.elseWrapper != nil {
return node.elseWrapper.Execute(ctx, buffer)
}
}
return nil
}
func tagIfEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ifequalNode := &tagIfEqualNode{}
ifequal_node := &tagIfEqualNode{}
// Parse two expressions
var1, err := arguments.ParseExpression()
@ -39,8 +44,8 @@ func tagIfEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *
if err != nil {
return nil, err
}
ifequalNode.var1 = var1
ifequalNode.var2 = var2
ifequal_node.var1 = var1
ifequal_node.var2 = var2
if arguments.Remaining() > 0 {
return nil, arguments.Error("ifequal only takes 2 arguments.", nil)
@ -51,7 +56,7 @@ func tagIfEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *
if err != nil {
return nil, err
}
ifequalNode.thenWrapper = wrapper
ifequal_node.thenWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
@ -63,14 +68,14 @@ func tagIfEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *
if err != nil {
return nil, err
}
ifequalNode.elseWrapper = wrapper
ifequal_node.elseWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
}
return ifequalNode, nil
return ifequal_node, nil
}
func init() {


@ -1,12 +1,16 @@
package pongo2
import (
"bytes"
)
type tagIfNotEqualNode struct {
var1, var2 IEvaluator
thenWrapper *NodeWrapper
elseWrapper *NodeWrapper
}
func (node *tagIfNotEqualNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagIfNotEqualNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
r1, err := node.var1.Evaluate(ctx)
if err != nil {
return err
@ -19,16 +23,17 @@ func (node *tagIfNotEqualNode) Execute(ctx *ExecutionContext, writer TemplateWri
result := !r1.EqualValueTo(r2)
if result {
return node.thenWrapper.Execute(ctx, writer)
}
if node.elseWrapper != nil {
return node.elseWrapper.Execute(ctx, writer)
return node.thenWrapper.Execute(ctx, buffer)
} else {
if node.elseWrapper != nil {
return node.elseWrapper.Execute(ctx, buffer)
}
}
return nil
}
func tagIfNotEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ifnotequalNode := &tagIfNotEqualNode{}
ifnotequal_node := &tagIfNotEqualNode{}
// Parse two expressions
var1, err := arguments.ParseExpression()
@ -39,19 +44,19 @@ func tagIfNotEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag
if err != nil {
return nil, err
}
ifnotequalNode.var1 = var1
ifnotequalNode.var2 = var2
ifnotequal_node.var1 = var1
ifnotequal_node.var2 = var2
if arguments.Remaining() > 0 {
return nil, arguments.Error("ifequal only takes 2 arguments.", nil)
}
// Wrap then/else-blocks
wrapper, endargs, err := doc.WrapUntilTag("else", "endifnotequal")
wrapper, endargs, err := doc.WrapUntilTag("else", "endifequal")
if err != nil {
return nil, err
}
ifnotequalNode.thenWrapper = wrapper
ifnotequal_node.thenWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
@ -59,18 +64,18 @@ func tagIfNotEqualParser(doc *Parser, start *Token, arguments *Parser) (INodeTag
if wrapper.Endtag == "else" {
// if there's an else in the if-statement, we need the else-Block as well
wrapper, endargs, err = doc.WrapUntilTag("endifnotequal")
wrapper, endargs, err = doc.WrapUntilTag("endifequal")
if err != nil {
return nil, err
}
ifnotequalNode.elseWrapper = wrapper
ifnotequal_node.elseWrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
}
return ifnotequalNode, nil
return ifnotequal_node, nil
}
func init() {


@ -1,16 +1,18 @@
package pongo2
import (
"bytes"
"fmt"
)
type tagImportNode struct {
position *Token
filename string
template *Template
macros map[string]*tagMacroNode // alias/name -> macro instance
}
func (node *tagImportNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagImportNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
for name, macro := range node.macros {
func(name string, macro *tagMacroNode) {
ctx.Private[name] = func(args ...*Value) *Value {
@ -22,50 +24,50 @@ func (node *tagImportNode) Execute(ctx *ExecutionContext, writer TemplateWriter)
}
func tagImportParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
importNode := &tagImportNode{
import_node := &tagImportNode{
position: start,
macros: make(map[string]*tagMacroNode),
}
filenameToken := arguments.MatchType(TokenString)
if filenameToken == nil {
filename_token := arguments.MatchType(TokenString)
if filename_token == nil {
return nil, arguments.Error("Import-tag needs a filename as string.", nil)
}
importNode.filename = doc.template.set.resolveFilename(doc.template, filenameToken.Val)
import_node.filename = doc.template.set.resolveFilename(doc.template, filename_token.Val)
if arguments.Remaining() == 0 {
return nil, arguments.Error("You must at least specify one macro to import.", nil)
}
// Compile the given template
tpl, err := doc.template.set.FromFile(importNode.filename)
tpl, err := doc.template.set.FromFile(import_node.filename)
if err != nil {
return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, start)
}
for arguments.Remaining() > 0 {
macroNameToken := arguments.MatchType(TokenIdentifier)
if macroNameToken == nil {
macro_name_token := arguments.MatchType(TokenIdentifier)
if macro_name_token == nil {
return nil, arguments.Error("Expected macro name (identifier).", nil)
}
asName := macroNameToken.Val
as_name := macro_name_token.Val
if arguments.Match(TokenKeyword, "as") != nil {
aliasToken := arguments.MatchType(TokenIdentifier)
if aliasToken == nil {
alias_token := arguments.MatchType(TokenIdentifier)
if alias_token == nil {
return nil, arguments.Error("Expected macro alias name (identifier).", nil)
}
asName = aliasToken.Val
as_name = alias_token.Val
}
macroInstance, has := tpl.exportedMacros[macroNameToken.Val]
macro_instance, has := tpl.exported_macros[macro_name_token.Val]
if !has {
return nil, arguments.Error(fmt.Sprintf("Macro '%s' not found (or not exported) in '%s'.", macroNameToken.Val,
importNode.filename), macroNameToken)
return nil, arguments.Error(fmt.Sprintf("Macro '%s' not found (or not exported) in '%s'.", macro_name_token.Val,
import_node.filename), macro_name_token)
}
importNode.macros[asName] = macroInstance
import_node.macros[as_name] = macro_instance
if arguments.Remaining() == 0 {
break
@ -76,7 +78,7 @@ func tagImportParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *E
}
}
return importNode, nil
return import_node, nil
}
func init() {

View File

@ -1,38 +1,41 @@
package pongo2
import (
"bytes"
)
type tagIncludeNode struct {
tpl *Template
filenameEvaluator IEvaluator
lazy bool
only bool
filename string
withPairs map[string]IEvaluator
ifExists bool
tpl *Template
filename_evaluator IEvaluator
lazy bool
only bool
filename string
with_pairs map[string]IEvaluator
}
func (node *tagIncludeNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagIncludeNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
// Building the context for the template
includeCtx := make(Context)
include_ctx := make(Context)
// Fill the context with all data from the parent
if !node.only {
includeCtx.Update(ctx.Public)
includeCtx.Update(ctx.Private)
include_ctx.Update(ctx.Public)
include_ctx.Update(ctx.Private)
}
// Put all custom with-pairs into the context
for key, value := range node.withPairs {
for key, value := range node.with_pairs {
val, err := value.Evaluate(ctx)
if err != nil {
return err
}
includeCtx[key] = val
include_ctx[key] = val
}
// Execute the template
if node.lazy {
// Evaluate the filename
filename, err := node.filenameEvaluator.Evaluate(ctx)
filename, err := node.filename_evaluator.Evaluate(ctx)
if err != nil {
return err
}
@ -42,93 +45,76 @@ func (node *tagIncludeNode) Execute(ctx *ExecutionContext, writer TemplateWriter
}
// Get include-filename
includedFilename := ctx.template.set.resolveFilename(ctx.template, filename.String())
included_filename := ctx.template.set.resolveFilename(ctx.template, filename.String())
includedTpl, err2 := ctx.template.set.FromFile(includedFilename)
included_tpl, err2 := ctx.template.set.FromFile(included_filename)
if err2 != nil {
// if this is a ReadFile error and the "if_exists" flag is enabled
if node.ifExists && err2.(*Error).Sender == "fromfile" {
return nil
}
return err2.(*Error)
}
err2 = includedTpl.ExecuteWriter(includeCtx, writer)
err2 = included_tpl.ExecuteWriter(include_ctx, buffer)
if err2 != nil {
return err2.(*Error)
}
return nil
} else {
// Template is already parsed with static filename
err := node.tpl.ExecuteWriter(include_ctx, buffer)
if err != nil {
return err.(*Error)
}
return nil
}
// Template is already parsed with static filename
err := node.tpl.ExecuteWriter(includeCtx, writer)
if err != nil {
return err.(*Error)
}
return nil
}
type tagIncludeEmptyNode struct{}
func (node *tagIncludeEmptyNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
return nil
}
func tagIncludeParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
includeNode := &tagIncludeNode{
withPairs: make(map[string]IEvaluator),
include_node := &tagIncludeNode{
with_pairs: make(map[string]IEvaluator),
}
if filenameToken := arguments.MatchType(TokenString); filenameToken != nil {
if filename_token := arguments.MatchType(TokenString); filename_token != nil {
// prepared, static template
// "if_exists" flag
ifExists := arguments.Match(TokenIdentifier, "if_exists") != nil
// Get include-filename
includedFilename := doc.template.set.resolveFilename(doc.template, filenameToken.Val)
included_filename := doc.template.set.resolveFilename(doc.template, filename_token.Val)
// Parse the parent
includeNode.filename = includedFilename
includedTpl, err := doc.template.set.FromFile(includedFilename)
include_node.filename = included_filename
included_tpl, err := doc.template.set.FromFile(included_filename)
if err != nil {
// if this is a ReadFile error and the "if_exists" token is present, we should create an empty node
if err.(*Error).Sender == "fromfile" && ifExists {
return &tagIncludeEmptyNode{}, nil
}
return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, filenameToken)
return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, filename_token)
}
includeNode.tpl = includedTpl
include_node.tpl = included_tpl
} else {
// No String, then the user wants to use lazy-evaluation (slower, but possible)
filenameEvaluator, err := arguments.ParseExpression()
filename_evaluator, err := arguments.ParseExpression()
if err != nil {
return nil, err.updateFromTokenIfNeeded(doc.template, filenameToken)
return nil, err.updateFromTokenIfNeeded(doc.template, filename_token)
}
includeNode.filenameEvaluator = filenameEvaluator
includeNode.lazy = true
includeNode.ifExists = arguments.Match(TokenIdentifier, "if_exists") != nil // "if_exists" flag
include_node.filename_evaluator = filename_evaluator
include_node.lazy = true
}
// After having parsed the filename we're gonna parse the with+only options
if arguments.Match(TokenIdentifier, "with") != nil {
for arguments.Remaining() > 0 {
// We have at least one key=expr pair (because of starting "with")
keyToken := arguments.MatchType(TokenIdentifier)
if keyToken == nil {
key_token := arguments.MatchType(TokenIdentifier)
if key_token == nil {
return nil, arguments.Error("Expected an identifier", nil)
}
if arguments.Match(TokenSymbol, "=") == nil {
return nil, arguments.Error("Expected '='.", nil)
}
valueExpr, err := arguments.ParseExpression()
value_expr, err := arguments.ParseExpression()
if err != nil {
return nil, err.updateFromTokenIfNeeded(doc.template, keyToken)
return nil, err.updateFromTokenIfNeeded(doc.template, key_token)
}
includeNode.withPairs[keyToken.Val] = valueExpr
include_node.with_pairs[key_token.Val] = value_expr
// Only?
if arguments.Match(TokenIdentifier, "only") != nil {
includeNode.only = true
include_node.only = true
break // stop parsing arguments because it's the last option
}
}
@ -138,7 +124,7 @@ func tagIncludeParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *
return nil, arguments.Error("Malformed 'include'-tag arguments.", nil)
}
return includeNode, nil
return include_node, nil
}
func init() {
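A rough usage sketch for the include tag parsed above, assuming the v3.0 behaviour shown in this hunk: a static filename is resolved and compiled while the outer template is parsed, and "with key=expr ... only" builds the child context. The helper filename, its content and the context key are made up for illustration:

package main

import (
    "fmt"
    "io/ioutil"

    "github.com/flosch/pongo2"
)

func main() {
    // Hypothetical helper template on disk; relative names resolve against the
    // including template (or the set's base directory, see the TemplateSet changes further down).
    if err := ioutil.WriteFile("greeting.helper", []byte("I'm {{ what_am_i }}"), 0644); err != nil {
        panic(err)
    }

    tpl, err := pongo2.FromString(`Start '{% include "greeting.helper" with what_am_i="guest" only %}' End`)
    if err != nil {
        panic(err)
    }

    out, err := tpl.Execute(pongo2.Context{})
    if err != nil {
        panic(err)
    }
    fmt.Println(out) // Start 'I'm guest' End
}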

View File

@ -1,6 +1,7 @@
package pongo2
import (
"bytes"
"math/rand"
"strings"
"time"
@ -18,64 +19,64 @@ type tagLoremNode struct {
random bool // does not use the default paragraph "Lorem ipsum dolor sit amet, ..."
}
func (node *tagLoremNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagLoremNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
switch node.method {
case "b":
if node.random {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString("\n")
buffer.WriteString("\n")
}
par := tagLoremParagraphs[rand.Intn(len(tagLoremParagraphs))]
writer.WriteString(par)
buffer.WriteString(par)
}
} else {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString("\n")
buffer.WriteString("\n")
}
par := tagLoremParagraphs[i%len(tagLoremParagraphs)]
writer.WriteString(par)
buffer.WriteString(par)
}
}
case "w":
if node.random {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString(" ")
buffer.WriteString(" ")
}
word := tagLoremWords[rand.Intn(len(tagLoremWords))]
writer.WriteString(word)
buffer.WriteString(word)
}
} else {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString(" ")
buffer.WriteString(" ")
}
word := tagLoremWords[i%len(tagLoremWords)]
writer.WriteString(word)
buffer.WriteString(word)
}
}
case "p":
if node.random {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString("\n")
buffer.WriteString("\n")
}
writer.WriteString("<p>")
buffer.WriteString("<p>")
par := tagLoremParagraphs[rand.Intn(len(tagLoremParagraphs))]
writer.WriteString(par)
writer.WriteString("</p>")
buffer.WriteString(par)
buffer.WriteString("</p>")
}
} else {
for i := 0; i < node.count; i++ {
if i > 0 {
writer.WriteString("\n")
buffer.WriteString("\n")
}
writer.WriteString("<p>")
buffer.WriteString("<p>")
par := tagLoremParagraphs[i%len(tagLoremParagraphs)]
writer.WriteString(par)
writer.WriteString("</p>")
buffer.WriteString(par)
buffer.WriteString("</p>")
}
}
@ -87,33 +88,33 @@ func (node *tagLoremNode) Execute(ctx *ExecutionContext, writer TemplateWriter)
}
func tagLoremParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
loremNode := &tagLoremNode{
lorem_node := &tagLoremNode{
position: start,
count: 1,
method: "b",
}
if countToken := arguments.MatchType(TokenNumber); countToken != nil {
loremNode.count = AsValue(countToken.Val).Integer()
if count_token := arguments.MatchType(TokenNumber); count_token != nil {
lorem_node.count = AsValue(count_token.Val).Integer()
}
if methodToken := arguments.MatchType(TokenIdentifier); methodToken != nil {
if methodToken.Val != "w" && methodToken.Val != "p" && methodToken.Val != "b" {
if method_token := arguments.MatchType(TokenIdentifier); method_token != nil {
if method_token.Val != "w" && method_token.Val != "p" && method_token.Val != "b" {
return nil, arguments.Error("lorem-method must be either 'w', 'p' or 'b'.", nil)
}
loremNode.method = methodToken.Val
lorem_node.method = method_token.Val
}
if arguments.MatchOne(TokenIdentifier, "random") != nil {
loremNode.random = true
lorem_node.random = true
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed lorem-tag arguments.", nil)
}
return loremNode, nil
return lorem_node, nil
}
func init() {
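For reference, the lorem tag parsed above takes an optional count, a method of "w" (words), "p" (HTML paragraphs) or "b" (plain paragraphs, the default) and an optional "random" flag; without "random" it cycles deterministically through the built-in text. A small fragment, assuming the pongo2 and fmt imports used in the other sketches:

out := pongo2.DefaultSet.RenderTemplateString("{% lorem 5 w %}", pongo2.Context{})
fmt.Println(out) // the first five words of the built-in lorem ipsum text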

View File

@ -6,16 +6,16 @@ import (
)
type tagMacroNode struct {
position *Token
name string
argsOrder []string
args map[string]IEvaluator
exported bool
position *Token
name string
args_order []string
args map[string]IEvaluator
exported bool
wrapper *NodeWrapper
}
func (node *tagMacroNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagMacroNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
ctx.Private[node.name] = func(args ...*Value) *Value {
return node.call(ctx, args...)
}
@ -24,28 +24,28 @@ func (node *tagMacroNode) Execute(ctx *ExecutionContext, writer TemplateWriter)
}
func (node *tagMacroNode) call(ctx *ExecutionContext, args ...*Value) *Value {
argsCtx := make(Context)
args_ctx := make(Context)
for k, v := range node.args {
if v == nil {
// User did not provide a default value
argsCtx[k] = nil
args_ctx[k] = nil
} else {
// Evaluate the default value
valueExpr, err := v.Evaluate(ctx)
value_expr, err := v.Evaluate(ctx)
if err != nil {
ctx.Logf(err.Error())
return AsSafeValue(err.Error())
}
argsCtx[k] = valueExpr
args_ctx[k] = value_expr
}
}
if len(args) > len(node.argsOrder) {
if len(args) > len(node.args_order) {
// Too many arguments, we're ignoring them and just logging into debug mode.
err := ctx.Error(fmt.Sprintf("Macro '%s' called with too many arguments (%d instead of %d).",
node.name, len(args), len(node.argsOrder)), nil).updateFromTokenIfNeeded(ctx.template, node.position)
node.name, len(args), len(node.args_order)), nil).updateFromTokenIfNeeded(ctx.template, node.position)
ctx.Logf(err.Error()) // TODO: This is a workaround, because the error is not returned yet to the Execution()-methods
return AsSafeValue(err.Error())
@ -55,10 +55,10 @@ func (node *tagMacroNode) call(ctx *ExecutionContext, args ...*Value) *Value {
macroCtx := NewChildExecutionContext(ctx)
// Register all arguments in the private context
macroCtx.Private.Update(argsCtx)
macroCtx.Private.Update(args_ctx)
for idx, argValue := range args {
macroCtx.Private[node.argsOrder[idx]] = argValue.Interface()
for idx, arg_value := range args {
macroCtx.Private[node.args_order[idx]] = arg_value.Interface()
}
var b bytes.Buffer
@ -71,38 +71,38 @@ func (node *tagMacroNode) call(ctx *ExecutionContext, args ...*Value) *Value {
}
func tagMacroParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
macroNode := &tagMacroNode{
macro_node := &tagMacroNode{
position: start,
args: make(map[string]IEvaluator),
}
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
name_token := arguments.MatchType(TokenIdentifier)
if name_token == nil {
return nil, arguments.Error("Macro-tag needs at least an identifier as name.", nil)
}
macroNode.name = nameToken.Val
macro_node.name = name_token.Val
if arguments.MatchOne(TokenSymbol, "(") == nil {
return nil, arguments.Error("Expected '('.", nil)
}
for arguments.Match(TokenSymbol, ")") == nil {
argNameToken := arguments.MatchType(TokenIdentifier)
if argNameToken == nil {
arg_name_token := arguments.MatchType(TokenIdentifier)
if arg_name_token == nil {
return nil, arguments.Error("Expected argument name as identifier.", nil)
}
macroNode.argsOrder = append(macroNode.argsOrder, argNameToken.Val)
macro_node.args_order = append(macro_node.args_order, arg_name_token.Val)
if arguments.Match(TokenSymbol, "=") != nil {
// Default expression follows
argDefaultExpr, err := arguments.ParseExpression()
arg_default_expr, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
macroNode.args[argNameToken.Val] = argDefaultExpr
macro_node.args[arg_name_token.Val] = arg_default_expr
} else {
// No default expression
macroNode.args[argNameToken.Val] = nil
macro_node.args[arg_name_token.Val] = nil
}
if arguments.Match(TokenSymbol, ")") != nil {
@ -114,7 +114,7 @@ func tagMacroParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Er
}
if arguments.Match(TokenKeyword, "export") != nil {
macroNode.exported = true
macro_node.exported = true
}
if arguments.Remaining() > 0 {
@ -126,22 +126,22 @@ func tagMacroParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Er
if err != nil {
return nil, err
}
macroNode.wrapper = wrapper
macro_node.wrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
}
if macroNode.exported {
if macro_node.exported {
// Now register the macro if it wants to be exported
_, has := doc.template.exportedMacros[macroNode.name]
_, has := doc.template.exported_macros[macro_node.name]
if has {
return nil, doc.Error(fmt.Sprintf("Another macro with name '%s' already exported.", macroNode.name), start)
return nil, doc.Error(fmt.Sprintf("Another macro with name '%s' already exported.", macro_node.name), start)
}
doc.template.exportedMacros[macroNode.name] = macroNode
doc.template.exported_macros[macro_node.name] = macro_node
}
return macroNode, nil
return macro_node, nil
}
func init() {
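The macro tag parsed above registers a callable in the template context; arguments may carry default expressions, and the "export" keyword makes a macro importable from other templates via the import tag earlier in this diff. A small fragment, assuming the pongo2 and fmt imports used in the other sketches and the usual endmacro closing tag (the WrapUntilTag call sits outside the shown hunk):

out := pongo2.DefaultSet.RenderTemplateString(
    `{% macro greet(name="world") %}Hello {{ name }}{% endmacro %}{{ greet("pongo2") }}`,
    pongo2.Context{})
fmt.Println(out) // Hello pongo2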

View File

@ -1,6 +1,7 @@
package pongo2
import (
"bytes"
"time"
)
@ -10,7 +11,7 @@ type tagNowNode struct {
fake bool
}
func (node *tagNowNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagNowNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
var t time.Time
if node.fake {
t = time.Date(2014, time.February, 05, 18, 31, 45, 00, time.UTC)
@ -18,31 +19,31 @@ func (node *tagNowNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *E
t = time.Now()
}
writer.WriteString(t.Format(node.format))
buffer.WriteString(t.Format(node.format))
return nil
}
func tagNowParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
nowNode := &tagNowNode{
now_node := &tagNowNode{
position: start,
}
formatToken := arguments.MatchType(TokenString)
if formatToken == nil {
format_token := arguments.MatchType(TokenString)
if format_token == nil {
return nil, arguments.Error("Expected a format string.", nil)
}
nowNode.format = formatToken.Val
now_node.format = format_token.Val
if arguments.MatchOne(TokenIdentifier, "fake") != nil {
nowNode.fake = true
now_node.fake = true
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed now-tag arguments.", nil)
}
return nowNode, nil
return now_node, nil
}
func init() {
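The now tag above hands its format string to Go's time.Time.Format, so it expects a Go reference layout; the "fake" flag pins the timestamp to the constant date in Execute, which makes output reproducible in tests. A small fragment, assuming the pongo2 and fmt imports used in the other sketches:

out := pongo2.DefaultSet.RenderTemplateString(`{% now "2006-01-02 15:04" fake %}`, pongo2.Context{})
fmt.Println(out) // 2014-02-05 18:31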

View File

@ -1,11 +1,13 @@
package pongo2
import "bytes"
type tagSetNode struct {
name string
expression IEvaluator
}
func (node *tagSetNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagSetNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
// Evaluate expression
value, err := node.expression.Evaluate(ctx)
if err != nil {

View File

@ -11,7 +11,7 @@ type tagSpacelessNode struct {
var tagSpacelessRegexp = regexp.MustCompile(`(?U:(<.*>))([\t\n\v\f\r ]+)(?U:(<.*>))`)
func (node *tagSpacelessNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagSpacelessNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
b := bytes.NewBuffer(make([]byte, 0, 1024)) // 1 KiB
err := node.wrapper.Execute(ctx, b)
@ -28,25 +28,25 @@ func (node *tagSpacelessNode) Execute(ctx *ExecutionContext, writer TemplateWrit
s = s2
}
writer.WriteString(s)
buffer.WriteString(s)
return nil
}
func tagSpacelessParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
spacelessNode := &tagSpacelessNode{}
spaceless_node := &tagSpacelessNode{}
wrapper, _, err := doc.WrapUntilTag("endspaceless")
if err != nil {
return nil, err
}
spacelessNode.wrapper = wrapper
spaceless_node.wrapper = wrapper
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed spaceless-tag arguments.", nil)
}
return spacelessNode, nil
return spaceless_node, nil
}
func init() {

View File

@ -1,6 +1,7 @@
package pongo2
import (
"bytes"
"io/ioutil"
)
@ -10,47 +11,47 @@ type tagSSINode struct {
template *Template
}
func (node *tagSSINode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagSSINode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
if node.template != nil {
// Execute the template within the current context
includeCtx := make(Context)
includeCtx.Update(ctx.Public)
includeCtx.Update(ctx.Private)
err := node.template.execute(includeCtx, writer)
err := node.template.ExecuteWriter(includeCtx, buffer)
if err != nil {
return err.(*Error)
}
} else {
// Just print out the content
writer.WriteString(node.content)
buffer.WriteString(node.content)
}
return nil
}
func tagSSIParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
SSINode := &tagSSINode{}
ssi_node := &tagSSINode{}
if fileToken := arguments.MatchType(TokenString); fileToken != nil {
SSINode.filename = fileToken.Val
if file_token := arguments.MatchType(TokenString); file_token != nil {
ssi_node.filename = file_token.Val
if arguments.Match(TokenIdentifier, "parsed") != nil {
// parsed
temporaryTpl, err := doc.template.set.FromFile(doc.template.set.resolveFilename(doc.template, fileToken.Val))
temporary_tpl, err := doc.template.set.FromFile(doc.template.set.resolveFilename(doc.template, file_token.Val))
if err != nil {
return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, fileToken)
return nil, err.(*Error).updateFromTokenIfNeeded(doc.template, file_token)
}
SSINode.template = temporaryTpl
ssi_node.template = temporary_tpl
} else {
// plaintext
buf, err := ioutil.ReadFile(doc.template.set.resolveFilename(doc.template, fileToken.Val))
buf, err := ioutil.ReadFile(doc.template.set.resolveFilename(doc.template, file_token.Val))
if err != nil {
return nil, (&Error{
Sender: "tag:ssi",
ErrorMsg: err.Error(),
}).updateFromTokenIfNeeded(doc.template, fileToken)
}).updateFromTokenIfNeeded(doc.template, file_token)
}
SSINode.content = string(buf)
ssi_node.content = string(buf)
}
} else {
return nil, arguments.Error("First argument must be a string.", nil)
@ -60,7 +61,7 @@ func tagSSIParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Erro
return nil, arguments.Error("Malformed SSI-tag argument.", nil)
}
return SSINode, nil
return ssi_node, nil
}
func init() {

View File

@ -1,5 +1,9 @@
package pongo2
import (
"bytes"
)
type tagTemplateTagNode struct {
content string
}
@ -15,20 +19,20 @@ var templateTagMapping = map[string]string{
"closecomment": "#}",
}
func (node *tagTemplateTagNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
writer.WriteString(node.content)
func (node *tagTemplateTagNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
buffer.WriteString(node.content)
return nil
}
func tagTemplateTagParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
ttNode := &tagTemplateTagNode{}
tt_node := &tagTemplateTagNode{}
if argToken := arguments.MatchType(TokenIdentifier); argToken != nil {
output, found := templateTagMapping[argToken.Val]
if arg_token := arguments.MatchType(TokenIdentifier); arg_token != nil {
output, found := templateTagMapping[arg_token.Val]
if !found {
return nil, arguments.Error("Argument not found", argToken)
return nil, arguments.Error("Argument not found", arg_token)
}
ttNode.content = output
tt_node.content = output
} else {
return nil, arguments.Error("Identifier expected.", nil)
}
@ -37,7 +41,7 @@ func tagTemplateTagParser(doc *Parser, start *Token, arguments *Parser) (INodeTa
return nil, arguments.Error("Malformed templatetag-tag argument.", nil)
}
return ttNode, nil
return tt_node, nil
}
func init() {
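The templatetag tag simply emits the literal delimiter looked up in templateTagMapping (closecomment is the entry visible above). A one-line fragment, assuming the pongo2 and fmt imports used in the other sketches:

out := pongo2.DefaultSet.RenderTemplateString("{% templatetag closecomment %}", pongo2.Context{})
fmt.Println(out) // #}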

View File

@ -1,6 +1,7 @@
package pongo2
import (
"bytes"
"fmt"
"math"
)
@ -9,10 +10,10 @@ type tagWidthratioNode struct {
position *Token
current, max IEvaluator
width IEvaluator
ctxName string
ctx_name string
}
func (node *tagWidthratioNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagWidthratioNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
current, err := node.current.Evaluate(ctx)
if err != nil {
return err
@ -30,17 +31,17 @@ func (node *tagWidthratioNode) Execute(ctx *ExecutionContext, writer TemplateWri
value := int(math.Ceil(current.Float()/max.Float()*width.Float() + 0.5))
if node.ctxName == "" {
writer.WriteString(fmt.Sprintf("%d", value))
if node.ctx_name == "" {
buffer.WriteString(fmt.Sprintf("%d", value))
} else {
ctx.Private[node.ctxName] = value
ctx.Private[node.ctx_name] = value
}
return nil
}
func tagWidthratioParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
widthratioNode := &tagWidthratioNode{
widthratio_node := &tagWidthratioNode{
position: start,
}
@ -48,34 +49,34 @@ func tagWidthratioParser(doc *Parser, start *Token, arguments *Parser) (INodeTag
if err != nil {
return nil, err
}
widthratioNode.current = current
widthratio_node.current = current
max, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
widthratioNode.max = max
widthratio_node.max = max
width, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
widthratioNode.width = width
widthratio_node.width = width
if arguments.MatchOne(TokenKeyword, "as") != nil {
// Name follows
nameToken := arguments.MatchType(TokenIdentifier)
if nameToken == nil {
name_token := arguments.MatchType(TokenIdentifier)
if name_token == nil {
return nil, arguments.Error("Expected name (identifier).", nil)
}
widthratioNode.ctxName = nameToken.Val
widthratio_node.ctx_name = name_token.Val
}
if arguments.Remaining() > 0 {
return nil, arguments.Error("Malformed widthratio-tag arguments.", nil)
}
return widthratioNode, nil
return widthratio_node, nil
}
func init() {
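The widthratio tag computes ceil(current/max*width + 0.5) and either writes the result or, with "as name", stores it in the private context. A small fragment, assuming the pongo2 and fmt imports used in the other sketches; the numbers are arbitrary:

out := pongo2.DefaultSet.RenderTemplateString("{% widthratio 175 200 100 %}", pongo2.Context{})
fmt.Println(out) // 88 (175 out of 200, scaled to a width of 100)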

View File

@ -1,16 +1,20 @@
package pongo2
import (
"bytes"
)
type tagWithNode struct {
withPairs map[string]IEvaluator
wrapper *NodeWrapper
with_pairs map[string]IEvaluator
wrapper *NodeWrapper
}
func (node *tagWithNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (node *tagWithNode) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
// new context for the block
withctx := NewChildExecutionContext(ctx)
// Put all custom with-pairs into the context
for key, value := range node.withPairs {
for key, value := range node.with_pairs {
val, err := value.Evaluate(ctx)
if err != nil {
return err
@ -18,12 +22,12 @@ func (node *tagWithNode) Execute(ctx *ExecutionContext, writer TemplateWriter) *
withctx.Private[key] = val
}
return node.wrapper.Execute(withctx, writer)
return node.wrapper.Execute(withctx, buffer)
}
func tagWithParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Error) {
withNode := &tagWithNode{
withPairs: make(map[string]IEvaluator),
with_node := &tagWithNode{
with_pairs: make(map[string]IEvaluator),
}
if arguments.Count() == 0 {
@ -34,7 +38,7 @@ func tagWithParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Err
if err != nil {
return nil, err
}
withNode.wrapper = wrapper
with_node.wrapper = wrapper
if endargs.Count() > 0 {
return nil, endargs.Error("Arguments not allowed here.", nil)
@ -42,45 +46,45 @@ func tagWithParser(doc *Parser, start *Token, arguments *Parser) (INodeTag, *Err
// Scan through all arguments to see which style the user uses (old or new style).
// If we find any "as" keyword we will enforce old style; otherwise we will use new style.
oldStyle := false // by default we're using the new_style
old_style := false // by default we're using the new_style
for i := 0; i < arguments.Count(); i++ {
if arguments.PeekN(i, TokenKeyword, "as") != nil {
oldStyle = true
old_style = true
break
}
}
for arguments.Remaining() > 0 {
if oldStyle {
valueExpr, err := arguments.ParseExpression()
if old_style {
value_expr, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
if arguments.Match(TokenKeyword, "as") == nil {
return nil, arguments.Error("Expected 'as' keyword.", nil)
}
keyToken := arguments.MatchType(TokenIdentifier)
if keyToken == nil {
key_token := arguments.MatchType(TokenIdentifier)
if key_token == nil {
return nil, arguments.Error("Expected an identifier", nil)
}
withNode.withPairs[keyToken.Val] = valueExpr
with_node.with_pairs[key_token.Val] = value_expr
} else {
keyToken := arguments.MatchType(TokenIdentifier)
if keyToken == nil {
key_token := arguments.MatchType(TokenIdentifier)
if key_token == nil {
return nil, arguments.Error("Expected an identifier", nil)
}
if arguments.Match(TokenSymbol, "=") == nil {
return nil, arguments.Error("Expected '='.", nil)
}
valueExpr, err := arguments.ParseExpression()
value_expr, err := arguments.ParseExpression()
if err != nil {
return nil, err
}
withNode.withPairs[keyToken.Val] = valueExpr
with_node.with_pairs[key_token.Val] = value_expr
}
}
return withNode, nil
return with_node, nil
}
func init() {
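The with tag above accepts both argument styles detected by the parser, the older "{% with expr as name %}" and the newer "{% with name=expr %}", and puts the pairs into a child context for the wrapped block. A small fragment, assuming the pongo2 and fmt imports used in the other sketches and the usual endwith closing tag (the WrapUntilTag call sits outside the shown hunk):

out := pongo2.DefaultSet.RenderTemplateString(`{% with name="pongo2" %}Hello {{ name }}{% endwith %}`, pongo2.Context{})
fmt.Println(out) // Hello pongo2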

View File

@ -6,67 +6,48 @@ import (
"io"
)
type TemplateWriter interface {
io.Writer
WriteString(string) (int, error)
}
type templateWriter struct {
w io.Writer
}
func (tw *templateWriter) WriteString(s string) (int, error) {
return tw.w.Write([]byte(s))
}
func (tw *templateWriter) Write(b []byte) (int, error) {
return tw.w.Write(b)
}
type Template struct {
set *TemplateSet
// Input
isTplString bool
name string
tpl string
size int
is_tpl_string bool
name string
tpl string
size int
// Calculation
tokens []*Token
parser *Parser
// first come, first serve (it's important to not override existing entries in here)
level int
parent *Template
child *Template
blocks map[string]*NodeWrapper
exportedMacros map[string]*tagMacroNode
level int
parent *Template
child *Template
blocks map[string]*NodeWrapper
exported_macros map[string]*tagMacroNode
// Output
root *nodeDocument
}
func newTemplateString(set *TemplateSet, tpl []byte) (*Template, error) {
func newTemplateString(set *TemplateSet, tpl string) (*Template, error) {
return newTemplate(set, "<string>", true, tpl)
}
func newTemplate(set *TemplateSet, name string, isTplString bool, tpl []byte) (*Template, error) {
strTpl := string(tpl)
func newTemplate(set *TemplateSet, name string, is_tpl_string bool, tpl string) (*Template, error) {
// Create the template
t := &Template{
set: set,
isTplString: isTplString,
name: name,
tpl: strTpl,
size: len(strTpl),
blocks: make(map[string]*NodeWrapper),
exportedMacros: make(map[string]*tagMacroNode),
set: set,
is_tpl_string: is_tpl_string,
name: name,
tpl: tpl,
size: len(tpl),
blocks: make(map[string]*NodeWrapper),
exported_macros: make(map[string]*tagMacroNode),
}
// Tokenize it
tokens, err := lex(name, strTpl)
tokens, err := lex(name, tpl)
if err != nil {
return nil, err
}
@ -86,7 +67,11 @@ func newTemplate(set *TemplateSet, name string, isTplString bool, tpl []byte) (*
return t, nil
}
func (tpl *Template) execute(context Context, writer TemplateWriter) error {
func (tpl *Template) execute(context Context) (*bytes.Buffer, error) {
// Create output buffer
// We assume that the rendered template will be 30% larger
buffer := bytes.NewBuffer(make([]byte, 0, int(float64(tpl.size)*1.3)))
// Determine the parent to be executed (for template inheritance)
parent := tpl
for parent.parent != nil {
@ -104,14 +89,14 @@ func (tpl *Template) execute(context Context, writer TemplateWriter) error {
// Check for context name syntax
err := newContext.checkForValidIdentifiers()
if err != nil {
return err
return nil, err
}
// Check for clashes with macro names
for k := range newContext {
_, has := tpl.exportedMacros[k]
for k, _ := range newContext {
_, has := tpl.exported_macros[k]
if has {
return &Error{
return nil, &Error{
Filename: tpl.name,
Sender: "execution",
ErrorMsg: fmt.Sprintf("Context key name '%s' clashes with macro '%s'.", k, k),
@ -125,22 +110,8 @@ func (tpl *Template) execute(context Context, writer TemplateWriter) error {
ctx := newExecutionContext(parent, newContext)
// Run the selected document
if err := parent.root.Execute(ctx, writer); err != nil {
return err
}
return nil
}
func (tpl *Template) newTemplateWriterAndExecute(context Context, writer io.Writer) error {
return tpl.execute(context, &templateWriter{w: writer})
}
func (tpl *Template) newBufferAndExecute(context Context) (*bytes.Buffer, error) {
// Create output buffer
// We assume that the rendered template will be 30% larger
buffer := bytes.NewBuffer(make([]byte, 0, int(float64(tpl.size)*1.3)))
if err := tpl.execute(context, buffer); err != nil {
err := parent.root.Execute(ctx, buffer)
if err != nil {
return nil, err
}
return buffer, nil
@ -150,30 +121,30 @@ func (tpl *Template) newBufferAndExecute(context Context) (*bytes.Buffer, error)
// on success. Context can be nil. Nothing is written on error; instead the error
// is being returned.
func (tpl *Template) ExecuteWriter(context Context, writer io.Writer) error {
buf, err := tpl.newBufferAndExecute(context)
buffer, err := tpl.execute(context)
if err != nil {
return err
}
_, err = buf.WriteTo(writer)
if err != nil {
return err
l := buffer.Len()
n, werr := buffer.WriteTo(writer)
if int(n) != l {
panic(fmt.Sprintf("error on writing template: n(%d) != buffer.Len(%d)", n, l))
}
if werr != nil {
return &Error{
Filename: tpl.name,
Sender: "execution",
ErrorMsg: werr.Error(),
}
}
return nil
}
// Same as ExecuteWriter. The only difference between both functions is that
// this function might already have written parts of the generated template in the
// case of an execution error because there's no intermediate buffer involved for
// performance reasons. This is handy if you need high performance template
// generation or if you want to manage your own pool of buffers.
func (tpl *Template) ExecuteWriterUnbuffered(context Context, writer io.Writer) error {
return tpl.newTemplateWriterAndExecute(context, writer)
}
// Executes the template and returns the rendered template as a []byte
func (tpl *Template) ExecuteBytes(context Context) ([]byte, error) {
// Execute template
buffer, err := tpl.newBufferAndExecute(context)
buffer, err := tpl.execute(context)
if err != nil {
return nil, err
}
@ -183,7 +154,7 @@ func (tpl *Template) ExecuteBytes(context Context) ([]byte, error) {
// Executes the template and returns the rendered template as a string
func (tpl *Template) Execute(context Context) (string, error) {
// Execute template
buffer, err := tpl.newBufferAndExecute(context)
buffer, err := tpl.execute(context)
if err != nil {
return "", err
}
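Tying the execution entry points above together: in the vendored v3.0, execute() always renders into an internal buffer sized at roughly 130% of the template source, and ExecuteWriter copies that buffer to the target writer; the unbuffered variant from the newer code is gone. A minimal sketch with illustrative template text:

package main

import (
    "os"

    "github.com/flosch/pongo2"
)

func main() {
    tpl, err := pongo2.FromString("Hello {{ name }}!")
    if err != nil {
        panic(err)
    }

    // Render into a string ...
    s, err := tpl.Execute(pongo2.Context{"name": "world"})
    if err != nil {
        panic(err)
    }
    _ = s // "Hello world!"

    // ... or copy the internal buffer to any io.Writer.
    if err := tpl.ExecuteWriter(pongo2.Context{"name": "world"}, os.Stdout); err != nil {
        panic(err)
    }
}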

View File

@ -1,156 +0,0 @@
package pongo2
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
)
// LocalFilesystemLoader represents a local filesystem loader with basic
// BaseDirectory capabilities. The access to the local filesystem is unrestricted.
type LocalFilesystemLoader struct {
baseDir string
}
// MustNewLocalFileSystemLoader creates a new LocalFilesystemLoader instance
// and panics if there's any error during instantiation. The parameters
// are the same like NewLocalFileSystemLoader.
func MustNewLocalFileSystemLoader(baseDir string) *LocalFilesystemLoader {
fs, err := NewLocalFileSystemLoader(baseDir)
if err != nil {
log.Panic(err)
}
return fs
}
// NewLocalFileSystemLoader creates a new LocalFilesystemLoader and allows
// templates to be loaded from disk (unrestricted). If any base directory
// is given (or being set using SetBaseDir), this base directory is being used
// for path calculation in template inclusions/imports. Otherwise the path
// is calculated based relatively to the including template's path.
func NewLocalFileSystemLoader(baseDir string) (*LocalFilesystemLoader, error) {
fs := &LocalFilesystemLoader{}
if baseDir != "" {
if err := fs.SetBaseDir(baseDir); err != nil {
return nil, err
}
}
return fs, nil
}
// SetBaseDir sets the template's base directory. This directory will
// be used for any relative path in filters, tags and From*-functions to determine
// your template. See the comment for NewLocalFileSystemLoader as well.
func (fs *LocalFilesystemLoader) SetBaseDir(path string) error {
// Make the path absolute
if !filepath.IsAbs(path) {
abs, err := filepath.Abs(path)
if err != nil {
return err
}
path = abs
}
// Check for existence
fi, err := os.Stat(path)
if err != nil {
return err
}
if !fi.IsDir() {
return fmt.Errorf("The given path '%s' is not a directory.", path)
}
fs.baseDir = path
return nil
}
// Get reads the path's content from your local filesystem.
func (fs *LocalFilesystemLoader) Get(path string) (io.Reader, error) {
buf, err := ioutil.ReadFile(path)
if err != nil {
return nil, err
}
return bytes.NewReader(buf), nil
}
// Abs resolves a filename relative to the base directory. Absolute paths are allowed.
// When there's no base dir set, the absolute path to the filename
// will be calculated based on either the provided base directory (which
// might be a path of a template which includes another template) or
// the current working directory.
func (fs *LocalFilesystemLoader) Abs(base, name string) string {
if filepath.IsAbs(name) {
return name
}
// Our own base dir has always priority; if there's none
// we use the path provided in base.
var err error
if fs.baseDir == "" {
if base == "" {
base, err = os.Getwd()
if err != nil {
panic(err)
}
return filepath.Join(base, name)
}
return filepath.Join(filepath.Dir(base), name)
}
return filepath.Join(fs.baseDir, name)
}
// SandboxedFilesystemLoader is still WIP.
type SandboxedFilesystemLoader struct {
*LocalFilesystemLoader
}
// NewSandboxedFilesystemLoader creates a new sandboxed local file system instance.
func NewSandboxedFilesystemLoader(baseDir string) (*SandboxedFilesystemLoader, error) {
fs, err := NewLocalFileSystemLoader(baseDir)
if err != nil {
return nil, err
}
return &SandboxedFilesystemLoader{
LocalFilesystemLoader: fs,
}, nil
}
// Move sandbox to a virtual fs
/*
if len(set.SandboxDirectories) > 0 {
defer func() {
// Remove any ".." or other crap
resolvedPath = filepath.Clean(resolvedPath)
// Make the path absolute
absPath, err := filepath.Abs(resolvedPath)
if err != nil {
panic(err)
}
resolvedPath = absPath
// Check against the sandbox directories (once one pattern matches, we're done and can allow it)
for _, pattern := range set.SandboxDirectories {
matched, err := filepath.Match(pattern, resolvedPath)
if err != nil {
panic("Wrong sandbox directory match pattern (see http://golang.org/pkg/path/filepath/#Match).")
}
if matched {
// OK!
return
}
}
// No pattern matched, we have to log+deny the request
set.logf("Access attempt outside of the sandbox directories (blocked): '%s'", resolvedPath)
resolvedPath = ""
}()
}
*/

View File

@ -1,49 +1,49 @@
package pongo2
import (
"errors"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"sync"
)
// TemplateLoader allows to implement a virtual file system.
type TemplateLoader interface {
// Abs calculates the path to a given template. Whenever a path must be resolved
// due to an import from another template, the base equals the parent template's path.
Abs(base, name string) string
// Get returns an io.Reader where the template's content can be read from.
Get(path string) (io.Reader, error)
}
// TemplateSet allows you to create your own group of templates with their own
// global context (which is shared among all members of the set) and their own
// configuration.
// It's useful for a separation of different kind of templates
// (e. g. web templates vs. mail templates).
// A template set allows you to create your own group of templates with their own global context (which is shared
// among all members of the set), their own configuration (like a specific base directory) and their own sandbox.
// It's useful for a separation of different kind of templates (e. g. web templates vs. mail templates).
type TemplateSet struct {
name string
loader TemplateLoader
name string
// Globals will be provided to all templates created within this template set
Globals Context
// If debug is true (default false), ExecutionContext.Logf() will work and output
// to STDOUT. Furthermore, FromCache() won't cache the templates.
// Make sure to synchronize the access to it in case you're changing this
// If debug is true (default false), ExecutionContext.Logf() will work and output to STDOUT. Furthermore,
// FromCache() won't cache the templates. Make sure to synchronize the access to it in case you're changing this
// variable during program execution (and template compilation/execution).
Debug bool
// Base directory: If you set the base directory (string is non-empty), all filename lookups in tags/filters are
// relative to this directory. If it's empty, all lookups are relative to the current filename which is importing.
baseDirectory string
// Sandbox features
// - Limit access to directories (using SandboxDirectories)
// - Disallow access to specific tags and/or filters (using BanTag() and BanFilter())
//
// For efficiency reasons you can ban tags/filters only *before* you have
// added your first template to the set (restrictions are statically checked).
// After you added one, it's not possible anymore (for your personal security).
// You can limit file accesses (for all tags/filters which are using pongo2's file resolver technique)
// to these sandbox directories. All default pongo2 filters/tags are respecting these restrictions.
// For example, if you only have your base directory in the list, a {% ssi "/etc/passwd" %} will not work.
// No items in SandboxDirectories means no restrictions at all.
//
// For efficiency reasons you can ban tags/filters only *before* you have added your first
// template to the set (restrictions are statically checked). After you added one, it's not possible anymore
// (for your personal security).
//
// SandboxDirectories can be changed at runtime. Please synchronize the access to it if you need to change it
// after you've added your first template to the set. You *must* use this match pattern for your directories:
// http://golang.org/pkg/path/filepath/#Match
SandboxDirectories []string
firstTemplateCreated bool
bannedTags map[string]bool
bannedFilters map[string]bool
@ -53,13 +53,11 @@ type TemplateSet struct {
templateCacheMutex sync.Mutex
}
// NewSet can be used to create sets with different kind of templates
// (e. g. web from mail templates), with different globals or
// other configurations.
func NewSet(name string, loader TemplateLoader) *TemplateSet {
// Create your own template sets to separate different kind of templates (e. g. web from mail templates) with
// different globals or other configurations (like base directories).
func NewSet(name string) *TemplateSet {
return &TemplateSet{
name: name,
loader: loader,
Globals: make(Context),
bannedTags: make(map[string]bool),
bannedFilters: make(map[string]bool),
@ -67,97 +65,117 @@ func NewSet(name string, loader TemplateLoader) *TemplateSet {
}
}
func (set *TemplateSet) resolveFilename(tpl *Template, path string) string {
name := ""
if tpl != nil && tpl.isTplString {
return path
// Use this function to set your template set's base directory. This directory will be used for any relative
// path in filters, tags and From*-functions to determine your template.
func (set *TemplateSet) SetBaseDirectory(name string) error {
// Make the path absolute
if !filepath.IsAbs(name) {
abs, err := filepath.Abs(name)
if err != nil {
return err
}
name = abs
}
if tpl != nil {
name = tpl.name
// Check for existence
fi, err := os.Stat(name)
if err != nil {
return err
}
return set.loader.Abs(name, path)
if !fi.IsDir() {
return fmt.Errorf("The given path '%s' is not a directory.")
}
set.baseDirectory = name
return nil
}
// BanTag bans a specific tag for this template set. See more in the documentation for TemplateSet.
func (set *TemplateSet) BanTag(name string) error {
func (set *TemplateSet) BaseDirectory() string {
return set.baseDirectory
}
// Ban a specific tag for this template set. See more in the documentation for TemplateSet.
func (set *TemplateSet) BanTag(name string) {
_, has := tags[name]
if !has {
return fmt.Errorf("Tag '%s' not found.", name)
panic(fmt.Sprintf("Tag '%s' not found.", name))
}
if set.firstTemplateCreated {
return errors.New("You cannot ban any tags after you've added your first template to your template set.")
panic("You cannot ban any tags after you've added your first template to your template set.")
}
_, has = set.bannedTags[name]
if has {
return fmt.Errorf("Tag '%s' is already banned.", name)
panic(fmt.Sprintf("Tag '%s' is already banned.", name))
}
set.bannedTags[name] = true
return nil
}
// BanFilter bans a specific filter for this template set. See more in the documentation for TemplateSet.
func (set *TemplateSet) BanFilter(name string) error {
// Ban a specific filter for this template set. See more in the documentation for TemplateSet.
func (set *TemplateSet) BanFilter(name string) {
_, has := filters[name]
if !has {
return fmt.Errorf("Filter '%s' not found.", name)
panic(fmt.Sprintf("Filter '%s' not found.", name))
}
if set.firstTemplateCreated {
return errors.New("You cannot ban any filters after you've added your first template to your template set.")
panic("You cannot ban any filters after you've added your first template to your template set.")
}
_, has = set.bannedFilters[name]
if has {
return fmt.Errorf("Filter '%s' is already banned.", name)
panic(fmt.Sprintf("Filter '%s' is already banned.", name))
}
set.bannedFilters[name] = true
return nil
}
// FromCache is a convenient method to cache templates. It is thread-safe
// FromCache() is a convenient method to cache templates. It is thread-safe
// and will only compile the template associated with a filename once.
// If TemplateSet.Debug is true (for example during development phase),
// FromCache() will not cache the template and instead recompile it on any
// call (to make changes to a template live instantaneously).
// Like FromFile(), FromCache() takes a relative path to a set base directory.
// Sandbox restrictions apply (if given).
func (set *TemplateSet) FromCache(filename string) (*Template, error) {
if set.Debug {
// Recompile on any request
return set.FromFile(filename)
}
// Cache the template
cleanedFilename := set.resolveFilename(nil, filename)
} else {
// Cache the template
cleaned_filename := set.resolveFilename(nil, filename)
set.templateCacheMutex.Lock()
defer set.templateCacheMutex.Unlock()
set.templateCacheMutex.Lock()
defer set.templateCacheMutex.Unlock()
tpl, has := set.templateCache[cleanedFilename]
tpl, has := set.templateCache[cleaned_filename]
// Cache miss
if !has {
tpl, err := set.FromFile(cleanedFilename)
if err != nil {
return nil, err
// Cache miss
if !has {
tpl, err := set.FromFile(cleaned_filename)
if err != nil {
return nil, err
}
set.templateCache[cleaned_filename] = tpl
return tpl, nil
}
set.templateCache[cleanedFilename] = tpl
// Cache hit
return tpl, nil
}
// Cache hit
return tpl, nil
}
// FromString loads a template from string and returns a Template instance.
// Loads a template from string and returns a Template instance.
func (set *TemplateSet) FromString(tpl string) (*Template, error) {
set.firstTemplateCreated = true
return newTemplateString(set, []byte(tpl))
return newTemplateString(set, tpl)
}
// FromFile loads a template from a filename and returns a Template instance.
// Loads a template from a filename and returns a Template instance.
// If a base directory is set, the filename must be either relative to it
// or be an absolute path. Sandbox restrictions (SandboxDirectories) apply
// if given.
func (set *TemplateSet) FromFile(filename string) (*Template, error) {
set.firstTemplateCreated = true
fd, err := set.loader.Get(set.resolveFilename(nil, filename))
buf, err := ioutil.ReadFile(set.resolveFilename(nil, filename))
if err != nil {
return nil, &Error{
Filename: filename,
@ -165,20 +183,11 @@ func (set *TemplateSet) FromFile(filename string) (*Template, error) {
ErrorMsg: err.Error(),
}
}
buf, err := ioutil.ReadAll(fd)
if err != nil {
return nil, &Error{
Filename: filename,
Sender: "fromfile",
ErrorMsg: err.Error(),
}
}
return newTemplate(set, filename, false, buf)
return newTemplate(set, filename, false, string(buf))
}
// RenderTemplateString is a shortcut and renders a template string directly.
// Panics when providing a malformed template or an error occurs during execution.
// Shortcut; renders a template string directly. Panics when providing a
// malformed template or an error occurs during execution.
func (set *TemplateSet) RenderTemplateString(s string, ctx Context) string {
set.firstTemplateCreated = true
@ -190,8 +199,8 @@ func (set *TemplateSet) RenderTemplateString(s string, ctx Context) string {
return result
}
// RenderTemplateFile is a shortcut and renders a template file directly.
// Panics when providing a malformed template or an error occurs during execution.
// Shortcut; renders a template file directly. Panics when providing a
// malformed template or an error occurs during execution.
func (set *TemplateSet) RenderTemplateFile(fn string, ctx Context) string {
set.firstTemplateCreated = true
@ -209,6 +218,58 @@ func (set *TemplateSet) logf(format string, args ...interface{}) {
}
}
// Resolves a filename relative to the base directory. Absolute paths are allowed.
// If sandbox restrictions are given (SandboxDirectories), they will be respected and checked.
// On sandbox restriction violation, resolveFilename() panics.
func (set *TemplateSet) resolveFilename(tpl *Template, filename string) (resolved_path string) {
if len(set.SandboxDirectories) > 0 {
defer func() {
// Remove any ".." or other crap
resolved_path = filepath.Clean(resolved_path)
// Make the path absolute
abs_path, err := filepath.Abs(resolved_path)
if err != nil {
panic(err)
}
resolved_path = abs_path
// Check against the sandbox directories (once one pattern matches, we're done and can allow it)
for _, pattern := range set.SandboxDirectories {
matched, err := filepath.Match(pattern, resolved_path)
if err != nil {
panic("Wrong sandbox directory match pattern (see http://golang.org/pkg/path/filepath/#Match).")
}
if matched {
// OK!
return
}
}
// No pattern matched, we have to log+deny the request
set.logf("Access attempt outside of the sandbox directories (blocked): '%s'", resolved_path)
resolved_path = ""
}()
}
if filepath.IsAbs(filename) {
return filename
}
if set.baseDirectory == "" {
if tpl != nil {
if tpl.is_tpl_string {
return filename
}
base := filepath.Dir(tpl.name)
return filepath.Join(base, filename)
}
return filename
} else {
return filepath.Join(set.baseDirectory, filename)
}
}
// Logging function (internally used)
func logf(format string, items ...interface{}) {
if debug {
@ -218,14 +279,10 @@ func logf(format string, items ...interface{}) {
var (
debug bool // internal debugging
logger = log.New(os.Stdout, "[pongo2] ", log.LstdFlags|log.Lshortfile)
logger = log.New(os.Stdout, "[pongo2] ", log.LstdFlags)
// DefaultLoader allows the default un-sandboxed access to the local file
// system and is being used by the DefaultSet.
DefaultLoader = MustNewLocalFileSystemLoader("")
// DefaultSet is a set created for you for convenience reasons.
DefaultSet = NewSet("default", DefaultLoader)
// Creating a default set
DefaultSet = NewSet("default")
// Methods on the default set
FromString = DefaultSet.FromString
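A sketch of the v3.0 TemplateSet surface shown above: sets are created without a loader, optionally get a base directory plus SandboxDirectories, and BanTag/BanFilter panic instead of returning errors. The set name, directory, sandbox pattern and banned tag are illustrative:

package main

import (
    "fmt"

    "github.com/flosch/pongo2"
)

func main() {
    mailSet := pongo2.NewSet("mail")

    // Relative filename lookups in tags/filters/From*-functions become relative to this directory.
    if err := mailSet.SetBaseDirectory("."); err != nil {
        panic(err)
    }

    // Optional sandbox: file-resolving tags may only touch paths matching these
    // filepath.Match patterns; anything else is blocked by resolveFilename.
    mailSet.SandboxDirectories = []string{"/srv/mail-templates/*"}

    // Bans must happen before the first template is added; in v3.0 this panics on
    // unknown or already-banned names instead of returning an error.
    mailSet.BanTag("ssi")

    tpl, err := mailSet.FromString("Hello {{ name }}!")
    if err != nil {
        panic(err)
    }
    out, err := tpl.Execute(pongo2.Context{"name": "world"})
    if err != nil {
        panic(err)
    }
    fmt.Println(out) // Hello world!
}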

View File

@ -46,24 +46,8 @@ in/not in
{{ 7 in simple.intmap }}
{{ !(5 in simple.intmap) }}
{{ not(7 in simple.intmap) }}
{{ 1 in simple.multiple_item_list }}
{{ 4 in simple.multiple_item_list }}
{{ !(4 in simple.multiple_item_list) }}
{{ "Hello" in simple.misc_list }}
{{ "Hello2" in simple.misc_list }}
{{ 99 in simple.misc_list }}
{{ False in simple.misc_list }}
issue #48 (associativity for infix operators)
{{ 34/3*3 }}
{{ 10 + 24 / 6 / 2 }}
{{ 6 - 4 - 2 }}
issue #64 (uint comparison with int const)
{{ simple.uint }}
{{ simple.uint == 8 }}
{{ simple.uint == 9 }}
{{ simple.uint >= 8 }}
{{ simple.uint <= 8 }}
{{ simple.uint < 8 }}
{{ simple.uint > 8 }}
{{ 6 - 4 - 2 }}

View File

@ -46,24 +46,8 @@ True
False
False
True
True
False
True
True
False
True
False
issue #48 (associativity for infix operators)
33
12
0
issue #64 (uint comparison with int const)
8
True
False
True
True
False
False
0

View File

@ -1,4 +1,3 @@
{{ -(true || false) }}
{{ simple.func_add("test", 5) }}
{% for item in simple.multiple_item_list %} {{ simple.func_add("test", 5) }} {% endfor %}
{{ simple.func_variadic_sum_int("foo") }}
{{ simple.func_variadic_sum_int("foo") }}

View File

@ -1,4 +1,3 @@
.*where: execution.*Negative sign on a non\-number expression
.*Function input argument 0 of 'simple.func_add' must be of type int or \*pongo2.Value \(not string\).
.*Function input argument 0 of 'simple.func_add' must be of type int or \*pongo2.Value \(not string\).
.*Function variadic input argument of 'simple.func_variadic_sum_int' must be of type int or \*pongo2.Value \(not string\).
.*Function variadic input argument of 'simple.func_variadic_sum_int' must be of type int or \*pongo2.Value \(not string\).

View File

@ -105,7 +105,7 @@ h
first
T
<pongo2_test.comment Value>
<pongo2.comment Value>
@ -113,7 +113,7 @@ T
last
t
<pongo2_test.comment Value>
<pongo2.comment Value>

View File

@ -6,22 +6,4 @@
{% endfor %}
reversed
'{% for item in simple.multiple_item_list reversed %}{{ item }} {% endfor %}'
sorted string map
'{% for key in simple.strmap sorted %}{{ key }} {% endfor %}'
sorted int map
'{% for key in simple.intmap sorted %}{{ key }} {% endfor %}'
sorted int list
'{% for key in simple.unsorted_int_list sorted %}{{ key }} {% endfor %}'
reversed sorted int list
'{% for key in simple.unsorted_int_list reversed sorted %}{{ key }} {% endfor %}'
reversed sorted string map
'{% for key in simple.strmap reversed sorted %}{{ key }} {% endfor %}'
reversed sorted int map
'{% for key in simple.intmap reversed sorted %}{{ key }} {% endfor %}'
'{% for item in simple.multiple_item_list reversed %}{{ item }} {% endfor %}'

View File

@ -16,22 +16,4 @@
reversed
'55 34 21 13 8 5 3 2 1 1 '
sorted string map
'aab abc bcd gh ukq zab '
sorted int map
'1 2 5 '
sorted int list
'1 22 192 249 581 8271 9999 1828591 '
reversed sorted int list
'1828591 9999 8271 581 249 192 22 1 '
reversed sorted string map
'zab ukq gh bcd abc aab '
reversed sorted int map
'5 2 1 '
'55 34 21 13 8 5 3 2 1 1 '

View File

@ -1,7 +1,4 @@
Start '{% include "includes.helper" %}' End
Start '{% include "includes.helper" if_exists %}' End
Start '{% include "includes.helper" with what_am_i=simple.name only %}' End
Start '{% include "includes.helper" with what_am_i=simple.name %}' End
Start '{% include simple.included_file|lower with number=7 what_am_i="guest" %}' End
Start '{% include "includes.helper.not_exists" if_exists %}' End
Start '{% include simple.included_file_not_exists if_exists with number=7 what_am_i="guest" %}' End
Start '{% include simple.included_file|lower with number=7 what_am_i="guest" %}' End

View File

@ -1,7 +1,4 @@
Start 'I'm 11' End
Start 'I'm 11' End
Start 'I'm john doe' End
Start 'I'm john doe11' End
Start 'I'm guest7' End
Start '' End
Start '' End
Start 'I'm guest7' End

View File

@ -1 +1 @@
dev
v3

View File

@ -11,6 +11,6 @@ Start 'I'm guest7' End
Start 'Hi number 10! Will not be overridden inside the block. I'm john doe, 50 years old.I have 10 children.' End
more with tests
<pongo2_test.user Value>
<pongo2.user Value>
user1
user3

View File

@ -3,7 +3,6 @@ package pongo2
import (
"fmt"
"reflect"
"sort"
"strconv"
"strings"
)
@ -13,7 +12,7 @@ type Value struct {
safe bool // used to indicate whether a Value needs explicit escaping in the template
}
// AsValue converts any given value to a pongo2.Value
// Converts any given value to a pongo2.Value
// Usually being used within own functions passed to a template
// through a Context or within filter functions.
//
@ -25,7 +24,7 @@ func AsValue(i interface{}) *Value {
}
}
// AsSafeValue works like AsValue, but does not apply the 'escape' filter.
// Like AsValue, but does not apply the 'escape' filter.
func AsSafeValue(i interface{}) *Value {
return &Value{
val: reflect.ValueOf(i),
@ -112,8 +111,9 @@ func (v *Value) String() string {
case reflect.Bool:
if v.Bool() {
return "True"
} else {
return "False"
}
return "False"
case reflect.Struct:
if t, ok := v.Interface().(fmt.Stringer); ok {
return t.String()
@ -229,13 +229,15 @@ func (v *Value) Negate() *Value {
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
if v.Integer() != 0 {
return AsValue(0)
} else {
return AsValue(1)
}
return AsValue(1)
case reflect.Float32, reflect.Float64:
if v.Float() != 0.0 {
return AsValue(float64(0.0))
} else {
return AsValue(float64(1.1))
}
return AsValue(float64(1.1))
case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
return AsValue(v.getResolvedValue().Len() == 0)
case reflect.Bool:
@ -299,7 +301,7 @@ func (v *Value) Index(i int) *Value {
}
}
// Contains checks whether the underlying value (which must be of type struct, map,
// Checks whether the underlying value (which must be of type struct, map,
// string, array or slice) contains another Value (e. g. used to check
// whether a struct contains a specific field or a map contains a specific key).
//
@ -308,32 +310,25 @@ func (v *Value) Index(i int) *Value {
func (v *Value) Contains(other *Value) bool {
switch v.getResolvedValue().Kind() {
case reflect.Struct:
fieldValue := v.getResolvedValue().FieldByName(other.String())
return fieldValue.IsValid()
field_value := v.getResolvedValue().FieldByName(other.String())
return field_value.IsValid()
case reflect.Map:
var mapValue reflect.Value
var map_value reflect.Value
switch other.Interface().(type) {
case int:
mapValue = v.getResolvedValue().MapIndex(other.getResolvedValue())
map_value = v.getResolvedValue().MapIndex(other.getResolvedValue())
case string:
mapValue = v.getResolvedValue().MapIndex(other.getResolvedValue())
map_value = v.getResolvedValue().MapIndex(other.getResolvedValue())
default:
logf("Value.Contains() does not support lookup type '%s'\n", other.getResolvedValue().Kind().String())
return false
}
return mapValue.IsValid()
return map_value.IsValid()
case reflect.String:
return strings.Contains(v.getResolvedValue().String(), other.String())
case reflect.Slice, reflect.Array:
for i := 0; i < v.getResolvedValue().Len(); i++ {
item := v.getResolvedValue().Index(i)
if other.Interface() == item.Interface() {
return true
}
}
return false
// TODO: reflect.Array, reflect.Slice
default:
logf("Value.Contains() not available for type: %s\n", v.getResolvedValue().Kind().String())
@ -362,22 +357,16 @@ func (v *Value) CanSlice() bool {
// If the underlying value has no items or is not one of the types above,
// the empty function (function's second argument) will be called.
func (v *Value) Iterate(fn func(idx, count int, key, value *Value) bool, empty func()) {
v.IterateOrder(fn, empty, false, false)
v.IterateOrder(fn, empty, false)
}
// Like Value.Iterate, but can iterate through an array/slice/string in reverse. Does
// not affect the iteration through a map because maps don't have any particular order.
func (v *Value) IterateOrder(fn func(idx, count int, key, value *Value) bool, empty func(), reverse bool, sorted bool) {
func (v *Value) IterateOrder(fn func(idx, count int, key, value *Value) bool, empty func(), reverse bool) {
switch v.getResolvedValue().Kind() {
case reflect.Map:
keys := sortedKeys(v.getResolvedValue().MapKeys())
if sorted {
if reverse {
sort.Sort(sort.Reverse(keys))
} else {
sort.Sort(keys)
}
}
// Reverse not needed for maps, since they are not ordered
keys := v.getResolvedValue().MapKeys()
keyLen := len(keys)
for idx, key := range keys {
value := v.getResolvedValue().MapIndex(key)
@ -390,31 +379,19 @@ func (v *Value) IterateOrder(fn func(idx, count int, key, value *Value) bool, em
}
return // done
case reflect.Array, reflect.Slice:
var items valuesList
itemCount := v.getResolvedValue().Len()
for i := 0; i < itemCount; i++ {
items = append(items, &Value{val: v.getResolvedValue().Index(i)})
}
if sorted {
if itemCount > 0 {
if reverse {
sort.Sort(sort.Reverse(items))
} else {
sort.Sort(items)
}
} else {
if reverse {
for i := 0; i < itemCount/2; i++ {
items[i], items[itemCount-1-i] = items[itemCount-1-i], items[i]
for i := itemCount - 1; i >= 0; i-- {
if !fn(i, itemCount, &Value{val: v.getResolvedValue().Index(i)}, nil) {
return
}
}
}
}
if len(items) > 0 {
for idx, item := range items {
if !fn(idx, itemCount, item, nil) {
return
} else {
for i := 0; i < itemCount; i++ {
if !fn(i, itemCount, &Value{val: v.getResolvedValue().Index(i)}, nil) {
return
}
}
}
} else {
@ -422,12 +399,7 @@ func (v *Value) IterateOrder(fn func(idx, count int, key, value *Value) bool, em
}
return // done
case reflect.String:
if sorted {
// TODO(flosch): Handle sorted
panic("TODO: handle sort for type string")
}
// TODO(flosch): Not utf8-compatible (utf8-decoding necessary)
// TODO: Not utf8-compatible (utf8-decoding necessary)
charCount := v.getResolvedValue().Len()
if charCount > 0 {
if reverse {
@ -463,55 +435,5 @@ func (v *Value) Interface() interface{} {
// Checks whether two values are containing the same value or object.
func (v *Value) EqualValueTo(other *Value) bool {
// comparison of uint with int fails using .Interface()-comparison (see issue #64)
if v.IsInteger() && other.IsInteger() {
return v.Integer() == other.Integer()
}
return v.Interface() == other.Interface()
}
type sortedKeys []reflect.Value
func (sk sortedKeys) Len() int {
return len(sk)
}
func (sk sortedKeys) Less(i, j int) bool {
vi := &Value{val: sk[i]}
vj := &Value{val: sk[j]}
switch {
case vi.IsInteger() && vj.IsInteger():
return vi.Integer() < vj.Integer()
case vi.IsFloat() && vj.IsFloat():
return vi.Float() < vj.Float()
default:
return vi.String() < vj.String()
}
}
func (sk sortedKeys) Swap(i, j int) {
sk[i], sk[j] = sk[j], sk[i]
}
type valuesList []*Value
func (vl valuesList) Len() int {
return len(vl)
}
func (vl valuesList) Less(i, j int) bool {
vi := vl[i]
vj := vl[j]
switch {
case vi.IsInteger() && vj.IsInteger():
return vi.Integer() < vj.Integer()
case vi.IsFloat() && vj.IsFloat():
return vi.Float() < vj.Float()
default:
return vi.String() < vj.String()
}
}
func (vl valuesList) Swap(i, j int) {
vl[i], vl[j] = vl[j], vl[i]
}
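
The hunk above pins the vendored pongo2 back to its v3 iteration API: `IterateOrder` loses the `sorted` flag and the `sortedKeys`/`valuesList` helpers, so only forward/reverse iteration remains. A minimal sketch of that surface, assuming only the `AsValue` and `Iterate` signatures visible in this hunk (for slices the item arrives as the `key` callback argument and `value` stays nil):

```go
package main

import (
	"fmt"

	"github.com/flosch/pongo2"
)

func main() {
	// Wrap a plain Go slice; AsValue accepts any value per the doc comment above.
	v := pongo2.AsValue([]string{"alpha", "beta", "gamma"})

	// Iterate invokes the callback once per item; returning false stops early.
	// Internally this is IterateOrder(fn, empty, false), i.e. forward order only.
	v.Iterate(func(idx, count int, key, value *pongo2.Value) bool {
		fmt.Printf("%d/%d: %s\n", idx+1, count, key.String())
		return true
	}, func() {
		fmt.Println("empty value") // called only when the wrapped value has no items
	})
}
```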

View File

@ -1,6 +1,7 @@
package pongo2
import (
"bytes"
"fmt"
"reflect"
"strconv"
@ -17,8 +18,8 @@ type variablePart struct {
s string
i int
isFunctionCall bool
callingArgs []functionCallArgument // needed for a function call, represents all argument nodes (INode supports nested function calls)
is_function_call bool
calling_args []functionCallArgument // needed for a function call, represents all argument nodes (INode supports nested function calls)
}
type functionCallArgument interface {
@ -27,119 +28,119 @@ type functionCallArgument interface {
// TODO: Add location tokens
type stringResolver struct {
locationToken *Token
val string
location_token *Token
val string
}
type intResolver struct {
locationToken *Token
val int
location_token *Token
val int
}
type floatResolver struct {
locationToken *Token
val float64
location_token *Token
val float64
}
type boolResolver struct {
locationToken *Token
val bool
location_token *Token
val bool
}
type variableResolver struct {
locationToken *Token
location_token *Token
parts []*variablePart
}
type nodeFilteredVariable struct {
locationToken *Token
location_token *Token
resolver IEvaluator
filterChain []*filterCall
}
type nodeVariable struct {
locationToken *Token
expr IEvaluator
location_token *Token
expr IEvaluator
}
func (v *nodeFilteredVariable) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := v.Evaluate(ctx)
func (expr *nodeFilteredVariable) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (vr *variableResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := vr.Evaluate(ctx)
func (expr *variableResolver) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (s *stringResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := s.Evaluate(ctx)
func (expr *stringResolver) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (i *intResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := i.Evaluate(ctx)
func (expr *intResolver) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (f *floatResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := f.Evaluate(ctx)
func (expr *floatResolver) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (b *boolResolver) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
value, err := b.Evaluate(ctx)
func (expr *boolResolver) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := expr.Evaluate(ctx)
if err != nil {
return err
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
func (v *nodeFilteredVariable) GetPositionToken() *Token {
return v.locationToken
return v.location_token
}
func (vr *variableResolver) GetPositionToken() *Token {
return vr.locationToken
func (v *variableResolver) GetPositionToken() *Token {
return v.location_token
}
func (s *stringResolver) GetPositionToken() *Token {
return s.locationToken
func (v *stringResolver) GetPositionToken() *Token {
return v.location_token
}
func (i *intResolver) GetPositionToken() *Token {
return i.locationToken
func (v *intResolver) GetPositionToken() *Token {
return v.location_token
}
func (f *floatResolver) GetPositionToken() *Token {
return f.locationToken
func (v *floatResolver) GetPositionToken() *Token {
return v.location_token
}
func (b *boolResolver) GetPositionToken() *Token {
return b.locationToken
func (v *boolResolver) GetPositionToken() *Token {
return v.location_token
}
func (s *stringResolver) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
@ -178,7 +179,7 @@ func (nv *nodeVariable) FilterApplied(name string) bool {
return nv.expr.FilterApplied(name)
}
func (nv *nodeVariable) Execute(ctx *ExecutionContext, writer TemplateWriter) *Error {
func (nv *nodeVariable) Execute(ctx *ExecutionContext, buffer *bytes.Buffer) *Error {
value, err := nv.expr.Evaluate(ctx)
if err != nil {
return err
@ -192,7 +193,7 @@ func (nv *nodeVariable) Execute(ctx *ExecutionContext, writer TemplateWriter) *E
}
}
writer.WriteString(value.String())
buffer.WriteString(value.String())
return nil
}
@ -217,15 +218,15 @@ func (vr *variableResolver) String() string {
func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
var current reflect.Value
var isSafe bool
var is_safe bool
for idx, part := range vr.parts {
if idx == 0 {
// We're looking up the first part of the variable.
// First we're having a look in our private
// context (e. g. information provided by tags, like the forloop)
val, inPrivate := ctx.Private[vr.parts[0].s]
if !inPrivate {
val, in_private := ctx.Private[vr.parts[0].s]
if !in_private {
// Nothing found? Then have a final lookup in the public context
val = ctx.Public[vr.parts[0].s]
}
@ -235,16 +236,16 @@ func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
// Before resolving the pointer, let's see if we have a method to call
// Problem with resolving the pointer is we're changing the receiver
isFunc := false
is_func := false
if part.typ == varTypeIdent {
funcValue := current.MethodByName(part.s)
if funcValue.IsValid() {
current = funcValue
isFunc = true
func_value := current.MethodByName(part.s)
if func_value.IsValid() {
current = func_value
is_func = true
}
}
if !isFunc {
if !is_func {
// If current a pointer, resolve it
if current.Kind() == reflect.Ptr {
current = current.Elem()
@ -261,11 +262,7 @@ func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
// * slices/arrays/strings
switch current.Kind() {
case reflect.String, reflect.Array, reflect.Slice:
if current.Len() > part.i {
current = current.Index(part.i)
} else {
return nil, fmt.Errorf("Index out of range: %d (variable %s)", part.i, vr.String())
}
current = current.Index(part.i)
default:
return nil, fmt.Errorf("Can't access an index on type %s (variable %s)",
current.Kind().String(), vr.String())
@ -299,9 +296,9 @@ func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
// Happens in function calls (as a return value) or by injecting
// into the execution context (e.g. in a for-loop)
if current.Type() == reflect.TypeOf(&Value{}) {
tmpValue := current.Interface().(*Value)
current = tmpValue.val
isSafe = tmpValue.safe
tmp_value := current.Interface().(*Value)
current = tmp_value.val
is_safe = tmp_value.safe
}
// Check whether this is an interface and resolve it where required
@ -310,10 +307,10 @@ func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
}
// Check if the part is a function call
if part.isFunctionCall || current.Kind() == reflect.Func {
if part.is_function_call || current.Kind() == reflect.Func {
// Check for callable
if current.Kind() != reflect.Func {
return nil, fmt.Errorf("'%s' is not a function (it is %s)", vr.String(), current.Kind().String())
return nil, fmt.Errorf("'%s' is not a function (it is %s).", vr.String(), current.Kind().String())
}
// Check for correct function syntax and types
@ -321,56 +318,58 @@ func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
t := current.Type()
// Input arguments
if len(part.callingArgs) != t.NumIn() && !(len(part.callingArgs) >= t.NumIn()-1 && t.IsVariadic()) {
if len(part.calling_args) != t.NumIn() && !(len(part.calling_args) >= t.NumIn()-1 && t.IsVariadic()) {
return nil,
fmt.Errorf("Function input argument count (%d) of '%s' must be equal to the calling argument count (%d).",
t.NumIn(), vr.String(), len(part.callingArgs))
t.NumIn(), vr.String(), len(part.calling_args))
}
// Output arguments
if t.NumOut() != 1 {
return nil, fmt.Errorf("'%s' must have exactly 1 output argument", vr.String())
return nil, fmt.Errorf("'%s' must have exactly 1 output argument.", vr.String())
}
// Evaluate all parameters
var parameters []reflect.Value
parameters := make([]reflect.Value, 0)
numArgs := t.NumIn()
isVariadic := t.IsVariadic()
var fnArg reflect.Type
num_args := t.NumIn()
is_variadic := t.IsVariadic()
var fn_arg reflect.Type
for idx, arg := range part.callingArgs {
for idx, arg := range part.calling_args {
pv, err := arg.Evaluate(ctx)
if err != nil {
return nil, err
}
if isVariadic {
if is_variadic {
if idx >= t.NumIn()-1 {
fnArg = t.In(numArgs - 1).Elem()
fn_arg = t.In(num_args - 1).Elem()
} else {
fnArg = t.In(idx)
fn_arg = t.In(idx)
}
} else {
fnArg = t.In(idx)
fn_arg = t.In(idx)
}
if fnArg != reflect.TypeOf(new(Value)) {
if fn_arg != reflect.TypeOf(new(Value)) {
// Function's argument is not a *pongo2.Value, then we have to check whether input argument is of the same type as the function's argument
if !isVariadic {
if fnArg != reflect.TypeOf(pv.Interface()) && fnArg.Kind() != reflect.Interface {
if !is_variadic {
if fn_arg != reflect.TypeOf(pv.Interface()) && fn_arg.Kind() != reflect.Interface {
return nil, fmt.Errorf("Function input argument %d of '%s' must be of type %s or *pongo2.Value (not %T).",
idx, vr.String(), fnArg.String(), pv.Interface())
idx, vr.String(), fn_arg.String(), pv.Interface())
} else {
// Function's argument has another type, using the interface-value
parameters = append(parameters, reflect.ValueOf(pv.Interface()))
}
// Function's argument has another type, using the interface-value
parameters = append(parameters, reflect.ValueOf(pv.Interface()))
} else {
if fnArg != reflect.TypeOf(pv.Interface()) && fnArg.Kind() != reflect.Interface {
if fn_arg != reflect.TypeOf(pv.Interface()) && fn_arg.Kind() != reflect.Interface {
return nil, fmt.Errorf("Function variadic input argument of '%s' must be of type %s or *pongo2.Value (not %T).",
vr.String(), fnArg.String(), pv.Interface())
vr.String(), fn_arg.String(), pv.Interface())
} else {
// Function's argument has another type, using the interface-value
parameters = append(parameters, reflect.ValueOf(pv.Interface()))
}
// Function's argument has another type, using the interface-value
parameters = append(parameters, reflect.ValueOf(pv.Interface()))
}
} else {
// Function's argument is a *pongo2.Value
@ -378,13 +377,6 @@ func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
}
}
// Check if any of the values are invalid
for _, p := range parameters {
if p.Kind() == reflect.Invalid {
return nil, fmt.Errorf("Calling a function using an invalid parameter")
}
}
// Call it and get first return parameter back
rv := current.Call(parameters)[0]
@ -393,7 +385,7 @@ func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
} else {
// Return the function call value
current = rv.Interface().(*Value).val
isSafe = rv.Interface().(*Value).safe
is_safe = rv.Interface().(*Value).safe
}
}
}
@ -403,13 +395,13 @@ func (vr *variableResolver) resolve(ctx *ExecutionContext) (*Value, error) {
return AsValue(nil), nil
}
return &Value{val: current, safe: isSafe}, nil
return &Value{val: current, safe: is_safe}, nil
}
func (vr *variableResolver) Evaluate(ctx *ExecutionContext) (*Value, *Error) {
value, err := vr.resolve(ctx)
if err != nil {
return AsValue(nil), ctx.Error(err.Error(), vr.locationToken)
return AsValue(nil), ctx.Error(err.Error(), vr.location_token)
}
return value, nil
}
@ -444,7 +436,7 @@ func (p *Parser) parseVariableOrLiteral() (IEvaluator, *Error) {
t := p.Current()
if t == nil {
return nil, p.Error("Unexpected EOF, expected a number, string, keyword or identifier.", p.lastToken)
return nil, p.Error("Unexpected EOF, expected a number, string, keyword or identifier.", p.last_token)
}
// Is first part a number or a string, there's nothing to resolve (because there's only to return the value then)
@ -468,26 +460,26 @@ func (p *Parser) parseVariableOrLiteral() (IEvaluator, *Error) {
return nil, p.Error(err.Error(), t)
}
fr := &floatResolver{
locationToken: t,
val: f,
location_token: t,
val: f,
}
return fr, nil
} else {
i, err := strconv.Atoi(t.Val)
if err != nil {
return nil, p.Error(err.Error(), t)
}
nr := &intResolver{
location_token: t,
val: i,
}
return nr, nil
}
i, err := strconv.Atoi(t.Val)
if err != nil {
return nil, p.Error(err.Error(), t)
}
nr := &intResolver{
locationToken: t,
val: i,
}
return nr, nil
case TokenString:
p.Consume()
sr := &stringResolver{
locationToken: t,
val: t.Val,
location_token: t,
val: t.Val,
}
return sr, nil
case TokenKeyword:
@ -495,14 +487,14 @@ func (p *Parser) parseVariableOrLiteral() (IEvaluator, *Error) {
switch t.Val {
case "true":
br := &boolResolver{
locationToken: t,
val: true,
location_token: t,
val: true,
}
return br, nil
case "false":
br := &boolResolver{
locationToken: t,
val: false,
location_token: t,
val: false,
}
return br, nil
default:
@ -511,7 +503,7 @@ func (p *Parser) parseVariableOrLiteral() (IEvaluator, *Error) {
}
resolver := &variableResolver{
locationToken: t,
location_token: t,
}
// First part of a variable MUST be an identifier
@ -559,26 +551,26 @@ variableLoop:
} else {
// EOF
return nil, p.Error("Unexpected EOF, expected either IDENTIFIER or NUMBER after DOT.",
p.lastToken)
p.last_token)
}
} else if p.Match(TokenSymbol, "(") != nil {
// Function call
// FunctionName '(' Comma-separated list of expressions ')'
part := resolver.parts[len(resolver.parts)-1]
part.isFunctionCall = true
part.is_function_call = true
argumentLoop:
for {
if p.Remaining() == 0 {
return nil, p.Error("Unexpected EOF, expected function call argument list.", p.lastToken)
return nil, p.Error("Unexpected EOF, expected function call argument list.", p.last_token)
}
if p.Peek(TokenSymbol, ")") == nil {
// No closing bracket, so we're parsing an expression
exprArg, err := p.ParseExpression()
expr_arg, err := p.ParseExpression()
if err != nil {
return nil, err
}
part.callingArgs = append(part.callingArgs, exprArg)
part.calling_args = append(part.calling_args, expr_arg)
if p.Match(TokenSymbol, ")") != nil {
// If there's a closing bracket after an expression, we will stop parsing the arguments
@ -609,7 +601,7 @@ variableLoop:
func (p *Parser) parseVariableOrLiteralWithFilter() (*nodeFilteredVariable, *Error) {
v := &nodeFilteredVariable{
locationToken: p.Current(),
location_token: p.Current(),
}
// Parse the variable name
@ -629,13 +621,15 @@ filterLoop:
}
// Check sandbox filter restriction
if _, isBanned := p.template.set.bannedFilters[filter.name]; isBanned {
if _, is_banned := p.template.set.bannedFilters[filter.name]; is_banned {
return nil, p.Error(fmt.Sprintf("Usage of filter '%s' is not allowed (sandbox restriction active).", filter.name), nil)
}
v.filterChain = append(v.filterChain, filter)
continue filterLoop
return nil, p.Error("This token is not allowed within a variable.", nil)
}
return v, nil
@ -643,7 +637,7 @@ filterLoop:
func (p *Parser) parseVariableElement() (INode, *Error) {
node := &nodeVariable{
locationToken: p.Current(),
location_token: p.Current(),
}
p.Consume() // consume '{{'
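
The resolver changes above keep v3's behaviour: variable parts are looked up as struct fields, map keys, indices or methods (`MethodByName`), and `calling_args` are evaluated and type-checked before the call. A small sketch of what that resolution enables at template level; `pongo2.FromString`, `Execute` and the `Greeter` type are assumptions here, not part of this hunk:

```go
package main

import (
	"fmt"
	"log"

	"github.com/flosch/pongo2"
)

// Greeter is a hypothetical context value: "g.Hello" is resolved by the
// variableResolver above via MethodByName and called with the evaluated
// argument list, while "g.Name" is a plain struct-field lookup.
type Greeter struct{ Name string }

func (g Greeter) Hello(other string) string {
	return fmt.Sprintf("Hello %s, I am %s", other, g.Name)
}

func main() {
	tpl, err := pongo2.FromString(`{{ g.Hello("world") }} ({{ g.Name }})`)
	if err != nil {
		log.Fatal(err)
	}
	out, err := tpl.Execute(pongo2.Context{"g": Greeter{Name: "promcertcheck"}})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out) // Hello world, I am promcertcheck (promcertcheck)
}
```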

16
vendor/github.com/golang/protobuf/.gitignore generated vendored Normal file
View File

@ -0,0 +1,16 @@
.DS_Store
*.[568ao]
*.ao
*.so
*.pyc
._*
.nfs.*
[568a].out
*~
*.orig
core
_obj
_test
_testmain.go
protoc-gen-go/testdata/multi/*.pb.go
_conformance/_conformance

18
vendor/github.com/golang/protobuf/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,18 @@
sudo: false
language: go
go:
- 1.6.x
- 1.7.x
- 1.8.x
- 1.9.x
install:
- go get -v -d -t github.com/golang/protobuf/...
- curl -L https://github.com/google/protobuf/releases/download/v3.3.0/protoc-3.3.0-linux-x86_64.zip -o /tmp/protoc.zip
- unzip /tmp/protoc.zip -d $HOME/protoc
env:
- PATH=$HOME/protoc/bin:$PATH
script:
- make all test

3
vendor/github.com/golang/protobuf/AUTHORS generated vendored Normal file
View File

@ -0,0 +1,3 @@
# This source code refers to The Go Authors for copyright purposes.
# The master list of authors is in the main Go distribution,
# visible at http://tip.golang.org/AUTHORS.

3
vendor/github.com/golang/protobuf/CONTRIBUTORS generated vendored Normal file
View File

@ -0,0 +1,3 @@
# This source code was written by the Go contributors.
# The master list of contributors is in the main Go distribution,
# visible at http://tip.golang.org/CONTRIBUTORS.

31
vendor/github.com/golang/protobuf/LICENSE generated vendored Normal file
View File

@ -0,0 +1,31 @@
Go support for Protocol Buffers - Google's data interchange format
Copyright 2010 The Go Authors. All rights reserved.
https://github.com/golang/protobuf
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

40
vendor/github.com/golang/protobuf/Make.protobuf generated vendored Normal file
View File

@ -0,0 +1,40 @@
# Go support for Protocol Buffers - Google's data interchange format
#
# Copyright 2010 The Go Authors. All rights reserved.
# https://github.com/golang/protobuf
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Includable Makefile to add a rule for generating .pb.go files from .proto files
# (Google protocol buffer descriptions).
# Typical use if myproto.proto is a file in package mypackage in this directory:
#
# include $(GOROOT)/src/pkg/github.com/golang/protobuf/Make.protobuf
%.pb.go: %.proto
protoc --go_out=. $<

55
vendor/github.com/golang/protobuf/Makefile generated vendored Normal file
View File

@ -0,0 +1,55 @@
# Go support for Protocol Buffers - Google's data interchange format
#
# Copyright 2010 The Go Authors. All rights reserved.
# https://github.com/golang/protobuf
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
all: install
install:
go install ./proto ./jsonpb ./ptypes
go install ./protoc-gen-go
test:
go test ./proto ./jsonpb ./ptypes
make -C protoc-gen-go/testdata test
clean:
go clean ./...
nuke:
go clean -i ./...
regenerate:
make -C protoc-gen-go/descriptor regenerate
make -C protoc-gen-go/plugin regenerate
make -C protoc-gen-go/testdata regenerate
make -C proto/testdata regenerate
make -C jsonpb/jsonpb_test_proto regenerate
make -C _conformance regenerate

244
vendor/github.com/golang/protobuf/README.md generated vendored Normal file
View File

@ -0,0 +1,244 @@
# Go support for Protocol Buffers
[![Build Status](https://travis-ci.org/golang/protobuf.svg?branch=master)](https://travis-ci.org/golang/protobuf)
[![GoDoc](https://godoc.org/github.com/golang/protobuf?status.svg)](https://godoc.org/github.com/golang/protobuf)
Google's data interchange format.
Copyright 2010 The Go Authors.
https://github.com/golang/protobuf
This package and the code it generates requires at least Go 1.4.
This software implements Go bindings for protocol buffers. For
information about protocol buffers themselves, see
https://developers.google.com/protocol-buffers/
## Installation ##
To use this software, you must:
- Install the standard C++ implementation of protocol buffers from
https://developers.google.com/protocol-buffers/
- Of course, install the Go compiler and tools from
https://golang.org/
See
https://golang.org/doc/install
for details or, if you are using gccgo, follow the instructions at
https://golang.org/doc/install/gccgo
- Grab the code from the repository and install the proto package.
The simplest way is to run `go get -u github.com/golang/protobuf/protoc-gen-go`.
The compiler plugin, protoc-gen-go, will be installed in $GOBIN,
defaulting to $GOPATH/bin. It must be in your $PATH for the protocol
compiler, protoc, to find it.
This software has two parts: a 'protocol compiler plugin' that
generates Go source files that, once compiled, can access and manage
protocol buffers; and a library that implements run-time support for
encoding (marshaling), decoding (unmarshaling), and accessing protocol
buffers.
There is support for gRPC in Go using protocol buffers.
See the note at the bottom of this file for details.
There are no insertion points in the plugin.
## Using protocol buffers with Go ##
Once the software is installed, there are two steps to using it.
First you must compile the protocol buffer definitions and then import
them, with the support library, into your program.
To compile the protocol buffer definition, run protoc with the --go_out
parameter set to the directory you want to output the Go code to.
protoc --go_out=. *.proto
The generated files will be suffixed .pb.go. See the Test code below
for an example using such a file.
The package comment for the proto library contains text describing
the interface provided in Go for protocol buffers. Here is an edited
version.
==========
The proto package converts data structures to and from the
wire format of protocol buffers. It works in concert with the
Go source code generated for .proto files by the protocol compiler.
A summary of the properties of the protocol buffer interface
for a protocol buffer variable v:
- Names are turned from camel_case to CamelCase for export.
- There are no methods on v to set fields; just treat
them as structure fields.
- There are getters that return a field's value if set,
and return the field's default value if unset.
The getters work even if the receiver is a nil message.
- The zero value for a struct is its correct initialization state.
All desired fields must be set before marshaling.
- A Reset() method will restore a protobuf struct to its zero state.
- Non-repeated fields are pointers to the values; nil means unset.
That is, optional or required field int32 f becomes F *int32.
- Repeated fields are slices.
- Helper functions are available to aid the setting of fields.
Helpers for getting values are superseded by the
GetFoo methods and their use is deprecated.
msg.Foo = proto.String("hello") // set field
- Constants are defined to hold the default values of all fields that
have them. They have the form Default_StructName_FieldName.
Because the getter methods handle defaulted values,
direct use of these constants should be rare.
- Enums are given type names and maps from names to values.
Enum values are prefixed with the enum's type name. Enum types have
a String method, and a Enum method to assist in message construction.
- Nested groups and enums have type names prefixed with the name of
the surrounding message type.
- Extensions are given descriptor names that start with E_,
followed by an underscore-delimited list of the nested messages
that contain it (if any) followed by the CamelCased name of the
extension field itself. HasExtension, ClearExtension, GetExtension
and SetExtension are functions for manipulating extensions.
- Oneof field sets are given a single field in their message,
with distinguished wrapper types for each possible field value.
- Marshal and Unmarshal are functions to encode and decode the wire format.
When the .proto file specifies `syntax="proto3"`, there are some differences:
- Non-repeated fields of non-message type are values instead of pointers.
- Enum types do not get an Enum method.
Consider file test.proto, containing
```proto
syntax = "proto2";
package example;
enum FOO { X = 17; };
message Test {
required string label = 1;
optional int32 type = 2 [default=77];
repeated int64 reps = 3;
optional group OptionalGroup = 4 {
required string RequiredField = 5;
}
}
```
To create and play with a Test object from the example package,
```go
package main
import (
"log"
"github.com/golang/protobuf/proto"
"path/to/example"
)
func main() {
test := &example.Test {
Label: proto.String("hello"),
Type: proto.Int32(17),
Reps: []int64{1, 2, 3},
Optionalgroup: &example.Test_OptionalGroup {
RequiredField: proto.String("good bye"),
},
}
data, err := proto.Marshal(test)
if err != nil {
log.Fatal("marshaling error: ", err)
}
newTest := &example.Test{}
err = proto.Unmarshal(data, newTest)
if err != nil {
log.Fatal("unmarshaling error: ", err)
}
// Now test and newTest contain the same data.
if test.GetLabel() != newTest.GetLabel() {
log.Fatalf("data mismatch %q != %q", test.GetLabel(), newTest.GetLabel())
}
// etc.
}
```
## Parameters ##
To pass extra parameters to the plugin, use a comma-separated
parameter list separated from the output directory by a colon:
protoc --go_out=plugins=grpc,import_path=mypackage:. *.proto
- `import_prefix=xxx` - a prefix that is added onto the beginning of
all imports. Useful for things like generating protos in a
subdirectory, or regenerating vendored protobufs in-place.
- `import_path=foo/bar` - used as the package if no input files
declare `go_package`. If it contains slashes, everything up to the
rightmost slash is ignored.
- `plugins=plugin1+plugin2` - specifies the list of sub-plugins to
load. The only plugin in this repo is `grpc`.
- `Mfoo/bar.proto=quux/shme` - declares that foo/bar.proto is
associated with Go package quux/shme. This is subject to the
import_prefix parameter.
## gRPC Support ##
If a proto file specifies RPC services, protoc-gen-go can be instructed to
generate code compatible with gRPC (http://www.grpc.io/). To do this, pass
the `plugins` parameter to protoc-gen-go; the usual way is to insert it into
the --go_out argument to protoc:
protoc --go_out=plugins=grpc:. *.proto
## Compatibility ##
The library and the generated code are expected to be stable over time.
However, we reserve the right to make breaking changes without notice for the
following reasons:
- Security. A security issue in the specification or implementation may come to
light whose resolution requires breaking compatibility. We reserve the right
to address such security issues.
- Unspecified behavior. There are some aspects of the Protocol Buffers
specification that are undefined. Programs that depend on such unspecified
behavior may break in future releases.
- Specification errors or changes. If it becomes necessary to address an
inconsistency, incompleteness, or change in the Protocol Buffers
specification, resolving the issue could affect the meaning or legality of
existing programs. We reserve the right to address such issues, including
updating the implementations.
- Bugs. If the library has a bug that violates the specification, a program
that depends on the buggy behavior may break if the bug is fixed. We reserve
the right to fix such bugs.
- Adding methods or fields to generated structs. These may conflict with field
names that already exist in a schema, causing applications to break. When the
code generator encounters a field in the schema that would collide with a
generated field or method name, the code generator will append an underscore
to the generated field or method name.
- Adding, removing, or changing methods or fields in generated structs that
start with `XXX`. These parts of the generated code are exported out of
necessity, but should not be considered part of the public API.
- Adding, removing, or changing unexported symbols in generated code.
Any breaking changes outside of these will be announced 6 months in advance to
protobuf@googlegroups.com.
You should, whenever possible, use generated code created by the `protoc-gen-go`
tool built at the same commit as the `proto` package. The `proto` package
declares package-level constants in the form `ProtoPackageIsVersionX`.
Application code and generated code may depend on one of these constants to
ensure that compilation will fail if the available version of the proto library
is too old. Whenever we make a change to the generated code that requires newer
library support, in the same commit we will increment the version number of the
generated code and declare a new package-level constant whose name incorporates
the latest version number. Removing a compatibility constant is considered a
breaking change and would be subject to the announcement policy stated above.
The `protoc-gen-go/generator` package exposes a plugin interface,
which is used by the gRPC code generation. This interface is not
supported and is subject to incompatible changes without notice.

View File

@ -0,0 +1,33 @@
# Go support for Protocol Buffers - Google's data interchange format
#
# Copyright 2016 The Go Authors. All rights reserved.
# https://github.com/golang/protobuf
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
regenerate:
protoc --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any,Mgoogle/protobuf/duration.proto=github.com/golang/protobuf/ptypes/duration,Mgoogle/protobuf/struct.proto=github.com/golang/protobuf/ptypes/struct,Mgoogle/protobuf/timestamp.proto=github.com/golang/protobuf/ptypes/timestamp,Mgoogle/protobuf/wrappers.proto=github.com/golang/protobuf/ptypes/wrappers,Mgoogle/protobuf/field_mask.proto=google.golang.org/genproto/protobuf:. conformance_proto/conformance.proto

View File

@ -0,0 +1,161 @@
// Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2016 The Go Authors. All rights reserved.
// https://github.com/golang/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// conformance implements the conformance test subprocess protocol as
// documented in conformance.proto.
package main
import (
"encoding/binary"
"fmt"
"io"
"os"
pb "github.com/golang/protobuf/_conformance/conformance_proto"
"github.com/golang/protobuf/jsonpb"
"github.com/golang/protobuf/proto"
)
func main() {
var sizeBuf [4]byte
inbuf := make([]byte, 0, 4096)
outbuf := proto.NewBuffer(nil)
for {
if _, err := io.ReadFull(os.Stdin, sizeBuf[:]); err == io.EOF {
break
} else if err != nil {
fmt.Fprintln(os.Stderr, "go conformance: read request:", err)
os.Exit(1)
}
size := binary.LittleEndian.Uint32(sizeBuf[:])
if int(size) > cap(inbuf) {
inbuf = make([]byte, size)
}
inbuf = inbuf[:size]
if _, err := io.ReadFull(os.Stdin, inbuf); err != nil {
fmt.Fprintln(os.Stderr, "go conformance: read request:", err)
os.Exit(1)
}
req := new(pb.ConformanceRequest)
if err := proto.Unmarshal(inbuf, req); err != nil {
fmt.Fprintln(os.Stderr, "go conformance: parse request:", err)
os.Exit(1)
}
res := handle(req)
if err := outbuf.Marshal(res); err != nil {
fmt.Fprintln(os.Stderr, "go conformance: marshal response:", err)
os.Exit(1)
}
binary.LittleEndian.PutUint32(sizeBuf[:], uint32(len(outbuf.Bytes())))
if _, err := os.Stdout.Write(sizeBuf[:]); err != nil {
fmt.Fprintln(os.Stderr, "go conformance: write response:", err)
os.Exit(1)
}
if _, err := os.Stdout.Write(outbuf.Bytes()); err != nil {
fmt.Fprintln(os.Stderr, "go conformance: write response:", err)
os.Exit(1)
}
outbuf.Reset()
}
}
var jsonMarshaler = jsonpb.Marshaler{
OrigName: true,
}
func handle(req *pb.ConformanceRequest) *pb.ConformanceResponse {
var err error
var msg pb.TestAllTypes
switch p := req.Payload.(type) {
case *pb.ConformanceRequest_ProtobufPayload:
err = proto.Unmarshal(p.ProtobufPayload, &msg)
case *pb.ConformanceRequest_JsonPayload:
err = jsonpb.UnmarshalString(p.JsonPayload, &msg)
if err != nil && err.Error() == "unmarshaling Any not supported yet" {
return &pb.ConformanceResponse{
Result: &pb.ConformanceResponse_Skipped{
Skipped: err.Error(),
},
}
}
default:
return &pb.ConformanceResponse{
Result: &pb.ConformanceResponse_RuntimeError{
RuntimeError: "unknown request payload type",
},
}
}
if err != nil {
return &pb.ConformanceResponse{
Result: &pb.ConformanceResponse_ParseError{
ParseError: err.Error(),
},
}
}
switch req.RequestedOutputFormat {
case pb.WireFormat_PROTOBUF:
p, err := proto.Marshal(&msg)
if err != nil {
return &pb.ConformanceResponse{
Result: &pb.ConformanceResponse_SerializeError{
SerializeError: err.Error(),
},
}
}
return &pb.ConformanceResponse{
Result: &pb.ConformanceResponse_ProtobufPayload{
ProtobufPayload: p,
},
}
case pb.WireFormat_JSON:
p, err := jsonMarshaler.MarshalToString(&msg)
if err != nil {
return &pb.ConformanceResponse{
Result: &pb.ConformanceResponse_SerializeError{
SerializeError: err.Error(),
},
}
}
return &pb.ConformanceResponse{
Result: &pb.ConformanceResponse_JsonPayload{
JsonPayload: p,
},
}
default:
return &pb.ConformanceResponse{
Result: &pb.ConformanceResponse_RuntimeError{
RuntimeError: "unknown output format",
},
}
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,285 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
syntax = "proto3";
package conformance;
option java_package = "com.google.protobuf.conformance";
import "google/protobuf/any.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/field_mask.proto";
import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
// This defines the conformance testing protocol. This protocol exists between
// the conformance test suite itself and the code being tested. For each test,
// the suite will send a ConformanceRequest message and expect a
// ConformanceResponse message.
//
// You can either run the tests in two different ways:
//
// 1. in-process (using the interface in conformance_test.h).
//
// 2. as a sub-process communicating over a pipe. Information about how to
// do this is in conformance_test_runner.cc.
//
// Pros/cons of the two approaches:
//
// - running as a sub-process is much simpler for languages other than C/C++.
//
// - running as a sub-process may be more tricky in unusual environments like
// iOS apps, where fork/stdin/stdout are not available.
enum WireFormat {
UNSPECIFIED = 0;
PROTOBUF = 1;
JSON = 2;
}
// Represents a single test case's input. The testee should:
//
// 1. parse this proto (which should always succeed)
// 2. parse the protobuf or JSON payload in "payload" (which may fail)
// 3. if the parse succeeded, serialize the message in the requested format.
message ConformanceRequest {
// The payload (whether protobuf of JSON) is always for a TestAllTypes proto
// (see below).
oneof payload {
bytes protobuf_payload = 1;
string json_payload = 2;
}
// Which format should the testee serialize its message to?
WireFormat requested_output_format = 3;
}
// Represents a single test case's output.
message ConformanceResponse {
oneof result {
// This string should be set to indicate parsing failed. The string can
// provide more information about the parse error if it is available.
//
// Setting this string does not necessarily mean the testee failed the
// test. Some of the test cases are intentionally invalid input.
string parse_error = 1;
// If the input was successfully parsed but errors occurred when
// serializing it to the requested output format, set the error message in
// this field.
string serialize_error = 6;
// This should be set if some other error occurred. This will always
// indicate that the test failed. The string can provide more information
// about the failure.
string runtime_error = 2;
// If the input was successfully parsed and the requested output was
// protobuf, serialize it to protobuf and set it in this field.
bytes protobuf_payload = 3;
// If the input was successfully parsed and the requested output was JSON,
// serialize to JSON and set it in this field.
string json_payload = 4;
// For when the testee skipped the test, likely because a certain feature
// wasn't supported, like JSON input/output.
string skipped = 5;
}
}
// This proto includes every type of field in both singular and repeated
// forms.
message TestAllTypes {
message NestedMessage {
int32 a = 1;
TestAllTypes corecursive = 2;
}
enum NestedEnum {
FOO = 0;
BAR = 1;
BAZ = 2;
NEG = -1; // Intentionally negative.
}
// Singular
int32 optional_int32 = 1;
int64 optional_int64 = 2;
uint32 optional_uint32 = 3;
uint64 optional_uint64 = 4;
sint32 optional_sint32 = 5;
sint64 optional_sint64 = 6;
fixed32 optional_fixed32 = 7;
fixed64 optional_fixed64 = 8;
sfixed32 optional_sfixed32 = 9;
sfixed64 optional_sfixed64 = 10;
float optional_float = 11;
double optional_double = 12;
bool optional_bool = 13;
string optional_string = 14;
bytes optional_bytes = 15;
NestedMessage optional_nested_message = 18;
ForeignMessage optional_foreign_message = 19;
NestedEnum optional_nested_enum = 21;
ForeignEnum optional_foreign_enum = 22;
string optional_string_piece = 24 [ctype=STRING_PIECE];
string optional_cord = 25 [ctype=CORD];
TestAllTypes recursive_message = 27;
// Repeated
repeated int32 repeated_int32 = 31;
repeated int64 repeated_int64 = 32;
repeated uint32 repeated_uint32 = 33;
repeated uint64 repeated_uint64 = 34;
repeated sint32 repeated_sint32 = 35;
repeated sint64 repeated_sint64 = 36;
repeated fixed32 repeated_fixed32 = 37;
repeated fixed64 repeated_fixed64 = 38;
repeated sfixed32 repeated_sfixed32 = 39;
repeated sfixed64 repeated_sfixed64 = 40;
repeated float repeated_float = 41;
repeated double repeated_double = 42;
repeated bool repeated_bool = 43;
repeated string repeated_string = 44;
repeated bytes repeated_bytes = 45;
repeated NestedMessage repeated_nested_message = 48;
repeated ForeignMessage repeated_foreign_message = 49;
repeated NestedEnum repeated_nested_enum = 51;
repeated ForeignEnum repeated_foreign_enum = 52;
repeated string repeated_string_piece = 54 [ctype=STRING_PIECE];
repeated string repeated_cord = 55 [ctype=CORD];
// Map
map < int32, int32> map_int32_int32 = 56;
map < int64, int64> map_int64_int64 = 57;
map < uint32, uint32> map_uint32_uint32 = 58;
map < uint64, uint64> map_uint64_uint64 = 59;
map < sint32, sint32> map_sint32_sint32 = 60;
map < sint64, sint64> map_sint64_sint64 = 61;
map < fixed32, fixed32> map_fixed32_fixed32 = 62;
map < fixed64, fixed64> map_fixed64_fixed64 = 63;
map <sfixed32, sfixed32> map_sfixed32_sfixed32 = 64;
map <sfixed64, sfixed64> map_sfixed64_sfixed64 = 65;
map < int32, float> map_int32_float = 66;
map < int32, double> map_int32_double = 67;
map < bool, bool> map_bool_bool = 68;
map < string, string> map_string_string = 69;
map < string, bytes> map_string_bytes = 70;
map < string, NestedMessage> map_string_nested_message = 71;
map < string, ForeignMessage> map_string_foreign_message = 72;
map < string, NestedEnum> map_string_nested_enum = 73;
map < string, ForeignEnum> map_string_foreign_enum = 74;
oneof oneof_field {
uint32 oneof_uint32 = 111;
NestedMessage oneof_nested_message = 112;
string oneof_string = 113;
bytes oneof_bytes = 114;
bool oneof_bool = 115;
uint64 oneof_uint64 = 116;
float oneof_float = 117;
double oneof_double = 118;
NestedEnum oneof_enum = 119;
}
// Well-known types
google.protobuf.BoolValue optional_bool_wrapper = 201;
google.protobuf.Int32Value optional_int32_wrapper = 202;
google.protobuf.Int64Value optional_int64_wrapper = 203;
google.protobuf.UInt32Value optional_uint32_wrapper = 204;
google.protobuf.UInt64Value optional_uint64_wrapper = 205;
google.protobuf.FloatValue optional_float_wrapper = 206;
google.protobuf.DoubleValue optional_double_wrapper = 207;
google.protobuf.StringValue optional_string_wrapper = 208;
google.protobuf.BytesValue optional_bytes_wrapper = 209;
repeated google.protobuf.BoolValue repeated_bool_wrapper = 211;
repeated google.protobuf.Int32Value repeated_int32_wrapper = 212;
repeated google.protobuf.Int64Value repeated_int64_wrapper = 213;
repeated google.protobuf.UInt32Value repeated_uint32_wrapper = 214;
repeated google.protobuf.UInt64Value repeated_uint64_wrapper = 215;
repeated google.protobuf.FloatValue repeated_float_wrapper = 216;
repeated google.protobuf.DoubleValue repeated_double_wrapper = 217;
repeated google.protobuf.StringValue repeated_string_wrapper = 218;
repeated google.protobuf.BytesValue repeated_bytes_wrapper = 219;
google.protobuf.Duration optional_duration = 301;
google.protobuf.Timestamp optional_timestamp = 302;
google.protobuf.FieldMask optional_field_mask = 303;
google.protobuf.Struct optional_struct = 304;
google.protobuf.Any optional_any = 305;
google.protobuf.Value optional_value = 306;
repeated google.protobuf.Duration repeated_duration = 311;
repeated google.protobuf.Timestamp repeated_timestamp = 312;
repeated google.protobuf.FieldMask repeated_fieldmask = 313;
repeated google.protobuf.Struct repeated_struct = 324;
repeated google.protobuf.Any repeated_any = 315;
repeated google.protobuf.Value repeated_value = 316;
// Test field-name-to-JSON-name convention.
// (protobuf says names can be any valid C/C++ identifier.)
int32 fieldname1 = 401;
int32 field_name2 = 402;
int32 _field_name3 = 403;
int32 field__name4_ = 404;
int32 field0name5 = 405;
int32 field_0_name6 = 406;
int32 fieldName7 = 407;
int32 FieldName8 = 408;
int32 field_Name9 = 409;
int32 Field_Name10 = 410;
int32 FIELD_NAME11 = 411;
int32 FIELD_name12 = 412;
int32 __field_name13 = 413;
int32 __Field_name14 = 414;
int32 field__name15 = 415;
int32 field__Name16 = 416;
int32 field_name17__ = 417;
int32 Field_name18__ = 418;
}
message ForeignMessage {
int32 c = 1;
}
enum ForeignEnum {
FOREIGN_FOO = 0;
FOREIGN_BAR = 1;
FOREIGN_BAZ = 2;
}

View File

@ -0,0 +1,93 @@
// Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2016 The Go Authors. All rights reserved.
// https://github.com/golang/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Package descriptor provides functions for obtaining protocol buffer
// descriptors for generated Go types.
//
// These functions cannot go in package proto because they depend on the
// generated protobuf descriptor messages, which themselves depend on proto.
package descriptor
import (
"bytes"
"compress/gzip"
"fmt"
"io/ioutil"
"github.com/golang/protobuf/proto"
protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor"
)
// extractFile extracts a FileDescriptorProto from a gzip'd buffer.
func extractFile(gz []byte) (*protobuf.FileDescriptorProto, error) {
r, err := gzip.NewReader(bytes.NewReader(gz))
if err != nil {
return nil, fmt.Errorf("failed to open gzip reader: %v", err)
}
defer r.Close()
b, err := ioutil.ReadAll(r)
if err != nil {
return nil, fmt.Errorf("failed to uncompress descriptor: %v", err)
}
fd := new(protobuf.FileDescriptorProto)
if err := proto.Unmarshal(b, fd); err != nil {
return nil, fmt.Errorf("malformed FileDescriptorProto: %v", err)
}
return fd, nil
}
// Message is a proto.Message with a method to return its descriptor.
//
// Message types generated by the protocol compiler always satisfy
// the Message interface.
type Message interface {
proto.Message
Descriptor() ([]byte, []int)
}
// ForMessage returns a FileDescriptorProto and a DescriptorProto from within it
// describing the given message.
func ForMessage(msg Message) (fd *protobuf.FileDescriptorProto, md *protobuf.DescriptorProto) {
gz, path := msg.Descriptor()
fd, err := extractFile(gz)
if err != nil {
panic(fmt.Sprintf("invalid FileDescriptorProto for %T: %v", msg, err))
}
md = fd.MessageType[path[0]]
for _, i := range path[1:] {
md = md.NestedType[i]
}
return fd, md
}

View File

@ -0,0 +1,32 @@
package descriptor_test
import (
"fmt"
"testing"
"github.com/golang/protobuf/descriptor"
tpb "github.com/golang/protobuf/proto/testdata"
protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor"
)
func TestMessage(t *testing.T) {
var msg *protobuf.DescriptorProto
fd, md := descriptor.ForMessage(msg)
if pkg, want := fd.GetPackage(), "google.protobuf"; pkg != want {
t.Errorf("descriptor.ForMessage(%T).GetPackage() = %q; want %q", msg, pkg, want)
}
if name, want := md.GetName(), "DescriptorProto"; name != want {
t.Fatalf("descriptor.ForMessage(%T).GetName() = %q; want %q", msg, name, want)
}
}
func Example_Options() {
var msg *tpb.MyMessageSet
_, md := descriptor.ForMessage(msg)
if md.GetOptions().GetMessageSetWireFormat() {
fmt.Printf("%v uses option message_set_wire_format.\n", md.GetName())
}
// Output:
// MyMessageSet uses option message_set_wire_format.
}

Some files were not shown because too many files have changed in this diff