Mirror of https://github.com/Luzifer/nginx-sso.git (synced 2024-12-20 12:51:17 +00:00)

Commit b8c89a5e0f (parent: 2b15b34bb4)

Vendor new dependencies for OIDC

Signed-off-by: Knut Ahlers <knut@ahlers.me>

117 changed files with 25716 additions and 0 deletions
Gopkg.lock (generated, 35 lines changed)
@@ -45,6 +45,14 @@
  revision = "3cfea5ab600ae37946be2b763b8ec2c1cf2d272d"
  version = "v1.0.0"

[[projects]]
  digest = "1:d8ee1b165eb7f4fd9ada718e1e7eeb0bc1fd462592d0bd823df694443f448681"
  name = "github.com/coreos/go-oidc"
  packages = ["."]
  pruneopts = ""
  revision = "1180514eaf4d9f38d0d19eef639a1d695e066e72"
  version = "v2.0.0"

[[projects]]
  branch = "master"
  digest = "1:f65bcfc1efab621eaf5df43869dd1aee399a298c299423c08b96a3b037437c48"

@@ -142,6 +150,17 @@
  revision = "ba968bfe8b2f7e042a574c888954fccecfa385b4"
  version = "v0.8.1"

[[projects]]
  branch = "master"
  digest = "1:de5481dda0c081b66450e391bbb1a5c4435b13e3c0bbf0133ba1a5baeda7b7af"
  name = "github.com/pquerna/cachecontrol"
  packages = [
    ".",
    "cacheobject",
  ]
  pruneopts = ""
  revision = "1555304b9b35fdd2b425bccf1a5613677705e7d0"

[[projects]]
  digest = "1:d8c398c75a666d415196ba289402c3f52b595d8cede451a9e57278354e327a93"
  name = "github.com/pquerna/otp"

@@ -202,6 +221,9 @@
  packages = [
    "bcrypt",
    "blowfish",
    "ed25519",
    "ed25519/internal/edwards25519",
    "pbkdf2",
  ]
  pruneopts = ""
  revision = "df01cb2cc480549d72034218dd98bf97671450ac"

@@ -370,6 +392,18 @@
  revision = "bb7a9ca6e4fbc2129e3db588a34bc970ffe811a9"
  version = "v2.5.1"

[[projects]]
  digest = "1:40cf02345bfa29fb217cfe0767a9416d99569d4ff21dbb1fd3378ef10682549c"
  name = "gopkg.in/square/go-jose.v2"
  packages = [
    ".",
    "cipher",
    "json",
  ]
  pruneopts = ""
  revision = "730df5f748271903322feb182be83b43ebbbe27d"
  version = "v2.3.1"

[[projects]]
  branch = "v2"
  digest = "1:bf922c860f64bb6372daa82bb9c1e8ed144e97ac7587efa80d08b4e98737e9ea"

@@ -393,6 +427,7 @@
    "github.com/GeertJohan/yubigo",
    "github.com/Luzifer/go_helpers/str",
    "github.com/Luzifer/rconfig",
    "github.com/coreos/go-oidc",
    "github.com/duosecurity/duo_api_golang",
    "github.com/duosecurity/duo_api_golang/authapi",
    "github.com/flosch/pongo2",
vendor/github.com/coreos/go-oidc/.gitignore (generated, vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
/bin
/gopath
vendor/github.com/coreos/go-oidc/.travis.yml (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
language: go

go:
- 1.7.5
- 1.8

install:
- go get -v -t github.com/coreos/go-oidc/...
- go get golang.org/x/tools/cmd/cover
- go get github.com/golang/lint/golint

script:
- ./test

notifications:
  email: false
vendor/github.com/coreos/go-oidc/CONTRIBUTING.md (generated, vendored, new file, 71 lines)
@ -0,0 +1,71 @@
|
|||
# How to Contribute
|
||||
|
||||
CoreOS projects are [Apache 2.0 licensed](LICENSE) and accept contributions via
|
||||
GitHub pull requests. This document outlines some of the conventions on
|
||||
development workflow, commit message formatting, contact points and other
|
||||
resources to make it easier to get your contribution accepted.
|
||||
|
||||
# Certificate of Origin
|
||||
|
||||
By contributing to this project you agree to the Developer Certificate of
|
||||
Origin (DCO). This document was created by the Linux Kernel community and is a
|
||||
simple statement that you, as a contributor, have the legal right to make the
|
||||
contribution. See the [DCO](DCO) file for details.
|
||||
|
||||
# Email and Chat
|
||||
|
||||
The project currently uses the general CoreOS email list and IRC channel:
|
||||
- Email: [coreos-dev](https://groups.google.com/forum/#!forum/coreos-dev)
|
||||
- IRC: #[coreos](irc://irc.freenode.org:6667/#coreos) IRC channel on freenode.org
|
||||
|
||||
Please avoid emailing maintainers found in the MAINTAINERS file directly. They
|
||||
are very busy and read the mailing lists.
|
||||
|
||||
## Getting Started
|
||||
|
||||
- Fork the repository on GitHub
|
||||
- Read the [README](README.md) for build and test instructions
|
||||
- Play with the project, submit bugs, submit patches!
|
||||
|
||||
## Contribution Flow
|
||||
|
||||
This is a rough outline of what a contributor's workflow looks like:
|
||||
|
||||
- Create a topic branch from where you want to base your work (usually master).
|
||||
- Make commits of logical units.
|
||||
- Make sure your commit messages are in the proper format (see below).
|
||||
- Push your changes to a topic branch in your fork of the repository.
|
||||
- Make sure the tests pass, and add any new tests as appropriate.
|
||||
- Submit a pull request to the original repository.
|
||||
|
||||
Thanks for your contributions!
|
||||
|
||||
### Format of the Commit Message
|
||||
|
||||
We follow a rough convention for commit messages that is designed to answer two
|
||||
questions: what changed and why. The subject line should feature the what and
|
||||
the body of the commit should describe the why.
|
||||
|
||||
```
|
||||
scripts: add the test-cluster command
|
||||
|
||||
this uses tmux to setup a test cluster that you can easily kill and
|
||||
start for debugging.
|
||||
|
||||
Fixes #38
|
||||
```
|
||||
|
||||
The format can be described more formally as follows:
|
||||
|
||||
```
|
||||
<subsystem>: <what changed>
|
||||
<BLANK LINE>
|
||||
<why this change was made>
|
||||
<BLANK LINE>
|
||||
<footer>
|
||||
```
|
||||
|
||||
The first line is the subject and should be no longer than 70 characters, the
|
||||
second line is always blank, and other lines should be wrapped at 80 characters.
|
||||
This allows the message to be easier to read on GitHub as well as in various
|
||||
git tools.
|
vendor/github.com/coreos/go-oidc/DCO (generated, vendored, new file, 36 lines)
@ -0,0 +1,36 @@
|
|||
Developer Certificate of Origin
|
||||
Version 1.1
|
||||
|
||||
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
||||
660 York Street, Suite 102,
|
||||
San Francisco, CA 94110 USA
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
|
||||
Developer's Certificate of Origin 1.1
|
||||
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
(a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
(b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
(c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
(d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
vendor/github.com/coreos/go-oidc/LICENSE (generated, vendored, new file, 202 lines)
@ -0,0 +1,202 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
vendor/github.com/coreos/go-oidc/MAINTAINERS (generated, vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
Eric Chiang <echiang@redhat.com> (@ericchiang)
Rithu Leena John <rjohn@redhat.com> (@rithujohn191)
vendor/github.com/coreos/go-oidc/NOTICE (generated, vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
CoreOS Project
Copyright 2014 CoreOS, Inc

This product includes software developed at CoreOS, Inc.
(http://www.coreos.com/).
vendor/github.com/coreos/go-oidc/README.md (generated, vendored, new file, 72 lines)
@@ -0,0 +1,72 @@
# go-oidc

[![GoDoc](https://godoc.org/github.com/coreos/go-oidc?status.svg)](https://godoc.org/github.com/coreos/go-oidc)
[![Build Status](https://travis-ci.org/coreos/go-oidc.png?branch=master)](https://travis-ci.org/coreos/go-oidc)

## OpenID Connect support for Go

This package enables OpenID Connect support for the [golang.org/x/oauth2](https://godoc.org/golang.org/x/oauth2) package.

```go
provider, err := oidc.NewProvider(ctx, "https://accounts.google.com")
if err != nil {
    // handle error
}

// Configure an OpenID Connect aware OAuth2 client.
oauth2Config := oauth2.Config{
    ClientID:     clientID,
    ClientSecret: clientSecret,
    RedirectURL:  redirectURL,

    // Discovery returns the OAuth2 endpoints.
    Endpoint: provider.Endpoint(),

    // "openid" is a required scope for OpenID Connect flows.
    Scopes: []string{oidc.ScopeOpenID, "profile", "email"},
}
```

OAuth2 redirects are unchanged.

```go
func handleRedirect(w http.ResponseWriter, r *http.Request) {
    http.Redirect(w, r, oauth2Config.AuthCodeURL(state), http.StatusFound)
}
```

Then, on the callback response, the provider can be used to verify ID Tokens.

```go
var verifier = provider.Verifier(&oidc.Config{ClientID: clientID})

func handleOAuth2Callback(w http.ResponseWriter, r *http.Request) {
    // Verify state and errors.

    oauth2Token, err := oauth2Config.Exchange(ctx, r.URL.Query().Get("code"))
    if err != nil {
        // handle error
    }

    // Extract the ID Token from OAuth2 token.
    rawIDToken, ok := oauth2Token.Extra("id_token").(string)
    if !ok {
        // handle missing token
    }

    // Parse and verify ID Token payload.
    idToken, err := verifier.Verify(ctx, rawIDToken)
    if err != nil {
        // handle error
    }

    // Extract custom claims
    var claims struct {
        Email    string `json:"email"`
        Verified bool   `json:"email_verified"`
    }
    if err := idToken.Claims(&claims); err != nil {
        // handle error
    }
}
```
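A usage note on the scopes in the README above: in addition to ScopeOpenID, the vendored package defines ScopeOfflineAccess for requesting OAuth2 refresh tokens (see oidc.go later in this diff), while Google instead expects the access_type=offline parameter on the authorization request. A minimal sketch, assuming an oauth2Config and state wired up as in the README example; the handler name is hypothetical:

```go
package sketch

import (
    "net/http"

    oidc "github.com/coreos/go-oidc"
    "golang.org/x/oauth2"
)

// Assumed to be configured as in the README example above.
var (
    oauth2Config oauth2.Config
    state        string
)

// handleRedirectOffline additionally asks the provider for a refresh token.
func handleRedirectOffline(w http.ResponseWriter, r *http.Request) {
    // Spec-compliant providers honor the "offline_access" scope...
    cfg := oauth2Config
    cfg.Scopes = append([]string{oidc.ScopeOfflineAccess}, oauth2Config.Scopes...)

    // ...while Google wants access_type=offline on the auth request instead.
    http.Redirect(w, r, cfg.AuthCodeURL(state, oauth2.AccessTypeOffline), http.StatusFound)
}
```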
vendor/github.com/coreos/go-oidc/code-of-conduct.md (generated, vendored, new file, 61 lines)
@ -0,0 +1,61 @@
|
|||
## CoreOS Community Code of Conduct
|
||||
|
||||
### Contributor Code of Conduct
|
||||
|
||||
As contributors and maintainers of this project, and in the interest of
|
||||
fostering an open and welcoming community, we pledge to respect all people who
|
||||
contribute through reporting issues, posting feature requests, updating
|
||||
documentation, submitting pull requests or patches, and other activities.
|
||||
|
||||
We are committed to making participation in this project a harassment-free
|
||||
experience for everyone, regardless of level of experience, gender, gender
|
||||
identity and expression, sexual orientation, disability, personal appearance,
|
||||
body size, race, ethnicity, age, religion, or nationality.
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery
|
||||
* Personal attacks
|
||||
* Trolling or insulting/derogatory comments
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as physical or electronic addresses, without explicit permission
|
||||
* Other unethical or unprofessional conduct.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct. By adopting this Code of Conduct,
|
||||
project maintainers commit themselves to fairly and consistently applying these
|
||||
principles to every aspect of managing this project. Project maintainers who do
|
||||
not follow or enforce the Code of Conduct may be permanently removed from the
|
||||
project team.
|
||||
|
||||
This code of conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community.
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting a project maintainer, Brandon Philips
|
||||
<brandon.philips@coreos.com>, and/or Rithu John <rithu.john@coreos.com>.
|
||||
|
||||
This Code of Conduct is adapted from the Contributor Covenant
|
||||
(http://contributor-covenant.org), version 1.2.0, available at
|
||||
http://contributor-covenant.org/version/1/2/0/
|
||||
|
||||
### CoreOS Events Code of Conduct
|
||||
|
||||
CoreOS events are working conferences intended for professional networking and
|
||||
collaboration in the CoreOS community. Attendees are expected to behave
|
||||
according to professional standards and in accordance with their employer’s
|
||||
policies on appropriate workplace behavior.
|
||||
|
||||
While at CoreOS events or related social networking opportunities, attendees
|
||||
should not engage in discriminatory or offensive speech or actions including
|
||||
but not limited to gender, sexuality, race, age, disability, or religion.
|
||||
Speakers should be especially aware of these concerns.
|
||||
|
||||
CoreOS does not condone any statements by speakers contrary to these standards.
|
||||
CoreOS reserves the right to deny entrance and/or eject from an event (without
|
||||
refund) any individual found to be engaging in discriminatory or offensive
|
||||
speech or actions.
|
||||
|
||||
Please bring any concerns to the immediate attention of designated on-site
|
||||
staff, Brandon Philips <brandon.philips@coreos.com>, and/or Rithu John <rithu.john@coreos.com>.
|
vendor/github.com/coreos/go-oidc/example/README.md (generated, vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
# Examples

These are example uses of the oidc package. Each requires a Google account and the client ID and secret of a registered OAuth2 application. To create one:

1. Visit your [Google Developer Console][google-developer-console].
2. Click "Credentials" on the left column.
3. Click the "Create credentials" button followed by "OAuth client ID".
4. Select "Web application" and add "http://127.0.0.1:5556/auth/google/callback" as an authorized redirect URI.
5. Click create and add the printed client ID and secret to your environment using the following variables:

```
GOOGLE_OAUTH2_CLIENT_ID
GOOGLE_OAUTH2_CLIENT_SECRET
```

Finally run the examples using the Go tool and navigate to http://127.0.0.1:5556.

```
go run ./example/idtoken/app.go
```
[google-developer-console]: https://console.developers.google.com/apis/dashboard
vendor/github.com/coreos/go-oidc/example/idtoken/app.go (generated, vendored, new file, 92 lines)
@ -0,0 +1,92 @@
|
|||
/*
|
||||
This is an example application to demonstrate parsing an ID Token.
|
||||
*/
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
oidc "github.com/coreos/go-oidc"
|
||||
|
||||
"golang.org/x/net/context"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
var (
|
||||
clientID = os.Getenv("GOOGLE_OAUTH2_CLIENT_ID")
|
||||
clientSecret = os.Getenv("GOOGLE_OAUTH2_CLIENT_SECRET")
|
||||
)
|
||||
|
||||
func main() {
|
||||
ctx := context.Background()
|
||||
|
||||
provider, err := oidc.NewProvider(ctx, "https://accounts.google.com")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
oidcConfig := &oidc.Config{
|
||||
ClientID: clientID,
|
||||
}
|
||||
verifier := provider.Verifier(oidcConfig)
|
||||
|
||||
config := oauth2.Config{
|
||||
ClientID: clientID,
|
||||
ClientSecret: clientSecret,
|
||||
Endpoint: provider.Endpoint(),
|
||||
RedirectURL: "http://127.0.0.1:5556/auth/google/callback",
|
||||
Scopes: []string{oidc.ScopeOpenID, "profile", "email"},
|
||||
}
|
||||
|
||||
state := "foobar" // Don't do this in production.
|
||||
|
||||
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
http.Redirect(w, r, config.AuthCodeURL(state), http.StatusFound)
|
||||
})
|
||||
|
||||
http.HandleFunc("/auth/google/callback", func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Query().Get("state") != state {
|
||||
http.Error(w, "state did not match", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
oauth2Token, err := config.Exchange(ctx, r.URL.Query().Get("code"))
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to exchange token: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
rawIDToken, ok := oauth2Token.Extra("id_token").(string)
|
||||
if !ok {
|
||||
http.Error(w, "No id_token field in oauth2 token.", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
idToken, err := verifier.Verify(ctx, rawIDToken)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to verify ID Token: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
oauth2Token.AccessToken = "*REDACTED*"
|
||||
|
||||
resp := struct {
|
||||
OAuth2Token *oauth2.Token
|
||||
IDTokenClaims *json.RawMessage // ID Token payload is just JSON.
|
||||
}{oauth2Token, new(json.RawMessage)}
|
||||
|
||||
if err := idToken.Claims(&resp.IDTokenClaims); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
data, err := json.MarshalIndent(resp, "", " ")
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
w.Write(data)
|
||||
})
|
||||
|
||||
log.Printf("listening on http://%s/", "127.0.0.1:5556")
|
||||
log.Fatal(http.ListenAndServe("127.0.0.1:5556", nil))
|
||||
}
|
vendor/github.com/coreos/go-oidc/example/nonce/app.go (generated, vendored, new file, 100 lines)
@ -0,0 +1,100 @@
|
|||
/*
|
||||
This is an example application to demonstrate verifying an ID Token with a nonce.
|
||||
*/
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
oidc "github.com/coreos/go-oidc"
|
||||
|
||||
"golang.org/x/net/context"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
var (
|
||||
clientID = os.Getenv("GOOGLE_OAUTH2_CLIENT_ID")
|
||||
clientSecret = os.Getenv("GOOGLE_OAUTH2_CLIENT_SECRET")
|
||||
)
|
||||
|
||||
const appNonce = "a super secret nonce"
|
||||
|
||||
func main() {
|
||||
ctx := context.Background()
|
||||
|
||||
provider, err := oidc.NewProvider(ctx, "https://accounts.google.com")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
oidcConfig := &oidc.Config{
|
||||
ClientID: clientID,
|
||||
}
|
||||
// Use the nonce source to create a custom ID Token verifier.
|
||||
nonceEnabledVerifier := provider.Verifier(oidcConfig)
|
||||
|
||||
config := oauth2.Config{
|
||||
ClientID: clientID,
|
||||
ClientSecret: clientSecret,
|
||||
Endpoint: provider.Endpoint(),
|
||||
RedirectURL: "http://127.0.0.1:5556/auth/google/callback",
|
||||
Scopes: []string{oidc.ScopeOpenID, "profile", "email"},
|
||||
}
|
||||
|
||||
state := "foobar" // Don't do this in production.
|
||||
|
||||
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
http.Redirect(w, r, config.AuthCodeURL(state, oidc.Nonce(appNonce)), http.StatusFound)
|
||||
})
|
||||
|
||||
http.HandleFunc("/auth/google/callback", func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Query().Get("state") != state {
|
||||
http.Error(w, "state did not match", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
oauth2Token, err := config.Exchange(ctx, r.URL.Query().Get("code"))
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to exchange token: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
rawIDToken, ok := oauth2Token.Extra("id_token").(string)
|
||||
if !ok {
|
||||
http.Error(w, "No id_token field in oauth2 token.", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
// Verify the ID Token signature and nonce.
|
||||
idToken, err := nonceEnabledVerifier.Verify(ctx, rawIDToken)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to verify ID Token: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if idToken.Nonce != appNonce {
|
||||
http.Error(w, "Invalid ID Token nonce", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
resp := struct {
|
||||
OAuth2Token *oauth2.Token
|
||||
IDTokenClaims *json.RawMessage // ID Token payload is just JSON.
|
||||
}{oauth2Token, new(json.RawMessage)}
|
||||
|
||||
if err := idToken.Claims(&resp.IDTokenClaims); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
data, err := json.MarshalIndent(resp, "", " ")
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
w.Write(data)
|
||||
})
|
||||
|
||||
log.Printf("listening on http://%s/", "127.0.0.1:5556")
|
||||
log.Fatal(http.ListenAndServe("127.0.0.1:5556", nil))
|
||||
}
|
vendor/github.com/coreos/go-oidc/example/userinfo/app.go (generated, vendored, new file, 76 lines)
@ -0,0 +1,76 @@
|
|||
/*
|
||||
This is an example application to demonstrate querying the user info endpoint.
|
||||
*/
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
oidc "github.com/coreos/go-oidc"
|
||||
|
||||
"golang.org/x/net/context"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
var (
|
||||
clientID = os.Getenv("GOOGLE_OAUTH2_CLIENT_ID")
|
||||
clientSecret = os.Getenv("GOOGLE_OAUTH2_CLIENT_SECRET")
|
||||
)
|
||||
|
||||
func main() {
|
||||
ctx := context.Background()
|
||||
|
||||
provider, err := oidc.NewProvider(ctx, "https://accounts.google.com")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
config := oauth2.Config{
|
||||
ClientID: clientID,
|
||||
ClientSecret: clientSecret,
|
||||
Endpoint: provider.Endpoint(),
|
||||
RedirectURL: "http://127.0.0.1:5556/auth/google/callback",
|
||||
Scopes: []string{oidc.ScopeOpenID, "profile", "email"},
|
||||
}
|
||||
|
||||
state := "foobar" // Don't do this in production.
|
||||
|
||||
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
http.Redirect(w, r, config.AuthCodeURL(state), http.StatusFound)
|
||||
})
|
||||
|
||||
http.HandleFunc("/auth/google/callback", func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Query().Get("state") != state {
|
||||
http.Error(w, "state did not match", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
oauth2Token, err := config.Exchange(ctx, r.URL.Query().Get("code"))
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to exchange token: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
userInfo, err := provider.UserInfo(ctx, oauth2.StaticTokenSource(oauth2Token))
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to get userinfo: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
resp := struct {
|
||||
OAuth2Token *oauth2.Token
|
||||
UserInfo *oidc.UserInfo
|
||||
}{oauth2Token, userInfo}
|
||||
data, err := json.MarshalIndent(resp, "", " ")
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
w.Write(data)
|
||||
})
|
||||
|
||||
log.Printf("listening on http://%s/", "127.0.0.1:5556")
|
||||
log.Fatal(http.ListenAndServe("127.0.0.1:5556", nil))
|
||||
}
|
vendor/github.com/coreos/go-oidc/jose.go (generated, vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
// +build !golint

// Don't lint this file. We don't want to have to add a comment to each constant.

package oidc

const (
    // JOSE asymmetric signing algorithm values as defined by RFC 7518
    //
    // see: https://tools.ietf.org/html/rfc7518#section-3.1
    RS256 = "RS256" // RSASSA-PKCS-v1.5 using SHA-256
    RS384 = "RS384" // RSASSA-PKCS-v1.5 using SHA-384
    RS512 = "RS512" // RSASSA-PKCS-v1.5 using SHA-512
    ES256 = "ES256" // ECDSA using P-256 and SHA-256
    ES384 = "ES384" // ECDSA using P-384 and SHA-384
    ES512 = "ES512" // ECDSA using P-521 and SHA-512
    PS256 = "PS256" // RSASSA-PSS using SHA256 and MGF1-SHA256
    PS384 = "PS384" // RSASSA-PSS using SHA384 and MGF1-SHA384
    PS512 = "PS512" // RSASSA-PSS using SHA512 and MGF1-SHA512
)
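The constants above are the signature algorithms an ID token verifier can be told to accept. A minimal sketch of pinning a verifier to a subset of them via the Config's SupportedSigningAlgs field; the issuer URL is a placeholder and the helper name is hypothetical:

```go
package sketch

import (
    "context"
    "log"

    oidc "github.com/coreos/go-oidc"
)

// newPinnedVerifier only accepts RS256- and ES256-signed ID tokens,
// using the constants defined in jose.go above.
func newPinnedVerifier(ctx context.Context, clientID string) *oidc.IDTokenVerifier {
    provider, err := oidc.NewProvider(ctx, "https://issuer.example.com") // placeholder issuer
    if err != nil {
        log.Fatal(err)
    }
    return provider.Verifier(&oidc.Config{
        ClientID:             clientID,
        SupportedSigningAlgs: []string{oidc.RS256, oidc.ES256},
    })
}
```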
vendor/github.com/coreos/go-oidc/jwks.go (generated, vendored, new file, 228 lines)
@ -0,0 +1,228 @@
|
|||
package oidc
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/pquerna/cachecontrol"
|
||||
jose "gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
// keysExpiryDelta is the allowed clock skew between a client and the OpenID Connect
|
||||
// server.
|
||||
//
|
||||
// When keys expire, they are valid for this amount of time after.
|
||||
//
|
||||
// If the keys have not expired, and an ID Token claims it was signed by a key not in
|
||||
// the cache, if and only if the keys expire in this amount of time, the keys will be
|
||||
// updated.
|
||||
const keysExpiryDelta = 30 * time.Second
|
||||
|
||||
// NewRemoteKeySet returns a KeySet that can validate JSON web tokens by using HTTP
|
||||
// GETs to fetch JSON web token sets hosted at a remote URL. This is automatically
|
||||
// used by NewProvider using the URLs returned by OpenID Connect discovery, but is
|
||||
// exposed for providers that don't support discovery or to prevent round trips to the
|
||||
// discovery URL.
|
||||
//
|
||||
// The returned KeySet is a long lived verifier that caches keys based on cache-control
|
||||
// headers. Reuse a common remote key set instead of creating new ones as needed.
|
||||
//
|
||||
// The behavior of the returned KeySet is undefined once the context is canceled.
|
||||
func NewRemoteKeySet(ctx context.Context, jwksURL string) KeySet {
|
||||
return newRemoteKeySet(ctx, jwksURL, time.Now)
|
||||
}
|
||||
|
||||
func newRemoteKeySet(ctx context.Context, jwksURL string, now func() time.Time) *remoteKeySet {
|
||||
if now == nil {
|
||||
now = time.Now
|
||||
}
|
||||
return &remoteKeySet{jwksURL: jwksURL, ctx: ctx, now: now}
|
||||
}
|
||||
|
||||
type remoteKeySet struct {
|
||||
jwksURL string
|
||||
ctx context.Context
|
||||
now func() time.Time
|
||||
|
||||
// guard all other fields
|
||||
mu sync.Mutex
|
||||
|
||||
// inflight suppresses parallel execution of updateKeys and allows
|
||||
// multiple goroutines to wait for its result.
|
||||
inflight *inflight
|
||||
|
||||
// A set of cached keys and their expiry.
|
||||
cachedKeys []jose.JSONWebKey
|
||||
expiry time.Time
|
||||
}
|
||||
|
||||
// inflight is used to wait on some in-flight request from multiple goroutines.
|
||||
type inflight struct {
|
||||
doneCh chan struct{}
|
||||
|
||||
keys []jose.JSONWebKey
|
||||
err error
|
||||
}
|
||||
|
||||
func newInflight() *inflight {
|
||||
return &inflight{doneCh: make(chan struct{})}
|
||||
}
|
||||
|
||||
// wait returns a channel that multiple goroutines can receive on. Once it returns
|
||||
// a value, the inflight request is done and result() can be inspected.
|
||||
func (i *inflight) wait() <-chan struct{} {
|
||||
return i.doneCh
|
||||
}
|
||||
|
||||
// done can only be called by a single goroutine. It records the result of the
|
||||
// inflight request and signals other goroutines that the result is safe to
|
||||
// inspect.
|
||||
func (i *inflight) done(keys []jose.JSONWebKey, err error) {
|
||||
i.keys = keys
|
||||
i.err = err
|
||||
close(i.doneCh)
|
||||
}
|
||||
|
||||
// result cannot be called until the wait() channel has returned a value.
|
||||
func (i *inflight) result() ([]jose.JSONWebKey, error) {
|
||||
return i.keys, i.err
|
||||
}
|
||||
|
||||
func (r *remoteKeySet) VerifySignature(ctx context.Context, jwt string) ([]byte, error) {
|
||||
jws, err := jose.ParseSigned(jwt)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt: %v", err)
|
||||
}
|
||||
return r.verify(ctx, jws)
|
||||
}
|
||||
|
||||
func (r *remoteKeySet) verify(ctx context.Context, jws *jose.JSONWebSignature) ([]byte, error) {
|
||||
// We don't support JWTs signed with multiple signatures.
|
||||
keyID := ""
|
||||
for _, sig := range jws.Signatures {
|
||||
keyID = sig.Header.KeyID
|
||||
break
|
||||
}
|
||||
|
||||
keys, expiry := r.keysFromCache()
|
||||
|
||||
// Don't check expiry yet. This optimizes for when the provider is unavailable.
|
||||
for _, key := range keys {
|
||||
if keyID == "" || key.KeyID == keyID {
|
||||
if payload, err := jws.Verify(&key); err == nil {
|
||||
return payload, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !r.now().Add(keysExpiryDelta).After(expiry) {
|
||||
// Keys haven't expired, don't refresh.
|
||||
return nil, errors.New("failed to verify id token signature")
|
||||
}
|
||||
|
||||
keys, err := r.keysFromRemote(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("fetching keys %v", err)
|
||||
}
|
||||
|
||||
for _, key := range keys {
|
||||
if keyID == "" || key.KeyID == keyID {
|
||||
if payload, err := jws.Verify(&key); err == nil {
|
||||
return payload, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, errors.New("failed to verify id token signature")
|
||||
}
|
||||
|
||||
func (r *remoteKeySet) keysFromCache() (keys []jose.JSONWebKey, expiry time.Time) {
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
return r.cachedKeys, r.expiry
|
||||
}
|
||||
|
||||
// keysFromRemote syncs the key set from the remote set, records the values in the
|
||||
// cache, and returns the key set.
|
||||
func (r *remoteKeySet) keysFromRemote(ctx context.Context) ([]jose.JSONWebKey, error) {
|
||||
// Need to lock to inspect the inflight request field.
|
||||
r.mu.Lock()
|
||||
// If there's not a current inflight request, create one.
|
||||
if r.inflight == nil {
|
||||
r.inflight = newInflight()
|
||||
|
||||
// This goroutine has exclusive ownership over the current inflight
|
||||
// request. It releases the resource by nil'ing the inflight field
|
||||
// once the goroutine is done.
|
||||
go func() {
|
||||
// Sync keys and finish inflight when that's done.
|
||||
keys, expiry, err := r.updateKeys()
|
||||
|
||||
r.inflight.done(keys, err)
|
||||
|
||||
// Lock to update the keys and indicate that there is no longer an
|
||||
// inflight request.
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
|
||||
if err == nil {
|
||||
r.cachedKeys = keys
|
||||
r.expiry = expiry
|
||||
}
|
||||
|
||||
// Free inflight so a different request can run.
|
||||
r.inflight = nil
|
||||
}()
|
||||
}
|
||||
inflight := r.inflight
|
||||
r.mu.Unlock()
|
||||
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil, ctx.Err()
|
||||
case <-inflight.wait():
|
||||
return inflight.result()
|
||||
}
|
||||
}
|
||||
|
||||
func (r *remoteKeySet) updateKeys() ([]jose.JSONWebKey, time.Time, error) {
|
||||
req, err := http.NewRequest("GET", r.jwksURL, nil)
|
||||
if err != nil {
|
||||
return nil, time.Time{}, fmt.Errorf("oidc: can't create request: %v", err)
|
||||
}
|
||||
|
||||
resp, err := doRequest(r.ctx, req)
|
||||
if err != nil {
|
||||
return nil, time.Time{}, fmt.Errorf("oidc: get keys failed %v", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, time.Time{}, fmt.Errorf("unable to read response body: %v", err)
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, time.Time{}, fmt.Errorf("oidc: get keys failed: %s %s", resp.Status, body)
|
||||
}
|
||||
|
||||
var keySet jose.JSONWebKeySet
|
||||
err = unmarshalResp(resp, body, &keySet)
|
||||
if err != nil {
|
||||
return nil, time.Time{}, fmt.Errorf("oidc: failed to decode keys: %v %s", err, body)
|
||||
}
|
||||
|
||||
// If the server doesn't provide cache control headers, assume the
|
||||
// keys expire immediately.
|
||||
expiry := r.now()
|
||||
|
||||
_, e, err := cachecontrol.CachableResponse(req, resp, cachecontrol.Options{})
|
||||
if err == nil && e.After(expiry) {
|
||||
expiry = e
|
||||
}
|
||||
return keySet.Keys, expiry, nil
|
||||
}
|
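As its doc comment notes, jwks.go exposes NewRemoteKeySet for providers that don't support discovery, or to avoid the round trip to the discovery URL. A minimal sketch of pairing it with the package's NewVerifier constructor (defined in verify.go, which is not shown in this excerpt of the diff); both URLs are placeholders:

```go
package sketch

import (
    "context"

    oidc "github.com/coreos/go-oidc"
)

// verifierWithoutDiscovery builds an ID token verifier from a known JWKS URL
// instead of going through /.well-known/openid-configuration.
func verifierWithoutDiscovery(ctx context.Context, clientID string) *oidc.IDTokenVerifier {
    keySet := oidc.NewRemoteKeySet(ctx, "https://issuer.example.com/keys") // placeholder JWKS URL
    return oidc.NewVerifier("https://issuer.example.com", keySet, &oidc.Config{ClientID: clientID})
}
```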
vendor/github.com/coreos/go-oidc/jwks_test.go (generated, vendored, new file, 250 lines)
@ -0,0 +1,250 @@
|
|||
package oidc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
jose "gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
type keyServer struct {
|
||||
keys jose.JSONWebKeySet
|
||||
setHeaders func(h http.Header)
|
||||
}
|
||||
|
||||
func (k *keyServer) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
if k.setHeaders != nil {
|
||||
k.setHeaders(w.Header())
|
||||
}
|
||||
if err := json.NewEncoder(w).Encode(k.keys); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
type signingKey struct {
|
||||
keyID string // optional
|
||||
priv interface{}
|
||||
pub interface{}
|
||||
alg jose.SignatureAlgorithm
|
||||
}
|
||||
|
||||
// sign creates a JWS using the private key from the provided payload.
|
||||
func (s *signingKey) sign(t *testing.T, payload []byte) string {
|
||||
privKey := &jose.JSONWebKey{Key: s.priv, Algorithm: string(s.alg), KeyID: s.keyID}
|
||||
|
||||
signer, err := jose.NewSigner(jose.SigningKey{Algorithm: s.alg, Key: privKey}, nil)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
jws, err := signer.Sign(payload)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
data, err := jws.CompactSerialize()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return data
|
||||
}
|
||||
|
||||
// jwk returns the public part of the signing key.
|
||||
func (s *signingKey) jwk() jose.JSONWebKey {
|
||||
return jose.JSONWebKey{Key: s.pub, Use: "sig", Algorithm: string(s.alg), KeyID: s.keyID}
|
||||
}
|
||||
|
||||
func newRSAKey(t *testing.T) *signingKey {
|
||||
priv, err := rsa.GenerateKey(rand.Reader, 1028)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return &signingKey{"", priv, priv.Public(), jose.RS256}
|
||||
}
|
||||
|
||||
func newECDSAKey(t *testing.T) *signingKey {
|
||||
priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return &signingKey{"", priv, priv.Public(), jose.ES256}
|
||||
}
|
||||
|
||||
func TestRSAVerify(t *testing.T) {
|
||||
good := newRSAKey(t)
|
||||
bad := newRSAKey(t)
|
||||
|
||||
testKeyVerify(t, good, bad, good)
|
||||
}
|
||||
|
||||
func TestECDSAVerify(t *testing.T) {
|
||||
good := newECDSAKey(t)
|
||||
bad := newECDSAKey(t)
|
||||
testKeyVerify(t, good, bad, good)
|
||||
}
|
||||
|
||||
func TestMultipleKeysVerify(t *testing.T) {
|
||||
key1 := newRSAKey(t)
|
||||
key2 := newRSAKey(t)
|
||||
bad := newECDSAKey(t)
|
||||
|
||||
key1.keyID = "key1"
|
||||
key2.keyID = "key2"
|
||||
bad.keyID = "key3"
|
||||
|
||||
testKeyVerify(t, key2, bad, key1, key2)
|
||||
}
|
||||
|
||||
func TestMismatchedKeyID(t *testing.T) {
|
||||
key1 := newRSAKey(t)
|
||||
key2 := newRSAKey(t)
|
||||
|
||||
// shallow copy
|
||||
bad := new(signingKey)
|
||||
*bad = *key1
|
||||
|
||||
// The bad key is a valid key this time, but has a different Key ID.
|
||||
// It shouldn't match key1 because of the mismatched ID, even though
|
||||
// it would confirm the signature just fine.
|
||||
bad.keyID = "key3"
|
||||
|
||||
key1.keyID = "key1"
|
||||
key2.keyID = "key2"
|
||||
|
||||
testKeyVerify(t, key2, bad, key1, key2)
|
||||
}
|
||||
|
||||
func testKeyVerify(t *testing.T, good, bad *signingKey, verification ...*signingKey) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
keySet := jose.JSONWebKeySet{}
|
||||
for _, v := range verification {
|
||||
keySet.Keys = append(keySet.Keys, v.jwk())
|
||||
}
|
||||
|
||||
payload := []byte("a secret")
|
||||
|
||||
jws, err := jose.ParseSigned(good.sign(t, payload))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
badJWS, err := jose.ParseSigned(bad.sign(t, payload))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
s := httptest.NewServer(&keyServer{keys: keySet})
|
||||
defer s.Close()
|
||||
|
||||
rks := newRemoteKeySet(ctx, s.URL, nil)
|
||||
|
||||
// Ensure the token verifies.
|
||||
gotPayload, err := rks.verify(ctx, jws)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !bytes.Equal(gotPayload, payload) {
|
||||
t.Errorf("expected payload %s got %s", payload, gotPayload)
|
||||
}
|
||||
|
||||
// Ensure the token verifies from the cache.
|
||||
gotPayload, err = rks.verify(ctx, jws)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !bytes.Equal(gotPayload, payload) {
|
||||
t.Errorf("expected payload %s got %s", payload, gotPayload)
|
||||
}
|
||||
|
||||
// Ensure item signed by wrong token doesn't verify.
|
||||
if _, err := rks.verify(context.Background(), badJWS); err == nil {
|
||||
t.Errorf("incorrectly verified signature")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCacheControl(t *testing.T) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
key1 := newRSAKey(t)
|
||||
key2 := newRSAKey(t)
|
||||
|
||||
key1.keyID = "key1"
|
||||
key2.keyID = "key2"
|
||||
|
||||
payload := []byte("a secret")
|
||||
jws1, err := jose.ParseSigned(key1.sign(t, payload))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
jws2, err := jose.ParseSigned(key2.sign(t, payload))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
cacheForSeconds := 1200
|
||||
now := time.Now()
|
||||
|
||||
server := &keyServer{
|
||||
keys: jose.JSONWebKeySet{
|
||||
Keys: []jose.JSONWebKey{key1.jwk()},
|
||||
},
|
||||
setHeaders: func(h http.Header) {
|
||||
h.Set("Cache-Control", "max-age="+strconv.Itoa(cacheForSeconds))
|
||||
},
|
||||
}
|
||||
s := httptest.NewServer(server)
|
||||
defer s.Close()
|
||||
|
||||
rks := newRemoteKeySet(ctx, s.URL, func() time.Time { return now })
|
||||
|
||||
if _, err := rks.verify(ctx, jws1); err != nil {
|
||||
t.Errorf("failed to verify valid signature: %v", err)
|
||||
}
|
||||
if _, err := rks.verify(ctx, jws2); err == nil {
|
||||
t.Errorf("incorrectly verified signature")
|
||||
}
|
||||
|
||||
// Add second key to public list.
|
||||
server.keys = jose.JSONWebKeySet{
|
||||
Keys: []jose.JSONWebKey{key1.jwk(), key2.jwk()},
|
||||
}
|
||||
|
||||
if _, err := rks.verify(ctx, jws1); err != nil {
|
||||
t.Errorf("failed to verify valid signature: %v", err)
|
||||
}
|
||||
if _, err := rks.verify(ctx, jws2); err == nil {
|
||||
t.Errorf("incorrectly verified signature, still within cache limit")
|
||||
}
|
||||
|
||||
// Move time forward. Remote key set should not query the remote server.
|
||||
now = now.Add(time.Duration(cacheForSeconds) * time.Second)
|
||||
|
||||
if _, err := rks.verify(ctx, jws1); err != nil {
|
||||
t.Errorf("failed to verify valid signature: %v", err)
|
||||
}
|
||||
if _, err := rks.verify(ctx, jws2); err != nil {
|
||||
t.Errorf("failed to verify valid signature: %v", err)
|
||||
}
|
||||
|
||||
// Kill server and move time forward again. Keys should still verify.
|
||||
s.Close()
|
||||
now = now.Add(time.Duration(cacheForSeconds) * time.Second)
|
||||
|
||||
if _, err := rks.verify(ctx, jws1); err != nil {
|
||||
t.Errorf("failed to verify valid signature: %v", err)
|
||||
}
|
||||
if _, err := rks.verify(ctx, jws2); err != nil {
|
||||
t.Errorf("failed to verify valid signature: %v", err)
|
||||
}
|
||||
}
|
vendor/github.com/coreos/go-oidc/oidc.go (generated, vendored, new file, 374 lines)
@ -0,0 +1,374 @@
|
|||
// Package oidc implements OpenID Connect client logic for the golang.org/x/oauth2 package.
|
||||
package oidc
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"crypto/sha512"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"hash"
|
||||
"io/ioutil"
|
||||
"mime"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"golang.org/x/oauth2"
|
||||
jose "gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
const (
|
||||
// ScopeOpenID is the mandatory scope for all OpenID Connect OAuth2 requests.
|
||||
ScopeOpenID = "openid"
|
||||
|
||||
// ScopeOfflineAccess is an optional scope defined by OpenID Connect for requesting
|
||||
// OAuth2 refresh tokens.
|
||||
//
|
||||
// Support for this scope differs between OpenID Connect providers. For instance
|
||||
// Google rejects it, favoring appending "access_type=offline" as part of the
|
||||
// authorization request instead.
|
||||
//
|
||||
// See: https://openid.net/specs/openid-connect-core-1_0.html#OfflineAccess
|
||||
ScopeOfflineAccess = "offline_access"
|
||||
)
|
||||
|
||||
var (
|
||||
errNoAtHash = errors.New("id token did not have an access token hash")
|
||||
errInvalidAtHash = errors.New("access token hash does not match value in ID token")
|
||||
)
|
||||
|
||||
// ClientContext returns a new Context that carries the provided HTTP client.
|
||||
//
|
||||
// This method sets the same context key used by the golang.org/x/oauth2 package,
|
||||
// so the returned context works for that package too.
|
||||
//
|
||||
// myClient := &http.Client{}
|
||||
// ctx := oidc.ClientContext(parentContext, myClient)
|
||||
//
|
||||
// // This will use the custom client
|
||||
// provider, err := oidc.NewProvider(ctx, "https://accounts.example.com")
|
||||
//
|
||||
func ClientContext(ctx context.Context, client *http.Client) context.Context {
|
||||
return context.WithValue(ctx, oauth2.HTTPClient, client)
|
||||
}
|
||||
|
||||
func doRequest(ctx context.Context, req *http.Request) (*http.Response, error) {
|
||||
client := http.DefaultClient
|
||||
if c, ok := ctx.Value(oauth2.HTTPClient).(*http.Client); ok {
|
||||
client = c
|
||||
}
|
||||
return client.Do(req.WithContext(ctx))
|
||||
}
|
||||
|
||||
// Provider represents an OpenID Connect server's configuration.
|
||||
type Provider struct {
|
||||
issuer string
|
||||
authURL string
|
||||
tokenURL string
|
||||
userInfoURL string
|
||||
|
||||
// Raw claims returned by the server.
|
||||
rawClaims []byte
|
||||
|
||||
remoteKeySet KeySet
|
||||
}
|
||||
|
||||
type cachedKeys struct {
|
||||
keys []jose.JSONWebKey
|
||||
expiry time.Time
|
||||
}
|
||||
|
||||
type providerJSON struct {
|
||||
Issuer string `json:"issuer"`
|
||||
AuthURL string `json:"authorization_endpoint"`
|
||||
TokenURL string `json:"token_endpoint"`
|
||||
JWKSURL string `json:"jwks_uri"`
|
||||
UserInfoURL string `json:"userinfo_endpoint"`
|
||||
}
|
||||
|
||||
// NewProvider uses the OpenID Connect discovery mechanism to construct a Provider.
|
||||
//
|
||||
// The issuer is the URL identifier for the service. For example: "https://accounts.google.com"
|
||||
// or "https://login.salesforce.com".
|
||||
func NewProvider(ctx context.Context, issuer string) (*Provider, error) {
|
||||
wellKnown := strings.TrimSuffix(issuer, "/") + "/.well-known/openid-configuration"
|
||||
req, err := http.NewRequest("GET", wellKnown, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp, err := doRequest(ctx, req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to read response body: %v", err)
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("%s: %s", resp.Status, body)
|
||||
}
|
||||
|
||||
var p providerJSON
|
||||
err = unmarshalResp(resp, body, &p)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: failed to decode provider discovery object: %v", err)
|
||||
}
|
||||
|
||||
if p.Issuer != issuer {
|
||||
return nil, fmt.Errorf("oidc: issuer did not match the issuer returned by provider, expected %q got %q", issuer, p.Issuer)
|
||||
}
|
||||
return &Provider{
|
||||
issuer: p.Issuer,
|
||||
authURL: p.AuthURL,
|
||||
tokenURL: p.TokenURL,
|
||||
userInfoURL: p.UserInfoURL,
|
||||
rawClaims: body,
|
||||
remoteKeySet: NewRemoteKeySet(ctx, p.JWKSURL),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Claims unmarshals raw fields returned by the server during discovery.
|
||||
//
|
||||
// var claims struct {
|
||||
// ScopesSupported []string `json:"scopes_supported"`
|
||||
// ClaimsSupported []string `json:"claims_supported"`
|
||||
// }
|
||||
//
|
||||
// if err := provider.Claims(&claims); err != nil {
|
||||
// // handle unmarshaling error
|
||||
// }
|
||||
//
|
||||
// For a list of fields defined by the OpenID Connect spec see:
|
||||
// https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata
|
||||
func (p *Provider) Claims(v interface{}) error {
|
||||
if p.rawClaims == nil {
|
||||
return errors.New("oidc: claims not set")
|
||||
}
|
||||
return json.Unmarshal(p.rawClaims, v)
|
||||
}
|
||||
|
||||
// Endpoint returns the OAuth2 auth and token endpoints for the given provider.
|
||||
func (p *Provider) Endpoint() oauth2.Endpoint {
|
||||
return oauth2.Endpoint{AuthURL: p.authURL, TokenURL: p.tokenURL}
|
||||
}
|
||||
|
||||
// UserInfo represents the OpenID Connect userinfo claims.
|
||||
type UserInfo struct {
|
||||
Subject string `json:"sub"`
|
||||
Profile string `json:"profile"`
|
||||
Email string `json:"email"`
|
||||
EmailVerified bool `json:"email_verified"`
|
||||
|
||||
claims []byte
|
||||
}
|
||||
|
||||
// Claims unmarshals the raw JSON object claims into the provided object.
|
||||
func (u *UserInfo) Claims(v interface{}) error {
|
||||
if u.claims == nil {
|
||||
return errors.New("oidc: claims not set")
|
||||
}
|
||||
return json.Unmarshal(u.claims, v)
|
||||
}
|
||||
|
||||
// UserInfo uses the token source to query the provider's user info endpoint.
|
||||
func (p *Provider) UserInfo(ctx context.Context, tokenSource oauth2.TokenSource) (*UserInfo, error) {
|
||||
if p.userInfoURL == "" {
|
||||
return nil, errors.New("oidc: user info endpoint is not supported by this provider")
|
||||
}
|
||||
|
||||
req, err := http.NewRequest("GET", p.userInfoURL, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: create GET request: %v", err)
|
||||
}
|
||||
|
||||
token, err := tokenSource.Token()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: get access token: %v", err)
|
||||
}
|
||||
token.SetAuthHeader(req)
|
||||
|
||||
resp, err := doRequest(ctx, req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("%s: %s", resp.Status, body)
|
||||
}
|
||||
|
||||
var userInfo UserInfo
|
||||
if err := json.Unmarshal(body, &userInfo); err != nil {
|
||||
return nil, fmt.Errorf("oidc: failed to decode userinfo: %v", err)
|
||||
}
|
||||
userInfo.claims = body
|
||||
return &userInfo, nil
|
||||
}
|
||||
|
||||
// IDToken is an OpenID Connect extension that provides a predictable representation
|
||||
// of an authorization event.
|
||||
//
|
||||
// The ID Token only holds fields OpenID Connect requires. To access additional
|
||||
// claims returned by the server, use the Claims method.
|
||||
type IDToken struct {
|
||||
// The URL of the server which issued this token. OpenID Connect
|
||||
// requires this value always be identical to the URL used for
|
||||
// initial discovery.
|
||||
//
|
||||
// Note: Because of a known issue with Google Accounts' implementation
|
||||
// this value may differ when using Google.
|
||||
//
|
||||
// See: https://developers.google.com/identity/protocols/OpenIDConnect#obtainuserinfo
|
||||
Issuer string
|
||||
|
||||
// The client ID, or set of client IDs, that this token is issued for. For
|
||||
// common uses, this is the client that initialized the auth flow.
|
||||
//
|
||||
// This package ensures the audience contains an expected value.
|
||||
Audience []string
|
||||
|
||||
// A unique string which identifies the end user.
|
||||
Subject string
|
||||
|
||||
// Expiry of the token. This package will not process tokens that have
|
||||
// expired unless that validation is explicitly turned off.
|
||||
Expiry time.Time
|
||||
// When the token was issued by the provider.
|
||||
IssuedAt time.Time
|
||||
|
||||
// Initial nonce provided during the authentication redirect.
|
||||
//
|
||||
// This package does NOT provide verification of the value of this field
|
||||
// and it's the user's responsibility to ensure it contains a valid value.
|
||||
Nonce string
|
||||
|
||||
// at_hash claim, if set in the ID token. Callers can verify an access token
|
||||
// that corresponds to the ID token using the VerifyAccessToken method.
|
||||
AccessTokenHash string
|
||||
|
||||
// signature algorithm used for ID token, needed to compute a verification hash of an
|
||||
// access token
|
||||
sigAlgorithm string
|
||||
|
||||
// Raw payload of the id_token.
|
||||
claims []byte
|
||||
}
|
||||
|
||||
// Claims unmarshals the raw JSON payload of the ID Token into a provided struct.
|
||||
//
|
||||
// idToken, err := idTokenVerifier.Verify(rawIDToken)
|
||||
// if err != nil {
|
||||
// // handle error
|
||||
// }
|
||||
// var claims struct {
|
||||
// Email string `json:"email"`
|
||||
// EmailVerified bool `json:"email_verified"`
|
||||
// }
|
||||
// if err := idToken.Claims(&claims); err != nil {
|
||||
// // handle error
|
||||
// }
|
||||
//
|
||||
func (i *IDToken) Claims(v interface{}) error {
|
||||
if i.claims == nil {
|
||||
return errors.New("oidc: claims not set")
|
||||
}
|
||||
return json.Unmarshal(i.claims, v)
|
||||
}
|
||||
|
||||
// VerifyAccessToken verifies that the hash of the access token that corresponds to the ID token
|
||||
// matches the hash in the id token. It returns an error if the hashes don't match.
|
||||
// It is the caller's responsibility to ensure that the optional access token hash is present for the ID token
|
||||
// before calling this method. See https://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken
|
||||
func (i *IDToken) VerifyAccessToken(accessToken string) error {
|
||||
if i.AccessTokenHash == "" {
|
||||
return errNoAtHash
|
||||
}
|
||||
var h hash.Hash
|
||||
switch i.sigAlgorithm {
|
||||
case RS256, ES256, PS256:
|
||||
h = sha256.New()
|
||||
case RS384, ES384, PS384:
|
||||
h = sha512.New384()
|
||||
case RS512, ES512, PS512:
|
||||
h = sha512.New()
|
||||
default:
|
||||
return fmt.Errorf("oidc: unsupported signing algorithm %q", i.sigAlgorithm)
|
||||
}
|
||||
h.Write([]byte(accessToken)) // hash documents that Write will never return an error
|
||||
sum := h.Sum(nil)[:h.Size()/2]
|
||||
actual := base64.RawURLEncoding.EncodeToString(sum)
|
||||
if actual != i.AccessTokenHash {
|
||||
return errInvalidAtHash
|
||||
}
|
||||
return nil
|
||||
}
|
||||
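// Illustrative sketch (an editor's assumption, not part of the upstream file):
// for RS256 the at_hash value checked above is the base64url encoding of the
// left-most half of SHA-256(access_token), roughly:
//
//	sum := sha256.Sum256([]byte(accessToken))
//	atHash := base64.RawURLEncoding.EncodeToString(sum[:len(sum)/2])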
|
||||
type idToken struct {
|
||||
Issuer string `json:"iss"`
|
||||
Subject string `json:"sub"`
|
||||
Audience audience `json:"aud"`
|
||||
Expiry jsonTime `json:"exp"`
|
||||
IssuedAt jsonTime `json:"iat"`
|
||||
Nonce string `json:"nonce"`
|
||||
AtHash string `json:"at_hash"`
|
||||
}
|
||||
|
||||
type audience []string
|
||||
|
||||
func (a *audience) UnmarshalJSON(b []byte) error {
|
||||
var s string
|
||||
if json.Unmarshal(b, &s) == nil {
|
||||
*a = audience{s}
|
||||
return nil
|
||||
}
|
||||
var auds []string
|
||||
if err := json.Unmarshal(b, &auds); err != nil {
|
||||
return err
|
||||
}
|
||||
*a = audience(auds)
|
||||
return nil
|
||||
}
|
||||
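// Illustrative sketch (not from the upstream file): the two JSON shapes
// accepted by UnmarshalJSON above decode to the same slice type:
//
//	`"client1"`             -> audience{"client1"}
//	`["client1","client2"]` -> audience{"client1", "client2"}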
|
||||
type jsonTime time.Time
|
||||
|
||||
func (j *jsonTime) UnmarshalJSON(b []byte) error {
|
||||
var n json.Number
|
||||
if err := json.Unmarshal(b, &n); err != nil {
|
||||
return err
|
||||
}
|
||||
var unix int64
|
||||
|
||||
if t, err := n.Int64(); err == nil {
|
||||
unix = t
|
||||
} else {
|
||||
f, err := n.Float64()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
unix = int64(f)
|
||||
}
|
||||
*j = jsonTime(time.Unix(unix, 0))
|
||||
return nil
|
||||
}
|
||||
|
||||
func unmarshalResp(r *http.Response, body []byte, v interface{}) error {
|
||||
err := json.Unmarshal(body, &v)
|
||||
if err == nil {
|
||||
return nil
|
||||
}
|
||||
ct := r.Header.Get("Content-Type")
|
||||
mediaType, _, parseErr := mime.ParseMediaType(ct)
|
||||
if parseErr == nil && mediaType == "application/json" {
|
||||
return fmt.Errorf("got Content-Type = application/json, but could not unmarshal as JSON: %v", err)
|
||||
}
|
||||
return fmt.Errorf("expected Content-Type = application/json, got %q: %v", ct, err)
|
||||
}
|
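The pieces of oidc.go above (discovery, endpoint wiring, ID token claims, at_hash checking and userinfo) are normally combined as in the following sketch. This is an illustrative example rather than part of the vendored file; the issuer, client credentials, redirect URL and authorization code are placeholder values.

```go
package main

import (
	"context"
	"log"

	"github.com/coreos/go-oidc"
	"golang.org/x/oauth2"
)

func main() {
	ctx := context.Background()

	// Discover endpoints and keys from the issuer's well-known configuration.
	provider, err := oidc.NewProvider(ctx, "https://accounts.example.com")
	if err != nil {
		log.Fatal(err)
	}

	conf := oauth2.Config{
		ClientID:     "my-client-id",
		ClientSecret: "my-client-secret",
		Endpoint:     provider.Endpoint(),
		RedirectURL:  "https://app.example.com/callback",
		Scopes:       []string{oidc.ScopeOpenID, "email"},
	}
	verifier := provider.Verifier(&oidc.Config{ClientID: conf.ClientID})

	// "authorization-code" would normally come from the callback request.
	token, err := conf.Exchange(ctx, "authorization-code")
	if err != nil {
		log.Fatal(err)
	}
	rawIDToken, ok := token.Extra("id_token").(string)
	if !ok {
		log.Fatal("token response did not contain an id_token")
	}

	idToken, err := verifier.Verify(ctx, rawIDToken)
	if err != nil {
		log.Fatal(err)
	}
	// Optional: check the access token against the at_hash claim, if present.
	if err := idToken.VerifyAccessToken(token.AccessToken); err != nil {
		log.Printf("at_hash check failed: %v", err)
	}

	// Query the userinfo endpoint with the freshly obtained token.
	info, err := provider.UserInfo(ctx, oauth2.StaticTokenSource(token))
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("subject=%s email=%s", idToken.Subject, info.Email)
}
```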
95
vendor/github.com/coreos/go-oidc/oidc_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,95 @@
|
|||
package oidc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
const (
|
||||
// at_hash value and access_token returned by Google.
|
||||
googleAccessTokenHash = "piwt8oCH-K2D9pXlaS1Y-w"
|
||||
googleAccessToken = "ya29.CjHSA1l5WUn8xZ6HanHFzzdHdbXm-14rxnC7JHch9eFIsZkQEGoWzaYG4o7k5f6BnPLj"
|
||||
googleSigningAlg = RS256
|
||||
// the following values were computed by our own algorithm, for regression testing
|
||||
computed384TokenHash = "_ILKVQjbEzFKNJjUKC2kz9eReYi0A9Of"
|
||||
computed512TokenHash = "Spa_APgwBrarSeQbxI-rbragXho6dqFpH5x9PqaPfUI"
|
||||
)
|
||||
|
||||
type accessTokenTest struct {
|
||||
name string
|
||||
tok *IDToken
|
||||
accessToken string
|
||||
verifier func(err error) error
|
||||
}
|
||||
|
||||
func (a accessTokenTest) run(t *testing.T) {
|
||||
err := a.tok.VerifyAccessToken(a.accessToken)
|
||||
result := a.verifier(err)
|
||||
if result != nil {
|
||||
t.Error(result)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAccessTokenVerification(t *testing.T) {
|
||||
newToken := func(alg, atHash string) *IDToken {
|
||||
return &IDToken{sigAlgorithm: alg, AccessTokenHash: atHash}
|
||||
}
|
||||
assertNil := func(err error) error {
|
||||
if err != nil {
|
||||
return fmt.Errorf("want nil error, got %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
assertMsg := func(msg string) func(err error) error {
|
||||
return func(err error) error {
|
||||
if err == nil {
|
||||
return fmt.Errorf("expected error, got success")
|
||||
}
|
||||
if err.Error() != msg {
|
||||
return fmt.Errorf("bad error message, %q, (want %q)", err.Error(), msg)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
tests := []accessTokenTest{
|
||||
{
|
||||
"goodRS256",
|
||||
newToken(googleSigningAlg, googleAccessTokenHash),
|
||||
googleAccessToken,
|
||||
assertNil,
|
||||
},
|
||||
{
|
||||
"goodES384",
|
||||
newToken("ES384", computed384TokenHash),
|
||||
googleAccessToken,
|
||||
assertNil,
|
||||
},
|
||||
{
|
||||
"goodPS512",
|
||||
newToken("PS512", computed512TokenHash),
|
||||
googleAccessToken,
|
||||
assertNil,
|
||||
},
|
||||
{
|
||||
"badRS256",
|
||||
newToken("RS256", computed512TokenHash),
|
||||
googleAccessToken,
|
||||
assertMsg("access token hash does not match value in ID token"),
|
||||
},
|
||||
{
|
||||
"nohash",
|
||||
newToken("RS256", ""),
|
||||
googleAccessToken,
|
||||
assertMsg("id token did not have an access token hash"),
|
||||
},
|
||||
{
|
||||
"badSignAlgo",
|
||||
newToken("none", "xxx"),
|
||||
googleAccessToken,
|
||||
assertMsg(`oidc: unsupported signing algorithm "none"`),
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, test.run)
|
||||
}
|
||||
}
|
16
vendor/github.com/coreos/go-oidc/test
generated
vendored
Executable file
|
@@ -0,0 +1,16 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
# Filter out any files with a !golint build tag.
|
||||
LINTABLE=$( go list -tags=golint -f '
|
||||
{{- range $i, $file := .GoFiles -}}
|
||||
{{ $file }} {{ end }}
|
||||
{{ range $i, $file := .TestGoFiles -}}
|
||||
{{ $file }} {{ end }}' github.com/coreos/go-oidc )
|
||||
|
||||
go test -v -i -race github.com/coreos/go-oidc/...
|
||||
go test -v -race github.com/coreos/go-oidc/...
|
||||
golint -set_exit_status $LINTABLE
|
||||
go vet github.com/coreos/go-oidc/...
|
||||
go build -v ./example/...
|
243
vendor/github.com/coreos/go-oidc/verify.go
generated
vendored
Normal file
|
@@ -0,0 +1,243 @@
|
|||
package oidc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"golang.org/x/oauth2"
|
||||
jose "gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
const (
|
||||
issuerGoogleAccounts = "https://accounts.google.com"
|
||||
issuerGoogleAccountsNoScheme = "accounts.google.com"
|
||||
)
|
||||
|
||||
// KeySet is a set of public JSON Web Keys that can be used to validate the signature
|
||||
// of JSON web tokens. This is expected to be backed by a remote key set through
|
||||
// provider metadata discovery or an in-memory set of keys delivered out-of-band.
|
||||
type KeySet interface {
|
||||
// VerifySignature parses the JSON web token, verifies the signature, and returns
|
||||
// the raw payload. Header and claim fields are validated by other parts of the
|
||||
// package. For example, the KeySet does not need to check values such as signature
|
||||
// algorithm, issuer, and audience since the IDTokenVerifier validates these values
|
||||
// independently.
|
||||
//
|
||||
// If VerifySignature makes HTTP requests to verify the token, it's expected to
|
||||
// use any HTTP client associated with the context through ClientContext.
|
||||
VerifySignature(ctx context.Context, jwt string) (payload []byte, err error)
|
||||
}
|
||||
|
||||
// IDTokenVerifier provides verification for ID Tokens.
|
||||
type IDTokenVerifier struct {
|
||||
keySet KeySet
|
||||
config *Config
|
||||
issuer string
|
||||
}
|
||||
|
||||
// NewVerifier returns a verifier manually constructed from a key set and issuer URL.
|
||||
//
|
||||
// It's easier to use provider discovery to construct an IDTokenVerifier than creating
|
||||
// one directly. This constructor is intended to be used with providers that don't support
|
||||
// metadata discovery, or for avoiding round trips when the key set URL is already known.
|
||||
//
|
||||
// This constructor can be used to create a verifier directly using the issuer URL and
|
||||
// JSON Web Key Set URL without using discovery:
|
||||
//
|
||||
// keySet := oidc.NewRemoteKeySet(ctx, "https://www.googleapis.com/oauth2/v3/certs")
|
||||
// verifier := oidc.NewVerifier("https://accounts.google.com", keySet, config)
|
||||
//
|
||||
// Since KeySet is an interface, this constructor can also be used to supply custom
|
||||
// public key sources. For example, if a user wanted to supply public keys out-of-band
|
||||
// and hold them statically in-memory:
|
||||
//
|
||||
// // Custom KeySet implementation.
|
||||
// keySet := newStaticKeySet(publicKeys...)
|
||||
//
|
||||
// // Verifier uses the custom KeySet implementation.
|
||||
// verifier := oidc.NewVerifier("https://auth.example.com", keySet, config)
|
||||
//
|
||||
func NewVerifier(issuerURL string, keySet KeySet, config *Config) *IDTokenVerifier {
|
||||
return &IDTokenVerifier{keySet: keySet, config: config, issuer: issuerURL}
|
||||
}
|
||||
|
||||
// Config is the configuration for an IDTokenVerifier.
|
||||
type Config struct {
|
||||
// Expected audience of the token. For a majority of the cases this is expected to be
|
||||
// the ID of the client that initialized the login flow. It may occasionally differ if
|
||||
// the provider supports the authorizing party (azp) claim.
|
||||
//
|
||||
// If not provided, users must explicitly set SkipClientIDCheck.
|
||||
ClientID string
|
||||
// If specified, only this set of algorithms may be used to sign the JWT.
|
||||
//
|
||||
// Since many providers only support RS256, SupportedSigningAlgs defaults to this value.
|
||||
SupportedSigningAlgs []string
|
||||
|
||||
// If true, no ClientID check performed. Must be true if ClientID field is empty.
|
||||
SkipClientIDCheck bool
|
||||
// If true, token expiry is not checked.
|
||||
SkipExpiryCheck bool
|
||||
|
||||
// Time function to check Token expiry. Defaults to time.Now
|
||||
Now func() time.Time
|
||||
}
|
||||
|
||||
// Verifier returns an IDTokenVerifier that uses the provider's key set to verify JWTs.
|
||||
//
|
||||
// The returned IDTokenVerifier is tied to the Provider's context and its behavior is
|
||||
// undefined once the Provider's context is canceled.
|
||||
func (p *Provider) Verifier(config *Config) *IDTokenVerifier {
|
||||
return NewVerifier(p.issuer, p.remoteKeySet, config)
|
||||
}
|
||||
|
||||
func parseJWT(p string) ([]byte, error) {
|
||||
parts := strings.Split(p, ".")
|
||||
if len(parts) < 2 {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt, expected 3 parts got %d", len(parts))
|
||||
}
|
||||
payload, err := base64.RawURLEncoding.DecodeString(parts[1])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt payload: %v", err)
|
||||
}
|
||||
return payload, nil
|
||||
}
|
||||
|
||||
func contains(sli []string, ele string) bool {
|
||||
for _, s := range sli {
|
||||
if s == ele {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Verify parses a raw ID Token, verifies it's been signed by the provider, performs
|
||||
// any additional checks depending on the Config, and returns the payload.
|
||||
//
|
||||
// Verify does NOT do nonce validation, which is the caller's responsibility.
|
||||
//
|
||||
// See: https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation
|
||||
//
|
||||
// oauth2Token, err := oauth2Config.Exchange(ctx, r.URL.Query().Get("code"))
|
||||
// if err != nil {
|
||||
// // handle error
|
||||
// }
|
||||
//
|
||||
// // Extract the ID Token from oauth2 token.
|
||||
// rawIDToken, ok := oauth2Token.Extra("id_token").(string)
|
||||
// if !ok {
|
||||
// // handle error
|
||||
// }
|
||||
//
|
||||
// token, err := verifier.Verify(ctx, rawIDToken)
|
||||
//
|
||||
func (v *IDTokenVerifier) Verify(ctx context.Context, rawIDToken string) (*IDToken, error) {
|
||||
jws, err := jose.ParseSigned(rawIDToken)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt: %v", err)
|
||||
}
|
||||
|
||||
// Throw out tokens with invalid claims before trying to verify the token. This lets
|
||||
// us do cheap checks before possibly re-syncing keys.
|
||||
payload, err := parseJWT(rawIDToken)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt: %v", err)
|
||||
}
|
||||
var token idToken
|
||||
if err := json.Unmarshal(payload, &token); err != nil {
|
||||
return nil, fmt.Errorf("oidc: failed to unmarshal claims: %v", err)
|
||||
}
|
||||
|
||||
t := &IDToken{
|
||||
Issuer: token.Issuer,
|
||||
Subject: token.Subject,
|
||||
Audience: []string(token.Audience),
|
||||
Expiry: time.Time(token.Expiry),
|
||||
IssuedAt: time.Time(token.IssuedAt),
|
||||
Nonce: token.Nonce,
|
||||
AccessTokenHash: token.AtHash,
|
||||
claims: payload,
|
||||
}
|
||||
|
||||
// Check issuer.
|
||||
if t.Issuer != v.issuer {
|
||||
// Google sometimes returns "accounts.google.com" as the issuer claim instead of
|
||||
// the required "https://accounts.google.com". Detect this case and allow it only
|
||||
// for Google.
|
||||
//
|
||||
// We will not add hooks to let other providers go off spec like this.
|
||||
if !(v.issuer == issuerGoogleAccounts && t.Issuer == issuerGoogleAccountsNoScheme) {
|
||||
return nil, fmt.Errorf("oidc: id token issued by a different provider, expected %q got %q", v.issuer, t.Issuer)
|
||||
}
|
||||
}
|
||||
|
||||
// If a client ID has been provided, make sure it's part of the audience. SkipClientIDCheck must be true if ClientID is empty.
|
||||
//
|
||||
// This check DOES NOT ensure that the ClientID is the party to which the ID Token was issued (i.e. Authorized party).
|
||||
if !v.config.SkipClientIDCheck {
|
||||
if v.config.ClientID != "" {
|
||||
if !contains(t.Audience, v.config.ClientID) {
|
||||
return nil, fmt.Errorf("oidc: expected audience %q got %q", v.config.ClientID, t.Audience)
|
||||
}
|
||||
} else {
|
||||
return nil, fmt.Errorf("oidc: invalid configuration, clientID must be provided or SkipClientIDCheck must be set")
|
||||
}
|
||||
}
|
||||
|
||||
// If SkipExpiryCheck is false, make sure the token is not expired.
|
||||
if !v.config.SkipExpiryCheck {
|
||||
now := time.Now
|
||||
if v.config.Now != nil {
|
||||
now = v.config.Now
|
||||
}
|
||||
|
||||
if t.Expiry.Before(now()) {
|
||||
return nil, fmt.Errorf("oidc: token is expired (Token Expiry: %v)", t.Expiry)
|
||||
}
|
||||
}
|
||||
|
||||
switch len(jws.Signatures) {
|
||||
case 0:
|
||||
return nil, fmt.Errorf("oidc: id token not signed")
|
||||
case 1:
|
||||
default:
|
||||
return nil, fmt.Errorf("oidc: multiple signatures on id token not supported")
|
||||
}
|
||||
|
||||
sig := jws.Signatures[0]
|
||||
supportedSigAlgs := v.config.SupportedSigningAlgs
|
||||
if len(supportedSigAlgs) == 0 {
|
||||
supportedSigAlgs = []string{RS256}
|
||||
}
|
||||
|
||||
if !contains(supportedSigAlgs, sig.Header.Algorithm) {
|
||||
return nil, fmt.Errorf("oidc: id token signed with unsupported algorithm, expected %q got %q", supportedSigAlgs, sig.Header.Algorithm)
|
||||
}
|
||||
|
||||
t.sigAlgorithm = sig.Header.Algorithm
|
||||
|
||||
gotPayload, err := v.keySet.VerifySignature(ctx, rawIDToken)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to verify signature: %v", err)
|
||||
}
|
||||
|
||||
// Ensure that the payload returned by the square/go-jose library actually matches the payload parsed earlier.
|
||||
if !bytes.Equal(gotPayload, payload) {
|
||||
return nil, errors.New("oidc: internal error, payload parsed did not match previous payload")
|
||||
}
|
||||
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// Nonce returns an auth code option which requires the ID Token created by the
|
||||
// OpenID Connect provider to contain the specified nonce.
|
||||
func Nonce(nonce string) oauth2.AuthCodeOption {
|
||||
return oauth2.SetAuthURLParam("nonce", nonce)
|
||||
}
|
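For deployments that cannot use discovery, the NewVerifier and Config comments above describe building a verifier directly from a key set. Below is a minimal, hedged sketch under assumed values; the issuer, JWKS URL, client ID and the pinned clock are placeholders chosen for illustration.

```go
package main

import (
	"context"
	"log"
	"time"

	"github.com/coreos/go-oidc"
)

func main() {
	ctx := context.Background()

	// Fetch keys directly from a known JWKS URL instead of using discovery.
	keySet := oidc.NewRemoteKeySet(ctx, "https://issuer.example.com/jwks")

	verifier := oidc.NewVerifier("https://issuer.example.com", keySet, &oidc.Config{
		ClientID:             "my-client-id",
		SupportedSigningAlgs: []string{oidc.RS256, oidc.ES256},
		// Pinning Now makes the expiry check deterministic, e.g. in tests.
		Now: func() time.Time { return time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC) },
	})

	// rawIDToken is a placeholder for the id_token string from a token response.
	rawIDToken := "<raw id_token>"
	idToken, err := verifier.Verify(ctx, rawIDToken)
	if err != nil {
		log.Fatalf("verification failed: %v", err)
	}
	log.Printf("issuer=%s subject=%s audience=%v", idToken.Issuer, idToken.Subject, idToken.Audience)
}
```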
272
vendor/github.com/coreos/go-oidc/verify_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,272 @@
|
|||
package oidc
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
jose "gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
type testVerifier struct {
|
||||
jwk jose.JSONWebKey
|
||||
}
|
||||
|
||||
func (t *testVerifier) VerifySignature(ctx context.Context, jwt string) ([]byte, error) {
|
||||
jws, err := jose.ParseSigned(jwt)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("oidc: malformed jwt: %v", err)
|
||||
}
|
||||
return jws.Verify(&t.jwk)
|
||||
}
|
||||
|
||||
func TestVerify(t *testing.T) {
|
||||
tests := []verificationTest{
|
||||
{
|
||||
name: "good token",
|
||||
idToken: `{"iss":"https://foo"}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
},
|
||||
{
|
||||
name: "invalid issuer",
|
||||
issuer: "https://bar",
|
||||
idToken: `{"iss":"https://foo"}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "invalid sig",
|
||||
idToken: `{"iss":"https://foo"}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
verificationKey: newRSAKey(t),
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "google accounts without scheme",
|
||||
issuer: "https://accounts.google.com",
|
||||
idToken: `{"iss":"accounts.google.com"}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
},
|
||||
{
|
||||
name: "expired token",
|
||||
idToken: `{"iss":"https://foo","exp":` + strconv.FormatInt(time.Now().Add(-time.Hour).Unix(), 10) + `}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "unexpired token",
|
||||
idToken: `{"iss":"https://foo","exp":` + strconv.FormatInt(time.Now().Add(time.Hour).Unix(), 10) + `}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
},
|
||||
{
|
||||
name: "expiry as float",
|
||||
idToken: `{"iss":"https://foo","exp":` +
|
||||
strconv.FormatFloat(float64(time.Now().Add(time.Hour).Unix()), 'E', -1, 64) +
|
||||
`}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, test.run)
|
||||
}
|
||||
}
|
||||
|
||||
func TestVerifyAudience(t *testing.T) {
|
||||
tests := []verificationTest{
|
||||
{
|
||||
name: "good audience",
|
||||
idToken: `{"iss":"https://foo","aud":"client1"}`,
|
||||
config: Config{
|
||||
ClientID: "client1",
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
},
|
||||
{
|
||||
name: "mismatched audience",
|
||||
idToken: `{"iss":"https://foo","aud":"client2"}`,
|
||||
config: Config{
|
||||
ClientID: "client1",
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "multiple audiences, one matches",
|
||||
idToken: `{"iss":"https://foo","aud":["client1","client2"]}`,
|
||||
config: Config{
|
||||
ClientID: "client2",
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, test.run)
|
||||
}
|
||||
}
|
||||
|
||||
func TestVerifySigningAlg(t *testing.T) {
|
||||
tests := []verificationTest{
|
||||
{
|
||||
name: "default signing alg",
|
||||
idToken: `{"iss":"https://foo"}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
},
|
||||
{
|
||||
name: "bad signing alg",
|
||||
idToken: `{"iss":"https://foo"}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newECDSAKey(t),
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "ecdsa signing",
|
||||
idToken: `{"iss":"https://foo"}`,
|
||||
config: Config{
|
||||
SupportedSigningAlgs: []string{ES256},
|
||||
SkipClientIDCheck: true,
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newECDSAKey(t),
|
||||
},
|
||||
{
|
||||
name: "one of many supported",
|
||||
idToken: `{"iss":"https://foo"}`,
|
||||
config: Config{
|
||||
SkipClientIDCheck: true,
|
||||
SkipExpiryCheck: true,
|
||||
SupportedSigningAlgs: []string{RS256, ES256},
|
||||
},
|
||||
signKey: newECDSAKey(t),
|
||||
},
|
||||
{
|
||||
name: "not in requiredAlgs",
|
||||
idToken: `{"iss":"https://foo"}`,
|
||||
config: Config{
|
||||
SupportedSigningAlgs: []string{RS256, ES512},
|
||||
SkipClientIDCheck: true,
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newECDSAKey(t),
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, test.run)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAccessTokenHash(t *testing.T) {
|
||||
atHash := "piwt8oCH-K2D9pXlaS1Y-w"
|
||||
vt := verificationTest{
|
||||
name: "preserves token hash and sig algo",
|
||||
idToken: `{"iss":"https://foo","aud":"client1", "at_hash": "` + atHash + `"}`,
|
||||
config: Config{
|
||||
ClientID: "client1",
|
||||
SkipExpiryCheck: true,
|
||||
},
|
||||
signKey: newRSAKey(t),
|
||||
}
|
||||
t.Run("at_hash", func(t *testing.T) {
|
||||
tok := vt.runGetToken(t)
|
||||
if tok != nil {
|
||||
if tok.AccessTokenHash != atHash {
|
||||
t.Errorf("access token hash not preserved correctly, want %q got %q", atHash, tok.AccessTokenHash)
|
||||
}
|
||||
if tok.sigAlgorithm != RS256 {
|
||||
t.Errorf("invalid signature algo, want %q got %q", RS256, tok.sigAlgorithm)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
type verificationTest struct {
|
||||
// Name of the subtest.
|
||||
name string
|
||||
|
||||
// If not provided defaults to "https://foo"
|
||||
issuer string
|
||||
|
||||
// JWT payload (just the claims).
|
||||
idToken string
|
||||
|
||||
// Key to sign the ID Token with.
|
||||
signKey *signingKey
|
||||
// If not provided defaults to signKey. Only useful when
|
||||
// testing invalid signatures.
|
||||
verificationKey *signingKey
|
||||
|
||||
config Config
|
||||
wantErr bool
|
||||
}
|
||||
|
||||
func (v verificationTest) runGetToken(t *testing.T) *IDToken {
|
||||
token := v.signKey.sign(t, []byte(v.idToken))
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
issuer := "https://foo"
|
||||
if v.issuer != "" {
|
||||
issuer = v.issuer
|
||||
}
|
||||
var ks KeySet
|
||||
if v.verificationKey == nil {
|
||||
ks = &testVerifier{v.signKey.jwk()}
|
||||
} else {
|
||||
ks = &testVerifier{v.verificationKey.jwk()}
|
||||
}
|
||||
verifier := NewVerifier(issuer, ks, &v.config)
|
||||
|
||||
idToken, err := verifier.Verify(ctx, token)
|
||||
if err != nil {
|
||||
if !v.wantErr {
|
||||
t.Errorf("%s: verify %v", v.name, err)
|
||||
}
|
||||
} else {
|
||||
if v.wantErr {
|
||||
t.Errorf("%s: expected error", v.name)
|
||||
}
|
||||
}
|
||||
return idToken
|
||||
}
|
||||
|
||||
func (v verificationTest) run(t *testing.T) {
|
||||
v.runGetToken(t)
|
||||
}
|
10
vendor/github.com/pquerna/cachecontrol/.travis.yml
generated
vendored
Normal file
|
@@ -0,0 +1,10 @@
|
|||
language: go
|
||||
|
||||
install:
|
||||
- go get -d -v ./...
|
||||
- go get -u github.com/stretchr/testify/require
|
||||
|
||||
go:
|
||||
- 1.7
|
||||
- 1.8
|
||||
- tip
|
202
vendor/github.com/pquerna/cachecontrol/LICENSE
generated
vendored
Normal file
|
@@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
107
vendor/github.com/pquerna/cachecontrol/README.md
generated
vendored
Normal file
|
@@ -0,0 +1,107 @@
|
|||
# cachecontrol: HTTP Caching Parser and Interpretation
|
||||
|
||||
[![GoDoc](https://godoc.org/github.com/pquerna/cachecontrol?status.svg)](https://godoc.org/github.com/pquerna/cachecontrol)[![Build Status](https://travis-ci.org/pquerna/cachecontrol.svg?branch=master)](https://travis-ci.org/pquerna/cachecontrol)
|
||||
|
||||
|
||||
|
||||
`cachecontrol` implements [RFC 7234](http://tools.ietf.org/html/rfc7234) __Hypertext Transfer Protocol (HTTP/1.1): Caching__. It does this by parsing the `Cache-Control` and other headers, providing information about requests and responses -- but `cachecontrol` does not implement an actual cache backend, just the control plane to make decisions about whether a particular response is cachable.
|
||||
|
||||
# Usage
|
||||
|
||||
`cachecontrol.CachableResponse` returns an array of [reasons](https://godoc.org/github.com/pquerna/cachecontrol/cacheobject#Reason) why a response should not be cached and when it expires. In the case that `len(reasons) == 0`, the response is cachable according to the RFC. However, some people want non-compliant caches for various business use cases, so each reason is specifically named: if your cache wants to cache `POST` requests, it can easily do that while still being RFC compliant in other situations.
|
||||
|
||||
# Examples
|
||||
|
||||
## Can you cache Example.com?
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/pquerna/cachecontrol"
|
||||
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func main() {
|
||||
req, _ := http.NewRequest("GET", "http://www.example.com/", nil)
|
||||
|
||||
res, _ := http.DefaultClient.Do(req)
|
||||
_, _ = ioutil.ReadAll(res.Body)
|
||||
|
||||
reasons, expires, _ := cachecontrol.CachableResponse(req, res, cachecontrol.Options{})
|
||||
|
||||
fmt.Println("Reasons to not cache: ", reasons)
|
||||
fmt.Println("Expiration: ", expires.String())
|
||||
}
|
||||
```
|
||||
|
||||
## Can I use this in a high performance caching server?
|
||||
|
||||
`cachecontrol` is divided into two packages: `cachecontrol` with a high level API, and a lower level `cacheobject` package. Use [Object](https://godoc.org/github.com/pquerna/cachecontrol/cacheobject#Object) in a high performance use case where you have previously parsed headers containing dates or would like to avoid memory allocations.
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/pquerna/cachecontrol/cacheobject"
|
||||
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func main() {
|
||||
req, _ := http.NewRequest("GET", "http://www.example.com/", nil)
|
||||
|
||||
res, _ := http.DefaultClient.Do(req)
|
||||
_, _ = ioutil.ReadAll(res.Body)
|
||||
|
||||
reqDir, _ := cacheobject.ParseRequestCacheControl(req.Header.Get("Cache-Control"))
|
||||
|
||||
resDir, _ := cacheobject.ParseResponseCacheControl(res.Header.Get("Cache-Control"))
|
||||
expiresHeader, _ := http.ParseTime(res.Header.Get("Expires"))
|
||||
dateHeader, _ := http.ParseTime(res.Header.Get("Date"))
|
||||
lastModifiedHeader, _ := http.ParseTime(res.Header.Get("Last-Modified"))
|
||||
|
||||
obj := cacheobject.Object{
|
||||
RespDirectives: resDir,
|
||||
RespHeaders: res.Header,
|
||||
RespStatusCode: res.StatusCode,
|
||||
RespExpiresHeader: expiresHeader,
|
||||
RespDateHeader: dateHeader,
|
||||
RespLastModifiedHeader: lastModifiedHeader,
|
||||
|
||||
ReqDirectives: reqDir,
|
||||
ReqHeaders: req.Header,
|
||||
ReqMethod: req.Method,
|
||||
|
||||
NowUTC: time.Now().UTC(),
|
||||
}
|
||||
rv := cacheobject.ObjectResults{}
|
||||
|
||||
cacheobject.CachableObject(&obj, &rv)
|
||||
cacheobject.ExpirationObject(&obj, &rv)
|
||||
|
||||
fmt.Println("Errors: ", rv.OutErr)
|
||||
fmt.Println("Reasons to not cache: ", rv.OutReasons)
|
||||
fmt.Println("Warning headers to add: ", rv.OutWarnings)
|
||||
fmt.Println("Expiration: ", rv.OutExpirationTime.String())
|
||||
}
|
||||
```
|
||||
|
||||
## Improvements, bugs, adding features, and taking cachecontrol new directions!
|
||||
|
||||
Please [open issues in Github](https://github.com/pquerna/cachecontrol/issues) for ideas, bugs, and general thoughts. Pull requests are of course preferred :)
|
||||
|
||||
# Credits
|
||||
|
||||
`cachecontrol` has received significant contributions from:
|
||||
|
||||
* [Paul Querna](https://github.com/pquerna)
|
||||
|
||||
## License
|
||||
|
||||
`cachecontrol` is licensed under the [Apache License, Version 2.0](./LICENSE)
|
48
vendor/github.com/pquerna/cachecontrol/api.go
generated
vendored
Normal file
|
@@ -0,0 +1,48 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package cachecontrol
|
||||
|
||||
import (
|
||||
"github.com/pquerna/cachecontrol/cacheobject"
|
||||
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Options struct {
|
||||
// Set to True for a private cache, which is not shared among users (e.g., in a browser)
|
||||
// Set to False for a "shared" cache, which is more common in a server context.
|
||||
PrivateCache bool
|
||||
}
|
||||
|
||||
// Given an HTTP Request, the future Status Code, and an ResponseWriter,
|
||||
// determine the possible reasons a response SHOULD NOT be cached.
|
||||
func CachableResponseWriter(req *http.Request,
|
||||
statusCode int,
|
||||
resp http.ResponseWriter,
|
||||
opts Options) ([]cacheobject.Reason, time.Time, error) {
|
||||
return cacheobject.UsingRequestResponse(req, statusCode, resp.Header(), opts.PrivateCache)
|
||||
}
|
||||
|
||||
// Given an HTTP Request and Response, determine the possible reasons a response SHOULD NOT
|
||||
// be cached.
|
||||
func CachableResponse(req *http.Request,
|
||||
resp *http.Response,
|
||||
opts Options) ([]cacheobject.Reason, time.Time, error) {
|
||||
return cacheobject.UsingRequestResponse(req, resp.StatusCode, resp.Header, opts.PrivateCache)
|
||||
}
|
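CachableResponseWriter above is the server-side variant of CachableResponse. A hedged sketch of calling it from inside an http.Handler before the body is written; the route, header values and port are made up for illustration.

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/pquerna/cachecontrol"
)

func main() {
	http.HandleFunc("/data", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Cache-Control", "public, max-age=300")
		w.Header().Set("Content-Type", "application/json")

		// Decide, before writing the body, whether a shared cache could store
		// the response we are about to send.
		reasons, expires, err := cachecontrol.CachableResponseWriter(r, http.StatusOK, w, cachecontrol.Options{})
		if err != nil {
			log.Printf("cachecontrol: %v", err)
		} else {
			log.Printf("reasons=%v expires=%v", reasons, expires)
		}

		fmt.Fprintln(w, `{}`)
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```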
111
vendor/github.com/pquerna/cachecontrol/api_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,111 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package cachecontrol
|
||||
|
||||
import (
|
||||
"github.com/pquerna/cachecontrol/cacheobject"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func roundTrip(t *testing.T, fnc func(w http.ResponseWriter, r *http.Request)) (*http.Request, *http.Response) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(fnc))
|
||||
defer ts.Close()
|
||||
|
||||
req, err := http.NewRequest("GET", ts.URL, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
res, err := http.DefaultClient.Do(req)
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = ioutil.ReadAll(res.Body)
|
||||
res.Body.Close()
|
||||
require.NoError(t, err)
|
||||
return req, res
|
||||
}
|
||||
|
||||
func TestCachableResponsePublic(t *testing.T) {
|
||||
req, res := roundTrip(t, func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Header().Set("Cache-Control", "public")
|
||||
w.Header().Set("Last-Modified",
|
||||
time.Now().UTC().Add(time.Duration(time.Hour*-5)).Format(http.TimeFormat))
|
||||
fmt.Fprintln(w, `{}`)
|
||||
})
|
||||
|
||||
opts := Options{}
|
||||
reasons, expires, err := CachableResponse(req, res, opts)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, reasons, 0)
|
||||
require.WithinDuration(t,
|
||||
time.Now().UTC().Add(time.Duration(float64(time.Hour)*0.5)),
|
||||
expires,
|
||||
10*time.Second)
|
||||
}
|
||||
|
||||
func TestCachableResponsePrivate(t *testing.T) {
|
||||
req, res := roundTrip(t, func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Header().Set("Cache-Control", "private")
|
||||
fmt.Fprintln(w, `{}`)
|
||||
})
|
||||
|
||||
opts := Options{}
|
||||
reasons, expires, err := CachableResponse(req, res, opts)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, reasons, 1)
|
||||
require.Equal(t, reasons[0], cacheobject.ReasonResponsePrivate)
|
||||
require.Equal(t, time.Time{}, expires)
|
||||
|
||||
opts.PrivateCache = true
|
||||
reasons, expires, err = CachableResponse(req, res, opts)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, reasons, 0)
|
||||
require.Equal(t, time.Time{}, expires)
|
||||
}
|
||||
|
||||
func TestResponseWriter(t *testing.T) {
|
||||
var resp http.ResponseWriter
|
||||
var req *http.Request
|
||||
_, _ = roundTrip(t, func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Header().Set("Cache-Control", "private")
|
||||
fmt.Fprintln(w, `{}`)
|
||||
resp = w
|
||||
req = r
|
||||
})
|
||||
|
||||
opts := Options{}
|
||||
reasons, expires, err := CachableResponseWriter(req, 200, resp, opts)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, reasons, 1)
|
||||
require.Equal(t, reasons[0], cacheobject.ReasonResponsePrivate)
|
||||
require.Equal(t, time.Time{}, expires)
|
||||
|
||||
opts.PrivateCache = true
|
||||
reasons, expires, err = CachableResponseWriter(req, 200, resp, opts)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, reasons, 0)
|
||||
require.Equal(t, time.Time{}, expires)
|
||||
}
|
546
vendor/github.com/pquerna/cachecontrol/cacheobject/directive.go
generated
vendored
Normal file
|
@@ -0,0 +1,546 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package cacheobject
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"math"
|
||||
"net/http"
|
||||
"net/textproto"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// TODO(pquerna): add extensions from here: http://www.iana.org/assignments/http-cache-directives/http-cache-directives.xhtml
|
||||
|
||||
var (
|
||||
ErrQuoteMismatch = errors.New("Missing closing quote")
|
||||
ErrMaxAgeDeltaSeconds = errors.New("Failed to parse delta-seconds in `max-age`")
|
||||
ErrSMaxAgeDeltaSeconds = errors.New("Failed to parse delta-seconds in `s-maxage`")
|
||||
ErrMaxStaleDeltaSeconds = errors.New("Failed to parse delta-seconds in `max-stale`")
|
||||
ErrMinFreshDeltaSeconds = errors.New("Failed to parse delta-seconds in `min-fresh`")
|
||||
ErrNoCacheNoArgs = errors.New("Unexpected argument to `no-cache`")
|
||||
ErrNoStoreNoArgs = errors.New("Unexpected argument to `no-store`")
|
||||
ErrNoTransformNoArgs = errors.New("Unexpected argument to `no-transform`")
|
||||
ErrOnlyIfCachedNoArgs = errors.New("Unexpected argument to `only-if-cached`")
|
||||
ErrMustRevalidateNoArgs = errors.New("Unexpected argument to `must-revalidate`")
|
||||
ErrPublicNoArgs = errors.New("Unexpected argument to `public`")
|
||||
ErrProxyRevalidateNoArgs = errors.New("Unexpected argument to `proxy-revalidate`")
|
||||
// Experimental
|
||||
ErrImmutableNoArgs = errors.New("Unexpected argument to `immutable`")
|
||||
ErrStaleIfErrorDeltaSeconds = errors.New("Failed to parse delta-seconds in `stale-if-error`")
|
||||
ErrStaleWhileRevalidateDeltaSeconds = errors.New("Failed to parse delta-seconds in `stale-while-revalidate`")
|
||||
)
|
||||
|
||||
func whitespace(b byte) bool {
|
||||
if b == '\t' || b == ' ' {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func parse(value string, cd cacheDirective) error {
|
||||
var err error = nil
|
||||
i := 0
|
||||
|
||||
for i < len(value) && err == nil {
|
||||
// eat leading whitespace or commas
|
||||
if whitespace(value[i]) || value[i] == ',' {
|
||||
i++
|
||||
continue
|
||||
}
|
||||
|
||||
j := i + 1
|
||||
|
||||
for j < len(value) {
|
||||
if !isToken(value[j]) {
|
||||
break
|
||||
}
|
||||
j++
|
||||
}
|
||||
|
||||
token := strings.ToLower(value[i:j])
|
||||
tokenHasFields := hasFieldNames(token)
|
||||
/*
|
||||
println("GOT TOKEN:")
|
||||
println(" i -> ", i)
|
||||
println(" j -> ", j)
|
||||
println(" token -> ", token)
|
||||
*/
|
||||
|
||||
if j+1 < len(value) && value[j] == '=' {
|
||||
k := j + 1
|
||||
// minimum size two bytes of "", but we let httpUnquote handle it.
|
||||
if k < len(value) && value[k] == '"' {
|
||||
eaten, result := httpUnquote(value[k:])
|
||||
if eaten == -1 {
|
||||
return ErrQuoteMismatch
|
||||
}
|
||||
i = k + eaten
|
||||
|
||||
err = cd.addPair(token, result)
|
||||
} else {
|
||||
z := k
|
||||
for z < len(value) {
|
||||
if tokenHasFields {
|
||||
if whitespace(value[z]) {
|
||||
break
|
||||
}
|
||||
} else {
|
||||
if whitespace(value[z]) || value[z] == ',' {
|
||||
break
|
||||
}
|
||||
}
|
||||
z++
|
||||
}
|
||||
i = z
|
||||
|
||||
result := value[k:z]
|
||||
if result != "" && result[len(result)-1] == ',' {
|
||||
result = result[:len(result)-1]
|
||||
}
|
||||
|
||||
err = cd.addPair(token, result)
|
||||
}
|
||||
} else {
|
||||
if token != "," {
|
||||
err = cd.addToken(token)
|
||||
}
|
||||
i = j
|
||||
}
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// DeltaSeconds specifies a non-negative integer, representing
|
||||
// time in seconds: http://tools.ietf.org/html/rfc7234#section-1.2.1
|
||||
//
|
||||
// When set to -1, this means unset.
|
||||
//
|
||||
type DeltaSeconds int32
|
||||
|
||||
// Parser for delta-seconds, a uint31, more or less:
|
||||
// http://tools.ietf.org/html/rfc7234#section-1.2.1
|
||||
func parseDeltaSeconds(v string) (DeltaSeconds, error) {
|
||||
n, err := strconv.ParseUint(v, 10, 32)
|
||||
if err != nil {
|
||||
if numError, ok := err.(*strconv.NumError); ok {
|
||||
if numError.Err == strconv.ErrRange {
|
||||
return DeltaSeconds(math.MaxInt32), nil
|
||||
}
|
||||
}
|
||||
return DeltaSeconds(-1), err
|
||||
} else {
|
||||
if n > math.MaxInt32 {
|
||||
return DeltaSeconds(math.MaxInt32), nil
|
||||
} else {
|
||||
return DeltaSeconds(n), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fields present in a header.
|
||||
type FieldNames map[string]bool
|
||||
|
||||
// internal interface for shared methods of RequestCacheDirectives and ResponseCacheDirectives
|
||||
type cacheDirective interface {
|
||||
addToken(s string) error
|
||||
addPair(s string, v string) error
|
||||
}
|
||||
|
||||
// LOW LEVEL API: Representation of possible request directives in a `Cache-Control` header: http://tools.ietf.org/html/rfc7234#section-5.2.1
|
||||
//
|
||||
// Note: Many fields will be `nil` in practice.
|
||||
//
|
||||
type RequestCacheDirectives struct {
|
||||
|
||||
// max-age(delta seconds): http://tools.ietf.org/html/rfc7234#section-5.2.1.1
|
||||
//
|
||||
// The "max-age" request directive indicates that the client is
|
||||
// unwilling to accept a response whose age is greater than the
|
||||
// specified number of seconds. Unless the max-stale request directive
|
||||
// is also present, the client is not willing to accept a stale
|
||||
// response.
|
||||
MaxAge DeltaSeconds
|
||||
|
||||
// max-stale(delta seconds): http://tools.ietf.org/html/rfc7234#section-5.2.1.2
|
||||
//
|
||||
// The "max-stale" request directive indicates that the client is
|
||||
// willing to accept a response that has exceeded its freshness
|
||||
// lifetime. If max-stale is assigned a value, then the client is
|
||||
// willing to accept a response that has exceeded its freshness lifetime
|
||||
// by no more than the specified number of seconds. If no value is
|
||||
// assigned to max-stale, then the client is willing to accept a stale
|
||||
// response of any age.
|
||||
MaxStale DeltaSeconds
|
||||
|
||||
// min-fresh(delta seconds): http://tools.ietf.org/html/rfc7234#section-5.2.1.3
|
||||
//
|
||||
// The "min-fresh" request directive indicates that the client is
|
||||
// willing to accept a response whose freshness lifetime is no less than
|
||||
// its current age plus the specified time in seconds. That is, the
|
||||
// client wants a response that will still be fresh for at least the
|
||||
// specified number of seconds.
|
||||
MinFresh DeltaSeconds
|
||||
|
||||
// no-cache(bool): http://tools.ietf.org/html/rfc7234#section-5.2.1.4
|
||||
//
|
||||
// The "no-cache" request directive indicates that a cache MUST NOT use
|
||||
// a stored response to satisfy the request without successful
|
||||
// validation on the origin server.
|
||||
NoCache bool
|
||||
|
||||
// no-store(bool): http://tools.ietf.org/html/rfc7234#section-5.2.1.5
|
||||
//
|
||||
// The "no-store" request directive indicates that a cache MUST NOT
|
||||
// store any part of either this request or any response to it. This
|
||||
// directive applies to both private and shared caches.
|
||||
NoStore bool
|
||||
|
||||
// no-transform(bool): http://tools.ietf.org/html/rfc7234#section-5.2.1.6
|
||||
//
|
||||
// The "no-transform" request directive indicates that an intermediary
|
||||
// (whether or not it implements a cache) MUST NOT transform the
|
||||
// payload, as defined in Section 5.7.2 of RFC7230.
|
||||
NoTransform bool
|
||||
|
||||
// only-if-cached(bool): http://tools.ietf.org/html/rfc7234#section-5.2.1.7
|
||||
//
|
||||
// The "only-if-cached" request directive indicates that the client only
|
||||
// wishes to obtain a stored response.
|
||||
OnlyIfCached bool
|
||||
|
||||
// Extensions: http://tools.ietf.org/html/rfc7234#section-5.2.3
|
||||
//
|
||||
// The Cache-Control header field can be extended through the use of one
|
||||
// or more cache-extension tokens, each with an optional value. A cache
|
||||
// MUST ignore unrecognized cache directives.
|
||||
Extensions []string
|
||||
}
|
||||
|
||||
func (cd *RequestCacheDirectives) addToken(token string) error {
|
||||
var err error = nil
|
||||
|
||||
switch token {
|
||||
case "max-age":
|
||||
err = ErrMaxAgeDeltaSeconds
|
||||
case "max-stale":
|
||||
err = ErrMaxStaleDeltaSeconds
|
||||
case "min-fresh":
|
||||
err = ErrMinFreshDeltaSeconds
|
||||
case "no-cache":
|
||||
cd.NoCache = true
|
||||
case "no-store":
|
||||
cd.NoStore = true
|
||||
case "no-transform":
|
||||
cd.NoTransform = true
|
||||
case "only-if-cached":
|
||||
cd.OnlyIfCached = true
|
||||
default:
|
||||
cd.Extensions = append(cd.Extensions, token)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (cd *RequestCacheDirectives) addPair(token string, v string) error {
|
||||
var err error = nil
|
||||
|
||||
switch token {
|
||||
case "max-age":
|
||||
cd.MaxAge, err = parseDeltaSeconds(v)
|
||||
if err != nil {
|
||||
err = ErrMaxAgeDeltaSeconds
|
||||
}
|
||||
case "max-stale":
|
||||
cd.MaxStale, err = parseDeltaSeconds(v)
|
||||
if err != nil {
|
||||
err = ErrMaxStaleDeltaSeconds
|
||||
}
|
||||
case "min-fresh":
|
||||
cd.MinFresh, err = parseDeltaSeconds(v)
|
||||
if err != nil {
|
||||
err = ErrMinFreshDeltaSeconds
|
||||
}
|
||||
case "no-cache":
|
||||
err = ErrNoCacheNoArgs
|
||||
case "no-store":
|
||||
err = ErrNoStoreNoArgs
|
||||
case "no-transform":
|
||||
err = ErrNoTransformNoArgs
|
||||
case "only-if-cached":
|
||||
err = ErrOnlyIfCachedNoArgs
|
||||
default:
|
||||
// TODO(pquerna): this sucks, making user re-parse
|
||||
cd.Extensions = append(cd.Extensions, token+"="+v)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// LOW LEVEL API: Parses a Cache Control Header from a Request into a set of directives.
|
||||
func ParseRequestCacheControl(value string) (*RequestCacheDirectives, error) {
|
||||
cd := &RequestCacheDirectives{
|
||||
MaxAge: -1,
|
||||
MaxStale: -1,
|
||||
MinFresh: -1,
|
||||
}
|
||||
|
||||
err := parse(value, cd)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return cd, nil
|
||||
}
|
||||
|
||||
// LOW LEVEL API: Representation of possible response directives in a `Cache-Control` header: http://tools.ietf.org/html/rfc7234#section-5.2.2
|
||||
//
|
||||
// Note: Many fields will be `nil` in practice.
|
||||
//
|
||||
type ResponseCacheDirectives struct {
|
||||
|
||||
// must-revalidate(bool): http://tools.ietf.org/html/rfc7234#section-5.2.2.1
|
||||
//
|
||||
// The "must-revalidate" response directive indicates that once it has
|
||||
// become stale, a cache MUST NOT use the response to satisfy subsequent
|
||||
// requests without successful validation on the origin server.
|
||||
MustRevalidate bool
|
||||
|
||||
// no-cache(FieldName): http://tools.ietf.org/html/rfc7234#section-5.2.2.2
|
||||
//
|
||||
// The "no-cache" response directive indicates that the response MUST
|
||||
// NOT be used to satisfy a subsequent request without successful
|
||||
// validation on the origin server.
|
||||
//
|
||||
// If the no-cache response directive specifies one or more field-names,
|
||||
// then a cache MAY use the response to satisfy a subsequent request,
|
||||
// subject to any other restrictions on caching. However, any header
|
||||
// fields in the response that have the field-name(s) listed MUST NOT be
|
||||
// sent in the response to a subsequent request without successful
|
||||
// revalidation with the origin server.
|
||||
NoCache FieldNames
|
||||
|
||||
// no-cache(cast-to-bool): http://tools.ietf.org/html/rfc7234#section-5.2.2.2
|
||||
//
|
||||
// While the RFC defines optional field-names on a no-cache directive,
|
||||
// many applications only want to know if any no-cache directives were
|
||||
// present at all.
|
||||
NoCachePresent bool
|
||||
|
||||
// no-store(bool): http://tools.ietf.org/html/rfc7234#section-5.2.2.3
|
||||
//
|
||||
// The "no-store" request directive indicates that a cache MUST NOT
|
||||
// store any part of either this request or any response to it. This
|
||||
// directive applies to both private and shared caches.
|
||||
NoStore bool
|
||||
|
||||
// no-transform(bool): http://tools.ietf.org/html/rfc7234#section-5.2.2.4
|
||||
//
|
||||
// The "no-transform" response directive indicates that an intermediary
|
||||
// (regardless of whether it implements a cache) MUST NOT transform the
|
||||
// payload, as defined in Section 5.7.2 of RFC7230.
|
||||
NoTransform bool
|
||||
|
||||
// public(bool): http://tools.ietf.org/html/rfc7234#section-5.2.2.5
|
||||
//
|
||||
// The "public" response directive indicates that any cache MAY store
|
||||
// the response, even if the response would normally be non-cacheable or
|
||||
// cacheable only within a private cache.
|
||||
Public bool
|
||||
|
||||
// private(FieldName): http://tools.ietf.org/html/rfc7234#section-5.2.2.6
|
||||
//
|
||||
// The "private" response directive indicates that the response message
|
||||
// is intended for a single user and MUST NOT be stored by a shared
|
||||
// cache. A private cache MAY store the response and reuse it for later
|
||||
// requests, even if the response would normally be non-cacheable.
|
||||
//
|
||||
// If the private response directive specifies one or more field-names,
|
||||
// this requirement is limited to the field-values associated with the
|
||||
// listed response header fields. That is, a shared cache MUST NOT
|
||||
// store the specified field-names(s), whereas it MAY store the
|
||||
// remainder of the response message.
|
||||
Private FieldNames
|
||||
|
||||
// private(cast-to-bool): http://tools.ietf.org/html/rfc7234#section-5.2.2.6
|
||||
//
|
||||
// While the RFC defines optional field-names on a private directive,
|
||||
// many applications only want to know if any private directives were
|
||||
// present at all.
|
||||
PrivatePresent bool
|
||||
|
||||
// proxy-revalidate(bool): http://tools.ietf.org/html/rfc7234#section-5.2.2.7
|
||||
//
|
||||
// The "proxy-revalidate" response directive has the same meaning as the
|
||||
// must-revalidate response directive, except that it does not apply to
|
||||
// private caches.
|
||||
ProxyRevalidate bool
|
||||
|
||||
// max-age(delta seconds): http://tools.ietf.org/html/rfc7234#section-5.2.2.8
|
||||
//
|
||||
// The "max-age" response directive indicates that the response is to be
|
||||
// considered stale after its age is greater than the specified number
|
||||
// of seconds.
|
||||
MaxAge DeltaSeconds
|
||||
|
||||
// s-maxage(delta seconds): http://tools.ietf.org/html/rfc7234#section-5.2.2.9
|
||||
//
|
||||
// The "s-maxage" response directive indicates that, in shared caches,
|
||||
// the maximum age specified by this directive overrides the maximum age
|
||||
// specified by either the max-age directive or the Expires header
|
||||
// field. The s-maxage directive also implies the semantics of the
|
||||
// proxy-revalidate response directive.
|
||||
SMaxAge DeltaSeconds
|
||||
|
||||
////
|
||||
// Experimental features
|
||||
// - https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control#Extension_Cache-Control_directives
|
||||
// - https://www.fastly.com/blog/stale-while-revalidate-stale-if-error-available-today
|
||||
////
|
||||
|
||||
// immutable(cast-to-bool): experimental feature
|
||||
Immutable bool
|
||||
|
||||
// stale-if-error(delta seconds): experimental feature
|
||||
StaleIfError DeltaSeconds
|
||||
|
||||
// stale-while-revalidate(delta seconds): experimental feature
|
||||
StaleWhileRevalidate DeltaSeconds
|
||||
|
||||
// Extensions: http://tools.ietf.org/html/rfc7234#section-5.2.3
|
||||
//
|
||||
// The Cache-Control header field can be extended through the use of one
|
||||
// or more cache-extension tokens, each with an optional value. A cache
|
||||
// MUST ignore unrecognized cache directives.
|
||||
Extensions []string
|
||||
}
|
||||
|
||||
// LOW LEVEL API: Parses a Cache Control Header from a Response into a set of directives.
|
||||
func ParseResponseCacheControl(value string) (*ResponseCacheDirectives, error) {
|
||||
cd := &ResponseCacheDirectives{
|
||||
MaxAge: -1,
|
||||
SMaxAge: -1,
|
||||
// Experimental stale timeouts
|
||||
StaleIfError: -1,
|
||||
StaleWhileRevalidate: -1,
|
||||
}
|
||||
|
||||
err := parse(value, cd)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return cd, nil
|
||||
}
|
||||
|
||||
func (cd *ResponseCacheDirectives) addToken(token string) error {
|
||||
var err error = nil
|
||||
switch token {
|
||||
case "must-revalidate":
|
||||
cd.MustRevalidate = true
|
||||
case "no-cache":
|
||||
cd.NoCachePresent = true
|
||||
case "no-store":
|
||||
cd.NoStore = true
|
||||
case "no-transform":
|
||||
cd.NoTransform = true
|
||||
case "public":
|
||||
cd.Public = true
|
||||
case "private":
|
||||
cd.PrivatePresent = true
|
||||
case "proxy-revalidate":
|
||||
cd.ProxyRevalidate = true
|
||||
case "max-age":
|
||||
err = ErrMaxAgeDeltaSeconds
|
||||
case "s-maxage":
|
||||
err = ErrSMaxAgeDeltaSeconds
|
||||
// Experimental
|
||||
case "immutable":
|
||||
cd.Immutable = true
|
||||
case "stale-if-error":
|
||||
err = ErrMaxAgeDeltaSeconds
|
||||
case "stale-while-revalidate":
|
||||
err = ErrMaxAgeDeltaSeconds
|
||||
default:
|
||||
cd.Extensions = append(cd.Extensions, token)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func hasFieldNames(token string) bool {
|
||||
switch token {
|
||||
case "no-cache":
|
||||
return true
|
||||
case "private":
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (cd *ResponseCacheDirectives) addPair(token string, v string) error {
|
||||
var err error = nil
|
||||
|
||||
switch token {
|
||||
case "must-revalidate":
|
||||
err = ErrMustRevalidateNoArgs
|
||||
case "no-cache":
|
||||
cd.NoCachePresent = true
|
||||
tokens := strings.Split(v, ",")
|
||||
if cd.NoCache == nil {
|
||||
cd.NoCache = make(FieldNames)
|
||||
}
|
||||
for _, t := range tokens {
|
||||
k := http.CanonicalHeaderKey(textproto.TrimString(t))
|
||||
cd.NoCache[k] = true
|
||||
}
|
||||
case "no-store":
|
||||
err = ErrNoStoreNoArgs
|
||||
case "no-transform":
|
||||
err = ErrNoTransformNoArgs
|
||||
case "public":
|
||||
err = ErrPublicNoArgs
|
||||
case "private":
|
||||
cd.PrivatePresent = true
|
||||
tokens := strings.Split(v, ",")
|
||||
if cd.Private == nil {
|
||||
cd.Private = make(FieldNames)
|
||||
}
|
||||
for _, t := range tokens {
|
||||
k := http.CanonicalHeaderKey(textproto.TrimString(t))
|
||||
cd.Private[k] = true
|
||||
}
|
||||
case "proxy-revalidate":
|
||||
err = ErrProxyRevalidateNoArgs
|
||||
case "max-age":
|
||||
cd.MaxAge, err = parseDeltaSeconds(v)
|
||||
case "s-maxage":
|
||||
cd.SMaxAge, err = parseDeltaSeconds(v)
|
||||
// Experimental
|
||||
case "immutable":
|
||||
err = ErrImmutableNoArgs
|
||||
case "stale-if-error":
|
||||
cd.StaleIfError, err = parseDeltaSeconds(v)
|
||||
case "stale-while-revalidate":
|
||||
cd.StaleWhileRevalidate, err = parseDeltaSeconds(v)
|
||||
default:
|
||||
// TODO(pquerna): this sucks, making user re-parse, and its technically not 'quoted' like the original,
|
||||
// but this is still easier, just a SplitN on "="
|
||||
cd.Extensions = append(cd.Extensions, token+"="+v)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
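For orientation, a minimal usage sketch of the two low-level parsers defined above, called from application code. The header values are made up for illustration; the import path matches the vendored package.

package main

import (
	"fmt"
	"log"

	"github.com/pquerna/cachecontrol/cacheobject"
)

func main() {
	// Example usage sketch (not from the upstream source).
	// Response directives: a private field-name list, max-age and no-transform.
	respDir, err := cacheobject.ParseResponseCacheControl("private=Set-Cookie, max-age=300, no-transform")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(respDir.MaxAge)                // 300
	fmt.Println(respDir.NoTransform)           // true
	fmt.Println(respDir.Private["Set-Cookie"]) // true

	// Request directives share the same parse loop but accept a different token set.
	reqDir, err := cacheobject.ParseRequestCacheControl("no-cache, max-stale=60")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(reqDir.NoCache)  // true
	fmt.Println(reqDir.MaxStale) // 60
}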
454
vendor/github.com/pquerna/cachecontrol/cacheobject/directive_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,454 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package cacheobject
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"fmt"
|
||||
"math"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMaxAge(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl("")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(-1))
|
||||
|
||||
cd, err = ParseResponseCacheControl("max-age")
|
||||
require.Error(t, err)
|
||||
|
||||
cd, err = ParseResponseCacheControl("max-age=20")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(20))
|
||||
|
||||
cd, err = ParseResponseCacheControl("max-age=0")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(0))
|
||||
|
||||
cd, err = ParseResponseCacheControl("max-age=-1")
|
||||
require.Error(t, err)
|
||||
}
|
||||
|
||||
func TestSMaxAge(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl("")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.SMaxAge, DeltaSeconds(-1))
|
||||
|
||||
cd, err = ParseResponseCacheControl("s-maxage")
|
||||
require.Error(t, err)
|
||||
|
||||
cd, err = ParseResponseCacheControl("s-maxage=20")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.SMaxAge, DeltaSeconds(20))
|
||||
|
||||
cd, err = ParseResponseCacheControl("s-maxage=0")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.SMaxAge, DeltaSeconds(0))
|
||||
|
||||
cd, err = ParseResponseCacheControl("s-maxage=-1")
|
||||
require.Error(t, err)
|
||||
}
|
||||
|
||||
func TestResNoCache(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl("")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.SMaxAge, DeltaSeconds(-1))
|
||||
|
||||
cd, err = ParseResponseCacheControl("no-cache")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.NoCachePresent, true)
|
||||
require.Equal(t, len(cd.NoCache), 0)
|
||||
|
||||
cd, err = ParseResponseCacheControl("no-cache=MyThing")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.NoCachePresent, true)
|
||||
require.Equal(t, len(cd.NoCache), 1)
|
||||
}
|
||||
|
||||
func TestResSpaceOnly(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(" ")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.SMaxAge, DeltaSeconds(-1))
|
||||
}
|
||||
|
||||
func TestResTabOnly(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl("\t")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.SMaxAge, DeltaSeconds(-1))
|
||||
}
|
||||
|
||||
func TestResPrivateExtensionQuoted(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`private="Set-Cookie,Request-Id" public`)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.Public, true)
|
||||
require.Equal(t, cd.PrivatePresent, true)
|
||||
require.Equal(t, len(cd.Private), 2)
|
||||
require.Equal(t, len(cd.Extensions), 0)
|
||||
require.Equal(t, cd.Private["Set-Cookie"], true)
|
||||
require.Equal(t, cd.Private["Request-Id"], true)
|
||||
}
|
||||
|
||||
func TestResCommaFollowingBare(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`public, max-age=500`)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.Public, true)
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(500))
|
||||
require.Equal(t, cd.PrivatePresent, false)
|
||||
require.Equal(t, len(cd.Extensions), 0)
|
||||
}
|
||||
|
||||
func TestResCommaFollowingKV(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`max-age=500, public`)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.Public, true)
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(500))
|
||||
require.Equal(t, cd.PrivatePresent, false)
|
||||
require.Equal(t, len(cd.Extensions), 0)
|
||||
}
|
||||
|
||||
func TestResPrivateTrailingComma(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`private=Set-Cookie, public`)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.Public, true)
|
||||
require.Equal(t, cd.PrivatePresent, true)
|
||||
require.Equal(t, len(cd.Private), 1)
|
||||
require.Equal(t, len(cd.Extensions), 0)
|
||||
require.Equal(t, cd.Private["Set-Cookie"], true)
|
||||
}
|
||||
|
||||
func TestResPrivateExtension(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`private=Set-Cookie,Request-Id public`)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.Public, true)
|
||||
require.Equal(t, cd.PrivatePresent, true)
|
||||
require.Equal(t, len(cd.Private), 2)
|
||||
require.Equal(t, len(cd.Extensions), 0)
|
||||
require.Equal(t, cd.Private["Set-Cookie"], true)
|
||||
require.Equal(t, cd.Private["Request-Id"], true)
|
||||
}
|
||||
|
||||
func TestResMultipleNoCacheTabExtension(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl("no-cache " + "\t" + "no-cache=Mything aasdfdsfa")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.NoCachePresent, true)
|
||||
require.Equal(t, len(cd.NoCache), 1)
|
||||
require.Equal(t, len(cd.Extensions), 1)
|
||||
require.Equal(t, cd.NoCache["Mything"], true)
|
||||
}
|
||||
|
||||
func TestResExtensionsEmptyQuote(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`foo="" bar="hi"`)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cd.SMaxAge, DeltaSeconds(-1))
|
||||
require.Equal(t, len(cd.Extensions), 2)
|
||||
require.Contains(t, cd.Extensions, "bar=hi")
|
||||
require.Contains(t, cd.Extensions, "foo=")
|
||||
}
|
||||
|
||||
func TestResQuoteMismatch(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`foo="`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrQuoteMismatch)
|
||||
}
|
||||
|
||||
func TestResMustRevalidateNoArgs(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`must-revalidate=234`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrMustRevalidateNoArgs)
|
||||
}
|
||||
|
||||
func TestResNoTransformNoArgs(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`no-transform="xxx"`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrNoTransformNoArgs)
|
||||
}
|
||||
|
||||
func TestResNoStoreNoArgs(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`no-store=""`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrNoStoreNoArgs)
|
||||
}
|
||||
|
||||
func TestResProxyRevalidateNoArgs(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`proxy-revalidate=23432`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrProxyRevalidateNoArgs)
|
||||
}
|
||||
|
||||
func TestResPublicNoArgs(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`public=999Vary`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrPublicNoArgs)
|
||||
}
|
||||
|
||||
func TestResMustRevalidate(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`must-revalidate`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.MustRevalidate, true)
|
||||
}
|
||||
|
||||
func TestResNoTransform(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`no-transform`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.NoTransform, true)
|
||||
}
|
||||
|
||||
func TestResNoStore(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`no-store`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.NoStore, true)
|
||||
}
|
||||
|
||||
func TestResProxyRevalidate(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`proxy-revalidate`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.ProxyRevalidate, true)
|
||||
}
|
||||
|
||||
func TestResPublic(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`public`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.Public, true)
|
||||
}
|
||||
|
||||
func TestResPrivate(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`private`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Len(t, cd.Private, 0)
|
||||
require.Equal(t, cd.PrivatePresent, true)
|
||||
}
|
||||
|
||||
func TestResImmutable(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`immutable`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.Immutable, true)
|
||||
}
|
||||
|
||||
func TestResStaleIfError(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`stale-if-error=99999`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.StaleIfError, DeltaSeconds(99999))
|
||||
}
|
||||
|
||||
func TestResStaleWhileRevalidate(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`stale-while-revalidate=99999`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.StaleWhileRevalidate, DeltaSeconds(99999))
|
||||
}
|
||||
|
||||
func TestParseDeltaSecondsZero(t *testing.T) {
|
||||
ds, err := parseDeltaSeconds("0")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ds, DeltaSeconds(0))
|
||||
}
|
||||
|
||||
func TestParseDeltaSecondsLarge(t *testing.T) {
|
||||
ds, err := parseDeltaSeconds(fmt.Sprintf("%d", int64(math.MaxInt32)*2))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ds, DeltaSeconds(math.MaxInt32))
|
||||
}
|
||||
|
||||
func TestParseDeltaSecondsVeryLarge(t *testing.T) {
|
||||
ds, err := parseDeltaSeconds(fmt.Sprintf("%d", int64(math.MaxInt64)))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ds, DeltaSeconds(math.MaxInt32))
|
||||
}
|
||||
|
||||
func TestParseDeltaSecondsNegative(t *testing.T) {
|
||||
ds, err := parseDeltaSeconds("-60")
|
||||
require.Error(t, err)
|
||||
require.Equal(t, DeltaSeconds(-1), ds)
|
||||
}
|
||||
|
||||
func TestReqNoCacheNoArgs(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`no-cache=234`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrNoCacheNoArgs)
|
||||
}
|
||||
|
||||
func TestReqNoStoreNoArgs(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`no-store=,,x`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrNoStoreNoArgs)
|
||||
}
|
||||
|
||||
func TestReqNoTransformNoArgs(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`no-transform=akx`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrNoTransformNoArgs)
|
||||
}
|
||||
|
||||
func TestReqOnlyIfCachedNoArgs(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`only-if-cached=no-store`)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, cd)
|
||||
require.Equal(t, err, ErrOnlyIfCachedNoArgs)
|
||||
}
|
||||
|
||||
func TestReqMaxAge(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`max-age=99999`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(99999))
|
||||
require.Equal(t, cd.MaxStale, DeltaSeconds(-1))
|
||||
}
|
||||
|
||||
func TestReqMaxStale(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`max-stale=99999`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.MaxStale, DeltaSeconds(99999))
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(-1))
|
||||
require.Equal(t, cd.MinFresh, DeltaSeconds(-1))
|
||||
}
|
||||
|
||||
func TestReqMaxAgeBroken(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`max-age`)
|
||||
require.Error(t, err)
|
||||
require.Equal(t, ErrMaxAgeDeltaSeconds, err)
|
||||
require.Nil(t, cd)
|
||||
}
|
||||
|
||||
func TestReqMaxStaleBroken(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`max-stale`)
|
||||
require.Error(t, err)
|
||||
require.Equal(t, ErrMaxStaleDeltaSeconds, err)
|
||||
require.Nil(t, cd)
|
||||
}
|
||||
|
||||
func TestReqMinFresh(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`min-fresh=99999`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.MinFresh, DeltaSeconds(99999))
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(-1))
|
||||
require.Equal(t, cd.MaxStale, DeltaSeconds(-1))
|
||||
}
|
||||
|
||||
func TestReqMinFreshBroken(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`min-fresh`)
|
||||
require.Error(t, err)
|
||||
require.Equal(t, ErrMinFreshDeltaSeconds, err)
|
||||
require.Nil(t, cd)
|
||||
}
|
||||
|
||||
func TestReqMinFreshJunk(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`min-fresh=a99a`)
|
||||
require.Equal(t, ErrMinFreshDeltaSeconds, err)
|
||||
require.Nil(t, cd)
|
||||
}
|
||||
|
||||
func TestReqMinFreshBadValue(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`min-fresh=-1`)
|
||||
require.Equal(t, ErrMinFreshDeltaSeconds, err)
|
||||
require.Nil(t, cd)
|
||||
}
|
||||
|
||||
func TestReqExtensions(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`min-fresh=99999 foobar=1 cats`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.MinFresh, DeltaSeconds(99999))
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(-1))
|
||||
require.Equal(t, cd.MaxStale, DeltaSeconds(-1))
|
||||
require.Len(t, cd.Extensions, 2)
|
||||
require.Contains(t, cd.Extensions, "foobar=1")
|
||||
require.Contains(t, cd.Extensions, "cats")
|
||||
}
|
||||
|
||||
func TestReqMultiple(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`no-store no-transform`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.NoStore, true)
|
||||
require.Equal(t, cd.NoTransform, true)
|
||||
require.Equal(t, cd.OnlyIfCached, false)
|
||||
require.Len(t, cd.Extensions, 0)
|
||||
}
|
||||
|
||||
func TestReqMultipleComma(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`no-cache,only-if-cached`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.NoCache, true)
|
||||
require.Equal(t, cd.NoStore, false)
|
||||
require.Equal(t, cd.NoTransform, false)
|
||||
require.Equal(t, cd.OnlyIfCached, true)
|
||||
require.Len(t, cd.Extensions, 0)
|
||||
}
|
||||
|
||||
func TestReqLeadingComma(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`,no-cache`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Len(t, cd.Extensions, 0)
|
||||
require.Equal(t, cd.NoCache, true)
|
||||
require.Equal(t, cd.NoStore, false)
|
||||
require.Equal(t, cd.NoTransform, false)
|
||||
require.Equal(t, cd.OnlyIfCached, false)
|
||||
}
|
||||
|
||||
func TestReqMinFreshQuoted(t *testing.T) {
|
||||
cd, err := ParseRequestCacheControl(`min-fresh="99999"`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.MinFresh, DeltaSeconds(99999))
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(-1))
|
||||
require.Equal(t, cd.MaxStale, DeltaSeconds(-1))
|
||||
}
|
||||
|
||||
func TestNoSpacesIssue3(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`no-cache,no-store,max-age=0,must-revalidate`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.NoCachePresent, true)
|
||||
require.Equal(t, cd.NoStore, true)
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(0))
|
||||
require.Equal(t, cd.MustRevalidate, true)
|
||||
}
|
||||
|
||||
func TestNoSpacesIssue3PrivateFields(t *testing.T) {
|
||||
cd, err := ParseResponseCacheControl(`no-cache, no-store, private=set-cookie,hello, max-age=0, must-revalidate`)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, cd)
|
||||
require.Equal(t, cd.NoCachePresent, true)
|
||||
require.Equal(t, cd.NoStore, true)
|
||||
require.Equal(t, cd.MaxAge, DeltaSeconds(0))
|
||||
require.Equal(t, cd.MustRevalidate, true)
|
||||
require.Equal(t, true, cd.Private["Set-Cookie"])
|
||||
require.Equal(t, true, cd.Private["Hello"])
|
||||
}
|
93
vendor/github.com/pquerna/cachecontrol/cacheobject/lex.go
generated
vendored
Normal file
|
@@ -0,0 +1,93 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package cacheobject
|
||||
|
||||
// This file deals with lexical matters of HTTP
|
||||
|
||||
func isSeparator(c byte) bool {
|
||||
switch c {
|
||||
case '(', ')', '<', '>', '@', ',', ';', ':', '\\', '"', '/', '[', ']', '?', '=', '{', '}', ' ', '\t':
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isCtl(c byte) bool { return (0 <= c && c <= 31) || c == 127 }
|
||||
|
||||
func isChar(c byte) bool { return 0 <= c && c <= 127 }
|
||||
|
||||
func isAnyText(c byte) bool { return !isCtl(c) }
|
||||
|
||||
func isQdText(c byte) bool { return isAnyText(c) && c != '"' }
|
||||
|
||||
func isToken(c byte) bool { return isChar(c) && !isCtl(c) && !isSeparator(c) }
|
||||
|
||||
// Valid escaped sequences are not specified in RFC 2616, so for now, we assume
|
||||
// that they coincide with the common sense ones used by GO. Malformed
|
||||
// characters should probably not be treated as errors by a robust (forgiving)
|
||||
// parser, so we replace them with the '?' character.
|
||||
func httpUnquotePair(b byte) byte {
|
||||
// skip the first byte, which should always be '\'
|
||||
switch b {
|
||||
case 'a':
|
||||
return '\a'
|
||||
case 'b':
|
||||
return '\b'
|
||||
case 'f':
|
||||
return '\f'
|
||||
case 'n':
|
||||
return '\n'
|
||||
case 'r':
|
||||
return '\r'
|
||||
case 't':
|
||||
return '\t'
|
||||
case 'v':
|
||||
return '\v'
|
||||
case '\\':
|
||||
return '\\'
|
||||
case '\'':
|
||||
return '\''
|
||||
case '"':
|
||||
return '"'
|
||||
}
|
||||
return '?'
|
||||
}
|
||||
|
||||
// raw must begin with a valid quoted string. Only the first quoted string is
|
||||
// parsed and is unquoted in result. eaten is the number of bytes parsed, or -1
|
||||
// upon failure.
|
||||
func httpUnquote(raw string) (eaten int, result string) {
|
||||
buf := make([]byte, len(raw))
|
||||
if raw[0] != '"' {
|
||||
return -1, ""
|
||||
}
|
||||
eaten = 1
|
||||
j := 0 // # of bytes written in buf
|
||||
for i := 1; i < len(raw); i++ {
|
||||
switch b := raw[i]; b {
|
||||
case '"':
|
||||
eaten++
|
||||
buf = buf[0:j]
|
||||
return i + 1, string(buf)
|
||||
case '\\':
|
||||
if len(raw) < i+2 {
|
||||
return -1, ""
|
||||
}
|
||||
buf[j] = httpUnquotePair(raw[i+1])
|
||||
eaten += 2
|
||||
j++
|
||||
i++
|
||||
default:
|
||||
if isQdText(b) {
|
||||
buf[j] = b
|
||||
} else {
|
||||
buf[j] = '?'
|
||||
}
|
||||
eaten++
|
||||
j++
|
||||
}
|
||||
}
|
||||
return -1, ""
|
||||
}
|
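The quoted-string handling above is only reachable through the exported parsers, since httpUnquote is unexported. A small sketch, again with made-up header values, of how quoted field-name lists and a quote mismatch surface to callers:

package main

import (
	"fmt"
	"log"

	"github.com/pquerna/cachecontrol/cacheobject"
)

func main() {
	// Example usage sketch (not from the upstream source).
	// A quoted field-name list is unquoted, split on commas and canonicalized
	// into http.Header-style keys.
	cd, err := cacheobject.ParseResponseCacheControl(`no-cache="Set-Cookie,WWW-Authenticate", public`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(cd.NoCachePresent)              // true
	fmt.Println(cd.NoCache["Set-Cookie"])       // true
	fmt.Println(cd.NoCache["Www-Authenticate"]) // true
	fmt.Println(cd.Public)                      // true

	// A missing closing quote is reported as ErrQuoteMismatch.
	_, err = cacheobject.ParseResponseCacheControl(`no-cache="Set-Cookie`)
	fmt.Println(err == cacheobject.ErrQuoteMismatch) // true
}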
387
vendor/github.com/pquerna/cachecontrol/cacheobject/object.go
generated
vendored
Normal file
|
@@ -0,0 +1,387 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package cacheobject
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// LOW LEVEL API: Represents a potentially cacheable HTTP object.
|
||||
//
|
||||
// This struct is designed to be serialized efficiently, so in a high
|
||||
// performance caching server, things like Date-Strings don't need to be
|
||||
// parsed for every use of a cached object.
|
||||
type Object struct {
|
||||
CacheIsPrivate bool
|
||||
|
||||
RespDirectives *ResponseCacheDirectives
|
||||
RespHeaders http.Header
|
||||
RespStatusCode int
|
||||
RespExpiresHeader time.Time
|
||||
RespDateHeader time.Time
|
||||
RespLastModifiedHeader time.Time
|
||||
|
||||
ReqDirectives *RequestCacheDirectives
|
||||
ReqHeaders http.Header
|
||||
ReqMethod string
|
||||
|
||||
NowUTC time.Time
|
||||
}
|
||||
|
||||
// LOW LEVEL API: Represents the results of examining an Object with
|
||||
// CachableObject and ExpirationObject.
|
||||
//
|
||||
// TODO(pquerna): decide if this is a good idea or bad
|
||||
type ObjectResults struct {
|
||||
OutReasons []Reason
|
||||
OutWarnings []Warning
|
||||
OutExpirationTime time.Time
|
||||
OutErr error
|
||||
}
|
||||
|
||||
// LOW LEVEL API: Check if an object is cacheable.
|
||||
func CachableObject(obj *Object, rv *ObjectResults) {
|
||||
rv.OutReasons = nil
|
||||
rv.OutWarnings = nil
|
||||
rv.OutErr = nil
|
||||
|
||||
switch obj.ReqMethod {
|
||||
case "GET":
|
||||
break
|
||||
case "HEAD":
|
||||
break
|
||||
case "POST":
|
||||
/**
|
||||
POST: http://tools.ietf.org/html/rfc7231#section-4.3.3
|
||||
|
||||
Responses to POST requests are only cacheable when they include
|
||||
explicit freshness information (see Section 4.2.1 of [RFC7234]).
|
||||
However, POST caching is not widely implemented. For cases where an
|
||||
origin server wishes the client to be able to cache the result of a
|
||||
POST in a way that can be reused by a later GET, the origin server
|
||||
MAY send a 200 (OK) response containing the result and a
|
||||
Content-Location header field that has the same value as the POST's
|
||||
effective request URI (Section 3.1.4.2).
|
||||
*/
|
||||
if !hasFreshness(obj.ReqDirectives, obj.RespDirectives, obj.RespHeaders, obj.RespExpiresHeader, obj.CacheIsPrivate) {
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonRequestMethodPOST)
|
||||
}
|
||||
|
||||
case "PUT":
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonRequestMethodPUT)
|
||||
|
||||
case "DELETE":
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonRequestMethodDELETE)
|
||||
|
||||
case "CONNECT":
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonRequestMethodCONNECT)
|
||||
|
||||
case "OPTIONS":
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonRequestMethodOPTIONS)
|
||||
|
||||
case "TRACE":
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonRequestMethodTRACE)
|
||||
|
||||
// HTTP Extension Methods: http://www.iana.org/assignments/http-methods/http-methods.xhtml
|
||||
//
|
||||
// To my knowledge, none of them are cacheable. Please open a ticket if this is not the case!
|
||||
//
|
||||
default:
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonRequestMethodUnkown)
|
||||
}
|
||||
|
||||
if obj.ReqDirectives.NoStore {
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonRequestNoStore)
|
||||
}
|
||||
|
||||
// Storing Responses to Authenticated Requests: http://tools.ietf.org/html/rfc7234#section-3.2
|
||||
authz := obj.ReqHeaders.Get("Authorization")
|
||||
if authz != "" {
|
||||
if obj.RespDirectives.MustRevalidate ||
|
||||
obj.RespDirectives.Public ||
|
||||
obj.RespDirectives.SMaxAge != -1 {
|
||||
// Expires of some kind present, this is potentially OK.
|
||||
} else {
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonRequestAuthorizationHeader)
|
||||
}
|
||||
}
|
||||
|
||||
if obj.RespDirectives.PrivatePresent && !obj.CacheIsPrivate {
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonResponsePrivate)
|
||||
}
|
||||
|
||||
if obj.RespDirectives.NoStore {
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonResponseNoStore)
|
||||
}
|
||||
|
||||
/*
|
||||
the response either:
|
||||
|
||||
* contains an Expires header field (see Section 5.3), or
|
||||
|
||||
* contains a max-age response directive (see Section 5.2.2.8), or
|
||||
|
||||
* contains a s-maxage response directive (see Section 5.2.2.9)
|
||||
and the cache is shared, or
|
||||
|
||||
* contains a Cache Control Extension (see Section 5.2.3) that
|
||||
allows it to be cached, or
|
||||
|
||||
* has a status code that is defined as cacheable by default (see
|
||||
Section 4.2.2), or
|
||||
|
||||
* contains a public response directive (see Section 5.2.2.5).
|
||||
*/
|
||||
|
||||
expires := obj.RespHeaders.Get("Expires") != ""
|
||||
statusCachable := cachableStatusCode(obj.RespStatusCode)
|
||||
|
||||
if expires ||
|
||||
obj.RespDirectives.MaxAge != -1 ||
|
||||
(obj.RespDirectives.SMaxAge != -1 && !obj.CacheIsPrivate) ||
|
||||
statusCachable ||
|
||||
obj.RespDirectives.Public {
|
||||
/* cachable by default, at least one of the above conditions was true */
|
||||
} else {
|
||||
rv.OutReasons = append(rv.OutReasons, ReasonResponseUncachableByDefault)
|
||||
}
|
||||
}
|
||||
|
||||
var twentyFourHours = time.Duration(24 * time.Hour)
|
||||
|
||||
const debug = false
|
||||
|
||||
// LOW LEVEL API: Update an object's expiration time.
|
||||
func ExpirationObject(obj *Object, rv *ObjectResults) {
|
||||
/**
|
||||
* Okay, lets calculate Freshness/Expiration now. woo:
|
||||
* http://tools.ietf.org/html/rfc7234#section-4.2
|
||||
*/
|
||||
|
||||
/*
|
||||
o If the cache is shared and the s-maxage response directive
|
||||
(Section 5.2.2.9) is present, use its value, or
|
||||
|
||||
o If the max-age response directive (Section 5.2.2.8) is present,
|
||||
use its value, or
|
||||
|
||||
o If the Expires response header field (Section 5.3) is present, use
|
||||
its value minus the value of the Date response header field, or
|
||||
|
||||
o Otherwise, no explicit expiration time is present in the response.
|
||||
A heuristic freshness lifetime might be applicable; see
|
||||
Section 4.2.2.
|
||||
*/
|
||||
|
||||
var expiresTime time.Time
|
||||
|
||||
if obj.RespDirectives.SMaxAge != -1 && !obj.CacheIsPrivate {
|
||||
expiresTime = obj.NowUTC.Add(time.Second * time.Duration(obj.RespDirectives.SMaxAge))
|
||||
} else if obj.RespDirectives.MaxAge != -1 {
|
||||
expiresTime = obj.NowUTC.UTC().Add(time.Second * time.Duration(obj.RespDirectives.MaxAge))
|
||||
} else if !obj.RespExpiresHeader.IsZero() {
|
||||
serverDate := obj.RespDateHeader
|
||||
if serverDate.IsZero() {
|
||||
// common enough case when a Date: header has not yet been added to an
|
||||
// active response.
|
||||
serverDate = obj.NowUTC
|
||||
}
|
||||
expiresTime = obj.NowUTC.Add(obj.RespExpiresHeader.Sub(serverDate))
|
||||
} else if !obj.RespLastModifiedHeader.IsZero() {
|
||||
// heuristic freshness lifetime
|
||||
rv.OutWarnings = append(rv.OutWarnings, WarningHeuristicExpiration)
|
||||
|
||||
// http://httpd.apache.org/docs/2.4/mod/mod_cache.html#cachelastmodifiedfactor
|
||||
// CacheMaxExpire defaults to 24 hours
|
||||
// CacheLastModifiedFactor: is 0.1
|
||||
//
|
||||
// expiry-period = MIN(time-since-last-modified-date * factor, 24 hours)
|
||||
//
|
||||
// obj.NowUTC
|
||||
|
||||
since := obj.RespLastModifiedHeader.Sub(obj.NowUTC)
|
||||
since = time.Duration(float64(since) * -0.1)
|
||||
|
||||
if since > twentyFourHours {
|
||||
expiresTime = obj.NowUTC.Add(twentyFourHours)
|
||||
} else {
|
||||
expiresTime = obj.NowUTC.Add(since)
|
||||
}
|
||||
|
||||
if debug {
|
||||
println("Now UTC: ", obj.NowUTC.String())
|
||||
println("Last-Modified: ", obj.RespLastModifiedHeader.String())
|
||||
println("Since: ", since.String())
|
||||
println("TwentyFourHours: ", twentyFourHours.String())
|
||||
println("Expiration: ", expiresTime.String())
|
||||
}
|
||||
} else {
|
||||
// TODO(pquerna): what should the default behavior be for expiration time?
|
||||
}
|
||||
|
||||
rv.OutExpirationTime = expiresTime
|
||||
}
|
||||
|
||||
// Evaluate cacheability based on an HTTP request and parts of the response.
|
||||
func UsingRequestResponse(req *http.Request,
|
||||
statusCode int,
|
||||
respHeaders http.Header,
|
||||
privateCache bool) ([]Reason, time.Time, error) {
|
||||
reasons, time, _, _, err := UsingRequestResponseWithObject(req, statusCode, respHeaders, privateCache)
|
||||
return reasons, time, err
|
||||
}
|
||||
|
||||
// Evaluate cacheability based on an HTTP request and parts of the response.
|
||||
// Returns the parsed Object as well.
|
||||
func UsingRequestResponseWithObject(req *http.Request,
|
||||
statusCode int,
|
||||
respHeaders http.Header,
|
||||
privateCache bool) ([]Reason, time.Time, []Warning, *Object, error) {
|
||||
var reqHeaders http.Header
|
||||
var reqMethod string
|
||||
|
||||
var reqDir *RequestCacheDirectives = nil
|
||||
respDir, err := ParseResponseCacheControl(respHeaders.Get("Cache-Control"))
|
||||
if err != nil {
|
||||
return nil, time.Time{}, nil, nil, err
|
||||
}
|
||||
|
||||
if req != nil {
|
||||
reqDir, err = ParseRequestCacheControl(req.Header.Get("Cache-Control"))
|
||||
if err != nil {
|
||||
return nil, time.Time{}, nil, nil, err
|
||||
}
|
||||
reqHeaders = req.Header
|
||||
reqMethod = req.Method
|
||||
}
|
||||
|
||||
var expiresHeader time.Time
|
||||
var dateHeader time.Time
|
||||
var lastModifiedHeader time.Time
|
||||
|
||||
if respHeaders.Get("Expires") != "" {
|
||||
expiresHeader, err = http.ParseTime(respHeaders.Get("Expires"))
|
||||
if err != nil {
|
||||
// sometimes servers will return `Expires: 0` or `Expires: -1` to
|
||||
// indicate expired content
|
||||
expiresHeader = time.Time{}
|
||||
}
|
||||
expiresHeader = expiresHeader.UTC()
|
||||
}
|
||||
|
||||
if respHeaders.Get("Date") != "" {
|
||||
dateHeader, err = http.ParseTime(respHeaders.Get("Date"))
|
||||
if err != nil {
|
||||
return nil, time.Time{}, nil, nil, err
|
||||
}
|
||||
dateHeader = dateHeader.UTC()
|
||||
}
|
||||
|
||||
if respHeaders.Get("Last-Modified") != "" {
|
||||
lastModifiedHeader, err = http.ParseTime(respHeaders.Get("Last-Modified"))
|
||||
if err != nil {
|
||||
return nil, time.Time{}, nil, nil, err
|
||||
}
|
||||
lastModifiedHeader = lastModifiedHeader.UTC()
|
||||
}
|
||||
|
||||
obj := Object{
|
||||
CacheIsPrivate: privateCache,
|
||||
|
||||
RespDirectives: respDir,
|
||||
RespHeaders: respHeaders,
|
||||
RespStatusCode: statusCode,
|
||||
RespExpiresHeader: expiresHeader,
|
||||
RespDateHeader: dateHeader,
|
||||
RespLastModifiedHeader: lastModifiedHeader,
|
||||
|
||||
ReqDirectives: reqDir,
|
||||
ReqHeaders: reqHeaders,
|
||||
ReqMethod: reqMethod,
|
||||
|
||||
NowUTC: time.Now().UTC(),
|
||||
}
|
||||
rv := ObjectResults{}
|
||||
|
||||
CachableObject(&obj, &rv)
|
||||
if rv.OutErr != nil {
|
||||
return nil, time.Time{}, nil, nil, rv.OutErr
|
||||
}
|
||||
|
||||
ExpirationObject(&obj, &rv)
|
||||
if rv.OutErr != nil {
|
||||
return nil, time.Time{}, nil, nil, rv.OutErr
|
||||
}
|
||||
|
||||
return rv.OutReasons, rv.OutExpirationTime, rv.OutWarnings, &obj, nil
|
||||
}
|
||||
|
||||
// calculate if a freshness directive is present: http://tools.ietf.org/html/rfc7234#section-4.2.1
|
||||
func hasFreshness(reqDir *RequestCacheDirectives, respDir *ResponseCacheDirectives, respHeaders http.Header, respExpires time.Time, privateCache bool) bool {
|
||||
if !privateCache && respDir.SMaxAge != -1 {
|
||||
return true
|
||||
}
|
||||
|
||||
if respDir.MaxAge != -1 {
|
||||
return true
|
||||
}
|
||||
|
||||
if !respExpires.IsZero() || respHeaders.Get("Expires") != "" {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func cachableStatusCode(statusCode int) bool {
|
||||
/*
|
||||
Responses with status codes that are defined as cacheable by default
|
||||
(e.g., 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, and 501 in
|
||||
this specification) can be reused by a cache with heuristic
|
||||
expiration unless otherwise indicated by the method definition or
|
||||
explicit cache controls [RFC7234]; all other status codes are not
|
||||
cacheable by default.
|
||||
*/
|
||||
switch statusCode {
|
||||
case 200:
|
||||
return true
|
||||
case 203:
|
||||
return true
|
||||
case 204:
|
||||
return true
|
||||
case 206:
|
||||
return true
|
||||
case 300:
|
||||
return true
|
||||
case 301:
|
||||
return true
|
||||
case 404:
|
||||
return true
|
||||
case 405:
|
||||
return true
|
||||
case 410:
|
||||
return true
|
||||
case 414:
|
||||
return true
|
||||
case 501:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
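As a rough usage sketch of the high-level entry point defined above (header values are illustrative; the import path matches the vendored package):

package main

import (
	"fmt"
	"log"
	"net/http"
	"time"

	"github.com/pquerna/cachecontrol/cacheobject"
)

func main() {
	// Example usage sketch (not from the upstream source).
	req, err := http.NewRequest("GET", "http://example.com/", nil)
	if err != nil {
		log.Fatal(err)
	}

	respHeaders := http.Header{}
	respHeaders.Set("Cache-Control", "public, max-age=300")
	respHeaders.Set("Date", time.Now().UTC().Format(http.TimeFormat))

	// privateCache=false evaluates the response as a shared cache would.
	reasons, expires, err := cacheobject.UsingRequestResponse(req, 200, respHeaders, false)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(reasons)) // 0: nothing blocks storing this response
	fmt.Println(expires)      // roughly now + 300 seconds
}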
91
vendor/github.com/pquerna/cachecontrol/cacheobject/object_http_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,91 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package cacheobject
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func roundTrip(t *testing.T, fnc func(w http.ResponseWriter, r *http.Request)) (*http.Request, *http.Response) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(fnc))
|
||||
defer ts.Close()
|
||||
|
||||
req, err := http.NewRequest("GET", ts.URL, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
res, err := http.DefaultClient.Do(req)
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = ioutil.ReadAll(res.Body)
|
||||
res.Body.Close()
|
||||
require.NoError(t, err)
|
||||
return req, res
|
||||
}
|
||||
|
||||
func TestCachableResponsePublic(t *testing.T) {
|
||||
req, res := roundTrip(t, func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Header().Set("Cache-Control", "public")
|
||||
w.Header().Set("Last-Modified",
|
||||
time.Now().UTC().Add(time.Duration(time.Hour*-5)).Format(http.TimeFormat))
|
||||
fmt.Fprintln(w, `{}`)
|
||||
})
|
||||
|
||||
reasons, expires, err := UsingRequestResponse(req, res.StatusCode, res.Header, false)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Len(t, reasons, 0)
|
||||
require.WithinDuration(t,
|
||||
time.Now().UTC().Add(time.Duration(float64(time.Hour)*0.5)),
|
||||
expires,
|
||||
10*time.Second)
|
||||
}
|
||||
|
||||
func TestCachableResponseNoHeaders(t *testing.T) {
|
||||
req, res := roundTrip(t, func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
fmt.Fprintln(w, `{}`)
|
||||
})
|
||||
|
||||
reasons, expires, err := UsingRequestResponse(req, res.StatusCode, res.Header, false)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Len(t, reasons, 0)
|
||||
require.True(t, expires.IsZero())
|
||||
}
|
||||
|
||||
func TestCachableResponseBadExpires(t *testing.T) {
|
||||
req, res := roundTrip(t, func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Header().Set("Expires", "-1")
|
||||
fmt.Fprintln(w, `{}`)
|
||||
})
|
||||
|
||||
reasons, expires, err := UsingRequestResponse(req, res.StatusCode, res.Header, false)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Len(t, reasons, 0)
|
||||
require.True(t, expires.IsZero())
|
||||
}
|
394
vendor/github.com/pquerna/cachecontrol/cacheobject/object_test.go
generated
vendored
Normal file
|
@@ -0,0 +1,394 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package cacheobject
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"net/http"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestCachableStatusCode(t *testing.T) {
|
||||
ok := []int{200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501}
|
||||
for _, v := range ok {
|
||||
require.True(t, cachableStatusCode(v), "status code should be cacheable: %d", v)
|
||||
}
|
||||
|
||||
notok := []int{201, 429, 500, 504}
|
||||
for _, v := range notok {
|
||||
require.False(t, cachableStatusCode(v), "status code should not be cacheable: %d", v)
|
||||
}
|
||||
}
|
||||
|
||||
func fill(t *testing.T, now time.Time) Object {
|
||||
RespDirectives, err := ParseResponseCacheControl("")
|
||||
require.NoError(t, err)
|
||||
ReqDirectives, err := ParseRequestCacheControl("")
|
||||
require.NoError(t, err)
|
||||
|
||||
obj := Object{
|
||||
RespDirectives: RespDirectives,
|
||||
RespHeaders: http.Header{},
|
||||
RespStatusCode: 200,
|
||||
RespDateHeader: now,
|
||||
|
||||
ReqDirectives: ReqDirectives,
|
||||
ReqHeaders: http.Header{},
|
||||
ReqMethod: "GET",
|
||||
|
||||
NowUTC: now,
|
||||
}
|
||||
|
||||
return obj
|
||||
}
|
||||
|
||||
func TestGETPrivate(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
RespDirectives, err := ParseResponseCacheControl("private")
|
||||
require.NoError(t, err)
|
||||
|
||||
obj.RespDirectives = RespDirectives
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, ReasonResponsePrivate)
|
||||
}
|
||||
|
||||
func TestGETPrivateWithPrivateCache(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
RespDirectives, err := ParseResponseCacheControl("private")
|
||||
require.NoError(t, err)
|
||||
|
||||
obj.CacheIsPrivate = true
|
||||
obj.RespDirectives = RespDirectives
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 0)
|
||||
}
|
||||
|
||||
func TestUncachableMethods(t *testing.T) {
|
||||
type methodPair struct {
|
||||
m string
|
||||
r Reason
|
||||
}
|
||||
|
||||
tc := []methodPair{
|
||||
{"PUT", ReasonRequestMethodPUT},
|
||||
{"DELETE", ReasonRequestMethodDELETE},
|
||||
{"CONNECT", ReasonRequestMethodCONNECT},
|
||||
{"OPTIONS", ReasonRequestMethodOPTIONS},
|
||||
{"CONNECT", ReasonRequestMethodCONNECT},
|
||||
{"TRACE", ReasonRequestMethodTRACE},
|
||||
{"MADEUP", ReasonRequestMethodUnkown},
|
||||
}
|
||||
|
||||
for _, mp := range tc {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = mp.m
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, mp.r)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHEAD(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = "HEAD"
|
||||
obj.RespLastModifiedHeader = now.Add(time.Hour * -1)
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 0)
|
||||
|
||||
ExpirationObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 0)
|
||||
require.False(t, rv.OutExpirationTime.IsZero())
|
||||
}
|
||||
|
||||
func TestHEADLongLastModified(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = "HEAD"
|
||||
obj.RespLastModifiedHeader = now.Add(time.Hour * -70000)
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 0)
|
||||
|
||||
ExpirationObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 0)
|
||||
require.False(t, rv.OutExpirationTime.IsZero())
|
||||
require.WithinDuration(t, now.Add(twentyFourHours), rv.OutExpirationTime, time.Second*60)
|
||||
}
|
||||
|
||||
func TestNonCachablePOST(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = "POST"
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, ReasonRequestMethodPOST)
|
||||
}
|
||||
|
||||
func TestCachablePOSTExpiresHeader(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = "POST"
|
||||
obj.RespExpiresHeader = now.Add(time.Hour * 1)
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 0)
|
||||
}
|
||||
|
||||
func TestCachablePOSTSMax(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = "POST"
|
||||
obj.RespDirectives.SMaxAge = DeltaSeconds(900)
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 0)
|
||||
}
|
||||
|
||||
func TestNonCachablePOSTSMax(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = "POST"
|
||||
obj.CacheIsPrivate = true
|
||||
obj.RespDirectives.SMaxAge = DeltaSeconds(900)
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, ReasonRequestMethodPOST)
|
||||
}
|
||||
|
||||
func TestCachablePOSTMax(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = "POST"
|
||||
obj.RespDirectives.MaxAge = DeltaSeconds(9000)
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 0)
|
||||
}
|
||||
|
||||
func TestPUTs(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = "PUT"
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, ReasonRequestMethodPUT)
|
||||
}
|
||||
|
||||
func TestPUTWithExpires(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqMethod = "PUT"
|
||||
obj.RespExpiresHeader = now.Add(time.Hour * 1)
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, ReasonRequestMethodPUT)
|
||||
}
|
||||
|
||||
func TestAuthorization(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqHeaders.Set("Authorization", "bearer random")
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, ReasonRequestAuthorizationHeader)
|
||||
}
|
||||
|
||||
func TestCachableAuthorization(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqHeaders.Set("Authorization", "bearer random")
|
||||
obj.RespDirectives.Public = true
|
||||
obj.RespDirectives.MaxAge = DeltaSeconds(300)
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.NoError(t, rv.OutErr)
|
||||
require.Len(t, rv.OutReasons, 0)
|
||||
}
|
||||
|
||||
func TestRespNoStore(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.RespDirectives.NoStore = true
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, ReasonResponseNoStore)
|
||||
}
|
||||
|
||||
func TestReqNoStore(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.ReqDirectives.NoStore = true
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, ReasonRequestNoStore)
|
||||
}
|
||||
|
||||
func TestResp500(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.RespStatusCode = 500
|
||||
|
||||
rv := ObjectResults{}
|
||||
CachableObject(&obj, &rv)
|
||||
require.Len(t, rv.OutReasons, 1)
|
||||
require.Contains(t, rv.OutReasons, ReasonResponseUncachableByDefault)
|
||||
}
|
||||
|
||||
func TestExpirationSMaxShared(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.RespDirectives.SMaxAge = DeltaSeconds(60)
|
||||
|
||||
rv := ObjectResults{}
|
||||
ExpirationObject(&obj, &rv)
|
||||
require.Len(t, rv.OutWarnings, 0)
|
||||
require.WithinDuration(t, now.Add(time.Second*60), rv.OutExpirationTime, time.Second*1)
|
||||
}
|
||||
|
||||
func TestExpirationSMaxPrivate(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.CacheIsPrivate = true
|
||||
obj.RespDirectives.SMaxAge = DeltaSeconds(60)
|
||||
|
||||
rv := ObjectResults{}
|
||||
ExpirationObject(&obj, &rv)
|
||||
require.Len(t, rv.OutWarnings, 0)
|
||||
require.True(t, rv.OutExpirationTime.IsZero())
|
||||
}
|
||||
|
||||
func TestExpirationMax(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
obj.RespDirectives.MaxAge = DeltaSeconds(60)
|
||||
|
||||
rv := ObjectResults{}
|
||||
ExpirationObject(&obj, &rv)
|
||||
require.Len(t, rv.OutWarnings, 0)
|
||||
require.WithinDuration(t, now.Add(time.Second*60), rv.OutExpirationTime, time.Second*1)
|
||||
}
|
||||
|
||||
func TestExpirationMaxAndSMax(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
// cache should select the SMax age since this is a shared cache.
|
||||
obj.RespDirectives.MaxAge = DeltaSeconds(60)
|
||||
obj.RespDirectives.SMaxAge = DeltaSeconds(900)
|
||||
|
||||
rv := ObjectResults{}
|
||||
ExpirationObject(&obj, &rv)
|
||||
require.Len(t, rv.OutWarnings, 0)
|
||||
require.WithinDuration(t, now.Add(time.Second*900), rv.OutExpirationTime, time.Second*1)
|
||||
}
|
||||
|
||||
func TestExpirationExpires(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
// cache should use the Expires header to compute the expiration time.
|
||||
obj.RespExpiresHeader = now.Add(time.Second * 1500)
|
||||
|
||||
rv := ObjectResults{}
|
||||
ExpirationObject(&obj, &rv)
|
||||
require.Len(t, rv.OutWarnings, 0)
|
||||
require.WithinDuration(t, now.Add(time.Second*1500), rv.OutExpirationTime, time.Second*1)
|
||||
}
|
||||
|
||||
func TestExpirationExpiresNoServerDate(t *testing.T) {
|
||||
now := time.Now().UTC()
|
||||
|
||||
obj := fill(t, now)
|
||||
// no Date header was supplied; the cache should still honor the Expires header.
|
||||
obj.RespDateHeader = time.Time{}
|
||||
obj.RespExpiresHeader = now.Add(time.Second * 1500)
|
||||
|
||||
rv := ObjectResults{}
|
||||
ExpirationObject(&obj, &rv)
|
||||
require.Len(t, rv.OutWarnings, 0)
|
||||
require.WithinDuration(t, now.Add(time.Second*1500), rv.OutExpirationTime, time.Second*1)
|
||||
}
|
95
vendor/github.com/pquerna/cachecontrol/cacheobject/reasons.go
generated
vendored
Normal file
|
@ -0,0 +1,95 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package cacheobject
|
||||
|
||||
// Represents a potential Reason to not cache an object.
|
||||
//
|
||||
// Applications may wish to ignore specific reasons, which will make them non-RFC
|
||||
// compliant, but this type gives them specific cases they can choose to ignore,
|
||||
// making them compliant in as many cases as they can.
|
||||
type Reason int
|
||||
|
||||
const (
|
||||
|
||||
// The request method was POST and an Expiration header was not supplied.
|
||||
ReasonRequestMethodPOST Reason = iota
|
||||
|
||||
// The request method was PUT and PUTs are not cachable.
|
||||
ReasonRequestMethodPUT
|
||||
|
||||
// The request method was DELETE and DELETEs are not cachable.
|
||||
ReasonRequestMethodDELETE
|
||||
|
||||
// The request method was CONNECT and CONNECTs are not cachable.
|
||||
ReasonRequestMethodCONNECT
|
||||
|
||||
// The request method was OPTIONS and OPTIONS are not cachable.
|
||||
ReasonRequestMethodOPTIONS
|
||||
|
||||
// The request method was TRACE and TRACEs are not cachable.
|
||||
ReasonRequestMethodTRACE
|
||||
|
||||
// The request method was not recognized by cachecontrol, and should not be cached.
|
||||
ReasonRequestMethodUnkown
|
||||
|
||||
// The request included a Cache-Control: no-store header
|
||||
ReasonRequestNoStore
|
||||
|
||||
// The request included an Authorization header without an explicit Public or Expiration time: http://tools.ietf.org/html/rfc7234#section-3.2
|
||||
ReasonRequestAuthorizationHeader
|
||||
|
||||
// The response included a Cache-Control: no-store header
|
||||
ReasonResponseNoStore
|
||||
|
||||
// The response included a Cache-Control: private header and this is not a private cache
|
||||
ReasonResponsePrivate
|
||||
|
||||
// The response failed to meet at least one of the conditions specified in RFC 7234 section 3: http://tools.ietf.org/html/rfc7234#section-3
|
||||
ReasonResponseUncachableByDefault
|
||||
)
|
||||
|
||||
func (r Reason) String() string {
|
||||
switch r {
|
||||
case ReasonRequestMethodPOST:
|
||||
return "ReasonRequestMethodPOST"
|
||||
case ReasonRequestMethodPUT:
|
||||
return "ReasonRequestMethodPUT"
|
||||
case ReasonRequestMethodDELETE:
|
||||
return "ReasonRequestMethodDELETE"
|
||||
case ReasonRequestMethodCONNECT:
|
||||
return "ReasonRequestMethodCONNECT"
|
||||
case ReasonRequestMethodOPTIONS:
|
||||
return "ReasonRequestMethodOPTIONS"
|
||||
case ReasonRequestMethodTRACE:
|
||||
return "ReasonRequestMethodTRACE"
|
||||
case ReasonRequestMethodUnkown:
|
||||
return "ReasonRequestMethodUnkown"
|
||||
case ReasonRequestNoStore:
|
||||
return "ReasonRequestNoStore"
|
||||
case ReasonRequestAuthorizationHeader:
|
||||
return "ReasonRequestAuthorizationHeader"
|
||||
case ReasonResponseNoStore:
|
||||
return "ReasonResponseNoStore"
|
||||
case ReasonResponsePrivate:
|
||||
return "ReasonResponsePrivate"
|
||||
case ReasonResponseUncachableByDefault:
|
||||
return "ReasonResponseUncachableByDefault"
|
||||
}
|
||||
|
||||
panic(r)
|
||||
}
|
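The Reason type above notes that applications may deliberately ignore specific reasons at the cost of strict RFC 7234 compliance. A minimal sketch of that pattern, assuming only the vendored cacheobject package (the filterReasons helper is illustrative and not part of the library):

package main

import (
	"fmt"

	"github.com/pquerna/cachecontrol/cacheobject"
)

// filterReasons drops the reasons a particular cache has decided to ignore.
func filterReasons(reasons []cacheobject.Reason, ignore ...cacheobject.Reason) []cacheobject.Reason {
	var out []cacheobject.Reason
	for _, r := range reasons {
		ignored := false
		for _, ig := range ignore {
			if r == ig {
				ignored = true
				break
			}
		}
		if !ignored {
			out = append(out, r)
		}
	}
	return out
}

func main() {
	// A private cache might choose to ignore ReasonResponsePrivate.
	reasons := []cacheobject.Reason{cacheobject.ReasonResponsePrivate}
	remaining := filterReasons(reasons, cacheobject.ReasonResponsePrivate)
	fmt.Println("remaining reasons:", remaining) // empty slice => treat as cacheable
}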
107
vendor/github.com/pquerna/cachecontrol/cacheobject/warning.go
generated
vendored
Normal file
|
@ -0,0 +1,107 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package cacheobject
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Represents an HTTP Warning: http://tools.ietf.org/html/rfc7234#section-5.5
|
||||
type Warning int
|
||||
|
||||
const (
|
||||
// Response is Stale
|
||||
// A cache SHOULD generate this whenever the sent response is stale.
|
||||
WarningResponseIsStale Warning = 110
|
||||
|
||||
// Revalidation Failed
|
||||
// A cache SHOULD generate this when sending a stale
|
||||
// response because an attempt to validate the response failed, due to an
|
||||
// inability to reach the server.
|
||||
WarningRevalidationFailed Warning = 111
|
||||
|
||||
// Disconnected Operation
|
||||
// A cache SHOULD generate this if it is intentionally disconnected from
|
||||
// the rest of the network for a period of time.
|
||||
WarningDisconnectedOperation Warning = 112
|
||||
|
||||
// Heuristic Expiration
|
||||
//
|
||||
// A cache SHOULD generate this if it heuristically chose a freshness
|
||||
// lifetime greater than 24 hours and the response's age is greater than
|
||||
// 24 hours.
|
||||
WarningHeuristicExpiration Warning = 113
|
||||
|
||||
// Miscellaneous Warning
|
||||
//
|
||||
// The warning text can include arbitrary information to be presented to
|
||||
// a human user or logged. A system receiving this warning MUST NOT
|
||||
// take any automated action, besides presenting the warning to the
|
||||
// user.
|
||||
WarningMiscellaneousWarning Warning = 199
|
||||
|
||||
// Transformation Applied
|
||||
//
|
||||
// This Warning code MUST be added by a proxy if it applies any
|
||||
// transformation to the representation, such as changing the
|
||||
// content-coding, media-type, or modifying the representation data,
|
||||
// unless this Warning code already appears in the response.
|
||||
WarningTransformationApplied Warning = 214
|
||||
|
||||
// Miscellaneous Persistent Warning
|
||||
//
|
||||
// The warning text can include arbitrary information to be presented to
|
||||
// a human user or logged. A system receiving this warning MUST NOT
|
||||
// take any automated action.
|
||||
WarningMiscellaneousPersistentWarning Warning = 299
|
||||
)
|
||||
|
||||
func (w Warning) HeaderString(agent string, date time.Time) string {
|
||||
if agent == "" {
|
||||
agent = "-"
|
||||
} else {
|
||||
// TODO(pquerna): this doesn't escape agent if it contains bad things.
|
||||
agent = `"` + agent + `"`
|
||||
}
|
||||
return fmt.Sprintf(`%d %s "%s" %s`, w, agent, w.String(), date.Format(http.TimeFormat))
|
||||
}
|
||||
|
||||
func (w Warning) String() string {
|
||||
switch w {
|
||||
case WarningResponseIsStale:
|
||||
return "Response is Stale"
|
||||
case WarningRevalidationFailed:
|
||||
return "Revalidation Failed"
|
||||
case WarningDisconnectedOperation:
|
||||
return "Disconnected Operation"
|
||||
case WarningHeuristicExpiration:
|
||||
return "Heuristic Expiration"
|
||||
case WarningMiscellaneousWarning:
|
||||
// TODO(pquerna): ideally had a better way to override this one code.
|
||||
return "Miscellaneous Warning"
|
||||
case WarningTransformationApplied:
|
||||
return "Transformation Applied"
|
||||
case WarningMiscellaneousPersistentWarning:
|
||||
// TODO(pquerna): same as WarningMiscellaneousWarning
|
||||
return "Miscellaneous Persistent Warning"
|
||||
}
|
||||
|
||||
panic(w)
|
||||
}
|
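The Warning constants above map directly to the warn-codes from RFC 7234 section 5.5, and HeaderString renders a complete header value. A minimal sketch of attaching a "110 Response is Stale" warning to a response served from cache, assuming only the vendored cacheobject package:

package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/pquerna/cachecontrol/cacheobject"
)

// addStaleWarning appends a Warning header announcing that the response being
// served is stale, e.g.: 110 "my-cache" "Response is Stale" <HTTP-date>
func addStaleWarning(h http.Header, agent string) {
	h.Add("Warning", cacheobject.WarningResponseIsStale.HeaderString(agent, time.Now().UTC()))
}

func main() {
	h := http.Header{}
	addStaleWarning(h, "my-cache")
	fmt.Println(h.Get("Warning"))
}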
25
vendor/github.com/pquerna/cachecontrol/doc.go
generated
vendored
Normal file
|
@ -0,0 +1,25 @@
|
|||
/**
|
||||
* Copyright 2015 Paul Querna
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
// Package cachecontrol implements the logic for HTTP Caching
|
||||
//
|
||||
// Deciding if an HTTP Response can be cached is often harder
|
||||
// and more bug prone than an actual cache storage backend.
|
||||
// cachecontrol provides a simple interface to determine if
|
||||
// request and response pairs are cachable as defined under
|
||||
// RFC 7234 http://tools.ietf.org/html/rfc7234
|
||||
package cachecontrol
|
21
vendor/github.com/pquerna/cachecontrol/examples/example-com.go
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/pquerna/cachecontrol"
|
||||
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func main() {
|
||||
req, _ := http.NewRequest("GET", "http://www.example.com/", nil)
|
||||
|
||||
res, _ := http.DefaultClient.Do(req)
|
||||
_, _ = ioutil.ReadAll(res.Body)
|
||||
|
||||
reasons, expires, _ := cachecontrol.CachableResponse(req, res, cachecontrol.Options{})
|
||||
|
||||
fmt.Println("Reasons to not cache: ", reasons)
|
||||
fmt.Println("Expiration: ", expires.String())
|
||||
}
|
48
vendor/github.com/pquerna/cachecontrol/examples/lowlevel/ll-example-com.go
generated
vendored
Normal file
|
@ -0,0 +1,48 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/pquerna/cachecontrol/cacheobject"
|
||||
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
func main() {
|
||||
req, _ := http.NewRequest("GET", "http://www.example.com/", nil)
|
||||
|
||||
res, _ := http.DefaultClient.Do(req)
|
||||
_, _ = ioutil.ReadAll(res.Body)
|
||||
|
||||
reqDir, _ := cacheobject.ParseRequestCacheControl(req.Header.Get("Cache-Control"))
|
||||
|
||||
resDir, _ := cacheobject.ParseResponseCacheControl(res.Header.Get("Cache-Control"))
|
||||
expiresHeader, _ := http.ParseTime(res.Header.Get("Expires"))
|
||||
dateHeader, _ := http.ParseTime(res.Header.Get("Date"))
|
||||
lastModifiedHeader, _ := http.ParseTime(res.Header.Get("Last-Modified"))
|
||||
|
||||
obj := cacheobject.Object{
|
||||
RespDirectives: resDir,
|
||||
RespHeaders: res.Header,
|
||||
RespStatusCode: res.StatusCode,
|
||||
RespExpiresHeader: expiresHeader,
|
||||
RespDateHeader: dateHeader,
|
||||
RespLastModifiedHeader: lastModifiedHeader,
|
||||
|
||||
ReqDirectives: reqDir,
|
||||
ReqHeaders: req.Header,
|
||||
ReqMethod: req.Method,
|
||||
|
||||
NowUTC: time.Now().UTC(),
|
||||
}
|
||||
rv := cacheobject.ObjectResults{}
|
||||
|
||||
cacheobject.CachableObject(&obj, &rv)
|
||||
cacheobject.ExpirationObject(&obj, &rv)
|
||||
|
||||
fmt.Println("Errors: ", rv.OutErr)
|
||||
fmt.Println("Reasons to not cache: ", rv.OutReasons)
|
||||
fmt.Println("Warning headers to add: ", rv.OutWarnings)
|
||||
fmt.Println("Expiration: ", rv.OutExpirationTime.String())
|
||||
}
|
1
vendor/gopkg.in/square/go-jose.v2/.gitcookies.sh.enc
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
'|Ę&{tÄU|gGę(ěŹCy=+¨śňcű:u:/pś#~žü["±4¤!nŮAŞDK<Šuf˙hĹażÂ:şü¸ˇ´B/ŁŘ¤ą¤ň_<C588>hÎŰSăT*wĚxĽŻťą-ç|ťŕŔÓ<C594>ŃÄäóĚ㣗A$$â6ŁÁâG)8nĎpűĆˡ3ĚšśoďĎvŽB–3ż]xÝ“Ó2l§G•|qRŢŻ
ö2
5R–Ó×Ç$´ń˝YčˇŢÝ™l‘Ë«yAI"ŰŚ<C5B0>®íĂ»ąĽkÄ|Kĺţ[9ĆâŇĺ=°ú˙źń|@S•3ó#ćťx?ľV„,ľ‚SĆÝőśwPíogŇ6&V6 ©D.dBŠ7
|
7
vendor/gopkg.in/square/go-jose.v2/.gitignore
generated
vendored
Normal file
|
@ -0,0 +1,7 @@
|
|||
*~
|
||||
.*.swp
|
||||
*.out
|
||||
*.test
|
||||
*.pem
|
||||
*.cov
|
||||
jose-util/jose-util
|
46
vendor/gopkg.in/square/go-jose.v2/.travis.yml
generated
vendored
Normal file
|
@ -0,0 +1,46 @@
|
|||
language: go
|
||||
|
||||
sudo: false
|
||||
|
||||
matrix:
|
||||
fast_finish: true
|
||||
allow_failures:
|
||||
- go: tip
|
||||
|
||||
go:
|
||||
- '1.7.x'
|
||||
- '1.8.x'
|
||||
- '1.9.x'
|
||||
- '1.10.x'
|
||||
- '1.11.x'
|
||||
|
||||
go_import_path: gopkg.in/square/go-jose.v2
|
||||
|
||||
before_script:
|
||||
- export PATH=$HOME/.local/bin:$PATH
|
||||
|
||||
before_install:
|
||||
# Install encrypted gitcookies to get around bandwidth-limits
|
||||
# that are causing Travis-CI builds to fail. For more info, see
|
||||
# https://github.com/golang/go/issues/12933
|
||||
- openssl aes-256-cbc -K $encrypted_1528c3c2cafd_key -iv $encrypted_1528c3c2cafd_iv -in .gitcookies.sh.enc -out .gitcookies.sh -d || true
|
||||
- bash .gitcookies.sh || true
|
||||
- go get github.com/wadey/gocovmerge
|
||||
- go get github.com/mattn/goveralls
|
||||
- go get github.com/stretchr/testify/assert
|
||||
- go get golang.org/x/tools/cmd/cover || true
|
||||
- go get code.google.com/p/go.tools/cmd/cover || true
|
||||
- pip install cram --user
|
||||
|
||||
script:
|
||||
- go test . -v -covermode=count -coverprofile=profile.cov
|
||||
- go test ./cipher -v -covermode=count -coverprofile=cipher/profile.cov
|
||||
- go test ./jwt -v -covermode=count -coverprofile=jwt/profile.cov
|
||||
- go test ./json -v # no coverage for forked encoding/json package
|
||||
- cd jose-util && go build && PATH=$PWD:$PATH cram -v jose-util.t
|
||||
- cd ..
|
||||
|
||||
after_success:
|
||||
- gocovmerge *.cov */*.cov > merged.coverprofile
|
||||
- $HOME/gopath/bin/goveralls -coverprofile merged.coverprofile -service=travis-ci
|
||||
|
10
vendor/gopkg.in/square/go-jose.v2/BUG-BOUNTY.md
generated
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
Serious about security
|
||||
======================
|
||||
|
||||
Square recognizes the important contributions the security research community
|
||||
can make. We therefore encourage reporting security issues with the code
|
||||
contained in this repository.
|
||||
|
||||
If you believe you have discovered a security vulnerability, please follow the
|
||||
guidelines at <https://bugcrowd.com/squareopensource>.
|
||||
|
14
vendor/gopkg.in/square/go-jose.v2/CONTRIBUTING.md
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
# Contributing
|
||||
|
||||
If you would like to contribute code to go-jose you can do so through GitHub by
|
||||
forking the repository and sending a pull request.
|
||||
|
||||
When submitting code, please make every effort to follow existing conventions
|
||||
and style in order to keep the code as readable as possible. Please also make
|
||||
sure all tests pass by running `go test`, and format your code with `go fmt`.
|
||||
We also recommend using `golint` and `errcheck`.
|
||||
|
||||
Before your code can be accepted into the project you must also sign the
|
||||
[Individual Contributor License Agreement][1].
|
||||
|
||||
[1]: https://spreadsheets.google.com/spreadsheet/viewform?formkey=dDViT2xzUHAwRkI3X3k5Z0lQM091OGc6MQ&ndplr=1
|
202
vendor/gopkg.in/square/go-jose.v2/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
118
vendor/gopkg.in/square/go-jose.v2/README.md
generated
vendored
Normal file
|
@ -0,0 +1,118 @@
|
|||
# Go JOSE
|
||||
|
||||
[![godoc](http://img.shields.io/badge/godoc-version_1-blue.svg?style=flat)](https://godoc.org/gopkg.in/square/go-jose.v1)
|
||||
[![godoc](http://img.shields.io/badge/godoc-version_2-blue.svg?style=flat)](https://godoc.org/gopkg.in/square/go-jose.v2)
|
||||
[![license](http://img.shields.io/badge/license-apache_2.0-blue.svg?style=flat)](https://raw.githubusercontent.com/square/go-jose/master/LICENSE)
|
||||
[![build](https://travis-ci.org/square/go-jose.svg?branch=v2)](https://travis-ci.org/square/go-jose)
|
||||
[![coverage](https://coveralls.io/repos/github/square/go-jose/badge.svg?branch=v2)](https://coveralls.io/r/square/go-jose)
|
||||
|
||||
Package jose aims to provide an implementation of the Javascript Object Signing
|
||||
and Encryption set of standards. This includes support for JSON Web Encryption,
|
||||
JSON Web Signature, and JSON Web Token standards.
|
||||
|
||||
**Disclaimer**: This library contains encryption software that is subject to
|
||||
the U.S. Export Administration Regulations. You may not export, re-export,
|
||||
transfer or download this code or any part of it in violation of any United
|
||||
States law, directive or regulation. In particular this software may not be
|
||||
exported or re-exported in any form or on any media to Iran, North Sudan,
|
||||
Syria, Cuba, or North Korea, or to denied persons or entities mentioned on any
|
||||
US maintained blocked list.
|
||||
|
||||
## Overview
|
||||
|
||||
The implementation follows the
|
||||
[JSON Web Encryption](http://dx.doi.org/10.17487/RFC7516) (RFC 7516),
|
||||
[JSON Web Signature](http://dx.doi.org/10.17487/RFC7515) (RFC 7515), and
|
||||
[JSON Web Token](http://dx.doi.org/10.17487/RFC7519) (RFC 7519).
|
||||
Tables of supported algorithms are shown below. The library supports both
|
||||
the compact and full serialization formats, and has optional support for
|
||||
multiple recipients. It also comes with a small command-line utility
|
||||
([`jose-util`](https://github.com/square/go-jose/tree/v2/jose-util))
|
||||
for dealing with JOSE messages in a shell.
|
||||
|
||||
**Note**: We use a forked version of the `encoding/json` package from the Go
|
||||
standard library which uses case-sensitive matching for member names (instead
|
||||
of [case-insensitive matching](https://www.ietf.org/mail-archive/web/json/current/msg03763.html)).
|
||||
This is to avoid differences in interpretation of messages between go-jose and
|
||||
libraries in other languages.
|
||||
|
||||
### Versions
|
||||
|
||||
We use [gopkg.in](https://gopkg.in) for versioning.
|
||||
|
||||
[Version 2](https://gopkg.in/square/go-jose.v2)
|
||||
([branch](https://github.com/square/go-jose/tree/v2),
|
||||
[doc](https://godoc.org/gopkg.in/square/go-jose.v2)) is the current version:
|
||||
|
||||
import "gopkg.in/square/go-jose.v2"
|
||||
|
||||
The old `v1` branch ([go-jose.v1](https://gopkg.in/square/go-jose.v1)) will
|
||||
still receive backported bug fixes and security fixes, but otherwise
|
||||
development is frozen. All new feature development takes place on the `v2`
|
||||
branch. Version 2 also contains additional sub-packages such as the
|
||||
[jwt](https://godoc.org/gopkg.in/square/go-jose.v2/jwt) implementation
|
||||
contributed by [@shaxbee](https://github.com/shaxbee).
|
||||
|
||||
### Supported algorithms
|
||||
|
||||
See below for a table of supported algorithms. Algorithm identifiers match
|
||||
the names in the [JSON Web Algorithms](http://dx.doi.org/10.17487/RFC7518)
|
||||
standard where possible. The Godoc reference has a list of constants.
|
||||
|
||||
Key encryption | Algorithm identifier(s)
|
||||
:------------------------- | :------------------------------
|
||||
RSA-PKCS#1v1.5 | RSA1_5
|
||||
RSA-OAEP | RSA-OAEP, RSA-OAEP-256
|
||||
AES key wrap | A128KW, A192KW, A256KW
|
||||
AES-GCM key wrap | A128GCMKW, A192GCMKW, A256GCMKW
|
||||
ECDH-ES + AES key wrap | ECDH-ES+A128KW, ECDH-ES+A192KW, ECDH-ES+A256KW
|
||||
ECDH-ES (direct) | ECDH-ES<sup>1</sup>
|
||||
Direct encryption | dir<sup>1</sup>
|
||||
|
||||
<sup>1. Not supported in multi-recipient mode</sup>
|
||||
|
||||
Signing / MAC | Algorithm identifier(s)
|
||||
:------------------------- | :------------------------------
|
||||
RSASSA-PKCS#1v1.5 | RS256, RS384, RS512
|
||||
RSASSA-PSS | PS256, PS384, PS512
|
||||
HMAC | HS256, HS384, HS512
|
||||
ECDSA | ES256, ES384, ES512
|
||||
Ed25519 | EdDSA<sup>2</sup>
|
||||
|
||||
<sup>2. Only available in version 2 of the package</sup>
|
||||
|
||||
Content encryption | Algorithm identifier(s)
|
||||
:------------------------- | :------------------------------
|
||||
AES-CBC+HMAC | A128CBC-HS256, A192CBC-HS384, A256CBC-HS512
|
||||
AES-GCM | A128GCM, A192GCM, A256GCM
|
||||
|
||||
Compression                 | Algorithm identifier(s)
|
||||
:------------------------- | -------------------------------
|
||||
DEFLATE (RFC 1951) | DEF
|
||||
|
||||
### Supported key types
|
||||
|
||||
See below for a table of supported key types. These are understood by the
|
||||
library, and can be passed to corresponding functions such as `NewEncrypter` or
|
||||
`NewSigner`. Each of these keys can also be wrapped in a JWK if desired, which
|
||||
allows attaching a key id.
|
||||
|
||||
Algorithm(s) | Corresponding types
|
||||
:------------------------- | -------------------------------
|
||||
RSA | *[rsa.PublicKey](http://golang.org/pkg/crypto/rsa/#PublicKey), *[rsa.PrivateKey](http://golang.org/pkg/crypto/rsa/#PrivateKey)
|
||||
ECDH, ECDSA | *[ecdsa.PublicKey](http://golang.org/pkg/crypto/ecdsa/#PublicKey), *[ecdsa.PrivateKey](http://golang.org/pkg/crypto/ecdsa/#PrivateKey)
|
||||
EdDSA<sup>1</sup> | [ed25519.PublicKey](https://godoc.org/golang.org/x/crypto/ed25519#PublicKey), [ed25519.PrivateKey](https://godoc.org/golang.org/x/crypto/ed25519#PrivateKey)
|
||||
AES, HMAC | []byte
|
||||
|
||||
<sup>1. Only available in version 2 of the package</sup>
|
||||
|
||||
## Examples
|
||||
|
||||
[![godoc](http://img.shields.io/badge/godoc-version_1-blue.svg?style=flat)](https://godoc.org/gopkg.in/square/go-jose.v1)
|
||||
[![godoc](http://img.shields.io/badge/godoc-version_2-blue.svg?style=flat)](https://godoc.org/gopkg.in/square/go-jose.v2)
|
||||
|
||||
Examples can be found in the Godoc
|
||||
reference for this package. The
|
||||
[`jose-util`](https://github.com/square/go-jose/tree/v2/jose-util)
|
||||
subdirectory also contains a small command-line utility which might be useful
|
||||
as an example.
|
592
vendor/gopkg.in/square/go-jose.v2/asymmetric.go
generated
vendored
Normal file
|
@ -0,0 +1,592 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"crypto"
|
||||
"crypto/aes"
|
||||
"crypto/ecdsa"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"crypto/sha1"
|
||||
"crypto/sha256"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math/big"
|
||||
|
||||
"golang.org/x/crypto/ed25519"
|
||||
"gopkg.in/square/go-jose.v2/cipher"
|
||||
"gopkg.in/square/go-jose.v2/json"
|
||||
)
|
||||
|
||||
// A generic RSA-based encrypter/verifier
|
||||
type rsaEncrypterVerifier struct {
|
||||
publicKey *rsa.PublicKey
|
||||
}
|
||||
|
||||
// A generic RSA-based decrypter/signer
|
||||
type rsaDecrypterSigner struct {
|
||||
privateKey *rsa.PrivateKey
|
||||
}
|
||||
|
||||
// A generic EC-based encrypter/verifier
|
||||
type ecEncrypterVerifier struct {
|
||||
publicKey *ecdsa.PublicKey
|
||||
}
|
||||
|
||||
type edEncrypterVerifier struct {
|
||||
publicKey ed25519.PublicKey
|
||||
}
|
||||
|
||||
// A key generator for ECDH-ES
|
||||
type ecKeyGenerator struct {
|
||||
size int
|
||||
algID string
|
||||
publicKey *ecdsa.PublicKey
|
||||
}
|
||||
|
||||
// A generic EC-based decrypter/signer
|
||||
type ecDecrypterSigner struct {
|
||||
privateKey *ecdsa.PrivateKey
|
||||
}
|
||||
|
||||
type edDecrypterSigner struct {
|
||||
privateKey ed25519.PrivateKey
|
||||
}
|
||||
|
||||
// newRSARecipient creates recipientKeyInfo based on the given key.
|
||||
func newRSARecipient(keyAlg KeyAlgorithm, publicKey *rsa.PublicKey) (recipientKeyInfo, error) {
|
||||
// Verify that key management algorithm is supported by this encrypter
|
||||
switch keyAlg {
|
||||
case RSA1_5, RSA_OAEP, RSA_OAEP_256:
|
||||
default:
|
||||
return recipientKeyInfo{}, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
if publicKey == nil {
|
||||
return recipientKeyInfo{}, errors.New("invalid public key")
|
||||
}
|
||||
|
||||
return recipientKeyInfo{
|
||||
keyAlg: keyAlg,
|
||||
keyEncrypter: &rsaEncrypterVerifier{
|
||||
publicKey: publicKey,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// newRSASigner creates a recipientSigInfo based on the given key.
|
||||
func newRSASigner(sigAlg SignatureAlgorithm, privateKey *rsa.PrivateKey) (recipientSigInfo, error) {
|
||||
// Verify that key management algorithm is supported by this encrypter
|
||||
switch sigAlg {
|
||||
case RS256, RS384, RS512, PS256, PS384, PS512:
|
||||
default:
|
||||
return recipientSigInfo{}, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
if privateKey == nil {
|
||||
return recipientSigInfo{}, errors.New("invalid private key")
|
||||
}
|
||||
|
||||
return recipientSigInfo{
|
||||
sigAlg: sigAlg,
|
||||
publicKey: staticPublicKey(&JSONWebKey{
|
||||
Key: privateKey.Public(),
|
||||
}),
|
||||
signer: &rsaDecrypterSigner{
|
||||
privateKey: privateKey,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func newEd25519Signer(sigAlg SignatureAlgorithm, privateKey ed25519.PrivateKey) (recipientSigInfo, error) {
|
||||
if sigAlg != EdDSA {
|
||||
return recipientSigInfo{}, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
if privateKey == nil {
|
||||
return recipientSigInfo{}, errors.New("invalid private key")
|
||||
}
|
||||
return recipientSigInfo{
|
||||
sigAlg: sigAlg,
|
||||
publicKey: staticPublicKey(&JSONWebKey{
|
||||
Key: privateKey.Public(),
|
||||
}),
|
||||
signer: &edDecrypterSigner{
|
||||
privateKey: privateKey,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// newECDHRecipient creates recipientKeyInfo based on the given key.
|
||||
func newECDHRecipient(keyAlg KeyAlgorithm, publicKey *ecdsa.PublicKey) (recipientKeyInfo, error) {
|
||||
// Verify that key management algorithm is supported by this encrypter
|
||||
switch keyAlg {
|
||||
case ECDH_ES, ECDH_ES_A128KW, ECDH_ES_A192KW, ECDH_ES_A256KW:
|
||||
default:
|
||||
return recipientKeyInfo{}, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
if publicKey == nil || !publicKey.Curve.IsOnCurve(publicKey.X, publicKey.Y) {
|
||||
return recipientKeyInfo{}, errors.New("invalid public key")
|
||||
}
|
||||
|
||||
return recipientKeyInfo{
|
||||
keyAlg: keyAlg,
|
||||
keyEncrypter: &ecEncrypterVerifier{
|
||||
publicKey: publicKey,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// newECDSASigner creates a recipientSigInfo based on the given key.
|
||||
func newECDSASigner(sigAlg SignatureAlgorithm, privateKey *ecdsa.PrivateKey) (recipientSigInfo, error) {
|
||||
// Verify that key management algorithm is supported by this encrypter
|
||||
switch sigAlg {
|
||||
case ES256, ES384, ES512:
|
||||
default:
|
||||
return recipientSigInfo{}, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
if privateKey == nil {
|
||||
return recipientSigInfo{}, errors.New("invalid private key")
|
||||
}
|
||||
|
||||
return recipientSigInfo{
|
||||
sigAlg: sigAlg,
|
||||
publicKey: staticPublicKey(&JSONWebKey{
|
||||
Key: privateKey.Public(),
|
||||
}),
|
||||
signer: &ecDecrypterSigner{
|
||||
privateKey: privateKey,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Encrypt the given payload and update the object.
|
||||
func (ctx rsaEncrypterVerifier) encryptKey(cek []byte, alg KeyAlgorithm) (recipientInfo, error) {
|
||||
encryptedKey, err := ctx.encrypt(cek, alg)
|
||||
if err != nil {
|
||||
return recipientInfo{}, err
|
||||
}
|
||||
|
||||
return recipientInfo{
|
||||
encryptedKey: encryptedKey,
|
||||
header: &rawHeader{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Encrypt the given payload. Based on the key encryption algorithm,
|
||||
// this will either use RSA-PKCS1v1.5 or RSA-OAEP (with SHA-1 or SHA-256).
|
||||
func (ctx rsaEncrypterVerifier) encrypt(cek []byte, alg KeyAlgorithm) ([]byte, error) {
|
||||
switch alg {
|
||||
case RSA1_5:
|
||||
return rsa.EncryptPKCS1v15(RandReader, ctx.publicKey, cek)
|
||||
case RSA_OAEP:
|
||||
return rsa.EncryptOAEP(sha1.New(), RandReader, ctx.publicKey, cek, []byte{})
|
||||
case RSA_OAEP_256:
|
||||
return rsa.EncryptOAEP(sha256.New(), RandReader, ctx.publicKey, cek, []byte{})
|
||||
}
|
||||
|
||||
return nil, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
// Decrypt the given payload and return the content encryption key.
|
||||
func (ctx rsaDecrypterSigner) decryptKey(headers rawHeader, recipient *recipientInfo, generator keyGenerator) ([]byte, error) {
|
||||
return ctx.decrypt(recipient.encryptedKey, headers.getAlgorithm(), generator)
|
||||
}
|
||||
|
||||
// Decrypt the given payload. Based on the key encryption algorithm,
|
||||
// this will either use RSA-PKCS1v1.5 or RSA-OAEP (with SHA-1 or SHA-256).
|
||||
func (ctx rsaDecrypterSigner) decrypt(jek []byte, alg KeyAlgorithm, generator keyGenerator) ([]byte, error) {
|
||||
// Note: The random reader on decrypt operations is only used for blinding,
|
||||
// so stubbing is meaningless (hence the direct use of rand.Reader).
|
||||
switch alg {
|
||||
case RSA1_5:
|
||||
defer func() {
|
||||
// DecryptPKCS1v15SessionKey sometimes panics on an invalid payload
|
||||
// because of an index out of bounds error, which we want to ignore.
|
||||
// This has been fixed in Go 1.3.1 (released 2014/08/13), the recover()
|
||||
// only exists for preventing crashes with unpatched versions.
|
||||
// See: https://groups.google.com/forum/#!topic/golang-dev/7ihX6Y6kx9k
|
||||
// See: https://code.google.com/p/go/source/detail?r=58ee390ff31602edb66af41ed10901ec95904d33
|
||||
_ = recover()
|
||||
}()
|
||||
|
||||
// Perform some input validation.
|
||||
keyBytes := ctx.privateKey.PublicKey.N.BitLen() / 8
|
||||
if keyBytes != len(jek) {
|
||||
// Input size is incorrect, the encrypted payload should always match
|
||||
// the size of the public modulus (e.g. using a 2048 bit key will
|
||||
// produce 256 bytes of output). Reject this since it's invalid input.
|
||||
return nil, ErrCryptoFailure
|
||||
}
|
||||
|
||||
cek, _, err := generator.genKey()
|
||||
if err != nil {
|
||||
return nil, ErrCryptoFailure
|
||||
}
|
||||
|
||||
// When decrypting an RSA-PKCS1v1.5 payload, we must take precautions to
|
||||
// prevent chosen-ciphertext attacks as described in RFC 3218, "Preventing
|
||||
// the Million Message Attack on Cryptographic Message Syntax". We are
|
||||
// therefore deliberately ignoring errors here.
|
||||
_ = rsa.DecryptPKCS1v15SessionKey(rand.Reader, ctx.privateKey, jek, cek)
|
||||
|
||||
return cek, nil
|
||||
case RSA_OAEP:
|
||||
// Use rand.Reader for RSA blinding
|
||||
return rsa.DecryptOAEP(sha1.New(), rand.Reader, ctx.privateKey, jek, []byte{})
|
||||
case RSA_OAEP_256:
|
||||
// Use rand.Reader for RSA blinding
|
||||
return rsa.DecryptOAEP(sha256.New(), rand.Reader, ctx.privateKey, jek, []byte{})
|
||||
}
|
||||
|
||||
return nil, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
// Sign the given payload
|
||||
func (ctx rsaDecrypterSigner) signPayload(payload []byte, alg SignatureAlgorithm) (Signature, error) {
|
||||
var hash crypto.Hash
|
||||
|
||||
switch alg {
|
||||
case RS256, PS256:
|
||||
hash = crypto.SHA256
|
||||
case RS384, PS384:
|
||||
hash = crypto.SHA384
|
||||
case RS512, PS512:
|
||||
hash = crypto.SHA512
|
||||
default:
|
||||
return Signature{}, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
hasher := hash.New()
|
||||
|
||||
// According to documentation, Write() on hash never fails
|
||||
_, _ = hasher.Write(payload)
|
||||
hashed := hasher.Sum(nil)
|
||||
|
||||
var out []byte
|
||||
var err error
|
||||
|
||||
switch alg {
|
||||
case RS256, RS384, RS512:
|
||||
out, err = rsa.SignPKCS1v15(RandReader, ctx.privateKey, hash, hashed)
|
||||
case PS256, PS384, PS512:
|
||||
out, err = rsa.SignPSS(RandReader, ctx.privateKey, hash, hashed, &rsa.PSSOptions{
|
||||
SaltLength: rsa.PSSSaltLengthAuto,
|
||||
})
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return Signature{}, err
|
||||
}
|
||||
|
||||
return Signature{
|
||||
Signature: out,
|
||||
protected: &rawHeader{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Verify the given payload
|
||||
func (ctx rsaEncrypterVerifier) verifyPayload(payload []byte, signature []byte, alg SignatureAlgorithm) error {
|
||||
var hash crypto.Hash
|
||||
|
||||
switch alg {
|
||||
case RS256, PS256:
|
||||
hash = crypto.SHA256
|
||||
case RS384, PS384:
|
||||
hash = crypto.SHA384
|
||||
case RS512, PS512:
|
||||
hash = crypto.SHA512
|
||||
default:
|
||||
return ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
hasher := hash.New()
|
||||
|
||||
// According to documentation, Write() on hash never fails
|
||||
_, _ = hasher.Write(payload)
|
||||
hashed := hasher.Sum(nil)
|
||||
|
||||
switch alg {
|
||||
case RS256, RS384, RS512:
|
||||
return rsa.VerifyPKCS1v15(ctx.publicKey, hash, hashed, signature)
|
||||
case PS256, PS384, PS512:
|
||||
return rsa.VerifyPSS(ctx.publicKey, hash, hashed, signature, nil)
|
||||
}
|
||||
|
||||
return ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
// Encrypt the given payload and update the object.
|
||||
func (ctx ecEncrypterVerifier) encryptKey(cek []byte, alg KeyAlgorithm) (recipientInfo, error) {
|
||||
switch alg {
|
||||
case ECDH_ES:
|
||||
// ECDH-ES mode doesn't wrap a key; the shared secret is used directly as the key.
|
||||
return recipientInfo{
|
||||
header: &rawHeader{},
|
||||
}, nil
|
||||
case ECDH_ES_A128KW, ECDH_ES_A192KW, ECDH_ES_A256KW:
|
||||
default:
|
||||
return recipientInfo{}, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
generator := ecKeyGenerator{
|
||||
algID: string(alg),
|
||||
publicKey: ctx.publicKey,
|
||||
}
|
||||
|
||||
switch alg {
|
||||
case ECDH_ES_A128KW:
|
||||
generator.size = 16
|
||||
case ECDH_ES_A192KW:
|
||||
generator.size = 24
|
||||
case ECDH_ES_A256KW:
|
||||
generator.size = 32
|
||||
}
|
||||
|
||||
kek, header, err := generator.genKey()
|
||||
if err != nil {
|
||||
return recipientInfo{}, err
|
||||
}
|
||||
|
||||
block, err := aes.NewCipher(kek)
|
||||
if err != nil {
|
||||
return recipientInfo{}, err
|
||||
}
|
||||
|
||||
jek, err := josecipher.KeyWrap(block, cek)
|
||||
if err != nil {
|
||||
return recipientInfo{}, err
|
||||
}
|
||||
|
||||
return recipientInfo{
|
||||
encryptedKey: jek,
|
||||
header: &header,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Get key size for EC key generator
|
||||
func (ctx ecKeyGenerator) keySize() int {
|
||||
return ctx.size
|
||||
}
|
||||
|
||||
// Get a content encryption key for ECDH-ES
|
||||
func (ctx ecKeyGenerator) genKey() ([]byte, rawHeader, error) {
|
||||
priv, err := ecdsa.GenerateKey(ctx.publicKey.Curve, RandReader)
|
||||
if err != nil {
|
||||
return nil, rawHeader{}, err
|
||||
}
|
||||
|
||||
out := josecipher.DeriveECDHES(ctx.algID, []byte{}, []byte{}, priv, ctx.publicKey, ctx.size)
|
||||
|
||||
b, err := json.Marshal(&JSONWebKey{
|
||||
Key: &priv.PublicKey,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
headers := rawHeader{
|
||||
headerEPK: makeRawMessage(b),
|
||||
}
|
||||
|
||||
return out, headers, nil
|
||||
}
|
||||
|
||||
// Decrypt the given payload and return the content encryption key.
|
||||
func (ctx ecDecrypterSigner) decryptKey(headers rawHeader, recipient *recipientInfo, generator keyGenerator) ([]byte, error) {
|
||||
epk, err := headers.getEPK()
|
||||
if err != nil {
|
||||
return nil, errors.New("square/go-jose: invalid epk header")
|
||||
}
|
||||
if epk == nil {
|
||||
return nil, errors.New("square/go-jose: missing epk header")
|
||||
}
|
||||
|
||||
publicKey, ok := epk.Key.(*ecdsa.PublicKey)
|
||||
if publicKey == nil || !ok {
|
||||
return nil, errors.New("square/go-jose: invalid epk header")
|
||||
}
|
||||
|
||||
if !ctx.privateKey.Curve.IsOnCurve(publicKey.X, publicKey.Y) {
|
||||
return nil, errors.New("square/go-jose: invalid public key in epk header")
|
||||
}
|
||||
|
||||
apuData, err := headers.getAPU()
|
||||
if err != nil {
|
||||
return nil, errors.New("square/go-jose: invalid apu header")
|
||||
}
|
||||
apvData, err := headers.getAPV()
|
||||
if err != nil {
|
||||
return nil, errors.New("square/go-jose: invalid apv header")
|
||||
}
|
||||
|
||||
deriveKey := func(algID string, size int) []byte {
|
||||
return josecipher.DeriveECDHES(algID, apuData.bytes(), apvData.bytes(), ctx.privateKey, publicKey, size)
|
||||
}
|
||||
|
||||
var keySize int
|
||||
|
||||
algorithm := headers.getAlgorithm()
|
||||
switch algorithm {
|
||||
case ECDH_ES:
|
||||
// ECDH-ES uses direct key agreement, no key unwrapping necessary.
|
||||
return deriveKey(string(headers.getEncryption()), generator.keySize()), nil
|
||||
case ECDH_ES_A128KW:
|
||||
keySize = 16
|
||||
case ECDH_ES_A192KW:
|
||||
keySize = 24
|
||||
case ECDH_ES_A256KW:
|
||||
keySize = 32
|
||||
default:
|
||||
return nil, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
key := deriveKey(string(algorithm), keySize)
|
||||
block, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return josecipher.KeyUnwrap(block, recipient.encryptedKey)
|
||||
}
|
||||
|
||||
func (ctx edDecrypterSigner) signPayload(payload []byte, alg SignatureAlgorithm) (Signature, error) {
|
||||
if alg != EdDSA {
|
||||
return Signature{}, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
sig, err := ctx.privateKey.Sign(RandReader, payload, crypto.Hash(0))
|
||||
if err != nil {
|
||||
return Signature{}, err
|
||||
}
|
||||
|
||||
return Signature{
|
||||
Signature: sig,
|
||||
protected: &rawHeader{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (ctx edEncrypterVerifier) verifyPayload(payload []byte, signature []byte, alg SignatureAlgorithm) error {
|
||||
if alg != EdDSA {
|
||||
return ErrUnsupportedAlgorithm
|
||||
}
|
||||
ok := ed25519.Verify(ctx.publicKey, payload, signature)
|
||||
if !ok {
|
||||
return errors.New("square/go-jose: ed25519 signature failed to verify")
|
||||
}
|
||||
return nil
|
||||
}
|
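The EdDSA path above signs the payload directly, passing crypto.Hash(0) to signal that the message is not pre-hashed. A minimal standalone sketch of the same round trip using golang.org/x/crypto/ed25519, the package these methods rely on; the key and message names are illustrative:

package main

import (
	"crypto"
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/ed25519"
)

func main() {
	pub, priv, err := ed25519.GenerateKey(rand.Reader)
	if err != nil {
		panic(err)
	}

	payload := []byte("This is a test")

	// crypto.Hash(0) tells Sign the message is not pre-hashed,
	// mirroring the signPayload method above.
	sig, err := priv.Sign(rand.Reader, payload, crypto.Hash(0))
	if err != nil {
		panic(err)
	}

	fmt.Println("valid:", ed25519.Verify(pub, payload, sig))
}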
||||
|
||||
// Sign the given payload
|
||||
func (ctx ecDecrypterSigner) signPayload(payload []byte, alg SignatureAlgorithm) (Signature, error) {
|
||||
var expectedBitSize int
|
||||
var hash crypto.Hash
|
||||
|
||||
switch alg {
|
||||
case ES256:
|
||||
expectedBitSize = 256
|
||||
hash = crypto.SHA256
|
||||
case ES384:
|
||||
expectedBitSize = 384
|
||||
hash = crypto.SHA384
|
||||
case ES512:
|
||||
expectedBitSize = 521
|
||||
hash = crypto.SHA512
|
||||
}
|
||||
|
||||
curveBits := ctx.privateKey.Curve.Params().BitSize
|
||||
if expectedBitSize != curveBits {
|
||||
return Signature{}, fmt.Errorf("square/go-jose: expected %d bit key, got %d bits instead", expectedBitSize, curveBits)
|
||||
}
|
||||
|
||||
hasher := hash.New()
|
||||
|
||||
// According to documentation, Write() on hash never fails
|
||||
_, _ = hasher.Write(payload)
|
||||
hashed := hasher.Sum(nil)
|
||||
|
||||
r, s, err := ecdsa.Sign(RandReader, ctx.privateKey, hashed)
|
||||
if err != nil {
|
||||
return Signature{}, err
|
||||
}
|
||||
|
||||
keyBytes := curveBits / 8
|
||||
if curveBits%8 > 0 {
|
||||
keyBytes++
|
||||
}
|
||||
|
||||
// We serialize the outputs (r and s) into big-endian byte arrays and pad
|
||||
// them with zeros on the left to make sure the sizes work out. Both arrays
|
||||
// must be keyBytes long, and the output must be 2*keyBytes long.
|
||||
rBytes := r.Bytes()
|
||||
rBytesPadded := make([]byte, keyBytes)
|
||||
copy(rBytesPadded[keyBytes-len(rBytes):], rBytes)
|
||||
|
||||
sBytes := s.Bytes()
|
||||
sBytesPadded := make([]byte, keyBytes)
|
||||
copy(sBytesPadded[keyBytes-len(sBytes):], sBytes)
|
||||
|
||||
out := append(rBytesPadded, sBytesPadded...)
|
||||
|
||||
return Signature{
|
||||
Signature: out,
|
||||
protected: &rawHeader{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Verify the given payload
|
||||
func (ctx ecEncrypterVerifier) verifyPayload(payload []byte, signature []byte, alg SignatureAlgorithm) error {
|
||||
var keySize int
|
||||
var hash crypto.Hash
|
||||
|
||||
switch alg {
|
||||
case ES256:
|
||||
keySize = 32
|
||||
hash = crypto.SHA256
|
||||
case ES384:
|
||||
keySize = 48
|
||||
hash = crypto.SHA384
|
||||
case ES512:
|
||||
keySize = 66
|
||||
hash = crypto.SHA512
|
||||
default:
|
||||
return ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
if len(signature) != 2*keySize {
|
||||
return fmt.Errorf("square/go-jose: invalid signature size, have %d bytes, wanted %d", len(signature), 2*keySize)
|
||||
}
|
||||
|
||||
hasher := hash.New()
|
||||
|
||||
// According to documentation, Write() on hash never fails
|
||||
_, _ = hasher.Write(payload)
|
||||
hashed := hasher.Sum(nil)
|
||||
|
||||
r := big.NewInt(0).SetBytes(signature[:keySize])
|
||||
s := big.NewInt(0).SetBytes(signature[keySize:])
|
||||
|
||||
match := ecdsa.Verify(ctx.publicKey, hashed, r, s)
|
||||
if !match {
|
||||
return errors.New("square/go-jose: ecdsa signature failed to verify")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
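A JWS ECDSA signature as produced above is the raw r||s concatenation (each value zero-padded to the curve byte size), not ASN.1/DER, so an ES256 signature is always 64 bytes. A minimal sketch of producing and verifying that encoding with the standard library, mirroring the padding logic above; the helper name is illustrative:

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/sha256"
	"fmt"
	"math/big"
)

// verifyES256 checks a raw 64-byte r||s signature over SHA-256(payload).
func verifyES256(pub *ecdsa.PublicKey, payload, sig []byte) bool {
	if len(sig) != 64 {
		return false
	}
	hashed := sha256.Sum256(payload)
	r := new(big.Int).SetBytes(sig[:32])
	s := new(big.Int).SetBytes(sig[32:])
	return ecdsa.Verify(pub, hashed[:], r, s)
}

func main() {
	priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}

	payload := []byte("payload")
	hashed := sha256.Sum256(payload)
	r, s, err := ecdsa.Sign(rand.Reader, priv, hashed[:])
	if err != nil {
		panic(err)
	}

	// Zero-pad r and s to 32 bytes each and concatenate.
	sig := make([]byte, 64)
	rBytes, sBytes := r.Bytes(), s.Bytes()
	copy(sig[32-len(rBytes):32], rBytes)
	copy(sig[64-len(sBytes):], sBytes)

	fmt.Println("valid:", verifyES256(&priv.PublicKey, payload, sig))
}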
388
vendor/gopkg.in/square/go-jose.v2/asymmetric_test.go
generated
vendored
Normal file
|
@ -0,0 +1,388 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"errors"
|
||||
"io"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestEd25519(t *testing.T) {
|
||||
_, err := newEd25519Signer("XYZ", nil)
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
enc := new(edEncrypterVerifier)
|
||||
enc.publicKey = ed25519PublicKey
|
||||
err = enc.verifyPayload([]byte{}, []byte{}, "XYZ")
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
dec := new(edDecrypterSigner)
|
||||
dec.privateKey = ed25519PrivateKey
|
||||
_, err = dec.signPayload([]byte{}, "XYZ")
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
sig, err := dec.signPayload([]byte("This is a test"), "EdDSA")
|
||||
if err != nil {
|
||||
t.Error("should not error trying to sign payload")
|
||||
}
|
||||
if sig.Signature == nil {
|
||||
t.Error("Check the signature")
|
||||
}
|
||||
err = enc.verifyPayload([]byte("This is a test"), sig.Signature, "EdDSA")
|
||||
if err != nil {
|
||||
t.Error("should not error trying to verify payload")
|
||||
}
|
||||
|
||||
err = enc.verifyPayload([]byte("This is test number 2"), sig.Signature, "EdDSA")
|
||||
if err == nil {
|
||||
t.Error("should not error trying to verify payload")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidAlgorithmsRSA(t *testing.T) {
|
||||
_, err := newRSARecipient("XYZ", nil)
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
_, err = newRSASigner("XYZ", nil)
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
enc := new(rsaEncrypterVerifier)
|
||||
enc.publicKey = &rsaTestKey.PublicKey
|
||||
_, err = enc.encryptKey([]byte{}, "XYZ")
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
err = enc.verifyPayload([]byte{}, []byte{}, "XYZ")
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
dec := new(rsaDecrypterSigner)
|
||||
dec.privateKey = rsaTestKey
|
||||
_, err = dec.decrypt(make([]byte, 256), "XYZ", randomKeyGenerator{size: 16})
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
_, err = dec.signPayload([]byte{}, "XYZ")
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
}
|
||||
|
||||
type failingKeyGenerator struct{}
|
||||
|
||||
func (ctx failingKeyGenerator) keySize() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (ctx failingKeyGenerator) genKey() ([]byte, rawHeader, error) {
|
||||
return nil, rawHeader{}, errors.New("failed to generate key")
|
||||
}
|
||||
|
||||
func TestPKCSKeyGeneratorFailure(t *testing.T) {
|
||||
dec := new(rsaDecrypterSigner)
|
||||
dec.privateKey = rsaTestKey
|
||||
generator := failingKeyGenerator{}
|
||||
_, err := dec.decrypt(make([]byte, 256), RSA1_5, generator)
|
||||
if err != ErrCryptoFailure {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidAlgorithmsEC(t *testing.T) {
|
||||
_, err := newECDHRecipient("XYZ", nil)
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
_, err = newECDSASigner("XYZ", nil)
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
|
||||
enc := new(ecEncrypterVerifier)
|
||||
enc.publicKey = &ecTestKey256.PublicKey
|
||||
_, err = enc.encryptKey([]byte{}, "XYZ")
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Error("should return error on invalid algorithm")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidECKeyGen(t *testing.T) {
|
||||
gen := ecKeyGenerator{
|
||||
size: 16,
|
||||
algID: "A128GCM",
|
||||
publicKey: &ecTestKey256.PublicKey,
|
||||
}
|
||||
|
||||
if gen.keySize() != 16 {
|
||||
t.Error("ec key generator reported incorrect key size")
|
||||
}
|
||||
|
||||
_, _, err := gen.genKey()
|
||||
if err != nil {
|
||||
t.Error("ec key generator failed to generate key", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidECDecrypt(t *testing.T) {
|
||||
dec := ecDecrypterSigner{
|
||||
privateKey: ecTestKey256,
|
||||
}
|
||||
|
||||
generator := randomKeyGenerator{size: 16}
|
||||
|
||||
// Missing epk header
|
||||
headers := rawHeader{}
|
||||
headers.set(headerAlgorithm, ECDH_ES)
|
||||
|
||||
_, err := dec.decryptKey(headers, nil, generator)
|
||||
if err == nil {
|
||||
t.Error("ec decrypter accepted object with missing epk header")
|
||||
}
|
||||
|
||||
// Invalid epk header
|
||||
headers.set(headerEPK, &JSONWebKey{})
|
||||
|
||||
_, err = dec.decryptKey(headers, nil, generator)
|
||||
if err == nil {
|
||||
t.Error("ec decrypter accepted object with invalid epk header")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecryptWithIncorrectSize(t *testing.T) {
|
||||
priv, err := rsa.GenerateKey(rand.Reader, 2048)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
|
||||
dec := new(rsaDecrypterSigner)
|
||||
dec.privateKey = priv
|
||||
aes := newAESGCM(16)
|
||||
|
||||
keygen := randomKeyGenerator{
|
||||
size: aes.keySize(),
|
||||
}
|
||||
|
||||
payload := make([]byte, 254)
|
||||
_, err = dec.decrypt(payload, RSA1_5, keygen)
|
||||
if err == nil {
|
||||
t.Error("Invalid payload size should return error")
|
||||
}
|
||||
|
||||
payload = make([]byte, 257)
|
||||
_, err = dec.decrypt(payload, RSA1_5, keygen)
|
||||
if err == nil {
|
||||
t.Error("Invalid payload size should return error")
|
||||
}
|
||||
}
|
||||
|
||||
func TestPKCSDecryptNeverFails(t *testing.T) {
|
||||
// We don't want RSA-PKCS1 v1.5 decryption to ever fail, in order to prevent
|
||||
// side-channel timing attacks (Bleichenbacher attack in particular).
|
||||
priv, err := rsa.GenerateKey(rand.Reader, 2048)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
|
||||
dec := new(rsaDecrypterSigner)
|
||||
dec.privateKey = priv
|
||||
aes := newAESGCM(16)
|
||||
|
||||
keygen := randomKeyGenerator{
|
||||
size: aes.keySize(),
|
||||
}
|
||||
|
||||
for i := 1; i < 50; i++ {
|
||||
payload := make([]byte, 256)
|
||||
_, err := io.ReadFull(rand.Reader, payload)
|
||||
if err != nil {
|
||||
t.Error("Unable to get random data:", err)
|
||||
return
|
||||
}
|
||||
_, err = dec.decrypt(payload, RSA1_5, keygen)
|
||||
if err != nil {
|
||||
t.Error("PKCS1v1.5 decrypt should never fail:", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkPKCSDecryptWithValidPayloads(b *testing.B) {
|
||||
priv, err := rsa.GenerateKey(rand.Reader, 2048)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
enc := new(rsaEncrypterVerifier)
|
||||
enc.publicKey = &priv.PublicKey
|
||||
dec := new(rsaDecrypterSigner)
|
||||
dec.privateKey = priv
|
||||
aes := newAESGCM(32)
|
||||
|
||||
b.StopTimer()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
plaintext := make([]byte, 32)
|
||||
_, err = io.ReadFull(rand.Reader, plaintext)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
ciphertext, err := enc.encrypt(plaintext, RSA1_5)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
keygen := randomKeyGenerator{
|
||||
size: aes.keySize(),
|
||||
}
|
||||
|
||||
b.StartTimer()
|
||||
_, err = dec.decrypt(ciphertext, RSA1_5, keygen)
|
||||
b.StopTimer()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkPKCSDecryptWithInvalidPayloads(b *testing.B) {
|
||||
priv, err := rsa.GenerateKey(rand.Reader, 2048)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
enc := new(rsaEncrypterVerifier)
|
||||
enc.publicKey = &priv.PublicKey
|
||||
dec := new(rsaDecrypterSigner)
|
||||
dec.privateKey = priv
|
||||
aes := newAESGCM(16)
|
||||
|
||||
keygen := randomKeyGenerator{
|
||||
size: aes.keySize(),
|
||||
}
|
||||
|
||||
b.StopTimer()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
plaintext := make([]byte, 16)
|
||||
_, err = io.ReadFull(rand.Reader, plaintext)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
ciphertext, err := enc.encrypt(plaintext, RSA1_5)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Do some simple scrambling
|
||||
ciphertext[128] ^= 0xFF
|
||||
|
||||
b.StartTimer()
|
||||
_, err = dec.decrypt(ciphertext, RSA1_5, keygen)
|
||||
b.StopTimer()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidEllipticCurve(t *testing.T) {
|
||||
signer256 := ecDecrypterSigner{privateKey: ecTestKey256}
|
||||
signer384 := ecDecrypterSigner{privateKey: ecTestKey384}
|
||||
signer521 := ecDecrypterSigner{privateKey: ecTestKey521}
|
||||
|
||||
_, err := signer256.signPayload([]byte{}, ES384)
|
||||
if err == nil {
|
||||
t.Error("should not generate ES384 signature with P-256 key")
|
||||
}
|
||||
_, err = signer256.signPayload([]byte{}, ES512)
|
||||
if err == nil {
|
||||
t.Error("should not generate ES512 signature with P-256 key")
|
||||
}
|
||||
_, err = signer384.signPayload([]byte{}, ES256)
|
||||
if err == nil {
|
||||
t.Error("should not generate ES256 signature with P-384 key")
|
||||
}
|
||||
_, err = signer384.signPayload([]byte{}, ES512)
|
||||
if err == nil {
|
||||
t.Error("should not generate ES512 signature with P-384 key")
|
||||
}
|
||||
_, err = signer521.signPayload([]byte{}, ES256)
|
||||
if err == nil {
|
||||
t.Error("should not generate ES256 signature with P-521 key")
|
||||
}
|
||||
_, err = signer521.signPayload([]byte{}, ES384)
|
||||
if err == nil {
|
||||
t.Error("should not generate ES384 signature with P-521 key")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidECPublicKey(t *testing.T) {
|
||||
// Invalid key
|
||||
invalid := &ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
Curve: elliptic.P256(),
|
||||
X: fromBase64Int("MTEx"),
|
||||
Y: fromBase64Int("MTEx"),
|
||||
},
|
||||
D: fromBase64Int("0_NxaRPUMQoAJt50Gz8YiTr8gRTwyEaCumd-MToTmIo"),
|
||||
}
|
||||
|
||||
headers := rawHeader{}
|
||||
headers.set(headerAlgorithm, ECDH_ES)
|
||||
headers.set(headerEPK, &JSONWebKey{
|
||||
Key: &invalid.PublicKey,
|
||||
})
|
||||
|
||||
dec := ecDecrypterSigner{
|
||||
privateKey: ecTestKey256,
|
||||
}
|
||||
|
||||
_, err := dec.decryptKey(headers, nil, randomKeyGenerator{size: 16})
|
||||
if err == nil {
|
||||
t.Fatal("decrypter accepted JWS with invalid ECDH public key")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidAlgorithmEC(t *testing.T) {
|
||||
err := ecEncrypterVerifier{publicKey: &ecTestKey256.PublicKey}.verifyPayload([]byte{}, []byte{}, "XYZ")
|
||||
if err != ErrUnsupportedAlgorithm {
|
||||
t.Fatal("should not accept invalid/unsupported algorithm")
|
||||
}
|
||||
}
|
196
vendor/gopkg.in/square/go-jose.v2/cipher/cbc_hmac.go
generated
vendored
Normal file
|
@ -0,0 +1,196 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package josecipher
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/cipher"
|
||||
"crypto/hmac"
|
||||
"crypto/sha256"
|
||||
"crypto/sha512"
|
||||
"crypto/subtle"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"hash"
|
||||
)
|
||||
|
||||
const (
|
||||
nonceBytes = 16
|
||||
)
|
||||
|
||||
// NewCBCHMAC instantiates a new AEAD based on CBC+HMAC.
|
||||
func NewCBCHMAC(key []byte, newBlockCipher func([]byte) (cipher.Block, error)) (cipher.AEAD, error) {
|
||||
keySize := len(key) / 2
|
||||
integrityKey := key[:keySize]
|
||||
encryptionKey := key[keySize:]
|
||||
|
||||
blockCipher, err := newBlockCipher(encryptionKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var hash func() hash.Hash
|
||||
switch keySize {
|
||||
case 16:
|
||||
hash = sha256.New
|
||||
case 24:
|
||||
hash = sha512.New384
|
||||
case 32:
|
||||
hash = sha512.New
|
||||
}
|
||||
|
||||
return &cbcAEAD{
|
||||
hash: hash,
|
||||
blockCipher: blockCipher,
|
||||
authtagBytes: keySize,
|
||||
integrityKey: integrityKey,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// An AEAD based on CBC+HMAC
|
||||
type cbcAEAD struct {
|
||||
hash func() hash.Hash
|
||||
authtagBytes int
|
||||
integrityKey []byte
|
||||
blockCipher cipher.Block
|
||||
}
|
||||
|
||||
func (ctx *cbcAEAD) NonceSize() int {
|
||||
return nonceBytes
|
||||
}
|
||||
|
||||
func (ctx *cbcAEAD) Overhead() int {
|
||||
// Maximum overhead is block size (for padding) plus auth tag length, where
|
||||
// the length of the auth tag is equivalent to the key size.
|
||||
return ctx.blockCipher.BlockSize() + ctx.authtagBytes
|
||||
}
|
||||
|
||||
// Seal encrypts and authenticates the plaintext.
|
||||
func (ctx *cbcAEAD) Seal(dst, nonce, plaintext, data []byte) []byte {
|
||||
// Output buffer -- must take care not to mangle plaintext input.
|
||||
ciphertext := make([]byte, uint64(len(plaintext))+uint64(ctx.Overhead()))[:len(plaintext)]
|
||||
copy(ciphertext, plaintext)
|
||||
ciphertext = padBuffer(ciphertext, ctx.blockCipher.BlockSize())
|
||||
|
||||
cbc := cipher.NewCBCEncrypter(ctx.blockCipher, nonce)
|
||||
|
||||
cbc.CryptBlocks(ciphertext, ciphertext)
|
||||
authtag := ctx.computeAuthTag(data, nonce, ciphertext)
|
||||
|
||||
ret, out := resize(dst, uint64(len(dst))+uint64(len(ciphertext))+uint64(len(authtag)))
|
||||
copy(out, ciphertext)
|
||||
copy(out[len(ciphertext):], authtag)
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
// Open decrypts and authenticates the ciphertext.
|
||||
func (ctx *cbcAEAD) Open(dst, nonce, ciphertext, data []byte) ([]byte, error) {
|
||||
if len(ciphertext) < ctx.authtagBytes {
|
||||
return nil, errors.New("square/go-jose: invalid ciphertext (too short)")
|
||||
}
|
||||
|
||||
offset := len(ciphertext) - ctx.authtagBytes
|
||||
expectedTag := ctx.computeAuthTag(data, nonce, ciphertext[:offset])
|
||||
match := subtle.ConstantTimeCompare(expectedTag, ciphertext[offset:])
|
||||
if match != 1 {
|
||||
return nil, errors.New("square/go-jose: invalid ciphertext (auth tag mismatch)")
|
||||
}
|
||||
|
||||
cbc := cipher.NewCBCDecrypter(ctx.blockCipher, nonce)
|
||||
|
||||
// Make copy of ciphertext buffer, don't want to modify in place
|
||||
buffer := append([]byte{}, []byte(ciphertext[:offset])...)
|
||||
|
||||
if len(buffer)%ctx.blockCipher.BlockSize() > 0 {
|
||||
return nil, errors.New("square/go-jose: invalid ciphertext (invalid length)")
|
||||
}
|
||||
|
||||
cbc.CryptBlocks(buffer, buffer)
|
||||
|
||||
// Remove padding
|
||||
plaintext, err := unpadBuffer(buffer, ctx.blockCipher.BlockSize())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ret, out := resize(dst, uint64(len(dst))+uint64(len(plaintext)))
|
||||
copy(out, plaintext)
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
// Compute an authentication tag
|
||||
func (ctx *cbcAEAD) computeAuthTag(aad, nonce, ciphertext []byte) []byte {
|
||||
buffer := make([]byte, uint64(len(aad))+uint64(len(nonce))+uint64(len(ciphertext))+8)
|
||||
n := 0
|
||||
n += copy(buffer, aad)
|
||||
n += copy(buffer[n:], nonce)
|
||||
n += copy(buffer[n:], ciphertext)
|
||||
binary.BigEndian.PutUint64(buffer[n:], uint64(len(aad))*8)
|
||||
|
||||
// According to documentation, Write() on hash.Hash never fails.
|
||||
hmac := hmac.New(ctx.hash, ctx.integrityKey)
|
||||
_, _ = hmac.Write(buffer)
|
||||
|
||||
return hmac.Sum(nil)[:ctx.authtagBytes]
|
||||
}
|
||||
|
||||
// resize ensures the given slice has a capacity of at least n bytes.
|
||||
// If the capacity of the slice is less than n, a new slice is allocated
|
||||
// and the existing data will be copied.
|
||||
func resize(in []byte, n uint64) (head, tail []byte) {
|
||||
if uint64(cap(in)) >= n {
|
||||
head = in[:n]
|
||||
} else {
|
||||
head = make([]byte, n)
|
||||
copy(head, in)
|
||||
}
|
||||
|
||||
tail = head[len(in):]
|
||||
return
|
||||
}
|
||||
|
||||
// Apply padding
|
||||
func padBuffer(buffer []byte, blockSize int) []byte {
|
||||
missing := blockSize - (len(buffer) % blockSize)
|
||||
ret, out := resize(buffer, uint64(len(buffer))+uint64(missing))
|
||||
padding := bytes.Repeat([]byte{byte(missing)}, missing)
|
||||
copy(out, padding)
|
||||
return ret
|
||||
}
|
||||
|
||||
// Remove padding
|
||||
func unpadBuffer(buffer []byte, blockSize int) ([]byte, error) {
|
||||
if len(buffer)%blockSize != 0 {
|
||||
return nil, errors.New("square/go-jose: invalid padding")
|
||||
}
|
||||
|
||||
last := buffer[len(buffer)-1]
|
||||
count := int(last)
|
||||
|
||||
if count == 0 || count > blockSize || count > len(buffer) {
|
||||
return nil, errors.New("square/go-jose: invalid padding")
|
||||
}
|
||||
|
||||
padding := bytes.Repeat([]byte{last}, count)
|
||||
if !bytes.HasSuffix(buffer, padding) {
|
||||
return nil, errors.New("square/go-jose: invalid padding")
|
||||
}
|
||||
|
||||
return buffer[:len(buffer)-count], nil
|
||||
}
|
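NewCBCHMAC splits the key in half: the first half keys the HMAC (SHA-256, SHA-384 or SHA-512, chosen by key size) and the second half keys AES-CBC, so a 32-byte key gives A128CBC-HS256. A minimal round-trip sketch from outside the package, assuming the gopkg.in import path shown in the vendor directory:

package main

import (
	"crypto/aes"
	"crypto/rand"
	"fmt"
	"io"

	josecipher "gopkg.in/square/go-jose.v2/cipher"
)

func main() {
	key := make([]byte, 32)   // 16-byte HMAC-SHA256 key + 16-byte AES-128 key
	nonce := make([]byte, 16) // NonceSize() is always 16 for CBC+HMAC
	if _, err := io.ReadFull(rand.Reader, key); err != nil {
		panic(err)
	}
	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
		panic(err)
	}

	aead, err := josecipher.NewCBCHMAC(key, aes.NewCipher)
	if err != nil {
		panic(err)
	}

	ciphertext := aead.Seal(nil, nonce, []byte("plaintext"), []byte("aad"))
	plaintext, err := aead.Open(nil, nonce, ciphertext, []byte("aad"))
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s\n", plaintext)
}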
498
vendor/gopkg.in/square/go-jose.v2/cipher/cbc_hmac_test.go
generated
vendored
Normal file
|
@ -0,0 +1,498 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package josecipher
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/aes"
|
||||
"crypto/cipher"
|
||||
"crypto/rand"
|
||||
"io"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestInvalidInputs(t *testing.T) {
|
||||
key := []byte{
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||
}
|
||||
|
||||
nonce := []byte{
|
||||
92, 80, 104, 49, 133, 25, 161, 215, 173, 101, 219, 211, 136, 91, 210, 145}
|
||||
|
||||
aead, _ := NewCBCHMAC(key, aes.NewCipher)
|
||||
ciphertext := aead.Seal(nil, nonce, []byte("plaintext"), []byte("aad"))
|
||||
|
||||
// Changed AAD, must fail
|
||||
_, err := aead.Open(nil, nonce, ciphertext, []byte("INVALID"))
|
||||
if err == nil {
|
||||
t.Error("must detect invalid aad")
|
||||
}
|
||||
|
||||
// Empty ciphertext, must fail
|
||||
_, err = aead.Open(nil, nonce, []byte{}, []byte("aad"))
|
||||
if err == nil {
|
||||
t.Error("must detect invalid/empty ciphertext")
|
||||
}
|
||||
|
||||
// Corrupt ciphertext, must fail
|
||||
corrupt := make([]byte, len(ciphertext))
|
||||
copy(corrupt, ciphertext)
|
||||
corrupt[0] ^= 0xFF
|
||||
|
||||
_, err = aead.Open(nil, nonce, corrupt, []byte("aad"))
|
||||
if err == nil {
|
||||
t.Error("must detect corrupt ciphertext")
|
||||
}
|
||||
|
||||
// Corrupt authtag, must fail
|
||||
copy(corrupt, ciphertext)
|
||||
corrupt[len(ciphertext)-1] ^= 0xFF
|
||||
|
||||
_, err = aead.Open(nil, nonce, corrupt, []byte("aad"))
|
||||
if err == nil {
|
||||
t.Error("must detect corrupt authtag")
|
||||
}
|
||||
|
||||
// Truncated data, must fail
|
||||
_, err = aead.Open(nil, nonce, ciphertext[:10], []byte("aad"))
|
||||
if err == nil {
|
||||
t.Error("must detect corrupt authtag")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVectorsAESCBC128(t *testing.T) {
|
||||
// Source: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption-29#appendix-A.2
|
||||
plaintext := []byte{
|
||||
76, 105, 118, 101, 32, 108, 111, 110, 103, 32, 97, 110, 100, 32,
|
||||
112, 114, 111, 115, 112, 101, 114, 46}
|
||||
|
||||
aad := []byte{
|
||||
101, 121, 74, 104, 98, 71, 99, 105, 79, 105, 74, 83, 85, 48, 69,
|
||||
120, 88, 122, 85, 105, 76, 67, 74, 108, 98, 109, 77, 105, 79, 105,
|
||||
74, 66, 77, 84, 73, 52, 81, 48, 74, 68, 76, 85, 104, 84, 77, 106, 85,
|
||||
50, 73, 110, 48}
|
||||
|
||||
expectedCiphertext := []byte{
|
||||
40, 57, 83, 181, 119, 33, 133, 148, 198, 185, 243, 24, 152, 230, 6,
|
||||
75, 129, 223, 127, 19, 210, 82, 183, 230, 168, 33, 215, 104, 143,
|
||||
112, 56, 102}
|
||||
|
||||
expectedAuthtag := []byte{
|
||||
246, 17, 244, 190, 4, 95, 98, 3, 231, 0, 115, 157, 242, 203, 100,
|
||||
191}
|
||||
|
||||
key := []byte{
|
||||
4, 211, 31, 197, 84, 157, 252, 254, 11, 100, 157, 250, 63, 170, 106, 206,
|
||||
107, 124, 212, 45, 111, 107, 9, 219, 200, 177, 0, 240, 143, 156, 44, 207}
|
||||
|
||||
nonce := []byte{
|
||||
3, 22, 60, 12, 43, 67, 104, 105, 108, 108, 105, 99, 111, 116, 104, 101}
|
||||
|
||||
enc, err := NewCBCHMAC(key, aes.NewCipher)
if err != nil {
t.Error("Unable to encrypt:", err)
return
}
out := enc.Seal(nil, nonce, plaintext, aad)
|
||||
|
||||
if bytes.Compare(out[:len(out)-16], expectedCiphertext) != 0 {
|
||||
t.Error("Ciphertext did not match")
|
||||
}
|
||||
if bytes.Compare(out[len(out)-16:], expectedAuthtag) != 0 {
|
||||
t.Error("Auth tag did not match")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVectorsAESCBC256(t *testing.T) {
|
||||
// Source: https://tools.ietf.org/html/draft-mcgrew-aead-aes-cbc-hmac-sha2-05#section-5.4
|
||||
plaintext := []byte{
|
||||
0x41, 0x20, 0x63, 0x69, 0x70, 0x68, 0x65, 0x72, 0x20, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x20,
|
||||
0x6d, 0x75, 0x73, 0x74, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x65, 0x71, 0x75,
|
||||
0x69, 0x72, 0x65, 0x64, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x65, 0x20, 0x73, 0x65, 0x63, 0x72, 0x65,
|
||||
0x74, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x69, 0x74, 0x20, 0x6d, 0x75, 0x73, 0x74, 0x20, 0x62,
|
||||
0x65, 0x20, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x66, 0x61, 0x6c, 0x6c, 0x20, 0x69,
|
||||
0x6e, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x73, 0x20, 0x6f, 0x66,
|
||||
0x20, 0x74, 0x68, 0x65, 0x20, 0x65, 0x6e, 0x65, 0x6d, 0x79, 0x20, 0x77, 0x69, 0x74, 0x68, 0x6f,
|
||||
0x75, 0x74, 0x20, 0x69, 0x6e, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x6e, 0x69, 0x65, 0x6e, 0x63, 0x65}
|
||||
|
||||
aad := []byte{
|
||||
0x54, 0x68, 0x65, 0x20, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x20, 0x70, 0x72, 0x69, 0x6e, 0x63,
|
||||
0x69, 0x70, 0x6c, 0x65, 0x20, 0x6f, 0x66, 0x20, 0x41, 0x75, 0x67, 0x75, 0x73, 0x74, 0x65, 0x20,
|
||||
0x4b, 0x65, 0x72, 0x63, 0x6b, 0x68, 0x6f, 0x66, 0x66, 0x73}
|
||||
|
||||
expectedCiphertext := []byte{
|
||||
0x4a, 0xff, 0xaa, 0xad, 0xb7, 0x8c, 0x31, 0xc5, 0xda, 0x4b, 0x1b, 0x59, 0x0d, 0x10, 0xff, 0xbd,
|
||||
0x3d, 0xd8, 0xd5, 0xd3, 0x02, 0x42, 0x35, 0x26, 0x91, 0x2d, 0xa0, 0x37, 0xec, 0xbc, 0xc7, 0xbd,
|
||||
0x82, 0x2c, 0x30, 0x1d, 0xd6, 0x7c, 0x37, 0x3b, 0xcc, 0xb5, 0x84, 0xad, 0x3e, 0x92, 0x79, 0xc2,
|
||||
0xe6, 0xd1, 0x2a, 0x13, 0x74, 0xb7, 0x7f, 0x07, 0x75, 0x53, 0xdf, 0x82, 0x94, 0x10, 0x44, 0x6b,
|
||||
0x36, 0xeb, 0xd9, 0x70, 0x66, 0x29, 0x6a, 0xe6, 0x42, 0x7e, 0xa7, 0x5c, 0x2e, 0x08, 0x46, 0xa1,
|
||||
0x1a, 0x09, 0xcc, 0xf5, 0x37, 0x0d, 0xc8, 0x0b, 0xfe, 0xcb, 0xad, 0x28, 0xc7, 0x3f, 0x09, 0xb3,
|
||||
0xa3, 0xb7, 0x5e, 0x66, 0x2a, 0x25, 0x94, 0x41, 0x0a, 0xe4, 0x96, 0xb2, 0xe2, 0xe6, 0x60, 0x9e,
|
||||
0x31, 0xe6, 0xe0, 0x2c, 0xc8, 0x37, 0xf0, 0x53, 0xd2, 0x1f, 0x37, 0xff, 0x4f, 0x51, 0x95, 0x0b,
|
||||
0xbe, 0x26, 0x38, 0xd0, 0x9d, 0xd7, 0xa4, 0x93, 0x09, 0x30, 0x80, 0x6d, 0x07, 0x03, 0xb1, 0xf6}
|
||||
|
||||
expectedAuthtag := []byte{
|
||||
0x4d, 0xd3, 0xb4, 0xc0, 0x88, 0xa7, 0xf4, 0x5c, 0x21, 0x68, 0x39, 0x64, 0x5b, 0x20, 0x12, 0xbf,
|
||||
0x2e, 0x62, 0x69, 0xa8, 0xc5, 0x6a, 0x81, 0x6d, 0xbc, 0x1b, 0x26, 0x77, 0x61, 0x95, 0x5b, 0xc5}
|
||||
|
||||
key := []byte{
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
|
||||
0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f}
|
||||
|
||||
nonce := []byte{
|
||||
0x1a, 0xf3, 0x8c, 0x2d, 0xc2, 0xb9, 0x6f, 0xfd, 0xd8, 0x66, 0x94, 0x09, 0x23, 0x41, 0xbc, 0x04}
|
||||
|
||||
enc, err := NewCBCHMAC(key, aes.NewCipher)
if err != nil {
t.Error("Unable to encrypt:", err)
return
}
out := enc.Seal(nil, nonce, plaintext, aad)
|
||||
|
||||
if bytes.Compare(out[:len(out)-32], expectedCiphertext) != 0 {
|
||||
t.Error("Ciphertext did not match, got", out[:len(out)-32], "wanted", expectedCiphertext)
|
||||
}
|
||||
if bytes.Compare(out[len(out)-32:], expectedAuthtag) != 0 {
|
||||
t.Error("Auth tag did not match, got", out[len(out)-32:], "wanted", expectedAuthtag)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAESCBCRoundtrip(t *testing.T) {
|
||||
key128 := []byte{
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}
|
||||
|
||||
key192 := []byte{
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||
0, 1, 2, 3, 4, 5, 6, 7,
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||
0, 1, 2, 3, 4, 5, 6, 7}
|
||||
|
||||
key256 := []byte{
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}
|
||||
|
||||
nonce := []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}
|
||||
|
||||
RunRoundtrip(t, key128, nonce)
|
||||
RunRoundtrip(t, key192, nonce)
|
||||
RunRoundtrip(t, key256, nonce)
|
||||
}
|
||||
|
||||
func RunRoundtrip(t *testing.T, key, nonce []byte) {
|
||||
aead, err := NewCBCHMAC(key, aes.NewCipher)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
if aead.NonceSize() != len(nonce) {
|
||||
panic("invalid nonce")
|
||||
}
|
||||
|
||||
// Test pre-existing data in dst buffer
|
||||
dst := []byte{15, 15, 15, 15}
|
||||
plaintext := []byte{0, 0, 0, 0}
|
||||
aad := []byte{4, 3, 2, 1}
|
||||
|
||||
result := aead.Seal(dst, nonce, plaintext, aad)
|
||||
if bytes.Compare(dst, result[:4]) != 0 {
|
||||
t.Error("Existing data in dst not preserved")
|
||||
}
|
||||
|
||||
// Test pre-existing (empty) dst buffer with sufficient capacity
|
||||
dst = make([]byte, 256)[:0]
|
||||
result, err = aead.Open(dst, nonce, result[4:], aad)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
if bytes.Compare(result, plaintext) != 0 {
|
||||
t.Error("Plaintext does not match output")
|
||||
}
|
||||
}
|
||||
|
||||
func TestAESCBCOverhead(t *testing.T) {
|
||||
aead, err := NewCBCHMAC(make([]byte, 32), aes.NewCipher)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
if aead.Overhead() != 32 {
|
||||
t.Error("CBC-HMAC reports incorrect overhead value")
|
||||
}
|
||||
}
|
||||
|
||||
func TestPadding(t *testing.T) {
|
||||
for i := 0; i < 256; i++ {
|
||||
slice := make([]byte, i)
|
||||
padded := padBuffer(slice, 16)
|
||||
if len(padded)%16 != 0 {
|
||||
t.Error("failed to pad slice properly", i)
|
||||
return
|
||||
}
|
||||
unpadded, err := unpadBuffer(padded, 16)
|
||||
if err != nil || len(unpadded) != i {
|
||||
t.Error("failed to unpad slice properly", i)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidKey(t *testing.T) {
|
||||
key := make([]byte, 30)
|
||||
_, err := NewCBCHMAC(key, aes.NewCipher)
|
||||
if err == nil {
|
||||
t.Error("should not be able to instantiate CBC-HMAC with invalid key")
|
||||
}
|
||||
}
|
||||
|
||||
func TestTruncatedCiphertext(t *testing.T) {
|
||||
key := make([]byte, 32)
|
||||
nonce := make([]byte, 16)
|
||||
data := make([]byte, 32)
|
||||
|
||||
io.ReadFull(rand.Reader, key)
|
||||
io.ReadFull(rand.Reader, nonce)
|
||||
|
||||
aead, err := NewCBCHMAC(key, aes.NewCipher)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
ctx := aead.(*cbcAEAD)
|
||||
ct := aead.Seal(nil, nonce, data, nil)
|
||||
|
||||
// Truncated ciphertext, but with correct auth tag
|
||||
truncated, tail := resize(ct[:len(ct)-ctx.authtagBytes-2], uint64(len(ct))-2)
|
||||
copy(tail, ctx.computeAuthTag(nil, nonce, truncated[:len(truncated)-ctx.authtagBytes]))
|
||||
|
||||
// Open should fail
|
||||
_, err = aead.Open(nil, nonce, truncated, nil)
|
||||
if err == nil {
|
||||
t.Error("open on truncated ciphertext should fail")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidPaddingOpen(t *testing.T) {
|
||||
key := make([]byte, 32)
|
||||
nonce := make([]byte, 16)
|
||||
|
||||
// Plaintext with invalid padding
|
||||
plaintext := padBuffer(make([]byte, 28), aes.BlockSize)
|
||||
plaintext[len(plaintext)-1] = 0xFF
|
||||
|
||||
io.ReadFull(rand.Reader, key)
|
||||
io.ReadFull(rand.Reader, nonce)
|
||||
|
||||
block, _ := aes.NewCipher(key)
|
||||
cbc := cipher.NewCBCEncrypter(block, nonce)
|
||||
buffer := append([]byte{}, plaintext...)
|
||||
cbc.CryptBlocks(buffer, buffer)
|
||||
|
||||
aead, _ := NewCBCHMAC(key, aes.NewCipher)
|
||||
ctx := aead.(*cbcAEAD)
|
||||
|
||||
// Mutated ciphertext, but with correct auth tag
|
||||
size := uint64(len(buffer))
|
||||
ciphertext, tail := resize(buffer, size+(uint64(len(key))/2))
|
||||
copy(tail, ctx.computeAuthTag(nil, nonce, ciphertext[:size]))
|
||||
|
||||
// Open should fail (b/c of invalid padding, even though tag matches)
|
||||
_, err := aead.Open(nil, nonce, ciphertext, nil)
|
||||
if err == nil || !strings.Contains(err.Error(), "invalid padding") {
|
||||
t.Error("no or unexpected error on open with invalid padding:", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidPadding(t *testing.T) {
|
||||
for i := 0; i < 256; i++ {
|
||||
slice := make([]byte, i)
|
||||
padded := padBuffer(slice, 16)
|
||||
if len(padded)%16 != 0 {
|
||||
t.Error("failed to pad slice properly", i)
|
||||
return
|
||||
}
|
||||
|
||||
paddingBytes := 16 - (i % 16)
|
||||
|
||||
// Mutate padding for testing
|
||||
for j := 1; j <= paddingBytes; j++ {
|
||||
mutated := make([]byte, len(padded))
|
||||
copy(mutated, padded)
|
||||
mutated[len(mutated)-j] ^= 0xFF
|
||||
|
||||
_, err := unpadBuffer(mutated, 16)
|
||||
if err == nil {
|
||||
t.Error("unpad on invalid padding should fail", i)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Test truncated padding
|
||||
_, err := unpadBuffer(padded[:len(padded)-1], 16)
|
||||
if err == nil {
|
||||
t.Error("unpad on truncated padding should fail", i)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestZeroLengthPadding(t *testing.T) {
|
||||
data := make([]byte, 16)
|
||||
data, err := unpadBuffer(data, 16)
|
||||
if err == nil {
|
||||
t.Error("padding with 0x00 should never be valid")
|
||||
}
|
||||
}
|
||||
|
||||
func benchEncryptCBCHMAC(b *testing.B, keySize, chunkSize int) {
|
||||
key := make([]byte, keySize*2)
|
||||
nonce := make([]byte, 16)
|
||||
|
||||
io.ReadFull(rand.Reader, key)
|
||||
io.ReadFull(rand.Reader, nonce)
|
||||
|
||||
chunk := make([]byte, chunkSize)
|
||||
|
||||
aead, err := NewCBCHMAC(key, aes.NewCipher)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
b.SetBytes(int64(chunkSize))
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
aead.Seal(nil, nonce, chunk, nil)
|
||||
}
|
||||
}
|
||||
|
||||
func benchDecryptCBCHMAC(b *testing.B, keySize, chunkSize int) {
|
||||
key := make([]byte, keySize*2)
|
||||
nonce := make([]byte, 16)
|
||||
|
||||
io.ReadFull(rand.Reader, key)
|
||||
io.ReadFull(rand.Reader, nonce)
|
||||
|
||||
chunk := make([]byte, chunkSize)
|
||||
|
||||
aead, err := NewCBCHMAC(key, aes.NewCipher)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
out := aead.Seal(nil, nonce, chunk, nil)
|
||||
|
||||
b.SetBytes(int64(chunkSize))
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
aead.Open(nil, nonce, out, nil)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES128_CBCHMAC_1k(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 16, 1024)
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES128_CBCHMAC_64k(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 16, 65536)
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES128_CBCHMAC_1MB(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 16, 1048576)
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES128_CBCHMAC_64MB(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 16, 67108864)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES128_CBCHMAC_1k(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 16, 1024)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES128_CBCHMAC_64k(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 16, 65536)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES128_CBCHMAC_1MB(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 16, 1048576)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES128_CBCHMAC_64MB(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 16, 67108864)
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES192_CBCHMAC_64k(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 24, 65536)
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES192_CBCHMAC_1MB(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 24, 1048576)
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES192_CBCHMAC_64MB(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 24, 67108864)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES192_CBCHMAC_1k(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 24, 1024)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES192_CBCHMAC_64k(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 24, 65536)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES192_CBCHMAC_1MB(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 24, 1048576)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES192_CBCHMAC_64MB(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 24, 67108864)
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES256_CBCHMAC_64k(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 32, 65536)
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES256_CBCHMAC_1MB(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 32, 1048576)
|
||||
}
|
||||
|
||||
func BenchmarkEncryptAES256_CBCHMAC_64MB(b *testing.B) {
|
||||
benchEncryptCBCHMAC(b, 32, 67108864)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES256_CBCHMAC_1k(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 32, 1032)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES256_CBCHMAC_64k(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 32, 65536)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES256_CBCHMAC_1MB(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 32, 1048576)
|
||||
}
|
||||
|
||||
func BenchmarkDecryptAES256_CBCHMAC_64MB(b *testing.B) {
|
||||
benchDecryptCBCHMAC(b, 32, 67108864)
|
||||
}
|
75
vendor/gopkg.in/square/go-jose.v2/cipher/concat_kdf.go
generated
vendored
Normal file
|
@ -0,0 +1,75 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package josecipher
|
||||
|
||||
import (
|
||||
"crypto"
|
||||
"encoding/binary"
|
||||
"hash"
|
||||
"io"
|
||||
)
|
||||
|
||||
type concatKDF struct {
|
||||
z, info []byte
|
||||
i uint32
|
||||
cache []byte
|
||||
hasher hash.Hash
|
||||
}
|
||||
|
||||
// NewConcatKDF builds a KDF reader based on the given inputs.
|
||||
func NewConcatKDF(hash crypto.Hash, z, algID, ptyUInfo, ptyVInfo, supPubInfo, supPrivInfo []byte) io.Reader {
|
||||
buffer := make([]byte, uint64(len(algID))+uint64(len(ptyUInfo))+uint64(len(ptyVInfo))+uint64(len(supPubInfo))+uint64(len(supPrivInfo)))
|
||||
n := 0
|
||||
n += copy(buffer, algID)
|
||||
n += copy(buffer[n:], ptyUInfo)
|
||||
n += copy(buffer[n:], ptyVInfo)
|
||||
n += copy(buffer[n:], supPubInfo)
|
||||
copy(buffer[n:], supPrivInfo)
|
||||
|
||||
hasher := hash.New()
|
||||
|
||||
return &concatKDF{
|
||||
z: z,
|
||||
info: buffer,
|
||||
hasher: hasher,
|
||||
cache: []byte{},
|
||||
i: 1,
|
||||
}
|
||||
}
|
||||
|
||||
func (ctx *concatKDF) Read(out []byte) (int, error) {
|
||||
copied := copy(out, ctx.cache)
|
||||
ctx.cache = ctx.cache[copied:]
|
||||
|
||||
for copied < len(out) {
|
||||
ctx.hasher.Reset()
|
||||
|
||||
// Write on a hash.Hash never fails
|
||||
_ = binary.Write(ctx.hasher, binary.BigEndian, ctx.i)
|
||||
_, _ = ctx.hasher.Write(ctx.z)
|
||||
_, _ = ctx.hasher.Write(ctx.info)
|
||||
|
||||
hash := ctx.hasher.Sum(nil)
|
||||
chunkCopied := copy(out[copied:], hash)
|
||||
copied += chunkCopied
|
||||
ctx.cache = hash[chunkCopied:]
|
||||
|
||||
ctx.i++
|
||||
}
|
||||
|
||||
return copied, nil
|
||||
}
|
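The Concat KDF reader derives a deterministic key stream from the shared secret z and the length-prefixed OtherInfo fields; reads of any chunk size return the same bytes, which is what TestCache in the test file that follows exercises. A short sketch reusing the inputs from the JWA test vector in that file:

package main

import (
	"crypto"
	_ "crypto/sha256" // registers SHA-256 for crypto.SHA256.New()
	"fmt"

	josecipher "gopkg.in/square/go-jose.v2/cipher"
)

func main() {
	z := []byte{
		158, 86, 217, 29, 129, 113, 53, 211, 114, 131, 66, 131, 191, 132,
		38, 156, 251, 49, 110, 163, 218, 128, 106, 72, 246, 218, 167, 121,
		140, 254, 144, 196}

	// Inputs are already length-prefixed, as the KDF expects.
	algID := []byte{0, 0, 0, 7, 65, 49, 50, 56, 71, 67, 77} // "A128GCM"
	ptyUInfo := []byte{0, 0, 0, 5, 65, 108, 105, 99, 101}   // "Alice"
	ptyVInfo := []byte{0, 0, 0, 3, 66, 111, 98}             // "Bob"
	supPubInfo := []byte{0, 0, 0, 128}                      // derived key size in bits

	reader := josecipher.NewConcatKDF(crypto.SHA256, z, algID, ptyUInfo, ptyVInfo, supPubInfo, []byte{})

	key := make([]byte, 16)
	_, _ = reader.Read(key) // Read on the KDF never fails
	fmt.Printf("%x\n", key)
}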
150
vendor/gopkg.in/square/go-jose.v2/cipher/concat_kdf_test.go
generated
vendored
Normal file
|
@ -0,0 +1,150 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package josecipher
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Taken from: https://tools.ietf.org/id/draft-ietf-jose-json-web-algorithms-38.txt
|
||||
func TestVectorConcatKDF(t *testing.T) {
|
||||
z := []byte{
|
||||
158, 86, 217, 29, 129, 113, 53, 211, 114, 131, 66, 131, 191, 132,
|
||||
38, 156, 251, 49, 110, 163, 218, 128, 106, 72, 246, 218, 167, 121,
|
||||
140, 254, 144, 196}
|
||||
|
||||
algID := []byte{0, 0, 0, 7, 65, 49, 50, 56, 71, 67, 77}
|
||||
|
||||
ptyUInfo := []byte{0, 0, 0, 5, 65, 108, 105, 99, 101}
|
||||
ptyVInfo := []byte{0, 0, 0, 3, 66, 111, 98}
|
||||
|
||||
supPubInfo := []byte{0, 0, 0, 128}
|
||||
supPrivInfo := []byte{}
|
||||
|
||||
expected := []byte{
|
||||
86, 170, 141, 234, 248, 35, 109, 32, 92, 34, 40, 205, 113, 167, 16, 26}
|
||||
|
||||
ckdf := NewConcatKDF(crypto.SHA256, z, algID, ptyUInfo, ptyVInfo, supPubInfo, supPrivInfo)
|
||||
|
||||
out0 := make([]byte, 9)
|
||||
out1 := make([]byte, 7)
|
||||
|
||||
read0, err := ckdf.Read(out0)
|
||||
if err != nil {
|
||||
t.Error("error when reading from concat kdf reader", err)
|
||||
return
|
||||
}
|
||||
|
||||
read1, err := ckdf.Read(out1)
|
||||
if err != nil {
|
||||
t.Error("error when reading from concat kdf reader", err)
|
||||
return
|
||||
}
|
||||
|
||||
if read0+read1 != len(out0)+len(out1) {
|
||||
t.Error("did not receive enough bytes from concat kdf reader")
|
||||
return
|
||||
}
|
||||
|
||||
out := []byte{}
|
||||
out = append(out, out0...)
|
||||
out = append(out, out1...)
|
||||
|
||||
if bytes.Compare(out, expected) != 0 {
|
||||
t.Error("did not receive expected output from concat kdf reader")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func TestCache(t *testing.T) {
|
||||
z := []byte{
|
||||
158, 86, 217, 29, 129, 113, 53, 211, 114, 131, 66, 131, 191, 132,
|
||||
38, 156, 251, 49, 110, 163, 218, 128, 106, 72, 246, 218, 167, 121,
|
||||
140, 254, 144, 196}
|
||||
|
||||
algID := []byte{1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4}
|
||||
|
||||
ptyUInfo := []byte{1, 2, 3, 4}
|
||||
ptyVInfo := []byte{4, 3, 2, 1}
|
||||
|
||||
supPubInfo := []byte{}
|
||||
supPrivInfo := []byte{}
|
||||
|
||||
outputs := [][]byte{}
|
||||
|
||||
// Read the same amount of data in different chunk sizes
|
||||
chunkSizes := []int{1, 2, 4, 8, 16, 32, 64, 128, 256, 512}
|
||||
|
||||
for _, c := range chunkSizes {
|
||||
out := make([]byte, 1024)
|
||||
reader := NewConcatKDF(crypto.SHA256, z, algID, ptyUInfo, ptyVInfo, supPubInfo, supPrivInfo)
|
||||
|
||||
for i := 0; i < 1024; i += c {
|
||||
_, _ = reader.Read(out[i : i+c])
|
||||
}
|
||||
|
||||
outputs = append(outputs, out)
|
||||
}
|
||||
|
||||
for i := range outputs {
|
||||
if bytes.Compare(outputs[i], outputs[(i+1)%len(outputs)]) != 0 {
|
||||
t.Error("not all outputs from KDF matched")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkKDF(b *testing.B, total int) {
|
||||
z := []byte{
|
||||
158, 86, 217, 29, 129, 113, 53, 211, 114, 131, 66, 131, 191, 132,
|
||||
38, 156, 251, 49, 110, 163, 218, 128, 106, 72, 246, 218, 167, 121,
|
||||
140, 254, 144, 196}
|
||||
|
||||
algID := []byte{1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4}
|
||||
|
||||
ptyUInfo := []byte{1, 2, 3, 4}
|
||||
ptyVInfo := []byte{4, 3, 2, 1}
|
||||
|
||||
supPubInfo := []byte{}
|
||||
supPrivInfo := []byte{}
|
||||
|
||||
out := make([]byte, total)
|
||||
reader := NewConcatKDF(crypto.SHA256, z, algID, ptyUInfo, ptyVInfo, supPubInfo, supPrivInfo)
|
||||
|
||||
b.ResetTimer()
|
||||
b.SetBytes(int64(total))
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, _ = reader.Read(out)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkConcatKDF_1k(b *testing.B) {
|
||||
benchmarkKDF(b, 1024)
|
||||
}
|
||||
|
||||
func BenchmarkConcatKDF_64k(b *testing.B) {
|
||||
benchmarkKDF(b, 65536)
|
||||
}
|
||||
|
||||
func BenchmarkConcatKDF_1MB(b *testing.B) {
|
||||
benchmarkKDF(b, 1048576)
|
||||
}
|
||||
|
||||
func BenchmarkConcatKDF_64MB(b *testing.B) {
|
||||
benchmarkKDF(b, 67108864)
|
||||
}
|
62
vendor/gopkg.in/square/go-jose.v2/cipher/ecdh_es.go
generated
vendored
Normal file
|
@ -0,0 +1,62 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package josecipher
|
||||
|
||||
import (
|
||||
"crypto"
|
||||
"crypto/ecdsa"
|
||||
"encoding/binary"
|
||||
)
|
||||
|
||||
// DeriveECDHES derives a shared encryption key using ECDH/ConcatKDF as described in JWE/JWA.
|
||||
// It is an error to call this function with a private/public key that are not on the same
|
||||
// curve. Callers must ensure that the keys are valid before calling this function. Output
|
||||
// size may be at most 1<<16 bytes (64 KiB).
|
||||
func DeriveECDHES(alg string, apuData, apvData []byte, priv *ecdsa.PrivateKey, pub *ecdsa.PublicKey, size int) []byte {
|
||||
if size > 1<<16 {
|
||||
panic("ECDH-ES output size too large, must be less than or equal to 1<<16")
|
||||
}
|
||||
|
||||
// algId, partyUInfo, partyVInfo inputs must be prefixed with the length
|
||||
algID := lengthPrefixed([]byte(alg))
|
||||
ptyUInfo := lengthPrefixed(apuData)
|
||||
ptyVInfo := lengthPrefixed(apvData)
|
||||
|
||||
// suppPubInfo is the encoded length of the output size in bits
|
||||
supPubInfo := make([]byte, 4)
|
||||
binary.BigEndian.PutUint32(supPubInfo, uint32(size)*8)
|
||||
|
||||
if !priv.PublicKey.Curve.IsOnCurve(pub.X, pub.Y) {
|
||||
panic("public key not on same curve as private key")
|
||||
}
|
||||
|
||||
z, _ := priv.PublicKey.Curve.ScalarMult(pub.X, pub.Y, priv.D.Bytes())
|
||||
reader := NewConcatKDF(crypto.SHA256, z.Bytes(), algID, ptyUInfo, ptyVInfo, supPubInfo, []byte{})
|
||||
|
||||
key := make([]byte, size)
|
||||
|
||||
// Read on the KDF will never fail
|
||||
_, _ = reader.Read(key)
|
||||
return key
|
||||
}
|
||||
|
||||
func lengthPrefixed(data []byte) []byte {
|
||||
out := make([]byte, len(data)+4)
|
||||
binary.BigEndian.PutUint32(out, uint32(len(data)))
|
||||
copy(out[4:], data)
|
||||
return out
|
||||
}
|
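DeriveECDHES feeds the ECDH shared secret and the same public OtherInfo into the Concat KDF, so both parties derive an identical key without any key transport. A minimal sketch with two freshly generated P-256 keys (key and label names are illustrative):

package main

import (
	"bytes"
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"fmt"

	josecipher "gopkg.in/square/go-jose.v2/cipher"
)

func main() {
	alice, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	bob, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)

	apu := []byte("Alice")
	apv := []byte("Bob")

	// Each side combines its own private key with the peer's public key.
	k1 := josecipher.DeriveECDHES("A128GCM", apu, apv, alice, &bob.PublicKey, 16)
	k2 := josecipher.DeriveECDHES("A128GCM", apu, apv, bob, &alice.PublicKey, 16)

	fmt.Println("keys match:", bytes.Equal(k1, k2)) // true
}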
115
vendor/gopkg.in/square/go-jose.v2/cipher/ecdh_es_test.go
generated
vendored
Normal file
|
@ -0,0 +1,115 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package josecipher
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"encoding/base64"
|
||||
"math/big"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Example keys from JWA, Appendix C
|
||||
var aliceKey = &ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
Curve: elliptic.P256(),
|
||||
X: fromBase64Int("gI0GAILBdu7T53akrFmMyGcsF3n5dO7MmwNBHKW5SV0="),
|
||||
Y: fromBase64Int("SLW_xSffzlPWrHEVI30DHM_4egVwt3NQqeUD7nMFpps="),
|
||||
},
|
||||
D: fromBase64Int("0_NxaRPUMQoAJt50Gz8YiTr8gRTwyEaCumd-MToTmIo="),
|
||||
}
|
||||
|
||||
var bobKey = &ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
Curve: elliptic.P256(),
|
||||
X: fromBase64Int("weNJy2HscCSM6AEDTDg04biOvhFhyyWvOHQfeF_PxMQ="),
|
||||
Y: fromBase64Int("e8lnCO-AlStT-NJVX-crhB7QRYhiix03illJOVAOyck="),
|
||||
},
|
||||
D: fromBase64Int("VEmDZpDXXK8p8N0Cndsxs924q6nS1RXFASRl6BfUqdw="),
|
||||
}
|
||||
|
||||
// Build big int from base64-encoded string. Strips whitespace (for testing).
|
||||
func fromBase64Int(data string) *big.Int {
|
||||
val, err := base64.URLEncoding.DecodeString(data)
|
||||
if err != nil {
|
||||
panic("Invalid test data: " + err.Error())
|
||||
}
|
||||
return new(big.Int).SetBytes(val)
|
||||
}
|
||||
|
||||
func TestVectorECDHES(t *testing.T) {
|
||||
apuData := []byte("Alice")
|
||||
apvData := []byte("Bob")
|
||||
|
||||
expected := []byte{
|
||||
86, 170, 141, 234, 248, 35, 109, 32, 92, 34, 40, 205, 113, 167, 16, 26}
|
||||
|
||||
output := DeriveECDHES("A128GCM", apuData, apvData, bobKey, &aliceKey.PublicKey, 16)
|
||||
|
||||
if bytes.Compare(output, expected) != 0 {
|
||||
t.Error("output did not match what we expect, got", output, "wanted", expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidECPublicKey(t *testing.T) {
|
||||
defer func() { recover() }()
|
||||
|
||||
// Invalid key
|
||||
invalid := &ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
Curve: elliptic.P256(),
|
||||
X: fromBase64Int("MTEx"),
|
||||
Y: fromBase64Int("MTEx"),
|
||||
},
|
||||
D: fromBase64Int("0_NxaRPUMQoAJt50Gz8YiTr8gRTwyEaCumd-MToTmIo="),
|
||||
}
|
||||
|
||||
DeriveECDHES("A128GCM", []byte{}, []byte{}, bobKey, &invalid.PublicKey, 16)
|
||||
t.Fatal("should panic if public key was invalid")
|
||||
}
|
||||
|
||||
func BenchmarkECDHES_128(b *testing.B) {
|
||||
apuData := []byte("APU")
|
||||
apvData := []byte("APV")
|
||||
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
DeriveECDHES("ID", apuData, apvData, bobKey, &aliceKey.PublicKey, 16)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkECDHES_192(b *testing.B) {
|
||||
apuData := []byte("APU")
|
||||
apvData := []byte("APV")
|
||||
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
DeriveECDHES("ID", apuData, apvData, bobKey, &aliceKey.PublicKey, 24)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkECDHES_256(b *testing.B) {
|
||||
apuData := []byte("APU")
|
||||
apvData := []byte("APV")
|
||||
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
DeriveECDHES("ID", apuData, apvData, bobKey, &aliceKey.PublicKey, 32)
|
||||
}
|
||||
}
|
109 vendor/gopkg.in/square/go-jose.v2/cipher/key_wrap.go generated vendored Normal file
@@ -0,0 +1,109 @@
/*-
 * Copyright 2014 Square Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package josecipher

import (
    "crypto/cipher"
    "crypto/subtle"
    "encoding/binary"
    "errors"
)

var defaultIV = []byte{0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6}

// KeyWrap implements NIST key wrapping; it wraps a content encryption key (cek) with the given block cipher.
func KeyWrap(block cipher.Block, cek []byte) ([]byte, error) {
    if len(cek)%8 != 0 {
        return nil, errors.New("square/go-jose: key wrap input must be 8 byte blocks")
    }

    n := len(cek) / 8
    r := make([][]byte, n)

    for i := range r {
        r[i] = make([]byte, 8)
        copy(r[i], cek[i*8:])
    }

    buffer := make([]byte, 16)
    tBytes := make([]byte, 8)
    copy(buffer, defaultIV)

    for t := 0; t < 6*n; t++ {
        copy(buffer[8:], r[t%n])

        block.Encrypt(buffer, buffer)

        binary.BigEndian.PutUint64(tBytes, uint64(t+1))

        for i := 0; i < 8; i++ {
            buffer[i] = buffer[i] ^ tBytes[i]
        }
        copy(r[t%n], buffer[8:])
    }

    out := make([]byte, (n+1)*8)
    copy(out, buffer[:8])
    for i := range r {
        copy(out[(i+1)*8:], r[i])
    }

    return out, nil
}

// KeyUnwrap implements NIST key unwrapping; it unwraps a content encryption key (cek) with the given block cipher.
func KeyUnwrap(block cipher.Block, ciphertext []byte) ([]byte, error) {
    if len(ciphertext)%8 != 0 {
        return nil, errors.New("square/go-jose: key wrap input must be 8 byte blocks")
    }

    n := (len(ciphertext) / 8) - 1
    r := make([][]byte, n)

    for i := range r {
        r[i] = make([]byte, 8)
        copy(r[i], ciphertext[(i+1)*8:])
    }

    buffer := make([]byte, 16)
    tBytes := make([]byte, 8)
    copy(buffer[:8], ciphertext[:8])

    for t := 6*n - 1; t >= 0; t-- {
        binary.BigEndian.PutUint64(tBytes, uint64(t+1))

        for i := 0; i < 8; i++ {
            buffer[i] = buffer[i] ^ tBytes[i]
        }
        copy(buffer[8:], r[t%n])

        block.Decrypt(buffer, buffer)

        copy(r[t%n], buffer[8:])
    }

    if subtle.ConstantTimeCompare(buffer[:8], defaultIV) == 0 {
        return nil, errors.New("square/go-jose: failed to unwrap key")
    }

    out := make([]byte, n*8)
    for i := range r {
        copy(out[i*8:], r[i])
    }

    return out, nil
}
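Usage note (illustrative only, not part of the commit): the KeyWrap/KeyUnwrap pair above implements the NIST/RFC 3394 AES Key Wrap, so wrapping a CEK under a KEK only requires handing both functions an AES block cipher. The hex values below are the RFC 3394 test vectors that key_wrap_test.go (next file) also uses; everything else is an assumption of this sketch.

package main

import (
    "crypto/aes"
    "encoding/hex"
    "fmt"

    josecipher "gopkg.in/square/go-jose.v2/cipher"
)

func main() {
    // KEK and CEK from the RFC 3394 test vectors (see key_wrap_test.go below).
    kek, _ := hex.DecodeString("000102030405060708090A0B0C0D0E0F")
    cek, _ := hex.DecodeString("00112233445566778899AABBCCDDEEFF")

    block, err := aes.NewCipher(kek)
    if err != nil {
        panic(err)
    }

    wrapped, err := josecipher.KeyWrap(block, cek)
    if err != nil {
        panic(err)
    }

    unwrapped, err := josecipher.KeyUnwrap(block, wrapped)
    if err != nil {
        panic(err)
    }

    fmt.Printf("wrapped:   %X\n", wrapped)   // expected: 1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5
    fmt.Printf("unwrapped: %X\n", unwrapped) // matches the original CEK
}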
133 vendor/gopkg.in/square/go-jose.v2/cipher/key_wrap_test.go generated vendored Normal file
@@ -0,0 +1,133 @@
/*-
 * Copyright 2014 Square Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package josecipher

import (
    "bytes"
    "crypto/aes"
    "encoding/hex"
    "testing"
)

func TestAesKeyWrap(t *testing.T) {
    // Test vectors from: http://csrc.nist.gov/groups/ST/toolkit/documents/kms/key-wrap.pdf
    kek0, _ := hex.DecodeString("000102030405060708090A0B0C0D0E0F")
    cek0, _ := hex.DecodeString("00112233445566778899AABBCCDDEEFF")

    expected0, _ := hex.DecodeString("1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5")

    kek1, _ := hex.DecodeString("000102030405060708090A0B0C0D0E0F1011121314151617")
    cek1, _ := hex.DecodeString("00112233445566778899AABBCCDDEEFF")

    expected1, _ := hex.DecodeString("96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D")

    kek2, _ := hex.DecodeString("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F")
    cek2, _ := hex.DecodeString("00112233445566778899AABBCCDDEEFF0001020304050607")

    expected2, _ := hex.DecodeString("A8F9BC1612C68B3FF6E6F4FBE30E71E4769C8B80A32CB8958CD5D17D6B254DA1")

    block0, _ := aes.NewCipher(kek0)
    block1, _ := aes.NewCipher(kek1)
    block2, _ := aes.NewCipher(kek2)

    out0, _ := KeyWrap(block0, cek0)
    out1, _ := KeyWrap(block1, cek1)
    out2, _ := KeyWrap(block2, cek2)

    if bytes.Compare(out0, expected0) != 0 {
        t.Error("output 0 not as expected, got", out0, "wanted", expected0)
    }

    if bytes.Compare(out1, expected1) != 0 {
        t.Error("output 1 not as expected, got", out1, "wanted", expected1)
    }

    if bytes.Compare(out2, expected2) != 0 {
        t.Error("output 2 not as expected, got", out2, "wanted", expected2)
    }

    unwrap0, _ := KeyUnwrap(block0, out0)
    unwrap1, _ := KeyUnwrap(block1, out1)
    unwrap2, _ := KeyUnwrap(block2, out2)

    if bytes.Compare(unwrap0, cek0) != 0 {
        t.Error("key unwrap did not return original input, got", unwrap0, "wanted", cek0)
    }

    if bytes.Compare(unwrap1, cek1) != 0 {
        t.Error("key unwrap did not return original input, got", unwrap1, "wanted", cek1)
    }

    if bytes.Compare(unwrap2, cek2) != 0 {
        t.Error("key unwrap did not return original input, got", unwrap2, "wanted", cek2)
    }
}

func TestAesKeyWrapInvalid(t *testing.T) {
    kek, _ := hex.DecodeString("000102030405060708090A0B0C0D0E0F")

    // Invalid unwrap input (bit flipped)
    input0, _ := hex.DecodeString("1EA68C1A8112B447AEF34BD8FB5A7B828D3E862371D2CFE5")

    block, _ := aes.NewCipher(kek)

    _, err := KeyUnwrap(block, input0)
    if err == nil {
        t.Error("key unwrap failed to detect invalid input")
    }

    // Invalid unwrap input (truncated)
    input1, _ := hex.DecodeString("1EA68C1A8112B447AEF34BD8FB5A7B828D3E862371D2CF")

    _, err = KeyUnwrap(block, input1)
    if err == nil {
        t.Error("key unwrap failed to detect truncated input")
    }

    // Invalid wrap input (not multiple of 8)
    input2, _ := hex.DecodeString("0123456789ABCD")

    _, err = KeyWrap(block, input2)
    if err == nil {
        t.Error("key wrap accepted invalid input")
    }

}

func BenchmarkAesKeyWrap(b *testing.B) {
    kek, _ := hex.DecodeString("000102030405060708090A0B0C0D0E0F")
    key, _ := hex.DecodeString("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF")

    block, _ := aes.NewCipher(kek)

    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        KeyWrap(block, key)
    }
}

func BenchmarkAesKeyUnwrap(b *testing.B) {
    kek, _ := hex.DecodeString("000102030405060708090A0B0C0D0E0F")
    input, _ := hex.DecodeString("1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5")

    block, _ := aes.NewCipher(kek)

    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        KeyUnwrap(block, input)
    }
}
535 vendor/gopkg.in/square/go-jose.v2/crypter.go generated vendored Normal file
@@ -0,0 +1,535 @@
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"crypto/rsa"
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
|
||||
"gopkg.in/square/go-jose.v2/json"
|
||||
)
|
||||
|
||||
// Encrypter represents an encrypter which produces an encrypted JWE object.
|
||||
type Encrypter interface {
|
||||
Encrypt(plaintext []byte) (*JSONWebEncryption, error)
|
||||
EncryptWithAuthData(plaintext []byte, aad []byte) (*JSONWebEncryption, error)
|
||||
Options() EncrypterOptions
|
||||
}
|
||||
|
||||
// A generic content cipher
|
||||
type contentCipher interface {
|
||||
keySize() int
|
||||
encrypt(cek []byte, aad, plaintext []byte) (*aeadParts, error)
|
||||
decrypt(cek []byte, aad []byte, parts *aeadParts) ([]byte, error)
|
||||
}
|
||||
|
||||
// A key generator (for generating/getting a CEK)
|
||||
type keyGenerator interface {
|
||||
keySize() int
|
||||
genKey() ([]byte, rawHeader, error)
|
||||
}
|
||||
|
||||
// A generic key encrypter
|
||||
type keyEncrypter interface {
|
||||
encryptKey(cek []byte, alg KeyAlgorithm) (recipientInfo, error) // Encrypt a key
|
||||
}
|
||||
|
||||
// A generic key decrypter
|
||||
type keyDecrypter interface {
|
||||
decryptKey(headers rawHeader, recipient *recipientInfo, generator keyGenerator) ([]byte, error) // Decrypt a key
|
||||
}
|
||||
|
||||
// A generic encrypter based on the given key encrypter and content cipher.
|
||||
type genericEncrypter struct {
|
||||
contentAlg ContentEncryption
|
||||
compressionAlg CompressionAlgorithm
|
||||
cipher contentCipher
|
||||
recipients []recipientKeyInfo
|
||||
keyGenerator keyGenerator
|
||||
extraHeaders map[HeaderKey]interface{}
|
||||
}
|
||||
|
||||
type recipientKeyInfo struct {
|
||||
keyID string
|
||||
keyAlg KeyAlgorithm
|
||||
keyEncrypter keyEncrypter
|
||||
}
|
||||
|
||||
// EncrypterOptions represents options that can be set on new encrypters.
|
||||
type EncrypterOptions struct {
|
||||
Compression CompressionAlgorithm
|
||||
|
||||
// Optional map of additional keys to be inserted into the protected header
|
||||
// of a JWS object. Some specifications which make use of JWS like to insert
|
||||
// additional values here. All values must be JSON-serializable.
|
||||
ExtraHeaders map[HeaderKey]interface{}
|
||||
}
|
||||
|
||||
// WithHeader adds an arbitrary value to the ExtraHeaders map, initializing it
|
||||
// if necessary. It returns itself and so can be used in a fluent style.
|
||||
func (eo *EncrypterOptions) WithHeader(k HeaderKey, v interface{}) *EncrypterOptions {
|
||||
if eo.ExtraHeaders == nil {
|
||||
eo.ExtraHeaders = map[HeaderKey]interface{}{}
|
||||
}
|
||||
eo.ExtraHeaders[k] = v
|
||||
return eo
|
||||
}
|
||||
|
||||
// WithContentType adds a content type ("cty") header and returns the updated
|
||||
// EncrypterOptions.
|
||||
func (eo *EncrypterOptions) WithContentType(contentType ContentType) *EncrypterOptions {
|
||||
return eo.WithHeader(HeaderContentType, contentType)
|
||||
}
|
||||
|
||||
// WithType adds a type ("typ") header and returns the updated EncrypterOptions.
|
||||
func (eo *EncrypterOptions) WithType(typ ContentType) *EncrypterOptions {
|
||||
return eo.WithHeader(HeaderType, typ)
|
||||
}
|
||||
|
||||
// Recipient represents an algorithm/key to encrypt messages to.
|
||||
//
|
||||
// PBES2Count and PBES2Salt correspond with the "p2c" and "p2s" headers used
|
||||
// on the password-based encryption algorithms PBES2-HS256+A128KW,
|
||||
// PBES2-HS384+A192KW, and PBES2-HS512+A256KW. If they are not provided a safe
|
||||
// default of 100000 will be used for the count and a 128-bit random salt will
|
||||
// be generated.
|
||||
type Recipient struct {
|
||||
Algorithm KeyAlgorithm
|
||||
Key interface{}
|
||||
KeyID string
|
||||
PBES2Count int
|
||||
PBES2Salt []byte
|
||||
}
|
||||
|
||||
// NewEncrypter creates an appropriate encrypter based on the key type
|
||||
func NewEncrypter(enc ContentEncryption, rcpt Recipient, opts *EncrypterOptions) (Encrypter, error) {
|
||||
encrypter := &genericEncrypter{
|
||||
contentAlg: enc,
|
||||
recipients: []recipientKeyInfo{},
|
||||
cipher: getContentCipher(enc),
|
||||
}
|
||||
if opts != nil {
|
||||
encrypter.compressionAlg = opts.Compression
|
||||
encrypter.extraHeaders = opts.ExtraHeaders
|
||||
}
|
||||
|
||||
if encrypter.cipher == nil {
|
||||
return nil, ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
var keyID string
|
||||
var rawKey interface{}
|
||||
switch encryptionKey := rcpt.Key.(type) {
|
||||
case JSONWebKey:
|
||||
keyID, rawKey = encryptionKey.KeyID, encryptionKey.Key
|
||||
case *JSONWebKey:
|
||||
keyID, rawKey = encryptionKey.KeyID, encryptionKey.Key
|
||||
default:
|
||||
rawKey = encryptionKey
|
||||
}
|
||||
|
||||
switch rcpt.Algorithm {
|
||||
case DIRECT:
|
||||
// Direct encryption mode must be treated differently
|
||||
if reflect.TypeOf(rawKey) != reflect.TypeOf([]byte{}) {
|
||||
return nil, ErrUnsupportedKeyType
|
||||
}
|
||||
if encrypter.cipher.keySize() != len(rawKey.([]byte)) {
|
||||
return nil, ErrInvalidKeySize
|
||||
}
|
||||
encrypter.keyGenerator = staticKeyGenerator{
|
||||
key: rawKey.([]byte),
|
||||
}
|
||||
recipientInfo, _ := newSymmetricRecipient(rcpt.Algorithm, rawKey.([]byte))
|
||||
recipientInfo.keyID = keyID
|
||||
if rcpt.KeyID != "" {
|
||||
recipientInfo.keyID = rcpt.KeyID
|
||||
}
|
||||
encrypter.recipients = []recipientKeyInfo{recipientInfo}
|
||||
return encrypter, nil
|
||||
case ECDH_ES:
|
||||
// ECDH-ES (w/o key wrapping) is similar to DIRECT mode
|
||||
typeOf := reflect.TypeOf(rawKey)
|
||||
if typeOf != reflect.TypeOf(&ecdsa.PublicKey{}) {
|
||||
return nil, ErrUnsupportedKeyType
|
||||
}
|
||||
encrypter.keyGenerator = ecKeyGenerator{
|
||||
size: encrypter.cipher.keySize(),
|
||||
algID: string(enc),
|
||||
publicKey: rawKey.(*ecdsa.PublicKey),
|
||||
}
|
||||
recipientInfo, _ := newECDHRecipient(rcpt.Algorithm, rawKey.(*ecdsa.PublicKey))
|
||||
recipientInfo.keyID = keyID
|
||||
if rcpt.KeyID != "" {
|
||||
recipientInfo.keyID = rcpt.KeyID
|
||||
}
|
||||
encrypter.recipients = []recipientKeyInfo{recipientInfo}
|
||||
return encrypter, nil
|
||||
default:
|
||||
// Can just add a standard recipient
|
||||
encrypter.keyGenerator = randomKeyGenerator{
|
||||
size: encrypter.cipher.keySize(),
|
||||
}
|
||||
err := encrypter.addRecipient(rcpt)
|
||||
return encrypter, err
|
||||
}
|
||||
}
|
||||
|
||||
// NewMultiEncrypter creates a multi-encrypter based on the given parameters
|
||||
func NewMultiEncrypter(enc ContentEncryption, rcpts []Recipient, opts *EncrypterOptions) (Encrypter, error) {
|
||||
cipher := getContentCipher(enc)
|
||||
|
||||
if cipher == nil {
|
||||
return nil, ErrUnsupportedAlgorithm
|
||||
}
|
||||
if rcpts == nil || len(rcpts) == 0 {
|
||||
return nil, fmt.Errorf("square/go-jose: recipients is nil or empty")
|
||||
}
|
||||
|
||||
encrypter := &genericEncrypter{
|
||||
contentAlg: enc,
|
||||
recipients: []recipientKeyInfo{},
|
||||
cipher: cipher,
|
||||
keyGenerator: randomKeyGenerator{
|
||||
size: cipher.keySize(),
|
||||
},
|
||||
}
|
||||
|
||||
if opts != nil {
|
||||
encrypter.compressionAlg = opts.Compression
|
||||
}
|
||||
|
||||
for _, recipient := range rcpts {
|
||||
err := encrypter.addRecipient(recipient)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return encrypter, nil
|
||||
}
|
||||
|
||||
func (ctx *genericEncrypter) addRecipient(recipient Recipient) (err error) {
|
||||
var recipientInfo recipientKeyInfo
|
||||
|
||||
switch recipient.Algorithm {
|
||||
case DIRECT, ECDH_ES:
|
||||
return fmt.Errorf("square/go-jose: key algorithm '%s' not supported in multi-recipient mode", recipient.Algorithm)
|
||||
}
|
||||
|
||||
recipientInfo, err = makeJWERecipient(recipient.Algorithm, recipient.Key)
|
||||
if recipient.KeyID != "" {
|
||||
recipientInfo.keyID = recipient.KeyID
|
||||
}
|
||||
|
||||
switch recipient.Algorithm {
|
||||
case PBES2_HS256_A128KW, PBES2_HS384_A192KW, PBES2_HS512_A256KW:
|
||||
if sr, ok := recipientInfo.keyEncrypter.(*symmetricKeyCipher); ok {
|
||||
sr.p2c = recipient.PBES2Count
|
||||
sr.p2s = recipient.PBES2Salt
|
||||
}
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
ctx.recipients = append(ctx.recipients, recipientInfo)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func makeJWERecipient(alg KeyAlgorithm, encryptionKey interface{}) (recipientKeyInfo, error) {
|
||||
switch encryptionKey := encryptionKey.(type) {
|
||||
case *rsa.PublicKey:
|
||||
return newRSARecipient(alg, encryptionKey)
|
||||
case *ecdsa.PublicKey:
|
||||
return newECDHRecipient(alg, encryptionKey)
|
||||
case []byte:
|
||||
return newSymmetricRecipient(alg, encryptionKey)
|
||||
case string:
|
||||
return newSymmetricRecipient(alg, []byte(encryptionKey))
|
||||
case *JSONWebKey:
|
||||
recipient, err := makeJWERecipient(alg, encryptionKey.Key)
|
||||
recipient.keyID = encryptionKey.KeyID
|
||||
return recipient, err
|
||||
default:
|
||||
return recipientKeyInfo{}, ErrUnsupportedKeyType
|
||||
}
|
||||
}
|
||||
|
||||
// newDecrypter creates an appropriate decrypter based on the key type
|
||||
func newDecrypter(decryptionKey interface{}) (keyDecrypter, error) {
|
||||
switch decryptionKey := decryptionKey.(type) {
|
||||
case *rsa.PrivateKey:
|
||||
return &rsaDecrypterSigner{
|
||||
privateKey: decryptionKey,
|
||||
}, nil
|
||||
case *ecdsa.PrivateKey:
|
||||
return &ecDecrypterSigner{
|
||||
privateKey: decryptionKey,
|
||||
}, nil
|
||||
case []byte:
|
||||
return &symmetricKeyCipher{
|
||||
key: decryptionKey,
|
||||
}, nil
|
||||
case string:
|
||||
return &symmetricKeyCipher{
|
||||
key: []byte(decryptionKey),
|
||||
}, nil
|
||||
case JSONWebKey:
|
||||
return newDecrypter(decryptionKey.Key)
|
||||
case *JSONWebKey:
|
||||
return newDecrypter(decryptionKey.Key)
|
||||
default:
|
||||
return nil, ErrUnsupportedKeyType
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of encrypt method producing a JWE object.
|
||||
func (ctx *genericEncrypter) Encrypt(plaintext []byte) (*JSONWebEncryption, error) {
|
||||
return ctx.EncryptWithAuthData(plaintext, nil)
|
||||
}
|
||||
|
||||
// Implementation of encrypt method producing a JWE object.
|
||||
func (ctx *genericEncrypter) EncryptWithAuthData(plaintext, aad []byte) (*JSONWebEncryption, error) {
|
||||
obj := &JSONWebEncryption{}
|
||||
obj.aad = aad
|
||||
|
||||
obj.protected = &rawHeader{}
|
||||
err := obj.protected.set(headerEncryption, ctx.contentAlg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
obj.recipients = make([]recipientInfo, len(ctx.recipients))
|
||||
|
||||
if len(ctx.recipients) == 0 {
|
||||
return nil, fmt.Errorf("square/go-jose: no recipients to encrypt to")
|
||||
}
|
||||
|
||||
cek, headers, err := ctx.keyGenerator.genKey()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
obj.protected.merge(&headers)
|
||||
|
||||
for i, info := range ctx.recipients {
|
||||
recipient, err := info.keyEncrypter.encryptKey(cek, info.keyAlg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = recipient.header.set(headerAlgorithm, info.keyAlg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if info.keyID != "" {
|
||||
err = recipient.header.set(headerKeyID, info.keyID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
obj.recipients[i] = recipient
|
||||
}
|
||||
|
||||
if len(ctx.recipients) == 1 {
|
||||
// Move per-recipient headers into main protected header if there's
|
||||
// only a single recipient.
|
||||
obj.protected.merge(obj.recipients[0].header)
|
||||
obj.recipients[0].header = nil
|
||||
}
|
||||
|
||||
if ctx.compressionAlg != NONE {
|
||||
plaintext, err = compress(ctx.compressionAlg, plaintext)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = obj.protected.set(headerCompression, ctx.compressionAlg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
for k, v := range ctx.extraHeaders {
|
||||
b, err := json.Marshal(v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
(*obj.protected)[k] = makeRawMessage(b)
|
||||
}
|
||||
|
||||
authData := obj.computeAuthData()
|
||||
parts, err := ctx.cipher.encrypt(cek, authData, plaintext)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
obj.iv = parts.iv
|
||||
obj.ciphertext = parts.ciphertext
|
||||
obj.tag = parts.tag
|
||||
|
||||
return obj, nil
|
||||
}
|
||||
|
||||
func (ctx *genericEncrypter) Options() EncrypterOptions {
|
||||
return EncrypterOptions{
|
||||
Compression: ctx.compressionAlg,
|
||||
ExtraHeaders: ctx.extraHeaders,
|
||||
}
|
||||
}
|
||||
|
||||
// Decrypt and validate the object and return the plaintext. Note that this
|
||||
// function does not support multi-recipient, if you desire multi-recipient
|
||||
// decryption use DecryptMulti instead.
|
||||
func (obj JSONWebEncryption) Decrypt(decryptionKey interface{}) ([]byte, error) {
|
||||
headers := obj.mergedHeaders(nil)
|
||||
|
||||
if len(obj.recipients) > 1 {
|
||||
return nil, errors.New("square/go-jose: too many recipients in payload; expecting only one")
|
||||
}
|
||||
|
||||
critical, err := headers.getCritical()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid crit header")
|
||||
}
|
||||
|
||||
if len(critical) > 0 {
|
||||
return nil, fmt.Errorf("square/go-jose: unsupported crit header")
|
||||
}
|
||||
|
||||
decrypter, err := newDecrypter(decryptionKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cipher := getContentCipher(headers.getEncryption())
|
||||
if cipher == nil {
|
||||
return nil, fmt.Errorf("square/go-jose: unsupported enc value '%s'", string(headers.getEncryption()))
|
||||
}
|
||||
|
||||
generator := randomKeyGenerator{
|
||||
size: cipher.keySize(),
|
||||
}
|
||||
|
||||
parts := &aeadParts{
|
||||
iv: obj.iv,
|
||||
ciphertext: obj.ciphertext,
|
||||
tag: obj.tag,
|
||||
}
|
||||
|
||||
authData := obj.computeAuthData()
|
||||
|
||||
var plaintext []byte
|
||||
recipient := obj.recipients[0]
|
||||
recipientHeaders := obj.mergedHeaders(&recipient)
|
||||
|
||||
cek, err := decrypter.decryptKey(recipientHeaders, &recipient, generator)
|
||||
if err == nil {
|
||||
// Found a valid CEK -- let's try to decrypt.
|
||||
plaintext, err = cipher.decrypt(cek, authData, parts)
|
||||
}
|
||||
|
||||
if plaintext == nil {
|
||||
return nil, ErrCryptoFailure
|
||||
}
|
||||
|
||||
// The "zip" header parameter may only be present in the protected header.
|
||||
if comp := obj.protected.getCompression(); comp != "" {
|
||||
plaintext, err = decompress(comp, plaintext)
|
||||
}
|
||||
|
||||
return plaintext, err
|
||||
}
|
||||
|
||||
// DecryptMulti decrypts and validates the object and returns the plaintexts,
|
||||
// with support for multiple recipients. It returns the index of the recipient
|
||||
// for which the decryption was successful, the merged headers for that recipient,
|
||||
// and the plaintext.
|
||||
func (obj JSONWebEncryption) DecryptMulti(decryptionKey interface{}) (int, Header, []byte, error) {
|
||||
globalHeaders := obj.mergedHeaders(nil)
|
||||
|
||||
critical, err := globalHeaders.getCritical()
|
||||
if err != nil {
|
||||
return -1, Header{}, nil, fmt.Errorf("square/go-jose: invalid crit header")
|
||||
}
|
||||
|
||||
if len(critical) > 0 {
|
||||
return -1, Header{}, nil, fmt.Errorf("square/go-jose: unsupported crit header")
|
||||
}
|
||||
|
||||
decrypter, err := newDecrypter(decryptionKey)
|
||||
if err != nil {
|
||||
return -1, Header{}, nil, err
|
||||
}
|
||||
|
||||
encryption := globalHeaders.getEncryption()
|
||||
cipher := getContentCipher(encryption)
|
||||
if cipher == nil {
|
||||
return -1, Header{}, nil, fmt.Errorf("square/go-jose: unsupported enc value '%s'", string(encryption))
|
||||
}
|
||||
|
||||
generator := randomKeyGenerator{
|
||||
size: cipher.keySize(),
|
||||
}
|
||||
|
||||
parts := &aeadParts{
|
||||
iv: obj.iv,
|
||||
ciphertext: obj.ciphertext,
|
||||
tag: obj.tag,
|
||||
}
|
||||
|
||||
authData := obj.computeAuthData()
|
||||
|
||||
index := -1
|
||||
var plaintext []byte
|
||||
var headers rawHeader
|
||||
|
||||
for i, recipient := range obj.recipients {
|
||||
recipientHeaders := obj.mergedHeaders(&recipient)
|
||||
|
||||
cek, err := decrypter.decryptKey(recipientHeaders, &recipient, generator)
|
||||
if err == nil {
|
||||
// Found a valid CEK -- let's try to decrypt.
|
||||
plaintext, err = cipher.decrypt(cek, authData, parts)
|
||||
if err == nil {
|
||||
index = i
|
||||
headers = recipientHeaders
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if plaintext == nil || err != nil {
|
||||
return -1, Header{}, nil, ErrCryptoFailure
|
||||
}
|
||||
|
||||
// The "zip" header parameter may only be present in the protected header.
|
||||
if comp := obj.protected.getCompression(); comp != "" {
|
||||
plaintext, err = decompress(comp, plaintext)
|
||||
}
|
||||
|
||||
sanitized, err := headers.sanitized()
|
||||
if err != nil {
|
||||
return -1, Header{}, nil, fmt.Errorf("square/go-jose: failed to sanitize header: %v", err)
|
||||
}
|
||||
|
||||
return index, sanitized, plaintext, err
|
||||
}
|
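A side note for readers, not part of the vendored code: crypter.go's NewEncrypter treats DIRECT mode specially — the supplied []byte key is used as the CEK itself and must match the content cipher's key size. A minimal round trip with a 16-byte placeholder key and A128GCM might look like the sketch below; it uses the NewEncrypter/Encrypt/Decrypt API declared in this file, plus ParseEncrypted and CompactSerialize from elsewhere in the package (an assumption here, since those two are not shown in this hunk).

package main

import (
    "fmt"

    jose "gopkg.in/square/go-jose.v2"
)

func main() {
    // DIRECT mode: the shared key is the CEK, so it must be exactly 16 bytes
    // for A128GCM (see the keySize check in NewEncrypter above).
    sharedKey := []byte("0123456789abcdef") // placeholder key, 16 bytes

    enc, err := jose.NewEncrypter(jose.A128GCM,
        jose.Recipient{Algorithm: jose.DIRECT, Key: sharedKey}, nil)
    if err != nil {
        panic(err)
    }

    obj, err := enc.Encrypt([]byte("Lorem ipsum dolor sit amet"))
    if err != nil {
        panic(err)
    }

    serialized, err := obj.CompactSerialize()
    if err != nil {
        panic(err)
    }

    parsed, err := jose.ParseEncrypted(serialized)
    if err != nil {
        panic(err)
    }

    plaintext, err := parsed.Decrypt(sharedKey)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(plaintext))
}

Direct mode keeps the JWE small because no wrapped key is embedded, which is also why addRecipient above rejects DIRECT and ECDH_ES in multi-recipient mode.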
1070 vendor/gopkg.in/square/go-jose.v2/crypter_test.go generated vendored Normal file
File diff suppressed because it is too large
138 vendor/gopkg.in/square/go-jose.v2/cryptosigner/cryptosigner.go generated vendored Normal file
@@ -0,0 +1,138 @@
/*-
|
||||
* Copyright 2018 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Package cryptosigner implements an OpaqueSigner that wraps a "crypto".Signer
|
||||
//
|
||||
// https://godoc.org/crypto#Signer
|
||||
package cryptosigner
|
||||
|
||||
import (
|
||||
"crypto"
|
||||
"crypto/ecdsa"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"encoding/asn1"
|
||||
"io"
|
||||
"math/big"
|
||||
|
||||
"golang.org/x/crypto/ed25519"
|
||||
"gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
// Opaque creates an OpaqueSigner from a "crypto".Signer
|
||||
func Opaque(s crypto.Signer) jose.OpaqueSigner {
|
||||
pk := &jose.JSONWebKey{
|
||||
Key: s.Public(),
|
||||
}
|
||||
return &cryptoSigner{signer: s, rand: rand.Reader, pk: pk}
|
||||
}
|
||||
|
||||
type cryptoSigner struct {
|
||||
pk *jose.JSONWebKey
|
||||
signer crypto.Signer
|
||||
rand io.Reader
|
||||
}
|
||||
|
||||
func (s *cryptoSigner) Public() *jose.JSONWebKey {
|
||||
return s.pk
|
||||
}
|
||||
|
||||
func (s *cryptoSigner) Algs() []jose.SignatureAlgorithm {
|
||||
switch s.signer.Public().(type) {
|
||||
case ed25519.PublicKey:
|
||||
return []jose.SignatureAlgorithm{jose.EdDSA}
|
||||
case *ecdsa.PublicKey:
|
||||
// This could be more precise
|
||||
return []jose.SignatureAlgorithm{jose.ES256, jose.ES384, jose.ES512}
|
||||
case *rsa.PublicKey:
|
||||
return []jose.SignatureAlgorithm{jose.RS256, jose.RS384, jose.RS512, jose.PS256, jose.PS384, jose.PS512}
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s *cryptoSigner) SignPayload(payload []byte, alg jose.SignatureAlgorithm) ([]byte, error) {
|
||||
var hash crypto.Hash
|
||||
switch alg {
|
||||
case jose.EdDSA:
|
||||
case jose.RS256, jose.PS256, jose.ES256:
|
||||
hash = crypto.SHA256
|
||||
case jose.RS384, jose.PS384, jose.ES384:
|
||||
hash = crypto.SHA384
|
||||
case jose.RS512, jose.PS512, jose.ES512:
|
||||
hash = crypto.SHA512
|
||||
default:
|
||||
return nil, jose.ErrUnsupportedAlgorithm
|
||||
}
|
||||
|
||||
var hashed []byte
|
||||
if hash != crypto.Hash(0) {
|
||||
hasher := hash.New()
|
||||
if _, err := hasher.Write(payload); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
hashed = hasher.Sum(nil)
|
||||
}
|
||||
|
||||
var (
|
||||
out []byte
|
||||
err error
|
||||
)
|
||||
switch alg {
|
||||
case jose.EdDSA:
|
||||
out, err = s.signer.Sign(s.rand, payload, crypto.Hash(0))
|
||||
case jose.ES256, jose.ES384, jose.ES512:
|
||||
var byteLen int
|
||||
switch alg {
|
||||
case jose.ES256:
|
||||
byteLen = 32
|
||||
case jose.ES384:
|
||||
byteLen = 48
|
||||
case jose.ES512:
|
||||
byteLen = 66
|
||||
}
|
||||
var b []byte
|
||||
b, err = s.signer.Sign(s.rand, hashed, hash)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sig := struct {
|
||||
R, S *big.Int
|
||||
}{}
|
||||
if _, err = asn1.Unmarshal(b, &sig); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rBytes := sig.R.Bytes()
|
||||
rBytesPadded := make([]byte, byteLen)
|
||||
copy(rBytesPadded[byteLen-len(rBytes):], rBytes)
|
||||
|
||||
sBytes := sig.S.Bytes()
|
||||
sBytesPadded := make([]byte, byteLen)
|
||||
copy(sBytesPadded[byteLen-len(sBytes):], sBytes)
|
||||
|
||||
out = append(rBytesPadded, sBytesPadded...)
|
||||
case jose.RS256, jose.RS384, jose.RS512:
|
||||
out, err = s.signer.Sign(s.rand, hashed, hash)
|
||||
case jose.PS256, jose.PS384, jose.PS512:
|
||||
out, err = s.signer.Sign(s.rand, hashed, &rsa.PSSOptions{
|
||||
SaltLength: rsa.PSSSaltLengthAuto,
|
||||
Hash: hash,
|
||||
})
|
||||
}
|
||||
return out, err
|
||||
}
|
137 vendor/gopkg.in/square/go-jose.v2/cryptosigner/cryptosigner_test.go generated vendored Normal file
@@ -0,0 +1,137 @@
/*-
|
||||
* Copyright 2018 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package cryptosigner
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto"
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"golang.org/x/crypto/ed25519"
|
||||
"gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
func TestRoundtripsJWSCryptoSigner(t *testing.T) {
|
||||
sigAlgs := []jose.SignatureAlgorithm{jose.RS256, jose.RS384, jose.RS512, jose.PS256, jose.PS384, jose.PS512, jose.ES256, jose.ES384, jose.ES512, jose.EdDSA}
|
||||
|
||||
serializers := []func(*jose.JSONWebSignature) (string, error){
|
||||
func(obj *jose.JSONWebSignature) (string, error) { return obj.CompactSerialize() },
|
||||
func(obj *jose.JSONWebSignature) (string, error) { return obj.FullSerialize(), nil },
|
||||
}
|
||||
|
||||
for _, alg := range sigAlgs {
|
||||
signingKey, verificationKey := generateSigningTestKey(alg)
|
||||
|
||||
for i, serializer := range serializers {
|
||||
err := roundtripJWS(alg, serializer, Opaque(signingKey.(crypto.Signer)), verificationKey)
|
||||
if err != nil {
|
||||
t.Error(err, alg, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type staticNonceSource string
|
||||
|
||||
func (sns staticNonceSource) Nonce() (string, error) {
|
||||
return string(sns), nil
|
||||
}
|
||||
|
||||
func roundtripJWS(sigAlg jose.SignatureAlgorithm, serializer func(*jose.JSONWebSignature) (string, error), signingKey interface{}, verificationKey interface{}) error {
|
||||
nonce := "test_nonce"
|
||||
opts := &jose.SignerOptions{
|
||||
NonceSource: staticNonceSource(nonce),
|
||||
}
|
||||
|
||||
signer, err := jose.NewSigner(jose.SigningKey{Algorithm: sigAlg, Key: signingKey}, opts)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error on new signer: %s", err)
|
||||
}
|
||||
|
||||
input := []byte("Lorem ipsum dolor sit amet")
|
||||
obj, err := signer.Sign(input)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error on sign: %s", err)
|
||||
}
|
||||
|
||||
msg, err := serializer(obj)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error on serialize: %s", err)
|
||||
}
|
||||
|
||||
obj, err = jose.ParseSigned(msg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error on parse: %s", err)
|
||||
}
|
||||
|
||||
output, err := obj.Verify(verificationKey)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error on verify: %s", err)
|
||||
}
|
||||
|
||||
// Check that verify works with embedded keys (if present)
|
||||
for i, sig := range obj.Signatures {
|
||||
if sig.Header.JSONWebKey != nil {
|
||||
_, err = obj.Verify(sig.Header.JSONWebKey)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error on verify with embedded key %d: %s", i, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Check that the nonce correctly round-tripped (if present)
|
||||
if sig.Header.Nonce != nonce {
|
||||
return fmt.Errorf("Incorrect nonce returned: [%s]", sig.Header.Nonce)
|
||||
}
|
||||
}
|
||||
|
||||
if bytes.Compare(output, input) != 0 {
|
||||
return fmt.Errorf("input/output do not match, got '%s', expected '%s'", output, input)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func generateSigningTestKey(sigAlg jose.SignatureAlgorithm) (sig, ver interface{}) {
|
||||
switch sigAlg {
|
||||
case jose.EdDSA:
|
||||
ver, sig, _ = ed25519.GenerateKey(rand.Reader)
|
||||
case jose.RS256, jose.RS384, jose.RS512, jose.PS256, jose.PS384, jose.PS512:
|
||||
rsaTestKey, _ := rsa.GenerateKey(rand.Reader, 2048)
|
||||
sig = rsaTestKey
|
||||
ver = &rsaTestKey.PublicKey
|
||||
case jose.ES256:
|
||||
key, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
|
||||
sig = key
|
||||
ver = &key.PublicKey
|
||||
case jose.ES384:
|
||||
key, _ := ecdsa.GenerateKey(elliptic.P384(), rand.Reader)
|
||||
sig = key
|
||||
ver = &key.PublicKey
|
||||
case jose.ES512:
|
||||
key, _ := ecdsa.GenerateKey(elliptic.P521(), rand.Reader)
|
||||
sig = key
|
||||
ver = &key.PublicKey
|
||||
default:
|
||||
panic("Must update test case")
|
||||
}
|
||||
return
|
||||
}
|
27 vendor/gopkg.in/square/go-jose.v2/doc.go generated vendored Normal file
@@ -0,0 +1,27 @@
/*-
 * Copyright 2014 Square Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*

Package jose aims to provide an implementation of the Javascript Object Signing
and Encryption set of standards. It implements encryption and signing based on
the JSON Web Encryption and JSON Web Signature standards, with optional JSON
Web Token support available in a sub-package. The library supports both the
compact and full serialization formats, and has optional support for multiple
recipients.

*/
package jose
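The package comment mentions optional JSON Web Token support in a sub-package. The sketch below is illustrative only and not part of this commit; it assumes the gopkg.in/square/go-jose.v2/jwt builder API (jwt.Signed, jwt.ParseSigned, jwt.Claims), which does not appear in this diff, and all key and claim values are placeholders.

package main

import (
    "fmt"
    "time"

    jose "gopkg.in/square/go-jose.v2"
    "gopkg.in/square/go-jose.v2/jwt"
)

func main() {
    key := []byte("0123456789abcdef0123456789abcdef") // placeholder 32-byte HMAC key

    signer, err := jose.NewSigner(jose.SigningKey{Algorithm: jose.HS256, Key: key}, nil)
    if err != nil {
        panic(err)
    }

    claims := jwt.Claims{
        Issuer:  "example-issuer",
        Subject: "user@example.com",
        Expiry:  jwt.NewNumericDate(time.Now().Add(time.Hour)),
    }

    // Build and sign a compact JWT from the claims.
    raw, err := jwt.Signed(signer).Claims(claims).CompactSerialize()
    if err != nil {
        panic(err)
    }

    // Parse it back and verify the signature while extracting the claims.
    parsed, err := jwt.ParseSigned(raw)
    if err != nil {
        panic(err)
    }

    out := jwt.Claims{}
    if err := parsed.Claims(key, &out); err != nil {
        panic(err)
    }
    fmt.Println(out.Subject)
}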
201 vendor/gopkg.in/square/go-jose.v2/doc_test.go generated vendored Normal file
@@ -0,0 +1,201 @@
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// Dummy encrypter for use in examples
|
||||
var encrypter Encrypter
|
||||
|
||||
func Example_jWE() {
|
||||
// Generate a public/private key pair to use for this example.
|
||||
privateKey, err := rsa.GenerateKey(rand.Reader, 2048)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Instantiate an encrypter using RSA-OAEP with AES128-GCM. An error would
|
||||
// indicate that the selected algorithm(s) are not currently supported.
|
||||
publicKey := &privateKey.PublicKey
|
||||
encrypter, err := NewEncrypter(A128GCM, Recipient{Algorithm: RSA_OAEP, Key: publicKey}, nil)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Encrypt a sample plaintext. Calling the encrypter returns an encrypted
|
||||
// JWE object, which can then be serialized for output afterwards. An error
|
||||
// would indicate a problem in an underlying cryptographic primitive.
|
||||
var plaintext = []byte("Lorem ipsum dolor sit amet")
|
||||
object, err := encrypter.Encrypt(plaintext)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Serialize the encrypted object using the full serialization format.
|
||||
// Alternatively you can also use the compact format here by calling
|
||||
// object.CompactSerialize() instead.
|
||||
serialized := object.FullSerialize()
|
||||
|
||||
// Parse the serialized, encrypted JWE object. An error would indicate that
|
||||
// the given input did not represent a valid message.
|
||||
object, err = ParseEncrypted(serialized)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Now we can decrypt and get back our original plaintext. An error here
|
||||
// would indicate the the message failed to decrypt, e.g. because the auth
|
||||
// tag was broken or the message was tampered with.
|
||||
decrypted, err := object.Decrypt(privateKey)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
fmt.Printf(string(decrypted))
|
||||
// output: Lorem ipsum dolor sit amet
|
||||
}
|
||||
|
||||
func Example_jWS() {
|
||||
// Generate a public/private key pair to use for this example.
|
||||
privateKey, err := rsa.GenerateKey(rand.Reader, 2048)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Instantiate a signer using RSASSA-PSS (SHA512) with the given private key.
|
||||
signer, err := NewSigner(SigningKey{Algorithm: PS512, Key: privateKey}, nil)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Sign a sample payload. Calling the signer returns a protected JWS object,
|
||||
// which can then be serialized for output afterwards. An error would
|
||||
// indicate a problem in an underlying cryptographic primitive.
|
||||
var payload = []byte("Lorem ipsum dolor sit amet")
|
||||
object, err := signer.Sign(payload)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Serialize the encrypted object using the full serialization format.
|
||||
// Alternatively you can also use the compact format here by calling
|
||||
// object.CompactSerialize() instead.
|
||||
serialized := object.FullSerialize()
|
||||
|
||||
// Parse the serialized, protected JWS object. An error would indicate that
|
||||
// the given input did not represent a valid message.
|
||||
object, err = ParseSigned(serialized)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Now we can verify the signature on the payload. An error here would
|
||||
// indicate the the message failed to verify, e.g. because the signature was
|
||||
// broken or the message was tampered with.
|
||||
output, err := object.Verify(&privateKey.PublicKey)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
fmt.Printf(string(output))
|
||||
// output: Lorem ipsum dolor sit amet
|
||||
}
|
||||
|
||||
func ExampleNewEncrypter_publicKey() {
|
||||
var publicKey *rsa.PublicKey
|
||||
|
||||
// Instantiate an encrypter using RSA-OAEP with AES128-GCM.
|
||||
NewEncrypter(A128GCM, Recipient{Algorithm: RSA_OAEP, Key: publicKey}, nil)
|
||||
|
||||
// Instantiate an encrypter using RSA-PKCS1v1.5 with AES128-CBC+HMAC.
|
||||
NewEncrypter(A128CBC_HS256, Recipient{Algorithm: RSA1_5, Key: publicKey}, nil)
|
||||
}
|
||||
|
||||
func ExampleNewEncrypter_symmetric() {
|
||||
var sharedKey []byte
|
||||
|
||||
// Instantiate an encrypter using AES128-GCM with AES-GCM key wrap.
|
||||
NewEncrypter(A128GCM, Recipient{Algorithm: A128GCMKW, Key: sharedKey}, nil)
|
||||
|
||||
// Instantiate an encrypter using AES128-GCM directly, w/o key wrapping.
|
||||
NewEncrypter(A128GCM, Recipient{Algorithm: DIRECT, Key: sharedKey}, nil)
|
||||
}
|
||||
|
||||
func ExampleNewSigner_publicKey() {
|
||||
var rsaPrivateKey *rsa.PrivateKey
|
||||
var ecdsaPrivateKey *ecdsa.PrivateKey
|
||||
|
||||
// Instantiate a signer using RSA-PKCS#1v1.5 with SHA-256.
|
||||
NewSigner(SigningKey{Algorithm: RS256, Key: rsaPrivateKey}, nil)
|
||||
|
||||
// Instantiate a signer using ECDSA with SHA-384.
|
||||
NewSigner(SigningKey{Algorithm: ES384, Key: ecdsaPrivateKey}, nil)
|
||||
}
|
||||
|
||||
func ExampleNewSigner_symmetric() {
|
||||
var sharedKey []byte
|
||||
|
||||
// Instantiate an signer using HMAC-SHA256.
|
||||
NewSigner(SigningKey{Algorithm: HS256, Key: sharedKey}, nil)
|
||||
|
||||
// Instantiate an signer using HMAC-SHA512.
|
||||
NewSigner(SigningKey{Algorithm: HS512, Key: sharedKey}, nil)
|
||||
}
|
||||
|
||||
func ExampleNewMultiEncrypter() {
|
||||
var publicKey *rsa.PublicKey
|
||||
var sharedKey []byte
|
||||
|
||||
// Instantiate an encrypter using AES-GCM.
|
||||
NewMultiEncrypter(A128GCM, []Recipient{
|
||||
{Algorithm: A128GCMKW, Key: sharedKey},
|
||||
{Algorithm: RSA_OAEP, Key: publicKey},
|
||||
}, nil)
|
||||
}
|
||||
|
||||
func ExampleNewMultiSigner() {
|
||||
var privateKey *rsa.PrivateKey
|
||||
var sharedKey []byte
|
||||
|
||||
// Instantiate a signer for multiple recipients.
|
||||
NewMultiSigner([]SigningKey{
|
||||
{Algorithm: HS256, Key: sharedKey},
|
||||
{Algorithm: PS384, Key: privateKey},
|
||||
}, nil)
|
||||
}
|
||||
|
||||
func ExampleEncrypter_encrypt() {
|
||||
// Encrypt a plaintext in order to get an encrypted JWE object.
|
||||
var plaintext = []byte("This is a secret message")
|
||||
|
||||
encrypter.Encrypt(plaintext)
|
||||
}
|
||||
|
||||
func ExampleEncrypter_encryptWithAuthData() {
|
||||
// Encrypt a plaintext in order to get an encrypted JWE object. Also attach
|
||||
// some additional authenticated data (AAD) to the object. Note that objects
|
||||
// with attached AAD can only be represented using full serialization.
|
||||
var plaintext = []byte("This is a secret message")
|
||||
var aad = []byte("This is authenticated, but public data")
|
||||
|
||||
encrypter.EncryptWithAuthData(plaintext, aad)
|
||||
}
|
179 vendor/gopkg.in/square/go-jose.v2/encoding.go generated vendored Normal file
@@ -0,0 +1,179 @@
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/flate"
|
||||
"encoding/base64"
|
||||
"encoding/binary"
|
||||
"io"
|
||||
"math/big"
|
||||
"regexp"
|
||||
|
||||
"gopkg.in/square/go-jose.v2/json"
|
||||
)
|
||||
|
||||
var stripWhitespaceRegex = regexp.MustCompile("\\s")
|
||||
|
||||
// Helper function to serialize known-good objects.
|
||||
// Precondition: value is not a nil pointer.
|
||||
func mustSerializeJSON(value interface{}) []byte {
|
||||
out, err := json.Marshal(value)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
// We never want to serialize the top-level value "null," since it's not a
|
||||
// valid JOSE message. But if a caller passes in a nil pointer to this method,
|
||||
// MarshalJSON will happily serialize it as the top-level value "null". If
|
||||
// that value is then embedded in another operation, for instance by being
|
||||
// base64-encoded and fed as input to a signing algorithm
|
||||
// (https://github.com/square/go-jose/issues/22), the result will be
|
||||
// incorrect. Because this method is intended for known-good objects, and a nil
|
||||
// pointer is not a known-good object, we are free to panic in this case.
|
||||
// Note: It's not possible to directly check whether the data pointed at by an
|
||||
// interface is a nil pointer, so we do this hacky workaround.
|
||||
// https://groups.google.com/forum/#!topic/golang-nuts/wnH302gBa4I
|
||||
if string(out) == "null" {
|
||||
panic("Tried to serialize a nil pointer.")
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// Strip all newlines and whitespace
|
||||
func stripWhitespace(data string) string {
|
||||
return stripWhitespaceRegex.ReplaceAllString(data, "")
|
||||
}
|
||||
|
||||
// Perform compression based on algorithm
|
||||
func compress(algorithm CompressionAlgorithm, input []byte) ([]byte, error) {
|
||||
switch algorithm {
|
||||
case DEFLATE:
|
||||
return deflate(input)
|
||||
default:
|
||||
return nil, ErrUnsupportedAlgorithm
|
||||
}
|
||||
}
|
||||
|
||||
// Perform decompression based on algorithm
|
||||
func decompress(algorithm CompressionAlgorithm, input []byte) ([]byte, error) {
|
||||
switch algorithm {
|
||||
case DEFLATE:
|
||||
return inflate(input)
|
||||
default:
|
||||
return nil, ErrUnsupportedAlgorithm
|
||||
}
|
||||
}
|
||||
|
||||
// Compress with DEFLATE
|
||||
func deflate(input []byte) ([]byte, error) {
|
||||
output := new(bytes.Buffer)
|
||||
|
||||
// Writing to byte buffer, err is always nil
|
||||
writer, _ := flate.NewWriter(output, 1)
|
||||
_, _ = io.Copy(writer, bytes.NewBuffer(input))
|
||||
|
||||
err := writer.Close()
|
||||
return output.Bytes(), err
|
||||
}
|
||||
|
||||
// Decompress with DEFLATE
|
||||
func inflate(input []byte) ([]byte, error) {
|
||||
output := new(bytes.Buffer)
|
||||
reader := flate.NewReader(bytes.NewBuffer(input))
|
||||
|
||||
_, err := io.Copy(output, reader)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = reader.Close()
|
||||
return output.Bytes(), err
|
||||
}
|
||||
|
||||
// byteBuffer represents a slice of bytes that can be serialized to url-safe base64.
|
||||
type byteBuffer struct {
|
||||
data []byte
|
||||
}
|
||||
|
||||
func newBuffer(data []byte) *byteBuffer {
|
||||
if data == nil {
|
||||
return nil
|
||||
}
|
||||
return &byteBuffer{
|
||||
data: data,
|
||||
}
|
||||
}
|
||||
|
||||
func newFixedSizeBuffer(data []byte, length int) *byteBuffer {
|
||||
if len(data) > length {
|
||||
panic("square/go-jose: invalid call to newFixedSizeBuffer (len(data) > length)")
|
||||
}
|
||||
pad := make([]byte, length-len(data))
|
||||
return newBuffer(append(pad, data...))
|
||||
}
|
||||
|
||||
func newBufferFromInt(num uint64) *byteBuffer {
|
||||
data := make([]byte, 8)
|
||||
binary.BigEndian.PutUint64(data, num)
|
||||
return newBuffer(bytes.TrimLeft(data, "\x00"))
|
||||
}
|
||||
|
||||
func (b *byteBuffer) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(b.base64())
|
||||
}
|
||||
|
||||
func (b *byteBuffer) UnmarshalJSON(data []byte) error {
|
||||
var encoded string
|
||||
err := json.Unmarshal(data, &encoded)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if encoded == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
decoded, err := base64.RawURLEncoding.DecodeString(encoded)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*b = *newBuffer(decoded)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *byteBuffer) base64() string {
|
||||
return base64.RawURLEncoding.EncodeToString(b.data)
|
||||
}
|
||||
|
||||
func (b *byteBuffer) bytes() []byte {
|
||||
// Handling nil here allows us to transparently handle nil slices when serializing.
|
||||
if b == nil {
|
||||
return nil
|
||||
}
|
||||
return b.data
|
||||
}
|
||||
|
||||
func (b byteBuffer) bigInt() *big.Int {
|
||||
return new(big.Int).SetBytes(b.data)
|
||||
}
|
||||
|
||||
func (b byteBuffer) toInt() int {
|
||||
return int(b.bigInt().Int64())
|
||||
}
|
122 vendor/gopkg.in/square/go-jose.v2/encoding_test.go generated vendored Normal file
@@ -0,0 +1,122 @@
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestDeflateRoundtrip(t *testing.T) {
|
||||
original := []byte("Lorem ipsum dolor sit amet")
|
||||
|
||||
compressed, err := deflate(original)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
output, err := inflate(compressed)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
if bytes.Compare(output, original) != 0 {
|
||||
t.Error("Input and output do not match")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidCompression(t *testing.T) {
|
||||
_, err := compress("XYZ", []byte{})
|
||||
if err == nil {
|
||||
t.Error("should not accept invalid algorithm")
|
||||
}
|
||||
|
||||
_, err = decompress("XYZ", []byte{})
|
||||
if err == nil {
|
||||
t.Error("should not accept invalid algorithm")
|
||||
}
|
||||
|
||||
_, err = decompress(DEFLATE, []byte{1, 2, 3, 4})
|
||||
if err == nil {
|
||||
t.Error("should not accept invalid data")
|
||||
}
|
||||
}
|
||||
|
||||
func TestByteBufferTrim(t *testing.T) {
|
||||
buf := newBufferFromInt(1)
|
||||
if !bytes.Equal(buf.data, []byte{1}) {
|
||||
t.Error("Byte buffer for integer '1' should contain [0x01]")
|
||||
}
|
||||
|
||||
buf = newBufferFromInt(65537)
|
||||
if !bytes.Equal(buf.data, []byte{1, 0, 1}) {
|
||||
t.Error("Byte buffer for integer '65537' should contain [0x01, 0x00, 0x01]")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFixedSizeBuffer(t *testing.T) {
|
||||
data0 := []byte{}
|
||||
data1 := []byte{1}
|
||||
data2 := []byte{1, 2}
|
||||
data3 := []byte{1, 2, 3}
|
||||
data4 := []byte{1, 2, 3, 4}
|
||||
|
||||
buf0 := newFixedSizeBuffer(data0, 4)
|
||||
buf1 := newFixedSizeBuffer(data1, 4)
|
||||
buf2 := newFixedSizeBuffer(data2, 4)
|
||||
buf3 := newFixedSizeBuffer(data3, 4)
|
||||
buf4 := newFixedSizeBuffer(data4, 4)
|
||||
|
||||
if !bytes.Equal(buf0.data, []byte{0, 0, 0, 0}) {
|
||||
t.Error("Invalid padded buffer for buf0")
|
||||
}
|
||||
if !bytes.Equal(buf1.data, []byte{0, 0, 0, 1}) {
|
||||
t.Error("Invalid padded buffer for buf1")
|
||||
}
|
||||
if !bytes.Equal(buf2.data, []byte{0, 0, 1, 2}) {
|
||||
t.Error("Invalid padded buffer for buf2")
|
||||
}
|
||||
if !bytes.Equal(buf3.data, []byte{0, 1, 2, 3}) {
|
||||
t.Error("Invalid padded buffer for buf3")
|
||||
}
|
||||
if !bytes.Equal(buf4.data, []byte{1, 2, 3, 4}) {
|
||||
t.Error("Invalid padded buffer for buf4")
|
||||
}
|
||||
}
|
||||
|
||||
func TestSerializeJSONRejectsNil(t *testing.T) {
|
||||
defer func() {
|
||||
r := recover()
|
||||
if r == nil || !strings.Contains(r.(string), "nil pointer") {
|
||||
t.Error("serialize function should not accept nil pointer")
|
||||
}
|
||||
}()
|
||||
|
||||
mustSerializeJSON(nil)
|
||||
}
|
||||
|
||||
func TestFixedSizeBufferTooLarge(t *testing.T) {
|
||||
defer func() {
|
||||
r := recover()
|
||||
if r == nil {
|
||||
t.Error("should not be able to create fixed size buffer with oversized data")
|
||||
}
|
||||
}()
|
||||
|
||||
newFixedSizeBuffer(make([]byte, 2), 1)
|
||||
}
|
59 vendor/gopkg.in/square/go-jose.v2/jose-util/README.md generated vendored Normal file
@@ -0,0 +1,59 @@
# JOSE CLI

The `jose-util` command line utility allows for encryption, decryption, signing
and verification of JOSE messages. Its main purpose is to facilitate dealing
with JOSE messages when testing or debugging.

## Usage

The utility includes the subcommands `encrypt`, `decrypt`, `sign`, `verify` and
`expand`. Examples for each command can be found below.

Algorithms are selected via the `--alg` and `--enc` flags, which set the
`alg` and `enc` headers of the output message, respectively. For JWE, `--alg`
specifies the key management algorithm (e.g. `RSA-OAEP`) and `--enc` specifies
the content encryption algorithm (e.g. `A128GCM`). For JWS, `--alg` specifies
the signature algorithm (e.g. `PS256`).

Input and output files can be specified via the `--in` and `--out` flags.
Either flag can be omitted, in which case `jose-util` uses stdin/stdout for
input/output respectively. By default each command outputs a compact
message, but it's possible to get the full serialization by supplying the
`--full` flag.

Keys are specified via the `--key` flag. Supported key types are naked RSA/EC
keys and X.509 certificates with embedded RSA/EC keys. Keys must be in PEM
or DER format.

## Examples

### Encrypt

Takes a plaintext as input, encrypts it, and prints the encrypted message.

    echo 'test message' | jose-util encrypt --key public-key.pem --alg RSA-OAEP --enc A128GCM

### Decrypt

Takes an encrypted message (JWE) as input, decrypts it, and prints the plaintext.

    jose-util decrypt --key private-key.pem

### Sign

Takes a payload as input, signs it, and prints the signed message with the embedded payload.

    jose-util sign --key private-key.pem --alg PS256

### Verify

Reads a signed message (JWS), verifies it, and extracts the payload.

    jose-util verify --key public-key.pem

### Expand

Expands a compact message to the full serialization format.

    jose-util expand --format JWE   # expands a compact JWE to the full format
    jose-util expand --format JWS   # expands a compact JWS to the full format
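
The same sign/verify round trip can also be driven programmatically through the go-jose v2 API that `jose-util` wraps (see `main.go` later in this diff). The following is a minimal sketch, not part of the vendored code; the in-memory RSA key and the `test message` payload are placeholder assumptions:

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"fmt"

	jose "gopkg.in/square/go-jose.v2"
)

func main() {
	// Throwaway RSA key; the CLI would read this from a PEM/DER file instead.
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}

	// Sign with PS256, mirroring `jose-util sign --alg PS256`.
	signer, err := jose.NewSigner(jose.SigningKey{Algorithm: jose.PS256, Key: key}, nil)
	if err != nil {
		panic(err)
	}
	obj, err := signer.Sign([]byte("test message"))
	if err != nil {
		panic(err)
	}
	compact, err := obj.CompactSerialize()
	if err != nil {
		panic(err)
	}

	// Verify, mirroring `jose-util verify`.
	parsed, err := jose.ParseSigned(compact)
	if err != nil {
		panic(err)
	}
	payload, err := parsed.Verify(&key.PublicKey)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(payload)) // prints: test message
}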
|
6
vendor/gopkg.in/square/go-jose.v2/jose-util/ec.key
generated
vendored
Normal file
6
vendor/gopkg.in/square/go-jose.v2/jose-util/ec.key
generated
vendored
Normal file
|
@ -0,0 +1,6 @@
|
|||
-----BEGIN EC PRIVATE KEY-----
|
||||
MIGkAgEBBDDvoj/bM1HokUjYWO/IDFs26Jo0GIFtU3tMQQu7ZabKscDMK3dZA0mK
|
||||
v97ij7BBFbCgBwYFK4EEACKhZANiAAT3KhQQCDFN32y/B72g+qOFw/5/aNx1MvZa
|
||||
rwDDa/2G3V0HLTS0VE82sLEUKS8xwkWFI+gNRXk0vvN+Hf+myJI1jOIY+tYQlh+C
|
||||
ZiKGNJ6g5/Su7V6ukGtN+UiY+sx+0LI=
|
||||
-----END EC PRIVATE KEY-----
|
5
vendor/gopkg.in/square/go-jose.v2/jose-util/ec.pub
generated
vendored
Normal file
5
vendor/gopkg.in/square/go-jose.v2/jose-util/ec.pub
generated
vendored
Normal file
|
@ -0,0 +1,5 @@
|
|||
-----BEGIN PUBLIC KEY-----
|
||||
MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE9yoUEAgxTd9svwe9oPqjhcP+f2jcdTL2
|
||||
Wq8Aw2v9ht1dBy00tFRPNrCxFCkvMcJFhSPoDUV5NL7zfh3/psiSNYziGPrWEJYf
|
||||
gmYihjSeoOf0ru1erpBrTflImPrMftCy
|
||||
-----END PUBLIC KEY-----
|
94
vendor/gopkg.in/square/go-jose.v2/jose-util/jose-util.t
generated
vendored
Normal file
94
vendor/gopkg.in/square/go-jose.v2/jose-util/jose-util.t
generated
vendored
Normal file
|
@ -0,0 +1,94 @@
|
|||
Set up test keys.
|
||||
|
||||
$ cat > rsa.pub <<EOF
|
||||
> -----BEGIN PUBLIC KEY-----
|
||||
> MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAslWybuiNYR7uOgKuvaBw
|
||||
> qVk8saEutKhOAaW+3hWF65gJei+ZV8QFfYDxs9ZaRZlWAUMtncQPnw7ZQlXO9ogN
|
||||
> 5cMcN50C6qMOOZzghK7danalhF5lUETC4Hk3Eisbi/PR3IfVyXaRmqL6X66MKj/J
|
||||
> AKyD9NFIDVy52K8A198Jojnrw2+XXQW72U68fZtvlyl/BTBWQ9Re5JSTpEcVmpCR
|
||||
> 8FrFc0RPMBm+G5dRs08vvhZNiTT2JACO5V+J5ZrgP3s5hnGFcQFZgDnXLInDUdoi
|
||||
> 1MuCjaAU0ta8/08pHMijNix5kFofdPEB954MiZ9k4kQ5/utt02I9x2ssHqw71ojj
|
||||
> vwIDAQAB
|
||||
> -----END PUBLIC KEY-----
|
||||
> EOF
|
||||
|
||||
$ cat > rsa.key <<EOF
|
||||
> -----BEGIN RSA PRIVATE KEY-----
|
||||
> MIIEogIBAAKCAQEAslWybuiNYR7uOgKuvaBwqVk8saEutKhOAaW+3hWF65gJei+Z
|
||||
> V8QFfYDxs9ZaRZlWAUMtncQPnw7ZQlXO9ogN5cMcN50C6qMOOZzghK7danalhF5l
|
||||
> UETC4Hk3Eisbi/PR3IfVyXaRmqL6X66MKj/JAKyD9NFIDVy52K8A198Jojnrw2+X
|
||||
> XQW72U68fZtvlyl/BTBWQ9Re5JSTpEcVmpCR8FrFc0RPMBm+G5dRs08vvhZNiTT2
|
||||
> JACO5V+J5ZrgP3s5hnGFcQFZgDnXLInDUdoi1MuCjaAU0ta8/08pHMijNix5kFof
|
||||
> dPEB954MiZ9k4kQ5/utt02I9x2ssHqw71ojjvwIDAQABAoIBABrYDYDmXom1BzUS
|
||||
> PE1s/ihvt1QhqA8nmn5i/aUeZkc9XofW7GUqq4zlwPxKEtKRL0IHY7Fw1s0hhhCX
|
||||
> LA0uE7F3OiMg7lR1cOm5NI6kZ83jyCxxrRx1DUSO2nxQotfhPsDMbaDiyS4WxEts
|
||||
> 0cp2SYJhdYd/jTH9uDfmt+DGwQN7Jixio1Dj3vwB7krDY+mdre4SFY7Gbk9VxkDg
|
||||
> LgCLMoq52m+wYufP8CTgpKFpMb2/yJrbLhuJxYZrJ3qd/oYo/91k6v7xlBKEOkwD
|
||||
> 2veGk9Dqi8YPNxaRktTEjnZb6ybhezat93+VVxq4Oem3wMwou1SfXrSUKtgM/p2H
|
||||
> vfw/76ECgYEA2fNL9tC8u9M0wjA+kvvtDG96qO6O66Hksssy6RWInD+Iqk3MtHQt
|
||||
> LeoCjvX+zERqwOb6SI6empk5pZ9E3/9vJ0dBqkxx3nqn4M/nRWnExGgngJsL959t
|
||||
> f50cdxva8y1RjNhT4kCwTrupX/TP8lAG8SfG1Alo2VFR8iWd8hDQcTECgYEA0Xfj
|
||||
> EgqAsVh4U0s3lFxKjOepEyp0G1Imty5J16SvcOEAD1Mrmz94aSSp0bYhXNVdbf7n
|
||||
> Rk77htWC7SE29fGjOzZRS76wxj/SJHF+rktHB2Zt23k1jBeZ4uLMPMnGLY/BJ099
|
||||
> 5DTGo0yU0rrPbyXosx+ukfQLAHFuggX4RNeM5+8CgYB7M1J/hGMLcUpjcs4MXCgV
|
||||
> XXbiw2c6v1r9zmtK4odEe42PZ0cNwpY/XAZyNZAAe7Q0stxL44K4NWEmxC80x7lX
|
||||
> ZKozz96WOpNnO16qGC3IMHAT/JD5Or+04WTT14Ue7UEp8qcIQDTpbJ9DxKk/eglS
|
||||
> jH+SIHeKULOXw7fSu7p4IQKBgBnyVchIUMSnBtCagpn4DKwDjif3nEY+GNmb/D2g
|
||||
> ArNiy5UaYk5qwEmV5ws5GkzbiSU07AUDh5ieHgetk5dHhUayZcOSLWeBRFCLVnvU
|
||||
> i0nZYEZNb1qZGdDG8zGcdNXz9qMd76Qy/WAA/nZT+Zn1AiweAovFxQ8a/etRPf2Z
|
||||
> DbU1AoGAHpCgP7B/4GTBe49H0AQueQHBn4RIkgqMy9xiMeR+U+U0vaY0TlfLhnX+
|
||||
> 5PkNfkPXohXlfL7pxwZNYa6FZhCAubzvhKCdUASivkoGaIEk6g1VTVYS/eDVQ4CA
|
||||
> slfl+elXtLq/l1kQ8C14jlHrQzSXx4PQvjDEnAmaHSJNz4mP9Fg=
|
||||
> -----END RSA PRIVATE KEY-----
|
||||
> EOF
|
||||
|
||||
$ cat > ec.pub <<EOF
|
||||
> -----BEGIN PUBLIC KEY-----
|
||||
> MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE9yoUEAgxTd9svwe9oPqjhcP+f2jcdTL2
|
||||
> Wq8Aw2v9ht1dBy00tFRPNrCxFCkvMcJFhSPoDUV5NL7zfh3/psiSNYziGPrWEJYf
|
||||
> gmYihjSeoOf0ru1erpBrTflImPrMftCy
|
||||
> -----END PUBLIC KEY-----
|
||||
> EOF
|
||||
|
||||
$ cat > ec.key <<EOF
|
||||
> -----BEGIN EC PRIVATE KEY-----
|
||||
> MIGkAgEBBDDvoj/bM1HokUjYWO/IDFs26Jo0GIFtU3tMQQu7ZabKscDMK3dZA0mK
|
||||
> v97ij7BBFbCgBwYFK4EEACKhZANiAAT3KhQQCDFN32y/B72g+qOFw/5/aNx1MvZa
|
||||
> rwDDa/2G3V0HLTS0VE82sLEUKS8xwkWFI+gNRXk0vvN+Hf+myJI1jOIY+tYQlh+C
|
||||
> ZiKGNJ6g5/Su7V6ukGtN+UiY+sx+0LI=
|
||||
> -----END EC PRIVATE KEY-----
|
||||
> EOF
|
||||
|
||||
Encrypt and then decrypt a test message (RSA).
|
||||
|
||||
$ echo "Lorem ipsum dolor sit amet" |
|
||||
> jose-util encrypt --alg RSA-OAEP --enc A128GCM --key rsa.pub |
|
||||
> jose-util decrypt --key rsa.key
|
||||
Lorem ipsum dolor sit amet
|
||||
|
||||
Encrypt and then decrypt a test message (EC).
|
||||
|
||||
$ echo "Lorem ipsum dolor sit amet" |
|
||||
> jose-util encrypt --alg ECDH-ES+A128KW --enc A128GCM --key ec.pub |
|
||||
> jose-util decrypt --key ec.key
|
||||
Lorem ipsum dolor sit amet
|
||||
|
||||
Sign and verify a test message (RSA).
|
||||
|
||||
$ echo "Lorem ipsum dolor sit amet" |
|
||||
> jose-util sign --alg PS256 --key rsa.key |
|
||||
> jose-util verify --key rsa.pub
|
||||
Lorem ipsum dolor sit amet
|
||||
|
||||
Sign and verify a test message (EC).
|
||||
|
||||
$ echo "Lorem ipsum dolor sit amet" |
|
||||
> jose-util sign --alg ES384 --key ec.key |
|
||||
> jose-util verify --key ec.pub
|
||||
Lorem ipsum dolor sit amet
|
||||
|
||||
Expand a compact message to full format.
|
||||
|
||||
$ echo "eyJhbGciOiJFUzM4NCJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQK.QPU35XY913Im7ZEaN2yHykfbtPqjHZvYp-lV8OcTAJZs67bJFSdTSkQhQWE9ch6tvYrj_7py6HKaWVFLll_s_Rm6bmwq3JszsHrIvFFm1NydruYHhvAnx7rjYiqwOu0W" |
|
||||
> jose-util expand --format JWS
|
||||
{"payload":"TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQK","protected":"eyJhbGciOiJFUzM4NCJ9","signature":"QPU35XY913Im7ZEaN2yHykfbtPqjHZvYp-lV8OcTAJZs67bJFSdTSkQhQWE9ch6tvYrj_7py6HKaWVFLll_s_Rm6bmwq3JszsHrIvFFm1NydruYHhvAnx7rjYiqwOu0W"}
|
189
vendor/gopkg.in/square/go-jose.v2/jose-util/main.go
generated
vendored
Normal file
189
vendor/gopkg.in/square/go-jose.v2/jose-util/main.go
generated
vendored
Normal file
|
@ -0,0 +1,189 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
|
||||
"gopkg.in/alecthomas/kingpin.v2"
|
||||
"gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
var (
|
||||
app = kingpin.New("jose-util", "A command-line utility for dealing with JOSE objects.")
|
||||
|
||||
keyFile = app.Flag("key", "Path to key file (PEM or DER-encoded)").ExistingFile()
|
||||
inFile = app.Flag("in", "Path to input file (stdin if missing)").ExistingFile()
|
||||
outFile = app.Flag("out", "Path to output file (stdout if missing)").ExistingFile()
|
||||
|
||||
encryptCommand = app.Command("encrypt", "Encrypt a plaintext, output ciphertext.")
|
||||
algFlag = encryptCommand.Flag("alg", "Key management algorithm (e.g. RSA-OAEP)").Required().String()
|
||||
encFlag = encryptCommand.Flag("enc", "Content encryption algorithm (e.g. A128GCM)").Required().String()
|
||||
|
||||
decryptCommand = app.Command("decrypt", "Decrypt a ciphertext, output plaintext.")
|
||||
|
||||
signCommand = app.Command("sign", "Sign a payload, output signed message.")
|
||||
sigAlgFlag = signCommand.Flag("alg", "Key management algorithm (e.g. RSA-OAEP)").Required().String()
|
||||
|
||||
verifyCommand = app.Command("verify", "Verify a signed message, output payload.")
|
||||
|
||||
expandCommand = app.Command("expand", "Expand JOSE object to full serialization format.")
|
||||
formatFlag = expandCommand.Flag("format", "Type of message to expand (JWS or JWE, defaults to JWE)").String()
|
||||
|
||||
full = app.Flag("full", "Use full serialization format (instead of compact)").Bool()
|
||||
)
|
||||
|
||||
func main() {
|
||||
app.Version("v2")
|
||||
|
||||
command := kingpin.MustParse(app.Parse(os.Args[1:]))
|
||||
|
||||
var keyBytes []byte
|
||||
var err error
|
||||
if command != "expand" {
|
||||
keyBytes, err = ioutil.ReadFile(*keyFile)
|
||||
exitOnError(err, "unable to read key file")
|
||||
}
|
||||
|
||||
switch command {
|
||||
case "encrypt":
|
||||
pub, err := LoadPublicKey(keyBytes)
|
||||
exitOnError(err, "unable to read public key")
|
||||
|
||||
alg := jose.KeyAlgorithm(*algFlag)
|
||||
enc := jose.ContentEncryption(*encFlag)
|
||||
|
||||
crypter, err := jose.NewEncrypter(enc, jose.Recipient{Algorithm: alg, Key: pub}, nil)
|
||||
exitOnError(err, "unable to instantiate encrypter")
|
||||
|
||||
obj, err := crypter.Encrypt(readInput(*inFile))
|
||||
exitOnError(err, "unable to encrypt")
|
||||
|
||||
var msg string
|
||||
if *full {
|
||||
msg = obj.FullSerialize()
|
||||
} else {
|
||||
msg, err = obj.CompactSerialize()
|
||||
exitOnError(err, "unable to serialize message")
|
||||
}
|
||||
|
||||
writeOutput(*outFile, []byte(msg))
|
||||
case "decrypt":
|
||||
priv, err := LoadPrivateKey(keyBytes)
|
||||
exitOnError(err, "unable to read private key")
|
||||
|
||||
obj, err := jose.ParseEncrypted(string(readInput(*inFile)))
|
||||
exitOnError(err, "unable to parse message")
|
||||
|
||||
plaintext, err := obj.Decrypt(priv)
|
||||
exitOnError(err, "unable to decrypt message")
|
||||
|
||||
writeOutput(*outFile, plaintext)
|
||||
case "sign":
|
||||
signingKey, err := LoadPrivateKey(keyBytes)
|
||||
exitOnError(err, "unable to read private key")
|
||||
|
||||
alg := jose.SignatureAlgorithm(*sigAlgFlag)
|
||||
signer, err := jose.NewSigner(jose.SigningKey{Algorithm: alg, Key: signingKey}, nil)
|
||||
exitOnError(err, "unable to make signer")
|
||||
|
||||
obj, err := signer.Sign(readInput(*inFile))
|
||||
exitOnError(err, "unable to sign")
|
||||
|
||||
var msg string
|
||||
if *full {
|
||||
msg = obj.FullSerialize()
|
||||
} else {
|
||||
msg, err = obj.CompactSerialize()
|
||||
exitOnError(err, "unable to serialize message")
|
||||
}
|
||||
|
||||
writeOutput(*outFile, []byte(msg))
|
||||
case "verify":
|
||||
verificationKey, err := LoadPublicKey(keyBytes)
|
||||
exitOnError(err, "unable to read public key")
|
||||
|
||||
obj, err := jose.ParseSigned(string(readInput(*inFile)))
|
||||
exitOnError(err, "unable to parse message")
|
||||
|
||||
plaintext, err := obj.Verify(verificationKey)
|
||||
exitOnError(err, "invalid signature")
|
||||
|
||||
writeOutput(*outFile, plaintext)
|
||||
case "expand":
|
||||
input := string(readInput(*inFile))
|
||||
|
||||
var serialized string
|
||||
var err error
|
||||
switch *formatFlag {
|
||||
case "", "JWE":
|
||||
var jwe *jose.JSONWebEncryption
|
||||
jwe, err = jose.ParseEncrypted(input)
|
||||
if err == nil {
|
||||
serialized = jwe.FullSerialize()
|
||||
}
|
||||
case "JWS":
|
||||
var jws *jose.JSONWebSignature
|
||||
jws, err = jose.ParseSigned(input)
|
||||
if err == nil {
|
||||
serialized = jws.FullSerialize()
|
||||
}
|
||||
}
|
||||
|
||||
exitOnError(err, "unable to expand message")
|
||||
writeOutput(*outFile, []byte(serialized))
|
||||
writeOutput(*outFile, []byte("\n"))
|
||||
}
|
||||
}
|
||||
|
||||
// Exit and print error message if we encountered a problem
|
||||
func exitOnError(err error, msg string) {
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "%s: %s\n", msg, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// Read input from file or stdin
|
||||
func readInput(path string) []byte {
|
||||
var bytes []byte
|
||||
var err error
|
||||
|
||||
if path != "" {
|
||||
bytes, err = ioutil.ReadFile(path)
|
||||
} else {
|
||||
bytes, err = ioutil.ReadAll(os.Stdin)
|
||||
}
|
||||
|
||||
exitOnError(err, "unable to read input")
|
||||
return bytes
|
||||
}
|
||||
|
||||
// Write output to file or stdout
|
||||
func writeOutput(path string, data []byte) {
|
||||
var err error
|
||||
|
||||
if path != "" {
|
||||
err = ioutil.WriteFile(path, data, 0644)
|
||||
} else {
|
||||
_, err = os.Stdout.Write(data)
|
||||
}
|
||||
|
||||
exitOnError(err, "unable to write output")
|
||||
}
|
6
vendor/gopkg.in/square/go-jose.v2/jose-util/test-keys/ecdh.key
generated
vendored
Normal file
6
vendor/gopkg.in/square/go-jose.v2/jose-util/test-keys/ecdh.key
generated
vendored
Normal file
|
@ -0,0 +1,6 @@
|
|||
-----BEGIN EC PRIVATE KEY-----
|
||||
MIGkAgEBBDDvoj/bM1HokUjYWO/IDFs26Jo0GIFtU3tMQQu7ZabKscDMK3dZA0mK
|
||||
v97ij7BBFbCgBwYFK4EEACKhZANiAAT3KhQQCDFN32y/B72g+qOFw/5/aNx1MvZa
|
||||
rwDDa/2G3V0HLTS0VE82sLEUKS8xwkWFI+gNRXk0vvN+Hf+myJI1jOIY+tYQlh+C
|
||||
ZiKGNJ6g5/Su7V6ukGtN+UiY+sx+0LI=
|
||||
-----END EC PRIVATE KEY-----
|
5
vendor/gopkg.in/square/go-jose.v2/jose-util/test-keys/ecdh.pub
generated
vendored
Normal file
5
vendor/gopkg.in/square/go-jose.v2/jose-util/test-keys/ecdh.pub
generated
vendored
Normal file
|
@ -0,0 +1,5 @@
|
|||
-----BEGIN PUBLIC KEY-----
|
||||
MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE9yoUEAgxTd9svwe9oPqjhcP+f2jcdTL2
|
||||
Wq8Aw2v9ht1dBy00tFRPNrCxFCkvMcJFhSPoDUV5NL7zfh3/psiSNYziGPrWEJYf
|
||||
gmYihjSeoOf0ru1erpBrTflImPrMftCy
|
||||
-----END PUBLIC KEY-----
|
27
vendor/gopkg.in/square/go-jose.v2/jose-util/test-keys/rsa.key
generated
vendored
Normal file
27
vendor/gopkg.in/square/go-jose.v2/jose-util/test-keys/rsa.key
generated
vendored
Normal file
|
@ -0,0 +1,27 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEogIBAAKCAQEAslWybuiNYR7uOgKuvaBwqVk8saEutKhOAaW+3hWF65gJei+Z
|
||||
V8QFfYDxs9ZaRZlWAUMtncQPnw7ZQlXO9ogN5cMcN50C6qMOOZzghK7danalhF5l
|
||||
UETC4Hk3Eisbi/PR3IfVyXaRmqL6X66MKj/JAKyD9NFIDVy52K8A198Jojnrw2+X
|
||||
XQW72U68fZtvlyl/BTBWQ9Re5JSTpEcVmpCR8FrFc0RPMBm+G5dRs08vvhZNiTT2
|
||||
JACO5V+J5ZrgP3s5hnGFcQFZgDnXLInDUdoi1MuCjaAU0ta8/08pHMijNix5kFof
|
||||
dPEB954MiZ9k4kQ5/utt02I9x2ssHqw71ojjvwIDAQABAoIBABrYDYDmXom1BzUS
|
||||
PE1s/ihvt1QhqA8nmn5i/aUeZkc9XofW7GUqq4zlwPxKEtKRL0IHY7Fw1s0hhhCX
|
||||
LA0uE7F3OiMg7lR1cOm5NI6kZ83jyCxxrRx1DUSO2nxQotfhPsDMbaDiyS4WxEts
|
||||
0cp2SYJhdYd/jTH9uDfmt+DGwQN7Jixio1Dj3vwB7krDY+mdre4SFY7Gbk9VxkDg
|
||||
LgCLMoq52m+wYufP8CTgpKFpMb2/yJrbLhuJxYZrJ3qd/oYo/91k6v7xlBKEOkwD
|
||||
2veGk9Dqi8YPNxaRktTEjnZb6ybhezat93+VVxq4Oem3wMwou1SfXrSUKtgM/p2H
|
||||
vfw/76ECgYEA2fNL9tC8u9M0wjA+kvvtDG96qO6O66Hksssy6RWInD+Iqk3MtHQt
|
||||
LeoCjvX+zERqwOb6SI6empk5pZ9E3/9vJ0dBqkxx3nqn4M/nRWnExGgngJsL959t
|
||||
f50cdxva8y1RjNhT4kCwTrupX/TP8lAG8SfG1Alo2VFR8iWd8hDQcTECgYEA0Xfj
|
||||
EgqAsVh4U0s3lFxKjOepEyp0G1Imty5J16SvcOEAD1Mrmz94aSSp0bYhXNVdbf7n
|
||||
Rk77htWC7SE29fGjOzZRS76wxj/SJHF+rktHB2Zt23k1jBeZ4uLMPMnGLY/BJ099
|
||||
5DTGo0yU0rrPbyXosx+ukfQLAHFuggX4RNeM5+8CgYB7M1J/hGMLcUpjcs4MXCgV
|
||||
XXbiw2c6v1r9zmtK4odEe42PZ0cNwpY/XAZyNZAAe7Q0stxL44K4NWEmxC80x7lX
|
||||
ZKozz96WOpNnO16qGC3IMHAT/JD5Or+04WTT14Ue7UEp8qcIQDTpbJ9DxKk/eglS
|
||||
jH+SIHeKULOXw7fSu7p4IQKBgBnyVchIUMSnBtCagpn4DKwDjif3nEY+GNmb/D2g
|
||||
ArNiy5UaYk5qwEmV5ws5GkzbiSU07AUDh5ieHgetk5dHhUayZcOSLWeBRFCLVnvU
|
||||
i0nZYEZNb1qZGdDG8zGcdNXz9qMd76Qy/WAA/nZT+Zn1AiweAovFxQ8a/etRPf2Z
|
||||
DbU1AoGAHpCgP7B/4GTBe49H0AQueQHBn4RIkgqMy9xiMeR+U+U0vaY0TlfLhnX+
|
||||
5PkNfkPXohXlfL7pxwZNYa6FZhCAubzvhKCdUASivkoGaIEk6g1VTVYS/eDVQ4CA
|
||||
slfl+elXtLq/l1kQ8C14jlHrQzSXx4PQvjDEnAmaHSJNz4mP9Fg=
|
||||
-----END RSA PRIVATE KEY-----
|
9
vendor/gopkg.in/square/go-jose.v2/jose-util/test-keys/rsa.pub
generated
vendored
Normal file
9
vendor/gopkg.in/square/go-jose.v2/jose-util/test-keys/rsa.pub
generated
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
-----BEGIN PUBLIC KEY-----
|
||||
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAslWybuiNYR7uOgKuvaBw
|
||||
qVk8saEutKhOAaW+3hWF65gJei+ZV8QFfYDxs9ZaRZlWAUMtncQPnw7ZQlXO9ogN
|
||||
5cMcN50C6qMOOZzghK7danalhF5lUETC4Hk3Eisbi/PR3IfVyXaRmqL6X66MKj/J
|
||||
AKyD9NFIDVy52K8A198Jojnrw2+XXQW72U68fZtvlyl/BTBWQ9Re5JSTpEcVmpCR
|
||||
8FrFc0RPMBm+G5dRs08vvhZNiTT2JACO5V+J5ZrgP3s5hnGFcQFZgDnXLInDUdoi
|
||||
1MuCjaAU0ta8/08pHMijNix5kFofdPEB954MiZ9k4kQ5/utt02I9x2ssHqw71ojj
|
||||
vwIDAQAB
|
||||
-----END PUBLIC KEY-----
|
101
vendor/gopkg.in/square/go-jose.v2/jose-util/utils.go
generated
vendored
Normal file
101
vendor/gopkg.in/square/go-jose.v2/jose-util/utils.go
generated
vendored
Normal file
|
@ -0,0 +1,101 @@
|
|||
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/x509"
|
||||
"encoding/pem"
|
||||
"errors"
|
||||
"fmt"
|
||||
"gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
func LoadJSONWebKey(json []byte, pub bool) (*jose.JSONWebKey, error) {
|
||||
var jwk jose.JSONWebKey
|
||||
err := jwk.UnmarshalJSON(json)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !jwk.Valid() {
|
||||
return nil, errors.New("invalid JWK key")
|
||||
}
|
||||
if jwk.IsPublic() != pub {
|
||||
return nil, errors.New("priv/pub JWK key mismatch")
|
||||
}
|
||||
return &jwk, nil
|
||||
}
|
||||
|
||||
// LoadPublicKey loads a public key from PEM/DER/JWK-encoded data.
|
||||
func LoadPublicKey(data []byte) (interface{}, error) {
|
||||
input := data
|
||||
|
||||
block, _ := pem.Decode(data)
|
||||
if block != nil {
|
||||
input = block.Bytes
|
||||
}
|
||||
|
||||
// Try to load SubjectPublicKeyInfo
|
||||
pub, err0 := x509.ParsePKIXPublicKey(input)
|
||||
if err0 == nil {
|
||||
return pub, nil
|
||||
}
|
||||
|
||||
cert, err1 := x509.ParseCertificate(input)
|
||||
if err1 == nil {
|
||||
return cert.PublicKey, nil
|
||||
}
|
||||
|
||||
jwk, err2 := LoadJSONWebKey(data, true)
|
||||
if err2 == nil {
|
||||
return jwk, nil
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("square/go-jose: parse error, got '%s', '%s' and '%s'", err0, err1, err2)
|
||||
}
|
||||
|
||||
// LoadPrivateKey loads a private key from PEM/DER/JWK-encoded data.
|
||||
func LoadPrivateKey(data []byte) (interface{}, error) {
|
||||
input := data
|
||||
|
||||
block, _ := pem.Decode(data)
|
||||
if block != nil {
|
||||
input = block.Bytes
|
||||
}
|
||||
|
||||
var priv interface{}
|
||||
priv, err0 := x509.ParsePKCS1PrivateKey(input)
|
||||
if err0 == nil {
|
||||
return priv, nil
|
||||
}
|
||||
|
||||
priv, err1 := x509.ParsePKCS8PrivateKey(input)
|
||||
if err1 == nil {
|
||||
return priv, nil
|
||||
}
|
||||
|
||||
priv, err2 := x509.ParseECPrivateKey(input)
|
||||
if err2 == nil {
|
||||
return priv, nil
|
||||
}
|
||||
|
||||
jwk, err3 := LoadJSONWebKey(input, false)
|
||||
if err3 == nil {
|
||||
return jwk, nil
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("square/go-jose: parse error, got '%s', '%s', '%s' and '%s'", err0, err1, err2, err3)
|
||||
}
|
27
vendor/gopkg.in/square/go-jose.v2/json/LICENSE
generated
vendored
Normal file
27
vendor/gopkg.in/square/go-jose.v2/json/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,27 @@
|
|||
Copyright (c) 2012 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
13
vendor/gopkg.in/square/go-jose.v2/json/README.md
generated
vendored
Normal file
13
vendor/gopkg.in/square/go-jose.v2/json/README.md
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
|||
# Safe JSON

This repository contains a fork of the `encoding/json` package from Go 1.6.

The following changes were made:

* Object deserialization uses case-sensitive member name matching instead of
  [case-insensitive matching](https://www.ietf.org/mail-archive/web/json/current/msg03763.html).
  This is to avoid differences in the interpretation of JOSE messages between
  go-jose and libraries written in other languages.
* When deserializing a JSON object, we check for duplicate keys and reject the
  input whenever we detect a duplicate. Rather than trying to work with malformed
  data, we prefer to reject it right away.
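
A minimal sketch of what these two changes mean for callers, assuming the fork is imported under its vendored path; the `josejson` alias and the sample inputs are illustrative assumptions:

package main

import (
	"fmt"

	josejson "gopkg.in/square/go-jose.v2/json"
)

func main() {
	var hdr struct {
		Alg string `json:"alg"`
	}

	// Duplicate member names are rejected outright instead of "last one wins".
	if err := josejson.Unmarshal([]byte(`{"alg":"RS256","alg":"none"}`), &hdr); err != nil {
		fmt.Println("duplicate key rejected:", err)
	}

	// Member name matching is case-sensitive, so "ALG" does not populate the "alg" field.
	if err := josejson.Unmarshal([]byte(`{"ALG":"RS256"}`), &hdr); err == nil && hdr.Alg == "" {
		fmt.Println(`"ALG" was not matched to the "alg" field`)
	}
}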
|
223
vendor/gopkg.in/square/go-jose.v2/json/bench_test.go
generated
vendored
Normal file
223
vendor/gopkg.in/square/go-jose.v2/json/bench_test.go
generated
vendored
Normal file
|
@ -0,0 +1,223 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Large data benchmark.
|
||||
// The JSON data is a summary of agl's changes in the
|
||||
// go, webkit, and chromium open source projects.
|
||||
// We benchmark converting between the JSON form
|
||||
// and in-memory data structures.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type codeResponse struct {
|
||||
Tree *codeNode `json:"tree"`
|
||||
Username string `json:"username"`
|
||||
}
|
||||
|
||||
type codeNode struct {
|
||||
Name string `json:"name"`
|
||||
Kids []*codeNode `json:"kids"`
|
||||
CLWeight float64 `json:"cl_weight"`
|
||||
Touches int `json:"touches"`
|
||||
MinT int64 `json:"min_t"`
|
||||
MaxT int64 `json:"max_t"`
|
||||
MeanT int64 `json:"mean_t"`
|
||||
}
|
||||
|
||||
var codeJSON []byte
|
||||
var codeStruct codeResponse
|
||||
|
||||
func codeInit() {
|
||||
f, err := os.Open("testdata/code.json.gz")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer f.Close()
|
||||
gz, err := gzip.NewReader(f)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
data, err := ioutil.ReadAll(gz)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
codeJSON = data
|
||||
|
||||
if err := Unmarshal(codeJSON, &codeStruct); err != nil {
|
||||
panic("unmarshal code.json: " + err.Error())
|
||||
}
|
||||
|
||||
if data, err = Marshal(&codeStruct); err != nil {
|
||||
panic("marshal code.json: " + err.Error())
|
||||
}
|
||||
|
||||
if !bytes.Equal(data, codeJSON) {
|
||||
println("different lengths", len(data), len(codeJSON))
|
||||
for i := 0; i < len(data) && i < len(codeJSON); i++ {
|
||||
if data[i] != codeJSON[i] {
|
||||
println("re-marshal: changed at byte", i)
|
||||
println("orig: ", string(codeJSON[i-10:i+10]))
|
||||
println("new: ", string(data[i-10:i+10]))
|
||||
break
|
||||
}
|
||||
}
|
||||
panic("re-marshal code.json: different result")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCodeEncoder(b *testing.B) {
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
enc := NewEncoder(ioutil.Discard)
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := enc.Encode(&codeStruct); err != nil {
|
||||
b.Fatal("Encode:", err)
|
||||
}
|
||||
}
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func BenchmarkCodeMarshal(b *testing.B) {
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := Marshal(&codeStruct); err != nil {
|
||||
b.Fatal("Marshal:", err)
|
||||
}
|
||||
}
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func BenchmarkCodeDecoder(b *testing.B) {
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
dec := NewDecoder(&buf)
|
||||
var r codeResponse
|
||||
for i := 0; i < b.N; i++ {
|
||||
buf.Write(codeJSON)
|
||||
// hide EOF
|
||||
buf.WriteByte('\n')
|
||||
buf.WriteByte('\n')
|
||||
buf.WriteByte('\n')
|
||||
if err := dec.Decode(&r); err != nil {
|
||||
b.Fatal("Decode:", err)
|
||||
}
|
||||
}
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func BenchmarkDecoderStream(b *testing.B) {
|
||||
b.StopTimer()
|
||||
var buf bytes.Buffer
|
||||
dec := NewDecoder(&buf)
|
||||
buf.WriteString(`"` + strings.Repeat("x", 1000000) + `"` + "\n\n\n")
|
||||
var x interface{}
|
||||
if err := dec.Decode(&x); err != nil {
|
||||
b.Fatal("Decode:", err)
|
||||
}
|
||||
ones := strings.Repeat(" 1\n", 300000) + "\n\n\n"
|
||||
b.StartTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if i%300000 == 0 {
|
||||
buf.WriteString(ones)
|
||||
}
|
||||
x = nil
|
||||
if err := dec.Decode(&x); err != nil || x != 1.0 {
|
||||
b.Fatalf("Decode: %v after %d", err, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCodeUnmarshal(b *testing.B) {
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
for i := 0; i < b.N; i++ {
|
||||
var r codeResponse
|
||||
if err := Unmarshal(codeJSON, &r); err != nil {
|
||||
b.Fatal("Unmmarshal:", err)
|
||||
}
|
||||
}
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func BenchmarkCodeUnmarshalReuse(b *testing.B) {
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
var r codeResponse
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := Unmarshal(codeJSON, &r); err != nil {
|
||||
b.Fatal("Unmmarshal:", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkUnmarshalString(b *testing.B) {
|
||||
data := []byte(`"hello, world"`)
|
||||
var s string
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := Unmarshal(data, &s); err != nil {
|
||||
b.Fatal("Unmarshal:", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkUnmarshalFloat64(b *testing.B) {
|
||||
var f float64
|
||||
data := []byte(`3.14`)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := Unmarshal(data, &f); err != nil {
|
||||
b.Fatal("Unmarshal:", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkUnmarshalInt64(b *testing.B) {
|
||||
var x int64
|
||||
data := []byte(`3`)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := Unmarshal(data, &x); err != nil {
|
||||
b.Fatal("Unmarshal:", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIssue10335(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
var s struct{}
|
||||
j := []byte(`{"a":{ }}`)
|
||||
for n := 0; n < b.N; n++ {
|
||||
if err := Unmarshal(j, &s); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
1183
vendor/gopkg.in/square/go-jose.v2/json/decode.go
generated
vendored
Normal file
1183
vendor/gopkg.in/square/go-jose.v2/json/decode.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1474
vendor/gopkg.in/square/go-jose.v2/json/decode_test.go
generated
vendored
Normal file
1474
vendor/gopkg.in/square/go-jose.v2/json/decode_test.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1197
vendor/gopkg.in/square/go-jose.v2/json/encode.go
generated
vendored
Normal file
1197
vendor/gopkg.in/square/go-jose.v2/json/encode.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
538
vendor/gopkg.in/square/go-jose.v2/json/encode_test.go
generated
vendored
Normal file
538
vendor/gopkg.in/square/go-jose.v2/json/encode_test.go
generated
vendored
Normal file
|
@ -0,0 +1,538 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"math"
|
||||
"reflect"
|
||||
"testing"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
type Optionals struct {
|
||||
Sr string `json:"sr"`
|
||||
So string `json:"so,omitempty"`
|
||||
Sw string `json:"-"`
|
||||
|
||||
Ir int `json:"omitempty"` // actually named omitempty, not an option
|
||||
Io int `json:"io,omitempty"`
|
||||
|
||||
Slr []string `json:"slr,random"`
|
||||
Slo []string `json:"slo,omitempty"`
|
||||
|
||||
Mr map[string]interface{} `json:"mr"`
|
||||
Mo map[string]interface{} `json:",omitempty"`
|
||||
|
||||
Fr float64 `json:"fr"`
|
||||
Fo float64 `json:"fo,omitempty"`
|
||||
|
||||
Br bool `json:"br"`
|
||||
Bo bool `json:"bo,omitempty"`
|
||||
|
||||
Ur uint `json:"ur"`
|
||||
Uo uint `json:"uo,omitempty"`
|
||||
|
||||
Str struct{} `json:"str"`
|
||||
Sto struct{} `json:"sto,omitempty"`
|
||||
}
|
||||
|
||||
var optionalsExpected = `{
|
||||
"sr": "",
|
||||
"omitempty": 0,
|
||||
"slr": null,
|
||||
"mr": {},
|
||||
"fr": 0,
|
||||
"br": false,
|
||||
"ur": 0,
|
||||
"str": {},
|
||||
"sto": {}
|
||||
}`
|
||||
|
||||
func TestOmitEmpty(t *testing.T) {
|
||||
var o Optionals
|
||||
o.Sw = "something"
|
||||
o.Mr = map[string]interface{}{}
|
||||
o.Mo = map[string]interface{}{}
|
||||
|
||||
got, err := MarshalIndent(&o, "", " ")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if got := string(got); got != optionalsExpected {
|
||||
t.Errorf(" got: %s\nwant: %s\n", got, optionalsExpected)
|
||||
}
|
||||
}
|
||||
|
||||
type StringTag struct {
|
||||
BoolStr bool `json:",string"`
|
||||
IntStr int64 `json:",string"`
|
||||
StrStr string `json:",string"`
|
||||
}
|
||||
|
||||
var stringTagExpected = `{
|
||||
"BoolStr": "true",
|
||||
"IntStr": "42",
|
||||
"StrStr": "\"xzbit\""
|
||||
}`
|
||||
|
||||
func TestStringTag(t *testing.T) {
|
||||
var s StringTag
|
||||
s.BoolStr = true
|
||||
s.IntStr = 42
|
||||
s.StrStr = "xzbit"
|
||||
got, err := MarshalIndent(&s, "", " ")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if got := string(got); got != stringTagExpected {
|
||||
t.Fatalf(" got: %s\nwant: %s\n", got, stringTagExpected)
|
||||
}
|
||||
|
||||
// Verify that it round-trips.
|
||||
var s2 StringTag
|
||||
err = NewDecoder(bytes.NewReader(got)).Decode(&s2)
|
||||
if err != nil {
|
||||
t.Fatalf("Decode: %v", err)
|
||||
}
|
||||
if !reflect.DeepEqual(s, s2) {
|
||||
t.Fatalf("decode didn't match.\nsource: %#v\nEncoded as:\n%s\ndecode: %#v", s, string(got), s2)
|
||||
}
|
||||
}
|
||||
|
||||
// byte slices are special even if they're renamed types.
|
||||
type renamedByte byte
|
||||
type renamedByteSlice []byte
|
||||
type renamedRenamedByteSlice []renamedByte
|
||||
|
||||
func TestEncodeRenamedByteSlice(t *testing.T) {
|
||||
s := renamedByteSlice("abc")
|
||||
result, err := Marshal(s)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
expect := `"YWJj"`
|
||||
if string(result) != expect {
|
||||
t.Errorf(" got %s want %s", result, expect)
|
||||
}
|
||||
r := renamedRenamedByteSlice("abc")
|
||||
result, err = Marshal(r)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if string(result) != expect {
|
||||
t.Errorf(" got %s want %s", result, expect)
|
||||
}
|
||||
}
|
||||
|
||||
var unsupportedValues = []interface{}{
|
||||
math.NaN(),
|
||||
math.Inf(-1),
|
||||
math.Inf(1),
|
||||
}
|
||||
|
||||
func TestUnsupportedValues(t *testing.T) {
|
||||
for _, v := range unsupportedValues {
|
||||
if _, err := Marshal(v); err != nil {
|
||||
if _, ok := err.(*UnsupportedValueError); !ok {
|
||||
t.Errorf("for %v, got %T want UnsupportedValueError", v, err)
|
||||
}
|
||||
} else {
|
||||
t.Errorf("for %v, expected error", v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Ref has Marshaler and Unmarshaler methods with pointer receiver.
|
||||
type Ref int
|
||||
|
||||
func (*Ref) MarshalJSON() ([]byte, error) {
|
||||
return []byte(`"ref"`), nil
|
||||
}
|
||||
|
||||
func (r *Ref) UnmarshalJSON([]byte) error {
|
||||
*r = 12
|
||||
return nil
|
||||
}
|
||||
|
||||
// Val has Marshaler methods with value receiver.
|
||||
type Val int
|
||||
|
||||
func (Val) MarshalJSON() ([]byte, error) {
|
||||
return []byte(`"val"`), nil
|
||||
}
|
||||
|
||||
// RefText has Marshaler and Unmarshaler methods with pointer receiver.
|
||||
type RefText int
|
||||
|
||||
func (*RefText) MarshalText() ([]byte, error) {
|
||||
return []byte(`"ref"`), nil
|
||||
}
|
||||
|
||||
func (r *RefText) UnmarshalText([]byte) error {
|
||||
*r = 13
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValText has Marshaler methods with value receiver.
|
||||
type ValText int
|
||||
|
||||
func (ValText) MarshalText() ([]byte, error) {
|
||||
return []byte(`"val"`), nil
|
||||
}
|
||||
|
||||
func TestRefValMarshal(t *testing.T) {
|
||||
var s = struct {
|
||||
R0 Ref
|
||||
R1 *Ref
|
||||
R2 RefText
|
||||
R3 *RefText
|
||||
V0 Val
|
||||
V1 *Val
|
||||
V2 ValText
|
||||
V3 *ValText
|
||||
}{
|
||||
R0: 12,
|
||||
R1: new(Ref),
|
||||
R2: 14,
|
||||
R3: new(RefText),
|
||||
V0: 13,
|
||||
V1: new(Val),
|
||||
V2: 15,
|
||||
V3: new(ValText),
|
||||
}
|
||||
const want = `{"R0":"ref","R1":"ref","R2":"\"ref\"","R3":"\"ref\"","V0":"val","V1":"val","V2":"\"val\"","V3":"\"val\""}`
|
||||
b, err := Marshal(&s)
|
||||
if err != nil {
|
||||
t.Fatalf("Marshal: %v", err)
|
||||
}
|
||||
if got := string(b); got != want {
|
||||
t.Errorf("got %q, want %q", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
// C implements Marshaler and returns unescaped JSON.
|
||||
type C int
|
||||
|
||||
func (C) MarshalJSON() ([]byte, error) {
|
||||
return []byte(`"<&>"`), nil
|
||||
}
|
||||
|
||||
// CText implements Marshaler and returns unescaped text.
|
||||
type CText int
|
||||
|
||||
func (CText) MarshalText() ([]byte, error) {
|
||||
return []byte(`"<&>"`), nil
|
||||
}
|
||||
|
||||
func TestMarshalerEscaping(t *testing.T) {
|
||||
var c C
|
||||
want := `"\u003c\u0026\u003e"`
|
||||
b, err := Marshal(c)
|
||||
if err != nil {
|
||||
t.Fatalf("Marshal(c): %v", err)
|
||||
}
|
||||
if got := string(b); got != want {
|
||||
t.Errorf("Marshal(c) = %#q, want %#q", got, want)
|
||||
}
|
||||
|
||||
var ct CText
|
||||
want = `"\"\u003c\u0026\u003e\""`
|
||||
b, err = Marshal(ct)
|
||||
if err != nil {
|
||||
t.Fatalf("Marshal(ct): %v", err)
|
||||
}
|
||||
if got := string(b); got != want {
|
||||
t.Errorf("Marshal(ct) = %#q, want %#q", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
type IntType int
|
||||
|
||||
type MyStruct struct {
|
||||
IntType
|
||||
}
|
||||
|
||||
func TestAnonymousNonstruct(t *testing.T) {
|
||||
var i IntType = 11
|
||||
a := MyStruct{i}
|
||||
const want = `{"IntType":11}`
|
||||
|
||||
b, err := Marshal(a)
|
||||
if err != nil {
|
||||
t.Fatalf("Marshal: %v", err)
|
||||
}
|
||||
if got := string(b); got != want {
|
||||
t.Errorf("got %q, want %q", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
type BugA struct {
|
||||
S string
|
||||
}
|
||||
|
||||
type BugB struct {
|
||||
BugA
|
||||
S string
|
||||
}
|
||||
|
||||
type BugC struct {
|
||||
S string
|
||||
}
|
||||
|
||||
// Legal Go: We never use the repeated embedded field (S).
|
||||
type BugX struct {
|
||||
A int
|
||||
BugA
|
||||
BugB
|
||||
}
|
||||
|
||||
// Issue 5245.
|
||||
func TestEmbeddedBug(t *testing.T) {
|
||||
v := BugB{
|
||||
BugA{"A"},
|
||||
"B",
|
||||
}
|
||||
b, err := Marshal(v)
|
||||
if err != nil {
|
||||
t.Fatal("Marshal:", err)
|
||||
}
|
||||
want := `{"S":"B"}`
|
||||
got := string(b)
|
||||
if got != want {
|
||||
t.Fatalf("Marshal: got %s want %s", got, want)
|
||||
}
|
||||
// Now check that the duplicate field, S, does not appear.
|
||||
x := BugX{
|
||||
A: 23,
|
||||
}
|
||||
b, err = Marshal(x)
|
||||
if err != nil {
|
||||
t.Fatal("Marshal:", err)
|
||||
}
|
||||
want = `{"A":23}`
|
||||
got = string(b)
|
||||
if got != want {
|
||||
t.Fatalf("Marshal: got %s want %s", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
type BugD struct { // Same as BugA after tagging.
|
||||
XXX string `json:"S"`
|
||||
}
|
||||
|
||||
// BugD's tagged S field should dominate BugA's.
|
||||
type BugY struct {
|
||||
BugA
|
||||
BugD
|
||||
}
|
||||
|
||||
// Test that a field with a tag dominates untagged fields.
|
||||
func TestTaggedFieldDominates(t *testing.T) {
|
||||
v := BugY{
|
||||
BugA{"BugA"},
|
||||
BugD{"BugD"},
|
||||
}
|
||||
b, err := Marshal(v)
|
||||
if err != nil {
|
||||
t.Fatal("Marshal:", err)
|
||||
}
|
||||
want := `{"S":"BugD"}`
|
||||
got := string(b)
|
||||
if got != want {
|
||||
t.Fatalf("Marshal: got %s want %s", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
// There are no tags here, so S should not appear.
|
||||
type BugZ struct {
|
||||
BugA
|
||||
BugC
|
||||
BugY // Contains a tagged S field through BugD; should not dominate.
|
||||
}
|
||||
|
||||
func TestDuplicatedFieldDisappears(t *testing.T) {
|
||||
v := BugZ{
|
||||
BugA{"BugA"},
|
||||
BugC{"BugC"},
|
||||
BugY{
|
||||
BugA{"nested BugA"},
|
||||
BugD{"nested BugD"},
|
||||
},
|
||||
}
|
||||
b, err := Marshal(v)
|
||||
if err != nil {
|
||||
t.Fatal("Marshal:", err)
|
||||
}
|
||||
want := `{}`
|
||||
got := string(b)
|
||||
if got != want {
|
||||
t.Fatalf("Marshal: got %s want %s", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStringBytes(t *testing.T) {
|
||||
// Test that encodeState.stringBytes and encodeState.string use the same encoding.
|
||||
es := &encodeState{}
|
||||
var r []rune
|
||||
for i := '\u0000'; i <= unicode.MaxRune; i++ {
|
||||
r = append(r, i)
|
||||
}
|
||||
s := string(r) + "\xff\xff\xffhello" // some invalid UTF-8 too
|
||||
es.string(s)
|
||||
|
||||
esBytes := &encodeState{}
|
||||
esBytes.stringBytes([]byte(s))
|
||||
|
||||
enc := es.Buffer.String()
|
||||
encBytes := esBytes.Buffer.String()
|
||||
if enc != encBytes {
|
||||
i := 0
|
||||
for i < len(enc) && i < len(encBytes) && enc[i] == encBytes[i] {
|
||||
i++
|
||||
}
|
||||
enc = enc[i:]
|
||||
encBytes = encBytes[i:]
|
||||
i = 0
|
||||
for i < len(enc) && i < len(encBytes) && enc[len(enc)-i-1] == encBytes[len(encBytes)-i-1] {
|
||||
i++
|
||||
}
|
||||
enc = enc[:len(enc)-i]
|
||||
encBytes = encBytes[:len(encBytes)-i]
|
||||
|
||||
if len(enc) > 20 {
|
||||
enc = enc[:20] + "..."
|
||||
}
|
||||
if len(encBytes) > 20 {
|
||||
encBytes = encBytes[:20] + "..."
|
||||
}
|
||||
|
||||
t.Errorf("encodings differ at %#q vs %#q", enc, encBytes)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIssue6458(t *testing.T) {
|
||||
type Foo struct {
|
||||
M RawMessage
|
||||
}
|
||||
x := Foo{RawMessage(`"foo"`)}
|
||||
|
||||
b, err := Marshal(&x)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if want := `{"M":"foo"}`; string(b) != want {
|
||||
t.Errorf("Marshal(&x) = %#q; want %#q", b, want)
|
||||
}
|
||||
|
||||
b, err = Marshal(x)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if want := `{"M":"ImZvbyI="}`; string(b) != want {
|
||||
t.Errorf("Marshal(x) = %#q; want %#q", b, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIssue10281(t *testing.T) {
|
||||
type Foo struct {
|
||||
N Number
|
||||
}
|
||||
x := Foo{Number(`invalid`)}
|
||||
|
||||
b, err := Marshal(&x)
|
||||
if err == nil {
|
||||
t.Errorf("Marshal(&x) = %#q; want error", b)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHTMLEscape(t *testing.T) {
|
||||
var b, want bytes.Buffer
|
||||
m := `{"M":"<html>foo &` + "\xe2\x80\xa8 \xe2\x80\xa9" + `</html>"}`
|
||||
want.Write([]byte(`{"M":"\u003chtml\u003efoo \u0026\u2028 \u2029\u003c/html\u003e"}`))
|
||||
HTMLEscape(&b, []byte(m))
|
||||
if !bytes.Equal(b.Bytes(), want.Bytes()) {
|
||||
t.Errorf("HTMLEscape(&b, []byte(m)) = %s; want %s", b.Bytes(), want.Bytes())
|
||||
}
|
||||
}
|
||||
|
||||
// golang.org/issue/8582
|
||||
func TestEncodePointerString(t *testing.T) {
|
||||
type stringPointer struct {
|
||||
N *int64 `json:"n,string"`
|
||||
}
|
||||
var n int64 = 42
|
||||
b, err := Marshal(stringPointer{N: &n})
|
||||
if err != nil {
|
||||
t.Fatalf("Marshal: %v", err)
|
||||
}
|
||||
if got, want := string(b), `{"n":"42"}`; got != want {
|
||||
t.Errorf("Marshal = %s, want %s", got, want)
|
||||
}
|
||||
var back stringPointer
|
||||
err = Unmarshal(b, &back)
|
||||
if err != nil {
|
||||
t.Fatalf("Unmarshal: %v", err)
|
||||
}
|
||||
if back.N == nil {
|
||||
t.Fatalf("Unmarshalled nil N field")
|
||||
}
|
||||
if *back.N != 42 {
|
||||
t.Fatalf("*N = %d; want 42", *back.N)
|
||||
}
|
||||
}
|
||||
|
||||
var encodeStringTests = []struct {
|
||||
in string
|
||||
out string
|
||||
}{
|
||||
{"\x00", `"\u0000"`},
|
||||
{"\x01", `"\u0001"`},
|
||||
{"\x02", `"\u0002"`},
|
||||
{"\x03", `"\u0003"`},
|
||||
{"\x04", `"\u0004"`},
|
||||
{"\x05", `"\u0005"`},
|
||||
{"\x06", `"\u0006"`},
|
||||
{"\x07", `"\u0007"`},
|
||||
{"\x08", `"\u0008"`},
|
||||
{"\x09", `"\t"`},
|
||||
{"\x0a", `"\n"`},
|
||||
{"\x0b", `"\u000b"`},
|
||||
{"\x0c", `"\u000c"`},
|
||||
{"\x0d", `"\r"`},
|
||||
{"\x0e", `"\u000e"`},
|
||||
{"\x0f", `"\u000f"`},
|
||||
{"\x10", `"\u0010"`},
|
||||
{"\x11", `"\u0011"`},
|
||||
{"\x12", `"\u0012"`},
|
||||
{"\x13", `"\u0013"`},
|
||||
{"\x14", `"\u0014"`},
|
||||
{"\x15", `"\u0015"`},
|
||||
{"\x16", `"\u0016"`},
|
||||
{"\x17", `"\u0017"`},
|
||||
{"\x18", `"\u0018"`},
|
||||
{"\x19", `"\u0019"`},
|
||||
{"\x1a", `"\u001a"`},
|
||||
{"\x1b", `"\u001b"`},
|
||||
{"\x1c", `"\u001c"`},
|
||||
{"\x1d", `"\u001d"`},
|
||||
{"\x1e", `"\u001e"`},
|
||||
{"\x1f", `"\u001f"`},
|
||||
}
|
||||
|
||||
func TestEncodeString(t *testing.T) {
|
||||
for _, tt := range encodeStringTests {
|
||||
b, err := Marshal(tt.in)
|
||||
if err != nil {
|
||||
t.Errorf("Marshal(%q): %v", tt.in, err)
|
||||
continue
|
||||
}
|
||||
out := string(b)
|
||||
if out != tt.out {
|
||||
t.Errorf("Marshal(%q) = %#q, want %#q", tt.in, out, tt.out)
|
||||
}
|
||||
}
|
||||
}
|
141
vendor/gopkg.in/square/go-jose.v2/json/indent.go
generated
vendored
Normal file
141
vendor/gopkg.in/square/go-jose.v2/json/indent.go
generated
vendored
Normal file
|
@ -0,0 +1,141 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import "bytes"
|
||||
|
||||
// Compact appends to dst the JSON-encoded src with
|
||||
// insignificant space characters elided.
|
||||
func Compact(dst *bytes.Buffer, src []byte) error {
|
||||
return compact(dst, src, false)
|
||||
}
|
||||
|
||||
func compact(dst *bytes.Buffer, src []byte, escape bool) error {
|
||||
origLen := dst.Len()
|
||||
var scan scanner
|
||||
scan.reset()
|
||||
start := 0
|
||||
for i, c := range src {
|
||||
if escape && (c == '<' || c == '>' || c == '&') {
|
||||
if start < i {
|
||||
dst.Write(src[start:i])
|
||||
}
|
||||
dst.WriteString(`\u00`)
|
||||
dst.WriteByte(hex[c>>4])
|
||||
dst.WriteByte(hex[c&0xF])
|
||||
start = i + 1
|
||||
}
|
||||
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
|
||||
if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
|
||||
if start < i {
|
||||
dst.Write(src[start:i])
|
||||
}
|
||||
dst.WriteString(`\u202`)
|
||||
dst.WriteByte(hex[src[i+2]&0xF])
|
||||
start = i + 3
|
||||
}
|
||||
v := scan.step(&scan, c)
|
||||
if v >= scanSkipSpace {
|
||||
if v == scanError {
|
||||
break
|
||||
}
|
||||
if start < i {
|
||||
dst.Write(src[start:i])
|
||||
}
|
||||
start = i + 1
|
||||
}
|
||||
}
|
||||
if scan.eof() == scanError {
|
||||
dst.Truncate(origLen)
|
||||
return scan.err
|
||||
}
|
||||
if start < len(src) {
|
||||
dst.Write(src[start:])
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func newline(dst *bytes.Buffer, prefix, indent string, depth int) {
|
||||
dst.WriteByte('\n')
|
||||
dst.WriteString(prefix)
|
||||
for i := 0; i < depth; i++ {
|
||||
dst.WriteString(indent)
|
||||
}
|
||||
}
|
||||
|
||||
// Indent appends to dst an indented form of the JSON-encoded src.
|
||||
// Each element in a JSON object or array begins on a new,
|
||||
// indented line beginning with prefix followed by one or more
|
||||
// copies of indent according to the indentation nesting.
|
||||
// The data appended to dst does not begin with the prefix nor
|
||||
// any indentation, to make it easier to embed inside other formatted JSON data.
|
||||
// Although leading space characters (space, tab, carriage return, newline)
|
||||
// at the beginning of src are dropped, trailing space characters
|
||||
// at the end of src are preserved and copied to dst.
|
||||
// For example, if src has no trailing spaces, neither will dst;
|
||||
// if src ends in a trailing newline, so will dst.
|
||||
func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
|
||||
origLen := dst.Len()
|
||||
var scan scanner
|
||||
scan.reset()
|
||||
needIndent := false
|
||||
depth := 0
|
||||
for _, c := range src {
|
||||
scan.bytes++
|
||||
v := scan.step(&scan, c)
|
||||
if v == scanSkipSpace {
|
||||
continue
|
||||
}
|
||||
if v == scanError {
|
||||
break
|
||||
}
|
||||
if needIndent && v != scanEndObject && v != scanEndArray {
|
||||
needIndent = false
|
||||
depth++
|
||||
newline(dst, prefix, indent, depth)
|
||||
}
|
||||
|
||||
// Emit semantically uninteresting bytes
|
||||
// (in particular, punctuation in strings) unmodified.
|
||||
if v == scanContinue {
|
||||
dst.WriteByte(c)
|
||||
continue
|
||||
}
|
||||
|
||||
// Add spacing around real punctuation.
|
||||
switch c {
|
||||
case '{', '[':
|
||||
// delay indent so that empty object and array are formatted as {} and [].
|
||||
needIndent = true
|
||||
dst.WriteByte(c)
|
||||
|
||||
case ',':
|
||||
dst.WriteByte(c)
|
||||
newline(dst, prefix, indent, depth)
|
||||
|
||||
case ':':
|
||||
dst.WriteByte(c)
|
||||
dst.WriteByte(' ')
|
||||
|
||||
case '}', ']':
|
||||
if needIndent {
|
||||
// suppress indent in empty object/array
|
||||
needIndent = false
|
||||
} else {
|
||||
depth--
|
||||
newline(dst, prefix, indent, depth)
|
||||
}
|
||||
dst.WriteByte(c)
|
||||
|
||||
default:
|
||||
dst.WriteByte(c)
|
||||
}
|
||||
}
|
||||
if scan.eof() == scanError {
|
||||
dst.Truncate(origLen)
|
||||
return scan.err
|
||||
}
|
||||
return nil
|
||||
}
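
For reference, a short sketch of how the `Compact` and `Indent` helpers above can be called; the `josejson` alias and the sample input are assumptions, not taken from this diff:

package main

import (
	"bytes"
	"fmt"

	josejson "gopkg.in/square/go-jose.v2/json"
)

func main() {
	src := []byte(`{ "alg" : "RS256",  "kid" : "1" }`)

	// Compact elides insignificant whitespace.
	var compacted bytes.Buffer
	if err := josejson.Compact(&compacted, src); err != nil {
		panic(err)
	}
	fmt.Println(compacted.String()) // {"alg":"RS256","kid":"1"}

	// Indent re-formats the compact form with a two-space indent.
	var indented bytes.Buffer
	if err := josejson.Indent(&indented, compacted.Bytes(), "", "  "); err != nil {
		panic(err)
	}
	fmt.Println(indented.String())
}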
|
133
vendor/gopkg.in/square/go-jose.v2/json/number_test.go
generated
vendored
Normal file
133
vendor/gopkg.in/square/go-jose.v2/json/number_test.go
generated
vendored
Normal file
|
@ -0,0 +1,133 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNumberIsValid(t *testing.T) {
|
||||
// From: http://stackoverflow.com/a/13340826
|
||||
var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
|
||||
|
||||
validTests := []string{
|
||||
"0",
|
||||
"-0",
|
||||
"1",
|
||||
"-1",
|
||||
"0.1",
|
||||
"-0.1",
|
||||
"1234",
|
||||
"-1234",
|
||||
"12.34",
|
||||
"-12.34",
|
||||
"12E0",
|
||||
"12E1",
|
||||
"12e34",
|
||||
"12E-0",
|
||||
"12e+1",
|
||||
"12e-34",
|
||||
"-12E0",
|
||||
"-12E1",
|
||||
"-12e34",
|
||||
"-12E-0",
|
||||
"-12e+1",
|
||||
"-12e-34",
|
||||
"1.2E0",
|
||||
"1.2E1",
|
||||
"1.2e34",
|
||||
"1.2E-0",
|
||||
"1.2e+1",
|
||||
"1.2e-34",
|
||||
"-1.2E0",
|
||||
"-1.2E1",
|
||||
"-1.2e34",
|
||||
"-1.2E-0",
|
||||
"-1.2e+1",
|
||||
"-1.2e-34",
|
||||
"0E0",
|
||||
"0E1",
|
||||
"0e34",
|
||||
"0E-0",
|
||||
"0e+1",
|
||||
"0e-34",
|
||||
"-0E0",
|
||||
"-0E1",
|
||||
"-0e34",
|
||||
"-0E-0",
|
||||
"-0e+1",
|
||||
"-0e-34",
|
||||
}
|
||||
|
||||
for _, test := range validTests {
|
||||
if !isValidNumber(test) {
|
||||
t.Errorf("%s should be valid", test)
|
||||
}
|
||||
|
||||
var f float64
|
||||
if err := Unmarshal([]byte(test), &f); err != nil {
|
||||
t.Errorf("%s should be valid but Unmarshal failed: %v", test, err)
|
||||
}
|
||||
|
||||
if !jsonNumberRegexp.MatchString(test) {
|
||||
t.Errorf("%s should be valid but regexp does not match", test)
|
||||
}
|
||||
}
|
||||
|
||||
invalidTests := []string{
|
||||
"",
|
||||
"invalid",
|
||||
"1.0.1",
|
||||
"1..1",
|
||||
"-1-2",
|
||||
"012a42",
|
||||
"01.2",
|
||||
"012",
|
||||
"12E12.12",
|
||||
"1e2e3",
|
||||
"1e+-2",
|
||||
"1e--23",
|
||||
"1e",
|
||||
"e1",
|
||||
"1e+",
|
||||
"1ea",
|
||||
"1a",
|
||||
"1.a",
|
||||
"1.",
|
||||
"01",
|
||||
"1.e1",
|
||||
}
|
||||
|
||||
for _, test := range invalidTests {
|
||||
if isValidNumber(test) {
|
||||
t.Errorf("%s should be invalid", test)
|
||||
}
|
||||
|
||||
var f float64
|
||||
if err := Unmarshal([]byte(test), &f); err == nil {
|
||||
t.Errorf("%s should be invalid but unmarshal wrote %v", test, f)
|
||||
}
|
||||
|
||||
if jsonNumberRegexp.MatchString(test) {
|
||||
t.Errorf("%s should be invalid but matches regexp", test)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNumberIsValid(b *testing.B) {
|
||||
s := "-61657.61667E+61673"
|
||||
for i := 0; i < b.N; i++ {
|
||||
isValidNumber(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNumberIsValidRegexp(b *testing.B) {
|
||||
var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
|
||||
s := "-61657.61667E+61673"
|
||||
for i := 0; i < b.N; i++ {
|
||||
jsonNumberRegexp.MatchString(s)
|
||||
}
|
||||
}
|
623
vendor/gopkg.in/square/go-jose.v2/json/scanner.go
generated
vendored
Normal file
623
vendor/gopkg.in/square/go-jose.v2/json/scanner.go
generated
vendored
Normal file
|
@ -0,0 +1,623 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
// JSON value parser state machine.
|
||||
// Just about at the limit of what is reasonable to write by hand.
|
||||
// Some parts are a bit tedious, but overall it nicely factors out the
|
||||
// otherwise common code from the multiple scanning functions
|
||||
// in this package (Compact, Indent, checkValid, nextValue, etc).
|
||||
//
|
||||
// This file starts with two simple examples using the scanner
|
||||
// before diving into the scanner itself.
|
||||
|
||||
import "strconv"
|
||||
|
||||
// checkValid verifies that data is valid JSON-encoded data.
|
||||
// scan is passed in for use by checkValid to avoid an allocation.
|
||||
func checkValid(data []byte, scan *scanner) error {
|
||||
scan.reset()
|
||||
for _, c := range data {
|
||||
scan.bytes++
|
||||
if scan.step(scan, c) == scanError {
|
||||
return scan.err
|
||||
}
|
||||
}
|
||||
if scan.eof() == scanError {
|
||||
return scan.err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// nextValue splits data after the next whole JSON value,
|
||||
// returning that value and the bytes that follow it as separate slices.
|
||||
// scan is passed in for use by nextValue to avoid an allocation.
|
||||
func nextValue(data []byte, scan *scanner) (value, rest []byte, err error) {
|
||||
scan.reset()
|
||||
for i, c := range data {
|
||||
v := scan.step(scan, c)
|
||||
if v >= scanEndObject {
|
||||
switch v {
|
||||
// probe the scanner with a space to determine whether we will
|
||||
// get scanEnd on the next character. Otherwise, if the next character
|
||||
// is not a space, scanEndTop allocates a needless error.
|
||||
case scanEndObject, scanEndArray:
|
||||
if scan.step(scan, ' ') == scanEnd {
|
||||
return data[:i+1], data[i+1:], nil
|
||||
}
|
||||
case scanError:
|
||||
return nil, nil, scan.err
|
||||
case scanEnd:
|
||||
return data[:i], data[i:], nil
|
||||
}
|
||||
}
|
||||
}
|
||||
if scan.eof() == scanError {
|
||||
return nil, nil, scan.err
|
||||
}
|
||||
return data, nil, nil
|
||||
}
|
||||
|
||||
// A SyntaxError is a description of a JSON syntax error.
|
||||
type SyntaxError struct {
|
||||
msg string // description of error
|
||||
Offset int64 // error occurred after reading Offset bytes
|
||||
}
|
||||
|
||||
func (e *SyntaxError) Error() string { return e.msg }
|
||||
|
||||
// A scanner is a JSON scanning state machine.
|
||||
// Callers call scan.reset() and then pass bytes in one at a time
|
||||
// by calling scan.step(&scan, c) for each byte.
|
||||
// The return value, referred to as an opcode, tells the
|
||||
// caller about significant parsing events like beginning
|
||||
// and ending literals, objects, and arrays, so that the
|
||||
// caller can follow along if it wishes.
|
||||
// The return value scanEnd indicates that a single top-level
|
||||
// JSON value has been completed, *before* the byte that
|
||||
// just got passed in. (The indication must be delayed in order
|
||||
// to recognize the end of numbers: is 123 a whole value or
|
||||
// the beginning of 12345e+6?).
|
||||
type scanner struct {
|
||||
// The step is a func to be called to execute the next transition.
|
||||
// Also tried using an integer constant and a single func
|
||||
// with a switch, but using the func directly was 10% faster
|
||||
// on a 64-bit Mac Mini, and it's nicer to read.
|
||||
step func(*scanner, byte) int
|
||||
|
||||
// Reached end of top-level value.
|
||||
endTop bool
|
||||
|
||||
// Stack of what we're in the middle of - array values, object keys, object values.
|
||||
parseState []int
|
||||
|
||||
// Error that happened, if any.
|
||||
err error
|
||||
|
||||
// 1-byte redo (see undo method)
|
||||
redo bool
|
||||
redoCode int
|
||||
redoState func(*scanner, byte) int
|
||||
|
||||
// total bytes consumed, updated by decoder.Decode
|
||||
bytes int64
|
||||
}
|
||||
|
||||
// These values are returned by the state transition functions
|
||||
// assigned to scanner.state and the method scanner.eof.
|
||||
// They give details about the current state of the scan that
|
||||
// callers might be interested to know about.
|
||||
// It is okay to ignore the return value of any particular
|
||||
// call to scanner.state: if one call returns scanError,
|
||||
// every subsequent call will return scanError too.
|
||||
const (
|
||||
// Continue.
|
||||
scanContinue = iota // uninteresting byte
|
||||
scanBeginLiteral // end implied by next result != scanContinue
|
||||
scanBeginObject // begin object
|
||||
scanObjectKey // just finished object key (string)
|
||||
scanObjectValue // just finished non-last object value
|
||||
scanEndObject // end object (implies scanObjectValue if possible)
|
||||
scanBeginArray // begin array
|
||||
scanArrayValue // just finished array value
|
||||
scanEndArray // end array (implies scanArrayValue if possible)
|
||||
scanSkipSpace // space byte; can skip; known to be last "continue" result
|
||||
|
||||
// Stop.
|
||||
scanEnd // top-level value ended *before* this byte; known to be first "stop" result
|
||||
scanError // hit an error, scanner.err.
|
||||
)
|
||||
|
||||
// These values are stored in the parseState stack.
|
||||
// They give the current state of a composite value
|
||||
// being scanned. If the parser is inside a nested value
|
||||
// the parseState describes the nested state, outermost at entry 0.
|
||||
const (
|
||||
parseObjectKey = iota // parsing object key (before colon)
|
||||
parseObjectValue // parsing object value (after colon)
|
||||
parseArrayValue // parsing array value
|
||||
)
|
||||
|
||||
// reset prepares the scanner for use.
|
||||
// It must be called before calling s.step.
|
||||
func (s *scanner) reset() {
|
||||
s.step = stateBeginValue
|
||||
s.parseState = s.parseState[0:0]
|
||||
s.err = nil
|
||||
s.redo = false
|
||||
s.endTop = false
|
||||
}
|
||||
|
||||
// eof tells the scanner that the end of input has been reached.
|
||||
// It returns a scan status just as s.step does.
|
||||
func (s *scanner) eof() int {
|
||||
if s.err != nil {
|
||||
return scanError
|
||||
}
|
||||
if s.endTop {
|
||||
return scanEnd
|
||||
}
|
||||
s.step(s, ' ')
|
||||
if s.endTop {
|
||||
return scanEnd
|
||||
}
|
||||
if s.err == nil {
|
||||
s.err = &SyntaxError{"unexpected end of JSON input", s.bytes}
|
||||
}
|
||||
return scanError
|
||||
}
|
||||
|
||||
// pushParseState pushes a new parse state p onto the parse stack.
|
||||
func (s *scanner) pushParseState(p int) {
|
||||
s.parseState = append(s.parseState, p)
|
||||
}
|
||||
|
||||
// popParseState pops a parse state (already obtained) off the stack
|
||||
// and updates s.step accordingly.
|
||||
func (s *scanner) popParseState() {
|
||||
n := len(s.parseState) - 1
|
||||
s.parseState = s.parseState[0:n]
|
||||
s.redo = false
|
||||
if n == 0 {
|
||||
s.step = stateEndTop
|
||||
s.endTop = true
|
||||
} else {
|
||||
s.step = stateEndValue
|
||||
}
|
||||
}
|
||||
|
||||
func isSpace(c byte) bool {
|
||||
return c == ' ' || c == '\t' || c == '\r' || c == '\n'
|
||||
}
|
||||
|
||||
// stateBeginValueOrEmpty is the state after reading `[`.
|
||||
func stateBeginValueOrEmpty(s *scanner, c byte) int {
|
||||
if c <= ' ' && isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
if c == ']' {
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
return stateBeginValue(s, c)
|
||||
}
|
||||
|
||||
// stateBeginValue is the state at the beginning of the input.
|
||||
func stateBeginValue(s *scanner, c byte) int {
|
||||
if c <= ' ' && isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
switch c {
|
||||
case '{':
|
||||
s.step = stateBeginStringOrEmpty
|
||||
s.pushParseState(parseObjectKey)
|
||||
return scanBeginObject
|
||||
case '[':
|
||||
s.step = stateBeginValueOrEmpty
|
||||
s.pushParseState(parseArrayValue)
|
||||
return scanBeginArray
|
||||
case '"':
|
||||
s.step = stateInString
|
||||
return scanBeginLiteral
|
||||
case '-':
|
||||
s.step = stateNeg
|
||||
return scanBeginLiteral
|
||||
case '0': // beginning of 0.123
|
||||
s.step = state0
|
||||
return scanBeginLiteral
|
||||
case 't': // beginning of true
|
||||
s.step = stateT
|
||||
return scanBeginLiteral
|
||||
case 'f': // beginning of false
|
||||
s.step = stateF
|
||||
return scanBeginLiteral
|
||||
case 'n': // beginning of null
|
||||
s.step = stateN
|
||||
return scanBeginLiteral
|
||||
}
|
||||
if '1' <= c && c <= '9' { // beginning of 1234.5
|
||||
s.step = state1
|
||||
return scanBeginLiteral
|
||||
}
|
||||
return s.error(c, "looking for beginning of value")
|
||||
}
|
||||
|
||||
// stateBeginStringOrEmpty is the state after reading `{`.
|
||||
func stateBeginStringOrEmpty(s *scanner, c byte) int {
|
||||
if c <= ' ' && isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
if c == '}' {
|
||||
n := len(s.parseState)
|
||||
s.parseState[n-1] = parseObjectValue
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
return stateBeginString(s, c)
|
||||
}
|
||||
|
||||
// stateBeginString is the state after reading `{"key": value,`.
|
||||
func stateBeginString(s *scanner, c byte) int {
|
||||
if c <= ' ' && isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
if c == '"' {
|
||||
s.step = stateInString
|
||||
return scanBeginLiteral
|
||||
}
|
||||
return s.error(c, "looking for beginning of object key string")
|
||||
}
|
||||
|
||||
// stateEndValue is the state after completing a value,
|
||||
// such as after reading `{}` or `true` or `["x"`.
|
||||
func stateEndValue(s *scanner, c byte) int {
|
||||
n := len(s.parseState)
|
||||
if n == 0 {
|
||||
// Completed top-level before the current byte.
|
||||
s.step = stateEndTop
|
||||
s.endTop = true
|
||||
return stateEndTop(s, c)
|
||||
}
|
||||
if c <= ' ' && isSpace(c) {
|
||||
s.step = stateEndValue
|
||||
return scanSkipSpace
|
||||
}
|
||||
ps := s.parseState[n-1]
|
||||
switch ps {
|
||||
case parseObjectKey:
|
||||
if c == ':' {
|
||||
s.parseState[n-1] = parseObjectValue
|
||||
s.step = stateBeginValue
|
||||
return scanObjectKey
|
||||
}
|
||||
return s.error(c, "after object key")
|
||||
case parseObjectValue:
|
||||
if c == ',' {
|
||||
s.parseState[n-1] = parseObjectKey
|
||||
s.step = stateBeginString
|
||||
return scanObjectValue
|
||||
}
|
||||
if c == '}' {
|
||||
s.popParseState()
|
||||
return scanEndObject
|
||||
}
|
||||
return s.error(c, "after object key:value pair")
|
||||
case parseArrayValue:
|
||||
if c == ',' {
|
||||
s.step = stateBeginValue
|
||||
return scanArrayValue
|
||||
}
|
||||
if c == ']' {
|
||||
s.popParseState()
|
||||
return scanEndArray
|
||||
}
|
||||
return s.error(c, "after array element")
|
||||
}
|
||||
return s.error(c, "")
|
||||
}
|
||||
|
||||
// stateEndTop is the state after finishing the top-level value,
|
||||
// such as after reading `{}` or `[1,2,3]`.
|
||||
// Only space characters should be seen now.
|
||||
func stateEndTop(s *scanner, c byte) int {
|
||||
if c != ' ' && c != '\t' && c != '\r' && c != '\n' {
|
||||
// Complain about non-space byte on next call.
|
||||
s.error(c, "after top-level value")
|
||||
}
|
||||
return scanEnd
|
||||
}
|
||||
|
||||
// stateInString is the state after reading `"`.
|
||||
func stateInString(s *scanner, c byte) int {
|
||||
if c == '"' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
if c == '\\' {
|
||||
s.step = stateInStringEsc
|
||||
return scanContinue
|
||||
}
|
||||
if c < 0x20 {
|
||||
return s.error(c, "in string literal")
|
||||
}
|
||||
return scanContinue
|
||||
}
|
||||
|
||||
// stateInStringEsc is the state after reading `"\` during a quoted string.
|
||||
func stateInStringEsc(s *scanner, c byte) int {
|
||||
switch c {
|
||||
case 'b', 'f', 'n', 'r', 't', '\\', '/', '"':
|
||||
s.step = stateInString
|
||||
return scanContinue
|
||||
case 'u':
|
||||
s.step = stateInStringEscU
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in string escape code")
|
||||
}
|
||||
|
||||
// stateInStringEscU is the state after reading `"\u` during a quoted string.
|
||||
func stateInStringEscU(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInStringEscU1
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateInStringEscU1 is the state after reading `"\u1` during a quoted string.
|
||||
func stateInStringEscU1(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInStringEscU12
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateInStringEscU12 is the state after reading `"\u12` during a quoted string.
|
||||
func stateInStringEscU12(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInStringEscU123
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateInStringEscU123 is the state after reading `"\u123` during a quoted string.
|
||||
func stateInStringEscU123(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInString
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateNeg is the state after reading `-` during a number.
|
||||
func stateNeg(s *scanner, c byte) int {
|
||||
if c == '0' {
|
||||
s.step = state0
|
||||
return scanContinue
|
||||
}
|
||||
if '1' <= c && c <= '9' {
|
||||
s.step = state1
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in numeric literal")
|
||||
}
|
||||
|
||||
// state1 is the state after reading a non-zero integer during a number,
|
||||
// such as after reading `1` or `100` but not `0`.
|
||||
func state1(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
s.step = state1
|
||||
return scanContinue
|
||||
}
|
||||
return state0(s, c)
|
||||
}
|
||||
|
||||
// state0 is the state after reading `0` during a number.
|
||||
func state0(s *scanner, c byte) int {
|
||||
if c == '.' {
|
||||
s.step = stateDot
|
||||
return scanContinue
|
||||
}
|
||||
if c == 'e' || c == 'E' {
|
||||
s.step = stateE
|
||||
return scanContinue
|
||||
}
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
|
||||
// stateDot is the state after reading the integer and decimal point in a number,
|
||||
// such as after reading `1.`.
|
||||
func stateDot(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
s.step = stateDot0
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "after decimal point in numeric literal")
|
||||
}
|
||||
|
||||
// stateDot0 is the state after reading the integer, decimal point, and subsequent
|
||||
// digits of a number, such as after reading `3.14`.
|
||||
func stateDot0(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
return scanContinue
|
||||
}
|
||||
if c == 'e' || c == 'E' {
|
||||
s.step = stateE
|
||||
return scanContinue
|
||||
}
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
|
||||
// stateE is the state after reading the mantissa and e in a number,
|
||||
// such as after reading `314e` or `0.314e`.
|
||||
func stateE(s *scanner, c byte) int {
|
||||
if c == '+' || c == '-' {
|
||||
s.step = stateESign
|
||||
return scanContinue
|
||||
}
|
||||
return stateESign(s, c)
|
||||
}
|
||||
|
||||
// stateESign is the state after reading the mantissa, e, and sign in a number,
|
||||
// such as after reading `314e-` or `0.314e+`.
|
||||
func stateESign(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
s.step = stateE0
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in exponent of numeric literal")
|
||||
}
|
||||
|
||||
// stateE0 is the state after reading the mantissa, e, optional sign,
|
||||
// and at least one digit of the exponent in a number,
|
||||
// such as after reading `314e-2` or `0.314e+1` or `3.14e0`.
|
||||
func stateE0(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
return scanContinue
|
||||
}
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
|
||||
// stateT is the state after reading `t`.
|
||||
func stateT(s *scanner, c byte) int {
|
||||
if c == 'r' {
|
||||
s.step = stateTr
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal true (expecting 'r')")
|
||||
}
|
||||
|
||||
// stateTr is the state after reading `tr`.
|
||||
func stateTr(s *scanner, c byte) int {
|
||||
if c == 'u' {
|
||||
s.step = stateTru
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal true (expecting 'u')")
|
||||
}
|
||||
|
||||
// stateTru is the state after reading `tru`.
|
||||
func stateTru(s *scanner, c byte) int {
|
||||
if c == 'e' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal true (expecting 'e')")
|
||||
}
|
||||
|
||||
// stateF is the state after reading `f`.
|
||||
func stateF(s *scanner, c byte) int {
|
||||
if c == 'a' {
|
||||
s.step = stateFa
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 'a')")
|
||||
}
|
||||
|
||||
// stateFa is the state after reading `fa`.
|
||||
func stateFa(s *scanner, c byte) int {
|
||||
if c == 'l' {
|
||||
s.step = stateFal
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 'l')")
|
||||
}
|
||||
|
||||
// stateFal is the state after reading `fal`.
|
||||
func stateFal(s *scanner, c byte) int {
|
||||
if c == 's' {
|
||||
s.step = stateFals
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 's')")
|
||||
}
|
||||
|
||||
// stateFals is the state after reading `fals`.
|
||||
func stateFals(s *scanner, c byte) int {
|
||||
if c == 'e' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 'e')")
|
||||
}
|
||||
|
||||
// stateN is the state after reading `n`.
|
||||
func stateN(s *scanner, c byte) int {
|
||||
if c == 'u' {
|
||||
s.step = stateNu
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal null (expecting 'u')")
|
||||
}
|
||||
|
||||
// stateNu is the state after reading `nu`.
|
||||
func stateNu(s *scanner, c byte) int {
|
||||
if c == 'l' {
|
||||
s.step = stateNul
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal null (expecting 'l')")
|
||||
}
|
||||
|
||||
// stateNul is the state after reading `nul`.
|
||||
func stateNul(s *scanner, c byte) int {
|
||||
if c == 'l' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal null (expecting 'l')")
|
||||
}
|
||||
|
||||
// stateError is the state after reaching a syntax error,
|
||||
// such as after reading `[1}` or `5.1.2`.
|
||||
func stateError(s *scanner, c byte) int {
|
||||
return scanError
|
||||
}
|
||||
|
||||
// error records an error and switches to the error state.
|
||||
func (s *scanner) error(c byte, context string) int {
|
||||
s.step = stateError
|
||||
s.err = &SyntaxError{"invalid character " + quoteChar(c) + " " + context, s.bytes}
|
||||
return scanError
|
||||
}
|
||||
|
||||
// quoteChar formats c as a quoted character literal
|
||||
func quoteChar(c byte) string {
|
||||
// special cases - different from quoted strings
|
||||
if c == '\'' {
|
||||
return `'\''`
|
||||
}
|
||||
if c == '"' {
|
||||
return `'"'`
|
||||
}
|
||||
|
||||
// use quoted string with different quotation marks
|
||||
s := strconv.Quote(string(c))
|
||||
return "'" + s[1:len(s)-1] + "'"
|
||||
}
|
||||
|
||||
// undo causes the scanner to return scanCode from the next state transition.
|
||||
// This gives callers a simple 1-byte undo mechanism.
|
||||
func (s *scanner) undo(scanCode int) {
|
||||
if s.redo {
|
||||
panic("json: invalid use of scanner")
|
||||
}
|
||||
s.redoCode = scanCode
|
||||
s.redoState = s.step
|
||||
s.step = stateRedo
|
||||
s.redo = true
|
||||
}
|
||||
|
||||
// stateRedo helps implement the scanner's 1-byte undo.
|
||||
func stateRedo(s *scanner, c byte) int {
|
||||
s.redo = false
|
||||
s.step = s.redoState
|
||||
return s.redoCode
|
||||
}
|
316
vendor/gopkg.in/square/go-jose.v2/json/scanner_test.go
generated
vendored
Normal file
|
@ -0,0 +1,316 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"math"
|
||||
"math/rand"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Tests of simple examples.
|
||||
|
||||
type example struct {
|
||||
compact string
|
||||
indent string
|
||||
}
|
||||
|
||||
var examples = []example{
|
||||
{`1`, `1`},
|
||||
{`{}`, `{}`},
|
||||
{`[]`, `[]`},
|
||||
{`{"":2}`, "{\n\t\"\": 2\n}"},
|
||||
{`[3]`, "[\n\t3\n]"},
|
||||
{`[1,2,3]`, "[\n\t1,\n\t2,\n\t3\n]"},
|
||||
{`{"x":1}`, "{\n\t\"x\": 1\n}"},
|
||||
{ex1, ex1i},
|
||||
}
|
||||
|
||||
var ex1 = `[true,false,null,"x",1,1.5,0,-5e+2]`
|
||||
|
||||
var ex1i = `[
|
||||
true,
|
||||
false,
|
||||
null,
|
||||
"x",
|
||||
1,
|
||||
1.5,
|
||||
0,
|
||||
-5e+2
|
||||
]`
|
||||
|
||||
func TestCompact(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
for _, tt := range examples {
|
||||
buf.Reset()
|
||||
if err := Compact(&buf, []byte(tt.compact)); err != nil {
|
||||
t.Errorf("Compact(%#q): %v", tt.compact, err)
|
||||
} else if s := buf.String(); s != tt.compact {
|
||||
t.Errorf("Compact(%#q) = %#q, want original", tt.compact, s)
|
||||
}
|
||||
|
||||
buf.Reset()
|
||||
if err := Compact(&buf, []byte(tt.indent)); err != nil {
|
||||
t.Errorf("Compact(%#q): %v", tt.indent, err)
|
||||
continue
|
||||
} else if s := buf.String(); s != tt.compact {
|
||||
t.Errorf("Compact(%#q) = %#q, want %#q", tt.indent, s, tt.compact)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompactSeparators(t *testing.T) {
|
||||
// U+2028 and U+2029 should be escaped inside strings.
|
||||
// They should not appear outside strings.
|
||||
tests := []struct {
|
||||
in, compact string
|
||||
}{
|
||||
{"{\"\u2028\": 1}", `{"\u2028":1}`},
|
||||
{"{\"\u2029\" :2}", `{"\u2029":2}`},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
var buf bytes.Buffer
|
||||
if err := Compact(&buf, []byte(tt.in)); err != nil {
|
||||
t.Errorf("Compact(%q): %v", tt.in, err)
|
||||
} else if s := buf.String(); s != tt.compact {
|
||||
t.Errorf("Compact(%q) = %q, want %q", tt.in, s, tt.compact)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIndent(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
for _, tt := range examples {
|
||||
buf.Reset()
|
||||
if err := Indent(&buf, []byte(tt.indent), "", "\t"); err != nil {
|
||||
t.Errorf("Indent(%#q): %v", tt.indent, err)
|
||||
} else if s := buf.String(); s != tt.indent {
|
||||
t.Errorf("Indent(%#q) = %#q, want original", tt.indent, s)
|
||||
}
|
||||
|
||||
buf.Reset()
|
||||
if err := Indent(&buf, []byte(tt.compact), "", "\t"); err != nil {
|
||||
t.Errorf("Indent(%#q): %v", tt.compact, err)
|
||||
continue
|
||||
} else if s := buf.String(); s != tt.indent {
|
||||
t.Errorf("Indent(%#q) = %#q, want %#q", tt.compact, s, tt.indent)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Tests of a large random structure.
|
||||
|
||||
func TestCompactBig(t *testing.T) {
|
||||
initBig()
|
||||
var buf bytes.Buffer
|
||||
if err := Compact(&buf, jsonBig); err != nil {
|
||||
t.Fatalf("Compact: %v", err)
|
||||
}
|
||||
b := buf.Bytes()
|
||||
if !bytes.Equal(b, jsonBig) {
|
||||
t.Error("Compact(jsonBig) != jsonBig")
|
||||
diff(t, b, jsonBig)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func TestIndentBig(t *testing.T) {
|
||||
initBig()
|
||||
var buf bytes.Buffer
|
||||
if err := Indent(&buf, jsonBig, "", "\t"); err != nil {
|
||||
t.Fatalf("Indent1: %v", err)
|
||||
}
|
||||
b := buf.Bytes()
|
||||
if len(b) == len(jsonBig) {
|
||||
// jsonBig is compact (no unnecessary spaces);
|
||||
// indenting should make it bigger
|
||||
t.Fatalf("Indent(jsonBig) did not get bigger")
|
||||
}
|
||||
|
||||
// should be idempotent
|
||||
var buf1 bytes.Buffer
|
||||
if err := Indent(&buf1, b, "", "\t"); err != nil {
|
||||
t.Fatalf("Indent2: %v", err)
|
||||
}
|
||||
b1 := buf1.Bytes()
|
||||
if !bytes.Equal(b1, b) {
|
||||
t.Error("Indent(Indent(jsonBig)) != Indent(jsonBig)")
|
||||
diff(t, b1, b)
|
||||
return
|
||||
}
|
||||
|
||||
// should get back to original
|
||||
buf1.Reset()
|
||||
if err := Compact(&buf1, b); err != nil {
|
||||
t.Fatalf("Compact: %v", err)
|
||||
}
|
||||
b1 = buf1.Bytes()
|
||||
if !bytes.Equal(b1, jsonBig) {
|
||||
t.Error("Compact(Indent(jsonBig)) != jsonBig")
|
||||
diff(t, b1, jsonBig)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
type indentErrorTest struct {
|
||||
in string
|
||||
err error
|
||||
}
|
||||
|
||||
var indentErrorTests = []indentErrorTest{
|
||||
{`{"X": "foo", "Y"}`, &SyntaxError{"invalid character '}' after object key", 17}},
|
||||
{`{"X": "foo" "Y": "bar"}`, &SyntaxError{"invalid character '\"' after object key:value pair", 13}},
|
||||
}
|
||||
|
||||
func TestIndentErrors(t *testing.T) {
|
||||
for i, tt := range indentErrorTests {
|
||||
slice := make([]uint8, 0)
|
||||
buf := bytes.NewBuffer(slice)
|
||||
if err := Indent(buf, []uint8(tt.in), "", ""); err != nil {
|
||||
if !reflect.DeepEqual(err, tt.err) {
|
||||
t.Errorf("#%d: Indent: %#v", i, err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestNextValueBig(t *testing.T) {
|
||||
initBig()
|
||||
var scan scanner
|
||||
item, rest, err := nextValue(jsonBig, &scan)
|
||||
if err != nil {
|
||||
t.Fatalf("nextValue: %s", err)
|
||||
}
|
||||
if len(item) != len(jsonBig) || &item[0] != &jsonBig[0] {
|
||||
t.Errorf("invalid item: %d %d", len(item), len(jsonBig))
|
||||
}
|
||||
if len(rest) != 0 {
|
||||
t.Errorf("invalid rest: %d", len(rest))
|
||||
}
|
||||
|
||||
item, rest, err = nextValue(append(jsonBig, "HELLO WORLD"...), &scan)
|
||||
if err != nil {
|
||||
t.Fatalf("nextValue extra: %s", err)
|
||||
}
|
||||
if len(item) != len(jsonBig) {
|
||||
t.Errorf("invalid item: %d %d", len(item), len(jsonBig))
|
||||
}
|
||||
if string(rest) != "HELLO WORLD" {
|
||||
t.Errorf("invalid rest: %d", len(rest))
|
||||
}
|
||||
}
|
||||
|
||||
var benchScan scanner
|
||||
|
||||
func BenchmarkSkipValue(b *testing.B) {
|
||||
initBig()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
nextValue(jsonBig, &benchScan)
|
||||
}
|
||||
b.SetBytes(int64(len(jsonBig)))
|
||||
}
|
||||
|
||||
func diff(t *testing.T, a, b []byte) {
|
||||
for i := 0; ; i++ {
|
||||
if i >= len(a) || i >= len(b) || a[i] != b[i] {
|
||||
j := i - 10
|
||||
if j < 0 {
|
||||
j = 0
|
||||
}
|
||||
t.Errorf("diverge at %d: «%s» vs «%s»", i, trim(a[j:]), trim(b[j:]))
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func trim(b []byte) []byte {
|
||||
if len(b) > 20 {
|
||||
return b[0:20]
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// Generate a random JSON object.
|
||||
|
||||
var jsonBig []byte
|
||||
|
||||
func initBig() {
|
||||
n := 10000
|
||||
if testing.Short() {
|
||||
n = 100
|
||||
}
|
||||
b, err := Marshal(genValue(n))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
jsonBig = b
|
||||
}
|
||||
|
||||
func genValue(n int) interface{} {
|
||||
if n > 1 {
|
||||
switch rand.Intn(2) {
|
||||
case 0:
|
||||
return genArray(n)
|
||||
case 1:
|
||||
return genMap(n)
|
||||
}
|
||||
}
|
||||
switch rand.Intn(3) {
|
||||
case 0:
|
||||
return rand.Intn(2) == 0
|
||||
case 1:
|
||||
return rand.NormFloat64()
|
||||
case 2:
|
||||
return genString(30)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func genString(stddev float64) string {
|
||||
n := int(math.Abs(rand.NormFloat64()*stddev + stddev/2))
|
||||
c := make([]rune, n)
|
||||
for i := range c {
|
||||
f := math.Abs(rand.NormFloat64()*64 + 32)
|
||||
if f > 0x10ffff {
|
||||
f = 0x10ffff
|
||||
}
|
||||
c[i] = rune(f)
|
||||
}
|
||||
return string(c)
|
||||
}
|
||||
|
||||
func genArray(n int) []interface{} {
|
||||
f := int(math.Abs(rand.NormFloat64()) * math.Min(10, float64(n/2)))
|
||||
if f > n {
|
||||
f = n
|
||||
}
|
||||
if f < 1 {
|
||||
f = 1
|
||||
}
|
||||
x := make([]interface{}, f)
|
||||
for i := range x {
|
||||
x[i] = genValue(((i+1)*n)/f - (i*n)/f)
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
func genMap(n int) map[string]interface{} {
|
||||
f := int(math.Abs(rand.NormFloat64()) * math.Min(10, float64(n/2)))
|
||||
if f > n {
|
||||
f = n
|
||||
}
|
||||
if n > 0 && f == 0 {
|
||||
f = 1
|
||||
}
|
||||
x := make(map[string]interface{})
|
||||
for i := 0; i < f; i++ {
|
||||
x[genString(10)] = genValue(((i+1)*n)/f - (i*n)/f)
|
||||
}
|
||||
return x
|
||||
}
|
480
vendor/gopkg.in/square/go-jose.v2/json/stream.go
generated
vendored
Normal file
|
@ -0,0 +1,480 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"io"
|
||||
)
|
||||
|
||||
// A Decoder reads and decodes JSON objects from an input stream.
|
||||
type Decoder struct {
|
||||
r io.Reader
|
||||
buf []byte
|
||||
d decodeState
|
||||
scanp int // start of unread data in buf
|
||||
scan scanner
|
||||
err error
|
||||
|
||||
tokenState int
|
||||
tokenStack []int
|
||||
}
|
||||
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// The decoder introduces its own buffering and may
|
||||
// read data from r beyond the JSON values requested.
|
||||
func NewDecoder(r io.Reader) *Decoder {
|
||||
return &Decoder{r: r}
|
||||
}
|
||||
|
||||
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
||||
// Number instead of as a float64.
|
||||
func (dec *Decoder) UseNumber() { dec.d.useNumber = true }
|
||||
|
||||
// Decode reads the next JSON-encoded value from its
|
||||
// input and stores it in the value pointed to by v.
|
||||
//
|
||||
// See the documentation for Unmarshal for details about
|
||||
// the conversion of JSON into a Go value.
|
||||
func (dec *Decoder) Decode(v interface{}) error {
|
||||
if dec.err != nil {
|
||||
return dec.err
|
||||
}
|
||||
|
||||
if err := dec.tokenPrepareForDecode(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !dec.tokenValueAllowed() {
|
||||
return &SyntaxError{msg: "not at beginning of value"}
|
||||
}
|
||||
|
||||
// Read whole value into buffer.
|
||||
n, err := dec.readValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
dec.d.init(dec.buf[dec.scanp : dec.scanp+n])
|
||||
dec.scanp += n
|
||||
|
||||
// Don't save err from unmarshal into dec.err:
|
||||
// the connection is still usable since we read a complete JSON
|
||||
// object from it before the error happened.
|
||||
err = dec.d.unmarshal(v)
|
||||
|
||||
// fixup token streaming state
|
||||
dec.tokenValueEnd()
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// Buffered returns a reader of the data remaining in the Decoder's
|
||||
// buffer. The reader is valid until the next call to Decode.
|
||||
func (dec *Decoder) Buffered() io.Reader {
|
||||
return bytes.NewReader(dec.buf[dec.scanp:])
|
||||
}
|
||||
|
||||
// readValue reads a JSON value into dec.buf.
|
||||
// It returns the length of the encoding.
|
||||
func (dec *Decoder) readValue() (int, error) {
|
||||
dec.scan.reset()
|
||||
|
||||
scanp := dec.scanp
|
||||
var err error
|
||||
Input:
|
||||
for {
|
||||
// Look in the buffer for a new value.
|
||||
for i, c := range dec.buf[scanp:] {
|
||||
dec.scan.bytes++
|
||||
v := dec.scan.step(&dec.scan, c)
|
||||
if v == scanEnd {
|
||||
scanp += i
|
||||
break Input
|
||||
}
|
||||
// scanEnd is delayed one byte.
|
||||
// We might block trying to get that byte from src,
|
||||
// so instead invent a space byte.
|
||||
if (v == scanEndObject || v == scanEndArray) && dec.scan.step(&dec.scan, ' ') == scanEnd {
|
||||
scanp += i + 1
|
||||
break Input
|
||||
}
|
||||
if v == scanError {
|
||||
dec.err = dec.scan.err
|
||||
return 0, dec.scan.err
|
||||
}
|
||||
}
|
||||
scanp = len(dec.buf)
|
||||
|
||||
// Did the last read have an error?
|
||||
// Delayed until now to allow buffer scan.
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
if dec.scan.step(&dec.scan, ' ') == scanEnd {
|
||||
break Input
|
||||
}
|
||||
if nonSpace(dec.buf) {
|
||||
err = io.ErrUnexpectedEOF
|
||||
}
|
||||
}
|
||||
dec.err = err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
n := scanp - dec.scanp
|
||||
err = dec.refill()
|
||||
scanp = dec.scanp + n
|
||||
}
|
||||
return scanp - dec.scanp, nil
|
||||
}
|
||||
|
||||
func (dec *Decoder) refill() error {
|
||||
// Make room to read more into the buffer.
|
||||
// First slide down data already consumed.
|
||||
if dec.scanp > 0 {
|
||||
n := copy(dec.buf, dec.buf[dec.scanp:])
|
||||
dec.buf = dec.buf[:n]
|
||||
dec.scanp = 0
|
||||
}
|
||||
|
||||
// Grow buffer if not large enough.
|
||||
const minRead = 512
|
||||
if cap(dec.buf)-len(dec.buf) < minRead {
|
||||
newBuf := make([]byte, len(dec.buf), 2*cap(dec.buf)+minRead)
|
||||
copy(newBuf, dec.buf)
|
||||
dec.buf = newBuf
|
||||
}
|
||||
|
||||
// Read. Delay error for next iteration (after scan).
|
||||
n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
|
||||
dec.buf = dec.buf[0 : len(dec.buf)+n]
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func nonSpace(b []byte) bool {
|
||||
for _, c := range b {
|
||||
if !isSpace(c) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// An Encoder writes JSON objects to an output stream.
|
||||
type Encoder struct {
|
||||
w io.Writer
|
||||
err error
|
||||
}
|
||||
|
||||
// NewEncoder returns a new encoder that writes to w.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{w: w}
|
||||
}
|
||||
|
||||
// Encode writes the JSON encoding of v to the stream,
|
||||
// followed by a newline character.
|
||||
//
|
||||
// See the documentation for Marshal for details about the
|
||||
// conversion of Go values to JSON.
|
||||
func (enc *Encoder) Encode(v interface{}) error {
|
||||
if enc.err != nil {
|
||||
return enc.err
|
||||
}
|
||||
e := newEncodeState()
|
||||
err := e.marshal(v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Terminate each value with a newline.
|
||||
// This makes the output look a little nicer
|
||||
// when debugging, and some kind of space
|
||||
// is required if the encoded value was a number,
|
||||
// so that the reader knows there aren't more
|
||||
// digits coming.
|
||||
e.WriteByte('\n')
|
||||
|
||||
if _, err = enc.w.Write(e.Bytes()); err != nil {
|
||||
enc.err = err
|
||||
}
|
||||
encodeStatePool.Put(e)
|
||||
return err
|
||||
}
|
||||
|
||||
// RawMessage is a raw encoded JSON object.
|
||||
// It implements Marshaler and Unmarshaler and can
|
||||
// be used to delay JSON decoding or precompute a JSON encoding.
|
||||
type RawMessage []byte
|
||||
|
||||
// MarshalJSON returns *m as the JSON encoding of m.
|
||||
func (m *RawMessage) MarshalJSON() ([]byte, error) {
|
||||
return *m, nil
|
||||
}
|
||||
|
||||
// UnmarshalJSON sets *m to a copy of data.
|
||||
func (m *RawMessage) UnmarshalJSON(data []byte) error {
|
||||
if m == nil {
|
||||
return errors.New("json.RawMessage: UnmarshalJSON on nil pointer")
|
||||
}
|
||||
*m = append((*m)[0:0], data...)
|
||||
return nil
|
||||
}
|
||||
|
||||
var _ Marshaler = (*RawMessage)(nil)
|
||||
var _ Unmarshaler = (*RawMessage)(nil)
|
||||
|
||||
// A Token holds a value of one of these types:
|
||||
//
|
||||
// Delim, for the four JSON delimiters [ ] { }
|
||||
// bool, for JSON booleans
|
||||
// float64, for JSON numbers
|
||||
// Number, for JSON numbers
|
||||
// string, for JSON string literals
|
||||
// nil, for JSON null
|
||||
//
|
||||
type Token interface{}
|
||||
|
||||
const (
|
||||
tokenTopValue = iota
|
||||
tokenArrayStart
|
||||
tokenArrayValue
|
||||
tokenArrayComma
|
||||
tokenObjectStart
|
||||
tokenObjectKey
|
||||
tokenObjectColon
|
||||
tokenObjectValue
|
||||
tokenObjectComma
|
||||
)
|
||||
|
||||
// advance tokenstate from a separator state to a value state
|
||||
func (dec *Decoder) tokenPrepareForDecode() error {
|
||||
// Note: Not calling peek before switch, to avoid
|
||||
// putting peek into the standard Decode path.
|
||||
// peek is only called when using the Token API.
|
||||
switch dec.tokenState {
|
||||
case tokenArrayComma:
|
||||
c, err := dec.peek()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c != ',' {
|
||||
return &SyntaxError{"expected comma after array element", 0}
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenArrayValue
|
||||
case tokenObjectColon:
|
||||
c, err := dec.peek()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c != ':' {
|
||||
return &SyntaxError{"expected colon after object key", 0}
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenObjectValue
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *Decoder) tokenValueAllowed() bool {
|
||||
switch dec.tokenState {
|
||||
case tokenTopValue, tokenArrayStart, tokenArrayValue, tokenObjectValue:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (dec *Decoder) tokenValueEnd() {
|
||||
switch dec.tokenState {
|
||||
case tokenArrayStart, tokenArrayValue:
|
||||
dec.tokenState = tokenArrayComma
|
||||
case tokenObjectValue:
|
||||
dec.tokenState = tokenObjectComma
|
||||
}
|
||||
}
|
||||
|
||||
// A Delim is a JSON array or object delimiter, one of [ ] { or }.
|
||||
type Delim rune
|
||||
|
||||
func (d Delim) String() string {
|
||||
return string(d)
|
||||
}
|
||||
|
||||
// Token returns the next JSON token in the input stream.
|
||||
// At the end of the input stream, Token returns nil, io.EOF.
|
||||
//
|
||||
// Token guarantees that the delimiters [ ] { } it returns are
|
||||
// properly nested and matched: if Token encounters an unexpected
|
||||
// delimiter in the input, it will return an error.
|
||||
//
|
||||
// The input stream consists of basic JSON values—bool, string,
|
||||
// number, and null—along with delimiters [ ] { } of type Delim
|
||||
// to mark the start and end of arrays and objects.
|
||||
// Commas and colons are elided.
|
||||
func (dec *Decoder) Token() (Token, error) {
|
||||
for {
|
||||
c, err := dec.peek()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch c {
|
||||
case '[':
|
||||
if !dec.tokenValueAllowed() {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenStack = append(dec.tokenStack, dec.tokenState)
|
||||
dec.tokenState = tokenArrayStart
|
||||
return Delim('['), nil
|
||||
|
||||
case ']':
|
||||
if dec.tokenState != tokenArrayStart && dec.tokenState != tokenArrayComma {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
|
||||
dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
|
||||
dec.tokenValueEnd()
|
||||
return Delim(']'), nil
|
||||
|
||||
case '{':
|
||||
if !dec.tokenValueAllowed() {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenStack = append(dec.tokenStack, dec.tokenState)
|
||||
dec.tokenState = tokenObjectStart
|
||||
return Delim('{'), nil
|
||||
|
||||
case '}':
|
||||
if dec.tokenState != tokenObjectStart && dec.tokenState != tokenObjectComma {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
|
||||
dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
|
||||
dec.tokenValueEnd()
|
||||
return Delim('}'), nil
|
||||
|
||||
case ':':
|
||||
if dec.tokenState != tokenObjectColon {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenObjectValue
|
||||
continue
|
||||
|
||||
case ',':
|
||||
if dec.tokenState == tokenArrayComma {
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenArrayValue
|
||||
continue
|
||||
}
|
||||
if dec.tokenState == tokenObjectComma {
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenObjectKey
|
||||
continue
|
||||
}
|
||||
return dec.tokenError(c)
|
||||
|
||||
case '"':
|
||||
if dec.tokenState == tokenObjectStart || dec.tokenState == tokenObjectKey {
|
||||
var x string
|
||||
old := dec.tokenState
|
||||
dec.tokenState = tokenTopValue
|
||||
err := dec.Decode(&x)
|
||||
dec.tokenState = old
|
||||
if err != nil {
|
||||
clearOffset(err)
|
||||
return nil, err
|
||||
}
|
||||
dec.tokenState = tokenObjectColon
|
||||
return x, nil
|
||||
}
|
||||
fallthrough
|
||||
|
||||
default:
|
||||
if !dec.tokenValueAllowed() {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
var x interface{}
|
||||
if err := dec.Decode(&x); err != nil {
|
||||
clearOffset(err)
|
||||
return nil, err
|
||||
}
|
||||
return x, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func clearOffset(err error) {
|
||||
if s, ok := err.(*SyntaxError); ok {
|
||||
s.Offset = 0
|
||||
}
|
||||
}
|
||||
|
||||
func (dec *Decoder) tokenError(c byte) (Token, error) {
|
||||
var context string
|
||||
switch dec.tokenState {
|
||||
case tokenTopValue:
|
||||
context = " looking for beginning of value"
|
||||
case tokenArrayStart, tokenArrayValue, tokenObjectValue:
|
||||
context = " looking for beginning of value"
|
||||
case tokenArrayComma:
|
||||
context = " after array element"
|
||||
case tokenObjectKey:
|
||||
context = " looking for beginning of object key string"
|
||||
case tokenObjectColon:
|
||||
context = " after object key"
|
||||
case tokenObjectComma:
|
||||
context = " after object key:value pair"
|
||||
}
|
||||
return nil, &SyntaxError{"invalid character " + quoteChar(c) + " " + context, 0}
|
||||
}
|
||||
|
||||
// More reports whether there is another element in the
|
||||
// current array or object being parsed.
|
||||
func (dec *Decoder) More() bool {
|
||||
c, err := dec.peek()
|
||||
return err == nil && c != ']' && c != '}'
|
||||
}
|
||||
|
||||
func (dec *Decoder) peek() (byte, error) {
|
||||
var err error
|
||||
for {
|
||||
for i := dec.scanp; i < len(dec.buf); i++ {
|
||||
c := dec.buf[i]
|
||||
if isSpace(c) {
|
||||
continue
|
||||
}
|
||||
dec.scanp = i
|
||||
return c, nil
|
||||
}
|
||||
// buffer has been scanned, now report any error
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
err = dec.refill()
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
TODO
|
||||
|
||||
// EncodeToken writes the given JSON token to the stream.
|
||||
// It returns an error if the delimiters [ ] { } are not properly used.
|
||||
//
|
||||
// EncodeToken does not call Flush, because usually it is part of
|
||||
// a larger operation such as Encode, and those will call Flush when finished.
|
||||
// Callers that create an Encoder and then invoke EncodeToken directly,
|
||||
// without using Encode, need to call Flush when finished to ensure that
|
||||
// the JSON is written to the underlying writer.
|
||||
func (e *Encoder) EncodeToken(t Token) error {
|
||||
...
|
||||
}
|
||||
|
||||
*/
|
354
vendor/gopkg.in/square/go-jose.v2/json/stream_test.go
generated
vendored
Normal file
|
@ -0,0 +1,354 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Test values for the stream test.
|
||||
// One of each JSON kind.
|
||||
var streamTest = []interface{}{
|
||||
0.1,
|
||||
"hello",
|
||||
nil,
|
||||
true,
|
||||
false,
|
||||
[]interface{}{"a", "b", "c"},
|
||||
map[string]interface{}{"K": "Kelvin", "ß": "long s"},
|
||||
3.14, // another value to make sure something can follow map
|
||||
}
|
||||
|
||||
var streamEncoded = `0.1
|
||||
"hello"
|
||||
null
|
||||
true
|
||||
false
|
||||
["a","b","c"]
|
||||
{"ß":"long s","K":"Kelvin"}
|
||||
3.14
|
||||
`
|
||||
|
||||
func TestEncoder(t *testing.T) {
|
||||
for i := 0; i <= len(streamTest); i++ {
|
||||
var buf bytes.Buffer
|
||||
enc := NewEncoder(&buf)
|
||||
for j, v := range streamTest[0:i] {
|
||||
if err := enc.Encode(v); err != nil {
|
||||
t.Fatalf("encode #%d: %v", j, err)
|
||||
}
|
||||
}
|
||||
if have, want := buf.String(), nlines(streamEncoded, i); have != want {
|
||||
t.Errorf("encoding %d items: mismatch", i)
|
||||
diff(t, []byte(have), []byte(want))
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecoder(t *testing.T) {
|
||||
for i := 0; i <= len(streamTest); i++ {
|
||||
// Use stream without newlines as input,
|
||||
// just to stress the decoder even more.
|
||||
// Our test input does not include back-to-back numbers.
|
||||
// Otherwise stripping the newlines would
|
||||
// merge two adjacent JSON values.
|
||||
var buf bytes.Buffer
|
||||
for _, c := range nlines(streamEncoded, i) {
|
||||
if c != '\n' {
|
||||
buf.WriteRune(c)
|
||||
}
|
||||
}
|
||||
out := make([]interface{}, i)
|
||||
dec := NewDecoder(&buf)
|
||||
for j := range out {
|
||||
if err := dec.Decode(&out[j]); err != nil {
|
||||
t.Fatalf("decode #%d/%d: %v", j, i, err)
|
||||
}
|
||||
}
|
||||
if !reflect.DeepEqual(out, streamTest[0:i]) {
|
||||
t.Errorf("decoding %d items: mismatch", i)
|
||||
for j := range out {
|
||||
if !reflect.DeepEqual(out[j], streamTest[j]) {
|
||||
t.Errorf("#%d: have %v want %v", j, out[j], streamTest[j])
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecoderBuffered(t *testing.T) {
|
||||
r := strings.NewReader(`{"Name": "Gopher"} extra `)
|
||||
var m struct {
|
||||
Name string
|
||||
}
|
||||
d := NewDecoder(r)
|
||||
err := d.Decode(&m)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if m.Name != "Gopher" {
|
||||
t.Errorf("Name = %q; want Gopher", m.Name)
|
||||
}
|
||||
rest, err := ioutil.ReadAll(d.Buffered())
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if g, w := string(rest), " extra "; g != w {
|
||||
t.Errorf("Remaining = %q; want %q", g, w)
|
||||
}
|
||||
}
|
||||
|
||||
func nlines(s string, n int) string {
|
||||
if n <= 0 {
|
||||
return ""
|
||||
}
|
||||
for i, c := range s {
|
||||
if c == '\n' {
|
||||
if n--; n == 0 {
|
||||
return s[0 : i+1]
|
||||
}
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func TestRawMessage(t *testing.T) {
|
||||
// TODO(rsc): Should not need the * in *RawMessage
|
||||
var data struct {
|
||||
X float64
|
||||
Id *RawMessage
|
||||
Y float32
|
||||
}
|
||||
const raw = `["\u0056",null]`
|
||||
const msg = `{"X":0.1,"Id":["\u0056",null],"Y":0.2}`
|
||||
err := Unmarshal([]byte(msg), &data)
|
||||
if err != nil {
|
||||
t.Fatalf("Unmarshal: %v", err)
|
||||
}
|
||||
if string([]byte(*data.Id)) != raw {
|
||||
t.Fatalf("Raw mismatch: have %#q want %#q", []byte(*data.Id), raw)
|
||||
}
|
||||
b, err := Marshal(&data)
|
||||
if err != nil {
|
||||
t.Fatalf("Marshal: %v", err)
|
||||
}
|
||||
if string(b) != msg {
|
||||
t.Fatalf("Marshal: have %#q want %#q", b, msg)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNullRawMessage(t *testing.T) {
|
||||
// TODO(rsc): Should not need the * in *RawMessage
|
||||
var data struct {
|
||||
X float64
|
||||
Id *RawMessage
|
||||
Y float32
|
||||
}
|
||||
data.Id = new(RawMessage)
|
||||
const msg = `{"X":0.1,"Id":null,"Y":0.2}`
|
||||
err := Unmarshal([]byte(msg), &data)
|
||||
if err != nil {
|
||||
t.Fatalf("Unmarshal: %v", err)
|
||||
}
|
||||
if data.Id != nil {
|
||||
t.Fatalf("Raw mismatch: have non-nil, want nil")
|
||||
}
|
||||
b, err := Marshal(&data)
|
||||
if err != nil {
|
||||
t.Fatalf("Marshal: %v", err)
|
||||
}
|
||||
if string(b) != msg {
|
||||
t.Fatalf("Marshal: have %#q want %#q", b, msg)
|
||||
}
|
||||
}
|
||||
|
||||
var blockingTests = []string{
|
||||
`{"x": 1}`,
|
||||
`[1, 2, 3]`,
|
||||
}
|
||||
|
||||
func TestBlocking(t *testing.T) {
|
||||
for _, enc := range blockingTests {
|
||||
r, w := net.Pipe()
|
||||
go w.Write([]byte(enc))
|
||||
var val interface{}
|
||||
|
||||
// If Decode reads beyond what w.Write writes above,
|
||||
// it will block, and the test will deadlock.
|
||||
if err := NewDecoder(r).Decode(&val); err != nil {
|
||||
t.Errorf("decoding %s: %v", enc, err)
|
||||
}
|
||||
r.Close()
|
||||
w.Close()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkEncoderEncode(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
type T struct {
|
||||
X, Y string
|
||||
}
|
||||
v := &T{"foo", "bar"}
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := NewEncoder(ioutil.Discard).Encode(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type tokenStreamCase struct {
|
||||
json string
|
||||
expTokens []interface{}
|
||||
}
|
||||
|
||||
type decodeThis struct {
|
||||
v interface{}
|
||||
}
|
||||
|
||||
var tokenStreamCases []tokenStreamCase = []tokenStreamCase{
|
||||
// streaming token cases
|
||||
{json: `10`, expTokens: []interface{}{float64(10)}},
|
||||
{json: ` [10] `, expTokens: []interface{}{
|
||||
Delim('['), float64(10), Delim(']')}},
|
||||
{json: ` [false,10,"b"] `, expTokens: []interface{}{
|
||||
Delim('['), false, float64(10), "b", Delim(']')}},
|
||||
{json: `{ "a": 1 }`, expTokens: []interface{}{
|
||||
Delim('{'), "a", float64(1), Delim('}')}},
|
||||
{json: `{"a": 1, "b":"3"}`, expTokens: []interface{}{
|
||||
Delim('{'), "a", float64(1), "b", "3", Delim('}')}},
|
||||
{json: ` [{"a": 1},{"a": 2}] `, expTokens: []interface{}{
|
||||
Delim('['),
|
||||
Delim('{'), "a", float64(1), Delim('}'),
|
||||
Delim('{'), "a", float64(2), Delim('}'),
|
||||
Delim(']')}},
|
||||
{json: `{"obj": {"a": 1}}`, expTokens: []interface{}{
|
||||
Delim('{'), "obj", Delim('{'), "a", float64(1), Delim('}'),
|
||||
Delim('}')}},
|
||||
{json: `{"obj": [{"a": 1}]}`, expTokens: []interface{}{
|
||||
Delim('{'), "obj", Delim('['),
|
||||
Delim('{'), "a", float64(1), Delim('}'),
|
||||
Delim(']'), Delim('}')}},
|
||||
|
||||
// streaming tokens with intermittent Decode()
|
||||
{json: `{ "a": 1 }`, expTokens: []interface{}{
|
||||
Delim('{'), "a",
|
||||
decodeThis{float64(1)},
|
||||
Delim('}')}},
|
||||
{json: ` [ { "a" : 1 } ] `, expTokens: []interface{}{
|
||||
Delim('['),
|
||||
decodeThis{map[string]interface{}{"a": float64(1)}},
|
||||
Delim(']')}},
|
||||
{json: ` [{"a": 1},{"a": 2}] `, expTokens: []interface{}{
|
||||
Delim('['),
|
||||
decodeThis{map[string]interface{}{"a": float64(1)}},
|
||||
decodeThis{map[string]interface{}{"a": float64(2)}},
|
||||
Delim(']')}},
|
||||
{json: `{ "obj" : [ { "a" : 1 } ] }`, expTokens: []interface{}{
|
||||
Delim('{'), "obj", Delim('['),
|
||||
decodeThis{map[string]interface{}{"a": float64(1)}},
|
||||
Delim(']'), Delim('}')}},
|
||||
|
||||
{json: `{"obj": {"a": 1}}`, expTokens: []interface{}{
|
||||
Delim('{'), "obj",
|
||||
decodeThis{map[string]interface{}{"a": float64(1)}},
|
||||
Delim('}')}},
|
||||
{json: `{"obj": [{"a": 1}]}`, expTokens: []interface{}{
|
||||
Delim('{'), "obj",
|
||||
decodeThis{[]interface{}{
|
||||
map[string]interface{}{"a": float64(1)},
|
||||
}},
|
||||
Delim('}')}},
|
||||
{json: ` [{"a": 1} {"a": 2}] `, expTokens: []interface{}{
|
||||
Delim('['),
|
||||
decodeThis{map[string]interface{}{"a": float64(1)}},
|
||||
decodeThis{&SyntaxError{"expected comma after array element", 0}},
|
||||
}},
|
||||
{json: `{ "a" 1 }`, expTokens: []interface{}{
|
||||
Delim('{'), "a",
|
||||
decodeThis{&SyntaxError{"expected colon after object key", 0}},
|
||||
}},
|
||||
}
|
||||
|
||||
func TestDecodeInStream(t *testing.T) {
|
||||
|
||||
for ci, tcase := range tokenStreamCases {
|
||||
|
||||
dec := NewDecoder(strings.NewReader(tcase.json))
|
||||
for i, etk := range tcase.expTokens {
|
||||
|
||||
var tk interface{}
|
||||
var err error
|
||||
|
||||
if dt, ok := etk.(decodeThis); ok {
|
||||
etk = dt.v
|
||||
err = dec.Decode(&tk)
|
||||
} else {
|
||||
tk, err = dec.Token()
|
||||
}
|
||||
if experr, ok := etk.(error); ok {
|
||||
if err == nil || err.Error() != experr.Error() {
|
||||
t.Errorf("case %v: Expected error %v in %q, but was %v", ci, experr, tcase.json, err)
|
||||
}
|
||||
break
|
||||
} else if err == io.EOF {
|
||||
t.Errorf("case %v: Unexpected EOF in %q", ci, tcase.json)
|
||||
break
|
||||
} else if err != nil {
|
||||
t.Errorf("case %v: Unexpected error '%v' in %q", ci, err, tcase.json)
|
||||
break
|
||||
}
|
||||
if !reflect.DeepEqual(tk, etk) {
|
||||
t.Errorf(`case %v: %q @ %v expected %T(%v) was %T(%v)`, ci, tcase.json, i, etk, etk, tk, tk)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Test from golang.org/issue/11893
|
||||
func TestHTTPDecoding(t *testing.T) {
|
||||
const raw = `{ "foo": "bar" }`
|
||||
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte(raw))
|
||||
}))
|
||||
defer ts.Close()
|
||||
res, err := http.Get(ts.URL)
|
||||
if err != nil {
|
||||
log.Fatalf("GET failed: %v", err)
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
foo := struct {
|
||||
Foo string `json:"foo"`
|
||||
}{}
|
||||
|
||||
d := NewDecoder(res.Body)
|
||||
err = d.Decode(&foo)
|
||||
if err != nil {
|
||||
t.Fatalf("Decode: %v", err)
|
||||
}
|
||||
if foo.Foo != "bar" {
|
||||
t.Errorf("decoded %q; want \"bar\"", foo.Foo)
|
||||
}
|
||||
|
||||
// make sure we get the EOF the second time
|
||||
err = d.Decode(&foo)
|
||||
if err != io.EOF {
|
||||
t.Errorf("err = %v; want io.EOF", err)
|
||||
}
|
||||
}
|
115
vendor/gopkg.in/square/go-jose.v2/json/tagkey_test.go
generated
vendored
Normal file
|
@ -0,0 +1,115 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
type basicLatin2xTag struct {
|
||||
V string `json:"$%-/"`
|
||||
}
|
||||
|
||||
type basicLatin3xTag struct {
|
||||
V string `json:"0123456789"`
|
||||
}
|
||||
|
||||
type basicLatin4xTag struct {
|
||||
V string `json:"ABCDEFGHIJKLMO"`
|
||||
}
|
||||
|
||||
type basicLatin5xTag struct {
|
||||
V string `json:"PQRSTUVWXYZ_"`
|
||||
}
|
||||
|
||||
type basicLatin6xTag struct {
|
||||
V string `json:"abcdefghijklmno"`
|
||||
}
|
||||
|
||||
type basicLatin7xTag struct {
|
||||
V string `json:"pqrstuvwxyz"`
|
||||
}
|
||||
|
||||
type miscPlaneTag struct {
|
||||
V string `json:"色は匂へど"`
|
||||
}
|
||||
|
||||
type percentSlashTag struct {
|
||||
V string `json:"text/html%"` // https://golang.org/issue/2718
|
||||
}
|
||||
|
||||
type punctuationTag struct {
|
||||
V string `json:"!#$%&()*+-./:<=>?@[]^_{|}~"` // https://golang.org/issue/3546
|
||||
}
|
||||
|
||||
type emptyTag struct {
|
||||
W string
|
||||
}
|
||||
|
||||
type misnamedTag struct {
|
||||
X string `jsom:"Misnamed"`
|
||||
}
|
||||
|
||||
type badFormatTag struct {
|
||||
Y string `:"BadFormat"`
|
||||
}
|
||||
|
||||
type badCodeTag struct {
|
||||
Z string `json:" !\"#&'()*+,."`
|
||||
}
|
||||
|
||||
type spaceTag struct {
|
||||
Q string `json:"With space"`
|
||||
}
|
||||
|
||||
type unicodeTag struct {
|
||||
W string `json:"Ελλάδα"`
|
||||
}
|
||||
|
||||
var structTagObjectKeyTests = []struct {
|
||||
raw interface{}
|
||||
value string
|
||||
key string
|
||||
}{
|
||||
{basicLatin2xTag{"2x"}, "2x", "$%-/"},
|
||||
{basicLatin3xTag{"3x"}, "3x", "0123456789"},
|
||||
{basicLatin4xTag{"4x"}, "4x", "ABCDEFGHIJKLMO"},
|
||||
{basicLatin5xTag{"5x"}, "5x", "PQRSTUVWXYZ_"},
|
||||
{basicLatin6xTag{"6x"}, "6x", "abcdefghijklmno"},
|
||||
{basicLatin7xTag{"7x"}, "7x", "pqrstuvwxyz"},
|
||||
{miscPlaneTag{"いろはにほへと"}, "いろはにほへと", "色は匂へど"},
|
||||
{emptyTag{"Pour Moi"}, "Pour Moi", "W"},
|
||||
{misnamedTag{"Animal Kingdom"}, "Animal Kingdom", "X"},
|
||||
{badFormatTag{"Orfevre"}, "Orfevre", "Y"},
|
||||
{badCodeTag{"Reliable Man"}, "Reliable Man", "Z"},
|
||||
{percentSlashTag{"brut"}, "brut", "text/html%"},
|
||||
{punctuationTag{"Union Rags"}, "Union Rags", "!#$%&()*+-./:<=>?@[]^_{|}~"},
|
||||
{spaceTag{"Perreddu"}, "Perreddu", "With space"},
|
||||
{unicodeTag{"Loukanikos"}, "Loukanikos", "Ελλάδα"},
|
||||
}
|
||||
|
||||
func TestStructTagObjectKey(t *testing.T) {
|
||||
for _, tt := range structTagObjectKeyTests {
|
||||
b, err := Marshal(tt.raw)
|
||||
if err != nil {
|
||||
t.Fatalf("Marshal(%#q) failed: %v", tt.raw, err)
|
||||
}
|
||||
var f interface{}
|
||||
err = Unmarshal(b, &f)
|
||||
if err != nil {
|
||||
t.Fatalf("Unmarshal(%#q) failed: %v", b, err)
|
||||
}
|
||||
for i, v := range f.(map[string]interface{}) {
|
||||
switch i {
|
||||
case tt.key:
|
||||
if s, ok := v.(string); !ok || s != tt.value {
|
||||
t.Fatalf("Unexpected value: %#q, want %v", s, tt.value)
|
||||
}
|
||||
default:
|
||||
t.Fatalf("Unexpected key: %#q, from %#q", i, b)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
44
vendor/gopkg.in/square/go-jose.v2/json/tags.go
generated
vendored
Normal file
|
@ -0,0 +1,44 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// tagOptions is the string following a comma in a struct field's "json"
|
||||
// tag, or the empty string. It does not include the leading comma.
|
||||
type tagOptions string
|
||||
|
||||
// parseTag splits a struct field's json tag into its name and
|
||||
// comma-separated options.
|
||||
func parseTag(tag string) (string, tagOptions) {
|
||||
if idx := strings.Index(tag, ","); idx != -1 {
|
||||
return tag[:idx], tagOptions(tag[idx+1:])
|
||||
}
|
||||
return tag, tagOptions("")
|
||||
}
|
||||
|
||||
// Contains reports whether a comma-separated list of options
|
||||
// contains a particular substr flag. substr must be surrounded by a
|
||||
// string boundary or commas.
|
||||
func (o tagOptions) Contains(optionName string) bool {
|
||||
if len(o) == 0 {
|
||||
return false
|
||||
}
|
||||
s := string(o)
|
||||
for s != "" {
|
||||
var next string
|
||||
i := strings.Index(s, ",")
|
||||
if i >= 0 {
|
||||
s, next = s[:i], s[i+1:]
|
||||
}
|
||||
if s == optionName {
|
||||
return true
|
||||
}
|
||||
s = next
|
||||
}
|
||||
return false
|
||||
}
|
28
vendor/gopkg.in/square/go-jose.v2/json/tags_test.go
generated
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestTagParsing(t *testing.T) {
|
||||
name, opts := parseTag("field,foobar,foo")
|
||||
if name != "field" {
|
||||
t.Fatalf("name = %q, want field", name)
|
||||
}
|
||||
for _, tt := range []struct {
|
||||
opt string
|
||||
want bool
|
||||
}{
|
||||
{"foobar", true},
|
||||
{"foo", true},
|
||||
{"bar", false},
|
||||
} {
|
||||
if opts.Contains(tt.opt) != tt.want {
|
||||
t.Errorf("Contains(%q) = %v", tt.opt, !tt.want)
|
||||
}
|
||||
}
|
||||
}
|
BIN
vendor/gopkg.in/square/go-jose.v2/json/testdata/code.json.gz
generated
vendored
Normal file
Binary file not shown.
294
vendor/gopkg.in/square/go-jose.v2/jwe.go
generated
vendored
Normal file
|
@ -0,0 +1,294 @@
|
|||
/*-
 * Copyright 2014 Square Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package jose

import (
	"encoding/base64"
	"fmt"
	"strings"

	"gopkg.in/square/go-jose.v2/json"
)

// rawJSONWebEncryption represents a raw JWE JSON object. Used for parsing/serializing.
type rawJSONWebEncryption struct {
	Protected    *byteBuffer        `json:"protected,omitempty"`
	Unprotected  *rawHeader         `json:"unprotected,omitempty"`
	Header       *rawHeader         `json:"header,omitempty"`
	Recipients   []rawRecipientInfo `json:"recipients,omitempty"`
	Aad          *byteBuffer        `json:"aad,omitempty"`
	EncryptedKey *byteBuffer        `json:"encrypted_key,omitempty"`
	Iv           *byteBuffer        `json:"iv,omitempty"`
	Ciphertext   *byteBuffer        `json:"ciphertext,omitempty"`
	Tag          *byteBuffer        `json:"tag,omitempty"`
}

// rawRecipientInfo represents a raw JWE Per-Recipient header JSON object. Used for parsing/serializing.
type rawRecipientInfo struct {
	Header       *rawHeader `json:"header,omitempty"`
	EncryptedKey string     `json:"encrypted_key,omitempty"`
}

// JSONWebEncryption represents an encrypted JWE object after parsing.
type JSONWebEncryption struct {
	Header                   Header
	protected, unprotected   *rawHeader
	recipients               []recipientInfo
	aad, iv, ciphertext, tag []byte
	original                 *rawJSONWebEncryption
}

// recipientInfo represents a raw JWE Per-Recipient header JSON object after parsing.
type recipientInfo struct {
	header       *rawHeader
	encryptedKey []byte
}

// GetAuthData retrieves the (optional) authenticated data attached to the object.
func (obj JSONWebEncryption) GetAuthData() []byte {
	if obj.aad != nil {
		out := make([]byte, len(obj.aad))
		copy(out, obj.aad)
		return out
	}

	return nil
}

// Get the merged header values
func (obj JSONWebEncryption) mergedHeaders(recipient *recipientInfo) rawHeader {
	out := rawHeader{}
	out.merge(obj.protected)
	out.merge(obj.unprotected)

	if recipient != nil {
		out.merge(recipient.header)
	}

	return out
}

// Get the additional authenticated data from a JWE object.
func (obj JSONWebEncryption) computeAuthData() []byte {
	var protected string

	if obj.original != nil && obj.original.Protected != nil {
		protected = obj.original.Protected.base64()
	} else if obj.protected != nil {
		protected = base64.RawURLEncoding.EncodeToString(mustSerializeJSON(obj.protected))
	} else {
		protected = ""
	}

	output := []byte(protected)
	if obj.aad != nil {
		output = append(output, '.')
		output = append(output, []byte(base64.RawURLEncoding.EncodeToString(obj.aad))...)
	}

	return output
}

// ParseEncrypted parses an encrypted message in compact or full serialization format.
func ParseEncrypted(input string) (*JSONWebEncryption, error) {
	input = stripWhitespace(input)
	if strings.HasPrefix(input, "{") {
		return parseEncryptedFull(input)
	}

	return parseEncryptedCompact(input)
}

// parseEncryptedFull parses a message in full (JSON) serialization format.
func parseEncryptedFull(input string) (*JSONWebEncryption, error) {
	var parsed rawJSONWebEncryption
	err := json.Unmarshal([]byte(input), &parsed)
	if err != nil {
		return nil, err
	}

	return parsed.sanitized()
}

// sanitized produces a cleaned-up JWE object from the raw JSON.
func (parsed *rawJSONWebEncryption) sanitized() (*JSONWebEncryption, error) {
	obj := &JSONWebEncryption{
		original:    parsed,
		unprotected: parsed.Unprotected,
	}

	// Check that there is not a nonce in the unprotected headers
	if parsed.Unprotected != nil {
		if nonce := parsed.Unprotected.getNonce(); nonce != "" {
			return nil, ErrUnprotectedNonce
		}
	}
	if parsed.Header != nil {
		if nonce := parsed.Header.getNonce(); nonce != "" {
			return nil, ErrUnprotectedNonce
		}
	}

	if parsed.Protected != nil && len(parsed.Protected.bytes()) > 0 {
		err := json.Unmarshal(parsed.Protected.bytes(), &obj.protected)
		if err != nil {
			return nil, fmt.Errorf("square/go-jose: invalid protected header: %s, %s", err, parsed.Protected.base64())
		}
	}

	// Note: this must be called _after_ we parse the protected header,
	// otherwise fields from the protected header will not get picked up.
	var err error
	mergedHeaders := obj.mergedHeaders(nil)
	obj.Header, err = mergedHeaders.sanitized()
	if err != nil {
		return nil, fmt.Errorf("square/go-jose: cannot sanitize merged headers: %v (%v)", err, mergedHeaders)
	}

	if len(parsed.Recipients) == 0 {
		obj.recipients = []recipientInfo{
			{
				header:       parsed.Header,
				encryptedKey: parsed.EncryptedKey.bytes(),
			},
		}
	} else {
		obj.recipients = make([]recipientInfo, len(parsed.Recipients))
		for r := range parsed.Recipients {
			encryptedKey, err := base64.RawURLEncoding.DecodeString(parsed.Recipients[r].EncryptedKey)
			if err != nil {
				return nil, err
			}

			// Check that there is not a nonce in the unprotected header
			if parsed.Recipients[r].Header != nil && parsed.Recipients[r].Header.getNonce() != "" {
				return nil, ErrUnprotectedNonce
			}

			obj.recipients[r].header = parsed.Recipients[r].Header
			obj.recipients[r].encryptedKey = encryptedKey
		}
	}

	for _, recipient := range obj.recipients {
		headers := obj.mergedHeaders(&recipient)
		if headers.getAlgorithm() == "" || headers.getEncryption() == "" {
			return nil, fmt.Errorf("square/go-jose: message is missing alg/enc headers")
		}
	}

	obj.iv = parsed.Iv.bytes()
	obj.ciphertext = parsed.Ciphertext.bytes()
	obj.tag = parsed.Tag.bytes()
	obj.aad = parsed.Aad.bytes()

	return obj, nil
}

// parseEncryptedCompact parses a message in compact format.
func parseEncryptedCompact(input string) (*JSONWebEncryption, error) {
	parts := strings.Split(input, ".")
	if len(parts) != 5 {
		return nil, fmt.Errorf("square/go-jose: compact JWE format must have five parts")
	}

	rawProtected, err := base64.RawURLEncoding.DecodeString(parts[0])
	if err != nil {
		return nil, err
	}

	encryptedKey, err := base64.RawURLEncoding.DecodeString(parts[1])
	if err != nil {
		return nil, err
	}

	iv, err := base64.RawURLEncoding.DecodeString(parts[2])
	if err != nil {
		return nil, err
	}

	ciphertext, err := base64.RawURLEncoding.DecodeString(parts[3])
	if err != nil {
		return nil, err
	}

	tag, err := base64.RawURLEncoding.DecodeString(parts[4])
	if err != nil {
		return nil, err
	}

	raw := &rawJSONWebEncryption{
		Protected:    newBuffer(rawProtected),
		EncryptedKey: newBuffer(encryptedKey),
		Iv:           newBuffer(iv),
		Ciphertext:   newBuffer(ciphertext),
		Tag:          newBuffer(tag),
	}

	return raw.sanitized()
}

// CompactSerialize serializes an object using the compact serialization format.
func (obj JSONWebEncryption) CompactSerialize() (string, error) {
	if len(obj.recipients) != 1 || obj.unprotected != nil ||
		obj.protected == nil || obj.recipients[0].header != nil {
		return "", ErrNotSupported
	}

	serializedProtected := mustSerializeJSON(obj.protected)

	return fmt.Sprintf(
		"%s.%s.%s.%s.%s",
		base64.RawURLEncoding.EncodeToString(serializedProtected),
		base64.RawURLEncoding.EncodeToString(obj.recipients[0].encryptedKey),
		base64.RawURLEncoding.EncodeToString(obj.iv),
		base64.RawURLEncoding.EncodeToString(obj.ciphertext),
		base64.RawURLEncoding.EncodeToString(obj.tag)), nil
}

// FullSerialize serializes an object using the full JSON serialization format.
func (obj JSONWebEncryption) FullSerialize() string {
	raw := rawJSONWebEncryption{
		Unprotected:  obj.unprotected,
		Iv:           newBuffer(obj.iv),
		Ciphertext:   newBuffer(obj.ciphertext),
		EncryptedKey: newBuffer(obj.recipients[0].encryptedKey),
		Tag:          newBuffer(obj.tag),
		Aad:          newBuffer(obj.aad),
		Recipients:   []rawRecipientInfo{},
	}

	if len(obj.recipients) > 1 {
		for _, recipient := range obj.recipients {
			info := rawRecipientInfo{
				Header:       recipient.header,
				EncryptedKey: base64.RawURLEncoding.EncodeToString(recipient.encryptedKey),
			}
			raw.Recipients = append(raw.Recipients, info)
		}
	} else {
		// Use flattened serialization
		raw.Header = obj.recipients[0].header
		raw.EncryptedKey = newBuffer(obj.recipients[0].encryptedKey)
	}

	if obj.protected != nil {
		raw.Protected = newBuffer(mustSerializeJSON(obj.protected))
	}

	return string(mustSerializeJSON(raw))
}
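The exported pieces above (ParseEncrypted, CompactSerialize, FullSerialize) are easiest to see in a round trip. The sketch below is illustrative only and not part of the vendored diff; it assumes the vendored import path gopkg.in/square/go-jose.v2 and reuses the same constants the tests further down rely on (A128GCM content encryption with a direct 128-bit key). NewEncrypter and Decrypt come from elsewhere in the same package.

package main

import (
	"fmt"

	jose "gopkg.in/square/go-jose.v2"
)

func main() {
	key := []byte("1234567890123456") // 16-byte key for direct A128GCM encryption

	// Build an encrypter for direct symmetric encryption.
	enc, err := jose.NewEncrypter(jose.A128GCM,
		jose.Recipient{Algorithm: jose.DIRECT, Key: key}, nil)
	if err != nil {
		panic(err)
	}

	obj, err := enc.Encrypt([]byte("Lorem ipsum dolor sit amet"))
	if err != nil {
		panic(err)
	}

	// Compact serialization: five dot-separated base64url parts.
	compact, err := obj.CompactSerialize()
	if err != nil {
		panic(err)
	}

	// ParseEncrypted accepts both the compact and the full JSON serialization.
	parsed, err := jose.ParseEncrypted(compact)
	if err != nil {
		panic(err)
	}

	plaintext, err := parsed.Decrypt(key)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(plaintext))
}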
619 vendor/gopkg.in/square/go-jose.v2/jwe_test.go (generated, vendored, Normal file)
@@ -0,0 +1,619 @@
/*-
 * Copyright 2014 Square Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package jose

import (
	"bytes"
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rsa"
	"crypto/x509"
	"math/big"
	"regexp"
	"testing"
)

func TestCompactParseJWE(t *testing.T) {
	// Should parse
	msg := "eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.dGVzdA.dGVzdA.dGVzdA.dGVzdA"
	_, err := ParseEncrypted(msg)
	if err != nil {
		t.Error("Unable to parse valid message:", err)
	}

	// Messages that should fail to parse
	failures := []string{
		// Too many parts
		"eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.dGVzdA.dGVzdA.dGVzdA.dGVzdA.dGVzdA",
		// Not enough parts
		"eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.dGVzdA.dGVzdA.dGVzdA",
		// Invalid encrypted key
		"eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.//////.dGVzdA.dGVzdA.dGVzdA",
		// Invalid IV
		"eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.dGVzdA.//////.dGVzdA.dGVzdA",
		// Invalid ciphertext
		"eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.dGVzdA.dGVzdA.//////.dGVzdA",
		// Invalid tag
		"eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.dGVzdA.dGVzdA.dGVzdA.//////",
		// Invalid header
		"W10.dGVzdA.dGVzdA.dGVzdA.dGVzdA",
		// Invalid header
		"######.dGVzdA.dGVzdA.dGVzdA.dGVzdA",
		// Missing alg/enc params
		"e30.dGVzdA.dGVzdA.dGVzdA.dGVzdA",
	}

	for _, msg := range failures {
		_, err = ParseEncrypted(msg)
		if err == nil {
			t.Error("Able to parse invalid message", msg)
		}
	}
}

func TestFullParseJWE(t *testing.T) {
	// Messages that should parse successfully
	successes := []string{
		// Flattened serialization, single recipient
		"{\"protected\":\"eyJhbGciOiJYWVoiLCJlbmMiOiJYWVoifQo\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
		// Unflattened serialization, single recipient
		"{\"protected\":\"\",\"unprotected\":{\"enc\":\"XYZ\"},\"recipients\":[{\"header\":{\"alg\":\"XYZ\"},\"encrypted_key\":\"QUJD\"}],\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
	}

	for i := range successes {
		_, err := ParseEncrypted(successes[i])
		if err != nil {
			t.Error("Unable to parse valid message", err, successes[i])
		}
	}

	// Messages that should fail to parse
	failures := []string{
		// Empty
		"{}",
		// Invalid JSON
		"{XX",
		// Invalid protected header
		"{\"protected\":\"###\"}",
		// Invalid protected header
		"{\"protected\":\"e1gK\"}",
		// Invalid encrypted key
		"{\"protected\":\"e30\",\"encrypted_key\":\"###\"}",
		// Invalid IV
		"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"###\"}",
		// Invalid ciphertext
		"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"###\"}",
		// Invalid tag
		"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"###\"}",
		// Invalid AAD
		"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\",\"aad\":\"###\"}",
		// Missing alg/enc headers
		"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
		// Missing enc header
		"{\"protected\":\"eyJhbGciOiJYWVoifQ\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
		// Missing alg header
		"{\"protected\":\"eyJlbmMiOiJYWVoifQ\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
		// Unflattened serialization, single recipient, invalid encrypted_key
		"{\"protected\":\"\",\"recipients\":[{\"header\":{\"alg\":\"XYZ\", \"enc\":\"XYZ\"},\"encrypted_key\":\"###\"}],\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
		// Unflattened serialization, single recipient, missing alg
		"{\"protected\":\"eyJhbGciOiJYWVoifQ\",\"recipients\":[{\"encrypted_key\":\"QUJD\"}],\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
	}

	for i := range failures {
		_, err := ParseEncrypted(failures[i])
		if err == nil {
			t.Error("Able to parse invalid message", err, failures[i])
		}
	}
}
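As an aside (not part of the vendored test file): the first success case above is easier to read once decoded. "QUJD" is base64url for "ABC" and the protected value decodes to {"alg":"XYZ","enc":"XYZ"}, so the flattened JWE object being parsed is effectively:

{
  "protected": "eyJhbGciOiJYWVoiLCJlbmMiOiJYWVoifQo",
  "encrypted_key": "QUJD",
  "iv": "QUJD",
  "ciphertext": "QUJD",
  "tag": "QUJD"
}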
|
||||
|
||||
func TestMissingInvalidHeaders(t *testing.T) {
|
||||
protected := &rawHeader{}
|
||||
protected.set(headerEncryption, A128GCM)
|
||||
|
||||
obj := &JSONWebEncryption{
|
||||
protected: protected,
|
||||
unprotected: &rawHeader{},
|
||||
recipients: []recipientInfo{
|
||||
{},
|
||||
},
|
||||
}
|
||||
|
||||
_, err := obj.Decrypt(nil)
|
||||
if err != ErrUnsupportedKeyType {
|
||||
t.Error("should detect invalid key")
|
||||
}
|
||||
|
||||
obj.unprotected.set(headerCritical, []string{"1", "2"})
|
||||
|
||||
_, err = obj.Decrypt(nil)
|
||||
if err == nil {
|
||||
t.Error("should reject message with crit header")
|
||||
}
|
||||
|
||||
obj.unprotected.set(headerCritical, nil)
|
||||
obj.protected = &rawHeader{}
|
||||
obj.protected.set(headerAlgorithm, RSA1_5)
|
||||
|
||||
_, err = obj.Decrypt(rsaTestKey)
|
||||
if err == nil || err == ErrCryptoFailure {
|
||||
t.Error("should detect missing enc header")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRejectUnprotectedJWENonce(t *testing.T) {
|
||||
// No need to test compact, since that's always protected
|
||||
|
||||
// Flattened JSON
|
||||
input := `{
|
||||
"header": {
|
||||
"alg": "XYZ", "enc": "XYZ",
|
||||
"nonce": "should-cause-an-error"
|
||||
},
|
||||
"encrypted_key": "does-not-matter",
|
||||
"aad": "does-not-matter",
|
||||
"iv": "does-not-matter",
|
||||
"ciphertext": "does-not-matter",
|
||||
"tag": "does-not-matter"
|
||||
}`
|
||||
_, err := ParseEncrypted(input)
|
||||
if err == nil {
|
||||
t.Error("JWE with an unprotected nonce parsed as valid.")
|
||||
} else if err.Error() != "square/go-jose: Nonce parameter included in unprotected header" {
|
||||
t.Errorf("Improper error for unprotected nonce: %v", err)
|
||||
}
|
||||
|
||||
input = `{
|
||||
"unprotected": {
|
||||
"alg": "XYZ", "enc": "XYZ",
|
||||
"nonce": "should-cause-an-error"
|
||||
},
|
||||
"encrypted_key": "does-not-matter",
|
||||
"aad": "does-not-matter",
|
||||
"iv": "does-not-matter",
|
||||
"ciphertext": "does-not-matter",
|
||||
"tag": "does-not-matter"
|
||||
}`
|
||||
_, err = ParseEncrypted(input)
|
||||
if err == nil {
|
||||
t.Error("JWE with an unprotected nonce parsed as valid.")
|
||||
} else if err.Error() != "square/go-jose: Nonce parameter included in unprotected header" {
|
||||
t.Errorf("Improper error for unprotected nonce: %v", err)
|
||||
}
|
||||
|
||||
// Full JSON
|
||||
input = `{
|
||||
"header": { "alg": "XYZ", "enc": "XYZ" },
|
||||
"aad": "does-not-matter",
|
||||
"iv": "does-not-matter",
|
||||
"ciphertext": "does-not-matter",
|
||||
"tag": "does-not-matter",
|
||||
"recipients": [{
|
||||
"header": { "nonce": "should-cause-an-error" },
|
||||
"encrypted_key": "does-not-matter"
|
||||
}]
|
||||
}`
|
||||
_, err = ParseEncrypted(input)
|
||||
if err == nil {
|
||||
t.Error("JWE with an unprotected nonce parsed as valid.")
|
||||
} else if err.Error() != "square/go-jose: Nonce parameter included in unprotected header" {
|
||||
t.Errorf("Improper error for unprotected nonce: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompactSerialize(t *testing.T) {
|
||||
// Compact serialization must fail if we have unprotected headers
|
||||
obj := &JSONWebEncryption{
|
||||
unprotected: &rawHeader{},
|
||||
}
|
||||
obj.unprotected.set(headerAlgorithm, "XYZ")
|
||||
|
||||
_, err := obj.CompactSerialize()
|
||||
if err == nil {
|
||||
t.Error("Object with unprotected headers can't be compact serialized")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVectorsJWE(t *testing.T) {
|
||||
plaintext := []byte("The true sign of intelligence is not knowledge but imagination.")
|
||||
|
||||
publicKey := &rsa.PublicKey{
|
||||
N: fromBase64Int(`
|
||||
oahUIoWw0K0usKNuOR6H4wkf4oBUXHTxRvgb48E-BVvxkeDNjbC4he8rUW
|
||||
cJoZmds2h7M70imEVhRU5djINXtqllXI4DFqcI1DgjT9LewND8MW2Krf3S
|
||||
psk_ZkoFnilakGygTwpZ3uesH-PFABNIUYpOiN15dsQRkgr0vEhxN92i2a
|
||||
sbOenSZeyaxziK72UwxrrKoExv6kc5twXTq4h-QChLOln0_mtUZwfsRaMS
|
||||
tPs6mS6XrgxnxbWhojf663tuEQueGC-FCMfra36C9knDFGzKsNa7LZK2dj
|
||||
YgyD3JR_MB_4NUJW_TqOQtwHYbxevoJArm-L5StowjzGy-_bq6Gw`),
|
||||
E: 65537,
|
||||
}
|
||||
|
||||
expectedCompact := stripWhitespace(`
|
||||
eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkEyNTZHQ00ifQ.ROQCfge4JPm_
|
||||
yACxv1C1NSXmwNbL6kvmCuyxBRGpW57DvlwByjyjsb6g8m7wtLMqKEyhFCn
|
||||
tV7sjippEePIlKln6BvVnz5ZLXHNYQgmubuNq8MC0KTwcaGJ_C0z_T8j4PZ
|
||||
a1nfpbhSe-ePYaALrf_nIsSRKu7cWsrwOSlaRPecRnYeDd_ytAxEQWYEKFi
|
||||
Pszc70fP9geZOB_09y9jq0vaOF0jGmpIAmgk71lCcUpSdrhNokTKo5y8MH8
|
||||
3NcbIvmuZ51cjXQj1f0_AwM9RW3oCh2Hu0z0C5l4BujZVsDuGgMsGZsjUhS
|
||||
RZsAQSXHCAmlJ2NlnN60U7y4SPJhKv5tKYw.48V1_ALb6US04U3b.5eym8T
|
||||
W_c8SuK0ltJ3rpYIzOeDQz7TALvtu6UG9oMo4vpzs9tX_EFShS8iB7j6jiS
|
||||
diwkIr3ajwQzaBtQD_A.XFBoMYUZodetZdvTiFvSkQ`)
|
||||
|
||||
expectedFull := stripWhitespace(`
|
||||
{ "protected":"eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkEyNTZHQ00ifQ",
|
||||
"encrypted_key":
|
||||
"ROQCfge4JPm_yACxv1C1NSXmwNbL6kvmCuyxBRGpW57DvlwByjyjsb
|
||||
6g8m7wtLMqKEyhFCntV7sjippEePIlKln6BvVnz5ZLXHNYQgmubuNq
|
||||
8MC0KTwcaGJ_C0z_T8j4PZa1nfpbhSe-ePYaALrf_nIsSRKu7cWsrw
|
||||
OSlaRPecRnYeDd_ytAxEQWYEKFiPszc70fP9geZOB_09y9jq0vaOF0
|
||||
jGmpIAmgk71lCcUpSdrhNokTKo5y8MH83NcbIvmuZ51cjXQj1f0_Aw
|
||||
M9RW3oCh2Hu0z0C5l4BujZVsDuGgMsGZsjUhSRZsAQSXHCAmlJ2Nln
|
||||
N60U7y4SPJhKv5tKYw",
|
||||
"iv": "48V1_ALb6US04U3b",
|
||||
"ciphertext":
|
||||
"5eym8TW_c8SuK0ltJ3rpYIzOeDQz7TALvtu6UG9oMo4vpzs9tX_EFS
|
||||
hS8iB7j6jiSdiwkIr3ajwQzaBtQD_A",
|
||||
"tag":"XFBoMYUZodetZdvTiFvSkQ" }`)
|
||||
|
||||
// Mock random reader
|
||||
RandReader = bytes.NewReader([]byte{
|
||||
// Encryption key
|
||||
177, 161, 244, 128, 84, 143, 225, 115, 63, 180, 3, 255, 107, 154,
|
||||
212, 246, 138, 7, 110, 91, 112, 46, 34, 105, 47, 130, 203, 46, 122,
|
||||
234, 64, 252,
|
||||
// Randomness for RSA-OAEP
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
// Initialization vector
|
||||
227, 197, 117, 252, 2, 219, 233, 68, 180, 225, 77, 219})
|
||||
defer resetRandReader()
|
||||
|
||||
// Encrypt with a dummy key
|
||||
encrypter, err := NewEncrypter(A256GCM, Recipient{Algorithm: RSA_OAEP, Key: publicKey}, nil)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
object, err := encrypter.Encrypt(plaintext)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
serialized, err := object.CompactSerialize()
|
||||
if serialized != expectedCompact {
|
||||
t.Error("Compact serialization is not what we expected", serialized, expectedCompact)
|
||||
}
|
||||
|
||||
serialized = object.FullSerialize()
|
||||
if serialized != expectedFull {
|
||||
t.Error("Full serialization is not what we expected")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWENilProtected(t *testing.T) {
|
||||
key := []byte("1234567890123456")
|
||||
serialized := `{"unprotected":{"alg":"dir","enc":"A128GCM"}}`
|
||||
jwe, _ := ParseEncrypted(serialized)
|
||||
jwe.Decrypt(key)
|
||||
}
|
||||
|
||||
func TestVectorsJWECorrupt(t *testing.T) {
|
||||
priv := &rsa.PrivateKey{
|
||||
PublicKey: rsa.PublicKey{
|
||||
N: fromHexInt(`
|
||||
a8b3b284af8eb50b387034a860f146c4919f318763cd6c5598c8
|
||||
ae4811a1e0abc4c7e0b082d693a5e7fced675cf4668512772c0c
|
||||
bc64a742c6c630f533c8cc72f62ae833c40bf25842e984bb78bd
|
||||
bf97c0107d55bdb662f5c4e0fab9845cb5148ef7392dd3aaff93
|
||||
ae1e6b667bb3d4247616d4f5ba10d4cfd226de88d39f16fb`),
|
||||
E: 65537,
|
||||
},
|
||||
D: fromHexInt(`
|
||||
53339cfdb79fc8466a655c7316aca85c55fd8f6dd898fdaf1195
|
||||
17ef4f52e8fd8e258df93fee180fa0e4ab29693cd83b152a553d
|
||||
4ac4d1812b8b9fa5af0e7f55fe7304df41570926f3311f15c4d6
|
||||
5a732c483116ee3d3d2d0af3549ad9bf7cbfb78ad884f84d5beb
|
||||
04724dc7369b31def37d0cf539e9cfcdd3de653729ead5d1`),
|
||||
Primes: []*big.Int{
|
||||
fromHexInt(`
|
||||
d32737e7267ffe1341b2d5c0d150a81b586fb3132bed2f8d5262
|
||||
864a9cb9f30af38be448598d413a172efb802c21acf1c11c520c
|
||||
2f26a471dcad212eac7ca39d`),
|
||||
fromHexInt(`
|
||||
cc8853d1d54da630fac004f471f281c7b8982d8224a490edbeb3
|
||||
3d3e3d5cc93c4765703d1dd791642f1f116a0dd852be2419b2af
|
||||
72bfe9a030e860b0288b5d77`),
|
||||
},
|
||||
}
|
||||
|
||||
corruptCiphertext := stripWhitespace(`
|
||||
eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.NFl09dehy
|
||||
IR2Oh5iSsvEa82Ps7DLjRHeo0RnuTuSR45OsaIP6U8yu7vLlWaZKSZMy
|
||||
B2qRBSujf-5XIRoNhtyIyjk81eJRXGa_Bxaor1XBCMyyhGchW2H2P71f
|
||||
PhDO6ufSC7kV4bNqgHR-4ziS7KXwzN83_5kogXqxUpymUoJDNc.tk-GT
|
||||
W_VVhiTIKFF.D_BE6ImZUl9F.52a-zFnRb3YQwIC7UrhVyQ`)
|
||||
|
||||
corruptAuthtag := stripWhitespace(`
|
||||
eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.NFl09dehy
|
||||
IR2Oh5iSsvEa82Ps7DLjRHeo0RnuTuSR45OsaIP6U8yu7vLlWaZKSZMy
|
||||
B2qRBSujf-5XIRoNhtyIyjk81eJRXGa_Bxaor1XBCMyyhGchW2H2P71f
|
||||
PhDO6ufSC7kV4bNqgHR-4ziS7KNwzN83_5kogXqxUpymUoJDNc.tk-GT
|
||||
W_VVhiTIKFF.D_BE6ImZUl9F.52a-zFnRb3YQwiC7UrhVyQ`)
|
||||
|
||||
msg, _ := ParseEncrypted(corruptCiphertext)
|
||||
_, err := msg.Decrypt(priv)
|
||||
if err != ErrCryptoFailure {
|
||||
t.Error("should detect corrupt ciphertext")
|
||||
}
|
||||
|
||||
msg, _ = ParseEncrypted(corruptAuthtag)
|
||||
_, err = msg.Decrypt(priv)
|
||||
if err != ErrCryptoFailure {
|
||||
t.Error("should detect corrupt auth tag")
|
||||
}
|
||||
}
|
||||
|
||||
// Test vectors generated with nimbus-jose-jwt
|
||||
func TestSampleNimbusJWEMessagesRSA(t *testing.T) {
|
||||
rsaPrivateKey, err := x509.ParsePKCS8PrivateKey(fromBase64Bytes(`
|
||||
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCNRCEmf5PlbXKuT4uwnb
|
||||
wGKvFrtpi+bDYxOZxxqxdVkZM/bYATAnD1fg9pNvLMKeF+MWJ9kPIMmDgOh9RdnRdLvQGb
|
||||
BzhLmxwhhcua2QYiHEZizXmiaXvNP12bzEBhebdX7ObW8izMVW0p0lqHPNzkK3K75B0Sxo
|
||||
FMVKkZ7KtBHgepBT5yPhPPcNe5lXQeTne5bo3I60DRcN9jTBgMJOXdq0I9o4y6ZmoXdNTm
|
||||
0EyLzn9/EYiHqBxtKFh791EHR7wYgyi/t+nOKr4sO74NbEByP0mHDil+mPvZSzFW4l7fPx
|
||||
OclRZvpRIKIub2TroZA9s2WsshGf79eqqXYbBB9NNRAgMBAAECggEAIExbZ/nzTplfhwsY
|
||||
3SCzRJW87OuqsJ79JPQPGM4NX7sQ94eJqM7+FKLl0yCFErjgnYGdCyiArvB+oJPdsimgke
|
||||
h83X0hGeg03lVA3/6OsG3WifCAxulnLN44AM8KST8S9D9t5+cm5vEBLHazzAfWWTS13s+g
|
||||
9hH8rf8NSqgZ36EutjKlvLdHx1mWcKX7SREFVHT8FWPAbdhTLEHUjoWHrfSektnczaSHnt
|
||||
q8fFJy6Ld13QkF1ZJRUhtA24XrD+qLTc+M36IuedjeZaLHFB+KyhYR3YvXEtrbCug7dCRd
|
||||
uG6uTlDCSaSy7xHeTPolWtWo9F202jal54otxiAJFGUHgQKBgQDRAT0s6YQZUfwE0wluXV
|
||||
k0JdhDdCo8sC1aMmKlRKWUkBAqrDl7BI3MF56VOr4ybr90buuscshFf9TtrtBOjHSGcfDI
|
||||
tSKfhhkW5ewQKB0YqyHzoD6UKT0/XAshFY3esc3uCxuJ/6vOiXV0og9o7eFvr51O0TfDFh
|
||||
mcTvW4wirKlQKBgQCtB7UAu8I9Nn8czkd6oXLDRyTWYviuiqFmxR+PM9klgZtsumkeSxO1
|
||||
lkfFoj9+G8nFaqYEBA9sPeNtJVTSROCvj/iQtoqpV2NiI/wWeVszpBwsswx2mlks4LJa8a
|
||||
Yz9xrsfNoroKYVppefc/MCoSx4M+99RSm3FSpLGZQHAUGyzQKBgQDMQmq4JuuMF1y2lk0E
|
||||
SESyuz21BqV0tDVOjilsHT+5hmXWXoS6nkO6L2czrrpM7YE82F6JJZBmo7zEIXHBInGLJ3
|
||||
XLoYLZ5qNEhqYDUEDHaBCBWZ1vDTKnZlwWFEuXVavNNZvPbUhKTHq25t8qjDki/r09Vykp
|
||||
BsM2yNBKpbBOVQKBgCJyUVd3CaFUExQyAMrqD0XPCQdhJq7gzGcAQVsp8EXmOoH3zmuIeM
|
||||
ECzQEMXuWFNLMHm0tbX5Kl83vMHcnKioyI9ewhWxOBYTitf0ceG8j5F97SOl32NmCXzwoJ
|
||||
55Oa0xJXfLuIvOe8hZzp4WwZmBfKBxiCR166aPQQgIawelrVAoGAEJsHomfCI4epxH4oMw
|
||||
qYJMCGy95zloB+2+c86BZCOJAGwnfzbtc2eutWZw61/9sSO8sQCfzA8oX+5HwAgnFVzwW4
|
||||
lNMZohppYcpwN9EyjkPaCXuALC7p5rF2o63wY7JLvnjS2aYZliknh2yW6X6fSB0PK0Cpvd
|
||||
lAIyRw6Kud0zI=`))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
rsaSampleMessages := []string{
|
||||
"eyJlbmMiOiJBMTI4R0NNIiwiYWxnIjoiUlNBMV81In0.EW0KOhHeoAxTBnLjYhh2T6HjwI-srNs6RpcSdZvE-GJ5iww3EYWBCmeGGj1UVz6OcBfwW3wllZ6GPOHU-hxVQH5KYpVOjkmrFIYU6-8BHhxBP_PjSJEBCZzjOgsCm9Th4-zmlO7UWTdK_UtwE7nk4X-kkmEy-aZBCShA8nFe2MVvqD5F7nvEWNFBOHh8ae_juo-kvycoIzvxLV9g1B0Zn8K9FAlu8YF1KiL5NFekn76f3jvAwlExuRbFPUx4gJN6CeBDK_D57ABsY2aBVDSiQceuYZxvCIAajqSS6dMT382FNJzAiQhToOpo_1w5FnnBjzJLLEKDk_I-Eo2YCWxxsQ.5mCMuxJqLRuPXGAr.Ghe4INeBhP3MDWGvyNko7qanKdZIzKjfeiU.ja3UlVWJXKNFJ-rZsJWycw",
|
||||
"eyJlbmMiOiJBMTkyR0NNIiwiYWxnIjoiUlNBMV81In0.JsJeYoP0St1bRYNUaAmA34DAA27usE7RNuC2grGikBRmh1xrwUOpnEIXXpwr7fjVmNi52zzWkNHC8JkkRTrLcCh2VXvnOnarpH8DCr9qM6440bSrahzbxIvDds8z8q0wT1W4kjVnq1mGwGxg8RQNBWTV6Sp2FLQkZyjzt_aXsgYzr3zEmLZxB-d41lBS81Mguk_hdFJIg_WO4ao54lozvxkCn_uMiIZ8eLb8qHy0h-N21tiHGCaiC2vV8KXomwoqbJ0SXrEH4r9_R2J844H80TBZdbvNBd8whvoQNHvOX659LNs9EQ9xxvHU2kqGZekXBu7sDXXTjctMkMITobGSzw.1v5govaDvanP3LGp.llwYNBDrD7MwVLaFHesljlratfmndWs4XPQ.ZGT1zk9_yIKi2GzW6CuAyA",
|
||||
"eyJlbmMiOiJBMjU2R0NNIiwiYWxnIjoiUlNBMV81In0.fBv3fA3TMS3ML8vlsCuvwdsKvB0ym8R30jJrlOiqkWKk7WVUkjDInFzr1zw3Owla6c5BqOJNoACXt4IWbkLbkoWV3tweXlWwpafuaWPkjLOUH_K31rS2fCX5x-MTj8_hScquVQXpbz3vk2EfulRmGXZc_8JU2NqQCAsYy3a28houqP3rDe5jEAvZS2SOFvJkKW--f5S-z39t1D7fNz1N8Btd9SmXWQzjbul5YNxI9ctqxhJpkKYpxOLlvrzdA6YdJjOlDx3n6S-HnSZGM6kQd_xKtAf8l1EGwhQmhbXhMhjVxMvGwE5BX7PAb8Ccde5bzOCJx-PVbVetuLb169ZYqQ._jiZbOPRR82FEWMZ.88j68LI-K2KT6FMBEdlz6amG5nvaJU8a-90.EnEbUTJsWNqJYKzfO0x4Yw",
|
||||
"eyJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiUlNBMV81In0.bN6FN0qmGxhkESiVukrCaDVG3woL0xE-0bHN_Mu0WZXTQWbzzT-7jOvaN1xhGK8nzi8qpCSRgE5onONNB9i8OnJm3MMIxF7bUUEAXO9SUAFn2v--wNc4drPc5OjIu0RiJrDVDkkGjNrBDIuBaEQcke7A0v91PH58dXE7o4TLPzC8UJmRtXWhUSwjXVF3-UmYRMht2rjHJlvRbtm6Tu2LMBIopRL0zj6tlPP4Dm7I7sz9OEB3VahYAhpXnFR7D_f8RjLSXQmBvB1FiI5l_vMz2NFt2hYUmQF3EJMLIEdHvvPp3iHDGiXC1obJrDID_CCf3qs9UY7DMYL622KLvP2NIg.qb72oxECzxd_aNuHVR0aNg.Gwet9Ms8hB8rKEb0h4RGdFNRq97Qs2LQaJM0HWrCqoI.03ljVThOFvgXzMmQJ79VjQ",
|
||||
"eyJlbmMiOiJBMTkyQ0JDLUhTMzg0IiwiYWxnIjoiUlNBMV81In0.ZbEOP6rqdiIP4g7Nl1PL5gwhgDwv9RinyiUQxZXPOmD7kwEZrZ093dJnhqI9kEd3QGFlHDpB7HgNz53d27z2zmEj1-27v6miizq6tH4sN2MoeZLwSyk16O1_n3bVdDmROawsTYYFJfHsuLwyVJxPd37duIYnbUCFO9J8lLIv-2VI50KJ1t47YfE4P-Wt9jVzxP2CVUQaJwTlcwfiDLJTagYmfyrDjf525WlQFlgfJGqsJKp8BX9gmKvAo-1iCBAM8VpEjS0u0_hW9VSye36yh8BthVV-VJkhJ-0tMpto3bbBmj7M25Xf4gbTrrVU7Nz6wb18YZuhHZWmj2Y2nHV6Jg.AjnS44blTrIIfFlqVw0_Mg.muCRgaEXNKKpW8rMfW7jf7Zpn3VwSYDz-JTRg16jZxY.qjc9OGlMaaWKDWQSIwVpR4K556Pp6SF9",
|
||||
"eyJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwiYWxnIjoiUlNBMV81In0.c7_F1lMlRHQQE3WbKmtHBYTosdZrG9hPfs-F9gNQYet61zKG8NXVkSy0Zf2UFHt0vhcO8hP2qrqOFsy7vmRj20xnGHQ2EE29HH6hwX5bx1Jj3uE5WT9Gvh0OewpvF9VubbwWTIObBpdEG7XdJsMAQlIxtXUmQYAtLTWcy2ZJipyJtVlWQLaPuE8BKfZH-XAsp2CpQNiRPI8Ftza3EAspiyRfVQbjKt7nF8nuZ2sESjt7Y50q4CSiiCuGT28T3diMN0_rWrH-I-xx7OQvJlrQaNGglGtu3jKUcrJDcvxW2e1OxriaTeuQ848ayuRvGUNeSv6WoVYmkiK1x_gNwUAAbw.7XtSqHJA7kjt6JrfxJMwiA.Yvi4qukAbdT-k-Fd2s4G8xzL4VFxaFC0ZIzgFDAI6n0.JSWPJ-HjOE3SK9Lm0yHclmjS7Z1ahtQga9FHGCWVRcc",
|
||||
"eyJlbmMiOiJBMTI4R0NNIiwiYWxnIjoiUlNBLU9BRVAifQ.SYVxJbCnJ_tcR13LJpaqHQj-nGNkMxre4A1FmnUdxnvzeJwuvyrLiUdRsZR1IkP4fqLtDON2mumx39QeJQf0WIObPBYlIxycRLkwxDHRVlyTmPvdZHAxN26jPrk09wa5SgK1UF1W1VSQIPm-Tek8jNAmarF1Yxzxl-t54wZFlQiHP4TuaczugO5f-J4nlWenfla2mU1snDgdUMlEZGOAQ_gTEtwSgd1MqXmK_7LZBkoDqqoCujMZhziafJPXPDaUUqBLW3hHkkDA7GpVec3XcTtNUWQJqOpMyQhqo1KQMc8jg3fuirILp-hjvvNVtBnCRBvbrKUCPzu2_yH3HM_agA.2VsdijtonAxShNIW.QzzB3P9CxYP3foNKN0Ma1Z9tMwijAlkWo08.ZdQkIPDY_M-hxqi5fD4NGw",
|
||||
"eyJlbmMiOiJBMTkyR0NNIiwiYWxnIjoiUlNBLU9BRVAifQ.Z2oTJXXib1u-S38Vn3DRKE3JnhnwgUa92UhsefzY2Wpdn0dmxMfYt9iRoJGFfSAcA97MOfjyvXVRCKWXGrG5AZCMAXEqU8SNQwKPRjlcqojcVzQyMucXI0ikLC4mUgeRlfKTwsBicq6JZZylzRoLGGSNJQbni3_BLsf7H3Qor0BYg0FPCLG9Z2OVvrFzvjTLmZtV6gFlVrMHBxJub_aUet9gAkxiu1Wx_Kx46TlLX2tkumXIpTGlzX6pef6jLeZ5EIg_K-Uz4tkWgWQIEkLD7qmTyk5pAGmzukHa_08jIh5-U-Sd8XGZdx4J1pVPJ5CPg0qDJGZ_cfgkgpWbP_wB6A.4qgKfokK1EwYxz20._Md82bv_KH2Vru0Ue2Eb6oAqHP2xBBP5jF8.WFRojvQpD5VmZlOr_dN0rQ",
|
||||
"eyJlbmMiOiJBMjU2R0NNIiwiYWxnIjoiUlNBLU9BRVAifQ.JzCUgJcBJmBgByp4PBAABUfhezPvndxBIVzaoZ96DAS0HPni0OjMbsOGsz6JwNsiTr1gSn_S6R1WpZM8GJc9R2z0EKKVP67TR62ZSG0MEWyLpHmG_4ug0fAp1HWWMa9bT4ApSaOLgwlpVAb_-BPZZgIu6c8cREuMon6UBHDqW1euTBbzk8zix3-FTZ6p5b_3soDL1wXfRiRBEsxxUGMnpryx1OFb8Od0JdyGF0GgfLt6OoaujDJpo-XtLRawu1Xlg6GqRs0NQwSHZ5jXgQ6-zgCufXonAmYTiIyBXY2no9XmECTexjwrS_05nA7H-UyIZEBOCp3Yhz2zxrt5j_0pvQ.SJR-ghhaUKP4zXtZ.muiuzLfZA0y0BDNsroGTw2r2-l73SLf9lK8.XFMH1oHr1G6ByP3dWSUUPA",
|
||||
"eyJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiUlNBLU9BRVAifQ.U946MVfIm4Dpk_86HrnIA-QXyiUu0LZ67PL93CMLmEtJemMNDqmRd9fXyenCIhAC7jPIV1aaqW7gS194xyrrnUpBoJBdbegiPqOfquy493Iq_GQ8OXnFxFibPNQ6rU0l8BwIfh28ei_VIF2jqN6bhxFURCVW7fG6n6zkCCuEyc7IcxWafSHjH2FNttREuVj-jS-4LYDZsFzSKbpqoYF6mHt8H3btNEZDTSmy_6v0fV1foNtUKNfWopCp-iE4hNh4EzJfDuU8eXLhDb03aoOockrUiUCh-E0tQx9su4rOv-mDEOHHAQK7swm5etxoa7__9PC3Hg97_p4GM9gC9ykNgw.pnXwvoSPi0kMQP54of-HGg.RPJt1CMWs1nyotx1fOIfZ8760mYQ69HlyDp3XmdVsZ8.Yxw2iPVWaBROFE_FGbvodA",
|
||||
"eyJlbmMiOiJBMTkyQ0JDLUhTMzg0IiwiYWxnIjoiUlNBLU9BRVAifQ.eKEOIJUJpXmO_ghH_nGCJmoEspqKyiy3D5l0P8lKutlo8AuYHPQlgOsaFYnDkypyUVWd9zi-JaQuCeo7dzoBiS1L71nAZo-SUoN0anQBkVuyuRjr-deJMhPPfq1H86tTk-4rKzPr1Ivd2RGXMtWsrUpNGk81r1v8DdMntLE7UxZQqT34ONuZg1IXnD_U6di7k07unI29zuU1ySeUr6w1YPw5aUDErMlpZcEJWrgOEYWaS2nuC8sWGlPGYEjqkACMFGn-y40UoS_JatNZO6gHK3SKZnXD7vN5NAaMo_mFNbh50e1t_zO8DaUdLtXPOBLcx_ULoteNd9H8HyDGWqwAPw.0xmtzJfeVMoIT1Cp68QrXA.841l1aA4c3uvSYfw6l180gn5JZQjL53WQ5fr8ejtvoI.lojzeWql_3gDq-AoaIbl_aGQRH_54w_f",
|
||||
"eyJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwiYWxnIjoiUlNBLU9BRVAifQ.D0QkvIXR1TL7dIHWuPNMybmmD8UPyQd1bRKjRDNbA2HmKGpamCtcJmpNB_EetNFe-LDmhe44BYI_XN2wIBbYURKgDK_WG9BH0LQw_nCVqQ-sKqjtj3yQeytXhLHYTDmiF0TO-uW-RFR7GbPAdARBfuf4zj82r_wDD9sD5WSCGx89iPfozDOYQ_OLwdL2WD99VvDyfwS3ZhxA-9IMSYv5pwqPkxj4C0JdjCqrN0YNrZn_1ORgjtsVmcWXsmusObTozUGA7n5GeVepfZdU1vrMulAwdRYqOYtlqKaOpFowe9xFN3ncBG7wb4f9pmzbS_Dgt-1_Ii_4SEB9GQ4NiuBZ0w.N4AZeCxMGUv52A0UVJsaZw.5eHOGbZdtahnp3l_PDY-YojYib4ft4SRmdsQ2kggrTs.WsmGH8ZDv4ctBFs7qsQvw2obe4dVToRcAQaZ3PYL34E",
|
||||
"eyJlbmMiOiJBMTI4R0NNIiwiYWxnIjoiUlNBLU9BRVAtMjU2In0.fDTxO_ZzZ3Jdrdw-bxvg7u-xWB2q1tp3kI5zH6JfhLUm4h6rt9qDA_wZlRym8-GzEtkUjkTtQGs6HgQx_qlyy8ylCakY5GHsNhCG4m0UNhRiNfcasAs03JSXfON9-tfTJimWD9n4k5OHHhvcrsCW1G3jYeLsK9WHCGRIhNz5ULbo8HBrCTbmZ6bOEQ9mqhdssLpdV24HDpebotf3bgPJqoaTfWU6Uy7tLmPiNuuNRLQ-iTpLyNMTVvGqqZhpcV3lAEN5l77QabI5xLJYucvYjrXQhAEZ7YXO8oRYhGkdG2XXIRcwr87rBeRH-47HAyhZgF_PBPBhhrJNS9UNMqdfBw.FvU4_s7Md6vxnXWd.fw29Q4_gHt4f026DPPV-CNebQ8plJ6IVLX8._apBZrw7WsT8HOmxgCrTwA",
|
||||
"eyJlbmMiOiJBMTkyR0NNIiwiYWxnIjoiUlNBLU9BRVAtMjU2In0.bYuorK-rHMbO4c2CRWtvyOEaM1EN-o-wLRZ0wFWRX9mCXQ-iTNarZn7ksYM1XnGmZ4u3CSowX1Hpca9Rg72_VJCmKapqCT7r3YfasN4_oeLwuSKI_gT-uVOznod97tn3Gf_EDv0y1V4H0k9BEIFGbajAcG1znTD_ODY3j2KZJxisfrsBoslc6N-HI0kKZMC2hSGuHOcOf8HN1sTE-BLqZCtoj-zxQECJK8Wh14Ih4jzzdmmiu_qmSR780K6su-4PRt3j8uY7oCiLBfwpCsCmhJgp8rKd91zoedZmamfvX38mJIfE52j4fG6HmIYw9Ov814fk9OffV6tzixjcg54Q2g.yeVJz4aSh2s-GUr9.TBzzWP5llEiDdugpP2SmPf2U4MEGG9EoPWk.g25UoWpsBaOd45J__FX7mA",
|
||||
"eyJlbmMiOiJBMjU2R0NNIiwiYWxnIjoiUlNBLU9BRVAtMjU2In0.h9tFtmh762JuffBxlSQbJujCyI4Zs9yc3IOb1yR8g65W4ZHosIvzVGHWbShj4EY9MNrz-RbKtHfqQGGzDeo3Xb4-HcQ2ZDHyWoUg7VfA8JafJ5zIKL1npz8eUExOVMLsAaRfHg8qNfczodg3egoSmX5Q-nrx4DeidDSXYZaZjV0C72stLTPcuQ7XPV7z1tvERAkqpvcsRmJn_PiRNxIbAgoyHMJ4Gijuzt1bWZwezlxYmw0TEuwCTVC2fl9NJTZyxOntS1Lcm-WQGlPkVYeVgYTOQXLlp7tF9t-aAvYpth2oWGT6Y-hbPrjx_19WaKD0XyWCR46V32DlXEVDP3Xl2A.NUgfnzQyEaJjzt9r.k2To43B2YVWMeR-w3n4Pr2b5wYq2o87giHk.X8_QYCg0IGnn1pJqe8p_KA",
|
||||
"eyJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiUlNBLU9BRVAtMjU2In0.EDq6cNP6Yp1sds5HZ4CkXYp7bs9plIYVZScKvuyxUy0H1VyBC_YWg0HvndPNb-vwh1LA6KMxRazlOwJ9iPR9YzHnYmGgPM3Je_ZzBfiPlRfq6hQBpGnNaypBI1XZ2tyFBhulsVLqyJe2SmM2Ud00kasOdMYgcN8FNFzq7IOE7E0FUQkIwLdUL1nrzepiYDp-5bGkxWRcL02cYfdqdm00G4m0GkUxAmdxa3oPNxZlt2NeBI_UVWQSgJE-DJVJQkDcyA0id27TV2RCDnmujYauNT_wYlyb0bFDx3pYzzNXfAXd4wHZxt75QaLZ5APJ0EVfiXJ0qki6kT-GRVmOimUbQA.vTULZL7LvS0WD8kR8ZUtLg.mb2f0StEmmkuuvsyz8UplMvF58FtZzlu8eEwzvPUvN0.hbhveEN40V-pgG2hSVgyKg",
|
||||
"eyJlbmMiOiJBMTkyQ0JDLUhTMzg0IiwiYWxnIjoiUlNBLU9BRVAtMjU2In0.DuYk92p7u-YIN-JKn-XThmlVcnhU9x5TieQ2uhsLQVNlo0iWC9JJPP6bT6aI6u_1BIS3yE8_tSGGL7eM-zyEk6LuTqSWFRaZcZC06d0MnS9eYZcw1T2D17fL-ki-NtCaTahJD7jE2s0HevRVW49YtL-_V8whnO_EyVjvXIAQlPYqhH_o-0Nzcpng9ggdAnuF2rY1_6iRPYFJ3BLQvG1oWhyJ9s6SBttlOa0i6mmFCVLHx6sRpdGAB3lbCL3wfmHq4tpIv77gfoYUNP0SNff-zNmBXF_wp3dCntLZFTjbfMpGyHlruF_uoaLqwdjYpUGNUFVUoeSiMnSbMKm9NxiDgQ.6Mdgcqz7bMU1UeoAwFC8pg.W36QWOlBaJezakUX5FMZzbAgeAu_R14AYKZCQmuhguw.5OeyIJ03olxmJft8uBmjuOFQPWNZMYLI",
|
||||
"eyJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwiYWxnIjoiUlNBLU9BRVAtMjU2In0.ECulJArWFsPL2FlpCN0W8E7IseSjJg1cZqE3wz5jk9gvwgNForAUEv5KYZqhNI-p5IxkGV0f8K6Y2X8pWzbLwiPIjZe8_dVqHYJoINxqCSgWLBhz0V36qL9Nc_xARTBk4-ZteIu75NoXVeos9gNvFnkOCj4tm-jGo8z8EFO9XfODgjhiR4xv8VqUtvrkjo9GQConaga5zpV-J4JQlXbdqbDjnuwacnJAxYpFyuemqcgqsl6BnFX3tovGkmSUPqcvF1A6tiHqr-TEmcgVqo5C3xswknRBKTQRM00iAmJ92WlVdkoOCx6E6O7cVHFawZ14BLzWzm66Crb4tv0ucYvk_Q.mxolwUaoj5S5kHCfph0w8g.nFpgYdnYg3blHCCEi2XXQGkkKQBXs2OkZaH11m3PRvk.k8BAVT4EcyrUFVIKr-KOSPbF89xyL0Vri2rFTu2iIWM",
|
||||
}
|
||||
|
||||
for _, msg := range rsaSampleMessages {
|
||||
obj, err := ParseEncrypted(msg)
|
||||
if err != nil {
|
||||
t.Error("unable to parse message", msg, err)
|
||||
continue
|
||||
}
|
||||
plaintext, err := obj.Decrypt(rsaPrivateKey)
|
||||
if err != nil {
|
||||
t.Error("unable to decrypt message", msg, err)
|
||||
continue
|
||||
}
|
||||
if string(plaintext) != "Lorem ipsum dolor sit amet" {
|
||||
t.Error("plaintext is not what we expected for msg", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test vectors generated with nimbus-jose-jwt
|
||||
func TestSampleNimbusJWEMessagesAESKW(t *testing.T) {
|
||||
aesTestKeys := [][]byte{
|
||||
fromHexBytes("DF1FA4F36FFA7FC42C81D4B3C033928D"),
|
||||
fromHexBytes("DF1FA4F36FFA7FC42C81D4B3C033928D95EC9CDC2D82233C"),
|
||||
fromHexBytes("DF1FA4F36FFA7FC42C81D4B3C033928D95EC9CDC2D82233C333C35BA29044E90"),
|
||||
}
|
||||
|
||||
aesSampleMessages := [][]string{
|
||||
{
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4R0NNIiwidGFnIjoib2ZMd2Q5NGloVWFRckJ0T1pQUDdjUSIsImFsZyI6IkExMjhHQ01LVyIsIml2IjoiV2Z3TnN5cjEwWUFjY2p2diJ9.9x3RxdqIS6P9xjh93Eu1bQ.6fs3_fSGt2jull_5.YDlzr6sWACkFg_GU5MEc-ZEWxNLwI_JMKe_jFA.f-pq-V7rlSSg_q2e1gDygw",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyR0NNIiwidGFnIjoic2RneXB1ckFjTEFzTmZJU0lkZUNpUSIsImFsZyI6IkExMjhHQ01LVyIsIml2IjoieVFMR0dCdDJFZ0c1THdyViJ9.arslKo4aKlh6f4s0z1_-U-8JbmhAoZHN.Xw2Q-GX98YXwuc4i.halTEWMWAYZbv-qOD52G6bte4x6sxlh1_VpGEA.Z1spn016v58cW6Q2o0Qxag",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2R0NNIiwidGFnIjoicTNzejF5VUlhbVBDYXJfZ05kSVJqQSIsImFsZyI6IkExMjhHQ01LVyIsIml2IjoiM0ZRM0FsLWJWdWhmcEIyQyJ9.dhVipWbzIdsINttuZM4hnjpHvwEHf0VsVrOp4GAg01g.dk7dUyt1Qj13Pipw.5Tt70ONATF0BZAS8dBkYmCV7AQUrfb8qmKNLmw.A6ton9MQjZg0b3C0QcW-hg",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwidGFnIjoiUHNpTGphZnJZNE16UlRmNlBPLTZfdyIsImFsZyI6IkExMjhHQ01LVyIsIml2IjoiSUFPbnd2ODR5YXFEaUxtbSJ9.swf92_LyCvjsvkynHTuMNXRl_MX2keU-fMDWIMezHG4.LOp9SVIXzs4yTnOtMyXZYQ.HUlXrzqJ1qXYl3vUA-ydezCg77WvJNtKdmZ3FPABoZw.8UYl1LOofQLAxHHvWqoTbg",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyQ0JDLUhTMzg0IiwidGFnIjoiWGRndHQ5dUVEMVlVeU1rVHl6M3lqZyIsImFsZyI6IkExMjhHQ01LVyIsIml2IjoiWF90V2RhSmh6X3J1SHJvQSJ9.JQ3dS1JSgzIFi5M9ig63FoFU1nHBTmPwXY_ovNE2m1JOSUvHtalmihIuraPDloCf.e920JVryUIWt7zJJQM-www.8DUrl4LmsxIEhRr9RLTHG9tBTOcwXqEbQHAJd_qMHzE.wHinoqGUhL4O7lx125kponpwNtlp8VGJ",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwidGFnIjoicGgyaTdoY0FWNlh3ZkQta1RHYlVXdyIsImFsZyI6IkExMjhHQ01LVyIsIml2IjoiaG41Smk4Wm1rUmRrSUxWVSJ9._bQlJXl22dhsBgYPhkxUyinBNi871teGWbviOueWj2PqG9OPxIc9SDS8a27YLSVDMircd5Q1Df28--vcXIABQA.DssmhrAg6w_f2VDaPpxTbQ.OGclEmqrxwvZqAfn7EgXlIfXgr0wiGvEbZz3zADnqJs.YZeP0uKVEiDl8VyC-s20YN-RbdyGNsbdtoGDP3eMof8",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4R0NNIiwiYWxnIjoiQTEyOEtXIn0.TEMcXEoY8WyqGjYs5GZgS-M_Niwu6wDY.i-26KtTt51Td6Iwd.wvhkagvPsLj3QxhPBbfH_th8OqxisUtme2UadQ.vlfvBPv3bw2Zk2H60JVNLQ",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyR0NNIiwiYWxnIjoiQTEyOEtXIn0.gPaR6mgQ9TUx05V6DRfgTQeZxl0ZSzBa5uQd-qw6yLs.MojplOD77FkMooS-.2yuD7dKR_C3sFbhgwiBccKKOF8DrSvNiwX7wPQ.qDKUbSvMnJv0qifjpWC14g",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2R0NNIiwiYWxnIjoiQTEyOEtXIn0.Fg-dgSkUW1KEaL5YDPoWHNL8fpX1WxWVLA9OOWsjIFhQVDKyUZI7BQ.mjRBpyJTZf7H-quf.YlNHezMadtaSKp23G-ozmYhHOeHwuJnvWGTtGg.YagnR7awBItUlMDo4uklvg",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiQTEyOEtXIn0.x1vYzUE-E2XBWva9OPuwtqfQaf9rlJCIBAyAe6N2q2kWfJrkxGxFsQ.gAwe78dyODFaoP2IOityAA.Yh5YfovkWxGBNAs1sVhvXow_2izHHsBiYEc9JYD6kVg.mio1p3ncp2wLEaEaRa7P0w",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyQ0JDLUhTMzg0IiwiYWxnIjoiQTEyOEtXIn0.szGrdnmF7D5put2aRBvSSFfp0vRgkRGYaafijJIqAF6PWd1IxsysZRV8aQkQOW1cB6d0fXsTfYM.Ru25LVOOk4xhaK-cIZ0ThA.pF9Ok5zot7elVqXFW5YYHV8MuF9gVGzpQnG1XDs_g_w.-7la0uwcNPpteev185pMHZjbVDXlrec8",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwiYWxnIjoiQTEyOEtXIn0.cz-hRv0xR5CnOcnoRWNK8Q9poyVYzRCVTjfmEXQN6xPOZUkJ3zKNqb8Pir_FS0o2TVvxmIbuxeISeATTR2Ttx_YGCNgMkc93.SF5rEQT94lZR-UORcMKqGw.xphygoU7zE0ZggOczXCi_ytt-Evln8CL-7WLDlWcUHg.5h99r8xCCwP2PgDbZqzCJ13oFfB2vZWetD5qZjmmVho",
|
||||
},
|
||||
{
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4R0NNIiwidGFnIjoiVWR5WUVKdEJ5ZTA5dzdjclY0cXI1QSIsImFsZyI6IkExOTJHQ01LVyIsIml2IjoiZlBBV0QwUmdSbHlFdktQcCJ9.P1uTfTuH-imL-NJJMpuTRA.22yqZ1NIfx3KNPgc.hORWZaTSgni1FS-JT90vJly-cU37qTn-tWSqTg.gMN0ufXF92rSXupTtBNkhA",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyR0NNIiwidGFnIjoiOU9qX3B2LTJSNW5lZl9YbWVkUWltUSIsImFsZyI6IkExOTJHQ01LVyIsIml2IjoiY3BybGEwYUYzREVQNmFJTSJ9.6NVpAm_APiC7km2v-oNR8g23K9U_kf1-.jIg-p8tNwSvwxch0.1i-GPaxS4qR6Gy4tzeVtSdRFRSKQSMpmn-VhzA.qhFWPqtA6vVPl7OM3DThsA",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2R0NNIiwidGFnIjoiOVc3THg3MVhGQVJCb3NaLVZ5dXc4ZyIsImFsZyI6IkExOTJHQ01LVyIsIml2IjoiZ1N4ZE5heFdBSVBRR0tHYiJ9.3YjPz6dVQwAtCekvtXiHZrooOUlmCsMSvyfwmGwdrOA.hA_C0IDJmGaRzsB0.W4l7OPqpFxiVOZTGfAlRktquyRTo4cEOk9KurQ.l4bGxOkO_ql_jlPo3Oz3TQ",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwidGFnIjoiOHJYbWl2WXFWZjNfbHhhd2NUbHJoUSIsImFsZyI6IkExOTJHQ01LVyIsIml2IjoiVXBWeXprVTNKcjEwYXRqYyJ9.8qft-Q_xqUbo5j_aVrVNHchooeLttR4Kb6j01O8k98M.hXO-5IKBYCL9UdwBFVm0tg.EBM4lCZX_K6tfqYmfoDxVPHcf6cT--AegXTTjfSqsIw.Of8xUvEQSh3xgFT3uENnAg",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyQ0JDLUhTMzg0IiwidGFnIjoiVnItSnVaX0tqV2hSWWMzdzFwZ3cwdyIsImFsZyI6IkExOTJHQ01LVyIsIml2IjoiRGg2R3dISVBVS3ljZGNZeCJ9.YSEDjCnGWr_n9H94AvLoRnwm6bdU9w6-Q67k-QQRVcKRd6673pgH9zEF9A9Dt6o1.gcmVN4kxqBuMq6c7GrK3UQ.vWzJb0He6OY1lhYYjYS7CLh55REAAq1O7yNN-ND4R5Q.OD0B6nwyFaDr_92ysDOtlVnJaeoIqhGw",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwidGFnIjoieEtad1BGYURpQ3NqUnBqZUprZHhmZyIsImFsZyI6IkExOTJHQ01LVyIsIml2IjoieTVHRFdteXdkb2R1SDJlYyJ9.AW0gbhWqlptOQ1y9aoNVwrTIIkBfrp33C2OWJsbrDRk6lhxg_IgFhMDTE37moReySGUtttC4CXQD_7etHmd3Hw.OvKXK-aRKlXHOpJQ9ZY_YQ.Ngv7WarDDvR2uBj_DavPAR3DYuIaygvSSdcHrc8-ZqM.MJ6ElitzFCKf_0h5fIJw8uOLC6ps7dKZPozF8juQmUY",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4R0NNIiwiYWxnIjoiQTE5MktXIn0.8qu63pppcSvp1vv37WrZ44qcCTg7dQMA.cDp-f8dJTrDEpZW4.H6OBJYs4UvFR_IZHLYQZxB6u9a0wOdAif2LNfQ.1dB-id0UIwRSlmwHx5BJCg",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyR0NNIiwiYWxnIjoiQTE5MktXIn0._FdoKQvC8qUs7K0upriEihUwztK8gOwonXpOxdIwrfs.UO38ok8gDdpLVa1T.x1GvHdVCy4fxoQRg-OQK4Ez3jDOvu9gllLPeEA.3dLeZGIprh_nHizOTVi1xw",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2R0NNIiwiYWxnIjoiQTE5MktXIn0.uzCJskgSIK6VkjJIu-dQi18biqaY0INc_A1Ehx0oESafgtR99_n4IA.W2eKK8Y14WwTowI_.J2cJC7R6Bz6maR0s1UBMPyRi5BebNUAmof4pvw.-7w6htAlc4iUsOJ6I04rFg",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiQTE5MktXIn0.gImQeQETp_6dfJypFDPLlv7c5pCzuq86U16gzrLiCXth6X9XfxJpvQ.YlC4MxjtLWrsyEvlFhvsqw.Vlpvmg9F3gkz4e1xG01Yl2RXx-jG99rF5UvCxOBXSLc.RZUrU_FoR5bG3M-j3GY0Dw",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyQ0JDLUhTMzg0IiwiYWxnIjoiQTE5MktXIn0.T2EfQ6Tu2wJyRMgZzfvBYmQNCCfdMudMrg86ibEMVAOUKJPtR3WMPEb_Syy9p2VjrLKRlv7nebo.GPc8VbarPPRtzIRATB8NsA.ugPCqLvVLwh55bWlwjsFkmWzJ31z5z-wuih2oJqmG_U.m7FY3EjvV6mKosEYJ5cY7ezFoVQoJS8X",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwiYWxnIjoiQTE5MktXIn0.OgLMhZ-2ZhslQyHfzOfyC-qmT6bNg9AdpP59B4jtyxWkQu3eW475WCdiAjojjeyBtVRGQ5vOomwaOIFejY_IekzH6I_taii3.U9x44MF6Wyz5TIwIzwhoxQ.vK7yvSF2beKdNxNY_7n4XdF7JluCGZoxdFJyTJVkSmI.bXRlI8KL-g7gpprQxGmXjVYjYghhWJq7mlCfWI8q2uA",
|
||||
},
|
||||
{
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4R0NNIiwidGFnIjoiR3BjX3pfbjduZjJVZlEtWGdsaTBaQSIsImFsZyI6IkEyNTZHQ01LVyIsIml2IjoiUk40eUdhOVlvYlFhUmZ1TCJ9.Q4ukD6_hZpmASAVcqWJ9Wg.Zfhny_1WNdlp4fH-.3sekDCjkExQCcv28ZW4yrcFnz0vma3vgoenSXA.g8_Ird2Y0itTCDP61du-Yg",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyR0NNIiwidGFnIjoiWC05UkNVWVh4U3NRelcwelVJS01VUSIsImFsZyI6IkEyNTZHQ01LVyIsIml2IjoiY3JNMnJfa3RrdWpyQ1h5OSJ9.c0q2jCxxV4y1h9u_Xvn7FqUDnbkmNEG4.S_noOTZKuUo9z1l6.ez0RdA25vXMUGH96iXmj3DEVox0J7TasJMnzgg.RbuSPTte_NzTtEEokbc5Ig",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2R0NNIiwidGFnIjoiWmwyaDFpUW11QWZWd2lJeVp5RHloZyIsImFsZyI6IkEyNTZHQ01LVyIsIml2Ijoib19xZmljb0N0NzNzRWo1QyJ9.NpJxRJ0aqcpekD6HU2u9e6_pL_11JXjWvjfeQnAKkZU.4c5qBcBBrMWi27Lf.NKwNIb4b6cRDJ1TwMKsPrjs7ADn6aNoBdQClVw.yNWmSSRBqQfIQObzj8zDqw",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwidGFnIjoiMXdwVEI3LWhjdzZUVXhCbVh2UzdhUSIsImFsZyI6IkEyNTZHQ01LVyIsIml2IjoiOUdIVnZJaDZ0a09vX2pHUSJ9.MFgIhp9mzlq9hoPqqKVKHJ3HL79EBYtV4iNhD63yqiU.UzW5iq8ou21VpZYJgKEN8A.1gOEzA4uAPvHP76GMfs9uLloAV10mKaxiZVAeL7iQA0.i1X_2i0bCAz-soXF9bI_zw",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyQ0JDLUhTMzg0IiwidGFnIjoiNThocUtsSk15Y1BFUEFRUlNfSzlNUSIsImFsZyI6IkEyNTZHQ01LVyIsIml2IjoiUDh3aTBWMTluVnZqNXpkOSJ9.FXidOWHNFJODO74Thq3J2cC-Z2B8UZkn7SikeosU0bUK6Jx_lzzmUZ-Lafadpdpj.iLfcDbpuBKFiSfiBzUQc7Q.VZK-aD7BFspqfvbwa0wE2wwWxdomzk2IKMetFe8bI44.7wC6rJRGa4x48xbYMd6NH9VzK8uNn4Cb",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwidGFnIjoicGcwOEpUcXdzMXdEaXBaRUlpVExoQSIsImFsZyI6IkEyNTZHQ01LVyIsIml2IjoiSlpodk9CdU1RUDFFZTZTNSJ9.wqVgTPm6TcYCTkpbwmn9sW4mgJROH2A3dIdSXo5oKIQUIVbQsmy7KXH8UYO2RS9slMGtb869C8o0My67GKg9dQ.ogrRiLlqjB1S5j-7a05OwA.2Y_LyqhU4S_RXMsB74bxcBacd23J2Sp5Lblw-sOkaUY.XGMiYoU-f3GaEzSvG41vpJP2DMGbeDFoWmkUGLUjc4M",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4R0NNIiwiYWxnIjoiQTI1NktXIn0.QiIZm9NYfahqYFIbiaoUhCCHjotHMkup.EsU0XLn4FjzzCILn.WuCoQkm9vzo95E7hxBtfYpt-Mooc_vmSTyzj6Q.NbeeYVy6gQPlmhoWDrZwaQ",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyR0NNIiwiYWxnIjoiQTI1NktXIn0.1ol3j_Lt0Os3UMe2Gypj0o8b77k0FSmqD7kNRNoMa9U.vZ2HMTgN2dgUd42h.JvNcy8-c8sYzOC089VtFSg2BOQx3YF8CqSTuJw.t03LRioWWKN3d7SjinU6SQ",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2R0NNIiwiYWxnIjoiQTI1NktXIn0.gbkk03l1gyrE9qGEMVtORiyyUqKsgzbqjLd8lw0RQ07WWn--TV4BgA.J8ThH4ac2UhSsMIP.g-W1piEGrdi3tNwQDJXpYm3fQjTf82mtVCrCOg.-vY05P4kiB9FgF2vwrSeXQ",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiQTI1NktXIn0.k86pQs7gmQIzuIWRFwesF32XY2xi1WbYxi7XUf_CYlOlehwGCTINHg.3NcC9VzfQgsECISKf4xy-g.v2amdo-rgeGsg-II_tvPukX9D-KAP27xxf2uQJ277Ws.E4LIE3fte3glAnPpnd8D9Q",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMTkyQ0JDLUhTMzg0IiwiYWxnIjoiQTI1NktXIn0.b8iN0Am3fCUvj7sBd7Z0lpfzBjh1MOgojV7J5rDfrcTU3b35RGYgEV1RdcrtUTBgUwITDjmU7jM.wsSDBFghDga_ERv36I2AOg.6uJsucCb2YReFOJGBdo4zidTIKLUmZBIXfm_M0AJpKk.YwdAfXI3HHcw2wLSnfCRtw4huZQtSKhz",
|
||||
"eyJ6aXAiOiJERUYiLCJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwiYWxnIjoiQTI1NktXIn0.akY9pHCbkHPh5VpXIrX0At41XnJIKBR9iMMkf301vKeJNAZYJTxWzeJhFd-DhQ47tMctc3YYkwZkQ5I_9fGYb_f0oBcw4esh.JNwuuHud78h6S99NO1oBQQ.0RwckPYATBgvw67upkAQ1AezETHc-gh3rryz19i5ryc.3XClRTScgzfMgLCHxHHoRF8mm9VVGXv_Ahtx65PskKQ",
|
||||
},
|
||||
}
|
||||
|
||||
for i, msgs := range aesSampleMessages {
|
||||
for _, msg := range msgs {
|
||||
obj, err := ParseEncrypted(msg)
|
||||
if err != nil {
|
||||
t.Error("unable to parse message", msg, err)
|
||||
continue
|
||||
}
|
||||
plaintext, err := obj.Decrypt(aesTestKeys[i])
|
||||
if err != nil {
|
||||
t.Error("unable to decrypt message", msg, err)
|
||||
continue
|
||||
}
|
||||
if string(plaintext) != "Lorem ipsum dolor sit amet" {
|
||||
t.Error("plaintext is not what we expected for msg", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test vectors generated with jose4j
|
||||
func TestSampleJose4jJWEMessagesECDH(t *testing.T) {
|
||||
ecTestKey := &ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
Curve: elliptic.P256(),
|
||||
X: fromBase64Int("weNJy2HscCSM6AEDTDg04biOvhFhyyWvOHQfeF_PxMQ"),
|
||||
Y: fromBase64Int("e8lnCO-AlStT-NJVX-crhB7QRYhiix03illJOVAOyck"),
|
||||
},
|
||||
D: fromBase64Int("VEmDZpDXXK8p8N0Cndsxs924q6nS1RXFASRl6BfUqdw"),
|
||||
}
|
||||
|
||||
ecSampleMessages := []string{
|
||||
"eyJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTEyOENCQy1IUzI1NiIsImVwayI6eyJrdHkiOiJFQyIsIngiOiJTQzAtRnJHUkVvVkpKSmg1TGhORmZqZnFXMC1XSUFyd3RZMzJzQmFQVVh3IiwieSI6ImFQMWlPRENveU9laTVyS1l2VENMNlRMZFN5UEdUN0djMnFsRnBwNXdiWFEiLCJjcnYiOiJQLTI1NiJ9fQ..3mifklTnTTGuA_etSUBBCw.dj8KFM8OlrQ3rT35nHcHZ7A5p84VB2OZb054ghSjS-M.KOIgnJjz87LGqMtikXGxXw",
|
||||
"eyJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTE5MkNCQy1IUzM4NCIsImVwayI6eyJrdHkiOiJFQyIsIngiOiJUaHRGc0lRZ1E5MkZOYWFMbUFDQURLbE93dmNGVlRORHc4ampfWlJidUxjIiwieSI6IjJmRDZ3UXc3YmpYTm1nVThXMGpFbnl5ZUZkX3Y4ZmpDa3l1R29vTFhGM0EiLCJjcnYiOiJQLTI1NiJ9fQ..90zFayMkKc-fQC_19f6P3A.P1Y_7lMnfkUQOXW_en31lKZ3zAn1nEYn6fXLjmyVPrQ.hrgwy1cePVfhMWT0h-crKTXldglHZ-4g",
|
||||
"eyJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTI1NkNCQy1IUzUxMiIsImVwayI6eyJrdHkiOiJFQyIsIngiOiI5R1Z6c3VKNWgySl96UURVUFR3WU5zUkFzVzZfY2RzN0pELVQ2RDREQ1ZVIiwieSI6InFZVGl1dVU4aTB1WFpoaS14VGlRNlZJQm5vanFoWENPVnpmWm1pR2lRTEUiLCJjcnYiOiJQLTI1NiJ9fQ..v2reRlDkIsw3eWEsTCc1NA.0qakrFdbhtBCTSl7EREf9sxgHBP9I-Xw29OTJYnrqP8.54ozViEBYYmRkcKp7d2Ztt4hzjQ9Vb5zCeijN_RQrcI",
|
||||
"eyJhbGciOiJFQ0RILUVTK0EyNTZLVyIsImVuYyI6IkExMjhDQkMtSFMyNTYiLCJlcGsiOnsia3R5IjoiRUMiLCJ4IjoiOElUemg3VVFaaUthTWtfME9qX1hFaHZENXpUWjE2Ti13WVdjeTJYUC1tdyIsInkiOiJPNUJiVEk0bUFpU005ZmpCejBRU3pXaU5vbnl3cWlQLUN0RGgwdnNGYXNRIiwiY3J2IjoiUC0yNTYifX0.D3DP3wqPvJv4TYYfhnfrOG6nsM-MMH_CqGfnOGjgdXHNF7xRwEJBOA.WL9Kz3gNYA7S5Rs5mKcXmA.EmQkXhO_nFqAwxJWaM0DH4s3pmCscZovB8YWJ3Ru4N8.Bf88uzwfxiyTjpejU5B0Ng",
|
||||
"eyJhbGciOiJFQ0RILUVTK0EyNTZLVyIsImVuYyI6IkExOTJDQkMtSFMzODQiLCJlcGsiOnsia3R5IjoiRUMiLCJ4IjoiMjlJMk4zRkF0UlBlNGhzYjRLWlhTbmVyV0wyTVhtSUN1LXJJaXhNSHpJQSIsInkiOiJvMjY1bzFReEdmbDhzMHQ0U1JROS00RGNpc3otbXh4NlJ6WVF4SktyeWpJIiwiY3J2IjoiUC0yNTYifX0.DRmsmXz6fCnLc_njDIKdpM7Oc4jTqd_yd9J94TOUksAstEUkAl9Ie3Wg-Ji_LzbdX2xRLXIimcw.FwJOHPQhnqKJCfxt1_qRnQ.ssx3q1ZYILsMTln5q-K8HVn93BVPI5ViusstKMxZzRs.zzcfzWNYSdNDdQ4CiHfymj0bePaAbVaT",
|
||||
"eyJhbGciOiJFQ0RILUVTK0EyNTZLVyIsImVuYyI6IkEyNTZDQkMtSFM1MTIiLCJlcGsiOnsia3R5IjoiRUMiLCJ4IjoiRUp6bTViQnRzVXJNYTl2Y1Q2d1hZRXI3ZjNMcjB0N1V4SDZuZzdGcFF0VSIsInkiOiJRYTNDSDllVTFXYjItdFdVSDN3Sk9fTDVMZXRsRUlMQWNkNE9XR2tFd0hZIiwiY3J2IjoiUC0yNTYifX0.5WxwluZpVWAOJdVrsnDIlEc4_wfRE1gXOaQyx_rKkElNz157Ykf-JsAD7aEvXfx--NKF4js5zYyjeCtxWBhRWPOoNNZJlqV_.Iuo82-qsP2S1SgQQklAnrw.H4wB6XoLKOKWCu6Y3LPAEuHkvyvr-xAh4IBm53uRF8g._fOLKq0bqDZ8KNjni_MJ4olHNaYz376dV9eNmp9O9PU",
|
||||
"eyJhbGciOiJFQ0RILUVTK0ExOTJLVyIsImVuYyI6IkExMjhDQkMtSFMyNTYiLCJlcGsiOnsia3R5IjoiRUMiLCJ4IjoiZktNSG5sRkoxajBTSnJ3WGtVWlpaX3BtWHdUQlJtcHhlaTkxdUpaczUycyIsInkiOiJLRkxKaXhEUTJQcjEybWp1aFdYb3pna2U1V3lhWnhmTWlxZkJ0OEJpbkRvIiwiY3J2IjoiUC0yNTYifX0.2LSD2Mw4tyYJyfsmpVmzBtJRd12jMEYGdlhFbaXIbKi5A33CGNQ1tg.s40aAjmZOvK8Us86FCBdHg.jpYSMAKp___oMCoWM495mTfbi_YC80ObeoCmGE3H_gs.A6V-jJJRY1yz24CaXGUbzg",
|
||||
"eyJhbGciOiJFQ0RILUVTK0ExOTJLVyIsImVuYyI6IkExOTJDQkMtSFMzODQiLCJlcGsiOnsia3R5IjoiRUMiLCJ4IjoiSDRxcFUzeWtuRktWRnV4SmxLa3NZSE5ieHF3aXM0WWtCVVFHVE1Td05JQSIsInkiOiJHb0lpRUZaUGRRSHJCbVR4ZTA3akJoZmxrdWNqUjVoX1QwNWVXc3Zib0prIiwiY3J2IjoiUC0yNTYifX0.KTrwwV2uzD--gf3PGG-kjEAGgi7u0eMqZPZfa4kpyFGm3x8t2m1NHdz3t9rfiqjuaqsxPKhF4gs.cu16fEOzYaSxhHu_Ht9w4g.BRJdxVBI9spVtY5KQ6gTR4CNcKvmLUMKZap0AO-RF2I.DZyUaa2p6YCIaYtjWOjC9GN_VIYgySlZ",
|
||||
"eyJhbGciOiJFQ0RILUVTK0ExOTJLVyIsImVuYyI6IkEyNTZDQkMtSFM1MTIiLCJlcGsiOnsia3R5IjoiRUMiLCJ4IjoieDBYSGRkSGM2Q0ktSnlfbUVMOEZZRExhWnV0UkVFczR4c3BMQmcwZk1jbyIsInkiOiJEa0xzOUJGTlBkTTVTNkpLYVJ3cnV1TWMwcUFzWW9yNW9fZWp6NXBNVXFrIiwiY3J2IjoiUC0yNTYifX0.mfCxJ7JYIqTMqcAh5Vp2USF0eF7OhOeluqda7YagOUJNwxA9wC9o23DSoLUylfrZUfanZrJJJcG69awlv-LY7anOLHlp3Ht5.ec48A_JWb4qa_PVHWZaTfQ.kDAjIDb3LzJpfxNh-DiAmAuaKMYaOGSTb0rkiJLuVeY.oxGCpPlii4pr89XMk4b9s084LucTqPGU6TLbOW2MZoc",
|
||||
"eyJhbGciOiJFQ0RILUVTK0ExMjhLVyIsImVuYyI6IkExMjhDQkMtSFMyNTYiLCJlcGsiOnsia3R5IjoiRUMiLCJ4IjoiQXB5TnlqU2d0bmRUcFg0eENYenNDRnZva1l3X18weXg2dGRUYzdPUUhIMCIsInkiOiJYUHdHMDVDaW1vOGlhWmxZbDNsMEp3ZllhY1FZWHFuM2RRZEJUWFpldDZBIiwiY3J2IjoiUC0yNTYifX0.yTA2PwK9IPqkaGPenZ9R-gOn9m9rvcSEfuX_Nm8AkuwHIYLzzYeAEA.ZW1F1iyHYKfo-YoanNaIVg.PouKQD94DlPA5lbpfGJXY-EJhidC7l4vSayVN2vVzvA.MexquqtGaXKUvX7WBmD4bA",
|
||||
"eyJhbGciOiJFQ0RILUVTK0ExMjhLVyIsImVuYyI6IkExOTJDQkMtSFMzODQiLCJlcGsiOnsia3R5IjoiRUMiLCJ4IjoiaDRWeGNzNVUzWk1fTlp4WmJxQ3hMTVB5UmEtR2ktSVNZa0xDTzE1RHJkZyIsInkiOiJFeVotS3dWNVE5OXlnWk5zU0lpSldpR3hqbXNLUk1WVE5sTTNSd1VYTFRvIiwiY3J2IjoiUC0yNTYifX0.wo56VISyL1QAbi2HLuVut5NGF2FvxKt7B8zHzJ3FpmavPozfbVZV08-GSYQ6jLQWJ4xsO80I4Kg.3_9Bo5ozvD96WHGhqp_tfQ.48UkJ6jk6WK70QItb2QZr0edKH7O-aMuVahTEeqyfW4.ulMlY2tbC341ct20YSmNdtc84FRz1I4g",
|
||||
"eyJhbGciOiJFQ0RILUVTK0ExMjhLVyIsImVuYyI6IkEyNTZDQkMtSFM1MTIiLCJlcGsiOnsia3R5IjoiRUMiLCJ4IjoiN0xZRzZZWTJkel9ZaGNvNnRCcG1IX0tPREQ2X2hwX05tajdEc1c2RXgxcyIsInkiOiI5Y2lPeDcwUkdGT0tpVnBRX0NHQXB5NVlyeThDazBmUkpwNHVrQ2tjNmQ0IiwiY3J2IjoiUC0yNTYifX0.bWwW3J80k46HG1fQAZxUroko2OO8OKkeRavr_o3AnhJDMvp78OR229x-fZUaBm4uWv27_Yjm0X9T2H2lhlIli2Rl9v1PNC77.1NmsJBDGI1fDjRzyc4mtyA.9KfCFynQj7LmJq08qxAG4c-6ZPz1Lh3h3nUbgVwB0TI.cqech0d8XHzWfkWqgKZq1SlAfmO0PUwOsNVkuByVGWk",
|
||||
}
|
||||
|
||||
for _, msg := range ecSampleMessages {
|
||||
obj, err := ParseEncrypted(msg)
|
||||
if err != nil {
|
||||
t.Error("unable to parse message", msg, err)
|
||||
continue
|
||||
}
|
||||
plaintext, err := obj.Decrypt(ecTestKey)
|
||||
if err != nil {
|
||||
t.Error("unable to decrypt message", msg, err)
|
||||
continue
|
||||
}
|
||||
if string(plaintext) != "Lorem ipsum dolor sit amet." {
|
||||
t.Error("plaintext is not what we expected for msg", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestSampleAESCBCHMACMessagesFromNodeJose(t *testing.T) {
|
||||
samples := []struct {
|
||||
key []byte
|
||||
ciphertext string
|
||||
}{
|
||||
// A256CBC
|
||||
{
|
||||
fromBase64URLBytes("5SeJepAQ8Hmza4bM_wAQjvW0cFbPo_0TBc-sPblNBKs5SeJepAQ8Hmza4bM_wAQjvW0cFbPo_0TBc-sPblNBKs"),
|
||||
`{"protected":"eyJjdHkiOiJKV1QiLCJhbGciOiJkaXIiLCJlbmMiOiJBMjU2Q0JDLUhTNTEyIiwia2lkIjoidGVzdDUxMiJ9","iv":"9Gloee8teNSSjXG_ifPRnA","ciphertext":"x12B46biQ6axL4ee5mHwpA","tag":"ec7qQ4c8Gyx57BelJoULIW6GRW7Bccm44d0iEfU4yIw"}`,
|
||||
},
|
||||
// A192CBC
|
||||
{
|
||||
fromBase64URLBytes("b0ha2QPh_1D-wxtRK3jzzg7MEuD1g91zb0ha2QPh_1D-wxtRK3jzzg7MEuD1g91z"),
|
||||
`{"protected":"eyJjdHkiOiJKV1QiLCJhbGciOiJkaXIiLCJlbmMiOiJBMTkyQ0JDLUhTMzg0Iiwia2lkIjoidGVzdDM4NCJ9","iv":"zocDwPrIX4PZ7ObdoP3m7A","ciphertext":"kLTpZfiX7Qv3r2TaZzUCFg","tag":"Hxk9xu72WHBos5JWpShFmasiNbVqBQqi"}`,
|
||||
},
|
||||
// A128CBC
|
||||
{
|
||||
fromBase64URLBytes("5SeJepAQ8Hmza4bM_wAQjvW0cFbPo_0TBc-sPblNBKs"),
|
||||
`{"protected":"eyJjdHkiOiJKV1QiLCJhbGciOiJkaXIiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2Iiwia2lkIjoidGVzdDI1NiJ9","iv":"oDjxSLU4joRZR9YybE83RQ","ciphertext":"y93rgZp209nvmbX8hvN0Zg","tag":"NrivXSKPqzkmcpQTThXpzQ"}`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, sample := range samples {
|
||||
obj, err := ParseEncrypted(sample.ciphertext)
|
||||
if err != nil {
|
||||
t.Error("unable to parse message", sample.ciphertext, err)
|
||||
continue
|
||||
}
|
||||
plaintext, err := obj.Decrypt(sample.key)
|
||||
if err != nil {
|
||||
t.Error("unable to decrypt message", sample.ciphertext, err)
|
||||
continue
|
||||
}
|
||||
if string(plaintext) != "Hello World" {
|
||||
t.Error("plaintext is not what we expected for msg", sample.ciphertext, string(plaintext))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestTamperedJWE(t *testing.T) {
|
||||
key := []byte("1234567890123456")
|
||||
|
||||
encrypter, _ := NewEncrypter(A128GCM,
|
||||
Recipient{Algorithm: DIRECT, Key: key}, nil)
|
||||
|
||||
var plaintext = []byte("Lorem ipsum dolor sit amet")
|
||||
object, _ := encrypter.Encrypt(plaintext)
|
||||
|
||||
serialized := object.FullSerialize()
|
||||
|
||||
// Inject a longer iv
|
||||
serialized = regexp.MustCompile(`"iv":"[^"]+"`).
|
||||
ReplaceAllString(serialized, `"iv":"UotNnfiavtNOOSZAcfI03i"`)
|
||||
|
||||
object, _ = ParseEncrypted(serialized)
|
||||
|
||||
_, err := object.Decrypt(key)
|
||||
if err == nil {
|
||||
t.Error("Decrypt() on invalid object should fail")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWEWithNullAlg(t *testing.T) {
|
||||
// {"alg":null,"enc":"A128GCM"}
|
||||
serialized := `{"protected":"eyJhbGciOm51bGwsImVuYyI6IkExMjhHQ00ifQ"}`
|
||||
ParseEncrypted(serialized)
|
||||
}
|
35 vendor/gopkg.in/square/go-jose.v2/jwk-keygen/README.md (generated, vendored, Normal file)
@@ -0,0 +1,35 @@
# JWK Key Generator

The `jwk-keygen` command-line utility generates keypairs for asymmetric
encryption and signing algorithms in JSON Web Key (JWK) format.

## Usage

The utility requires both the desired algorithm (`alg`) and the key usage
(`use`) to be specified, as a reminder that the same keypair should never be
used for both encryption and signing.

Algorithms are selected via the `--alg` flag, which influences the `alg` header.
For JWE (`--use=enc`), `--alg` specifies the key management algorithm (e.g.
`RSA-OAEP`). For JWS (`--use=sig`), `--alg` specifies the signature algorithm
(e.g. `PS256`).

The output file name is determined by the specified usage, algorithm and Key ID,
e.g. `jwk-keygen --use=sig --alg=RS512 --kid=test` produces the files
`jwk_sig_RS512_test` and `jwk_sig_RS512_test.pub`. Keys are written to stdout
when no Key ID is specified, neither a pre-defined nor a random one.

## Examples

### RSA 2048

Generate an RSA/2048 key for encryption and write it to stdout.

    jwk-keygen --use enc --alg RSA-OAEP

### Custom key length

Generate an RSA/4096 key for signing and store it to files.

    jwk-keygen --use sig --alg RS256 --bits 4096 --kid test
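### ECDSA P-256 for signing

An additional illustrative example (not in the upstream README) that only uses flags and algorithm names defined in `main.go` below. Generate a P-256 ECDSA signing key with a fixed Key ID; following the naming scheme described above, this should produce `jwk_sig_ES256_test-ec` and `jwk_sig_ES256_test-ec.pub`.

    jwk-keygen --use sig --alg ES256 --kid test-ec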
200 vendor/gopkg.in/square/go-jose.v2/jwk-keygen/main.go (generated, vendored, Normal file)
@@ -0,0 +1,200 @@
/*-
|
||||
* Copyright 2017 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto"
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"encoding/base32"
|
||||
"errors"
|
||||
"fmt"
|
||||
"golang.org/x/crypto/ed25519"
|
||||
"io"
|
||||
"os"
|
||||
|
||||
"gopkg.in/alecthomas/kingpin.v2"
|
||||
"gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
var (
|
||||
app = kingpin.New("jwk-keygen", "A command-line utility to generate public/pirvate keypairs in JWK format.")
|
||||
|
||||
use = app.Flag("use", "Desrired key use").Required().Enum("enc", "sig")
|
||||
alg = app.Flag("alg", "Generate key to be used for ALG").Required().Enum(
|
||||
// `sig`
|
||||
string(jose.ES256), string(jose.ES384), string(jose.ES512), string(jose.EdDSA),
|
||||
string(jose.RS256), string(jose.RS384), string(jose.RS512), string(jose.PS256), string(jose.PS384), string(jose.PS512),
|
||||
// `enc`
|
||||
string(jose.RSA1_5), string(jose.RSA_OAEP), string(jose.RSA_OAEP_256),
|
||||
string(jose.ECDH_ES), string(jose.ECDH_ES_A128KW), string(jose.ECDH_ES_A192KW), string(jose.ECDH_ES_A256KW),
|
||||
)
|
||||
bits = app.Flag("bits", "Key size in bits").Int()
|
||||
kid = app.Flag("kid", "Key ID").String()
|
||||
kidRand = app.Flag("kid-rand", "Generate random Key ID").Bool()
|
||||
)
|
||||
|
||||
// KeygenSig generates keypair for corresponding SignatureAlgorithm.
|
||||
func KeygenSig(alg jose.SignatureAlgorithm, bits int) (crypto.PublicKey, crypto.PrivateKey, error) {
|
||||
switch alg {
|
||||
case jose.ES256, jose.ES384, jose.ES512, jose.EdDSA:
|
||||
keylen := map[jose.SignatureAlgorithm]int{
|
||||
jose.ES256: 256,
|
||||
jose.ES384: 384,
|
||||
jose.ES512: 521, // sic!
|
||||
jose.EdDSA: 256,
|
||||
}
|
||||
if bits != 0 && bits != keylen[alg] {
|
||||
return nil, nil, errors.New("this `alg` does not support arbitrary key length")
|
||||
}
|
||||
case jose.RS256, jose.RS384, jose.RS512, jose.PS256, jose.PS384, jose.PS512:
|
||||
if bits == 0 {
|
||||
bits = 2048
|
||||
}
|
||||
if bits < 2048 {
|
||||
return nil, nil, errors.New("too short key for RSA `alg`, 2048+ is required")
|
||||
}
|
||||
}
|
||||
switch alg {
|
||||
case jose.ES256:
|
||||
// The cryptographic operations are implemented using constant-time algorithms.
|
||||
key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
|
||||
return key.Public(), key, err
|
||||
case jose.ES384:
|
||||
// NB: The cryptographic operations do not use constant-time algorithms.
|
||||
key, err := ecdsa.GenerateKey(elliptic.P384(), rand.Reader)
|
||||
return key.Public(), key, err
|
||||
case jose.ES512:
|
||||
// NB: The cryptographic operations do not use constant-time algorithms.
|
||||
key, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader)
|
||||
return key.Public(), key, err
|
||||
case jose.EdDSA:
|
||||
pub, key, err := ed25519.GenerateKey(rand.Reader)
|
||||
return pub, key, err
|
||||
case jose.RS256, jose.RS384, jose.RS512, jose.PS256, jose.PS384, jose.PS512:
|
||||
key, err := rsa.GenerateKey(rand.Reader, bits)
|
||||
return key.Public(), key, err
|
||||
default:
|
||||
return nil, nil, errors.New("unknown `alg` for `use` = `sig`")
|
||||
}
|
||||
}
|
||||
|
||||
// KeygenEnc generates keypair for corresponding KeyAlgorithm.
|
||||
func KeygenEnc(alg jose.KeyAlgorithm, bits int) (crypto.PublicKey, crypto.PrivateKey, error) {
|
||||
switch alg {
|
||||
case jose.RSA1_5, jose.RSA_OAEP, jose.RSA_OAEP_256:
|
||||
if bits == 0 {
|
||||
bits = 2048
|
||||
}
|
||||
if bits < 2048 {
|
||||
return nil, nil, errors.New("too short key for RSA `alg`, 2048+ is required")
|
||||
}
|
||||
key, err := rsa.GenerateKey(rand.Reader, bits)
|
||||
return key.Public(), key, err
|
||||
case jose.ECDH_ES, jose.ECDH_ES_A128KW, jose.ECDH_ES_A192KW, jose.ECDH_ES_A256KW:
|
||||
var crv elliptic.Curve
|
||||
switch bits {
|
||||
case 0, 256:
|
||||
crv = elliptic.P256()
|
||||
case 384:
|
||||
crv = elliptic.P384()
|
||||
case 521:
|
||||
crv = elliptic.P521()
|
||||
default:
|
||||
return nil, nil, errors.New("unknown elliptic curve bit length, use one of 256, 384, 521")
|
||||
}
|
||||
key, err := ecdsa.GenerateKey(crv, rand.Reader)
|
||||
return key.Public(), key, err
|
||||
default:
|
||||
return nil, nil, errors.New("unknown `alg` for `use` = `enc`")
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
app.Version("v2")
|
||||
kingpin.MustParse(app.Parse(os.Args[1:]))
|
||||
|
||||
if *kidRand {
|
||||
if *kid == "" {
|
||||
b := make([]byte, 5)
|
||||
_, err := rand.Read(b)
|
||||
app.FatalIfError(err, "can't Read() crypto/rand")
|
||||
*kid = base32.StdEncoding.EncodeToString(b)
|
||||
} else {
|
||||
app.FatalUsage("can't combine --kid and --kid-rand")
|
||||
}
|
||||
}
|
||||
|
||||
var privKey crypto.PrivateKey
var pubKey crypto.PublicKey
|
||||
var err error
|
||||
switch *use {
|
||||
case "sig":
|
||||
pubKey, privKey, err = KeygenSig(jose.SignatureAlgorithm(*alg), *bits)
|
||||
case "enc":
|
||||
pubKey, privKey, err = KeygenEnc(jose.KeyAlgorithm(*alg), *bits)
|
||||
}
|
||||
app.FatalIfError(err, "unable to generate key")
|
||||
|
||||
priv := jose.JSONWebKey{Key: privKey, KeyID: *kid, Algorithm: *alg, Use: *use}
|
||||
pub := jose.JSONWebKey{Key: pubKey, KeyID: *kid, Algorithm: *alg, Use: *use}
|
||||
|
||||
if priv.IsPublic() || !pub.IsPublic() || !priv.Valid() || !pub.Valid() {
|
||||
app.Fatalf("invalid keys were generated")
|
||||
}
|
||||
|
||||
privJS, err := priv.MarshalJSON()
|
||||
app.FatalIfError(err, "can't Marshal private key to JSON")
|
||||
pubJS, err := pub.MarshalJSON()
|
||||
app.FatalIfError(err, "can't Marshal public key to JSON")
|
||||
|
||||
if *kid == "" {
|
||||
fmt.Printf("==> jwk_%s.pub <==\n", *alg)
|
||||
fmt.Println(string(pubJS))
|
||||
fmt.Printf("==> jwk_%s <==\n", *alg)
|
||||
fmt.Println(string(privJS))
|
||||
} else {
|
||||
// JWK Thumbprint (RFC7638) is not used for key id because of
|
||||
// lack of canonical representation.
|
||||
fname := fmt.Sprintf("jwk_%s_%s_%s", *use, *alg, *kid)
|
||||
err = writeNewFile(fname+".pub", pubJS, 0444)
|
||||
app.FatalIfError(err, "can't write public key to file %s.pub", fname)
|
||||
fmt.Printf("Written public key to %s.pub\n", fname)
|
||||
err = writeNewFile(fname, privJS, 0400)
|
||||
app.FatalIfError(err, "cant' write private key to file %s", fname)
|
||||
fmt.Printf("Written private key to %s\n", fname)
|
||||
}
|
||||
}
|
||||
|
||||
// writeNewFile is a shameless copy-paste of ioutil.WriteFile with slightly
// different flags for OpenFile.
|
||||
func writeNewFile(filename string, data []byte, perm os.FileMode) error {
|
||||
f, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, perm)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
n, err := f.Write(data)
|
||||
if err == nil && n < len(data) {
|
||||
err = io.ErrShortWrite
|
||||
}
|
||||
if err1 := f.Close(); err == nil {
|
||||
err = err1
|
||||
}
|
||||
return err
|
||||
}
|
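Since the file above is `package main`, KeygenSig and KeygenEnc cannot be imported directly, but the same flow is available through the public go-jose API. A minimal sketch, assuming an RS256 signing key with the tool's default 2048-bit size and a hypothetical key ID `test`; only the file handling from writeNewFile is omitted:

    package main

    import (
        "crypto/rand"
        "crypto/rsa"
        "fmt"
        "log"

        jose "gopkg.in/square/go-jose.v2"
    )

    func main() {
        // Generate the raw key, as KeygenSig does for the RS*/PS* algorithms.
        key, err := rsa.GenerateKey(rand.Reader, 2048)
        if err != nil {
            log.Fatal(err)
        }

        // Wrap it in JWKs the same way main() above does before writing the files.
        priv := jose.JSONWebKey{Key: key, KeyID: "test", Algorithm: "RS256", Use: "sig"}
        pub := priv.Public()

        privJSON, err := priv.MarshalJSON()
        if err != nil {
            log.Fatal(err)
        }
        pubJSON, err := pub.MarshalJSON()
        if err != nil {
            log.Fatal(err)
        }

        fmt.Println(string(pubJSON))
        fmt.Println(string(privJSON))
    }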
608
vendor/gopkg.in/square/go-jose.v2/jwk.go
generated
vendored
Normal file
@@ -0,0 +1,608 @@
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"crypto"
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rsa"
|
||||
"crypto/x509"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/crypto/ed25519"
|
||||
|
||||
"gopkg.in/square/go-jose.v2/json"
|
||||
)
|
||||
|
||||
// rawJSONWebKey represents a public or private key in JWK format, used for parsing/serializing.
|
||||
type rawJSONWebKey struct {
|
||||
Use string `json:"use,omitempty"`
|
||||
Kty string `json:"kty,omitempty"`
|
||||
Kid string `json:"kid,omitempty"`
|
||||
Crv string `json:"crv,omitempty"`
|
||||
Alg string `json:"alg,omitempty"`
|
||||
K *byteBuffer `json:"k,omitempty"`
|
||||
X *byteBuffer `json:"x,omitempty"`
|
||||
Y *byteBuffer `json:"y,omitempty"`
|
||||
N *byteBuffer `json:"n,omitempty"`
|
||||
E *byteBuffer `json:"e,omitempty"`
|
||||
// -- Following fields are only used for private keys --
|
||||
// RSA uses D, P and Q, while ECDSA uses only D. Fields Dp, Dq, and Qi are
|
||||
// completely optional. Therefore for RSA/ECDSA, D != nil is a contract that
|
||||
// we have a private key whereas D == nil means we have only a public key.
|
||||
D *byteBuffer `json:"d,omitempty"`
|
||||
P *byteBuffer `json:"p,omitempty"`
|
||||
Q *byteBuffer `json:"q,omitempty"`
|
||||
Dp *byteBuffer `json:"dp,omitempty"`
|
||||
Dq *byteBuffer `json:"dq,omitempty"`
|
||||
Qi *byteBuffer `json:"qi,omitempty"`
|
||||
// Certificates
|
||||
X5c []string `json:"x5c,omitempty"`
|
||||
}
|
||||
|
||||
// JSONWebKey represents a public or private key in JWK format.
|
||||
type JSONWebKey struct {
|
||||
Key interface{}
|
||||
Certificates []*x509.Certificate
|
||||
KeyID string
|
||||
Algorithm string
|
||||
Use string
|
||||
}
|
||||
|
||||
// MarshalJSON serializes the given key to its JSON representation.
|
||||
func (k JSONWebKey) MarshalJSON() ([]byte, error) {
|
||||
var raw *rawJSONWebKey
|
||||
var err error
|
||||
|
||||
switch key := k.Key.(type) {
|
||||
case ed25519.PublicKey:
|
||||
raw = fromEdPublicKey(key)
|
||||
case *ecdsa.PublicKey:
|
||||
raw, err = fromEcPublicKey(key)
|
||||
case *rsa.PublicKey:
|
||||
raw = fromRsaPublicKey(key)
|
||||
case ed25519.PrivateKey:
|
||||
raw, err = fromEdPrivateKey(key)
|
||||
case *ecdsa.PrivateKey:
|
||||
raw, err = fromEcPrivateKey(key)
|
||||
case *rsa.PrivateKey:
|
||||
raw, err = fromRsaPrivateKey(key)
|
||||
case []byte:
|
||||
raw, err = fromSymmetricKey(key)
|
||||
default:
|
||||
return nil, fmt.Errorf("square/go-jose: unknown key type '%s'", reflect.TypeOf(key))
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
raw.Kid = k.KeyID
|
||||
raw.Alg = k.Algorithm
|
||||
raw.Use = k.Use
|
||||
|
||||
for _, cert := range k.Certificates {
|
||||
raw.X5c = append(raw.X5c, base64.StdEncoding.EncodeToString(cert.Raw))
|
||||
}
|
||||
|
||||
return json.Marshal(raw)
|
||||
}
|
||||
|
||||
// UnmarshalJSON reads a key from its JSON representation.
|
||||
func (k *JSONWebKey) UnmarshalJSON(data []byte) (err error) {
|
||||
var raw rawJSONWebKey
|
||||
err = json.Unmarshal(data, &raw)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var key interface{}
|
||||
switch raw.Kty {
|
||||
case "EC":
|
||||
if raw.D != nil {
|
||||
key, err = raw.ecPrivateKey()
|
||||
} else {
|
||||
key, err = raw.ecPublicKey()
|
||||
}
|
||||
case "RSA":
|
||||
if raw.D != nil {
|
||||
key, err = raw.rsaPrivateKey()
|
||||
} else {
|
||||
key, err = raw.rsaPublicKey()
|
||||
}
|
||||
case "oct":
|
||||
key, err = raw.symmetricKey()
|
||||
case "OKP":
|
||||
if raw.Crv == "Ed25519" && raw.X != nil {
|
||||
if raw.D != nil {
|
||||
key, err = raw.edPrivateKey()
|
||||
} else {
|
||||
key, err = raw.edPublicKey()
|
||||
}
|
||||
} else {
|
||||
err = fmt.Errorf("square/go-jose: unknown curve %s'", raw.Crv)
|
||||
}
|
||||
default:
|
||||
err = fmt.Errorf("square/go-jose: unknown json web key type '%s'", raw.Kty)
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
*k = JSONWebKey{Key: key, KeyID: raw.Kid, Algorithm: raw.Alg, Use: raw.Use}
|
||||
|
||||
k.Certificates, err = parseCertificateChain(raw.X5c)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to unmarshal x5c field: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// JSONWebKeySet represents a JWK Set object.
|
||||
type JSONWebKeySet struct {
|
||||
Keys []JSONWebKey `json:"keys"`
|
||||
}
|
||||
|
||||
// Key convenience method returns keys by key ID. Specification states
|
||||
// that a JWK Set "SHOULD" use distinct key IDs, but allows for some
|
||||
// cases where they are not distinct. Hence the method returns a slice
|
||||
// of JSONWebKeys.
|
||||
func (s *JSONWebKeySet) Key(kid string) []JSONWebKey {
|
||||
var keys []JSONWebKey
|
||||
for _, key := range s.Keys {
|
||||
if key.KeyID == kid {
|
||||
keys = append(keys, key)
|
||||
}
|
||||
}
|
||||
|
||||
return keys
|
||||
}
|
||||
|
||||
const rsaThumbprintTemplate = `{"e":"%s","kty":"RSA","n":"%s"}`
|
||||
const ecThumbprintTemplate = `{"crv":"%s","kty":"EC","x":"%s","y":"%s"}`
|
||||
const edThumbprintTemplate = `{"crv":"%s","kty":"OKP",x":"%s"}`
|
||||
|
||||
func ecThumbprintInput(curve elliptic.Curve, x, y *big.Int) (string, error) {
|
||||
coordLength := curveSize(curve)
|
||||
crv, err := curveName(curve)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if len(x.Bytes()) > coordLength || len(y.Bytes()) > coordLength {
|
||||
return "", errors.New("square/go-jose: invalid elliptic key (too large)")
|
||||
}
|
||||
|
||||
return fmt.Sprintf(ecThumbprintTemplate, crv,
|
||||
newFixedSizeBuffer(x.Bytes(), coordLength).base64(),
|
||||
newFixedSizeBuffer(y.Bytes(), coordLength).base64()), nil
|
||||
}
|
||||
|
||||
func rsaThumbprintInput(n *big.Int, e int) (string, error) {
|
||||
return fmt.Sprintf(rsaThumbprintTemplate,
|
||||
newBufferFromInt(uint64(e)).base64(),
|
||||
newBuffer(n.Bytes()).base64()), nil
|
||||
}
|
||||
|
||||
func edThumbprintInput(ed ed25519.PublicKey) (string, error) {
|
||||
crv := "Ed25519"
|
||||
if len(ed) > 32 {
|
||||
return "", errors.New("square/go-jose: invalid elliptic key (too large)")
|
||||
}
|
||||
return fmt.Sprintf(edThumbprintTemplate, crv,
|
||||
newFixedSizeBuffer(ed, 32).base64()), nil
|
||||
}
|
||||
|
||||
// Thumbprint computes the JWK Thumbprint of a key using the
|
||||
// indicated hash algorithm.
|
||||
func (k *JSONWebKey) Thumbprint(hash crypto.Hash) ([]byte, error) {
|
||||
var input string
|
||||
var err error
|
||||
switch key := k.Key.(type) {
|
||||
case ed25519.PublicKey:
|
||||
input, err = edThumbprintInput(key)
|
||||
case *ecdsa.PublicKey:
|
||||
input, err = ecThumbprintInput(key.Curve, key.X, key.Y)
|
||||
case *ecdsa.PrivateKey:
|
||||
input, err = ecThumbprintInput(key.Curve, key.X, key.Y)
|
||||
case *rsa.PublicKey:
|
||||
input, err = rsaThumbprintInput(key.N, key.E)
|
||||
case *rsa.PrivateKey:
|
||||
input, err = rsaThumbprintInput(key.N, key.E)
|
||||
case ed25519.PrivateKey:
|
||||
input, err = edThumbprintInput(ed25519.PublicKey(key[32:]))
|
||||
default:
|
||||
return nil, fmt.Errorf("square/go-jose: unknown key type '%s'", reflect.TypeOf(key))
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
h := hash.New()
|
||||
h.Write([]byte(input))
|
||||
return h.Sum(nil), nil
|
||||
}
|
||||
|
||||
// IsPublic returns true if the JWK represents a public key (not symmetric, not private).
|
||||
func (k *JSONWebKey) IsPublic() bool {
|
||||
switch k.Key.(type) {
|
||||
case *ecdsa.PublicKey, *rsa.PublicKey, ed25519.PublicKey:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Public creates a JSONWebKey with the corresponding public key if the JWK represents an asymmetric private key.
|
||||
func (k *JSONWebKey) Public() JSONWebKey {
|
||||
if k.IsPublic() {
|
||||
return *k
|
||||
}
|
||||
ret := *k
|
||||
switch key := k.Key.(type) {
|
||||
case *ecdsa.PrivateKey:
|
||||
ret.Key = key.Public()
|
||||
case *rsa.PrivateKey:
|
||||
ret.Key = key.Public()
|
||||
case ed25519.PrivateKey:
|
||||
ret.Key = key.Public()
|
||||
default:
|
||||
return JSONWebKey{} // returning invalid key
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
// Valid checks that the key contains the expected parameters.
|
||||
func (k *JSONWebKey) Valid() bool {
|
||||
if k.Key == nil {
|
||||
return false
|
||||
}
|
||||
switch key := k.Key.(type) {
|
||||
case *ecdsa.PublicKey:
|
||||
if key.Curve == nil || key.X == nil || key.Y == nil {
|
||||
return false
|
||||
}
|
||||
case *ecdsa.PrivateKey:
|
||||
if key.Curve == nil || key.X == nil || key.Y == nil || key.D == nil {
|
||||
return false
|
||||
}
|
||||
case *rsa.PublicKey:
|
||||
if key.N == nil || key.E == 0 {
|
||||
return false
|
||||
}
|
||||
case *rsa.PrivateKey:
|
||||
if key.N == nil || key.E == 0 || key.D == nil || len(key.Primes) < 2 {
|
||||
return false
|
||||
}
|
||||
case ed25519.PublicKey:
|
||||
if len(key) != 32 {
|
||||
return false
|
||||
}
|
||||
case ed25519.PrivateKey:
|
||||
if len(key) != 64 {
|
||||
return false
|
||||
}
|
||||
default:
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (key rawJSONWebKey) rsaPublicKey() (*rsa.PublicKey, error) {
|
||||
if key.N == nil || key.E == nil {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid RSA key, missing n/e values")
|
||||
}
|
||||
|
||||
return &rsa.PublicKey{
|
||||
N: key.N.bigInt(),
|
||||
E: key.E.toInt(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func fromEdPublicKey(pub ed25519.PublicKey) *rawJSONWebKey {
|
||||
return &rawJSONWebKey{
|
||||
Kty: "OKP",
|
||||
Crv: "Ed25519",
|
||||
X: newBuffer(pub),
|
||||
}
|
||||
}
|
||||
|
||||
func fromRsaPublicKey(pub *rsa.PublicKey) *rawJSONWebKey {
|
||||
return &rawJSONWebKey{
|
||||
Kty: "RSA",
|
||||
N: newBuffer(pub.N.Bytes()),
|
||||
E: newBufferFromInt(uint64(pub.E)),
|
||||
}
|
||||
}
|
||||
|
||||
func (key rawJSONWebKey) ecPublicKey() (*ecdsa.PublicKey, error) {
|
||||
var curve elliptic.Curve
|
||||
switch key.Crv {
|
||||
case "P-256":
|
||||
curve = elliptic.P256()
|
||||
case "P-384":
|
||||
curve = elliptic.P384()
|
||||
case "P-521":
|
||||
curve = elliptic.P521()
|
||||
default:
|
||||
return nil, fmt.Errorf("square/go-jose: unsupported elliptic curve '%s'", key.Crv)
|
||||
}
|
||||
|
||||
if key.X == nil || key.Y == nil {
|
||||
return nil, errors.New("square/go-jose: invalid EC key, missing x/y values")
|
||||
}
|
||||
|
||||
// The length of this octet string MUST be the full size of a coordinate for
|
||||
// the curve specified in the "crv" parameter.
|
||||
// https://tools.ietf.org/html/rfc7518#section-6.2.1.2
|
||||
if curveSize(curve) != len(key.X.data) {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid EC private key, wrong length for x")
|
||||
}
|
||||
|
||||
if curveSize(curve) != len(key.Y.data) {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid EC private key, wrong length for y")
|
||||
}
|
||||
|
||||
x := key.X.bigInt()
|
||||
y := key.Y.bigInt()
|
||||
|
||||
if !curve.IsOnCurve(x, y) {
|
||||
return nil, errors.New("square/go-jose: invalid EC key, X/Y are not on declared curve")
|
||||
}
|
||||
|
||||
return &ecdsa.PublicKey{
|
||||
Curve: curve,
|
||||
X: x,
|
||||
Y: y,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func fromEcPublicKey(pub *ecdsa.PublicKey) (*rawJSONWebKey, error) {
|
||||
if pub == nil || pub.X == nil || pub.Y == nil {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid EC key (nil, or X/Y missing)")
|
||||
}
|
||||
|
||||
name, err := curveName(pub.Curve)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
size := curveSize(pub.Curve)
|
||||
|
||||
xBytes := pub.X.Bytes()
|
||||
yBytes := pub.Y.Bytes()
|
||||
|
||||
if len(xBytes) > size || len(yBytes) > size {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid EC key (X/Y too large)")
|
||||
}
|
||||
|
||||
key := &rawJSONWebKey{
|
||||
Kty: "EC",
|
||||
Crv: name,
|
||||
X: newFixedSizeBuffer(xBytes, size),
|
||||
Y: newFixedSizeBuffer(yBytes, size),
|
||||
}
|
||||
|
||||
return key, nil
|
||||
}
|
||||
|
||||
func (key rawJSONWebKey) edPrivateKey() (ed25519.PrivateKey, error) {
|
||||
var missing []string
|
||||
switch {
|
||||
case key.D == nil:
|
||||
missing = append(missing, "D")
|
||||
case key.X == nil:
|
||||
missing = append(missing, "X")
|
||||
}
|
||||
|
||||
if len(missing) > 0 {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid Ed25519 private key, missing %s value(s)", strings.Join(missing, ", "))
|
||||
}
|
||||
|
||||
privateKey := make([]byte, ed25519.PrivateKeySize)
|
||||
copy(privateKey[0:32], key.D.bytes())
|
||||
copy(privateKey[32:], key.X.bytes())
|
||||
rv := ed25519.PrivateKey(privateKey)
|
||||
return rv, nil
|
||||
}
|
||||
|
||||
func (key rawJSONWebKey) edPublicKey() (ed25519.PublicKey, error) {
|
||||
if key.X == nil {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid Ed key, missing x value")
|
||||
}
|
||||
publicKey := make([]byte, ed25519.PublicKeySize)
|
||||
copy(publicKey[0:32], key.X.bytes())
|
||||
rv := ed25519.PublicKey(publicKey)
|
||||
return rv, nil
|
||||
}
|
||||
|
||||
func (key rawJSONWebKey) rsaPrivateKey() (*rsa.PrivateKey, error) {
|
||||
var missing []string
|
||||
switch {
|
||||
case key.N == nil:
|
||||
missing = append(missing, "N")
|
||||
case key.E == nil:
|
||||
missing = append(missing, "E")
|
||||
case key.D == nil:
|
||||
missing = append(missing, "D")
|
||||
case key.P == nil:
|
||||
missing = append(missing, "P")
|
||||
case key.Q == nil:
|
||||
missing = append(missing, "Q")
|
||||
}
|
||||
|
||||
if len(missing) > 0 {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid RSA private key, missing %s value(s)", strings.Join(missing, ", "))
|
||||
}
|
||||
|
||||
rv := &rsa.PrivateKey{
|
||||
PublicKey: rsa.PublicKey{
|
||||
N: key.N.bigInt(),
|
||||
E: key.E.toInt(),
|
||||
},
|
||||
D: key.D.bigInt(),
|
||||
Primes: []*big.Int{
|
||||
key.P.bigInt(),
|
||||
key.Q.bigInt(),
|
||||
},
|
||||
}
|
||||
|
||||
if key.Dp != nil {
|
||||
rv.Precomputed.Dp = key.Dp.bigInt()
|
||||
}
|
||||
if key.Dq != nil {
|
||||
rv.Precomputed.Dq = key.Dq.bigInt()
|
||||
}
|
||||
if key.Qi != nil {
|
||||
rv.Precomputed.Qinv = key.Qi.bigInt()
|
||||
}
|
||||
|
||||
err := rv.Validate()
|
||||
return rv, err
|
||||
}
|
||||
|
||||
func fromEdPrivateKey(ed ed25519.PrivateKey) (*rawJSONWebKey, error) {
|
||||
raw := fromEdPublicKey(ed25519.PublicKey(ed[32:]))
|
||||
|
||||
raw.D = newBuffer(ed[0:32])
|
||||
return raw, nil
|
||||
}
|
||||
|
||||
func fromRsaPrivateKey(rsa *rsa.PrivateKey) (*rawJSONWebKey, error) {
|
||||
if len(rsa.Primes) != 2 {
|
||||
return nil, ErrUnsupportedKeyType
|
||||
}
|
||||
|
||||
raw := fromRsaPublicKey(&rsa.PublicKey)
|
||||
|
||||
raw.D = newBuffer(rsa.D.Bytes())
|
||||
raw.P = newBuffer(rsa.Primes[0].Bytes())
|
||||
raw.Q = newBuffer(rsa.Primes[1].Bytes())
|
||||
|
||||
if rsa.Precomputed.Dp != nil {
|
||||
raw.Dp = newBuffer(rsa.Precomputed.Dp.Bytes())
|
||||
}
|
||||
if rsa.Precomputed.Dq != nil {
|
||||
raw.Dq = newBuffer(rsa.Precomputed.Dq.Bytes())
|
||||
}
|
||||
if rsa.Precomputed.Qinv != nil {
|
||||
raw.Qi = newBuffer(rsa.Precomputed.Qinv.Bytes())
|
||||
}
|
||||
|
||||
return raw, nil
|
||||
}
|
||||
|
||||
func (key rawJSONWebKey) ecPrivateKey() (*ecdsa.PrivateKey, error) {
|
||||
var curve elliptic.Curve
|
||||
switch key.Crv {
|
||||
case "P-256":
|
||||
curve = elliptic.P256()
|
||||
case "P-384":
|
||||
curve = elliptic.P384()
|
||||
case "P-521":
|
||||
curve = elliptic.P521()
|
||||
default:
|
||||
return nil, fmt.Errorf("square/go-jose: unsupported elliptic curve '%s'", key.Crv)
|
||||
}
|
||||
|
||||
if key.X == nil || key.Y == nil || key.D == nil {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid EC private key, missing x/y/d values")
|
||||
}
|
||||
|
||||
// The length of this octet string MUST be the full size of a coordinate for
|
||||
// the curve specified in the "crv" parameter.
|
||||
// https://tools.ietf.org/html/rfc7518#section-6.2.1.2
|
||||
if curveSize(curve) != len(key.X.data) {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid EC private key, wrong length for x")
|
||||
}
|
||||
|
||||
if curveSize(curve) != len(key.Y.data) {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid EC private key, wrong length for y")
|
||||
}
|
||||
|
||||
// https://tools.ietf.org/html/rfc7518#section-6.2.2.1
|
||||
if dSize(curve) != len(key.D.data) {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid EC private key, wrong length for d")
|
||||
}
|
||||
|
||||
x := key.X.bigInt()
|
||||
y := key.Y.bigInt()
|
||||
|
||||
if !curve.IsOnCurve(x, y) {
|
||||
return nil, errors.New("square/go-jose: invalid EC key, X/Y are not on declared curve")
|
||||
}
|
||||
|
||||
return &ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
Curve: curve,
|
||||
X: x,
|
||||
Y: y,
|
||||
},
|
||||
D: key.D.bigInt(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func fromEcPrivateKey(ec *ecdsa.PrivateKey) (*rawJSONWebKey, error) {
|
||||
raw, err := fromEcPublicKey(&ec.PublicKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if ec.D == nil {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid EC private key")
|
||||
}
|
||||
|
||||
raw.D = newFixedSizeBuffer(ec.D.Bytes(), dSize(ec.PublicKey.Curve))
|
||||
|
||||
return raw, nil
|
||||
}
|
||||
|
||||
// dSize returns the size in octets for the "d" member of an elliptic curve
|
||||
// private key.
|
||||
// The length of this octet string MUST be ceiling(log-base-2(n)/8)
|
||||
// octets (where n is the order of the curve).
|
||||
// https://tools.ietf.org/html/rfc7518#section-6.2.2.1
|
||||
func dSize(curve elliptic.Curve) int {
|
||||
order := curve.Params().P
|
||||
bitLen := order.BitLen()
|
||||
size := bitLen / 8
|
||||
if bitLen%8 != 0 {
|
||||
size = size + 1
|
||||
}
|
||||
return size
|
||||
}
|
||||
|
||||
func fromSymmetricKey(key []byte) (*rawJSONWebKey, error) {
|
||||
return &rawJSONWebKey{
|
||||
Kty: "oct",
|
||||
K: newBuffer(key),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (key rawJSONWebKey) symmetricKey() ([]byte, error) {
|
||||
if key.K == nil {
|
||||
return nil, fmt.Errorf("square/go-jose: invalid OCT (symmetric) key, missing k value")
|
||||
}
|
||||
return key.K.bytes(), nil
|
||||
}
|
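As a usage note for the JSONWebKey API defined above (Public, Thumbprint and JSONWebKeySet.Key), a minimal sketch that generates a P-256 key, derives the public JWK and computes its RFC 7638 thumbprint; the key ID `example` is only illustrative:

    package main

    import (
        "crypto"
        "crypto/ecdsa"
        "crypto/elliptic"
        "crypto/rand"
        _ "crypto/sha256" // registers SHA-256 for crypto.SHA256.New
        "encoding/hex"
        "fmt"
        "log"

        jose "gopkg.in/square/go-jose.v2"
    )

    func main() {
        // Any P-256 key will do for the sketch; jwk.go maps it to kty "EC", crv "P-256".
        ec, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
        if err != nil {
            log.Fatal(err)
        }

        priv := jose.JSONWebKey{Key: ec, KeyID: "example", Use: "sig"}

        // Public() strips the private part; Thumbprint hashes the canonical
        // "crv"/"kty"/"x"/"y" members as described in RFC 7638.
        pub := priv.Public()
        tp, err := pub.Thumbprint(crypto.SHA256)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println("thumbprint:", hex.EncodeToString(tp))

        // A JSONWebKeySet can then be searched by key ID.
        set := jose.JSONWebKeySet{Keys: []jose.JSONWebKey{pub}}
        fmt.Println("keys with kid example:", len(set.Key("example")))
    }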
884
vendor/gopkg.in/square/go-jose.v2/jwk_test.go
generated
vendored
Normal file
@@ -0,0 +1,884 @@
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto"
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rsa"
|
||||
"crypto/x509"
|
||||
"encoding/hex"
|
||||
"math/big"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"golang.org/x/crypto/ed25519"
|
||||
|
||||
"gopkg.in/square/go-jose.v2/json"
|
||||
)
|
||||
|
||||
// Test chain of two X.509 certificates
|
||||
var testCertificates, _ = x509.ParseCertificates(fromBase64Bytes(`
|
||||
MIIDfDCCAmSgAwIBAgIJANWAkzF7PA8/MA0GCSqGSIb3DQEBCwUAMFUxCzAJ
|
||||
BgNVBAYTAlVTMQswCQYDVQQIEwJDQTEQMA4GA1UEChMHY2VydGlnbzEQMA4G
|
||||
A1UECxMHZXhhbXBsZTEVMBMGA1UEAxMMZXhhbXBsZS1sZWFmMB4XDTE2MDYx
|
||||
MDIyMTQxMVoXDTIzMDQxNTIyMTQxMVowVTELMAkGA1UEBhMCVVMxCzAJBgNV
|
||||
BAgTAkNBMRAwDgYDVQQKEwdjZXJ0aWdvMRAwDgYDVQQLEwdleGFtcGxlMRUw
|
||||
EwYDVQQDEwxleGFtcGxlLWxlYWYwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
|
||||
ggEKAoIBAQC7stSvfQyGuHw3v34fisqIdDXberrFoFk9ht/WdXgYzX2uLNKd
|
||||
sR/J5sbWSl8K/5djpzj31eIzqU69w8v7SChM5x9bouDsABHz3kZucx5cSafE
|
||||
gJojysBkcrq3VY+aJanzbL+qErYX+lhRpPcZK6JMWIwar8Y3B2la4yWwieec
|
||||
w2/WfEVvG0M/DOYKnR8QHFsfl3US1dnBM84czKPyt9r40gDk2XiH/lGts5a9
|
||||
4rAGvbr8IMCtq0mA5aH3Fx3mDSi3+4MZwygCAHrF5O5iSV9rEI+m2+7j2S+j
|
||||
HDUnvV+nqcpb9m6ENECnYX8FD2KcqlOjTmw8smDy09N2Np6i464lAgMBAAGj
|
||||
TzBNMB0GA1UdJQQWMBQGCCsGAQUFBwMCBggrBgEFBQcDATAsBgNVHREEJTAj
|
||||
hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABgglsb2NhbGhvc3QwDQYJKoZIhvcN
|
||||
AQELBQADggEBAGM4aa/qrURUweZBIwZYv8O9b2+r4l0HjGAh982/B9sMlM05
|
||||
kojyDCUGvj86z18Lm8mKr4/y+i0nJ+vDIksEvfDuzw5ALAXGcBzPJKtICUf7
|
||||
LstA/n9NNpshWz0kld9ylnB5mbUzSFDncVyeXkEf5sGQXdIIZT9ChRBoiloS
|
||||
aa7dvBVCcsX1LGP2LWqKtD+7nUnw5qCwtyAVT8pthEUxFTpywoiJS5ZdzeEx
|
||||
8MNGvUeLFj2kleqPF78EioEQlSOxViCuctEtnQuPcDLHNFr10byTZY9roObi
|
||||
qdsJLMVvb2XliJjAqaPa9AkYwGE6xHw2ispwg64Rse0+AtKups19WIUwggNT
|
||||
MIICO6ADAgECAgkAqD4tCWKt9/AwDQYJKoZIhvcNAQELBQAwVTELMAkGA1UE
|
||||
BhMCVVMxCzAJBgNVBAgTAkNBMRAwDgYDVQQKEwdjZXJ0aWdvMRAwDgYDVQQL
|
||||
EwdleGFtcGxlMRUwEwYDVQQDEwxleGFtcGxlLXJvb3QwHhcNMTYwNjEwMjIx
|
||||
NDExWhcNMjMwNDE1MjIxNDExWjBVMQswCQYDVQQGEwJVUzELMAkGA1UECBMC
|
||||
Q0ExEDAOBgNVBAoTB2NlcnRpZ28xEDAOBgNVBAsTB2V4YW1wbGUxFTATBgNV
|
||||
BAMTDGV4YW1wbGUtcm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
|
||||
ggEBAMo4ShKI2MxDz/NQVxBbz0tbD5R5NcobA0NKkaPKLyMEpnWVY9ucyauM
|
||||
joNn1F568cfOoF0pm3700U8UTPt2MMxEHIi4mFG/OF8UF+Voh1J42Tb42lRo
|
||||
W5RRR3ogh4+7QB1G94nxkYddHAJ4QMhUJlLigFg8c6Ff/MxYODy9I7ilLFOM
|
||||
Zzsjx8fFpRKRXNQFt471P/V4WTSba7GzdTOJRyTZf/xipF36n8RoEQPvyde8
|
||||
pEAsCC4oDOrEiCTdxw8rRJVAU0Wr55XX+qjxyi55C6oykIC/BWR+lUqGd7IL
|
||||
Y2Uyt/OVxllt8b+KuVKNCfn4TFlfgizLWkJRs6JV9KuwJ20CAwEAAaMmMCQw
|
||||
DgYDVR0PAQH/BAQDAgIEMBIGA1UdEwEB/wQIMAYBAf8CAQAwDQYJKoZIhvcN
|
||||
AQELBQADggEBAIsQlTrm9NT6gts0cs4JHp8AutuMrvGyLpIUOlJcEybvgxaz
|
||||
LebIMGZek5w3yEJiCyCK9RdNDP3Kdc/+nM6PhvzfPOVo58+0tMCYyEpZVXhD
|
||||
zmasNDP4fMbiUpczvx5OwPw/KuhwD+1ITuZUQnQlqXgTYoj9n39+qlgUsHos
|
||||
WXHmfzd6Fcz96ADSXg54IL2cEoJ41Q3ewhA7zmWWPLMAl21aex2haiAmzqqN
|
||||
xXyfZTnGNnE3lkV1yVguOrqDZyMRdcxDFvxvtmEeMtYV2Mc/zlS9ccrcOkrc
|
||||
mZSDxthLu3UMl98NA2NrCGWwzJwpk36vQ0PRSbibsCMarFspP8zbIoU=`))
|
||||
|
||||
func TestCurveSize(t *testing.T) {
|
||||
size256 := curveSize(elliptic.P256())
|
||||
size384 := curveSize(elliptic.P384())
|
||||
size521 := curveSize(elliptic.P521())
|
||||
if size256 != 32 {
|
||||
t.Error("P-256 have 32 bytes")
|
||||
}
|
||||
if size384 != 48 {
|
||||
t.Error("P-384 have 48 bytes")
|
||||
}
|
||||
if size521 != 66 {
|
||||
t.Error("P-521 have 66 bytes")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRoundtripRsaPrivate(t *testing.T) {
|
||||
jwk, err := fromRsaPrivateKey(rsaTestKey)
|
||||
if err != nil {
|
||||
t.Error("problem constructing JWK from rsa key", err)
|
||||
}
|
||||
|
||||
rsa2, err := jwk.rsaPrivateKey()
|
||||
if err != nil {
|
||||
t.Error("problem converting RSA private -> JWK", err)
|
||||
}
|
||||
|
||||
if rsa2.N.Cmp(rsaTestKey.N) != 0 {
|
||||
t.Error("RSA private N mismatch")
|
||||
}
|
||||
if rsa2.E != rsaTestKey.E {
|
||||
t.Error("RSA private E mismatch")
|
||||
}
|
||||
if rsa2.D.Cmp(rsaTestKey.D) != 0 {
|
||||
t.Error("RSA private D mismatch")
|
||||
}
|
||||
if len(rsa2.Primes) != 2 {
|
||||
t.Error("RSA private roundtrip expected two primes")
|
||||
}
|
||||
if rsa2.Primes[0].Cmp(rsaTestKey.Primes[0]) != 0 {
|
||||
t.Error("RSA private P mismatch")
|
||||
}
|
||||
if rsa2.Primes[1].Cmp(rsaTestKey.Primes[1]) != 0 {
|
||||
t.Error("RSA private Q mismatch")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRoundtripRsaPrivatePrecomputed(t *testing.T) {
|
||||
// Isolate a shallow copy of the rsaTestKey to avoid polluting it with Precompute
|
||||
localKey := &(*rsaTestKey)
|
||||
localKey.Precompute()
|
||||
|
||||
jwk, err := fromRsaPrivateKey(localKey)
|
||||
if err != nil {
|
||||
t.Error("problem constructing JWK from rsa key", err)
|
||||
}
|
||||
|
||||
rsa2, err := jwk.rsaPrivateKey()
|
||||
if err != nil {
|
||||
t.Error("problem converting RSA private -> JWK", err)
|
||||
}
|
||||
|
||||
if rsa2.Precomputed.Dp == nil {
|
||||
t.Error("RSA private Dp nil")
|
||||
}
|
||||
if rsa2.Precomputed.Dq == nil {
|
||||
t.Error("RSA private Dq nil")
|
||||
}
|
||||
if rsa2.Precomputed.Qinv == nil {
|
||||
t.Error("RSA private Qinv nil")
|
||||
}
|
||||
|
||||
if rsa2.Precomputed.Dp.Cmp(localKey.Precomputed.Dp) != 0 {
|
||||
t.Error("RSA private Dp mismatch")
|
||||
}
|
||||
if rsa2.Precomputed.Dq.Cmp(localKey.Precomputed.Dq) != 0 {
|
||||
t.Error("RSA private Dq mismatch")
|
||||
}
|
||||
if rsa2.Precomputed.Qinv.Cmp(localKey.Precomputed.Qinv) != 0 {
|
||||
t.Error("RSA private Qinv mismatch")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRsaPrivateInsufficientPrimes(t *testing.T) {
|
||||
brokenRsaPrivateKey := rsa.PrivateKey{
|
||||
PublicKey: rsa.PublicKey{
|
||||
N: rsaTestKey.N,
|
||||
E: rsaTestKey.E,
|
||||
},
|
||||
D: rsaTestKey.D,
|
||||
Primes: []*big.Int{rsaTestKey.Primes[0]},
|
||||
}
|
||||
|
||||
_, err := fromRsaPrivateKey(&brokenRsaPrivateKey)
|
||||
if err != ErrUnsupportedKeyType {
|
||||
t.Error("expected unsupported key type error, got", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRsaPrivateExcessPrimes(t *testing.T) {
|
||||
brokenRsaPrivateKey := rsa.PrivateKey{
|
||||
PublicKey: rsa.PublicKey{
|
||||
N: rsaTestKey.N,
|
||||
E: rsaTestKey.E,
|
||||
},
|
||||
D: rsaTestKey.D,
|
||||
Primes: []*big.Int{
|
||||
rsaTestKey.Primes[0],
|
||||
rsaTestKey.Primes[1],
|
||||
big.NewInt(3),
|
||||
},
|
||||
}
|
||||
|
||||
_, err := fromRsaPrivateKey(&brokenRsaPrivateKey)
|
||||
if err != ErrUnsupportedKeyType {
|
||||
t.Error("expected unsupported key type error, got", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRoundtripEcPublic(t *testing.T) {
|
||||
for i, ecTestKey := range []*ecdsa.PrivateKey{ecTestKey256, ecTestKey384, ecTestKey521} {
|
||||
jwk, err := fromEcPublicKey(&ecTestKey.PublicKey)
|
||||
|
||||
ec2, err := jwk.ecPublicKey()
|
||||
if err != nil {
|
||||
t.Error("problem converting ECDSA private -> JWK", i, err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(ec2.Curve, ecTestKey.Curve) {
|
||||
t.Error("ECDSA private curve mismatch", i)
|
||||
}
|
||||
if ec2.X.Cmp(ecTestKey.X) != 0 {
|
||||
t.Error("ECDSA X mismatch", i)
|
||||
}
|
||||
if ec2.Y.Cmp(ecTestKey.Y) != 0 {
|
||||
t.Error("ECDSA Y mismatch", i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestRoundtripEcPrivate(t *testing.T) {
|
||||
for i, ecTestKey := range []*ecdsa.PrivateKey{ecTestKey256, ecTestKey384, ecTestKey521} {
|
||||
jwk, err := fromEcPrivateKey(ecTestKey)
|
||||
|
||||
ec2, err := jwk.ecPrivateKey()
|
||||
if err != nil {
|
||||
t.Fatalf("problem converting ECDSA private -> JWK for %#v: %s", ecTestKey, err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(ec2.Curve, ecTestKey.Curve) {
|
||||
t.Error("ECDSA private curve mismatch", i)
|
||||
}
|
||||
if ec2.X.Cmp(ecTestKey.X) != 0 {
|
||||
t.Error("ECDSA X mismatch", i)
|
||||
}
|
||||
if ec2.Y.Cmp(ecTestKey.Y) != 0 {
|
||||
t.Error("ECDSA Y mismatch", i)
|
||||
}
|
||||
if ec2.D.Cmp(ecTestKey.D) != 0 {
|
||||
t.Error("ECDSA D mismatch", i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestRoundtripX5C(t *testing.T) {
|
||||
jwk := JSONWebKey{
|
||||
Key: rsaTestKey,
|
||||
KeyID: "bar",
|
||||
Algorithm: "foo",
|
||||
Certificates: testCertificates,
|
||||
}
|
||||
|
||||
jsonbar, err := jwk.MarshalJSON()
|
||||
if err != nil {
|
||||
t.Error("problem marshaling", err)
|
||||
}
|
||||
|
||||
var jwk2 JSONWebKey
|
||||
err = jwk2.UnmarshalJSON(jsonbar)
|
||||
if err != nil {
|
||||
t.Fatal("problem unmarshalling", err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(testCertificates, jwk2.Certificates) {
|
||||
t.Error("Certificates not equal", jwk.Certificates, jwk2.Certificates)
|
||||
}
|
||||
|
||||
jsonbar2, err := jwk2.MarshalJSON()
|
||||
if err != nil {
|
||||
t.Error("problem marshaling", err)
|
||||
}
|
||||
if !bytes.Equal(jsonbar, jsonbar2) {
|
||||
t.Error("roundtrip should not lose information")
|
||||
}
|
||||
}
|
||||
|
||||
func TestMarshalUnmarshal(t *testing.T) {
|
||||
kid := "DEADBEEF"
|
||||
|
||||
for i, key := range []interface{}{ecTestKey256, ecTestKey384, ecTestKey521, rsaTestKey, ed25519PrivateKey} {
|
||||
for _, use := range []string{"", "sig", "enc"} {
|
||||
jwk := JSONWebKey{Key: key, KeyID: kid, Algorithm: "foo"}
|
||||
if use != "" {
|
||||
jwk.Use = use
|
||||
}
|
||||
|
||||
jsonbar, err := jwk.MarshalJSON()
|
||||
if err != nil {
|
||||
t.Error("problem marshaling", i, err)
|
||||
}
|
||||
|
||||
var jwk2 JSONWebKey
|
||||
err = jwk2.UnmarshalJSON(jsonbar)
|
||||
if err != nil {
|
||||
t.Fatal("problem unmarshalling", i, err)
|
||||
}
|
||||
|
||||
jsonbar2, err := jwk2.MarshalJSON()
|
||||
if err != nil {
|
||||
t.Fatal("problem marshaling", i, err)
|
||||
}
|
||||
|
||||
if !bytes.Equal(jsonbar, jsonbar2) {
|
||||
t.Error("roundtrip should not lose information", i)
|
||||
}
|
||||
if jwk2.KeyID != kid {
|
||||
t.Error("kid did not roundtrip JSON marshalling", i)
|
||||
}
|
||||
|
||||
if jwk2.Algorithm != "foo" {
|
||||
t.Error("alg did not roundtrip JSON marshalling", i)
|
||||
}
|
||||
|
||||
if jwk2.Use != use {
|
||||
t.Error("use did not roundtrip JSON marshalling", i)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMarshalNonPointer(t *testing.T) {
|
||||
type EmbedsKey struct {
|
||||
Key JSONWebKey
|
||||
}
|
||||
|
||||
keyJSON := []byte(`{
|
||||
"e": "AQAB",
|
||||
"kty": "RSA",
|
||||
"n": "vd7rZIoTLEe-z1_8G1FcXSw9CQFEJgV4g9V277sER7yx5Qjz_Pkf2YVth6wwwFJEmzc0hoKY-MMYFNwBE4hQHw"
|
||||
}`)
|
||||
var parsedKey JSONWebKey
|
||||
err := json.Unmarshal(keyJSON, &parsedKey)
|
||||
if err != nil {
|
||||
t.Errorf("Error unmarshalling key: %v", err)
|
||||
return
|
||||
}
|
||||
ek := EmbedsKey{
|
||||
Key: parsedKey,
|
||||
}
|
||||
out, err := json.Marshal(ek)
|
||||
if err != nil {
|
||||
t.Errorf("Error marshalling JSON: %v", err)
|
||||
return
|
||||
}
|
||||
expected := "{\"Key\":{\"kty\":\"RSA\",\"n\":\"vd7rZIoTLEe-z1_8G1FcXSw9CQFEJgV4g9V277sER7yx5Qjz_Pkf2YVth6wwwFJEmzc0hoKY-MMYFNwBE4hQHw\",\"e\":\"AQAB\"}}"
|
||||
if string(out) != expected {
|
||||
t.Error("Failed to marshal embedded non-pointer JWK properly:", string(out))
|
||||
}
|
||||
}
|
||||
|
||||
func TestMarshalUnmarshalInvalid(t *testing.T) {
|
||||
// Make an invalid curve coordinate by creating a byte array that is one
|
||||
// byte too large, and setting the first byte to 1 (otherwise it's just zero).
|
||||
invalidCoord := make([]byte, curveSize(ecTestKey256.Curve)+1)
|
||||
invalidCoord[0] = 1
|
||||
|
||||
keys := []interface{}{
|
||||
// Empty keys
|
||||
&rsa.PrivateKey{},
|
||||
&ecdsa.PrivateKey{},
|
||||
// Invalid keys
|
||||
&ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
// Missing values in pub key
|
||||
Curve: elliptic.P256(),
|
||||
},
|
||||
},
|
||||
&ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
// Invalid curve
|
||||
Curve: nil,
|
||||
X: ecTestKey256.X,
|
||||
Y: ecTestKey256.Y,
|
||||
},
|
||||
},
|
||||
&ecdsa.PrivateKey{
|
||||
// Valid pub key, but missing priv key values
|
||||
PublicKey: ecTestKey256.PublicKey,
|
||||
},
|
||||
&ecdsa.PrivateKey{
|
||||
// Invalid pub key, values too large
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
Curve: ecTestKey256.Curve,
|
||||
X: big.NewInt(0).SetBytes(invalidCoord),
|
||||
Y: big.NewInt(0).SetBytes(invalidCoord),
|
||||
},
|
||||
D: ecTestKey256.D,
|
||||
},
|
||||
nil,
|
||||
}
|
||||
|
||||
for i, key := range keys {
|
||||
jwk := JSONWebKey{Key: key}
|
||||
_, err := jwk.MarshalJSON()
|
||||
if err == nil {
|
||||
t.Error("managed to serialize invalid key", i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestWebKeyVectorsInvalid(t *testing.T) {
|
||||
keys := []string{
|
||||
// Invalid JSON
|
||||
"{X",
|
||||
// Empty key
|
||||
"{}",
|
||||
// Invalid RSA keys
|
||||
`{"kty":"RSA"}`,
|
||||
`{"kty":"RSA","e":""}`,
|
||||
`{"kty":"RSA","e":"XXXX"}`,
|
||||
`{"kty":"RSA","d":"XXXX"}`,
|
||||
// Invalid EC keys
|
||||
`{"kty":"EC","crv":"ABC"}`,
|
||||
`{"kty":"EC","crv":"P-256"}`,
|
||||
`{"kty":"EC","crv":"P-256","d":"XXX"}`,
|
||||
`{"kty":"EC","crv":"ABC","d":"dGVzdA","x":"dGVzdA"}`,
|
||||
`{"kty":"EC","crv":"P-256","d":"dGVzdA","x":"dGVzdA"}`,
|
||||
}
|
||||
|
||||
for _, key := range keys {
|
||||
var jwk2 JSONWebKey
|
||||
err := jwk2.UnmarshalJSON([]byte(key))
|
||||
if err == nil {
|
||||
t.Error("managed to parse invalid key:", key)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test vectors from RFC 7520
|
||||
var cookbookJWKs = []string{
|
||||
// EC Public
|
||||
stripWhitespace(`{
|
||||
"kty": "EC",
|
||||
"kid": "bilbo.baggins@hobbiton.example",
|
||||
"use": "sig",
|
||||
"crv": "P-521",
|
||||
"x": "AHKZLLOsCOzz5cY97ewNUajB957y-C-U88c3v13nmGZx6sYl_oJXu9
|
||||
A5RkTKqjqvjyekWF-7ytDyRXYgCF5cj0Kt",
|
||||
"y": "AdymlHvOiLxXkEhayXQnNCvDX4h9htZaCJN34kfmC6pV5OhQHiraVy
|
||||
SsUdaQkAgDPrwQrJmbnX9cwlGfP-HqHZR1"
|
||||
}`),
|
||||
|
||||
// Ed25519 Private
|
||||
stripWhitespace(`{
|
||||
"kty": "OKP",
|
||||
"crv": "Ed25519",
|
||||
"x": "11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo",
|
||||
"d": "nWGxne_9WmC6hEr0kuwsxERJxWl7MmkZcDusAxyuf2A"
|
||||
}`),
|
||||
|
||||
// EC Private
|
||||
stripWhitespace(`{
|
||||
"kty": "EC",
|
||||
"kid": "bilbo.baggins@hobbiton.example",
|
||||
"use": "sig",
|
||||
"crv": "P-521",
|
||||
"x": "AHKZLLOsCOzz5cY97ewNUajB957y-C-U88c3v13nmGZx6sYl_oJXu9
|
||||
A5RkTKqjqvjyekWF-7ytDyRXYgCF5cj0Kt",
|
||||
"y": "AdymlHvOiLxXkEhayXQnNCvDX4h9htZaCJN34kfmC6pV5OhQHiraVy
|
||||
SsUdaQkAgDPrwQrJmbnX9cwlGfP-HqHZR1",
|
||||
"d": "AAhRON2r9cqXX1hg-RoI6R1tX5p2rUAYdmpHZoC1XNM56KtscrX6zb
|
||||
KipQrCW9CGZH3T4ubpnoTKLDYJ_fF3_rJt"
|
||||
}`),
|
||||
|
||||
// RSA Public
|
||||
stripWhitespace(`{
|
||||
"kty": "RSA",
|
||||
"kid": "bilbo.baggins@hobbiton.example",
|
||||
"use": "sig",
|
||||
"n": "n4EPtAOCc9AlkeQHPzHStgAbgs7bTZLwUBZdR8_KuKPEHLd4rHVTeT
|
||||
-O-XV2jRojdNhxJWTDvNd7nqQ0VEiZQHz_AJmSCpMaJMRBSFKrKb2wqV
|
||||
wGU_NsYOYL-QtiWN2lbzcEe6XC0dApr5ydQLrHqkHHig3RBordaZ6Aj-
|
||||
oBHqFEHYpPe7Tpe-OfVfHd1E6cS6M1FZcD1NNLYD5lFHpPI9bTwJlsde
|
||||
3uhGqC0ZCuEHg8lhzwOHrtIQbS0FVbb9k3-tVTU4fg_3L_vniUFAKwuC
|
||||
LqKnS2BYwdq_mzSnbLY7h_qixoR7jig3__kRhuaxwUkRz5iaiQkqgc5g
|
||||
HdrNP5zw",
|
||||
"e": "AQAB"
|
||||
}`),
|
||||
|
||||
// RSA Private
|
||||
stripWhitespace(`{"kty":"RSA",
|
||||
"kid":"juliet@capulet.lit",
|
||||
"use":"enc",
|
||||
"n":"t6Q8PWSi1dkJj9hTP8hNYFlvadM7DflW9mWepOJhJ66w7nyoK1gPNqFMSQRy
|
||||
O125Gp-TEkodhWr0iujjHVx7BcV0llS4w5ACGgPrcAd6ZcSR0-Iqom-QFcNP
|
||||
8Sjg086MwoqQU_LYywlAGZ21WSdS_PERyGFiNnj3QQlO8Yns5jCtLCRwLHL0
|
||||
Pb1fEv45AuRIuUfVcPySBWYnDyGxvjYGDSM-AqWS9zIQ2ZilgT-GqUmipg0X
|
||||
OC0Cc20rgLe2ymLHjpHciCKVAbY5-L32-lSeZO-Os6U15_aXrk9Gw8cPUaX1
|
||||
_I8sLGuSiVdt3C_Fn2PZ3Z8i744FPFGGcG1qs2Wz-Q",
|
||||
"e":"AQAB",
|
||||
"d":"GRtbIQmhOZtyszfgKdg4u_N-R_mZGU_9k7JQ_jn1DnfTuMdSNprTeaSTyWfS
|
||||
NkuaAwnOEbIQVy1IQbWVV25NY3ybc_IhUJtfri7bAXYEReWaCl3hdlPKXy9U
|
||||
vqPYGR0kIXTQRqns-dVJ7jahlI7LyckrpTmrM8dWBo4_PMaenNnPiQgO0xnu
|
||||
ToxutRZJfJvG4Ox4ka3GORQd9CsCZ2vsUDmsXOfUENOyMqADC6p1M3h33tsu
|
||||
rY15k9qMSpG9OX_IJAXmxzAh_tWiZOwk2K4yxH9tS3Lq1yX8C1EWmeRDkK2a
|
||||
hecG85-oLKQt5VEpWHKmjOi_gJSdSgqcN96X52esAQ",
|
||||
"p":"2rnSOV4hKSN8sS4CgcQHFbs08XboFDqKum3sc4h3GRxrTmQdl1ZK9uw-PIHf
|
||||
QP0FkxXVrx-WE-ZEbrqivH_2iCLUS7wAl6XvARt1KkIaUxPPSYB9yk31s0Q8
|
||||
UK96E3_OrADAYtAJs-M3JxCLfNgqh56HDnETTQhH3rCT5T3yJws",
|
||||
"q":"1u_RiFDP7LBYh3N4GXLT9OpSKYP0uQZyiaZwBtOCBNJgQxaj10RWjsZu0c6I
|
||||
edis4S7B_coSKB0Kj9PaPaBzg-IySRvvcQuPamQu66riMhjVtG6TlV8CLCYK
|
||||
rYl52ziqK0E_ym2QnkwsUX7eYTB7LbAHRK9GqocDE5B0f808I4s",
|
||||
"dp":"KkMTWqBUefVwZ2_Dbj1pPQqyHSHjj90L5x_MOzqYAJMcLMZtbUtwKqvVDq3
|
||||
tbEo3ZIcohbDtt6SbfmWzggabpQxNxuBpoOOf_a_HgMXK_lhqigI4y_kqS1w
|
||||
Y52IwjUn5rgRrJ-yYo1h41KR-vz2pYhEAeYrhttWtxVqLCRViD6c",
|
||||
"dq":"AvfS0-gRxvn0bwJoMSnFxYcK1WnuEjQFluMGfwGitQBWtfZ1Er7t1xDkbN9
|
||||
GQTB9yqpDoYaN06H7CFtrkxhJIBQaj6nkF5KKS3TQtQ5qCzkOkmxIe3KRbBy
|
||||
mXxkb5qwUpX5ELD5xFc6FeiafWYY63TmmEAu_lRFCOJ3xDea-ots",
|
||||
"qi":"lSQi-w9CpyUReMErP1RsBLk7wNtOvs5EQpPqmuMvqW57NBUczScEoPwmUqq
|
||||
abu9V0-Py4dQ57_bapoKRu1R90bvuFnU63SHWEFglZQvJDMeAvmj4sm-Fp0o
|
||||
Yu_neotgQ0hzbI5gry7ajdYy9-2lNx_76aBZoOUu9HCJ-UsfSOI8"}`),
|
||||
|
||||
// X.509 Certificate Chain
|
||||
stripWhitespace(`{"kty":"RSA",
|
||||
"use":"sig",
|
||||
"kid":"1b94c",
|
||||
"n":"vrjOfz9Ccdgx5nQudyhdoR17V-IubWMeOZCwX_jj0hgAsz2J_pqYW08
|
||||
PLbK_PdiVGKPrqzmDIsLI7sA25VEnHU1uCLNwBuUiCO11_-7dYbsr4iJmG0Q
|
||||
u2j8DsVyT1azpJC_NG84Ty5KKthuCaPod7iI7w0LK9orSMhBEwwZDCxTWq4a
|
||||
YWAchc8t-emd9qOvWtVMDC2BXksRngh6X5bUYLy6AyHKvj-nUy1wgzjYQDwH
|
||||
MTplCoLtU-o-8SNnZ1tmRoGE9uJkBLdh5gFENabWnU5m1ZqZPdwS-qo-meMv
|
||||
VfJb6jJVWRpl2SUtCnYG2C32qvbWbjZ_jBPD5eunqsIo1vQ",
|
||||
"e":"AQAB",
|
||||
"x5c":
|
||||
["MIIDQjCCAiqgAwIBAgIGATz/FuLiMA0GCSqGSIb3DQEBBQUAMGIxCzAJB
|
||||
gNVBAYTAlVTMQswCQYDVQQIEwJDTzEPMA0GA1UEBxMGRGVudmVyMRwwGgYD
|
||||
VQQKExNQaW5nIElkZW50aXR5IENvcnAuMRcwFQYDVQQDEw5CcmlhbiBDYW1
|
||||
wYmVsbDAeFw0xMzAyMjEyMzI5MTVaFw0xODA4MTQyMjI5MTVaMGIxCzAJBg
|
||||
NVBAYTAlVTMQswCQYDVQQIEwJDTzEPMA0GA1UEBxMGRGVudmVyMRwwGgYDV
|
||||
QQKExNQaW5nIElkZW50aXR5IENvcnAuMRcwFQYDVQQDEw5CcmlhbiBDYW1w
|
||||
YmVsbDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL64zn8/QnH
|
||||
YMeZ0LncoXaEde1fiLm1jHjmQsF/449IYALM9if6amFtPDy2yvz3YlRij66
|
||||
s5gyLCyO7ANuVRJx1NbgizcAblIgjtdf/u3WG7K+IiZhtELto/A7Fck9Ws6
|
||||
SQvzRvOE8uSirYbgmj6He4iO8NCyvaK0jIQRMMGQwsU1quGmFgHIXPLfnpn
|
||||
fajr1rVTAwtgV5LEZ4Iel+W1GC8ugMhyr4/p1MtcIM42EA8BzE6ZQqC7VPq
|
||||
PvEjZ2dbZkaBhPbiZAS3YeYBRDWm1p1OZtWamT3cEvqqPpnjL1XyW+oyVVk
|
||||
aZdklLQp2Btgt9qr21m42f4wTw+Xrp6rCKNb0CAwEAATANBgkqhkiG9w0BA
|
||||
QUFAAOCAQEAh8zGlfSlcI0o3rYDPBB07aXNswb4ECNIKG0CETTUxmXl9KUL
|
||||
+9gGlqCz5iWLOgWsnrcKcY0vXPG9J1r9AqBNTqNgHq2G03X09266X5CpOe1
|
||||
zFo+Owb1zxtp3PehFdfQJ610CDLEaS9V9Rqp17hCyybEpOGVwe8fnk+fbEL
|
||||
2Bo3UPGrpsHzUoaGpDftmWssZkhpBJKVMJyf/RuP2SmmaIzmnw9JiSlYhzo
|
||||
4tpzd5rFXhjRbg4zW9C+2qok+2+qDM1iJ684gPHMIY8aLWrdgQTxkumGmTq
|
||||
gawR+N5MDtdPTEQ0XfIBc2cJEUyMTY5MPvACWpkA6SdS4xSvdXK3IVfOWA=="]}`),
|
||||
}
|
||||
|
||||
// SHA-256 thumbprints of the above keys, hex-encoded
|
||||
var cookbookJWKThumbprints = []string{
|
||||
"747ae2dd2003664aeeb21e4753fe7402846170a16bc8df8f23a8cf06d3cbe793",
|
||||
"f6934029a341ddf81dceb753e91d17efe16664f40d9f4ed84bc5ea87e111f29d",
|
||||
"747ae2dd2003664aeeb21e4753fe7402846170a16bc8df8f23a8cf06d3cbe793",
|
||||
"f63838e96077ad1fc01c3f8405774dedc0641f558ebb4b40dccf5f9b6d66a932",
|
||||
"0fc478f8579325fcee0d4cbc6d9d1ce21730a6e97e435d6008fb379b0ebe47d4",
|
||||
"0ddb05bfedbec2070fa037324ba397396561d3425d6d69245570c261dc49dee3",
|
||||
}
|
||||
|
||||
func TestWebKeyVectorsValid(t *testing.T) {
|
||||
for _, key := range cookbookJWKs {
|
||||
var jwk2 JSONWebKey
|
||||
err := jwk2.UnmarshalJSON([]byte(key))
|
||||
if err != nil {
|
||||
t.Error("unable to parse valid key:", key, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestEd25519Serialization(t *testing.T) {
|
||||
jwk := JSONWebKey{
|
||||
Key: ed25519PrivateKey,
|
||||
}
|
||||
serialized, _ := json.Marshal(jwk)
|
||||
|
||||
var jwk2 JSONWebKey
|
||||
json.Unmarshal(serialized, &jwk2)
|
||||
|
||||
assert.True(t, bytes.Equal(
|
||||
[]byte(jwk.Key.(ed25519.PrivateKey).Public().(ed25519.PublicKey)),
|
||||
[]byte(jwk2.Key.(ed25519.PrivateKey).Public().(ed25519.PublicKey))))
|
||||
}
|
||||
|
||||
func TestThumbprint(t *testing.T) {
|
||||
for i, key := range cookbookJWKs {
|
||||
var jwk2 JSONWebKey
|
||||
err := jwk2.UnmarshalJSON([]byte(key))
|
||||
if err != nil {
|
||||
t.Error("unable to parse valid key:", key, err)
|
||||
}
|
||||
|
||||
tp, err := jwk2.Thumbprint(crypto.SHA256)
|
||||
if err != nil {
|
||||
t.Error("unable to compute thumbprint:", key, err)
|
||||
}
|
||||
|
||||
tpHex := hex.EncodeToString(tp)
|
||||
if cookbookJWKThumbprints[i] != tpHex {
|
||||
t.Error("incorrect thumbprint:", i, cookbookJWKThumbprints[i], tpHex)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMarshalUnmarshalJWKSet(t *testing.T) {
|
||||
jwk1 := JSONWebKey{Key: rsaTestKey, KeyID: "ABCDEFG", Algorithm: "foo"}
|
||||
jwk2 := JSONWebKey{Key: rsaTestKey, KeyID: "GFEDCBA", Algorithm: "foo"}
|
||||
var set JSONWebKeySet
|
||||
set.Keys = append(set.Keys, jwk1)
|
||||
set.Keys = append(set.Keys, jwk2)
|
||||
|
||||
jsonbar, err := json.Marshal(&set)
|
||||
if err != nil {
|
||||
t.Error("problem marshalling set", err)
|
||||
}
|
||||
var set2 JSONWebKeySet
|
||||
err = json.Unmarshal(jsonbar, &set2)
|
||||
if err != nil {
|
||||
t.Fatal("problem unmarshalling set", err)
|
||||
}
|
||||
jsonbar2, err := json.Marshal(&set2)
|
||||
if err != nil {
|
||||
t.Fatal("problem marshalling set", err)
|
||||
}
|
||||
if !bytes.Equal(jsonbar, jsonbar2) {
|
||||
t.Error("roundtrip should not lose information")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKSetKey(t *testing.T) {
|
||||
jwk1 := JSONWebKey{Key: rsaTestKey, KeyID: "ABCDEFG", Algorithm: "foo"}
|
||||
jwk2 := JSONWebKey{Key: rsaTestKey, KeyID: "GFEDCBA", Algorithm: "foo"}
|
||||
var set JSONWebKeySet
|
||||
set.Keys = append(set.Keys, jwk1)
|
||||
set.Keys = append(set.Keys, jwk2)
|
||||
k := set.Key("ABCDEFG")
|
||||
if len(k) != 1 {
|
||||
t.Errorf("method should return slice with one key not %d", len(k))
|
||||
}
|
||||
if k[0].KeyID != "ABCDEFG" {
|
||||
t.Error("method should return key with ID ABCDEFG")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKSymmetricKey(t *testing.T) {
|
||||
sample1 := `{"kty":"oct","alg":"A128KW","k":"GawgguFyGrWKav7AX4VKUg"}`
|
||||
sample2 := `{"kty":"oct","k":"AyM1SysPpbyDfgZld3umj1qzKObwVMkoqQ-EstJQLr_T-1qS0gZH75aKtMN3Yj0iPS4hcgUuTwjAzZr1Z9CAow","kid":"HMAC key used in JWS spec Appendix A.1 example"}`
|
||||
|
||||
var jwk1 JSONWebKey
|
||||
json.Unmarshal([]byte(sample1), &jwk1)
|
||||
|
||||
if jwk1.Algorithm != "A128KW" {
|
||||
t.Errorf("expected Algorithm to be A128KW, but was '%s'", jwk1.Algorithm)
|
||||
}
|
||||
expected1 := fromHexBytes("19ac2082e1721ab58a6afec05f854a52")
|
||||
if !bytes.Equal(jwk1.Key.([]byte), expected1) {
|
||||
t.Errorf("expected Key to be '%s', but was '%s'", hex.EncodeToString(expected1), hex.EncodeToString(jwk1.Key.([]byte)))
|
||||
}
|
||||
|
||||
var jwk2 JSONWebKey
|
||||
json.Unmarshal([]byte(sample2), &jwk2)
|
||||
|
||||
if jwk2.KeyID != "HMAC key used in JWS spec Appendix A.1 example" {
|
||||
t.Errorf("expected KeyID to be 'HMAC key used in JWS spec Appendix A.1 example', but was '%s'", jwk2.KeyID)
|
||||
}
|
||||
expected2 := fromHexBytes(`
|
||||
0323354b2b0fa5bc837e0665777ba68f5ab328e6f054c928a90f84b2d2502ebf
|
||||
d3fb5a92d20647ef968ab4c377623d223d2e2172052e4f08c0cd9af567d080a3`)
|
||||
if !bytes.Equal(jwk2.Key.([]byte), expected2) {
|
||||
t.Errorf("expected Key to be '%s', but was '%s'", hex.EncodeToString(expected2), hex.EncodeToString(jwk2.Key.([]byte)))
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKSymmetricRoundtrip(t *testing.T) {
|
||||
jwk1 := JSONWebKey{Key: []byte{1, 2, 3, 4}}
|
||||
marshaled, err := jwk1.MarshalJSON()
|
||||
if err != nil {
|
||||
t.Error("failed to marshal valid JWK object", err)
|
||||
}
|
||||
|
||||
var jwk2 JSONWebKey
|
||||
err = jwk2.UnmarshalJSON(marshaled)
|
||||
if err != nil {
|
||||
t.Error("failed to unmarshal valid JWK object", err)
|
||||
}
|
||||
|
||||
if !bytes.Equal(jwk1.Key.([]byte), jwk2.Key.([]byte)) {
|
||||
t.Error("round-trip of symmetric JWK gave different raw keys")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKSymmetricInvalid(t *testing.T) {
|
||||
invalid := JSONWebKey{}
|
||||
_, err := invalid.MarshalJSON()
|
||||
if err == nil {
|
||||
t.Error("excepted error on marshaling invalid symmetric JWK object")
|
||||
}
|
||||
|
||||
var jwk JSONWebKey
|
||||
err = jwk.UnmarshalJSON([]byte(`{"kty":"oct"}`))
|
||||
if err == nil {
|
||||
t.Error("excepted error on unmarshaling invalid symmetric JWK object")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKIsPublic(t *testing.T) {
|
||||
bigInt := big.NewInt(0)
|
||||
eccPub := ecdsa.PublicKey{elliptic.P256(), bigInt, bigInt}
|
||||
rsaPub := rsa.PublicKey{bigInt, 1}
|
||||
|
||||
cases := []struct {
|
||||
key interface{}
|
||||
expectedIsPublic bool
|
||||
}{
|
||||
{&eccPub, true},
|
||||
{&ecdsa.PrivateKey{eccPub, bigInt}, false},
|
||||
{&rsaPub, true},
|
||||
{&rsa.PrivateKey{rsaPub, bigInt, []*big.Int{bigInt, bigInt}, rsa.PrecomputedValues{}}, false},
|
||||
{ed25519PublicKey, true},
|
||||
{ed25519PrivateKey, false},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
k := &JSONWebKey{Key: tc.key}
|
||||
if public := k.IsPublic(); public != tc.expectedIsPublic {
|
||||
t.Errorf("expected IsPublic to return %t, got %t", tc.expectedIsPublic, public)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKValid(t *testing.T) {
|
||||
bigInt := big.NewInt(0)
|
||||
eccPub := ecdsa.PublicKey{elliptic.P256(), bigInt, bigInt}
|
||||
rsaPub := rsa.PublicKey{bigInt, 1}
|
||||
edPubEmpty := ed25519.PublicKey([]byte{})
|
||||
edPrivEmpty := ed25519.PrivateKey([]byte{})
|
||||
|
||||
cases := []struct {
|
||||
key interface{}
|
||||
expectedValidity bool
|
||||
}{
|
||||
{nil, false},
|
||||
{&ecdsa.PublicKey{}, false},
|
||||
{&eccPub, true},
|
||||
{&ecdsa.PrivateKey{}, false},
|
||||
{&ecdsa.PrivateKey{eccPub, bigInt}, true},
|
||||
{&rsa.PublicKey{}, false},
|
||||
{&rsaPub, true},
|
||||
{&rsa.PrivateKey{}, false},
|
||||
{&rsa.PrivateKey{rsaPub, bigInt, []*big.Int{bigInt, bigInt}, rsa.PrecomputedValues{}}, true},
|
||||
{ed25519PublicKey, true},
|
||||
{ed25519PrivateKey, true},
|
||||
{edPubEmpty, false},
|
||||
{edPrivEmpty, false},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
k := &JSONWebKey{Key: tc.key}
|
||||
valid := k.Valid()
|
||||
if valid != tc.expectedValidity {
|
||||
t.Errorf("expected Valid to return %t, got %t", tc.expectedValidity, valid)
|
||||
}
|
||||
if valid {
|
||||
wasPublic := k.IsPublic()
|
||||
p := k.Public() // all aforementioned keys are asymmetric
|
||||
if !p.Valid() {
|
||||
t.Errorf("unable to derive public key from valid asymmetric key")
|
||||
}
|
||||
if wasPublic != k.IsPublic() {
|
||||
t.Errorf("original key was touched during public key derivation")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKBufferSizeCheck(t *testing.T) {
|
||||
key := `{
|
||||
"kty":"EC",
|
||||
"crv":"P-256",
|
||||
"x":"m9GSmJ5iGmAYlMlaOJGSFN_CjN9cIn8GGYExP-C0FBiIXlWTNvGN38R9WdrHcppfsKF0FXMOMyutpHIRaiMxYSA",
|
||||
"y":"ZaPcRZ3q_7T3h-Gwz2i-T2JjJXfj6YVGgKHcFz5zqmg"}`
|
||||
var jwk JSONWebKey
|
||||
jwk.UnmarshalJSON([]byte(key))
|
||||
jwk.Valid() // true
|
||||
// panic: square/go-jose: invalid call to newFixedSizeBuffer (len(data) > length)
|
||||
// github.com/square/go-jose.newFixedSizeBuffer(0xc420014557, 0x41, 0x41, 0x20, 0x0)
|
||||
jwk.Thumbprint(crypto.SHA256)
|
||||
}
|
||||
|
||||
func TestJWKPaddingPrivateX(t *testing.T) {
|
||||
key := `{
|
||||
"kty": "EC",
|
||||
"crv": "P-256",
|
||||
"x": "nPTIABcDASY6FNGSNfHCB51tY7qChtgzeVazOtLrwQ",
|
||||
"y": "vEEs4V0egJkNyM2Q4pp001zu14VcpQ0_Ei8xOOPxKZs",
|
||||
"d": "nIVCvMR2wkLmeGJErOpI23VDHl2s3JwGdbzKy0odir0"
|
||||
}`
|
||||
var jwk JSONWebKey
|
||||
err := jwk.UnmarshalJSON([]byte(key))
|
||||
if err == nil {
|
||||
t.Errorf("Expected key with short x to fail unmarshalling")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "wrong length for x") {
|
||||
t.Errorf("Wrong error for short x, got %q", err)
|
||||
}
|
||||
if jwk.Valid() {
|
||||
t.Errorf("Expected key to be invalid, but it was valid.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKPaddingPrivateY(t *testing.T) {
|
||||
key := `{
|
||||
"kty": "EC",
|
||||
"crv": "P-256",
|
||||
"x": "vEEs4V0egJkNyM2Q4pp001zu14VcpQ0_Ei8xOOPxKZs",
|
||||
"y": "nPTIABcDASY6FNGSNfHCB51tY7qChtgzeVazOtLrwQ",
|
||||
"d": "nIVCvMR2wkLmeGJErOpI23VDHl2s3JwGdbzKy0odir0"
|
||||
}`
|
||||
var jwk JSONWebKey
|
||||
err := jwk.UnmarshalJSON([]byte(key))
|
||||
if err == nil {
|
||||
t.Errorf("Expected key with short x to fail unmarshalling")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "wrong length for y") {
|
||||
t.Errorf("Wrong error for short y, got %q", err)
|
||||
}
|
||||
if jwk.Valid() {
|
||||
t.Errorf("Expected key to be invalid, but it was valid.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKPaddingPrivateD(t *testing.T) {
|
||||
key := `{
|
||||
"kty": "EC",
|
||||
"crv": "P-256",
|
||||
"x": "vEEs4V0egJkNyM2Q4pp001zu14VcpQ0_Ei8xOOPxKZs",
|
||||
"y": "qnPTIABcDASY6FNGSNfHCB51tY7qChtgzeVazOtLrwQ",
|
||||
"d": "IVCvMR2wkLmeGJErOpI23VDHl2s3JwGdbzKy0odir0"
|
||||
}`
|
||||
var jwk JSONWebKey
|
||||
err := jwk.UnmarshalJSON([]byte(key))
|
||||
if err == nil {
|
||||
t.Errorf("Expected key with short x to fail unmarshalling")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "wrong length for d") {
|
||||
t.Errorf("Wrong error for short d, got %q", err)
|
||||
}
|
||||
if jwk.Valid() {
|
||||
t.Errorf("Expected key to be invalid, but it was valid.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKPaddingX(t *testing.T) {
|
||||
key := `{
|
||||
"kty": "EC",
|
||||
"crv": "P-256",
|
||||
"x": "nPTIABcDASY6FNGSNfHCB51tY7qChtgzeVazOtLrwQ",
|
||||
"y": "vEEs4V0egJkNyM2Q4pp001zu14VcpQ0_Ei8xOOPxKZs"
|
||||
}`
|
||||
var jwk JSONWebKey
|
||||
err := jwk.UnmarshalJSON([]byte(key))
|
||||
if err == nil {
|
||||
t.Errorf("Expected key with short x to fail unmarshalling")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "wrong length for x") {
|
||||
t.Errorf("Wrong error for short x, got %q", err)
|
||||
}
|
||||
if jwk.Valid() {
|
||||
t.Errorf("Expected key to be invalid, but it was valid.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWKPaddingY(t *testing.T) {
|
||||
key := `{
|
||||
"kty": "EC",
|
||||
"crv": "P-256",
|
||||
"x": "vEEs4V0egJkNyM2Q4pp001zu14VcpQ0_Ei8xOOPxKZs",
|
||||
"y": "nPTIABcDASY6FNGSNfHCB51tY7qChtgzeVazOtLrwQ"
|
||||
}`
|
||||
var jwk JSONWebKey
|
||||
err := jwk.UnmarshalJSON([]byte(key))
|
||||
if err == nil {
|
||||
t.Errorf("Expected key with short y to fail unmarshalling")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "wrong length for y") {
|
||||
t.Errorf("Wrong error for short y, got %q", err)
|
||||
}
|
||||
if jwk.Valid() {
|
||||
t.Errorf("Expected key to be invalid, but it was valid.")
|
||||
}
|
||||
}
|
321 vendor/gopkg.in/square/go-jose.v2/jws.go generated vendored Normal file
@@ -0,0 +1,321 @@
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"gopkg.in/square/go-jose.v2/json"
|
||||
)
|
||||
|
||||
// rawJSONWebSignature represents a raw JWS JSON object. Used for parsing/serializing.
|
||||
type rawJSONWebSignature struct {
|
||||
Payload *byteBuffer `json:"payload,omitempty"`
|
||||
Signatures []rawSignatureInfo `json:"signatures,omitempty"`
|
||||
Protected *byteBuffer `json:"protected,omitempty"`
|
||||
Header *rawHeader `json:"header,omitempty"`
|
||||
Signature *byteBuffer `json:"signature,omitempty"`
|
||||
}
|
||||
|
||||
// rawSignatureInfo represents a single JWS signature over the JWS payload and protected header.
|
||||
type rawSignatureInfo struct {
|
||||
Protected *byteBuffer `json:"protected,omitempty"`
|
||||
Header *rawHeader `json:"header,omitempty"`
|
||||
Signature *byteBuffer `json:"signature,omitempty"`
|
||||
}
|
||||
|
||||
// JSONWebSignature represents a signed JWS object after parsing.
|
||||
type JSONWebSignature struct {
|
||||
payload []byte
|
||||
// Signatures attached to this object (may be more than one for multi-sig).
|
||||
// Be careful about accessing these directly, prefer to use Verify() or
|
||||
// VerifyMulti() to ensure that the data you're getting is verified.
|
||||
Signatures []Signature
|
||||
}
|
||||
|
||||
// Signature represents a single signature over the JWS payload and protected header.
|
||||
type Signature struct {
|
||||
// Merged header fields. Contains both protected and unprotected header
|
||||
// values. Prefer using Protected and Unprotected fields instead of this.
|
||||
// Values in this header may or may not have been signed and in general
|
||||
// should not be trusted.
|
||||
Header Header
|
||||
|
||||
// Protected header. Values in this header were signed and
|
||||
// will be verified as part of the signature verification process.
|
||||
Protected Header
|
||||
|
||||
// Unprotected header. Values in this header were not signed
|
||||
// and in general should not be trusted.
|
||||
Unprotected Header
|
||||
|
||||
// The actual signature value
|
||||
Signature []byte
|
||||
|
||||
protected *rawHeader
|
||||
header *rawHeader
|
||||
original *rawSignatureInfo
|
||||
}
|
||||
|
||||
// ParseSigned parses a signed message in compact or full serialization format.
|
||||
func ParseSigned(input string) (*JSONWebSignature, error) {
|
||||
input = stripWhitespace(input)
|
||||
if strings.HasPrefix(input, "{") {
|
||||
return parseSignedFull(input)
|
||||
}
|
||||
|
||||
return parseSignedCompact(input)
|
||||
}
|
||||
|
||||
// mergedHeaders merges the protected and unprotected headers into a single rawHeader.
|
||||
func (sig Signature) mergedHeaders() rawHeader {
|
||||
out := rawHeader{}
|
||||
out.merge(sig.protected)
|
||||
out.merge(sig.header)
|
||||
return out
|
||||
}
|
||||
|
||||
// Compute data to be signed
|
||||
func (obj JSONWebSignature) computeAuthData(payload []byte, signature *Signature) []byte {
|
||||
var serializedProtected string
|
||||
|
||||
if signature.original != nil && signature.original.Protected != nil {
|
||||
serializedProtected = signature.original.Protected.base64()
|
||||
} else if signature.protected != nil {
|
||||
serializedProtected = base64.RawURLEncoding.EncodeToString(mustSerializeJSON(signature.protected))
|
||||
} else {
|
||||
serializedProtected = ""
|
||||
}
|
||||
|
||||
return []byte(fmt.Sprintf("%s.%s",
|
||||
serializedProtected,
|
||||
base64.RawURLEncoding.EncodeToString(payload)))
|
||||
}
|
||||
|
||||
// parseSignedFull parses a message in full format.
|
||||
func parseSignedFull(input string) (*JSONWebSignature, error) {
|
||||
var parsed rawJSONWebSignature
|
||||
err := json.Unmarshal([]byte(input), &parsed)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return parsed.sanitized()
|
||||
}
|
||||
|
||||
// sanitized produces a cleaned-up JWS object from the raw JSON.
|
||||
func (parsed *rawJSONWebSignature) sanitized() (*JSONWebSignature, error) {
|
||||
if parsed.Payload == nil {
|
||||
return nil, fmt.Errorf("square/go-jose: missing payload in JWS message")
|
||||
}
|
||||
|
||||
obj := &JSONWebSignature{
|
||||
payload: parsed.Payload.bytes(),
|
||||
Signatures: make([]Signature, len(parsed.Signatures)),
|
||||
}
|
||||
|
||||
if len(parsed.Signatures) == 0 {
|
||||
// No signatures array, must be flattened serialization
|
||||
signature := Signature{}
|
||||
if parsed.Protected != nil && len(parsed.Protected.bytes()) > 0 {
|
||||
signature.protected = &rawHeader{}
|
||||
err := json.Unmarshal(parsed.Protected.bytes(), signature.protected)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Check that there is not a nonce in the unprotected header
|
||||
if parsed.Header != nil && parsed.Header.getNonce() != "" {
|
||||
return nil, ErrUnprotectedNonce
|
||||
}
|
||||
|
||||
signature.header = parsed.Header
|
||||
signature.Signature = parsed.Signature.bytes()
|
||||
// Make a fake "original" rawSignatureInfo to store the unprocessed
|
||||
// Protected header. This is necessary because the Protected header can
|
||||
// contain arbitrary fields not registered as part of the spec. See
|
||||
// https://tools.ietf.org/html/draft-ietf-jose-json-web-signature-41#section-4
|
||||
// If we unmarshal Protected into a rawHeader with its explicit list of fields,
|
||||
// we cannot marshal losslessly. So we have to keep around the original bytes.
|
||||
// This is used in computeAuthData, which will first attempt to use
|
||||
// the original bytes of a protected header, and fall back on marshaling the
|
||||
// header struct only if those bytes are not available.
|
||||
signature.original = &rawSignatureInfo{
|
||||
Protected: parsed.Protected,
|
||||
Header: parsed.Header,
|
||||
Signature: parsed.Signature,
|
||||
}
|
||||
|
||||
var err error
|
||||
signature.Header, err = signature.mergedHeaders().sanitized()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if signature.header != nil {
|
||||
signature.Unprotected, err = signature.header.sanitized()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if signature.protected != nil {
|
||||
signature.Protected, err = signature.protected.sanitized()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// As per RFC 7515 Section 4.1.3, only public keys are allowed to be embedded.
|
||||
jwk := signature.Header.JSONWebKey
|
||||
if jwk != nil && (!jwk.Valid() || !jwk.IsPublic()) {
|
||||
return nil, errors.New("square/go-jose: invalid embedded jwk, must be public key")
|
||||
}
|
||||
|
||||
obj.Signatures = append(obj.Signatures, signature)
|
||||
}
|
||||
|
||||
for i, sig := range parsed.Signatures {
|
||||
if sig.Protected != nil && len(sig.Protected.bytes()) > 0 {
|
||||
obj.Signatures[i].protected = &rawHeader{}
|
||||
err := json.Unmarshal(sig.Protected.bytes(), obj.Signatures[i].protected)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Check that there is not a nonce in the unprotected header
|
||||
if sig.Header != nil && sig.Header.getNonce() != "" {
|
||||
return nil, ErrUnprotectedNonce
|
||||
}
|
||||
|
||||
var err error
|
||||
obj.Signatures[i].Header, err = obj.Signatures[i].mergedHeaders().sanitized()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if obj.Signatures[i].header != nil {
|
||||
obj.Signatures[i].Unprotected, err = obj.Signatures[i].header.sanitized()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if obj.Signatures[i].protected != nil {
|
||||
obj.Signatures[i].Protected, err = obj.Signatures[i].protected.sanitized()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
obj.Signatures[i].Signature = sig.Signature.bytes()
|
||||
|
||||
// As per RFC 7515 Section 4.1.3, only public keys are allowed to be embedded.
|
||||
jwk := obj.Signatures[i].Header.JSONWebKey
|
||||
if jwk != nil && (!jwk.Valid() || !jwk.IsPublic()) {
|
||||
return nil, errors.New("square/go-jose: invalid embedded jwk, must be public key")
|
||||
}
|
||||
|
||||
// Copy value of sig
|
||||
original := sig
|
||||
|
||||
obj.Signatures[i].header = sig.Header
|
||||
obj.Signatures[i].original = &original
|
||||
}
|
||||
|
||||
return obj, nil
|
||||
}
|
||||
|
||||
// parseSignedCompact parses a message in compact format.
|
||||
func parseSignedCompact(input string) (*JSONWebSignature, error) {
|
||||
parts := strings.Split(input, ".")
|
||||
if len(parts) != 3 {
|
||||
return nil, fmt.Errorf("square/go-jose: compact JWS format must have three parts")
|
||||
}
|
||||
|
||||
rawProtected, err := base64.RawURLEncoding.DecodeString(parts[0])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
payload, err := base64.RawURLEncoding.DecodeString(parts[1])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
signature, err := base64.RawURLEncoding.DecodeString(parts[2])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
raw := &rawJSONWebSignature{
|
||||
Payload: newBuffer(payload),
|
||||
Protected: newBuffer(rawProtected),
|
||||
Signature: newBuffer(signature),
|
||||
}
|
||||
return raw.sanitized()
|
||||
}
|
||||
|
||||
// CompactSerialize serializes an object using the compact serialization format.
|
||||
func (obj JSONWebSignature) CompactSerialize() (string, error) {
|
||||
if len(obj.Signatures) != 1 || obj.Signatures[0].header != nil || obj.Signatures[0].protected == nil {
|
||||
return "", ErrNotSupported
|
||||
}
|
||||
|
||||
serializedProtected := mustSerializeJSON(obj.Signatures[0].protected)
|
||||
|
||||
return fmt.Sprintf(
|
||||
"%s.%s.%s",
|
||||
base64.RawURLEncoding.EncodeToString(serializedProtected),
|
||||
base64.RawURLEncoding.EncodeToString(obj.payload),
|
||||
base64.RawURLEncoding.EncodeToString(obj.Signatures[0].Signature)), nil
|
||||
}
|
||||
|
||||
// FullSerialize serializes an object using the full JSON serialization format.
|
||||
func (obj JSONWebSignature) FullSerialize() string {
|
||||
raw := rawJSONWebSignature{
|
||||
Payload: newBuffer(obj.payload),
|
||||
}
|
||||
|
||||
if len(obj.Signatures) == 1 {
|
||||
if obj.Signatures[0].protected != nil {
|
||||
serializedProtected := mustSerializeJSON(obj.Signatures[0].protected)
|
||||
raw.Protected = newBuffer(serializedProtected)
|
||||
}
|
||||
raw.Header = obj.Signatures[0].header
|
||||
raw.Signature = newBuffer(obj.Signatures[0].Signature)
|
||||
} else {
|
||||
raw.Signatures = make([]rawSignatureInfo, len(obj.Signatures))
|
||||
for i, signature := range obj.Signatures {
|
||||
raw.Signatures[i] = rawSignatureInfo{
|
||||
Header: signature.header,
|
||||
Signature: newBuffer(signature.Signature),
|
||||
}
|
||||
|
||||
if signature.protected != nil {
|
||||
raw.Signatures[i].Protected = newBuffer(mustSerializeJSON(signature.protected))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return string(mustSerializeJSON(raw))
|
||||
}
|
616 vendor/gopkg.in/square/go-jose.v2/jws_test.go generated vendored Normal file
@@ -0,0 +1,616 @@
/*-
|
||||
* Copyright 2014 Square Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jose
|
||||
|
||||
import (
|
||||
"crypto/x509"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
const trustedCA = `
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIE6DCCAtCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDEwlUcnVz
|
||||
dGVkQ0EwHhcNMTgwMzI4MTg0MzA0WhcNMzgwMzI4MTg0MzA0WjAUMRIwEAYDVQQD
|
||||
EwlUcnVzdGVkQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCsHcd3
|
||||
uaKBilWQUe2epNf86xvq2HZV+JDULjJlKfUQAkpG+huHDEMiPEFPSlQK17bFj7gc
|
||||
qOx/INeeCU2nBVtZDtlm3U0jfQWO2F2kZgH1JWnEArrAWWy3BP/NYv7apBLcl7nD
|
||||
hkL4USVUnXF8mtuegiSMI2YT7TVchGzYMjrj/j+oRuDm1GF1OxoIMeUuVmqyJ6jK
|
||||
Kxv9YVmCB+e/QaUltkPGwxl2dKWdBwECXDgSr7hcZhT8ANmgFR1dJjLCy0Us12yw
|
||||
5eKUANDlfNP+z9urykoAwHXpBlmga1ze45aL+p+7K+8sl/PgMqKO7VdT5GBsOCzf
|
||||
xaBDG5Qy92Di34Sc27ZZz0mfaIy5kySnceBclMyWb8vdhEGkyHVsGpWc63JBmtg+
|
||||
bKeh876m7KVLfiykfpMqHUhq/ImQwiQTwX2RonFK5gP+XU0I9V+4rE0iqucbcvCS
|
||||
HuHzhf6B+TybhalRsvOZ6GB/SokF5YCmf8ylAq4be/HSxnJQcBhpSSQp0zz4ZKOD
|
||||
ikXuwf29yhWZ0lgIyaZpT9H4QecWNcyx4UcqO3wQAGjxadTG3gzjLu/OJwPkw+bK
|
||||
RvXWSBZjlQ9+JPmrHH+oKMgHshR4TQmtmXqXLaarrAe+HXCZEiBKFOqPgeo2RMxr
|
||||
LAO+MYIsVtEz39gISRhEvqcAls01sV1l7oGicQIDAQABo0UwQzAOBgNVHQ8BAf8E
|
||||
BAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBATAdBgNVHQ4EFgQUy9Nqk0mDRwC5tcmN
|
||||
xQ1YWO5MAhgwDQYJKoZIhvcNAQELBQADggIBAHbpsqY+tPSj8BSky+acZoZjF7fZ
|
||||
Ae3MKVogwm5tecmwTKf5xDj9J99ZpGvcWCKtoxxWw0LZ+JI/aeqANSRDXZIelcIw
|
||||
yZefw06z/coQJwAIy1RSoKJPV72mkG0Es9w2HxSEoLaZ9tql0TyV8D/QseUM8Yt/
|
||||
nNtShRoj6iMnZjhmut5pLfrLWHwQkt4fguBpL7rtydS/wAsOmnJ7lmOrU6zrBJzD
|
||||
vEER3AJtgdIt4GvKf4MupKLgKvYDB4sUQVmMyAS78B9+WZDDRTClsx+/Oc1ggkWz
|
||||
8X7EmIw+3U9V2hd67qZ81EwcSB8ixV06E7ZcbhnJs7ds7swqUjwMArFWuzqO4cjW
|
||||
2BnyVzCO9pymFLI7qol32xCEgaQlOVS/kFHP3meygfeaeYe902sJw6NevOA4e0AO
|
||||
AKR8FDfGRXJ9cOmYzeHeWKex8yt1Ul6+N8SXzjOhf39JM0QqTfHN7pPfFthTAFOs
|
||||
9rI/buJteJqR1WxgVk/jY4wLGEOcEyO6Y/Uj5iWWTvm5G/C1yZfSg+NvWoytxZ7P
|
||||
3S0qtEfmT4UwuHBsd5ZfEZoxb+GbqL/nhrKz/0B9LyKS0SJP9+mz7nSORz7t35Uc
|
||||
BhiG6T9W7P/NRW4Tqb2tEN1VwU6eP5SEf7c7C1VVaepk0fvc1p5dl67IERqPucPD
|
||||
dT2rDsCMBV7SXMUM
|
||||
-----END CERTIFICATE-----`
|
||||
|
||||
const intermediateCA = `
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEHTCCAgWgAwIBAgIQXzZsEQv0cvSRLJAkS9FmWTANBgkqhkiG9w0BAQsFADAU
|
||||
MRIwEAYDVQQDEwlUcnVzdGVkQ0EwHhcNMTgwMzI4MTg0MzMzWhcNMzgwMzI4MTg0
|
||||
MzAzWjAZMRcwFQYDVQQDEw5JbnRlcm1lZGlhdGVDQTCCASIwDQYJKoZIhvcNAQEB
|
||||
BQADggEPADCCAQoCggEBAN3aYpH/1yEYf/kHuHWyO3AO4tgwlYYLhCDT2GvaPdaE
|
||||
cqhe/VuYiqx3xY7IRDqsW2rau/OXgW6KzLHdRZHogK07hUj1Lfr7X+Oqbp22IV4y
|
||||
dyiL7jwK9AtVXvDuuv5ET+oRfV82j0uhyk0ueGD9r6C/h+6NTzHBD+3xo6Yuc0Vk
|
||||
BfY5zIyhaFqlm1aRYvupDRjC/63uBgAlrGxy2LyiTMVnYMuxoJM5ahDepz3sqjuN
|
||||
WVyPhfGwIezjXuXRdEvlmWX05XLnsTdP4zu4fHq9Z7c3TKWWONM3z64ECAZmGQVf
|
||||
MAcEDX7qP0gZX5PCT+0WcvTgTWE4Q+WIh5AmYyxQ04cCAwEAAaNmMGQwDgYDVR0P
|
||||
AQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMAYlq86RZzT
|
||||
WxLpYE7KTTM7DHOuMB8GA1UdIwQYMBaAFMvTapNJg0cAubXJjcUNWFjuTAIYMA0G
|
||||
CSqGSIb3DQEBCwUAA4ICAQBmYRpQoWEm5g16kwUrpwWrH7OIqqMtUhM1dcskECfk
|
||||
i3/hcsV+MQRkGHLIItucYIWqs7oOQIglsyGcohAbnvE1PVtKKojUHC0lfbjgIenD
|
||||
Pbvz15QB6A3KLDR82QbQGeGniACy924p66zlfPwHJbkMo5ZaqtNqI//EIa2YCpyy
|
||||
okhFXaSFmPWXXrTOCsEEsFJKsoSCH1KUpTcwACGkkilNseg1edZB6/lBDwybxVuY
|
||||
+dbUlHip3r5tFcP66Co3tKAaEcVY0AsZ/8GKwH+IM2AR6q7jdn9Gp2OX4E1ul9Wy
|
||||
+hW5GHMmfixkgTVwRowuKgkCPEKV2/Xy3k9rlSpnKr2NpYYq0mu6An9HYt8THQ+e
|
||||
wGZHwWufuDFDWuzlu7CxFOjpXLKv8qqVnwSFC91S3HsPAzPKLC9ZMEC+iQs2Vkes
|
||||
Os0nFLZeMaMGAO5W6xiyQ5p94oo0bqa1XbmSV1bNp1HWuNEGIiZKrEUDxfYuDc6f
|
||||
C6hJZKsjJkMkBeadlQAlLcjIx1rDV171CKLLTxy/dT5kv4p9UrJlnleyMVG6S/3d
|
||||
6nX/WLSgZIMYbOwiZVVPlSrobuG38ULJMCSuxndxD0l+HahJaH8vYXuR67A0XT+b
|
||||
TEe305AI6A/9MEaRrActBnq6/OviQgBsKAvtTv1FmDbnpZsKeoFuwc3OPdTveQdC
|
||||
RA==
|
||||
-----END CERTIFICATE-----`
|
||||
|
||||
func TestEmbeddedHMAC(t *testing.T) {
|
||||
// protected: {"alg":"HS256", "jwk":{"kty":"oct", "k":"MTEx"}}, aka HMAC key.
|
||||
msg := `{"payload":"TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ","protected":"eyJhbGciOiJIUzI1NiIsICJqd2siOnsia3R5Ijoib2N0IiwgImsiOiJNVEV4In19","signature":"lvo41ZZsuHwQvSh0uJtEXRR3vmuBJ7in6qMoD7p9jyo"}`
|
||||
|
||||
_, err := ParseSigned(msg)
|
||||
if err == nil {
|
||||
t.Error("should not allow parsing JWS with embedded JWK with HMAC key")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompactParseJWS(t *testing.T) {
|
||||
// Should parse
|
||||
msg := "eyJhbGciOiJYWVoifQ.cGF5bG9hZA.c2lnbmF0dXJl"
|
||||
_, err := ParseSigned(msg)
|
||||
if err != nil {
|
||||
t.Error("Unable to parse valid message:", err)
|
||||
}
|
||||
|
||||
// Should parse (detached signature missing payload)
|
||||
msg = "eyJhbGciOiJYWVoifQ..c2lnbmF0dXJl"
|
||||
_, err = ParseSigned(msg)
|
||||
if err != nil {
|
||||
t.Error("Unable to parse valid message:", err)
|
||||
}
|
||||
|
||||
// Messages that should fail to parse
|
||||
failures := []string{
|
||||
// Not enough parts
|
||||
"eyJhbGciOiJYWVoifQ.cGF5bG9hZA",
|
||||
// Invalid signature
|
||||
"eyJhbGciOiJYWVoifQ.cGF5bG9hZA.////",
|
||||
// Invalid payload
|
||||
"eyJhbGciOiJYWVoifQ.////.c2lnbmF0dXJl",
|
||||
// Invalid header
|
||||
"////.eyJhbGciOiJYWVoifQ.c2lnbmF0dXJl",
|
||||
// Invalid header
|
||||
"cGF5bG9hZA.cGF5bG9hZA.c2lnbmF0dXJl",
|
||||
}
|
||||
|
||||
for i := range failures {
|
||||
_, err = ParseSigned(failures[i])
|
||||
if err == nil {
|
||||
t.Error("Able to parse invalid message")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFullParseJWS(t *testing.T) {
|
||||
// Messages that should succeed to parse
|
||||
successes := []string{
|
||||
"{\"payload\":\"CUJD\",\"signatures\":[{\"protected\":\"e30\",\"header\":{\"kid\":\"XYZ\"},\"signature\":\"CUJD\"},{\"protected\":\"e30\",\"signature\":\"CUJD\"}]}",
|
||||
}
|
||||
|
||||
for i := range successes {
|
||||
_, err := ParseSigned(successes[i])
|
||||
if err != nil {
|
||||
t.Error("Unble to parse valid message", err, successes[i])
|
||||
}
|
||||
}
|
||||
|
||||
// Messages that should fail to parse
|
||||
failures := []string{
|
||||
// Empty
|
||||
"{}",
|
||||
// Invalid JSON
|
||||
"{XX",
|
||||
// Invalid protected header
|
||||
"{\"payload\":\"CUJD\",\"signatures\":[{\"protected\":\"CUJD\",\"header\":{\"kid\":\"XYZ\"},\"signature\":\"CUJD\"}]}",
|
||||
// Invalid protected header
|
||||
"{\"payload\":\"CUJD\",\"protected\":\"CUJD\",\"header\":{\"kid\":\"XYZ\"},\"signature\":\"CUJD\"}",
|
||||
// Invalid protected header
|
||||
"{\"payload\":\"CUJD\",\"signatures\":[{\"protected\":\"###\",\"header\":{\"kid\":\"XYZ\"},\"signature\":\"CUJD\"}]}",
|
||||
// Invalid payload
|
||||
"{\"payload\":\"###\",\"signatures\":[{\"protected\":\"CUJD\",\"header\":{\"kid\":\"XYZ\"},\"signature\":\"CUJD\"}]}",
|
||||
// Invalid signature
|
||||
"{\"payload\":\"CUJD\",\"signatures\":[{\"protected\":\"e30\",\"header\":{\"kid\":\"XYZ\"},\"signature\":\"###\"}]}",
|
||||
}
|
||||
|
||||
for i := range failures {
|
||||
_, err := ParseSigned(failures[i])
|
||||
if err == nil {
|
||||
t.Error("Able to parse invalid message", err, failures[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestRejectUnprotectedJWSNonce(t *testing.T) {
|
||||
// No need to test compact, since that's always protected
|
||||
|
||||
// Flattened JSON
|
||||
input := `{
|
||||
"header": { "nonce": "should-cause-an-error" },
|
||||
"payload": "does-not-matter",
|
||||
"signature": "does-not-matter"
|
||||
}`
|
||||
_, err := ParseSigned(input)
|
||||
if err == nil {
|
||||
t.Error("JWS with an unprotected nonce parsed as valid.")
|
||||
} else if err != ErrUnprotectedNonce {
|
||||
t.Errorf("Improper error for unprotected nonce: %v", err)
|
||||
}
|
||||
|
||||
// Full JSON
|
||||
input = `{
|
||||
"payload": "does-not-matter",
|
||||
"signatures": [{
|
||||
"header": { "nonce": "should-cause-an-error" },
|
||||
"signature": "does-not-matter"
|
||||
}]
|
||||
}`
|
||||
_, err = ParseSigned(input)
|
||||
if err == nil {
|
||||
t.Error("JWS with an unprotected nonce parsed as valid.")
|
||||
} else if err != ErrUnprotectedNonce {
|
||||
t.Errorf("Improper error for unprotected nonce: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestVerifyFlattenedWithIncludedUnprotectedKey(t *testing.T) {
|
||||
input := `{
|
||||
"header": {
|
||||
"alg": "RS256",
|
||||
"jwk": {
|
||||
"e": "AQAB",
|
||||
"kty": "RSA",
|
||||
"n": "tSwgy3ORGvc7YJI9B2qqkelZRUC6F1S5NwXFvM4w5-M0TsxbFsH5UH6adigV0jzsDJ5imAechcSoOhAh9POceCbPN1sTNwLpNbOLiQQ7RD5mY_pSUHWXNmS9R4NZ3t2fQAzPeW7jOfF0LKuJRGkekx6tXP1uSnNibgpJULNc4208dgBaCHo3mvaE2HV2GmVl1yxwWX5QZZkGQGjNDZYnjFfa2DKVvFs0QbAk21ROm594kAxlRlMMrvqlf24Eq4ERO0ptzpZgm_3j_e4hGRD39gJS7kAzK-j2cacFQ5Qi2Y6wZI2p-FCq_wiYsfEAIkATPBiLKl_6d_Jfcvs_impcXQ"
|
||||
}
|
||||
},
|
||||
"payload": "Zm9vCg",
|
||||
"signature": "hRt2eYqBd_MyMRNIh8PEIACoFtmBi7BHTLBaAhpSU6zyDAFdEBaX7us4VB9Vo1afOL03Q8iuoRA0AT4akdV_mQTAQ_jhTcVOAeXPr0tB8b8Q11UPQ0tXJYmU4spAW2SapJIvO50ntUaqU05kZd0qw8-noH1Lja-aNnU-tQII4iYVvlTiRJ5g8_CADsvJqOk6FcHuo2mG643TRnhkAxUtazvHyIHeXMxydMMSrpwUwzMtln4ZJYBNx4QGEq6OhpAD_VSp-w8Lq5HOwGQoNs0bPxH1SGrArt67LFQBfjlVr94E1sn26p4vigXm83nJdNhWAMHHE9iV67xN-r29LT-FjA"
|
||||
}`
|
||||
|
||||
jws, err := ParseSigned(input)
|
||||
if err != nil {
|
||||
t.Error("Unable to parse valid message.")
|
||||
}
|
||||
if len(jws.Signatures) != 1 {
|
||||
t.Error("Too many or too few signatures.")
|
||||
}
|
||||
sig := jws.Signatures[0]
|
||||
if sig.Header.JSONWebKey == nil {
|
||||
t.Error("No JWK in signature header.")
|
||||
}
|
||||
payload, err := jws.Verify(sig.Header.JSONWebKey)
|
||||
if err != nil {
|
||||
t.Errorf("Signature did not validate: %v", err)
|
||||
}
|
||||
if string(payload) != "foo\n" {
|
||||
t.Errorf("Payload was incorrect: '%s' should have been 'foo\\n'", string(payload))
|
||||
}
|
||||
}
|
||||
|
||||
// Test verification of a detached signature
|
||||
func TestDetachedVerifyJWS(t *testing.T) {
|
||||
rsaPublicKey, err := x509.ParsePKIXPublicKey(fromBase64Bytes(`
|
||||
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3aLSGwbeX0ZA2Ha+EvELaIFGzO
|
||||
91+Q15JQc/tdGdCgGW3XAbrh7ZUhDh1XKzbs+UOQxqn3Eq4YOx18IG0WsJSuCaHQIxnDlZ
|
||||
t/GP8WLwjMC0izlJLm2SyfM/EEoNpmTC3w6MQ2dHK7SZ9Zoq+sKijQd+V7CYdr8zHMpDrd
|
||||
NKoEcR0HjmvzzdMoUChhkGH5TaNbZyollULTggepaYUKS8QphqdSDMWiSetKG+g6V87lv6
|
||||
CVYyK1FF6g7Esp5OOj5pNn3/bmF+7V+b7TvK91NCIlURCjE9toRgNoIP4TDnWRn/vvfZ3G
|
||||
zNrtWmlizqz3r5KdvIs71ahWgMUSD4wfazrwIDAQAB`))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
sampleMessages := []string{
|
||||
"eyJhbGciOiJSUzI1NiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.YHX849fvekz6wJGeyqnQhFqyHFcUXNJKj3o2w3ddR46YLlsCopUJrlifRU_ZuTWzpYxt5oC--T2eoqMhlCvltSWrE5_1_EumqiMfAYsZULx9E6Jns7q3w7mttonYFSIh7aR3-yg2HMMfTCgoAY1y_AZ4VjXwHDcZ5gu1oZDYgvZF4uXtCmwT6e5YtR1m8abiWPF8BgoTG_BD3KV6ClLj_QQiNFdfdxAMDw7vKVOKG1T7BFtz6cDs2Q3ILS4To5E2IjcVSSYS8mi77EitCrWmrqbK_G3WCdKeUFGnMnyuKXaCDy_7FLpAZ6Z5RomRr5iskXeJZdZqIKcJV8zl4fpsPA",
|
||||
"eyJhbGciOiJSUzM4NCJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.meyfoOTjAAjXHFYiNlU7EEnsYtbeUYeEglK6BL_cxISEr2YAGLr1Gwnn2HnucTnH6YilyRio7ZC1ohy_ZojzmaljPHqpr8kn1iqNFu9nFE2M16ZPgJi38-PGzppcDNliyzOQO-c7L-eA-v8Gfww5uyRaOJdiWg-hUJmeGBIngPIeLtSVmhJtz8oTeqeNdUOqQv7f7VRCuvagLhW1PcEM91VUS-gS0WEUXoXWZ2lp91No0v1O24izgX3__FKiX_16XhrOfAgJ82F61vjbTIQYwhexHPZyYTlXYt_scNRzFGhSKeGFin4zVdFLOXWJqKWdUd5IrDP5Nya3FSoWbWDXAg",
|
||||
}
|
||||
|
||||
for _, msg := range sampleMessages {
|
||||
obj, err := ParseSigned(msg)
|
||||
if err != nil {
|
||||
t.Error("unable to parse message", msg, err)
|
||||
continue
|
||||
}
|
||||
payload := obj.payload
|
||||
obj.payload = nil
|
||||
err = obj.DetachedVerify(payload, rsaPublicKey)
|
||||
if err != nil {
|
||||
t.Error("unable to verify message", msg, err)
|
||||
continue
|
||||
}
|
||||
idx, _, err := obj.DetachedVerifyMulti(payload, rsaPublicKey)
|
||||
if idx != 0 || err != nil {
|
||||
t.Error("unable to verify message", msg, err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestVerifyFlattenedWithPrivateProtected(t *testing.T) {
|
||||
// The protected field contains a Private Header Parameter name, per
|
||||
// https://tools.ietf.org/html/draft-ietf-jose-json-web-signature-41#section-4
|
||||
// Base64-decoded, it's '{"nonce":"8HIepUNFZUa-exKTrXVf4g"}'
|
||||
input := `{"header":{"alg":"RS256","jwk":{"kty":"RSA","n":"7ixeydcbxxppzxrBphrW1atUiEZqTpiHDpI-79olav5XxAgWolHmVsJyxzoZXRxmtED8PF9-EICZWBGdSAL9ZTD0hLUCIsPcpdgT_LqNW3Sh2b2caPL2hbMF7vsXvnCGg9varpnHWuYTyRrCLUF9vM7ES-V3VCYTa7LcCSRm56Gg9r19qar43Z9kIKBBxpgt723v2cC4bmLmoAX2s217ou3uCpCXGLOeV_BesG4--Nl3pso1VhCfO85wEWjmW6lbv7Kg4d7Jdkv5DjDZfJ086fkEAYZVYGRpIgAvJBH3d3yKDCrSByUEud1bWuFjQBmMaeYOrVDXO_mbYg5PwUDMhw","e":"AQAB"}},"protected":"eyJub25jZSI6IjhISWVwVU5GWlVhLWV4S1RyWFZmNGcifQ","payload":"eyJjb250YWN0IjpbIm1haWx0bzpmb29AYmFyLmNvbSJdfQ","signature":"AyvVGMgXsQ1zTdXrZxE_gyO63pQgotL1KbI7gv6Wi8I7NRy0iAOkDAkWcTQT9pcCYApJ04lXfEDZfP5i0XgcFUm_6spxi5mFBZU-NemKcvK9dUiAbXvb4hB3GnaZtZiuVnMQUb_ku4DOaFFKbteA6gOYCnED_x7v0kAPHIYrQnvIa-KZ6pTajbV9348zgh9TL7NgGIIsTcMHd-Jatr4z1LQ0ubGa8tS300hoDhVzfoDQaEetYjCo1drR1RmdEN1SIzXdHOHfubjA3ZZRbrF_AJnNKpRRoIwzu1VayOhRmdy1qVSQZq_tENF4VrQFycEL7DhG7JLoXC4T2p1urwMlsw"}`
|
||||
|
||||
jws, err := ParseSigned(input)
|
||||
if err != nil {
|
||||
t.Error("Unable to parse valid message.")
|
||||
}
|
||||
if len(jws.Signatures) != 1 {
|
||||
t.Error("Too many or too few signatures.")
|
||||
}
|
||||
sig := jws.Signatures[0]
|
||||
if sig.Header.JSONWebKey == nil {
|
||||
t.Error("No JWK in signature header.")
|
||||
}
|
||||
payload, err := jws.Verify(sig.Header.JSONWebKey)
|
||||
if err != nil {
|
||||
t.Errorf("Signature did not validate: %v", err)
|
||||
}
|
||||
expected := "{\"contact\":[\"mailto:foo@bar.com\"]}"
|
||||
if string(payload) != expected {
|
||||
t.Errorf("Payload was incorrect: '%s' should have been '%s'", string(payload), expected)
|
||||
}
|
||||
}
|
||||
|
||||
// Test vectors generated with nimbus-jose-jwt
|
||||
func TestSampleNimbusJWSMessagesRSA(t *testing.T) {
|
||||
rsaPublicKey, err := x509.ParsePKIXPublicKey(fromBase64Bytes(`
|
||||
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3aLSGwbeX0ZA2Ha+EvELaIFGzO
|
||||
91+Q15JQc/tdGdCgGW3XAbrh7ZUhDh1XKzbs+UOQxqn3Eq4YOx18IG0WsJSuCaHQIxnDlZ
|
||||
t/GP8WLwjMC0izlJLm2SyfM/EEoNpmTC3w6MQ2dHK7SZ9Zoq+sKijQd+V7CYdr8zHMpDrd
|
||||
NKoEcR0HjmvzzdMoUChhkGH5TaNbZyollULTggepaYUKS8QphqdSDMWiSetKG+g6V87lv6
|
||||
CVYyK1FF6g7Esp5OOj5pNn3/bmF+7V+b7TvK91NCIlURCjE9toRgNoIP4TDnWRn/vvfZ3G
|
||||
zNrtWmlizqz3r5KdvIs71ahWgMUSD4wfazrwIDAQAB`))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
rsaSampleMessages := []string{
|
||||
"eyJhbGciOiJSUzI1NiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.YHX849fvekz6wJGeyqnQhFqyHFcUXNJKj3o2w3ddR46YLlsCopUJrlifRU_ZuTWzpYxt5oC--T2eoqMhlCvltSWrE5_1_EumqiMfAYsZULx9E6Jns7q3w7mttonYFSIh7aR3-yg2HMMfTCgoAY1y_AZ4VjXwHDcZ5gu1oZDYgvZF4uXtCmwT6e5YtR1m8abiWPF8BgoTG_BD3KV6ClLj_QQiNFdfdxAMDw7vKVOKG1T7BFtz6cDs2Q3ILS4To5E2IjcVSSYS8mi77EitCrWmrqbK_G3WCdKeUFGnMnyuKXaCDy_7FLpAZ6Z5RomRr5iskXeJZdZqIKcJV8zl4fpsPA",
|
||||
"eyJhbGciOiJSUzM4NCJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.meyfoOTjAAjXHFYiNlU7EEnsYtbeUYeEglK6BL_cxISEr2YAGLr1Gwnn2HnucTnH6YilyRio7ZC1ohy_ZojzmaljPHqpr8kn1iqNFu9nFE2M16ZPgJi38-PGzppcDNliyzOQO-c7L-eA-v8Gfww5uyRaOJdiWg-hUJmeGBIngPIeLtSVmhJtz8oTeqeNdUOqQv7f7VRCuvagLhW1PcEM91VUS-gS0WEUXoXWZ2lp91No0v1O24izgX3__FKiX_16XhrOfAgJ82F61vjbTIQYwhexHPZyYTlXYt_scNRzFGhSKeGFin4zVdFLOXWJqKWdUd5IrDP5Nya3FSoWbWDXAg",
|
||||
"eyJhbGciOiJSUzUxMiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.rQPz0PDh8KyE2AX6JorgI0MLwv-qi1tcWlz6tuZuWQG1hdrlzq5tR1tQg1evYNc_SDDX87DWTSKXT7JEqhKoFixLfZa13IJrOc7FB8r5ZLx7OwOBC4F--OWrvxMA9Y3MTJjPN3FemQePUo-na2vNUZv-YgkcbuOgbO3hTxwQ7j1JGuqy-YutXOFnccdXvntp3t8zYZ4Mg1It_IyL9pzgGqHIEmMV1pCFGHsDa-wStB4ffmdhrADdYZc0q_SvxUdobyC_XzZCz9ENzGIhgwYxyyrqg7kjqUGoKmCLmoSlUFW7goTk9IC5SXdUyLPuESxOWNfHoRClGav230GYjPFQFA",
|
||||
"eyJhbGciOiJQUzI1NiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.UTtxjsv_6x4CdlAmZfAW6Lun3byMjJbcwRp_OlPH2W4MZaZar7aql052mIB_ddK45O9VUz2aphYVRvKPZY8WHmvlTUU30bk0z_cDJRYB9eIJVMOiRCYj0oNkz1iEZqsP0YgngxwuUDv4Q4A6aJ0Bo5E_rZo3AnrVHMHUjPp_ZRRSBFs30tQma1qQ0ApK4Gxk0XYCYAcxIv99e78vldVRaGzjEZmQeAVZx4tGcqZP20vG1L84nlhSGnOuZ0FhR8UjRFLXuob6M7EqtMRoqPgRYw47EI3fYBdeSivAg98E5S8R7R1NJc7ef-l03RvfUSY0S3_zBq_4PlHK6A-2kHb__w",
|
||||
"eyJhbGciOiJSUzM4NCJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.meyfoOTjAAjXHFYiNlU7EEnsYtbeUYeEglK6BL_cxISEr2YAGLr1Gwnn2HnucTnH6YilyRio7ZC1ohy_ZojzmaljPHqpr8kn1iqNFu9nFE2M16ZPgJi38-PGzppcDNliyzOQO-c7L-eA-v8Gfww5uyRaOJdiWg-hUJmeGBIngPIeLtSVmhJtz8oTeqeNdUOqQv7f7VRCuvagLhW1PcEM91VUS-gS0WEUXoXWZ2lp91No0v1O24izgX3__FKiX_16XhrOfAgJ82F61vjbTIQYwhexHPZyYTlXYt_scNRzFGhSKeGFin4zVdFLOXWJqKWdUd5IrDP5Nya3FSoWbWDXAg",
|
||||
"eyJhbGciOiJSUzUxMiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.rQPz0PDh8KyE2AX6JorgI0MLwv-qi1tcWlz6tuZuWQG1hdrlzq5tR1tQg1evYNc_SDDX87DWTSKXT7JEqhKoFixLfZa13IJrOc7FB8r5ZLx7OwOBC4F--OWrvxMA9Y3MTJjPN3FemQePUo-na2vNUZv-YgkcbuOgbO3hTxwQ7j1JGuqy-YutXOFnccdXvntp3t8zYZ4Mg1It_IyL9pzgGqHIEmMV1pCFGHsDa-wStB4ffmdhrADdYZc0q_SvxUdobyC_XzZCz9ENzGIhgwYxyyrqg7kjqUGoKmCLmoSlUFW7goTk9IC5SXdUyLPuESxOWNfHoRClGav230GYjPFQFA",
|
||||
}
|
||||
|
||||
for _, msg := range rsaSampleMessages {
|
||||
obj, err := ParseSigned(msg)
|
||||
if err != nil {
|
||||
t.Error("unable to parse message", msg, err)
|
||||
continue
|
||||
}
|
||||
payload, err := obj.Verify(rsaPublicKey)
|
||||
if err != nil {
|
||||
t.Error("unable to verify message", msg, err)
|
||||
continue
|
||||
}
|
||||
if string(payload) != "Lorem ipsum dolor sit amet" {
|
||||
t.Error("payload is not what we expected for msg", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test vectors generated with nimbus-jose-jwt
|
||||
func TestSampleNimbusJWSMessagesEC(t *testing.T) {
|
||||
ecPublicKeyP256, err := x509.ParsePKIXPublicKey(fromBase64Bytes("MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEIg62jq6FyL1otEj9Up7S35BUrwGF9TVrAzrrY1rHUKZqYIGEg67u/imjgadVcr7y9Q32I0gB8W8FHqbqt696rA=="))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
ecPublicKeyP384, err := x509.ParsePKIXPublicKey(fromBase64Bytes("MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEPXsVlqCtN2oTY+F+hFZm3M0ldYpb7IeeJM5wYmT0k1RaqzBFDhDMNnYK5Q5x+OyssZrAtHgYDFw02AVJhhng/eHRp7mqmL/vI3wbxJtrLKYldIbBA+9fYBQcKeibjlu5"))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
ecPublicKeyP521, err := x509.ParsePKIXPublicKey(fromBase64Bytes("MIGbMBAGByqGSM49AgEGBSuBBAAjA4GGAAQAa2w3MMJ5FWD6tSf68G+Wy5jIhWXOD3IA7pE5IC/myQzo1lWcD8KS57SM6nm4POtPcxyLmDhL7FLuh8DKoIZyvtAAdK8+tOQP7XXRlT2bkvzIuazp05It3TAPu00YzTIpKfDlc19Y1lvf7etrbFqhShD92B+hHmhT4ddrdbPCBDW8hvU="))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
ecPublicKeys := []interface{}{ecPublicKeyP256, ecPublicKeyP384, ecPublicKeyP521}
|
||||
|
||||
ecSampleMessages := []string{
|
||||
"eyJhbGciOiJFUzI1NiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.MEWJVlvGRQyzMEGOYm4rwuiwxrX-6LjnlbaRDAuhwmnBm2Gtn7pRpGXRTMFZUXsSGDz2L1p-Hz1qn8j9bFIBtQ",
|
||||
"eyJhbGciOiJFUzM4NCJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.nbdjPnJPYQtVNNdBIx8-KbFKplTxrz-hnW5UNhYUY7SBkwHK4NZnqc2Lv4DXoA0aWHq9eiypgOh1kmyPWGEmqKAHUx0xdIEkBoHk3ZsbmhOQuq2jL_wcMUG6nTWNhLrB",
|
||||
"eyJhbGciOiJFUzUxMiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.AeYNFC1rwIgQv-5fwd8iRyYzvTaSCYTEICepgu9gRId-IW99kbSVY7yH0MvrQnqI-a0L8zwKWDR35fW5dukPAYRkADp3Y1lzqdShFcEFziUVGo46vqbiSajmKFrjBktJcCsfjKSaLHwxErF-T10YYPCQFHWb2nXJOOI3CZfACYqgO84g",
|
||||
}
|
||||
|
||||
for i, msg := range ecSampleMessages {
|
||||
obj, err := ParseSigned(msg)
|
||||
if err != nil {
|
||||
t.Error("unable to parse message", msg, err)
|
||||
continue
|
||||
}
|
||||
payload, err := obj.Verify(ecPublicKeys[i])
|
||||
if err != nil {
|
||||
t.Error("unable to verify message", msg, err)
|
||||
continue
|
||||
}
|
||||
if string(payload) != "Lorem ipsum dolor sit amet" {
|
||||
t.Error("payload is not what we expected for msg", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test vectors generated with nimbus-jose-jwt
|
||||
func TestSampleNimbusJWSMessagesHMAC(t *testing.T) {
|
||||
hmacTestKey := fromHexBytes("DF1FA4F36FFA7FC42C81D4B3C033928D")
|
||||
|
||||
hmacSampleMessages := []string{
|
||||
"eyJhbGciOiJIUzI1NiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.W5tc_EUhxexcvLYEEOckyyvdb__M5DQIVpg6Nmk1XGM",
|
||||
"eyJhbGciOiJIUzM4NCJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.sBu44lXOJa4Nd10oqOdYH2uz3lxlZ6o32QSGHaoGdPtYTDG5zvSja6N48CXKqdAh",
|
||||
"eyJhbGciOiJIUzUxMiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.M0yR4tmipsORIix-BitIbxEPGaxPchDfj8UNOpKuhDEfnb7URjGvCKn4nOlyQ1z9mG1FKbwnqR1hOVAWSzAU_w",
|
||||
}
|
||||
|
||||
for _, msg := range hmacSampleMessages {
|
||||
obj, err := ParseSigned(msg)
|
||||
if err != nil {
|
||||
t.Error("unable to parse message", msg, err)
|
||||
continue
|
||||
}
|
||||
payload, err := obj.Verify(hmacTestKey)
|
||||
if err != nil {
|
||||
t.Error("unable to verify message", msg, err)
|
||||
continue
|
||||
}
|
||||
if string(payload) != "Lorem ipsum dolor sit amet" {
|
||||
t.Error("payload is not what we expected for msg", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestHeaderFieldsCompact(t *testing.T) {
|
||||
msg := "eyJhbGciOiJFUzUxMiJ9.TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ.AeYNFC1rwIgQv-5fwd8iRyYzvTaSCYTEICepgu9gRId-IW99kbSVY7yH0MvrQnqI-a0L8zwKWDR35fW5dukPAYRkADp3Y1lzqdShFcEFziUVGo46vqbiSajmKFrjBktJcCsfjKSaLHwxErF-T10YYPCQFHWb2nXJOOI3CZfACYqgO84g"
|
||||
|
||||
obj, err := ParseSigned(msg)
|
||||
if err != nil {
|
||||
t.Fatal("unable to parse message", msg, err)
|
||||
}
|
||||
if obj.Signatures[0].Header.Algorithm != "ES512" {
|
||||
t.Error("merged header did not contain expected alg value")
|
||||
}
|
||||
if obj.Signatures[0].Protected.Algorithm != "ES512" {
|
||||
t.Error("protected header did not contain expected alg value")
|
||||
}
|
||||
if obj.Signatures[0].Unprotected.Algorithm != "" {
|
||||
t.Error("unprotected header contained an alg value")
|
||||
}
|
||||
}
|
||||
|
||||
func TestHeaderFieldsFull(t *testing.T) {
|
||||
msg := `{"payload":"TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQ","protected":"eyJhbGciOiJFUzUxMiJ9","header":{"custom":"test"},"signature":"AeYNFC1rwIgQv-5fwd8iRyYzvTaSCYTEICepgu9gRId-IW99kbSVY7yH0MvrQnqI-a0L8zwKWDR35fW5dukPAYRkADp3Y1lzqdShFcEFziUVGo46vqbiSajmKFrjBktJcCsfjKSaLHwxErF-T10YYPCQFHWb2nXJOOI3CZfACYqgO84g"}`
|
||||
|
||||
obj, err := ParseSigned(msg)
|
||||
if err != nil {
|
||||
t.Fatal("unable to parse message", msg, err)
|
||||
}
|
||||
if obj.Signatures[0].Header.Algorithm != "ES512" {
|
||||
t.Error("merged header did not contain expected alg value")
|
||||
}
|
||||
if obj.Signatures[0].Protected.Algorithm != "ES512" {
|
||||
t.Error("protected header did not contain expected alg value")
|
||||
}
|
||||
if obj.Signatures[0].Unprotected.Algorithm != "" {
|
||||
t.Error("unprotected header contained an alg value")
|
||||
}
|
||||
if obj.Signatures[0].Unprotected.ExtraHeaders["custom"] != "test" {
|
||||
t.Error("unprotected header did not contain custom header value")
|
||||
}
|
||||
}
|
||||
|
||||
// Test that a JWS object with a missing payload is rejected during parsing
|
||||
func TestErrorMissingPayloadJWS(t *testing.T) {
|
||||
_, err := (&rawJSONWebSignature{}).sanitized()
|
||||
if err == nil {
|
||||
t.Error("was able to parse message with missing payload")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "missing payload") {
|
||||
t.Errorf("unexpected error message, should contain 'missing payload': %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Test that a null value in the header doesn't panic
|
||||
func TestNullHeaderValue(t *testing.T) {
|
||||
msg := `{
|
||||
"payload":
|
||||
"eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGF
|
||||
tcGxlLmNvbS9pc19yb290Ijp0cnVlfQ",
|
||||
"protected":"eyJhbGciOiJFUzI1NiIsIm5vbmNlIjpudWxsfQ",
|
||||
"header":
|
||||
{"kid":"e9bc097a-ce51-4036-9562-d2ade882db0d"},
|
||||
"signature":
|
||||
"DtEhU3ljbEg8L38VWAfUAqOyKAM6-Xx-F4GawxaepmXFCgfTjDxw5djxLa8IS
|
||||
lSApmWQxfKTUJqPP3-Kg6NU1Q"
|
||||
}`
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
t.Errorf("ParseSigned panic'd when parsing a message with a null protected header value")
|
||||
}
|
||||
}()
|
||||
ParseSigned(msg)
|
||||
}
|
||||
|
||||
// Test for bug:
|
||||
// https://github.com/square/go-jose/issues/157
|
||||
func TestEmbedJWKBug(t *testing.T) {
|
||||
signerKey := SigningKey{
|
||||
Key: &JSONWebKey{
|
||||
Key: rsaTestKey,
|
||||
KeyID: "rsa-test-key",
|
||||
},
|
||||
Algorithm: RS256,
|
||||
}
|
||||
|
||||
signer, err := NewSigner(signerKey, &SignerOptions{EmbedJWK: true})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
signerNoEmbed, err := NewSigner(signerKey, &SignerOptions{EmbedJWK: false})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
jws, err := signer.Sign([]byte("Lorem ipsum dolor sit amet"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
jwsNoEmbed, err := signerNoEmbed.Sign([]byte("Lorem ipsum dolor sit amet"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// This used to panic with:
|
||||
// json: error calling MarshalJSON for type *jose.JSONWebKey: square/go-jose: unknown key type '%!s(<nil>)'
|
||||
output := jws.FullSerialize()
|
||||
outputNoEmbed := jwsNoEmbed.FullSerialize()
|
||||
|
||||
// Expected output with embed set to true is a JWS with the public JWK embedded, with kid header empty.
|
||||
// Expected output with embed set to false is that we set the kid header for key identification instead.
|
||||
parsed, err := ParseSigned(output)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
parsedNoEmbed, err := ParseSigned(outputNoEmbed)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if parsed.Signatures[0].Header.KeyID != "" {
|
||||
t.Error("expected kid field in protected header to be empty")
|
||||
}
|
||||
if parsed.Signatures[0].Header.JSONWebKey.KeyID != "rsa-test-key" {
|
||||
t.Error("expected rsa-test-key to be kid in embedded JWK in protected header")
|
||||
}
|
||||
if parsedNoEmbed.Signatures[0].Header.KeyID != "rsa-test-key" {
|
||||
t.Error("expected kid field in protected header to be rsa-test-key")
|
||||
}
|
||||
if parsedNoEmbed.Signatures[0].Header.JSONWebKey != nil {
|
||||
t.Error("expected no embedded JWK to be present")
|
||||
}
|
||||
}
|
||||
|
||||
func TestJWSWithCertificateChain(t *testing.T) {
|
||||
signerKey := SigningKey{
|
||||
Key: rsaTestKey,
|
||||
Algorithm: RS256,
|
||||
}
|
||||
|
||||
certs := []string{
|
||||
// CN=TrustedSigner, signed by IntermediateCA
|
||||
"MIIDLDCCAhSgAwIBAgIQNsV1i7m3kXGugqOQuuC7FzANBgkqhkiG9w0BAQsFADAZMRcwFQYDVQQDEw5JbnRlcm1lZGlhdGVDQTAeFw0xODAzMjgxODQzNDlaFw0zODAzMjgxODQzMDJaMBgxFjAUBgNVBAMTDVRydXN0ZWRTaWduZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDLpvmOEDRxzQJUKHLkLQSsFDo9eGnolSERa6fz1E1F4wmk6nieHqssPd28C6Vb1sHJFne/j93DXNrx7W9Gy9fQvWa+VNHfGuYAodaS2pyV4VUPWMXI2a+qjxW85orq34XtcHzU+qm+ekR5W06ypW+xewbXJW//P9ulrsv3bDoDFaiggHY/u3p5CRSB9mg+Pbpf6E/k/N85sFJUsRE9hzgwg27Kqhh6p3hP3QnA+0WZRcWhwG0gykoD6layRLCPVcxlTSUdpyStDiK8w2whLJQfixCBGLS3/tB/GKb726bxTQK72OLzIMtOo4ZMtTva7bcA2PRgwfRz7bJg4DXz7oHTAgMBAAGjcTBvMA4GA1UdDwEB/wQEAwIDuDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwHQYDVR0OBBYEFCpZEyJGAyK//NsYSSC4xkOqNnh3MB8GA1UdIwQYMBaAFMAYlq86RZzTWxLpYE7KTTM7DHOuMA0GCSqGSIb3DQEBCwUAA4IBAQBSIln6jPFkctPC17le0O+wkCStFOuqUM9cjwPuj4xBQ47RxmC0Pjv52N3TuVH7slmMykITQO/vVqQZguf+N5u4BCh223qWiu1muYBTfBPXCPgJjJ79bUL/dy9QEocOfPiIqTFC6xHKeSUCu6qi5jCPFynOaoVvlNPZEb2MR+QrkKVzg09aDEfk6J+wE6eH9+kNOtwvd/z2a2t2hterURtJEnYt7AQGviEpUf1gbHxCE9f3FW5iJGdgcshrk5ZwUfxvND2x4qFq2fYQRxNBnkO+TSYzwYoAItcGAUvlZFH+rdsq3N+UpRptXRkj5iMq59VlcXFOT675EkkNREgromWn",
|
||||
// CN=IntermediateCA, signed by TrustedCA
|
||||
"MIIEHTCCAgWgAwIBAgIQXzZsEQv0cvSRLJAkS9FmWTANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDEwlUcnVzdGVkQ0EwHhcNMTgwMzI4MTg0MzMzWhcNMzgwMzI4MTg0MzAzWjAZMRcwFQYDVQQDEw5JbnRlcm1lZGlhdGVDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN3aYpH/1yEYf/kHuHWyO3AO4tgwlYYLhCDT2GvaPdaEcqhe/VuYiqx3xY7IRDqsW2rau/OXgW6KzLHdRZHogK07hUj1Lfr7X+Oqbp22IV4ydyiL7jwK9AtVXvDuuv5ET+oRfV82j0uhyk0ueGD9r6C/h+6NTzHBD+3xo6Yuc0VkBfY5zIyhaFqlm1aRYvupDRjC/63uBgAlrGxy2LyiTMVnYMuxoJM5ahDepz3sqjuNWVyPhfGwIezjXuXRdEvlmWX05XLnsTdP4zu4fHq9Z7c3TKWWONM3z64ECAZmGQVfMAcEDX7qP0gZX5PCT+0WcvTgTWE4Q+WIh5AmYyxQ04cCAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMAYlq86RZzTWxLpYE7KTTM7DHOuMB8GA1UdIwQYMBaAFMvTapNJg0cAubXJjcUNWFjuTAIYMA0GCSqGSIb3DQEBCwUAA4ICAQBmYRpQoWEm5g16kwUrpwWrH7OIqqMtUhM1dcskECfki3/hcsV+MQRkGHLIItucYIWqs7oOQIglsyGcohAbnvE1PVtKKojUHC0lfbjgIenDPbvz15QB6A3KLDR82QbQGeGniACy924p66zlfPwHJbkMo5ZaqtNqI//EIa2YCpyyokhFXaSFmPWXXrTOCsEEsFJKsoSCH1KUpTcwACGkkilNseg1edZB6/lBDwybxVuY+dbUlHip3r5tFcP66Co3tKAaEcVY0AsZ/8GKwH+IM2AR6q7jdn9Gp2OX4E1ul9Wy+hW5GHMmfixkgTVwRowuKgkCPEKV2/Xy3k9rlSpnKr2NpYYq0mu6An9HYt8THQ+ewGZHwWufuDFDWuzlu7CxFOjpXLKv8qqVnwSFC91S3HsPAzPKLC9ZMEC+iQs2VkesOs0nFLZeMaMGAO5W6xiyQ5p94oo0bqa1XbmSV1bNp1HWuNEGIiZKrEUDxfYuDc6fC6hJZKsjJkMkBeadlQAlLcjIx1rDV171CKLLTxy/dT5kv4p9UrJlnleyMVG6S/3d6nX/WLSgZIMYbOwiZVVPlSrobuG38ULJMCSuxndxD0l+HahJaH8vYXuR67A0XT+bTEe305AI6A/9MEaRrActBnq6/OviQgBsKAvtTv1FmDbnpZsKeoFuwc3OPdTveQdCRA==",
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
// Cert chain to embed in message
|
||||
chain []string
|
||||
// Intermediates & root certificate to verify against
|
||||
intermediates []string
|
||||
root string
|
||||
// Should this test case verify?
|
||||
success bool
|
||||
}{
|
||||
{certs, nil, trustedCA, true},
|
||||
{certs, []string{intermediateCA}, trustedCA, true},
|
||||
{certs[0:1], nil, intermediateCA, true},
|
||||
{certs[0:1], nil, trustedCA, false},
|
||||
{[]string{}, nil, trustedCA, false},
|
||||
}
|
||||
|
||||
for i, testCase := range testCases {
|
||||
signer, err := NewSigner(signerKey, &SignerOptions{
|
||||
ExtraHeaders: map[HeaderKey]interface{}{HeaderKey("x5c"): testCase.chain},
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
signed, err := signer.Sign([]byte("Lorem ipsum dolor sit amet"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
parsed, err := ParseSigned(signed.FullSerialize())
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
opts := x509.VerifyOptions{
|
||||
DNSName: "TrustedSigner",
|
||||
Roots: x509.NewCertPool(),
|
||||
}
|
||||
|
||||
ok := opts.Roots.AppendCertsFromPEM([]byte(testCase.root))
|
||||
if !ok {
|
||||
t.Fatal("failed to parse trusted root certificate")
|
||||
}
|
||||
|
||||
if len(testCase.intermediates) > 0 {
|
||||
opts.Intermediates = x509.NewCertPool()
|
||||
for _, intermediate := range testCase.intermediates {
|
||||
ok := opts.Intermediates.AppendCertsFromPEM([]byte(intermediate))
|
||||
if !ok {
|
||||
t.Fatal("failed to parse trusted root certificate")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
chains, err := parsed.Signatures[0].Protected.Certificates(opts)
|
||||
if testCase.success && (len(chains) == 0 || err != nil) {
|
||||
t.Fatalf("failed to verify certificate chain for test case %d: %s", i, err)
|
||||
}
|
||||
if !testCase.success && (len(chains) != 0 && err == nil) {
|
||||
t.Fatalf("incorrectly verified certificate chain for test case %d (should fail)", i)
|
||||
}
|
||||
}
|
||||
}
|
334 vendor/gopkg.in/square/go-jose.v2/jwt/builder.go generated vendored Normal file
@@ -0,0 +1,334 @@
/*-
|
||||
* Copyright 2016 Zbigniew Mandziejewicz
|
||||
* Copyright 2016 Square, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package jwt
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"reflect"
|
||||
|
||||
"gopkg.in/square/go-jose.v2/json"
|
||||
|
||||
"gopkg.in/square/go-jose.v2"
|
||||
)
|
||||
|
||||
// Builder is a utility for making JSON Web Tokens. Calls can be chained, and
|
||||
// errors are accumulated until the final call to CompactSerialize/FullSerialize.
|
||||
type Builder interface {
|
||||
// Claims encodes claims into JWE/JWS form. Multiple calls will merge claims
|
||||
// into single JSON object. If you are passing private claims, make sure to set
|
||||
// struct field tags to specify the name for the JSON key to be used when
|
||||
// serializing.
|
||||
Claims(i interface{}) Builder
|
||||
// Token builds a JSONWebToken from provided data.
|
||||
Token() (*JSONWebToken, error)
|
||||
// FullSerialize serializes a token using the full serialization format.
|
||||
FullSerialize() (string, error)
|
||||
// CompactSerialize serializes a token using the compact serialization format.
|
||||
CompactSerialize() (string, error)
|
||||
}
|
||||
|
||||
// NestedBuilder is a utility for making Signed-Then-Encrypted JSON Web Tokens.
|
||||
// Calls can be chained, and errors are accumulated until final call to
|
||||
// CompactSerialize/FullSerialize.
|
||||
type NestedBuilder interface {
|
||||
// Claims encodes claims into JWE/JWS form. Multiple calls will merge claims
|
||||
// into single JSON object. If you are passing private claims, make sure to set
|
||||
// struct field tags to specify the name for the JSON key to be used when
|
||||
// serializing.
|
||||
Claims(i interface{}) NestedBuilder
|
||||
// Token builds a NestedJSONWebToken from provided data.
|
||||
Token() (*NestedJSONWebToken, error)
|
||||
// FullSerialize serializes a token using the full serialization format.
|
||||
FullSerialize() (string, error)
|
||||
// CompactSerialize serializes a token using the compact serialization format.
|
||||
CompactSerialize() (string, error)
|
||||
}
|
||||
|
||||
type builder struct {
|
||||
payload map[string]interface{}
|
||||
err error
|
||||
}
|
||||
|
||||
type signedBuilder struct {
|
||||
builder
|
||||
sig jose.Signer
|
||||
}
|
||||
|
||||
type encryptedBuilder struct {
|
||||
builder
|
||||
enc jose.Encrypter
|
||||
}
|
||||
|
||||
type nestedBuilder struct {
|
||||
builder
|
||||
sig jose.Signer
|
||||
enc jose.Encrypter
|
||||
}
|
||||
|
||||
// Signed creates builder for signed tokens.
|
||||
func Signed(sig jose.Signer) Builder {
|
||||
return &signedBuilder{
|
||||
sig: sig,
|
||||
}
|
||||
}
|
||||
|
||||
// Encrypted creates builder for encrypted tokens.
|
||||
func Encrypted(enc jose.Encrypter) Builder {
|
||||
return &encryptedBuilder{
|
||||
enc: enc,
|
||||
}
|
||||
}
|
||||
|
||||
// SignedAndEncrypted creates builder for signed-then-encrypted tokens.
|
||||
// ErrInvalidContentType will be returned if encrypter doesn't have JWT content type.
|
||||
func SignedAndEncrypted(sig jose.Signer, enc jose.Encrypter) NestedBuilder {
|
||||
if contentType, _ := enc.Options().ExtraHeaders[jose.HeaderContentType].(jose.ContentType); contentType != "JWT" {
|
||||
return &nestedBuilder{
|
||||
builder: builder{
|
||||
err: ErrInvalidContentType,
|
||||
},
|
||||
}
|
||||
}
|
||||
return &nestedBuilder{
|
||||
sig: sig,
|
||||
enc: enc,
|
||||
}
|
||||
}
|
||||
|
||||
func (b builder) claims(i interface{}) builder {
|
||||
if b.err != nil {
|
||||
return b
|
||||
}
|
||||
|
||||
m, ok := i.(map[string]interface{})
|
||||
switch {
|
||||
case ok:
|
||||
return b.merge(m)
|
||||
case reflect.Indirect(reflect.ValueOf(i)).Kind() == reflect.Struct:
|
||||
m, err := normalize(i)
|
||||
if err != nil {
|
||||
return builder{
|
||||
err: err,
|
||||
}
|
||||
}
|
||||
return b.merge(m)
|
||||
default:
|
||||
return builder{
|
||||
err: ErrInvalidClaims,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func normalize(i interface{}) (map[string]interface{}, error) {
|
||||
m := make(map[string]interface{})
|
||||
|
||||
raw, err := json.Marshal(i)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
d := json.NewDecoder(bytes.NewReader(raw))
|
||||
d.UseNumber()
|
||||
|
||||
if err := d.Decode(&m); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return m, nil
|
||||
}
|
||||
|
||||
func (b *builder) merge(m map[string]interface{}) builder {
|
||||
p := make(map[string]interface{})
|
||||
for k, v := range b.payload {
|
||||
p[k] = v
|
||||
}
|
||||
for k, v := range m {
|
||||
p[k] = v
|
||||
}
|
||||
|
||||
return builder{
|
||||
payload: p,
|
||||
}
|
||||
}
|
||||
|
||||
func (b *builder) token(p func(interface{}) ([]byte, error), h []jose.Header) (*JSONWebToken, error) {
|
||||
return &JSONWebToken{
|
||||
payload: p,
|
||||
Headers: h,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (b *signedBuilder) Claims(i interface{}) Builder {
|
||||
return &signedBuilder{
|
||||
builder: b.builder.claims(i),
|
||||
sig: b.sig,
|
||||
}
|
||||
}
|
||||
|
||||
func (b *signedBuilder) Token() (*JSONWebToken, error) {
|
||||
sig, err := b.sign()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
h := make([]jose.Header, len(sig.Signatures))
|
||||
for i, v := range sig.Signatures {
|
||||
h[i] = v.Header
|
||||
}
|
||||
|
||||
return b.builder.token(sig.Verify, h)
|
||||
}
|
||||
|
||||
func (b *signedBuilder) CompactSerialize() (string, error) {
|
||||
sig, err := b.sign()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return sig.CompactSerialize()
|
||||
}
|
||||
|
||||
func (b *signedBuilder) FullSerialize() (string, error) {
|
||||
sig, err := b.sign()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return sig.FullSerialize(), nil
|
||||
}
|
||||
|
||||
func (b *signedBuilder) sign() (*jose.JSONWebSignature, error) {
|
||||
if b.err != nil {
|
||||
return nil, b.err
|
||||
}
|
||||
|
||||
p, err := json.Marshal(b.payload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return b.sig.Sign(p)
|
||||
}
|
||||
|
||||
func (b *encryptedBuilder) Claims(i interface{}) Builder {
|
||||
return &encryptedBuilder{
|
||||
builder: b.builder.claims(i),
|
||||
enc: b.enc,
|
||||
}
|
||||
}
|
||||
|
||||
func (b *encryptedBuilder) CompactSerialize() (string, error) {
|
||||
enc, err := b.encrypt()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return enc.CompactSerialize()
|
||||
}
|
||||
|
||||
func (b *encryptedBuilder) FullSerialize() (string, error) {
|
||||
enc, err := b.encrypt()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return enc.FullSerialize(), nil
|
||||
}
|
||||
|
||||
func (b *encryptedBuilder) Token() (*JSONWebToken, error) {
|
||||
enc, err := b.encrypt()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return b.builder.token(enc.Decrypt, []jose.Header{enc.Header})
|
||||
}
|
||||
|
||||
func (b *encryptedBuilder) encrypt() (*jose.JSONWebEncryption, error) {
|
||||
if b.err != nil {
|
||||
return nil, b.err
|
||||
}
|
||||
|
||||
p, err := json.Marshal(b.payload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return b.enc.Encrypt(p)
|
||||
}
|
||||
|
||||
func (b *nestedBuilder) Claims(i interface{}) NestedBuilder {
|
||||
return &nestedBuilder{
|
||||
builder: b.builder.claims(i),
|
||||
sig: b.sig,
|
||||
enc: b.enc,
|
||||
}
|
||||
}
|
||||
|
||||
func (b *nestedBuilder) Token() (*NestedJSONWebToken, error) {
|
||||
enc, err := b.signAndEncrypt()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &NestedJSONWebToken{
|
||||
enc: enc,
|
||||
Headers: []jose.Header{enc.Header},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (b *nestedBuilder) CompactSerialize() (string, error) {
|
||||
enc, err := b.signAndEncrypt()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return enc.CompactSerialize()
|
||||
}
|
||||
|
||||
func (b *nestedBuilder) FullSerialize() (string, error) {
|
||||
enc, err := b.signAndEncrypt()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return enc.FullSerialize(), nil
|
||||
}
|
||||
|
||||
func (b *nestedBuilder) signAndEncrypt() (*jose.JSONWebEncryption, error) {
|
||||
if b.err != nil {
|
||||
return nil, b.err
|
||||
}
|
||||
|
||||
p, err := json.Marshal(b.payload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sig, err := b.sig.Sign(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p2, err := sig.CompactSerialize()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return b.enc.Encrypt([]byte(p2))
|
||||
}
|
507 vendor/gopkg.in/square/go-jose.v2/jwt/builder_test.go generated vendored Normal file
@@ -0,0 +1,507 @@
/*-
 * Copyright 2016 Zbigniew Mandziejewicz
 * Copyright 2016 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package jwt

import (
    "crypto/rand"
    "crypto/rsa"
    "crypto/x509"
    "encoding/hex"
    "encoding/pem"
    "errors"
    "fmt"
    "io"
    "reflect"
    "sort"
    "testing"
    "time"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"

    "gopkg.in/square/go-jose.v2"
    "gopkg.in/square/go-jose.v2/json"
)

type testClaims struct {
    Subject string `json:"sub"`
}

type invalidMarshalClaims struct {
}

var errInvalidMarshalClaims = errors.New("Failed marshaling invalid claims.")

func (c invalidMarshalClaims) MarshalJSON() ([]byte, error) {
    return nil, errInvalidMarshalClaims
}

var sampleClaims = Claims{
    Subject:  "42",
    IssuedAt: NewNumericDate(time.Date(2016, 1, 1, 0, 0, 0, 0, time.UTC)),
    Issuer:   "issuer",
    Audience: Audience{"a1", "a2"},
}

type numberClaims struct {
    Int   int64   `json:"int"`
    Float float64 `json:"float"`
}

func TestIntegerAndFloatsNormalize(t *testing.T) {
    c := numberClaims{1 << 60, 12345.6789}

    normalized, err := normalize(c)
    if err != nil {
        t.Fatal(err)
    }

    ni, err := (normalized["int"].(json.Number)).Int64()
    nf, err := (normalized["float"].(json.Number)).Float64()

    if ni != c.Int {
        t.Error(fmt.Sprintf("normalize failed to preserve int64 (got %v, wanted %v, type %s)", normalized["int"], c.Int, reflect.TypeOf(normalized["int"])))
    }
    if nf != c.Float {
        t.Error(fmt.Sprintf("normalize failed to preserve float64 (got %v, wanted %v, type %s)", normalized["float"], c.Float, reflect.TypeOf(normalized["float"])))
    }
}

func TestBuilderCustomClaimsNonPointer(t *testing.T) {
    jwt, err := Signed(rsaSigner).Claims(testClaims{"foo"}).CompactSerialize()
    require.NoError(t, err, "Error creating JWT.")

    parsed, err := ParseSigned(jwt)
    require.NoError(t, err, "Error parsing JWT.")

    out := &testClaims{}
    if assert.NoError(t, parsed.Claims(&testPrivRSAKey1.PublicKey, out), "Error unmarshaling claims.") {
        assert.Equal(t, "foo", out.Subject)
    }
}

func TestBuilderCustomClaimsPointer(t *testing.T) {
    jwt, err := Signed(rsaSigner).Claims(&testClaims{"foo"}).CompactSerialize()
    require.NoError(t, err, "Error creating JWT.")

    parsed, err := ParseSigned(jwt)
    require.NoError(t, err, "Error parsing JWT.")

    out := &testClaims{}
    if assert.NoError(t, parsed.Claims(&testPrivRSAKey1.PublicKey, out), "Error unmarshaling claims.") {
        assert.Equal(t, "foo", out.Subject)
    }
}

func TestBuilderMergeClaims(t *testing.T) {
    jwt, err := Signed(rsaSigner).
        Claims(&Claims{
            Subject: "42",
        }).
        Claims(map[string]interface{}{
            "Scopes": []string{"read:users"},
        }).
        CompactSerialize()
    require.NoError(t, err, "Error creating JWT.")

    parsed, err := ParseSigned(jwt)
    require.NoError(t, err, "Error parsing JWT.")

    out := make(map[string]interface{})
    if assert.NoError(t, parsed.Claims(&testPrivRSAKey1.PublicKey, &out), "Error unmarshaling claims.") {
        assert.Equal(t, map[string]interface{}{
            "sub":    "42",
            "Scopes": []interface{}{"read:users"},
        }, out)
    }

    _, err = Signed(rsaSigner).Claims("invalid-claims").Claims(&testClaims{"foo"}).CompactSerialize()
    assert.Equal(t, err, ErrInvalidClaims)

    _, err = Signed(rsaSigner).Claims(&invalidMarshalClaims{}).CompactSerialize()
    assert.EqualError(t, err, "json: error calling MarshalJSON for type *jwt.invalidMarshalClaims: Failed marshaling invalid claims.")
}

func TestSignedFullSerializeAndToken(t *testing.T) {
    b := Signed(rsaSigner).Claims(&testClaims{"foo"})

    jwt, err := b.FullSerialize()
    require.NoError(t, err, "Error creating JWT.")
    parsed, err := ParseSigned(jwt)
    require.NoError(t, err, "Error parsing JWT.")
    out := &testClaims{}
    if assert.NoError(t, parsed.Claims(&testPrivRSAKey1.PublicKey, &out), "Error unmarshaling claims.") {
        assert.Equal(t, &testClaims{
            Subject: "foo",
        }, out)
    }

    jwt2, err := b.Token()
    require.NoError(t, err, "Error creating JWT.")
    out2 := &testClaims{}
    if assert.NoError(t, jwt2.Claims(&testPrivRSAKey1.PublicKey, &out2), "Error unmarshaling claims.") {
        assert.Equal(t, &testClaims{
            Subject: "foo",
        }, out2)
    }

    b2 := Signed(rsaSigner).Claims(&invalidMarshalClaims{})
    _, err = b2.FullSerialize()
    require.EqualError(t, err, "json: error calling MarshalJSON for type *jwt.invalidMarshalClaims: Failed marshaling invalid claims.")
    _, err = b2.Token()
    require.EqualError(t, err, "json: error calling MarshalJSON for type *jwt.invalidMarshalClaims: Failed marshaling invalid claims.")
}

func TestEncryptedFullSerializeAndToken(t *testing.T) {
    recipient := jose.Recipient{
        Algorithm: jose.RSA1_5,
        Key:       testPrivRSAKey1.Public(),
    }
    encrypter, err := jose.NewEncrypter(jose.A128CBC_HS256, recipient, nil)
    require.NoError(t, err, "Error creating encrypter.")

    b := Encrypted(encrypter).Claims(&testClaims{"foo"})

    jwt, err := b.FullSerialize()
    require.NoError(t, err, "Error creating JWT.")
    parsed, err := ParseEncrypted(jwt)
    require.NoError(t, err, "Error parsing JWT.")
    out := &testClaims{}
    if assert.NoError(t, parsed.Claims(testPrivRSAKey1, &out)) {
        assert.Equal(t, &testClaims{
            Subject: "foo",
        }, out)
    }

    jwt2, err := b.Token()
    require.NoError(t, err, "Error creating JWT.")
    out2 := &testClaims{}
    if assert.NoError(t, jwt2.Claims(testPrivRSAKey1, &out2)) {
        assert.Equal(t, &testClaims{
            Subject: "foo",
        }, out2)
    }

    b2 := Encrypted(encrypter).Claims(&invalidMarshalClaims{})

    _, err = b2.FullSerialize()
    require.EqualError(t, err, "json: error calling MarshalJSON for type *jwt.invalidMarshalClaims: Failed marshaling invalid claims.")
    _, err = b2.Token()
    require.EqualError(t, err, "json: error calling MarshalJSON for type *jwt.invalidMarshalClaims: Failed marshaling invalid claims.")
}

func TestBuilderSignedAndEncrypted(t *testing.T) {
    recipient := jose.Recipient{
        Algorithm: jose.RSA1_5,
        Key:       testPrivRSAKey1.Public(),
    }
    encrypter, err := jose.NewEncrypter(jose.A128CBC_HS256, recipient, (&jose.EncrypterOptions{}).WithContentType("JWT").WithType("JWT"))
    require.NoError(t, err, "Error creating encrypter.")

    jwt1, err := SignedAndEncrypted(rsaSigner, encrypter).Claims(&testClaims{"foo"}).Token()
    require.NoError(t, err, "Error marshaling signed-then-encrypted token.")
    if nested, err := jwt1.Decrypt(testPrivRSAKey1); assert.NoError(t, err, "Error decrypting signed-then-encrypted token.") {
        out := &testClaims{}
        assert.NoError(t, nested.Claims(&testPrivRSAKey1.PublicKey, out))
        assert.Equal(t, &testClaims{"foo"}, out)
    }

    b := SignedAndEncrypted(rsaSigner, encrypter).Claims(&testClaims{"foo"})
    tok1, err := b.CompactSerialize()
    if assert.NoError(t, err) {
        jwt, err := ParseSignedAndEncrypted(tok1)
        if assert.NoError(t, err, "Error parsing signed-then-encrypted compact token.") {
            if nested, err := jwt.Decrypt(testPrivRSAKey1); assert.NoError(t, err) {
                out := &testClaims{}
                assert.NoError(t, nested.Claims(&testPrivRSAKey1.PublicKey, out))
                assert.Equal(t, &testClaims{"foo"}, out)
            }
        }
    }

    tok2, err := b.FullSerialize()
    if assert.NoError(t, err) {
        jwe, err := ParseSignedAndEncrypted(tok2)
        if assert.NoError(t, err, "Error parsing signed-then-encrypted full token.") {
            assert.Equal(t, []jose.Header{{
                Algorithm: string(jose.RSA1_5),
                ExtraHeaders: map[jose.HeaderKey]interface{}{
                    jose.HeaderType:        "JWT",
                    jose.HeaderContentType: "JWT",
                    "enc":                  "A128CBC-HS256",
                },
            }}, jwe.Headers)
            if jws, err := jwe.Decrypt(testPrivRSAKey1); assert.NoError(t, err) {
                assert.Equal(t, []jose.Header{{
                    Algorithm: string(jose.RS256),
                    ExtraHeaders: map[jose.HeaderKey]interface{}{
                        jose.HeaderType: "JWT",
                    },
                }}, jws.Headers)
                out := &testClaims{}
                assert.NoError(t, jws.Claims(&testPrivRSAKey1.PublicKey, out))
                assert.Equal(t, &testClaims{"foo"}, out)
            }
        }
    }

    b2 := SignedAndEncrypted(rsaSigner, encrypter).Claims(&invalidMarshalClaims{})
    _, err = b2.CompactSerialize()
    assert.EqualError(t, err, "json: error calling MarshalJSON for type *jwt.invalidMarshalClaims: Failed marshaling invalid claims.")
    _, err = b2.FullSerialize()
    assert.EqualError(t, err, "json: error calling MarshalJSON for type *jwt.invalidMarshalClaims: Failed marshaling invalid claims.")

    encrypter2, err := jose.NewEncrypter(jose.A128CBC_HS256, recipient, nil)
    require.NoError(t, err, "Error creating encrypter.")
    _, err = SignedAndEncrypted(rsaSigner, encrypter2).CompactSerialize()
    assert.EqualError(t, err, "square/go-jose/jwt: expected content type to be JWT (cty header)")
}

func TestBuilderHeadersSigner(t *testing.T) {
    tests := []struct {
        Keys   []*rsa.PrivateKey
        Claims interface{}
    }{
        {
            Keys:   []*rsa.PrivateKey{testPrivRSAKey1},
            Claims: &Claims{Issuer: "foo"},
        },
        {
            Keys:   []*rsa.PrivateKey{testPrivRSAKey1, testPrivRSAKey2},
            Claims: &Claims{Issuer: "foo"},
        },
    }

    for i, tc := range tests {
        wantKeyIDs := make([]string, len(tc.Keys))
        signingKeys := make([]jose.SigningKey, len(tc.Keys))

        for j, key := range tc.Keys {
            keyIDBytes := make([]byte, 20)
            if _, err := io.ReadFull(rand.Reader, keyIDBytes); err != nil {
                t.Fatalf("failed to read random bytes: %v", err)
            }
            keyID := hex.EncodeToString(keyIDBytes)

            wantKeyIDs[j] = keyID
            signingKeys[j] = jose.SigningKey{
                Algorithm: jose.RS256,
                Key: &jose.JSONWebKey{
                    KeyID:     keyID,
                    Algorithm: "RSA",
                    Key:       key,
                },
            }
        }

        signer, err := jose.NewMultiSigner(signingKeys, nil)
        if err != nil {
            t.Errorf("case %d: NewMultiSigner(): %v", i, err)
            continue
        }

        var token string
        if len(tc.Keys) == 1 {
            token, err = Signed(signer).Claims(tc.Claims).CompactSerialize()
        } else {
            token, err = Signed(signer).Claims(tc.Claims).FullSerialize()
        }
        if err != nil {
            t.Errorf("case %d: failed to create token: %v", i, err)
            continue
        }
        jws, err := jose.ParseSigned(token)
        if err != nil {
            t.Errorf("case %d: parse signed: %v", i, err)
            continue
        }
        gotKeyIDs := make([]string, len(jws.Signatures))
        for i, sig := range jws.Signatures {
            gotKeyIDs[i] = sig.Header.KeyID
        }
        sort.Strings(wantKeyIDs)
        sort.Strings(gotKeyIDs)
        if !reflect.DeepEqual(wantKeyIDs, gotKeyIDs) {
            t.Errorf("case %d: wanted=%q got=%q", i, wantKeyIDs, gotKeyIDs)
        }
    }
}

func TestBuilderHeadersEncrypter(t *testing.T) {
    key := testPrivRSAKey1
    claims := &Claims{Issuer: "foo"}

    keyIDBytes := make([]byte, 20)
    if _, err := io.ReadFull(rand.Reader, keyIDBytes); err != nil {
        t.Fatalf("failed to read random bytes: %v", err)
    }
    keyID := hex.EncodeToString(keyIDBytes)

    wantKeyID := keyID
    recipient := jose.Recipient{
        Algorithm: jose.RSA1_5,
        Key:       key.Public(),
        KeyID:     keyID,
    }

    wantType := jose.ContentType("JWT")
    encrypter, err := jose.NewEncrypter(jose.A128CBC_HS256, recipient, (&jose.EncrypterOptions{}).WithType(wantType))
    require.NoError(t, err, "failed to create encrypter")

    token, err := Encrypted(encrypter).Claims(claims).CompactSerialize()
    require.NoError(t, err, "failed to create token")

    jwe, err := jose.ParseEncrypted(token)
    if assert.NoError(t, err, "error parsing encrypted token") {
        assert.Equal(t, jose.Header{
            ExtraHeaders: map[jose.HeaderKey]interface{}{
                jose.HeaderType: string(wantType),
                "enc":           "A128CBC-HS256",
            },
            Algorithm: string(jose.RSA1_5),
            KeyID:     wantKeyID,
        }, jwe.Header)
    }
}

func BenchmarkMapClaims(b *testing.B) {
    m := map[string]interface{}{
        "sub": "42",
        "iat": 1451606400,
        "iss": "issuer",
        "aud": []string{"a1", "a2"},
    }

    for i := 0; i < b.N; i++ {
        Signed(rsaSigner).Claims(m)
    }
}

func BenchmarkStructClaims(b *testing.B) {
    for i := 0; i < b.N; i++ {
        Signed(rsaSigner).Claims(sampleClaims)
    }
}

func BenchmarkSignedCompactSerializeRSA(b *testing.B) {
    tb := Signed(rsaSigner).Claims(sampleClaims)

    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        tb.CompactSerialize()
    }
}

func BenchmarkSignedCompactSerializeSHA(b *testing.B) {
    tb := Signed(hmacSigner).Claims(sampleClaims)

    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        tb.CompactSerialize()
    }
}

func mustUnmarshalRSA(data string) *rsa.PrivateKey {
    block, _ := pem.Decode([]byte(data))
    if block == nil {
        panic("failed to decode PEM data")
    }
    key, err := x509.ParsePKCS8PrivateKey(block.Bytes)
    if err != nil {
        panic("failed to parse RSA key: " + err.Error())
    }
    if key, ok := key.(*rsa.PrivateKey); ok {
        return key
    }
    panic("key is not of type *rsa.PrivateKey")
}

func mustMakeSigner(alg jose.SignatureAlgorithm, k interface{}) jose.Signer {
    sig, err := jose.NewSigner(jose.SigningKey{Algorithm: alg, Key: k}, (&jose.SignerOptions{}).WithType("JWT"))
    if err != nil {
        panic("failed to create signer:" + err.Error())
    }

    return sig
}

var (
    sharedKey           = []byte("secret")
    sharedEncryptionKey = []byte("itsa16bytesecret")

    testPrivRSAKey1 = mustUnmarshalRSA(`-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDIHBvDHAr7jh8h
xaqBCl11fjI9YZtdC5b3HtXTXZW3c2dIOImNUjffT8POP6p5OpzivmC1om7iOyuZ
3nJjC9LT3zqqs3f2i5d4mImxEuqG6uWdryFfkp0uIv5VkjVO+iQWd6pDAPGP7r1Z
foXCleyCtmyNH4JSkJneNPOk/4BxO8vcvRnCMT/Gv81IT6H+OQ6OovWOuJr8RX9t
1wuCjC9ezZxeI9ONffhiO5FMrVh5H9LJTl3dPOVa4aEcOvgd45hBmvxAyXqf8daE
6Kl2O7vQ4uwgnSTVXYIIjCjbepuersApIMGx/XPSgiU1K3Xtah/TBvep+S3VlwPc
q/QH25S9AgMBAAECggEAe+y8XKYfPw4SxY1uPB+5JSwT3ON3nbWxtjSIYy9Pqp5z
Vcx9kuFZ7JevQSk4X38m7VzM8282kC/ono+d8yy9Uayq3k/qeOqV0X9Vti1qxEbw
ECkG1/MqGApfy4qSLOjINInDDV+mOWa2KJgsKgdCwuhKbVMYGB2ozG2qfYIlfvlY
vLcBEpGWmswJHNmkcjTtGFIyJgPbsI6ndkkOeQbqQKAaadXtG1xUzH+vIvqaUl/l
AkNf+p4qhPkHsoAWXf1qu9cYa2T8T+mEo79AwlgVC6awXQWNRTiyClDJC7cu6NBy
ZHXCLFMbalzWF9qeI2OPaFX2x3IBWrbyDxcJ4TSdQQKBgQD/Fp/uQonMBh1h4Vi4
HlxZdqSOArTitXValdLFGVJ23MngTGV/St4WH6eRp4ICfPyldsfcv6MZpNwNm1Rn
lB5Gtpqpby1dsrOSfvVbY7U3vpLnd8+hJ/lT5zCYt5Eor46N6iWRkYWzNe4PixiF
z1puGUvFCbZdeeACVrPLmW3JKQKBgQDI0y9WTf8ezKPbtap4UEE6yBf49ftohVGz
p4iD6Ng1uqePwKahwoVXKOc179CjGGtW/UUBORAoKRmxdHajHq6LJgsBxpaARz21
COPy99BUyp9ER5P8vYn63lC7Cpd/K7uyMjaz1DAzYBZIeVZHIw8O9wuGNJKjRFy9
SZyD3V0ddQKBgFMdohrWH2QVEfnUnT3Q1rJn0BJdm2bLTWOosbZ7G72TD0xAWEnz
sQ1wXv88n0YER6X6YADziEdQykq8s/HT91F/KkHO8e83zP8M0xFmGaQCOoelKEgQ
aFMIX3NDTM7+9OoUwwz9Z50PE3SJFAJ1n7eEEoYvNfabQXxBl+/dHEKRAoGAPEvU
EaiXacrtg8EWrssB2sFLGU/ZrTciIbuybFCT4gXp22pvXXAHEvVP/kzDqsRhLhwb
BNP6OuSkNziNikpjA5pngZ/7fgZly54gusmW/m5bxWdsUl0iOXVYbeAvPlqGH2me
LP4Pfs1hw17S/cbT9Z1NE31jbavP4HFikeD73SUCgYEArQfuudml6ei7XZ1Emjq8
jZiD+fX6e6BD/ISatVnuyZmGj9wPFsEhY2BpLiAMQHMDIvH9nlKzsFvjkTPB86qG
jCh3D67Os8eSBk5uRC6iW3Fc4DXvB5EFS0W9/15Sl+V5vXAcrNMpYS82OTSMG2Gt
b9Ym/nxaqyTu0PxajXkKm5Q=
-----END PRIVATE KEY-----`)

    testPrivRSAKey2 = mustUnmarshalRSA(`-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCxJ09jkXZ5Okyq
FrEKrs+GTzZRvoLziyzDTIZLJC6BVryau4gaFjuBG+pnm4z53oDP0XVnjFsx1mBw
R6RHeXlXbxLXsMfJpMzU9I2SRen9DokpD187CAnjLOoN9QRl1h8CA+sqR5Jw9mdl
mdaBKC99M9QYAPK3vGNfPC4soo8LDSBiemmt5raL4WSfoYh/6qg5rHUTymY28uxV
ew3I9Yp+3ltIw+WlRDtW5l+MM5CSUofjj2zcgcG3LEuPtvyZ+CSObxxcZZugm9zc
JdiazNyUxtX8yAj3Xg8Hde0jt0QDXv7A+U0KMVi9lX6PJEaNj4tOhOmQhJVMzAyr
1W/bifZVAgMBAAECggEAduKnn21GMZLTUi4KP94SvNK55F/Sp7hVoPbhBNpSL1BT
IBAMBV24LyvZwhAcqq8MiOrLPGNv6+EvNQqPD7xQl0GeRouHeCYVpDA+NdSfc8jm
eVysjwQVBpTkudsdSW5JvuN8VRJVD2P8/a0gy+p4/C/k/Prd6DoQAiBz6FZrYoEd
iYgIegHOMXWd4vzO3ENOWSIUI6ci7Aro+Y0Z75kfiVokAGhUcFgrZ58E82fBYh8I
cxO20oMnucGrLicQzj536jx4wX3Cdd4jr9UVEJ9ZII1ldlp03nZlFLXqJH1547Aq
ZM+3vVcBGoJ8T9ZQ4VDAL++0K2DLC9JkTARAYCEi/QKBgQDebIc1+2zblhQtVQ/e
IbEErZcB7v+TkUoRoBfR0lj7bKBFJgRe37fgu1xf95/s63okdnOw/OuQqtGmgx/J
TL3yULBdNcwTCRm41t+cqoGymjK0VRbqk6CWBId0E3r5TaCVWedk2JI2XwTvIJ1A
eDiqfJeDHUD44yaonwbysj9ZDwKBgQDL5VQfTppVaJk2PXNwhAkRQklZ8RFmt/7p
yA3dddQNdwMk4Fl8F7QuO1gBxDiHdnwIrlEOz6fTsM3LwIS+Q12P1vYFIhpo7HDB
wvjfMwCPxBIS4jI28RgcAf0VbZ/+CHAm6bb9iDwsjXhh1J5oOm5VKnju6/rPH/QY
+md40pnSWwKBgBnKPbdNquafNUG4XjmkcHEZa6wGuU20CAGZLYnfuP+WLdM2wET7
7cc6ElDyVnHTL/twXKPF/85rcBm9lH7zzgZ9wqVcKoh+gqQDDjSNNLKv3Hc6cojK
i1E5vzb/Vz/290q5/PGdhv6U7+6GOpWSGwfxoGPMjY8OT5o3rkeP0XaTAoGBALLR
GQmr4eZtqZDMK+XNpjYgsDvVE7HGRCW7cY17vNFiQruglloiX778BJ7n+7uxye3D
EwuuSj15ncLHwKMsaW2w1GqEEi1azzjfSWxWSnPLPR6aifdtUfueMtsMHXio5dL6
vaV0SXG5UI5b7eDy/bhrW0wOYRQtreIKGZz49jZpAoGBAIvxYngkLwmq6g6MmnAc
YK4oT6YAm2wfSy2mzpEQP5r1igp1rN7T46o7FMUPDLS9wK3ESAaIYe01qT6Yftcc
5qF+yiOGDTr9XQiHwe4BcyrNEMfUjDhDU5ao2gH8+t1VGr1KspLsUNbedrJwZsY4
UCZVKEEDHzKfLO/iBgKjJQF7
-----END PRIVATE KEY-----`)

    rsaSigner  = mustMakeSigner(jose.RS256, testPrivRSAKey1)
    hmacSigner = mustMakeSigner(jose.HS256, sharedKey)
)
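The vendored test file above exercises the signed/encrypted JWT builder of gopkg.in/square/go-jose.v2/jwt. For orientation only, here is a minimal sketch (not part of the vendored diff) of driving that builder from an external package, assuming an HS256 signer over a shared secret like hmacSigner above; identifiers such as raw, tok, and out are illustrative.

package main

import (
    "fmt"

    jose "gopkg.in/square/go-jose.v2"
    "gopkg.in/square/go-jose.v2/jwt"
)

func main() {
    // Build an HS256 signer over a shared secret, mirroring mustMakeSigner/hmacSigner above.
    sharedKey := []byte("secret")
    sig, err := jose.NewSigner(jose.SigningKey{Algorithm: jose.HS256, Key: sharedKey},
        (&jose.SignerOptions{}).WithType("JWT"))
    if err != nil {
        panic(err)
    }

    // Serialize registered claims into a compact JWS, as the builder tests do.
    cl := jwt.Claims{Subject: "42", Issuer: "issuer", Audience: jwt.Audience{"a1", "a2"}}
    raw, err := jwt.Signed(sig).Claims(cl).CompactSerialize()
    if err != nil {
        panic(err)
    }
    fmt.Println(raw)

    // Parse the token back and verify/deserialize the claims with the same key.
    tok, err := jwt.ParseSigned(raw)
    if err != nil {
        panic(err)
    }
    out := jwt.Claims{}
    if err := tok.Claims(sharedKey, &out); err != nil {
        panic(err)
    }
    fmt.Println(out.Subject, out.Issuer)
}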
Some files were not shown because too many files have changed in this diff.