Mirror of https://github.com/Luzifer/s3sync.git

Breaking: Refactor, update deps, add MinIO support

- Drop MD5 checksumming
- Replace logging to stdout
- Add Endpoint flag for MinIO support
- Switch to Go modules support

Signed-off-by: Knut Ahlers <knut@ahlers.me>
Author: Knut Ahlers <knut@ahlers.me>
Date: 2023-06-09 12:52:04 +02:00
Parent: 37ccbf621f
Commit: 627fe09bac
Signed by: luzifer (GPG key ID: D91C3E91E4CAD6F5)
15 changed files with 745 additions and 441 deletions

.github/workflows/test-and-build.yml (new file, 74 lines)

@ -0,0 +1,74 @@
---
name: test-and-build
on: [push]
permissions:
contents: write
jobs:
test-and-build:
defaults:
run:
shell: bash
container:
image: luzifer/archlinux
env:
CGO_ENABLED: 0
GOPATH: /go
runs-on: ubuntu-latest
steps:
- name: Enable custom AUR package repo
run: echo -e "[luzifer]\nSigLevel = Never\nServer = https://s3-eu-west-1.amazonaws.com/arch-luzifer-io/repo/\$arch" >>/etc/pacman.conf
- name: Install required packages
run: |
pacman -Syy --noconfirm \
awk \
curl \
diffutils \
git \
go \
golangci-lint-bin \
make \
nodejs-lts-fermium \
npm \
tar \
unzip \
which \
zip
- uses: actions/checkout@v3
- name: Marking workdir safe
run: git config --global --add safe.directory /__w/s3sync/s3sync
- name: Lint code
run: make lint
- name: Build release
run: make publish
env:
FORCE_SKIP_UPLOAD: 'true'
MOD_MODE: readonly
NODE_ENV: production
NO_TESTS: 'true'
PACKAGES: '.'
- name: Extract changelog
run: 'awk "/^#/ && ++c==2{exit}; /^#/f" "History.md" | tail -n +2 >release_changelog.md'
- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
with:
body_path: release_changelog.md
draft: false
fail_on_unmatched_files: true
files: '.build/*'
generate_release_notes: false
...


@ -1,5 +0,0 @@
---
build_matrix:
general:
ldflags:
- "-X main.version $(git describe --tags)"

.golangci.yml (new file, 174 lines)

@ -0,0 +1,174 @@
# Derived from https://github.com/golangci/golangci-lint/blob/master/.golangci.example.yml
---
run:
# timeout for analysis, e.g. 30s, 5m, default is 1m
timeout: 5m
# Force readonly modules usage for checking
modules-download-mode: readonly
output:
format: tab
issues:
# This disables the exclude-list shipped with golangci-lint, as that
# list for example fully hides the gosec G304 rule, errcheck, revive's
# exported rule and other errors one really wants to see.
# Some detail: https://github.com/golangci/golangci-lint/issues/456
exclude-use-default: false
# Don't limit the number of shown issues: Report ALL of them
max-issues-per-linter: 0
max-same-issues: 0
linters:
disable-all: true
enable:
- asciicheck # Simple linter to check that your code does not contain non-ASCII identifiers [fast: true, auto-fix: false]
- bidichk # Checks for dangerous unicode character sequences [fast: true, auto-fix: false]
- bodyclose # checks whether HTTP response body is closed successfully [fast: true, auto-fix: false]
- containedctx # containedctx is a linter that detects struct contained context.Context field [fast: true, auto-fix: false]
- contextcheck # check the function whether use a non-inherited context [fast: false, auto-fix: false]
- dogsled # Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f()) [fast: true, auto-fix: false]
- durationcheck # check for two durations multiplied together [fast: false, auto-fix: false]
- errcheck # Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases [fast: false, auto-fix: false]
- errchkjson # Checks types passed to the json encoding functions. Reports unsupported types and optionally reports occasions where the check for the returned error can be omitted. [fast: false, auto-fix: false]
- exportloopref # checks for pointers to enclosing loop variables [fast: true, auto-fix: false]
- forbidigo # Forbids identifiers [fast: true, auto-fix: false]
- funlen # Tool for detection of long functions [fast: true, auto-fix: false]
- gocognit # Computes and checks the cognitive complexity of functions [fast: true, auto-fix: false]
- goconst # Finds repeated strings that could be replaced by a constant [fast: true, auto-fix: false]
- gocritic # The most opinionated Go source code linter [fast: true, auto-fix: false]
- gocyclo # Computes and checks the cyclomatic complexity of functions [fast: true, auto-fix: false]
- godox # Tool for detection of FIXME, TODO and other comment keywords [fast: true, auto-fix: false]
- gofmt # Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification [fast: true, auto-fix: true]
- gofumpt # Gofumpt checks whether code was gofumpt-ed. [fast: true, auto-fix: true]
- goimports # Goimports does everything that gofmt does. Additionally it checks unused imports [fast: true, auto-fix: true]
- gomnd # An analyzer to detect magic numbers. [fast: true, auto-fix: false]
- gosec # Inspects source code for security problems [fast: true, auto-fix: false]
- gosimple # Linter for Go source code that specializes in simplifying code [fast: true, auto-fix: false]
- govet # Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string [fast: true, auto-fix: false]
- ineffassign # Detects when assignments to existing variables are not used [fast: true, auto-fix: false]
- misspell # Finds commonly misspelled English words in comments [fast: true, auto-fix: true]
- nakedret # Finds naked returns in functions greater than a specified function length [fast: true, auto-fix: false]
- nilerr # Finds the code that returns nil even if it checks that the error is not nil. [fast: false, auto-fix: false]
- nilnil # Checks that there is no simultaneous return of `nil` error and an invalid value. [fast: false, auto-fix: false]
- noctx # noctx finds sending http request without context.Context [fast: true, auto-fix: false]
- nolintlint # Reports ill-formed or insufficient nolint directives [fast: true, auto-fix: false]
- revive # Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint. [fast: false, auto-fix: false]
- staticcheck # Staticcheck is a go vet on steroids, applying a ton of static analysis checks [fast: true, auto-fix: false]
- stylecheck # Stylecheck is a replacement for golint [fast: true, auto-fix: false]
- tenv # tenv is an analyzer that detects using os.Setenv instead of t.Setenv since Go 1.17 [fast: false, auto-fix: false]
- typecheck # Like the front-end of a Go compiler, parses and type-checks Go code [fast: true, auto-fix: false]
- unconvert # Remove unnecessary type conversions [fast: true, auto-fix: false]
- unused # Checks Go code for unused constants, variables, functions and types [fast: false, auto-fix: false]
- wastedassign # wastedassign finds wasted assignment statements. [fast: false, auto-fix: false]
- wrapcheck # Checks that errors returned from external packages are wrapped [fast: false, auto-fix: false]
linters-settings:
funlen:
lines: 100
statements: 60
gocyclo:
# minimal code complexity to report, 30 by default (but we recommend 10-20)
min-complexity: 15
gomnd:
settings:
mnd:
ignored-functions: 'strconv.(?:Format|Parse)\B+'
revive:
rules:
#- name: add-constant # Suggests using constant for magic numbers and string literals
# Opinion: Makes sense for strings, not for numbers but checks numbers
#- name: argument-limit # Specifies the maximum number of arguments a function can receive | Opinion: Don't need this
- name: atomic # Check for common mistaken usages of the `sync/atomic` package
- name: banned-characters # Checks banned characters in identifiers
arguments:
- ';' # Greek question mark
- name: bare-return # Warns on bare returns
- name: blank-imports # Disallows blank imports
- name: bool-literal-in-expr # Suggests removing Boolean literals from logic expressions
- name: call-to-gc # Warns on explicit call to the garbage collector
#- name: cognitive-complexity # Sets restriction for maximum Cognitive complexity.
# There is a dedicated linter for this
- name: confusing-naming # Warns on methods with names that differ only by capitalization
- name: confusing-results # Suggests to name potentially confusing function results
- name: constant-logical-expr # Warns on constant logical expressions
- name: context-as-argument # `context.Context` should be the first argument of a function.
- name: context-keys-type # Disallows the usage of basic types in `context.WithValue`.
#- name: cyclomatic # Sets restriction for maximum Cyclomatic complexity.
# There is a dedicated linter for this
#- name: datarace # Spots potential dataraces
# Is not (yet) available?
- name: deep-exit # Looks for program exits in funcs other than `main()` or `init()`
- name: defer # Warns on some [defer gotchas](https://blog.learngoprogramming.com/5-gotchas-of-defer-in-go-golang-part-iii-36a1ab3d6ef1)
- name: dot-imports # Forbids `.` imports.
- name: duplicated-imports # Looks for packages that are imported two or more times
- name: early-return # Spots if-then-else statements that can be refactored to simplify code reading
- name: empty-block # Warns on empty code blocks
- name: empty-lines # Warns when there are heading or trailing newlines in a block
- name: errorf # Should replace `errors.New(fmt.Sprintf())` with `fmt.Errorf()`
- name: error-naming # Naming of error variables.
- name: error-return # The error return parameter should be last.
- name: error-strings # Conventions around error strings.
- name: exported # Naming and commenting conventions on exported symbols.
arguments: ['sayRepetitiveInsteadOfStutters']
#- name: file-header # Header which each file should have.
# Useless without config, have no config for it
- name: flag-parameter # Warns on boolean parameters that create a control coupling
#- name: function-length # Warns on functions exceeding the statements or lines max
# There is a dedicated linter for this
#- name: function-result-limit # Specifies the maximum number of results a function can return
# Opinion: Don't need this
- name: get-return # Warns on getters that do not yield any result
- name: identical-branches # Spots if-then-else statements with identical `then` and `else` branches
- name: if-return # Redundant if when returning an error.
#- name: imports-blacklist # Disallows importing the specified packages
# Useless without config, have no config for it
- name: import-shadowing # Spots identifiers that shadow an import
- name: increment-decrement # Use `i++` and `i--` instead of `i += 1` and `i -= 1`.
- name: indent-error-flow # Prevents redundant else statements.
#- name: line-length-limit # Specifies the maximum number of characters in a line
# There is a dedicated linter for this
#- name: max-public-structs # The maximum number of public structs in a file.
# Opinion: Don't need this
- name: modifies-parameter # Warns on assignments to function parameters
- name: modifies-value-receiver # Warns on assignments to value-passed method receivers
#- name: nested-structs # Warns on structs within structs
# Opinion: Don't need this
- name: optimize-operands-order # Checks inefficient conditional expressions
#- name: package-comments # Package commenting conventions.
# Opinion: Don't need this
- name: range # Prevents redundant variables when iterating over a collection.
- name: range-val-address # Warns if address of range value is used dangerously
- name: range-val-in-closure # Warns if range value is used in a closure dispatched as goroutine
- name: receiver-naming # Conventions around the naming of receivers.
- name: redefines-builtin-id # Warns on redefinitions of builtin identifiers
#- name: string-format # Warns on specific string literals that fail one or more user-configured regular expressions
# Useless without config, have no config for it
- name: string-of-int # Warns on suspicious casts from int to string
- name: struct-tag # Checks common struct tags like `json`,`xml`,`yaml`
- name: superfluous-else # Prevents redundant else statements (extends indent-error-flow)
- name: time-equal # Suggests using `time.Time.Equal` instead of `==` and `!=` when comparing time values
- name: time-naming # Conventions around the naming of time variables.
- name: unconditional-recursion # Warns on function calls that will lead to (direct) infinite recursion
- name: unexported-naming # Warns on wrongly named un-exported symbols
- name: unexported-return # Warns when a public return is from unexported type.
- name: unhandled-error # Warns on unhandled errors returned by function calls
arguments:
- "fmt.(Fp|P)rint(f|ln|)"
- name: unnecessary-stmt # Suggests removing or simplifying unnecessary statements
- name: unreachable-code # Warns on unreachable code
- name: unused-parameter # Suggests to rename or remove unused function parameters
- name: unused-receiver # Suggests to rename or remove unused method receivers
#- name: use-any # Proposes to replace `interface{}` with its alias `any`
# Is not (yet) available?
- name: useless-break # Warns on useless `break` statements in case clauses
- name: var-declaration # Reduces redundancies around variable declaration.
- name: var-naming # Naming rules.
- name: waitgroup-by-value # Warns on functions taking sync.WaitGroup as a by-value parameter
...
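
Since `nolintlint` is enabled, suppressions have to name the linters and carry a reason, as the refactored main.go does with `//nolint:funlen,gocognit,gocyclo`. A minimal sketch of that directive style (the package and function below are purely illustrative, not part of s3sync):

```go
// Package example only illustrates the nolint directive format this
// config expects; it is not part of the repository.
package example

import "fmt"

// printAll suppresses two linters by name and documents why, which is
// the machine-readable format nolintlint checks for.
//
//nolint:funlen,gocognit // kept as a single unit for readability
func printAll(items []string) {
	for _, it := range items {
		fmt.Println(it)
	}
}
```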


@ -1,10 +0,0 @@
---
image: "quay.io/luzifer/repo-runner-image"
checkout_dir: /go/src/github.com/Luzifer/s3sync
commands:
- make publish
environment:
CGO_ENABLED: 0


@ -1,3 +1,8 @@
default:
lint:
golangci-lint run ./...
publish:
curl -sSLo golang.sh https://raw.githubusercontent.com/Luzifer/github-publish/master/golang.sh
bash golang.sh


@ -8,7 +8,7 @@
## Features
- Static binary, no dependencies required
- Sync files only if required (judged by MD5 checksum)
- Sync files only if required (judged by file size & modify-date)
- Using multiple threads to upload, the transfer is quite fast
- Optionally delete files at target
- Optionally make files public on sync (only if file needs sync)
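
The new sync criterion from the feature list above (file size plus modification date instead of MD5) boils down to a per-file decision roughly like the following; this is a distilled sketch of the logic in main.go, not a verbatim excerpt:

```go
package sketch

import "github.com/Luzifer/s3sync/pkg/fsprovider"

// needsCopy restates the per-file decision made in main.go: transfer
// when the remote copy is missing, differs in size, or is older than
// the local file. A nil remote means the file was not found remotely.
func needsCopy(local fsprovider.File, remote *fsprovider.File) bool {
	switch {
	case remote == nil:
		return true // missing on the remote side
	case local.Size != remote.Size:
		return true // size mismatch
	default:
		return local.LastModified.After(remote.LastModified) // local is newer
	}
}
```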
@ -49,7 +49,3 @@ Flags:
(2 / 3) 07/26/bkm.png Skip
(3 / 3) 07/26/luzifer_io.png Skip
```
---
![](https://d2o84fseuhwkxk.cloudfront.net/s3sync.svg)

go.mod (9 changed lines)

@ -3,12 +3,17 @@ module github.com/Luzifer/s3sync
go 1.20
require (
github.com/Luzifer/rconfig/v2 v2.4.0
github.com/aws/aws-sdk-go v1.44.279
github.com/spf13/cobra v1.7.0
github.com/pkg/errors v0.9.1
github.com/sirupsen/logrus v1.9.3
)
require (
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
golang.org/x/sys v0.8.0 // indirect
gopkg.in/validator.v2 v2.0.1 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

go.sum (29 changed lines)

@ -1,23 +1,27 @@
github.com/Luzifer/rconfig/v2 v2.4.0 h1:MAdymTlExAZ8mx5VG8xOFAtFQSpWBipKYQHPOmYTn9o=
github.com/Luzifer/rconfig/v2 v2.4.0/go.mod h1:hWF3ZVSusbYlg5bEvCwalEyUSY+0JPJWUiIu7rBmav8=
github.com/aws/aws-sdk-go v1.44.279 h1:g23dxnYjIiPlQo0gIKNR0zVPsSvo1bj5frWln+5sfhk=
github.com/aws/aws-sdk-go v1.44.279/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
@ -33,8 +37,11 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@ -48,6 +55,12 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/validator.v2 v2.0.1 h1:xF0KWyGWXm/LM2G1TrEjqOu4pa6coO9AlWSf3msVfDY=
gopkg.in/validator.v2 v2.0.1/go.mod h1:lIUZBlB3Im4s/eYp39Ry/wkR02yOPhZ9IwIRBjuPuG8=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=


@ -1,68 +0,0 @@
package main
import (
"crypto/md5"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
)
type localProvider struct{}
func newLocalProvider() *localProvider {
return &localProvider{}
}
func (l *localProvider) WriteFile(path string, content io.ReadSeeker, public bool) error {
os.MkdirAll(filepath.Dir(path), 0755)
f, err := os.Create(path)
if err != nil {
return err
}
if _, err := io.Copy(f, content); err != nil {
return err
}
return f.Close()
}
func (l *localProvider) ReadFile(path string) (io.ReadCloser, error) {
return os.Open(path)
}
func (l *localProvider) ListFiles(prefix string) ([]file, error) {
out := []file{}
err := filepath.Walk(prefix, func(path string, f os.FileInfo, err error) error {
if err != nil {
return err
}
if !f.IsDir() {
content, err := ioutil.ReadFile(path)
if err != nil {
return err
}
out = append(out, file{
Filename: strings.TrimLeft(strings.Replace(path, prefix, "", 1), string(os.PathSeparator)),
Size: f.Size(),
MD5: fmt.Sprintf("%x", md5.Sum(content)),
})
}
return nil
})
return out, err
}
func (l *localProvider) DeleteFile(path string) error {
return os.Remove(path)
}
func (l *localProvider) GetAbsolutePath(path string) (string, error) {
return filepath.Abs(path)
}


@ -1,55 +0,0 @@
package logger
import "fmt"
// LogLevel defines a type for named log levels
type LogLevel uint
// Pre-Defined log levels to be used with this logging module
const (
Error LogLevel = iota
Warning
Info
Debug
)
// Logger is a wrapper around output to filter according to levels
type Logger struct {
Level LogLevel
}
// New instantiates a new Logger and sets the preferred log level
func New(logLevel LogLevel) *Logger {
return &Logger{
Level: logLevel,
}
}
// Log is the filtered equivalent to fmt.Println
func (l *Logger) Log(level LogLevel, line string) {
if l.Level >= level {
fmt.Println(line)
}
}
// LogF is the filtered equivalent to fmt.Printf
func (l *Logger) LogF(level LogLevel, line string, args ...interface{}) {
if l.Level >= level {
fmt.Printf(line, args...)
}
}
// ErrorF executes LogF with Error level
func (l *Logger) ErrorF(line string, args ...interface{}) {
l.LogF(Error, line, args...)
}
// InfoF executes LogF with Info level
func (l *Logger) InfoF(line string, args ...interface{}) {
l.LogF(Info, line, args...)
}
// DebugF executes LogF with Debug level
func (l *Logger) DebugF(line string, args ...interface{}) {
l.LogF(Debug, line, args...)
}

main.go (225 changed lines)

@ -1,137 +1,182 @@
package main
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"path"
"strings"
"time"
"github.com/Luzifer/s3sync/logger"
"github.com/spf13/cobra"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"github.com/Luzifer/rconfig/v2"
"github.com/Luzifer/s3sync/pkg/fsprovider"
)
var (
cfg = struct {
Delete bool
Public bool
PrintVersion bool
MaxThreads int
logLevel uint
Delete bool `flag:"delete,d" default:"false" description:"Delete files on remote not existing on local"`
Endpoint string `flag:"endpoint" default:"" description:"Switch S3 endpoint (i.e. for MinIO compatibility)"`
LogLevel string `flag:"log-level" default:"info" description:"Log level (debug, info, warn, error, fatal)"`
MaxThreads int `flag:"max-threads" default:"10" description:"Use max N parallel threads for file sync"`
Public bool `flag:"public,P" default:"false" description:"Make files public when syncing to S3"`
VersionAndExit bool `flag:"version" default:"false" description:"Prints current version and exits"`
}{}
stdout *logger.Logger
version = "dev"
)
type file struct {
Filename string
Size int64
MD5 string
func getFSProvider(prefix string) (fsprovider.Provider, error) {
if strings.HasPrefix(prefix, "s3://") {
p, err := fsprovider.NewS3(cfg.Endpoint)
return p, errors.Wrap(err, "getting s3 provider")
}
return fsprovider.NewLocal(), nil
}
type filesystemProvider interface {
WriteFile(path string, content io.ReadSeeker, public bool) error
ReadFile(path string) (io.ReadCloser, error)
ListFiles(prefix string) ([]file, error)
DeleteFile(path string) error
GetAbsolutePath(path string) (string, error)
func initApp() error {
rconfig.AutoEnv(true)
if err := rconfig.ParseAndValidate(&cfg); err != nil {
return errors.Wrap(err, "parsing cli options")
}
l, err := logrus.ParseLevel(cfg.LogLevel)
if err != nil {
return errors.Wrap(err, "parsing log-level")
}
logrus.SetLevel(l)
return nil
}
func main() {
app := cobra.Command{
Use: "s3sync <from> <to>",
Short: "Sync files from <from> to <to>",
Run: execSync,
PreRun: func(cmd *cobra.Command, args []string) {
if cfg.PrintVersion {
fmt.Printf("s3sync %s\n", version)
os.Exit(0)
}
},
var err error
if err = initApp(); err != nil {
logrus.WithError(err).Fatal("initializing app")
}
app.Flags().BoolVarP(&cfg.Public, "public", "P", false, "Make files public when syncing to S3")
app.Flags().BoolVarP(&cfg.Delete, "delete", "d", false, "Delete files on remote not existing on local")
app.Flags().BoolVar(&cfg.PrintVersion, "version", false, "Print version and quit")
app.Flags().IntVar(&cfg.MaxThreads, "max-threads", 10, "Use max N parallel threads for file sync")
app.Flags().UintVar(&cfg.logLevel, "loglevel", 2, "Amount of log output (0 = Error only, 3 = Debug)")
if cfg.VersionAndExit {
logrus.WithField("version", version).Info("s3sync")
os.Exit(0)
}
app.ParseFlags(os.Args[1:])
stdout = logger.New(logger.LogLevel(cfg.logLevel))
app.Execute()
if err = runSync(rconfig.Args()[1:]); err != nil {
logrus.WithError(err).Fatal("running sync")
}
}
func execSync(cmd *cobra.Command, args []string) {
//nolint:funlen,gocognit,gocyclo // Should be kept as single unit
func runSync(args []string) error {
//nolint:gomnd // Simple count of parameters, makes no sense to export
if len(args) != 2 {
cmd.Usage()
os.Exit(1)
return errors.New("missing required arguments: s3sync <from> <to>")
}
local, err := getFSProvider(args[0])
errExit(err)
if err != nil {
return errors.Wrap(err, "getting local provider")
}
remote, err := getFSProvider(args[1])
errExit(err)
if err != nil {
return errors.Wrap(err, "getting remote provider")
}
localPath, err := local.GetAbsolutePath(args[0])
errExit(err)
if err != nil {
return errors.Wrap(err, "getting local path")
}
remotePath, err := remote.GetAbsolutePath(args[1])
errExit(err)
if err != nil {
return errors.Wrap(err, "getting remote path")
}
localFiles, err := local.ListFiles(localPath)
errExit(err)
if err != nil {
return errors.Wrap(err, "listing local files")
}
remoteFiles, err := remote.ListFiles(remotePath)
errExit(err)
if err != nil {
return errors.Wrap(err, "listing remote files")
}
var (
nErr int
syncChannel = make(chan bool, cfg.MaxThreads)
)
syncChannel := make(chan bool, cfg.MaxThreads)
for i, localFile := range localFiles {
syncChannel <- true
go func(i int, localFile file) {
go func(i int, localFile fsprovider.File) {
defer func() { <-syncChannel }()
needsCopy := true
var (
logger = logrus.WithField("filename", localFile.Filename)
debugLogger = logger.WithField("tx_reason", "missing")
needsCopy bool
remoteFound bool
)
for _, remoteFile := range remoteFiles {
if remoteFile.Filename == localFile.Filename && remoteFile.MD5 == localFile.MD5 {
if remoteFile.Filename != localFile.Filename {
// Different file, do not compare
continue
}
// We found a match, let's check whether tx is required
remoteFound = true
switch {
case remoteFile.Size != localFile.Size:
debugLogger = debugLogger.WithField("tx_reason", "size-mismatch").WithField("ls", localFile.Size).WithField("rs", remoteFile.Size)
needsCopy = true
case localFile.LastModified.After(remoteFile.LastModified):
debugLogger = debugLogger.WithField("tx_reason", "local-newer")
needsCopy = true
default:
// No reason to update
needsCopy = false
break
}
break
}
if needsCopy {
l, err := local.ReadFile(path.Join(localPath, localFile.Filename))
if err != nil {
stdout.ErrorF("(%d / %d) %s ERR: %s\n", i+1, len(localFiles), localFile.Filename, err)
return
}
buffer, err := ioutil.ReadAll(l)
if err != nil {
stdout.ErrorF("(%d / %d) %s ERR: %s\n", i+1, len(localFiles), localFile.Filename, err)
return
}
l.Close()
err = remote.WriteFile(path.Join(remotePath, localFile.Filename), bytes.NewReader(buffer), cfg.Public)
if err != nil {
stdout.ErrorF("(%d / %d) %s ERR: %s\n", i+1, len(localFiles), localFile.Filename, err)
return
}
stdout.InfoF("(%d / %d) %s OK\n", i+1, len(localFiles), localFile.Filename)
if remoteFound && !needsCopy {
logger.Debug("skipped transfer")
return
}
stdout.DebugF("(%d / %d) %s Skip\n", i+1, len(localFiles), localFile.Filename)
debugLogger.Debug("starting transfer")
l, err := local.ReadFile(path.Join(localPath, localFile.Filename))
if err != nil {
logger.WithError(err).Error("reading local file")
nErr++
return
}
defer func() {
if err := l.Close(); err != nil {
logger.WithError(err).Error("closing local file")
}
}()
err = remote.WriteFile(path.Join(remotePath, localFile.Filename), l, cfg.Public)
if err != nil {
logger.WithError(err).Error("writing remote file")
nErr++
return
}
logger.Info("transferred file")
}(i, localFile)
}
if cfg.Delete {
for _, remoteFile := range remoteFiles {
syncChannel <- true
go func(remoteFile file) {
go func(remoteFile fsprovider.File) {
defer func() { <-syncChannel }()
needsDeletion := true
@ -143,33 +188,19 @@ func execSync(cmd *cobra.Command, args []string) {
if needsDeletion {
if err := remote.DeleteFile(path.Join(remotePath, remoteFile.Filename)); err != nil {
stdout.ErrorF("delete: %s ERR: %s\n", remoteFile.Filename, err)
logrus.WithField("filename", remoteFile.Filename).WithError(err).Error("deleting remote file")
nErr++
return
}
stdout.InfoF("delete: %s OK\n", remoteFile.Filename)
logrus.WithField("filename", remoteFile.Filename).Info("deleted remote file")
}
}(remoteFile)
}
}
for {
if len(syncChannel) == 0 {
break
}
for len(syncChannel) > 0 {
<-time.After(time.Second)
}
}
func errExit(err error) {
if err != nil {
stdout.ErrorF("ERR: %s\n", err)
os.Exit(1)
}
}
func getFSProvider(prefix string) (filesystemProvider, error) {
if strings.HasPrefix(prefix, "s3://") {
return newS3Provider()
}
return newLocalProvider(), nil
return nil
}


@ -0,0 +1,26 @@
// Package fsprovider contains implementations for filesystem access
// to read files from / write files to
package fsprovider
import (
"io"
"time"
)
type (
// File contains metadata about the file to be copied
File struct {
Filename string
LastModified time.Time
Size int64
}
// Provider describes the implementation of a fsprovider
Provider interface {
WriteFile(path string, content io.Reader, public bool) error
ReadFile(path string) (io.ReadCloser, error)
ListFiles(prefix string) ([]File, error)
DeleteFile(path string) error
GetAbsolutePath(path string) (string, error)
}
)
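
A rough sketch of how a caller drives this interface, mirroring what the refactored main.go does (the bucket name and local path below are made up, error handling is reduced to log.Fatal):

```go
package main

import (
	"log"
	"path"

	"github.com/Luzifer/s3sync/pkg/fsprovider"
)

func main() {
	// main.go selects the provider from the "s3://" prefix; here the
	// source is local and the destination is S3 (or MinIO when an
	// endpoint is passed to NewS3).
	src := fsprovider.NewLocal()

	dst, err := fsprovider.NewS3("") // "" = default AWS endpoint
	if err != nil {
		log.Fatal(err)
	}

	files, err := src.ListFiles("./public")
	if err != nil {
		log.Fatal(err)
	}

	for _, f := range files {
		rc, err := src.ReadFile(path.Join("./public", f.Filename))
		if err != nil {
			log.Fatal(err)
		}

		// false = do not attach a public-read ACL to the uploaded object
		if err := dst.WriteFile("s3://my-bucket/site/"+f.Filename, rc, false); err != nil {
			log.Fatal(err)
		}

		if err := rc.Close(); err != nil {
			log.Fatal(err)
		}
	}
}
```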

pkg/fsprovider/local.go (new file, 92 lines)

@ -0,0 +1,92 @@
package fsprovider
import (
"io"
"os"
"path/filepath"
"strings"
"github.com/pkg/errors"
)
const directoryCreatePerms = 0o750
type (
// Local implements the Provider for filesystem access
Local struct{}
)
// NewLocal creates a new Local file provider
func NewLocal() *Local {
return &Local{}
}
// DeleteFile deletes a local file
func (*Local) DeleteFile(path string) error {
return errors.Wrap(os.Remove(path), "removing file")
}
// GetAbsolutePath converts the given path into an absolute path
func (*Local) GetAbsolutePath(path string) (string, error) {
fp, err := filepath.Abs(path)
return fp, errors.Wrap(err, "getting absolute filepath")
}
// ListFiles retrieves the metadata of all files with given prefix
func (*Local) ListFiles(prefix string) ([]File, error) {
out := []File{}
err := filepath.Walk(prefix, func(path string, f os.FileInfo, err error) error {
if err != nil {
return err
}
if f.IsDir() {
return nil
}
linkTarget, err := filepath.EvalSymlinks(path)
if err != nil {
return errors.Wrap(err, "evaluating symlinks")
}
if path != linkTarget {
if f, err = os.Stat(linkTarget); err != nil {
return errors.Wrap(err, "getting file-stat of link target")
}
}
out = append(out, File{
Filename: strings.TrimLeft(strings.Replace(path, prefix, "", 1), string(os.PathSeparator)),
LastModified: f.ModTime(),
Size: f.Size(),
})
return nil
})
return out, errors.Wrap(err, "walking prefix")
}
// ReadFile opens the local file for reading
func (*Local) ReadFile(path string) (io.ReadCloser, error) {
f, err := os.Open(path) //#nosec:G304 // The purpose is to read a dynamic file
return f, errors.Wrap(err, "opening file")
}
// WriteFile takes the content of the file and writes it to the local
// filesystem
func (*Local) WriteFile(path string, content io.Reader, _ bool) error {
if err := os.MkdirAll(filepath.Dir(path), directoryCreatePerms); err != nil {
return errors.Wrap(err, "creating file path")
}
f, err := os.Create(path) //#nosec:G304 // The purpose is to create a dynamic file
if err != nil {
return errors.Wrap(err, "creating file")
}
if _, err := io.Copy(f, content); err != nil {
return errors.Wrap(err, "copying content")
}
return errors.Wrap(f.Close(), "closing file")
}
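
One detail worth noting in `ListFiles`: symlinks are resolved, so size and modification time come from the link target rather than the link itself. A small standalone illustration of that step (the paths are hypothetical):

```go
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
)

func main() {
	// Hypothetical path: "data/current.txt" is a symlink to "data/v2.txt".
	p := "data/current.txt"

	info, err := os.Lstat(p) // metadata of the link itself
	if err != nil {
		log.Fatal(err)
	}

	if target, err := filepath.EvalSymlinks(p); err == nil && target != p {
		// Re-stat the target so Size/ModTime describe the real file,
		// which is what Local.ListFiles does for symlinked entries.
		if info, err = os.Stat(target); err != nil {
			log.Fatal(err)
		}
	}

	fmt.Println(info.Size(), info.ModTime())
}
```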

pkg/fsprovider/s3.go (new file, 217 lines)

@ -0,0 +1,217 @@
package fsprovider
import (
"io"
"mime"
"os"
"path/filepath"
"regexp"
"strings"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)
const (
channelBufferSizeHuge = 10000
channelBufferSizeSmall = 10
doneTickerInterval = 500 * time.Millisecond
maxKeysPerPage = 1000
)
type (
// S3 implements the Provider for S3 / MinIO access
S3 struct {
conn *s3.S3
requestedPrefix string
}
)
// NewS3 creates a new S3 / MinIO file provider
func NewS3(endpoint string) (*S3, error) {
var cfgs []*aws.Config
if endpoint != "" {
cfgs = append(cfgs, &aws.Config{
Endpoint: &endpoint,
S3ForcePathStyle: aws.Bool(true),
})
}
sess := session.Must(session.NewSession(cfgs...))
return &S3{
conn: s3.New(sess),
}, nil
}
// DeleteFile deletes an object from the bucket
func (s *S3) DeleteFile(path string) error {
bucket, path, err := s.getBucketPath(path)
if err != nil {
return errors.Wrap(err, "getting bucket path")
}
_, err = s.conn.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(path),
})
return errors.Wrap(err, "deleting object")
}
// GetAbsolutePath converts the given path into an absolute path
func (*S3) GetAbsolutePath(path string) (string, error) {
return path, nil
}
// ListFiles retrieves the metadata of all objects with given prefix
func (s *S3) ListFiles(prefix string) ([]File, error) {
out := []File{}
bucket, path, err := s.getBucketPath(prefix)
if err != nil {
return out, errors.Wrap(err, "getting bucket path")
}
processedPrefixes := []string{}
prefixChan := make(chan *string, channelBufferSizeHuge)
outputChan := make(chan File, channelBufferSizeHuge)
errChan := make(chan error, channelBufferSizeSmall)
syncChan := make(chan bool, channelBufferSizeSmall)
doneTimer := time.NewTicker(doneTickerInterval)
prefixChan <- aws.String(path)
for {
select {
case prefix := <-prefixChan:
if len(syncChan) == channelBufferSizeSmall {
prefixChan <- prefix
} else {
found := false
for _, v := range processedPrefixes {
if v == *prefix {
found = true
}
}
if !found {
syncChan <- true
go s.readS3FileList(bucket, prefix, outputChan, prefixChan, errChan, syncChan)
processedPrefixes = append(processedPrefixes, *prefix)
}
}
case o := <-outputChan:
out = append(out, o)
case err := <-errChan:
return out, err
case <-doneTimer.C:
logrus.Debugf("scanning prefixes (%d working, %d left)...", len(syncChan), len(prefixChan))
if len(prefixChan) == 0 && len(syncChan) == 0 {
return out, nil
}
}
}
}
// ReadFile retrieves the object body for reading
func (s *S3) ReadFile(path string) (io.ReadCloser, error) {
bucket, path, err := s.getBucketPath(path)
if err != nil {
return nil, errors.Wrap(err, "getting bucket path")
}
o, err := s.conn.GetObject(&s3.GetObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(path),
})
if err != nil {
return nil, errors.Wrap(err, "getting object")
}
return o.Body, nil
}
// WriteFile copies the content into an S3 object
//
//revive:disable-next-line:flag-parameter // That's not a control coupling but a config flag
func (s *S3) WriteFile(path string, content io.Reader, public bool) error {
bucket, path, err := s.getBucketPath(path)
if err != nil {
return errors.Wrap(err, "getting bucket path")
}
ext := filepath.Ext(path)
mimeType := mime.TypeByExtension(ext)
if mimeType == "" {
mimeType = "application/octet-stream"
}
params := &s3manager.UploadInput{
Bucket: aws.String(bucket),
Key: aws.String(path),
Body: content,
ContentType: aws.String(mimeType),
}
if public {
params.ACL = aws.String("public-read")
}
_, err = s3manager.NewUploaderWithClient(s.conn).Upload(params)
return errors.Wrap(err, "uploading file")
}
func (s *S3) getBucketPath(prefix string) (bucket string, path string, err error) {
rex := regexp.MustCompile(`^s3://?([^/]+)/(.*)$`)
matches := rex.FindStringSubmatch(prefix)
if matches == nil {
return "", "", errors.New("prefix did not match requirements")
}
bucket = matches[1]
path = strings.ReplaceAll(matches[2], string(os.PathSeparator), "/")
s.requestedPrefix = path
return bucket, path, nil
}
func (s *S3) readS3FileList(bucket string, path *string, outputChan chan File, prefixChan chan *string, errorChan chan error, syncChan chan bool) {
defer func() { <-syncChan }()
in := &s3.ListObjectsInput{
Bucket: aws.String(bucket),
Prefix: path,
MaxKeys: aws.Int64(maxKeysPerPage),
Delimiter: aws.String("/"),
}
for {
o, err := s.conn.ListObjects(in)
if err != nil {
errorChan <- errors.Wrap(err, "listing objects")
return
}
for _, v := range o.Contents {
outputChan <- File{
Filename: strings.Replace(*v.Key, s.requestedPrefix, "", 1),
LastModified: *v.LastModified,
Size: *v.Size,
}
}
if len(o.CommonPrefixes) > 0 {
for _, cp := range o.CommonPrefixes {
prefixChan <- cp.Prefix
}
}
if !*o.IsTruncated {
break
}
in.Marker = o.NextMarker
}
}
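
getBucketPath splits the `s3://bucket/key-prefix` notation with a regular expression; the second slash is optional, so prefixes that were collapsed by `path.Join` (e.g. `s3:/bucket/...`) still match. A small standalone check of what it extracts, using made-up bucket names:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as fsprovider.S3.getBucketPath.
	rex := regexp.MustCompile(`^s3://?([^/]+)/(.*)$`)

	for _, in := range []string{
		"s3://my-bucket/site/assets/", // normal form
		"s3:/my-bucket/site/assets/",  // collapsed by path.Join, still accepted
		"s3://my-bucket",              // no slash after the bucket: no match
	} {
		m := rex.FindStringSubmatch(in)
		if m == nil {
			fmt.Printf("%-30s -> no match\n", in)
			continue
		}
		fmt.Printf("%-30s -> bucket=%q path=%q\n", in, m[1], m[2])
	}
}
```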

s3.go (deleted, 191 lines)

@ -1,191 +0,0 @@
package main
import (
"fmt"
"io"
"mime"
"os"
"path/filepath"
"regexp"
"strings"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
)
type s3Provider struct {
conn *s3.S3
requestedPrefix string
}
func newS3Provider() (*s3Provider, error) {
sess := session.Must(session.NewSession())
return &s3Provider{
conn: s3.New(sess),
}, nil
}
func (s *s3Provider) getBucketPath(prefix string) (bucket string, path string, err error) {
rex := regexp.MustCompile(`^s3://?([^/]+)/(.*)$`)
matches := rex.FindStringSubmatch(prefix)
if len(matches) != 3 {
err = fmt.Errorf("prefix did not match requirements")
return
}
bucket = matches[1]
path = strings.Replace(matches[2], string(os.PathSeparator), "/", -1)
s.requestedPrefix = path
return
}
func (s *s3Provider) ListFiles(prefix string) ([]file, error) {
out := []file{}
bucket, path, err := s.getBucketPath(prefix)
if err != nil {
return out, err
}
processedPrefixes := []string{}
prefixChan := make(chan *string, 10000)
outputChan := make(chan file, 10000)
errChan := make(chan error, 10)
syncChan := make(chan bool, 10)
doneTimer := time.NewTicker(500 * time.Millisecond)
prefixChan <- aws.String(path)
for {
select {
case prefix := <-prefixChan:
if len(syncChan) == 10 {
prefixChan <- prefix
} else {
found := false
for _, v := range processedPrefixes {
if v == *prefix {
found = true
}
}
if !found {
syncChan <- true
go s.readS3FileList(bucket, prefix, outputChan, prefixChan, errChan, syncChan)
processedPrefixes = append(processedPrefixes, *prefix)
}
}
case o := <-outputChan:
out = append(out, o)
case err := <-errChan:
return out, err
case <-doneTimer.C:
stdout.DebugF("Scanning prefixes (%d working, %d left)...\r", len(syncChan), len(prefixChan))
if len(prefixChan) == 0 && len(syncChan) == 0 {
fmt.Printf("\n")
return out, nil
}
}
}
}
func (s *s3Provider) readS3FileList(bucket string, path *string, outputChan chan file, prefixChan chan *string, errorChan chan error, syncChan chan bool) {
defer func() { <-syncChan }()
in := &s3.ListObjectsInput{
Bucket: aws.String(bucket),
Prefix: path,
MaxKeys: aws.Int64(1000),
Delimiter: aws.String("/"),
}
for {
o, err := s.conn.ListObjects(in)
if err != nil {
errorChan <- err
return
}
for _, v := range o.Contents {
outputChan <- file{
Filename: strings.Replace(*v.Key, s.requestedPrefix, "", 1),
Size: *v.Size,
MD5: strings.Trim(*v.ETag, "\""), // Wat?
}
}
if len(o.CommonPrefixes) > 0 {
for _, cp := range o.CommonPrefixes {
prefixChan <- cp.Prefix
}
}
if !*o.IsTruncated {
break
}
in.Marker = o.NextMarker
}
}
func (s *s3Provider) WriteFile(path string, content io.ReadSeeker, public bool) error {
bucket, path, err := s.getBucketPath(path)
if err != nil {
return err
}
ext := filepath.Ext(path)
mimeType := mime.TypeByExtension(ext)
if mimeType == "" {
mimeType = "application/octet-stream"
}
params := &s3.PutObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(path),
Body: content,
ContentType: aws.String(mimeType),
}
if public {
params.ACL = aws.String("public-read")
}
_, err = s.conn.PutObject(params)
return err
}
func (s *s3Provider) ReadFile(path string) (io.ReadCloser, error) {
bucket, path, err := s.getBucketPath(path)
if err != nil {
return nil, err
}
o, err := s.conn.GetObject(&s3.GetObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(path),
})
if err != nil {
return nil, err
}
return o.Body, nil
}
func (s *s3Provider) DeleteFile(path string) error {
bucket, path, err := s.getBucketPath(path)
if err != nil {
return err
}
_, err = s.conn.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(path),
})
return err
}
func (s *s3Provider) GetAbsolutePath(path string) (string, error) {
return path, nil
}