Mirror of https://github.com/Luzifer/archrepo.git (synced 2024-11-08 09:30:01 +00:00)
Move repo to subdir, remove repoctl dependency

Signed-off-by: Knut Ahlers <knut@ahlers.me>

Commit 7c2a1c3578, parent b5e4cbe9dc
8 changed files with 137 additions and 32 deletions
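
For orientation, the resulting layout looks roughly like this (reconstructed from the paths referenced in the diff below; only files touched or mentioned there are shown):

    archrepo/
    ├── Makefile
    ├── aur-packages               # AUR package names, one per line
    ├── repo-urls                  # git repository URLs, one per line
    ├── scripts/
    │   ├── check_aur_updates.sh
    │   ├── do_cleanup.sh          # new in this commit
    │   ├── remove_old_versions.py # new in this commit
    │   ├── update-aur.sh
    │   └── update-repo.sh
    └── repo/                      # new subdirectory holding the repository itself
        ├── luzifer.asc
        ├── luzifer.db.tar.xz
        ├── packages.txt
        └── *.pkg.tar.{xz,zst} (plus *.sig files)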
.gitignore (vendored): 1 line changed

@@ -3,6 +3,7 @@
 *.files
 *.files.sig
 packages.txt
+repo
 .repo_cache
 *.tar.xz
 *.tar.xz.sig
Makefile: 28 lines changed

@@ -1,5 +1,7 @@
-export DATABASE:=$(shell find . -maxdepth 1 -mindepth 1 -name '*.db.tar.xz' -or -name '*.db.tar.zst')
+export REPO_DIR:=$(CURDIR)/repo
 export REPOKEY:=D0391BF9
+export RETAIN:=1
+export DATABASE:=$(shell find $(REPO_DIR) -maxdepth 1 -mindepth 1 -name '*.db.tar.xz' -or -name '*.db.tar.zst')
 
 
 maintanance: do_updates do_cleanup list_packages upload
@@ -17,7 +19,7 @@ download:
 	--exclude '*.old*' \
 	--exclude '.git/*' \
 	--acl=public-read \
-	s3://arch-luzifer-io/repo/x86_64/ $(CURDIR)/
+	s3://arch-luzifer-io/repo/x86_64/ $(REPO_DIR)/
 
 upload: cleanup_files check_archive_mix
 	vault2env --key=secret/aws/private -- aws s3 sync \
@@ -25,14 +27,14 @@ upload: cleanup_files check_archive_mix
 	--exclude '*.old*' \
 	--exclude '.git/*' \
 	--acl=public-read \
-	$(CURDIR)/ s3://arch-luzifer-io/repo/x86_64/
+	$(REPO_DIR)/ s3://arch-luzifer-io/repo/x86_64/
 
 # Maintenance targets
 
 aur_update: check_tools check_database
 	bash -euo pipefail -c 'for pkg in $$(script_level=1 ./scripts/check_aur_updates.sh); do script_level=1 ./scripts/update-aur.sh $${pkg}; done'
 
-check_aur_update:
+check_aur_update: check_database
 	bash ./scripts/check_aur_updates.sh
 
 check_database:
@@ -45,28 +47,24 @@ check_tools:
 	@which docker
 	@which jq
 	@which repo-add
-	@which repoctl
 	@which vault
 	@which vault2env
 
 cleanup_files:
-	rm -f *.old* scripts/repoctl.toml
+	rm -f *.old*
 
-cleanup_orphan_signatures: check_database
-	bash -euo pipefail -c 'for i in *.sig; do [[ -f $${i//.sig} ]] || rm $${i}; done'
+cleanup_orphan_signatures:
+	bash -euo pipefail -c 'for i in $(REPO_DIR)/*.sig; do [[ -f $${i//.sig} ]] || rm $${i}; done'
 
-cleanup_repo: check_tools check_database scripts/repoctl.toml
-	repoctl update
+cleanup_repo: check_tools
+	bash ./scripts/do_cleanup.sh
 
 list_packages:
-	tar -tf luzifer.db.tar.xz | grep -v '/desc' | sed -E 's/(.*)-([^-]+-[0-9]+)\//\1\t\2/' | sort | column -t >packages.txt
+	tar -tf $(DATABASE) | grep -v '/desc' | sed -E 's/(.*)-([^-]+-[0-9]+)\//\1\t\2/' | sort | column -t >$(REPO_DIR)/packages.txt
 
-repo_update: check_tools check_database load_ssh_key
+repo_update: check_tools load_ssh_key
 	bash -euo pipefail -c 'for repo in $$(grep -v "^#" repo-urls); do script_level=1 ./scripts/update-repo.sh $${repo}; done'
 
-scripts/repoctl.toml:
-	./scripts/repoctl.sh
-
 sign_database:
 	repo-add -s --key $(REPOKEY) $(DATABASE)
 
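Note on the new cleanup flow: where `cleanup_repo` previously generated `scripts/repoctl.toml` and ran `repoctl update`, it now only checks for the required tools and hands off to `scripts/do_cleanup.sh` (added below). A rough manual equivalent, assuming the defaults exported by this Makefile, would be:

    # hypothetical manual run, roughly `make cleanup_repo` minus the tool check
    export REPO_DIR="$PWD/repo"   # where the packages and the *.db.tar.xz live
    export RETAIN=1               # versions to keep per package (the script itself defaults to 2)
    bash ./scripts/do_cleanup.sh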
README.md

@@ -11,15 +11,15 @@ This repository contains four essential parts:
 - The `scripts` folder containing bash scripts to control all actions
 - The `Makefile` to orchestrate everything. The main functionality is the `maintenance` target (or just `make`)
 - The package lists (`aur-packages` and `repo-urls`)
-- The `luzifer.asc` public key
+- The `repo/luzifer.asc` public key
 
-It currently relies on my [`luzifer/arch-repo-builder`](https://github.com/luzifer-docker/arch-repo-builder) docker image which does all of the building within a clean environment for each package and on [`repoctl`](https://github.com/cassava/repoctl) for some cleanup tasks (which is subject to change as I want the setup to be as self-contained as possible).
+It currently relies on my [`luzifer/arch-repo-builder`](https://github.com/luzifer-docker/arch-repo-builder) docker image which does all of the building within a clean environment for each package.
 
 For the initial setup you need to do some steps:
 
 - Adjust the `Makefile` as you need different `download` and `upload` targets
-- Create an empty database `tar -cJf luzifer.db.tar.xz -T /dev/null` (adjust the filename)
-- Put the public key for your repo into `luzifer.asc` (filename should match the database, makes it easier to find)
+- Create an empty database `tar -cJf repo/luzifer.db.tar.xz -T /dev/null` (adjust the filename)
+- Put the public key for your repo into `repo/luzifer.asc` (filename should match the database, makes it easier to find)
 - Set up your `aur-packages` and `repo-urls` package lists
   - `aur-packages` contains just names of AUR packages (no comments or other stuff!)
   - `repo-urls` contains one git repository URL per line (comments allowed)

@@ -34,6 +34,5 @@ The repo should be updated on a regular base by just executing `make` on it. Thi
 ## Flaws / Remarks / TODOs
 
 - The whole build already strongly relies on Archlinux tools so will not run on any other distro
-- Currently `repoctl` as an external tool is used to clean up old packages and the package database. This shall be done by local scripts in the future.
 - For `aur-packages` having dynamic `pkgver` calculation the update check will not work properly until the `PKGBUILD` in AUR is updated (those packages can be built manually using `bash ./scripts/update-aur.sh <packagename> && make do_cleanup upload`
 - For `repo-urls` the same applies: for example my `vim-go-tools` package relies on periodic re-builds which are not executed as commits are quite rare. For those packages at the moment a call to `bash ./scripts/update-repo.sh <url> && make do_cleanup upload` is required
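The setup list above translates into only a few shell commands; a minimal sketch (the key ID, database name and the example package entries are placeholders to adapt):

    mkdir -p repo
    tar -cJf repo/luzifer.db.tar.xz -T /dev/null          # empty package database
    gpg --export --armor D0391BF9 >repo/luzifer.asc       # public signing key, named after the database
    echo some-aur-package >aur-packages                   # AUR package names only, no comments
    echo https://example.com/some-pkgbuild.git >repo-urls # one git URL per line, comments allowed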
scripts/check_aur_updates.sh

@@ -16,7 +16,7 @@ declare -A aur_versions
 
 IFS=$'\n'
 
-database=$(find . -maxdepth 1 -mindepth 1 -name '*.db.tar.xz' -or -name '*.db.tar.zst')
+database=$(find ${REPO_DIR:-$(pwd)} -maxdepth 1 -mindepth 1 -name '*.db.tar.xz' -or -name '*.db.tar.zst')
 
 aur_query=("https://aur.archlinux.org/rpc/?v=5&type=info")
 
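When run outside of make, the script now expects REPO_DIR to point at the repo subdirectory and falls back to the current working directory otherwise; for example:

    # hypothetical standalone run of the AUR update check
    REPO_DIR="$PWD/repo" bash ./scripts/check_aur_updates.sh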
scripts/do_cleanup.sh (new file): 29 lines

@@ -0,0 +1,29 @@
+#!/bin/bash
+set -euo pipefail
+
+function log() {
+  echo "[$(date +%H:%M:%S)] $@" >&2
+}
+
+syncdir="${REPO_DIR:-$(pwd)}"
+
+log "Cleaning up old package versions..."
+BASE_PATH="${syncdir}" python scripts/remove_old_versions.py
+
+# Ensure removal of previous repo files
+log "Cleaning up old database files..."
+find "${syncdir}" -regextype egrep -regex '^.*\.(db|files)(|\.tar|\.tar\.xz|\.tar\.zst)$' -delete
+
+log "Adding remaining packages to database..."
+packages=($(find "${syncdir}" -regextype egrep -regex '^.*\.pkg\.tar(\.xz|\.zst)$' | sort))
+
+if [ "${#packages[@]}" -eq 0 ]; then
+  log "No packages found to add to repo, this looks like an error!"
+  exit 1
+fi
+
+log "Adding packages..."
+repo-add --new --prevent-downgrade "${syncdir}/luzifer.db.tar.xz" "${packages[@]}"
+
+log "All packages added, removing *.old copies..."
+find "${syncdir}" -name '*.old' -delete
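The two find regexes carry the weight here: the first wipes every previous database artifact, the second collects the package archives that are fed back into repo-add. File names of the shapes they match look like this (package names are made up for illustration):

    luzifer.db  luzifer.db.tar.xz  luzifer.files  luzifer.files.tar.xz    # removed before the rebuild
    somepkg-1.2.3-1-x86_64.pkg.tar.zst  otherpkg-0.9-2-any.pkg.tar.xz     # added to the rebuilt database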
scripts/remove_old_versions.py (new file): 89 lines

@@ -0,0 +1,89 @@
+#!/usr/bin/env python3
+
+import os
+import re
+import sys
+
+# Available only on Archlinux systems through extra/pyalpm package
+from pyalpm import vercmp
+
+FILENAME_REGEX = r'^(?P<pkgname>.*)-(?P<version>(?:[0-9]+:)?(?:[^:-]+)-(?:[0-9]+))-[^-]+\.pkg\.tar\.(?:xz|zst)$'
+
+base_path = os.environ['BASE_PATH'] if 'BASE_PATH' in os.environ else './repo'
+retain_versions = int(os.environ['RETAIN']) if 'RETAIN' in os.environ else 2
+
+class VercmpSort:
+    def __init__(self, obj, *args):
+        self.obj = obj
+
+    def __lt__(self, other):
+        return vercmp(self.obj['version'], other.obj['version']) < 0
+
+    def __gt__(self, other):
+        return vercmp(self.obj['version'], other.obj['version']) > 0
+
+    def __eq__(self, other):
+        return vercmp(self.obj['version'], other.obj['version']) == 0
+
+    def __le__(self, other):
+        return vercmp(self.obj['version'], other.obj['version']) <= 0
+
+    def __ge__(self, other):
+        return vercmp(self.obj['version'], other.obj['version']) >= 0
+
+    def __ne__(self, other):
+        return vercmp(self.obj['version'], other.obj['version']) != 0
+
+def get_package_versions():
+    packages = {}
+    for filename in os.listdir(base_path):
+        match = re.match(FILENAME_REGEX, filename)
+
+        if match is None:
+            # Not a package file
+            continue
+
+        if match.group('pkgname') not in packages:
+            packages[match.group('pkgname')] = []
+
+        packages[match.group('pkgname')].append({
+            'filename': filename,
+            'version': match.group('version'),
+        })
+
+    return packages
+
+def main():
+    packages = get_package_versions()
+    for pkgname in sorted(packages.keys()):
+        versions = packages[pkgname]
+
+        print('')
+        print('Analysing package "{}"...'.format(pkgname))
+
+        if len(versions) <= retain_versions:
+            print(' Retaining all versions: {}'.format(
+                ', '.join([x['version'] for x in versions]),
+            ))
+            continue
+
+        ndrop = len(versions) - retain_versions
+
+        versions = sorted(versions, key=VercmpSort)
+        drop = versions[:ndrop]
+        retain = versions[ndrop:]
+
+        print(' Dropped versions: {}'.format(
+            ', '.join([x['version'] for x in drop]),
+        ))
+        print(' Retained versions: {}'.format(
+            ', '.join([x['version'] for x in retain]),
+        ))
+
+        for filename in [x['filename'] for x in drop]:
+            os.unlink(os.path.join(base_path, filename))
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
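The retention script is driven entirely by the two environment variables read at its top: do_cleanup.sh supplies BASE_PATH, and RETAIN comes through from the Makefile export. A standalone invocation (paths illustrative, pyalpm must be installed) would look like:

    # keep the two newest versions of every package under ./repo (RETAIN unset, script default 2)
    BASE_PATH="$PWD/repo" python3 scripts/remove_old_versions.py

    # keep only the newest version, matching the Makefile's RETAIN:=1
    BASE_PATH="$PWD/repo" RETAIN=1 python3 scripts/remove_old_versions.py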
scripts/repoctl.sh (deleted file): 11 lines

@@ -1,11 +0,0 @@
-#!/bin/bash
-set -euo pipefail
-
-cat -s <<EOF >scripts/repoctl.toml
-repo = "$(find $(pwd) -mindepth 1 -maxdepth 1 -name '*.db.tar.xz' -or -name '*.db.tar.xstd')"
-backup = false
-interactive = false
-columnate = false
-color = "auto"
-quiet = false
-EOF