buildbot: repo.py

This commit is contained in:
JerryXiao 2019-04-02 22:03:28 +08:00
commit 1509183847
Signed by: Jerry
GPG Key ID: 9D9CE43650FF2BAA
8 changed files with 317 additions and 0 deletions

6
.gitignore vendored Normal file
View File

@ -0,0 +1,6 @@
.vscode/*
__pycache__/
*.py[cod]
buildbot.log*
buildbot.sql
test

25
README.md Normal file
View File

@ -0,0 +1,25 @@
# Buildbot
## Typical autobuild.yaml format
### Note
Any entry starting with `bash` is treated as a bash command.
`e.g. - bash ls -al`
All four blocks (updates, prebuild, build, postbuild) can be omitted; when a block is omitted, the first value listed for it is used as the default.
### Example
```
updates:
- repo (repo only, it means the package will only be built when a new commit is pushed to repo.)
- git <url> <remote/branch>* (* means optional)
- ?? (tbd)
prebuild:
- standard (do nothing)
- git-cherrypick <url> <remote/branch> <local_branch> push*
- ??
build:
- standard (makepkg -s, note that missing aur dependencies will not be resolved.)
- ??
postbuild:
- standard (sign and upload)
- do_nothing (leave it alone)
- ??
```

8
config.py Normal file
View File

@ -0,0 +1,8 @@
# config
# Name of the pacman repository; also names the database files
# maintained by repo-add (<REPO_NAME>.db.tar.gz etc., see repo.py).
REPO_NAME='jerryxiao'
# Compression suffix expected on package files (.pkg.tar.<PKG_COMPRESSION>).
PKG_COMPRESSION='xz'
# Every architecture directory served under www/ by repo.py.
ARCHS = ['aarch64', 'any', 'armv7h', 'x86_64']
# Architectures that are actually built (armv7h has no build bot,
# per the directory notes in repo.py).
BUILD_ARCHS = ['aarch64', 'any', 'x86_64']
# Shell command used to (re)generate the repo database with verification.
REPO_CMD = 'repo-add --verify --remove'

2
master.py Executable file
View File

@ -0,0 +1,2 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

13
package.py Normal file
View File

@ -0,0 +1,13 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# package.py: placeholder module for package build/sign helpers.
# The commented command lines below are working notes for commands that
# are not implemented yet.
import logging
from utils import bash
logger = logging.getLogger(name='package')
# makepkg -o
# makepkg -e
# makepkg --nosign
# makepkg --packagelist
# gpg --default-key {GPG_KEY} --no-armor --pinentry-mode loopback --passphrase '' --detach-sign --yes -- aaa

131
repo.py Executable file
View File

@ -0,0 +1,131 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# repo.py: Automatic management tool for an arch repo.
# This file is part of Buildbot by JerryXiao
# Directory structure of the repo:
# buildbot -- buildbot (git)
# buildbot/repo -- repo root
# /updates/ -- new packages goes in here
# /updates/archive -- archive dir, old packages goes in here
# /www/ -- http server root
# /www/archive => /updates/archive -- archive dir for users
# /www/aarch64 -- packages for "aarch64"
# /www/any -- packages for "any"
# /www/armv7h -- packages for "armv7h" (No build bot)
# /www/x86_64 -- packages for "x86_64"
# /www/robots.txt => /r_r_n/r.txt -- robots.txt
import os
from pathlib import Path
import logging
from utils import bash, Pkg, get_pkg_details_from_name
from time import time
from config import REPO_NAME, PKG_COMPRESSION, ARCHS, REPO_CMD
# Work out of <this file's directory>/repo so that every relative path
# used below (www/, updates/, ...) resolves against the repo root.
abspath = os.path.abspath(__file__)
repocwd = Path(abspath).parent / 'repo'
repocwd.mkdir(mode=0o755, exist_ok=True)
os.chdir(repocwd)  # NOTE: import-time side effect — changes cwd for the whole process
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
def symlink(dst, src, exist_ok=True):
    """Create a symlink at *dst* pointing to *src*.

    dst must be a path-like object and src a string (used verbatim as the
    link target, so it may be relative).  If the destination already
    exists the error is swallowed unless exist_ok is False, in which case
    FileExistsError propagates to the caller.
    """
    # isinstance() is the idiomatic type check; the original
    # issubclass(type(...)) / `type(src) is str` forms also rejected
    # str subclasses for no benefit.
    assert isinstance(dst, os.PathLike) and isinstance(src, str)
    try:
        dst.symlink_to(src)
    except FileExistsError:
        if not exist_ok:
            raise
def checkenv():
    """Create the directory layout this module expects under the repo root.

    Ensures the top-level 'recycled' bin, 'updates/archive', and one
    'www/<arch>' directory per configured arch all exist, then links
    www/archive to updates/archive for the HTTP server.
    """
    (Path(abspath).parent / 'recycled').mkdir(mode=0o755, exist_ok=True)
    needed = [Path('updates/archive')]
    needed.extend(Path('www') / arch for arch in ARCHS)
    for directory in needed:
        directory.mkdir(mode=0o755, exist_ok=True, parents=True)
    try:
        symlink(Path('www/archive'), 'updates/archive')
    except FileExistsError:
        pass
# Build the directory layout as soon as this module is imported.
checkenv()
def repo_add(fpath):
    """Register the package file *fpath* in its arch's repo database.

    The database lives next to the package (<REPO_NAME>.db.tar.gz) and is
    updated via the configured REPO_CMD.  Returns the captured output of
    the repo-add command; raises CalledProcessError (from bash) on failure.
    """
    from shlex import quote  # local import: keeps the file's import block unchanged
    assert isinstance(fpath, os.PathLike) and fpath.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}')
    dbpath = fpath.parent / f'{REPO_NAME}.db.tar.gz'
    # Quote both paths so file names containing spaces or shell
    # metacharacters cannot break (or inject into) the command line.
    # REPO_CMD stays unquoted: it deliberately contains options.
    return bash(f'{REPO_CMD} {quote(str(dbpath))} {quote(str(fpath))}')
def throw_away(fpath):
    """Move *fpath* into the top-level 'recycled' directory.

    The current timestamp is appended to the file name so repeatedly
    recycled files with the same name do not collide.
    """
    assert isinstance(fpath, os.PathLike)
    recycle_bin = Path(abspath).parent / 'recycled'
    destination = recycle_bin / f"{fpath.name}_{time()}"
    assert not destination.exists()
    fpath.rename(destination)
def _check_repo():
    """Sanity-check and repair the on-disk repo under www/.

    Three passes:
      1. For every arch=any package in www/any, create relative symlinks
         (package and .sig) into each concrete arch directory.
      2. Per arch directory: recycle orphan signatures and unsigned
         packages, move mis-placed packages to their real arch directory,
         and repo-add everything valid; recycle unknown garbage.
      3. Report any essential repo database file missing from an arch dir.
    """
    rn = REPO_NAME
    # All file names repo-add maintains alongside the packages.
    repo_files = (f"{rn}.db {rn}.db.tar.gz {rn}.db.tar.gz.old "
                  f"{rn}.files {rn}.files.tar.gz {rn}.files.tar.gz.old")
    repo_files = repo_files.split(' ')
    # The *.old backups are optional; everything else is required.
    repo_files_essential = [fname for fname in repo_files if not fname.endswith('.old')]
    assert repo_files_essential
    # make symlink for arch=any pkgs
    basedir = Path('www') / 'any'
    if basedir.exists():
        for pkgfile in basedir.iterdir():
            # Only act on real package files whose embedded arch is 'any'.
            if pkgfile.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}') and \
               get_pkg_details_from_name(pkgfile.name).arch == 'any':
                sigfile = Path(f"{str(pkgfile)}.sig")
                if sigfile.exists():
                    logger.info(f'Creating symlink for {pkgfile}, {sigfile}')
                    for arch in ARCHS:
                        if arch == 'any':
                            continue
                        # Relative link targets keep the www tree relocatable.
                        symlink(pkgfile.parent / '..' / arch / pkgfile.name, f'../any/{pkgfile.name}')
                        symlink(sigfile.parent / '..' / arch / sigfile.name, f'../any/{sigfile.name}')
    else:
        # NOTE(review): 'arch' is not bound yet on this path, so this
        # f-string would raise NameError if www/any is missing.  Looks
        # copy-pasted from the per-arch loop below; presumably it was
        # meant to say 'any' — confirm and fix.
        logger.error(f'{arch} dir does not exist!')
    # run repo_add
    for arch in ARCHS:
        basedir = Path('www') / arch
        # Repo database file names actually found in this arch dir.
        repo_files_count = list()
        if not basedir.exists():
            logger.error(f'{arch} dir does not exist!')
            continue
        pkgfiles = [f for f in basedir.iterdir()]
        for pkgfile in pkgfiles:
            if pkgfile.name in repo_files:
                repo_files_count.append(pkgfile.name)
                continue
            if pkgfile.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}.sig'):
                # Orphan signature: its package is gone -> recycle it.
                if not Path(str(pkgfile)[:-4]).exists() and pkgfile.exists():
                    logger.warning(f"{pkgfile} has no package!")
                    throw_away(pkgfile)
                continue
            elif pkgfile.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}'):
                sigfile = Path(f"{str(pkgfile)}.sig")
                if not sigfile.exists():
                    # Unsigned packages are never served -> recycle.
                    logger.warning(f"{pkgfile} has no signature!")
                    throw_away(pkgfile)
                    continue
                realarch = get_pkg_details_from_name(pkgfile.name).arch
                if realarch != 'any' and realarch != arch:
                    # Package landed in the wrong arch dir: move package and
                    # signature over, then register in the correct database.
                    newpath = pkgfile.parent / '..' / realarch / pkgfile.name
                    assert not newpath.exists()
                    pkgfile.rename(newpath)
                    newSigpath = pkgfile.parent / '..' / realarch / f"{pkgfile.name}.sig"
                    assert not newSigpath.exists()
                    sigfile.rename(newSigpath)
                    logger.info(f'Moving {pkgfile} to {newpath}, {sigfile} to {newSigpath}')
                    logger.debug("repo-add: %s", repo_add(newpath))
                else:
                    logger.debug("repo-add: %s", repo_add(pkgfile))
            else:
                logger.warning(f"{pkgfile} is garbage!")
                throw_away(pkgfile)
        for rfile in repo_files_essential:
            if rfile not in repo_files_count:
                logger.error(f'{rfile} does not exist in {arch}!')

108
utils.py Normal file
View File

@ -0,0 +1,108 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import subprocess
import logging
from time import time
import re
from threading import Thread, Lock
from config import PKG_COMPRESSION
logger = logging.getLogger(name='utils')
def background(func):
    """Decorator: run *func* in a daemon thread on each call.

    The wrapped callable starts a daemon Thread running func(*args,
    **kwargs) and returns the Thread object so callers may join() it.
    """
    from functools import wraps  # local import: keeps the file's import block unchanged

    # functools.wraps preserves the wrapped function's __name__/__doc__,
    # which the original wrapper lost.
    @wraps(func)
    def wrapped(*args, **kwargs):
        thread = Thread(target=func, args=args, kwargs=kwargs)
        thread.daemon = True
        thread.start()
        return thread
    return wrapped
def bash(cmdline, **kwargs):
    """Execute *cmdline* via 'bash -x -e -c', logging it first.

    Extra keyword arguments are forwarded to run_cmd unchanged; the
    return value is run_cmd's captured output.
    """
    assert type(cmdline) is str
    logger.info(f'bash: {cmdline}')
    cmd = ['/bin/bash', '-x', '-e', '-c', cmdline]
    return run_cmd(cmd, **kwargs)
def long_bash(cmdline, hours=2):
    """Run a long bash command with keepalive enabled.

    The child gets a newline nudge after 60s of silence and a hard
    wall-clock limit of *hours* hours (a positive whole number).
    """
    assert type(hours) is int and hours >= 1
    logger.info(f'longbash{hours}: {cmdline}')
    hard_limit = hours * 3600
    return bash(cmdline, keepalive=True, KEEPALIVE_TIMEOUT=60,
                RUN_CMD_TIMEOUT=hard_limit)
def run_cmd(cmd, keepalive=False, KEEPALIVE_TIMEOUT=30, RUN_CMD_TIMEOUT=60):
    """Run *cmd*, streaming combined stdout/stderr through a reader thread.

    keepalive: if the child has been quiet for between one and two
    KEEPALIVE_TIMEOUT windows, write a newline to its stdin to nudge
    interactive prompts along.  RUN_CMD_TIMEOUT is a hard wall-clock
    limit; on expiry the child is terminated (killed after a further
    10s if needed).  Returns the captured output as one string; raises
    subprocess.CalledProcessError on a nonzero exit code.
    """
    # Poll interval: just under the keepalive window so quiet periods are
    # noticed before they double.
    RUN_CMD_LOOP_TIME = KEEPALIVE_TIMEOUT - 1 if KEEPALIVE_TIMEOUT >= 10 else 5
    stopped = False
    # last_read = [epoch seconds of the last read, last line read]
    last_read = [int(time()), ""]
    output = list()
    stdout_lock = Lock()
    @background
    def check_stdout(stdout):
        # Reader thread: collects child output and timestamps every read.
        # Holds stdout_lock for its whole lifetime; released only after
        # 'stopped' flips True and one more readline returns.
        nonlocal stopped, last_read, output
        stdout_lock.acquire()
        last_read_time = int(time())
        while stopped is False:
            # NOTE(review): readline blocks; after EOF it returns '' in a
            # tight loop until 'stopped' is set — confirm this is intended.
            line = stdout.readline(4096)
            last_read_time = int(time())
            logger.debug(line)
            output.append(line)
            last_read[0] = last_read_time
            last_read[1] = line
        stdout_lock.release()
    p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT, encoding='utf-8')
    check_stdout(p.stdout)
    process_start = int(time())
    while True:
        try:
            p.wait(timeout=RUN_CMD_LOOP_TIME)
        except subprocess.TimeoutExpired:
            time_passed = int(time()) - last_read[0]
            if time_passed >= KEEPALIVE_TIMEOUT*2:
                # Quiet for two whole windows already: a newline won't help.
                logger.info('Timeout expired. No action.')
                output.append('+ Buildbot: Timeout expired. No action.\n')
            elif time_passed >= KEEPALIVE_TIMEOUT:
                if keepalive:
                    logger.info('Timeout expired, writing nl')
                    output.append('+ Buildbot: Timeout expired, writing nl\n')
                    p.stdin.write('\n')
                    p.stdin.flush()
                else:
                    logger.info('Timeout expired, not writing nl')
                    output.append('+ Buildbot: Timeout expired, not writing nl\n')
            if int(time()) - process_start >= RUN_CMD_TIMEOUT:
                # Hard limit reached: terminate, escalate to kill after 10s.
                stopped = True
                logger.error('Process timeout expired, terminating.')
                output.append('+ Buildbot: Process timeout expired, terminating.\n')
                p.terminate()
                try:
                    p.wait(timeout=10)
                except subprocess.TimeoutExpired:
                    logger.error('Cannot terminate, killing.')
                    output.append('+ Buildbot: Cannot terminate, killing.\n')
                    p.kill()
                break
        else:
            # Child exited on its own within the poll window.
            stopped = True
            break
    code = p.returncode
    # NOTE(review): Lock.acquire(10) passes 10 as the *blocking* flag, not
    # a timeout — this blocks indefinitely until the reader thread releases
    # the lock.  If a 10-second timeout was intended, it should be
    # acquire(timeout=10).  Confirm before changing.
    stdout_lock.acquire(10)
    outstr = ''.join(output)
    if code != 0:
        raise subprocess.CalledProcessError(code, cmd, outstr)
    return outstr
class Pkg:
    """Value object describing one package file.

    Attributes mirror the four fields encoded in an Arch package file
    name: pkgname, pkgver, pkgrel and arch.
    """

    def __init__(self, pkgname, pkgver, pkgrel, arch):
        self.pkgname = pkgname
        self.pkgver = pkgver
        self.pkgrel = pkgrel
        self.arch = arch

    def _key(self):
        # Single source of truth for equality, hashing and repr.
        return (self.pkgname, self.pkgver, self.pkgrel, self.arch)

    def __repr__(self):
        # Debug-friendly repr; the bare original printed only the object id.
        return (f"Pkg(pkgname={self.pkgname!r}, pkgver={self.pkgver!r}, "
                f"pkgrel={self.pkgrel!r}, arch={self.arch!r})")

    def __eq__(self, other):
        if not isinstance(other, Pkg):
            return NotImplemented
        return self._key() == other._key()

    def __hash__(self):
        return hash(self._key())
def get_pkg_details_from_name(name):
    """Parse an Arch package file name into a Pkg.

    Splits '<pkgname>-<pkgver>-<pkgrel>-<arch>.pkg.tar.<ext>' into its
    four fields.  Names without the expected package suffix fall through
    and return None implicitly.
    """
    if name.endswith(f'pkg.tar.{PKG_COMPRESSION}'):
        # The original stored the match object in a variable named 'arch';
        # use a clearer name for the match itself.
        matched = re.match(r'(.+)-([^-]+)-([^-]+)-([^-]+)\.pkg\.tar\.\w+', name)
        assert matched and matched.groups() and len(matched.groups()) == 4
        pkgname, pkgver, pkgrel, arch = matched.groups()
        return Pkg(pkgname, pkgver, pkgrel, arch)

24
worker.py Executable file
View File

@ -0,0 +1,24 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import logging
from utils import bash
from yaml import load, dump
from pathlib import Path
logger = logging.getLogger(__name__)
# Resolve this script's own directory and chdir into it so that the
# relative Path('repo') below works regardless of the caller's cwd.
abspath=os.path.abspath(__file__)
abspath=os.path.dirname(abspath)
os.chdir(abspath)
# include all autobuild.yaml files
# NOTE(review): REPO_NAME here is a Path to the repo checkout directory,
# unrelated to config.REPO_NAME (the pacman repo name) — confusing reuse
# of the name; confirm before renaming.  'BUTOBUILD_FNAME' looks like a
# typo of AUTOBUILD_FNAME; it is used on later lines outside this view,
# so only flagging it here.
REPO_NAME = Path('repo')
BUTOBUILD_FNAME = 'autobuild.yaml'
for mydir in REPO_NAME.iterdir():
if mydir.is_dir() and (mydir / BUTOBUILD_FNAME).exists():
# parsing yaml
logger.info('Bulidbot: found %s in %s', BUTOBUILD_FNAME, mydir / BUTOBUILD_FNAME)