buildbot.py: wip

JerryXiao 2019-04-06 20:59:27 +08:00
parent 61787e30d9
commit 715d101d01
Signed by: Jerry
GPG Key ID: 9D9CE43650FF2BAA
9 changed files with 320 additions and 73 deletions


@@ -1,24 +1,22 @@
# Buildbot
## Typical autobuild.yaml format
### Note
Anything starting with `bash` is treated as a bash command.
`e.g. - bash ls -al`
Any of the four blocks (updates, prebuild, build, postbuild) can be omitted; if a block is omitted, its first listed value is used as the default.
### Example
```
updates:
- repo (repo only; the package will only be built when a new commit is pushed to the repo.)
- git <url> <remote/branch>* (* means optional)
- ?? (tbd)
prebuild:
- standard (do nothing)
- ??
build:
- standard (makepkg -s; note that missing AUR dependencies will not be resolved.)
- ??
postbuild:
- standard (sign and upload)
- do_nothing (leave it alone)
- ??
```

```
type:
  auto (determined by the package name)
  git (this is a git package; its source is checked for updates)
  manual (this package is only rebuilt when a new release is pushed)
cleanbuild:
  true / false
timeout:
  30 (timeout in minutes, int only)
extra:
  - update:
      - /bin/true
  - prebuild:
      - echo "Hello World!"
  - postbuild:
      - ls > list
  - failure:
      - rm file
```
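
For reference, here is a sketch of one complete `autobuild.yaml` using the fields above. The package directory name and the commands are made up for illustration only:

```
# pkgbuilds/example-pkg-git/autobuild.yaml (hypothetical package)
type: git
cleanbuild: true
timeout: 30
extra:
  - update:
      - /bin/true
  - prebuild:
      - bash echo "Hello World!"
  - postbuild:
      - bash ls > list
  - failure:
      - bash rm file
```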

buildbot.py Executable file

@@ -0,0 +1,183 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# buildbot.py: Automatic management tool for an arch repo.
# This file is part of Buildbot by JerryXiao
import logging
from multiprocessing.connection import Listener
from time import time, sleep
import os
from pathlib import Path
from subprocess import CalledProcessError
from utils import print_exc_plus, background
from config import ARCHS, BUILD_ARCHS, BUILD_ARCH_MAPPING, \
MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD, \
PKGBUILD_DIR, MAKEPKG_PKGLIST_CMD, MAKEPKG_UPD_CMD
from utils import bash, get_pkg_details_from_name, vercmp
import json
from yamlparse import load_all as load_all_yaml
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
abspath=os.path.abspath(__file__)
abspath=os.path.dirname(abspath)
os.chdir(abspath)
REPO_ROOT = Path(PKGBUILD_DIR)
class Job:
    def __init__(self, arch, pkgdir, packagelist, version):
        buildarch = BUILD_ARCH_MAPPING.get(arch, None)
        assert buildarch in BUILD_ARCHS
        self.arch = arch
        self.buildarch = buildarch
        self.pkgdir = pkgdir
        self.packagelist = packagelist
        self.version = version
        self.added = time()
        self.claimed = 0
class jobsManager:
    def __init__(self):
        self.__buildjobs = dict()
        for arch in BUILD_ARCHS:
            self.__buildjobs.setdefault(arch, list())
        self.__uploadjobs = list()
        self.__curr_job = None
        self.pkgconfigs = load_all_yaml()
    def _new_buildjob(self, job, buildarch):
        assert type(job) is Job
        self.__buildjobs.get(buildarch).append(job)
    def claim_job(self, buildarch):
        assert buildarch in BUILD_ARCHS
        if self.__curr_job:
            return None
        jobs = self.__buildjobs.get(buildarch, list())
        if jobs:
            self.__curr_job = jobs.pop(0)
            return self.__curr_job
    def __finish_job(self, pkgdir):
        assert pkgdir == self.__curr_job.pkgdir
        # do upload
        self.__curr_job = None
        return True
    def tick(self):
        '''
        check for updates,
        create new jobs
        and run them
        '''
        if self.__curr_job is None:
            updates = updmgr.check_update()
            for update in updates:
                (pkg, packagelist, ver) = update
                # wip: creating build jobs from updates is not implemented yet

jobsmgr = jobsManager()
class updateManager:
    def __init__(self, filename='pkgver.json'):
        self.__filename = filename
        self.__pkgvers = dict()
        self.__load()
    def __load(self):
        if Path(self.__filename).exists():
            with open(self.__filename, "r") as f:
                try:
                    pkgvers = json.loads(f.read())
                except json.JSONDecodeError:
                    logger.error('pkgver.json - Bad json')
                    print_exc_plus()
                    exit(1)
        else:
            logger.warning(f'No {self.__filename} found')
            pkgvers = dict()
        assert type(pkgvers) is dict
        for pkgname in pkgvers:
            assert type(pkgname) is str
        self.__pkgvers = pkgvers
    def _save(self):
        pkgvers = json.dumps(self.__pkgvers, indent=4)
        pkgvers += '\n'
        with open(self.__filename, "w") as f:
            if f.writable():
                f.write(pkgvers)
            else:
                logger.error('pkgver.json - Not writable')
    def __get_package_list(self, dirname):
        pkgdir = REPO_ROOT / dirname
        assert pkgdir.exists()
        pkglist = bash(MAKEPKG_PKGLIST_CMD, cwd=pkgdir)
        pkglist = pkglist.split('\n')
        return pkglist
    def __get_new_ver(self, dirname):
        pkgfiles = self.__get_package_list(dirname)
        ver = get_pkg_details_from_name(pkgfiles[0])
        return (ver, pkgfiles)
    def check_update(self):
        updates = list()
        for pkg in jobsmgr.pkgconfigs:
            pkgdir = REPO_ROOT / pkg.dirname
            logger.info(f'checking update: {pkg.dirname}')
            bash(MAKEPKG_UPD_CMD, cwd=pkgdir, RUN_CMD_TIMEOUT=60*60)
            if pkg.type in ('git', 'manual'):
                (ver, pkgfiles) = self.__get_new_ver(pkg.dirname)
                oldver = self.__pkgvers.get(pkg.dirname, None)
                if oldver is None or vercmp(ver, oldver) == 1:
                    self.__pkgvers[pkg.dirname] = ver
                    updates.append((pkg, pkgfiles, ver))
                else:
                    logger.warning(f'package: {pkg.dirname} downgrade attempted')
            else:
                logger.warning(f'unknown package type: {pkg.type}')
        self._save()
        return updates

updmgr = updateManager()
@background
def __main():
    pass

def run(funcname, args=list(), kwargs=dict()):
    if funcname in ('clean', 'regenerate', 'remove',
                    'update', 'push_files', 'add_files'):
        logger.info('running: %s %s %s', funcname, args, kwargs)
        ret = eval(funcname)(*args, **kwargs)
        logger.info('done: %s %s', funcname, ret)
        return ret
    else:
        logger.error('unexpected: %s %s %s', funcname, args, kwargs)
        return False
if __name__ == '__main__':
    __main()  # start the main worker thread
    while True:
        try:
            with Listener(MASTER_BIND_ADDRESS, authkey=MASTER_BIND_PASSWD) as listener:
                with listener.accept() as conn:
                    logger.info('connection accepted from %s', listener.last_accepted)
                    myrecv = conn.recv()
                    if type(myrecv) is list and len(myrecv) == 3:
                        (funcname, args, kwargs) = myrecv
                        funcname = str(funcname)
                        conn.send(run(funcname, args=args, kwargs=kwargs))
        except Exception:
            print_exc_plus()
        except KeyboardInterrupt:
            logger.info('KeyboardInterrupt')
            print_exc_plus()
            break
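
The `__main__` block turns buildbot.py into a small command server: it accepts a connection, expects a 3-element list `[funcname, args, kwargs]`, and replies with the return value of `run()`. Below is a minimal client sketch, assuming it runs against the same `MASTER_BIND_ADDRESS`/`MASTER_BIND_PASSWD` from config.py; the 'ping' name is made up here and, since it is not in the whitelist, simply gets `False` back:

```
# Hypothetical client sketch, not part of this commit.
from multiprocessing.connection import Client
from config import MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD

with Client(MASTER_BIND_ADDRESS, authkey=MASTER_BIND_PASSWD) as conn:
    # the server expects a 3-element list: [funcname, args, kwargs]
    conn.send(['ping', [], {}])
    # 'ping' is not whitelisted, so run() logs an error and returns False
    print(conn.recv())
```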


@@ -6,7 +6,10 @@
ARCHS = ['aarch64', 'any', 'armv7h', 'x86_64']
REPO_NAME='jerryxiao'
PKG_COMPRESSION='xz'
BUILD_ARCHS = ['aarch64', 'any', 'x86_64']
BUILD_ARCHS = ['aarch64', 'x86_64']
BUILD_ARCH_MAPPING = {'aarch64': 'aarch64', 'x86_64': 'x86_64', 'any': 'x86_64', 'armv7h': None}
AUTOBUILD_FNAME = 'autobuild.yaml'
#### config for repo.py
@@ -38,3 +41,11 @@ GPG_SIGN_CMD = (f'gpg --default-key {GPG_KEY} --no-armor'
MASTER_BIND_ADDRESS = ('localhost', 7011)
MASTER_BIND_PASSWD = b'mypassword'
PKGBUILD_DIR = 'pkgbuilds'
MAKEPKG = 'makepkg --nosign --needed --noconfirm --noprogressbar --nocolor'
MAKEPKG_UPD_CMD = 'makepkg --syncdeps --nobuild'
MAKEPKG_MAKE_CMD = 'makepkg --syncdeps --noextract'
MAKEPKG_MAKE_CMD_CLEAN = 'makepkg --syncdeps --noextract --clean --cleanbuild'
MAKEPKG_PKGLIST_CMD = f'{MAKEPKG} --packagelist'
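
The build step itself is not implemented in this wip commit, but the two MAKEPKG_MAKE_CMD variants presumably pair with the per-package `cleanbuild` flag from autobuild.yaml. A minimal sketch of that selection, assuming a `pkgConfig`-like object and the `bash` helper as extended in this commit; `build_package` itself is hypothetical:

```
# Hypothetical helper, not part of this commit: pick the makepkg invocation
# based on a package's cleanbuild flag and run it inside the package dir.
from config import MAKEPKG_MAKE_CMD, MAKEPKG_MAKE_CMD_CLEAN
from utils import bash

def build_package(pkgconfig, pkgdir):
    # pkgconfig.cleanbuild and pkgconfig.timeout come from autobuild.yaml
    cmd = MAKEPKG_MAKE_CMD_CLEAN if pkgconfig.cleanbuild else MAKEPKG_MAKE_CMD
    # timeout is stored in minutes; run_cmd expects seconds
    return bash(cmd, cwd=pkgdir, RUN_CMD_TIMEOUT=pkgconfig.timeout * 60)
```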


@@ -1,2 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-


@@ -1,13 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import logging
from utils import bash
logger = logging.getLogger(name='package')
# makepkg -o
# makepkg -e
# makepkg --nosign
# makepkg --packagelist
# gpg --default-key {GPG_KEY} --no-armor --pinentry-mode loopback --passphrase '' --detach-sign --yes -- aaa


@@ -4,7 +4,6 @@
# This file is part of Buildbot by JerryXiao
import logging
from threading import Thread
from multiprocessing.connection import Listener
from time import time, sleep
from pathlib import Path
@@ -106,6 +105,17 @@ class pushFm:
pfm = pushFm()

def push_files(filename, size):
    pfm.tick()
    return pfm.start(filename, size)

def add_files(filename, overwrite=False):
    return pfm.done(filename, overwrite=overwrite)

# server part
def run(funcname, args=list(), kwargs=dict()):
    if funcname in ('clean', 'regenerate', 'remove',
                    'update', 'push_files', 'add_files'):
@@ -117,13 +127,6 @@ def run(funcname, args=list(), kwargs=dict()):
        logger.error('unexpected: %s %s %s', funcname, args, kwargs)
        return False

def push_files(filename, size):
    pfm.tick()
    return pfm.start(filename, size)

def add_files(filename, overwrite=False):
    return pfm.done(filename, overwrite=overwrite)

if __name__ == '__main__':
    while True:
        try:


@@ -25,12 +25,12 @@ def bash(cmdline, **kwargs):
    logger.info(f'bash: {cmdline}')
    return(run_cmd(['/bin/bash', '-x', '-e', '-c', cmdline], **kwargs))

def long_bash(cmdline, hours=2):
def long_bash(cmdline, cwd=None, hours=2):
    assert type(hours) is int and hours >= 1
    logger.info(f'longbash{hours}: {cmdline}')
    return bash(cmdline, keepalive=True, KEEPALIVE_TIMEOUT=60, RUN_CMD_TIMEOUT=hours*60*60)
    return bash(cmdline, cwd=cwd, keepalive=True, KEEPALIVE_TIMEOUT=60, RUN_CMD_TIMEOUT=hours*60*60)

def run_cmd(cmd, keepalive=False, KEEPALIVE_TIMEOUT=30, RUN_CMD_TIMEOUT=60):
def run_cmd(cmd, cwd=None, keepalive=False, KEEPALIVE_TIMEOUT=30, RUN_CMD_TIMEOUT=60):
    logger.debug('run_cmd: %s', cmd)
    RUN_CMD_LOOP_TIME = KEEPALIVE_TIMEOUT - 1 if KEEPALIVE_TIMEOUT >= 10 else 5
    stopped = False
@@ -50,7 +50,7 @@ def run_cmd(cmd, keepalive=False, KEEPALIVE_TIMEOUT=30, RUN_CMD_TIMEOUT=60):
            last_read[0] = last_read_time
            last_read[1] = line
            stdout_lock.release()
    p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
    p = subprocess.Popen(cmd, cwd=cwd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT, encoding='utf-8')
    check_stdout(p.stdout)
    process_start = int(time())
@@ -182,4 +182,3 @@ def print_exc_plus():
                print(value)
            except:
                print("<ERROR WHILE PRINTING VALUE>")


@@ -1,24 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import logging
from utils import bash
from yaml import load, dump
from pathlib import Path
logger = logging.getLogger(__name__)
abspath=os.path.abspath(__file__)
abspath=os.path.dirname(abspath)
os.chdir(abspath)
# include all autobuild.yaml files
REPO_NAME = Path('repo')
BUTOBUILD_FNAME = 'autobuild.yaml'
for mydir in REPO_NAME.iterdir():
    if mydir.is_dir() and (mydir / BUTOBUILD_FNAME).exists():
        # parsing yaml
        logger.info('Bulidbot: found %s in %s', BUTOBUILD_FNAME, mydir / BUTOBUILD_FNAME)

yamlparse.py Normal file

@@ -0,0 +1,92 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import logging
from yaml import load, Loader
from pathlib import Path
from utils import print_exc_plus
from config import PKGBUILD_DIR, AUTOBUILD_FNAME
logger = logging.getLogger(__name__)
abspath=os.path.abspath(__file__)
abspath=os.path.dirname(abspath)
os.chdir(abspath)
# parse all autobuild.yaml files
REPO_ROOT = Path(PKGBUILD_DIR)
class pkgConfig:
    def __init__(self, dirname, pkgtype, cleanbuild, timeout, extra):
        self.dirname = dirname
        self.type = pkgtype
        self.__determine_type()
        if cleanbuild is None:
            cleanbuild = True
        assert type(cleanbuild) is bool
        self.cleanbuild = cleanbuild
        self.timeout = 30 if timeout is None else int(timeout)
        # timeout in minutes
        self.__extra = extra
        self.__process_extra()
    def __determine_type(self):
        if self.type in (None, 'auto'):
            if self.dirname.endswith('-git'):
                self.type = 'git'
                return
            self.type = 'manual'
    def __process_extra(self):
        stages = ('prebuild', 'postbuild', 'update', 'failure')
        for stage in stages:
            setattr(self, stage, list())
        for entry in self.__extra:
            assert type(entry) is dict and len(entry) == 1
            for k in entry:
                if k in stages:
                    cmd = entry.get(k, list())
                    assert type(cmd) is list
                    setattr(self, k, cmd)
    def __repr__(self):
        ret = "pkgConfig("
        for myproperty in \
            (
                'dirname', 'type', 'cleanbuild', 'timeout',
                'prebuild', 'postbuild', 'update', 'failure'
            ):
            ret += f'{myproperty}={getattr(self, myproperty)},'
        ret += ')'
        return ret
def load_all():
    pkgconfigs = list()
    for mydir in REPO_ROOT.iterdir():
        try:
            if mydir.is_dir() and (mydir / AUTOBUILD_FNAME).exists():
                # parsing yaml
                logger.info('Buildbot: found %s in %s', AUTOBUILD_FNAME,
                            mydir / AUTOBUILD_FNAME)
                with open(mydir / AUTOBUILD_FNAME, 'r') as f:
                    content = f.read()
                    content = load(content, Loader=Loader)
                    assert type(content) is dict
                    args = [content.get(part, None) for part in \
                            ('type', 'cleanbuild', 'timeout', 'extra')]
                    args = [mydir.name] + args
                    pkgconfigs.append(pkgConfig(*args))
        except Exception:
            logger.error(f'Error while parsing {AUTOBUILD_FNAME} for {mydir.name}')
            print_exc_plus()
    return pkgconfigs

if __name__ == '__main__':
    print(load_all())