#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# buildbot.py: Automatic management tool for an arch repo.
# This file is part of Buildbot by JerryXiao
import logging
from multiprocessing.connection import Listener
from time import time, sleep
import os
from pathlib import Path
from shutil import rmtree
from subprocess import CalledProcessError
from shared_vars import PKG_SUFFIX, PKG_SIG_SUFFIX
from config import ARCHS, BUILD_ARCHS, BUILD_ARCH_MAPPING, \
                   MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD, \
                   PKGBUILD_DIR, MAKEPKG_PKGLIST_CMD, MAKEPKG_UPD_CMD, \
                   MAKEPKG_MAKE_CMD, MAKEPKG_MAKE_CMD_CLEAN, \
                   GPG_SIGN_CMD, GPG_VERIFY_CMD, UPDATE_INTERVAL
from utils import print_exc_plus, background, \
                  bash, get_pkg_details_from_name, vercmp, \
                  nspawn_shell, mon_nspawn_shell, get_arch_from_pkgbuild, \
                  configure_logger
from client import run as rrun
import json
from yamlparse import load_all as load_all_yaml
abspath = os.path.abspath(__file__)
abspath = os.path.dirname(abspath)
os.chdir(abspath)
logger = logging.getLogger('buildbot')
configure_logger(logger, logfile='buildbot.log', rotate_size=1024*1024*10)
REPO_ROOT = Path(PKGBUILD_DIR)
class Job:
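    '''
    A pending build of one package (pkgconfig) at a given version on one
    build arch; multiarch marks packages built for more than one arch.
    '''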
    def __init__(self, buildarch, pkgconfig, version, multiarch=False):
        assert buildarch in BUILD_ARCHS
        self.arch = buildarch
        self.pkgconfig = pkgconfig
        self.version = version
        self.multiarch = multiarch
        self.added = time()
class jobsManager:
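    '''
    Queues one Job per (package, arch) from update checks and drives the
    makepkg / sign / upload / clean cycle one job at a time via tick().
    '''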
def __init__(self):
        self.__buildjobs = list()
        self.__uploadjobs = list()
        self.__curr_job = None
        self.pkgconfigs = load_all_yaml()
        self.last_updatecheck = 0.0
        self.idle = False
def _new_buildjob(self, job):
        assert type(job) is Job
job_to_remove = list()
for previous_job in self.__buildjobs:
if job.pkgconfig.dirname == previous_job.pkgconfig.dirname and \
job.arch == previous_job.arch:
job_to_remove.append(previous_job)
for oldjob in job_to_remove:
self.__buildjobs.remove(oldjob)
logger.info('removed an old job for %s %s, %s => %s',
job.pkgconfig.dirname, job.arch,
oldjob.version, job.version)
logger.info('new job for %s %s %s',
job.pkgconfig.dirname, job.arch, job.version)
self.__buildjobs.append(job)
def __get_job(self):
        if self.__curr_job:
            return None
        jobs = self.__buildjobs
if jobs:
self.__curr_job = jobs.pop(0)
return self.__curr_job
def __finish_job(self, pkgdir):
        assert pkgdir == self.__curr_job.pkgconfig.dirname
# do upload
self.__curr_job = None
return True
def __makepkg(self, job):
mkcmd = MAKEPKG_MAKE_CMD_CLEAN if job.pkgconfig.cleanbuild \
else MAKEPKG_MAKE_CMD
cwd = REPO_ROOT / job.pkgconfig.dirname
logger.info('makepkg in %s %s', job.pkgconfig.dirname, job.arch)
return mon_nspawn_shell(arch=job.arch, cwd=cwd, cmdline=mkcmd,
                                logfile = cwd / 'buildbot.log.makepkg',
                                short_return = True)
    def __clean(self, job, remove_pkg=False):
        cwd = REPO_ROOT / job.pkgconfig.dirname
logger.info('cleaning build dir for %s, %sremoving pkg',
job.pkgconfig.dirname, '' if remove_pkg else 'not ')
        # snapshot the directory listing so deletions below cannot affect iteration
        for fpath in list(cwd.iterdir()):
            if fpath.is_dir() and fpath.name in ('pkg', 'src'):
                rmtree(fpath)
            elif remove_pkg and fpath.is_file() and \
                 (fpath.name.endswith(PKG_SUFFIX) or \
                  fpath.name.endswith(PKG_SIG_SUFFIX)):
                fpath.unlink()
def __sign(self, job):
cwd = REPO_ROOT / job.pkgconfig.dirname
for fpath in cwd.iterdir():
if fpath.name.endswith(PKG_SUFFIX):
bash(f'{GPG_SIGN_CMD} {fpath.name}', cwd=cwd)
def __upload(self, job):
'''
wip
'''
cwd = REPO_ROOT / job.pkgconfig.dirname
        print(bash('ls -l', cwd=cwd))
        #nspawn_shell(job.arch, 'rm -rf src pkg', cwd=cwd)
        #rrun()
def tick(self):
'''
check for updates,
create new jobs
and run them
'''
        if not self.__buildjobs:
            # This part checks for updates
            if time() - self.last_updatecheck <= UPDATE_INTERVAL:
                if not self.idle:
                    logger.info('Buildbot is idling for package updates.')
                    self.idle = True
                sleep(60)
                return
            self.last_updatecheck = time()
            self.idle = False
updates = updmgr.check_update()
for update in updates:
                (pkgconfig, ver, buildarchs) = update
                march = len(buildarchs) >= 2
for arch in buildarchs:
newjob = Job(arch, pkgconfig, ver, multiarch=march)
self._new_buildjob(newjob)
else:
# This part does the job
job = self.__get_job()
cwd = REPO_ROOT / job.pkgconfig.dirname
if job.multiarch:
# wip
pass
else:
self.__makepkg(job)
self.__sign(job)
self.__upload(job)
if job.pkgconfig.cleanbuild:
                    self.__clean(job, remove_pkg=True)
self.__finish_job(job.pkgconfig.dirname)
jobsmgr = jobsManager()
class updateManager:
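    '''
    Remembers the last seen version of each package in pkgver.json and
    reports packages whose current version is newer.
    '''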
def __init__(self, filename='pkgver.json'):
self.__filename = filename
self.__pkgvers = dict()
self.__load()
def __load(self):
if Path(self.__filename).exists():
with open(self.__filename,"r") as f:
try:
pkgvers = json.loads(f.read())
except json.JSONDecodeError:
logger.error('pkgver.json - Bad json')
                    print_exc_plus()
exit(1)
else:
logger.warning(f'No {self.__filename} found')
pkgvers = dict()
assert type(pkgvers) is dict
for pkgname in pkgvers:
assert type(pkgname) is str
self.__pkgvers = pkgvers
def _save(self):
pkgvers = json.dumps(self.__pkgvers, indent=4)
pkgvers += '\n'
with open(self.__filename,"w") as f:
            if f.writable():
f.write(pkgvers)
else:
logger.error('pkgver.json - Not writable')
    def __get_package_list(self, dirname, arch):
        pkgdir = REPO_ROOT / dirname
        assert pkgdir.exists()
        pkglist = nspawn_shell(arch, MAKEPKG_PKGLIST_CMD, cwd=pkgdir)
        pkglist = pkglist.split('\n')
        pkglist = [line for line in pkglist if not line.startswith('+')]
        return pkglist
def __get_new_ver(self, dirname, arch):
pkgfiles = self.__get_package_list(dirname, arch)
ver = get_pkg_details_from_name(pkgfiles[0]).ver
return ver
def check_update(self):
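        '''
        run the update command for every configured package and return a list
        of (pkgconfig, version, buildarchs) tuples that need to be rebuilt
        '''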
updates = list()
for pkg in jobsmgr.pkgconfigs:
pkgdir = REPO_ROOT / pkg.dirname
logger.info(f'checking update: {pkg.dirname}')
pkgbuild = pkgdir / 'PKGBUILD'
archs = get_arch_from_pkgbuild(pkgbuild)
buildarchs = [BUILD_ARCH_MAPPING.get(arch, None) for arch in archs]
buildarchs = [arch for arch in buildarchs if arch is not None]
# hopefully we only need to check one arch for update
arch = 'x86_64' if 'x86_64' in buildarchs else buildarchs[0] # prefer x86
mon_nspawn_shell(arch, MAKEPKG_UPD_CMD, cwd=pkgdir, minutes=60,
logfile = pkgdir / 'buildbot.log.update',
short_return = True)
if pkg.type in ('git', 'manual'):
                ver = self.__get_new_ver(pkg.dirname, arch)
                oldver = self.__pkgvers.get(pkg.dirname, None)
has_update = False
if oldver:
res = vercmp(ver, oldver)
if res == 1:
has_update = True
elif res == -1:
logger.warning(f'package: {pkg.dirname} downgrade attempted')
elif res == 0:
logger.info(f'package: {pkg.dirname} is up to date')
else:
has_update = True
if has_update:
                    self.__pkgvers[pkg.dirname] = ver
                    updates.append((pkg, ver, buildarchs))
else:
logger.warning(f'unknown package type: {pkg.type}')
self._save()
return updates
updmgr = updateManager()
@background
def __main():
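    '''
    background worker loop: keep calling jobsmgr.tick(), log any exception
    and keep going
    '''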
    while True:
        try:
            jobsmgr.tick()
        except Exception:
            print_exc_plus()
        sleep(1)
def run(funcname, args=list(), kwargs=dict()):
if funcname in ('clean', 'regenerate', 'remove',
'update', 'push_files', 'add_files'):
        logger.info('running: %s %s %s', funcname, args, kwargs)
        ret = eval(funcname)(*args, **kwargs)
        logger.info('done: %s %s', funcname, ret)
return ret
else:
logger.error('unexpected: %s %s %s',funcname, args, kwargs)
return False
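# Example (sketch): the master expects a 3-item list [funcname, args, kwargs]
# over the connection and replies with run()'s return value, so a controller
# could call a whitelisted function roughly like this:
#   from multiprocessing.connection import Client
#   with Client(MASTER_BIND_ADDRESS, authkey=MASTER_BIND_PASSWD) as conn:
#       conn.send(['update', list(), dict()])
#       result = conn.recv()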
if __name__ == '__main__':
__main() # start the main worker thread
while True:
try:
with Listener(MASTER_BIND_ADDRESS, authkey=MASTER_BIND_PASSWD) as listener:
with listener.accept() as conn:
logger.info('connection accepted from %s', listener.last_accepted)
myrecv = conn.recv()
if type(myrecv) is list and len(myrecv) == 3:
(funcname, args, kwargs) = myrecv
funcname = str(funcname)
                    logger.info('running: %s %s %s', funcname, args, kwargs)
conn.send(run(funcname, args=args, kwargs=kwargs))
except Exception:
print_exc_plus()
except KeyboardInterrupt:
logger.info('KeyboardInterrupt')
print_exc_plus()
break