Mirror of https://github.com/archlinux-jerry/buildbot
buildbot: add git and client
Commit 7203913efc (parent 84f8afd8a0)
5 changed files with 123 additions and 17 deletions

buildbot.py (93 changes)
@@ -18,7 +18,8 @@ from config import ARCHS, BUILD_ARCHS, BUILD_ARCH_MAPPING, \
                    PKGBUILD_DIR, MAKEPKG_PKGLIST_CMD, MAKEPKG_UPD_CMD, \
                    MAKEPKG_MAKE_CMD, MAKEPKG_MAKE_CMD_CLEAN, \
                    GPG_SIGN_CMD, GPG_VERIFY_CMD, UPDATE_INTERVAL, \
-                   MAKEPKG_MAKE_CMD_MARCH, UPLOAD_CMD
+                   MAKEPKG_MAKE_CMD_MARCH, UPLOAD_CMD, \
+                   GIT_PULL, GIT_RESET_SUBDIR
 
 from utils import print_exc_plus, background, \
                   bash, get_pkg_details_from_name, vercmp, \
@@ -48,7 +49,14 @@ class Job:
         self.version = version
         self.multiarch = multiarch
         self.added = time()
+    def __repr__(self):
+        ret = "Job("
+        for myproperty in (
+            'arch', 'pkgconfig', 'version', 'multiarch', 'added'
+            ):
+            ret += f'{myproperty}={getattr(self, myproperty, None)},'
+        ret += ')'
+        return ret
 class jobsManager:
     def __init__(self):
         self.__buildjobs = list()
@@ -57,6 +65,52 @@ class jobsManager:
         self.pkgconfigs = None
         self.last_updatecheck = 0.0
         self.idle = False
+    def __repr__(self):
+        ret = "jobsManager("
+        for myproperty in (
+            '__buildjobs', '__uploadjobs', '__curr_job',
+            'pkgconfigs', 'last_updatecheck', 'idle'
+            ):
+            ret += f'{myproperty}={getattr(self, myproperty, None)},'
+        ret += ')'
+        return ret
+    def reset_dir(self, pkgdirname=None, all=False):
+        if all:
+            logger.info('git checkout all: %s', bash(GIT_RESET_SUBDIR, cwd=REPO_ROOT))
+        else:
+            if not pkgdirname:
+                return False
+            cwd = REPO_ROOT / pkgdirname
+            if cwd.exists():
+                logger.info('git checkout: %s', bash(GIT_RESET_SUBDIR, cwd=cwd))
+                for fpath in [f for f in cwd.iterdir()]:
+                    if fpath.is_dir() and \
+                       fpath.name in ('pkg', 'src'):
+                        rmtree(fpath)
+                    elif fpath.is_file() and \
+                         (fpath.name.endswith(PKG_SUFFIX) or \
+                          fpath.name.endswith(PKG_SIG_SUFFIX)):
+                        fpath.unlink()
+            else:
+                return False
+        return True
+    def rebuild_package(self, pkgdirname, clean=False):
+        self.pkgconfigs = load_all_yaml()
+        updates = updmgr.check_update(rebuild_package=pkgdirname)
+        if updates and len(updates) == 1:
+            if clean:
+                self.reset_dir(pkgdirname)
+            (pkgconfig, ver, buildarchs) = updates[0]
+            march = True if len(buildarchs) >= 2 else False
+            for arch in buildarchs:
+                newjob = Job(arch, pkgconfig, ver, multiarch=march)
+                self._new_buildjob(newjob)
+            ret = f'rebuild job added for {pkgdirname} {" ".join(buildarchs)}'
+            logger.info(ret)
+        else:
+            ret = f'rebuild failed: no such dir {pkgdirname}'
+            logger.warning(ret)
+        return ret
     def _new_buildjob(self, job):
         assert type(job) is Job
         job_to_remove = list()
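
The two methods added above are driven remotely through the wrapper functions further down in this file; a minimal local sketch of what they do (not part of the commit — 'example-pkg' is a hypothetical directory name and jobsmgr is the module-level jobsManager instance):

# Hedged sketch: direct calls on the module-level jobsmgr instance from buildbot.py.
jobsmgr.reset_dir('example-pkg')                    # git checkout the dir, remove pkg/, src/ and built packages
jobsmgr.rebuild_package('example-pkg', clean=True)  # reset first, then queue one Job per build arch
jobsmgr.reset_dir(all=True)                         # git checkout the whole PKGBUILD tree
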
@@ -74,7 +128,9 @@ class jobsManager:
         self.__buildjobs.append(job)
     def __get_job(self):
         if self.__curr_job:
-            return None
+            logger.error(f'Job {self.__curr_job} failed')
+            self.__finish_job(self.__curr_job)
+            return self.__get_job()
         jobs = self.__buildjobs
         if jobs:
             self.__curr_job = jobs.pop(0)
@@ -168,6 +224,11 @@ class jobsManager:
             return
         self.last_updatecheck = time()
         self.idle = False
+        # git pull repo
+        try:
+            bash(GIT_PULL, cwd=REPO_ROOT)
+        except Exception:
+            print_exc_plus()
         self.pkgconfigs = load_all_yaml()
         updates = updmgr.check_update()
         for update in updates:
@@ -179,6 +240,9 @@ class jobsManager:
         else:
             # This part does the job
             job = self.__get_job()
+            if not job:
+                logging.error('No job got')
+                return
             if job.multiarch:
                 self.__clean(job, remove_pkg=True)
                 self.__makepkg(job)
@@ -236,15 +300,21 @@ class updateManager:
         pkgfiles = self.__get_package_list(dirname, arch)
         ver = get_pkg_details_from_name(pkgfiles[0]).ver
         return ver
-    def check_update(self):
+    def check_update(self, rebuild_package=None):
         updates = list()
         for pkg in jobsmgr.pkgconfigs:
+            if rebuild_package and \
+               rebuild_package != pkg.dirname:
+                continue
             pkgdir = REPO_ROOT / pkg.dirname
             logger.info(f'checking update: {pkg.dirname}')
             pkgbuild = pkgdir / 'PKGBUILD'
             archs = get_arch_from_pkgbuild(pkgbuild)
             buildarchs = [BUILD_ARCH_MAPPING.get(arch, None) for arch in archs]
             buildarchs = [arch for arch in buildarchs if arch is not None]
+            if not buildarchs:
+                logger.warning(f'No build arch for {pkg.dirname}, refuse to build.')
+                continue
             # hopefully we only need to check one arch for update
             arch = 'x86_64' if 'x86_64' in buildarchs else buildarchs[0] # prefer x86
             mon_nspawn_shell(arch, MAKEPKG_UPD_CMD, cwd=pkgdir, seconds=60*60,
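
As an aside, the new build-arch filtering in check_update() can be illustrated with a made-up mapping (the real values live in config.BUILD_ARCH_MAPPING):

# Illustration only; this mapping is hypothetical.
BUILD_ARCH_MAPPING = {'x86_64': 'x86_64', 'aarch64': 'aarch64', 'any': 'x86_64'}
archs = ['i686', 'x86_64', 'aarch64']   # as parsed from a PKGBUILD
buildarchs = [BUILD_ARCH_MAPPING.get(arch, None) for arch in archs]
buildarchs = [arch for arch in buildarchs if arch is not None]
# -> ['x86_64', 'aarch64']; if the list ends up empty, the package is now skipped with a warning
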
@@ -277,11 +347,20 @@ updmgr = updateManager()
 
 
 
-def info(*args, **kwargs):
-    return (args, kwargs)
+def info():
+    return (str(jobsmgr))
 
+def rebuild_package(pkgdirname, clean=False):
+    return jobsmgr.rebuild_package(pkgdirname, clean=clean)
+
+def clean(pkgdirname):
+    return jobsmgr.reset_dir(pkgdirname=pkgdirname)
+
+def clean_all():
+    return jobsmgr.reset_dir(all=True)
+
 def run(funcname, args=list(), kwargs=dict()):
-    if funcname in ('info',):
+    if funcname in ('info', 'rebuild_package', 'clean', 'clean_all'):
         logger.info('running: %s %s %s',funcname, args, kwargs)
         ret = eval(funcname)(*args, **kwargs)
         logger.info('done: %s %s',funcname, ret)
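
The run() dispatcher above only eval()s names from its whitelist; the listening side that feeds it is not part of this diff, but it presumably looks something like this Listener sketch (an assumption, mirroring what client.py sends):

# Hedged sketch, not the project's actual server loop.
from multiprocessing.connection import Listener

def serve(dispatch, addr, authkey):
    with Listener(addr, authkey=authkey) as listener:
        while True:
            with listener.accept() as conn:
                funcname, args, kwargs = conn.recv()   # matches conn.send([funcname, args, kwargs]) in client.py
                conn.send(dispatch(funcname, args=args, kwargs=kwargs))
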

client.py (41 changes, Normal file → Executable file)
@@ -7,22 +7,25 @@ import logging
 from multiprocessing.connection import Client
 from time import sleep
 
-from config import REPOD_BIND_ADDRESS, REPOD_BIND_PASSWD
+from config import REPOD_BIND_ADDRESS, REPOD_BIND_PASSWD, \
+                   MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD
 
 from utils import print_exc_plus
 
 logger = logging.getLogger(f'buildbot.{__name__}')
 
-def run(funcname, args=list(), kwargs=dict(), retries=0):
+def run(funcname, args=list(), kwargs=dict(), retries=0, server=(REPOD_BIND_ADDRESS, REPOD_BIND_PASSWD)):
     try:
         logger.info('client: %s %s %s',funcname, args, kwargs)
-        with Client(REPOD_BIND_ADDRESS, authkey=REPOD_BIND_PASSWD) as conn:
+        (addr, authkey) = server
+        with Client(addr, authkey=authkey) as conn:
             conn.send([funcname, args, kwargs])
             return conn.recv()
     except ConnectionRefusedError:
         if retries <= 10:
             logger.info("Server refused, retry after 60s")
             sleep(60)
-            return ping(funcname, args=args, kwargs=kwargs, retries=retries+1)
+            return run(funcname, args=args, kwargs=kwargs, retries=retries+1)
         else:
             logger.error("Server refused")
             return False
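
A hedged usage sketch of the extended client-side run(): the default still targets the repod address, while the new server parameter redirects the same call to the master:

# Sketch only; addresses and passwords come from config.py.
from client import run
from config import MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD

run('update', kwargs={'overwrite': False})                          # repod, the default server
run('clean_all', server=(MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD))  # master
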
@@ -34,7 +37,29 @@ def run(funcname, args=list(), kwargs=dict(), retries=0):
 
 if __name__ == '__main__':
     import argparse
-    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
-    logger.info('result: %s', run('push_files', args=('aaa', 1)))
-    logger.info('result: %s', run('add_files', args=('aaa',)))
-    #logger.info('result: %s', run('update'))
+    from utils import configure_logger
+    configure_logger(logger)
+    try:
+        parser = argparse.ArgumentParser(description='Client for buildbot')
+        parser.add_argument('--update', action='store_true', help='update pushed files to the repo')
+        parser.add_argument('--cleanall', action='store_true', help='checkout pkgbuilds')
+        parser.add_argument('--clean', nargs='?', default=None, help='checkout pkgbuilds in one package')
+        parser.add_argument('--rebuild', nargs='?', default=None, help='rebuild a package with its dirname')
+        args = parser.parse_args()
+        if args.update:
+            server=(REPOD_BIND_ADDRESS, REPOD_BIND_PASSWD)
+            logger.info(run('update', kwargs={'overwrite': False}, server=server))
+        elif args.cleanall:
+            server=(MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD)
+            logger.info(run('clean_all', server=server))
+        elif args.clean:
+            server=(MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD)
+            logger.info(run('clean', args=(args.clean,), server=server))
+        elif args.rebuild:
+            server=(MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD)
+            logger.info(run('rebuild', args=(args.rebuild,), kwargs={'clean': True}, server=server))
+        else:
+            parser.error("Please choose an action")
+    except Exception:
+        print_exc_plus()
+        parser.exit(status=1)
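
Since client.py is now executable, the same actions can presumably be triggered straight from a checkout, e.g. ./client.py --update, ./client.py --cleanall, ./client.py --clean <pkgdirname> or ./client.py --rebuild <pkgdirname> (flag names as defined by the parser above; <pkgdirname> is a placeholder).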

config.py (3 changes)
@@ -59,3 +59,6 @@ SHELL_ARM64_ADDITIONAL = 'set -e; set -x'
 SHELL_TRAP = 'trap \'echo ++ exit $?\' ERR EXIT'
 
 UPLOAD_CMD = 'rsync -avPh \"{src}\" repoupload:/srv/repo/buildbot/repo/updates/'
+
+GIT_PULL = 'git pull'
+GIT_RESET_SUBDIR = 'git checkout HEAD -- .'
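
GIT_PULL and GIT_RESET_SUBDIR are plain shell strings handed to the bash() helper from utils.py. Its implementation is not shown in this diff; judging from the call sites it runs a command in a working directory and returns the output, roughly like this sketch:

# Hedged approximation of utils.bash(), for orientation only.
import subprocess

def bash_like(cmd, cwd=None):
    return subprocess.run(cmd, shell=True, cwd=cwd, check=True,
                          capture_output=True, text=True).stdout

# e.g. bash_like('git checkout HEAD -- .', cwd='/path/to/repo/example-pkg')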

utils.py (1 change)
@@ -211,7 +211,6 @@ def get_arch_from_pkgbuild(fpath):
         if not matches:
             raise TypeError('Unexpected PKGBUILD format')
         matches = [arch for arch in matches if arch in ARCHS]
-        assert matches
         return matches
     raise TypeError('Unexpected PKGBUILD')
 

yamlparse.py (2 changes)
@@ -65,7 +65,7 @@ class pkgConfig:
             'dirname', 'type', 'cleanbuild', 'timeout'
             'prebuild', 'postbuild', 'update', 'failure'
             ):
-            ret += f'{myproperty}={getattr(self, myproperty)},'
+            ret += f'{myproperty}={getattr(self, myproperty, None)},'
         ret += ')'
         return ret
 