2019-04-06 20:59:27 +08:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# buildbot.py: Automatic management tool for an arch repo.
|
|
|
|
# This file is part of Buildbot by JerryXiao
|
|
|
|
|
|
|
|
import logging
|
|
|
|
from multiprocessing.connection import Listener
|
|
|
|
from time import time, sleep
|
|
|
|
import os
|
|
|
|
from pathlib import Path
|
2019-04-09 15:43:17 +08:00
|
|
|
from shutil import rmtree
|
2019-04-06 20:59:27 +08:00
|
|
|
from subprocess import CalledProcessError
|
|
|
|
|
2019-04-09 15:43:17 +08:00
|
|
|
from shared_vars import PKG_SUFFIX, PKG_SIG_SUFFIX
|
|
|
|
|
2019-04-06 20:59:27 +08:00
|
|
|
from config import ARCHS, BUILD_ARCHS, BUILD_ARCH_MAPPING, \
|
|
|
|
MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD, \
|
2019-04-07 17:14:50 +08:00
|
|
|
PKGBUILD_DIR, MAKEPKG_PKGLIST_CMD, MAKEPKG_UPD_CMD, \
|
2019-04-09 15:43:17 +08:00
|
|
|
MAKEPKG_MAKE_CMD, MAKEPKG_MAKE_CMD_CLEAN, \
|
2019-04-09 20:48:37 +08:00
|
|
|
GPG_SIGN_CMD, GPG_VERIFY_CMD, UPDATE_INTERVAL, \
|
2019-04-10 20:36:43 +08:00
|
|
|
MAKEPKG_MAKE_CMD_MARCH, UPLOAD_CMD, \
|
|
|
|
GIT_PULL, GIT_RESET_SUBDIR
|
2019-04-07 17:14:50 +08:00
|
|
|
|
|
|
|
from utils import print_exc_plus, background, \
|
|
|
|
bash, get_pkg_details_from_name, vercmp, \
|
2019-04-09 15:43:17 +08:00
|
|
|
nspawn_shell, mon_nspawn_shell, get_arch_from_pkgbuild, \
|
2019-04-09 20:48:37 +08:00
|
|
|
configure_logger, mon_bash
|
2019-04-09 15:43:17 +08:00
|
|
|
|
|
|
|
from client import run as rrun
|
2019-04-06 20:59:27 +08:00
|
|
|
|
|
|
|
import json
|
|
|
|
|
|
|
|
from yamlparse import load_all as load_all_yaml
|
|
|
|
|
|
|
|
# Run from the directory containing this file so that relative paths
# (pkgver.json, buildbot.log, package subdirs) resolve consistently.
abspath=os.path.abspath(__file__)
abspath=os.path.dirname(abspath)
os.chdir(abspath)

# Module-wide logger writing to buildbot.log, rotated at 10 MiB.
logger = logging.getLogger('buildbot')
configure_logger(logger, logfile='buildbot.log', rotate_size=1024*1024*10)

# refuse to run in systemd-nspawn
# (the buildbot spawns its own nspawn containers for builds; running the
# manager itself inside one is unsupported)
if 'systemd-nspawn' in bash('systemd-detect-virt || true'):
    logger.error('Refused to run in systemd-nspawn.')
    raise AssertionError('Refused to run in systemd-nspawn.')

# Root of the checkout holding one subdirectory per PKGBUILD.
REPO_ROOT = Path(PKGBUILD_DIR)
|
|
|
class Job:
    """A single pending build of one package for one build architecture."""

    def __init__(self, buildarch, pkgconfig, version, multiarch=False):
        # reject architectures we have no build container for
        assert buildarch in BUILD_ARCHS
        self.arch = buildarch
        self.pkgconfig = pkgconfig
        self.version = version
        self.multiarch = multiarch
        # timestamp of when this job entered the queue
        self.added = time()

    def __repr__(self):
        fields = ('arch', 'pkgconfig', 'version', 'multiarch', 'added')
        body = ''.join(
            f'{name}={getattr(self, name, None)},' for name in fields
        )
        return 'Job(' + body + ')'
|
2019-04-06 20:59:27 +08:00
|
|
|
class jobsManager:
    '''
    Single-threaded scheduler owning the queue of build Jobs.

    tick() is the main entry point: when the queue is empty it periodically
    scans for package updates (creating Jobs); otherwise it processes one
    queued Job through makepkg -> sign -> upload -> clean.
    '''
    def __init__(self):
        # FIFO queue of pending Job objects
        self.__buildjobs = list()
        # reserved for upload bookkeeping (never appended to here)
        self.__uploadjobs = list()
        # the Job being processed; left non-None if the last tick crashed
        self.__curr_job = None
        # package configs loaded from yaml by tick()/rebuild_package()
        self.pkgconfigs = None
        # time() of the last update scan
        self.last_updatecheck = 0.0
        # True while waiting between update scans (suppresses repeat logging)
        self.idle = False
    @property
    def jobs(self):
        # Queue-state snapshot used by info() and __repr__.
        return \
        {
            'build_jobs': self.__buildjobs,
            'upload_jobs': self.__uploadjobs,
            'current_job': self.__curr_job
        }
    def __repr__(self):
        ret = "jobsManager("
        for myproperty in (
            'jobs', 'pkgconfigs',
            'last_updatecheck', 'idle'
        ):
            ret += f'{myproperty}={getattr(self, myproperty, None)},'
        ret += ')'
        return ret
    def reset_dir(self, pkgdirname=None, all=False):
        # Git-reset one package dir (or the whole repo with all=True) and
        # delete build residue: pkg/, src/ and built package/signature files.
        # Returns True on success, False when the target dir is missing.
        if all:
            logger.info('resetting %s', str(REPO_ROOT))
            bash(GIT_RESET_SUBDIR, cwd=REPO_ROOT)
        else:
            if not pkgdirname:
                return False
            cwd = REPO_ROOT / pkgdirname
            if cwd.exists():
                logger.info('resetting %s', str(cwd))
                bash(GIT_RESET_SUBDIR, cwd=cwd)
                for fpath in [f for f in cwd.iterdir()]:
                    if fpath.is_dir() and \
                       fpath.name in ('pkg', 'src'):
                        rmtree(fpath)
                    elif fpath.is_file() and \
                         (fpath.name.endswith(PKG_SUFFIX) or \
                          fpath.name.endswith(PKG_SIG_SUFFIX)):
                        fpath.unlink()
            else:
                return False
        return True
    def rebuild_package(self, pkgdirname, clean=False):
        # Force-queue a rebuild of one package; with clean=True the package
        # dir is reset first. Returns a human-readable status string.
        self.pkgconfigs = load_all_yaml()
        if (REPO_ROOT / pkgdirname).exists() and clean:
            self.reset_dir(pkgdirname)
        updates = updmgr.check_update(rebuild_package=pkgdirname)
        if updates and len(updates) == 1:
            (pkgconfig, ver, buildarchs) = updates[0]
            # multiarch when the package builds for more than one arch
            march = True if len(buildarchs) >= 2 else False
            for arch in buildarchs:
                newjob = Job(arch, pkgconfig, ver, multiarch=march)
                self._new_buildjob(newjob)
            ret = f'rebuild job added for {pkgdirname} {" ".join(buildarchs)}'
            logger.info(ret)
        else:
            ret = f'rebuild failed: no such dir {pkgdirname}'
            logger.warning(ret)
        return ret
    def _new_buildjob(self, job):
        # Enqueue a Job, first dropping any stale queued job for the same
        # package+arch so only the newest version gets built.
        assert type(job) is Job
        job_to_remove = list()
        for previous_job in self.__buildjobs:
            if job.pkgconfig.dirname == previous_job.pkgconfig.dirname and \
               job.arch == previous_job.arch:
                job_to_remove.append(previous_job)
        for oldjob in job_to_remove:
            self.__buildjobs.remove(oldjob)
            logger.info('removed an old job for %s %s, %s => %s',
                        job.pkgconfig.dirname, job.arch,
                        oldjob.version, job.version)
        logger.info('new job for %s %s %s',
                    job.pkgconfig.dirname, job.arch, job.version)
        self.__buildjobs.append(job)
    def __get_job(self):
        # Pop the next job. A leftover __curr_job means the previous tick
        # failed mid-build: force-finish (discard) it and retry.
        # Implicitly returns None when the queue is empty.
        if self.__curr_job:
            logger.error(f'Job {self.__curr_job} failed. Correct the error and rebuild')
            self.__finish_job(self.__curr_job, force=True)
            return self.__get_job()
        jobs = self.__buildjobs
        if jobs:
            self.__curr_job = jobs.pop(0)
            return self.__curr_job
    def __finish_job(self, pkgdir, force=False):
        # Mark the current job done. Unless forced, pkgdir must match the
        # current job (guards against finishing the wrong job).
        if not force:
            assert pkgdir == self.__curr_job.pkgconfig.dirname
        self.__curr_job = None
        return True
    def __makepkg(self, job):
        # Run makepkg inside the arch-specific nspawn container, teeing
        # output to buildbot.log.makepkg in the package dir.
        cwd = REPO_ROOT / job.pkgconfig.dirname
        if job.multiarch:
            # assume a clean env, no source avail
            mkcmd = MAKEPKG_MAKE_CMD_MARCH
        else:
            mkcmd = MAKEPKG_MAKE_CMD_CLEAN if job.pkgconfig.cleanbuild \
                    else MAKEPKG_MAKE_CMD
        logger.info('makepkg in %s %s', job.pkgconfig.dirname, job.arch)
        # per-package timeout is configured in minutes
        return mon_nspawn_shell(arch=job.arch, cwd=cwd, cmdline=mkcmd,
                                logfile = cwd / 'buildbot.log.makepkg',
                                short_return = True,
                                seconds=job.pkgconfig.timeout*60)
    def __clean(self, job, remove_pkg=False, rm_src=True):
        # Delete build residue: src/pkg dirs (rm_src) and/or built package +
        # signature files (remove_pkg). For multiarch jobs only files whose
        # name contains the job's arch are removed.
        cwd = REPO_ROOT / job.pkgconfig.dirname
        logger.info('cleaning build dir for %s, %sremoving pkg',
                    job.pkgconfig.dirname, '' if remove_pkg else 'not ')
        for fpath in [f for f in cwd.iterdir()]:
            if rm_src and fpath.is_dir() and \
               fpath.name in ('pkg', 'src'):
                rmtree(fpath)
            elif remove_pkg and fpath.is_file() and \
                 ((not job.multiarch) or job.arch in fpath.name) and \
                 (fpath.name.endswith(PKG_SUFFIX) or \
                  fpath.name.endswith(PKG_SIG_SUFFIX)):
                fpath.unlink()
    def __sign(self, job):
        # GPG-sign every built package file in the job's directory.
        logger.info('signing in %s %s', job.pkgconfig.dirname, job.arch)
        cwd = REPO_ROOT / job.pkgconfig.dirname
        for fpath in cwd.iterdir():
            if fpath.name.endswith(PKG_SUFFIX):
                bash(f'{GPG_SIGN_CMD} {fpath.name}', cwd=cwd)
    def __upload(self, job):
        # Upload package + signature files matching the job's version, then
        # ask the remote to update the repo database for each package file.
        # Returns True only if every remote repo update succeeded.
        suc = True
        cwd = REPO_ROOT / job.pkgconfig.dirname
        f_to_upload = list()
        for fpath in cwd.iterdir():
            if fpath.name.endswith(PKG_SUFFIX) and \
               get_pkg_details_from_name(fpath.name).ver == job.version:
                sigpath = fpath.parent / f'{fpath.name}.sig'
                # every package must have been signed before upload
                assert sigpath.exists()
                f_to_upload.append(sigpath)
                f_to_upload.append(fpath)
        for f in f_to_upload:
            # size in MB; the remote derives a transfer timeout from it
            size = f.stat().st_size / 1000 / 1000
            if f.name.endswith(PKG_SUFFIX):
                # ask the remote for an upload slot; retry up to 10 times
                for _ in range(10):
                    timeout = rrun('push_start', args=(f.name, size))
                    if timeout > 0:
                        break
                    else:
                        logger.warning('Remote is busy (-1), wait 1 min x10')
                        sleep(60)
            else:
                # signature files are tiny: fixed timeout
                timeout = 60
            logger.info(f'Uploading {f}, timeout in {timeout}s')
            mon_bash(UPLOAD_CMD.format(src=f), seconds=int(timeout))
            if f.name.endswith(PKG_SUFFIX):
                logger.info(f'Requesting repo update for {f.name}')
                res = rrun('push_done', args=(f.name,), kwargs={'overwrite': False,})
                if res is None:
                    logger.info(f'Update success for {f.name}')
                else:
                    logger.error(f'Update failed for {f.name}, reason: {res}')
                    suc = False
        return suc
    def tick(self):
        '''
        check for updates,
        create new jobs
        and run them
        '''
        if not self.__buildjobs:
            # This part check for updates
            if time() - self.last_updatecheck <= UPDATE_INTERVAL * 60:
                # too soon to rescan: idle and come back later
                if not self.idle:
                    logger.info('Buildbot is idling for package updates.')
                    self.idle = True
                sleep(60)
                return
            self.last_updatecheck = time()
            self.idle = False
            # git pull repo
            try:
                bash(GIT_PULL, cwd=REPO_ROOT)
            except Exception:
                # a failed pull is non-fatal; check updates on what we have
                print_exc_plus()
            self.pkgconfigs = load_all_yaml()
            updates = updmgr.check_update()
            for update in updates:
                (pkgconfig, ver, buildarchs) = update
                march = True if len(buildarchs) >= 2 else False
                for arch in buildarchs:
                    newjob = Job(arch, pkgconfig, ver, multiarch=march)
                    self._new_buildjob(newjob)
        else:
            # This part does the job
            job = self.__get_job()
            if not job:
                # NOTE(review): uses the root `logging` module rather than
                # the module-level `logger` -- likely meant logger.error
                logging.error('No job got')
                return
            if job.multiarch:
                # multiarch: always start from a fully cleaned dir
                self.__clean(job, remove_pkg=True)
                self.__makepkg(job)
                self.__sign(job)
                if self.__upload(job):
                    self.__clean(job, remove_pkg=True)
            else:
                self.__makepkg(job)
                self.__sign(job)
                if self.__upload(job):
                    if job.pkgconfig.cleanbuild:
                        self.__clean(job, remove_pkg=True)
                    else:
                        # keep src/ for faster incremental rebuilds
                        self.__clean(job, rm_src=False, remove_pkg=True)
            self.__finish_job(job.pkgconfig.dirname)
|
2019-04-06 20:59:27 +08:00
|
|
|
# Module-level singleton used by updateManager and the RPC entry points.
jobsmgr = jobsManager()
|
|
|
|
|
|
|
|
class updateManager:
    '''
    Tracks the last built version of every package and detects updates.

    Versions are persisted as a {pkg dirname: version} mapping in a json
    file (default pkgver.json) so knowledge survives restarts.
    '''
    def __init__(self, filename='pkgver.json'):
        # path of the json file storing {pkg dirname: version}
        self.__filename = filename
        self.__pkgvers = dict()
        self.__load()
    def __load(self):
        # Load the persisted version map. Corrupt json aborts the program
        # so a bad file is never silently clobbered by a later _save().
        if Path(self.__filename).exists():
            with open(self.__filename, "r", encoding="utf-8") as f:
                try:
                    pkgvers = json.loads(f.read())
                except json.JSONDecodeError:
                    logger.error('pkgver.json - Bad json')
                    # fix: was a bare `print_exc_plus` reference (missing
                    # parentheses), which never printed the traceback
                    print_exc_plus()
                    raise SystemExit(1)
        else:
            logger.warning(f'No {self.__filename} found')
            pkgvers = dict()
        # sanity-check the structure before adopting it
        assert type(pkgvers) is dict
        for pkgname in pkgvers:
            assert type(pkgname) is str
        self.__pkgvers = pkgvers
    def _save(self):
        # Persist the version map, pretty-printed with a trailing newline.
        pkgvers = json.dumps(self.__pkgvers, indent=4)
        pkgvers += '\n'
        with open(self.__filename, "w", encoding="utf-8") as f:
            # fix: was `if f.writable:` -- the bound method object is
            # always truthy, so the check never actually ran
            if f.writable():
                f.write(pkgvers)
            else:
                logger.error('pkgver.json - Not writable')
    def __get_package_list(self, dirname, arch):
        # Ask makepkg (inside the arch's nspawn container) which package
        # files this PKGBUILD would produce.
        pkgdir = REPO_ROOT / dirname
        assert pkgdir.exists()
        pkglist = nspawn_shell(arch, MAKEPKG_PKGLIST_CMD, cwd=pkgdir)
        pkglist = pkglist.split('\n')
        # drop shell trace lines ('+ ...') mixed into the container output
        pkglist = [line for line in pkglist if not line.startswith('+')]
        return pkglist
    def __get_new_ver(self, dirname, arch):
        # Version string parsed from the first package file makepkg lists.
        pkgfiles = self.__get_package_list(dirname, arch)
        ver = get_pkg_details_from_name(pkgfiles[0]).ver
        return ver
    def check_update(self, rebuild_package=None):
        '''
        Scan configured packages for new versions.

        With rebuild_package set, only that package dir is considered and
        it is treated as updated regardless of version comparison.
        Returns a list of (pkgconfig, version, buildarchs) tuples and
        persists the new version map.
        '''
        updates = list()
        for pkg in jobsmgr.pkgconfigs:
            if rebuild_package and \
               rebuild_package != pkg.dirname:
                continue
            pkgdir = REPO_ROOT / pkg.dirname
            logger.info(f'checking update: {pkg.dirname}')
            pkgbuild = pkgdir / 'PKGBUILD'
            archs = get_arch_from_pkgbuild(pkgbuild)
            # map PKGBUILD arches to build containers, dropping unmapped ones
            buildarchs = [BUILD_ARCH_MAPPING.get(arch, None) for arch in archs]
            buildarchs = [arch for arch in buildarchs if arch is not None]
            if not buildarchs:
                logger.warning(f'No build arch for {pkg.dirname}, refuse to build.')
                continue
            # hopefully we only need to check one arch for update
            arch = 'x86_64' if 'x86_64' in buildarchs else buildarchs[0] # prefer x86
            mon_nspawn_shell(arch, MAKEPKG_UPD_CMD, cwd=pkgdir, seconds=60*60,
                             logfile = pkgdir / 'buildbot.log.update',
                             short_return = True)
            if pkg.type in ('git', 'manual'):
                ver = self.__get_new_ver(pkg.dirname, arch)
                oldver = self.__pkgvers.get(pkg.dirname, None)
                has_update = False
                if rebuild_package:
                    # forced rebuild: always treat as updated
                    has_update = True
                if oldver:
                    res = vercmp(ver, oldver)
                    if res == 1:
                        has_update = True
                    elif res == -1:
                        logger.warning(f'package: {pkg.dirname} downgrade attempted')
                    elif res == 0:
                        logger.info(f'package: {pkg.dirname} is up to date')
                else:
                    # never built before
                    has_update = True
                if has_update:
                    self.__pkgvers[pkg.dirname] = ver
                    updates.append((pkg, ver, buildarchs))
            else:
                logger.warning(f'unknown package type: {pkg.type}')
        self._save()
        return updates
|
|
|
|
|
|
|
|
# Module-level singleton; persists known package versions in pkgver.json.
updmgr = updateManager()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-04-10 20:36:43 +08:00
|
|
|
def info():
    """Return a status report: jobsManager repr plus a readable summary."""
    snapshot = jobsmgr.jobs
    parts = [str(jobsmgr), '\nhuman-readable:\n']
    for key in snapshot:
        parts.append(f"{key} = {snapshot[key]}\n")
    parts.append(f"idle: {jobsmgr.idle}")
    return "".join(parts)
|
2019-04-10 20:36:43 +08:00
|
|
|
|
|
|
|
def rebuild_package(pkgdirname, clean=False):
    """Queue a forced rebuild of *pkgdirname*; reset its dir first if *clean*."""
    status = jobsmgr.rebuild_package(pkgdirname, clean=clean)
    return status
|
|
|
|
|
|
|
|
def clean(pkgdirname):
    """Reset one package directory and remove its build leftovers."""
    ok = jobsmgr.reset_dir(pkgdirname=pkgdirname)
    return ok
|
|
|
|
|
|
|
|
def clean_all():
    """Git-reset the whole PKGBUILD checkout."""
    ok = jobsmgr.reset_dir(all=True)
    return ok
|
2019-04-06 20:59:27 +08:00
|
|
|
|
|
|
|
def run(funcname, args=None, kwargs=None):
    """Dispatch a control command to one of the whitelisted functions.

    funcname: one of 'info', 'rebuild_package', 'clean', 'clean_all'.
    args/kwargs: positional/keyword arguments forwarded to the function
    (None means none; fixed from mutable `list()`/`dict()` defaults).
    Returns the function's result, or False for an unknown funcname.
    """
    args = [] if args is None else args
    kwargs = {} if kwargs is None else kwargs
    # Explicit dispatch table instead of eval(): only these module-level
    # functions are callable over the control connection.
    dispatch = {
        'info': info,
        'rebuild_package': rebuild_package,
        'clean': clean,
        'clean_all': clean_all,
    }
    func = dispatch.get(funcname)
    if func is None:
        logger.error('unexpected: %s %s %s', funcname, args, kwargs)
        return False
    logger.debug('running: %s %s %s', funcname, args, kwargs)
    ret = func(*args, **kwargs)
    logger.info('done: %s %s %s', funcname, args, kwargs)
    return ret
|
|
|
|
|
2019-04-09 20:48:37 +08:00
|
|
|
@background
def __main():
    # Control-socket server loop (presumably runs in a background thread via
    # the @background decorator from utils -- confirm against utils).
    # Accepts one connection at a time, receives a single
    # [funcname, args, kwargs] triple, executes it through run() and sends
    # the result back. Any error is logged and the listener is re-created.
    while True:
        try:
            with Listener(MASTER_BIND_ADDRESS, authkey=MASTER_BIND_PASSWD) as listener:
                with listener.accept() as conn:
                    logger.info('connection accepted from %s', listener.last_accepted)
                    myrecv = conn.recv()
                    # only a well-formed triple is executed; anything else
                    # is silently dropped and the connection closed
                    if type(myrecv) is list and len(myrecv) == 3:
                        (funcname, args, kwargs) = myrecv
                        funcname = str(funcname)
                        logger.debug('running: %s %s %s', funcname, args, kwargs)
                        conn.send(run(funcname, args=args, kwargs=kwargs))
        except Exception:
            print_exc_plus()
|
2019-04-09 20:48:37 +08:00
|
|
|
|
|
|
|
if __name__ == '__main__':
    logger.info('Buildbot started.')
    __main() # start the Listener thread
    logger.info('Listener started.')
    # Main loop: process jobs forever. Errors are logged and the loop
    # continues after a 1s pause; Ctrl-C breaks out.
    while True:
        try:
            jobsmgr.tick()
        except Exception:
            print_exc_plus()
        # KeyboardInterrupt is not a subclass of Exception, so this branch
        # is still reachable despite the broad handler above
        except KeyboardInterrupt:
            logger.info('KeyboardInterrupt')
            print_exc_plus()
            break
        sleep(1)
|