Mirror of https://github.com/archlinux-jerry/buildbot (synced 2024-11-22 13:00:40 +08:00)

commit c2859dff0a ("completed")
parent d4476c1a29

5 changed files with 105 additions and 68 deletions
buildbot.py (95 changed lines)

@@ -17,12 +17,13 @@ from config import ARCHS, BUILD_ARCHS, BUILD_ARCH_MAPPING, \
     MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD, \
     PKGBUILD_DIR, MAKEPKG_PKGLIST_CMD, MAKEPKG_UPD_CMD, \
     MAKEPKG_MAKE_CMD, MAKEPKG_MAKE_CMD_CLEAN, \
-    GPG_SIGN_CMD, GPG_VERIFY_CMD, UPDATE_INTERVAL
+    GPG_SIGN_CMD, GPG_VERIFY_CMD, UPDATE_INTERVAL, \
+    MAKEPKG_MAKE_CMD_MARCH, UPLOAD_CMD
 
 from utils import print_exc_plus, background, \
     bash, get_pkg_details_from_name, vercmp, \
     nspawn_shell, mon_nspawn_shell, get_arch_from_pkgbuild, \
-    configure_logger
+    configure_logger, mon_bash
 
 from client import run as rrun
 
@@ -84,24 +85,30 @@ class jobsManager:
         self.__curr_job = None
         return True
     def __makepkg(self, job):
-        mkcmd = MAKEPKG_MAKE_CMD_CLEAN if job.pkgconfig.cleanbuild \
-                else MAKEPKG_MAKE_CMD
         cwd = REPO_ROOT / job.pkgconfig.dirname
+        if job.multiarch:
+            # assume a clean env, no source avail
+            mkcmd = MAKEPKG_MAKE_CMD_MARCH
+        else:
+            mkcmd = MAKEPKG_MAKE_CMD_CLEAN if job.pkgconfig.cleanbuild \
+                    else MAKEPKG_MAKE_CMD
         logger.info('makepkg in %s %s', job.pkgconfig.dirname, job.arch)
         return mon_nspawn_shell(arch=job.arch, cwd=cwd, cmdline=mkcmd,
                                 logfile = cwd / 'buildbot.log.makepkg',
-                                short_return = True)
-    def __clean(self, job, remove_pkg=False):
+                                short_return = True,
+                                seconds=job.pkgconfig.timeout*60)
+    def __clean(self, job, remove_pkg=False, rm_src=True):
         cwd = REPO_ROOT / job.pkgconfig.dirname
         logger.info('cleaning build dir for %s, %sremoving pkg',
                     job.pkgconfig.dirname, '' if remove_pkg else 'not ')
         for fpath in [f for f in cwd.iterdir()]:
-            fpath = Path()
-            if fpath.is_dir() and fpath.name in ('pkg', 'src'):
+            if rm_src and fpath.is_dir() and \
+               fpath.name in ('pkg', 'src'):
                 rmtree(fpath)
             elif remove_pkg and fpath.is_file() and \
+                 ((not job.multiarch) or job.arch in fpath.name) and \
                  (fpath.name.endswith(PKG_SUFFIX) or \
                   fpath.name.endswith(PKG_SIG_SUFFIX)):
                 fpath.unlink()
     def __sign(self, job):
         cwd = REPO_ROOT / job.pkgconfig.dirname
@@ -113,9 +120,27 @@ class jobsManager:
         wip
         '''
         cwd = REPO_ROOT / job.pkgconfig.dirname
-        print(bash('ls -l', cwd=cwd))
-        #nspawn_shell(job.arch, 'rm -rf src pkg', cwd=cwd)
-        #rrun()
+        f_to_upload = list()
+        for fpath in cwd.iterdir():
+            if fpath.name.endswith(PKG_SUFFIX) and \
+               get_pkg_details_from_name(fpath.name).ver == job.version:
+                sigpath = fpath.parent / f'{fpath.name}.sig'
+                assert sigpath.exists()
+                f_to_upload.append(sigpath)
+                f_to_upload.append(fpath)
+        for f in f_to_upload:
+            size = f.stat().st_size / 1000 / 1000
+            timeout = rrun('push_start', args=(f.name, size))
+            logger.info(f'Uploading {f}, timeout in {timeout}s')
+            assert timeout > 0
+            mon_bash(f'{UPLOAD_CMD} \"{f}\"', seconds=timeout)
+            if f.name.endswith(PKG_SUFFIX):
+                logger.info(f'Requesting repo update for {f.name}')
+                res = rrun('push_done', kwargs={'overwrite': False,})
+                if res is None:
+                    logger.info(f'Update success for {f.name}')
+                else:
+                    logger.error(f'Update failed for {f.name}, reason: {res}')
     def tick(self):
         '''
         check for updates,
@@ -142,16 +167,19 @@ class jobsManager:
         else:
             # This part does the job
             job = self.__get_job()
-            cwd = REPO_ROOT / job.pkgconfig.dirname
             if job.multiarch:
-                # wip
-                pass
+                self.__clean(job, remove_pkg=True)
+                self.__sign(job)
+                self.__upload(job)
+                self.__clean(job, remove_pkg=True)
             else:
                 self.__makepkg(job)
                 self.__sign(job)
                 self.__upload(job)
                 if job.pkgconfig.cleanbuild:
-                    self.__clean(job ,remove_pkg=True)
+                    self.__clean(job, remove_pkg=True)
+                else:
+                    self.__clean(job, rm_src=False, remove_pkg=True)
             self.__finish_job(job.pkgconfig.dirname)
 jobsmgr = jobsManager()
 
@@ -206,7 +234,7 @@ class updateManager:
         buildarchs = [arch for arch in buildarchs if arch is not None]
         # hopefully we only need to check one arch for update
         arch = 'x86_64' if 'x86_64' in buildarchs else buildarchs[0] # prefer x86
-        mon_nspawn_shell(arch, MAKEPKG_UPD_CMD, cwd=pkgdir, minutes=60,
+        mon_nspawn_shell(arch, MAKEPKG_UPD_CMD, cwd=pkgdir, seconds=60*60,
                          logfile = pkgdir / 'buildbot.log.update',
                          short_return = True)
         if pkg.type in ('git', 'manual'):
@@ -234,24 +262,13 @@ class updateManager:
 updmgr = updateManager()
 
 
-@background
-def __main():
-    while True:
-        try:
-            jobsmgr.tick()
-        except:
-            print_exc_plus()
-        sleep(1)
-
-
-
-
 
 
+def info(*args, **kwargs):
+    return (args, kwargs)
 
 def run(funcname, args=list(), kwargs=dict()):
-    if funcname in ('clean', 'regenerate', 'remove',
-                    'update', 'push_files', 'add_files'):
+    if funcname in ('info',):
         logger.info('running: %s %s %s',funcname, args, kwargs)
         ret = eval(funcname)(*args, **kwargs)
         logger.info('done: %s %s',funcname, ret)
@@ -260,9 +277,8 @@ def run(funcname, args=list(), kwargs=dict()):
         logger.error('unexpected: %s %s %s',funcname, args, kwargs)
         return False
 
-
-if __name__ == '__main__':
-    __main() # start the main worker thread
+@background
+def __main():
     while True:
         try:
             with Listener(MASTER_BIND_ADDRESS, authkey=MASTER_BIND_PASSWD) as listener:
@@ -276,7 +292,18 @@ if __name__ == '__main__':
                     conn.send(run(funcname, args=args, kwargs=kwargs))
         except Exception:
             print_exc_plus()
+
+if __name__ == '__main__':
+    logger.info('Buildbot started.')
+    __main() # start the Listener thread
+    logger.info('Listener started.')
+    while True:
+        try:
+            jobsmgr.tick()
+        except Exception:
+            print_exc_plus()
         except KeyboardInterrupt:
             logger.info('KeyboardInterrupt')
             print_exc_plus()
             break
+        sleep(1)
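The buildbot.py changes above wire the worker to the repo daemon's new push_start/push_done handshake: each package of the built version (plus its .sig) is measured, an upload time window is requested over the client connection, the file is pushed with mon_bash under that window, and a repo update is requested afterwards. Below is a minimal runnable sketch of that flow; rrun and mon_bash are replaced by local stubs, and PKG_SUFFIX/UPLOAD_CMD are placeholders rather than the repository's real values.

import logging
from pathlib import Path

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('buildbot.sketch')

PKG_SUFFIX = '.pkg.tar.zst'    # placeholder suffix; the real one comes from config.py
UPLOAD_CMD = 'rsync -avPh'     # placeholder; the real command lives in config.py

def rrun(funcname, args=(), kwargs=None):
    # stand-in for client.run: pretend the repo daemon grants a 120s window
    # for push_start and reports success (None) for push_done
    return 120 if funcname == 'push_start' else None

def mon_bash(cmdline, seconds=60 * 30):
    # stand-in for utils.mon_bash: would run the shell command under a timeout
    logger.info('would run %r with a %ss timeout', cmdline, seconds)

def upload(cwd: Path, version: str) -> None:
    # collect every package of this version together with its .sig file
    # (the real code compares get_pkg_details_from_name(...).ver to job.version)
    f_to_upload = []
    for fpath in cwd.iterdir():
        if fpath.name.endswith(PKG_SUFFIX) and version in fpath.name:
            sigpath = fpath.parent / f'{fpath.name}.sig'
            if sigpath.exists():
                f_to_upload += [sigpath, fpath]
    for f in f_to_upload:
        size = f.stat().st_size / 1000 / 1000             # size in MB, as in the diff
        timeout = rrun('push_start', args=(f.name, size))
        assert timeout > 0                                # the daemon returns -1 when busy
        mon_bash(f'{UPLOAD_CMD} "{f}"', seconds=timeout)
        if f.name.endswith(PKG_SUFFIX):
            res = rrun('push_done', kwargs={'overwrite': False})
            logger.info('update %s for %s',
                        'ok' if res is None else f'failed: {res}', f.name)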
config.py (10 changed lines)

@@ -48,10 +48,14 @@ MAKEPKG = 'makepkg --nosign --needed --noconfirm --noprogressbar --nocolor'
 MAKEPKG_UPD_CMD = 'makepkg --syncdeps --nobuild'
 MAKEPKG_MAKE_CMD = 'makepkg --syncdeps --noextract'
 MAKEPKG_MAKE_CMD_CLEAN = 'makepkg --syncdeps --noextract --clean --cleanbuild'
+MAKEPKG_MAKE_CMD_MARCH = 'makepkg --syncdeps --clean --cleanbuild'
 
 MAKEPKG_PKGLIST_CMD = f'{MAKEPKG} --packagelist'
 
 CONTAINER_BUILDBOT_ROOT = 'shared/buildbot'
-# single quote may cause problem here
-SHELL_ARCH_X64 = 'sudo machinectl --quiet shell build@archlinux /bin/bash -c \'{command}\''
-SHELL_ARCH_ARM64 = 'sudo machinectl --quiet shell root@alarm /bin/bash -c $\'su -l alarm -c \\\'{command}\\\'\''
+SHELL_ARCH_X64 = ['/usr/bin/sudo', 'machinectl', '--quiet', 'shell', 'build@archlinux', '/bin/bash', '-x', '-e', '-c']
+SHELL_ARCH_ARM64 = ['/usr/bin/sudo', 'machinectl', '--quiet', 'shell', 'root@alarm', '/bin/su', '-l', 'alarm', '-c']
+SHELL_ARM64_ADDITIONAL = 'set -e; set -x'
+SHELL_TRAP = 'trap \'echo ++ exit $?\' ERR EXIT'
+
+UPLOAD_CMD = 'rsync -avPh {src} repoupload:/srv/repo/buildbot/repo/updates/'
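For reference, the new list-form SHELL_ARCH_* values are meant to be concatenated with a single command string and executed as an argv, so the old comment about single-quote escaping no longer applies, while SHELL_TRAP makes the inner shell print its exit status (see the utils.py diff below). A small illustrative sketch, using subprocess.run only as a stand-in for buildbot's own run_cmd:

import subprocess

SHELL_ARCH_X64 = ['/usr/bin/sudo', 'machinectl', '--quiet', 'shell',
                  'build@archlinux', '/bin/bash', '-x', '-e', '-c']
SHELL_TRAP = 'trap \'echo ++ exit $?\' ERR EXIT'

def build_argv(cwd, cmdline):
    # the trap prints '++ exit <code>' when the inner shell exits; the caller
    # later checks for a trailing '++ exit 0' to detect success
    command = f'{SHELL_TRAP}; cd \'{cwd}\'; {cmdline}'
    return SHELL_ARCH_X64 + [command]

print(build_argv('shared/buildbot/somepkg', 'makepkg --syncdeps --noextract'))
# subprocess.run(build_argv(...), check=True)  # would actually invoke machinectl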
repod.py (15 changed lines)

@@ -39,17 +39,20 @@ class pushFm:
     def start(self, fname, size):
         '''
         size is in MB
+        returns -1 when busy
         '''
         if self.is_busy():
-            return f'busy with {self.fname}'
+            return -1
         self.fname = fname
         self.start_time = time()
         self.size = size
         if size <= 7.5:
+            timeout = 120
             self.end_time = self.start_time + 120
         else:
-            self.end_time = self.start_time + size / (REPO_PUSH_BANDWIDTH / 8) * 2
-        return None
+            timeout = size / (REPO_PUSH_BANDWIDTH / 8) * 2
+            self.end_time = self.start_time + timeout
+        return timeout
     def tick(self):
         '''
         return None means success
@@ -107,11 +110,11 @@ class pushFm:
 
 pfm = pushFm()
 
-def push_files(filename, size):
+def push_start(filename, size):
     pfm.tick()
     return pfm.start(filename, size)
 
-def add_files(filename, overwrite=False):
+def push_done(filename, overwrite=False):
     return pfm.done(filename, overwrite=overwrite)
 
 
@@ -120,7 +123,7 @@ def add_files(filename, overwrite=False):
 
 def run(funcname, args=list(), kwargs=dict()):
     if funcname in ('clean', 'regenerate', 'remove',
-                    'update', 'push_files', 'add_files'):
+                    'update', 'push_start', 'push_done'):
         logger.info('running: %s %s %s', funcname, args, kwargs)
         ret = eval(funcname)(*args, **kwargs)
         logger.info('done: %s %s',funcname, ret)
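The reworked pushFm.start returns the timeout it computed instead of None, so the worker can pass it straight to mon_bash. A small sketch of that rule, assuming REPO_PUSH_BANDWIDTH is a link speed in Mbit/s (the constant is defined outside this diff, so its unit here is an assumption):

REPO_PUSH_BANDWIDTH = 8  # assumed Mbit/s, i.e. 1 MB/s; illustrative value only

def upload_timeout(size_mb: float) -> float:
    # small uploads get a flat 120s window
    if size_mb <= 7.5:
        return 120
    # otherwise: size in MB divided by bandwidth in MB/s, doubled as a safety margin
    return size_mb / (REPO_PUSH_BANDWIDTH / 8) * 2

assert upload_timeout(5) == 120
assert upload_timeout(100) == 200.0   # 100 MB at 1 MB/s, times two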
utils.py (27 changed lines)

@@ -11,6 +11,7 @@ import sys
 import traceback
 
 from config import PKG_COMPRESSION, SHELL_ARCH_ARM64, SHELL_ARCH_X64, \
+                   SHELL_ARM64_ADDITIONAL, SHELL_TRAP, \
                    CONTAINER_BUILDBOT_ROOT, ARCHS
 
 logger = logging.getLogger(f'buildbot.{__name__}')
@@ -28,10 +29,10 @@ def bash(cmdline, **kwargs):
     logger.info(f'bash: {cmdline}, kwargs: {kwargs}')
     return(run_cmd(['/bin/bash', '-x', '-e', '-c', cmdline], **kwargs))
 
-def mon_bash(cmdline, cwd=None, minutes=30, **kwargs):
-    assert type(minutes) is int and minutes >= 1
-    return bash(cmdline, cwd=cwd, keepalive=True, KEEPALIVE_TIMEOUT=60,
-                RUN_CMD_TIMEOUT=minutes*60, **kwargs)
+def mon_bash(cmdline, seconds=60*30, **kwargs):
+    assert type(seconds) is int and seconds >= 1
+    return bash(cmdline, keepalive=True, KEEPALIVE_TIMEOUT=60,
+                RUN_CMD_TIMEOUT=seconds, **kwargs)
 
 def nspawn_shell(arch, cmdline, cwd=None, **kwargs):
     root = Path(CONTAINER_BUILDBOT_ROOT)
@@ -39,22 +40,23 @@ def nspawn_shell(arch, cmdline, cwd=None, **kwargs):
         cwd = root / cwd
     else:
         cwd = root
+    logger.info(f'bash_{arch}: {cmdline}, cwd: {cwd}, kwargs: {kwargs}')
     if arch in ('aarch64', 'arm64'):
-        ret = bash(SHELL_ARCH_ARM64.format(
-            command=f'cd \"{cwd}\" || echo \"++ exit 1\" && exit 1; {cmdline}; echo \"++ exit $?\"'), **kwargs)
+        command=f'{SHELL_ARM64_ADDITIONAL}; {SHELL_TRAP}; cd \'{cwd}\'; {cmdline}'
+        ret = run_cmd(SHELL_ARCH_ARM64 + [command,], **kwargs)
     elif arch in ('x64', 'x86', 'x86_64'):
-        ret = bash(SHELL_ARCH_X64.format(
-            command=f'cd \"{cwd}\" || echo \"++ exit 1\" && exit 1; {cmdline}; echo \"++ exit $?\"'), **kwargs)
+        command=f'{SHELL_TRAP}; cd \'{cwd}\'; {cmdline}'
+        ret = run_cmd(SHELL_ARCH_X64 + [command,], **kwargs)
     else:
         raise TypeError('nspawn_shell: wrong arch')
     if not ret.endswith('++ exit 0\n'):
         raise subprocess.CalledProcessError(1, cmdline, ret)
     return ret
 
-def mon_nspawn_shell(arch, cmdline, cwd, minutes=30, **kwargs):
-    assert type(minutes) is int and minutes >= 1
+def mon_nspawn_shell(arch, cmdline, cwd, seconds=60*30, **kwargs):
+    assert type(seconds) is int and seconds >= 1
     return nspawn_shell(arch, cmdline, cwd=cwd, keepalive=True, KEEPALIVE_TIMEOUT=60,
-                        RUN_CMD_TIMEOUT=minutes*60, **kwargs)
+                        RUN_CMD_TIMEOUT=seconds, **kwargs)
 
 def run_cmd(cmd, cwd=None, keepalive=False, KEEPALIVE_TIMEOUT=30, RUN_CMD_TIMEOUT=60,
             logfile=None, short_return=False):
@@ -96,7 +98,6 @@ def run_cmd(cmd, cwd=None, keepalive=False, KEEPALIVE_TIMEOUT=30, RUN_CMD_TIMEOU
                 continue
            line.replace('\x0f', '\n')
            last_read_time = int(time())
-           logger.debug(line)
            output.append(line)
            last_read[0] = last_read_time
            last_read[1] = line
@@ -253,7 +254,7 @@ def format_exc_plus():
         ret += '\n'
     return ret
 
-def configure_logger(logger, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+def configure_logger(logger, format='%(asctime)s - %(name)-18s - %(levelname)s - %(message)s',
                      level=logging.INFO, logfile=None, flevel=logging.INFO, rotate_size=None):
     class ExceptionFormatter(logging.Formatter):
         def format(self, record):
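The nspawn_shell rewrite keeps the existing success check: the trap installed by SHELL_TRAP prints '++ exit <status>' as the last line of the captured output, and anything other than '++ exit 0' is converted into CalledProcessError. A stand-alone sketch of that check (check_shell_output is a hypothetical helper, not part of utils.py):

import subprocess

def check_shell_output(cmdline: str, output: str) -> str:
    # the container shell appends '++ exit <status>' via the ERR/EXIT trap
    if not output.endswith('++ exit 0\n'):
        raise subprocess.CalledProcessError(1, cmdline, output)
    return output

check_shell_output('makepkg', 'building...\n++ exit 0\n')        # passes
try:
    check_shell_output('makepkg', 'error: failed\n++ exit 1\n')  # raises
except subprocess.CalledProcessError as e:
    print(f'build failed: {e}')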
yamlparse.py (26 changed lines)

@@ -73,18 +73,20 @@ def load_all():
     pkgconfigs = list()
     for mydir in REPO_ROOT.iterdir():
         try:
-            if mydir.is_dir() and (mydir / AUTOBUILD_FNAME).exists():
-                # parsing yaml
-                logger.info('Bulidbot: found %s in %s', AUTOBUILD_FNAME,
-                            mydir / AUTOBUILD_FNAME)
-                with open(mydir / AUTOBUILD_FNAME, 'r') as f:
-                    content = f.read()
-                content = load(content, Loader=Loader)
-                assert type(content) is dict
-                args = [content.get(part, None) for part in \
-                        ('type', 'cleanbuild', 'timeout', 'extra')]
-                args = [mydir.name] + args
-                pkgconfigs.append(pkgConfig(*args))
+            if mydir.is_dir():
+                if (mydir / AUTOBUILD_FNAME).exists():
+                    # parsing yaml
+                    logger.info('Bulidbot: found %s in %s', AUTOBUILD_FNAME, mydir)
+                    with open(mydir / AUTOBUILD_FNAME, 'r') as f:
+                        content = f.read()
+                    content = load(content, Loader=Loader)
+                    assert type(content) is dict
+                    args = [content.get(part, None) for part in \
+                            ('type', 'cleanbuild', 'timeout', 'extra')]
+                    args = [mydir.name] + args
+                    pkgconfigs.append(pkgConfig(*args))
+                else:
+                    logger.warning('Bulidbot: NO %s in %s', AUTOBUILD_FNAME, mydir)
         except Exception:
             logger.error(f'Error while parsing {AUTOBUILD_FNAME} for {mydir.name}')
             print_exc_plus()
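The parsing inside the new is_dir()/exists() checks is unchanged: the per-package yaml file is loaded, asserted to be a mapping, and reduced to the (dirname, type, cleanbuild, timeout, extra) arguments of pkgConfig. A sketch with a made-up sample document, assuming PyYAML as the yaml implementation (the real file name comes from AUTOBUILD_FNAME):

from yaml import load
try:
    from yaml import CLoader as Loader   # fast loader if available
except ImportError:
    from yaml import Loader

# illustrative sample; real content lives in each package's autobuild yaml
sample = """
type: git
cleanbuild: true
timeout: 30
"""

content = load(sample, Loader=Loader)
assert type(content) is dict
args = ['example-pkg'] + [content.get(part, None)
                          for part in ('type', 'cleanbuild', 'timeout', 'extra')]
print(args)   # ['example-pkg', 'git', True, 30, None]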