Browse Source

buildbot: new logging system

master
JerryXiao 3 years ago
parent
commit
87b7d11651
Signed by: Jerry
GPG Key ID: 9D9CE43650FF2BAA
  1. 9
      .gitignore
  2. 86
      buildbot.py
  3. 10
      client.py
  4. 1
      config.py
  5. 6
      repo.py
  6. 12
      repod.py
  7. 76
      utils.py
  8. 2
      yamlparse.py

9
.gitignore vendored

@ -1,6 +1,9 @@
.vscode/*
__pycache__/
*.py[cod]
buildbot.log*
buildbot.sql
test
/buildbot.log*
/repo.log*
/repod.log*
/test
/pkgbuilds
/repo/

86
buildbot.py

@ -8,28 +8,35 @@ from multiprocessing.connection import Listener
from time import time, sleep
import os
from pathlib import Path
from shutil import rmtree
from subprocess import CalledProcessError
from shared_vars import PKG_SUFFIX, PKG_SIG_SUFFIX
from config import ARCHS, BUILD_ARCHS, BUILD_ARCH_MAPPING, \
MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD, \
PKGBUILD_DIR, MAKEPKG_PKGLIST_CMD, MAKEPKG_UPD_CMD, \
MAKEPKG_MAKE_CMD, MAKEPKG_MAKE_CMD_CLEAN
MAKEPKG_MAKE_CMD, MAKEPKG_MAKE_CMD_CLEAN, \
GPG_SIGN_CMD, GPG_VERIFY_CMD, UPDATE_INTERVAL
from utils import print_exc_plus, background, \
bash, get_pkg_details_from_name, vercmp, \
nspawn_shell, mon_nspawn_shell, get_arch_from_pkgbuild
nspawn_shell, mon_nspawn_shell, get_arch_from_pkgbuild, \
configure_logger
from client import run as rrun
import json
from yamlparse import load_all as load_all_yaml
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
abspath=os.path.abspath(__file__)
abspath=os.path.dirname(abspath)
os.chdir(abspath)
logger = logging.getLogger('buildbot')
configure_logger(logger, logfile='buildbot.log', rotate_size=1024*1024*10)
REPO_ROOT = Path(PKGBUILD_DIR)
class Job:
@ -47,6 +54,7 @@ class jobsManager:
self.__uploadjobs = list()
self.__curr_job = None
self.pkgconfigs = load_all_yaml()
self.last_updatecheck = 0.0
def _new_buildjob(self, job):
assert type(job) is Job
job_to_remove = list()
@ -80,27 +88,33 @@ class jobsManager:
cwd = REPO_ROOT / job.pkgconfig.dirname
logger.info('makepkg in %s %s', job.pkgconfig.dirname, job.arch)
return mon_nspawn_shell(arch=job.arch, cwd=cwd, cmdline=mkcmd,
logfile = cwd / 'buildbot.log.update',
logfile = cwd / 'buildbot.log.makepkg',
short_return = True)
def __clean(self, job, remove_pkg=False):
    '''
    Clean the build directory of a job.

    Always removes the nspawn build residue ('pkg' and 'src' dirs);
    when remove_pkg is True also deletes built package files
    (*PKG_SUFFIX / *PKG_SIG_SUFFIX) left in the directory.
    '''
    cwd = REPO_ROOT / job.pkgconfig.dirname
    logger.info('cleaning build dir for %s, %sremoving pkg',
                job.pkgconfig.dirname, '' if remove_pkg else 'not ')
    # Snapshot the dir listing first: we delete entries while iterating.
    # NOTE: the original code re-bound `fpath = Path()` inside the loop,
    # which replaced every entry with an empty Path('.') and made the
    # whole cleanup a no-op; that line is removed here.
    for fpath in list(cwd.iterdir()):
        if fpath.is_dir() and fpath.name in ('pkg', 'src'):
            rmtree(fpath)
        elif remove_pkg and fpath.is_file() and \
                (fpath.name.endswith(PKG_SUFFIX) or
                 fpath.name.endswith(PKG_SIG_SUFFIX)):
            fpath.unlink()
def __sign(self, job):
'''
wip
'''
cwd = REPO_ROOT / job.pkgconfig.dirname
print(nspawn_shell(job.arch, 'ls -l', cwd=cwd))
#nspawn_shell(job.arch, 'rm -rf src pkg', cwd=cwd)
for fpath in cwd.iterdir():
if fpath.name.endswith(PKG_SUFFIX):
bash(f'{GPG_SIGN_CMD} {fpath.name}', cwd=cwd)
def __upload(self, job):
'''
wip
'''
cwd = REPO_ROOT / job.pkgconfig.dirname
print(nspawn_shell(job.arch, 'ls -l', cwd=cwd))
print(bash('ls -l', cwd=cwd))
#nspawn_shell(job.arch, 'rm -rf src pkg', cwd=cwd)
#rrun()
def tick(self):
'''
check for updates,
@ -109,6 +123,8 @@ class jobsManager:
'''
if not self.__buildjobs:
# This part check for updates
if time() - self.last_updatecheck <= UPDATE_INTERVAL:
sleep(60)
updates = updmgr.check_update()
for update in updates:
(pkgconfig, ver, buildarchs) = update
@ -118,15 +134,18 @@ class jobsManager:
self._new_buildjob(newjob)
else:
# This part does the job
for job in self.__buildjobs:
cwd = REPO_ROOT / job.pkgconfig.dirname
if job.multiarch:
# wip
pass
else:
self.__makepkg(job)
self.__sign(job)
self.__upload(job)
job = self.__get_job()
cwd = REPO_ROOT / job.pkgconfig.dirname
if job.multiarch:
# wip
pass
else:
self.__makepkg(job)
self.__sign(job)
self.__upload(job)
if job.pkgconfig.cleanbuild:
self.__clean(job ,remove_pkg=True)
self.__finish_job(job.pkgconfig.dirname)
jobsmgr = jobsManager()
class updateManager:
@ -186,11 +205,20 @@ class updateManager:
if pkg.type in ('git', 'manual'):
ver = self.__get_new_ver(pkg.dirname, arch)
oldver = self.__pkgvers.get(pkg.dirname, None)
if oldver is None or vercmp(ver, oldver) == 1:
has_update = False
if oldver:
res = vercmp(ver, oldver)
if res == 1:
has_update = True
elif res == -1:
logger.warning(f'package: {pkg.dirname} downgrade attempted')
elif res == 0:
logger.info(f'package: {pkg.dirname} is up to date')
else:
has_update = True
if has_update:
self.__pkgvers[pkg.dirname] = ver
updates.append((pkg, ver, buildarchs))
else:
logger.warning(f'package: {pkg.dirname} downgrade attempted')
else:
logger.warning(f'unknown package type: {pkg.type}')
self._save()
@ -206,7 +234,7 @@ def __main():
jobsmgr.tick()
except:
print_exc_plus()
sleep(60)
sleep(1)

10
client.py

@ -10,9 +10,9 @@ from time import sleep
from config import REPOD_BIND_ADDRESS, REPOD_BIND_PASSWD
from utils import print_exc_plus
logger = logging.getLogger(__name__)
logger = logging.getLogger(f'buildbot.{__name__}')
def ping(funcname, args=list(), kwargs=dict(), retries=0):
def run(funcname, args=list(), kwargs=dict(), retries=0):
try:
logger.info('client: %s %s %s',funcname, args, kwargs)
with Client(REPOD_BIND_ADDRESS, authkey=REPOD_BIND_PASSWD) as conn:
@ -35,6 +35,6 @@ def ping(funcname, args=list(), kwargs=dict(), retries=0):
if __name__ == '__main__':
import argparse
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger.info('result: %s', ping('push_files', args=('aaa', 1)))
logger.info('result: %s', ping('add_files', args=('aaa',)))
#logger.info('result: %s', ping('update'))
logger.info('result: %s', run('push_files', args=('aaa', 1)))
logger.info('result: %s', run('add_files', args=('aaa',)))
#logger.info('result: %s', run('update'))

1
config.py

@ -39,6 +39,7 @@ GPG_SIGN_CMD = (f'gpg --default-key {GPG_KEY} --no-armor'
#### config for buildbot.py
UPDATE_INTERVAL = 60 # mins
MASTER_BIND_ADDRESS = ('localhost', 7011)
MASTER_BIND_PASSWD = b'mypassword'
PKGBUILD_DIR = 'pkgbuilds'

6
repo.py

@ -22,7 +22,7 @@ from pathlib import Path
from shutil import copyfile as __copy_file
import logging
from utils import bash, Pkg, get_pkg_details_from_name, \
print_exc_plus
print_exc_plus, configure_logger
from time import time
from config import REPO_NAME, PKG_COMPRESSION, ARCHS, REPO_CMD, \
@ -34,7 +34,7 @@ repocwd = Path(abspath).parent / 'repo'
repocwd.mkdir(mode=0o755, exist_ok=True)
os.chdir(repocwd)
logger = logging.getLogger(__name__)
logger = logging.getLogger(f'buildbot.{__name__}')
def symlink(dst, src, exist_ok=True):
@ -313,7 +313,7 @@ def _remove(pkgnames, target_archs=[a for a in ARCHS if a != 'any']):
return True
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
configure_logger(logger, logfile='repo.log', rotate_size=1024*1024*10)
import argparse
try:
parser = argparse.ArgumentParser(description='Automatic management tool for an arch repo.')

12
repod.py

@ -8,8 +8,7 @@ from multiprocessing.connection import Listener
from time import time, sleep
from pathlib import Path
from subprocess import CalledProcessError
from utils import print_exc_plus
import os
from config import REPOD_BIND_ADDRESS, REPOD_BIND_PASSWD, REPO_PUSH_BANDWIDTH, \
GPG_VERIFY_CMD
@ -22,11 +21,14 @@ from repo import _clean_archive as clean, \
_remove as remove, \
_update as update
from utils import bash
from utils import bash, configure_logger, print_exc_plus
abspath=os.path.abspath(__file__)
abspath=os.path.dirname(abspath)
os.chdir(abspath)
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
logger = logging.getLogger(f'buildbot.{__name__}')
configure_logger(logger, logfile='repod.log', rotate_size=1024*1024*10)
class pushFm:
def __init__(self):

76
utils.py

@ -1,8 +1,8 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import subprocess
import logging
from time import time
import logging, logging.handlers
from time import time, sleep
import re
from threading import Thread, Lock
from pathlib import Path
@ -13,7 +13,7 @@ import traceback
from config import PKG_COMPRESSION, SHELL_ARCH_ARM64, SHELL_ARCH_X64, \
CONTAINER_BUILDBOT_ROOT, ARCHS
logger = logging.getLogger(name='utils')
logger = logging.getLogger(f'buildbot.{__name__}')
def background(func):
def wrapped(*args, **kwargs):
@ -40,9 +40,9 @@ def nspawn_shell(arch, cmdline, cwd=None, **kwargs):
else:
cwd = root
if arch in ('aarch64', 'arm64'):
return bash(SHELL_ARCH_ARM64.format(command=f'cd \"{cwd}\" || exit 1; {cmdline}'))
return bash(SHELL_ARCH_ARM64.format(command=f'cd \"{cwd}\" || exit 1; {cmdline}'), **kwargs)
elif arch in ('x64', 'x86', 'x86_64'):
return bash(SHELL_ARCH_X64.format(command=f'cd \"{cwd}\" || exit 1; {cmdline}'))
return bash(SHELL_ARCH_X64.format(command=f'cd \"{cwd}\" || exit 1; {cmdline}'), **kwargs)
raise TypeError('nspawn_shell: wrong arch')
def mon_nspawn_shell(arch, cmdline, cwd, minutes=30, **kwargs):
@ -57,18 +57,20 @@ def run_cmd(cmd, cwd=None, keepalive=False, KEEPALIVE_TIMEOUT=30, RUN_CMD_TIMEOU
stopped = False
last_read = [int(time()), ""]
class Output(list):
def append(self, mystring):
if not self.__short_return:
super().append(mystring)
if self.__file and type(mystring) is str:
self.__file.write(mystring)
def __enter__(self, logfile=None, short_return=False):
def __init__(self, logfile=None, short_return=False):
super().__init__()
self.__short_return = short_return
if logfile:
assert issubclass(type(logfile), os.PathLike)
self.__file = open(logfile, 'w')
else:
self.__file = None
def append(self, mystring):
if not self.__short_return:
super().append(mystring)
if self.__file and type(mystring) is str:
self.__file.write(mystring)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if self.__file:
@ -122,6 +124,8 @@ def run_cmd(cmd, cwd=None, keepalive=False, KEEPALIVE_TIMEOUT=30, RUN_CMD_TIMEOU
p.kill()
break
else:
# sometimes the process ended too quickly and stdout is not captured
sleep(0.1)
stopped = True
break
code = p.returncode
@ -198,11 +202,15 @@ def get_arch_from_pkgbuild(fpath):
raise TypeError('Unexpected PKGBUILD')
def print_exc_plus():
    # Emit the extended traceback (stack + per-frame locals) at the custom
    # level 49, which configure_logger names 'Exception' and formats with
    # an extra "Exception caught." header line.
    logger.log(49, format_exc_plus())
def format_exc_plus():
"""
Print the usual traceback information, followed by a listing of all the
local variables in each frame.
from Python Cookbook by David Ascher, Alex Martelli
"""
ret = str()
tb = sys.exc_info()[2]
while True:
if not tb.tb_next:
@ -214,20 +222,50 @@ def print_exc_plus():
stack.append(f)
f = f.f_back
stack.reverse()
traceback.print_exc()
print("Locals by frame, innermost last")
ret += traceback.format_exc()
ret += "\nLocals by frame, innermost last\n"
for frame in stack:
print("Frame %s in %s at line %s" % (frame.f_code.co_name,
frame.f_code.co_filename,
frame.f_lineno))
ret += "Frame %s in %s at line %s\n" % (frame.f_code.co_name,
frame.f_code.co_filename,
frame.f_lineno)
for key, value in frame.f_locals.items( ):
print("\t%20s = " % key, end=' ')
ret += "\t%20s = " % key
# We have to be VERY careful not to cause a new error in our error
# printer! Calling str( ) on an unknown object could cause an
# error we don't want, so we must use try/except to catch it --
# we can't stop it from happening, but we can and should
# stop it from propagating if it does happen!
try:
print(value)
ret += str(value)
except:
print("<ERROR WHILE PRINTING VALUE>")
ret += "<ERROR WHILE PRINTING VALUE>"
ret += '\n'
return ret
def configure_logger(logger, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                     level=logging.INFO, logfile=None, flevel=logging.INFO, rotate_size=None):
    '''
    Attach console (and optionally file) output to *logger*.

    Args:
        logger: the logging.Logger to configure.
        format: log line format string shared by all handlers.
        level: level for the logger itself and the console handler.
        logfile: path of the log file; no file handler is added when falsy.
        flevel: level for the file handler.
        rotate_size: when an int >= 1000, a RotatingFileHandler is used
            with this maxBytes and 8 backups; otherwise a plain FileHandler.

    Also registers the custom level 49 as 'Exception' (used by
    print_exc_plus). NOTE(review): calling this twice on the same logger
    stacks duplicate handlers — callers appear to call it once per process.
    '''
    class ExceptionFormatter(logging.Formatter):
        # Prepend a header to records logged at the custom level 49.
        def format(self, record):
            if record.levelno != 49:
                return super().format(record)
            # The same LogRecord is formatted once per handler; the
            # original code mutated record.msg in place, so with both a
            # file and a console handler the header was prepended twice.
            # Save and restore the message instead.
            original_msg = record.msg
            record.msg = 'Exception caught.\nPrinting stack traceback\n' + original_msg
            try:
                return super().format(record)
            finally:
                record.msg = original_msg
    logger.setLevel(level)
    formatter = ExceptionFormatter(fmt=format)
    logging.addLevelName(49, 'Exception')
    # create file handler
    if logfile:
        assert type(logfile) is str
        if rotate_size and type(rotate_size) is int and rotate_size >= 1000:
            fh = logging.handlers.RotatingFileHandler(logfile, mode='a',
                                                      maxBytes=rotate_size,
                                                      backupCount=8)
        else:
            fh = logging.FileHandler(logfile)
        fh.setLevel(flevel)
        fh.setFormatter(formatter)
        logger.addHandler(fh)
    # create console handler
    ch = logging.StreamHandler()
    ch.setLevel(level)
    ch.setFormatter(formatter)
    logger.addHandler(ch)

2
yamlparse.py

@ -9,7 +9,7 @@ from utils import print_exc_plus
from config import PKGBUILD_DIR, AUTOBUILD_FNAME
logger = logging.getLogger(__name__)
logger = logging.getLogger(f'buildbot.{__name__}')
abspath=os.path.abspath(__file__)
abspath=os.path.dirname(abspath)

Loading…
Cancel
Save