improve logging

This commit is contained in:
JerryXiao 2019-04-11 14:55:42 +08:00
parent d77f1929b3
commit 2123d2334d
Signed by: Jerry
GPG key ID: 9D9CE43650FF2BAA
2 changed files with 26 additions and 15 deletions

View file

@ -65,13 +65,19 @@ class jobsManager:
self.pkgconfigs = None self.pkgconfigs = None
self.last_updatecheck = 0.0 self.last_updatecheck = 0.0
self.idle = False self.idle = False
@property
def jobs(self):
return \
{
'build_jobs': self.__buildjobs,
'upload_jobs': self.__uploadjobs,
'current_job': self.__curr_job
}
def __repr__(self): def __repr__(self):
ret = "jobsManager(" ret = "jobsManager("
for myproperty in ( for myproperty in (
'_jobsManager__buildjobs', 'jobs', 'pkgconfigs',
'_jobsManager__uploadjobs', 'last_updatecheck', 'idle'
'_jobsManager__curr_job',
'pkgconfigs', 'last_updatecheck', 'idle'
): ):
ret += f'{myproperty}={getattr(self, myproperty, None)},' ret += f'{myproperty}={getattr(self, myproperty, None)},'
ret += ')' ret += ')'
@ -130,7 +136,7 @@ class jobsManager:
self.__buildjobs.append(job) self.__buildjobs.append(job)
def __get_job(self): def __get_job(self):
if self.__curr_job: if self.__curr_job:
logger.error(f'Job {self.__curr_job} failed') logger.error(f'Job {self.__curr_job} failed. Correct the error and rebuild')
self.__finish_job(self.__curr_job, force=True) self.__finish_job(self.__curr_job, force=True)
return self.__get_job() return self.__get_job()
jobs = self.__buildjobs jobs = self.__buildjobs
@ -169,14 +175,12 @@ class jobsManager:
fpath.name.endswith(PKG_SIG_SUFFIX)): fpath.name.endswith(PKG_SIG_SUFFIX)):
fpath.unlink() fpath.unlink()
def __sign(self, job): def __sign(self, job):
logger.info('signing in %s %s', job.pkgconfig.dirname, job.arch)
cwd = REPO_ROOT / job.pkgconfig.dirname cwd = REPO_ROOT / job.pkgconfig.dirname
for fpath in cwd.iterdir(): for fpath in cwd.iterdir():
if fpath.name.endswith(PKG_SUFFIX): if fpath.name.endswith(PKG_SUFFIX):
bash(f'{GPG_SIGN_CMD} {fpath.name}', cwd=cwd) bash(f'{GPG_SIGN_CMD} {fpath.name}', cwd=cwd)
def __upload(self, job): def __upload(self, job):
'''
wip
'''
suc = True suc = True
cwd = REPO_ROOT / job.pkgconfig.dirname cwd = REPO_ROOT / job.pkgconfig.dirname
f_to_upload = list() f_to_upload = list()
@ -352,7 +356,10 @@ updmgr = updateManager()
def info(): def info():
return (str(jobsmgr)) ret = str(jobsmgr)
ret += '\nhuman-readable:\n'
ret += "".join([f"{k} = {jobsmgr.jobs[k]}\n" for k in jobsmgr.jobs])
ret += f"idle: {jobsmgr.idle}"
def rebuild_package(pkgdirname, clean=False): def rebuild_package(pkgdirname, clean=False):
return jobsmgr.rebuild_package(pkgdirname, clean=clean) return jobsmgr.rebuild_package(pkgdirname, clean=clean)
@ -365,9 +372,9 @@ def clean_all():
def run(funcname, args=list(), kwargs=dict()): def run(funcname, args=list(), kwargs=dict()):
if funcname in ('info', 'rebuild_package', 'clean', 'clean_all'): if funcname in ('info', 'rebuild_package', 'clean', 'clean_all'):
logger.info('running: %s %s %s',funcname, args, kwargs) logger.debug('running: %s %s %s',funcname, args, kwargs)
ret = eval(funcname)(*args, **kwargs) ret = eval(funcname)(*args, **kwargs)
logger.info('done: %s %s',funcname, ret) logger.info('done: %s %s %s',funcname, args, kwargs)
return ret return ret
else: else:
logger.error('unexpected: %s %s %s',funcname, args, kwargs) logger.error('unexpected: %s %s %s',funcname, args, kwargs)
@ -384,7 +391,7 @@ def __main():
if type(myrecv) is list and len(myrecv) == 3: if type(myrecv) is list and len(myrecv) == 3:
(funcname, args, kwargs) = myrecv (funcname, args, kwargs) = myrecv
funcname = str(funcname) funcname = str(funcname)
logger.info('running: %s %s %s', funcname, args, kwargs) logger.debug('running: %s %s %s', funcname, args, kwargs)
conn.send(run(funcname, args=args, kwargs=kwargs)) conn.send(run(funcname, args=args, kwargs=kwargs))
except Exception: except Exception:
print_exc_plus() print_exc_plus()

View file

@ -26,7 +26,7 @@ def background(func):
def bash(cmdline, **kwargs): def bash(cmdline, **kwargs):
assert type(cmdline) is str assert type(cmdline) is str
logger.info(f'bash: {cmdline}, kwargs: {kwargs}') logger.debug(f'bash: {cmdline}, kwargs: {kwargs}')
return(run_cmd(['/bin/bash', '-x', '-e', '-c', cmdline], **kwargs)) return(run_cmd(['/bin/bash', '-x', '-e', '-c', cmdline], **kwargs))
def mon_bash(cmdline, seconds=60*30, **kwargs): def mon_bash(cmdline, seconds=60*30, **kwargs):
@ -40,7 +40,7 @@ def nspawn_shell(arch, cmdline, cwd=None, **kwargs):
cwd = root / cwd cwd = root / cwd
else: else:
cwd = root cwd = root
logger.info(f'bash_{arch}: {cmdline}, cwd: {cwd}, kwargs: {kwargs}') logger.debug(f'bash_{arch}: {cmdline}, cwd: {cwd}, kwargs: {kwargs}')
if arch in ('aarch64', 'arm64'): if arch in ('aarch64', 'arm64'):
command=f'{SHELL_ARM64_ADDITIONAL}; {SHELL_TRAP}; cd \'{cwd}\'; {cmdline}' command=f'{SHELL_ARM64_ADDITIONAL}; {SHELL_TRAP}; cd \'{cwd}\'; {cmdline}'
ret = run_cmd(SHELL_ARCH_ARM64 + [command,], **kwargs) ret = run_cmd(SHELL_ARCH_ARM64 + [command,], **kwargs)
@ -147,6 +147,10 @@ def run_cmd(cmd, cwd=None, keepalive=False, KEEPALIVE_TIMEOUT=30, RUN_CMD_TIMEOU
if code != 0: if code != 0:
raise subprocess.CalledProcessError(code, cmd, outstr) raise subprocess.CalledProcessError(code, cmd, outstr)
if logfile:
logger.debug('run_cmd: logfile written to %s', str(logfile))
else:
logger.debug('run_cmd: %s, ret = %s', cmd, outstr)
return outstr return outstr
@ -256,7 +260,7 @@ def format_exc_plus():
return ret return ret
def configure_logger(logger, format='%(asctime)s - %(name)-18s - %(levelname)s - %(message)s', def configure_logger(logger, format='%(asctime)s - %(name)-18s - %(levelname)s - %(message)s',
level=logging.INFO, logfile=None, flevel=logging.INFO, rotate_size=None): level=logging.INFO, logfile=None, flevel=logging.DEBUG, rotate_size=None):
class ExceptionFormatter(logging.Formatter): class ExceptionFormatter(logging.Formatter):
def format(self, record): def format(self, record):
if record.levelno == 49: if record.levelno == 49: