From 3864a17b60ebb39d619034061e5a0d8bd803405e Mon Sep 17 00:00:00 2001
From: Jerry
Date: Wed, 3 Apr 2019 12:34:11 +0800
Subject: [PATCH] repo.py: finished update and cmdline args

---
 config.py      |   3 ++
 repo.py        | 105 ++++++++++++++++++++++++++++++++++++++++---------
 shared_vars.py |   7 ++++
 utils.py       |  37 +++++++++++++++++
 4 files changed, 133 insertions(+), 19 deletions(-)
 create mode 100644 shared_vars.py

diff --git a/config.py b/config.py
index b730c22..5979d94 100644
--- a/config.py
+++ b/config.py
@@ -1,3 +1,6 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
 # config
 REPO_NAME='jerryxiao'
 PKG_COMPRESSION='xz'
diff --git a/repo.py b/repo.py
index e5482d2..502a409 100755
--- a/repo.py
+++ b/repo.py
@@ -19,10 +19,12 @@
 import os
 from pathlib import Path
 import logging
-from utils import bash, Pkg, get_pkg_details_from_name
+from utils import bash, Pkg, get_pkg_details_from_name, print_exc_plus
 from time import time
+import argparse
 
 from config import REPO_NAME, PKG_COMPRESSION, ARCHS, REPO_CMD
+from shared_vars import PKG_SUFFIX, PKG_SIG_SUFFIX
 
 abspath = os.path.abspath(__file__)
 repocwd = Path(abspath).parent / 'repo'
@@ -54,17 +56,23 @@ def repo_add(fpaths):
     assert type(fpaths) is list
     for fpath in fpaths:
         assert issubclass(type(fpath), os.PathLike) and \
-            fpath.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}')
+            fpath.name.endswith(PKG_SUFFIX)
     dbpath = fpath.parent / f'{REPO_NAME}.db.tar.gz'
-    return bash(f'{REPO_CMD} {dbpath} {" ".join([str(fpath) for fpath in fpaths])}')
+    return bash(f'{REPO_CMD} {dbpath} {" ".join([str(fpath) for fpath in fpaths])}', RUN_CMD_TIMEOUT=5*60)
 
 def throw_away(fpath):
     assert issubclass(type(fpath), os.PathLike)
     newPath = Path(abspath).parent / 'recycled' / f"{fpath.name}_{time()}"
     assert not newPath.exists()
+    logger.warning('Throwing away %s', fpath)
     fpath.rename(newPath)
 
-def _check_repo():
+
+def _regenerate(target_archs=ARCHS, just_symlink=False):
+    if just_symlink:
+        logger.info('starting regenerate symlinks %s', target_archs)
+    else:
+        logger.info('starting regenerate %s', target_archs)
     rn = REPO_NAME
     repo_files = (f"{rn}.db {rn}.db.tar.gz {rn}.db.tar.gz.old "
                   f"{rn}.files {rn}.files.tar.gz {rn}.files.tar.gz.old")
@@ -75,23 +83,25 @@
     basedir = Path('www') / 'any'
     if basedir.exists():
         for pkgfile in basedir.iterdir():
-            if pkgfile.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}') and \
+            if pkgfile.name.endswith(PKG_SUFFIX) and \
                get_pkg_details_from_name(pkgfile.name).arch == 'any':
                 sigfile = Path(f"{str(pkgfile)}.sig")
                 if sigfile.exists():
                     logger.info(f'Creating symlink for {pkgfile}, {sigfile}')
-                    for arch in ARCHS:
+                    for arch in target_archs:
                         if arch == 'any':
                             continue
                         symlink(pkgfile.parent / '..' / arch / pkgfile.name, f'../any/{pkgfile.name}')
                         symlink(sigfile.parent / '..' / arch / sigfile.name, f'../any/{sigfile.name}')
     else:
         logger.error(f'{arch} dir does not exist!')
+    if just_symlink:
+        return
     # run repo_add
-    for arch in ARCHS:
+    for arch in target_archs:
         basedir = Path('www') / arch
         repo_files_count = list()
-        pkg_to_add = list()
+        pkgs_to_add = list()
         if not basedir.exists():
             logger.error(f'{arch} dir does not exist!')
             continue
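
[Editor's note: symlink() as called in the hunk above is a helper defined
elsewhere in repo.py, outside the hunks of this patch. A minimal sketch of
the assumed behavior (create a relative symlink, tolerate one that already
exists; the exist_ok parameter is hypothetical):

    def symlink(dst, src, exist_ok=True):
        # dst is a Path, src is the link target relative to dst's directory,
        # e.g. www/x86_64/foo-1.0-1-any.pkg.tar.xz -> ../any/foo-1.0-1-any.pkg.tar.xz
        assert issubclass(type(dst), os.PathLike) and type(src) is str
        try:
            dst.symlink_to(src)
        except FileExistsError:
            if not exist_ok:
                raise
]
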
@@ -100,12 +110,12 @@
             if pkgfile.name in repo_files:
                 repo_files_count.append(pkgfile.name)
                 continue
-            if pkgfile.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}.sig'):
+            if pkgfile.name.endswith(PKG_SIG_SUFFIX):
                 if not Path(str(pkgfile)[:-4]).exists() and pkgfile.exists():
                     logger.warning(f"{pkgfile} has no package!")
                     throw_away(pkgfile)
                 continue
-            elif pkgfile.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}'):
+            elif pkgfile.name.endswith(PKG_SUFFIX):
                 sigfile = Path(f"{str(pkgfile)}.sig")
                 if not sigfile.exists():
                     logger.warning(f"{pkgfile} has no signature!")
                     throw_away(pkgfile)
                     continue
@@ -114,23 +124,80 @@
                 realarch = get_pkg_details_from_name(pkgfile.name).arch
                 if realarch != 'any' and realarch != arch:
                     newpath = pkgfile.parent / '..' / realarch / pkgfile.name
-                    assert not newpath.exists()
-                    pkgfile.rename(newpath)
-                    newSigpath = pkgfile.parent / '..' / realarch / f"{pkgfile.name}.sig"
-                    assert not newSigpath.exists()
-                    sigfile.rename(newSigpath)
+                    newSigpath = Path(f'{str(newpath)}.sig')
                     logger.info(f'Moving {pkgfile} to {newpath}, {sigfile} to {newSigpath}')
-                    pkg_to_add.append(newpath)
+                    assert not (newpath.exists() or newSigpath.exists())
+                    pkgfile.rename(newpath)
+                    sigfile.rename(newSigpath)
+                    pkgs_to_add.append(newpath)
                 else:
-                    pkg_to_add.append(pkgfile)
+                    pkgs_to_add.append(pkgfile)
             else:
                 logger.warning(f"{pkgfile} is garbage!")
                 throw_away(pkgfile)
-        if pkg_to_add:
-            logger.info("repo-add: %s", repo_add(pkg_to_add))
+        if pkgs_to_add:
+            logger.info("repo-add: %s", repo_add(pkgs_to_add))
         else:
             logger.warning('repo-add: Nothing to do in %s', arch)
         for rfile in repo_files_essential:
             if rfile not in repo_files_count:
                 logger.error(f'{rfile} does not exist in {arch}!')
 
+def _update():
+    logger.info('starting update')
+    update_path = Path('updates')
+    assert update_path.exists()
+    pkgs_to_add = dict()
+    for pkg_to_add in update_path.iterdir():
+        if pkg_to_add.is_dir():
+            continue
+        else:
+            if pkg_to_add.name.endswith(PKG_SUFFIX):
+                sigfile = Path(f"{str(pkg_to_add)}.sig")
+                if sigfile.exists():
+                    arch = get_pkg_details_from_name(pkg_to_add.name).arch
+                    pkg_nlocation = pkg_to_add.parent / '..' / 'www' / arch / pkg_to_add.name
+                    sig_nlocation = Path(f'{str(pkg_nlocation)}.sig')
+                    logger.info(f'Moving {pkg_to_add} to {pkg_nlocation}, {sigfile} to {sig_nlocation}')
+                    assert not (pkg_nlocation.exists() or sig_nlocation.exists())
+                    pkg_to_add.rename(pkg_nlocation)
+                    sigfile.rename(sig_nlocation)
+                    if arch == 'any':
+                        for arch in ARCHS:
+                            pkg_nlocation = pkg_to_add.parent / '..' / 'www' / arch / pkg_to_add.name
+                            pkgs_to_add.setdefault(arch, list()).append(pkg_nlocation)
+                    else:
+                        pkgs_to_add.setdefault(arch, list()).append(pkg_nlocation)
+                else:
+                    logger.warning(f'{pkg_to_add} has no signature!')
+                    throw_away(pkg_to_add)
+    if 'any' in pkgs_to_add:
+        _regenerate(target_archs=ARCHS, just_symlink=True)
+    for arch in pkgs_to_add:
+        logger.info("repo-add: %s", repo_add(pkgs_to_add[arch]))
+    # remove any other things
+    for other in update_path.iterdir():
+        if other.is_dir():
+            continue
+        else:
+            logger.warning(f"{other} is garbage!")
+            throw_away(other)
+
+if __name__ == '__main__':
+    try:
+        parser = argparse.ArgumentParser(description='Automatic management tool for an arch repo.')
+        parser.add_argument('-a', '--arch', nargs='?', default='all', help='arch to regenerate, split by comma, defaults to all')
+        parser.add_argument('-u', '--update', action='store_true', help='get updates from updates dir, push them to the repo')
+        parser.add_argument('-r', '--regenerate', action='store_true', help='regenerate the whole package database')
+        args = parser.parse_args()
+        arch = args.arch
+        arch = arch.split(',') if arch != 'all' else ARCHS
+        assert not [None for a in arch if a not in ARCHS]  # ensure arch is a subset of ARCHS
+        if args.update:
+            _update()
+        elif args.regenerate:
+            _regenerate(target_archs=arch)
+        else:
+            parser.error("Please choose an action")
+    except Exception as err:
+        print_exc_plus()
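
[Editor's note: given the argparse setup above, the intended command lines
are presumably along these lines (hypothetical examples; repo.py is
executable, mode 100755, and expects updates/ and www/ to resolve from its
working directory):

    ./repo.py -u                 # ingest signed packages from updates/
    ./repo.py -r                 # regenerate the database for all archs
    ./repo.py -r -a x86_64,any   # regenerate only the listed archs

Note that -a is only honored together with -r; _update() derives each
package's arch from its file name instead.]
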
diff --git a/shared_vars.py b/shared_vars.py
new file mode 100644
index 0000000..b102ed0
--- /dev/null
+++ b/shared_vars.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+from config import PKG_COMPRESSION
+
+PKG_SUFFIX = f'.pkg.tar.{PKG_COMPRESSION}'
+PKG_SIG_SUFFIX = f'.pkg.tar.{PKG_COMPRESSION}.sig'
diff --git a/utils.py b/utils.py
index 4fe4ba5..bddb9ef 100644
--- a/utils.py
+++ b/utils.py
@@ -5,6 +5,8 @@
 import logging
 from time import time
 import re
 from threading import Thread, Lock
+import sys
+import traceback
 
 from config import PKG_COMPRESSION
 
@@ -106,3 +108,38 @@ def get_pkg_details_from_name(name):
     (pkgname, pkgver, pkgrel, arch) = m.groups()
     return Pkg(pkgname, pkgver, pkgrel, arch)
 
+def print_exc_plus():
+    """
+    Print the usual traceback information, followed by a listing of all the
+    local variables in each frame.
+    from Python Cookbook by David Ascher, Alex Martelli
+    """
+    tb = sys.exc_info()[2]
+    while True:
+        if not tb.tb_next:
+            break
+        tb = tb.tb_next
+    stack = []
+    f = tb.tb_frame
+    while f:
+        stack.append(f)
+        f = f.f_back
+    stack.reverse()
+    traceback.print_exc()
+    print("Locals by frame, innermost last")
+    for frame in stack:
+        print("Frame %s in %s at line %s" % (frame.f_code.co_name,
+                                             frame.f_code.co_filename,
+                                             frame.f_lineno))
+        for key, value in frame.f_locals.items():
+            print("\t%20s = " % key, end=' ')
+            # We have to be VERY careful not to cause a new error in our error
+            # printer! Calling str() on an unknown object could cause an
+            # error we don't want, so we must use try/except to catch it --
+            # we can't stop it from happening, but we can and should
+            # stop it from propagating if it does happen!
+            try:
+                print(value)
+            except:
+                print("")
+
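
[Editor's note: print_exc_plus() reads sys.exc_info(), so it must be called
from inside an except block, as the __main__ handler in repo.py does above.
A minimal hypothetical usage example:

    from utils import print_exc_plus

    def risky(x):
        y = 1 / x  # raises ZeroDivisionError when x == 0
        return y

    try:
        risky(0)
    except Exception:
        # prints the traceback, then each frame's locals (here: x)
        print_exc_plus()
]
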