repo.py: finished update and cmdline args

This commit is contained in:
JerryXiao 2019-04-03 12:34:11 +08:00
parent 1397bb4391
commit 3864a17b60
Signed by: Jerry
GPG key ID: 9D9CE43650FF2BAA
4 changed files with 133 additions and 19 deletions

View file

@ -1,3 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# config # config
REPO_NAME='jerryxiao' REPO_NAME='jerryxiao'
PKG_COMPRESSION='xz' PKG_COMPRESSION='xz'

105
repo.py
View file

@ -19,10 +19,12 @@
import os import os
from pathlib import Path from pathlib import Path
import logging import logging
from utils import bash, Pkg, get_pkg_details_from_name from utils import bash, Pkg, get_pkg_details_from_name, print_exc_plus
from time import time from time import time
import argparse
from config import REPO_NAME, PKG_COMPRESSION, ARCHS, REPO_CMD from config import REPO_NAME, PKG_COMPRESSION, ARCHS, REPO_CMD
from shared_vars import PKG_SUFFIX, PKG_SIG_SUFFIX
abspath = os.path.abspath(__file__) abspath = os.path.abspath(__file__)
repocwd = Path(abspath).parent / 'repo' repocwd = Path(abspath).parent / 'repo'
@ -54,17 +56,23 @@ def repo_add(fpaths):
assert type(fpaths) is list assert type(fpaths) is list
for fpath in fpaths: for fpath in fpaths:
assert issubclass(type(fpath), os.PathLike) and \ assert issubclass(type(fpath), os.PathLike) and \
fpath.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}') fpath.name.endswith(PKG_SUFFIX)
dbpath = fpath.parent / f'{REPO_NAME}.db.tar.gz' dbpath = fpath.parent / f'{REPO_NAME}.db.tar.gz'
return bash(f'{REPO_CMD} {dbpath} {" ".join([str(fpath) for fpath in fpaths])}') return bash(f'{REPO_CMD} {dbpath} {" ".join([str(fpath) for fpath in fpaths])}', RUN_CMD_TIMEOUT=5*60)
def throw_away(fpath): def throw_away(fpath):
assert issubclass(type(fpath), os.PathLike) assert issubclass(type(fpath), os.PathLike)
newPath = Path(abspath).parent / 'recycled' / f"{fpath.name}_{time()}" newPath = Path(abspath).parent / 'recycled' / f"{fpath.name}_{time()}"
assert not newPath.exists() assert not newPath.exists()
logger.warning('Throwing away %s', fpath)
fpath.rename(newPath) fpath.rename(newPath)
def _check_repo():
def _regenerate(target_archs=ARCHS, just_symlink=False):
if just_symlink:
logger.info('starting regenerate symlinks %s', target_archs)
else:
logger.info('starting regenerate %s', target_archs)
rn = REPO_NAME rn = REPO_NAME
repo_files = (f"{rn}.db {rn}.db.tar.gz {rn}.db.tar.gz.old " repo_files = (f"{rn}.db {rn}.db.tar.gz {rn}.db.tar.gz.old "
f"{rn}.files {rn}.files.tar.gz {rn}.files.tar.gz.old") f"{rn}.files {rn}.files.tar.gz {rn}.files.tar.gz.old")
@ -75,23 +83,25 @@ def _check_repo():
basedir = Path('www') / 'any' basedir = Path('www') / 'any'
if basedir.exists(): if basedir.exists():
for pkgfile in basedir.iterdir(): for pkgfile in basedir.iterdir():
if pkgfile.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}') and \ if pkgfile.name.endswith(PKG_SUFFIX) and \
get_pkg_details_from_name(pkgfile.name).arch == 'any': get_pkg_details_from_name(pkgfile.name).arch == 'any':
sigfile = Path(f"{str(pkgfile)}.sig") sigfile = Path(f"{str(pkgfile)}.sig")
if sigfile.exists(): if sigfile.exists():
logger.info(f'Creating symlink for {pkgfile}, {sigfile}') logger.info(f'Creating symlink for {pkgfile}, {sigfile}')
for arch in ARCHS: for arch in target_archs:
if arch == 'any': if arch == 'any':
continue continue
symlink(pkgfile.parent / '..' / arch / pkgfile.name, f'../any/{pkgfile.name}') symlink(pkgfile.parent / '..' / arch / pkgfile.name, f'../any/{pkgfile.name}')
symlink(sigfile.parent / '..' / arch / sigfile.name, f'../any/{sigfile.name}') symlink(sigfile.parent / '..' / arch / sigfile.name, f'../any/{sigfile.name}')
else: else:
logger.error(f'{arch} dir does not exist!') logger.error(f'{arch} dir does not exist!')
if just_symlink:
return
# run repo_add # run repo_add
for arch in ARCHS: for arch in target_archs:
basedir = Path('www') / arch basedir = Path('www') / arch
repo_files_count = list() repo_files_count = list()
pkg_to_add = list() pkgs_to_add = list()
if not basedir.exists(): if not basedir.exists():
logger.error(f'{arch} dir does not exist!') logger.error(f'{arch} dir does not exist!')
continue continue
@ -100,12 +110,12 @@ def _check_repo():
if pkgfile.name in repo_files: if pkgfile.name in repo_files:
repo_files_count.append(pkgfile.name) repo_files_count.append(pkgfile.name)
continue continue
if pkgfile.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}.sig'): if pkgfile.name.endswith(PKG_SIG_SUFFIX):
if not Path(str(pkgfile)[:-4]).exists() and pkgfile.exists(): if not Path(str(pkgfile)[:-4]).exists() and pkgfile.exists():
logger.warning(f"{pkgfile} has no package!") logger.warning(f"{pkgfile} has no package!")
throw_away(pkgfile) throw_away(pkgfile)
continue continue
elif pkgfile.name.endswith(f'.pkg.tar.{PKG_COMPRESSION}'): elif pkgfile.name.endswith(PKG_SUFFIX):
sigfile = Path(f"{str(pkgfile)}.sig") sigfile = Path(f"{str(pkgfile)}.sig")
if not sigfile.exists(): if not sigfile.exists():
logger.warning(f"{pkgfile} has no signature!") logger.warning(f"{pkgfile} has no signature!")
@ -114,23 +124,80 @@ def _check_repo():
realarch = get_pkg_details_from_name(pkgfile.name).arch realarch = get_pkg_details_from_name(pkgfile.name).arch
if realarch != 'any' and realarch != arch: if realarch != 'any' and realarch != arch:
newpath = pkgfile.parent / '..' / realarch / pkgfile.name newpath = pkgfile.parent / '..' / realarch / pkgfile.name
assert not newpath.exists() newSigpath= Path(f'{str(newpath)}.sig')
pkgfile.rename(newpath)
newSigpath = pkgfile.parent / '..' / realarch / f"{pkgfile.name}.sig"
assert not newSigpath.exists()
sigfile.rename(newSigpath)
logger.info(f'Moving {pkgfile} to {newpath}, {sigfile} to {newSigpath}') logger.info(f'Moving {pkgfile} to {newpath}, {sigfile} to {newSigpath}')
pkg_to_add.append(newpath) assert not (newpath.exists() or newSigpath.exists())
pkgfile.rename(newpath)
sigfile.rename(newSigpath)
pkgs_to_add.append(newpath)
else: else:
pkg_to_add.append(pkgfile) pkgs_to_add.append(pkgfile)
else: else:
logger.warning(f"{pkgfile} is garbage!") logger.warning(f"{pkgfile} is garbage!")
throw_away(pkgfile) throw_away(pkgfile)
if pkg_to_add: if pkgs_to_add:
logger.info("repo-add: %s", repo_add(pkg_to_add)) logger.info("repo-add: %s", repo_add(pkgs_to_add))
else: else:
logger.warning('repo-add: Nothing to do in %s', arch) logger.warning('repo-add: Nothing to do in %s', arch)
for rfile in repo_files_essential: for rfile in repo_files_essential:
if rfile not in repo_files_count: if rfile not in repo_files_count:
logger.error(f'{rfile} does not exist in {arch}!') logger.error(f'{rfile} does not exist in {arch}!')
def _update():
    """Ingest freshly-built packages from the 'updates' directory.

    For every package file (with a matching .sig) found directly under
    'updates', move it and its signature into the per-arch 'www/<arch>'
    directory and queue it for repo-add.  Packages of arch 'any' are
    symlinked into every real arch via _regenerate(just_symlink=True) and
    queued for every arch database.  Unsigned packages and any other
    leftover files are thrown away (moved to the recycle dir).
    Raises AssertionError if 'updates' is missing or a destination path
    already exists.
    """
    logger.info('starting update')
    update_path = Path('updates')
    assert update_path.exists()
    pkgs_to_add = dict()  # arch -> list of package paths to repo-add
    for pkg_to_add in update_path.iterdir():
        if pkg_to_add.is_dir():
            continue
        if not pkg_to_add.name.endswith(PKG_SUFFIX):
            continue
        sigfile = Path(f"{str(pkg_to_add)}.sig")
        if not sigfile.exists():
            logger.warning(f'{pkg_to_add} has no signature!')
            throw_away(pkg_to_add)
            continue
        # BUGFIX: pass the file NAME, not the Path object — the parser
        # regex-matches the name string (see get_pkg_details_from_name usage
        # elsewhere in this file).
        arch = get_pkg_details_from_name(pkg_to_add.name).arch
        pkg_nlocation = pkg_to_add.parent / '..' / 'www' / arch / pkg_to_add.name
        sig_nlocation = Path(f'{str(pkg_nlocation)}.sig')
        logger.info(f'Moving {pkg_to_add} to {pkg_nlocation}, {sigfile} to {sig_nlocation}')
        assert not (pkg_nlocation.exists() or sig_nlocation.exists())
        pkg_to_add.rename(pkg_nlocation)
        sigfile.rename(sig_nlocation)
        if arch == 'any':
            # An 'any' package is served from every arch dir (via symlink),
            # so queue its per-arch path for each configured arch.
            for arch in ARCHS:
                pkg_nlocation = pkg_to_add.parent / '..' / 'www' / arch / pkg_to_add.name
                pkgs_to_add.setdefault(arch, list()).append(pkg_nlocation)
        else:
            pkgs_to_add.setdefault(arch, list()).append(pkg_nlocation)
    if 'any' in pkgs_to_add:
        # Create the ../any/<pkg> symlinks before repo-add references them.
        _regenerate(target_archs=ARCHS, just_symlink=True)
    for arch in pkgs_to_add:
        logger.info("repo-add: %s", repo_add(pkgs_to_add[arch]))
    # Anything still left in 'updates' is garbage — recycle it.
    for other in update_path.iterdir():
        if other.is_dir():
            continue
        logger.warning(f"{other} is garbage!")
        throw_away(other)
if __name__ == '__main__':
    # Command-line entry point: choose exactly one of --update / --regenerate.
    try:
        parser = argparse.ArgumentParser(description='Automatic management tool for an arch repo.')
        parser.add_argument('-a', '--arch', nargs='?', default='all', help='arch to regenerate, split by comma, defaults to all')
        parser.add_argument('-u', '--update', action='store_true', help='get updates from updates dir, push them to the repo')
        parser.add_argument('-r', '--regenerate', action='store_true', help='regenerate the whole package database')
        args = parser.parse_args()
        # 'all' expands to every configured arch; otherwise parse a
        # comma-separated list.
        arch = args.arch
        arch = arch.split(',') if arch != 'all' else ARCHS
        # Every requested arch must be a configured one (arch ⊆ ARCHS).
        assert all(a in ARCHS for a in arch)
        if args.update:
            _update()
        elif args.regenerate:
            _regenerate(target_archs=arch)
        else:
            parser.error("Please choose an action")
    except Exception:
        # Top-level boundary: dump the full traceback plus frame locals.
        print_exc_plus()

7
shared_vars.py Normal file
View file

@ -0,0 +1,7 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Filename-suffix constants shared across modules, derived from the
# configured package compression method.
from config import PKG_COMPRESSION
# Suffix of a package file, e.g. '.pkg.tar.xz' when PKG_COMPRESSION is 'xz'.
PKG_SUFFIX = f'.pkg.tar.{PKG_COMPRESSION}'
# Suffix of a detached package signature file, e.g. '.pkg.tar.xz.sig'.
PKG_SIG_SUFFIX = f'.pkg.tar.{PKG_COMPRESSION}.sig'

View file

@ -5,6 +5,8 @@ import logging
from time import time from time import time
import re import re
from threading import Thread, Lock from threading import Thread, Lock
import sys
import traceback
from config import PKG_COMPRESSION from config import PKG_COMPRESSION
@ -106,3 +108,38 @@ def get_pkg_details_from_name(name):
(pkgname, pkgver, pkgrel, arch) = m.groups() (pkgname, pkgver, pkgrel, arch) = m.groups()
return Pkg(pkgname, pkgver, pkgrel, arch) return Pkg(pkgname, pkgver, pkgrel, arch)
def print_exc_plus():
    """
    Print the usual traceback information, followed by a listing of all the
    local variables in each frame.
    from Python Cookbook by David Ascher, Alex Martelli
    """
    tb = sys.exc_info()[2]
    if tb is None:
        # Robustness fix: no exception is being handled — the original
        # would crash on tb.tb_next here. Nothing to report.
        return
    # Walk to the innermost frame of the traceback.
    while tb.tb_next:
        tb = tb.tb_next
    # Collect the frame chain, outermost first.
    stack = []
    f = tb.tb_frame
    while f:
        stack.append(f)
        f = f.f_back
    stack.reverse()
    traceback.print_exc()
    print("Locals by frame, innermost last")
    for frame in stack:
        print("Frame %s in %s at line %s" % (frame.f_code.co_name,
                                             frame.f_code.co_filename,
                                             frame.f_lineno))
        for key, value in frame.f_locals.items():
            print("\t%20s = " % key, end=' ')
            # We have to be VERY careful not to cause a new error in our error
            # printer! str() on an arbitrary object may itself raise, so catch
            # (narrowed from a bare except to Exception) and keep going.
            try:
                print(value)
            except Exception:
                print("<ERROR WHILE PRINTING VALUE>")