Mirror of https://github.com/archlinux-jerry/buildbot, synced 2024-11-22 13:00:40 +08:00

repo.py and repod.py: minor change

commit df266b9ace (parent 08cdf87c7b)
2 changed files with 48 additions and 45 deletions

repo.py (28 changes)
@@ -34,7 +34,6 @@ repocwd = Path(abspath).parent / 'repo'
 repocwd.mkdir(mode=0o755, exist_ok=True)
 os.chdir(repocwd)

-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)

@@ -78,7 +77,7 @@ def repo_remove(fpaths):
     dbpath = fpaths[0].parent / f'{REPO_NAME}.db.tar.gz'
     for fpath in fpaths:
         throw_away(fpath)
-        sigpath = fpath.parent / f'{str(fpath.name)}.sig'
+        sigpath = fpath.parent / f'{fpath.name}.sig'
         # there is a fscking problem that fscking pathlib always follow symlinks
         if sigpath.exists() or sigpath.is_symlink():
             throw_away(sigpath)
@@ -139,7 +138,7 @@ def filter_old_pkg(fpaths, keep_new=1, archive=False, recycle=False):
         new_pkgs += family
     for pkg in old_pkgs:
         fullpath = fpaths[0].parent / pkg.fname
-        sigpath = fpaths[0].parent / f'{str(pkg.fname)}.sig'
+        sigpath = fpaths[0].parent / f'{pkg.fname}.sig'
         if archive:
             archive_pkg(fullpath)
             if sigpath.exists():
@@ -176,7 +175,7 @@ def _regenerate(target_archs=ARCHS, just_symlink=False):
         for pkgfile in basedir.iterdir():
             if pkgfile.name.endswith(PKG_SUFFIX) and \
                get_pkg_details_from_name(pkgfile.name).arch == 'any':
-                sigfile = Path(f"{str(pkgfile)}.sig")
+                sigfile = Path(f"{pkgfile}.sig")
                 if sigfile.exists():
                     logger.info(f'Creating symlink for {pkgfile}, {sigfile}')
                     for arch in target_archs:
@@ -209,7 +208,7 @@ def _regenerate(target_archs=ARCHS, just_symlink=False):
                 throw_away(pkgfile)
                 continue
             elif pkgfile.name.endswith(PKG_SUFFIX):
-                sigfile = Path(f"{str(pkgfile)}.sig")
+                sigfile = Path(f"{pkgfile}.sig")
                 if not sigfile.exists():
                     logger.warning(f"{pkgfile} has no signature!")
                     throw_away(pkgfile)
@@ -217,7 +216,7 @@ def _regenerate(target_archs=ARCHS, just_symlink=False):
                 realarch = get_pkg_details_from_name(pkgfile.name).arch
                 if realarch != 'any' and realarch != arch:
                     newpath = pkgfile.parent / '..' / realarch / pkgfile.name
-                    newSigpath= Path(f'{str(newpath)}.sig')
+                    newSigpath= Path(f'{newpath}.sig')
                     logger.info(f'Moving {pkgfile} to {newpath}, {sigfile} to {newSigpath}')
                     assert not (newpath.exists() or newSigpath.exists())
                     pkgfile.rename(newpath)
@@ -238,7 +237,7 @@ def _regenerate(target_archs=ARCHS, just_symlink=False):
     logger.info('finished regenerate')
     return True

-def _update():
+def _update(overwrite=False):
     logger.info('starting update')
     update_path = Path('updates')
     assert update_path.exists()
@@ -250,13 +249,18 @@ def _update():
                 continue
             else:
                 if pkg_to_add.name.endswith(PKG_SUFFIX):
-                    sigfile = Path(f"{str(pkg_to_add)}.sig")
+                    sigfile = Path(f"{pkg_to_add}.sig")
                     if sigfile.exists():
                         arch = get_pkg_details_from_name(pkg_to_add.name).arch
                         pkg_nlocation = pkg_to_add.parent / '..' / 'www' / arch / pkg_to_add.name
-                        sig_nlocation = Path(f'{str(pkg_nlocation)}.sig')
+                        sig_nlocation = Path(f'{pkg_nlocation}.sig')
                         logger.info(f'Copying {pkg_to_add} to {pkg_nlocation}, {sigfile} to {sig_nlocation}')
-                        assert not (pkg_nlocation.exists() or sig_nlocation.exists())
+                        if overwrite:
+                            for nlocation in (pkg_nlocation, sig_nlocation):
+                                if nlocation.exists():
+                                    logger.warning(f'Overwriting {nlocation}')
+                        else:
+                            assert not (pkg_nlocation.exists() or sig_nlocation.exists())
                         copyfile(pkg_to_add, pkg_nlocation)
                         copyfile(sigfile, sig_nlocation)
                         archive_pkg(pkg_to_add)
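The substantive change in this hunk is replacing the hard assert with an opt-in overwrite path. A minimal, self-contained sketch of that guard, assuming pathlib paths; the helper name place_package is hypothetical and only mirrors the copy step from the diff:

# Sketch only: place_package() is a hypothetical helper, not code from repo.py.
import logging
from pathlib import Path
from shutil import copyfile

logger = logging.getLogger(__name__)

def place_package(pkg_src: Path, pkg_dst: Path, sig_src: Path, sig_dst: Path, overwrite: bool = False):
    if overwrite:
        # warn about each file that will be replaced, then fall through to copyfile()
        for nlocation in (pkg_dst, sig_dst):
            if nlocation.exists():
                logger.warning(f'Overwriting {nlocation}')
    else:
        # previous behavior: refuse to touch files that already exist
        assert not (pkg_dst.exists() or sig_dst.exists())
    copyfile(pkg_src, pkg_dst)
    copyfile(sig_src, sig_dst)

With overwrite=False the call keeps the old fail-fast behavior; with overwrite=True it logs a warning and lets copyfile() replace the published package and its signature.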
@@ -307,10 +311,12 @@ def _remove(pkgnames, target_archs=[a for a in ARCHS if a != 'any']):
     return True

 if __name__ == '__main__':
+    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
     import argparse
     try:
         parser = argparse.ArgumentParser(description='Automatic management tool for an arch repo.')
         parser.add_argument('-a', '--arch', nargs='?', default=False, help='arch to regenerate, split by comma, defaults to all')
+        parser.add_argument('-o', '--overwrite', action='store_true', help='overwrite when updating existing packages')
         parser.add_argument('-u', '--update', action='store_true', help='get updates from updates dir, push them to the repo')
         parser.add_argument('-r', '--regenerate', action='store_true', help='regenerate the whole package database')
         parser.add_argument('-R', '--remove', nargs='?', default=False, help='remove comma split packages from the database')
@@ -323,7 +329,7 @@ if __name__ == '__main__':
         if arch is not None:
             assert not [None for a in arch if a not in ARCHS] # ensure arch (= ARCHS
         if args.update:
-            _update()
+            _update(overwrite=args.overwrite)
         elif args.regenerate:
             if arch:
                 _regenerate(target_archs=arch)
repod.py (65 changes)
@@ -64,34 +64,41 @@ class pushFm:
                 return None
         else:
             return None
-    def done(self, fname):
+    def done(self, fname, overwrite=False):
         '''
             return None means success
             else returns an error string
         '''
         if fname == self.fname:
-            self.__init__()
+            try:
                 update_path = Path('updates')
                 pkg_found = False
                 sig_found = False
                 for fpath in update_path.iterdir():
                     if fpath.is_dir:
                         continue
                     if fpath.name == self.fname:
                         pkg_found = fpath
                     elif fpath.name == f'{self.fname}.sig':
                         sig_found = fpath
                 if pkg_found and sig_found:
                     try:
-                        bash(f'{GPG_VERIFY_CMD} {str(sig_found)} {str(pkg_found)}')
+                        bash(f'{GPG_VERIFY_CMD} {sig_found} {pkg_found}')
                     except CalledProcessError:
                         print_exc_plus()
                         return 'GPG verify error'
                     else:
-                        return None
+                        try:
+                            if update(overwrite=overwrite):
+                                return None
+                        except Exception:
+                            print_exc_plus()
+                        return 'update error'
                 else:
-                    return f'file missing: pkg {str(pkg_found)} sig {str(sig_found)}'
+                    return f'file missing: pkg {pkg_found} sig {sig_found}'
                 return "unexpected error"
+            finally:
+                self.__init__()
         else:
             return "Wrong file"
     def is_busy(self):
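The rewrite of done() swaps the up-front self.__init__() for a try/finally, so the tracker is reset on every exit path (success, missing file, GPG failure, or an exception from update()). A reduced illustration of that pattern, assuming a stand-in UploadTracker class rather than the real pushFm:

# Reduced illustration of the try/finally reset in pushFm.done();
# UploadTracker is a stand-in, not part of repod.py.
class UploadTracker:
    def __init__(self):
        self.fname = None

    def start(self, fname):
        self.fname = fname

    def done(self, fname):
        if fname != self.fname:
            return "Wrong file"
        try:
            # verification and publishing would happen here; every return
            # and every exception still passes through the finally block
            return None
        finally:
            self.__init__()   # state is cleared no matter how done() exits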
@@ -114,18 +121,8 @@ def push_files(filename, size):
     pfm.tick()
     return pfm.start(filename, size)

-def add_files(filename):
-    res = pfm.done(filename)
-    if res:
-        return res
-    else:
-        try:
-            if update():
-                return None
-        except Exception:
-            print_exc_plus()
-        return 'update error'
-
+def add_files(filename, overwrite=False):
+    return pfm.done(filename, overwrite=overwrite)

 if __name__ == '__main__':
     while True:
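Taken together, the two files thread one overwrite flag from the entry points down to the copy step: add_files() passes it to pushFm.done(), which passes it to the update routine, which chooses between warning-and-overwrite and the old assert. A hedged sketch of that call chain with simplified stand-in bodies (the real done() also performs GPG verification and error reporting):

# Simplified stand-ins showing how the overwrite flag propagates;
# the real bodies live in repo.py (_update) and repod.py (pushFm.done, add_files).
def _update(overwrite=False):
    # copy packages from updates/ into the repo, replacing existing
    # files only when overwrite is True
    return True

def done(fname, overwrite=False):
    # stand-in for pushFm.done(): verify the upload, then publish it
    if _update(overwrite=overwrite):
        return None
    return 'update error'

def add_files(filename, overwrite=False):
    # thin wrapper the push daemon calls
    return done(filename, overwrite=overwrite)

On the command line the same path is reached by running repo.py with -u together with the new -o/--overwrite switch.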