treepkg: bin/publishdebianpackages.py @ 484:2e947f508553
sawmill: Improved HTML 4.01 transitional compat a bit.
Reduce text output of inner loop to reduce overall output size of detail page.
author:   Sascha Teichmann <teichmann@intevation.de>
date:     Sat, 18 Sep 2010 10:15:45 +0000
parents:  eacfd3744d16
children: b7aad4cb58bb
#! /usr/bin/python
# Copyright (C) 2007 - 2010 by Intevation GmbH
# Authors:
# Bernhard Herzog <bh@intevation.de>
# Bjoern Ricks <bjoern.ricks@intevation.de>
#
# This program is free software under the GPL (>=v2)
# Read the file COPYING coming with the software for details.

"""Publishes selected packages created by treepkg"""

import os
import sys
import shlex

from optparse import OptionParser
from ConfigParser import SafeConfigParser

import treepkgcmd
from treepkg.readconfig import read_config_section, convert_bool
from treepkg.run import call, capture_stdout
from treepkg.cmdexpand import cmdexpand
from treepkg.publish import copy_arch_to_publishdir, prefix_for_remote_command,\
     get_binary_arch
from treepkg.util import md5sum, expand_filename, remove_trailing_slashes
from treepkg.info.status import TreepkgInfo
from treepkg.info.data import Package
from treepkg.info.data import CacheDb

config_desc = ["distribution", "section", "num_newest",
               "build_user", "build_host", "build_listpackages",
               "publish_user", "publish_host",
               ("architectures", shlex.split, "armel i386 source"),
               ("after_upload_hook", shlex.split, ""),
               ("after_copy_hook", shlex.split, ""),
               ("publish_packages", convert_bool, "True"),
               ("publish_remove_old_packages", convert_bool),
               ("publish_dir", remove_trailing_slashes),
               ("cachedb", lambda s: expand_filename(s)),
               ("cachedir", lambda s: expand_filename(remove_trailing_slashes(s)))]

def read_config(filename):
    if not os.path.exists(filename):
        print >>sys.stderr, "Config file %s does not exist" % filename
        sys.exit(1)
    parser = SafeConfigParser()
    parser.read([filename])
    return read_config_section(parser, "publishpackages", config_desc)

def parse_commandline():
    parser = OptionParser()
    parser.set_defaults(config_file=os.path.join(treepkgcmd.topdir,
                                                 "publishpackages.cfg"),
                        quiet=False)
    parser.add_option("--config-file",
                      help=("The configuration file."
                            " Default is publishpackages.cfg"))
    parser.add_option("--dist",
                      help=("The debian distribution name to use on"
                            " the publishing system"))
    parser.add_option("--section",
                      help=("The debian distribution section name to use on"
                            " the publishing system"))
    parser.add_option("--track",
                      help=("The package track whose files are to be"
                            " published. If not given, files of all tracks"
                            " will be published"))
    parser.add_option("--quiet", action="store_true",
                      help=("Do not print progress meters or other"
                            " informational output"))
    return parser.parse_args()

def get_treepkg_info(variables):
    runremote = prefix_for_remote_command(variables["build_user"],
                                          variables["build_host"])
    xml = capture_stdout(cmdexpand("@runremote $build_listpackages"
                                   " --newest=$num_newest"
                                   " --only-successful",
                                   runremote=runremote, **variables))
    return TreepkgInfo.fromxml(xml)

def check_package_is_new(packagename, destdir, packagemd5sum):
    destpackage = os.path.join(destdir, packagename)
    if not os.path.isfile(destpackage):
        return True
    destmd5sum = md5sum(destpackage)
    return (destmd5sum != packagemd5sum)

def get_md5sum(packageinfo):
    md5sum = ""
    if packageinfo:
        for checksum in packageinfo.checksums:
            if checksum.type == "md5":
                md5sum = checksum.checksum
                break
    return md5sum

def sort_trackname_arch(a, b):
    if a.trackname < b.trackname:
        return -1
    if a.trackname > b.trackname:
        return +1
    return cmp(a.arch, b.arch)

def copy_files_to_destdir(destdir, files, variables, quiet=False):
    scp_flags = []
    if quiet:
        scp_flags.append("-q")
    if not os.path.exists(destdir):
        os.makedirs(destdir)
    if files:
        if variables["build_host"]:
            userhost = "%(build_user)s@%(build_host)s:" % variables
            files = [userhost + filename for filename in files]
        # scp the packages to the cache dir
        call(cmdexpand("scp -p @scp_flags @files $cachedir/",
                       files=files, scp_flags=scp_flags, cachedir=destdir))

def remove_old_packages(cachedb, newpackages, quiet):
    newfiles = [package.filename for package in newpackages]
    oldpackages = cachedb.get_old_packages(newfiles)
    for package in oldpackages:
        # better check if the file really exists
        if os.path.isfile(package.filename):
            if not quiet:
                print "removing file %s" % package.filename
            os.remove(package.filename)
    cachedb.remove_packages(oldpackages)

def copy_packages_to_destdir(cachedb, dir, packages, variables, quiet=False):
    packages.sort(cmp=sort_trackname_arch)
    package = packages[0]
    trackname = package.trackname
    arch = package.arch
    destdir = os.path.join(dir, arch, trackname)
    files = []
    for package in packages:
        cachedb.add_package(package)
        if package.trackname != trackname or package.arch != arch:
            copy_files_to_destdir(destdir, files, variables, quiet)
            trackname = package.trackname
            arch = package.arch
            destdir = os.path.join(dir, arch, trackname)
            files = []
        # add only to copy files list if the packages differ
        if check_package_is_new(package.name, destdir, package.md5sum):
            files.append(package.sourcepath)
            if not quiet:
                print "copy new file: %s" % package.name
    copy_files_to_destdir(destdir, files, variables, quiet)

def copy_to_cachedir(variables, track, revision, quiet=False, architectures=()):
    cachedir = variables["cachedir"]
    cachdebfilename = variables["cachedb"]
    if not quiet:
        print "using cachedb: %s" % cachdebfilename
    cachedb = CacheDb(cachdebfilename)
    newpackages = []
    treepkginfo = get_treepkg_info(variables)
    allarchs = set()  # contains all present architectures (incl. source)
    binaryallpackages = []
    # change e.g. armel in binary-armel
    allowedarchs = set([get_binary_arch(a) for a in architectures])
    for track in treepkginfo.tracks:
        for rev in track.revisions:
            for packageinfo in rev.packages:
                arch = get_binary_arch(packageinfo.arch)
                if packageinfo.type == "binary":
                    # skip other files
                    if packageinfo.arch is None:
                        continue
                    # handle binary-all
                    if arch == "binary-all":
                        # add trackname for subdir name
                        packageinfo.trackname = track.name
                        binaryallpackages.append(packageinfo)
                        continue
                    allarchs.add(arch)
                elif packageinfo.type == "source":
                    arch = packageinfo.type
                # only copy requested archs
                if len(allowedarchs) == 0 or \
                   arch in allowedarchs:
                    filename = os.path.join(cachedir, arch, track.name,
                                            packageinfo.name)
                    newpackage = Package(filename, track.name,
                                         packageinfo.name, packageinfo.path,
                                         arch, get_md5sum(packageinfo))
                    newpackages.append(newpackage)
    # copy binary-all packages
    sourcearch = set(["source"])
    if not allowedarchs:
        binallarchs = allarchs - sourcearch
    elif allarchs:
        binallarchs = allowedarchs - sourcearch
    else:
        binallarchs = (allowedarchs & allarchs) - sourcearch
    for packageinfo in binaryallpackages:
        for arch in binallarchs:
            filename = os.path.join(cachedir, arch, packageinfo.trackname,
                                    packageinfo.name)
            newpackage = Package(filename, packageinfo.trackname,
                                 packageinfo.name, packageinfo.path,
                                 arch, get_md5sum(packageinfo))
            newpackages.append(newpackage)
    copy_packages_to_destdir(cachedb, cachedir, newpackages, variables, quiet)
    remove_old_packages(cachedb, newpackages, quiet)
    return binallarchs

def publish_packages(config_filename, track, revision, dist, section, quiet):
    config = read_config(config_filename)
    if dist is None:
        dist = config["distribution"]
    if section is None:
        section = config["section"]
    architectures = config["architectures"]
    copy_to_cachedir(config, track, revision, quiet, architectures)
    if config["after_copy_hook"]:
        if not quiet:
            print "running after copy hook"
        call(config["after_copy_hook"])
    if config["publish_packages"]:
        for arch in architectures:
            if not quiet:
                print "publish packages for architecture %s" % arch
            copy_arch_to_publishdir(config, dist, section,
                                    get_binary_arch(arch), quiet)
        # update apt archive
        if config["after_upload_hook"]:
            if not quiet:
                print "running after upload hook"
            call(config["after_upload_hook"])

def main():
    options, args = parse_commandline()
    revision = None  # for future use cases
    publish_packages(options.config_file, options.track, revision,
                     options.dist, options.section, options.quiet)

if __name__ == "__main__":
    main()
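For orientation, here is a minimal sketch of a configuration file and an invocation of the script. The section name "publishpackages" and the option and flag names come from read_config, config_desc and parse_commandline above; all concrete values (users, hosts, paths, distribution name, track name) are illustrative assumptions, not taken from the treepkg documentation.

    # publishpackages.cfg -- values below are hypothetical examples
    [publishpackages]
    distribution = lenny
    section = main
    num_newest = 1
    build_user = builder
    build_host = build.example.org
    build_listpackages = /path/to/treepkg/bin/listpackages   ; placeholder command
    publish_user = publisher
    publish_host = publish.example.org
    publish_dir = /var/www/packages
    publish_packages = True
    publish_remove_old_packages = True
    architectures = i386 source
    cachedb = ~/tracks/cache.db
    cachedir = ~/tracks/cache

    # example invocation (track name is hypothetical)
    bin/publishdebianpackages.py --config-file publishpackages.cfg \
        --dist lenny --section main --track mytrack

If --dist and --section are omitted, the script falls back to the distribution and section values from the configuration file, and without --track it publishes the files of all tracks.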