Mercurial > treepkg
changeset 429:9cfa9f64387a treepkg-status
only rsync specified architectures from cachedir to publishdir
| author | Bjoern Ricks <bricks@intevation.de> |
| date | Wed, 28 Jul 2010 11:38:00 +0000 |
| parents | 3d65b3176159 |
| children | 4aaf73a6225e |
| files | bin/publishdebianpackages.py treepkg/publish.py |
| diffstat | 2 files changed, 46 insertions(+), 7 deletions(-) [+] |
line wrap: on
line diff
--- a/bin/publishdebianpackages.py Wed Jul 28 08:38:50 2010 +0000 +++ b/bin/publishdebianpackages.py Wed Jul 28 11:38:00 2010 +0000 @@ -11,7 +11,6 @@ import os import os.path -import re import sys import shlex @@ -28,8 +27,6 @@ from treepkg.info.data import Package from treepkg.info.data import CacheDb -EMPTY = re.compile(r'\s*') - config_desc = ["distribution", "section", "num_newest", "build_user", "build_host", "build_listpackages", "publish_user", "publish_host", @@ -230,13 +227,14 @@ section = config["section"] architectures = config["architectures"] - allarchs = copy_to_cachedir(config, track, revision, quiet, architectures) - for arch in allarchs: - copy_to_publishdir(config, dist, section, arch, quiet) + copy_to_cachedir(config, track, revision, quiet, architectures) + for arch in architectures: + copy_arch_to_publishdir(config, dist, section, get_binary_arch(arch), + quiet) # update apt archive if config["after_upload_hook"] and \ - not EMPTY.match(config["after_upload_hook"]): + len((config["after_upload_hook"][0]).strip()) > 0: if not quiet: print "running after upload hook" call(config["after_upload_hook"])
--- a/treepkg/publish.py Wed Jul 28 08:38:50 2010 +0000 +++ b/treepkg/publish.py Wed Jul 28 11:38:00 2010 +0000 @@ -9,6 +9,7 @@ import os.path +import util from treepkg.run import call, capture_output from treepkg.cmdexpand import cmdexpand @@ -32,6 +33,46 @@ prefix.extend(["ssh", "%s@%s" % (user, host)]) return prefix +def copy_arch_to_publishdir(variables, dist, section, arch=None, quiet=False, + create=True): + if not arch: + cachedir = variables["cachedir"] + else: + cachedir = os.path.join(variables["cachedir"], arch) + + # if cachedir does not exist rsync will fail therefore + # it must be created or skipped. if it is created remote + # content will be deleted + if not os.path.exists(cachedir): + if create: + util.ensure_directory(cachedir) + else: + return + + destdir = os.path.join(variables["publish_dir"], dist, section) + remote_destdir = destdir + if variables["publish_host"]: + remote_destdir = (("%(publish_user)s@%(publish_host)s:" % variables) + + remote_destdir) + runremote = prefix_for_remote_command(variables["publish_user"], + variables["publish_host"]) + + call(cmdexpand("@runremote mkdir --parents $destdir", + runremote=runremote, destdir=destdir, **variables)) + rsync_flags = [] + if variables["publish_remove_old_packages"]: + rsync_flags.append("--delete") + if quiet: + rsync_flags.append("--quiet") + cmd = cmdexpand("rsync @rsync_flags -r --perms --times --omit-dir-times" + " $cachedir $remote_destdir/", + rsync_flags=rsync_flags, remote_destdir=remote_destdir, + cachedir=cachedir) + #print "rsync cmd: %s" % cmd + call(cmd) + + + def copy_to_publishdir(variables, dist, section, arch=None, quiet=False): if not arch: destdir = os.path.join(variables["publish_dir"], dist, section)