# HG changeset patch
# User Bjoern Ricks
# Date 1280139160 0
# Node ID 4980f8d5014a50173f54748ea788039c1a185b4f
# Parent 2d31eea39f70ef8ad9c4eb2d3391660affc42068
first version of incremental copying debian packages to cachedir

diff -r 2d31eea39f70 -r 4980f8d5014a bin/publishdebianpackages.py
--- a/bin/publishdebianpackages.py	Mon Jul 26 09:42:07 2010 +0000
+++ b/bin/publishdebianpackages.py	Mon Jul 26 10:12:40 2010 +0000
@@ -10,6 +10,7 @@
 """Publishes selected packages created by treepkg"""
 
 import os
+import os.path
 import sys
 import shlex
 
@@ -28,7 +29,7 @@
 
 config_desc = ["distribution", "section", "num_newest",
                "build_user", "build_host", "build_listpackages",
-               "publish_user", "publish_host",
+               "publish_user", "publish_host", "cachedb",
                ("architectures", shlex.split, "armel i386 source"),
                ("after_upload_hook", shlex.split),
                ("publish_remove_old_packages", convert_bool),
@@ -105,7 +106,7 @@
     if a.trackname > b.trackname:
         return +1
     return cmp(a.arch, b.arch)
-def copy_files(destdir, files, quiet):
+def copy_files_to_destdir(destdir, files, quiet):
     scp_flags = []
     if quiet:
         scp_flags.append("-q")
@@ -117,7 +118,16 @@
     call(cmdexpand("scp -p @scp_flags @files $cachedir/", files=files,
                    scp_flags=scp_flags, cachedir=destdir))
 
-def copy_to_destdir(dir, packages, quiet = False):
+def remove_old_packages(cachedb, newpackages):
+    newfiles = [package.filename for package in newpackages]
+    oldpackages = cachedb.get_old_packages(newfiles)
+    for package in oldpackages:
+        # better check if the file really exists
+        if os.path.isfile(package.filename):
+            os.remove(package.filename)
+    cachedb.remove_packages(oldpackages)
+
+def copy_packages_to_destdir(cachedb, dir, packages, quiet = False):
     packages.sort(cmp=sort_trackname_arch)
     package = packages[0]
     trackname = package.name
@@ -125,20 +135,24 @@
     destdir = os.path.join(dir, arch, trackname)
     files = []
     for package in packages:
+        cachedb.add_package(package)
         if package.trackname != trackname or \
           package.arch != arch:
-            #copy_files(destdir, files, quiet)
+            copy_files_to_destdir(destdir, files, quiet)
            trackname = package.trackname
            arch = package.arch
            destdir = os.path.join(dir, arch, trackname)
            files = []
-        # add only if the packages differ
+        # add only to copy files list if the packages differ
         if check_package_is_new(package.name, destdir, package.md5sum):
             files.append(package.sourcepath)
-            print "package: %s, destdir: %s" % (package.name, destdir)
+            print "copy package: %s, source: %s, destdir: %s" % (package.name,
+                package.sourcepath, destdir)
 
-def copy_to_cache(variables, track, revision, quiet = False, architectures=None):
+def copy_to_cachedir(variables, track, revision, quiet = False, architectures=None):
     cachedir = variables["cachedir"]
+    cachdebfilename = variables["cachedb"]
+    cachedb = CacheDb(cachdebfilename)
     newpackages = []
     treepkginfo = get_treepkg_info(variables)
     #allowedarchs = set([]) # contains all wanted architectures (incl. source)
@@ -149,32 +163,31 @@
         allowedarchs = set([get_binary_arch(a) for a in architectures])
     else:
         allowedarchs = set([])
-    print "allowedarchs: %s" % allowedarchs
     for track in treepkginfo.tracks:
         for rev in track.revisions:
-            for package in rev.packages:
-                arch = get_binary_arch(package.arch)
-                if package.type == "binary":
+            for packageinfo in rev.packages:
+                arch = get_binary_arch(packageinfo.arch)
+                if packageinfo.type == "binary":
                     # skip other files
-                    if package.arch is None:
+                    if packageinfo.arch is None:
                         continue
                     # handle binary-all
                     if arch == "binary-all":
                         # add trackname for subdir name
-                        package.trackname = track.name
-                        binaryallpackages.append(package)
+                        packageinfo.trackname = track.name
+                        binaryallpackages.append(packageinfo)
                         continue
                     allarchs.add(arch)
-                elif package.type == "source":
-                    arch = package.type
-                print "package: %s %s" % (package.name, arch)
+                elif packageinfo.type == "source":
+                    arch = packageinfo.type
                 # only copy requested archs
                 if len(allowedarchs) == 0 or \
                    arch in allowedarchs:
                     filename = os.path.join(cachedir, arch, track.name,
-                                            package.name)
-                    newpackage = Package(filename, track.name, package.name,
-                                         package.path, arch, get_md5sum(package))
+                                            packageinfo.name)
+                    newpackage = Package(filename, track.name, packageinfo.name,
+                                         packageinfo.path, arch,
+                                         get_md5sum(packageinfo))
                     newpackages.append(newpackage)
     # copy binary-all packages
     sourcearch = set(["source"])
@@ -184,20 +197,15 @@
         binallarchs = allowedarchs - sourcearch
     else:
         binallarchs = (allowedarchs & allarchs) - sourcearch
-    for package in binaryallpackages:
+    for packageinfo in binaryallpackages:
         for arch in binallarchs:
             filename = os.path.join(cachedir, arch, track.name,
-                                    package.name)
-            newpackage = Package(filename, package.trackname, package.name,
-                                 package.path, arch, get_md5sum(package))
+                                    packageinfo.name)
+            newpackage = Package(filename, packageinfo.trackname, packageinfo.name,
+                                 packageinfo.path, arch, get_md5sum(packageinfo))
             newpackages.append(newpackage)
-    print newpackages
-    copy_to_destdir(cachedir, newpackages, quiet)
-
-def publish_packages_arch(variables, track, revision, dist, section,
-                          quiet, architectures):
-    copy_to_cache(variables, track, revision, quiet, architectures)
-# copy_to_publishdir(variables, dist, section, arch, quiet)
+    copy_packages_to_destdir(cachedb, cachedir, newpackages, quiet)
+    remove_old_packages(cachedb, newpackages)
 
 def publish_packages(config_filename, track, revision, dist, section, quiet):
     config = read_config(config_filename)
@@ -207,9 +215,13 @@
     if section is None:
         section = config["section"]
 
+    if not "cachedb" in config.keys():
+        print >>sys.stderr, "Error. Please specifiy a cachedb in your config file"
+        sys.exit(1)
+
     architectures = config["architectures"]
-    publish_packages_arch(config, track, revision, dist, section,
-                          quiet, architectures)
+    copy_to_cachedir(config, track, revision, quiet, architectures)
+# copy_to_publishdir(config, dist, section, arch, quiet)
 
     # update apt archive
     # call(config["after_upload_hook"])
diff -r 2d31eea39f70 -r 4980f8d5014a test/test_info_data.py
--- a/test/test_info_data.py	Mon Jul 26 09:42:07 2010 +0000
+++ b/test/test_info_data.py	Mon Jul 26 10:12:40 2010 +0000
@@ -31,9 +31,18 @@
         dbfile = os.path.join(tmpdir, "cachedb2")
         db = CacheDb(dbfile)
 
+        # insert
+        filename = package.filename
         db.add_package(package)
+        package = db.get_package(filename)
+        self.assertEquals("1234567", package.md5sum)
+
+        # update
+        package.md5sum = "01234567"
+        db.add_package(package)
+        package = db.get_package(filename)
+        self.assertEquals("01234567", package.md5sum)
 
-        # test get_package and get_timestamp
+        # test get_package
         package2 = db.get_package(package.filename)
         self.assertNotEquals(None, package2)
         self.assertEquals(package.filename, package2.filename)
@@ -42,16 +51,21 @@
         self.assertEquals(package.arch, package2.arch)
         self.assertEquals(package.md5sum, package2.md5sum)
 
-        # test get_old_packages
         package3 = Package("/tmp/foo/foo_0.2.i386.deb", "foo",
-                           "foo_0.2_i386.deb", "/tmp/foo/foo_0.2.i386.deb",
+                           "foo_0.2_i386.deb", "/tmp/source/foo/foo_0.2.i386.deb",
                            "binary-i386", "987654321")
+        package4 = Package("/tmp/john/doe_0.3.i386.deb", "john",
+                           "doe_0.3.i386.deb", "/tmp/source/john/doe_0.3.i386.deb",
+                           "binary-i386", "5671234")
         db.add_package(package3)
+        db.add_package(package4)
+
+        # test get_old_packages
         oldpackages = db.get_old_packages([package.filename])
-        self.assertEquals(1, len(oldpackages))
+        self.assertEquals(2, len(oldpackages))
         packages = db.get_packages()
-        self.assertEquals(2, len(packages))
-        db.remove_packages([package.filename for package in oldpackages])
+        self.assertEquals(3, len(packages))
+        db.remove_packages(oldpackages)
         packages = db.get_packages()
         self.assertEquals(1, len(packages))
 
diff -r 2d31eea39f70 -r 4980f8d5014a treepkg/info/data.py
--- a/treepkg/info/data.py	Mon Jul 26 09:42:07 2010 +0000
+++ b/treepkg/info/data.py	Mon Jul 26 10:12:40 2010 +0000
@@ -64,7 +64,7 @@
         self.conn.commit()
 
     def update_package(self, package):
-        UPDATE_TMPL = """UPDATE packages set md5sum = '?'
+        UPDATE_TMPL = """UPDATE packages set md5sum = ?
                          WHERE filename in (?)"""
         self.cursor.execute(UPDATE_TMPL, (package.md5sum, package.filename))
         self.conn.commit()
@@ -83,11 +83,12 @@
         self.cursor.execute(SELECT_TMPL % tmp, newfiles)
         return [Package(*row) for row in self.cursor.fetchall()]
 
-    def remove_packages(self, files):
+    def remove_packages(self, packages):
         DELET_TMPL = """DELETE FROM packages
                         WHERE filename in (%s)"""
-        tmp = ", ".join(['?'] * len(files))
-        self.cursor.execute(DELET_TMPL % tmp, files)
+        tmp = ", ".join(['?'] * len(packages))
+        self.cursor.execute(DELET_TMPL % tmp,
+                            [package.filename for package in packages])
         self.conn.commit()
 
     def get_packages(self):