
pactest: generate sync DBs in memory

Sync databases are no longer exploded onto the filesystem. Rework the logic
used to generate our test databases so we can create them completely in
memory, without having to write the individual files to disk at all. The
local database is unaffected.

Note that several shortcomings in libalpm parsing were discovered by
this change, which have since been temporarily patched around in this
test suite (a short sketch of the workarounds follows the list):

* archive_fgets() did not properly handle a file that ended without a
  trailing newline, and would silently drop the data on that final line.
* A sync database containing only file entries and no directory entries
  would fail to parse properly, and even caused segfaults in some cases.
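
For illustration only (this sketch is not part of the commit): the in-memory
approach and both workarounds boil down to writing each entry with a trailing
newline and emitting an explicit directory member before the per-package
files. The snippet below uses the same Python 2 idioms as the patch (tarfile,
StringIO); the archive name "test.db" and the package "dummy-1.0-1" are made
up for the example.

    import os
    import tarfile
    from StringIO import StringIO

    def add_entry(tar, pkgdir, name, data):
        # Workaround 1: always end the data with a newline so that
        # archive_fgets() does not drop the final line.
        if not data.endswith("\n"):
            data += "\n"
        info = tarfile.TarInfo(os.path.join(pkgdir, name))
        info.size = len(data)
        tar.addfile(info, StringIO(data))

    tar = tarfile.open("test.db", "w:gz")
    pkgdir = "dummy-1.0-1"
    # Workaround 2: add an explicit directory entry; a DB containing only
    # file entries fails to parse (and can segfault) in libalpm.
    dirinfo = tarfile.TarInfo(pkgdir)
    dirinfo.type = tarfile.DIRTYPE
    tar.addfile(dirinfo)
    add_entry(tar, pkgdir, "desc", "%NAME%\ndummy\n%VERSION%\n1.0-1")
    tar.close()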

Signed-off-by: Dan McGee <dan@archlinux.org>
Dan McGee 2011-06-22 15:45:09 -05:00
parent 63335859d1
commit 624a878701
3 changed files with 45 additions and 45 deletions

test/pacman/pmdb.py

@@ -17,6 +17,8 @@
 import os
 import shutil
+from StringIO import StringIO
+import tarfile
 
 import pmpkg
@@ -49,6 +51,7 @@ class pmdb(object):
     def __init__(self, treename, root):
         self.treename = treename
         self.root = root
+        self.pkgs = []
         self.option = {}
         if self.treename == "local":
@@ -56,9 +59,7 @@ class pmdb(object):
             self.dbfile = None
             self.is_local = True
         else:
-            self.dbdir = os.path.join(root, util.PM_SYNCDBPATH, treename)
-            # TODO: we should be doing this, don't need a sync db dir
-            #self.dbdir = None
+            self.dbdir = None
             self.dbfile = os.path.join(root, util.PM_SYNCDBPATH, treename + ".db")
             self.is_local = False
@@ -77,12 +78,11 @@ class pmdb(object):
         return pkg
 
     def db_read(self, name):
-        path = self.dbdir
-        if not os.path.isdir(path):
+        if not self.dbdir or not os.path.isdir(self.dbdir):
             return None
 
         dbentry = ""
-        for roots, dirs, files in os.walk(path):
+        for roots, dirs, files in os.walk(self.dbdir):
             for i in dirs:
                 [pkgname, pkgver, pkgrel] = i.rsplit("-", 2)
                 if pkgname == name:
@@ -90,7 +90,7 @@ class pmdb(object):
                     break
 
         if not dbentry:
            return None
-        path = os.path.join(path, dbentry)
+        path = os.path.join(self.dbdir, dbentry)
 
         [pkgname, pkgver, pkgrel] = dbentry.rsplit("-", 2)
         pkg = pmpkg.pmpkg(pkgname, pkgver + "-" + pkgrel)
@@ -179,9 +179,7 @@ class pmdb(object):
     # db_write is used to add both 'local' and 'sync' db entries
     #
     def db_write(self, pkg):
-        path = os.path.join(self.dbdir, pkg.fullname())
-        util.mkdir(path)
+        entry = {}
 
         # desc/depends type entries
         data = []
         make_section(data, "NAME", pkg.name)
@@ -209,32 +207,48 @@ class pmdb(object):
         make_section(data, "MD5SUM", pkg.md5sum)
         make_section(data, "PGPSIG", pkg.pgpsig)
 
-        filename = os.path.join(path, "desc")
-        util.mkfile(filename, "\n".join(data))
+        entry["desc"] = "\n".join(data) + "\n"
 
         # files and install
         if self.is_local:
             data = []
             make_section(data, "FILES", pkg.full_filelist())
             make_section(data, "BACKUP", pkg.local_backup_entries())
-            filename = os.path.join(path, "files")
-            util.mkfile(filename, "\n".join(data))
+            entry["files"] = "\n".join(data) + "\n"
 
         if any(pkg.install.values()):
-            filename = os.path.join(path, "install")
-            util.mkfile(filename, pkg.installfile())
+            entry["install"] = pkg.installfile() + "\n"
 
-    def gensync(self):
-        if not self.dbfile:
-            return
-        curdir = os.getcwd()
-        os.chdir(self.dbdir)
+        return entry
 
-        tar = tarfile.open(self.dbfile, "w:gz")
-        for i in os.listdir("."):
-            tar.add(i)
-        tar.close()
+    def generate(self):
+        pkg_entries = [(pkg, self.db_write(pkg)) for pkg in self.pkgs]
 
-        os.chdir(curdir)
+        if self.dbdir:
+            for pkg, entry in pkg_entries:
+                path = os.path.join(self.dbdir, pkg.fullname())
+                util.mkdir(path)
+                for name, data in entry.iteritems():
+                    filename = os.path.join(path, name)
+                    util.mkfile(filename, data)
+
+        if self.dbfile:
+            tar = tarfile.open(self.dbfile, "w:gz")
+            for pkg, entry in pkg_entries:
+                # TODO: the addition of the directory is currently a
+                # requirement for successful reading of a DB by libalpm
+                info = tarfile.TarInfo(pkg.fullname())
+                info.type = tarfile.DIRTYPE
+                tar.addfile(info)
+                for name, data in entry.iteritems():
+                    filename = os.path.join(pkg.fullname(), name)
+                    info = tarfile.TarInfo(filename)
+                    info.size = len(data)
+                    tar.addfile(info, StringIO(data))
+            tar.close()
+
+            # TODO: this is a bit unnecessary considering only one test uses it
+            serverpath = os.path.join(self.root, util.SYNCREPO, self.treename)
+            util.mkdir(serverpath)
+            shutil.copy(self.dbfile, serverpath)
 
 # vim: set ts=4 sw=4 et:
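
Usage note (an illustration, not code from the commit): with the reworked
class above, a caller fills in db.pkgs and lets generate() do all the
writing, instead of calling db_write() per package and gensync() afterwards.
A minimal sketch, assuming a pactest root directory and a hypothetical sync
tree named "sync1":

    db = pmdb("sync1", root)                      # sync DB: dbdir is None, dbfile is set
    db.pkgs.append(pmpkg.pmpkg("dummy", "1.0-1"))
    db.generate()                                 # tars the entries into <root>/<PM_SYNCDBPATH>/sync1.db

The pmtest.py changes further down switch to exactly this flow via
value.generate().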

test/pacman/pmpkg.py

@@ -190,7 +190,7 @@ class pmpkg(object):
         data = []
         for key, value in self.install.iteritems():
             if value:
-                data.append("%s() {\n%s\n}" % (key, value))
+                data.append("%s() {\n%s\n}\n" % (key, value))
         return "\n".join(data)

test/pacman/pmtest.py

@@ -55,7 +55,7 @@ class pmtest(object):
         either sync databases or the local package collection. The local database
         is allowed to match if allow_local is True."""
         for db in self.db.itervalues():
-            if db.treename == "local" and not allow_local:
+            if db.is_local and not allow_local:
                 continue
             pkg = db.getpkg(name)
             if pkg and pkg.version == version:
@@ -110,7 +110,7 @@ class pmtest(object):
         # Create directory structure
         vprint("    Creating directory structure:")
-        dbdir = os.path.join(self.root, util.PM_DBPATH)
+        dbdir = os.path.join(self.root, util.PM_SYNCDBPATH)
         cachedir = os.path.join(self.root, util.PM_CACHEDIR)
         syncdir = os.path.join(self.root, util.SYNCREPO)
         tmpdir = os.path.join(self.root, util.TMPDIR)
@@ -146,25 +146,11 @@ class pmtest(object):
                 pkg.md5sum = util.getmd5sum(pkg.path)
                 pkg.csize = os.stat(pkg.path)[stat.ST_SIZE]
 
-        # Populating databases
-        vprint("    Populating databases")
-        for key, value in self.db.iteritems():
-            for pkg in value.pkgs:
-                vprint("\t%s/%s" % (key, pkg.fullname()))
-                if key == "local":
-                    pkg.installdate = time.ctime()
-                value.db_write(pkg)
-
-        # Creating sync database archives
-        vprint("    Creating sync database archives")
+        vprint("    Creating databases")
         for key, value in self.db.iteritems():
-            if key == "local":
-                continue
             vprint("\t" + value.treename)
-            value.gensync()
-            serverpath = os.path.join(syncdir, value.treename)
-            util.mkdir(serverpath)
-            shutil.copy(value.dbfile, serverpath)
+            value.generate()
 
         # Filesystem
         vprint("    Populating file system")