METADATA_DOWNLOAD_FILE = "repomd.json"
METADATA_DATABASE_FILE = "packages.solv"
-PACKAGE_FORMAT = 4
+PACKAGE_FORMAT = 5
# XXX implement this properly
-PACKAGE_FORMATS_SUPPORTED = [0, 1, 2, 3, 4]
+PACKAGE_FORMATS_SUPPORTED = [0, 1, 2, 3, 4, 5]
PACKAGE_EXTENSION = "pfm"
MAKEFILE_EXTENSION = "nm"
-DATABASE_FORMAT = 4
-DATABASE_FORMATS_SUPPORTED = [0, 1, 2, 3, 4]
+DATABASE_FORMAT = 5
+DATABASE_FORMATS_SUPPORTED = [0, 1, 2, 3, 4, 5]
PACKAGE_FILENAME_FMT = "%(name)s-%(version)s-%(release)s.%(arch)s.%(ext)s"
# XXX TODO
return False
+ def is_datafile(self):
+ return False
+
class File(_File):
def __init__(self, pakfire):
self.group = 0
self.mtime = 0
self.capabilities = None
+ self.datafile = False
def is_dir(self):
return self.type == TYPE_DIR_INT \
def is_config(self):
return self.config
+ def is_datafile(self):
+ return self.datafile
+
class FileDatabase(_File):
def __init__(self, pakfire, db, row_id, row=None):
def is_config(self):
return self.row["config"] == 1
+ def is_datafile(self):
+ return self.row["datafile"] == 1
+
@property
def pkg(self):
return self.db.get_package_by_id(self.row["pkg"])
def filelist(self):
raise NotImplementedError
+ @property
+ def configfiles(self):
+ return []
+
+ @property
+ def datafiles(self):
+ return []
+
def extract(self, path, prefix=None):
raise NotImplementedError, "%s" % repr(self)
# kept. files and configfiles are disjunct.
files = []
configfiles = self.configfiles
+ datafiles = self.datafiles
for file in self.filelist:
if file in configfiles:
continue
+ if file in datafiles:
+ continue
+
assert file.startswith("/")
files.append(file)
messages.append(_("Config file saved as %s.") % file_save)
continue
+ # Preserve datafiles.
+ if _file.is_datafile():
+ log.debug(_("Preserving datafile '/%s'") % _file)
+ continue
+
# Handle regular files and symlinks.
if os.path.isfile(file) or os.path.islink(file):
log.debug("Removing %s..." % _file)
messages.append(_("Config file created as %s") % config_new)
continue
+ # Don't overwrite target files if they already exist.
+ if file.is_datafile() and os.path.exists(target):
+ log.debug(_("Don't overwrite already existing datafile '/%s'") % member.name)
+ continue
+
# If the member is a directory and if it already exists, we
# don't need to create it again.
if os.path.exists(target):
return inst_size
+ def read_plain_filelist(self, filename):
+ a = self.open_archive()
+ files = []
+
+ try:
+ f = a.extractfile(filename)
+ for line in f.readlines():
+ # Strip newline at end of line.
+ file = line.rstrip()
+
+				# Add a leading / if not present.
+ if not file.startswith("/"):
+ file = "/%s" % file
+
+ files.append(file)
+ f.close()
+
+ # Ignore if 'filename' does not exist.
+ except KeyError:
+ pass
+
+ finally:
+ a.close()
+
+ return files
+
def get_filelist(self):
"""
Return a list of the files that are contained in the package
a = self.open_archive()
# Cache configfiles.
- configfiles = []
+ if self.format >= 5:
+ filename = "configfiles"
+ else:
+ filename = "configs"
+ configfiles = self.read_plain_filelist(filename)
- try:
- f = a.extractfile("configs")
- for line in f.readlines():
- line = line.rstrip()
- if not line.startswith("/"):
- line = "/%s" % line
- configfiles.append(line)
- f.close()
- except KeyError:
- pass # Package has no configuration files. Never mind.
+ # Cache datafiles.
+ datafiles = self.read_plain_filelist("datafiles")
f = a.extractfile("filelist")
for line in f.readlines():
if name in configfiles:
file.config = True
+ # Check if this is a datafile.
+ if name in datafiles:
+ file.datafile = True
+
# Parse file type.
try:
file.type = int(line[0])
def configfiles(self):
return [f for f in self.filelist if f.is_config()]
+ @property
+ def datafiles(self):
+ return [f for f in self.filelist if f.is_datafile()]
+
@property
def payload_compression(self):
"""
return ret
+ @property
+ def datafiles(self):
+ ret = []
+
+ for file in self.filelist:
+ if not file.is_datafile():
+ continue
+
+ ret.append(file)
+
+ return ret
+
def _does_provide_file(self, requires):
"""
A faster version to find a file in the database.
def configfiles(self):
return self.lexer.get_var("configfiles").split()
+ @property
+ def datafiles(self):
+ files = self.lexer.get_var("datafiles")
+ return files.split()
+
@property
def files(self):
return self.lexer.get_var("files").split()
return scriptlets
- def create_configs(self, datafile):
+ def find_files(self, datafile, patterns):
if self.payload_compression == "xz":
datafile = InnerTarFileXz.open(datafile)
else:
members = datafile.getmembers()
- configfiles = []
- configdirs = []
+ files = []
+ dirs = []
- # Find all directories in the config file list.
- for file in self.pkg.configfiles:
+ # Find all directories in the file list.
+ for file in patterns:
if file.startswith("/"):
file = file[1:]
for member in members:
if member.name == file and member.isdir():
- configdirs.append(file)
+ dirs.append(file)
- for configdir in configdirs:
+ for d in dirs:
for member in members:
- if not member.isdir() and member.name.startswith(configdir):
- configfiles.append(member.name)
+ if not member.isdir() and member.name.startswith(d):
+ files.append(member.name)
- for pattern in self.pkg.configfiles:
+ for pattern in patterns:
if pattern.startswith("/"):
pattern = pattern[1:]
if not fnmatch.fnmatch(member.name, pattern):
continue
- if member.name in configfiles:
+ if member.name in files:
continue
- configfiles.append(member.name)
+ files.append(member.name)
# Sort list alphabetically.
- configfiles.sort()
+ files.sort()
+
+ return files
+
+ def create_configfiles(self, datafile):
+ files = self.find_files(datafile, self.pkg.configfiles)
configsfile = self.mktemp()
f = open(configsfile, "w")
- for file in configfiles:
+ for file in files:
f.write("%s\n" % file)
f.close()
return configsfile
+ def create_datafiles(self, datafile):
+ files = self.find_files(datafile, self.pkg.datafiles)
+
+ datafile = self.mktemp()
+
+ f = open(datafile, "w")
+ for file in files:
+ f.write("%s\n" % file)
+ f.close()
+
+ return datafile
+
def run(self, resultdir):
# Add all files to this package.
datafile = self.create_datafile()
# Get filelist from datafile.
filelist = self.create_filelist(datafile)
- configs = self.create_configs(datafile)
+ configfiles = self.create_configfiles(datafile)
+ datafiles = self.create_datafiles(datafile)
# Create script files.
scriptlets = self.create_scriptlets()
# Add files to the tar archive in correct order.
self.add(metafile, "info")
self.add(filelist, "filelist")
- self.add(configs, "configs")
+ self.add(configfiles, "configfiles")
+ self.add(datafiles, "datafiles")
self.add(datafile, "data.img")
for scriptlet_name, scriptlet_file in scriptlets:
size INTEGER,
type INTEGER,
config INTEGER,
+ datafile INTEGER,
mode INTEGER,
user TEXT,
`group` TEXT,
c.execute("ALTER TABLE packages ADD COLUMN recommends TEXT AFTER obsoletes")
c.execute("ALTER TABLE packages ADD COLUMN suggests TEXT AFTER recommends")
+ if self.format < 5:
+ c.execute("ALTER TABLE files ADD COLUMN datafile INTEGER AFTER config")
+
# In the end, we can easily update the version of the database.
c.execute("UPDATE settings SET val = ? WHERE key = 'version'", (DATABASE_FORMAT,))
self.__format = DATABASE_FORMAT
pkg_id = c.lastrowid
- c.executemany("INSERT INTO files(`name`, `pkg`, `size`, `config`, `type`, `hash1`, `mode`, `user`, `group`, `mtime`, `capabilities`)"
- " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
- ((f.name, pkg_id, f.size, f.is_config(), f.type, f.hash1, f.mode, f.user, f.group, f.mtime, f.capabilities or "") for f in pkg.filelist))
+ c.executemany("INSERT INTO files(`name`, `pkg`, `size`, `config`, `datafile`, `type`, `hash1`, `mode`, `user`, `group`, `mtime`, `capabilities`)"
+ " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+ ((f.name, pkg_id, f.size, f.is_config(), f.is_datafile(), f.type, f.hash1, f.mode, f.user, f.group, f.mtime, f.capabilities or "") for f in pkg.filelist))
except:
raise