# Repository management for InstallSystems images.
# Installsystems is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Installsystems is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
import cStringIO
import fnmatch
import json
import os
import re
import shutil
import tempfile
import time
import uuid
import installsystems
import installsystems.tools as istools
from installsystems.exception import *
from installsystems.printer import *
from installsystems.tarball import Tarball
from installsystems.tools import PipeFile
from installsystems.image import Image, PackageImage
from installsystems.database import Database
class RepositoryFactory(object):
    '''
    Build Repository objects matching the on-disk database version.
    '''

    def __init__(self):
        # database version -> repository implementation
        self.repo_class = {
            1: Repository_v1,
            2: Repository,
        }

    def create(self, config):
        '''
        Instantiate the right Repository class for *config*.

        Tries to open the repository database unless the config is already
        offline; any failure to load it flips the config to offline.
        '''
        db = None
        if not config.offline:
            try:
                db = Database(config.dbpath)
            except ISWarning as e:
                warn('[%s]: %s' % (config.name, e))
                config.offline = True
            except ISError:
                debug(u"Unable to load database %s" % config.dbpath)
                config.offline = True
        if config.offline:
            debug(u"Repository %s is offline" % config.name)
        # without a database we cannot know the version: default implementation
        if db is None:
            return Repository(config)
        return self.repo_class[int(db.version)](config, db)
@staticmethod
def is_repository_name(name):
    '''
    Return True when *name* is a valid repository name
    (word characters, dashes and underscores only).
    '''
    return bool(re.match("^[-_\w]+$", name))
@staticmethod
def check_repository_name(name):
    '''
    Validate a repository name.

    Returns *name* unchanged when valid, raises ISError otherwise.
    '''
    if Repository.is_repository_name(name):
        return name
    raise ISError(u"Invalid repository name %s" % name)
@staticmethod
def split_image_path(path):
    '''
    Split an image path (repo/image:version) into a
    (repo, image, version) tuple; absent parts are None.

    Raises ISError on a malformed path.
    '''
    match = re.match(u"^(?:([^/:]+)/)?([^/:]+)?(?::v?([^/:]+)?)?$", path)
    if match is None:
        raise ISError(u"invalid image path: %s" % path)
    return (match.group(1), match.group(2), match.group(3))
@staticmethod
def split_repository_list(repolist, filter=None):
    '''
    Return a list of repository from a comma/spaces separated names of repo.

    *filter* is a predicate applied to each candidate name; by default
    only syntactically valid repository names are kept.
    '''
    predicate = Repository.is_repository_name if filter is None else filter
    return [name for name in re.split("[ ,\n\t\v]+", repolist) if predicate(name)]
@staticmethod
def diff(repo1, repo2):
    '''
    Print the difference between two repositories: images and payloads
    present in only one of them.

    NOTE(review): the method header and some surrounding lines were lost
    in extraction; the signature is reconstructed from the repo1/repo2
    arguments used below — confirm against upstream.
    '''
    arrow(u"Diff between repositories #y#%s#R# and #g#%s#R#" % (repo1.config.name,
                                                                repo2.config.name))
    # Get info from databases
    i_dict1 = dict((b[0], b[1:]) for b in repo1.db.ask(
        "SELECT md5, name, version FROM image").fetchall())
    i_set1 = set(i_dict1.keys())
    i_dict2 = dict((b[0], b[1:]) for b in repo2.db.ask(
        "SELECT md5, name, version FROM image").fetchall())
    i_set2 = set(i_dict2.keys())
    p_dict1 = dict((b[0], b[1:]) for b in repo1.db.ask(
        "SELECT md5, name FROM payload").fetchall())
    p_set1 = set(p_dict1.keys())
    p_dict2 = dict((b[0], b[1:]) for b in repo2.db.ask(
        "SELECT md5, name FROM payload").fetchall())
    p_set2 = set(p_dict2.keys())
    # computing diff
    i_only1 = i_set1 - i_set2
    i_only2 = i_set2 - i_set1
    p_only1 = p_set1 - p_set2
    p_only2 = p_set2 - p_set1
    # printing functions
    pimg = lambda r,c,m,d,: out("#%s#Image only in repository %s: %s v%s (%s)#R#" %
                                (c, r.config.name, d[m][0], d[m][1], m))
    ppay = lambda r,c,m,d,: out("#%s#Payload only in repository %s: %s (%s)#R#" %
                                (c, r.config.name, d[m][0], m))
    # printing image diff
    for md5 in i_only1: pimg(repo1, "y", md5, i_dict1)
    for md5 in p_only1: ppay(repo1, "y", md5, p_dict1)
    for md5 in i_only2: pimg(repo2, "g", md5, i_dict2)
    for md5 in p_only2: ppay(repo2, "g", md5, p_dict2)
self.local = istools.isfile(self.config.path)
def __getattribute__(self, name):
    '''
    Raise an error if repository is unavailable
    Unavailable can be caused because db is not accessible or
    because repository is not initialized
    '''
    config = object.__getattribute__(self, "config")
    # config, init, local and upgrade_db are always accessible
    if name in ("init", "config", "local", "upgrade_db"):
        return object.__getattribute__(self, name)
    # if no db (not init or not accessible) raise error
    # NOTE(review): the original offline check was lost in extraction; as
    # written the method raised for EVERY other attribute. Restored here:
    # only an offline repository refuses access.
    if config.offline:
        raise ISError(u"Repository %s is offline" % config.name)
    return object.__getattribute__(self, name)
@property
def version(self):
    '''
    Repository version, as reported by its database.
    '''
    db = self.db
    return db.version
@property
def uuid(self):
    '''
    Repository UUID, read from the repository table.
    '''
    row = self.db.ask("SELECT uuid from repository").fetchone()
    return row[0]
def init(self):
    '''
    Initialize an empty local repository: directory, database and last file.

    NOTE(review): the method header, the local guard and the try/except
    scaffolding were lost in extraction; reconstructed from the orphaned
    raise statements — confirm against upstream.
    '''
    if not self.local:
        raise ISError(u"Repository creation must be local")
    config = self.config
    # creating local directory
    try:
        if os.path.exists(config.path):
            arrow(u"%s already exists" % config.path)
        else:
            istools.mkdir(config.path, config.uid, config.gid, config.dmod)
            arrow(u"%s directory created" % config.path)
    except Exception as e:
        raise ISError(u"Unable to create directory %s" % config.path, e)
    # create empty database and fix its rights
    Database.create(config.dbpath)
    istools.chrights(config.dbpath, uid=config.uid,
                     gid=config.gid, mode=config.fmod)
    # load database
    self.db = Database(config.dbpath)
    # mark repo as not offline
    self.config.offline = False
    # create/update last file
    self.update_last()
def update_last(self):
    '''
    Update last file to current time

    Raises ISError when the repository is remote or the write fails.
    '''
    # NOTE(review): guard and try/except reconstructed from orphaned raises
    if not self.local:
        raise ISError(u"Repository addition must be local")
    try:
        last_path = os.path.join(self.config.path, self.config.lastname)
        # use a context manager so the file handle is always closed
        with open(last_path, "w") as f:
            f.write("%s\n" % int(time.time()))
        istools.chrights(last_path, self.config.uid, self.config.gid, self.config.fmod)
    except Exception as e:
        raise ISError(u"Update last file failed", e)
def last(self, name):
    '''
    Return last version of name in repo or None if not found
    '''
    r = self.db.ask("SELECT version FROM image WHERE name = ?", (name,)).fetchall()
    # no row => no way (fetchall returns a list, so test emptiness,
    # not None as the corrupted original did)
    if not r:
        return None
    # compare plain version strings; the original reducer mixed tuples and
    # scalars and never returned its result
    versions = [row[0] for row in r]
    f = lambda x, y: x if istools.compare_versions(x, y) > 0 else y
    return reduce(f, versions)
# NOTE: a block of source lines was lost here during extraction; only the
# original line-number gutter (239-272) remained and has been removed.
def _add(self, image):
    '''
    Insert an image row and its payload rows into the database,
    then refresh the last file.
    '''
    arrow("Adding metadata")
    self.db.begin()
    # insert image information
    arrow("Image", 1)
    image_row = (image.md5, image.name, image.version, image.date,
                 image.author, image.description, image.size,
                 image.is_min_version, image.img_format)
    self.db.ask("INSERT INTO image values (?,?,?,?,?,?,?,?,?)", image_row)
    # insert data information
    arrow("Payloads", 1)
    for payload_name, payload in image.payload.items():
        self.db.ask("INSERT INTO payload values (?,?,?,?,?)",
                    (payload.md5, image.md5, payload_name,
                     payload.isdir, payload.size))
    # on commit
    self.db.commit()
    # update last file
    self.update_last()
def add(self, image, delete=False):
    '''
    Add a packaged image into the repository pool and database.
    if delete is true, remove original files

    NOTE(review): header, skip/copy branching and the PackageImage call
    were truncated in extraction; reconstructed — confirm against upstream.
    '''
    if not self.local:
        raise ISError(u"Repository addition must be local")
    # cannot add already existant image
    if self.has(image.name, image.version):
        raise ISError(u"Image already in database, delete first!")
    # copy image and payload files into the pool, named by md5
    for obj in [ image ] + image.payload.values():
        dest = os.path.join(self.config.path, obj.md5)
        basesrc = os.path.basename(obj.path)
        if os.path.exists(dest):
            arrow(u"Skipping %s: already exists" % basesrc, 1)
        else:
            arrow(u"Adding %s (%s)" % (basesrc, obj.md5), 1)
            dfo = open(dest, "wb")
            sfo = PipeFile(obj.path, "r", progressbar=True)
            sfo.consume(dfo)
            sfo.close()
            dfo.close()
            istools.chrights(dest, self.config.uid,
                             self.config.gid, self.config.fmod)
    # copy is done. create a image inside repo
    r_image = PackageImage(os.path.join(self.config.path, image.md5),
                           md5name=True)
    # checking must be done with original md5
    r_image.md5 = image.md5
    # checking image and payload after copy
    r_image.check("Check image and payload")
    # register metadata in the database
    self._add(image)
    # removing orginal files
    if delete:
        arrow("Removing original files")
        for obj in [ image ] + image.payload.values():
            arrow(os.path.basename(obj.path), 1)
            os.unlink(obj.path)
def getallmd5(self):
    '''
    Get list of all md5 in DB
    '''
    rows = self.db.ask("SELECT md5 FROM image UNION SELECT md5 FROM payload").fetchall()
    return [row[0] for row in rows]
def check(self):
    '''
    Check repository for unreferenced and missing files
    '''
    # Check if the repo is local
    if not self.local:
        raise ISError(u"Repository must be local")
    # NOTE(review): the definitions of local_files/db_files were lost in
    # extraction; reconstructed from how they are compared below.
    local_files = set(os.listdir(self.config.path))
    local_files.remove(self.config.dbname)
    local_files.remove(self.config.lastname)
    db_files = set(self.getallmd5())
    # check missing files
    arrow("Checking missing files")
    missing_files = db_files - local_files
    if len(missing_files) > 0:
        out(os.linesep.join(missing_files))
    # check unreferenced files
    arrow("Checking unreferenced files")
    unref_files = local_files - db_files
    if len(unref_files) > 0:
        out(os.linesep.join(unref_files))
    # check corruption of local files (pool files are named by their md5)
    arrow("Checking corrupted files")
    for f in local_files:
        fo = PipeFile(os.path.join(self.config.path, f))
        fo.consume()
        fo.close()
        if fo.md5 != f:
            out(f)
def clean(self, force=False):
    '''
    Clean the repository's content

    Removes pool files that no database row references. With *force*
    the confirmation prompt is skipped.
    '''
    # Check if the repo is local
    if not self.local:
        raise ISError(u"Repository must be local")
    allmd5 = set(self.getallmd5())
    repofiles = set(os.listdir(self.config.path)) - set([self.config.dbname, self.config.lastname])
    dirtyfiles = repofiles - allmd5
    if len(dirtyfiles) > 0:
        # print dirty files
        arrow("Dirty files:")
        for f in dirtyfiles:
            arrow(f, 1)
        # ask confirmation
        if not force and not confirm("Remove dirty files? (yes) "):
            # NOTE(review): original abort behavior lost in extraction;
            # bail out without deleting — confirm against upstream
            return
        for f in dirtyfiles:
            p = os.path.join(self.config.path, f)
            arrow(u"Removing %s" % p, 1)
            try:
                if os.path.isdir(p):
                    os.rmdir(p)
                else:
                    os.unlink(p)
            except:
                warn(u"Removing %s failed" % p)
def delete(self, name, version, payloads=True):
    '''
    Delete an image from repository

    With payloads=False only the image file itself is considered for
    removal; payload files stay in the pool.
    '''
    # NOTE(review): local guard and file-removal tail reconstructed;
    # the md5s trimming below only makes sense if files are removed after.
    if not self.local:
        raise ISError(u"Repository deletion must be local")
    # get md5 of files related to images (exception is raised if not exists
    md5s = self.getmd5(name, version)
    # cleaning db (must be done before cleaning)
    arrow("Cleaning database")
    arrow("Remove payloads from database", 1)
    self.db.begin()
    for md5 in md5s[1:]:
        self.db.ask("DELETE FROM payload WHERE md5 = ? AND image_md5 = ?",
                    (md5, md5s[0])).fetchone()
    arrow("Remove image from database", 1)
    self.db.ask("DELETE FROM image WHERE md5 = ?",
                (md5s[0],)).fetchone()
    self.db.commit()
    # Removing files
    # if asked don't remove payloads
    if not payloads:
        md5s = [ md5s[0] ]
    for md5 in md5s:
        # _remove_file skips files still referenced by the database
        self._remove_file(md5)
    # update last file
    self.update_last()
def images(self):
    '''
    Return a list of dicts describing every image in the repository.

    NOTE(review): the def line and docstring were lost in extraction;
    the method name is grounded by callers (self[reponame].images()).
    '''
    db_images = self.db.ask("SELECT md5, name, version, date, "
                            "author, description, size "
                            "FROM image ORDER BY name, version").fetchall()
    images = []
    field = ("md5", "name", "version", "date", "author", "description", "size")
    for info in db_images:
        d = dict(zip(field, info))
        d["repo"] = self.config.name
        d["url"] = os.path.join(self.config.path, d["md5"])
        images.append(d)
    return images
def payloads(self):
    '''
    Return a dict of payload information indexed by payload md5.

    NOTE(review): the def line was lost in extraction; the method name is
    grounded by callers (self[reponame].payloads()).
    '''
    db_payloads = self.db.ask("SELECT payload.md5,payload.size,payload.isdir,image.name,image.version,payload.name FROM payload inner join image on payload.image_md5 = image.md5").fetchall()
    res = {}
    for payload in db_payloads:
        md5 = payload[0]
        # create entry if not exists
        if md5 not in res:
            res[md5] = {"size": payload[1], "isdir": payload[2], "images": {}}
        # add image to list
        imgpath = u"%s/%s:%s" % (self.config.name, payload[3], payload[4])
        res[md5]["images"][imgpath] = {"repo": self.config.name,
                                       "imgname": payload[3],
                                       "imgver": payload[4],
                                       "payname": payload[5]}
    return res
def search(self, pattern):
    '''
    Search pattern in a repository

    Matches name, description or author with SQL LIKE and prints hits.
    '''
    # The original used backslash line continuations inside the SQL string;
    # with the source's indentation stripped the keywords fused together
    # ("descriptionFROM"). Adjacent literals keep the query valid.
    like = u"%%%s%%" % pattern
    images = self.db.ask("SELECT name, version, author, description "
                         "FROM image "
                         "WHERE name LIKE ? OR "
                         "description LIKE ? OR "
                         "author LIKE ?",
                         (like, like, like)).fetchall()
    for name, version, author, description in images:
        arrow(u"%s v%s" % (name, version), 1)
        out(u"   #yellow#Author:#reset# %s" % author)
        out(u"   #yellow#Description:#reset# %s" % description)
def _remove_file(self, filename):
    '''
    Remove a filename from pool. Check if it's not needed by db before
    '''
    # look for a reference in both tables (both queries always run)
    referenced = False
    for table in ("image", "payload"):
        row = self.db.ask(u"SELECT md5 FROM %s WHERE md5 = ? LIMIT 1" % table,
                          (filename,)).fetchone()
        if row is not None:
            referenced = True
    # if no reference, delete!
    if referenced:
        arrow(u"%s, skipped" % filename)
    else:
        arrow(u"%s, deleted" % filename)
        os.unlink(os.path.join(self.config.path, filename))
def has(self, name, version):
    '''
    Return the existance of a package

    NOTE(review): the def line was lost in extraction; the signature is
    grounded by the caller self.has(image.name, image.version).
    '''
    return self.db.ask("SELECT name,version FROM image WHERE name = ? AND version = ? LIMIT 1", (name,version)).fetchone() is not None
def get(self, name, version=None):
    '''
    Return an image from a name and version

    NOTE(review): header, guards, the PipeFile open and the pool path were
    lost in extraction; reconstructed from the orphaned raises and the
    fo/memfile/path usages — confirm against upstream.
    '''
    # is no version take the last
    if version is None:
        version = self.last(name)
        if version is None:
            raise ISError(u"Unable to find image %s in %s" % (name,
                                                              self.config.name))
    # get file md5 from db
    r = self.db.ask("select md5 from image where name = ? and version = ? limit 1",
                    (name, version)).fetchone()
    if r is None:
        raise ISError(u"Unable to find image %s v%s in %s" % (name, version,
                                                              self.config.name))
    path = os.path.join(self.config.path, r[0])
    # getting the file
    arrow(u"Loading image %s v%s from repository %s" % (name,
                                                        version,
                                                        self.config.name))
    memfile = cStringIO.StringIO()
    try:
        fo = PipeFile(path, "r")
        fo.consume(memfile)
        fo.close()
    except Exception as e:
        raise ISError(u"Loading image %s v%s failed" % (name, version), e)
    memfile.seek(0)
    pkg = PackageImage(path, fileobj=memfile, md5name=True)
    # verify against the md5 stored in the database
    if pkg.md5 != r[0]:
        raise ISError(u"Image MD5 verification failure")
    return pkg
def getmd5(self, name, version):
    '''
    Return an image md5 and payload md5 from name and version. Order matter !
    Image md5 will still be the first
    '''
    # get file md5 from db
    img_row = self.db.ask("SELECT md5 FROM image WHERE name = ? AND version = ? LIMIT 1",
                          (name, version)).fetchone()
    if img_row is None:
        raise ISError(u"No such image %s version %s" % (name, version))
    payload_rows = self.db.ask("SELECT md5 FROM payload WHERE image_md5 = ?",
                               (img_row[0],)).fetchall()
    result = [img_row[0]]
    for row in payload_rows:
        result.append(row[0])
    return result
# NOTE: a block of source lines was lost here during extraction; only the
# original line-number gutter (537-587) remained and has been removed.
def upgrade_db(self):
    '''
    Upgrade the repository database to the current Database.version.

    Builds a throw-away repository in a temp dir whose pool is symlinked
    to this repository's files, re-adds every image there (which fills a
    fresh database), then swaps the new database in, keeping a .bak of
    the old one.
    '''
    if self.version == Database.version:
        info("Database already up-to-date (%s)" % self.version)
        return
    else:
        arrow("Start repository upgrade")
        arrowlevel(1)
        # Create dummy repository
        tmpdir = tempfile.mkdtemp()
        try:
            repoconf = RepositoryConfig("tmp_migrate_repo", path=tmpdir)
            dstrepo = Repository(repoconf)
            # Symlink content from repository into dummy repo
            for file in os.listdir(self.config.path):
                os.symlink(os.path.join(self.config.path, file),
                           os.path.join(tmpdir, file))
            # drop the symlinked db/last files so init() recreates them fresh
            os.unlink(repoconf.dbpath)
            os.unlink(repoconf.lastpath)
            old_verbosity = installsystems.verbosity
            arrow("Initialize new database")
            # Disable unwanted message during upgrade
            installsystems.verbosity = 0
            dstrepo.init()
            # Restore verbosity
            installsystems.verbosity = old_verbosity
            md5s = self.db.ask("SELECT md5 FROM image").fetchall()
            # Copy images to dummy repository (fill new database)
            arrow("Fill database with images")
            arrowlevel(1)
            installsystems.verbosity = 0
            for img in [PackageImage(os.path.join(self.config.path, md5[0]),
                                     md5name=True) for md5 in md5s]:
                # verbosity is raised only around the per-image arrow message
                installsystems.verbosity = old_verbosity
                arrow("%s v%s" % (img.name, img.version))
                installsystems.verbosity = 0
                dstrepo.add(img)
            installsystems.verbosity = old_verbosity
            arrowlevel(-1)
            arrow("Backup old database")
            shutil.move(self.config.dbpath,
                        os.path.join("%s.bak" % self.config.dbpath))
            # Replace old db with the new from dummy repository
            shutil.move(repoconf.dbpath, self.config.dbpath)
            self.update_last()
            arrowlevel(-1)
            arrow("Repository upgrade complete")
        finally:
            # Remove dummy repository
            shutil.rmtree(tmpdir)
# NOTE: a block of source lines was lost here during extraction; only the
# original line-number gutter (588-651) remained and has been removed.
class Repository_v1(Repository):
    '''
    Read/write access to version-1 repositories (7-column image table,
    no stored UUID, no format / min-version fields).
    '''

    def _add(self, image):
        '''
        Add description to db

        v1 schema: the image table has no is_min_version / img_format
        columns, hence only 7 placeholders.
        '''
        arrow("Adding metadata")
        self.db.begin()
        # insert image information
        arrow("Image", 1)
        self.db.ask("INSERT INTO image values (?,?,?,?,?,?,?)",
                    (image.md5,
                     image.name,
                     image.version,
                     image.date,
                     image.author,
                     image.description,
                     image.size,
                     ))
        # insert data information
        arrow("Payloads", 1)
        for name, obj in image.payload.items():
            self.db.ask("INSERT INTO payload values (?,?,?,?,?)",
                        (obj.md5,
                         image.md5,
                         name,
                         obj.isdir,
                         obj.size,
                         ))
        # on commit
        self.db.commit()
        # update last file
        self.update_last()

    @property
    def uuid(self):
        '''
        Return repository UUID
        '''
        # repository v1 don't have UUID, generate one with repository url
        # encode in ascii for hash digest
        return str(uuid.uuid5(uuid.NAMESPACE_URL,
                              self.config.path.encode('ascii')))

    def images(self):
        '''
        Return a dict of information on images
        '''
        # adjacent literals instead of backslash continuations: the source's
        # stripped indentation made continuation-based SQL fragile
        db_images = self.db.ask("SELECT md5, name, version, date, author, "
                                "description, size "
                                "FROM image ORDER BY name, version").fetchall()
        images = []
        field = ("md5", "name", "version", "date", "author", "description",
                 "size")
        for info in db_images:
            d = dict(zip(field, info))
            d["repo"] = self.config.name
            d["url"] = os.path.join(self.config.path, d["md5"])
            # v1 images predate the format/min-version columns: fixed values
            d["format"] = 1
            d["is_min_version"] = 9
            images.append(d)
        return images
# RepositoryManager (class header lost in extraction):
# implements a cache and a manager for multiple repositories
def __init__(self, cache_path=None, timeout=None, filter=None, search=None):
    '''
    Initialize the repository manager.

    NOTE(review): attribute initialisations (repos, tempfiles, timeout,
    factory) and the cache-path branching were lost in extraction;
    reconstructed from how the rest of the class uses them — confirm
    against upstream (in particular the default timeout value).
    '''
    self.repos = []
    self.tempfiles = []
    self.filter = [] if filter is None else filter
    self.search = [] if search is None else search
    self.timeout = timeout
    self.factory = RepositoryFactory()
    if cache_path is None:
        self.cache_path = None
        debug("No repository cache")
    else:
        # the cache must live on a local filesystem
        if not istools.isfile(cache_path):
            raise NotImplementedError("Repository cache must be local")
        self.cache_path = os.path.abspath(cache_path)
        # create directory if not exists
        if not os.path.exists(self.cache_path):
            os.mkdir(self.cache_path)
        # ensure directories are avaiblable
        if not os.access(self.cache_path, os.W_OK | os.X_OK):
            raise ISError(u"%s is not writable or executable" % self.cache_path)
        debug(u"Repository cache is in %s" % self.cache_path)
def __del__(self):
    '''
    Best-effort cleanup of temporary database files created by _cachify.
    '''
    # delete temporary files (used by db)
    for f in self.tempfiles:
        try:
            debug(u"Removing temporary db file %s" % f)
            # NOTE(review): the unlink and except were lost in extraction;
            # reconstructed (tempfiles holds paths from tempfile.mkstemp)
            os.unlink(f)
        except OSError:
            pass
def __len__(self):
    '''
    Number of registered repositories.
    '''
    repo_count = len(self.repos)
    return repo_count
def __getitem__(self, key):
    '''
    Return a repository by its position in list (int key) or by its
    name / uuid prefix (string key).

    Raises IndexError when no repository matches, TypeError on other
    key types.
    '''
    # NOTE(review): the original docstring was unterminated and the int
    # branch body was lost in extraction; both restored.
    if isinstance(key, int):
        return self.repos[key]
    elif isinstance(key, basestring):
        # match by name first
        for repo in self.repos:
            if repo.config.name == key:
                return repo
        # if not found, match uuid
        # we need at least 4 chars to avoid ambiguous uuid matching
        if len(key) >= 4:
            for repo in [r for r in self.repos if not r.config.offline]:
                if fnmatch.fnmatch(repo.uuid, "%s*" % key):
                    return repo
        else:
            raise ISWarning("Ambiguous argument: we need at least 4 chars "
                            "to match an uuid")
        raise IndexError(u"No repository named: %s" % key)
    else:
        raise TypeError(u"Invalid type %s for %s" % (type(key), key))
def __contains__(self, key):
    '''
    True when a registered repository is named *key*.
    '''
    return key in (repo.config.name for repo in self.repos)
def register(self, config, temp=False, nosync=False, offline=False):
    '''
    Register a repository into the manager.

    temp: repository is stored in a temporary location
    nosync: register repository as online, but no sync is done before
    offline: force the repository to be registered offline

    NOTE(review): docstring quotes, the filter early-return and the
    offline/local/remote branching were lost in extraction; reconstructed
    from the surviving comments — confirm against upstream.
    '''
    # honor the manager's name filter
    if len(self.filter) > 0:
        if config.name not in self.filter:
            debug(u"Filtering repository %s" % config.name)
            return
    if offline or config.offline:
        # repository is offline
        debug(u"Registering offline repository %s (%s)" % (config.path, config.name))
        # we must force offline in cast of argument offline
        config.offline = True
        self.repos.append(self.factory.create(config))
    # if path is local, no needs to create a cache
    elif istools.isfile(config.path):
        debug(u"Registering direct repository %s (%s)" % (config.path, config.name))
        self.repos.append(self.factory.create(config))
    # path is remote, we need to create a cache
    else:
        debug(u"Registering cached repository %s (%s)" % (config.path, config.name))
        self.repos.append(self._cachify(config, temp, nosync))
def _cachify(self, config, temp=False, nosync=False):
    '''
    Return a config of a cached repository from an orignal config file
    :param config: repository configuration
    :param temp: repository db should be stored in a temporary location
    :param nosync: if a cache exists, don't try to update it

    NOTE(review): the try/except scaffolding and several branch headers
    were lost in extraction; reconstructed so every surviving fragment
    falls into place — confirm against upstream.
    '''
    # if cache is disable => temp = True
    if self.cache_path is None:
        temp = True
    try:
        original_dbpath = config.dbpath
        if temp and nosync:
            # a temporary cache without sync can never have content
            raise ISError("sync is disabled")
        elif temp:
            # this is a temporary cached repository
            tempfd, config.dbpath = tempfile.mkstemp()
            os.close(tempfd)
            self.tempfiles.append(config.dbpath)
        else:
            config.dbpath = os.path.join(self.cache_path, config.name)
        if not nosync:
            # Open remote database
            rdb = PipeFile(original_dbpath, timeout=self.timeout)
            # get remote last modification
            if rdb.mtime is None:
                # We doesn't have modification time, we use the last file
                try:
                    rlast = int(PipeFile(config.lastpath, mode='r',
                                         timeout=self.timeout).read().strip())
                except ISError:
                    rlast = -1
            else:
                rlast = rdb.mtime
            # get local last value
            if os.path.exists(config.dbpath):
                llast = int(os.stat(config.dbpath).st_mtime)
            else:
                llast = -2
            # if repo is out of date, download it
            if rlast != llast:
                try:
                    arrow(u"Downloading %s" % original_dbpath)
                    rdb.progressbar = True
                    ldb = open(config.dbpath, "wb")
                    rdb.consume(ldb)
                    ldb.close()
                    rdb.close()
                    istools.chrights(config.dbpath,
                                     uid=config.uid,
                                     gid=config.gid,
                                     mode=config.fmod,
                                     mtime=rlast)
                except:
                    # never keep a half-written cache file
                    if os.path.exists(config.dbpath):
                        os.unlink(config.dbpath)
                    raise
    except ISError as e:
        # if something append bad during caching, we mark repo as offline
        debug(u"Unable to cache repository %s: %s" % (config.name, e))
        config.offline = True
    return self.factory.create(config)
@property
def names(self):
    '''
    List of all registered repository names.
    '''
    result = []
    for repo in self.repos:
        result.append(repo.config.name)
    return result
@property
def onlines(self):
    '''
    Return list of online repository names
    '''
    return [repo.config.name
            for repo in self.repos
            if not repo.config.offline]
@property
def offlines(self):
    '''
    Return list of offlines repository names
    '''
    result = []
    for repo in self.repos:
        if repo.config.offline:
            result.append(repo.config.name)
    return result
def select_images(self, patterns):
    '''
    Return a dict of available images matching the given patterns
    (repo/image:version globs, uuid prefixes allowed for repo).

    NOTE(review): the no-online-repository guard and the final
    `del images[k]` were lost in extraction; reconstructed (the guard
    mirrors the one in select_payloads) — confirm against upstream.
    '''
    if len(self.onlines) == 0:
        raise ISError(u"No online repository")
    ans = {}
    for pattern in patterns:
        path, image, version = Repository.split_image_path(pattern)
        if image is None:
            if path is None or version is None:
                image = "*"
            else:
                # empty pattern
                continue
        # building image list
        images = {}
        for reponame in self.onlines:
            for img in self[reponame].images():
                imgname = u"%s/%s:%s" % (reponame, img["name"], img["version"])
                images[imgname] = img
        # No path means only in searchable repositories
        if path is None:
            for k, v in images.items():
                if v["repo"] not in self.search:
                    uuid = self[v["repo"]].uuid
                    # match uuid
                    if not [uuid for pat in self.search
                            if fnmatch.fnmatch(uuid, '%s*' % pat)]:
                        del images[k]
            path = "*"
        # No version means last version
        if version is None:
            version = "*"
            # keep only the most recent version of each (repo, image) pair
            for repo in set((images[i]["repo"] for i in images)):
                for img in set((images[i]["name"] for i in images if images[i]["repo"] == repo)):
                    versions = [images[i]['version']
                                for i in images
                                if images[i]["repo"] == repo and images[i]["name"] == img]
                    f = lambda x, y: x if istools.compare_versions(x, y) > 0 else y
                    last = reduce(f, versions)
                    versions.remove(last)
                    for rmv in versions:
                        del images[u"%s/%s:%s" % (repo, img, rmv)]
        # if 'path*' do not match a repo name, it may be an uuid, so add
        # globbing for smart uuid matching
        if not fnmatch.filter(self.onlines, "%s*" % path):
            path = "%s*" % path
        # filter with pattern on path
        filter_pattern = u"%s/%s:%s" % (path, image, version)
        for k, img in images.items():
            if not (fnmatch.fnmatch(k, filter_pattern) or
                    fnmatch.fnmatch("%s/%s" % (self[img["repo"]].uuid, k.split("/")[1]), filter_pattern)):
                del images[k]
        ans.update(images)
    return ans
def search(self, pattern):
    '''
    Search pattern accross all registered repositories

    NOTE(review): the def line was lost in extraction; signature and the
    iteration over online repositories reconstructed from the body.
    '''
    for repo in self.onlines:
        arrow(self[repo].config.name)
        self[repo].search(pattern)
def show_images(self, patterns, o_json=False, o_long=False, o_md5=False,
                o_date=False, o_author=False, o_size=False,
                o_url=False, o_description=False):
    '''
    Show images inside manager

    Each o_* flag enables one extra field; o_long enables them all.
    '''
    # get images list
    images = self.select_images(patterns)
    # display result
    if o_json:
        rendered = json.dumps(images)
    else:
        chunks = []
        for imgp in sorted(images.keys()):
            img = images[imgp]
            header = u"%s#R#/#l##b#%s#R#:#p#%s#R#" % (
                img["repo"], img["name"], img["version"])
            if o_md5 or o_long:
                header = header + u" (#y#%s#R#)" % img["md5"]
            chunks.append(header)
            if o_date or o_long:
                chunks.append(u" #l#date:#R# %s" % istools.time_rfc2822(img["date"]))
            if o_author or o_long:
                chunks.append(u" #l#author:#R# %s" % img["author"])
            if o_size or o_long:
                chunks.append(u" #l#size:#R# %s" % istools.human_size(img["size"]))
            if o_url or o_long:
                chunks.append(u" #l#url:#R# %s" % img["url"])
            if o_description or o_long:
                chunks.append(u" #l#description:#R# %s" % img["description"])
        rendered = os.linesep.join(chunks)
    if len(rendered) > 0:
        out(rendered)
def select_payloads(self, patterns):
    '''
    Return a dict of available payloads whose md5 starts with one of
    the given patterns.
    '''
    if len(self.onlines) == 0:
        raise ISError(u"No online repository")
    # building payload list, merging image references across repositories
    paylist = {}
    for reponame in self.onlines:
        for md5, info in self[reponame].payloads().items():
            if md5 not in paylist:
                paylist[md5] = info
            else:
                paylist[md5]["images"].update(info["images"])
    # check if pattern is md5 startpath
    # (the inner loop over paylist was lost in extraction, leaving md5
    # undefined; restored here)
    ans = {}
    for pattern in patterns:
        for md5 in paylist:
            if md5.startswith(pattern):
                ans[md5] = paylist[md5]
    return ans
def show_payloads(self, patterns, o_images=False, o_json=False):
    '''
    Show payloads inside manager

    With o_images each payload also lists the images referencing it.
    '''
    # get payload list
    payloads = self.select_payloads(patterns)
    # display result
    if o_json:
        rendered = json.dumps(payloads)
    else:
        chunks = []
        for payname in sorted(payloads.keys()):
            pay = payloads[payname]
            chunks.append(u"#l##y#%s#R#" % payname)
            chunks.append(u" size: %s" % istools.human_size(pay["size"]))
            chunks.append(u" directory: %s" % bool(pay["isdir"]))
            chunks.append(u" image count: %d" % len(pay["images"]))
            chunks.append(u" names: %s" % ", ".join(set((v["payname"] for v in pay["images"].values()))))
            if o_images:
                chunks.append(u" images:")
                for path, obj in pay["images"].items():
                    chunks.append(u" %s#R#/#l##b#%s#R#:#p#%s#R# (%s)" % (
                        obj["repo"], obj["imgname"], obj["imgver"], obj["payname"]))
        rendered = os.linesep.join(chunks)
    if len(rendered) > 0:
        out(rendered)
def select_repositories(self, patterns):
    '''
    Return the sorted list of registered repository names matching
    any of the given glob patterns.
    '''
    matched = set()
    for pattern in patterns:
        matched.update(fnmatch.filter(self.names, pattern))
    return sorted(matched)
def purge_repositories(self, patterns):