diff --git a/Makefile.am b/Makefile.am
index c77b608637861a18cdeebfc320ac8cb037d01f1d..fb337a5e6046f5146d48f863cea459feddd0561b 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -28,20 +28,37 @@ dist_doc_DATA = README COPYRIGHT LICENSE AUTHORS DEPENDENCIES
dist_bin_SCRIPTS = bin/is
noinst_SCRIPTS = installsystems/__init__.py
-# python library
+# python installsystems package
installsystemsdir=$(pythondir)/installsystems
installsystems_PYTHON = \
installsystems/__init__.py \
installsystems/config.py \
- installsystems/database.py \
installsystems/exception.py \
- installsystems/image.py \
installsystems/printer.py \
- installsystems/repository.py \
- installsystems/tarball.py \
installsystems/template.py \
installsystems/tools.py
+installsystemsimagedir=$(pythondir)/installsystems/image
+installsystemsimage_PYTHON = \
+ installsystems/image/__init__.py \
+ installsystems/image/changelog.py \
+ installsystems/image/image.py \
+ installsystems/image/package.py \
+ installsystems/image/payload.py \
+ installsystems/image/source.py \
+ installsystems/image/tarball.py
+
+installsystemsrepositorydir=$(pythondir)/installsystems/repository
+installsystemsrepository_PYTHON = \
+ installsystems/repository/__init__.py \
+ installsystems/repository/config.py \
+ installsystems/repository/database.py \
+ installsystems/repository/factory.py \
+ installsystems/repository/manager.py \
+ installsystems/repository/repository.py \
+ installsystems/repository/repository1.py \
+ installsystems/repository/repository2.py
+
do_substitution = $(SED) -e 's,[@]pythondir[@],$(pythondir),g' \
-e 's,[@]PACKAGE[@],$(PACKAGE),g' \
-e 's,[@]VERSION[@],$(VERSION),g'
diff --git a/bin/is b/bin/is
index e1e0a5826f1148fc9809f1554cd9f3d4f7c6a58b..7b65f79887ee0b70d8e570de50a75e822e0e9d03 100755
--- a/bin/is
+++ b/bin/is
@@ -20,25 +20,24 @@
InstallSystems Command line Tool
'''
-import os
-import datetime
-import re
-import fnmatch
-import warnings
-import argparse
-import psutil
-import socket
-import sys
-import installsystems
-import installsystems.printer
-import installsystems.tools as istools
-from installsystems.exception import *
-from installsystems.printer import *
-from installsystems.repository import Repository
-from installsystems.repository import RepositoryManager
-from installsystems.repository import RepositoryConfig
-from installsystems.image import PackageImage, SourceImage
+from argparse import ArgumentParser
+from datetime import timedelta
+from installsystems import VERSION
from installsystems.config import MainConfigFile, RepoConfigFile
+from installsystems.exception import ISError, ISException
+from installsystems.image import PackageImage, SourceImage
+from installsystems.printer import arrow, arrowlevel, setmode
+from installsystems.printer import out, warn, error, debug, confirm
+from installsystems.repository import Repository, RepositoryManager, RepositoryConfig
+from installsystems.tools import chroot, prepare_chroot, unprepare_chroot
+from installsystems.tools import isfile, smd5sum, argv
+from os import getpid, getcwdu, chdir
+from psutil import IOPRIO_CLASS_RT, IOPRIO_CLASS_BE, IOPRIO_CLASS_IDLE
+from psutil import Process, IOPRIO_CLASS_NONE
+from socket import setdefaulttimeout
+
+# used by os.path.isfile
+import os
################################################################################
# Common functions
@@ -52,15 +51,15 @@ def load_repositories(args):
if args.no_cache:
args.cache = None
# split filter and search in list
- args.repo_filter = Repository.split_repository_list(args.repo_filter)
- args.repo_search = Repository.split_repository_list(args.repo_search)
+ args.repo_filter = Repository.split_list(args.repo_filter)
+ args.repo_search = Repository.split_list(args.repo_search)
# init repo cache object
repoman = RepositoryManager(args.cache, timeout=args.repo_timeout or args.timeout,
filter=args.repo_filter, search=args.repo_search)
# register repositories (order matter)
# load repo configs from command line
if args.repo_path != "":
- repoconf = RepositoryConfig(istools.smd5sum(args.repo_path)[:8],
+ repoconf = RepositoryConfig(smd5sum(args.repo_path)[:8],
path=args.repo_path)
repoman.register(repoconf, temp=True, nosync=args.no_sync)
# load repo configs from config
@@ -80,7 +79,7 @@ def get_images(patterns, repoman, local=True, min=None, max=None):
ans = []
for pattern in patterns:
# check if image is a local file
- if local and istools.isfile(pattern) and os.path.isfile(pattern):
+ if local and isfile(pattern) and os.path.isfile(pattern):
ans.append((pattern, None))
else: # we need to find image in a repository
ans += sorted(repoman.select_images([pattern]).items())
@@ -128,8 +127,8 @@ def c_build(args):
arrow("Build %s" % path)
# chdir inside path if --chdir
if args.chdir:
- cwd = os.getcwdu()
- os.chdir(path)
+ cwd = getcwdu()
+ chdir(path)
path = "."
arrowlevel(1)
# load source image
@@ -138,19 +137,19 @@ def c_build(args):
dt = simg.build(force=args.force, force_payload=args.payload,
check=not args.no_check, script=not args.no_script)
gdt += dt
- arrow(u"Build time: %s" % datetime.timedelta(seconds=dt))
+ arrow(u"Build time: %s" % timedelta(seconds=dt))
if args.chdir:
- os.chdir(cwd)
+ chdir(cwd)
arrowlevel(-1)
if len(args.paths) > 1:
- arrow(u"Global build time: %s" % datetime.timedelta(seconds=gdt))
+ arrow(u"Global build time: %s" % timedelta(seconds=gdt))
def c_cat(args):
'''
Display files inside a packaged image
'''
repoman = load_repositories(args)
- image, repo = next(get_images([args.pattern], repoman, min=1, max=1))
+ image = next(get_images([args.pattern], repoman, min=1, max=1))[0]
for filename in args.file:
image.cat(filename)
@@ -160,7 +159,7 @@ def c_changelog(args):
'''
repoman = load_repositories(args)
images = list(get_images(args.pattern, repoman, min=1))
- for image, repo in images:
+ for image in images:
if len(images) > 1:
out("--- #yellow#image: %s v%s#reset#" % (image.name, image.version))
if args.all_version:
@@ -183,7 +182,7 @@ def c_chroot(args):
'''
Helper to go cleanly inside a chroot
'''
- istools.chroot(args.path, shell=args.shell, mount=not args.no_mount)
+ chroot(args.path, shell=args.shell, mount=not args.no_mount)
def c_clean(args):
'''
@@ -334,7 +333,7 @@ def c_install(args):
arrow(u"Installing %s v%s" % (image.name, image.version))
# let's go
dt = image.run(args.parser, subparser, run_setup=not args.dry_run)
- arrow(u"Install time: %s" % datetime.timedelta(seconds=dt))
+ arrow(u"Install time: %s" % timedelta(seconds=dt))
def c_list(args):
'''
@@ -421,7 +420,7 @@ def c_prepare_chroot(args):
'''
Helper to prepare a path to be chrooted
'''
- istools.prepare_chroot(args.path, mount=not args.no_mount)
+ prepare_chroot(args.path, mount=not args.no_mount)
def c_repo(args):
'''
@@ -451,7 +450,7 @@ def c_unprepare_chroot(args):
'''
Helper to remove chroot preparation of a path
'''
- istools.unprepare_chroot(args.path, mount=not args.no_umount)
+ unprepare_chroot(args.path, mount=not args.no_umount)
def c_upgrade(args):
'''
@@ -464,16 +463,16 @@ def c_version(args):
'''
Display installsystems version
'''
- out(installsystems.version)
+ out(VERSION)
def arg_parser_init():
'''
Create command parser
'''
# top level argument parsing
- parser = argparse.ArgumentParser()
+ parser = ArgumentParser()
parser.add_argument("-V", "--version", action="version",
- version=installsystems.version)
+ version=VERSION)
# exclusive group on verbosity
g = parser.add_mutually_exclusive_group()
g.add_argument("-v", "--verbosity", default=1,
@@ -761,29 +760,26 @@ def main():
Program main
'''
try:
+ # by default full debug
+ setmode(2)
# init arg parser
arg_parser = arg_parser_init()
# encode command line arguments to utf-8
- args = istools.argv()[1:]
+ args = argv()[1:]
# first partial parsing, to get early debug and config path
options = arg_parser.parse_known_args(args=args)[0]
# set early command line verbosity and color
- installsystems.verbosity = options.verbosity
- installsystems.printer.NOCOLOR = options.no_color
+ setmode(options.verbosity, options.no_color)
# load main config file options
config_parser = MainConfigFile(options.config, "installsystems")
options = config_parser.parse()
# second partial parsing, command line option overwrite config file
options = arg_parser.parse_known_args(args=args, namespace=options)[0]
# set verbosity and color
- installsystems.verbosity = options.verbosity
- installsystems.printer.NOCOLOR = options.no_color
- # no warning if we are not in debug mode
- if installsystems.verbosity < 2:
- warnings.filterwarnings("ignore")
+ setmode(options.verbosity, options.no_color)
# nice and ionice process
if options.nice is not None or options.ionice_class is not None:
- proc = psutil.Process(os.getpid())
+ proc = Process(getpid())
if options.nice is not None:
try:
proc.nice = options.nice
@@ -793,10 +789,10 @@ def main():
if options.ionice_class is not None:
try:
ioclassmap = {
- "none": psutil.IOPRIO_CLASS_NONE,
- "rt": psutil.IOPRIO_CLASS_RT,
- "be": psutil.IOPRIO_CLASS_BE,
- "idle": psutil.IOPRIO_CLASS_IDLE}
+ "none": IOPRIO_CLASS_NONE,
+ "rt": IOPRIO_CLASS_RT,
+ "be": IOPRIO_CLASS_BE,
+ "idle": IOPRIO_CLASS_IDLE}
proc.set_ionice(ioclassmap[options.ionice_class], options.ionice_level)
debug(u"Setting ionice to class %s, level %s" %
(options.ionice_class, options.ionice_level))
@@ -804,7 +800,7 @@ def main():
warn(u"Unable to ionice process to %s" % options.ionice_class)
# set timeout option
if options.timeout is not None:
- socket.setdefaulttimeout(options.timeout)
+ setdefaulttimeout(options.timeout)
debug("Global timeout setted to %ds" % options.timeout)
# except for install command we parse all args!
# install command is responsible of parsing
@@ -813,15 +809,15 @@ def main():
# let's go
options.func(options)
exit(0)
- except UnicodeDecodeError as e:
+ except UnicodeDecodeError:
error("Unable to decode some characters. Check your locale settings.")
except KeyboardInterrupt:
warn("Keyboard Interrupted")
exit(1)
- except ISError as e:
- error(exception=e)
- except Exception as e:
- error(u"Unexpected error, please report it with debug enabled", exception=e)
+ except ISException as err:
+ error(exception=err)
+ except Exception as err:
+ error(u"Unexpected error, please report it with debug enabled", exception=err)
# Entry point
diff --git a/installsystems/__init__.py.in b/installsystems/__init__.py.in
index 1b868ec31b03c1564aa7b8656b12caf0c476835e..f7dc24bf5715db2d209b60ac290f942402d56f10 100644
--- a/installsystems/__init__.py.in
+++ b/installsystems/__init__.py.in
@@ -17,33 +17,36 @@
# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
'''
-InstallSystems module
+InstallSystems package
'''
-canonical_name="installsystems"
-version = "@VERSION@"
-verbosity = 1 # 0: quiet, 1: normal, 2: debug
-
def git_version():
- import os
- import sys
+ '''
+ Retrieve current git version
+ '''
+ from os import getcwd, chdir, devnull
+ from os.path import dirname
from subprocess import check_output, CalledProcessError
+ from sys import argv
version = ""
- cwd = os.getcwd()
+ cwd = getcwd()
try:
- os.chdir(os.path.dirname(sys.argv[0]))
+ chdir(dirname(argv[0]))
version = check_output(["git", "describe", "--tags", "--always" ],
- stdin=open(os.devnull, 'rb'),
- stderr=open(os.devnull, "wb")).strip()
+ stdin=open(devnull, 'rb'),
+ stderr=open(devnull, "wb")).strip()
if len(version) > 0:
version = "-" + version
except (OSError, CalledProcessError):
pass
finally:
- os.chdir(cwd)
+ chdir(cwd)
return version
-if version.find("+git") >= 0:
- version += git_version()
+CANONICAL_NAME = "@PACKAGE@"
+VERSION = "@VERSION@"
+
+if VERSION.find("+git") >= 0:
+ VERSION += git_version()
__all__ = []
diff --git a/installsystems/config.py b/installsystems/config.py
index a2430942225769786a30e011d8d828d909c36780..244f19686b21d3600d3894e39f38b39132dc10aa 100644
--- a/installsystems/config.py
+++ b/installsystems/config.py
@@ -17,53 +17,18 @@
# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
'''
-InstallSystems Configuration files class
+InstallSystems configuration files module
'''
-import codecs
-import os
-import sys
from argparse import Namespace
from configobj import ConfigObj, flatten_errors
+from installsystems.exception import ISWarning, ISError
+from installsystems.printer import warn, debug
+from installsystems.repository.config import RepositoryConfig
+from os import access, mkdir, getuid, R_OK, W_OK, X_OK
+from os.path import join, expanduser, isfile, basename, abspath, exists, isdir
+from sys import argv
from validate import Validator
-from installsystems.exception import *
-from installsystems.printer import *
-from installsystems.repository import RepositoryConfig
-
-# This must not be an unicode string, because configobj don't decode configspec
-# with the provided encoding
-MAIN_CONFIG_SPEC = '''\
-[installsystems]
-verbosity = integer(0, 2)
-repo_config = string
-repo_search = string
-repo_filter = string
-repo_timeout = integer
-cache = string(default=%s)
-timeout = integer
-no_cache = boolean
-no_check = boolean
-no-sync = boolean
-no_color = boolean
-nice = integer
-ionice_class = option("none", "rt", "be", "idle")
-ionice_level = integer
-'''
-
-# This must not be an unicode string, because configobj don't decode configspec
-# with the provided encoding
-REPO_CONFIG_SPEC = '''\
-[__many__]
- path = string
- fmod = string
- dmod = string
- uid = string
- gid = string
- offline = boolean
- lastpath = string
- dbpath = string
-'''
-
class ConfigFile(object):
'''
@@ -75,8 +40,8 @@ class ConfigFile(object):
Filename can be full path to config file or a name in config directory
'''
# try to get filename in default config dir
- if os.path.isfile(filename):
- self.path = os.path.abspath(filename)
+ if isfile(filename):
+ self.path = abspath(filename)
else:
self.path = self._config_path(filename)
# loading config file if exists
@@ -104,53 +69,58 @@ class ConfigFile(object):
# remove wrong value to avoid merging it with argparse value
del self.config[section[0]][optname]
- def _config_path(self, name):
+ @staticmethod
+ def _config_path(name):
'''
Return path of the best config file
'''
- for cf in [ os.path.join(os.path.expanduser(u"~/.config/installsystems/%s.conf" % name)),
- u"/etc/installsystems/%s.conf" % name ]:
- if (os.path.isfile(cf) and os.access(cf, os.R_OK)):
- return cf
+ for cfp in [join(expanduser(u"~/.config/installsystems/%s.conf" %name)),
+ u"/etc/installsystems/%s.conf" % name ]:
+ if (isfile(cfp) and access(cfp, R_OK)):
+ return cfp
return None
+ @property
+ def configspec(self):
+ '''Return configobj spec'''
+ raise NotImplementedError()
+
class MainConfigFile(ConfigFile):
'''
Program configuration class
'''
- def __init__(self, filename, prefix=os.path.basename(sys.argv[0])):
+ def __init__(self, filename, prefix=basename(argv[0])):
self.prefix = prefix
- self.configspec = (MAIN_CONFIG_SPEC % self.cache).splitlines()
try:
super(MainConfigFile, self).__init__(filename)
debug(u"Loading main config file: %s" % self.path)
except ISWarning:
debug("No main config file to load")
- except Exception as e:
- raise ISError(u"Unable load main config file %s" % self.path, e)
+ except Exception as exc:
+ raise ISError(u"Unable load main config file %s" % self.path, exc)
def _cache_paths(self):
'''
List all candidates to cache directories. Alive or not
'''
- dirs = [os.path.expanduser("~/.cache"), "/var/tmp", "/tmp"]
+ dirs = [expanduser("~/.cache"), "/var/tmp", "/tmp"]
# we have an additional directory if we are root
- if os.getuid() == 0:
+ if getuid() == 0:
dirs.insert(0, "/var/cache")
- return map(lambda x: os.path.join(x, self.prefix), dirs)
+ return [ join(x, self.prefix) for x in dirs ]
def _cache_path(self):
'''
Return path of the best cache directory
'''
# find a good directory
- for di in self._cache_paths():
- if (os.path.exists(di)
- and os.path.isdir(di)
- and os.access(di, os.R_OK|os.W_OK|os.X_OK)):
- return di
+ for directory in self._cache_paths():
+ if (exists(directory)
+ and isdir(directory)
+ and access(directory, R_OK|W_OK|X_OK)):
+ return directory
return None
@property
@@ -159,12 +129,12 @@ class MainConfigFile(ConfigFile):
Find a cache directory
'''
if self._cache_path() is None:
- for di in self._cache_paths():
+ for directory in self._cache_paths():
try:
- os.mkdir(di)
+ mkdir(directory)
break
- except Exception as e:
- debug(u"Unable to create %s: %s" % (di, e))
+ except Exception as exc:
+ debug(u"Unable to create %s: %s" % (directory, exc))
return self._cache_path()
def parse(self, namespace=None):
@@ -178,6 +148,11 @@ class MainConfigFile(ConfigFile):
setattr(namespace, option, value)
return namespace
+ @property
+ def configspec(self):
+ '''Return configobj spec'''
+ return (MAIN_CONFIG_SPEC % self.cache).splitlines()
+
class RepoConfigFile(ConfigFile):
'''
@@ -188,7 +163,7 @@ class RepoConfigFile(ConfigFile):
# seting default config
self._config = {}
self._repos = []
- self.configspec = REPO_CONFIG_SPEC.splitlines()
+
try:
super(RepoConfigFile, self).__init__(filename)
debug(u"Loading repository config file: %s" % self.path)
@@ -207,9 +182,11 @@ class RepoConfigFile(ConfigFile):
if "path" not in self.config[rep]:
continue
# get all options in repo
- self._repos.append(RepositoryConfig(rep, **dict(self.config[rep].items())))
- except Exception as e:
- raise ISError(u"Unable to load repository file %s" % self.path, e)
+ self._repos.append(
+ RepositoryConfig(rep, **dict(self.config[rep].items()))
+ )
+ except Exception as exc:
+ raise ISError(u"Unable to load repository file %s" % self.path, exc)
@property
def repos(self):
@@ -218,3 +195,43 @@ class RepoConfigFile(ConfigFile):
'''
# deep copy
return list(self._repos)
+
+ @property
+ def configspec(self):
+ '''Return configobj spec'''
+ return REPO_CONFIG_SPEC.splitlines()
+
+
+# This must not be an unicode string, because configobj don't decode configspec
+# with the provided encoding
+MAIN_CONFIG_SPEC = '''\
+[installsystems]
+verbosity = integer(0, 2)
+repo_config = string
+repo_search = string
+repo_filter = string
+repo_timeout = integer
+cache = string(default=%s)
+timeout = integer
+no_cache = boolean
+no_check = boolean
+no-sync = boolean
+no_color = boolean
+nice = integer
+ionice_class = option("none", "rt", "be", "idle")
+ionice_level = integer
+'''
+
+# This must not be an unicode string, because configobj don't decode configspec
+# with the provided encoding
+REPO_CONFIG_SPEC = '''\
+[__many__]
+ path = string
+ fmod = string
+ dmod = string
+ uid = string
+ gid = string
+ offline = boolean
+ lastpath = string
+ dbpath = string
+'''
diff --git a/installsystems/exception.py b/installsystems/exception.py
index 2ba9a2b3166a78095a24d1e752b8a60f48b7c942..67af5f26a9ae1ed1952e9d9aa007f130ce051761 100644
--- a/installsystems/exception.py
+++ b/installsystems/exception.py
@@ -1,30 +1,27 @@
# -*- python -*-
# -*- coding: utf-8 -*-
-# Installsystems - Python installation framework
-# Copyright © 2011-2012 Smartjog S.A
-# Copyright © 2011-2012 Sébastien Luttringer
+# This file is part of Installsystems.
#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
#
-# This program is distributed in the hope that it will be useful,
+# Installsystems is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
+# GNU Lesser General Public License for more details.
#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
'''
InstallSystems Exceptions
'''
-import traceback
-import sys
+from traceback import print_tb, print_exc, format_exception_only
+from sys import exc_info, stderr
class ISException(Exception):
'''
@@ -32,8 +29,9 @@ class ISException(Exception):
'''
def __init__(self, message=u"", exception=None):
+ Exception.__init__(self)
self.message = unicode(message)
- self.exception = None if exception is None else sys.exc_info()
+ self.exception = None if exception is None else exc_info()
def __str__(self):
'''
@@ -44,7 +42,7 @@ class ISException(Exception):
else:
return self.message
- def print_sub_tb(self, fd=sys.stderr):
+ def print_sub_tb(self, fd=stderr):
'''
Print stored exception traceback and exception message
'''
@@ -53,20 +51,21 @@ class ISException(Exception):
return
# print traceback and exception separatly to avoid recursive print of
# "Traceback (most recent call last)" from traceback.print_exception
- traceback.print_tb(self.exception[2], file=fd)
- fd.write("".join(traceback.format_exception_only(self.exception[0], self.exception[1])))
+ print_tb(self.exception[2], file=fd)
+ fd.write("".join(format_exception_only(self.exception[0],
+ self.exception[1])))
# recursively call traceback print on ISException error
if isinstance(self.exception[1], ISException):
self.exception[1].print_sub_tb()
- def print_tb(self, fd=sys.stderr):
+ def print_tb(self, fd=stderr):
'''
Print traceback from embeded exception or current one
'''
from installsystems.printer import out
# coloring
out("#l##B#", fd=fd, endl="")
- traceback.print_exc(file=fd)
+ print_exc(file=fd)
self.print_sub_tb(fd)
# reset color
out("#R#", fd=fd, endl="")
@@ -89,5 +88,5 @@ class InvalidSourceImage(ISError):
Invalid source image errors
'''
- def __init(self, message=u"", exception=None):
- ISException(self, u"Invalid source image: " + message, exception)
+ def __init__(self, message=u"", exception=None):
+ ISError.__init__(self, u"Invalid source image: " + message, exception)
diff --git a/installsystems/image.py b/installsystems/image.py
deleted file mode 100644
index 3e48cbacc164699eac53e6dd78abed77515a4a84..0000000000000000000000000000000000000000
--- a/installsystems/image.py
+++ /dev/null
@@ -1,1461 +0,0 @@
-# -*- python -*-
-# -*- coding: utf-8 -*-
-
-# This file is part of Installsystems.
-#
-# Installsystems is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Installsystems is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
-
-
-'''
-Image stuff
-'''
-
-import codecs
-import configobj
-import cStringIO
-import difflib
-import imp
-import fnmatch
-import json
-import locale
-import math
-import os
-import re
-import shutil
-import stat
-import subprocess
-import sys
-import tarfile
-import time
-import validate
-import installsystems
-import installsystems.template as istemplate
-import installsystems.tools as istools
-from installsystems.exception import *
-from installsystems.printer import *
-from installsystems.tools import PipeFile
-from installsystems.tarball import Tarball
-
-
-# This must not be an unicode string, because configobj don't decode configspec
-# with the provided encoding
-DESCRIPTION_CONFIG_SPEC = '''\
-[image]
-name = IS_name
-version = IS_version
-description = string
-author = string
-is_min_version = IS_min_version
-
-[compressor]
-__many__ = force_list
-'''
-
-
-class Image(object):
- '''
- Abstract class of images
- '''
-
- extension = ".isimage"
- default_compressor = "gzip"
-
- @staticmethod
- def check_image_name(buf):
- '''
- Check if @buf is a valid image name
- '''
- if re.match("^[-_\w]+$", buf) is None:
- raise ISError(u"Invalid image name %s" % buf)
- # return the image name, because this function is used by ConfigObj
- # validate to ensure the image name is correct
- return buf
-
- @staticmethod
- def check_image_version(buf):
- '''
- Check if @buf is a valid image version
- '''
- if re.match("^\d+(\.\d+)*(([~+]).*)?$", buf) is None:
- raise ISError(u"Invalid image version %s" % buf)
- # return the image version, because this function is used by ConfigObj
- # validate to ensure the image version is correct
- return buf
-
- @staticmethod
- def check_min_version(version):
- '''
- Check InstallSystems min version
- '''
- if istools.compare_versions(installsystems.version, version) < 0:
- raise ISError("Minimum Installsystems version not satisfied "
- "(%s)" % version)
- # return the version, because this function is used by ConfigObj
- # validate to ensure the version is correct
- return version
-
- @staticmethod
- def compare_versions(v1, v2):
- '''
- For backward compatibility, image class offer a method to compare image versions
- But code is now inside tools
- '''
- return istools.compare_versions(v1, v2)
-
- def __init__(self):
- self.modules = {}
-
- def _load_module(self, name, filename, code=None):
- '''
- Create a python module from a string or a filename
- '''
- # unicode safety check
- assert(isinstance(name, unicode))
- assert(isinstance(filename, unicode))
- assert(code is None or isinstance(code, str))
- # load code if not provided
- if code is None:
- code = open(filename, "r").read()
- # create an empty module
- module = imp.new_module(name)
- # compile module code
- try:
- bytecode = compile(code, filename.encode(locale.getpreferredencoding()), "exec")
- except Exception as e:
- raise ISError(u"Unable to compile %s" % filename, e)
- # load module
- try:
- self.secure_exec_bytecode(bytecode, name, module.__dict__)
- except Exception as e:
- raise ISError(u"Unable to load %s" % filename, e)
- return module
-
- def load_modules(self, select_scripts):
- '''
- Load all modules selected by generator select_scripts
-
- select_scripts is a generator which return tuples (fp, fn, fc) where:
- fp is unicode file path of the module
- fn is unicode file name of the module (basename)
- fc is unicode file content
- '''
- arrow(u"Load lib scripts")
- old_level = arrowlevel(1)
- self.modules = {}
- for fp, fn, fc in select_scripts():
- # check input unicode stuff
- assert(isinstance(fp, unicode))
- assert(isinstance(fn, unicode))
- assert(isinstance(fc, str))
- arrow(fn)
- module_name = os.path.splitext(fn.split('-', 1)[1])[0]
- self.modules[module_name] = self._load_module(module_name, fp, fc)
- arrowlevel(level=old_level)
-
- def run_scripts(self, scripts_name, select_scripts, exec_directory, global_dict):
- '''
- Execute scripts selected by generator select_scripts
-
- scripts_name is only for display the first arrow before execution
-
- select_scripts is a generator which return tuples (fp, fn, fc) where:
- fp is file path of the scripts
- fn is file name of the scripts (basename)
- fc is file content
-
- exec_directory is the cwd of the running script
-
- global_dict is the globals environment given to scripts
- '''
- arrow(u"Run %s scripts" % scripts_name)
- # backup current directory and loaded modules
- cwd = os.getcwd()
- for fp, fn, fc in select_scripts():
- # check input unicode stuff
- assert(isinstance(fp, unicode))
- assert(isinstance(fn, unicode))
- assert(isinstance(fc, str))
- arrow(fn, 1)
- # backup arrow level
- old_level = arrowlevel(2)
- # chdir in exec_directory
- os.chdir(exec_directory)
- # compile source code
- try:
- bytecode = compile(fc, fn.encode(locale.getpreferredencoding()), "exec")
- except Exception as e:
- raise ISError(u"Unable to compile script %s" % fp, e)
- # add current image
- global_dict["image"] = self
- # execute source code
- self.secure_exec_bytecode(bytecode, fp, global_dict)
- arrowlevel(level=old_level)
- os.chdir(cwd)
-
- def secure_exec_bytecode(self, bytecode, path, global_dict):
- '''
- Execute bytecode in a clean modules' environment, without altering
- Installsystems' sys.modules
- '''
- # system modules dict
- sysmodules = sys.modules
- sysmodules_backup = sysmodules.copy()
- # autoload modules
- global_dict.update(self.modules)
- try:
- # replace system modules by image loaded
- # we must use the same directory and not copy it (probably C reference)
- sysmodules.clear()
- # sys must be in sys.module to allow loading of modules
- sysmodules["sys"] = sys
- sysmodules.update(self.modules)
- # we need installsystems.printer to conserve arrow level
- sysmodules["installsystems.printer"] = installsystems.printer
- exec bytecode in global_dict
- except SystemExit as e:
- # skip a script which call exit(0) or exit()
- if e.code is None or e.code == 0:
- return
- else:
- raise ISError(u"Script %s exits with status" % path, e)
- except Exception as e:
- raise ISError(u"Fail to execute script %s" % path, e)
- finally:
- sysmodules.clear()
- sysmodules.update(sysmodules_backup)
-
-
-class SourceImage(Image):
- '''
- Image source manipulation class
- '''
-
- # format should be a float X.Y but for compatibility reason it's a string
- # before version 6, it's strict string comparaison
- format = "2.0"
-
-
- @classmethod
- def create(cls, path, force=False):
- '''
- Create an empty source image
- '''
- # check local repository
- if not istools.isfile(path):
- raise NotImplementedError("SourceImage must be local")
- # main path
- build_path = os.path.join(path, "build")
- parser_path = os.path.join(path, "parser")
- setup_path = os.path.join(path, "setup")
- payload_path = os.path.join(path, "payload")
- lib_path = os.path.join(path, "lib")
- # create base directories
- arrow("Creating base directories")
- try:
- for d in (path, build_path, parser_path, setup_path, payload_path,
- lib_path):
- if not os.path.exists(d) or not os.path.isdir(d):
- os.mkdir(d)
- except Exception as e:
- raise ISError(u"Unable to create directory: %s" % d, e)
- # create example files
- arrow("Creating examples")
- arrowlevel(1)
- # create dict of file to create
- examples = {}
- # create description example from template
- examples["description"] = {"path": "description",
- "content": istemplate.description % {
- "name": "",
- "version": "1",
- "description": "",
- "author": "",
- "is_min_version": installsystems.version,
- "compressor": "gzip = *\nnone = *.gz, *.bz2, *.xz"}}
- # create changelog example from template
- examples["changelog"] = {"path": "changelog", "content": istemplate.changelog}
- # create build example from template
- examples["build"] = {"path": "build/01-build.py", "content": istemplate.build}
- # create parser example from template
- examples["parser"] = {"path": "parser/01-parser.py", "content": istemplate.parser}
- # create setup example from template
- examples["setup"] = {"path": "setup/01-setup.py", "content": istemplate.setup}
- for name in examples:
- try:
- arrow(u"Creating %s example" % name)
- expath = os.path.join(path, examples[name]["path"])
- if not force and os.path.exists(expath):
- warn(u"%s already exists. Skipping!" % expath)
- continue
- open(expath, "w").write(examples[name]["content"])
- except Exception as e:
- raise ISError(u"Unable to create example file", e)
- try:
- # setting executable rights on files in setup and parser
- arrow("Setting executable rights on scripts")
- umask = os.umask(0)
- os.umask(umask)
- for dpath in (build_path, parser_path, setup_path):
- for f in os.listdir(dpath):
- istools.chrights(os.path.join(dpath, f), mode=0777 & ~umask)
- except Exception as e:
- raise ISError(u"Unable to set rights on %s" % pf, e)
- arrowlevel(-1)
-
- def __init__(self, path):
- '''
- Initialize source image
- '''
- Image.__init__(self)
- # check local repository
- if not istools.isfile(path):
- raise NotImplementedError("SourceImage must be local")
- self.base_path = os.path.abspath(path)
- for pathtype in ("build", "parser", "setup", "payload", "lib"):
- setattr(self, u"%s_path" % pathtype, os.path.join(self.base_path, pathtype))
- self.check_source_image()
- self.description = self.parse_description()
- self.changelog = self.parse_changelog()
- self.modules = {}
- # script tarball path
- self.image_name = u"%s-%s%s" % (self.description["name"],
- self.description["version"],
- self.extension)
-
- def check_source_image(self):
- '''
- Check if we are a valid SourceImage directories
- A vaild SourceImage contains at least a description and a setup directory.
- Payload directory is mandatory is build scripts are present
- '''
- # Ensure setup_path exists
- if not os.path.exists(self.setup_path):
- raise InvalidSourceImage(u"setup directory is missing.")
- # Ensure description exists
- if not os.path.exists(os.path.join(self.base_path, u"description")):
- raise InvalidSourceImage(u"no description file.")
- # Ensure payload directory exists if there is build directory
- if not os.path.exists(self.payload_path) and os.path.exists(self.build_path):
- raise InvalidSourceImage(u"payload directory is mandatory with a build directory.")
- # Ensure directories are directories and accessible
- for d in (self.base_path, self.build_path, self.parser_path,
- self.setup_path, self.payload_path, self.lib_path):
- if os.path.exists(d):
- if not os.path.isdir(d):
- raise InvalidSourceImage(u"%s is not a directory." % d)
- if not os.access(d, os.R_OK|os.X_OK):
- raise InvalidSourceImage(u"unable to access to %s." % d)
-
- def build(self, force=False, force_payload=False, check=True, script=True):
- '''
- Create packaged image
- '''
- # check if free to create script tarball
- if os.path.exists(self.image_name) and force == False:
- raise ISError("Tarball already exists. Remove it before")
- # register start time
- t0 = time.time()
- # check python scripts
- if check:
- for d in (self.build_path, self.parser_path, self.setup_path,
- self.lib_path):
- if os.path.exists(d):
- self.check_scripts(d)
- # load modules
- self.load_modules(lambda: self.select_scripts(self.lib_path))
- # remove list
- rl = set()
- # run build script
- if script and os.path.exists(self.build_path):
- rl |= set(self.run_build())
- if force_payload:
- rl |= set(self.select_payloads())
- # remove payloads
- self.remove_payloads(rl)
- # create payload files
- self.create_payloads()
- # generate a json description
- jdesc = self.generate_json_description()
- # creating scripts tarball
- self.create_image(jdesc)
- # compute building time
- return int(time.time() - t0)
-
- def create_image(self, jdescription):
- '''
- Create a script tarball in current directory
- '''
- # create tarball
- arrow("Creating image tarball")
- arrowlevel(1)
- arrow(u"Name %s" % self.image_name)
- try:
- try:
- tarball = Tarball.open(self.image_name, mode="w:gz", dereference=True)
- except Exception as e:
- raise ISError(u"Unable to create tarball %s" % self.image_name, e)
- # add description.json
- arrow("Add description.json")
- tarball.add_str("description.json", jdescription, tarfile.REGTYPE, 0644)
- # add changelog
- if self.changelog is not None:
- arrow("Add changelog")
- tarball.add_str("changelog", self.changelog.verbatim, tarfile.REGTYPE, 0644)
- # add format
- arrow("Add format")
- tarball.add_str("format", self.format, tarfile.REGTYPE, 0644)
- # add setup scripts
- self.add_scripts(tarball, self.setup_path)
- # add optional scripts
- for d in (self.build_path, self.parser_path, self.lib_path):
- if os.path.exists(d):
- self.add_scripts(tarball, d)
- # closing tarball file
- tarball.close()
- except (SystemExit, KeyboardInterrupt):
- if os.path.exists(self.image_name):
- os.unlink(self.image_name)
- arrowlevel(-1)
-
- def describe_payload(self, name):
- '''
- Return information about a payload
- '''
- ans = {}
- ans["source_path"] = os.path.join(self.payload_path, name)
- ans["dest_path"] = u"%s-%s%s" % (self.description["name"],
- name,
- Payload.extension)
- ans["link_path"] = u"%s-%s-%s%s" % (self.description["name"],
- self.description["version"],
- name,
- Payload.extension)
- source_stat = os.stat(ans["source_path"])
- ans["isdir"] = stat.S_ISDIR(source_stat.st_mode)
- ans["uid"] = source_stat.st_uid
- ans["gid"] = source_stat.st_gid
- ans["mode"] = stat.S_IMODE(source_stat.st_mode)
- ans["mtime"] = source_stat.st_mtime
- ans["compressor"] = self.compressor(name)
- return ans
-
- def select_payloads(self):
- '''
- Return a generator on image payloads
- '''
- if not os.path.isdir(self.payload_path):
- raise StopIteration()
- for payname in os.listdir(self.payload_path):
- yield payname
-
- def remove_payloads(self, paylist):
- '''
- Remove payload list if exists
- '''
- arrow("Removing payloads")
- for pay in paylist:
- arrow(pay, 1)
- desc = self.describe_payload(pay)
- for f in (desc["dest_path"], desc["link_path"]):
- if os.path.lexists(f):
- os.unlink(f)
-
- def create_payloads(self):
- '''
- Create all missing data payloads in current directory
- Doesn't compute md5 during creation because tarball can
- be created manually
- Also create symlink to versionned payload
- '''
- arrow("Creating payloads")
- for payload_name in self.select_payloads():
- paydesc = self.describe_payload(payload_name)
- if os.path.exists(paydesc["link_path"]):
- continue
- arrow(payload_name, 1)
- try:
- # create non versionned payload file
- if not os.path.exists(paydesc["dest_path"]):
- if paydesc["isdir"]:
- self.create_payload_tarball(paydesc["dest_path"],
- paydesc["source_path"],
- paydesc["compressor"])
- else:
- self.create_payload_file(paydesc["dest_path"],
- paydesc["source_path"],
- paydesc["compressor"])
- # create versionned payload file
- if os.path.lexists(paydesc["link_path"]):
- os.unlink(paydesc["link_path"])
- os.symlink(paydesc["dest_path"], paydesc["link_path"])
- except Exception as e:
- raise ISError(u"Unable to create payload %s" % payload_name, e)
-
- def create_payload_tarball(self, tar_path, data_path, compressor):
- '''
- Create a payload tarball
- '''
- try:
- # get compressor argv (first to escape file creation if not found)
- a_comp = istools.get_compressor_path(compressor, compress=True)
- a_tar = ["tar", "--create", "--numeric-owner", "--directory",
- data_path, "."]
- # create destination file
- f_dst = PipeFile(tar_path, "w", progressbar=True)
- # run tar process
- p_tar = subprocess.Popen(a_tar, shell=False, close_fds=True,
- stdout=subprocess.PIPE)
- # run compressor process
- p_comp = subprocess.Popen(a_comp, shell=False, close_fds=True,
- stdin=p_tar.stdout, stdout=subprocess.PIPE)
- # write data from compressor to tar_path
- f_dst.consume(p_comp.stdout)
- # close all fd
- p_tar.stdout.close()
- p_comp.stdout.close()
- f_dst.close()
- # check tar return 0
- if p_tar.wait() != 0:
- raise ISError("Tar return is not zero")
- # check compressor return 0
- if p_comp.wait() != 0:
- raise ISError(u"Compressor %s return is not zero" % a_comp[0])
- except (SystemExit, KeyboardInterrupt):
- if os.path.exists(tar_path):
- os.unlink(tar_path)
- raise
-
- def create_payload_file(self, dest, source, compressor):
- '''
- Create a payload file
- '''
- try:
- # get compressor argv (first to escape file creation if not found)
- a_comp = istools.get_compressor_path(compressor, compress=True)
- # open source file
- f_src = open(source, "r")
- # create destination file
- f_dst = PipeFile(dest, "w", progressbar=True)
- # run compressor
- p_comp = subprocess.Popen(a_comp, shell=False, close_fds=True,
- stdin=f_src, stdout=subprocess.PIPE)
- # close source file fd
- f_src.close()
- # write data from compressor to dest file
- f_dst.consume(p_comp.stdout)
- # close compressor stdin and destination file
- p_comp.stdout.close()
- f_dst.close()
- # check compressor return 0
- if p_comp.wait() != 0:
- raise ISError(u"Compressor %s return is not zero" % a_comp[0])
- except (SystemExit, KeyboardInterrupt):
- if os.path.exists(dest):
- os.unlink(dest)
- raise
-
- def select_scripts(self, directory):
- '''
- Generator of tuples (fp,fn,fc) of scripts witch are allocatable
- in a real directory
- '''
- # ensure directory is unicode to have fn and fp in unicode
- if not isinstance(directory, unicode):
- directory = unicode(directory, locale.getpreferredencoding())
- if not os.path.exists(directory):
- return
- for fn in sorted(os.listdir(directory)):
- fp = os.path.join(directory, fn)
- # check name
- if not re.match("^\d+-.*\.py$", fn):
- continue
- # check execution bit
- if not os.access(fp, os.X_OK):
- continue
- # get module content
- try:
- fc = open(fp, "r").read()
- except Exception as e:
- raise ISError(u"Unable to read script %s" % n_scripts, e)
- # yield complet file path, file name and file content
- yield (fp, fn, fc)
-
- def add_scripts(self, tarball, directory):
- '''
- Add scripts inside a directory into a tarball
- '''
- basedirectory = os.path.basename(directory)
- arrow(u"Add %s scripts" % basedirectory)
- arrowlevel(1)
- # adding base directory
- ti = tarball.gettarinfo(directory, arcname=basedirectory)
- ti.mode = 0755
- ti.uid = ti.gid = 0
- ti.uname = ti.gname = ""
- tarball.addfile(ti)
- # adding each file
- for fp, fn, fc in self.select_scripts(directory):
- # check input unicode stuff
- assert(isinstance(fp, unicode))
- assert(isinstance(fn, unicode))
- assert(isinstance(fc, str))
- # add file into tarball
- tarball.add_str(os.path.join(basedirectory, fn),
- fc,
- tarfile.REGTYPE,
- 0755,
- int(os.stat(fp).st_mtime))
- arrow(u"%s added" % fn)
- arrowlevel(-1)
-
- def check_scripts(self, directory):
- '''
- Check if scripts inside a directory can be compiled
- '''
- basedirectory = os.path.basename(directory)
- arrow(u"Checking %s scripts" % basedirectory)
- arrowlevel(1)
- # checking each file
- for fp, fn, fc in self.select_scripts(directory):
- # check input unicode stuff
- assert(isinstance(fp, unicode))
- assert(isinstance(fn, unicode))
- assert(isinstance(fc, str))
- arrow(fn)
- try:
- compile(fc, fn.encode(locale.getpreferredencoding()), "exec")
- except SyntaxError as e:
- raise ISError(exception=e)
- arrowlevel(-1)
-
- def run_build(self):
- '''
- Run build scripts
- '''
- rebuild_list = []
- self.run_scripts(os.path.basename(self.build_path),
- lambda: self.select_scripts(self.build_path),
- self.payload_path,
- {"rebuild": rebuild_list})
- return rebuild_list
-
- def generate_json_description(self):
- '''
- Generate a JSON description file
- '''
- arrow("Generating JSON description")
- arrowlevel(1)
- # copy description
- desc = self.description.copy()
- # only store compressor patterns
- desc["compressor"] = desc["compressor"]["patterns"]
- # timestamp image
- arrow("Timestamping")
- desc["date"] = int(time.time())
- # watermark
- desc["is_build_version"] = installsystems.version
- # append payload infos
- arrow("Checksumming payloads")
- desc["payload"] = {}
- for payload_name in self.select_payloads():
- arrow(payload_name, 1)
- # getting payload info
- payload_desc = self.describe_payload(payload_name)
- # compute md5 and size
- fileobj = PipeFile(payload_desc["link_path"], "r")
- fileobj.consume()
- fileobj.close()
- # create payload entry
- desc["payload"][payload_name] = {
- "md5": fileobj.md5,
- "size": fileobj.size,
- "isdir": payload_desc["isdir"],
- "uid": payload_desc["uid"],
- "gid": payload_desc["gid"],
- "mode": payload_desc["mode"],
- "mtime": payload_desc["mtime"],
- "compressor": payload_desc["compressor"]
- }
- arrowlevel(-1)
- # check md5 are uniq
- md5s = [v["md5"] for v in desc["payload"].values()]
- if len(md5s) != len(set(md5s)):
- raise ISError("Two payloads cannot have the same md5")
- # serialize
- return json.dumps(desc)
-
- def parse_description(self):
- '''
- Raise an exception is description file is invalid and return vars to include
- '''
- arrow("Parsing description")
- d = dict()
- try:
- descpath = os.path.join(self.base_path, "description")
- cp = configobj.ConfigObj(descpath,
- configspec=DESCRIPTION_CONFIG_SPEC.splitlines(),
- encoding="utf8", file_error=True)
- res = cp.validate(validate.Validator({"IS_name": Image.check_image_name,
- "IS_version": Image.check_image_version,
- "IS_min_version": Image.check_min_version}), preserve_errors=True)
- # If everything is fine, the validation return True
- # Else, it returns a list of (section, optname, error)
- if res is not True:
- for section, optname, error in configobj.flatten_errors(cp, res):
- # If error is False, this mean no value as been supplied,
- # so we use the default value
- # Else, the check has failed
- if error:
- installsystems.printer.error('Wrong description file, %s %s: %s' % (section, optname, error))
- for n in ("name","version", "description", "author", "is_min_version"):
- d[n] = cp["image"][n]
- d["compressor"] = {}
- # set payload compressor
- d["compressor"]["patterns"] = cp["compressor"].items()
- if not d["compressor"]["patterns"]:
- d["compressor"]["patterns"] = [(Image.default_compressor, "*")]
- for compressor, patterns in cp["compressor"].items():
- # is a valid compressor?
- istools.get_compressor_path(compressor)
- for pattern in patterns:
- for payname in fnmatch.filter(self.select_payloads(), pattern):
- d["compressor"][payname] = compressor
- except Exception as e:
- raise ISError(u"Bad description", e)
- return d
-
- def parse_changelog(self):
- '''
- Create a changelog object from a file
- '''
- # try to find a changelog file
- try:
- path = os.path.join(self.base_path, "changelog")
- fo = codecs.open(path, "r", "utf8")
- except IOError:
- return None
- # we have it, we need to check everything is ok
- arrow("Parsing changelog")
- try:
- cl = Changelog(fo.read())
- except Exception as e:
- raise ISError(u"Bad changelog", e)
- return cl
-
- def compressor(self, payname):
- '''
- Return payload compressor
- '''
- try:
- return self.description["compressor"][payname]
- except KeyError:
- # set default compressor if no compressor is specified
- return Image.default_compressor
-
-
-class PackageImage(Image):
- '''
- Packaged image manipulation class
- '''
-
- @classmethod
- def diff(cls, pkg1, pkg2):
- '''
- Diff two packaged images
- '''
- arrow(u"Difference from images #y#%s v%s#R# to #r#%s v%s#R#:" % (pkg1.name,
- pkg1.version,
- pkg2.name,
- pkg2.version))
- # extract images for diff scripts files
- fromfiles = set(pkg1._tarball.getnames(re_pattern="(parser|setup)/.*"))
- tofiles = set(pkg2._tarball.getnames(re_pattern="(parser|setup)/.*"))
- for f in fromfiles | tofiles:
- # preparing from info
- if f in fromfiles:
- fromfile = os.path.join(pkg1.filename, f)
- fromdata = pkg1._tarball.extractfile(f).readlines()
- else:
- fromfile = "/dev/null"
- fromdata = ""
- # preparing to info
- if f in tofiles:
- tofile = os.path.join(pkg2.filename, f)
- todata = pkg2._tarball.extractfile(f).readlines()
- else:
- tofile = "/dev/null"
- todata = ""
- # generate diff
- for line in difflib.unified_diff(fromdata, todata,
- fromfile=fromfile, tofile=tofile):
- # coloring diff
- if line.startswith("+"):
- out(u"#g#%s#R#" % line, endl="")
- elif line.startswith("-"):
- out(u"#r#%s#R#" % line, endl="")
- elif line.startswith("@@"):
- out(u"#c#%s#R#" % line, endl="")
- else:
- out(line, endl="")
-
- def __init__(self, path, fileobj=None, md5name=False):
- '''
- Initialize a package image
-
- fileobj must be a seekable fileobj
- '''
- Image.__init__(self)
- self.path = istools.abspath(path)
- self.base_path = os.path.dirname(self.path)
- # tarball are named by md5 and not by real name
- self.md5name = md5name
- try:
- if fileobj is None:
- fileobj = PipeFile(self.path, "r")
- else:
- fileobj = PipeFile(mode="r", fileobj=fileobj)
- memfile = cStringIO.StringIO()
- fileobj.consume(memfile)
- # close source
- fileobj.close()
- # get downloaded size and md5
- self.size = fileobj.read_size
- self.md5 = fileobj.md5
- memfile.seek(0)
- self._tarball = Tarball.open(fileobj=memfile, mode='r:gz')
- except Exception as e:
- raise ISError(u"Unable to open image %s" % path, e)
- self._metadata = self.read_metadata()
- # print info
- arrow(u"Image %s v%s loaded" % (self.name, self.version))
- arrow(u"Author: %s" % self.author, 1)
- arrow(u"Date: %s" % istools.time_rfc2822(self.date), 1)
- # build payloads info
- self.payload = {}
- for pname, pval in self._metadata["payload"].items():
- pfilename = u"%s-%s%s" % (self.filename[:-len(Image.extension)],
- pname, Payload.extension)
- if self.md5name:
- ppath = os.path.join(self.base_path,
- self._metadata["payload"][pname]["md5"])
- else:
- ppath = os.path.join(self.base_path, pfilename)
- self.payload[pname] = Payload(pname, pfilename, ppath, **pval)
-
- def __getattr__(self, name):
- '''
- Give direct access to description field
- '''
- if name in self._metadata:
- return self._metadata[name]
- raise AttributeError
-
- @property
- def filename(self):
- '''
- Return image filename
- '''
- return u"%s-%s%s" % (self.name, self.version, self.extension)
-
- def read_metadata(self):
- '''
- Parse tarball and return metadata dict
- '''
- desc = {}
- # check format
- img_format = self._tarball.get_utf8("format")
- try:
- if float(img_format) >= math.floor(float(SourceImage.format)) + 1.0:
- raise Exception()
- except:
- raise ISError(u"Invalid image format %s" % img_format)
- desc["format"] = img_format
- # check description
- try:
- img_desc = self._tarball.get_utf8("description.json")
- desc.update(json.loads(img_desc))
- self.check_image_name(desc["name"])
- self.check_image_version(desc["version"])
- if "compressor" not in desc:
- desc["compressor"] = "gzip = *"
- else:
- # format compressor pattern string
- compressor_str = ""
- for compressor, patterns in desc["compressor"]:
- # if pattern is not empty
- if patterns != ['']:
- compressor_str += "%s = %s\n" % (compressor, ", ".join(patterns))
- # remove extra endline
- desc["compressor"] = compressor_str[:-1]
- # add is_min_version if not present
- if "is_min_version" not in desc:
- desc["is_min_version"] = 0
- # check installsystems min version
- if self.compare_versions(installsystems.version, desc["is_min_version"]) < 0:
- raise ISError("Minimum Installsystems version not satisfied "
- "(%s)" % desc["is_min_version"])
- except Exception as e:
- raise ISError(u"Invalid description", e)
- # try to load changelog
- try:
- img_changelog = self._tarball.get_utf8("changelog")
- desc["changelog"] = Changelog(img_changelog)
- except KeyError:
- desc["changelog"] = Changelog("")
- except Exception as e:
- warn(u"Invalid changelog: %s" % e)
- return desc
-
- def show(self, o_payloads=False, o_files=False, o_changelog=False, o_json=False):
- '''
- Display image content
- '''
- if o_json:
- out(json.dumps(self._metadata))
- else:
- out(u'#light##yellow#Name:#reset# %s' % self.name)
- out(u'#light##yellow#Version:#reset# %s' % self.version)
- out(u'#yellow#Date:#reset# %s' % istools.time_rfc2822(self.date))
- out(u'#yellow#Description:#reset# %s' % self.description)
- out(u'#yellow#Author:#reset# %s' % self.author)
- # field is_build_version is new in version 5. I can be absent.
- try: out(u'#yellow#IS build version:#reset# %s' % self.is_build_version)
- except AttributeError: pass
- # field is_min_version is new in version 5. I can be absent.
- try: out(u'#yellow#IS minimum version:#reset# %s' % self.is_min_version)
- except AttributeError: pass
- out(u'#yellow#Format:#reset# %s' % self.format)
- out(u'#yellow#MD5:#reset# %s' % self.md5)
- out(u'#yellow#Payload count:#reset# %s' % len(self.payload))
- # display payloads
- if o_payloads:
- payloads = self.payload
- for payload_name in payloads:
- payload = payloads[payload_name]
- out(u'#light##yellow#Payload:#reset# %s' % payload_name)
- out(u' #yellow#Date:#reset# %s' % istools.time_rfc2822(payload.mtime))
- out(u' #yellow#Size:#reset# %s' % (istools.human_size(payload.size)))
- out(u' #yellow#MD5:#reset# %s' % payload.md5)
- # display image content
- if o_files:
- out('#light##yellow#Files:#reset#')
- self._tarball.list(True)
- # display changelog
- if o_changelog:
- out('#light##yellow#Changelog:#reset#')
- self.changelog.show(self.version)
-
- def check(self, message="Check MD5"):
- '''
- Check md5 and size of tarballs are correct
- Download tarball from path and compare the loaded md5 and remote
- '''
- arrow(message)
- arrowlevel(1)
- # check image
- fo = PipeFile(self.path, "r")
- fo.consume()
- fo.close()
- if self.size != fo.read_size:
- raise ISError(u"Invalid size of image %s" % self.name)
- if self.md5 != fo.md5:
- raise ISError(u"Invalid MD5 of image %s" % self.name)
- # check payloads
- for pay_name, pay_obj in self.payload.items():
- arrow(pay_name)
- pay_obj.check()
- arrowlevel(-1)
-
- def cat(self, filename):
- '''
- Display filename in the tarball
- '''
- filelist = self._tarball.getnames(glob_pattern=filename, dir=False)
- if len(filelist) == 0:
- warn(u"No file matching %s" % filename)
- for filename in filelist:
- arrow(filename)
- out(self._tarball.get_utf8(filename))
-
- def download(self, directory, force=False, image=True, payload=False):
- '''
- Download image in directory
- Doesn't use in memory image because we cannot access it
- This is done to don't parasitize self._tarfile access to memfile
- '''
- # check if destination exists
- directory = os.path.abspath(directory)
- if image:
- dest = os.path.join(directory, self.filename)
- if not force and os.path.exists(dest):
- raise ISError(u"Image destination already exists: %s" % dest)
- # some display
- arrow(u"Downloading image in %s" % directory)
- debug(u"Downloading %s from %s" % (self.filename, self.path))
- # open source
- fs = PipeFile(self.path, progressbar=True)
- # check if announced file size is good
- if fs.size is not None and self.size != fs.size:
- raise ISError(u"Downloading image %s failed: Invalid announced size" % self.name)
- # open destination
- fd = open(self.filename, "wb")
- fs.consume(fd)
- fs.close()
- fd.close()
- if self.size != fs.consumed_size:
- raise ISError(u"Download image %s failed: Invalid size" % self.name)
- if self.md5 != fs.md5:
- raise ISError(u"Download image %s failed: Invalid MD5" % self.name)
- if payload:
- for payname in self.payload:
- arrow(u"Downloading payload %s in %s" % (payname, directory))
- self.payload[payname].info
- self.payload[payname].download(directory, force=force)
-
- def extract(self, directory, force=False, payload=False, gendescription=False):
- '''
- Extract content of the image inside a repository
- '''
- # check validity of dest
- if os.path.exists(directory):
- if not os.path.isdir(directory):
- raise ISError(u"Destination %s is not a directory" % directory)
- if not force and len(os.listdir(directory)) > 0:
- raise ISError(u"Directory %s is not empty (need force)" % directory)
- else:
- istools.mkdir(directory)
- # extract content
- arrow(u"Extracting image in %s" % directory)
- self._tarball.extractall(directory)
- # generate description file from description.json
- if gendescription:
- arrow(u"Generating description file in %s" % directory)
- with open(os.path.join(directory, "description"), "w") as f:
- f.write((istemplate.description % self._metadata).encode("UTF-8"))
- # launch payload extraction
- if payload:
- for payname in self.payload:
- # here we need to decode payname which is in unicode to escape
- # tarfile to encode filename of file inside tarball inside unicode
- dest = os.path.join(directory, "payload", payname.encode("UTF-8"))
- arrow(u"Extracting payload %s in %s" % (payname, dest))
- self.payload[payname].extract(dest, force=force)
-
- def run(self, parser, extparser, load_modules=True, run_parser=True,
- run_setup=True):
- '''
- Run images scripts
-
- parser is the whole command line parser
- extparser is the parser extensible by parser scripts
-
- if load_modules is true load image modules
- if run_parser is true run parser scripts
- if run_setup is true run setup scripts
- '''
- # register start time
- t0 = time.time()
- # load image modules
- if load_modules:
- self.load_modules(lambda: self.select_scripts("lib"))
- # run parser scripts to extend extparser
- # those scripts should only extend the parser or produce error
- if run_parser:
- self.run_scripts("parser",
- lambda: self.select_scripts("parser"),
- "/",
- {"parser": extparser})
- # call parser (again), with full options
- arrow("Parsing command line")
- # encode command line arguments to utf-8
- args = istools.argv()[1:]
- # catch exception in custom argparse action
- try:
- args = parser.parse_args(args=args)
- except Exception as e:
- raise ISError("Argument parser", e)
- # run setup scripts
- if run_setup:
- self.run_scripts("setup",
- lambda: self.select_scripts("setup"),
- "/",
- {"namespace": args})
- # return the building time
- return int(time.time() - t0)
-
- def select_scripts(self, directory):
- '''
- Generator of tuples (fp,fn,fc) of scripts witch are allocatable
- in a tarball directory
- '''
- for fp in sorted(self._tarball.getnames(re_pattern="%s/.*\.py" % directory)):
- fn = os.path.basename(fp)
- # extract source code
- try:
- fc = self._tarball.get_str(fp)
- except Exception as e:
- raise ISError(u"Unable to extract script %s" % fp, e)
- # yield complet file path, file name and file content
- yield (fp, fn, fc)
-
-
-class Payload(object):
- '''
- Payload class represents a payload object
- '''
- extension = ".isdata"
- legit_attr = ("isdir", "md5", "size", "uid", "gid", "mode", "mtime", "compressor")
-
- def __init__(self, name, filename, path, **kwargs):
- object.__setattr__(self, "name", name)
- object.__setattr__(self, "filename", filename)
- object.__setattr__(self, "path", path)
- # register legit param
- for attr in self.legit_attr:
- setattr(self, attr, None)
- # set all named param
- for kwarg in kwargs:
- # do not use hasattr which use getattr and so call md5 checksum...
- if kwarg in self.legit_attr:
- setattr(self, kwarg, kwargs[kwarg])
-
- def __getattr__(self, name):
- # get all value with an understance as if there is no underscore
- if hasattr(self, u"_%s" % name):
- return getattr(self, u"_%s" % name)
- raise AttributeError
-
- def __setattr__(self, name, value):
- # set all value which exists have no underscore, but where underscore exists
- if name in self.legit_attr:
- object.__setattr__(self, u"_%s" % name, value)
- else:
- object.__setattr__(self, name, value)
-
- def checksummize(self):
- '''
- Fill missing md5/size about payload
- '''
- fileobj = PipeFile(self.path, "r")
- fileobj.consume()
- fileobj.close()
- if self._size is None:
- self._size = fileobj.read_size
- if self._md5 is None:
- self._md5 = fileobj.md5
-
- @property
- def md5(self):
- '''
- Return md5 of payload
- '''
- if self._md5 is None:
- self.checksummize()
- return self._md5
-
- @property
- def size(self):
- '''
- Return size of payload
- '''
- if self._size is None:
- self.checksummize()
- return self._size
-
- @property
- def uid(self):
- '''
- Return uid of owner of orginal payload
- '''
- return self._uid if self._uid is not None else 0
-
- @property
- def gid(self):
- '''
- Return gid of owner of orginal payload
- '''
- return self._gid if self._gid is not None else 0
-
- @property
- def mode(self):
- '''
- Return mode of orginal payload
- '''
- if self._mode is not None:
- return self._mode
- else:
- umask = os.umask(0)
- os.umask(umask)
- return 0666 & ~umask
-
- @property
- def mtime(self):
- '''
- Return last modification time of orginal payload
- '''
- return self._mtime if self._mtime is not None else time.time()
-
- @property
- def compressor(self):
- '''
- Return payload compress format
- '''
- return self._compressor if self._compressor is not None else Image.default_compressor
-
- @property
- def info(self):
- '''
- Return a dict of info about current payload
- Auto calculated info like name and filename must not be here
- '''
- return {"md5": self.md5,
- "size": self.size,
- "isdir": self.isdir,
- "uid": self.uid,
- "gid": self.gid,
- "mode": self.mode,
- "mtime": self.mtime}
-
- def check(self):
- '''
- Check that path correspond to current md5 and size
- '''
- if self._size is None or self._md5 is None:
- debug("Check is called on payload with nothing to check")
- return True
- fileobj = PipeFile(self.path, "r")
- fileobj.consume()
- fileobj.close()
- if self._size != fileobj.read_size:
- raise ISError(u"Invalid size of payload %s" % self.name)
- if self._md5 != fileobj.md5:
- raise ISError(u"Invalid MD5 of payload %s" % self._md5)
-
- def download(self, dest, force=False):
- '''
- Download payload in directory
- '''
- # if dest is a directory try to create file inside
- if os.path.isdir(dest):
- dest = os.path.join(dest, self.filename)
- # try to create leading directories
- elif not os.path.exists(os.path.dirname(dest)):
- istools.mkdir(os.path.dirname(dest))
- # check validity of dest
- if os.path.exists(dest):
- if os.path.isdir(dest):
- raise ISError(u"Destination %s is a directory" % dest)
- if not force:
- raise ISError(u"File %s already exists" % dest)
- # open remote file
- debug(u"Downloading payload %s from %s" % (self.filename, self.path))
- fs = PipeFile(self.path, progressbar=True)
- # check if announced file size is good
- if fs.size is not None and self.size != fs.size:
- raise ISError(u"Downloading payload %s failed: Invalid announced size" %
- self.name)
- fd = open(dest, "wb")
- fs.consume(fd)
- # closing fo
- fs.close()
- fd.close()
- # checking download size
- if self.size != fs.read_size:
- raise ISError(u"Downloading payload %s failed: Invalid size" % self.name)
- if self.md5 != fs.md5:
- raise ISError(u"Downloading payload %s failed: Invalid MD5" % self.name)
-
- def extract(self, dest, force=False, filelist=None):
- '''
- Extract payload into dest
- filelist is a filter of file in tarball
- force will overwrite existing file if exists
- '''
- try:
- if self.isdir:
- self.extract_tar(dest, force=force, filelist=filelist)
- else:
- self.extract_file(dest, force=force)
- except Exception as e:
- raise ISError(u"Extracting payload %s failed" % self.name, e)
-
- def extract_tar(self, dest, force=False, filelist=None):
- '''
- Extract a payload which is a tarball.
- This is used mainly to extract payload from a directory
- '''
- # check validity of dest
- if os.path.exists(dest):
- if not os.path.isdir(dest):
- raise ISError(u"Destination %s is not a directory" % dest)
- if not force and len(os.listdir(dest)) > 0:
- raise ISError(u"Directory %s is not empty (need force)" % dest)
- else:
- istools.mkdir(dest)
- # try to open payload file
- try:
- fo = PipeFile(self.path, progressbar=True)
- except Exception as e:
- raise ISError(u"Unable to open %s" % self.path)
- # check if announced file size is good
- if fo.size is not None and self.size != fo.size:
- raise ISError(u"Invalid announced size on %s" % self.path)
- # get compressor argv (first to escape file creation if not found)
- a_comp = istools.get_compressor_path(self.compressor, compress=False)
- a_tar = ["tar", "--extract", "--numeric-owner", "--ignore-zeros",
- "--preserve-permissions", "--directory", dest]
- # add optionnal selected filename for decompression
- if filelist is not None:
- a_tar += filelist
- p_tar = subprocess.Popen(a_tar, shell=False, close_fds=True,
- stdin=subprocess.PIPE)
- p_comp = subprocess.Popen(a_comp, shell=False, close_fds=True,
- stdin=subprocess.PIPE, stdout=p_tar.stdin)
- # close tar fd
- p_tar.stdin.close()
- # push data into compressor
- fo.consume(p_comp.stdin)
- # close source fd
- fo.close()
- # checking downloaded size
- if self.size != fo.read_size:
- raise ISError("Invalid size")
- # checking downloaded md5
- if self.md5 != fo.md5:
- raise ISError("Invalid MD5")
- # close compressor pipe
- p_comp.stdin.close()
- # check compressor return 0
- if p_comp.wait() != 0:
- raise ISError(u"Compressor %s return is not zero" % a_comp[0])
- # check tar return 0
- if p_tar.wait() != 0:
- raise ISError("Tar return is not zero")
-
- def extract_file(self, dest, force=False):
- '''
- Copy a payload directly to a file
- Check md5 on the fly
- '''
- # if dest is a directory try to create file inside
- if os.path.isdir(dest):
- dest = os.path.join(dest, self.name)
- # try to create leading directories
- elif not os.path.exists(os.path.dirname(dest)):
- istools.mkdir(os.path.dirname(dest))
- # check validity of dest
- if os.path.exists(dest):
- if os.path.isdir(dest):
- raise ISError(u"Destination %s is a directory" % dest)
- if not force:
- raise ISError(u"File %s already exists" % dest)
- # get compressor argv (first to escape file creation if not found)
- a_comp = istools.get_compressor_path(self.compressor, compress=False)
- # try to open payload file (source)
- try:
- f_src = PipeFile(self.path, "r", progressbar=True)
- except Exception as e:
- raise ISError(u"Unable to open payload file %s" % self.path, e)
- # check if announced file size is good
- if f_src.size is not None and self.size != f_src.size:
- raise ISError(u"Invalid announced size on %s" % self.path)
- # opening destination
- try:
- f_dst = open(dest, "wb")
- except Exception as e:
- raise ISError(u"Unable to open destination file %s" % dest, e)
- # run compressor process
- p_comp = subprocess.Popen(a_comp, shell=False, close_fds=True,
- stdin=subprocess.PIPE, stdout=f_dst)
- # close destination file
- f_dst.close()
- # push data into compressor
- f_src.consume(p_comp.stdin)
- # closing source fo
- f_src.close()
- # checking download size
- if self.size != f_src.read_size:
- raise ISError("Invalid size")
- # checking downloaded md5
- if self.md5 != f_src.md5:
- raise ISError("Invalid MD5")
- # close compressor pipe
- p_comp.stdin.close()
- # check compressor return 0
- if p_comp.wait() != 0:
- raise ISError(u"Compressor %s return is not zero" % a_comp[0])
- # settings file orginal rights
- istools.chrights(dest, self.uid, self.gid, self.mode, self.mtime)
-
-
-class Changelog(dict):
- '''
- Object representing a changelog in memory
- '''
- def __init__(self, data):
- self.verbatim = u""
- self.load(data)
-
- def load(self, data):
- '''
- Load a changelog file
- '''
- # ensure data are correct UTF-8
- if isinstance(data, str):
- try:
- data = unicode(data, "UTF-8")
- except UnicodeDecodeError:
- raise ISError("Invalid character encoding in changelog")
- version = None
- lines = data.split("\n")
- for line in lines:
- # ignore empty lines
- if len(line.strip()) == 0:
- continue
- # ignore comments
- if line.lstrip().startswith("#"):
- continue
- # try to match a new version
- m = re.match("\[(\d+(?:\.\d+)*)(?:([~+]).*)?\]", line.lstrip())
- if m is not None:
- version = m.group(1)
- self[version] = []
- continue
- # if line are out of a version => invalid format
- if version is None:
- raise ISError("Invalid format: Line outside version")
- # add line to version changelog
- self[version] += [line]
- # save original
- self.verbatim = data
-
- def show(self, version=None):
- '''
- Show changelog for a given version
- '''
- assert(isinstance(version, unicode))
- # if no version take the hightest
- if version is None:
- version = max(self, istools.strvercmp)
- # display asked version
- if version in self:
- out(os.linesep.join(self[version]))
-
- def show_all(self):
- '''
- Show changelog for all versions
- '''
- for ver in sorted(self, istools.strvercmp, reverse=True):
- out(u'-- #purple#version:#reset# %s' % ver)
- out(os.linesep.join(self[ver]))
diff --git a/installsystems/image/__init__.py b/installsystems/image/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d16ede9da4ba796a4a543022754d38452d99167
--- /dev/null
+++ b/installsystems/image/__init__.py
@@ -0,0 +1,27 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+InstallSystems image package
+'''
+
+from installsystems.image.image import Image
+from installsystems.image.source import SourceImage
+from installsystems.image.package import PackageImage
+from installsystems.image.payload import Payload
+from installsystems.image.changelog import Changelog
diff --git a/installsystems/image/changelog.py b/installsystems/image/changelog.py
new file mode 100644
index 0000000000000000000000000000000000000000..75da7d853adc879ea8d82a692ea09b761cee7179
--- /dev/null
+++ b/installsystems/image/changelog.py
@@ -0,0 +1,88 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+Image Changelog module
+'''
+
+from installsystems.exception import ISError
+from installsystems.printer import out
+from installsystems.tools import strvercmp
+from os import linesep
+from re import match
+
+class Changelog(dict):
+    '''
+    Object representing a changelog in memory
+
+    Maps version strings to lists of changelog lines; the raw source
+    text is kept in self.verbatim.
+    '''
+    def __init__(self, data):
+        # raw changelog text as provided; filled by load()
+        self.verbatim = u""
+        self.load(data)
+
+    def load(self, data):
+        '''
+        Load a changelog file
+
+        data may be a str (decoded as UTF-8) or a unicode string.
+        Raises ISError on invalid encoding or on a line outside a version.
+        '''
+        # ensure data are correct UTF-8
+        if isinstance(data, str):
+            try:
+                data = unicode(data, "UTF-8")
+            except UnicodeDecodeError:
+                raise ISError("Invalid character encoding in changelog")
+        version = None
+        lines = data.split("\n")
+        for line in lines:
+            # ignore empty lines
+            if len(line.strip()) == 0:
+                continue
+            # ignore comments
+            if line.lstrip().startswith("#"):
+                continue
+            # try to match a new version header like [1.2] or [1.2~rc1]
+            m = match("\[(\d+(?:\.\d+)*)(?:([~+]).*)?\]", line.lstrip())
+            if m is not None:
+                version = m.group(1)
+                self[version] = []
+                continue
+            # a content line outside any version => invalid format
+            if version is None:
+                raise ISError("Invalid format: Line outside version")
+            # add line to the current version's changelog
+            self[version] += [line]
+        # save original text
+        self.verbatim = data
+
+    def show(self, version=None):
+        '''
+        Show changelog for a given version
+        '''
+        # NOTE(review): this assert contradicts the version=None default;
+        # calling show() with no argument fails the assert before the
+        # None branch below can run -- confirm intended contract.
+        assert(isinstance(version, unicode))
+        # if no version take the highest
+        if version is None:
+            # NOTE(review): Python 2 max() takes no positional cmp
+            # function; this compares self against strvercmp itself
+            # instead of selecting the highest version -- likely a bug.
+            version = max(self, strvercmp)
+        # display asked version
+        if version in self:
+            out(linesep.join(self[version]))
+
+    def show_all(self):
+        '''
+        Show changelog for all versions
+        '''
+        # sorted() with a positional cmp function (Python 2 only syntax)
+        for ver in sorted(self, strvercmp, reverse=True):
+            out(u'-- #purple#version:#reset# %s' % ver)
+            out(linesep.join(self[ver]))
diff --git a/installsystems/image/image.py b/installsystems/image/image.py
new file mode 100644
index 0000000000000000000000000000000000000000..55487d17dd60d0f5c25d213389e2e381e2df2178
--- /dev/null
+++ b/installsystems/image/image.py
@@ -0,0 +1,207 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+
+'''
+Image abstract module
+'''
+
+from imp import new_module
+from installsystems import VERSION
+from installsystems.exception import ISError
+from installsystems.printer import arrow, arrowlevel
+from installsystems.tools import compare_versions
+from locale import getpreferredencoding
+from os import getcwd, chdir
+from os.path import splitext
+from re import match, split
+
+class Image(object):
+    '''
+    Abstract class of images
+
+    Provides module loading and sandboxed script execution shared by
+    SourceImage and PackageImage, plus static validators used by
+    ConfigObj when checking image descriptions.
+    '''
+
+    # file extension of a packaged image
+    extension = ".isimage"
+    # compressor used when the image description selects none
+    default_compressor = "gzip"
+
+    def __init__(self):
+        # name -> module objects built from the image's lib scripts
+        self.modules = {}
+
+    def _load_module(self, name, filename, code=None):
+        '''
+        Create a python module from a string or a filename
+
+        Raises ISError if the code cannot be compiled or executed.
+        '''
+        # unicode safety check
+        assert(isinstance(name, unicode))
+        assert(isinstance(filename, unicode))
+        assert(code is None or isinstance(code, str))
+        # load code if not provided
+        if code is None:
+            code = open(filename, "r").read()
+        # create an empty module
+        module = new_module(name)
+        # compile module code; the filename is encoded so tracebacks
+        # point at a byte-string path
+        try:
+            bytecode = compile(code, filename.encode(getpreferredencoding()), "exec")
+        except Exception as e:
+            raise ISError(u"Unable to compile %s" % filename, e)
+        # load module
+        try:
+            self.secure_exec_bytecode(bytecode, name, module.__dict__)
+        except Exception as e:
+            raise ISError(u"Unable to load %s" % filename, e)
+        return module
+
+    def load_modules(self, select_scripts):
+        '''
+        Load all modules selected by generator select_scripts
+
+        select_scripts is a generator which returns tuples (fp, fn, fc) where:
+          fp is unicode file path of the module
+          fn is unicode file name of the module (basename)
+          fc is unicode file content
+        '''
+        arrow(u"Load lib scripts")
+        old_level = arrowlevel(1)
+        self.modules = {}
+        for fp, fn, fc in select_scripts():
+            # check input unicode stuff
+            assert(isinstance(fp, unicode))
+            assert(isinstance(fn, unicode))
+            assert(isinstance(fc, str))
+            arrow(fn)
+            # module name: strip the "NN-" ordering prefix and the extension
+            module_name = splitext(fn.split('-', 1)[1])[0]
+            self.modules[module_name] = self._load_module(module_name, fp, fc)
+        arrowlevel(level=old_level)
+
+    def run_scripts(self, scripts_name, select_scripts, exec_directory, global_dict):
+        '''
+        Execute scripts selected by generator select_scripts
+
+        scripts_name is only for display the first arrow before execution
+
+        select_scripts is a generator which returns tuples (fp, fn, fc) where:
+          fp is file path of the scripts
+          fn is file name of the scripts (basename)
+          fc is file content
+
+        exec_directory is the cwd of the running script
+
+        global_dict is the globals environment given to scripts
+        '''
+        arrow(u"Run %s scripts" % scripts_name)
+        # backup current directory and loaded modules
+        cwd = getcwd()
+        for fp, fn, fc in select_scripts():
+            # check input unicode stuff
+            assert(isinstance(fp, unicode))
+            assert(isinstance(fn, unicode))
+            assert(isinstance(fc, str))
+            arrow(fn, 1)
+            # backup arrow level
+            old_level = arrowlevel(2)
+            # chdir in exec_directory
+            chdir(exec_directory)
+            # compile source code
+            try:
+                bytecode = compile(fc, fn.encode(getpreferredencoding()), "exec")
+            except Exception as e:
+                raise ISError(u"Unable to compile script %s" % fp, e)
+            # add current image
+            global_dict["image"] = self
+            # execute source code
+            self.secure_exec_bytecode(bytecode, fp, global_dict)
+            arrowlevel(level=old_level)
+        # NOTE(review): the original cwd is only restored on success;
+        # if a script raises, the process stays in exec_directory.
+        chdir(cwd)
+
+    def secure_exec_bytecode(self, bytecode, path, global_dict):
+        '''
+        Execute bytecode in a clean modules' environment, without altering
+        Installsystems' sys.modules
+        '''
+        import sys
+        import installsystems.printer
+
+        # system modules dict
+        sysmodules = sys.modules
+        sysmodules_backup = sysmodules.copy()
+        # autoload image modules into the script's globals
+        global_dict.update(self.modules)
+        try:
+            # replace system modules by the image's loaded modules;
+            # we must mutate the same dict object and not rebind it
+            # (sys.modules is probably also referenced from C)
+            sysmodules.clear()
+            # sys must be in sys.modules to allow loading of modules
+            sysmodules["sys"] = sys
+            sysmodules.update(self.modules)
+            # we need installsystems.printer to conserve arrow level
+            sysmodules["installsystems.printer"] = installsystems.printer
+            exec bytecode in global_dict
+        except SystemExit as e:
+            # skip a script which calls exit(0) or exit()
+            if e.code is None or e.code == 0:
+                return
+            else:
+                raise ISError(u"Script %s exits with status" % path, e)
+        except Exception as e:
+            raise ISError(u"Fail to execute script %s" % path, e)
+        finally:
+            # always restore the real sys.modules
+            sysmodules.clear()
+            sysmodules.update(sysmodules_backup)
+
+    @staticmethod
+    def check_name(buf):
+        '''
+        Check if @buf is a valid image name
+
+        Valid names are one or more word characters, dashes or underscores.
+        '''
+        if match("^[-_\w]+$", buf) is None:
+            raise ISError(u"Invalid image name %s" % buf)
+        # return the image name, because this function is used by ConfigObj
+        # validate to ensure the image name is correct
+        return buf
+
+    @staticmethod
+    def check_version(buf):
+        '''
+        Check if @buf is a valid image version
+
+        Valid versions are dotted digits with an optional ~/+ suffix.
+        '''
+        if match("^\d+(\.\d+)*(([~+]).*)?$", buf) is None:
+            raise ISError(u"Invalid image version %s" % buf)
+        # return the image version, because this function is used by ConfigObj
+        # validate to ensure the image version is correct
+        return buf
+
+    @staticmethod
+    def check_min_version(version):
+        '''
+        Check InstallSystems min version
+        '''
+        if compare_versions(VERSION, version) < 0:
+            raise ISError("Minimum Installsystems version not satisfied "
+                          "(%s)" % version)
+        # return the version, because this function is used by ConfigObj
+        # validate to ensure the version is correct
+        return version
+
+    @staticmethod
+    def compare_versions(v1, v2):
+        '''
+        For backward compatibility, image class offers a method to compare
+        image versions, but the code now lives in installsystems.tools
+        '''
+        return compare_versions(v1, v2)
diff --git a/installsystems/image/package.py b/installsystems/image/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..029cb74ef15edc291eba700142acbb3cc7327a52
--- /dev/null
+++ b/installsystems/image/package.py
@@ -0,0 +1,386 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+Package Image module
+'''
+
+from cStringIO import StringIO
+from difflib import unified_diff
+from installsystems import VERSION
+from installsystems.exception import ISError
+from installsystems.image.changelog import Changelog
+from installsystems.image.image import Image
+from installsystems.image.payload import Payload
+from installsystems.image.source import SourceImage, DESCRIPTION_TPL
+from installsystems.image.tarball import Tarball
+from installsystems.printer import warn, arrow, arrowlevel, out, debug
+from installsystems.tools import mkdir, abspath, time_rfc2822, human_size, argv, PipeFile
+from json import loads, dumps
+from math import floor
+from os import listdir
+from os.path import join, basename, exists, isdir, dirname, abspath
+from time import time
+
+class PackageImage(Image):
+    '''
+    Packaged image manipulation class
+
+    Loads a built .isimage tarball (locally or through PipeFile) fully
+    into memory and exposes its metadata, payloads and scripts.
+    '''
+
+    @classmethod
+    def diff(cls, pkg1, pkg2):
+        '''
+        Diff two packaged images
+
+        Shows a colored unified diff of their parser/ and setup/ scripts.
+        '''
+        arrow(u"Difference from images #y#%s v%s#R# to #r#%s v%s#R#:" % (pkg1.name,
+                                                                         pkg1.version,
+                                                                         pkg2.name,
+                                                                         pkg2.version))
+        # extract images for diff scripts files
+        fromfiles = set(pkg1._tarball.getnames(re_pattern="(parser|setup)/.*"))
+        tofiles = set(pkg2._tarball.getnames(re_pattern="(parser|setup)/.*"))
+        for f in fromfiles | tofiles:
+            # preparing from info
+            if f in fromfiles:
+                fromfile = join(pkg1.filename, f)
+                fromdata = pkg1._tarball.extractfile(f).readlines()
+            else:
+                # file added in pkg2: diff against an empty source
+                fromfile = "/dev/null"
+                fromdata = ""
+            # preparing to info
+            if f in tofiles:
+                tofile = join(pkg2.filename, f)
+                todata = pkg2._tarball.extractfile(f).readlines()
+            else:
+                # file removed in pkg2: diff against an empty target
+                tofile = "/dev/null"
+                todata = ""
+            # generate diff
+            for line in unified_diff(fromdata,
+                                     todata,
+                                     fromfile=fromfile,
+                                     tofile=tofile):
+                # coloring diff
+                if line.startswith("+"):
+                   out(u"#g#%s#R#" % line, endl="")
+                elif line.startswith("-"):
+                   out(u"#r#%s#R#" % line, endl="")
+                elif line.startswith("@@"):
+                   out(u"#c#%s#R#" % line, endl="")
+                else:
+                   out(line, endl="")
+
+    def __init__(self, path, fileobj=None, md5name=False):
+        '''
+        Initialize a package image
+
+        fileobj must be a seekable fileobj
+
+        md5name: payload files on disk are named by md5 instead of name.
+        Raises ISError if the image cannot be opened.
+        '''
+        Image.__init__(self)
+        # NOTE(review): abspath resolves to os.path.abspath here -- the
+        # os.path import shadows the abspath imported earlier from
+        # installsystems.tools; confirm remote (URL) paths are not expected.
+        self.path = abspath(path)
+        self.base_path = dirname(self.path)
+        # tarball are named by md5 and not by real name
+        self.md5name = md5name
+        try:
+            if fileobj is None:
+                fileobj = PipeFile(self.path, "r")
+            else:
+                fileobj = PipeFile(mode="r", fileobj=fileobj)
+            # buffer the whole image in memory so the tarball stays seekable
+            memfile = StringIO()
+            fileobj.consume(memfile)
+            # close source
+            fileobj.close()
+            # get downloaded size and md5
+            self.size = fileobj.read_size
+            self.md5 = fileobj.md5
+            memfile.seek(0)
+            self._tarball = Tarball.open(fileobj=memfile, mode='r:gz')
+        except Exception as e:
+            raise ISError(u"Unable to open image %s" % path, e)
+        self._metadata = self.read_metadata()
+        # print info
+        arrow(u"Image %s v%s loaded" % (self.name, self.version))
+        arrow(u"Author: %s" % self.author, 1)
+        arrow(u"Date: %s" % time_rfc2822(self.date), 1)
+        # build payloads info
+        self.payload = {}
+        for pname, pval in self._metadata["payload"].items():
+            pfilename = u"%s-%s%s" % (self.filename[:-len(Image.extension)],
+                                      pname, Payload.extension)
+            if self.md5name:
+                # on-disk payload file is named by its md5
+                ppath = join(self.base_path,
+                             self._metadata["payload"][pname]["md5"])
+            else:
+                ppath = join(self.base_path, pfilename)
+            self.payload[pname] = Payload(pname, pfilename, ppath, **pval)
+
+    def __getattr__(self, name):
+        '''
+        Give direct access to description field
+
+        e.g. image.name / image.version read from the parsed metadata.
+        '''
+        if name in self._metadata:
+            return self._metadata[name]
+        raise AttributeError
+
+    @property
+    def filename(self):
+        '''
+        Return image filename (name-version.isimage)
+        '''
+        return u"%s-%s%s" % (self.name, self.version, self.extension)
+
+    def read_metadata(self):
+        '''
+        Parse tarball and return metadata dict
+
+        Raises ISError on unsupported format or invalid description.
+        '''
+        desc = {}
+        # check format: refuse images with a newer major format number
+        img_format = self._tarball.get_utf8("format")
+        try:
+            if float(img_format) >= floor(float(SourceImage.format)) + 1.0:
+                raise Exception()
+        except:
+            raise ISError(u"Invalid image format %s" % img_format)
+        desc["format"] = img_format
+        # check description
+        try:
+            img_desc = self._tarball.get_utf8("description.json")
+            desc.update(loads(img_desc))
+            self.check_name(desc["name"])
+            self.check_version(desc["version"])
+            if "compressor" not in desc:
+                # field added in later formats; fall back to gzip for all files
+                desc["compressor"] = "gzip = *"
+            else:
+                # format compressor pattern string
+                compressor_str = ""
+                for compressor, patterns in desc["compressor"]:
+                    # if pattern is not empty
+                    if patterns !=  ['']:
+                        compressor_str += "%s = %s\n" % (compressor, ", ".join(patterns))
+                # remove extra endline
+                desc["compressor"] = compressor_str[:-1]
+            # add is_min_version if not present
+            if "is_min_version" not in desc:
+                desc["is_min_version"] = 0
+            # check installsystems min version
+            if self.compare_versions(VERSION, desc["is_min_version"]) < 0:
+                raise ISError("Minimum Installsystems version not satisfied "
+                              "(%s)" % desc["is_min_version"])
+        except Exception as e:
+            raise ISError(u"Invalid description", e)
+        # try to load changelog
+        try:
+            img_changelog = self._tarball.get_utf8("changelog")
+            desc["changelog"] = Changelog(img_changelog)
+        except KeyError:
+            # changelog file is optional
+            desc["changelog"] = Changelog("")
+        except Exception as e:
+            warn(u"Invalid changelog: %s" % e)
+        return desc
+
+    def show(self, o_payloads=False, o_files=False, o_changelog=False, o_json=False):
+        '''
+        Display image content
+
+        o_json dumps raw metadata; the other flags add optional sections.
+        '''
+        if o_json:
+            out(dumps(self._metadata))
+        else:
+            out(u'#light##yellow#Name:#reset# %s' % self.name)
+            out(u'#light##yellow#Version:#reset# %s' % self.version)
+            out(u'#yellow#Date:#reset# %s' % time_rfc2822(self.date))
+            out(u'#yellow#Description:#reset# %s' % self.description)
+            out(u'#yellow#Author:#reset# %s' % self.author)
+            # field is_build_version is new in version 5. It can be absent.
+            try: out(u'#yellow#IS build version:#reset# %s' % self.is_build_version)
+            except AttributeError: pass
+            # field is_min_version is new in version 5. It can be absent.
+            try: out(u'#yellow#IS minimum version:#reset# %s' % self.is_min_version)
+            except AttributeError: pass
+            out(u'#yellow#Format:#reset# %s' % self.format)
+            out(u'#yellow#MD5:#reset# %s' % self.md5)
+            out(u'#yellow#Payload count:#reset# %s' % len(self.payload))
+            # display payloads
+            if o_payloads:
+                payloads = self.payload
+                for payload_name in payloads:
+                    payload = payloads[payload_name]
+                    out(u'#light##yellow#Payload:#reset# %s' % payload_name)
+                    out(u'  #yellow#Date:#reset# %s' % time_rfc2822(payload.mtime))
+                    out(u'  #yellow#Size:#reset# %s' % (human_size(payload.size)))
+                    out(u'  #yellow#MD5:#reset# %s' % payload.md5)
+            # display image content
+            if o_files:
+                out('#light##yellow#Files:#reset#')
+                self._tarball.list(True)
+            # display changelog
+            if o_changelog:
+                out('#light##yellow#Changelog:#reset#')
+                self.changelog.show(self.version)
+
+    def check(self, message="Check MD5"):
+        '''
+        Check md5 and size of tarballs are correct
+        Download tarball from path and compare the loaded md5 and remote
+
+        Raises ISError on any mismatch.
+        '''
+        arrow(message)
+        arrowlevel(1)
+        # check image
+        fo = PipeFile(self.path, "r")
+        fo.consume()
+        fo.close()
+        if self.size != fo.read_size:
+            raise ISError(u"Invalid size of image %s" % self.name)
+        if self.md5 != fo.md5:
+            raise ISError(u"Invalid MD5 of image %s" % self.name)
+        # check payloads
+        for pay_name, pay_obj in self.payload.items():
+            arrow(pay_name)
+            pay_obj.check()
+        arrowlevel(-1)
+
+    def cat(self, filename):
+        '''
+        Display filename in the tarball
+
+        filename may be a glob pattern; each match is printed.
+        '''
+        filelist = self._tarball.getnames(glob_pattern=filename, dir=False)
+        if len(filelist) == 0:
+            warn(u"No file matching %s" % filename)
+        for filename in filelist:
+            arrow(filename)
+            out(self._tarball.get_utf8(filename))
+
+    def download(self, directory, force=False, image=True, payload=False):
+        '''
+        Download image in directory
+        Doesn't use the in-memory image because we cannot access it;
+        this avoids disturbing self._tarball's access to memfile
+        '''
+        # check if destination exists
+        directory = abspath(directory)
+        if image:
+            dest = join(directory, self.filename)
+            if not force and exists(dest):
+                raise ISError(u"Image destination already exists: %s" % dest)
+            # some display
+            arrow(u"Downloading image in %s" % directory)
+            debug(u"Downloading %s from %s" % (self.filename, self.path))
+            # open source
+            fs = PipeFile(self.path, progressbar=True)
+            # check if announced file size is good
+            if fs.size is not None and self.size != fs.size:
+                raise ISError(u"Downloading image %s failed: Invalid announced size" % self.name)
+            # NOTE(review): opens self.filename relative to the current
+            # working directory, not the computed dest inside directory
+            # (dest is never used below) -- looks like a bug.
+            fd = open(self.filename, "wb")
+            fs.consume(fd)
+            fs.close()
+            fd.close()
+            # NOTE(review): other call sites read fs.read_size after
+            # consume(); confirm consumed_size exists on PipeFile.
+            if self.size != fs.consumed_size:
+                raise ISError(u"Download image %s failed: Invalid size" % self.name)
+            if self.md5 != fs.md5:
+                raise ISError(u"Download image %s failed: Invalid MD5" % self.name)
+        if payload:
+            for payname in self.payload:
+                arrow(u"Downloading payload %s in %s" % (payname, directory))
+                # NOTE(review): bare property access; forces md5/size
+                # computation but discards the resulting dict.
+                self.payload[payname].info
+                self.payload[payname].download(directory, force=force)
+
+    def extract(self, directory, force=False, payload=False, gendescription=False):
+        '''
+        Extract content of the image inside a repository
+
+        force allows extraction into a non-empty directory.
+        '''
+        # check validity of dest
+        if exists(directory):
+            if not isdir(directory):
+                raise ISError(u"Destination %s is not a directory" % directory)
+            if not force and len(listdir(directory)) > 0:
+                raise ISError(u"Directory %s is not empty (need force)" % directory)
+        else:
+            mkdir(directory)
+        # extract content
+        arrow(u"Extracting image in %s" % directory)
+        self._tarball.extractall(directory)
+        # generate description file from description.json
+        if gendescription:
+            arrow(u"Generating description file in %s" % directory)
+            with open(join(directory, "description"), "w") as f:
+                f.write((DESCRIPTION_TPL % self._metadata).encode("UTF-8"))
+        # launch payload extraction
+        if payload:
+            for payname in self.payload:
+                # here we need to encode payname (unicode) so that tarfile
+                # does not try to handle inner filenames as unicode
+                dest = join(directory, "payload", payname.encode("UTF-8"))
+                arrow(u"Extracting payload %s in %s" % (payname, dest))
+                self.payload[payname].extract(dest, force=force)
+
+    def run(self, parser, extparser, load_modules=True, run_parser=True,
+            run_setup=True):
+        '''
+        Run images scripts
+
+        parser is the whole command line parser
+        extparser is the parser extensible by parser scripts
+
+        if load_modules is true load image modules
+        if run_parser is true run parser scripts
+        if run_setup is true run setup scripts
+
+        Returns the elapsed run time in whole seconds.
+        '''
+        # register start time
+        t0 = time()
+        # load image modules
+        if load_modules:
+            self.load_modules(lambda: self.select_scripts("lib"))
+        # run parser scripts to extend extparser
+        # those scripts should only extend the parser or produce error
+        if run_parser:
+            self.run_scripts("parser",
+                             lambda: self.select_scripts("parser"),
+                             "/",
+                             {"parser": extparser})
+        # call parser (again), with full options
+        arrow("Parsing command line")
+        # encode command line arguments to utf-8
+        args = argv()[1:]
+        # catch exception in custom argparse action
+        try:
+            args = parser.parse_args(args=args)
+        except Exception as e:
+            raise ISError("Argument parser", e)
+        # run setup scripts
+        if run_setup:
+            self.run_scripts("setup",
+                             lambda: self.select_scripts("setup"),
+                             "/",
+                             {"namespace": args})
+        # return the building time
+        return int(time() - t0)
+
+    def select_scripts(self, directory):
+        '''
+        Generator of tuples (fp,fn,fc) for the python scripts located
+        in a tarball directory, sorted by path
+        '''
+        for fp in sorted(self._tarball.getnames(re_pattern="%s/.*\.py" % directory)):
+            fn = basename(fp)
+            # extract source code
+            try:
+                fc = self._tarball.get_str(fp)
+            except Exception as e:
+                raise ISError(u"Unable to extract script %s" % fp, e)
+            # yield complete file path, file name and file content
+            yield (fp, fn, fc)
+
diff --git a/installsystems/image/payload.py b/installsystems/image/payload.py
new file mode 100644
index 0000000000000000000000000000000000000000..8758f85b3a0b7398947f107630cd2535410de4f3
--- /dev/null
+++ b/installsystems/image/payload.py
@@ -0,0 +1,319 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+from installsystems.exception import ISError
+from installsystems.image.image import Image
+from installsystems.printer import debug
+from installsystems.tools import PipeFile, mkdir
+from installsystems.tools import chrights, get_compressor_path
+from os import umask, listdir
+from os.path import join, isdir, exists, dirname
+from subprocess import Popen, PIPE
+from time import time
+
+'''
+Image payload module
+'''
+
+class Payload(object):
+    '''
+    Payload class represents a payload object
+
+    Attribute values listed in legit_attr are stored with a leading
+    underscore and read back transparently via __getattr__; md5 and size
+    are computed lazily from the payload file when not provided.
+    '''
+    # file extension of payload files on disk
+    extension = ".isdata"
+    # attributes accepted as keyword arguments and proxied with "_" prefix
+    legit_attr = ("isdir", "md5", "size", "uid", "gid", "mode", "mtime", "compressor")
+
+    def __init__(self, name, filename, path, **kwargs):
+        # bypass our own __setattr__ for the identity attributes
+        object.__setattr__(self, "name", name)
+        object.__setattr__(self, "filename", filename)
+        object.__setattr__(self, "path", path)
+        # register legit params with a None default
+        for attr in self.legit_attr:
+            setattr(self, attr, None)
+        # set all named params
+        for kwarg in kwargs:
+            # do not use hasattr which uses getattr and so would trigger
+            # the lazy md5 checksum computation
+            if kwarg in self.legit_attr:
+                setattr(self, kwarg, kwargs[kwarg])
+
+    def __getattr__(self, name):
+        # transparently read the underscore-prefixed storage attribute
+        # NOTE(review): hasattr here re-enters __getattr__ with ever more
+        # underscores for unknown names; Python 2 hasattr swallows the
+        # resulting errors, but this is fragile -- verify.
+        if hasattr(self, u"_%s" % name):
+            return getattr(self, u"_%s" % name)
+        raise AttributeError
+
+    def __setattr__(self, name, value):
+        # legit attributes are stored with a leading underscore so the
+        # lazy properties below can intercept reads; others set directly
+        if name in self.legit_attr:
+            object.__setattr__(self, u"_%s" % name, value)
+        else:
+            object.__setattr__(self, name, value)
+
+    def checksummize(self):
+        '''
+        Fill missing md5/size about payload
+
+        Reads the whole payload file once through PipeFile.
+        '''
+        fileobj = PipeFile(self.path, "r")
+        fileobj.consume()
+        fileobj.close()
+        if self._size is None:
+            self._size = fileobj.read_size
+        if self._md5 is None:
+            self._md5 = fileobj.md5
+
+    @property
+    def md5(self):
+        '''
+        Return md5 of payload (computed lazily on first access)
+        '''
+        if self._md5 is None:
+            self.checksummize()
+        return self._md5
+
+    @property
+    def size(self):
+        '''
+        Return size of payload (computed lazily on first access)
+        '''
+        if self._size is None:
+            self.checksummize()
+        return self._size
+
+    @property
+    def uid(self):
+        '''
+        Return uid of owner of original payload (0 if unknown)
+        '''
+        return self._uid if self._uid is not None else 0
+
+    @property
+    def gid(self):
+        '''
+        Return gid of owner of original payload (0 if unknown)
+        '''
+        return self._gid if self._gid is not None else 0
+
+    @property
+    def mode(self):
+        '''
+        Return mode of original payload
+
+        Defaults to 0666 masked by the current umask when unknown.
+        '''
+        if self._mode is not None:
+            return self._mode
+        else:
+            # read the current umask without changing it permanently
+            oldmask = umask(0)
+            umask(oldmask)
+            return 0666 & ~oldmask
+
+    @property
+    def mtime(self):
+        '''
+        Return last modification time of original payload (now if unknown)
+        '''
+        return self._mtime if self._mtime is not None else time()
+
+    @property
+    def compressor(self):
+        '''
+        Return payload compress format (Image default when unset)
+        '''
+        return self._compressor if self._compressor is not None else Image.default_compressor
+
+    @property
+    def info(self):
+        '''
+        Return a dict of info about current payload
+        Auto calculated info like name and filename must not be here
+        '''
+        return {"md5": self.md5,
+                "size": self.size,
+                "isdir": self.isdir,
+                "uid": self.uid,
+                "gid": self.gid,
+                "mode": self.mode,
+                "mtime": self.mtime}
+
+    def check(self):
+        '''
+        Check that path corresponds to current md5 and size
+
+        Raises ISError on mismatch; no-op when nothing is recorded.
+        '''
+        if self._size is None or self._md5 is None:
+            debug("Check is called on payload with nothing to check")
+            return True
+        fileobj = PipeFile(self.path, "r")
+        fileobj.consume()
+        fileobj.close()
+        if self._size != fileobj.read_size:
+            raise ISError(u"Invalid size of payload %s" % self.name)
+        if self._md5 != fileobj.md5:
+            # NOTE(review): message interpolates the md5 value, not the
+            # payload name -- self.name was probably intended.
+            raise ISError(u"Invalid MD5 of payload %s" % self._md5)
+
+    def download(self, dest, force=False):
+        '''
+        Download payload in directory
+
+        dest may be a directory (file created inside) or a file path.
+        Raises ISError on existing destination (unless force) or on
+        size/MD5 mismatch.
+        '''
+        # if dest is a directory try to create file inside
+        if isdir(dest):
+            dest = join(dest, self.filename)
+        # try to create leading directories
+        elif not exists(dirname(dest)):
+            mkdir(dirname(dest))
+        # check validity of dest
+        if exists(dest):
+            if isdir(dest):
+                raise ISError(u"Destination %s is a directory" % dest)
+            if not force:
+                raise ISError(u"File %s already exists" % dest)
+        # open remote file
+        debug(u"Downloading payload %s from %s" % (self.filename, self.path))
+        fs = PipeFile(self.path, progressbar=True)
+        # check if announced file size is good
+        if fs.size is not None and self.size != fs.size:
+            raise ISError(u"Downloading payload %s failed: Invalid announced size" %
+                            self.name)
+        fd = open(dest, "wb")
+        fs.consume(fd)
+        # closing fo
+        fs.close()
+        fd.close()
+        # checking download size
+        if self.size != fs.read_size:
+            raise ISError(u"Downloading payload %s failed: Invalid size" % self.name)
+        if self.md5 != fs.md5:
+            raise ISError(u"Downloading payload %s failed: Invalid MD5" % self.name)
+
+    def extract(self, dest, force=False, filelist=None):
+        '''
+        Extract payload into dest
+        filelist is a filter of file in tarball
+        force will overwrite existing file if exists
+
+        Dispatches on isdir: directory payloads are tarballs, file
+        payloads are compressed single files.
+        '''
+        try:
+            if self.isdir:
+                self.extract_tar(dest, force=force, filelist=filelist)
+            else:
+                self.extract_file(dest, force=force)
+        except Exception as e:
+            raise ISError(u"Extracting payload %s failed" % self.name, e)
+
+    def extract_tar(self, dest, force=False, filelist=None):
+        '''
+        Extract a payload which is a tarball.
+        This is used mainly to extract payload from a directory
+
+        Pipes the payload through the decompressor into tar, verifying
+        size and MD5 on the fly.
+        '''
+        # check validity of dest
+        if exists(dest):
+            if not isdir(dest):
+                raise ISError(u"Destination %s is not a directory" % dest)
+            if not force and len(listdir(dest)) > 0:
+                raise ISError(u"Directory %s is not empty (need force)" % dest)
+        else:
+            mkdir(dest)
+        # try to open payload file
+        try:
+            fo = PipeFile(self.path, progressbar=True)
+        except Exception as e:
+            raise ISError(u"Unable to open %s" % self.path)
+        # check if announced file size is good
+        if fo.size is not None and self.size != fo.size:
+            raise ISError(u"Invalid announced size on %s" % self.path)
+        # get compressor argv (first to escape file creation if not found)
+        a_comp = get_compressor_path(self.compressor, compress=False)
+        a_tar = ["tar", "--extract", "--numeric-owner", "--ignore-zeros",
+                 "--preserve-permissions", "--directory", dest]
+        # add optional selected filenames for decompression
+        if filelist is not None:
+            a_tar += filelist
+        p_tar = Popen(a_tar, shell=False, close_fds=True,
+                      stdin=PIPE)
+        p_comp = Popen(a_comp, shell=False, close_fds=True,
+                       stdin=PIPE, stdout=p_tar.stdin)
+        # close our copy of tar's stdin so tar sees EOF when the
+        # compressor finishes
+        p_tar.stdin.close()
+        # push data into compressor
+        fo.consume(p_comp.stdin)
+        # close source fd
+        fo.close()
+        # checking downloaded size
+        if self.size != fo.read_size:
+            raise ISError("Invalid size")
+        # checking downloaded md5
+        if self.md5 != fo.md5:
+            raise ISError("Invalid MD5")
+        # close compressor pipe
+        p_comp.stdin.close()
+        # check compressor return 0
+        if p_comp.wait() != 0:
+            raise ISError(u"Compressor %s return is not zero" % a_comp[0])
+        # check tar return 0
+        if p_tar.wait() != 0:
+            raise ISError("Tar return is not zero")
+
+    def extract_file(self, dest, force=False):
+        '''
+        Copy a payload directly to a file
+        Check md5 on the fly
+        '''
+        # if dest is a directory try to create file inside
+        if isdir(dest):
+            dest = join(dest, self.name)
+        # try to create leading directories
+        elif not exists(dirname(dest)):
+            mkdir(dirname(dest))
+        # check validity of dest
+        if exists(dest):
+            if isdir(dest):
+                raise ISError(u"Destination %s is a directory" % dest)
+            if not force:
+                raise ISError(u"File %s already exists" % dest)
+        # get compressor argv (first to escape file creation if not found)
+        a_comp = get_compressor_path(self.compressor, compress=False)
+        # try to open payload file (source)
+        try:
+            f_src = PipeFile(self.path, "r", progressbar=True)
+        except Exception as e:
+            raise ISError(u"Unable to open payload file %s" % self.path, e)
+        # check if announced file size is good
+        if f_src.size is not None and self.size != f_src.size:
+            raise ISError(u"Invalid announced size on %s" % self.path)
+        # opening destination
+        try:
+            f_dst = open(dest, "wb")
+        except Exception as e:
+            raise ISError(u"Unable to open destination file %s" % dest, e)
+        # run compressor process
+        p_comp = Popen(a_comp, shell=False, close_fds=True,
+                       stdin=PIPE, stdout=f_dst)
+        # close destination file in the parent; the child process keeps
+        # its own duplicated fd open
+        f_dst.close()
+        # push data into compressor
+        f_src.consume(p_comp.stdin)
+        # closing source fo
+        f_src.close()
+        # checking download size
+        if self.size != f_src.read_size:
+            raise ISError("Invalid size")
+        # checking downloaded md5
+        if self.md5 != f_src.md5:
+            raise ISError("Invalid MD5")
+        # close compressor pipe
+        p_comp.stdin.close()
+        # check compressor return 0
+        if p_comp.wait() != 0:
+            raise ISError(u"Compressor %s return is not zero" % a_comp[0])
+        # setting file original rights
+        chrights(dest, self.uid, self.gid, self.mode, self.mtime)
diff --git a/installsystems/image/source.py b/installsystems/image/source.py
new file mode 100644
index 0000000000000000000000000000000000000000..943154e618ca46383e3abf35d692d099fc0ef71e
--- /dev/null
+++ b/installsystems/image/source.py
@@ -0,0 +1,662 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+
+'''
+Source image module
+'''
+
+
+from configobj import ConfigObj, flatten_errors
+from installsystems import VERSION
+from installsystems.exception import ISError, InvalidSourceImage
+from installsystems.image.changelog import Changelog
+from installsystems.image.image import Image
+from installsystems.image.payload import Payload
+from installsystems.image.tarball import Tarball, REGTYPE
+from installsystems.printer import arrow, arrowlevel, warn, error
+from installsystems.tools import PipeFile, isfile, get_compressor_path, chrights
+from json import dumps
+from locale import getpreferredencoding
+from os import stat, listdir, mkdir, umask, access, unlink, symlink, R_OK, X_OK
+from os.path import join, exists, isdir, abspath, lexists, basename
+from re import match
+from stat import S_ISDIR, S_IMODE
+from subprocess import Popen, PIPE
+from time import time
+from validate import Validator
+
+# use module prefix because function is named open
+import codecs
+# use module prefix because function is named filter
+import fnmatch
+
+
+class SourceImage(Image):
+ '''
+ Image source manipulation class
+ '''
+
+ # format should be a float X.Y but for compatibility reason it's a string
+ # before version 6, it's strict string comparison
+ format = "2.0"
+
+
+ @classmethod
+ def create(cls, path, force=False):
+ '''
+ Create an empty source image
+ '''
+ # check local repository
+ if not isfile(path):
+ raise NotImplementedError("SourceImage must be local")
+ # main path
+ build_path = join(path, "build")
+ parser_path = join(path, "parser")
+ setup_path = join(path, "setup")
+ payload_path = join(path, "payload")
+ lib_path = join(path, "lib")
+ # create base directories
+ arrow("Creating base directories")
+ try:
+ for d in (path, build_path, parser_path, setup_path, payload_path,
+ lib_path):
+ if not exists(d) or not isdir(d):
+ mkdir(d)
+ except Exception as e:
+ raise ISError(u"Unable to create directory: %s" % d, e)
+ # create example files
+ arrow("Creating examples")
+ arrowlevel(1)
+ # create dict of file to create
+ examples = {}
+ # create description example from template
+ examples["description"] = {
+ "path": "description",
+ "content": DESCRIPTION_TPL % {
+ "name": "",
+ "version": "1",
+ "description": "",
+ "author": "",
+ "is_min_version": VERSION,
+ "compressor": "gzip = *\nnone = *.gz, *.bz2, *.xz"}
+ }
+ # create changelog example from template
+ examples["changelog"] = {"path": "changelog", "content": CHANGELOG_TPL}
+ # create build example from template
+ examples["build"] = {"path": "build/01-build.py", "content": BUILD_TPL}
+ # create parser example from template
+ examples["parser"] = {"path": "parser/01-parser.py", "content": PARSER_TPL}
+ # create setup example from template
+ examples["setup"] = {"path": "setup/01-setup.py", "content": SETUP_TPL}
+ for name in examples:
+ try:
+ arrow(u"Creating %s example" % name)
+ expath = join(path, examples[name]["path"])
+ if not force and exists(expath):
+ warn(u"%s already exists. Skipping!" % expath)
+ continue
+ open(expath, "w").write(examples[name]["content"])
+ except Exception as e:
+ raise ISError(u"Unable to create example file", e)
+ try:
+ # setting executable rights on files in setup and parser
+ arrow("Setting executable rights on scripts")
+ oldmask = umask(0)
+ umask(oldmask)
+ for dpath in (build_path, parser_path, setup_path):
+ for f in listdir(dpath):
+ chrights(join(dpath, f), mode=0777 & ~oldmask)
+ except Exception as e:
+ raise ISError(u"Unable to set rights", e)
+ arrowlevel(-1)
+
+ def __init__(self, path):
+ '''
+ Initialize source image
+ '''
+ Image.__init__(self)
+ # check local repository
+ if not isfile(path):
+ raise NotImplementedError("SourceImage must be local")
+ self.base_path = abspath(path)
+ for pathtype in ("build", "parser", "setup", "payload", "lib"):
+ setattr(self, u"%s_path" % pathtype, join(self.base_path, pathtype))
+ self.check_source_image()
+ self.description = self.parse_description()
+ self.changelog = self.parse_changelog()
+ self.modules = {}
+ # script tarball path
+ self.image_name = u"%s-%s%s" % (self.description["name"],
+ self.description["version"],
+ self.extension)
+
+ def check_source_image(self):
+ '''
+ Check if we are a valid SourceImage directories
+ A valid SourceImage contains at least a description and a setup directory.
+ Payload directory is mandatory if build scripts are present
+ '''
+ # Ensure setup_path exists
+ if not exists(self.setup_path):
+ raise InvalidSourceImage(u"setup directory is missing.")
+ # Ensure description exists
+ if not exists(join(self.base_path, u"description")):
+ raise InvalidSourceImage(u"no description file.")
+ # Ensure payload directory exists if there is build directory
+ if not exists(self.payload_path) and exists(self.build_path):
+ raise InvalidSourceImage(u"payload directory is mandatory with a build directory.")
+ # Ensure directories are directories and accessible
+ for d in (self.base_path, self.build_path, self.parser_path,
+ self.setup_path, self.payload_path, self.lib_path):
+ if exists(d):
+ if not isdir(d):
+ raise InvalidSourceImage(u"%s is not a directory." % d)
+ if not access(d, R_OK|X_OK):
+ raise InvalidSourceImage(u"unable to access to %s." % d)
+
+ def build(self, force=False, force_payload=False, check=True, script=True):
+ '''
+ Create packaged image
+ '''
+ # check if free to create script tarball
+ if exists(self.image_name) and force == False:
+ raise ISError("Tarball already exists. Remove it before")
+ # register start time
+ t0 = time()
+ # check python scripts
+ if check:
+ for d in (self.build_path, self.parser_path, self.setup_path,
+ self.lib_path):
+ if exists(d):
+ self.check_scripts(d)
+ # load modules
+ self.load_modules(lambda: self.select_scripts(self.lib_path))
+ # remove list
+ rl = set()
+ # run build script
+ if script and exists(self.build_path):
+ rl |= set(self.run_build())
+ if force_payload:
+ rl |= set(self.select_payloads())
+ # remove payloads
+ self.remove_payloads(rl)
+ # create payload files
+ self.create_payloads()
+ # generate a json description
+ jdesc = self.generate_json_description()
+ # creating scripts tarball
+ self.create_image(jdesc)
+ # compute building time
+ return int(time() - t0)
+
+ def create_image(self, jdescription):
+ '''
+ Create a script tarball in current directory
+ '''
+ # create tarball
+ arrow("Creating image tarball")
+ arrowlevel(1)
+ arrow(u"Name %s" % self.image_name)
+ try:
+ try:
+ tarball = Tarball.open(self.image_name, mode="w:gz", dereference=True)
+ except Exception as e:
+ raise ISError(u"Unable to create tarball %s" % self.image_name, e)
+ # add description.json
+ arrow("Add description.json")
+ tarball.add_str("description.json", jdescription, REGTYPE, 0644)
+ # add changelog
+ if self.changelog is not None:
+ arrow("Add changelog")
+ tarball.add_str("changelog", self.changelog.verbatim, REGTYPE, 0644)
+ # add format
+ arrow("Add format")
+ tarball.add_str("format", self.format, REGTYPE, 0644)
+ # add setup scripts
+ self.add_scripts(tarball, self.setup_path)
+ # add optional scripts
+ for d in (self.build_path, self.parser_path, self.lib_path):
+ if exists(d):
+ self.add_scripts(tarball, d)
+ # closing tarball file
+ tarball.close()
+ except (SystemExit, KeyboardInterrupt):
+ if exists(self.image_name):
+ unlink(self.image_name)
+ arrowlevel(-1)
+
+ def describe_payload(self, name):
+ '''
+ Return information about a payload
+ '''
+ ans = {}
+ ans["source_path"] = join(self.payload_path, name)
+ ans["dest_path"] = u"%s-%s%s" % (self.description["name"],
+ name,
+ Payload.extension)
+ ans["link_path"] = u"%s-%s-%s%s" % (self.description["name"],
+ self.description["version"],
+ name,
+ Payload.extension)
+ source_stat = stat(ans["source_path"])
+ ans["isdir"] = S_ISDIR(source_stat.st_mode)
+ ans["uid"] = source_stat.st_uid
+ ans["gid"] = source_stat.st_gid
+ ans["mode"] = S_IMODE(source_stat.st_mode)
+ ans["mtime"] = source_stat.st_mtime
+ ans["compressor"] = self.compressor(name)
+ return ans
+
+ def select_payloads(self):
+ '''
+ Return a generator on image payloads
+ '''
+ if not isdir(self.payload_path):
+ raise StopIteration()
+ for payname in listdir(self.payload_path):
+ yield payname
+
+ def remove_payloads(self, paylist):
+ '''
+ Remove payload list if exists
+ '''
+ arrow("Removing payloads")
+ for pay in paylist:
+ arrow(pay, 1)
+ desc = self.describe_payload(pay)
+ for f in (desc["dest_path"], desc["link_path"]):
+ if lexists(f):
+ unlink(f)
+
+ def create_payloads(self):
+ '''
+ Create all missing data payloads in current directory
+ Doesn't compute md5 during creation because tarball can
+ be created manually
+ Also create symlink to versioned payload
+ '''
+ arrow("Creating payloads")
+ for payload_name in self.select_payloads():
+ paydesc = self.describe_payload(payload_name)
+ if exists(paydesc["link_path"]):
+ continue
+ arrow(payload_name, 1)
+ try:
+ # create non-versioned payload file
+ if not exists(paydesc["dest_path"]):
+ if paydesc["isdir"]:
+ self.create_payload_tarball(paydesc["dest_path"],
+ paydesc["source_path"],
+ paydesc["compressor"])
+ else:
+ self.create_payload_file(paydesc["dest_path"],
+ paydesc["source_path"],
+ paydesc["compressor"])
+ # create versioned payload file
+ if lexists(paydesc["link_path"]):
+ unlink(paydesc["link_path"])
+ symlink(paydesc["dest_path"], paydesc["link_path"])
+ except Exception as e:
+ raise ISError(u"Unable to create payload %s" % payload_name, e)
+
+ def create_payload_tarball(self, tar_path, data_path, compressor):
+ '''
+ Create a payload tarball
+ '''
+ try:
+ # get compressor argv (first to escape file creation if not found)
+ a_comp = get_compressor_path(compressor, compress=True)
+ a_tar = ["tar", "--create", "--numeric-owner", "--directory",
+ data_path, "."]
+ # create destination file
+ f_dst = PipeFile(tar_path, "w", progressbar=True)
+ # run tar process
+ p_tar = Popen(a_tar, shell=False, close_fds=True,
+ stdout=PIPE)
+ # run compressor process
+ p_comp = Popen(a_comp, shell=False, close_fds=True,
+ stdin=p_tar.stdout, stdout=PIPE)
+ # write data from compressor to tar_path
+ f_dst.consume(p_comp.stdout)
+ # close all fd
+ p_tar.stdout.close()
+ p_comp.stdout.close()
+ f_dst.close()
+ # check tar return 0
+ if p_tar.wait() != 0:
+ raise ISError("Tar return is not zero")
+ # check compressor return 0
+ if p_comp.wait() != 0:
+ raise ISError(u"Compressor %s return is not zero" % a_comp[0])
+ except (SystemExit, KeyboardInterrupt):
+ if exists(tar_path):
+ unlink(tar_path)
+ raise
+
+ def create_payload_file(self, dest, source, compressor):
+ '''
+ Create a payload file
+ '''
+ try:
+ # get compressor argv (first to escape file creation if not found)
+ a_comp = get_compressor_path(compressor, compress=True)
+ # open source file
+ f_src = open(source, "r")
+ # create destination file
+ f_dst = PipeFile(dest, "w", progressbar=True)
+ # run compressor
+ p_comp = Popen(a_comp, shell=False, close_fds=True,
+ stdin=f_src, stdout=PIPE)
+ # close source file fd
+ f_src.close()
+ # write data from compressor to dest file
+ f_dst.consume(p_comp.stdout)
+ # close compressor stdin and destination file
+ p_comp.stdout.close()
+ f_dst.close()
+ # check compressor return 0
+ if p_comp.wait() != 0:
+ raise ISError(u"Compressor %s return is not zero" % a_comp[0])
+ except (SystemExit, KeyboardInterrupt):
+ if exists(dest):
+ unlink(dest)
+ raise
+
+ def select_scripts(self, directory):
+ '''
+ Generator of tuples (fp,fn,fc) of scripts which are available
+ in a real directory
+ '''
+ # ensure directory is unicode to have fn and fp in unicode
+ if not isinstance(directory, unicode):
+ directory = unicode(directory, getpreferredencoding())
+ if not exists(directory):
+ return
+ for fn in sorted(listdir(directory)):
+ fp = join(directory, fn)
+ # check name
+ if not match("^\d+-.*\.py$", fn):
+ continue
+ # check execution bit
+ if not access(fp, X_OK):
+ continue
+ # get module content
+ try:
+ fc = open(fp, "r").read()
+ except Exception as e:
+ raise ISError(u"Unable to read script %s" % fp, e)
+ # yield complete file path, file name and file content
+ yield (fp, fn, fc)
+
+ def add_scripts(self, tarball, directory):
+ '''
+ Add scripts inside a directory into a tarball
+ '''
+ basedirectory = basename(directory)
+ arrow(u"Add %s scripts" % basedirectory)
+ arrowlevel(1)
+ # adding base directory
+ ti = tarball.gettarinfo(directory, arcname=basedirectory)
+ ti.mode = 0755
+ ti.uid = ti.gid = 0
+ ti.uname = ti.gname = ""
+ tarball.addfile(ti)
+ # adding each file
+ for fp, fn, fc in self.select_scripts(directory):
+ # check input unicode stuff
+ assert(isinstance(fp, unicode))
+ assert(isinstance(fn, unicode))
+ assert(isinstance(fc, str))
+ # add file into tarball
+ tarball.add_str(join(basedirectory, fn),
+ fc,
+ REGTYPE,
+ 0755,
+ int(stat(fp).st_mtime))
+ arrow(u"%s added" % fn)
+ arrowlevel(-1)
+
+ def check_scripts(self, directory):
+ '''
+ Check if scripts inside a directory can be compiled
+ '''
+ basedirectory = basename(directory)
+ arrow(u"Checking %s scripts" % basedirectory)
+ arrowlevel(1)
+ # checking each file
+ for fp, fn, fc in self.select_scripts(directory):
+ # check input unicode stuff
+ assert(isinstance(fp, unicode))
+ assert(isinstance(fn, unicode))
+ assert(isinstance(fc, str))
+ arrow(fn)
+ try:
+ compile(fc, fn.encode(getpreferredencoding()), "exec")
+ except SyntaxError as e:
+ raise ISError(exception=e)
+ arrowlevel(-1)
+
+ def run_build(self):
+ '''
+ Run build scripts
+ '''
+ rebuild_list = []
+ self.run_scripts(basename(self.build_path),
+ lambda: self.select_scripts(self.build_path),
+ self.payload_path,
+ {"rebuild": rebuild_list})
+ return rebuild_list
+
+ def generate_json_description(self):
+ '''
+ Generate a JSON description file
+ '''
+ arrow("Generating JSON description")
+ arrowlevel(1)
+ # copy description
+ desc = self.description.copy()
+ # only store compressor patterns
+ desc["compressor"] = desc["compressor"]["patterns"]
+ # timestamp image
+ arrow("Timestamping")
+ desc["date"] = int(time())
+ # watermark
+ desc["is_build_version"] = VERSION
+ # append payload infos
+ arrow("Checksumming payloads")
+ desc["payload"] = {}
+ for payload_name in self.select_payloads():
+ arrow(payload_name, 1)
+ # getting payload info
+ payload_desc = self.describe_payload(payload_name)
+ # compute md5 and size
+ fileobj = PipeFile(payload_desc["link_path"], "r")
+ fileobj.consume()
+ fileobj.close()
+ # create payload entry
+ desc["payload"][payload_name] = {
+ "md5": fileobj.md5,
+ "size": fileobj.size,
+ "isdir": payload_desc["isdir"],
+ "uid": payload_desc["uid"],
+ "gid": payload_desc["gid"],
+ "mode": payload_desc["mode"],
+ "mtime": payload_desc["mtime"],
+ "compressor": payload_desc["compressor"]
+ }
+ arrowlevel(-1)
+ # check md5 are uniq
+ md5s = [v["md5"] for v in desc["payload"].values()]
+ if len(md5s) != len(set(md5s)):
+ raise ISError("Two payloads cannot have the same md5")
+ # serialize
+ return dumps(desc)
+
+ def parse_description(self):
+ '''
+ Raise an exception if the description file is invalid and return vars to include
+ '''
+ arrow("Parsing description")
+ d = dict()
+ try:
+ descpath = join(self.base_path, "description")
+ cp = ConfigObj(descpath,
+ configspec=DESCRIPTION_CONFIG_SPEC.splitlines(),
+ encoding="utf8", file_error=True)
+ res = cp.validate(Validator({"IS_name": Image.check_name,
+ "IS_version": Image.check_version,
+ "IS_min_version": Image.check_min_version}),
+ preserve_errors=True)
+ # If everything is fine, the validation return True
+ # Else, it returns a list of (section, optname, error)
+ if res is not True:
+ for section, optname, error in flatten_errors(cp, res):
+ # If error is False, this means no value has been supplied,
+ # so we use the default value
+ # Else, the check has failed
+ if error:
+ error('Wrong description file, %s %s: %s' % (section, optname, error))
+ for n in ("name","version", "description", "author", "is_min_version"):
+ d[n] = cp["image"][n]
+ d["compressor"] = {}
+ # set payload compressor
+ d["compressor"]["patterns"] = cp["compressor"].items()
+ if not d["compressor"]["patterns"]:
+ d["compressor"]["patterns"] = [(Image.default_compressor, "*")]
+ for compressor, patterns in cp["compressor"].items():
+ # is a valid compressor?
+ get_compressor_path(compressor)
+ for pattern in patterns:
+ for payname in fnmatch.filter(self.select_payloads(), pattern):
+ d["compressor"][payname] = compressor
+ except Exception as e:
+ raise ISError(u"Bad description", e)
+ return d
+
+ def parse_changelog(self):
+ '''
+ Create a changelog object from a file
+ '''
+ # try to find a changelog file
+ try:
+ path = join(self.base_path, "changelog")
+ fo = codecs.open(path, "r", "utf8")
+ except IOError:
+ return None
+ # we have it, we need to check everything is ok
+ arrow("Parsing changelog")
+ try:
+ cl = Changelog(fo.read())
+ except Exception as e:
+ raise ISError(u"Bad changelog", e)
+ return cl
+
+ def compressor(self, payname):
+ '''
+ Return payload compressor
+ '''
+ try:
+ return self.description["compressor"][payname]
+ except KeyError:
+ # set default compressor if no compressor is specified
+ return Image.default_compressor
+
+
+DESCRIPTION_TPL = u"""[image]
+name = %(name)s
+version = %(version)s
+description = %(description)s
+author = %(author)s
+is_min_version = %(is_min_version)s
+
+[compressor]
+%(compressor)s
+"""
+
+CHANGELOG_TPL = u"""[1]
+- Initial version
+"""
+
+BUILD_TPL = u"""# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# global rebuild object allow you to force rebuild of payloads
+# to force rebuild of payload nammed rootfs add it to the rebuild list
+# rebuild list is empty by default
+#rebuild += ["rootfs"]
+
+# vim:set ts=4 sw=4 et:
+"""
+
+PARSER_TPL = u"""# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# global image object is a reference to current image
+# global parser object is your installsystems subparser (argparse)
+
+# you can use exit() to break the execution of the script
+
+import os
+import argparse
+from installsystems.printer import arrow
+
+class TargetAction(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ if not os.path.isdir(values):
+ raise Exception(u"Invalid target directory %s" % values)
+ namespace.target = values
+
+parser.add_argument("-n", "--hostname", dest="hostname", type=str, required=True)
+parser.add_argument("target", type=str, action=TargetAction,
+ help="target installation directory")
+
+# vim:set ts=4 sw=4 et:
+"""
+
+SETUP_TPL = u"""# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# global image object is a reference to current image
+# namespace object is the persistant, it can be used to store data accross scripts
+
+# you can use exit() to break the execution of the script
+
+from installsystems.printer import arrow
+
+arrow(u"hostname: %s" % namespace.hostname)
+
+# uncomment to extract payload named root in namespace.target directory
+#image.payload["rootfs"].extract(namespace.target)
+
+# vim:set ts=4 sw=4 et:
+"""
+
+# This must not be a unicode string, because configobj doesn't decode configspec
+# with the provided encoding
+DESCRIPTION_CONFIG_SPEC = """\
+[image]
+name = IS_name
+version = IS_version
+description = string
+author = string
+is_min_version = IS_min_version
+
+[compressor]
+__many__ = force_list
+"""
diff --git a/installsystems/tarball.py b/installsystems/image/tarball.py
similarity index 84%
rename from installsystems/tarball.py
rename to installsystems/image/tarball.py
index bc449ecd756ae49eea24d36026ca9a9bf11acc42..38aff64d4958746e0a7c59d1481fbe64c15127db 100644
--- a/installsystems/tarball.py
+++ b/installsystems/image/tarball.py
@@ -16,16 +16,20 @@
# You should have received a copy of the GNU Lesser General Public License
# along with Installsystems. If not, see .
+from StringIO import StringIO
+from installsystems.exception import ISError
+from os import chown, lchown
+from re import match
+from sys import platform
+from tarfile import TarFile, TarInfo, REGTYPE, ExtractError
+from time import time
+
+# use module prefix because we test presence of geteuid
import os
-import sys
-import time
-import tarfile
-import StringIO
-import re
+# use module prefix because a function is named filter
import fnmatch
-from installsystems.exception import *
-class Tarball(tarfile.TarFile):
+class Tarball(TarFile):
'''
Tarball wrapper
'''
@@ -37,19 +41,19 @@ class Tarball(tarfile.TarFile):
'''
if isinstance(name, unicode):
name = name.encode("UTF-8")
- ti = tarfile.TarInfo(name)
+ ti = TarInfo(name)
# set tarinfo attribute
for v in ("name", "ftype", "mode", "mtime", "uid", "gid", "uname", "gname"):
if vars()[v] is not None:
vars(ti)[v] = vars()[v]
# set mtime to current if not specified
if mtime is None:
- ti.mtime = int(time.time())
+ ti.mtime = int(time())
# unicode char is encoded in UTF-8, has changelog must be in UTF-8
if isinstance(content, unicode):
content = content.encode("UTF-8")
ti.size = len(content) if content is not None else 0
- self.addfile(ti, StringIO.StringIO(content))
+ self.addfile(ti, StringIO(content))
def get_str(self, name):
'''
@@ -76,7 +80,7 @@ class Tarball(tarfile.TarFile):
names = super(Tarball, self).getnames()
# regexp matching
if re_pattern is not None:
- names = filter(lambda x: re.match(re_pattern, x), names)
+ names = filter(lambda x: match(re_pattern, x), names)
# globbing matching
if glob_pattern is not None:
names = fnmatch.filter(names, glob_pattern)
@@ -110,9 +114,9 @@ class Tarball(tarfile.TarFile):
# We have to be root to do so.
try:
if tarinfo.issym() and hasattr(os, "lchown"):
- os.lchown(targetpath, tarinfo.uid, tarinfo.gid)
+ lchown(targetpath, tarinfo.uid, tarinfo.gid)
else:
- if sys.platform != "os2emx":
- os.chown(targetpath, tarinfo.uid, tarinfo.gid)
+ if platform != "os2emx":
+ chown(targetpath, tarinfo.uid, tarinfo.gid)
except EnvironmentError, e:
raise ExtractError("could not change owner")
diff --git a/installsystems/printer.py b/installsystems/printer.py
index 6407384b1c438698ae59c9c1f2d1f7d1d50c264c..c7a22b7cea4c22dc256c3e515b6feccae6c26703 100644
--- a/installsystems/printer.py
+++ b/installsystems/printer.py
@@ -20,13 +20,16 @@
Install Systems Printer module
'''
-import locale
-import sys
-import os
-import re
-import installsystems
-from installsystems.exception import *
+from installsystems.exception import ISException
+from locale import getpreferredencoding
+from os import linesep, _exit
+from re import sub
+from sys import stdout, stderr, exc_info
+from traceback import print_exc
+from warnings import filterwarnings
+
+VERBOSITY = 1 # 0: quiet, 1: normal, 2: debug
NOCOLOR = False
COLOR = {
@@ -58,9 +61,9 @@ COLOR = {
# arrow_level is between 1 and 3
# is the level of indentation of arrow
-_arrow_level = 1
+_ARROW_LEVEL = 1
-def out(message="", fd=sys.stdout, endl=os.linesep, flush=True):
+def out(message="", fd=stdout, endl=linesep, flush=True):
'''
Print message colorised in fd ended by endl
'''
@@ -72,30 +75,30 @@ def out(message="", fd=sys.stdout, endl=os.linesep, flush=True):
f = lambda obj: ""
else:
f = lambda obj: COLOR[obj.group(1)]
- message = re.sub(color_pattern, f, message)
+ message = sub(color_pattern, f, message)
# convert unicode into str before write
# this can cause issue on python 2.6
if type(message) == unicode:
- message = message.encode(locale.getpreferredencoding(), "replace")
+ message = message.encode(getpreferredencoding(), "replace")
# printing
fd.write("%s%s" % (message, endl))
if flush:
fd.flush()
-def err(message, fd=sys.stderr, endl=os.linesep):
+def err(message, fd=stderr, endl=linesep):
'''
Print a message on stderr
'''
out(message, fd, endl)
-def fatal(message, quit=True, fd=sys.stderr, endl=os.linesep):
+def fatal(message, quit=True, fd=stderr, endl=linesep):
out(u"#light##red#Fatal:#reset# #red#%s#reset#" % message, fd, endl)
- if sys.exc_info()[0] is not None and installsystems.verbosity > 1:
+ if exc_info()[0] is not None and VERBOSITY > 1:
raise
if quit:
- os._exit(21)
+ _exit(21)
-def error(message=None, exception=None, quit=True, fd=sys.stderr, endl=os.linesep):
+def error(message=None, exception=None, quit=True, fd=stderr, endl=linesep):
# create error message
pmesg = u""
if message is not None:
@@ -109,59 +112,69 @@ def error(message=None, exception=None, quit=True, fd=sys.stderr, endl=os.linese
if pmesg != "":
out(u"#light##red#Error:#reset# #red#%s#reset#" % pmesg, fd, endl)
# print traceback in debug mode
- if installsystems.verbosity > 1 and isinstance(exception, ISException):
+ if VERBOSITY > 1 and isinstance(exception, ISException):
exception.print_tb(fd)
- elif installsystems.verbosity > 1:
+ elif VERBOSITY > 1:
out("#l##B#", fd=fd, endl="")
- traceback.print_exc(file=fd)
+ print_exc(file=fd)
out("#R#", fd=fd, endl="")
if quit:
exit(42)
-def warn(message, fd=sys.stderr, endl=os.linesep):
+def warn(message, fd=stderr, endl=linesep):
out(u"#light##yellow#Warning:#reset# #yellow#%s#reset#" % message, fd, endl)
-def info(message, fd=sys.stderr, endl=os.linesep):
- if installsystems.verbosity > 0:
+def info(message, fd=stderr, endl=linesep):
+ if VERBOSITY > 0:
out(u"#light#Info:#reset# %s" % message, fd, endl)
-def debug(message, fd=sys.stderr, endl=os.linesep):
- if installsystems.verbosity > 1:
+def debug(message, fd=stderr, endl=linesep):
+ '''
+ Print debug information
+ '''
+ if VERBOSITY > 1:
out(u"#light##black#%s#reset#" % message, fd, endl)
def arrowlevel(inc=None, level=None):
- global _arrow_level
- old_level = _arrow_level
+ '''
+ Modify the current arrow level
+ '''
+ global _ARROW_LEVEL
+ old_level = _ARROW_LEVEL
if level is not None:
- _arrow_level = max(1, min(4, level))
+ _ARROW_LEVEL = max(1, min(4, level))
if inc is not None:
- _arrow_level = max(1, min(4, _arrow_level + inc))
+ _ARROW_LEVEL = max(1, min(4, _ARROW_LEVEL + inc))
return old_level
-def arrow(message, inclevel=None, level=None, fd=sys.stdout, endl=os.linesep):
- if installsystems.verbosity == 0:
+def arrow(message, inclevel=None, level=None, fd=stdout, endl=linesep):
+ '''
+ Print a message prefixed by an arrow
+ Arrows have indentation levels
+ '''
+ if VERBOSITY == 0:
return
# define new level
old_level = arrowlevel(inc=inclevel, level=level)
- if _arrow_level == 1:
+ if _ARROW_LEVEL == 1:
out(u"#light##red#=>#reset# %s" % message, fd=fd, endl=endl)
- elif _arrow_level == 2:
+ elif _ARROW_LEVEL == 2:
out(u" #light##yellow#=>#reset# %s" % message, fd=fd, endl=endl)
- elif _arrow_level == 3:
+ elif _ARROW_LEVEL == 3:
out(u" #light##blue#=>#reset# %s" % message, fd=fd, endl=endl)
- elif _arrow_level == 4:
+ elif _ARROW_LEVEL == 4:
out(u" #light##green#=>#reset# %s" % message, fd=fd, endl=endl)
# restore old on one shot level
arrowlevel(level = old_level)
-def ask(message, fd=sys.stdout, endl=""):
+def ask(message, fd=stdout, endl=""):
'''
Ask a question on stdin
'''
out(message, fd=fd, endl=endl, flush=True)
return raw_input()
-def confirm(message=None, ans=None, fd=sys.stdout, endl=""):
+def confirm(message=None, ans=None, fd=stdout, endl=""):
'''
Ask a question on stdin
'''
@@ -170,3 +183,17 @@ def confirm(message=None, ans=None, fd=sys.stdout, endl=""):
if message is None:
message = u"#u##l##w#Are you sure?#R# (%s) " % ans
return ask(message, fd, endl) == ans
+
+def setmode(verbosity=None, nocolor=None):
+ '''
+ Set printer mode
+ This is done to allow write access to global variables
+ '''
+ global VERBOSITY, NOCOLOR
+ if verbosity is not None:
+ # no warning if we are not in debug mode
+ if verbosity < 2:
+ filterwarnings("ignore")
+ VERBOSITY = verbosity
+ if nocolor is not None:
+ NOCOLOR = nocolor
diff --git a/installsystems/repository.py b/installsystems/repository.py
deleted file mode 100644
index de0272d44298ee5fcf8df0d0cf4789c3a9df7385..0000000000000000000000000000000000000000
--- a/installsystems/repository.py
+++ /dev/null
@@ -1,1312 +0,0 @@
-# -*- python -*-
-# -*- coding: utf-8 -*-
-
-# This file is part of Installsystems.
-#
-# Installsystems is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Installsystems is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with Installsystems. If not, see .
-
-'''
-Repository stuff
-'''
-
-import os
-import re
-import time
-import shutil
-import pwd
-import grp
-import tempfile
-import fnmatch
-import cStringIO
-import json
-import uuid
-import string
-import installsystems
-import installsystems.tools as istools
-from installsystems.exception import *
-from installsystems.printer import *
-from installsystems.tarball import Tarball
-from installsystems.tools import PipeFile
-from installsystems.image import Image, PackageImage
-from installsystems.database import Database
-
-class RepositoryFactory(object):
- '''
- Repository factory
- '''
-
- def __init__(self):
- self.repo_class = {
- 1: Repository_v1,
- 2: Repository
- }
-
- def create(self, config):
- db = None
- if not config.offline:
- try:
- db = Database(config.dbpath)
- except ISWarning as e:
- warn('[%s]: %s' % (config.name, e))
- config.offline = True
- except ISError:
- debug(u"Unable to load database %s" % config.dbpath)
- config.offline = True
- if config.offline:
- debug(u"Repository %s is offline" % config.name)
- if db is None:
- return Repository(config)
- else:
- return self.repo_class[int(db.version)](config, db)
-
-class Repository(object):
- '''
- Repository class
- '''
-
- @staticmethod
- def is_repository_name(name):
- return re.match("^[-_\w]+$", name) is not None
-
- @staticmethod
- def check_repository_name(name):
- '''
- Raise exception is repository name is invalid
- '''
- if not Repository.is_repository_name(name):
- raise ISError(u"Invalid repository name %s" % name)
- return name
-
- @staticmethod
- def split_image_path(path):
- '''
- Split an image path (repo/image:version)
- in a tuple (repo, image, version)
- '''
- x = re.match(u"^(?:([^/:]+)/)?([^/:]+)?(?::v?([^/:]+)?)?$", path)
- if x is None:
- raise ISError(u"invalid image path: %s" % path)
- return x.group(1, 2, 3)
-
- @staticmethod
- def split_repository_list(repolist, filter=None):
- '''
- Return a list of repository from a comma/spaces separated names of repo
- '''
- if filter is None:
- filter = Repository.is_repository_name
- return [r for r in re.split("[ ,\n\t\v]+", repolist) if filter(r)]
-
- @classmethod
- def diff(cls, repo1, repo2):
- '''
- Compute a diff between two repositories
- '''
- arrow(u"Diff between repositories #y#%s#R# and #g#%s#R#" % (repo1.config.name,
- repo2.config.name))
- # Get info from databases
- i_dict1 = dict((b[0], b[1:]) for b in repo1.db.ask(
- "SELECT md5, name, version FROM image").fetchall())
- i_set1 = set(i_dict1.keys())
- i_dict2 = dict((b[0], b[1:]) for b in repo2.db.ask(
- "SELECT md5, name, version FROM image").fetchall())
- i_set2 = set(i_dict2.keys())
- p_dict1 = dict((b[0], b[1:]) for b in repo1.db.ask(
- "SELECT md5, name FROM payload").fetchall())
- p_set1 = set(p_dict1.keys())
- p_dict2 = dict((b[0], b[1:]) for b in repo2.db.ask(
- "SELECT md5, name FROM payload").fetchall())
- p_set2 = set(p_dict2.keys())
- # computing diff
- i_only1 = i_set1 - i_set2
- i_only2 = i_set2 - i_set1
- p_only1 = p_set1 - p_set2
- p_only2 = p_set2 - p_set1
- # printing functions
- pimg = lambda r,c,m,d,: out("#%s#Image only in repository %s: %s v%s (%s)#R#" %
- (c, r.config.name, d[m][0], d[m][1], m))
- ppay = lambda r,c,m,d,: out("#%s#Payload only in repository %s: %s (%s)#R#" %
- (c, r.config.name, d[m][0], m))
- # printing image diff
- for md5 in i_only1: pimg(repo1, "y", md5, i_dict1)
- for md5 in p_only1: ppay(repo1, "y", md5, p_dict1)
- for md5 in i_only2: pimg(repo2, "g", md5, i_dict2)
- for md5 in p_only2: ppay(repo2, "g", md5, p_dict2)
-
- def __init__(self, config, db=None):
- self.config = config
- self.local = istools.isfile(self.config.path)
- self.db = db
-
- def __getattribute__(self, name):
- '''
- Raise an error if repository is unavailable
- Unavailable can be caused because db is not accessible or
- because repository is not initialized
- '''
- config = object.__getattribute__(self, "config")
- # config, init, local and upgrade are always accessible
- if name in ("init", "config", "local", "upgrade"):
- return object.__getattribute__(self, name)
- # if no db (not init or not accessible) raise error
- if config.offline:
- raise ISError(u"Repository %s is offline" % config.name)
- return object.__getattribute__(self, name)
-
- @property
- def version(self):
- '''
- Return repository version
- '''
- return self.db.version
-
- @property
- def uuid(self):
- '''
- Return repository UUID
- '''
- return self.db.ask("SELECT uuid from repository").fetchone()[0]
-
- def init(self):
- '''
- Initialize an empty base repository
- '''
- config = self.config
- # check local repository
- if not self.local:
- raise ISError(u"Repository creation must be local")
- # create base directories
- arrow("Creating base directories")
- arrowlevel(1)
- # creating local directory
- try:
- if os.path.exists(config.path):
- arrow(u"%s already exists" % config.path)
- else:
- istools.mkdir(config.path, config.uid, config.gid, config.dmod)
- arrow(u"%s directory created" % config.path)
- except Exception as e:
- raise ISError(u"Unable to create directory %s" % config.path, e)
- arrowlevel(-1)
- # create database
- d = Database.create(config.dbpath)
- istools.chrights(config.dbpath, uid=config.uid,
- gid=config.gid, mode=config.fmod)
- # load database
- self.db = Database(config.dbpath)
- # mark repo as not offline
- self.config.offline = False
- # create/update last file
- self.update_last()
-
- def update_last(self):
- '''
- Update last file to current time
- '''
- # check local repository
- if not self.local:
- raise ISError(u"Repository must be local")
- try:
- arrow("Updating last file")
- last_path = os.path.join(self.config.path, self.config.lastname)
- open(last_path, "w").write("%s\n" % int(time.time()))
- istools.chrights(last_path, self.config.uid, self.config.gid, self.config.fmod)
- except Exception as e:
- raise ISError(u"Update last file failed", e)
-
- def last(self, name):
- '''
- Return last version of name in repo or None if not found
- '''
- r = self.db.ask("SELECT version FROM image WHERE name = ?", (name,)).fetchall()
- # no row => no way
- if r is None:
- return None
- f = lambda x,y: x[0] if istools.compare_versions(x[0], y[0]) > 0 else y[0]
- # return last
- return reduce(f, r)
-
- def _add(self, image):
- '''
- Add description to db
- '''
- arrow("Adding metadata")
- self.db.begin()
- # insert image information
- arrow("Image", 1)
- self.db.ask("INSERT INTO image values (?,?,?,?,?,?,?,?,?)",
- (image.md5,
- image.name,
- image.version,
- image.date,
- image.author,
- image.description,
- image.size,
- image.is_min_version,
- image.format,
- ))
- # insert data information
- arrow("Payloads", 1)
- for name, obj in image.payload.items():
- self.db.ask("INSERT INTO payload values (?,?,?,?,?)",
- (obj.md5,
- image.md5,
- name,
- obj.isdir,
- obj.size,
- ))
- # on commit
- self.db.commit()
- # update last file
- self.update_last()
-
- def add(self, image, delete=False):
- '''
- Add a packaged image to repository
- if delete is true, remove original files
- '''
- # check local repository
- if not self.local:
- raise ISError(u"Repository addition must be local")
- # cannot add already existant image
- if self.has(image.name, image.version):
- raise ISError(u"Image already in database, delete first!")
- # adding file to repository
- arrow("Copying images and payload")
- for obj in [ image ] + image.payload.values():
- dest = os.path.join(self.config.path, obj.md5)
- basesrc = os.path.basename(obj.path)
- if os.path.exists(dest):
- arrow(u"Skipping %s: already exists" % basesrc, 1)
- else:
- arrow(u"Adding %s (%s)" % (basesrc, obj.md5), 1)
- dfo = open(dest, "wb")
- sfo = PipeFile(obj.path, "r", progressbar=True)
- sfo.consume(dfo)
- sfo.close()
- dfo.close()
- istools.chrights(dest, self.config.uid,
- self.config.gid, self.config.fmod)
- # copy is done. create a image inside repo
- r_image = PackageImage(os.path.join(self.config.path, image.md5),
- md5name=True)
- # checking must be done with original md5
- r_image.md5 = image.md5
- # checking image and payload after copy
- r_image.check("Check image and payload")
- self._add(image)
- # removing orginal files
- if delete:
- arrow("Removing original files")
- for obj in [ image ] + image.payload.values():
- arrow(os.path.basename(obj.path), 1)
- os.unlink(obj.path)
-
- def getallmd5(self):
- '''
- Get list of all md5 in DB
- '''
- res = self.db.ask("SELECT md5 FROM image UNION SELECT md5 FROM payload").fetchall()
- return [ md5[0] for md5 in res ]
-
- def check(self):
- '''
- Check repository for unreferenced and missing files
- '''
- # Check if the repo is local
- if not self.local:
- raise ISError(u"Repository must be local")
- local_files = set(os.listdir(self.config.path))
- local_files.remove(self.config.dbname)
- local_files.remove(self.config.lastname)
- db_files = set(self.getallmd5())
- # check missing files
- arrow("Checking missing files")
- missing_files = db_files - local_files
- if len(missing_files) > 0:
- out(os.linesep.join(missing_files))
- # check unreferenced files
- arrow("Checking unreferenced files")
- unref_files = local_files - db_files
- if len(unref_files) > 0:
- out(os.linesep.join(unref_files))
- # check corruption of local files
- arrow("Checking corrupted files")
- for f in local_files:
- fo = PipeFile(os.path.join(self.config.path, f))
- fo.consume()
- fo.close()
- if fo.md5 != f:
- out(f)
-
- def clean(self, force=False):
- '''
- Clean the repository's content
- '''
- # Check if the repo is local
- if not self.local:
- raise ISError(u"Repository must be local")
- allmd5 = set(self.getallmd5())
- repofiles = set(os.listdir(self.config.path)) - set([self.config.dbname, self.config.lastname])
- dirtyfiles = repofiles - allmd5
- if len(dirtyfiles) > 0:
- # print dirty files
- arrow("Dirty files:")
- for f in dirtyfiles:
- arrow(f, 1)
- # ask confirmation
- if not force and not confirm("Remove dirty files? (yes) "):
- raise ISError(u"Aborted!")
- # start cleaning
- arrow("Cleaning")
- for f in dirtyfiles:
- p = os.path.join(self.config.path, f)
- arrow(u"Removing %s" % p, 1)
- try:
- if os.path.isdir(p):
- os.rmdir(p)
- else:
- os.unlink(p)
- except:
- warn(u"Removing %s failed" % p)
- else:
- arrow("Nothing to clean")
-
- def delete(self, name, version, payloads=True):
- '''
- Delete an image from repository
- '''
- # check local repository
- if not self.local:
- raise ISError(u"Repository deletion must be local")
- # get md5 of files related to images (exception is raised if not exists
- md5s = self.getmd5(name, version)
- # cleaning db (must be done before cleaning)
- arrow("Cleaning database")
- arrow("Remove payloads from database", 1)
- self.db.begin()
- for md5 in md5s[1:]:
- self.db.ask("DELETE FROM payload WHERE md5 = ? AND image_md5 = ?",
- (md5, md5s[0])).fetchone()
- arrow("Remove image from database", 1)
- self.db.ask("DELETE FROM image WHERE md5 = ?",
- (md5s[0],)).fetchone()
- self.db.commit()
- # Removing files
- arrow("Removing files from pool")
- # if asked don't remove payloads
- if not payloads:
- md5s = [ md5s[0] ]
- arrowlevel(1)
- for md5 in md5s:
- self._remove_file(md5)
- arrowlevel(-1)
- # update last file
- self.update_last()
-
- def images(self):
- '''
- Return a dict of information on images
- '''
- db_images = self.db.ask("SELECT md5, name, version, date, author, \
- description, size, is_min_version, format \
- FROM image ORDER BY name, version").fetchall()
-
- images = []
- field = ("md5", "name", "version", "date", "author", "description",
- "size", "is_min_version", "format")
- for info in db_images:
- d = dict(zip(field, info))
- d["repo"] = self.config.name
- d["url"] = os.path.join(self.config.path, d["md5"])
- images.append(d)
- return images
-
- def payloads(self):
- '''
- Return a dict of information on payloads
- '''
- db_payloads = self.db.ask("SELECT payload.md5,payload.size,payload.isdir,image.name,image.version,payload.name FROM payload inner join image on payload.image_md5 = image.md5").fetchall()
- res = {}
- for payload in db_payloads:
- md5 = payload[0]
- # create entry if not exists
- if md5 not in res:
- res[md5] = {"size": payload[1], "isdir": payload[2], "images": {}}
- # add image to list
- imgpath = u"%s/%s:%s" % (self.config.name, payload[3], payload[4])
- res[md5]["images"][imgpath] = {"repo": self.config.name,
- "imgname": payload[3],
- "imgver": payload[4],
- "payname": payload[5]}
- return res
-
- def search(self, pattern):
- '''
- Search pattern in a repository
- '''
- images = self.db.ask("SELECT name, version, author, description\
- FROM image\
- WHERE name LIKE ? OR\
- description LIKE ? OR\
- author LIKE ?",
- tuple( [u"%%%s%%" % pattern ] * 3)
- ).fetchall()
- for name, version, author, description in images:
- arrow(u"%s v%s" % (name, version), 1)
- out(u" #yellow#Author:#reset# %s" % author)
- out(u" #yellow#Description:#reset# %s" % description)
-
- def _remove_file(self, filename):
- '''
- Remove a filename from pool. Check if it's not needed by db before
- '''
- # check existance in table image
- have = False
- for table in ("image", "payload"):
- have = have or self.db.ask(u"SELECT md5 FROM %s WHERE md5 = ? LIMIT 1" % table,
- (filename,)).fetchone() is not None
- # if no reference, delete!
- if not have:
- arrow(u"%s, deleted" % filename)
- os.unlink(os.path.join(self.config.path, filename))
- else:
- arrow(u"%s, skipped" % filename)
-
- def has(self, name, version):
- '''
- Return the existance of a package
- '''
- return self.db.ask("SELECT name,version FROM image WHERE name = ? AND version = ? LIMIT 1", (name,version)).fetchone() is not None
-
- def get(self, name, version=None):
- '''
- Return an image from a name and version
- '''
- # is no version take the last
- if version is None:
- version = self.last(name)
- if version is None:
- raise ISError(u"Unable to find image %s in %s" % (name,
- self.config.name))
- # get file md5 from db
- r = self.db.ask("select md5 from image where name = ? and version = ? limit 1",
- (name, version)).fetchone()
- if r is None:
- raise ISError(u"Unable to find image %s v%s in %s" % (name, version,
- self.config.name))
- path = os.path.join(self.config.path, r[0])
- # getting the file
- arrow(u"Loading image %s v%s from repository %s" % (name,
- version,
- self.config.name))
- memfile = cStringIO.StringIO()
- try:
- fo = PipeFile(path, "r")
- fo.consume(memfile)
- fo.close()
- except Exception as e:
- raise ISError(u"Loading image %s v%s failed" % (name, version), e)
- memfile.seek(0)
- pkg = PackageImage(path, fileobj=memfile, md5name=True)
- if pkg.md5 != r[0]:
- raise ISError(u"Image MD5 verification failure")
- return pkg
-
- def getmd5(self, name, version):
- '''
- Return an image md5 and payload md5 from name and version. Order matter !
- Image md5 will still be the first
- '''
- # get file md5 from db
- a = self.db.ask("SELECT md5 FROM image WHERE name = ? AND version = ? LIMIT 1",
- (name,version)).fetchone()
- if a is None:
- raise ISError(u"No such image %s version %s" % (name, version))
- b = self.db.ask("SELECT md5 FROM payload WHERE image_md5 = ?",
- (a[0],)).fetchall()
- return [ a[0] ] + [ x[0] for x in b ]
-
- def upgrade(self):
- if self.version == Database.version:
- info("Repository already up-to-date (%s)" % self.version)
- return
- else:
- arrow("Start repository upgrade")
- arrowlevel(1)
- # Create dummy repository
- tmpdir = tempfile.mkdtemp()
- try:
- repoconf = RepositoryConfig("tmp_migrate_repo", path=tmpdir)
- dstrepo = Repository(repoconf)
- # Symlink content from repository into dummy repo
- for file in os.listdir(self.config.path):
- os.symlink(os.path.join(self.config.path, file),
- os.path.join(tmpdir, file))
- os.unlink(repoconf.dbpath)
- os.unlink(repoconf.lastpath)
- old_verbosity = installsystems.verbosity
- arrow("Initialize new database")
- # Disable unwanted message during upgrade
- installsystems.verbosity = 0
- dstrepo.init()
- # Restore verbosity
- installsystems.verbosity = old_verbosity
- md5s = self.db.ask("SELECT md5 FROM image").fetchall()
- # Copy images to dummy repository (fill new database)
- arrow("Fill database with images")
- arrowlevel(1)
- installsystems.verbosity = 0
- for img in [PackageImage(os.path.join(self.config.path, md5[0]),
- md5name=True) for md5 in md5s]:
- installsystems.verbosity = old_verbosity
- arrow("%s v%s" % (img.name, img.version))
- installsystems.verbosity = 0
- dstrepo.add(img)
- installsystems.verbosity = old_verbosity
- arrowlevel(-1)
- arrow("Backup old database")
- shutil.move(self.config.dbpath,
- os.path.join("%s.bak" % self.config.dbpath))
- # Replace old db with the new from dummy repository
- shutil.move(repoconf.dbpath, self.config.dbpath)
- self.update_last()
- arrowlevel(-1)
- arrow("Repository upgrade complete")
- finally:
- # Remove dummy repository
- shutil.rmtree(tmpdir)
- @property
- def motd(self):
- '''
- Return repository message of the day
- '''
- motd = self.db.ask("SELECT motd FROM repository").fetchone()[0]
- return None if len(motd) == 0 else motd
-
- def setmotd(self, value=""):
- '''
- Set repository message of the day
- '''
- # check local repository
- if not self.local:
- raise ISError(u"Repository must be local")
- arrow("Updating motd")
- self.db.ask("UPDATE repository SET motd = ?", (value,))
- self.update_last()
-
-
-class Repository_v1(Repository):
-
- def _add(self, image):
- '''
- Add description to db
- '''
- arrow("Adding metadata")
- self.db.begin()
- # insert image information
- arrow("Image", 1)
- self.db.ask("INSERT INTO image values (?,?,?,?,?,?,?)",
- (image.md5,
- image.name,
- image.version,
- image.date,
- image.author,
- image.description,
- image.size,
- ))
- # insert data information
- arrow("Payloads", 1)
- for name, obj in image.payload.items():
- self.db.ask("INSERT INTO payload values (?,?,?,?,?)",
- (obj.md5,
- image.md5,
- name,
- obj.isdir,
- obj.size,
- ))
- # on commit
- self.db.commit()
- # update last file
- self.update_last()
-
- @property
- def uuid(self):
- '''
- Repository v1 doesn't support UUID
- '''
- return None
-
- def images(self):
- '''
- Return a dict of information on images
- '''
- db_images = self.db.ask("SELECT md5, name, version, date, author, \
- description, size \
- FROM image ORDER BY name, version").fetchall()
-
- images = []
- field = ("md5", "name", "version", "date", "author", "description",
- "size")
- for info in db_images:
- d = dict(zip(field, info))
- d["repo"] = self.config.name
- d["url"] = os.path.join(self.config.path, d["md5"])
- d["format"] = 1
- d["is_min_version"] = 9
- images.append(d)
- return images
-
- @property
- def motd(self):
- '''
- Return repository message of the day.
- Repository v1 don't have message of day
- '''
- return None
-
- def setmotd(self, value=""):
- '''
- Don't set repository message of the day. Not supported by v1.
- '''
- # check local repository
- warn(u"Repository v1 doesn't support motd. Unable to set")
- return
-
-class RepositoryManager(object):
- '''
- Manage multiple repositories
-
- This call implement a cache and a manager for multiple repositories
- Default repository timeout is 3
- '''
-
- def __init__(self, cache_path=None, timeout=None, filter=None, search=None):
- self.repos = []
- self.tempfiles = []
- self.filter = [] if filter is None else filter
- self.search = [] if search is None else search
- self.timeout = timeout or 3
- self.factory = RepositoryFactory()
- debug(u"Repository timeout setted to %ds" % self.timeout)
- if cache_path is None:
- self.cache_path = None
- debug("No repository cache")
- else:
- if not istools.isfile(cache_path):
- raise NotImplementedError("Repository cache must be local")
- self.cache_path = os.path.abspath(cache_path)
- # must_path is a list of directory which must exists
- # create directory if not exists
- if not os.path.exists(self.cache_path):
- os.mkdir(self.cache_path)
- # ensure directories are avaiblable
- if not os.access(self.cache_path, os.W_OK | os.X_OK):
- raise ISError(u"%s is not writable or executable" % self.cache_path)
- debug(u"Repository cache is in %s" % self.cache_path)
-
- def __del__(self):
- # delete temporary files (used by db)
- for f in self.tempfiles:
- try:
- debug(u"Removing temporary db file %s" % f)
- os.unlink(f)
- except OSError:
- pass
-
- def __len__(self):
- '''
- Return the number of repository registered
- '''
- return len(self.repos)
-
- def __getitem__(self, key):
- '''
- Return a repostiory by its position in list
- '''
- if isinstance(key, int):
- return self.repos[key]
- elif isinstance(key, basestring):
- # match name
- for repo in self.repos:
- if repo.config.name == key:
- return repo
- # if not found, match uuid
- # we need at least 4 chars to avoid ambiguous uuid matching
- if len(key) >= 4:
- for repo in [r for r in self.repos if not r.config.offline]:
- if fnmatch.fnmatch(repo.uuid, "%s*" % key):
- return repo
- else:
- raise ISWarning("Ambiguous argument: we need at least 4 chars "
- "to match an uuid")
- raise IndexError(u"No repository named: %s" % key)
- else:
- raise TypeError(u"Invalid type %s for %s" % (type(key), key))
-
- def __contains__(self, key):
- '''
- Check if a key is a repository name
- '''
- for r in self.repos:
- if r.config.name == key:
- return True
- return False
-
- def register(self, config, temp=False, nosync=False, offline=False):
- '''
- Register a repository from its config
- temp: repository is stored in a temporary location
- nosync: register repository as online, but no sync is done before
- offline: repository is marked offline
- '''
- # check filter on name
- if len(self.filter) > 0:
- if config.name not in self.filter:
- debug(u"Filtering repository %s" % config.name)
- return
- # repository is offline
- if config.offline or offline:
- debug(u"Registering offline repository %s (%s)" % (config.path, config.name))
- # we must force offline in cast of argument offline
- config.offline = True
- self.repos.append(self.factory.create(config))
- # if path is local, no needs to create a cache
- elif istools.isfile(config.path):
- debug(u"Registering direct repository %s (%s)" % (config.path, config.name))
- self.repos.append(self.factory.create(config))
- # path is remote, we need to create a cache
- else:
- debug(u"Registering cached repository %s (%s)" % (config.path, config.name))
- self.repos.append(self._cachify(config, temp, nosync))
-
- def _cachify(self, config, temp=False, nosync=False):
- '''
- Return a config of a cached repository from an orignal config file
- :param config: repository configuration
- :param temp: repository db should be stored in a temporary location
- :param nosync: if a cache exists, don't try to update it
- '''
- # if cache is disable => temp =True
- if self.cache_path is None:
- temp = True
- try:
- original_dbpath = config.dbpath
- if temp and nosync:
- raise ISError("sync is disabled")
- elif temp:
- # this is a temporary cached repository
- tempfd, config.dbpath = tempfile.mkstemp()
- os.close(tempfd)
- self.tempfiles.append(config.dbpath)
- else:
- config.dbpath = os.path.join(self.cache_path, config.name)
- if not nosync:
- # Open remote database
- rdb = PipeFile(original_dbpath, timeout=self.timeout)
- # get remote last modification
- if rdb.mtime is None:
- # We doesn't have modification time, we use the last file
- try:
- rlast = int(PipeFile(config.lastpath, mode='r',
- timeout=self.timeout).read().strip())
- except ISError:
- rlast = -1
- else:
- rlast = rdb.mtime
- # get local last value
- if os.path.exists(config.dbpath):
- llast = int(os.stat(config.dbpath).st_mtime)
- else:
- llast = -2
- # if repo is out of date, download it
- if rlast != llast:
- try:
- arrow(u"Downloading %s" % original_dbpath)
- rdb.progressbar = True
- ldb = open(config.dbpath, "wb")
- rdb.consume(ldb)
- ldb.close()
- rdb.close()
- istools.chrights(config.dbpath,
- uid=config.uid,
- gid=config.gid,
- mode=config.fmod,
- mtime=rlast)
- except:
- if os.path.exists(config.dbpath):
- os.unlink(config.dbpath)
- raise
- except ISError as e :
- # if something append bad during caching, we mark repo as offline
- debug(u"Unable to cache repository %s: %s" % (config.name, e))
- config.offline = True
- return self.factory.create(config)
-
- @property
- def names(self):
- '''
- Return list of repository names
- '''
- return [ r.config.name for r in self.repos ]
-
- @property
- def uuids(self):
- '''
- Return a dict of repository UUID and associated names
- '''
- d = {}
- for r in self.repos:
- uuid = r.uuid
- if uuid is None:
- continue
- if uuid in d:
- d[uuid].append(r)
- else:
- d[uuid] = [r]
- return d
-
- @property
- def onlines(self):
- '''
- Return list of online repository names
- '''
- return [ r.config.name for r in self.repos if not r.config.offline ]
-
- @property
- def offlines(self):
- '''
- Return list of offlines repository names
- '''
- return [ r.config.name for r in self.repos if r.config.offline ]
-
- def select_images(self, patterns):
- '''
- Return a list of available images
- '''
- if len(self.onlines) == 0:
- raise ISError(u"No online repository")
- ans = {}
- for pattern in patterns:
- path, image, version = Repository.split_image_path(pattern)
- if image is None:
- if path is None or version is None:
- image = "*"
- else:
- # empty pattern
- continue
- # building image list
- images = {}
- for reponame in self.onlines:
- for img in self[reponame].images():
- imgname = u"%s/%s:%s" % (reponame, img["name"], img["version"])
- images[imgname] = img
- # No path means only in searchable repositories
- if path is None:
- for k, v in images.items():
- # match name
- if v["repo"] not in self.search:
- uuid = self[v["repo"]].uuid
- # match uuid
- if not [uuid for pat in self.search
- if fnmatch.fnmatch(uuid, '%s*' % pat)]:
- del images[k]
- path = "*"
- # No version means last version
- if version is None:
- version = "*"
- for repo in set((images[i]["repo"] for i in images)):
- for img in set((images[i]["name"] for i in images if images[i]["repo"] == repo)):
- versions = [ images[i]['version']
- for i in images if images[i]["repo"] == repo and images[i]["name"] == img ]
- f = lambda x,y: x if istools.compare_versions(x, y) > 0 else y
- last = reduce(f, versions)
- versions.remove(last)
- for rmv in versions:
- del images[u"%s/%s:%s" % (repo, img, rmv)]
- # if 'path*' do not match a repo name, it may be an uuid, so add
- # globbing for smart uuid matching
- if not fnmatch.filter(self.onlines, "%s*" % path):
- path = "%s*" % path
- # filter with pattern on path
- filter_pattern = u"%s/%s:%s" % (path, image, version)
- for k, img in images.items():
- if not (fnmatch.fnmatch(k, filter_pattern) or
- fnmatch.fnmatch("%s/%s" % (self[img["repo"]].uuid, k.split("/")[1]), filter_pattern)):
- del images[k]
- ans.update(images)
- return ans
-
- def search_image(self, pattern):
- '''
- Search pattern accross all registered repositories
- '''
- for repo in self.onlines:
- arrow(self[repo].config.name)
- self[repo].search(pattern)
-
- def show_images(self, patterns, o_json=False, o_long=False, o_md5=False,
- o_date=False, o_author=False, o_size=False, o_url=False,
- o_description=False, o_format=False, o_min_version=False):
- '''
- Show images inside manager
- '''
- # get images list
- images = self.select_images(patterns)
- # display result
- if o_json:
- s = json.dumps(images)
- else:
- l = []
- for imgp in sorted(images.keys()):
- img = images[imgp]
- l.append(u"%s#R#/#l##b#%s#R#:#p#%s#R#" % (
- img["repo"], img["name"], img["version"]))
- if o_md5 or o_long:
- l[-1] = l[-1] + u" (#y#%s#R#)" % img["md5"]
- if o_date or o_long:
- l.append(u" #l#date:#R# %s" % istools.time_rfc2822(img["date"]))
- if o_author or o_long:
- l.append(u" #l#author:#R# %s" % img["author"])
- if o_size or o_long:
- l.append(u" #l#size:#R# %s" % istools.human_size(img["size"]))
- if o_url or o_long:
- l.append(u" #l#url:#R# %s" % img["url"])
- if o_description or o_long:
- l.append(u" #l#description:#R# %s" % img["description"])
- if o_format or o_long:
- l.append(u" #l#format:#R# %s" % img["format"])
- if o_min_version or o_long:
- l.append(u" #l#is min version:#R# %s" % img["is_min_version"])
- s = os.linesep.join(l)
- if len(s) > 0:
- out(s)
-
- def select_payloads(self, patterns):
- '''
- Return a list of available payloads
- '''
- if len(self.onlines) == 0:
- raise ISError(u"No online repository")
- # building payload list
- paylist = {}
- for reponame in self.onlines:
- for md5, info in self[reponame].payloads().items():
- if md5 not in paylist:
- paylist[md5] = info
- else:
- paylist[md5]["images"].update(info["images"])
- # check if pattern is md5 startpath
- ans = {}
- for pattern in patterns:
- for md5 in paylist.keys():
- if md5.startswith(pattern):
- ans[md5] = paylist[md5]
- return ans
-
- def show_payloads(self, patterns, o_images=False, o_json=False):
- '''
- Show payloads inside manager
- '''
- # get payload list
- payloads = self.select_payloads(patterns)
- # display result
- if o_json:
- s = json.dumps(payloads)
- else:
- l = []
- for payname in sorted(payloads.keys()):
- pay = payloads[payname]
- l.append(u"#l##y#%s#R#" % payname)
- l.append(u" size: %s" % istools.human_size(pay["size"]))
- l.append(u" directory: %s" % bool(pay["isdir"]))
- l.append(u" image count: %d" % len(pay["images"]))
- l.append(u" names: %s" % ", ".join(set((v["payname"] for v in pay["images"].values()))))
- if o_images:
- l.append(u" images:")
- for path, obj in pay["images"].items():
- l.append(u" %s#R#/#l##b#%s#R#:#p#%s#R# (%s)" % (
- obj["repo"], obj["imgname"], obj["imgver"], obj["payname"]))
- s = os.linesep.join(l)
- if len(s) > 0:
- out(s)
-
- def select_repositories(self, patterns):
- '''
- Return a list of repository
- '''
- ans = set()
- uuidb = self.uuids
- for pattern in patterns:
- ans |= set(fnmatch.filter(self.names, pattern))
- if istools.strcspn(pattern, string.hexdigits + "-") == 0:
- for uuid in filter(lambda x: x.startswith(pattern), uuidb.keys()):
- ans |= set((r.config.name for r in uuidb[uuid]))
- return sorted(ans)
-
- def purge_repositories(self, patterns):
- '''
- Remove local cached repository files
- '''
- for reponame in self.select_repositories(patterns):
- arrow(u"Purging cache of repository %s" % reponame)
- db = os.path.join(self.cache_path, reponame)
- if os.path.lexists(db):
- try:
- os.unlink(db)
- arrow("done", 1)
- except:
- arrow("failed", 1)
- else:
- arrow("nothing to do", 1)
-
- def show_repositories(self, patterns, local=None, online=None, o_url=False,
- o_state=False, o_uuid=False, o_json=False, o_version=False):
- '''
- Show repository inside manager
- if :param online: is true, list only online repositories
- if :param online: is false, list only offline repostiories
- if :param online: is None, list both online and offline repostiories.
- if :param local: is true, list only local repositories
- if :param local: is false, list only remote repostiories
- if :param local: is None, list both local and remote repostiories.
- '''
- # build repositories dict
- repos = {}
- for reponame in self.select_repositories(patterns):
- repo = self[reponame]
- if repo.config.offline and online is True:
- continue
- if not repo.config.offline and online is False:
- continue
- if repo.local and local is False:
- continue
- if not repo.local and local is True:
- continue
- repos[reponame] = dict(repo.config.items())
- repos[reponame]["local"] = repo.local
- if not repo.config.offline:
- repos[reponame]["uuid"] = repo.uuid
- repos[reponame]["version"] = repo.version
- # display result
- if o_json:
- s = json.dumps(repos)
- else:
- l = []
- for name, repo in repos.items():
- ln = ""
- so = "#l##r#Off#R# " if repo["offline"] else "#l##g#On#R# "
- sl = "#l##y#Local#R# " if repo["local"] else "#l##c#Remote#R# "
- rc = "#l##r#" if repo["offline"] else "#l##g#"
- if o_state:
- ln += u"%s%s " % (so, sl)
- rc = "#l##b#"
- ln += u"%s%s#R#"% (rc, name)
- if o_url:
- ln += u" (%s)" % repo["path"]
- if not repo["offline"]:
- if o_version:
- ln += u" (#p#v%s#R#)" % repo["version"]
- if o_uuid and repo["uuid"] is not None:
- ln += u" [%s]" % repo["uuid"]
- l.append(ln)
- s = os.linesep.join(l)
- out(s)
-
-
-class RepositoryConfig(object):
- '''
- Repository configuration container
- '''
-
- def __init__(self, name, **kwargs):
- # set default value for arguments
- self._valid_param = ("name", "path", "dbpath", "lastpath",
- "uid", "gid", "fmod", "dmod", "offline")
- self.name = Repository.check_repository_name(name)
- self.path = ""
- self._offline = False
- self._dbpath = None
- self.dbname = "db"
- self._lastpath = None
- self.lastname = "last"
- self._uid = os.getuid()
- self._gid = os.getgid()
- umask = os.umask(0)
- os.umask(umask)
- self._fmod = 0666 & ~umask
- self._dmod = 0777 & ~umask
- self.update(**kwargs)
-
- def __str__(self):
- l = []
- for k, v in self.items():
- l.append(u"%s: %s" % (k, v))
- return os.linesep.join(l)
-
- def __eq__(self, other):
- return vars(self) == vars(other)
-
- def __ne__(self, other):
- return not (self == other)
-
- def __contains__(self, key):
- return key in self.__dict__
-
- def __getitem__(self, key):
- if key not in self._valid_param:
- raise IndexError(key)
- return getattr(self, key)
-
- def __iter__(self):
- for p in self._valid_param:
- yield p
-
- def items(self):
- for p in self:
- yield p, self[p]
-
- @property
- def lastpath(self):
- '''
- Return the last file complete path
- '''
- if self._lastpath is None:
- return os.path.join(self.path, self.lastname)
- return self._lastpath
-
- @lastpath.setter
- def lastpath(self, value):
- '''
- Set last path
- '''
- self._lastpath = value
-
- @property
- def dbpath(self):
- '''
- Return the db complete path
- '''
- if self._dbpath is None:
- return os.path.join(self.path, self.dbname)
- return self._dbpath
-
- @dbpath.setter
- def dbpath(self, value):
- '''
- Set db path
- '''
- # dbpath must be local, sqlite3 requirement
- if not istools.isfile(value):
- raise ValueError("Database path must be local")
- self._dbpath = os.path.abspath(value)
-
- @property
- def uid(self):
- '''
- Return owner of repository
- '''
- return self._uid
-
- @uid.setter
- def uid(self, value):
- '''
- Define user name owning repository
- '''
- if not value.isdigit():
- self._uid = pwd.getpwnam(value).pw_uid
- else:
- self._uid = int(value)
-
- @property
- def gid(self):
- '''
- Return group of the repository
- '''
- return self._gid
-
- @gid.setter
- def gid(self, value):
- '''
- Define group owning repository
- '''
- if not value.isdigit():
- self._gid = grp.getgrnam(value).gr_gid
- else:
- self._gid = int(value)
-
- @property
- def fmod(self):
- '''
- Return new file mode
- '''
- return self._fmod
-
- @fmod.setter
- def fmod(self, value):
- '''
- Define new file mode
- '''
- if value.isdigit():
- self._fmod = int(value, 8)
- else:
- raise ValueError("File mode must be an integer")
-
- @property
- def dmod(self):
- '''
- Return new directory mode
- '''
- return self._dmod
-
- @dmod.setter
- def dmod(self, value):
- '''
- Define new directory mode
- '''
- if value.isdigit():
- self._dmod = int(value, 8)
- else:
- raise ValueError("Directory mode must be an integer")
-
- @property
- def offline(self):
- '''
- Get the offline state of a repository
- '''
- return self._offline
-
- @offline.setter
- def offline(self, value):
- if type(value) in (str, unicode):
- value = value.lower() not in ("false", "no", "0")
- elif type(value) is not bool:
- value = bool(value)
- self._offline = value
-
- def update(self, *args, **kwargs):
- '''
- Update attribute with checking value
- All attribute must already exists
- '''
- # autoset parameter in cmdline
- for k in kwargs:
- if hasattr(self, k):
- try:
- setattr(self, k, kwargs[k])
- except Exception as e:
- warn(u"Unable to set config parameter %s in repository %s: %s" %
- (k, self.name, e))
- else:
- debug(u"No such repository parameter: %s" % k)
diff --git a/installsystems/repository/__init__.py b/installsystems/repository/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea33e7fbe550f201557074f4a22fd2b6acaa5147
--- /dev/null
+++ b/installsystems/repository/__init__.py
@@ -0,0 +1,27 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+InstallSystems repository package
+'''
+
+from installsystems.repository.manager import RepositoryManager
+from installsystems.repository.config import RepositoryConfig
+from installsystems.repository.repository import Repository
+from installsystems.repository.repository1 import Repository1
+from installsystems.repository.repository2 import Repository2
diff --git a/installsystems/repository/config.py b/installsystems/repository/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..c71dedf023911e73b58733fe54aa0a5662fa930f
--- /dev/null
+++ b/installsystems/repository/config.py
@@ -0,0 +1,215 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+Repository configuration module
+'''
+
+from grp import getgrnam
+from installsystems.printer import warn, debug
+from installsystems.repository.repository import Repository
+from installsystems.tools import isfile, chrights, mkdir, compare_versions
+from os import getuid, getgid, umask, linesep
+from os.path import join, abspath
+from pwd import getpwnam
+
+class RepositoryConfig(object):
+ '''
+ Repository configuration container
+ '''
+
+ def __init__(self, name, **kwargs):
+ # set default value for arguments
+ self._valid_param = ("name", "path", "dbpath", "lastpath",
+ "uid", "gid", "fmod", "dmod", "offline")
+ self.name = Repository.check_name(name)
+ self.path = ""
+ self._offline = False
+ self._dbpath = None
+ self.dbname = "db"
+ self._lastpath = None
+ self.lastname = "last"
+ self._uid = getuid()
+ self._gid = getgid()
+ oldmask = umask(0)
+ umask(oldmask)
+ self._fmod = 0666 & ~oldmask
+ self._dmod = 0777 & ~oldmask
+ self.update(**kwargs)
+
+ def __str__(self):
+ l = []
+ for k, v in self.items():
+ l.append(u"%s: %s" % (k, v))
+ return linesep.join(l)
+
+ def __eq__(self, other):
+ return vars(self) == vars(other)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __contains__(self, key):
+ return key in self.__dict__
+
+ def __getitem__(self, key):
+ if key not in self._valid_param:
+ raise IndexError(key)
+ return getattr(self, key)
+
+ def __iter__(self):
+ for p in self._valid_param:
+ yield p
+
+ def items(self):
+ for p in self:
+ yield p, self[p]
+
+ @property
+ def lastpath(self):
+ '''
+ Return the last file complete path
+ '''
+ if self._lastpath is None:
+ return join(self.path, self.lastname)
+ return self._lastpath
+
+ @lastpath.setter
+ def lastpath(self, value):
+ '''
+ Set last path
+ '''
+ self._lastpath = value
+
+ @property
+ def dbpath(self):
+ '''
+ Return the db complete path
+ '''
+ if self._dbpath is None:
+ return join(self.path, self.dbname)
+ return self._dbpath
+
+ @dbpath.setter
+ def dbpath(self, value):
+ '''
+ Set db path
+ '''
+ # dbpath must be local, sqlite3 requirement
+ if not isfile(value):
+ raise ValueError("Database path must be local")
+ self._dbpath = abspath(value)
+
+ @property
+ def uid(self):
+ '''
+ Return owner of repository
+ '''
+ return self._uid
+
+ @uid.setter
+ def uid(self, value):
+ '''
+ Define user name owning repository
+ '''
+ if not value.isdigit():
+ self._uid = getpwnam(value).pw_uid
+ else:
+ self._uid = int(value)
+
+ @property
+ def gid(self):
+ '''
+ Return group of the repository
+ '''
+ return self._gid
+
+ @gid.setter
+ def gid(self, value):
+ '''
+ Define group owning repository
+ '''
+ if not value.isdigit():
+ self._gid = getgrnam(value).gr_gid
+ else:
+ self._gid = int(value)
+
+ @property
+ def fmod(self):
+ '''
+ Return new file mode
+ '''
+ return self._fmod
+
+ @fmod.setter
+ def fmod(self, value):
+ '''
+ Define new file mode
+ '''
+ if value.isdigit():
+ self._fmod = int(value, 8)
+ else:
+ raise ValueError("File mode must be an integer")
+
+ @property
+ def dmod(self):
+ '''
+ Return new directory mode
+ '''
+ return self._dmod
+
+ @dmod.setter
+ def dmod(self, value):
+ '''
+ Define new directory mode
+ '''
+ if value.isdigit():
+ self._dmod = int(value, 8)
+ else:
+ raise ValueError("Directory mode must be an integer")
+
+ @property
+ def offline(self):
+ '''
+ Get the offline state of a repository
+ '''
+ return self._offline
+
+ @offline.setter
+ def offline(self, value):
+ if type(value) in (str, unicode):
+ value = value.lower() not in ("false", "no", "0")
+ elif type(value) is not bool:
+ value = bool(value)
+ self._offline = value
+
+ def update(self, *args, **kwargs):
+ '''
+        Update attributes with value checking
+        All attributes must already exist
+ '''
+ # autoset parameter in cmdline
+ for k in kwargs:
+ if hasattr(self, k):
+ try:
+ setattr(self, k, kwargs[k])
+ except Exception as e:
+ warn(u"Unable to set config parameter %s in repository %s: %s" %
+ (k, self.name, e))
+ else:
+ debug(u"No such repository parameter: %s" % k)
diff --git a/installsystems/database.py b/installsystems/repository/database.py
similarity index 77%
rename from installsystems/database.py
rename to installsystems/repository/database.py
index eec9118fa16234770bada8580134b951f1af8639..ba9934ecbf51d8fb520a4696fef5168845d36ba2 100644
--- a/installsystems/database.py
+++ b/installsystems/repository/database.py
@@ -25,8 +25,6 @@ import os
import sqlite3
import uuid
import installsystems.tools as istools
-import installsystems.template as istemplate
-from installsystems.tarball import Tarball
from installsystems.exception import *
from installsystems.printer import *
@@ -50,7 +48,7 @@ class Database(object):
try:
conn = sqlite3.connect(path, isolation_level=None)
conn.execute("PRAGMA foreign_keys = ON")
- conn.executescript(istemplate.createdb)
+ conn.executescript(TEMPLATE_EMPTY_DB)
conn.execute("INSERT INTO repository values (?,?,?)",
(str(uuid.uuid4()), Database.version, "",))
conn.commit()
@@ -104,3 +102,28 @@ class Database(object):
Ask question to db
'''
return self.conn.execute(sql, args)
+
+
+TEMPLATE_EMPTY_DB = u"""
+CREATE TABLE image (md5 TEXT NOT NULL PRIMARY KEY,
+ name TEXT NOT NULL,
+ version TEXT NOT NULL,
+ date INTEGER NOT NULL,
+ author TEXT,
+ description TEXT,
+ size INTEGER NOT NULL,
+ is_min_version INTEGER NOT NULL,
+ format INTEGER NOT NULL,
+ UNIQUE(name, version));
+
+CREATE TABLE payload (md5 TEXT NOT NULL,
+ image_md5 TEXT NOT NULL REFERENCES image(md5),
+ name TEXT NOT NULL,
+ isdir INTEGER NOT NULL,
+ size INTEGER NOT NULL,
+ PRIMARY KEY(md5, image_md5));
+
+CREATE TABLE repository (uuid TEXT NOT NULL PRIMARY KEY,
+ version FLOAT NOT NULL,
+ motd TEXT NOT NULL);
+"""
diff --git a/installsystems/repository/factory.py b/installsystems/repository/factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d920b45f7dfbb1be40645a814cf7c85f0acaaa9
--- /dev/null
+++ b/installsystems/repository/factory.py
@@ -0,0 +1,58 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+Repository Factory
+'''
+
+from installsystems.printer import debug, warn
+from installsystems.exception import ISWarning, ISError
+from installsystems.repository.database import Database
+from installsystems.repository.repository1 import Repository1
+from installsystems.repository.repository2 import Repository2
+
+class RepositoryFactory(object):
+ '''
+ Repository factory
+ '''
+
+ def __init__(self):
+
+ self.repo_class = {
+ 1: Repository1,
+ 2: Repository2,
+ }
+
+ def create(self, config):
+ db = None
+ if not config.offline:
+ try:
+ db = Database(config.dbpath)
+ except ISWarning as e:
+ warn('[%s]: %s' % (config.name, e))
+ config.offline = True
+ except ISError:
+ debug(u"Unable to load database %s" % config.dbpath)
+ config.offline = True
+ if config.offline:
+ debug(u"Repository %s is offline" % config.name)
+ if db is None:
+ return Repository2(config)
+ else:
+ return self.repo_class[int(db.version)](config, db)
+
diff --git a/installsystems/repository/manager.py b/installsystems/repository/manager.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc2fcb92028e97262657f4fd62109ba5db450eaf
--- /dev/null
+++ b/installsystems/repository/manager.py
@@ -0,0 +1,472 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+Repository management module
+'''
+
+from installsystems.exception import ISError, ISWarning
+from installsystems.printer import out, debug, arrow
+from installsystems.repository.factory import RepositoryFactory
+from installsystems.repository.repository import Repository
+from installsystems.tools import isfile, chrights, PipeFile, compare_versions
+from installsystems.tools import time_rfc2822, human_size, strcspn
+from json import dumps
+from os import mkdir, access, W_OK, X_OK, unlink, stat, linesep, close
+from os.path import abspath, exists, lexists, join
+from string import hexdigits
+from tempfile import mkstemp
+
+# use module prefix because a function is named filter
+import fnmatch
+
+class RepositoryManager(object):
+ '''
+ Manage multiple repositories
+
+    This class implements a cache and a manager for multiple repositories
+ Default repository timeout is 3
+ '''
+
+ def __init__(self, cache_path=None, timeout=None, filter=None, search=None):
+ self.repos = []
+ self.tempfiles = []
+ self.filter = [] if filter is None else filter
+ self.search = [] if search is None else search
+ self.timeout = timeout or 3
+ self.factory = RepositoryFactory()
+ debug(u"Repository timeout setted to %ds" % self.timeout)
+ if cache_path is None:
+ self.cache_path = None
+ debug("No repository cache")
+ else:
+ if not isfile(cache_path):
+ raise NotImplementedError("Repository cache must be local")
+ self.cache_path = abspath(cache_path)
+            # create the cache directory if it does not exist
+            # (no other directory needs to be checked here)
+ if not exists(self.cache_path):
+ mkdir(self.cache_path)
+            # ensure directories are available
+ if not access(self.cache_path, W_OK | X_OK):
+ raise ISError(u"%s is not writable or executable" % self.cache_path)
+ debug(u"Repository cache is in %s" % self.cache_path)
+
+ def __del__(self):
+ # delete temporary files (used by db)
+ for f in self.tempfiles:
+ try:
+ debug(u"Removing temporary db file %s" % f)
+ unlink(f)
+ except OSError:
+ pass
+
+ def __len__(self):
+ '''
+ Return the number of repository registered
+ '''
+ return len(self.repos)
+
+ def __getitem__(self, key):
+ '''
+        Return a repository by its list position (int) or by name/UUID prefix (string)
+ '''
+ if isinstance(key, int):
+ return self.repos[key]
+ elif isinstance(key, basestring):
+ # match name
+ for repo in self.repos:
+ if repo.config.name == key:
+ return repo
+ # if not found, match uuid
+ # we need at least 4 chars to avoid ambiguous uuid matching
+ if len(key) >= 4:
+ for repo in [r for r in self.repos if not r.config.offline]:
+ if fnmatch.fnmatch(repo.uuid, "%s*" % key):
+ return repo
+ else:
+ raise ISWarning("Ambiguous argument: we need at least 4 chars "
+ "to match an uuid")
+ raise IndexError(u"No repository named: %s" % key)
+ else:
+ raise TypeError(u"Invalid type %s for %s" % (type(key), key))
+
+ def __contains__(self, key):
+ '''
+ Check if a key is a repository name
+ '''
+ for r in self.repos:
+ if r.config.name == key:
+ return True
+ return False
+
+ def register(self, config, temp=False, nosync=False, offline=False):
+ '''
+ Register a repository from its config
+ temp: repository is stored in a temporary location
+ nosync: register repository as online, but no sync is done before
+ offline: repository is marked offline
+ '''
+ # check filter on name
+ if len(self.filter) > 0:
+ if config.name not in self.filter:
+ debug(u"Filtering repository %s" % config.name)
+ return
+ # repository is offline
+ if config.offline or offline:
+ debug(u"Registering offline repository %s (%s)" % (config.path, config.name))
+            # we must force offline in case the offline argument is set
+ config.offline = True
+ self.repos.append(self.factory.create(config))
+ # if path is local, no needs to create a cache
+ elif isfile(config.path):
+ debug(u"Registering direct repository %s (%s)" % (config.path, config.name))
+ self.repos.append(self.factory.create(config))
+ # path is remote, we need to create a cache
+ else:
+ debug(u"Registering cached repository %s (%s)" % (config.path, config.name))
+ self.repos.append(self._cachify(config, temp, nosync))
+
+ def _cachify(self, config, temp=False, nosync=False):
+ '''
+        Return a cached repository built from an original config
+ :param config: repository configuration
+ :param temp: repository db should be stored in a temporary location
+ :param nosync: if a cache exists, don't try to update it
+ '''
+        # if cache is disabled => temp = True
+ if self.cache_path is None:
+ temp = True
+ try:
+ original_dbpath = config.dbpath
+ if temp and nosync:
+ raise ISError("sync is disabled")
+ elif temp:
+ # this is a temporary cached repository
+ tempfd, config.dbpath = mkstemp()
+ close(tempfd)
+ self.tempfiles.append(config.dbpath)
+ else:
+ config.dbpath = join(self.cache_path, config.name)
+ if not nosync:
+ # Open remote database
+ rdb = PipeFile(original_dbpath, timeout=self.timeout)
+ # get remote last modification
+ if rdb.mtime is None:
+                    # We don't have a modification time, so we use the last file
+ try:
+ rlast = int(PipeFile(config.lastpath, mode='r',
+ timeout=self.timeout).read().strip())
+ except ISError:
+ rlast = -1
+ else:
+ rlast = rdb.mtime
+ # get local last value
+ if exists(config.dbpath):
+ llast = int(stat(config.dbpath).st_mtime)
+ else:
+ llast = -2
+ # if repo is out of date, download it
+ if rlast != llast:
+ try:
+ arrow(u"Downloading %s" % original_dbpath)
+ rdb.progressbar = True
+ ldb = open(config.dbpath, "wb")
+ rdb.consume(ldb)
+ ldb.close()
+ rdb.close()
+ chrights(config.dbpath,
+ uid=config.uid,
+ gid=config.gid,
+ mode=config.fmod,
+ mtime=rlast)
+ except:
+ if exists(config.dbpath):
+ unlink(config.dbpath)
+ raise
+ except ISError as e :
+            # if something goes wrong during caching, we mark the repo as offline
+ debug(u"Unable to cache repository %s: %s" % (config.name, e))
+ config.offline = True
+ return self.factory.create(config)
+
+ @property
+ def names(self):
+ '''
+ Return list of repository names
+ '''
+ return [ r.config.name for r in self.repos ]
+
+ @property
+ def uuids(self):
+ '''
+ Return a dict of repository UUID and associated names
+ '''
+ d = {}
+ for r in self.repos:
+ uuid = r.uuid
+ if uuid is None:
+ continue
+ if uuid in d:
+ d[uuid].append(r)
+ else:
+ d[uuid] = [r]
+ return d
+
+ @property
+ def onlines(self):
+ '''
+ Return list of online repository names
+ '''
+ return [ r.config.name for r in self.repos if not r.config.offline ]
+
+ @property
+ def offlines(self):
+ '''
+ Return list of offlines repository names
+ '''
+ return [ r.config.name for r in self.repos if r.config.offline ]
+
+ def select_images(self, patterns):
+ '''
+ Return a list of available images
+ '''
+ if len(self.onlines) == 0:
+ raise ISError(u"No online repository")
+ ans = {}
+ for pattern in patterns:
+ path, image, version = Repository.split_path(pattern)
+ if image is None:
+ if path is None or version is None:
+ image = "*"
+ else:
+ # empty pattern
+ continue
+ # building image list
+ images = {}
+ for reponame in self.onlines:
+ for img in self[reponame].images():
+ imgname = u"%s/%s:%s" % (reponame, img["name"], img["version"])
+ images[imgname] = img
+ # No path means only in searchable repositories
+ if path is None:
+ for k, v in images.items():
+ # match name
+ if v["repo"] not in self.search and self[v["repo"]].uuid not in self.search:
+ del images[k]
+ path = "*"
+ # No version means last version
+ if version is None:
+ version = "*"
+ for repo in set((images[i]["repo"] for i in images)):
+ for img in set((images[i]["name"] for i in images if images[i]["repo"] == repo)):
+ versions = [ images[i]['version']
+ for i in images if images[i]["repo"] == repo and images[i]["name"] == img ]
+ f = lambda x,y: x if compare_versions(x, y) > 0 else y
+ last = reduce(f, versions)
+ versions.remove(last)
+ for rmv in versions:
+ del images[u"%s/%s:%s" % (repo, img, rmv)]
+ # if 'path*' do not match a repo name, it may be an uuid, so add
+ # globbing for smart uuid matching
+ if not fnmatch.filter(self.onlines, "%s*" % path):
+ path = "%s*" % path
+ # filter with pattern on path
+ filter_pattern = u"%s/%s:%s" % (path, image, version)
+ for k, img in images.items():
+ if not (fnmatch.fnmatch(k, filter_pattern) or
+ fnmatch.fnmatch("%s/%s" % (self[img["repo"]].uuid, k.split("/")[1]), filter_pattern)):
+ del images[k]
+ ans.update(images)
+ return ans
+
+ def search_image(self, pattern):
+ '''
+        Search pattern across all registered repositories
+ '''
+ for repo in self.onlines:
+ arrow(self[repo].config.name)
+ self[repo].search(pattern)
+
+ def show_images(self, patterns, o_json=False, o_long=False, o_md5=False,
+ o_date=False, o_author=False, o_size=False, o_url=False,
+ o_description=False, o_format=False, o_min_version=False):
+ '''
+ Show images inside manager
+ '''
+ # get images list
+ images = self.select_images(patterns)
+ # display result
+ if o_json:
+ s = dumps(images)
+ else:
+ l = []
+ for imgp in sorted(images.keys()):
+ img = images[imgp]
+ l.append(u"%s#R#/#l##b#%s#R#:#p#%s#R#" % (
+ img["repo"], img["name"], img["version"]))
+ if o_md5 or o_long:
+ l[-1] = l[-1] + u" (#y#%s#R#)" % img["md5"]
+ if o_date or o_long:
+ l.append(u" #l#date:#R# %s" % time_rfc2822(img["date"]))
+ if o_author or o_long:
+ l.append(u" #l#author:#R# %s" % img["author"])
+ if o_size or o_long:
+ l.append(u" #l#size:#R# %s" % human_size(img["size"]))
+ if o_url or o_long:
+ l.append(u" #l#url:#R# %s" % img["url"])
+ if o_description or o_long:
+ l.append(u" #l#description:#R# %s" % img["description"])
+ if o_format or o_long:
+ l.append(u" #l#format:#R# %s" % img["format"])
+ if o_min_version or o_long:
+ l.append(u" #l#is min version:#R# %s" % img["is_min_version"])
+ s = linesep.join(l)
+ if len(s) > 0:
+ out(s)
+
+ def select_payloads(self, patterns):
+ '''
+ Return a list of available payloads
+ '''
+ if len(self.onlines) == 0:
+ raise ISError(u"No online repository")
+ # building payload list
+ paylist = {}
+ for reponame in self.onlines:
+ for md5, info in self[reponame].payloads().items():
+ if md5 not in paylist:
+ paylist[md5] = info
+ else:
+ paylist[md5]["images"].update(info["images"])
+ # check if pattern is md5 startpath
+ ans = {}
+ for pattern in patterns:
+ for md5 in paylist.keys():
+ if md5.startswith(pattern):
+ ans[md5] = paylist[md5]
+ return ans
+
+ def show_payloads(self, patterns, o_images=False, o_json=False):
+ '''
+ Show payloads inside manager
+ '''
+ # get payload list
+ payloads = self.select_payloads(patterns)
+ # display result
+ if o_json:
+ s = dumps(payloads)
+ else:
+ l = []
+ for payname in sorted(payloads.keys()):
+ pay = payloads[payname]
+ l.append(u"#l##y#%s#R#" % payname)
+ l.append(u" size: %s" % human_size(pay["size"]))
+ l.append(u" directory: %s" % bool(pay["isdir"]))
+ l.append(u" image count: %d" % len(pay["images"]))
+ l.append(u" names: %s" % ", ".join(set((v["payname"] for v in pay["images"].values()))))
+ if o_images:
+ l.append(u" images:")
+ for path, obj in pay["images"].items():
+ l.append(u" %s#R#/#l##b#%s#R#:#p#%s#R# (%s)" % (
+ obj["repo"], obj["imgname"], obj["imgver"], obj["payname"]))
+ s = linesep.join(l)
+ if len(s) > 0:
+ out(s)
+
+ def select_repositories(self, patterns):
+ '''
+        Return a sorted list of repository names matching patterns
+ '''
+ ans = set()
+ uuidb = self.uuids
+ for pattern in patterns:
+ ans |= set(fnmatch.filter(self.names, pattern))
+ if strcspn(pattern, hexdigits + "-") == 0:
+ for uuid in filter(lambda x: x.startswith(pattern), uuidb.keys()):
+ ans |= set((r.config.name for r in uuidb[uuid]))
+ return sorted(ans)
+
+ def purge_repositories(self, patterns):
+ '''
+ Remove local cached repository files
+ '''
+ for reponame in self.select_repositories(patterns):
+ arrow(u"Purging cache of repository %s" % reponame)
+ db = join(self.cache_path, reponame)
+ if lexists(db):
+ try:
+ unlink(db)
+ arrow("done", 1)
+ except:
+ arrow("failed", 1)
+ else:
+ arrow("nothing to do", 1)
+
+ def show_repositories(self, patterns, local=None, online=None, o_url=False,
+ o_state=False, o_uuid=False, o_json=False, o_version=False):
+ '''
+ Show repository inside manager
+ if :param online: is true, list only online repositories
+        if :param online: is false, list only offline repositories
+        if :param online: is None, list both online and offline repositories.
+        if :param local: is true, list only local repositories
+        if :param local: is false, list only remote repositories
+        if :param local: is None, list both local and remote repositories.
+ '''
+ # build repositories dict
+ repos = {}
+ for reponame in self.select_repositories(patterns):
+ repo = self[reponame]
+ if repo.config.offline and online is True:
+ continue
+ if not repo.config.offline and online is False:
+ continue
+ if repo.local and local is False:
+ continue
+ if not repo.local and local is True:
+ continue
+ repos[reponame] = dict(repo.config.items())
+ repos[reponame]["local"] = repo.local
+ if not repo.config.offline:
+ repos[reponame]["uuid"] = repo.uuid
+ repos[reponame]["version"] = repo.version
+ # display result
+ if o_json:
+ s = dumps(repos)
+ else:
+ l = []
+ for name, repo in repos.items():
+ ln = ""
+ so = "#l##r#Off#R# " if repo["offline"] else "#l##g#On#R# "
+ sl = "#l##y#Local#R# " if repo["local"] else "#l##c#Remote#R# "
+ rc = "#l##r#" if repo["offline"] else "#l##g#"
+ if o_state:
+ ln += u"%s%s " % (so, sl)
+ rc = "#l##b#"
+ ln += u"%s%s#R#"% (rc, name)
+ if o_url:
+ ln += u" (%s)" % repo["path"]
+ if not repo["offline"]:
+ if o_version:
+ ln += u" (#p#v%s#R#)" % repo["version"]
+ if o_uuid and repo["uuid"] is not None:
+ ln += u" [%s]" % repo["uuid"]
+ l.append(ln)
+ s = linesep.join(l)
+ out(s)
diff --git a/installsystems/repository/repository.py b/installsystems/repository/repository.py
new file mode 100644
index 0000000000000000000000000000000000000000..8167e1d3abda1549ae2bf65dfa296523f8b4848c
--- /dev/null
+++ b/installsystems/repository/repository.py
@@ -0,0 +1,521 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+Abstract repository module
+'''
+
+from cStringIO import StringIO
+from installsystems.exception import ISError
+from installsystems.image.package import PackageImage
+from installsystems.printer import arrow, arrowlevel, out, warn, confirm
+from installsystems.repository.database import Database
+from installsystems.tools import isfile, chrights, mkdir, compare_versions, PipeFile
+from os import unlink, listdir, linesep, rmdir
+from os.path import join
+from os.path import join, basename, exists, isdir
+from re import match, split
+from time import time
+
+class Repository(object):
+ '''
+ Repository class
+ '''
+
+ @staticmethod
+ def is_name(name):
+ '''Check if name is a valid repository name'''
+ return match("^[-_\w]+$", name) is not None
+
+ @staticmethod
+ def check_name(name):
+ '''
+ Raise exception if repository name is invalid
+ '''
+ if not Repository.is_name(name):
+ raise ISError(u"Invalid repository name %s" % name)
+ return name
+
+ @staticmethod
+ def split_path(path):
+ '''
+ Split an image path (repo/image:version)
+ in a tuple (repo, image, version)
+ '''
+ x = match(u"^(?:([^/:]+)/)?([^/:]+)?(?::v?([^/:]+)?)?$", path)
+ if x is None:
+ raise ISError(u"invalid image path: %s" % path)
+ return x.group(1, 2, 3)
+
+ @staticmethod
+ def split_list(repolist, filter=None):
+ '''
+ Return a list of repository from a comma/spaces separated names of repo
+ '''
+ if filter is None:
+ filter = Repository.is_name
+ return [r for r in split("[ ,\n\t\v]+", repolist) if filter(r)]
+
+ @staticmethod
+ def diff(repo1, repo2):
+ '''
+ Compute a diff between two repositories
+ '''
+ arrow(u"Diff between repositories #y#%s#R# and #g#%s#R#" % (repo1.config.name,
+ repo2.config.name))
+ # Get info from databases
+ i_dict1 = dict((b[0], b[1:]) for b in repo1.db.ask(
+ "SELECT md5, name, version FROM image").fetchall())
+ i_set1 = set(i_dict1.keys())
+ i_dict2 = dict((b[0], b[1:]) for b in repo2.db.ask(
+ "SELECT md5, name, version FROM image").fetchall())
+ i_set2 = set(i_dict2.keys())
+ p_dict1 = dict((b[0], b[1:]) for b in repo1.db.ask(
+ "SELECT md5, name FROM payload").fetchall())
+ p_set1 = set(p_dict1.keys())
+ p_dict2 = dict((b[0], b[1:]) for b in repo2.db.ask(
+ "SELECT md5, name FROM payload").fetchall())
+ p_set2 = set(p_dict2.keys())
+ # computing diff
+ i_only1 = i_set1 - i_set2
+ i_only2 = i_set2 - i_set1
+ p_only1 = p_set1 - p_set2
+ p_only2 = p_set2 - p_set1
+ # printing functions
+ pimg = lambda r,c,m,d,: out("#%s#Image only in repository %s: %s v%s (%s)#R#" %
+ (c, r.config.name, d[m][0], d[m][1], m))
+ ppay = lambda r,c,m,d,: out("#%s#Payload only in repository %s: %s (%s)#R#" %
+ (c, r.config.name, d[m][0], m))
+ # printing image diff
+ for md5 in i_only1: pimg(repo1, "y", md5, i_dict1)
+ for md5 in p_only1: ppay(repo1, "y", md5, p_dict1)
+ for md5 in i_only2: pimg(repo2, "g", md5, i_dict2)
+ for md5 in p_only2: ppay(repo2, "g", md5, p_dict2)
+
+ def __init__(self, config, db=None):
+ self.config = config
+ self.local = isfile(self.config.path)
+ self.db = db
+
+ def __getattribute__(self, name):
+ '''
+ Raise an error if repository is unavailable
+ Unavailable can be caused because db is not accessible or
+ because repository is not initialized
+ '''
+ config = object.__getattribute__(self, "config")
+ # config, init, local and upgrade are always accessible
+ if name in ("init", "config", "local", "upgrade"):
+ return object.__getattribute__(self, name)
+ # if no db (not init or not accessible) raise error
+ if config.offline:
+ raise ISError(u"Repository %s is offline" % config.name)
+ return object.__getattribute__(self, name)
+
+ @property
+ def version(self):
+ '''
+ Return repository version
+ '''
+ raise NotImplementedError()
+
+ @property
+ def uuid(self):
+ '''
+ Return repository UUID
+ '''
+ return self.db.ask("SELECT uuid from repository").fetchone()[0]
+
+ def init(self):
+ '''
+ Initialize an empty base repository
+ '''
+ config = self.config
+ # check local repository
+ if not self.local:
+ raise ISError(u"Repository creation must be local")
+ # create base directories
+ arrow("Creating base directories")
+ arrowlevel(1)
+ # creating local directory
+ try:
+ if exists(config.path):
+ arrow(u"%s already exists" % config.path)
+ else:
+ mkdir(config.path, config.uid, config.gid, config.dmod)
+ arrow(u"%s directory created" % config.path)
+ except Exception as e:
+ raise ISError(u"Unable to create directory %s" % config.path, e)
+ arrowlevel(-1)
+ # create database
+ d = Database.create(config.dbpath)
+ chrights(config.dbpath, uid=config.uid,
+ gid=config.gid, mode=config.fmod)
+ # load database
+ self.db = Database(config.dbpath)
+ # mark repo as not offline
+ self.config.offline = False
+ # create/update last file
+ self.update_last()
+
+ def update_last(self):
+ '''
+ Update last file to current time
+ '''
+ # check local repository
+ if not self.local:
+ raise ISError(u"Repository must be local")
+ try:
+ arrow("Updating last file")
+ last_path = join(self.config.path, self.config.lastname)
+ open(last_path, "w").write("%s\n" % int(time()))
+ chrights(last_path, self.config.uid, self.config.gid, self.config.fmod)
+ except Exception as e:
+ raise ISError(u"Update last file failed", e)
+
+ def last(self, name):
+ '''
+ Return last version of name in repo or None if not found
+ '''
+ r = self.db.ask("SELECT version FROM image WHERE name = ?", (name,)).fetchall()
+ # no row => no way
+ if r is None:
+ return None
+ f = lambda x,y: x[0] if compare_versions(x[0], y[0]) > 0 else y[0]
+ # return last
+ return reduce(f, r)
+
+ def _add(self, image):
+ '''
+ Add description to db
+ '''
+ arrow("Adding metadata")
+ self.db.begin()
+ # insert image information
+ arrow("Image", 1)
+ self.db.ask("INSERT INTO image values (?,?,?,?,?,?,?,?,?)",
+ (image.md5,
+ image.name,
+ image.version,
+ image.date,
+ image.author,
+ image.description,
+ image.size,
+ image.is_min_version,
+ image.format,
+ ))
+ # insert data information
+ arrow("Payloads", 1)
+ for name, obj in image.payload.items():
+ self.db.ask("INSERT INTO payload values (?,?,?,?,?)",
+ (obj.md5,
+ image.md5,
+ name,
+ obj.isdir,
+ obj.size,
+ ))
+ # on commit
+ self.db.commit()
+ # update last file
+ self.update_last()
+
+ def add(self, image, delete=False):
+ '''
+ Add a packaged image to repository
+ if delete is true, remove original files
+ '''
+ # check local repository
+ if not self.local:
+ raise ISError(u"Repository addition must be local")
+ # cannot add an already existent image
+ if self.has(image.name, image.version):
+ raise ISError(u"Image already in database, delete first!")
+ # adding file to repository
+ arrow("Copying images and payload")
+ for obj in [ image ] + image.payload.values():
+ dest = join(self.config.path, obj.md5)
+ basesrc = basename(obj.path)
+ if exists(dest):
+ arrow(u"Skipping %s: already exists" % basesrc, 1)
+ else:
+ arrow(u"Adding %s (%s)" % (basesrc, obj.md5), 1)
+ dfo = open(dest, "wb")
+ sfo = PipeFile(obj.path, "r", progressbar=True)
+ sfo.consume(dfo)
+ sfo.close()
+ dfo.close()
+ chrights(dest, self.config.uid,
+ self.config.gid, self.config.fmod)
+ # copy is done. create a image inside repo
+ r_image = PackageImage(join(self.config.path, image.md5),
+ md5name=True)
+ # checking must be done with original md5
+ r_image.md5 = image.md5
+ # checking image and payload after copy
+ r_image.check("Check image and payload")
+ self._add(image)
+ # removing original files
+ if delete:
+ arrow("Removing original files")
+ for obj in [ image ] + image.payload.values():
+ arrow(basename(obj.path), 1)
+ unlink(obj.path)
+
+ def getallmd5(self):
+ '''
+ Get list of all md5 in DB
+ '''
+ res = self.db.ask("SELECT md5 FROM image UNION SELECT md5 FROM payload").fetchall()
+ return [ md5[0] for md5 in res ]
+
+ def check(self):
+ '''
+ Check repository for unreferenced and missing files
+ '''
+ # Check if the repo is local
+ if not self.local:
+ raise ISError(u"Repository must be local")
+ local_files = set(listdir(self.config.path))
+ local_files.remove(self.config.dbname)
+ local_files.remove(self.config.lastname)
+ db_files = set(self.getallmd5())
+ # check missing files
+ arrow("Checking missing files")
+ missing_files = db_files - local_files
+ if len(missing_files) > 0:
+ out(linesep.join(missing_files))
+ # check unreferenced files
+ arrow("Checking unreferenced files")
+ unref_files = local_files - db_files
+ if len(unref_files) > 0:
+ out(linesep.join(unref_files))
+ # check corruption of local files
+ arrow("Checking corrupted files")
+ for f in local_files:
+ fo = PipeFile(join(self.config.path, f))
+ fo.consume()
+ fo.close()
+ if fo.md5 != f:
+ out(f)
+
+ def clean(self, force=False):
+ '''
+ Clean the repository's content
+ '''
+ # Check if the repo is local
+ if not self.local:
+ raise ISError(u"Repository must be local")
+ allmd5 = set(self.getallmd5())
+ repofiles = set(listdir(self.config.path)) - set([self.config.dbname, self.config.lastname])
+ dirtyfiles = repofiles - allmd5
+ if len(dirtyfiles) > 0:
+ # print dirty files
+ arrow("Dirty files:")
+ for f in dirtyfiles:
+ arrow(f, 1)
+ # ask confirmation
+ if not force and not confirm("Remove dirty files? (yes) "):
+ raise ISError(u"Aborted!")
+ # start cleaning
+ arrow("Cleaning")
+ for f in dirtyfiles:
+ p = join(self.config.path, f)
+ arrow(u"Removing %s" % p, 1)
+ try:
+ if isdir(p):
+ rmdir(p)
+ else:
+ unlink(p)
+ except:
+ warn(u"Removing %s failed" % p)
+ else:
+ arrow("Nothing to clean")
+
+ def delete(self, name, version, payloads=True):
+ '''
+ Delete an image from repository
+ '''
+ # check local repository
+ if not self.local:
+ raise ISError(u"Repository deletion must be local")
+ # get md5 of files related to the image (an exception is raised if it does not exist)
+ md5s = self.getmd5(name, version)
+ # cleaning db (must be done before cleaning)
+ arrow("Cleaning database")
+ arrow("Remove payloads from database", 1)
+ self.db.begin()
+ for md5 in md5s[1:]:
+ self.db.ask("DELETE FROM payload WHERE md5 = ? AND image_md5 = ?",
+ (md5, md5s[0])).fetchone()
+ arrow("Remove image from database", 1)
+ self.db.ask("DELETE FROM image WHERE md5 = ?",
+ (md5s[0],)).fetchone()
+ self.db.commit()
+ # Removing files
+ arrow("Removing files from pool")
+ # if asked don't remove payloads
+ if not payloads:
+ md5s = [ md5s[0] ]
+ arrowlevel(1)
+ for md5 in md5s:
+ self._remove_file(md5)
+ arrowlevel(-1)
+ # update last file
+ self.update_last()
+
+ def images(self):
+ '''
+ Return a dict of information on images
+ '''
+ db_images = self.db.ask("SELECT md5, name, version, date, author, \
+ description, size, is_min_version, format \
+ FROM image ORDER BY name, version").fetchall()
+
+ images = []
+ field = ("md5", "name", "version", "date", "author", "description",
+ "size", "is_min_version", "format")
+ for info in db_images:
+ d = dict(zip(field, info))
+ d["repo"] = self.config.name
+ d["url"] = join(self.config.path, d["md5"])
+ images.append(d)
+ return images
+
+ def payloads(self):
+ '''
+ Return a dict of information on payloads
+ '''
+ db_payloads = self.db.ask("SELECT payload.md5,payload.size,payload.isdir,image.name,image.version,payload.name FROM payload inner join image on payload.image_md5 = image.md5").fetchall()
+ res = {}
+ for payload in db_payloads:
+ md5 = payload[0]
+ # create entry if not exists
+ if md5 not in res:
+ res[md5] = {"size": payload[1], "isdir": payload[2], "images": {}}
+ # add image to list
+ imgpath = u"%s/%s:%s" % (self.config.name, payload[3], payload[4])
+ res[md5]["images"][imgpath] = {"repo": self.config.name,
+ "imgname": payload[3],
+ "imgver": payload[4],
+ "payname": payload[5]}
+ return res
+
+ def search(self, pattern):
+ '''
+ Search pattern in a repository
+ '''
+ images = self.db.ask("SELECT name, version, author, description\
+ FROM image\
+ WHERE name LIKE ? OR\
+ description LIKE ? OR\
+ author LIKE ?",
+ tuple( [u"%%%s%%" % pattern ] * 3)
+ ).fetchall()
+ for name, version, author, description in images:
+ arrow(u"%s v%s" % (name, version), 1)
+ out(u" #yellow#Author:#reset# %s" % author)
+ out(u" #yellow#Description:#reset# %s" % description)
+
+ def _remove_file(self, filename):
+ '''
+ Remove a filename from pool. Check if it's not needed by db before
+ '''
+ # check existance in table image
+ have = False
+ for table in ("image", "payload"):
+ have = have or self.db.ask(u"SELECT md5 FROM %s WHERE md5 = ? LIMIT 1" % table,
+ (filename,)).fetchone() is not None
+ # if no reference, delete!
+ if not have:
+ arrow(u"%s, deleted" % filename)
+ unlink(join(self.config.path, filename))
+ else:
+ arrow(u"%s, skipped" % filename)
+
+ def has(self, name, version):
+ '''
+ Return the existence of a package
+ '''
+ return self.db.ask("SELECT name,version FROM image WHERE name = ? AND version = ? LIMIT 1", (name,version)).fetchone() is not None
+
+ def get(self, name, version=None):
+ '''
+ Return an image from a name and version
+ '''
+ # if no version given, take the last
+ if version is None:
+ version = self.last(name)
+ if version is None:
+ raise ISError(u"Unable to find image %s in %s" % (name,
+ self.config.name))
+ # get file md5 from db
+ r = self.db.ask("select md5 from image where name = ? and version = ? limit 1",
+ (name, version)).fetchone()
+ if r is None:
+ raise ISError(u"Unable to find image %s v%s in %s" % (name, version,
+ self.config.name))
+ path = join(self.config.path, r[0])
+ # getting the file
+ arrow(u"Loading image %s v%s from repository %s" % (name,
+ version,
+ self.config.name))
+ memfile = StringIO()
+ try:
+ fo = PipeFile(path, "r")
+ fo.consume(memfile)
+ fo.close()
+ except Exception as e:
+ raise ISError(u"Loading image %s v%s failed" % (name, version), e)
+ memfile.seek(0)
+ pkg = PackageImage(path, fileobj=memfile, md5name=True)
+ if pkg.md5 != r[0]:
+ raise ISError(u"Image MD5 verification failure")
+ return pkg
+
+ def getmd5(self, name, version):
+ '''
+ Return an image md5 and its payload md5s from name and version. Order matters!
+ Image md5 will always be the first
+ '''
+ # get file md5 from db
+ a = self.db.ask("SELECT md5 FROM image WHERE name = ? AND version = ? LIMIT 1",
+ (name,version)).fetchone()
+ if a is None:
+ raise ISError(u"No such image %s version %s" % (name, version))
+ b = self.db.ask("SELECT md5 FROM payload WHERE image_md5 = ?",
+ (a[0],)).fetchall()
+ return [ a[0] ] + [ x[0] for x in b ]
+
+ @property
+ def motd(self):
+ '''
+ Return repository message of the day
+ '''
+ motd = self.db.ask("SELECT motd FROM repository").fetchone()[0]
+ return None if len(motd) == 0 else motd
+
+ def setmotd(self, value=""):
+ '''
+ Set repository message of the day
+ '''
+ # check local repository
+ if not self.local:
+ raise ISError(u"Repository must be local")
+ arrow("Updating motd")
+ self.db.ask("UPDATE repository SET motd = ?", (value,))
+ self.update_last()
diff --git a/installsystems/repository/repository1.py b/installsystems/repository/repository1.py
new file mode 100644
index 0000000000000000000000000000000000000000..82b3b907397186473398f3f0e5ac494a9e9cb701
--- /dev/null
+++ b/installsystems/repository/repository1.py
@@ -0,0 +1,165 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+Repository v1
+'''
+
+from installsystems.image.package import PackageImage
+from installsystems.printer import arrow, arrowlevel, warn, info
+from installsystems.repository.config import RepositoryConfig
+from installsystems.repository.database import Database
+from installsystems.repository.repository import Repository
+from os import listdir, unlink, symlink
+from os.path import join, exists
+from shutil import move, rmtree
+from tempfile import mkdtemp
+
+class Repository1(Repository):
+
+ def _add(self, image):
+ '''
+ Add description to db
+ '''
+ arrow("Adding metadata")
+ self.db.begin()
+ # insert image information
+ arrow("Image", 1)
+ self.db.ask("INSERT INTO image values (?,?,?,?,?,?,?)",
+ (image.md5,
+ image.name,
+ image.version,
+ image.date,
+ image.author,
+ image.description,
+ image.size,
+ ))
+ # insert data information
+ arrow("Payloads", 1)
+ for name, obj in image.payload.items():
+ self.db.ask("INSERT INTO payload values (?,?,?,?,?)",
+ (obj.md5,
+ image.md5,
+ name,
+ obj.isdir,
+ obj.size,
+ ))
+ # on commit
+ self.db.commit()
+ # update last file
+ self.update_last()
+
+ def images(self):
+ '''
+ Return a dict of information on images
+ '''
+ db_images = self.db.ask("SELECT md5, name, version, date, author, \
+ description, size \
+ FROM image ORDER BY name, version").fetchall()
+
+ images = []
+ field = ("md5", "name", "version", "date", "author", "description",
+ "size")
+ for info in db_images:
+ d = dict(zip(field, info))
+ d["repo"] = self.config.name
+ d["url"] = join(self.config.path, d["md5"])
+ d["format"] = 1
+ d["is_min_version"] = 9
+ images.append(d)
+ return images
+
+ @property
+ def uuid(self):
+ '''
+ Repository v1 doesn't support UUID
+ '''
+ return None
+
+ @property
+ def motd(self):
+ '''
+ Return repository message of the day.
+ Repository v1 doesn't have a message of the day
+ '''
+ return None
+
+ def setmotd(self, value=""):
+ '''
+ Don't set repository message of the day. Not supported by v1.
+ '''
+ # check local repository
+ warn(u"Repository v1 doesn't support motd. Unable to set")
+
+ @property
+ def version(self):
+ '''
+ Return repository version
+ '''
+ return 1
+
+ def upgrade(self):
+ raise NotImplementedError()
+ # if self.version == Database.version:
+ # info("Repository already up-to-date (%s)" % self.version)
+ # return
+ # else:
+ # arrow("Start repository upgrade")
+ # arrowlevel(1)
+ # # Create dummy repository
+ # tmpdir = mkdtemp()
+ # try:
+ # repoconf = RepositoryConfig("tmp_migrate_repo", path=tmpdir)
+ # dstrepo = Repository(repoconf)
+ # # Symlink content from repository into dummy repo
+ # for file in listdir(self.config.path):
+ # symlink(join(self.config.path, file),
+ # join(tmpdir, file))
+ # unlink(repoconf.dbpath)
+ # unlink(repoconf.lastpath)
+ # old_verbosity = installsystems.verbosity
+ # arrow("Initialize new database")
+ # # Disable unwanted message during upgrade
+ # installsystems.verbosity = 0
+ # dstrepo.init()
+ # # Restore verbosity
+ # installsystems.verbosity = old_verbosity
+ # md5s = self.db.ask("SELECT md5 FROM image").fetchall()
+ # # Copy images to dummy repository (fill new database)
+ # arrow("Fill database with images")
+ # arrowlevel(1)
+ # installsystems.verbosity = 0
+ # for img in [PackageImage(join(self.config.path, md5[0]),
+ # md5name=True) for md5 in md5s]:
+ # installsystems.verbosity = old_verbosity
+ # arrow("%s v%s" % (img.name, img.version))
+ # installsystems.verbosity = 0
+ # dstrepo.add(img)
+ # installsystems.verbosity = old_verbosity
+ # arrowlevel(-1)
+ # arrow("Backup old database")
+ # move(self.config.dbpath,
+ # join("%s.bak" % self.config.dbpath))
+ # # Replace old db with the new from dummy repository
+ # move(repoconf.dbpath, self.config.dbpath)
+ # self.update_last()
+ # arrowlevel(-1)
+ # arrow("Repository upgrade complete")
+ # finally:
+ # # Remove dummy repository
+ # rmtree(tmpdir)
diff --git a/installsystems/repository/repository2.py b/installsystems/repository/repository2.py
new file mode 100644
index 0000000000000000000000000000000000000000..1661015de497492494aef486a5011b134677067a
--- /dev/null
+++ b/installsystems/repository/repository2.py
@@ -0,0 +1,419 @@
+# -*- python -*-
+# -*- coding: utf-8 -*-
+
+# This file is part of Installsystems.
+#
+# Installsystems is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Installsystems is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Installsystems. If not, see <http://www.gnu.org/licenses/>.
+
+'''
+Repository v2
+'''
+
+from cStringIO import StringIO
+from installsystems.exception import ISError
+from installsystems.image.package import PackageImage
+from installsystems.printer import arrow, arrowlevel, warn, info, out, confirm
+from installsystems.repository.database import Database
+from installsystems.repository.repository1 import Repository1
+from installsystems.tools import PipeFile, isfile, chrights, mkdir, compare_versions
+from os import unlink, listdir, linesep, rmdir, symlink
+from os.path import join, exists, basename, isdir
+from shutil import move, rmtree
+
+class Repository2(Repository1):
+ '''
+ Repository class
+ '''
+
+ @property
+ def version(self):
+ '''
+ Return repository version
+ '''
+ return 2
+
+ @property
+ def uuid(self):
+ '''
+ Return repository UUID
+ '''
+ return self.db.ask("SELECT uuid from repository").fetchone()[0]
+
+ def init(self):
+ '''
+ Initialize an empty base repository
+ '''
+ config = self.config
+ # check local repository
+ if not self.local:
+ raise ISError(u"Repository creation must be local")
+ # create base directories
+ arrow("Creating base directories")
+ arrowlevel(1)
+ # creating local directory
+ try:
+ if exists(config.path):
+ arrow(u"%s already exists" % config.path)
+ else:
+ mkdir(config.path, config.uid, config.gid, config.dmod)
+ arrow(u"%s directory created" % config.path)
+ except Exception as e:
+ raise ISError(u"Unable to create directory %s" % config.path, e)
+ arrowlevel(-1)
+ # create database
+ d = Database.create(config.dbpath)
+ chrights(config.dbpath, uid=config.uid,
+ gid=config.gid, mode=config.fmod)
+ # load database
+ self.db = Database(config.dbpath)
+ # mark repo as not offline
+ self.config.offline = False
+ # create/update last file
+ self.update_last()
+
+
+ def last(self, name):
+ '''
+ Return last version of name in repo or None if not found
+ '''
+ r = self.db.ask("SELECT version FROM image WHERE name = ?", (name,)).fetchall()
+ # no row => no way
+ if r is None:
+ return None
+ f = lambda x,y: x[0] if compare_versions(x[0], y[0]) > 0 else y[0]
+ # return last
+ return reduce(f, r)
+
+ def _add(self, image):
+ '''
+ Add description to db
+ '''
+ arrow("Adding metadata")
+ self.db.begin()
+ # insert image information
+ arrow("Image", 1)
+ self.db.ask("INSERT INTO image values (?,?,?,?,?,?,?,?,?)",
+ (image.md5,
+ image.name,
+ image.version,
+ image.date,
+ image.author,
+ image.description,
+ image.size,
+ image.is_min_version,
+ image.format,
+ ))
+ # insert data information
+ arrow("Payloads", 1)
+ for name, obj in image.payload.items():
+ self.db.ask("INSERT INTO payload values (?,?,?,?,?)",
+ (obj.md5,
+ image.md5,
+ name,
+ obj.isdir,
+ obj.size,
+ ))
+ # on commit
+ self.db.commit()
+ # update last file
+ self.update_last()
+
+ def add(self, image, delete=False):
+ '''
+ Add a packaged image to repository
+ if delete is true, remove original files
+ '''
+ # check local repository
+ if not self.local:
+ raise ISError(u"Repository addition must be local")
+ # cannot add an already existent image
+ if self.has(image.name, image.version):
+ raise ISError(u"Image already in database, delete first!")
+ # adding file to repository
+ arrow("Copying images and payload")
+ for obj in [ image ] + image.payload.values():
+ dest = join(self.config.path, obj.md5)
+ basesrc = basename(obj.path)
+ if exists(dest):
+ arrow(u"Skipping %s: already exists" % basesrc, 1)
+ else:
+ arrow(u"Adding %s (%s)" % (basesrc, obj.md5), 1)
+ dfo = open(dest, "wb")
+ sfo = PipeFile(obj.path, "r", progressbar=True)
+ sfo.consume(dfo)
+ sfo.close()
+ dfo.close()
+ chrights(dest, self.config.uid,
+ self.config.gid, self.config.fmod)
+ # copy is done. create a image inside repo
+ r_image = PackageImage(join(self.config.path, image.md5),
+ md5name=True)
+ # checking must be done with original md5
+ r_image.md5 = image.md5
+ # checking image and payload after copy
+ r_image.check("Check image and payload")
+ self._add(image)
+ # removing original files
+ if delete:
+ arrow("Removing original files")
+ for obj in [ image ] + image.payload.values():
+ arrow(basename(obj.path), 1)
+ unlink(obj.path)
+
+ def getallmd5(self):
+ '''
+ Get list of all md5 in DB
+ '''
+ res = self.db.ask("SELECT md5 FROM image UNION SELECT md5 FROM payload").fetchall()
+ return [ md5[0] for md5 in res ]
+
+ def check(self):
+ '''
+ Check repository for unreferenced and missing files
+ '''
+ # Check if the repo is local
+ if not self.local:
+ raise ISError(u"Repository must be local")
+ local_files = set(listdir(self.config.path))
+ local_files.remove(self.config.dbname)
+ local_files.remove(self.config.lastname)
+ db_files = set(self.getallmd5())
+ # check missing files
+ arrow("Checking missing files")
+ missing_files = db_files - local_files
+ if len(missing_files) > 0:
+ out(linesep.join(missing_files))
+ # check unreferenced files
+ arrow("Checking unreferenced files")
+ unref_files = local_files - db_files
+ if len(unref_files) > 0:
+ out(linesep.join(unref_files))
+ # check corruption of local files
+ arrow("Checking corrupted files")
+ for f in local_files:
+ fo = PipeFile(join(self.config.path, f))
+ fo.consume()
+ fo.close()
+ if fo.md5 != f:
+ out(f)
+
+ def clean(self, force=False):
+ '''
+ Clean the repository's content
+ '''
+ # Check if the repo is local
+ if not self.local:
+ raise ISError(u"Repository must be local")
+ allmd5 = set(self.getallmd5())
+ repofiles = set(listdir(self.config.path)) - set([self.config.dbname, self.config.lastname])
+ dirtyfiles = repofiles - allmd5
+ if len(dirtyfiles) > 0:
+ # print dirty files
+ arrow("Dirty files:")
+ for f in dirtyfiles:
+ arrow(f, 1)
+ # ask confirmation
+ if not force and not confirm("Remove dirty files? (yes) "):
+ raise ISError(u"Aborted!")
+ # start cleaning
+ arrow("Cleaning")
+ for f in dirtyfiles:
+ p = join(self.config.path, f)
+ arrow(u"Removing %s" % p, 1)
+ try:
+ if isdir(p):
+ rmdir(p)
+ else:
+ unlink(p)
+ except:
+ warn(u"Removing %s failed" % p)
+ else:
+ arrow("Nothing to clean")
+
+ def delete(self, name, version, payloads=True):
+ '''
+ Delete an image from repository
+ '''
+ # check local repository
+ if not self.local:
+ raise ISError(u"Repository deletion must be local")
+ # get md5 of files related to the image (an exception is raised if it does not exist)
+ md5s = self.getmd5(name, version)
+ # cleaning db (must be done before cleaning)
+ arrow("Cleaning database")
+ arrow("Remove payloads from database", 1)
+ self.db.begin()
+ for md5 in md5s[1:]:
+ self.db.ask("DELETE FROM payload WHERE md5 = ? AND image_md5 = ?",
+ (md5, md5s[0])).fetchone()
+ arrow("Remove image from database", 1)
+ self.db.ask("DELETE FROM image WHERE md5 = ?",
+ (md5s[0],)).fetchone()
+ self.db.commit()
+ # Removing files
+ arrow("Removing files from pool")
+ # if asked don't remove payloads
+ if not payloads:
+ md5s = [ md5s[0] ]
+ arrowlevel(1)
+ for md5 in md5s:
+ self._remove_file(md5)
+ arrowlevel(-1)
+ # update last file
+ self.update_last()
+
+ def images(self):
+ '''
+ Return a dict of information on images
+ '''
+ db_images = self.db.ask("SELECT md5, name, version, date, author, \
+ description, size, is_min_version, format \
+ FROM image ORDER BY name, version").fetchall()
+
+ images = []
+ field = ("md5", "name", "version", "date", "author", "description",
+ "size", "is_min_version", "format")
+ for info in db_images:
+ d = dict(zip(field, info))
+ d["repo"] = self.config.name
+ d["url"] = join(self.config.path, d["md5"])
+ images.append(d)
+ return images
+
+ def payloads(self):
+ '''
+ Return a dict of information on payloads
+ '''
+ db_payloads = self.db.ask("SELECT payload.md5,payload.size,payload.isdir,image.name,image.version,payload.name FROM payload inner join image on payload.image_md5 = image.md5").fetchall()
+ res = {}
+ for payload in db_payloads:
+ md5 = payload[0]
+ # create entry if not exists
+ if md5 not in res:
+ res[md5] = {"size": payload[1], "isdir": payload[2], "images": {}}
+ # add image to list
+ imgpath = u"%s/%s:%s" % (self.config.name, payload[3], payload[4])
+ res[md5]["images"][imgpath] = {"repo": self.config.name,
+ "imgname": payload[3],
+ "imgver": payload[4],
+ "payname": payload[5]}
+ return res
+
+ def search(self, pattern):
+ '''
+ Search pattern in a repository
+ '''
+ images = self.db.ask("SELECT name, version, author, description\
+ FROM image\
+ WHERE name LIKE ? OR\
+ description LIKE ? OR\
+ author LIKE ?",
+ tuple( [u"%%%s%%" % pattern ] * 3)
+ ).fetchall()
+ for name, version, author, description in images:
+ arrow(u"%s v%s" % (name, version), 1)
+ out(u" #yellow#Author:#reset# %s" % author)
+ out(u" #yellow#Description:#reset# %s" % description)
+
+ def _remove_file(self, filename):
+ '''
+ Remove a filename from pool. Check if it's not needed by db before
+ '''
+ # check existance in table image
+ have = False
+ for table in ("image", "payload"):
+ have = have or self.db.ask(u"SELECT md5 FROM %s WHERE md5 = ? LIMIT 1" % table,
+ (filename,)).fetchone() is not None
+ # if no reference, delete!
+ if not have:
+ arrow(u"%s, deleted" % filename)
+ unlink(join(self.config.path, filename))
+ else:
+ arrow(u"%s, skipped" % filename)
+
+ def has(self, name, version):
+ '''
+ Return the existence of a package
+ '''
+ return self.db.ask("SELECT name,version FROM image WHERE name = ? AND version = ? LIMIT 1", (name,version)).fetchone() is not None
+
+ def get(self, name, version=None):
+ '''
+ Return an image from a name and version
+ '''
+ # if no version given, take the last
+ if version is None:
+ version = self.last(name)
+ if version is None:
+ raise ISError(u"Unable to find image %s in %s" % (name,
+ self.config.name))
+ # get file md5 from db
+ r = self.db.ask("select md5 from image where name = ? and version = ? limit 1",
+ (name, version)).fetchone()
+ if r is None:
+ raise ISError(u"Unable to find image %s v%s in %s" % (name, version,
+ self.config.name))
+ path = join(self.config.path, r[0])
+ # getting the file
+ arrow(u"Loading image %s v%s from repository %s" % (name,
+ version,
+ self.config.name))
+ memfile = StringIO()
+ try:
+ fo = PipeFile(path, "r")
+ fo.consume(memfile)
+ fo.close()
+ except Exception as e:
+ raise ISError(u"Loading image %s v%s failed" % (name, version), e)
+ memfile.seek(0)
+ pkg = PackageImage(path, fileobj=memfile, md5name=True)
+ if pkg.md5 != r[0]:
+ raise ISError(u"Image MD5 verification failure")
+ return pkg
+
+ def getmd5(self, name, version):
+ '''
+ Return an image md5 and its payload md5s from name and version. Order matters!
+ Image md5 will always be the first
+ '''
+ # get file md5 from db
+ a = self.db.ask("SELECT md5 FROM image WHERE name = ? AND version = ? LIMIT 1",
+ (name,version)).fetchone()
+ if a is None:
+ raise ISError(u"No such image %s version %s" % (name, version))
+ b = self.db.ask("SELECT md5 FROM payload WHERE image_md5 = ?",
+ (a[0],)).fetchall()
+ return [ a[0] ] + [ x[0] for x in b ]
+
+ @property
+ def motd(self):
+ '''
+ Return repository message of the day
+ '''
+ motd = self.db.ask("SELECT motd FROM repository").fetchone()[0]
+ return None if len(motd) == 0 else motd
+
+ def setmotd(self, value=""):
+ '''
+ Set repository message of the day
+ '''
+ # check local repository
+ if not self.local:
+ raise ISError(u"Repository must be local")
+ arrow("Updating motd")
+ self.db.ask("UPDATE repository SET motd = ?", (value,))
+ self.update_last()
+
+ def upgrade(self):
+ info("No upgrade available")
diff --git a/installsystems/template.py b/installsystems/template.py
deleted file mode 100644
index 34b109a7bb2fb0def66f857fda31276d3a2d7e83..0000000000000000000000000000000000000000
--- a/installsystems/template.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# -*- python -*-
-# -*- coding: utf-8 -*-
-
-# This file is part of Installsystems.
-#
-# Installsystems is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Installsystems is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with Installsystems. If not, see .
-
-description = u"""[image]
-name = %(name)s
-version = %(version)s
-description = %(description)s
-author = %(author)s
-is_min_version = %(is_min_version)s
-
-[compressor]
-%(compressor)s
-"""
-
-changelog = u"""[1]
-- Initial version
-"""
-
-build = u"""# -*- python -*-
-# -*- coding: utf-8 -*-
-
-# global rebuild object allow you to force rebuild of payloads
-# to force rebuild of payload nammed rootfs add it to the rebuild list
-# rebuild list is empty by default
-#rebuild += ["rootfs"]
-
-# vim:set ts=4 sw=4 et:
-"""
-
-
-parser = u"""# -*- python -*-
-# -*- coding: utf-8 -*-
-
-# global image object is a reference to current image
-# global parser object is your installsystems subparser (argparse)
-
-# you can use exit() to break the execution of the script
-
-import os
-import argparse
-from installsystems.printer import arrow
-
-class TargetAction(argparse.Action):
- def __call__(self, parser, namespace, values, option_string=None):
- if not os.path.isdir(values):
- raise Exception(u"Invalid target directory %s" % values)
- namespace.target = values
-
-parser.add_argument("-n", "--hostname", dest="hostname", type=str, required=True)
-parser.add_argument("target", type=str, action=TargetAction,
- help="target installation directory")
-
-# vim:set ts=4 sw=4 et:
-"""
-
-setup = u"""# -*- python -*-
-# -*- coding: utf-8 -*-
-
-# global image object is a reference to current image
-# namespace object is the persistant, it can be used to store data accross scripts
-
-# you can use exit() to break the execution of the script
-
-from installsystems.printer import arrow
-
-arrow(u"hostname: %s" % namespace.hostname)
-
-# uncomment to extract payload named root in namespace.target directory
-#image.payload["rootfs"].extract(namespace.target)
-
-# vim:set ts=4 sw=4 et:
-"""
-
-createdb = u"""
-CREATE TABLE image (md5 TEXT NOT NULL PRIMARY KEY,
- name TEXT NOT NULL,
- version TEXT NOT NULL,
- date INTEGER NOT NULL,
- author TEXT,
- description TEXT,
- size INTEGER NOT NULL,
- is_min_version INTEGER NOT NULL,
- format INTEGER NOT NULL,
- UNIQUE(name, version));
-
-CREATE TABLE payload (md5 TEXT NOT NULL,
- image_md5 TEXT NOT NULL REFERENCES image(md5),
- name TEXT NOT NULL,
- isdir INTEGER NOT NULL,
- size INTEGER NOT NULL,
- PRIMARY KEY(md5, image_md5));
-
-CREATE TABLE repository (uuid TEXT NOT NULL PRIMARY KEY,
- version FLOAT NOT NULL,
- motd TEXT NOT NULL);
-"""
diff --git a/installsystems/tools.py b/installsystems/tools.py
index 2e5b2fce0ea85db4d76da6b6ecbe841851349e76..c075d5687fb8801fe5849247c253319fc53049d1 100644
--- a/installsystems/tools.py
+++ b/installsystems/tools.py
@@ -20,27 +20,27 @@
InstallSystems Generic Tools Library
'''
-import hashlib
-import jinja2
-import locale
-import math
-import os
-import re
-import shutil
-import socket
-import time
-import urllib2
-
-from subprocess import call, check_call, CalledProcessError
+from hashlib import md5
+from installsystems import VERSION, CANONICAL_NAME
+from installsystems.exception import ISError
+from installsystems.printer import VERBOSITY, warn, debug, arrow
from itertools import takewhile
-
-import installsystems
-from progressbar import Widget, ProgressBar, Percentage
-from progressbar import FileTransferSpeed
+from jinja2 import Template
+from locale import getpreferredencoding
+from math import log
+from os import environ, pathsep, walk, rename, symlink, unlink
+from os import stat, lstat, fstat, makedirs, chown, chmod, utime
+from os.path import exists, join, isdir, ismount, splitext
from progressbar import Bar, BouncingBar, ETA, UnknownLength
-from installsystems.tarball import Tarball
-from installsystems.exception import *
-from installsystems.printer import *
+from progressbar import FileTransferSpeed
+from progressbar import Widget, ProgressBar, Percentage
+from re import match, compile
+from shutil import copy
+from socket import getdefaulttimeout
+from stat import S_ISDIR, S_ISREG
+from subprocess import call, check_call, CalledProcessError
+from time import mktime, gmtime, strftime, strptime
+from urllib2 import urlopen, Request
################################################################################
@@ -73,7 +73,7 @@ class PipeFile(object):
if pbar.currval < 2e-6: # =~ 0
scaled = power = 0
else:
- power = int(math.log(pbar.currval, 1000))
+ power = int(log(pbar.currval, 1000))
scaled = pbar.currval / 1000.**power
return self.format % (scaled, self.prefixes[power], self.unit)
@@ -87,9 +87,9 @@ class PipeFile(object):
raise AttributeError("You must have a path or a fileobj to open")
if mode not in ("r", "w"):
raise AttributeError("Invalid open mode. Must be r or w")
- self.timeout = timeout or socket.getdefaulttimeout()
+ self.timeout = timeout or getdefaulttimeout()
self.mode = mode
- self._md5 = hashlib.md5()
+ self._md5 = md5()
self.size = 0
self.mtime = None
self.consumed_size = 0
@@ -99,7 +99,7 @@ class PipeFile(object):
# seek to 0 and compute filesize if we have and fd
if hasattr(self.fo, "fileno"):
self.seek(0)
- self.size = os.fstat(self.fo.fileno()).st_size
+ self.size = fstat(self.fo.fileno()).st_size
# we need to open the path
else:
ftype = pathtype(path)
@@ -131,7 +131,7 @@ class PipeFile(object):
Open file on the local filesystem
'''
self.fo = open(path, self.mode)
- sta = os.fstat(self.fo.fileno())
+ sta = fstat(self.fo.fileno())
self.size = sta.st_size
self.mtime = sta.st_mtime
@@ -140,10 +140,9 @@ class PipeFile(object):
Open a file accross an http server
'''
try:
- headers = {"User-Agent": "%s v%s" % (installsystems.canonical_name,
- installsystems.version)}
- request = urllib2.Request(path, None, headers)
- self.fo = urllib2.urlopen(request, timeout=self.timeout)
+ headers = {"User-Agent": "%s v%s" % (CANONICAL_NAME, VERSION)}
+ request = Request(path, None, headers)
+ self.fo = urlopen(request, timeout=self.timeout)
except Exception as e:
raise ISError("Unable to open %s" % path, e)
# get file size
@@ -153,7 +152,7 @@ class PipeFile(object):
self.size = 0
# get mtime
try:
- self.mtime = int(time.mktime(time.strptime(self.fo.headers["Last-Modified"],
+ self.mtime = int(mktime(strptime(self.fo.headers["Last-Modified"],
"%a, %d %b %Y %H:%M:%S %Z")))
except:
self.mtime = None
@@ -163,7 +162,7 @@ class PipeFile(object):
Open file via ftp
'''
try:
- self.fo = urllib2.urlopen(path, timeout=self.timeout)
+ self.fo = urlopen(path, timeout=self.timeout)
except Exception as e:
raise ISError("Unable to open %s" % path, e)
# get file size
@@ -182,7 +181,7 @@ class PipeFile(object):
except ImportError:
raise ISError("URL type not supported. Paramiko is missing")
# parse url
- (login, passwd, host, port, path) = re.match(
+ (login, passwd, host, port, path) = match(
"ssh://(([^:]+)(:([^@]+))?@)?([^/:]+)(:(\d+))?(/.*)?", path).group(2, 4, 5, 7, 8)
if port is None: port = 22
if path is None: path = "/"
@@ -278,7 +277,7 @@ class PipeFile(object):
'''
Set this property to true enable progress bar
'''
- if installsystems.verbosity == 0:
+ if VERBOSITY == 0:
return
if val == True and not hasattr(self, "_progressbar_started"):
self._progressbar_started = True
@@ -314,8 +313,8 @@ def smd5sum(buf):
Compute md5 of a string
'''
if isinstance(buf, unicode):
- buf = buf.encode(locale.getpreferredencoding())
- m = hashlib.md5()
+ buf = buf.encode(getpreferredencoding())
+ m = md5()
m.update(buf)
return m.hexdigest()
@@ -323,7 +322,7 @@ def mkdir(path, uid=None, gid=None, mode=None):
'''
Create a directory and set rights
'''
- os.makedirs(path)
+ makedirs(path)
chrights(path, uid, gid, mode)
def chrights(path, uid=None, gid=None, mode=None, mtime=None):
@@ -331,13 +330,13 @@ def chrights(path, uid=None, gid=None, mode=None, mtime=None):
Set rights on a file
'''
if uid is not None:
- os.chown(path, uid, -1)
+ chown(path, uid, -1)
if gid is not None:
- os.chown(path, -1, gid)
+ chown(path, -1, gid)
if mode is not None:
- os.chmod(path, mode)
+ chmod(path, mode)
if mtime is not None:
- os.utime(path, (mtime, mtime))
+ utime(path, (mtime, mtime))
def pathtype(path):
'''
@@ -356,10 +355,10 @@ def pathsearch(name, path=None):
'''
Search PATH for a binary
'''
- path = path or os.environ["PATH"]
- for d in path.split(os.pathsep):
- if os.path.exists(os.path.join(d, name)):
- return os.path.join(os.path.abspath(d), name)
+ path = path or environ["PATH"]
+ for d in path.split(pathsep):
+ if exists(join(d, name)):
+ return join(abspath(d), name)
return None
def isfile(path):
@@ -372,6 +371,7 @@ def abspath(path):
'''
Format a path to be absolute
'''
+ import os
ptype = pathtype(path)
if ptype in ("http", "ftp", "ssh"):
return path
@@ -386,13 +386,14 @@ def getsize(path):
'''
Get size of a path. Recurse if directory
'''
+ import os
total_sz = os.path.getsize(path)
- if os.path.isdir(path):
- for root, dirs, files in os.walk(path):
+ if isdir(path):
+ for root, dirs, files in walk(path):
for filename in dirs + files:
- filepath = os.path.join(root, filename)
- filestat = os.lstat(filepath)
- if stat.S_ISDIR(filestat.st_mode) or stat.S_ISREG(filestat.st_mode):
+ filepath = join(root, filename)
+ filestat = lstat(filepath)
+ if S_ISDIR(filestat.st_mode) or S_ISREG(filestat.st_mode):
total_sz += filestat.st_size
return total_sz
@@ -401,7 +402,7 @@ def human_size(num, unit='B'):
Return human readable size
'''
prefixes = ('','Ki', 'Mi', 'Gi', 'Ti','Pi', 'Ei', 'Zi', 'Yi')
- power = int(math.log(num, 1024))
+ power = int(log(num, 1024))
# max is YiB
if power >= len(prefixes):
power = len(prefixes) - 1
@@ -412,15 +413,15 @@ def time_rfc2822(timestamp):
'''
Return a rfc2822 format time string from an unix timestamp
'''
- return time.strftime("%a, %d %b %Y %H:%M:%S %z", time.gmtime(timestamp))
+ return strftime("%a, %d %b %Y %H:%M:%S %z", gmtime(timestamp))
def guess_distro(path):
'''
Try to detect which distro is inside a directory
'''
- if os.path.exists(os.path.join(path, "etc/debian_version")):
+ if exists(join(path, "etc", "debian_version")):
return "debian"
- elif os.path.exists(os.path.join(path, "etc/arch-release")):
+ elif exists(join(path, "etc", "arch-release")):
return "archlinux"
return None
@@ -435,18 +436,16 @@ def prepare_chroot(path, mount=True):
arrow("Mounting filesystems")
for mp in mps:
origin = u"/%s" % mp
- target = os.path.join(path, mp)
- if os.path.ismount(target):
+ target = join(path, mp)
+ if ismount(target):
warn(u"%s is already a mountpoint, skipped" % target)
- elif os.path.ismount(origin) and os.path.isdir(target):
+ elif ismount(origin) and isdir(target):
arrow(u"%s -> %s" % (origin, target), 1)
try:
check_call(["mount", "--bind", origin, target], close_fds=True)
except CalledProcessError as e:
warn(u"Mount failed: %s.\n" % e)
arrow("Tricks")
- exists = os.path.exists
- join = os.path.join
# check path is a kind of linux FHS
if not exists(join(path, "etc")) or not exists(join(path, "usr")):
return
@@ -460,10 +459,10 @@ def prepare_chroot(path, mount=True):
and not exists(resolv_trick_path)):
arrow("resolv.conf", 1)
if exists(resolv_path):
- os.rename(resolv_path, resolv_backup_path)
+ rename(resolv_path, resolv_backup_path)
else:
open(resolv_trick_path, "wb")
- shutil.copy("/etc/resolv.conf", resolv_path)
+ copy("/etc/resolv.conf", resolv_path)
except Exception as e:
warn(u"resolv.conf tricks fail: %s" % e)
# trick mtab
@@ -473,9 +472,9 @@ def prepare_chroot(path, mount=True):
mtab_trick_path = join(path, "etc", "mtab.istrick")
if not exists(mtab_backup_path) and not exists(mtab_trick_path):
arrow("mtab", 1)
- if os.path.exists(mtab_path):
- os.rename(mtab_path, mtab_backup_path)
- os.symlink("/proc/self/mounts", mtab_path)
+ if exists(mtab_path):
+ rename(mtab_path, mtab_backup_path)
+ symlink("/proc/self/mounts", mtab_path)
except Exception as e:
warn(u"mtab tricks fail: %s" % e)
# try to guest distro
@@ -498,10 +497,8 @@ def unprepare_chroot(path, mount=True):
Rollback preparation of a chroot environment inside a directory
'''
arrow("Untricks")
- exists = os.path.exists
- join = os.path.join
# check path is a kind of linux FHS
- if exists(os.path.join(path, "etc")) and exists(os.path.join(path, "usr")):
+ if exists(join(path, "etc")) and exists(join(path, "usr")):
# untrick mtab
mtab_path = join(path, "etc", "mtab")
mtab_backup_path = join(path, "etc", "mtab.isbackup")
@@ -510,17 +507,17 @@ def unprepare_chroot(path, mount=True):
arrow("mtab", 1)
# order matter !
if exists(mtab_trick_path):
- try: os.unlink(mtab_path)
+ try: unlink(mtab_path)
except OSError: pass
try:
- os.unlink(mtab_trick_path)
+ unlink(mtab_trick_path)
except OSError:
warn(u"Unable to remove %s" % mtab_trick_path)
if exists(mtab_backup_path):
- try: os.unlink(mtab_path)
+ try: unlink(mtab_path)
except OSError: pass
try:
- os.rename(mtab_backup_path, mtab_path)
+ rename(mtab_backup_path, mtab_path)
except OSError:
warn(u"Unable to restore %s" % mtab_backup_path)
@@ -532,17 +529,17 @@ def unprepare_chroot(path, mount=True):
arrow("resolv.conf", 1)
# order matter !
if exists(resolv_trick_path):
- try: os.unlink(resolv_path)
+ try: unlink(resolv_path)
except OSError: pass
try:
- os.unlink(resolv_trick_path)
+ unlink(resolv_trick_path)
except OSError:
warn(u"Unable to remove %s" % resolv_trick_path)
if exists(resolv_backup_path):
- try: os.unlink(resolv_path)
+ try: unlink(resolv_path)
except OSError: pass
try:
- os.rename(resolv_backup_path, resolv_path)
+ rename(resolv_backup_path, resolv_path)
except OSError:
warn(u"Unable to restore %s" % resolv_backup_path)
# try to guest distro
@@ -551,7 +548,7 @@ def unprepare_chroot(path, mount=True):
if distro == "debian":
arrow("Debian specific", 1)
for f in ("etc/debian_chroot", "usr/sbin/policy-rc.d"):
- try: os.unlink(join(path, f))
+ try: unlink(join(path, f))
except: pass
# unmounting
if mount:
@@ -559,7 +556,7 @@ def unprepare_chroot(path, mount=True):
arrow("Unmounting filesystems")
for mp in reversed(mps):
target = join(path, mp)
- if os.path.ismount(target):
+ if ismount(target):
arrow(target, 1)
call(["umount", target], close_fds=True)
@@ -580,8 +577,8 @@ def is_version(version):
'''
Check if version is valid
'''
- if re.match("^(\d+)(?:([-~+]).*)?$", version) is None:
- raise TypeError(u"Invalid version format %s" % buf)
+ if match("^(\d+)(?:([-~+]).*)?$", version) is None:
+ raise TypeError(u"Invalid version format %s" % version)
def compare_versions(v1, v2):
'''
@@ -594,12 +591,12 @@ def compare_versions(v1, v2):
# Ensure versions have the right format
for version in v1, v2:
- iv = re.match("^(\d+(?:\.\d+)*)(?:([~+]).*)?$", str(version))
+ iv = match("^(\d+(?:\.\d+)*)(?:([~+]).*)?$", str(version))
if iv is None:
raise TypeError(u"Invalid version format: %s" % version)
- digitregex = re.compile(r'^([0-9]*)(.*)$')
- nondigitregex = re.compile(r'^([^0-9]*)(.*)$')
+ digitregex = compile(r'^([0-9]*)(.*)$')
+ nondigitregex = compile(r'^([^0-9]*)(.*)$')
digits = True
while v1 or v2:
@@ -702,36 +699,37 @@ def render_templates(target, context, tpl_ext=".istpl", force=False, keep=False)
Render templates according to tpl_ext
Apply template mode/uid/gid to the generated file
'''
- for path in os.walk(target):
+ for path in walk(target):
for filename in path[2]:
- name, ext = os.path.splitext(filename)
+ name, ext = splitext(filename)
if ext == tpl_ext:
- tpl_path = os.path.join(path[0], filename)
- file_path = os.path.join(path[0], name)
+ tpl_path = join(path[0], filename)
+ file_path = join(path[0], name)
arrow(tpl_path)
- if os.path.exists(file_path) and not force:
+ if exists(file_path) and not force:
raise ISError(u"%s will be overwritten, cancel template "
"generation (set force=True if you know "
"what you do)" % file_path)
try:
with open(tpl_path) as tpl_file:
- template = jinja2.Template(tpl_file.read())
+ template = Template(tpl_file.read())
with open(file_path, "w") as rendered_file:
rendered_file.write(template.render(context))
except Exception as e:
raise ISError(u"Render template fail", e)
- st = os.stat(tpl_path)
- os.chown(file_path, st.st_uid, st.st_gid)
- os.chmod(file_path, st.st_mode)
+ st = stat(tpl_path)
+ chown(file_path, st.st_uid, st.st_gid)
+ chmod(file_path, st.st_mode)
if not keep:
- os.unlink(tpl_path)
+ unlink(tpl_path)
def argv():
'''
Return system argv after an unicode transformation with locale preference
'''
+ from sys import argv
try:
- return [unicode(x, encoding=locale.getpreferredencoding()) for x in sys.argv]
+ return [unicode(x, encoding=getpreferredencoding()) for x in argv]
except UnicodeDecodeError as e:
raise ISError("Invalid character encoding in command line")