Newer
Older
# -*- python -*-
# -*- coding: utf-8 -*-
# Started 10/05/2011 by Seblu <seblu@seblu.net>
'''
Image stuff
'''
import codecs
import ConfigParser
import cStringIO
import difflib
import stat
import subprocess
import tarfile
import time
import installsystems.template as istemplate
import installsystems.tools as istools
from installsystems.tools import PipeFile
from installsystems.tarball import Tarball
# format should be a float X.Y but for compatibility reason it's a string
# before version 6, it's strict string comparison
format = "1"
if re.match("^[-_\w]+$", buf) is None:
raise Exception("Invalid image name %s" % buf)
@staticmethod
def check_image_version(buf):
    '''
    Raise an exception if buf is not a valid image version

    A valid version is a non-empty string made only of decimal digits.
    '''
    # raw string avoids relying on Python passing "\d" through unescaped
    if re.match(r"^\d+$", buf) is None:
        raise Exception("Invalid image version %s" % buf)
@staticmethod
def compare_versions(v1, v2):
    '''
    For backward compatibility, image class offer a method to compare image versions
    But code is now inside tools
    '''
    # delegate to the shared implementation in installsystems.tools
    return istools.compare_versions(v1, v2)
def create(cls, path, force=False):
    '''
    Create a new source image tree at path, populated with template examples

    NOTE(review): this chunk appears truncated -- the @classmethod decorator,
    the build_path/payload_path assignments, and the try statements matched by
    the orphan raise/except below are missing. Confirm against the full file.
    '''
    # source images must live on the local filesystem
    if not istools.isfile(path):
        raise NotImplementedError("SourceImage must be local")
    # main path
    parser_path = os.path.join(path, "parser")
    setup_path = os.path.join(path, "setup")
    # NOTE(review): build_path and payload_path are used below but never
    # assigned in this chunk -- presumably os.path.join(path, "build") and
    # os.path.join(path, "payload"); verify
    for d in (path, build_path, parser_path, setup_path, payload_path):
        if not os.path.exists(d) or not os.path.isdir(d):
            os.mkdir(d)
            # NOTE(review): "e" is unbound here -- an enclosing try/except
            # around the mkdir was likely lost
            raise Exception("Unable to create directory: %s: %s" % (d, e))
    # create dict of file to create
    examples = {}
    # create description example from template
    examples["description"] = {"path": "description",
                               "content": istemplate.description % {
                                   "name": "",
                                   "version": "1",
                                   "description": "",
                                   "author": "",
                                   "is_min_version": installsystems.version}}
    # create changelog example from template
    examples["changelog"] = {"path": "changelog", "content": istemplate.changelog}
    # create build example from template
    examples["build"] = {"path": "build/01-build.py", "content": istemplate.build}
    # create parser example from template
    examples["parser"] = {"path": "parser/01-parser.py", "content": istemplate.parser}
    # create setup example from template
    examples["setup"] = {"path": "setup/01-setup.py", "content": istemplate.setup}
    for name in examples:
        try:
            arrow("Creating %s example" % name)
            expath = os.path.join(path, examples[name]["path"])
            # do not clobber existing files unless force is set
            if not force and os.path.exists(expath):
                warn("%s already exists. Skipping!" % expath)
                continue
            open(expath, "w").write(examples[name]["content"])
        except Exception as e:
            raise Exception("Unable to create example file: %s" % e)
    # setting executable rights on files in setup and parser
    # read the current umask without changing it
    umask = os.umask(0)
    os.umask(umask)
    for dpath in (build_path, parser_path, setup_path):
        # NOTE(review): the loop body that chmods each script (binding "pf")
        # is missing from this chunk
        except Exception as e:
            raise Exception("Unable to set rights on %s: %s" % (pf, e))
if not istools.isfile(path):
raise NotImplementedError("SourceImage must be local")
self.base_path = os.path.abspath(path)
for pathtype in ("build", "parser", "setup", "payload"):
setattr(self, "%s_path" % pathtype, os.path.join(self.base_path, pathtype))
self.check_source_image()
self.description = self.parse_description()
# script tarball path
self.image_name = "%s-%s%s" % (self.description["name"],
self.description["version"],
self.extension)
'''
Check if we are a valid SourceImage directories
'''
for d in (self.base_path, self.build_path, self.parser_path,
self.setup_path, self.payload_path):
raise Exception("Invalid source image: directory %s is missing" % d)
raise Exception("Invalid source image: %s is not a directory" % d)
if not os.access(d, os.R_OK|os.X_OK):
raise Exception("Invalid source image: unable to access to %s" % d)
if not os.path.exists(os.path.join(self.base_path, "description")):
raise Exception("Invalid source image: no description file")
def build(self, force=False, force_payload=False, check=True, script=True):
    '''
    Build a packaged image tarball from this source image in the current
    directory

    NOTE(review): this chunk appears truncated -- the "check" parameter is
    never read here (a conditional around check_scripts was likely lost) and
    the orphan except at the end has no matching try. Confirm against the
    full file.
    '''
    # check if free to create script tarball
    if os.path.exists(self.image_name) and force == False:
        raise Exception("Tarball already exists. Remove it before")
    # syntax-check parser and setup scripts before packing them
    self.check_scripts(self.parser_path)
    self.check_scripts(self.setup_path)
    # remove list
    rl = set()
    # run build script
    if script:
        rl |= set(self.run_scripts(self.build_path, self.payload_path))
    if force_payload:
        rl |= set(self.select_payloads())
    # remove payloads
    self.remove_payloads(rl)
    # create payload files
    self.create_payloads()
    # generate a json description
    jdesc = self.generate_json_description()
    # NOTE(review): the stray docstring below suggests a method boundary was
    # lost here; the following lines look like the tarball-creation step
    '''
    Create a script tarball in current directory
    '''
    # create tarball
    arrow("Creating image tarball")
    arrowlevel(1)
    arrow("Name %s" % self.image_name)
    try:
        tarball = Tarball.open(self.image_name, mode="w:gz", dereference=True)
    except Exception as e:
        raise Exception("Unable to create tarball %s: %s" % (self.image_name, e))
    # add description.json
    arrow("Add description.json")
    # NOTE(review): "jdescription" is unbound here; the description generated
    # above is named jdesc -- verify against the full file
    tarball.add_str("description.json", jdescription, tarfile.REGTYPE, 0644)
    # add changelog
    if self.changelog is not None:
        arrow("Add changelog")
        tarball.add_str("changelog", self.changelog.verbatim, tarfile.REGTYPE, 0644)
    # add format
    arrow("Add format")
    tarball.add_str("format", self.format, tarfile.REGTYPE, 0644)
    # add build scripts
    self.add_scripts(tarball, self.build_path)
    # add parser scripts
    # add setup scripts
    # NOTE(review): the add_scripts calls for parser/setup hinted by the two
    # comments above are missing from this chunk
    # closing tarball file
    tarball.close()
    # NOTE(review): orphan except -- its try statement is missing; on
    # interruption the partial tarball is removed
    except (SystemExit, KeyboardInterrupt):
        if os.path.exists(self.image_name):
            os.unlink(self.image_name)
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
def describe_payload(self, name):
    '''
    Return information about a payload

    Builds a dict with its source path, destination file name, versionned
    link name, and stat-derived metadata (type, owner, mode, mtime).
    '''
    info = {}
    # where the payload data lives inside the source image
    info["source_path"] = os.path.join(self.payload_path, name)
    # unversionned payload file name
    info["dest_path"] = "%s-%s%s" % (self.description["name"],
                                     name,
                                     Payload.extension)
    # versionned symlink name
    info["link_path"] = "%s-%s-%s%s" % (self.description["name"],
                                        self.description["version"],
                                        name,
                                        Payload.extension)
    st = os.stat(info["source_path"])
    info["isdir"] = stat.S_ISDIR(st.st_mode)
    info["uid"] = st.st_uid
    info["gid"] = st.st_gid
    info["mode"] = stat.S_IMODE(st.st_mode)
    info["mtime"] = st.st_mtime
    return info
def select_payloads(self):
    '''
    Return a generator on image payloads

    Each entry of the payload directory is considered one payload.
    '''
    for entry in os.listdir(self.payload_path):
        yield entry
def remove_payloads(self, paylist):
    '''
    Remove payload list if exists

    For each payload name, unlink both its data file and its versionned
    symlink when they are present.
    '''
    arrow("Removing payloads")
    for name in paylist:
        arrow(name, 1)
        info = self.describe_payload(name)
        for target in (info["dest_path"], info["link_path"]):
            # lexists so dangling symlinks are removed too
            if os.path.lexists(target):
                os.unlink(target)
def create_payloads(self):
    '''
    Create all missing data payloads in current directory
    Also create symlink to versionned payload

    Raises an Exception naming the payload when its creation fails.
    '''
    for payload_name in self.select_payloads():
        paydesc = self.describe_payload(payload_name)
        # versionned payload already present: nothing to do
        if os.path.exists(paydesc["link_path"]):
            continue
        # the except below was orphaned in the original; wrap the whole
        # creation of this payload so a failure names the culprit
        try:
            # create non versionned payload file
            if not os.path.exists(paydesc["dest_path"]):
                if paydesc["isdir"]:
                    self.create_payload_tarball(paydesc["dest_path"],
                                                paydesc["source_path"])
                else:
                    self.create_payload_file(paydesc["dest_path"],
                                             paydesc["source_path"])
            # create versionned payload file
            if os.path.lexists(paydesc["link_path"]):
                os.unlink(paydesc["link_path"])
            os.symlink(paydesc["dest_path"], paydesc["link_path"])
        except Exception as e:
            raise Exception("Unable to create payload %s: %s" % (payload_name, e))
def create_payload_tarball(self, tar_path, data_path):
    '''
    Create a compressed tarball at tar_path from the directory data_path

    Pipes "tar --create" through the image compressor into a PipeFile.
    Raises if either subprocess exits non-zero; removes the partial file
    on SystemExit/KeyboardInterrupt.
    '''
    # get compressor argv (first to escape file creation if not found)
    a_comp = istools.get_compressor_path(self.compressor, compress=True)
    a_tar = ["tar", "--create", "--numeric-owner", "--directory",
             data_path, "."]
    # the except below was orphaned in the original; restore the try so an
    # interruption cleans up the partially written tarball
    try:
        # create destination file
        f_dst = PipeFile(tar_path, "w", progressbar=True)
        # run tar process
        p_tar = subprocess.Popen(a_tar, shell=False, close_fds=True,
                                 stdout=subprocess.PIPE)
        # run compressor process
        p_comp = subprocess.Popen(a_comp, shell=False, close_fds=True,
                                  stdin=p_tar.stdout, stdout=subprocess.PIPE)
        # write data from compressor to tar_path
        f_dst.consume(p_comp.stdout)
        # close all fd
        p_tar.stdout.close()
        p_comp.stdout.close()
        f_dst.close()
        # check tar return 0
        if p_tar.wait() != 0:
            raise Exception("Tar return is not zero")
        # check compressor return 0
        if p_comp.wait() != 0:
            raise Exception("Compressor %s return is not zero" % a_comp[0])
    except (SystemExit, KeyboardInterrupt):
        # do not leave a partial file behind on interruption
        if os.path.exists(tar_path):
            os.unlink(tar_path)
        raise
def create_payload_file(self, dest, source):
    '''
    Create a payload file at dest by compressing the file at source

    Raises if the compressor exits non-zero; removes the partial file on
    SystemExit/KeyboardInterrupt.
    '''
    # get compressor argv (first to escape file creation if not found)
    a_comp = istools.get_compressor_path(self.compressor, compress=True)
    # the except below was orphaned in the original; restore the try so an
    # interruption cleans up the partially written file
    try:
        # open source file
        f_src = open(source, "r")
        # create destination file
        f_dst = PipeFile(dest, "w", progressbar=True)
        # run compressor
        p_comp = subprocess.Popen(a_comp, shell=False, close_fds=True,
                                  stdin=f_src, stdout=subprocess.PIPE)
        # close source file fd
        f_src.close()
        # write data from compressor to dest file
        f_dst.consume(p_comp.stdout)
        # close compressor stdin and destination file
        p_comp.stdout.close()
        f_dst.close()
        # check compressor return 0
        if p_comp.wait() != 0:
            raise Exception("Compressor %s return is not zero" % a_comp[0])
    except (SystemExit, KeyboardInterrupt):
        # do not leave a partial file behind on interruption
        if os.path.exists(dest):
            os.unlink(dest)
        raise
Add scripts inside a directory into a tarball
basedirectory = os.path.basename(directory)
arrow("Add %s scripts" % basedirectory)
arrowlevel(1)
# adding base directory
ti = tarball.gettarinfo(directory, arcname=basedirectory)
ti.mode = 0755
ti.uid = ti.gid = 0
ti.uname = ti.gname = "root"
tarball.addfile(ti)
# adding each file
for fp, fn in self.select_scripts(directory):
ti = tarball.gettarinfo(fp, arcname=os.path.join(basedirectory, fn))
ti.mode = 0755
ti.uid = ti.gid = 0
ti.uname = ti.gname = "root"
arrow("%s added" % fn)
'''
Check if scripts inside a directory can be compiled
'''
basedirectory = os.path.basename(directory)
arrow("Checking %s scripts" % basedirectory)
arrowlevel(1)
# checking each file
for fp, fn in self.select_scripts(directory):
# compiling file
fs = open(fp, "r").read()
compile(fs, fp, mode="exec")
arrow(fn)
arrowlevel(-1)
def run_scripts(self, script_directory, exec_directory):
    '''
    Execute script inside a directory
    Return a list of payload to force rebuild

    Each script is compiled then executed with "rebuild" (a shared list the
    script may append payload names to) and "image" (this object) in its
    global namespace. Scripts run with exec_directory as cwd.
    '''
    arrow("Run %s scripts" % os.path.basename(script_directory))
    rebuild_list = []
    # remember where we were so the cwd is restored afterwards
    previous_cwd = os.getcwd()
    arrowlevel(1)
    for script_path, script_name in self.select_scripts(script_directory):
        arrow(script_name)
        os.chdir(exec_directory)
        old_level = arrowlevel(1)
        # compile source code
        try:
            compiled = compile(open(script_path, "r").read(), script_name, "exec")
        except Exception as e:
            raise Exception("Unable to compile %s fail: %s" %
                            (script_name, e))
        # define execution context
        gl = {"rebuild": rebuild_list,
              "image": self}
        # execute source code
        # NOTE: the original used the Python-2-only "exec ... in ..."
        # statement; the tuple/call form below behaves identically and is
        # valid on both Python 2.6+ and Python 3
        try:
            exec(compiled, gl)
        except Exception as e:
            raise Exception("Execution script %s fail: %s" %
                            (script_name, e))
        arrowlevel(level=old_level)
    os.chdir(previous_cwd)
    arrowlevel(-1)
    return rebuild_list
def select_scripts(self, directory):
    '''
    Yield the runnable scripts of a directory as (filepath, filename) pairs

    A script qualifies when its name matches NN-name.py and it carries an
    execution bit.
    '''
    for fn in sorted(os.listdir(directory)):
        fp = os.path.join(directory, fn)
        # check name: the original name-filter branch had no body (lines
        # lost); restore the skip of non-matching entries
        if not re.match(r"\d+-.*\.py$", fn):
            continue
        # check execution bit
        if not os.access(fp, os.X_OK):
            continue
        # yield complete filepath and only script name
        yield fp, fn
arrow("Generating JSON description")
arrowlevel(1)
# copy description
desc = self.description.copy()
# timestamp image
desc["is_build_version"] = installsystems.version
for payload_name in self.select_payloads():
arrow(payload_name, 1)
# getting payload info
payload_desc = self.describe_payload(payload_name)
# compute md5 and size
fileobj = PipeFile(payload_desc["link_path"], "r")
fileobj.consume()
fileobj.close()
# create payload entry
desc["payload"][payload_name] = {
"md5": fileobj.md5,
"size": fileobj.size,
"isdir": payload_desc["isdir"],
"uid": payload_desc["uid"],
"gid": payload_desc["gid"],
"mode": payload_desc["mode"],
"mtime": payload_desc["mtime"]
}
# check md5 are uniq
md5s = [v["md5"] for v in desc["payload"].values()]
if len(md5s) != len(set(md5s)):
raise Exception("Two payloads cannot have the same md5")
# serialize
return json.dumps(desc)
def parse_description(self):
    '''
    Raise an exception if description file is invalid and return vars to include

    NOTE(review): this chunk appears truncated -- the try below has no
    except clause and there is no "return d"; as written the method returns
    None. Confirm against the full file.
    '''
    d = dict()
    try:
        descpath = os.path.join(self.base_path, "description")
        cp = ConfigParser.RawConfigParser()
        # description file is read as UTF-8
        cp.readfp(codecs.open(descpath, "r", "utf8"))
        for n in ("name","version", "description", "author"):
            d[n] = cp.get("image", n)
        # get min image version
        if cp.has_option("image", "is_min_version"):
            d["is_min_version"] = cp.get("image", "is_min_version")
        else:
            d["is_min_version"] = 0
        # check image name
        # check image version
        # NOTE(review): the check_image_name/check_image_version calls hinted
        # by the two comments above are missing from this chunk
        # check installsystems min version
        if self.compare_versions(installsystems.version, d["is_min_version"]) < 0:
            raise Exception("Minimum Installsystems version not satisfied")
def parse_changelog(self):
    '''
    Create a changelog object from a file

    Returns None when the source image has no changelog file; raises when
    the file exists but cannot be parsed.
    '''
    # try to find a changelog file
    try:
        fo = codecs.open(os.path.join(self.base_path, "changelog"), "r", "utf8")
    except IOError:
        # a missing changelog is a normal situation
        return None
    # we have it, we need to check everything is ok
    arrow("Parsing changelog")
    try:
        changelog = Changelog(fo.read())
    except Exception as e:
        raise Exception("Bad changelog: %s" % e)
    return changelog
@property
def compressor(self):
    '''
    Return image compressor

    gzip is the only compression scheme supported for images right now.
    '''
    return "gzip"
@classmethod
def diff(cls, pkg1, pkg2):
    '''
    Diff two packaged images

    Compares the parser/setup scripts of both image tarballs and prints a
    colorized unified diff.
    '''
    arrow("Difference from images #y#%s v%s#R# to #r#%s v%s#R#:" % (pkg1.name,
                                                                    pkg1.version,
                                                                    pkg2.name,
                                                                    pkg2.version))
    # Extract images for diff scripts files
    fromfiles = set(pkg1._tarball.getnames(re_pattern="(parser|setup)/.*"))
    tofiles = set(pkg2._tarball.getnames(re_pattern="(parser|setup)/.*"))
    for member in fromfiles | tofiles:
        # preparing from info
        if member in fromfiles:
            fromfile = os.path.join(pkg1.filename, member)
            fromdata = pkg1._tarball.extractfile(member).readlines()
        else:
            # absent on the "from" side
            fromfile = "/dev/null"
            fromdata = ""
        # preparing to info
        if member in tofiles:
            tofile = os.path.join(pkg2.filename, member)
            todata = pkg2._tarball.extractfile(member).readlines()
        else:
            # absent on the "to" side
            tofile = "/dev/null"
            todata = ""
        # generate diff, colorizing additions/removals/hunk headers
        for line in difflib.unified_diff(fromdata, todata,
                                         fromfile=fromfile, tofile=tofile):
            if line.startswith("+"):
                out("#g#%s#R#" % line, endl="")
            elif line.startswith("-"):
                out("#r#%s#R#" % line, endl="")
            elif line.startswith("@@"):
                out("#c#%s#R#" % line, endl="")
            else:
                out(line, endl="")
def __init__(self, path, fileobj=None, md5name=False):
    '''
    Initialize a package image
    fileobj must be a seekable fileobj

    NOTE(review): this chunk appears truncated -- the self.path assignment,
    the "if fileobj is None:" matching the orphan else below, the try
    matching the orphan except, and the metadata parsing step are missing.
    Confirm against the full file.
    '''
    self.base_path = os.path.dirname(self.path)
    # tarball are named by md5 and not by real name
    self.md5name = md5name
    fileobj = PipeFile(self.path, "r")
    else:
        fileobj = PipeFile(mode="r", fileobj=fileobj)
    # buffer the whole image in memory so the tarball is seekable
    memfile = cStringIO.StringIO()
    fileobj.consume(memfile)
    # get downloaded size and md5
    self.size = fileobj.read_size
    self.md5 = fileobj.md5
    memfile.seek(0)
    self._tarball = Tarball.open(fileobj=memfile, mode='r:gz')
    except Exception as e:
        raise Exception("Unable to open image %s: %s" % (path, e))
    arrow("Image %s v%s loaded" % (self.name, self.version))
    arrow("Author: %s" % self.author, 1)
    arrow("Date: %s" % istools.time_rfc2822(self.date), 1)
    # build Payload objects from the metadata payload entries
    self.payload = {}
    for pname, pval in self._metadata["payload"].items():
        pfilename = "%s-%s%s" % (self.filename[:-len(Image.extension)],
                                 pname, Payload.extension)
        if self.md5name:
            # md5-named repositories store payloads under their checksum
            ppath = os.path.join(self.base_path,
                                 self._metadata["payload"][pname]["md5"])
        else:
            ppath = os.path.join(self.base_path, pfilename)
        self.payload[pname] = Payload(pname, pfilename, ppath, **pval)
if name in self._metadata:
return self._metadata[name]
raise AttributeError
@property
def filename(self):
    '''
    Return the file name of this image: <name>-<version><extension>
    '''
    return "{0}-{1}{2}".format(self.name, self.version, self.extension)
try:
if float(img_format) >= math.floor(float(self.format)) + 1.0:
raise Exception()
except:
raise Exception("Invalid image format %s" % img_format)
img_desc = self._tarball.get_str("description.json")
desc.update(json.loads(img_desc))
self.check_image_name(desc["name"])
self.check_image_version(desc["version"])
# add is_min_version if not present
if "is_min_version" not in desc:
desc["is_min_version"] = 0
# check installsystems min version
if self.compare_versions(installsystems.version, desc["is_min_version"]) < 0:
raise Exception("Minimum Installsystems version not satisfied")
# try to load changelog
try:
img_changelog = self._tarball.get_str("changelog")
desc["changelog"] = Changelog(img_changelog)
except KeyError:
except Exception as e:
warn("Invalid changelog: %s" % e)
def show(self, o_verbose=False, o_changelog=False, o_json=False):
'''
Display image content
'''
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
if o_json:
out(json.dumps(self._metadata))
else:
out('#light##yellow#Name:#reset# %s' % self.name)
out('#light##yellow#Version:#reset# %s' % self.version)
out('#yellow#Date:#reset# %s' % istools.time_rfc2822(self.date))
out('#yellow#Description:#reset# %s' % self.description)
out('#yellow#Author:#reset# %s' % self.author)
if o_verbose:
# field is_build_version is new in version 5. I can be absent.
try: out('#yellow#IS build version:#reset# %s' % self.is_build_version)
except AttributeError: pass
# field is_min_version is new in version 5. I can be absent.
try: out('#yellow#IS minimum version:#reset# %s' % self.is_min_version)
except AttributeError: pass
out('#yellow#MD5:#reset# %s' % self.md5)
if o_verbose:
payloads = self.payload
for payload_name in payloads:
payload = payloads[payload_name]
out('#light##yellow#Payload:#reset# %s' % payload_name)
out(' #yellow#Date:#reset# %s' % istools.time_rfc2822(payload.mtime))
out(' #yellow#Size:#reset# %s' % (istools.human_size(payload.size)))
out(' #yellow#MD5:#reset# %s' % payload.md5)
# display image content
out('#light##yellow#Content:#reset#')
self._tarball.list(o_verbose)
# display changelog
if o_changelog:
self.changelog.show(int(self.version), o_verbose)
Download tarball from path and compare the loaded md5 and remote
fo = PipeFile(self.path, "r")
fo.consume()
fo.close()
if self.size != fo.read_size:
raise Exception("Invalid size of image %s" % self.name)
if self.md5 != fo.md5:
raise Exception("Invalid MD5 of image %s" % self.name)
# check payloads
for pay_name, pay_obj in self.payload.items():
'''
Display filename in the tarball
'''
filelist = self._tarball.getnames(glob_pattern=filename, dir=False)
if len(filelist) == 0:
warn("No file matching %s" % filename)
for filename in filelist:
def download(self, directory, force=False, image=True, payload=False):
    '''
    Download the image file (and optionally its payloads) into directory
    Doesn't use in memory image because we cannot access it
    This is done to don't parasitize self._tarfile access to memfile

    NOTE(review): the opening quotes of this docstring were missing in the
    original chunk; indentation below is reconstructed -- confirm against
    the full file.
    '''
    # check if destination exists
    directory = os.path.abspath(directory)
    if image:
        dest = os.path.join(directory, self.filename)
        if not force and os.path.exists(dest):
            raise Exception("Image destination already exists: %s" % dest)
        # some display
        arrow("Downloading image in %s" % directory)
        debug("Downloading %s from %s" % (self.filename, self.path))
        # open source
        fs = PipeFile(self.path, progressbar=True)
        # check if announced file size is good
        if fs.size is not None and self.size != fs.size:
            raise Exception("Downloading image %s failed: Invalid announced size" % self.name)
        # open destination
        # NOTE(review): writes to self.filename in the current working
        # directory rather than to the computed dest path -- confirm intended
        fd = open(self.filename, "wb")
        fs.consume(fd)
        fs.close()
        fd.close()
        # verify downloaded size and checksum against loaded metadata
        if self.size != fs.consumed_size:
            raise Exception("Download image %s failed: Invalid size" % self.name)
        if self.md5 != fs.md5:
            raise Exception("Download image %s failed: Invalid MD5" % self.name)
    if payload:
        for payname in self.payload:
            arrow("Downloading payload %s in %s" % (payname, directory))
            # NOTE(review): bare attribute access -- presumably forces lazy
            # info/checksum computation before download; verify
            self.payload[payname].info
            self.payload[payname].download(directory, force=force)
def extract(self, directory, force=False, payload=False, gendescription=False):
    '''
    Extract content of the image inside a repository

    NOTE(review): this chunk appears truncated -- "payname" is used below
    without a loop header (likely "if payload: for payname in self.payload:").
    Confirm against the full file.
    '''
    # check validity of dest
    if os.path.exists(directory):
        if not os.path.isdir(directory):
            raise Exception("Destination %s is not a directory" % directory)
        if not force and len(os.listdir(directory)) > 0:
            raise Exception("Directory %s is not empty (need force)" % directory)
    else:
        istools.mkdir(directory)
    # extract content
    arrow("Extracting image in %s" % directory)
    self._tarball.extractall(directory)
    # generate description file from description.json
    if gendescription:
        arrow("Generating description file in %s" % directory)
        with open(os.path.join(directory, "description"), "w") as f:
            f.write((istemplate.description % self._metadata).encode('utf-8'))
    # here we need to decode payname which is in unicode to escape
    # tarfile to encode filename of file inside tarball inside unicode
    dest = os.path.join(directory, "payload", payname.encode("utf-8"))
    arrow("Extracting payload %s in %s" % (payname, dest))
    self.payload[payname].extract(dest, force=force)
l_scripts = self._tarball.getnames(re_pattern="%s/.*\.py" % directory)
# order matter!
l_scripts.sort()
# run scripts
arrow(os.path.basename(n_scripts))
old_level = arrowlevel(1)
except Exception as e:
raise Exception("Extracting script %s fail: %s" %
o_scripts = compile(s_scripts, n_scripts, "exec")
except Exception as e:
raise Exception("Unable to compile %s fail: %s" %
(n_scripts, e))
# define execution context
gl = {}
for k in kwargs:
gl[k] = kwargs[k]
gl["image"] = self
# execute source code
try:
exec o_scripts in gl
except Exception as e:
raise Exception("Execution script %s fail: %s" %
class Payload(object):
    '''
    Payload class represents a payload object
    '''
    # file extension used for payload files on disk
    extension = ".isdata"
    # attribute names mediated by __setattr__/__getattr__ (stored as _name)
    legit_attr = ("isdir", "md5", "size", "uid", "gid", "mode", "mtime", "compressor")
def __init__(self, name, filename, path, **kwargs):
    '''
    Initialize a payload from its file name, path and optional metadata

    NOTE(review): the "name" parameter is accepted but never stored in this
    chunk -- an object.__setattr__(self, "name", name) line was likely lost;
    confirm against the full file.
    '''
    object.__setattr__(self, "filename", filename)
    object.__setattr__(self, "path", path)
    # register legit param
    for attr in self.legit_attr:
        setattr(self, attr, None)
    # set all named param
    for kwarg in kwargs:
        # do not use hasattr which use getattr and so call md5 checksum...
        if kwarg in self.legit_attr:
            setattr(self, kwarg, kwargs[kwarg])
def __getattr__(self, name):
    '''
    Expose values stored under a leading underscore as if there is no underscore
    '''
    # get all value with an underscore as if there is no underscore
    # NOTE(review): hasattr on a missing attribute re-enters __getattr__
    # with an extra leading underscore; presumably relies on hasattr
    # swallowing the eventual error (Python 2 behaviour) -- verify
    if hasattr(self, "_%s" % name):
        return getattr(self, "_%s" % name)
    raise AttributeError
def __setattr__(self, name, value):
    '''
    Store legit attributes under a leading-underscore name

    Mangling lets the lazy properties (md5, size, ...) intercept reads
    while plain attributes are stored untouched.
    '''
    prefix = "_" if name in self.legit_attr else ""
    object.__setattr__(self, prefix + name, value)
def checksummize(self):
    '''
    Fill missing md5/size about payload
    '''
    # read the whole payload through PipeFile to compute size and digest
    fileobj = PipeFile(self.path, "r")
    fileobj.consume()
    fileobj.close()
    self._size = fileobj.read_size
    # NOTE(review): the matching "self._md5 = fileobj.md5" assignment is
    # missing from this chunk -- as written md5 stays unset after this call;
    # confirm against the full file
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
@property
def md5(self):
    '''
    Return md5 of payload

    Computed lazily on first access when not already known.
    '''
    if self._md5 is None:
        # not known yet: read the payload to compute it
        self.checksummize()
    return self._md5
@property
def size(self):
    '''
    Return size of payload

    Computed lazily on first access when not already known.
    '''
    if self._size is None:
        # not known yet: read the payload to compute it
        self.checksummize()
    return self._size
@property
def uid(self):
    '''
    Return uid of owner of original payload

    Defaults to 0 (root) when unknown.
    '''
    if self._uid is None:
        return 0
    return self._uid
@property
def gid(self):
    '''
    Return gid of owner of original payload

    Defaults to 0 (root) when unknown.
    '''
    if self._gid is None:
        return 0
    return self._gid
@property
def mode(self):
    '''
    Return mode of original payload

    When unknown, falls back to 0666 masked by the current process umask.
    '''
    if self._mode is not None:
        return self._mode
    # read the current umask without changing it
    umask = os.umask(0)
    os.umask(umask)
    # 0o666 replaces the Python-2-only literal 0666 (same value, and valid
    # syntax on both Python 2.6+ and Python 3)
    return 0o666 & ~umask
@property
def mtime(self):
    '''
    Return last modification time of original payload

    Defaults to the current time when unknown.
    '''
    if self._mtime is None:
        return time.time()
    return self._mtime
@property
def compressor(self):
    '''
    Return payload compress format

    Defaults to gzip when no compressor was recorded.
    '''
    if self._compressor is None:
        return "gzip"
    return self._compressor
Return a dict of info about current payload
Auto calculated info like name and filename must not be here
"size": self.size,
"isdir": self.isdir,
"uid": self.uid,
"gid": self.gid,
"mode": self.mode,
"mtime": self.mtime}
def check(self):
'''
Check that path correspond to current md5 and size
'''
if self._size is None or self._md5 is None:
debug("Check is called on payload with nothing to check")
return True
fileobj = PipeFile(self.path, "r")
fileobj.consume()
fileobj.close()
if self._size != fileobj.read_size:
if self._md5 != fileobj.md5: