From 7dfc089a6a5309283ddb85538dc1a2264fdf709f Mon Sep 17 00:00:00 2001 From: Nadav Goldin Date: Wed, 23 Aug 2017 19:23:19 +0300 Subject: [PATCH] WIP: persistent template store by hash Signed-off-by: Nadav Goldin --- lago.spec.in | 2 + lago/db_utils.py | 37 +++ lago/prefix.py | 36 +-- lago/qemuimg.py | 16 + lago/templates.py | 225 +++++++++++++- lago/templates_store.py | 405 +++++++++++++++++++++++++ lago/utils.py | 30 +- requirements.txt | 1 + tests/unit/lago/test_template_store.py | 118 +++++++ 9 files changed, 834 insertions(+), 36 deletions(-) create mode 100644 lago/db_utils.py create mode 100644 lago/qemuimg.py create mode 100644 lago/templates_store.py create mode 100644 tests/unit/lago/test_template_store.py diff --git a/lago.spec.in b/lago.spec.in index e330c84b..d976e8ba 100644 --- a/lago.spec.in +++ b/lago.spec.in @@ -61,6 +61,7 @@ BuildRequires: python-scp BuildRequires: python2-rpm-macros BuildRequires: python-wrapt BuildRequires: python2-future +BuildRequires: sqlite-3 %if 0%{?fedora} >= 24 BuildRequires: python2-configparser BuildRequires: python2-paramiko >= 2.1.1 @@ -88,6 +89,7 @@ Requires: libguestfs-tools >= 1.30 Requires: libguestfs-devel >= 1.30 Requires: libvirt >= 1.2.8 Requires: libvirt-python +Requires: sqlite-3 Requires: python-libguestfs >= 1.30 Requires: python-lxml Requires: python-lockfile diff --git a/lago/db_utils.py b/lago/db_utils.py new file mode 100644 index 00000000..9eda50f7 --- /dev/null +++ b/lago/db_utils.py @@ -0,0 +1,37 @@ +from contextlib import contextmanager +from collections import namedtuple +from sqlalchemy.sql import func +from sqlalchemy import (Column, DateTime, Integer) + + +def autorepr(self): + cols = (str(col.key) for col in self.__table__.columns) + key_values = ('{0}="{1}"'.format(col, getattr(self, col)) for col in cols) + return '<{0}({1})>'.format(self.__class__.name, ','.join(key_values)) + + +@contextmanager +def autocommit_safe(session): + try: + yield session + session.commit() + except: + session.rollback() + raise + + +def namedtuple_serialize(self): + cols = ','.join([str(col.key) for col in self.__table__.columns]) + record = namedtuple(self.__class__.__name__, cols) + return record._make( + [getattr(self, col.key) for col in self.__table__.columns] + ) + + +class BaseMixin(object): + id = Column(Integer, primary_key=True) + add_date = Column(DateTime, server_default=func.now()) + __repr__ = autorepr + + def serialize(self): + return namedtuple_serialize(self) diff --git a/lago/prefix.py b/lago/prefix.py index 93d35f8c..1c430d8f 100644 --- a/lago/prefix.py +++ b/lago/prefix.py @@ -891,32 +891,24 @@ def _create_link_to_parent(self, base, link_name): def _handle_lago_template( self, disk_path, template_spec, template_store, template_repo ): - disk_metadata = template_spec.get('metadata', {}) - if template_store is None or template_repo is None: - raise RuntimeError('No templates directory provided') - - template = template_repo.get_by_name(template_spec['template_name']) - template_version = template.get_version( - template_spec.get('template_version', None) - ) - if template_version not in template_store: - LOGGER.info( - log_utils.log_always("Template %s not in cache, downloading") % - template_version.name, - ) - template_store.download(template_version) - - disk_metadata.update( - template_store.get_stored_metadata( - template_version, - ), + from templates import LagoImageProvider + from templates_store import ImagesStore + store = ImagesStore(root='/tmp/fancy_store_test') + provider = LagoImageProvider( + store=store, 
+ config={ + 'url': 'http://templates.ovirt.org/repo/repo.metadata', + 'name': 'us-lago-test' + } ) - base = template_store.get_path(template_version) + image = provider.update(template_spec['template_name']) + disk_metadata = template_spec.get('metadata', {}) + disk_metadata.update(store.get_metadata(image.hash)) qemu_cmd = [ 'qemu-img', 'create', '-f', 'qcow2', '-o', 'lazy_refcounts=on', - '-b', base, disk_path + '-b', image.file, disk_path ] - return qemu_cmd, disk_metadata, base + return qemu_cmd, disk_metadata, image.file def _ova_to_spec(self, filename): """ diff --git a/lago/qemuimg.py b/lago/qemuimg.py new file mode 100644 index 00000000..74a6f31a --- /dev/null +++ b/lago/qemuimg.py @@ -0,0 +1,16 @@ +from utils import run_command, run_command_with_validation + + +def convert(src, dst, convert_format='raw'): + result = run_command_with_validation( + [ + 'qemu-img', + 'convert', + '-O', + convert_format, + src, + dst, + ], + msg='qemu-img convert failed:' + ) + return result diff --git a/lago/templates.py b/lago/templates.py index e0f4584a..57a3c86c 100644 --- a/lago/templates.py +++ b/lago/templates.py @@ -18,6 +18,7 @@ having to change the template name everywhere """ +import qemuimg import errno import functools import json @@ -27,15 +28,23 @@ import shutil import urllib import sys - +from datetime import datetime import lockfile - +import tempfile import utils from . import log_utils from .config import config - +from collections import namedtuple +from utils import LagoException +from future.utils import raise_from LOGGER = logging.getLogger(__name__) +ImageName = namedtuple('ImageName', 'name, hash') + + +class LagoImageError(LagoException): + pass + class FileSystemTemplateProvider: """ @@ -387,6 +396,11 @@ def get_by_name(self, name): Raises: KeyError: if no template is found """ + if name not in self._dom['templates']: + raise LagoImageError( + 'No image named {0} at {1}'.format(name, self.name) + ) + spec = self._dom.get('templates', {})[name] return Template( name=name, @@ -421,6 +435,10 @@ def __init__(self, name, versions): self.name = name self._versions = versions + @property + def versions(self): + return self._versions + def get_version(self, ver_name=None): """ Get the given version for this template, or the latest @@ -473,6 +491,12 @@ def __init__(self, name, source, handle, timestamp): self._hash = None self._metadata = None + def __repr__(self): + return ( + '' + ).format(self.name, self._source, self._handle, self._timestamp) + def timestamp(self): """ Getter for the timestamp @@ -515,17 +539,192 @@ def download(self, destination): self._source.download_image(self._handle, destination) -def _locked(func): - """ - Decorator that ensures that the decorated function has the lock of the - repo while running, meant to decorate only bound functions for classes that - have `lock_path` method. 
- """ +RemoteImage = namedtuple( + 'RemoteImage', 'name, hash, creation_date,repo_name,tags,template_version' +) + + +class LagoImageProvider(object): + def __init__(self, config, store): + self._name = config['name'] + self._url = config['url'] + self.max_versions = config.get('max_versions', 5) + self._store = store + self._config = config + if not self._store.exists_repo(self.name): + store.add_repo(repo_name=self.name, repo_type='lago') + + def update(self, raw_name, fail=False): + image_info = self._make_name(raw_name) + local_images = self.list_local_images(image_info) + remote_images = [] + try: + remote_images = self.list_remote_images(image_info.name) + except LagoImageError: + if fail: + raise + + if image_info.hash is None: + result = self._decide_by_name( + image_info.name, local_images, remote_images + ) + else: + result = self._decide_by_hash( + image_info.hash, local_images, remote_images + ) + + if isinstance(result, RemoteImage): + image = self._add_from_remote(result) + if len(local_images) > self.max_versions: + LOGGER.debug( + 'more than %s images per name, deleting %s', + self.max_versions, local_images[0].hash + ) + self.store.delete_image(local_images[0].hash) + else: + image = result + return image + + def list_local_images(self, image_info): + if image_info.hash is None: + return self._store.search(image_info.name, self.name) + else: + return [self._store.get_image(image_info.hash)] + + def list_remote_images(self, name): + try: + remote_repo = TemplateRepository.from_url(self.url) + except RuntimeError as exc: + raise_from( + exc, + LagoImageError( + 'Unable to fetch Lago images ' + 'repository from ' + '{0}'.format(self._url) + ) + ) + candidates = remote_repo.get_by_name(name) + remote_images = [] + for ver_name, ver in candidates.versions.viewitems(): + try: + sha1 = 'sha1:' + ver.get_metadata()['sha1'] + except KeyError: + LOGGER.warning( + ( + 'Image without hash found at {0}, ignoring ' + 'image: {1}' + ).format(self.url, ver.name) + ) + continue + + remote_images.append( + RemoteImage( + name=name, + repo_name=self.name, + hash=sha1, + creation_date=datetime.fromtimestamp(ver.timestamp()), + tags=[ver_name], + template_version=ver + ) + ) + if remote_images != []: + remote_images.sort(key=lambda image: image.creation_date) - @functools.wraps(func) - def wrapper(self, *args, **kwargs): - with lockfile.LockFile(self.lock_path()): - return func(self, *args, **kwargs) + return remote_images + + @property + def name(self): + return self._name + + @property + def url(self): + return self._url + + def _add_from_remote(self, remote_image): + tmp_dir = self._store.tmp_dir + _, tmp_dest = tempfile.mkstemp(dir=tmp_dir) + try: + remote_image.template_version.download(tmp_dest) + result = utils.verify_hash( + tmp_dest, + remote_image.hash.split(':')[-1], + hash_algo=remote_image.hash.split(':')[0] + ) + if result is False: + raise LagoImageError( + ( + 'Failed verifying hash for image: ' + '{0}.'.format(remote_image) + ) + ) + + image = self._store.add_image( + name=remote_image.name, + repo_name=remote_image.repo_name, + hash=remote_image.hash, + image_file=tmp_dest, + creation_date=remote_image.creation_date, + metadata=remote_image.template_version.get_metadata(), + tags=remote_image.tags, + transfer_function=qemuimg.convert + ) + return image + finally: + os.unlink(tmp_dest) + + def _decide_by_hash(self, hash, local_images, remote_images): + raise LagoException('fetching by hash not implemented yet') + + def _decide_by_name(self, name, local_images, remote_images): 
+ if not local_images and not remote_images: + raise LagoImageError( + ( + 'Unable to list remote images, and no ' + 'local image {0} found.' + ).format(name) + ) + + elif local_images and not remote_images: + LOGGER.debug( + 'no remote image was found with name %s, using local ' + 'image: %s', name, local_images[-1] + ) + return local_images[-1] + + elif not local_images and remote_images: + LOGGER.debug( + 'no local image %s, acquiring remote: %s', name, + remote_images[-1] + ) + return remote_images[-1] + + else: + head_remote = remote_images[-1] + head_local = local_images[-1] + if head_remote.hash != head_local.hash and head_remote.creation_date > head_local.creation_date: + LOGGER.debug( + ( + 'found newer version for image name %s ' + 'remote: %s, local: %s' + ), name, head_remote, head_local + ) + return head_remote + else: + return head_local + + def _make_name(self, name): + components = name.split(':') + if len(components) == 1: + return ImageName(components[0], None) + elif len(components) == 2: + return ImageName(components[0], 'sha1:' + components[1]) + else: + raise LagoImageError( + ( + 'Illegal image name name, should be ' + 'name[:SHA1]: {0}'.format(name) + ) + ) class TemplateStore: diff --git a/lago/templates_store.py b/lago/templates_store.py new file mode 100644 index 00000000..86890d83 --- /dev/null +++ b/lago/templates_store.py @@ -0,0 +1,405 @@ +import datetime +import json +import logging +import os +import uuid +from functools import partial + +from future.builtins import super +from future.utils import raise_from +from sqlalchemy import ( + Column, DateTime, ForeignKey, Integer, String, UniqueConstraint, + create_engine +) +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import relationship, sessionmaker +from sqlalchemy.orm.exc import NoResultFound + +import utils +from db_utils import BaseMixin, autocommit_safe +from utils import LagoException +import shutil + +LOGGER = logging.getLogger(__name__) +Base = declarative_base() + + +class StoreError(LagoException): + pass + + +class RepositoryError(StoreError): + def __init__(self, repo_name): + super().__init__('No repository {0} found'.format(repo_name)) + + +class ImageError(StoreError): + def __init__(self, image): + super().__init__('No image with hash {0} found'.format(image)) + + +class TagError(StoreError): + def __init__(self, tag, hash): + super( + ).__init__('No tag {0} found for image hash {1}'.format(tag, hash)) + + +class Repository(BaseMixin, Base): + __tablename__ = 'repositories' + name = Column(String(256), unique=True) + repo_type = Column(String(256)) + images = relationship('Image', back_populates='repository') + + +class Image(BaseMixin, Base): + __tablename__ = 'images' + repository_id = Column(Integer, ForeignKey('repositories.id')) + repository = relationship('Repository', back_populates='images') + name = Column(String(256)) + creation_date = Column(DateTime) + hash = Column(String(512), unique=True) + file = Column(String(1024)) + tags = relationship('ImageTag', back_populates='image') + __mapper_args__ = {'order_by': creation_date} + + +class ImageTag(BaseMixin, Base): + __tablename__ = 'image_tags' + image_id = Column(Integer, ForeignKey('images.id')) + image = relationship('Image', back_populates='tags') + name = Column(String(256)) + __table_args__ = ( + UniqueConstraint('image_id', 'name', name='_image_tag'), + ) + __mapper_args__ = {'order_by': name} + + +class ImagesStoreDB(object): + def __init__(self, uri): + self._uri = uri + self._engine = 
create_engine(uri, echo=False) + Base.metadata.create_all(self._engine) + self._sessionmaker = sessionmaker(bind=self._engine) + self._session = partial(autocommit_safe, self._sessionmaker()) + + def add_repo(self, name, repo_type): + with self._session() as session: + if self._exists_repo(session, name): + raise StoreError('Repository {0} already exists'.format(name)) + repo = Repository(name=name, repo_type=repo_type) + session.add(repo) + return self.get_repo(name=name) + + def add_tag(self, hash, name): + with self._session() as session: + image = self._get_image(session, hash) + image.tags.append(ImageTag(name=name)) + return self.get_image(hash=hash) + + def add_image(self, name, repo_name, creation_date, hash, file, tags=None): + with self._session() as session: + if self._exists_image(session, hash): + raise StoreError( + ('Image with hash {0} already ' + 'exists').format(hash) + ) + try: + repo = session.query(Repository).filter( + Repository.name == repo_name + ).one() + except NoResultFound as exc: + raise_from(RepositoryError(repo_name), exc) + + image = Image( + name=name, + creation_date=creation_date, + hash=hash, + file=file, + ) + + if tags is not None: + for tag in tags: + image.tags.append(ImageTag(name=tag)) + repo.images.append(image) + + return self.get_image(hash=hash) + + def delete_tag(self, hash, name): + with self._session() as session: + try: + tag = session.query(ImageTag).join(Image).filter( + Image.hash == hash, ImageTag.name == name + ).one() + except NoResultFound as exc: + raise_from(TagError(name, hash), exc) + session.delete(tag) + + def delete_image(self, hash): + with self._session() as session: + image = self._get_image(session, hash) + session.delete(image) + + def delete_repo(self, name): + with self._session() as session: + repo = session.query(Repository).filter(Repository.name == name + ).one() + session.delete(repo) + + def list(self, top=10): + with self._session() as session: + return [ + row.serialize() + for row in session.query(Image).limit(top).all() + ] + + def list_repos(self, top=10): + with self._session() as session: + return [ + row.serialize() + for row in session.query(Repository).limit(top).all() + ] + + def get_repo(self, name): + with self._session() as session: + repo = self._get_repo(session, name) + return repo.serialize() + + def get_image(self, hash): + with self._session() as session: + image = self._get_image(session, hash) + return image.serialize() + + def get_images_by_name(self, name, repo_name=None): + filters = [Image.name == name] + if repo_name is not None: + filters.append(Repository.name == repo_name) + with self._session() as session: + return [ + row.serialize() + for row in session.query(Image).filter(*filters) + ] + + def get_images_by_repo(self, repo_name): + with self._session() as session: + repo = self._get_repo(session, repo_name) + return [image.serialize() for image in repo.images] + + def exists_image(self, hash): + with self._session() as session: + return self._exists_image(session, hash) + + def exists_repo(self, name): + with self._session() as session: + return self._exists_repo(session, name) + + def get_tags(self, hash): + with self._session() as session: + image = self._get_image(session, hash) + return [tag.serialize() for tag in image.tags] + + def reset(self): + Base.metadata.drop_all(bind=self._engine) + Base.metadata.create_all(bind=self._engine) + + def _get_image(self, session, hash): + try: + image = session.query(Image).filter(Image.hash == hash).one() + return image + except 
NoResultFound as exc: + raise_from(ImageError(hash), exc) + + def _get_repo(self, session, name): + try: + repo = session.query(Repository).filter(Repository.name == name + ).one() + return repo + except NoResultFound as exc: + raise_from(RepositoryError(name), exc) + + def _exists_repo(self, session, name): + try: + self._get_repo(session, name) + return True + except RepositoryError: + return False + + def _exists_image(self, session, hash): + try: + self._get_image(session, hash) + return True + except ImageError: + return False + + +class ImagesStore(object): + def __init__(self, root, uri=None, tmp_dir=None): + self._root = os.path.abspath(root) + if not os.path.isdir(self.root): + os.makedirs(self.root) + if uri is None: + self._uri = 'sqlite:////' + os.path.join( + self._root, 'store.sqlite' + ) + else: + self._uri = uri + + self._db = ImagesStoreDB(uri=self.uri) + if tmp_dir is None: + self._tmp_dir = os.path.join(self.root, 'tmp') + if not os.path.isdir(os.path.join(self.root, 'tmp')): + os.makedirs(self.tmp_dir) + else: + self._tmp_dir = tmp_dir + + def _lock(self, repo): + return utils.LockFile( + path=os.path.join(self.root, self.__class__.__name__, repo), + timeout=180 + ) + + @property + def tmp_dir(self): + return self._tmp_dir + + @property + def root(self): + return self._root + + @property + def uri(self): + return self._uri + + def add_repo(self, repo_name, repo_type): + if self._db.exists_repo(repo_name): + raise StoreError('Repository {0} already exists'.format(repo_name)) + dst = self._repopath(repo_name) + if os.path.isdir(dst): + raise StoreError( + ( + 'Repository directory {0} exists, but the ' + 'repository is not configured, try removing the ' + 'directory manually.' + ).format(dst) + ) + os.makedirs(dst) + try: + repo = self._db.add_repo(name=repo_name, repo_type=repo_type) + LOGGER.debug('added repository %s', repo) + except: + shutil.rmtree(dst) + raise + + def delete_repo(self, repo_name): + images = self._db.get_images_by_repo(repo_name) + for image in images: + self.delete_image(image.hash) + self._db.delete_repo(repo_name) + shutil.rmtree(self._repopath(repo_name)) + + def add_image( + self, + name, + repo_name, + hash, + image_file, + creation_date, + metadata, + tags=None, + transfer_function=shutil.copy, + ): + if not isinstance(creation_date, datetime.date): + raise StoreError(('creation_date should be a datetime object.')) + + if self._db.exists_image(hash=hash): + image = self._db.get_image(hash) + raise StoreError( + ('Image hash already exists in store: ' + '{0}').format(image) + ) + + dst = os.path.join(self._repopath(repo_name), uuid.uuid4().hex) + metadata_dst = self._metafile(dst) + with self._lock(os.path.dirname(dst)): + try: + self._dump_metadata(metadata_dst, metadata, name, repo_name) + transfer_function(image_file, dst) + if not os.path.isfile(dst): + raise StoreError( + ('failed acquiring file {0} to ' + '{1}').format(image_file, dst) + ) + image = self._db.add_image( + name=name, + repo_name=repo_name, + creation_date=creation_date, + hash=hash, + file=dst, + tags=tags + ) + LOGGER.debug('added to store: %s', image) + return image + except: + utils.safe_unlink(metadata_dst) + utils.safe_unlink(dst) + raise + + def delete_image(self, hash): + image = self._db.get_image(hash) + with self._lock(os.path.dirname(image.file)): + self._db.delete_image(hash) + os.unlink(image.file) + os.unlink(self._metafile(image.file)) + LOGGER.debug('deleted from store: %s', image) + + def add_tags(self, hash, tags): + for tag in tags: + 
self._db.add_tag(hash, tag) + + def get_image(self, hash): + return self._db.get_image(hash) + + def get_tags(self, hash): + return [tag.name for tag in self._db.get_tags(hash)] + + def get_repo(self, name): + return self._db.get_repo(name) + + def get_images_by_repo(self, repo_name): + return self._db.get_images_by_repo(repo_name) + + def get_metadata(self, hash): + image = self._db.get_image(hash) + with open(self._metafile(image.file)) as meta: + return json.load(meta) + + def search(self, name, repo_name=None): + return self._db.get_images_by_name(name, repo_name) + + def exists_repo(self, repo_name): + return self._db.exists_repo(repo_name) + + def list_images(self, top=10): + return self._db.list(top) + + def list_repos(self): + return self._db.list_repos() + + def _metafile(self, dest): + return dest + '.metadata' + + def _repopath(self, repo_name): + return os.path.join(self.root, repo_name) + + def _dump_metadata(self, dest, metadata, name, repo_name): + metadata['store'] = {'name': name, 'repo_name': repo_name} + with open(dest, 'w') as metafile: + try: + utils.json_dump(metadata, metafile) + except ValueError: + raise StoreError( + ( + 'Unable to serialize metadata, it should be a ' + 'a JSON serializable string: ' + '{0}.' + ).format(metadata) + ) diff --git a/lago/utils.py b/lago/utils.py index b02701f2..392a4e22 100644 --- a/lago/utils.py +++ b/lago/utils.py @@ -20,6 +20,7 @@ import Queue import collections import datetime +import errno import fcntl import functools import json @@ -580,7 +581,9 @@ def run_command_with_validation( ): result = run_command(cmd) if result and fail_on_error: - raise RuntimeError('{}\n{}'.format(msg, result.err)) + raise LagoException( + '{}\nstdout: {}\nstderr: {}'.format(msg, result.out, result.err) + ) return result @@ -696,6 +699,11 @@ def get_hash(file_path, checksum='sha1'): return sha.hexdigest() +def verify_hash(file_path, expected_hash, hash_algo): + calculated_hash = get_hash(file_path, checksum=hash_algo) + return calculated_hash == expected_hash + + def filter_spec(spec, paths, wildcard='*', separator='/'): """ Remove keys from a spec file. 
@@ -792,6 +800,26 @@ def ver_cmp(ver1, ver2): ) +def safe_unlink(path): + """ + Same as os.unlink, only ignores file does not exist errors + + Args: + path(str): path to remove + + """ + + try: + os.unlink(path) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + +def simple_move(source, dest): + shutil.move(source, dest) + + class LagoException(Exception): pass diff --git a/requirements.txt b/requirements.txt index 4852473f..7fc1e601 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,6 +10,7 @@ PyYAML scp setuptools stevedore +SQLAlchemy future wrapt Jinja2 diff --git a/tests/unit/lago/test_template_store.py b/tests/unit/lago/test_template_store.py new file mode 100644 index 00000000..2c46f418 --- /dev/null +++ b/tests/unit/lago/test_template_store.py @@ -0,0 +1,118 @@ +from pytest import fixture, mark, raises + +from lago.templates_store import ImagesStoreDB +from lago.templates_store import RepositoryError, StoreError, ImageError +from collections import namedtuple +import string +from random import choice +from datetime import datetime +Image = namedtuple('Image', 'name, repo_name, creation_date, hash, file, tags') + + +@fixture +def storedb(): + return ImagesStoreDB(uri='sqlite://') + + +@fixture +def storedb_with_repo(): + store = ImagesStoreDB(uri='sqlite://') + store.add_repo(name='default_test_repo', repo_type='unittest') + return store + + +@fixture +def repo_stream(): + def gen(): + count = 1 + while True: + yield ('repo{0}'.format(count), 'type{0}'.format(count)) + count = count + 1 + + return gen() + + +def random_str(length=20): + return u''.join(choice(string.ascii_letters) for _ in range(20)) + + +def generate_image(**kwargs): + return Image( + name=kwargs.get('name', random_str()), + repo_name=kwargs.get('repo_name', random_str()), + creation_date=kwargs.get('creation_date', datetime.now()), + hash=kwargs.get('hash', 'hash' + random_str()), + file=kwargs.get('file', random_str()), + tags=kwargs.get('tags', []) + ) + + +def assert_images(result, image): + assert result.name == image.name + assert result.creation_date == image.creation_date + assert result.hash == image.hash + assert result.file == image.file + + +# @fixture +# def images_stream(): +# def gen() + + +class TestImageStoreDB(object): + @mark.parametrize('number_of_repos', range(1, 5)) + def test_add_repo(self, storedb, repo_stream, number_of_repos): + # TO-DO: refactor to reduce some code + repos = [next(repo_stream) for _ in range(number_of_repos)] + for repo in repos: + result = storedb.add_repo(*repo) + assert result.name == repo[0] + assert result.repo_type == repo[1] + assert storedb.exists_repo(name=repo[0]) == True + for repo in repos: + with raises(StoreError): + result = storedb.add_repo(*repo) + + new_repos = [next(repo_stream) for _ in range(2)] + for repo in new_repos: + result = storedb.add_repo(*repo) + assert result.name == repo[0] + assert result.repo_type == repo[1] + assert storedb.exists_repo(name=repo[0]) == True + + for repo in new_repos: + with raises(StoreError): + result = storedb.add_repo(*repo) + + def test_add_image_no_repo(self, storedb): + image = generate_image() + with raises(RepositoryError): + storedb.add_image(**image._asdict()) + + def test_add_image_simple(self, storedb_with_repo): + repo_name = storedb_with_repo.list_repos()[0].name + image = generate_image(repo_name=repo_name) + result = storedb_with_repo.add_image(**image._asdict()) + assert_images(result, image) + assert result.repository_id == storedb_with_repo.get_repo(repo_name).id + + def 
test_add_image_no_duplicate_hash(self, storedb_with_repo):
+        repo_name = storedb_with_repo.list_repos()[0].name
+        image = generate_image(repo_name=repo_name)
+        result = storedb_with_repo.add_image(**image._asdict())
+        assert_images(result, image)
+        assert result.repository_id == storedb_with_repo.get_repo(repo_name).id
+        with raises(StoreError):
+            storedb_with_repo.add_image(
+                **generate_image(hash=image.hash)._asdict()
+            )
+
+    def test_retrieve_image(self, storedb_with_repo):
+        repo_name = storedb_with_repo.list_repos()[0].name
+        image = generate_image(repo_name=repo_name)
+        storedb_with_repo.add_image(**image._asdict())
+        result = storedb_with_repo.get_image(image.hash)
+        assert_images(result, image)
+        results = storedb_with_repo.get_images_by_name(image.name)
+        assert len(results) == 1
+        assert_images(results[0], image)
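
Reviewer note: a minimal usage sketch of the new store layer added by this patch, driving ImagesStoreDB against an in-memory SQLite database the same way the unit tests above do. The repository name, image name, hash and file path are illustrative placeholders, not values taken from the patch.

from datetime import datetime

from lago.templates_store import ImagesStoreDB, StoreError

# In-memory SQLite, as in the unit tests; on disk the ImagesStore
# wrapper builds a SQLite URI under its root directory instead.
db = ImagesStoreDB(uri='sqlite://')

# Register a repository record (names here are illustrative only).
db.add_repo(name='example-repo', repo_type='lago')

# Record an image keyed by its content hash.
image = db.add_image(
    name='el7-base',
    repo_name='example-repo',
    creation_date=datetime.now(),
    hash='sha1:0123456789abcdef',
    file='/var/lib/lago/store/example-repo/0123',
    tags=['latest'],
)
print(image)

# Look-ups are by hash or by name; a duplicate hash raises StoreError.
assert db.exists_image('sha1:0123456789abcdef')
print(db.get_images_by_name('el7-base', repo_name='example-repo'))

try:
    db.add_image(
        name='el7-base',
        repo_name='example-repo',
        creation_date=datetime.now(),
        hash='sha1:0123456789abcdef',
        file='/somewhere/else',
    )
except StoreError as err:
    print(err)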
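
A corresponding sketch of the higher-level flow that _handle_lago_template now follows, reusing the store root and repository configuration hard-coded in the prefix.py hunk above. The template name is only an example, and network access to the oVirt templates repository is assumed.

from lago.templates import LagoImageProvider
from lago.templates_store import ImagesStore

store = ImagesStore(root='/tmp/fancy_store_test')
provider = LagoImageProvider(
    store=store,
    config={
        'url': 'http://templates.ovirt.org/repo/repo.metadata',
        'name': 'us-lago-test',
    },
)

# 'name' fetches (or reuses) the newest build of that template;
# 'name:<sha1>' would pin an exact image, but _decide_by_hash is
# still unimplemented in this WIP.
image = provider.update('el7.4-base')
print(image.file, image.hash)
print(store.get_metadata(image.hash))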