[Python-modules-commits] [python-aptly] 01/03: Import python-aptly_0.12.4.orig.tar.gz

Filip Pytloun fpytloun-guest at moszumanska.debian.org
Mon Dec 11 16:24:39 UTC 2017


This is an automated email from the git hooks/post-receive script.

fpytloun-guest pushed a commit to branch master
in repository python-aptly.

commit 5a34a290b4e2ec582bea955e169516fef1e8eeaf
Author: Filip Pytloun <filip at pytloun.cz>
Date:   Mon Dec 11 17:12:05 2017 +0100

    Import python-aptly_0.12.4.orig.tar.gz
---
 README.rst                  |  27 ++++-
 aptly/client.py             |  18 +++-
 aptly/publisher/__init__.py | 243 ++++++++++++++++++++++++++++++++++++--------
 aptly/publisher/__main__.py | 120 +++++++++++++++++-----
 setup.py                    |   3 +-
 5 files changed, 330 insertions(+), 81 deletions(-)

diff --git a/README.rst b/README.rst
index 9bfd043..15ba11a 100644
--- a/README.rst
+++ b/README.rst
@@ -18,20 +18,24 @@ This is how workflow can look like and what publisher can do for you:
 Features
 --------
 
-- create or update publish from latest snapshots
+- Create or update publish from latest snapshots
 
   - it takes configuration in yaml format which defines what to publish and
     how
   - expected snapshot format is ``<name>-<timestamp>``
 
-- promote publish
+- Promote publish
 
   - use source publish snapshots to create or update another publish (eg.
     testing -> stable)
 
-- cleanup unused snapshots
+- Cleanup unused snapshots
 
-- Supports Python 3 (recommended) and Pythonn 2
+- Purge publishes and repositories
+
+- Restore and dump publishes
+
+- Supports Python 3 (recommended) and Python 2
 
 Create or update publish
 ~~~~~~~~~~~~~~~~~~~~~~~~
@@ -60,6 +64,8 @@ and target distributions for publishing.
       cloudlab:
         # Publish as component cloudlab
         component: cloudlab
+        # Use swift storage named myswift for publish storage
+        storage: swift:myswift
         distributions:
           # We want to publish our packages (that can't break anything for
           # sure) immediately to both nightly and testing repositories
@@ -159,7 +165,7 @@ You can see differences between publishes with following command:
 
   aptly-publisher -v --url http://localhost:8080  \
   --source nightly/trusty --target testing/trusty \
-  publish --diff
+  promote --diff
 
 Example output can look like this:
 
@@ -177,6 +183,17 @@ publish is updated (eg. nightly).
 
   aptly-publisher -v --url http://localhost:8080 cleanup
 
+Purge unused packages from repo and publishes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When you are uploading a lot of versions of the same package, you may want to
+get rid of old package versions in your snapshots.
+Be careful, the option ``--hard`` will remove the packages from your repos.
+
+::
+
+  aptly-publisher -v --url http://localhost:8080 --component extra --hard purge
+
 Installation
 ============
 
diff --git a/aptly/client.py b/aptly/client.py
index 71952ff..ea54f65 100644
--- a/aptly/client.py
+++ b/aptly/client.py
@@ -62,14 +62,26 @@ class Aptly(object):
         )
         return self._process_result(res)
 
-    def do_delete(self, uri, timeout=None):
+    def do_delete(self, uri, data=None, timeout=None):
+        data_json = json.dumps(data) if data else ""
         url = '%s%s' % (self.url, uri)
-        lg.debug("DELETE %s" % url)
+
+        if data:
+            lg.debug("DELETE %s, data=%s" % (url, data_json))
+        else:
+            lg.debug("DELETE %s" % url)
 
         if self.dry:
             return
 
-        res = self.session.delete(
+        if data:
+            res = self.session.delete(
+                url,
+                data=data_json,
+                timeout=timeout or self.timeout,
+            )
+        else:
+            res = self.session.delete(
             url,
             timeout=timeout or self.timeout,
         )
diff --git a/aptly/publisher/__init__.py b/aptly/publisher/__init__.py
index b1f6e72..f1f52f6 100644
--- a/aptly/publisher/__init__.py
+++ b/aptly/publisher/__init__.py
@@ -4,6 +4,7 @@ import time
 import re
 import logging
 import yaml
+import apt_pkg
 from aptly.exceptions import AptlyException, NoSuchPublish
 
 lg = logging.getLogger(__name__)
@@ -18,29 +19,30 @@ class PublishManager(object):
     """
     Manage multiple publishes
     """
-    def __init__(self, client):
+    def __init__(self, client, storage=""):
         self.client = client
         self._publishes = {}
+        self.storage = storage
         self.timestamp = int(time.time())
 
-    def publish(self, distribution):
+    def publish(self, distribution, storage=""):
         """
         Get or create publish
         """
         try:
             return self._publishes[distribution]
         except KeyError:
-            self._publishes[distribution] = Publish(self.client, distribution, timestamp=self.timestamp)
+            self._publishes[distribution] = Publish(self.client, distribution, timestamp=self.timestamp, storage=(storage or self.storage))
             return self._publishes[distribution]
 
-    def add(self, snapshot, distributions, component='main'):
+    def add(self, snapshot, distributions, component='main', storage=""):
         """ Add mirror or repo to publish """
         for dist in distributions:
-            self.publish(dist).add(snapshot, component)
+            self.publish(dist, storage=storage).add(snapshot, component)
 
     def restore_publish(self, components, restore_file, recreate):
         publish_file = load_publish(restore_file)
-        publish_source = Publish(self.client, publish_file.get('publish'))
+        publish_source = Publish(self.client, publish_file.get('publish'), storage=publish_file.get('storage', self.storage))
         publish_source.restore_publish(publish_file,
                                        components=components,
                                        recreate=recreate)
@@ -56,30 +58,42 @@ class PublishManager(object):
         if publishes_to_save and not ('all' in publishes_to_save):
             save_all = False
 
+        re_publish = None
+        if len(publishes_to_save) == 1 and re.search(r'\(.*\)', publishes_to_save[0]):
+            re_publish = re.compile(publishes_to_save[0])
+
         publishes = self.client.do_get('/publish')
         for publish in publishes:
-            name = '{}/{}'.format(publish['Prefix'], publish['Distribution'])
-            if save_all or name in publishes_to_save:
-                save_list.append(Publish(self.client, name, load=True))
-
-        if not save_all and len(save_list) != len(publishes_to_save):
+            name = "{}{}{}".format(publish['Storage']+":" if publish['Storage']
+                                else "", publish['Prefix']+"/" if
+                                publish['Prefix'] else "",
+                                publish['Distribution'])
+
+            if not re_publish or re_publish.match(name):
+                if save_all or name in publishes_to_save or re_publish:
+                    current_publish = Publish(self.client, name, load=True, storage=publish.get('Storage', self.storage))
+                    if current_publish not in save_list:
+                        save_list.append(current_publish)
+
+        if not save_all and not re_publish and len(save_list) != len(publishes_to_save):
             raise Exception('Publish(es) required not found')
 
         for publish in save_list:
-            save_path = ''.join([dump_dir, '/', prefix, publish.name.replace('/', '-')])
+            save_path = ''.join([dump_dir, '/', prefix, publish.name.replace('/', '-'), '.yml'])
             publish.save_publish(save_path)
 
-    def _publish_match(self, publish, names=False):
+    def _publish_match(self, publish, names=False, name_only=False):
         """
         Check if publish name matches list of names or regex patterns
         """
         if names:
             for name in names:
-                if isinstance(name, re._pattern_type):
+                if not name_only and isinstance(name, re._pattern_type):
                     if re.match(name, publish.name):
                         return True
                 else:
-                    if publish in [name, './%s' % name]:
+                    operand = name if name_only else [name, './%s' % name]
+                    if publish in operand:
                         return True
             return False
         else:
@@ -87,12 +101,17 @@ class PublishManager(object):
 
     def do_publish(self, *args, **kwargs):
         try:
+            publish_dist = kwargs.pop('dist')
+        except KeyError:
+            publish_dist = None
+
+        try:
             publish_names = kwargs.pop('names')
         except KeyError:
             publish_names = None
 
         for publish in self._publishes.values():
-            if self._publish_match(publish.name, publish_names):
+            if self._publish_match(publish.name, publish_names or publish_dist, publish_names):
                 publish.do_publish(*args, **kwargs)
             else:
                 lg.info("Skipping publish %s not matching publish names" % publish.name)
@@ -103,6 +122,47 @@ class PublishManager(object):
             keys[e] = 1
         return list(keys.keys())
 
+    def do_purge(self, config, components=[], hard_purge=False):
+        (repo_dict, publish_dict) = self.get_repo_information(config, self.client, hard_purge, components)
+        publishes = self.client.do_get('/publish')
+        publish_list = []
+
+        for publish in publishes:
+            name = '{}/{}'.format(publish['Prefix'].replace("/", "_"), publish['Distribution'])
+            publish_list.append(Publish(self.client, name, load=True))
+
+        for publish in publish_list:
+            repo_dict = publish.purge_publish(repo_dict, publish_dict, components, publish=True)
+
+        if hard_purge:
+            self.remove_unused_packages(repo_dict)
+            self.cleanup_snapshots()
+
+    @staticmethod
+    def get_repo_information(config, client, fill_repo=False, components=[]):
+        """ Fill two dictionaries: one containing all the packages of every repository,
+            and a second one mapping each (publish, component) pair to its repository"""
+        repo_dict = {}
+        publish_dict = {}
+
+        for origin in ['repo', 'mirror']:
+            for name, repo in config.get(origin, {}).items():
+                if components and repo.get('component') not in components:
+                    continue
+                if fill_repo and origin == 'repo':
+                    packages = client.do_get('/{}/{}/{}'.format("repos", name, "packages"))
+                    repo_dict[name] = packages
+                for distribution in repo.get('distributions'):
+                    publish_name = str.join('/', distribution.split('/')[:-1])
+                    publish_dict[(publish_name, repo.get('component'))] = name
+
+        return (repo_dict, publish_dict)
+
+    def remove_unused_packages(self, repo_dict):
+        for repo_name, packages in repo_dict.items():
+            if packages:
+                self.client.do_delete('/repos/%s/packages' % repo_name, data={'PackageRefs': packages})
+
     def cleanup_snapshots(self):
         snapshots = self.client.do_get('/snapshots', {'sort': 'time'})
         exclude = []
@@ -143,18 +203,30 @@ class Publish(object):
     """
     Single publish object
     """
-    def __init__(self, client, distribution, timestamp=None, recreate=False, load=False, merge_prefix='_'):
+    def __init__(self, client, distribution, timestamp=None, recreate=False, load=False, merge_prefix='_', storage="", architectures=[]):
         self.client = client
         self.recreate = recreate
+        self.architectures = architectures
+
+        # Try to get storage from distribution (eg. s3:mys3:xenial)
+        dist_split = distribution.split(':')
+        if len(dist_split) > 1:
+            self.storage = "{}:{}".format(dist_split[0], dist_split[1])
+            distribution = dist_split[-1]
+        else:
+            self.storage = storage
 
         dist_split = distribution.split('/')
         self.distribution = dist_split[-1]
         if dist_split[0] != self.distribution:
-            self.prefix = dist_split[0]
+            self.prefix = "_".join(dist_split[:-1])
         else:
             self.prefix = ''
 
         self.name = '%s/%s' % (self.prefix or '.', self.distribution)
+        self.full_name = "{}{}{}".format(self.storage+":" if self.storage else
+                                         "", self.prefix+"/" if self.prefix else
+                                         "", self.distribution)
 
         if not timestamp:
             self.timestamp = int(time.time())
@@ -187,14 +259,11 @@ class Publish(object):
 
         Return tuple (diff, equal) of dict {'component': ['snapshot']}
         """
-        lg.debug("Comparing publish %s and %s" % (self.name, other.name))
+        lg.debug("Comparing publish %s (%s) and %s (%s)" % (self.name, self.storage or "local", other.name, other.storage or "local"))
 
         diff, equal = ({}, {})
 
         for component, snapshots in self.components.items():
-            if component not in other.components:
-                continue
-
             if component not in list(other.components.keys()):
                 # Component is missing in other
                 diff[component] = snapshots
@@ -219,9 +288,10 @@ class Publish(object):
         publishes = self.client.do_get('/publish')
         for publish in publishes:
             if publish['Distribution'] == self.distribution and \
-                    publish['Prefix'] == (self.prefix or '.'):
+                    publish['Prefix'].replace("/", "_") == (self.prefix or '.') and \
+                    publish['Storage'] == self.storage:
                 return publish
-        raise NoSuchPublish("Publish %s does not exist" % self.name)
+        raise NoSuchPublish("Publish %s (%s) does not exist" % (self.name, self.storage or "local"))
 
     def _remove_snapshots(self, snapshots):
         for snapshot in snapshots:
@@ -238,6 +308,7 @@ class Publish(object):
         yaml_dict["publish"] = self.name
         yaml_dict["name"] = timestamp
         yaml_dict["components"] = []
+        yaml_dict["storage"] = self.storage
         for component, snapshots in self.components.items():
             packages = self.get_packages(component)
             package_dict = []
@@ -253,6 +324,79 @@ class Publish(object):
         with open(save_path, 'w') as save_file:
             yaml.dump(yaml_dict, save_file, default_flow_style=False)
 
+    def purge_publish(self, repo_dict, publish_dict, components=[], publish=False):
+
+        apt_pkg.init_system()
+
+        new_publish_snapshots = []
+
+        for snapshot in self.publish_snapshots:
+            # packages to be kept
+            processed = []
+            name = snapshot["Name"]
+            component = snapshot["Component"]
+            purge_packages = []
+            location = self.name.split('/')[0].replace('_', '/')
+
+            if (location, component) in publish_dict:
+                repo_name = publish_dict[(location, component)]
+            else:
+                new_publish_snapshots.append(snapshot)
+                continue
+
+            if components and component not in components:
+                new_publish_snapshots.append(snapshot)
+                if repo_dict:
+                    repo_dict[repo_name] = []
+                continue
+
+            packages = self.client.do_get('/{}/{}/{}'.format("snapshots", name, "packages"))
+            packages = sorted(packages, key=lambda x: self.parse_package_ref(x)[2], reverse=True, cmp=apt_pkg.version_compare)
+
+            for package in packages:
+                package_name = self.parse_package_ref(package)[1]
+
+                if package_name not in processed:
+                    processed.append(package_name)
+                    if repo_dict and repo_name in repo_dict and package in repo_dict[repo_name]:
+                        repo_dict[repo_name].remove(package)
+
+                    purge_packages.append(package)
+
+            if purge_packages != packages:
+                snapshot_name = '{}-{}'.format(name, 'purged')
+                try:
+                    lg.debug("Creating new snapshot: %s" % snapshot_name)
+                    self.client.do_post(
+                        '/snapshots',
+                        data={
+                            'Name': snapshot_name,
+                            'SourceSnapshots': [],
+                            'Description': 'Minimal snapshot from {}'.format(repo_name),
+                            'PackageRefs': purge_packages,
+                        }
+                    )
+
+                except AptlyException as e:
+                    if e.res.status_code == 404:
+                        raise Exception('Error while creating snapshot : {}'.format(repr(e)))
+                    else:
+                        lg.debug("Snapshot %s already exist" % snapshot_name)
+
+                new_publish_snapshots.append({
+                    'Component': component,
+                    'Name': snapshot_name
+                })
+            else:
+                new_publish_snapshots.append(snapshot)
+
+        if self.publish_snapshots != new_publish_snapshots:
+            self.publish_snapshots = new_publish_snapshots
+            if publish:
+                self.do_publish(recreate=False, merge_snapshots=False)
+
+        return repo_dict
+
     def restore_publish(self, config, components, recreate=False):
         """
         Restore publish from config file
@@ -331,6 +475,7 @@ class Publish(object):
         Load publish info from remote
         """
         publish = self._get_publish()
+        self.architectures = publish['Architectures']
         for source in publish['Sources']:
             component = source['Component']
             snapshot = source['Name']
@@ -469,16 +614,14 @@ class Publish(object):
             })
 
     def drop_publish(self):
-        lg.info("Deleting publish, distribution=%s" % self.name)
-
-        self.client.do_delete('/publish/%s/%s' % (self.prefix, self.distribution))
+        lg.info("Deleting publish, distribution=%s, storage=%s" % (self.name, self.storage or "local"))
+        self.client.do_delete('/publish/%s' % (self.full_name))
 
     def update_publish(self, force_overwrite=False, publish_contents=False):
-        lg.info("Updating publish, distribution=%s snapshots=%s" %
-                (self.name, self.publish_snapshots))
-
+        lg.info("Updating publish, distribution=%s storage=%s snapshots=%s" %
+                (self.name, self.storage or "local", self.publish_snapshots))
         self.client.do_put(
-            '/publish/%s/%s' % (self.prefix, self.distribution),
+            '/publish/%s' % (self.full_name),
             {
                 'Snapshots': self.publish_snapshots,
                 'ForceOverwrite': force_overwrite,
@@ -487,13 +630,16 @@ class Publish(object):
         )
 
     def create_publish(self, force_overwrite=False, publish_contents=False, architectures=None):
-        lg.info("Creating new publish, distribution=%s snapshots=%s, architectures=%s" %
-                (self.name, self.publish_snapshots, architectures))
+        lg.info("Creating new publish, distribution=%s storage=%s snapshots=%s, architectures=%s" %
+                (self.name, self.storage or "local", self.publish_snapshots, architectures))
 
         if self.prefix:
-            prefix = '/%s' % self.prefix
+            prefix = '%s%s' % ("/"+self.storage+":" or "/", self.prefix)
+        else:
+            prefix = '%s' % ("/"+self.storage+":" or "")
 
         opts = {
+            "Storage": self.storage,
             "SourceKind": "snapshot",
             "Distribution": self.distribution,
             "Sources": self.publish_snapshots,
@@ -501,8 +647,8 @@ class Publish(object):
             'SkipContents': not publish_contents,
         }
 
-        if architectures:
-            opts['Architectures'] = architectures
+        if architectures or self.architectures:
+            opts['Architectures'] = architectures or self.architectures
 
         self.client.do_post(
             '/publish%s' % (prefix or ''),
@@ -511,7 +657,8 @@ class Publish(object):
 
     def do_publish(self, recreate=False, no_recreate=False,
                    force_overwrite=False, publish_contents=False,
-                   architectures=None, merge_snapshots=True):
+                   architectures=None, merge_snapshots=True,
+                   only_latest=False, config=None, components=[]):
         if merge_snapshots:
             self.merge_snapshots()
         try:
@@ -519,9 +666,14 @@ class Publish(object):
         except NoSuchPublish:
             publish = False
 
+        if only_latest:
+            (_, publish_dict) = PublishManager.get_repo_information(config, self.client)
+            self.purge_publish([], publish_dict, components, False)
+
         if not publish:
             # New publish
-            self.create_publish(force_overwrite, publish_contents, architectures)
+            self.create_publish(force_overwrite, publish_contents,
+                                architectures or self.architectures)
         else:
             # Test if publish is up to date
             to_publish = [x['Name'] for x in self.publish_snapshots]
@@ -531,11 +683,12 @@ class Publish(object):
             published.sort()
 
             if recreate:
-                lg.info("Recreating publish %s" % self.name)
+                lg.info("Recreating publish %s (%s)" % (self.name, self.storage or "local"))
                 self.drop_publish()
-                self.create_publish(force_overwrite, publish_contents, architectures)
+                self.create_publish(force_overwrite, publish_contents,
+                                    architectures or self.architectures)
             elif to_publish == published:
-                lg.info("Publish %s is up to date" % self.name)
+                lg.info("Publish %s (%s) is up to date" % (self.name, self.storage or "local"))
             else:
                 try:
                     self.update_publish(force_overwrite, publish_contents)
@@ -544,8 +697,10 @@ class Publish(object):
                         # Publish exists but we are going to add some new
                         # components. Unfortunately only way is to recreate it
                         if no_recreate:
-                            lg.error("Cannot update publish %s (adding new components?), falling back to recreating it is disabled so skipping.")
+                            lg.error("Cannot update publish %s (adding new components?), falling back to recreating it is disabled so skipping." % self.full_name)
                         else:
-                            lg.warning("Cannot update publish %s (adding new components?), falling back to recreating it" % self.name)
+                            lg.warning("Cannot update publish %s (adding new components?), falling back to recreating it" % self.full_name)
                             self.drop_publish()
-                            self.create_publish(force_overwrite, publish_contents, architectures)
+                            self.create_publish(force_overwrite,
+                                                publish_contents,
+                                                architectures or self.architectures)
diff --git a/aptly/publisher/__main__.py b/aptly/publisher/__main__.py
index e0d0b3f..be270b0 100644
--- a/aptly/publisher/__main__.py
+++ b/aptly/publisher/__main__.py
@@ -14,7 +14,7 @@ import copy
 import re
 
 logging.basicConfig()
-lg_root = logging.getLogger('aptly')
+lg_aptly = logging.getLogger('aptly')
 lg = logging.getLogger('aptly-publisher')
 
 
@@ -33,7 +33,7 @@ def main():
     parser = argparse.ArgumentParser("aptly-publisher")
 
     group_common = parser.add_argument_group("Common")
-    parser.add_argument('action', help="Action to perform (publish, promote, cleanup, restore, dump)")
+    parser.add_argument('action', help="Action to perform (publish, promote, cleanup, restore, dump, purge)")
     group_common.add_argument('-v', '--verbose', action="store_true")
     group_common.add_argument('-d', '--debug', action="store_true")
     group_common.add_argument('--dry', '--dry-run', action="store_true")
@@ -43,37 +43,44 @@ def main():
     group_common.add_argument('--no-recreate', action="store_true", help="Never recreate publish (even when we are adding new components where it's the only option)")
     group_common.add_argument('--force-overwrite', action="store_true", help="Overwrite files in pool/ directory without notice")
     group_common.add_argument('--publish-contents', action="store_true", default=False, help="Publish contents. It's slow so disabled by default to support large repositories.")
-    group_common.add_argument('--components', nargs='+', help="Space-separated list of components to promote or restore")
+    group_common.add_argument('--components', nargs='+', help="Space-separated list of components to promote or restore or to purge (in case of purge)")
+    group_common.add_argument('--storage', default="", help="Storage backend to use for all publishes, can be empty (filesystem, default), swift:[name] or s3:[name]")
+    group_common.add_argument('-p', '--publish', nargs='+', help="Space-separated list of publish")
 
     group_publish = parser.add_argument_group("Action 'publish'")
     group_publish.add_argument('-c', '--config', default="/etc/aptly/publisher.yaml", help="Configuration YAML file")
     group_publish.add_argument('--dists', nargs='+', help="Space-separated list of distribution to work with (including prefix), default all.")
     group_publish.add_argument('--architectures', nargs='+', help="List of architectures to publish (also determined by config, defaults to amd64, i386)")
+    group_publish.add_argument('--only-latest', action="store_true", default=False, help="Publish only latest packages of every publishes")
 
     group_promote = parser.add_argument_group("Action 'promote'")
-    group_promote.add_argument('--source', help="Source publish to take snapshots from")
-    group_promote.add_argument('--target', help="Target publish to update")
+    group_promote.add_argument('--source', help="Source publish to take snapshots from. Can be regular expression, eg. jessie(/?.*)/nightly")
+    group_promote.add_argument('--target', help="Target publish to update. Must be format if source is regex, eg. jessie{0}/testing")
     group_promote.add_argument('--packages', nargs='+', help="Space-separated list of packages to promote")
     group_promote.add_argument('--diff', action="store_true", help="Show differences between publishes (snapshots to be updated)")
 
+    group_purge = parser.add_argument_group("Purge")
+    group_purge.add_argument('--hard', action="store_true", default=False, help="Remove all unused packages and snapshots")
+
     group_restore = parser.add_argument_group("Action 'restore'")
     group_restore.add_argument('-r', '--restore-file', help="File used to restore publish")
 
     group_save = parser.add_argument_group("Action 'dump'")
-    group_save.add_argument('-s', '--save-dir', help="Path of where dump of publish will be done")
+    group_save.add_argument('-s', '--save-dir', default='.', help="Path of where dump of publish will be done")
     group_save.add_argument('-x', '--prefix', default="saved-", help="Prefix for dump files' names")
-    group_save.add_argument('-p', '--publish', nargs='+', help="Space-separated list of publishes to save")
 
     args = parser.parse_args()
 
     if args.verbose:
-        lg_root.setLevel(logging.INFO)
+        lg_aptly.setLevel(logging.INFO)
+        lg.setLevel(logging.INFO)
 
     if args.debug:
-        lg_root.setLevel(logging.DEBUG)
+        lg_aptly.setLevel(logging.DEBUG)
+        lg.setLevel(logging.DEBUG)
 
     client = Aptly(args.url, dry=args.dry, timeout=args.timeout)
-    publishmgr = PublishManager(client)
+    publishmgr = PublishManager(client, storage=args.storage)
 
     if args.action == 'publish':
         action_publish(client, publishmgr, config_file=args.config,
@@ -81,8 +88,11 @@ def main():
                        no_recreate=args.no_recreate,
                        force_overwrite=args.force_overwrite,
                        publish_contents=args.publish_contents,
-                       publish_names=args.dists,
-                       architectures=args.architectures)
+                       publish_names=args.publish,
+                       publish_dist=args.dists,
+                       architectures=args.architectures,
+                       only_latest=args.only_latest,
+                       components=args.components)
     elif args.action == 'promote':
         if not args.source or not args.target:
             parser.error("Action 'promote' requires both --source and --target arguments")
@@ -90,28 +100,31 @@ def main():
                        components=args.components, recreate=args.recreate,
                        no_recreate=args.no_recreate, packages=args.packages,
                        diff=args.diff, force_overwrite=args.force_overwrite,
-                       publish_contents=args.publish_contents)
+                       publish_contents=args.publish_contents,
+                       storage=args.storage)
     elif args.action == 'cleanup':
         publishmgr.cleanup_snapshots()
         sys.exit(0)
     elif args.action == 'dump':
         action_dump(publishmgr, args.save_dir, args.publish, args.prefix)
+    elif args.action == 'purge':
+        config = load_config(args.config)
+        publishmgr.do_purge(config, components=args.components, hard_purge=args.hard)
     elif args.action == "restore":
         action_restore(publishmgr, components=args.components,
                        recreate=args.recreate,
                        restore_file=args.restore_file)
 
-
-def action_promote(client, source, target, components=None, recreate=False,
-                   no_recreate=False, packages=None, diff=False,
-                   force_overwrite=False, publish_contents=False):
+def promote(client, source, target, components=None, recreate=False,
+            no_recreate=False, packages=None, diff=False, force_overwrite=False,
+            publish_contents=False, storage=""):
     try:
-        publish_source = Publish(client, source, load=True)
+        publish_source = Publish(client, source, load=True, storage=storage)
     except NoSuchPublish as e:
         lg.error(e)
         sys.exit(1)
 
-    publish_target = Publish(client, target)
+    publish_target = Publish(client, target, storage=storage)
     try:
         publish_target.load()
     except NoSuchPublish:
@@ -129,15 +142,16 @@ def action_promote(client, source, target, components=None, recreate=False,
     # Check if target is not already up to date
     diffs, equals = publish_source.compare(publish_target, components=components)
     if not diffs:
-        lg.warn("Target is up to date with source publish")
+        lg.warn("Target {0} is up to date with source publish {1}".format(target, source))
         if not recreate:
-            lg.warn("There is nothing to do")
+            lg.warn("There is nothing to do with target publish {0}".format(target))
             sys.exit(0)
         else:
-            lg.warn("Recreating publish on your command")
+            lg.warn("Recreating target publish {0} on your command".format(target))
 
     if packages:
         # We are only going to promote specific packages
+        packages_promoted = False
         for component, snapshots in publish_source.components.items():
             if components and component not in components:
                 # We don't want to promote this component
@@ -159,6 +173,10 @@ def action_promote(client, source, target, components=None, recreate=False,
                     }
                 )
                 publish_target.components[component].append(snapshot_name)
+                packages_promoted = True
+        if not packages_promoted:
+            lg.error("No packages were promoted : are you sure components: %s and packages: %s are valid?" % (components, packages))
+            sys.exit(1)
     else:
         # Publish whole components
         # Use source publish components structure for creation of target publish
@@ -174,8 +192,50 @@ def action_promote(client, source, target, components=None, recreate=False,
 
     publish_target.do_publish(recreate=recreate, no_recreate=no_recreate,
                               force_overwrite=force_overwrite,
-                              publish_contents=publish_contents)
+                              publish_contents=publish_contents,
+                              architectures=publish_source.architectures)
 
+def find_publishes(client, source, target):
+    ret = []
+    if not re.search(r'{[0-9]+}', target):
+        lg.error("Source publish is regular expression but target does not refer any match groups. See help for more info.")
+        sys.exit(1)
+    lg.debug("Looking for source publishes matching regular expression: {0}".format(source))
+    publishes = client.do_get('/publish')
+    re_source = re.compile(source)
+    for publish in publishes:
+        name = "{}{}{}".format(publish['Storage']+":" if publish['Storage']
+                                else "", publish['Prefix']+"/" if
+                                publish['Prefix'] else "",
+                                publish['Distribution'])
+        match = re_source.match(name)
+        if match:
+            try:
+                target_parsed = target.format(*match.groups())
+            except IndexError:
+                lg.error("Can't format target publish {0} using groups {1}".format(target_parsed, match.groups()))
+                sys.exit(1)
+            ret.append((name, target_parsed))
+    return ret
+
+
+def action_promote(client, **kwargs):
+    # Determine if source is regular expression with group, in this case, we
+    # will work with multiple publishes
+    if re.search(r'\(.*\)', kwargs['source']):
+        for publish in find_publishes(client, kwargs['source'], kwargs['target']):
+            source = publish[0]
+            target = publish[1]
+            lg.info("Found source publish matching regex, promoting {0} to {1}".format(source, target))
+            kwargs_copy = kwargs.copy()
+            kwargs_copy['source'] = source
+            kwargs_copy['target'] = target
+            try:
+                promote(client, **kwargs_copy)
+            except SystemExit:
+                pass
+    else:
+        promote(client, **kwargs)
 
 def action_dump(publishmgr, path, publish_to_save, prefix):
     publishmgr.dump_publishes(publish_to_save, path, prefix)
@@ -188,7 +248,7 @@ def action_restore(publishmgr, components, restore_file, recreate):
 def action_diff(source, target, components=[], packages=True):
     diff, equal = source.compare(target, components=components)
     if not diff:
-        print("Target is up to date with source publish")
+        print("Target {0} is up to date with source publish {1}".format(target.full_name.replace('_', '/'), source.full_name.replace('_', '/')))
         return
 
     print("\033[1;36m= Differencies per component\033[m")
@@ -239,8 +299,8 @@ def action_diff(source, target, components=[], packages=True):
 
 def action_publish(client, publishmgr, config_file, recreate=False,
                    no_recreate=False, force_overwrite=False,
-                   publish_contents=False, publish_names=None,
-                   architectures=None):
+                   publish_contents=False, publish_dist=None, publish_names=None,
+                   architectures=None, only_latest=False, components=[]):
     if not architectures:
         architectures = []
     snapshots = client.do_get('/snapshots', {'sort': 'time'})
@@ -253,6 +313,7 @@ def action_publish(client, publishmgr, config_file, recreate=False,
         publishmgr.add(
             component=repo.get('component', 'main'),
             distributions=repo['distributions'],
+            storage=repo.get('storage', ""),
             snapshot=snapshot
         )
         for arch in repo.get('architectures', []):
@@ -266,6 +327,7 @@ def action_publish(client, publishmgr, config_file, recreate=False,
         publishmgr.add(
             component=repo.get('component', 'main'),
             distributions=repo['distributions'],
+            storage=repo.get('storage', ""),
             snapshot=snapshot
         )
         for arch in repo.get('architectures', []):
@@ -274,8 +336,10 @@ def action_publish(client, publishmgr, config_file, recreate=False,
 
     publishmgr.do_publish(recreate=recreate, no_recreate=no_recreate,
                           force_overwrite=force_overwrite,
-                          publish_contents=publish_contents,
-                          names=publish_names, architectures=architectures)
+                          publish_contents=publish_contents, dist=publish_dist,
+                          names=publish_names, architectures=architectures,
+                          only_latest=only_latest, config=config,
+                          components=components)
 
 
 if __name__ == '__main__':
diff --git a/setup.py b/setup.py
index c99c1df..18b82eb 100755
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ long_desc = open('README.rst').read()
 
 setup(
     name="python-aptly",
-    version="0.10",
+    version="0.12.4",
     description="Aptly REST API client and tooling",
     long_description=long_desc,
     author="Filip Pytloun",
@@ -18,6 +18,7 @@ setup(
     install_requires=[
         'requests>=0.14',
         'PyYaml',
+        'python-apt',
     ],
     entry_points={
         'console_scripts': ['aptly-publisher = aptly.publisher.__main__:main']

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/python-aptly.git



More information about the Python-modules-commits mailing list