[Python-modules-commits] [python-aptly] 01/03: Import python-aptly_0.12.10.orig.tar.gz

Filip Pytloun fpytloun-guest at moszumanska.debian.org
Fri Jan 26 23:18:42 UTC 2018


This is an automated email from the git hooks/post-receive script.

fpytloun-guest pushed a commit to branch master
in repository python-aptly.

commit 7429af1c16e3eea8528384040bd4293a8c1cf009
Author: Filip Pytloun <filip at pytloun.cz>
Date:   Sat Jan 27 00:13:05 2018 +0100

    Import python-aptly_0.12.10.orig.tar.gz
---
 aptly/decorators.py         | 25 ++++++++++++++
 aptly/publisher/__init__.py | 81 +++++++++++++++++++++++++++++++--------------
 aptly/publisher/__main__.py | 18 ++++++----
 setup.py                    |  2 +-
 4 files changed, 95 insertions(+), 31 deletions(-)

diff --git a/aptly/decorators.py b/aptly/decorators.py
new file mode 100644
index 0000000..e82fc11
--- /dev/null
+++ b/aptly/decorators.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+
+class CachedMethod(object):
+    """
+    Decorator for caching of function results
+    """
+    def __init__(self, function):
+        self.function = function
+        self.mem = {}
+
+    def __call__(self, *args, **kwargs):
+        cached = kwargs.pop('cached', True)
+        if cached is True:
+            if (args, str(kwargs)) in self.mem:
+                return self.mem[args, str(kwargs)]
+
+        tmp = self.function(*args, **kwargs)
+        self.mem[args, str(kwargs)] = tmp
+        return tmp
+
+    def __get__(self, obj, objtype):
+        """ Support instance methods """
+        import functools
+        return functools.partial(self.__call__, obj)
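
For context, a minimal sketch of how the new CachedMethod decorator behaves.
The Example class and slow_lookup method are illustrative assumptions, not
part of this commit; only the decorator itself is. Results are memoized per
argument tuple, and passing cached=False forces a fresh call while still
refreshing the cache:

    from aptly.decorators import CachedMethod

    class Example(object):
        @CachedMethod
        def slow_lookup(self, key):
            # stands in for an expensive API round-trip
            print("computing %s" % key)
            return key.upper()

    e = Example()
    e.slow_lookup("foo")                # computed and cached
    e.slow_lookup("foo")                # served from the cache, no print
    e.slow_lookup("foo", cached=False)  # recomputed, cache entry refreshed
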
diff --git a/aptly/publisher/__init__.py b/aptly/publisher/__init__.py
index f1f52f6..dc788b4 100644
--- a/aptly/publisher/__init__.py
+++ b/aptly/publisher/__init__.py
@@ -6,6 +6,7 @@ import logging
 import yaml
 import apt_pkg
 from aptly.exceptions import AptlyException, NoSuchPublish
+from aptly.decorators import CachedMethod
 
 lg = logging.getLogger(__name__)
 
@@ -150,7 +151,7 @@ class PublishManager(object):
                 if components and repo.get('component') not in components:
                     continue
                 if fill_repo and origin == 'repo':
-                    packages = client.do_get('/{}/{}/{}'.format("repos", name, "packages"))
+                    packages = Publish._get_packages(client, "repos", name)
                     repo_dict[name] = packages
                 for distribution in repo.get('distributions'):
                     publish_name = str.join('/', distribution.split('/')[:-1])
@@ -281,11 +282,26 @@ class Publish(object):
 
         return (diff, equal)
 
+    @staticmethod
+    @CachedMethod
+    def _get_packages(client, source_type, source_name):
+        return client.do_get('/{}/{}/packages'.format(source_type, source_name))
+
+    @staticmethod
+    @CachedMethod
+    def _get_publishes(client):
+        return client.do_get('/publish')
+
+    @staticmethod
+    @CachedMethod
+    def _get_snapshots(client):
+        return client.do_get('/snapshots', {'sort': 'time'})
+
     def _get_publish(self):
         """
         Find this publish on remote
         """
-        publishes = self.client.do_get('/publish')
+        publishes = self._get_publishes(self.client)
         for publish in publishes:
             if publish['Distribution'] == self.distribution and \
                     publish['Prefix'].replace("/", "_") == (self.prefix or '.') and \
@@ -301,7 +317,6 @@ class Publish(object):
         """
         Serialize publish in YAML
         """
-        name = self.name.replace('/', '-')
         timestamp = time.strftime("%Y%m%d%H%M%S")
 
         yaml_dict = {}
@@ -320,6 +335,7 @@ class Publish(object):
             yaml_dict["components"].append({'component': component, 'snapshot': snapshot['Name'],
                                             'description': snapshot['Description'], 'packages': package_dict})
 
+        name = self.name.replace('/', '-')
         lg.info("Saving publish %s in %s" % (name, save_path))
         with open(save_path, 'w') as save_file:
             yaml.dump(yaml_dict, save_file, default_flow_style=False)
@@ -350,7 +366,7 @@ class Publish(object):
                     repo_dict[repo_name] = []
                 continue
 
-            packages = self.client.do_get('/{}/{}/{}'.format("snapshots", name, "packages"))
+            packages = self._get_packages(self.client, "snapshots", name)
             packages = sorted(packages, key=lambda x: self.parse_package_ref(x)[2], reverse=True, cmp=apt_pkg.version_compare)
 
             for package in packages:
@@ -454,6 +470,8 @@ class Publish(object):
                     # snapshots because the file is corrupted
                     self._remove_snapshots(created_snapshots)
                     raise Exception("Source snapshot or packages don't exist")
+                else:
+                    raise
 
             new_publish_snapshots.append({
                 'Component': component_name,
@@ -501,7 +519,7 @@ class Publish(object):
                 # We don't want packages for this component
                 continue
 
-            component_refs = self.client.do_get('/snapshots/%s/packages' % snapshot['Name'])
+            component_refs = self._get_packages(self.client, "snapshots", snapshot['Name'])
             if packages:
                 # Filter package names
                 for ref in component_refs:
@@ -534,7 +552,7 @@ class Publish(object):
         """
         Find snapshot on remote by name or regular expression
         """
-        remote_snapshots = self.client.do_get('/snapshots', {'sort': 'time'})
+        remote_snapshots = self._get_snapshots(self.client)
         for remote in reversed(remote_snapshots):
             if remote["Name"] == name or \
                     re.match(name, remote["Name"]):
@@ -595,18 +613,24 @@ class Publish(object):
             package_refs = []
             for snapshot in snapshots:
                 # Get package refs from each snapshot
-                packages = self.client.do_get('/snapshots/%s/packages' % snapshot)
+                packages = self._get_packages(self.client, "snapshots", snapshot)
                 package_refs.extend(packages)
 
-            self.client.do_post(
-                '/snapshots',
-                data={
-                    'Name': snapshot_name,
-                    'SourceSnapshots': snapshots,
-                    'Description': "Merged from sources: %s" % ', '.join("'%s'" % snap for snap in snapshots),
-                    'PackageRefs': package_refs,
-                }
-            )
+            try:
+                self.client.do_post(
+                    '/snapshots',
+                    data={
+                        'Name': snapshot_name,
+                        'SourceSnapshots': snapshots,
+                        'Description': "Merged from sources: %s" % ', '.join("'%s'" % snap for snap in snapshots),
+                        'PackageRefs': package_refs,
+                    }
+                )
+            except AptlyException as e:
+                if e.res.status_code == 400:
+                    lg.warning("Error creating snapshot %s, assuming it already exists" % snapshot_name)
+                else:
+                    raise
 
             self.publish_snapshots.append({
                 'Component': component,
@@ -617,7 +641,8 @@ class Publish(object):
         lg.info("Deleting publish, distribution=%s, storage=%s" % (self.name, self.storage or "local"))
         self.client.do_delete('/publish/%s' % (self.full_name))
 
-    def update_publish(self, force_overwrite=False, publish_contents=False):
+    def update_publish(self, force_overwrite=False, publish_contents=False,
+                       acquire_by_hash=True):
         lg.info("Updating publish, distribution=%s storage=%s snapshots=%s" %
                 (self.name, self.storage or "local", self.publish_snapshots))
         self.client.do_put(
@@ -626,10 +651,12 @@ class Publish(object):
                 'Snapshots': self.publish_snapshots,
                 'ForceOverwrite': force_overwrite,
                 'SkipContents': not publish_contents,
+                'AcquireByHash': acquire_by_hash,
             }
         )
 
-    def create_publish(self, force_overwrite=False, publish_contents=False, architectures=None):
+    def create_publish(self, force_overwrite=False, publish_contents=False,
+                       architectures=None, acquire_by_hash=True):
         lg.info("Creating new publish, distribution=%s storage=%s snapshots=%s, architectures=%s" %
                 (self.name, self.storage or "local", self.publish_snapshots, architectures))
 
@@ -645,6 +672,7 @@ class Publish(object):
             "Sources": self.publish_snapshots,
             "ForceOverwrite": force_overwrite,
             'SkipContents': not publish_contents,
+            'AcquireByHash': acquire_by_hash,
         }
 
         if architectures or self.architectures:
@@ -657,8 +685,9 @@ class Publish(object):
 
     def do_publish(self, recreate=False, no_recreate=False,
                    force_overwrite=False, publish_contents=False,
-                   architectures=None, merge_snapshots=True,
-                   only_latest=False, config=None, components=[]):
+                   acquire_by_hash=False, architectures=None,
+                   merge_snapshots=True, only_latest=False, config=None,
+                   components=[]):
         if merge_snapshots:
             self.merge_snapshots()
         try:
@@ -672,8 +701,8 @@ class Publish(object):
 
         if not publish:
             # New publish
-            self.create_publish(force_overwrite, publish_contents,
-                                architectures or self.architectures)
+            self.create_publish(force_overwrite, publish_contents,
+                                architectures or self.architectures, acquire_by_hash)
         else:
             # Test if publish is up to date
             to_publish = [x['Name'] for x in self.publish_snapshots]
@@ -686,12 +715,14 @@ class Publish(object):
                 lg.info("Recreating publish %s (%s)" % (self.name, self.storage or "local"))
                 self.drop_publish()
                 self.create_publish(force_overwrite, publish_contents,
-                                    architectures or self.architectures)
+                                    architectures or self.architectures,
+                                    acquire_by_hash)
             elif to_publish == published:
                 lg.info("Publish %s (%s) is up to date" % (self.name, self.storage or "local"))
             else:
                 try:
-                    self.update_publish(force_overwrite, publish_contents)
+                    self.update_publish(force_overwrite, publish_contents,
+                                        acquire_by_hash)
                 except AptlyException as e:
                     if e.res.status_code == 404:
                         # Publish exists but we are going to add some new
@@ -704,3 +735,5 @@ class Publish(object):
                             self.create_publish(force_overwrite,
                                                 publish_contents,
                                                 architectures or self.architectures)
+                    else:
+                        raise
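
A rough usage sketch of the acquire_by_hash plumbing added above. The
Publish() and do_publish() signatures come from this diff; the client
construction and the publish name "prod/jessie" are assumptions for
illustration only. When acquire_by_hash is True, the flag is sent as
'AcquireByHash' in the create and update publish API calls:

    from aptly.client import Aptly      # client module assumed, not shown in this diff
    from aptly.publisher import Publish

    client = Aptly("http://localhost:8080")   # constructor arguments are an assumption
    publish = Publish(client, "prod/jessie", load=True)
    publish.do_publish(force_overwrite=False, publish_contents=False,
                       acquire_by_hash=True)
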
diff --git a/aptly/publisher/__main__.py b/aptly/publisher/__main__.py
index be270b0..055732c 100644
--- a/aptly/publisher/__main__.py
+++ b/aptly/publisher/__main__.py
@@ -43,6 +43,7 @@ def main():
     group_common.add_argument('--no-recreate', action="store_true", help="Never recreate publish (even when we are adding new components where it's the only option)")
     group_common.add_argument('--force-overwrite', action="store_true", help="Overwrite files in pool/ directory without notice")
     group_common.add_argument('--publish-contents', action="store_true", default=False, help="Publish contents. It's slow so disabled by default to support large repositories.")
+    group_common.add_argument('--acquire-by-hash', action="store_true", default=False, help="Use Acquire-by-hash option. This may help with repository consistency.")
     group_common.add_argument('--components', nargs='+', help="Space-separated list of components to promote or restore or to purge (in case of purge)")
     group_common.add_argument('--storage', default="", help="Storage backend to use for all publishes, can be empty (filesystem, default), swift:[name] or s3:[name]")
     group_common.add_argument('-p', '--publish', nargs='+', help="Space-separated list of publish")
@@ -88,6 +89,7 @@ def main():
                        no_recreate=args.no_recreate,
                        force_overwrite=args.force_overwrite,
                        publish_contents=args.publish_contents,
+                       acquire_by_hash=args.acquire_by_hash,
                        publish_names=args.publish,
                        publish_dist=args.dists,
                        architectures=args.architectures,
@@ -101,6 +103,7 @@ def main():
                        no_recreate=args.no_recreate, packages=args.packages,
                        diff=args.diff, force_overwrite=args.force_overwrite,
                        publish_contents=args.publish_contents,
+                       acquire_by_hash=args.acquire_by_hash,
                        storage=args.storage)
     elif args.action == 'cleanup':
         publishmgr.cleanup_snapshots()
@@ -117,7 +120,7 @@ def main():
 
 def promote(client, source, target, components=None, recreate=False,
             no_recreate=False, packages=None, diff=False, force_overwrite=False,
-            publish_contents=False, storage=""):
+            publish_contents=False, acquire_by_hash=False, storage=""):
     try:
         publish_source = Publish(client, source, load=True, storage=storage)
     except NoSuchPublish as e:
@@ -193,6 +196,7 @@ def promote(client, source, target, components=None, recreate=False,
     publish_target.do_publish(recreate=recreate, no_recreate=no_recreate,
                               force_overwrite=force_overwrite,
                               publish_contents=publish_contents,
+                              acquire_by_hash=acquire_by_hash,
                               architectures=publish_source.architectures)
 
 def find_publishes(client, source, target):
@@ -201,7 +205,7 @@ def find_publishes(client, source, target):
         lg.error("Source publish is regular expression but target does not refer any match groups. See help for more info.")
         sys.exit(1)
     lg.debug("Looking for source publishes matching regular expression: {0}".format(source))
-    publishes = client.do_get('/publish')
+    publishes = Publish._get_publishes(client)
     re_source = re.compile(source)
     for publish in publishes:
         name = "{}{}{}".format(publish['Storage']+":" if publish['Storage']
@@ -213,7 +217,7 @@ def find_publishes(client, source, target):
             try:
                 target_parsed = target.format(*match.groups())
             except IndexError:
-                lg.error("Can't format target publish {0} using groups {1}".format(target_parsed, match.groups()))
+                lg.error("Can't format target publish {0} using groups {1}".format(target, match.groups()))
                 sys.exit(1)
             ret.append((name, target_parsed))
     return ret
@@ -299,11 +303,12 @@ def action_diff(source, target, components=[], packages=True):
 
 def action_publish(client, publishmgr, config_file, recreate=False,
                    no_recreate=False, force_overwrite=False,
-                   publish_contents=False, publish_dist=None, publish_names=None,
-                   architectures=None, only_latest=False, components=[]):
+                   publish_contents=False, acquire_by_hash=False,
+                   publish_dist=None, publish_names=None, architectures=None,
+                   only_latest=False, components=[]):
     if not architectures:
         architectures = []
-    snapshots = client.do_get('/snapshots', {'sort': 'time'})
+    snapshots = Publish._get_snapshots(client)
 
     config = load_config(config_file)
     for name, repo in config.get('mirror', {}).items():
@@ -336,6 +341,7 @@ def action_publish(client, publishmgr, config_file, recreate=False,
 
     publishmgr.do_publish(recreate=recreate, no_recreate=no_recreate,
                           force_overwrite=force_overwrite,
+                          acquire_by_hash=acquire_by_hash,
                           publish_contents=publish_contents, dist=publish_dist,
                           names=publish_names, architectures=architectures,
                           only_latest=only_latest, config=config,
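
For reference, a small sketch of the regex-to-target expansion guarded by the
corrected error message in find_publishes() above (the publish names and
patterns here are made up). The source publish name is matched against a
regular expression and its groups are substituted into the target pattern;
on a bad group reference, the unformatted target pattern is now reported
instead of referencing target_parsed before assignment:

    import re

    source = r"nightly/(.*)"      # illustrative source publish regex
    target = "stable/{0}"         # illustrative target pattern
    name = "nightly/jessie"       # illustrative existing publish name

    match = re.match(source, name)
    if match:
        try:
            target_parsed = target.format(*match.groups())
        except IndexError:
            # mirrors the fixed log message: report the raw pattern,
            # since target_parsed was never assigned
            print("Can't format target publish {0} using groups {1}".format(
                target, match.groups()))
        else:
            print(target_parsed)  # -> stable/jessie
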
diff --git a/setup.py b/setup.py
index 18b82eb..3e7351c 100755
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ long_desc = open('README.rst').read()
 
 setup(
     name="python-aptly",
-    version="0.12.4",
+    version="0.12.10",
     description="Aptly REST API client and tooling",
     long_description=long_desc,
     author="Filip Pytloun",

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/python-aptly.git


