[Python-modules-commits] [elasticsearch-curator] 08/14: merge patched into master

Apollon Oikonomopoulos apoikos@moszumanska.debian.org
Tue Jul 5 16:22:09 UTC 2016


This is an automated email from the git hooks/post-receive script.

apoikos pushed a commit to branch master
in repository elasticsearch-curator.

commit 618b4da2d631499d14bb7bcb44c4bb2f57245c00
Merge: 25319d2 b913036
Author: Apollon Oikonomopoulos <apoikos@debian.org>
Date:   Thu Jun 30 12:05:27 2016 +0200

    merge patched into master

 curator/__init__.py                                |   1 +
 curator/es_repo_mgr.py                             |   4 +
 curator/repomgrcli.py                              | 176 +++++++++
 debian/.git-dpm                                    |   4 +-
 .../0002-Restore-es_repo_mgr-endpoint.patch        | 436 +++++++++++++++++++++
 debian/patches/series                              |   1 +
 run_es_repo_mgr.py                                 |   8 +
 setup.py                                           |   6 +-
 test/integration/test_es_repo_mgr.py               | 170 ++++++++
 9 files changed, 802 insertions(+), 4 deletions(-)

diff --cc debian/.git-dpm
index 6012465,0000000..2f5dece
mode 100644,000000..100644
--- a/debian/.git-dpm
+++ b/debian/.git-dpm
@@@ -1,11 -1,0 +1,11 @@@
 +# see git-dpm(1) from git-dpm package
- a5fa71c124cb8786bb3b65b5ed57075305bba447
- a5fa71c124cb8786bb3b65b5ed57075305bba447
++b913036b27edebc272e6b3712c2ec6366d37a9f9
++b913036b27edebc272e6b3712c2ec6366d37a9f9
 +ab0a73cc20ea7786a509a8e69d14f18913d24913
 +ab0a73cc20ea7786a509a8e69d14f18913d24913
 +elasticsearch-curator_4.0.0.orig.tar.gz
 +a8a4ff74875dddf4784a97128038302ffb997c09
 +104518
 +debianTag="debian/%e%v"
 +patchedTag="patched/%e%v"
 +upstreamTag="upstream/%e%u"
diff --cc debian/patches/0002-Restore-es_repo_mgr-endpoint.patch
index 0000000,0000000..435a982
new file mode 100644
--- /dev/null
+++ b/debian/patches/0002-Restore-es_repo_mgr-endpoint.patch
@@@ -1,0 -1,0 +1,436 @@@
++From b913036b27edebc272e6b3712c2ec6366d37a9f9 Mon Sep 17 00:00:00 2001
++From: Aaron Mildenstein <aaron@mildensteins.com>
++Date: Wed, 29 Jun 2016 15:25:40 -0600
++Subject: Restore es_repo_mgr endpoint
++
++This is a stopgap measure until I either add other scripts based on this API, or add repository creation/deletion actions, or both.
++
++It remains undocumented as it was never documented fully before.
++---
++ curator/__init__.py                  |   1 +
++ curator/es_repo_mgr.py               |   4 +
++ curator/repomgrcli.py                | 176 +++++++++++++++++++++++++++++++++++
++ run_es_repo_mgr.py                   |   8 ++
++ setup.py                             |   6 +-
++ test/integration/test_es_repo_mgr.py | 170 +++++++++++++++++++++++++++++++++
++ 6 files changed, 363 insertions(+), 2 deletions(-)
++ create mode 100755 curator/es_repo_mgr.py
++ create mode 100644 curator/repomgrcli.py
++ create mode 100755 run_es_repo_mgr.py
++ create mode 100644 test/integration/test_es_repo_mgr.py
++
++diff --git a/curator/__init__.py b/curator/__init__.py
++index 4e4f3d8..a9b39d3 100644
++--- a/curator/__init__.py
+++++ b/curator/__init__.py
++@@ -6,3 +6,4 @@ from .indexlist import IndexList
++ from .snapshotlist import SnapshotList
++ from .actions import *
++ from .cli import *
+++from .repomgrcli import *
++diff --git a/curator/es_repo_mgr.py b/curator/es_repo_mgr.py
++new file mode 100755
++index 0000000..39be573
++--- /dev/null
+++++ b/curator/es_repo_mgr.py
++@@ -0,0 +1,4 @@
+++from . import repomgrcli
+++
+++def main():
+++    repomgrcli.repo_mgr_cli()
++diff --git a/curator/repomgrcli.py b/curator/repomgrcli.py
++new file mode 100644
++index 0000000..cfc8c4c
++--- /dev/null
+++++ b/curator/repomgrcli.py
++@@ -0,0 +1,176 @@
+++import elasticsearch
+++import click
+++import re
+++import sys
+++import logging
+++from .settings import CLIENT_DEFAULTS, LOGGING_DEFAULTS
+++from .exceptions import *
+++from .utils import *
+++from ._version import __version__
+++from .logtools import LogInfo
+++
+++logger = logging.getLogger(__name__)
+++
+++try:
+++    from logging import NullHandler
+++except ImportError:
+++    from logging import Handler
+++
+++    class NullHandler(Handler):
+++        def emit(self, record):
+++            pass
+++
+++def delete_callback(ctx, param, value):
+++    if not value:
+++        ctx.abort()
+++
+++def fix_hosts(ctx):
+++    if "host" in ctx.parent.params:
+++        ctx.parent.params['hosts'] = ctx.parent.params['host']
+++
+++def show_repos(client):
+++    for repository in sorted(get_repository(client, '_all').keys()):
+++        print('{0}'.format(repository))
+++    sys.exit(0)
+++
+++@click.command(short_help='Filesystem Repository')
+++@click.option('--repository', required=True, type=str, help='Repository name')
+++@click.option('--location', required=True, type=str,
+++            help='Shared file-system location. Must match remote path, & be accessible to all master & data nodes')
+++@click.option('--compression', type=bool, default=True, show_default=True,
+++            help='Enable/Disable metadata compression.')
+++@click.option('--chunk_size', type=str,
+++            help='Chunk size, e.g. 1g, 10m, 5k. [unbounded]')
+++@click.option('--max_restore_bytes_per_sec', type=str, default='20mb',
+++            show_default=True,
+++            help='Throttles per node restore rate (per second).')
+++@click.option('--max_snapshot_bytes_per_sec', type=str, default='20mb',
+++            show_default=True,
+++            help='Throttles per node snapshot rate (per second).')
+++@click.pass_context
+++def fs(
+++    ctx, repository, location, compression, chunk_size,
+++    max_restore_bytes_per_sec, max_snapshot_bytes_per_sec):
+++    """
+++    Create a filesystem repository.
+++    """
+++    fix_hosts(ctx)
+++    client = get_client(**ctx.parent.parent.params)
+++    try:
+++        create_repository(client, repo_type='fs', **ctx.params)
+++    except FailedExecution as e:
+++        logger.critical(e)
+++        sys.exit(1)
+++
+++
+++@click.command(short_help='S3 Repository')
+++@click.option('--repository', required=True, type=str, help='Repository name')
+++@click.option('--bucket', required=True, type=str, help='S3 bucket name')
+++@click.option('--region', type=str, help='S3 region. [US Standard]')
+++@click.option('--base_path', type=str, help='S3 base path. [root]')
+++@click.option('--access_key', type=str,
+++            help='S3 access key. [value of cloud.aws.access_key]')
+++@click.option('--secret_key', type=str,
+++            help='S3 secret key. [value of cloud.aws.secret_key]')
+++@click.option('--compression', type=bool, default=True, show_default=True,
+++            help='Enable/Disable metadata compression.')
+++@click.option('--chunk_size', type=str,
+++            help='Chunk size, e.g. 1g, 10m, 5k. [unbounded]')
+++@click.option('--max_restore_bytes_per_sec', type=str, default='20mb',
+++            show_default=True,
+++            help='Throttles per node restore rate (per second).')
+++@click.option('--max_snapshot_bytes_per_sec', type=str, default='20mb',
+++            show_default=True,
+++            help='Throttles per node snapshot rate (per second).')
+++@click.pass_context
+++def s3(
+++    ctx, repository, bucket, region, base_path, access_key, secret_key,
+++    compression, chunk_size, max_restore_bytes_per_sec,
+++    max_snapshot_bytes_per_sec):
+++    """
+++    Create an S3 repository.
+++    """
+++    fix_hosts(ctx)
+++    client = get_client(**ctx.parent.parent.params)
+++    try:
+++        create_repository(client, repo_type='s3', **ctx.params)
+++    except FailedExecution as e:
+++        logger.critical(e)
+++        sys.exit(1)
+++
+++
+++@click.group()
+++@click.option(
+++    '--host', help='Elasticsearch host.', default=CLIENT_DEFAULTS['hosts'])
+++@click.option(
+++    '--url_prefix', help='Elasticsearch http url prefix.',
+++    default=CLIENT_DEFAULTS['url_prefix']
+++)
+++@click.option('--port', help='Elasticsearch port.', default=CLIENT_DEFAULTS['port'], type=int)
+++@click.option('--use_ssl', help='Connect to Elasticsearch through SSL.', is_flag=True, default=CLIENT_DEFAULTS['use_ssl'])
+++@click.option('--certificate', help='Path to certificate to use for SSL validation. (OPTIONAL)', type=str, default=None)
+++@click.option('--client-cert', help='Path to file containing SSL certificate for client auth. (OPTIONAL)', type=str, default=None)
+++@click.option('--client-key', help='Path to file containing SSL key for client auth. (OPTIONAL)', type=str, default=None)
+++@click.option('--ssl-no-validate', help='Do not validate server\'s SSL certificate', is_flag=True)
+++@click.option('--http_auth', help='Use Basic Authentication ex: user:pass', default=CLIENT_DEFAULTS['http_auth'])
+++@click.option('--timeout', help='Connection timeout in seconds.', default=CLIENT_DEFAULTS['timeout'], type=int)
+++@click.option('--master-only', is_flag=True, help='Only operate on elected master node.')
+++@click.option('--debug', is_flag=True, help='Debug mode')
+++@click.option('--loglevel', help='Log level', default=LOGGING_DEFAULTS['loglevel'])
+++@click.option('--logfile', help='log file', default=LOGGING_DEFAULTS['logfile'])
+++@click.option('--logformat', help='Log output format [default|logstash].', default=LOGGING_DEFAULTS['logformat'])
+++@click.version_option(version=__version__)
+++@click.pass_context
+++def repo_mgr_cli(
+++        ctx, host, url_prefix, port, use_ssl, certificate, client_cert,
+++        client_key, ssl_no_validate, http_auth, timeout, master_only, debug,
+++        loglevel, logfile, logformat):
+++    """
+++    Repository manager for Elasticsearch Curator.
+++    """
+++    # Set up logging
+++    if debug:
+++        loglevel = 'DEBUG'
+++    log_opts = {'loglevel':loglevel, 'logfile':logfile, 'logformat':logformat}
+++    loginfo = LogInfo(log_opts)
+++    logging.root.addHandler(loginfo.handler)
+++    logging.root.setLevel(loginfo.numeric_log_level)
+++    # Setting up NullHandler to handle nested elasticsearch.trace Logger
+++    # instance in elasticsearch python client
+++    logging.getLogger('elasticsearch.trace').addHandler(NullHandler())
+++
+++@repo_mgr_cli.group('create')
+++@click.pass_context
+++def _create(ctx):
+++    """Create an Elasticsearch repository"""
+++_create.add_command(fs)
+++_create.add_command(s3)
+++
+++@repo_mgr_cli.command('show')
+++@click.pass_context
+++def show(ctx):
+++    """
+++    Show all repositories
+++    """
+++    fix_hosts(ctx)
+++    client = get_client(**ctx.parent.params)
+++    show_repos(client)
+++
+++@repo_mgr_cli.command('delete')
+++@click.option('--repository', required=True, help='Repository name', type=str)
+++@click.option('--yes', is_flag=True, callback=delete_callback,
+++                expose_value=False,
+++                prompt='Are you sure you want to delete the repository?')
+++@click.pass_context
+++def _delete(ctx, repository):
+++    """Delete an Elasticsearch repository"""
+++    fix_hosts(ctx)
+++    client = get_client(**ctx.parent.params)
+++    try:
+++        logger.info('Deleting repository {0}...'.format(repository))
+++        client.snapshot.delete_repository(repository=repository)
+++        # sys.exit(0)
+++    except elasticsearch.NotFoundError:
+++        logger.error(
+++            'Unable to delete repository: {0}  Not Found.'.format(repository))
+++        sys.exit(1)
++diff --git a/run_es_repo_mgr.py b/run_es_repo_mgr.py
++new file mode 100755
++index 0000000..44e0e61
++--- /dev/null
+++++ b/run_es_repo_mgr.py
++@@ -0,0 +1,8 @@
+++#!/usr/bin/env python
+++
+++"""Wrapper for running es_repo_mgr from source."""
+++
+++from curator.es_repo_mgr import main
+++
+++if __name__ == '__main__':
+++    main()
++diff --git a/setup.py b/setup.py
++index f478cb7..dcbc3d2 100644
++--- a/setup.py
+++++ b/setup.py
++@@ -74,7 +74,8 @@ try:
++         packages = ["curator"],
++         include_package_data=True,
++         entry_points = {
++-            "console_scripts" : ["curator = curator.curator:main"]
+++            "console_scripts" : ["curator = curator.curator:main",
+++                                 "es_repo_mgr = curator.es_repo_mgr:main"]
++         },
++         classifiers=[
++             "Intended Audience :: Developers",
++@@ -109,7 +110,8 @@ except ImportError:
++         packages = ["curator"],
++         include_package_data=True,
++         entry_points = {
++-            "console_scripts" : ["curator = curator.curator:main"]
+++            "console_scripts" : ["curator = curator.curator:main",
+++                                 "es_repo_mgr = curator.es_repo_mgr:main"]
++         },
++         classifiers=[
++             "Intended Audience :: Developers",
++diff --git a/test/integration/test_es_repo_mgr.py b/test/integration/test_es_repo_mgr.py
++new file mode 100644
++index 0000000..8e6a36e
++--- /dev/null
+++++ b/test/integration/test_es_repo_mgr.py
++@@ -0,0 +1,170 @@
+++import elasticsearch
+++import curator
+++import os
+++import json
+++import click
+++import string, random, tempfile
+++from click import testing as clicktest
+++from mock import patch, Mock, MagicMock
+++
+++from . import CuratorTestCase
+++
+++import logging
+++logger = logging.getLogger(__name__)
+++
+++host, port = os.environ.get('TEST_ES_SERVER', 'localhost:9200').split(':')
+++port = int(port) if port else 9200
+++
+++class TestLoggingModules(CuratorTestCase):
+++    def test_logger_without_null_handler(self):
+++        mock = Mock()
+++        modules = {'logger': mock, 'logger.NullHandler': mock.module}
+++        with patch.dict('sys.modules', modules):
+++           self.create_repository()
+++           test = clicktest.CliRunner()
+++           result = test.invoke(
+++                       curator.repo_mgr_cli,
+++                       [
+++                           '--logfile', os.devnull,
+++                           '--host', host,
+++                           '--port', str(port),
+++                           'show'
+++                       ],
+++                       obj={"filters":[]})
+++        self.assertEqual(self.args['repository'], result.output.rstrip())
+++
+++
+++class TestCLIRepositoryCreate(CuratorTestCase):
+++    def test_create_fs_repository_success(self):
+++        test = clicktest.CliRunner()
+++        result = test.invoke(
+++                    curator.repo_mgr_cli,
+++                    [
+++                        '--logfile', os.devnull,
+++                        '--host', host,
+++                        '--port', str(port),
+++                        'create',
+++                        'fs',
+++                        '--repository', self.args['repository'],
+++                        '--location', self.args['location']
+++                    ],
+++                    obj={"filters":[]})
+++        self.assertTrue(1, len(self.client.snapshot.get_repository(repository=self.args['repository'])))
+++        self.assertEqual(0, result.exit_code)
+++
+++    def test_create_fs_repository_fail(self):
+++        test = clicktest.CliRunner()
+++        result = test.invoke(
+++                    curator.repo_mgr_cli,
+++                    [
+++                        '--logfile', os.devnull,
+++                        '--host', host,
+++                        '--port', str(port),
+++                        'create',
+++                        'fs',
+++                        '--repository', self.args['repository'],
+++                        '--location', os.devnull
+++                    ],
+++                    obj={"filters":[]})
+++        self.assertEqual(1, result.exit_code)
+++
+++    def test_create_s3_repository_fail(self):
+++        test = clicktest.CliRunner()
+++        result = test.invoke(
+++                    curator.repo_mgr_cli,
+++                    [
+++                        '--logfile', os.devnull,
+++                        '--host', host,
+++                        '--port', str(port),
+++                        'create',
+++                        's3',
+++                        '--bucket', 'mybucket',
+++                        '--repository', self.args['repository'],
+++                    ],
+++                    obj={"filters":[]})
+++        self.assertEqual(1, result.exit_code)
+++
+++
+++class TestCLIDeleteRepository(CuratorTestCase):
+++    def test_delete_repository_success(self):
+++        self.create_repository()
+++        test = clicktest.CliRunner()
+++        result = test.invoke(
+++                    curator.repo_mgr_cli,
+++                    [
+++                        '--logfile', os.devnull,
+++                        '--host', host,
+++                        '--port', str(port),
+++                        'delete',
+++                        '--yes', # This ensures no prompting will happen
+++                        '--repository', self.args['repository']
+++                    ],
+++                    obj={"filters":[]})
+++        self.assertFalse(curator.get_repository(self.client, self.args['repository']))
+++    def test_delete_repository_notfound(self):
+++        test = clicktest.CliRunner()
+++        result = test.invoke(
+++                    curator.repo_mgr_cli,
+++                    [
+++                        '--logfile', os.devnull,
+++                        '--debug',
+++                        '--host', host,
+++                        '--port', str(port),
+++                        'delete',
+++                        '--yes', # This ensures no prompting will happen
+++                        '--repository', self.args['repository']
+++                    ],
+++                    obj={"filters":[]})
+++        self.assertEqual(1, result.exit_code)
+++
+++class TestCLIShowRepositories(CuratorTestCase):
+++    def test_show_repository(self):
+++        self.create_repository()
+++        test = clicktest.CliRunner()
+++        result = test.invoke(
+++                    curator.repo_mgr_cli,
+++                    [
+++                        '--logfile', os.devnull,
+++                        '--host', host,
+++                        '--port', str(port),
+++                        'show'
+++                    ],
+++                    obj={"filters":[]})
+++        self.assertEqual(self.args['repository'], result.output.rstrip())
+++
+++class TestRepoMGR_CLIOptions(CuratorTestCase):
+++    def test_debug_logging(self):
+++        dirname = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
+++        logfile = tempfile.mkdtemp(suffix=dirname) + 'logfile'
+++        self.create_repository()
+++        test = clicktest.CliRunner()
+++        result = test.invoke(
+++                    curator.repo_mgr_cli,
+++                    [
+++                        '--logfile', logfile,
+++                        '--debug',
+++                        '--host', host,
+++                        '--port', str(port),
+++                        'show'
+++                    ],
+++                    obj={"filters":[]})
+++        self.assertEqual(0, result.exit_code)
+++
+++    def test_logstash_formatting(self):
+++        dirname = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
+++        logfile = tempfile.mkdtemp(suffix=dirname) + 'logfile'
+++        self.create_repository()
+++        test = clicktest.CliRunner()
+++        result = test.invoke(
+++                    curator.repo_mgr_cli,
+++                    [
+++                        '--logformat', 'logstash',
+++                        '--debug',
+++                        '--host', host,
+++                        '--port', str(port),
+++                        'show'
+++                    ],
+++                    obj={"filters":[]})
+++        d = json.loads(result.output.splitlines()[:1][0])
+++        keys = sorted(list(d.keys()))
+++        self.assertEqual(['@timestamp','function','linenum','loglevel','message','name'], keys)
diff --cc debian/patches/series
index e2d8982,0000000..7297585
mode 100644,000000..100644
--- a/debian/patches/series
+++ b/debian/patches/series
@@@ -1,1 -1,0 +1,2 @@@
 +setup.py-read-README-utf-8
++0002-Restore-es_repo_mgr-endpoint.patch
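
As a quick illustration of what this merge restores: the repo_mgr_cli group can be driven the same way the integration tests above drive it, through click's CliRunner (the same CLI is also installed as the es_repo_mgr console script via the setup.py hunk). A minimal sketch, assuming an Elasticsearch node is reachable at host:port and using placeholder values for the repository name and location:

    # Sketch only: exercises the restored repo_mgr_cli via click's test runner.
    # 'my_backup' and '/mnt/es_backups' are placeholders; a real shared
    # filesystem location must also be whitelisted in Elasticsearch's path.repo.
    import os
    from click import testing as clicktest
    import curator  # repo_mgr_cli is re-exported from curator.repomgrcli

    host, port = os.environ.get('TEST_ES_SERVER', 'localhost:9200').split(':')

    runner = clicktest.CliRunner()
    result = runner.invoke(
        curator.repo_mgr_cli,
        [
            '--logfile', os.devnull,
            '--host', host,
            '--port', port,
            'create', 'fs',
            '--repository', 'my_backup',
            '--location', '/mnt/es_backups',
        ],
        obj={'filters': []})
    print(result.exit_code, result.output)

The same arguments work on the installed entry point, e.g. "es_repo_mgr --host localhost --port 9200 show" to list repositories, mirroring the show subcommand added in repomgrcli.py.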

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/elasticsearch-curator.git


