[Python-modules-commits] [elasticsearch-curator] 10/14: Import elasticsearch-curator_4.0.1.orig.tar.gz

Apollon Oikonomopoulos apoikos at moszumanska.debian.org
Tue Jul 5 16:22:09 UTC 2016


This is an automated email from the git hooks/post-receive script.

apoikos pushed a commit to branch master
in repository elasticsearch-curator.

commit 85f6a59b76ddb85142061690b1e2de87c4fc4cfc
Author: Apollon Oikonomopoulos <apoikos at debian.org>
Date:   Tue Jul 5 18:14:55 2016 +0200

    Import elasticsearch-curator_4.0.1.orig.tar.gz
---
 .travis.yml                          |   2 +-
 CONTRIBUTING.md                      |   4 +-
 curator/__init__.py                  |   1 +
 curator/_version.py                  |   2 +-
 curator/actions.py                   |   6 +-
 curator/es_repo_mgr.py               |   4 +
 curator/indexlist.py                 |  15 ++-
 curator/logtools.py                  |   2 +
 curator/repomgrcli.py                | 177 +++++++++++++++++++++++++++++++++++
 curator/snapshotlist.py              |   1 +
 curator/utils.py                     |   2 +-
 docs/Changelog.rst                   |  16 ++++
 docs/asciidoc/actions.asciidoc       |   2 +-
 docs/asciidoc/index.asciidoc         |   2 +-
 run_es_repo_mgr.py                   |   8 ++
 setup.py                             |   6 +-
 test/integration/test_es_repo_mgr.py | 170 +++++++++++++++++++++++++++++++++
 test/unit/test_class_index_list.py   |   8 +-
 test/unit/testvars.py                |  34 +++++++
 19 files changed, 448 insertions(+), 14 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index df5ff7e..fbff486 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,7 +10,7 @@ env:
   - ES_VERSION=2.1.1
   - ES_VERSION=2.2.2
   - ES_VERSION=2.3.3
-  - ES_VERSION=5.0.0-alpha3
+  - ES_VERSION=5.0.0-alpha4
 
 os: linux
 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index f12f5d0..0832dbb 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -46,7 +46,7 @@ irc.freenode.org and ask for help there!
 
 If you think you found a bug, it probably is a bug.
 
-* File it on [github](https://github.com/elastic/logstash/issues)
+* File it on [github](https://github.com/elastic/curator/issues)
 
 # Contributing Documentation and Code Changes
 
@@ -54,7 +54,7 @@ If you have a bugfix or new feature that you would like to contribute to
 Curator, and you think it will take more than a few minutes to produce the fix
 (ie; write code), it is worth discussing the change with the Curator users and
 developers first! You can reach us via
-[github](https://github.com/elastic/logstash/issues), or via IRC (#logstash or
+[github](https://github.com/elastic/curator/issues), or via IRC (#logstash or
  #elasticsearch on freenode irc)
 
 Documentation is in two parts: API and CLI documentation.
diff --git a/curator/__init__.py b/curator/__init__.py
index 4e4f3d8..a9b39d3 100644
--- a/curator/__init__.py
+++ b/curator/__init__.py
@@ -6,3 +6,4 @@ from .indexlist import IndexList
 from .snapshotlist import SnapshotList
 from .actions import *
 from .cli import *
+from .repomgrcli import *
diff --git a/curator/_version.py b/curator/_version.py
index d6497a8..1a3bef5 100644
--- a/curator/_version.py
+++ b/curator/_version.py
@@ -1 +1 @@
-__version__ = '4.0.0'
+__version__ = '4.0.1'
diff --git a/curator/actions.py b/curator/actions.py
index d88c9ca..95ac3d0 100644
--- a/curator/actions.py
+++ b/curator/actions.py
@@ -717,7 +717,11 @@ class Snapshot(object):
         """
         Log what the output would be, but take no action.
         """
-        show_dry_run(self.index_list, 'snapshot', body=self.body)
+        self.loggit.info('DRY-RUN MODE.  No changes will be made.')
+        self.loggit.info(
+            'DRY-RUN: snapshot: {0} in repository {1} with arguments: '
+            '{2}'.format(self.name, self.repository, self.body)
+        )
 
     def do_action(self):
         """
diff --git a/curator/es_repo_mgr.py b/curator/es_repo_mgr.py
new file mode 100755
index 0000000..39be573
--- /dev/null
+++ b/curator/es_repo_mgr.py
@@ -0,0 +1,4 @@
+from . import repomgrcli
+
+def main():
+    repomgrcli.repo_mgr_cli()
diff --git a/curator/indexlist.py b/curator/indexlist.py
index e7caba8..1aa37cf 100644
--- a/curator/indexlist.py
+++ b/curator/indexlist.py
@@ -152,9 +152,18 @@ class IndexList(object):
                 for index in list(working_list.keys()):
                     s = self.index_info[index]
                     wl = working_list[index]
-                    s['age']['creation_date'] = (
-                        fix_epoch(wl['settings']['index']['creation_date'])
-                    )
+                    if not 'creation_date' in wl['settings']['index']:
+                        self.loggit.warn(
+                            'Index: {0} has no "creation_date"! This implies '
+                            'that the index predates Elasticsearch v1.4. For '
+                            'safety, this index will be removed from the '
+                            'actionable list.'.format(index)
+                        )
+                        self.__not_actionable(index)
+                    else:
+                        s['age']['creation_date'] = (
+                            fix_epoch(wl['settings']['index']['creation_date'])
+                        )
                     s['number_of_replicas'] = (
                         wl['settings']['index']['number_of_replicas']
                     )
diff --git a/curator/logtools.py b/curator/logtools.py
index e2114d0..0984df4 100644
--- a/curator/logtools.py
+++ b/curator/logtools.py
@@ -1,6 +1,7 @@
 import sys
 import json
 import logging
+import time
 
 class LogstashFormatter(logging.Formatter):
     # The LogRecord attributes we want to carry over to the Logstash message,
@@ -15,6 +16,7 @@ class LogstashFormatter(logging.Formatter):
     #     return time.gmtime(timevalue)
 
     def format(self, record):
+        self.converter = time.gmtime
         timestamp = '%s.%03dZ' % (
             self.formatTime(record, datefmt='%Y-%m-%dT%H:%M:%S'), record.msecs)
         result = {'message': record.getMessage(),
diff --git a/curator/repomgrcli.py b/curator/repomgrcli.py
new file mode 100644
index 0000000..8d34ab3
--- /dev/null
+++ b/curator/repomgrcli.py
@@ -0,0 +1,177 @@
+import elasticsearch
+import click
+import re
+import sys
+import logging
+from .settings import CLIENT_DEFAULTS, LOGGING_DEFAULTS
+from .exceptions import *
+from .utils import *
+from ._version import __version__
+from .logtools import LogInfo
+
+logger = logging.getLogger(__name__)
+
+try:
+    from logging import NullHandler
+except ImportError:
+    from logging import Handler
+
+    class NullHandler(Handler):
+        def emit(self, record):
+            pass
+
+def delete_callback(ctx, param, value):
+    if not value:
+        ctx.abort()
+
+def fix_hosts(ctx):
+    if "host" in ctx.parent.params:
+        ctx.parent.params['hosts'] = ctx.parent.params['host']
+        del ctx.parent.params['host']
+
+def show_repos(client):
+    for repository in sorted(get_repository(client, '_all').keys()):
+        print('{0}'.format(repository))
+    sys.exit(0)
+
+@click.command(short_help='Filesystem Repository')
+@click.option('--repository', required=True, type=str, help='Repository name')
+@click.option('--location', required=True, type=str,
+            help='Shared file-system location. Must match remote path, & be accessible to all master & data nodes')
+@click.option('--compression', type=bool, default=True, show_default=True,
+            help='Enable/Disable metadata compression.')
+@click.option('--chunk_size', type=str,
+            help='Chunk size, e.g. 1g, 10m, 5k. [unbounded]')
+@click.option('--max_restore_bytes_per_sec', type=str, default='20mb',
+            show_default=True,
+            help='Throttles per node restore rate (per second).')
+@click.option('--max_snapshot_bytes_per_sec', type=str, default='20mb',
+            show_default=True,
+            help='Throttles per node snapshot rate (per second).')
+@click.pass_context
+def fs(
+    ctx, repository, location, compression, chunk_size,
+    max_restore_bytes_per_sec, max_snapshot_bytes_per_sec):
+    """
+    Create a filesystem repository.
+    """
+    fix_hosts(ctx)
+    client = get_client(**ctx.parent.parent.params)
+    try:
+        create_repository(client, repo_type='fs', **ctx.params)
+    except FailedExecution as e:
+        logger.critical(e)
+        sys.exit(1)
+
+
+@click.command(short_help='S3 Repository')
+@click.option('--repository', required=True, type=str, help='Repository name')
+@click.option('--bucket', required=True, type=str, help='S3 bucket name')
+@click.option('--region', type=str, help='S3 region. [US Standard]')
+@click.option('--base_path', type=str, help='S3 base path. [root]')
+@click.option('--access_key', type=str,
+            help='S3 access key. [value of cloud.aws.access_key]')
+@click.option('--secret_key', type=str,
+            help='S3 secret key. [value of cloud.aws.secret_key]')
+@click.option('--compression', type=bool, default=True, show_default=True,
+            help='Enable/Disable metadata compression.')
+@click.option('--chunk_size', type=str,
+            help='Chunk size, e.g. 1g, 10m, 5k. [unbounded]')
+@click.option('--max_restore_bytes_per_sec', type=str, default='20mb',
+            show_default=True,
+            help='Throttles per node restore rate (per second).')
+@click.option('--max_snapshot_bytes_per_sec', type=str, default='20mb',
+            show_default=True,
+            help='Throttles per node snapshot rate (per second).')
+@click.pass_context
+def s3(
+    ctx, repository, bucket, region, base_path, access_key, secret_key,
+    compression, chunk_size, max_restore_bytes_per_sec,
+    max_snapshot_bytes_per_sec):
+    """
+    Create an S3 repository.
+    """
+    fix_hosts(ctx)
+    client = get_client(**ctx.parent.parent.params)
+    try:
+        create_repository(client, repo_type='s3', **ctx.params)
+    except FailedExecution as e:
+        logger.critical(e)
+        sys.exit(1)
+
+
+@click.group()
+@click.option(
+    '--host', help='Elasticsearch host.', default=CLIENT_DEFAULTS['hosts'])
+@click.option(
+    '--url_prefix', help='Elasticsearch http url prefix.',
+    default=CLIENT_DEFAULTS['url_prefix']
+)
+@click.option('--port', help='Elasticsearch port.', default=CLIENT_DEFAULTS['port'], type=int)
+@click.option('--use_ssl', help='Connect to Elasticsearch through SSL.', is_flag=True, default=CLIENT_DEFAULTS['use_ssl'])
+@click.option('--certificate', help='Path to certificate to use for SSL validation. (OPTIONAL)', type=str, default=None)
+@click.option('--client-cert', help='Path to file containing SSL certificate for client auth. (OPTIONAL)', type=str, default=None)
+@click.option('--client-key', help='Path to file containing SSL key for client auth. (OPTIONAL)', type=str, default=None)
+@click.option('--ssl-no-validate', help='Do not validate server\'s SSL certificate', is_flag=True)
+@click.option('--http_auth', help='Use Basic Authentication ex: user:pass', default=CLIENT_DEFAULTS['http_auth'])
+@click.option('--timeout', help='Connection timeout in seconds.', default=CLIENT_DEFAULTS['timeout'], type=int)
+@click.option('--master-only', is_flag=True, help='Only operate on elected master node.')
+@click.option('--debug', is_flag=True, help='Debug mode')
+@click.option('--loglevel', help='Log level', default=LOGGING_DEFAULTS['loglevel'])
+@click.option('--logfile', help='log file', default=LOGGING_DEFAULTS['logfile'])
+@click.option('--logformat', help='Log output format [default|logstash].', default=LOGGING_DEFAULTS['logformat'])
+@click.version_option(version=__version__)
+@click.pass_context
+def repo_mgr_cli(
+        ctx, host, url_prefix, port, use_ssl, certificate, client_cert,
+        client_key, ssl_no_validate, http_auth, timeout, master_only, debug,
+        loglevel, logfile, logformat):
+    """
+    Repository manager for Elasticsearch Curator.
+    """
+    # Set up logging
+    if debug:
+        loglevel = 'DEBUG'
+    log_opts = {'loglevel':loglevel, 'logfile':logfile, 'logformat':logformat}
+    loginfo = LogInfo(log_opts)
+    logging.root.addHandler(loginfo.handler)
+    logging.root.setLevel(loginfo.numeric_log_level)
+    # Setting up NullHandler to handle nested elasticsearch.trace Logger
+    # instance in elasticsearch python client
+    logging.getLogger('elasticsearch.trace').addHandler(NullHandler())
+
+@repo_mgr_cli.group('create')
+@click.pass_context
+def _create(ctx):
+    """Create an Elasticsearch repository"""
+_create.add_command(fs)
+_create.add_command(s3)
+
+@repo_mgr_cli.command('show')
+@click.pass_context
+def show(ctx):
+    """
+    Show all repositories
+    """
+    fix_hosts(ctx)
+    client = get_client(**ctx.parent.params)
+    show_repos(client)
+
+@repo_mgr_cli.command('delete')
+@click.option('--repository', required=True, help='Repository name', type=str)
+@click.option('--yes', is_flag=True, callback=delete_callback,
+                expose_value=False,
+                prompt='Are you sure you want to delete the repository?')
+@click.pass_context
+def _delete(ctx, repository):
+    """Delete an Elasticsearch repository"""
+    fix_hosts(ctx)
+    client = get_client(**ctx.parent.params)
+    try:
+        logger.info('Deleting repository {0}...'.format(repository))
+        client.snapshot.delete_repository(repository=repository)
+        # sys.exit(0)
+    except elasticsearch.NotFoundError:
+        logger.error(
+            'Unable to delete repository: {0}  Not Found.'.format(repository))
+        sys.exit(1)
diff --git a/curator/snapshotlist.py b/curator/snapshotlist.py
index fbba5c8..3632f6c 100644
--- a/curator/snapshotlist.py
+++ b/curator/snapshotlist.py
@@ -85,6 +85,7 @@ class SnapshotList(object):
             'age': self.filter_by_age,
             'none': self.filter_none,
             'pattern': self.filter_by_regex,
+            'state': self.filter_by_state,
         }
         return methods[ft]
 
diff --git a/curator/utils.py b/curator/utils.py
index 317039b..8c5f5a6 100644
--- a/curator/utils.py
+++ b/curator/utils.py
@@ -443,7 +443,7 @@ def get_client(**kwargs):
 
     :arg hosts: A list of one or more Elasticsearch client hostnames or IP
         addresses to connect to.  Can send a single host.
-    :type port: list
+    :type hosts: list
     :arg port: The Elasticsearch client port to connect to.
     :type port: int
     :arg url_prefix: `Optional` url prefix, if needed to reach the Elasticsearch
diff --git a/docs/Changelog.rst b/docs/Changelog.rst
index 29e6f2f..0c9a110 100644
--- a/docs/Changelog.rst
+++ b/docs/Changelog.rst
@@ -3,6 +3,22 @@
 Changelog
 =========
 
+4.0.1 (1 July 2016)
+-------------------
+
+**Bug Fixes**
+
+  * Coerce Logstash/JSON logformat type timestamp value to always use UTC.
+    #661 (untergeek)
+  * Catch and remove indices from the actionable list if they do not have a
+    `creation_date` field in settings.  This field was introduced in ES v1.4, so
+    that indicates a rather old index. #663 (untergeek)
+  * Replace missing ``state`` filter for ``snapshotlist``. #665 (untergeek)
+  * Restore ``es_repo_mgr`` as a stopgap until other CLI scripts are added.  It
+    will remain undocumented for now, as I am debating whether to make
+    repository creation its own action in the API. #668 (untergeek)
+  * Fix dry run results for snapshot action. #673 (untergeek)
+
 4.0.0 (24 June 2016)
 --------------------
 
diff --git a/docs/asciidoc/actions.asciidoc b/docs/asciidoc/actions.asciidoc
index 8bd058d..2f16512 100644
--- a/docs/asciidoc/actions.asciidoc
+++ b/docs/asciidoc/actions.asciidoc
@@ -95,7 +95,7 @@ TIP: See an example of this action in an <<actionfile,actionfile>>
 [source,text]
 -------------
 action: allocation
-description: "Add/Remove selected indices to or from the specified alias"
+description: "Apply shard allocation filtering rules to the specified indices"
 options:
   key:
   value:
diff --git a/docs/asciidoc/index.asciidoc b/docs/asciidoc/index.asciidoc
index 8156891..0f1ac7d 100644
--- a/docs/asciidoc/index.asciidoc
+++ b/docs/asciidoc/index.asciidoc
@@ -1,4 +1,4 @@
-:curator_version: 4.0.0
+:curator_version: 4.0.1
 :curator_major: 4
 :es_py_version: 2.3.0
 :ref:  http://www.elastic.co/guide/en/elasticsearch/reference/current
diff --git a/run_es_repo_mgr.py b/run_es_repo_mgr.py
new file mode 100755
index 0000000..44e0e61
--- /dev/null
+++ b/run_es_repo_mgr.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+
+"""Wrapper for running es_repo_mgr from source."""
+
+from curator.es_repo_mgr import main
+
+if __name__ == '__main__':
+    main()
diff --git a/setup.py b/setup.py
index 66cff48..d8534c1 100644
--- a/setup.py
+++ b/setup.py
@@ -70,7 +70,8 @@ try:
         packages = ["curator"],
         include_package_data=True,
         entry_points = {
-            "console_scripts" : ["curator = curator.curator:main"]
+            "console_scripts" : ["curator = curator.curator:main",
+                                 "es_repo_mgr = curator.es_repo_mgr:main"]
         },
         classifiers=[
             "Intended Audience :: Developers",
@@ -105,7 +106,8 @@ except ImportError:
         packages = ["curator"],
         include_package_data=True,
         entry_points = {
-            "console_scripts" : ["curator = curator.curator:main"]
+            "console_scripts" : ["curator = curator.curator:main",
+                                 "es_repo_mgr = curator.es_repo_mgr:main"]
         },
         classifiers=[
             "Intended Audience :: Developers",
diff --git a/test/integration/test_es_repo_mgr.py b/test/integration/test_es_repo_mgr.py
new file mode 100644
index 0000000..8e6a36e
--- /dev/null
+++ b/test/integration/test_es_repo_mgr.py
@@ -0,0 +1,170 @@
+import elasticsearch
+import curator
+import os
+import json
+import click
+import string, random, tempfile
+from click import testing as clicktest
+from mock import patch, Mock, MagicMock
+
+from . import CuratorTestCase
+
+import logging
+logger = logging.getLogger(__name__)
+
+host, port = os.environ.get('TEST_ES_SERVER', 'localhost:9200').split(':')
+port = int(port) if port else 9200
+
+class TestLoggingModules(CuratorTestCase):
+    def test_logger_without_null_handler(self):
+        mock = Mock()
+        modules = {'logger': mock, 'logger.NullHandler': mock.module}
+        with patch.dict('sys.modules', modules):
+           self.create_repository()
+           test = clicktest.CliRunner()
+           result = test.invoke(
+                       curator.repo_mgr_cli,
+                       [
+                           '--logfile', os.devnull,
+                           '--host', host,
+                           '--port', str(port),
+                           'show'
+                       ],
+                       obj={"filters":[]})
+        self.assertEqual(self.args['repository'], result.output.rstrip())
+
+
+class TestCLIRepositoryCreate(CuratorTestCase):
+    def test_create_fs_repository_success(self):
+        test = clicktest.CliRunner()
+        result = test.invoke(
+                    curator.repo_mgr_cli,
+                    [
+                        '--logfile', os.devnull,
+                        '--host', host,
+                        '--port', str(port),
+                        'create',
+                        'fs',
+                        '--repository', self.args['repository'],
+                        '--location', self.args['location']
+                    ],
+                    obj={"filters":[]})
+        self.assertTrue(1, len(self.client.snapshot.get_repository(repository=self.args['repository'])))
+        self.assertEqual(0, result.exit_code)
+
+    def test_create_fs_repository_fail(self):
+        test = clicktest.CliRunner()
+        result = test.invoke(
+                    curator.repo_mgr_cli,
+                    [
+                        '--logfile', os.devnull,
+                        '--host', host,
+                        '--port', str(port),
+                        'create',
+                        'fs',
+                        '--repository', self.args['repository'],
+                        '--location', os.devnull
+                    ],
+                    obj={"filters":[]})
+        self.assertEqual(1, result.exit_code)
+
+    def test_create_s3_repository_fail(self):
+        test = clicktest.CliRunner()
+        result = test.invoke(
+                    curator.repo_mgr_cli,
+                    [
+                        '--logfile', os.devnull,
+                        '--host', host,
+                        '--port', str(port),
+                        'create',
+                        's3',
+                        '--bucket', 'mybucket',
+                        '--repository', self.args['repository'],
+                    ],
+                    obj={"filters":[]})
+        self.assertEqual(1, result.exit_code)
+
+
+class TestCLIDeleteRepository(CuratorTestCase):
+    def test_delete_repository_success(self):
+        self.create_repository()
+        test = clicktest.CliRunner()
+        result = test.invoke(
+                    curator.repo_mgr_cli,
+                    [
+                        '--logfile', os.devnull,
+                        '--host', host,
+                        '--port', str(port),
+                        'delete',
+                        '--yes', # This ensures no prompting will happen
+                        '--repository', self.args['repository']
+                    ],
+                    obj={"filters":[]})
+        self.assertFalse(curator.get_repository(self.client, self.args['repository']))
+    def test_delete_repository_notfound(self):
+        test = clicktest.CliRunner()
+        result = test.invoke(
+                    curator.repo_mgr_cli,
+                    [
+                        '--logfile', os.devnull,
+                        '--debug',
+                        '--host', host,
+                        '--port', str(port),
+                        'delete',
+                        '--yes', # This ensures no prompting will happen
+                        '--repository', self.args['repository']
+                    ],
+                    obj={"filters":[]})
+        self.assertEqual(1, result.exit_code)
+
+class TestCLIShowRepositories(CuratorTestCase):
+    def test_show_repository(self):
+        self.create_repository()
+        test = clicktest.CliRunner()
+        result = test.invoke(
+                    curator.repo_mgr_cli,
+                    [
+                        '--logfile', os.devnull,
+                        '--host', host,
+                        '--port', str(port),
+                        'show'
+                    ],
+                    obj={"filters":[]})
+        self.assertEqual(self.args['repository'], result.output.rstrip())
+
+class TestRepoMGR_CLIOptions(CuratorTestCase):
+    def test_debug_logging(self):
+        dirname = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
+        logfile = tempfile.mkdtemp(suffix=dirname) + 'logfile'
+        self.create_repository()
+        test = clicktest.CliRunner()
+        result = test.invoke(
+                    curator.repo_mgr_cli,
+                    [
+                        '--logfile', logfile,
+                        '--debug',
+                        '--host', host,
+                        '--port', str(port),
+                        'show'
+                    ],
+                    obj={"filters":[]})
+        self.assertEqual(0, result.exit_code)
+
+    def test_logstash_formatting(self):
+        dirname = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
+        logfile = tempfile.mkdtemp(suffix=dirname) + 'logfile'
+        self.create_repository()
+        test = clicktest.CliRunner()
+        result = test.invoke(
+                    curator.repo_mgr_cli,
+                    [
+                        '--logformat', 'logstash',
+                        '--debug',
+                        '--host', host,
+                        '--port', str(port),
+                        'show'
+                    ],
+                    obj={"filters":[]})
+        d = json.loads(result.output.splitlines()[:1][0])
+        keys = sorted(list(d.keys()))
+        self.assertEqual(['@timestamp','function','linenum','loglevel','message','name'], keys)
diff --git a/test/unit/test_class_index_list.py b/test/unit/test_class_index_list.py
index 7f99e36..9c5b868 100644
--- a/test/unit/test_class_index_list.py
+++ b/test/unit/test_class_index_list.py
@@ -37,7 +37,13 @@ class TestIndexListClientAndInit(TestCase):
         client.indices.stats.return_value = testvars.stats_two
         il = curator.IndexList(client)
         self.assertEqual('close', il.index_info['index-2016.03.03']['state'])
-
+    def test_skip_index_without_creation_date(self):
+        client = Mock()
+        client.indices.get_settings.return_value = testvars.settings_two_no_cd
+        client.cluster.state.return_value = testvars.clu_state_two_no_cd
+        client.indices.stats.return_value = testvars.stats_two
+        il = curator.IndexList(client)
+        self.assertEqual(['index-2016.03.03'], sorted(il.indices))
 class TestIndexListOtherMethods(TestCase):
     def test_empty_list(self):
         client = Mock()
diff --git a/test/unit/testvars.py b/test/unit/testvars.py
index de70e3c..618a11b 100644
--- a/test/unit/testvars.py
+++ b/test/unit/testvars.py
@@ -239,6 +239,35 @@ settings_2_closed = {
     }
 }
 
+settings_two_no_cd  = {
+    u'index-2016.03.03': {
+        u'state': u'open',
+        u'aliases': [u'my_alias'],
+        u'mappings': {},
+        u'settings': {
+            u'index': {
+                u'number_of_replicas': u'1', u'uuid': u'random_uuid_string_here',
+                u'number_of_shards': u'5', u'creation_date': u'1456963200172',
+                u'routing': {u'allocation': {u'include': {u'tag': u'foo'}}},
+                u'version': {u'created': u'2020099'}, u'refresh_interval': u'5s'
+            }
+        }
+    },
+    u'index-2016.03.04': {
+        u'state': u'open',
+        u'aliases': [u'my_alias'],
+        u'mappings': {},
+        u'settings': {
+            u'index': {
+                u'number_of_replicas': u'1', u'uuid': u'another_random_uuid_string',
+                u'number_of_shards': u'5',
+                u'routing': {u'allocation': {u'include': {u'tag': u'bar'}}},
+                u'version': {u'created': u'2020099'}, u'refresh_interval': u'5s'
+            }
+        }
+    }
+}
+
 settings_four  = {
     u'a-2016.03.03': {
         u'state': u'open',
@@ -338,6 +367,11 @@ cs_two_closed  = {
         u'indices': settings_2_closed
     }
 }
+clu_state_two_no_cd  = {
+    u'metadata': {
+        u'indices': settings_two_no_cd
+    }
+}
 clu_state_four = {
     u'metadata': {
         u'indices': settings_four

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/elasticsearch-curator.git



More information about the Python-modules-commits mailing list