[Python-modules-commits] [django-haystack] 01/03: Import django-haystack_2.4.1.orig.tar.gz

Michael Fladischer fladi at moszumanska.debian.org
Wed Nov 4 12:21:39 UTC 2015


This is an automated email from the git hooks/post-receive script.

fladi pushed a commit to branch master
in repository django-haystack.

commit b26fb57547f22e0ad9b1381c59bea34e0a481815
Author: Michael Fladischer <FladischerMichael at fladi.at>
Date:   Wed Nov 4 13:08:15 2015 +0100

    Import django-haystack_2.4.1.orig.tar.gz
---
 PKG-INFO                                   |  3 +-
 README.rst                                 |  1 +
 django_haystack.egg-info/PKG-INFO          |  3 +-
 django_haystack.egg-info/pbr.json          |  2 +-
 docs/conf.py                               |  8 +++---
 docs/faceting.rst                          | 12 +++-----
 docs/installing_search_engines.rst         |  6 ++--
 haystack/__init__.py                       |  2 +-
 haystack/backends/__init__.py              |  2 +-
 haystack/backends/elasticsearch_backend.py | 44 ++++++++++++++++++------------
 haystack/backends/simple_backend.py        |  2 +-
 haystack/backends/solr_backend.py          | 32 ++++++++++++----------
 haystack/backends/whoosh_backend.py        | 24 +++++++++-------
 haystack/generic_views.py                  | 11 ++++++--
 haystack/indexes.py                        |  2 +-
 haystack/models.py                         |  4 +--
 haystack/query.py                          | 21 ++++++++------
 setup.py                                   |  2 +-
 18 files changed, 101 insertions(+), 80 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index 6c444ac..a5bf155 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: django-haystack
-Version: 2.4.0
+Version: 2.4.1
 Summary: Pluggable search for Django.
 Home-page: http://haystacksearch.org/
 Author: Daniel Lindsley
@@ -41,6 +41,7 @@ Description: ========
         =============
         
         * Development version: http://docs.haystacksearch.org/
+        * v2.4.X: http://django-haystack.readthedocs.org/en/v2.4.1/
         * v2.3.X: http://django-haystack.readthedocs.org/en/v2.3.0/
         * v2.2.X: http://django-haystack.readthedocs.org/en/v2.2.0/
         * v2.1.X: http://django-haystack.readthedocs.org/en/v2.1.0/
diff --git a/README.rst b/README.rst
index 2c0e549..bc350d0 100644
--- a/README.rst
+++ b/README.rst
@@ -33,6 +33,7 @@ Documentation
 =============
 
 * Development version: http://docs.haystacksearch.org/
+* v2.4.X: http://django-haystack.readthedocs.org/en/v2.4.1/
 * v2.3.X: http://django-haystack.readthedocs.org/en/v2.3.0/
 * v2.2.X: http://django-haystack.readthedocs.org/en/v2.2.0/
 * v2.1.X: http://django-haystack.readthedocs.org/en/v2.1.0/
diff --git a/django_haystack.egg-info/PKG-INFO b/django_haystack.egg-info/PKG-INFO
index 6c444ac..a5bf155 100644
--- a/django_haystack.egg-info/PKG-INFO
+++ b/django_haystack.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: django-haystack
-Version: 2.4.0
+Version: 2.4.1
 Summary: Pluggable search for Django.
 Home-page: http://haystacksearch.org/
 Author: Daniel Lindsley
@@ -41,6 +41,7 @@ Description: ========
         =============
         
         * Development version: http://docs.haystacksearch.org/
+        * v2.4.X: http://django-haystack.readthedocs.org/en/v2.4.1/
         * v2.3.X: http://django-haystack.readthedocs.org/en/v2.3.0/
         * v2.2.X: http://django-haystack.readthedocs.org/en/v2.2.0/
         * v2.1.X: http://django-haystack.readthedocs.org/en/v2.1.0/
diff --git a/django_haystack.egg-info/pbr.json b/django_haystack.egg-info/pbr.json
index 694ff89..313cb3f 100644
--- a/django_haystack.egg-info/pbr.json
+++ b/django_haystack.egg-info/pbr.json
@@ -1 +1 @@
-{"is_release": false, "git_version": "ebf1a5c"}
\ No newline at end of file
+{"is_release": false, "git_version": "9aed7ad"}
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
index db3e990..64ecf6c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -41,16 +41,16 @@ master_doc = 'toc'
 
 # General information about the project.
 project = u'Haystack'
-copyright = u'2009-2013, Daniel Lindsley'
+copyright = u'2009-2015, Daniel Lindsley'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
-#
+
 # The short X.Y version.
-version = '2.1.1'
+version = '2.4'
 # The full version, including alpha/beta/rc tags.
-release = '2.1.1-dev'
+release = '2.4.1'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/docs/faceting.rst b/docs/faceting.rst
index f2e64f4..6ff15eb 100644
--- a/docs/faceting.rst
+++ b/docs/faceting.rst
@@ -213,20 +213,16 @@ URLconf should resemble::
 
     from django.conf.urls.defaults import *
     from haystack.forms import FacetedSearchForm
-    from haystack.query import SearchQuerySet
     from haystack.views import FacetedSearchView
     
     
-    sqs = SearchQuerySet().facet('author')
-     
-    
     urlpatterns = patterns('haystack.views',
-        url(r'^$', FacetedSearchView(form_class=FacetedSearchForm, searchqueryset=sqs), name='haystack_search'),
+        url(r'^$', FacetedSearchView(form_class=FacetedSearchForm, facet_fields=['author']), name='haystack_search'),
     )
 
-The ``FacetedSearchView`` will now instantiate the ``FacetedSearchForm`` and use
-the ``SearchQuerySet`` we provided. Now, a ``facets`` variable will be present
-in the context. This is added in an overridden ``extra_context`` method.
+The ``FacetedSearchView`` will now instantiate the ``FacetedSearchForm``.
+The specified ``facet_fields`` will be present in the context variable
+``facets``. This is added in an overridden ``extra_context`` method.
 
 
 3. Display The Facets In The Template
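
The revised example above configures faceting by handing ``facet_fields`` to the
view instead of a pre-built ``SearchQuerySet``. The class-based
``FacetedSearchView`` in ``haystack/generic_views.py`` (patched further down)
gains the same knob, and the ``facets`` context variable carries the output of
``facet_counts()``. A sketch only -- the ``author`` facet and its counts are
placeholders::

    # Class-based flavour of the same configuration (illustrative subclass).
    from haystack.generic_views import FacetedSearchView

    class AuthorSearchView(FacetedSearchView):
        facet_fields = ['author']    # get_queryset() calls .facet() per entry

    # What lands in the template context as ``facets``, roughly:
    # {'fields': {'author': [('daniel', 12), ('toast', 3)]},
    #  'dates': {}, 'queries': {}}
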
diff --git a/docs/installing_search_engines.rst b/docs/installing_search_engines.rst
index e9599a0..75f8a4a 100644
--- a/docs/installing_search_engines.rst
+++ b/docs/installing_search_engines.rst
@@ -149,16 +149,16 @@ locally. Modifications should be done in a YAML file, the stock one being
       logs: /usr/local/var/log
       data: /usr/local/var/data
 
-You'll also need an Elasticsearch binding: elasticsearch-py_ (**NOT**
+You'll also need an Elasticsearch binding: elasticsearch_ (**NOT**
 ``pyes``). Place ``elasticsearch`` somewhere on your ``PYTHONPATH``
 (usually ``python setup.py install`` or ``pip install elasticsearch``).
 
-.. _elasticsearch-py: http://pypi.python.org/pypi/elasticsearch/
+.. _elasticsearch: http://pypi.python.org/pypi/elasticsearch/
 
 .. note::
 
   Elasticsearch 1.0 is slightly backwards incompatible so you need to make sure
-  you have the proper version of `elasticsearch-py` installed - releases with
+  you have the proper version of `elasticsearch` installed - releases with
   major version 1 (1.X.Y) are to be used with Elasticsearch 1.0 and later, 0.4
   releases are meant to work with Elasticsearch 0.90.X.
 
diff --git a/haystack/__init__.py b/haystack/__init__.py
index a02c845..7b29cf4 100644
--- a/haystack/__init__.py
+++ b/haystack/__init__.py
@@ -13,7 +13,7 @@ from haystack.utils import loading
 
 
 __author__ = 'Daniel Lindsley'
-__version__ = (2, 4, 0)
+__version__ = (2, 4, 1)
 
 
 # Setup default logging.
diff --git a/haystack/backends/__init__.py b/haystack/backends/__init__.py
index bcf8554..9e87b18 100644
--- a/haystack/backends/__init__.py
+++ b/haystack/backends/__init__.py
@@ -101,7 +101,7 @@ class BaseSearchBackend(object):
         """
         raise NotImplementedError
 
-    def clear(self, models=[], commit=True):
+    def clear(self, models=None, commit=True):
         """
         Clears the backend of all documents/objects for a collection of models.
 
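Switching the default from ``models=[]`` to ``models=None`` sidesteps Python's
shared-mutable-default pitfall: a default list is built once, at function
definition time, so state could leak between calls. A small illustration,
independent of Haystack::

    def buggy(items=[]):      # one list shared by every call
        items.append('x')
        return items

    def safe(items=None):     # a fresh list per call
        if items is None:
            items = []
        items.append('x')
        return items

    buggy(); buggy()          # -> ['x', 'x'] on the second call
    safe(); safe()            # -> ['x'] each time
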
diff --git a/haystack/backends/elasticsearch_backend.py b/haystack/backends/elasticsearch_backend.py
index a57bb12..2576b2b 100644
--- a/haystack/backends/elasticsearch_backend.py
+++ b/haystack/backends/elasticsearch_backend.py
@@ -22,7 +22,12 @@ from haystack.utils.app_loading import haystack_get_model
 
 try:
     import elasticsearch
-    from elasticsearch.helpers import bulk_index
+    try:
+        # let's try this, for elasticsearch > 1.7.0
+        from elasticsearch.helpers import bulk
+    except ImportError:
+        # let's try this, for elasticsearch <= 1.7.0
+        from elasticsearch.helpers import bulk_index as bulk
     from elasticsearch.exceptions import NotFoundError
 except ImportError:
     raise MissingDependency("The 'elasticsearch' backend requires the installation of 'elasticsearch'. Please refer to the documentation.")
@@ -156,7 +161,7 @@ class ElasticsearchSearchBackend(BaseSearchBackend):
                 if not self.silently_fail:
                     raise
 
-                self.log.error("Failed to add documents to Elasticsearch: %s", e)
+                self.log.error("Failed to add documents to Elasticsearch: %s", e, exc_info=True)
                 return
 
         prepped_docs = []
@@ -181,14 +186,11 @@ class ElasticsearchSearchBackend(BaseSearchBackend):
                 # We'll log the object identifier but won't include the actual object
                 # to avoid the possibility of that generating encoding errors while
                 # processing the log message:
-                self.log.error(u"%s while preparing object for update" % e.__class__.__name__, exc_info=True, extra={
-                    "data": {
-                        "index": index,
-                        "object": get_identifier(obj)
-                    }
-                })
+                self.log.error(u"%s while preparing object for update" % e.__class__.__name__, exc_info=True,
+                               extra={"data": {"index": index,
+                                               "object": get_identifier(obj)}})
 
-        bulk_index(self.conn, prepped_docs, index=self.index_name, doc_type='modelresult')
+        bulk(self.conn, prepped_docs, index=self.index_name, doc_type='modelresult')
 
         if commit:
             self.conn.indices.refresh(index=self.index_name)
@@ -203,7 +205,8 @@ class ElasticsearchSearchBackend(BaseSearchBackend):
                 if not self.silently_fail:
                     raise
 
-                self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)
+                self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e,
+                               exc_info=True)
                 return
 
         try:
@@ -215,16 +218,19 @@ class ElasticsearchSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)
+            self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e, exc_info=True)
 
-    def clear(self, models=[], commit=True):
+    def clear(self, models=None, commit=True):
         # We actually don't want to do this here, as mappings could be
         # very different.
         # if not self.setup_complete:
         #     self.setup()
 
+        if models is not None:
+            assert isinstance(models, (list, tuple))
+
         try:
-            if not models:
+            if models is None:
                 self.conn.indices.delete(index=self.index_name, ignore=404)
                 self.setup_complete = False
                 self.existing_mapping = {}
@@ -242,10 +248,11 @@ class ElasticsearchSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            if len(models):
-                self.log.error("Failed to clear Elasticsearch index of models '%s': %s", ','.join(models_to_delete), e)
+            if models is not None:
+                self.log.error("Failed to clear Elasticsearch index of models '%s': %s",
+                               ','.join(models_to_delete), e, exc_info=True)
             else:
-                self.log.error("Failed to clear Elasticsearch index: %s", e)
+                self.log.error("Failed to clear Elasticsearch index: %s", e, exc_info=True)
 
     def build_search_kwargs(self, query_string, sort_by=None, start_offset=0, end_offset=None,
                             fields='', highlight=False, facets=None,
@@ -511,7 +518,7 @@ class ElasticsearchSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            self.log.error("Failed to query Elasticsearch using '%s': %s", query_string, e)
+            self.log.error("Failed to query Elasticsearch using '%s': %s", query_string, e, exc_info=True)
             raw_results = {}
 
         return self._process_results(raw_results,
@@ -550,7 +557,8 @@ class ElasticsearchSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            self.log.error("Failed to fetch More Like This from Elasticsearch for document '%s': %s", doc_id, e)
+            self.log.error("Failed to fetch More Like This from Elasticsearch for document '%s': %s",
+                           doc_id, e, exc_info=True)
             raw_results = {}
 
         return self._process_results(raw_results, result_class=result_class)
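
Two themes run through the hunk above: the helper import now tolerates both
older and newer releases of the ``elasticsearch`` package, and every error log
gains ``exc_info=True`` so the traceback is recorded alongside the message. A
standalone sketch of the logging effect::

    import logging

    log = logging.getLogger('haystack')

    try:
        raise ValueError('boom')   # stand-in for a failed backend call
    except ValueError as e:
        # exc_info=True attaches the full traceback to the log record,
        # not just the str(e) interpolated into the message.
        log.error("Failed to add documents to Elasticsearch: %s", e, exc_info=True)
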
diff --git a/haystack/backends/simple_backend.py b/haystack/backends/simple_backend.py
index ff7402e..a336fa3 100644
--- a/haystack/backends/simple_backend.py
+++ b/haystack/backends/simple_backend.py
@@ -43,7 +43,7 @@ class SimpleSearchBackend(BaseSearchBackend):
     def remove(self, obj, commit=True):
         warn('remove is not implemented in this backend')
 
-    def clear(self, models=[], commit=True):
+    def clear(self, models=None, commit=True):
         warn('clear is not implemented in this backend')
 
     @log_query
diff --git a/haystack/backends/solr_backend.py b/haystack/backends/solr_backend.py
index 4f301c9..c577494 100644
--- a/haystack/backends/solr_backend.py
+++ b/haystack/backends/solr_backend.py
@@ -63,12 +63,9 @@ class SolrSearchBackend(BaseSearchBackend):
                 # We'll log the object identifier but won't include the actual object
                 # to avoid the possibility of that generating encoding errors while
                 # processing the log message:
-                self.log.error(u"UnicodeDecodeError while preparing object for update", exc_info=True, extra={
-                    "data": {
-                        "index": index,
-                        "object": get_identifier(obj)
-                    }
-                })
+                self.log.error(u"UnicodeDecodeError while preparing object for update", exc_info=True,
+                               extra={"data": {"index": index,
+                                               "object": get_identifier(obj)}})
 
         if len(docs) > 0:
             try:
@@ -77,7 +74,7 @@ class SolrSearchBackend(BaseSearchBackend):
                 if not self.silently_fail:
                     raise
 
-                self.log.error("Failed to add documents to Solr: %s", e)
+                self.log.error("Failed to add documents to Solr: %s", e, exc_info=True)
 
     def remove(self, obj_or_string, commit=True):
         solr_id = get_identifier(obj_or_string)
@@ -92,11 +89,14 @@ class SolrSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            self.log.error("Failed to remove document '%s' from Solr: %s", solr_id, e)
+            self.log.error("Failed to remove document '%s' from Solr: %s", solr_id, e, exc_info=True)
+
+    def clear(self, models=None, commit=True):
+        if models is not None:
+            assert isinstance(models, (list, tuple))
 
-    def clear(self, models=[], commit=True):
         try:
-            if not models:
+            if models is None:
                 # *:* matches all docs in Solr
                 self.conn.delete(q='*:*', commit=commit)
             else:
@@ -114,10 +114,11 @@ class SolrSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            if len(models):
-                self.log.error("Failed to clear Solr index of models '%s': %s", ','.join(models_to_delete), e)
+            if models is not None:
+                self.log.error("Failed to clear Solr index of models '%s': %s", ','.join(models_to_delete), e,
+                               exc_info=True)
             else:
-                self.log.error("Failed to clear Solr index: %s", e)
+                self.log.error("Failed to clear Solr index: %s", e, exc_info=True)
 
     @log_query
     def search(self, query_string, **kwargs):
@@ -135,7 +136,7 @@ class SolrSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            self.log.error("Failed to query Solr using '%s': %s", query_string, e)
+            self.log.error("Failed to query Solr using '%s': %s", query_string, e, exc_info=True)
             raw_results = EmptyResults()
 
         return self._process_results(raw_results, highlight=kwargs.get('highlight'), result_class=kwargs.get('result_class', SearchResult), distance_point=kwargs.get('distance_point'))
@@ -333,7 +334,8 @@ class SolrSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            self.log.error("Failed to fetch More Like This from Solr for document '%s': %s", query, e)
+            self.log.error("Failed to fetch More Like This from Solr for document '%s': %s",
+                           query, e, exc_info=True)
             raw_results = EmptyResults()
 
         return self._process_results(raw_results, result_class=result_class)
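
The ``clear()`` changes above (and the matching ones in the other backends)
tighten the caller-side contract: pass ``None`` to wipe the whole index, or an
explicit list/tuple of model classes; a bare model class now trips the assert
instead of being silently iterated. A sketch of the contract -- ``Note`` is a
placeholder model::

    from haystack import connections
    from myapp.models import Note    # placeholder model

    backend = connections['default'].get_backend()
    backend.clear()                   # remove everything
    backend.clear(models=[Note])      # remove only documents for Note
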
diff --git a/haystack/backends/whoosh_backend.py b/haystack/backends/whoosh_backend.py
index bf26adc..979b04f 100644
--- a/haystack/backends/whoosh_backend.py
+++ b/haystack/backends/whoosh_backend.py
@@ -215,12 +215,9 @@ class WhooshSearchBackend(BaseSearchBackend):
                     # We'll log the object identifier but won't include the actual object
                     # to avoid the possibility of that generating encoding errors while
                     # processing the log message:
-                    self.log.error(u"%s while preparing object for update" % e.__class__.__name__, exc_info=True, extra={
-                        "data": {
-                            "index": index,
-                            "object": get_identifier(obj)
-                        }
-                    })
+                    self.log.error(u"%s while preparing object for update" % e.__class__.__name__,
+                                   exc_info=True, extra={"data": {"index": index,
+                                                                  "object": get_identifier(obj)}})
 
         if len(iterable) > 0:
             # For now, commit no matter what, as we run into locking issues otherwise.
@@ -239,16 +236,19 @@ class WhooshSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            self.log.error("Failed to remove document '%s' from Whoosh: %s", whoosh_id, e)
+            self.log.error("Failed to remove document '%s' from Whoosh: %s", whoosh_id, e, exc_info=True)
 
-    def clear(self, models=[], commit=True):
+    def clear(self, models=None, commit=True):
         if not self.setup_complete:
             self.setup()
 
         self.index = self.index.refresh()
 
+        if models is not None:
+            assert isinstance(models, (list, tuple))
+
         try:
-            if not models:
+            if models is None:
                 self.delete_index()
             else:
                 models_to_delete = []
@@ -261,7 +261,11 @@ class WhooshSearchBackend(BaseSearchBackend):
             if not self.silently_fail:
                 raise
 
-            self.log.error("Failed to clear documents from Whoosh: %s", e)
+            if models is not None:
+                self.log.error("Failed to clear Whoosh index of models '%s': %s", ','.join(models_to_delete),
+                               e, exc_info=True)
+            else:
+                self.log.error("Failed to clear Whoosh index: %s", e, exc_info=True)
 
     def delete_index(self):
         # Per the Whoosh mailing list, if wiping out everything from the index,
diff --git a/haystack/generic_views.py b/haystack/generic_views.py
index 2dad515..def14df 100644
--- a/haystack/generic_views.py
+++ b/haystack/generic_views.py
@@ -88,9 +88,10 @@ class FacetedSearchMixin(SearchMixin):
     faceting.
     """
     form_class = FacetedSearchForm
+    facet_fields = None
 
     def get_form_kwargs(self):
-        kwargs = super(SearchMixin, self).get_form_kwargs()
+        kwargs = super(FacetedSearchMixin, self).get_form_kwargs()
         kwargs.update({
             'selected_facets': self.request.GET.getlist("selected_facets")
         })
@@ -98,9 +99,15 @@ class FacetedSearchMixin(SearchMixin):
 
     def get_context_data(self, **kwargs):
         context = super(FacetedSearchMixin, self).get_context_data(**kwargs)
-        context.update({'facets': self.results.facet_counts()})
+        context.update({'facets': self.queryset.facet_counts()})
         return context
 
+    def get_queryset(self):
+        qs = super(FacetedSearchMixin, self).get_queryset()
+        for field in self.facet_fields:
+            qs = qs.facet(field)
+        return qs
+
 
 class SearchView(SearchMixin, FormView):
     """A view class for searching a Haystack managed search index"""
diff --git a/haystack/indexes.py b/haystack/indexes.py
index d3d53ec..20b638a 100644
--- a/haystack/indexes.py
+++ b/haystack/indexes.py
@@ -124,7 +124,7 @@ class SearchIndex(with_metaclass(DeclarativeMetaclass, threading.local)):
 
         This method is required & you must override it to return the correct class.
         """
-        raise NotImplementedError("You must provide a 'model' method for the '%r' index." % self)
+        raise NotImplementedError("You must provide a 'get_model' method for the '%r' index." % self)
 
     def index_queryset(self, using=None):
         """
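
The corrected message now names the method an index actually has to implement,
``get_model``. For reference, a minimal index that satisfies it -- the ``Note``
model is a placeholder::

    # search_indexes.py
    from haystack import indexes
    from myapp.models import Note

    class NoteIndex(indexes.SearchIndex, indexes.Indexable):
        text = indexes.CharField(document=True, use_template=True)

        def get_model(self):
            return Note
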
diff --git a/haystack/models.py b/haystack/models.py
index a121207..2e7ac49 100644
--- a/haystack/models.py
+++ b/haystack/models.py
@@ -4,7 +4,6 @@
 
 from __future__ import absolute_import, division, print_function, unicode_literals
 
-from django.conf import settings
 from django.core.exceptions import ObjectDoesNotExist
 from django.db import models
 from django.utils import six
@@ -49,7 +48,7 @@ class SearchResult(object):
         self.log = self._get_log()
 
         for key, value in kwargs.items():
-            if not key in self.__dict__:
+            if key not in self.__dict__:
                 self.__dict__[key] = value
                 self._additional_fields.append(key)
 
@@ -200,7 +199,6 @@ class SearchResult(object):
         """
         if self._stored_fields is None:
             from haystack import connections
-            from haystack.exceptions import NotHandled
 
             try:
                 index = connections['default'].get_unified_index().get_index(self.model)
diff --git a/haystack/query.py b/haystack/query.py
index 096a4b3..e914826 100644
--- a/haystack/query.py
+++ b/haystack/query.py
@@ -11,7 +11,7 @@ from haystack import connection_router, connections
 from haystack.backends import SQ
 from haystack.constants import DEFAULT_OPERATOR, ITERATOR_LOAD_PER_QUERY, REPR_OUTPUT_SIZE
 from haystack.exceptions import NotHandled
-from haystack.inputs import AutoQuery, Clean, Raw
+from haystack.inputs import AutoQuery, Raw
 from haystack.utils import log as logging
 
 
@@ -170,7 +170,7 @@ class SearchQuerySet(object):
         self.query.set_limits(start, end)
         results = self.query.get_results(**kwargs)
 
-        if results == None or len(results) == 0:
+        if results is None or len(results) == 0:
             return False
 
         # Setup the full cache now that we know how many results there are.
@@ -180,7 +180,7 @@ class SearchQuerySet(object):
         # an array of 100,000 ``None``s consumed less than .5 Mb, which ought
         # to be an acceptable loss for consistent and more efficient caching.
         if len(self._result_cache) == 0:
-            self._result_cache = [None for i in range(self.query.get_count())]
+            self._result_cache = [None] * self.query.get_count()
 
         if start is None:
             start = 0
@@ -222,7 +222,7 @@ class SearchQuerySet(object):
             if self._load_all:
                 # We have to deal with integer keys being cast from strings
                 model_objects = loaded_objects.get(result.model, {})
-                if not result.pk in model_objects:
+                if result.pk not in model_objects:
                     try:
                         result.pk = int(result.pk)
                     except ValueError:
@@ -266,7 +266,8 @@ class SearchQuerySet(object):
             bound = k + 1
 
         # We need check to see if we need to populate more of the cache.
-        if len(self._result_cache) <= 0 or (None in self._result_cache[start:bound] and not self._cache_is_full()):
+        if len(self._result_cache) <= 0 or (None in self._result_cache[start:bound]
+                                            and not self._cache_is_full()):
             try:
                 self._fill_cache(start, bound)
             except StopIteration:
@@ -333,7 +334,7 @@ class SearchQuerySet(object):
         clone = self._clone()
 
         for model in models:
-            if not model in connections[self.query._using].get_unified_index().get_indexed_models():
+            if model not in connections[self.query._using].get_unified_index().get_indexed_models():
                 warnings.warn('The model %r is not registered for search.' % (model,))
 
             clone.query.add_model(model)
@@ -385,10 +386,11 @@ class SearchQuerySet(object):
         clone = self._clone()
         stats_facets = []
         try:
-            stats_facets.append(sum(facet_fields,[]))
+            stats_facets.append(sum(facet_fields, []))
         except TypeError:
-            if facet_fields: stats_facets.append(facet_fields)
-        clone.query.add_stats_query(field,stats_facets)
+            if facet_fields:
+                stats_facets.append(facet_fields)
+        clone.query.add_stats_query(field, stats_facets)
         return clone
 
     def distance(self, field, point):
@@ -782,6 +784,7 @@ class RelatedSearchQuerySet(SearchQuerySet):
         """
         if not isinstance(k, (slice, six.integer_types)):
             raise TypeError
+
         assert ((not isinstance(k, slice) and (k >= 0))
                 or (isinstance(k, slice) and (k.start is None or k.start >= 0)
                     and (k.stop is None or k.stop >= 0))), \
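
The ``stats()`` cleanup above keeps the ``sum(facet_fields, [])`` flattening
trick and its ``TypeError`` fallback for flat lists. Both branches in
isolation::

    sum([['price', 'rating'], ['pages']], [])  # -> ['price', 'rating', 'pages']
    sum(['price', 'rating'], [])               # TypeError: can only concatenate list (not "str") to list
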
diff --git a/setup.py b/setup.py
index cb68312..11eaf7f 100755
--- a/setup.py
+++ b/setup.py
@@ -29,7 +29,7 @@ tests_require = [
 
 setup(
     name='django-haystack',
-    version='2.4.0',
+    version='2.4.1',
     description='Pluggable search for Django.',
     author='Daniel Lindsley',
     author_email='daniel at toastdriven.com',

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/django-haystack.git


