Compare commits
	
		
			1 Commits
		
	
	
		
			fix-cursor
			...
			insert-con
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 98e1df0c45 | 
							
								
								
									
										18
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										18
									
								
								README.rst
									
									
									
									
									
								
							| @@ -35,22 +35,16 @@ setup.py install``. | ||||
|  | ||||
| Dependencies | ||||
| ============ | ||||
| All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_. At the very least, you'll need these two packages to use MongoEngine: | ||||
|  | ||||
| - pymongo>=2.7.1 | ||||
| - six>=1.10.0 | ||||
|  | ||||
| If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | ||||
| - sphinx (optional - for documentation generation) | ||||
|  | ||||
| Optional Dependencies | ||||
| --------------------- | ||||
| - **Image Fields**: Pillow>=2.0.0 | ||||
| - dateutil>=2.1.0 | ||||
|  | ||||
| If you need to use an ``ImageField`` or ``ImageGridFsProxy``: | ||||
|  | ||||
| - Pillow>=2.0.0 | ||||
|  | ||||
| If you want to generate the documentation (e.g. to contribute to it): | ||||
|  | ||||
| - sphinx | ||||
| .. note:: | ||||
|    MongoEngine always runs its test suite against the latest patch version of each dependency. e.g.: PyMongo 3.0.1 | ||||
|  | ||||
| Examples | ||||
| ======== | ||||
|   | ||||
| @@ -361,6 +361,11 @@ Its value can take any of the following constants: | ||||
|    In Django, be sure to put all apps that have such delete rule declarations in | ||||
|    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. | ||||
|  | ||||
|  | ||||
| .. warning:: | ||||
|    Signals are not triggered when doing cascading updates / deletes - if this | ||||
|    is required you must manually handle the update / delete. | ||||
|  | ||||
| Generic reference fields | ||||
| '''''''''''''''''''''''' | ||||
| A second kind of reference field also exists, | ||||
|   | ||||
| @@ -142,4 +142,11 @@ cleaner looking while still allowing manual execution of the callback:: | ||||
|         modified = DateTimeField() | ||||
|  | ||||
|  | ||||
| ReferenceFields and Signals | ||||
| --------------------------- | ||||
|  | ||||
| Currently `reverse_delete_rule` does not trigger signals on the other part of | ||||
| the relationship.  If this is required you must manually handle the | ||||
| reverse deletion. | ||||
|  | ||||
| .. _blinker: http://pypi.python.org/pypi/blinker | ||||
|   | ||||
| @@ -888,6 +888,10 @@ class ReferenceField(BaseField): | ||||
|  | ||||
|         Foo.register_delete_rule(Bar, 'foo', NULLIFY) | ||||
|  | ||||
|     .. note :: | ||||
|         `reverse_delete_rule` does not trigger pre / post delete signals to be | ||||
|         triggered. | ||||
|  | ||||
|     .. versionchanged:: 0.5 added `reverse_delete_rule` | ||||
|     """ | ||||
|  | ||||
|   | ||||
| @@ -86,7 +86,6 @@ class BaseQuerySet(object): | ||||
|         self._batch_size = None | ||||
|         self.only_fields = [] | ||||
|         self._max_time_ms = None | ||||
|         self._comment = None | ||||
|  | ||||
|     def __call__(self, q_obj=None, class_check=True, read_preference=None, | ||||
|                  **query): | ||||
| @@ -297,22 +296,25 @@ class BaseQuerySet(object): | ||||
|             result = None | ||||
|         return result | ||||
|  | ||||
|     def insert(self, doc_or_docs, load_bulk=True, | ||||
|                write_concern=None, signal_kwargs=None): | ||||
|     def insert(self, doc_or_docs, load_bulk=True, write_concern=None, | ||||
|                signal_kwargs=None, continue_on_error=None): | ||||
|         """bulk insert documents | ||||
|  | ||||
|         :param doc_or_docs: a document or list of documents to be inserted | ||||
|         :param load_bulk (optional): If True returns the list of document | ||||
|             instances | ||||
|         :param write_concern: Extra keyword arguments are passed down to | ||||
|                 :meth:`~pymongo.collection.Collection.insert` | ||||
|                 which will be used as options for the resultant | ||||
|                 ``getLastError`` command.  For example, | ||||
|                 ``insert(..., {w: 2, fsync: True})`` will wait until at least | ||||
|                 two servers have recorded the write and will force an fsync on | ||||
|                 each server being written to. | ||||
|         :param write_concern: Optional keyword argument passed down to | ||||
|                 :meth:`~pymongo.collection.Collection.insert`, representing | ||||
|                 the write concern. For example, | ||||
|                 ``insert(..., write_concern={w: 2, fsync: True})`` will | ||||
|                 wait until at least two servers have recorded the write | ||||
|                 and will force an fsync on each server being written to. | ||||
|         :param signal_kwargs: (optional) kwargs dictionary to be passed to | ||||
|             the signal calls. | ||||
|         :param continue_on_error: Optional keyword argument passed down to | ||||
|                 :meth:`~pymongo.collection.Collection.insert`. Defines what | ||||
|                 to do when a document cannot be inserted (e.g. due to | ||||
|                 duplicate IDs). Read PyMongo's docs for more info. | ||||
|  | ||||
|         By default returns document instances, set ``load_bulk`` to False to | ||||
|         return just ``ObjectIds`` | ||||
| @@ -323,12 +325,10 @@ class BaseQuerySet(object): | ||||
|         """ | ||||
|         Document = _import_class('Document') | ||||
|  | ||||
|         if write_concern is None: | ||||
|             write_concern = {} | ||||
|  | ||||
|         # Determine if we're inserting one doc or more | ||||
|         docs = doc_or_docs | ||||
|         return_one = False | ||||
|         if isinstance(docs, Document) or issubclass(docs.__class__, Document): | ||||
|         if isinstance(docs, Document): | ||||
|             return_one = True | ||||
|             docs = [docs] | ||||
|  | ||||
| @@ -345,9 +345,16 @@ class BaseQuerySet(object): | ||||
|         signals.pre_bulk_insert.send(self._document, | ||||
|                                      documents=docs, **signal_kwargs) | ||||
|  | ||||
|         # Resolve optional insert kwargs | ||||
|         insert_kwargs = {} | ||||
|         if write_concern is not None: | ||||
|             insert_kwargs.update(write_concern) | ||||
|         if continue_on_error is not None: | ||||
|             insert_kwargs['continue_on_error'] = continue_on_error | ||||
|  | ||||
|         raw = [doc.to_mongo() for doc in docs] | ||||
|         try: | ||||
|             ids = self._collection.insert(raw, **write_concern) | ||||
|             ids = self._collection.insert(raw, **insert_kwargs) | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             message = 'Could not save document (%s)' | ||||
|             raise NotUniqueError(message % six.text_type(err)) | ||||
| @@ -707,36 +714,39 @@ class BaseQuerySet(object): | ||||
|         with switch_db(self._document, alias) as cls: | ||||
|             collection = cls._get_collection() | ||||
|  | ||||
|         return self._clone_into(self.__class__(self._document, collection)) | ||||
|         return self.clone_into(self.__class__(self._document, collection)) | ||||
|  | ||||
|     def clone(self): | ||||
|         """Create a copy of the current queryset.""" | ||||
|         return self._clone_into(self.__class__(self._document, self._collection_obj)) | ||||
|         """Creates a copy of the current | ||||
|           :class:`~mongoengine.queryset.QuerySet` | ||||
|  | ||||
|     def _clone_into(self, new_qs): | ||||
|         """Copy all of the relevant properties of this queryset to | ||||
|         a new queryset (which has to be an instance of | ||||
|         :class:`~mongoengine.queryset.base.BaseQuerySet`). | ||||
|         .. versionadded:: 0.5 | ||||
|         """ | ||||
|         if not isinstance(new_qs, BaseQuerySet): | ||||
|         return self.clone_into(self.__class__(self._document, self._collection_obj)) | ||||
|  | ||||
|     def clone_into(self, cls): | ||||
|         """Creates a copy of the current | ||||
|           :class:`~mongoengine.queryset.base.BaseQuerySet` into another child class | ||||
|         """ | ||||
|         if not isinstance(cls, BaseQuerySet): | ||||
|             raise OperationError( | ||||
|                 '%s is not a subclass of BaseQuerySet' % new_qs.__name__) | ||||
|                 '%s is not a subclass of BaseQuerySet' % cls.__name__) | ||||
|  | ||||
|         copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', | ||||
|                       '_where_clause', '_loaded_fields', '_ordering', '_snapshot', | ||||
|                       '_timeout', '_class_check', '_slave_okay', '_read_preference', | ||||
|                       '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', | ||||
|                       '_limit', '_skip', '_hint', '_auto_dereference', | ||||
|                       '_search_text', 'only_fields', '_max_time_ms', '_comment') | ||||
|                       '_search_text', 'only_fields', '_max_time_ms') | ||||
|  | ||||
|         for prop in copy_props: | ||||
|             val = getattr(self, prop) | ||||
|             setattr(new_qs, prop, copy.copy(val)) | ||||
|             setattr(cls, prop, copy.copy(val)) | ||||
|  | ||||
|         if self._cursor_obj: | ||||
|             new_qs._cursor_obj = self._cursor_obj.clone() | ||||
|             cls._cursor_obj = self._cursor_obj.clone() | ||||
|  | ||||
|         return new_qs | ||||
|         return cls | ||||
|  | ||||
|     def select_related(self, max_depth=1): | ||||
|         """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or | ||||
| @@ -758,11 +768,7 @@ class BaseQuerySet(object): | ||||
|         """ | ||||
|         queryset = self.clone() | ||||
|         queryset._limit = n if n != 0 else 1 | ||||
|  | ||||
|         # If a cursor object has already been created, apply the limit to it. | ||||
|         if queryset._cursor_obj: | ||||
|             queryset._cursor_obj.limit(queryset._limit) | ||||
|  | ||||
|         # Return self to allow chaining | ||||
|         return queryset | ||||
|  | ||||
|     def skip(self, n): | ||||
| @@ -773,11 +779,6 @@ class BaseQuerySet(object): | ||||
|         """ | ||||
|         queryset = self.clone() | ||||
|         queryset._skip = n | ||||
|  | ||||
|         # If a cursor object has already been created, apply the skip to it. | ||||
|         if queryset._cursor_obj: | ||||
|             queryset._cursor_obj.skip(queryset._skip) | ||||
|  | ||||
|         return queryset | ||||
|  | ||||
|     def hint(self, index=None): | ||||
| @@ -795,11 +796,6 @@ class BaseQuerySet(object): | ||||
|         """ | ||||
|         queryset = self.clone() | ||||
|         queryset._hint = index | ||||
|  | ||||
|         # If a cursor object has already been created, apply the hint to it. | ||||
|         if queryset._cursor_obj: | ||||
|             queryset._cursor_obj.hint(queryset._hint) | ||||
|  | ||||
|         return queryset | ||||
|  | ||||
|     def batch_size(self, size): | ||||
| @@ -813,11 +809,6 @@ class BaseQuerySet(object): | ||||
|         """ | ||||
|         queryset = self.clone() | ||||
|         queryset._batch_size = size | ||||
|  | ||||
|         # If a cursor object has already been created, apply the batch size to it. | ||||
|         if queryset._cursor_obj: | ||||
|             queryset._cursor_obj.batch_size(queryset._batch_size) | ||||
|  | ||||
|         return queryset | ||||
|  | ||||
|     def distinct(self, field): | ||||
| @@ -989,31 +980,13 @@ class BaseQuerySet(object): | ||||
|     def order_by(self, *keys): | ||||
|         """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The | ||||
|         order may be specified by prepending each of the keys by a + or a -. | ||||
|         Ascending order is assumed. If no keys are passed, existing ordering | ||||
|         is cleared instead. | ||||
|         Ascending order is assumed. | ||||
|  | ||||
|         :param keys: fields to order the query results by; keys may be | ||||
|             prefixed with **+** or **-** to determine the ordering direction | ||||
|         """ | ||||
|         queryset = self.clone() | ||||
|  | ||||
|         old_ordering = queryset._ordering | ||||
|         new_ordering = queryset._get_order_by(keys) | ||||
|  | ||||
|         if queryset._cursor_obj: | ||||
|  | ||||
|             # If a cursor object has already been created, apply the sort to it | ||||
|             if new_ordering: | ||||
|                 queryset._cursor_obj.sort(new_ordering) | ||||
|  | ||||
|             # If we're trying to clear a previous explicit ordering, we need | ||||
|             # to clear the cursor entirely (because PyMongo doesn't allow | ||||
|             # clearing an existing sort on a cursor). | ||||
|             elif old_ordering: | ||||
|                 queryset._cursor_obj = None | ||||
|  | ||||
|         queryset._ordering = new_ordering | ||||
|  | ||||
|         queryset._ordering = queryset._get_order_by(keys) | ||||
|         return queryset | ||||
|  | ||||
|     def comment(self, text): | ||||
| @@ -1459,13 +1432,10 @@ class BaseQuerySet(object): | ||||
|             raise StopIteration | ||||
|  | ||||
|         raw_doc = self._cursor.next() | ||||
|  | ||||
|         if self._as_pymongo: | ||||
|             return self._get_as_pymongo(raw_doc) | ||||
|  | ||||
|         doc = self._document._from_son( | ||||
|             raw_doc, _auto_dereference=self._auto_dereference, | ||||
|             only_fields=self.only_fields) | ||||
|         doc = self._document._from_son(raw_doc, | ||||
|                                        _auto_dereference=self._auto_dereference, only_fields=self.only_fields) | ||||
|  | ||||
|         if self._scalar: | ||||
|             return self._get_scalar(doc) | ||||
| @@ -1475,6 +1445,7 @@ class BaseQuerySet(object): | ||||
|     def rewind(self): | ||||
|         """Rewind the cursor to its unevaluated state. | ||||
|  | ||||
|  | ||||
|         .. versionadded:: 0.3 | ||||
|         """ | ||||
|         self._iter = False | ||||
| @@ -1524,54 +1495,43 @@ class BaseQuerySet(object): | ||||
|  | ||||
|     @property | ||||
|     def _cursor(self): | ||||
|         """Return a PyMongo cursor object corresponding to this queryset.""" | ||||
|         if self._cursor_obj is None: | ||||
|  | ||||
|         # If _cursor_obj already exists, return it immediately. | ||||
|         if self._cursor_obj is not None: | ||||
|             return self._cursor_obj | ||||
|             # In PyMongo 3+, we define the read preference on a collection | ||||
|             # level, not a cursor level. Thus, we need to get a cloned | ||||
|             # collection object using `with_options` first. | ||||
|             if IS_PYMONGO_3 and self._read_preference is not None: | ||||
|                 self._cursor_obj = self._collection\ | ||||
|                     .with_options(read_preference=self._read_preference)\ | ||||
|                     .find(self._query, **self._cursor_args) | ||||
|             else: | ||||
|                 self._cursor_obj = self._collection.find(self._query, | ||||
|                                                          **self._cursor_args) | ||||
|             # Apply where clauses to cursor | ||||
|             if self._where_clause: | ||||
|                 where_clause = self._sub_js_fields(self._where_clause) | ||||
|                 self._cursor_obj.where(where_clause) | ||||
|  | ||||
|         # Create a new PyMongo cursor. | ||||
|         # XXX In PyMongo 3+, we define the read preference on a collection | ||||
|         # level, not a cursor level. Thus, we need to get a cloned collection | ||||
|         # object using `with_options` first. | ||||
|         if IS_PYMONGO_3 and self._read_preference is not None: | ||||
|             self._cursor_obj = self._collection\ | ||||
|                 .with_options(read_preference=self._read_preference)\ | ||||
|                 .find(self._query, **self._cursor_args) | ||||
|         else: | ||||
|             self._cursor_obj = self._collection.find(self._query, | ||||
|                                                      **self._cursor_args) | ||||
|         # Apply "where" clauses to cursor | ||||
|         if self._where_clause: | ||||
|             where_clause = self._sub_js_fields(self._where_clause) | ||||
|             self._cursor_obj.where(where_clause) | ||||
|             if self._ordering: | ||||
|                 # Apply query ordering | ||||
|                 self._cursor_obj.sort(self._ordering) | ||||
|             elif self._ordering is None and self._document._meta['ordering']: | ||||
|                 # Otherwise, apply the ordering from the document model, unless | ||||
|                 # it's been explicitly cleared via order_by with no arguments | ||||
|                 order = self._get_order_by(self._document._meta['ordering']) | ||||
|                 self._cursor_obj.sort(order) | ||||
|  | ||||
|         # Apply ordering to the cursor. | ||||
|         # XXX self._ordering can be equal to: | ||||
|         # * None if we didn't explicitly call order_by on this queryset. | ||||
|         # * A list of PyMongo-style sorting tuples. | ||||
|         # * An empty list if we explicitly called order_by() without any | ||||
|         #   arguments. This indicates that we want to clear the default | ||||
|         #   ordering. | ||||
|         if self._ordering: | ||||
|             # explicit ordering | ||||
|             self._cursor_obj.sort(self._ordering) | ||||
|         elif self._ordering is None and self._document._meta['ordering']: | ||||
|             # default ordering | ||||
|             order = self._get_order_by(self._document._meta['ordering']) | ||||
|             self._cursor_obj.sort(order) | ||||
|             if self._limit is not None: | ||||
|                 self._cursor_obj.limit(self._limit) | ||||
|  | ||||
|         if self._limit is not None: | ||||
|             self._cursor_obj.limit(self._limit) | ||||
|             if self._skip is not None: | ||||
|                 self._cursor_obj.skip(self._skip) | ||||
|  | ||||
|         if self._skip is not None: | ||||
|             self._cursor_obj.skip(self._skip) | ||||
|             if self._hint != -1: | ||||
|                 self._cursor_obj.hint(self._hint) | ||||
|  | ||||
|         if self._hint != -1: | ||||
|             self._cursor_obj.hint(self._hint) | ||||
|  | ||||
|         if self._batch_size is not None: | ||||
|             self._cursor_obj.batch_size(self._batch_size) | ||||
|             if self._batch_size is not None: | ||||
|                 self._cursor_obj.batch_size(self._batch_size) | ||||
|  | ||||
|         return self._cursor_obj | ||||
|  | ||||
| @@ -1746,13 +1706,7 @@ class BaseQuerySet(object): | ||||
|         return ret | ||||
|  | ||||
|     def _get_order_by(self, keys): | ||||
|         """Given a list of MongoEngine-style sort keys, return a list | ||||
|         of sorting tuples that can be applied to a PyMongo cursor. For | ||||
|         example: | ||||
|  | ||||
|         >>> qs._get_order_by(['-last_name', 'first_name']) | ||||
|         [('last_name', -1), ('first_name', 1)] | ||||
|         """ | ||||
|         """Creates a list of order by fields""" | ||||
|         key_list = [] | ||||
|         for key in keys: | ||||
|             if not key: | ||||
| @@ -1765,19 +1719,17 @@ class BaseQuerySet(object): | ||||
|             direction = pymongo.ASCENDING | ||||
|             if key[0] == '-': | ||||
|                 direction = pymongo.DESCENDING | ||||
|  | ||||
|             if key[0] in ('-', '+'): | ||||
|                 key = key[1:] | ||||
|  | ||||
|             key = key.replace('__', '.') | ||||
|             try: | ||||
|                 key = self._document._translate_field_name(key) | ||||
|             except Exception: | ||||
|                 # TODO this exception should be more specific | ||||
|                 pass | ||||
|  | ||||
|             key_list.append((key, direction)) | ||||
|  | ||||
|         if self._cursor_obj and key_list: | ||||
|             self._cursor_obj.sort(key_list) | ||||
|         return key_list | ||||
|  | ||||
|     def _get_scalar(self, doc): | ||||
| @@ -1875,21 +1827,10 @@ class BaseQuerySet(object): | ||||
|         return code | ||||
|  | ||||
|     def _chainable_method(self, method_name, val): | ||||
|         """Call a particular method on the PyMongo cursor call a particular chainable method | ||||
|         with the provided value. | ||||
|         """ | ||||
|         queryset = self.clone() | ||||
|  | ||||
|         # Get an existing cursor object or create a new one | ||||
|         cursor = queryset._cursor | ||||
|  | ||||
|         # Find the requested method on the cursor and call it with the | ||||
|         # provided value | ||||
|         getattr(cursor, method_name)(val) | ||||
|  | ||||
|         # Cache the value on the queryset._{method_name} | ||||
|         method = getattr(queryset._cursor, method_name) | ||||
|         method(val) | ||||
|         setattr(queryset, '_' + method_name, val) | ||||
|  | ||||
|         return queryset | ||||
|  | ||||
|     # Deprecated | ||||
|   | ||||
| @@ -136,15 +136,13 @@ class QuerySet(BaseQuerySet): | ||||
|         return self._len | ||||
|  | ||||
|     def no_cache(self): | ||||
|         """Convert to a non-caching queryset | ||||
|         """Convert to a non_caching queryset | ||||
|  | ||||
|         .. versionadded:: 0.8.3 Convert to non caching queryset | ||||
|         """ | ||||
|         if self._result_cache is not None: | ||||
|             raise OperationError('QuerySet already cached') | ||||
|  | ||||
|         return self._clone_into(QuerySetNoCache(self._document, | ||||
|                                                 self._collection)) | ||||
|         return self.clone_into(QuerySetNoCache(self._document, self._collection)) | ||||
|  | ||||
|  | ||||
| class QuerySetNoCache(BaseQuerySet): | ||||
| @@ -155,7 +153,7 @@ class QuerySetNoCache(BaseQuerySet): | ||||
|  | ||||
|         .. versionadded:: 0.8.3 Convert to caching queryset | ||||
|         """ | ||||
|         return self._clone_into(QuerySet(self._document, self._collection)) | ||||
|         return self.clone_into(QuerySet(self._document, self._collection)) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         """Provides the string representation of the QuerySet | ||||
|   | ||||
| @@ -106,111 +106,58 @@ class QuerySetTest(unittest.TestCase): | ||||
|             list(BlogPost.objects(author2__name="test")) | ||||
|  | ||||
|     def test_find(self): | ||||
|         """Ensure that a query returns a valid set of results.""" | ||||
|         user_a = self.Person.objects.create(name='User A', age=20) | ||||
|         user_b = self.Person.objects.create(name='User B', age=30) | ||||
|         """Ensure that a query returns a valid set of results. | ||||
|         """ | ||||
|         self.Person(name="User A", age=20).save() | ||||
|         self.Person(name="User B", age=30).save() | ||||
|  | ||||
|         # Find all people in the collection | ||||
|         people = self.Person.objects | ||||
|         self.assertEqual(people.count(), 2) | ||||
|         results = list(people) | ||||
|  | ||||
|         self.assertTrue(isinstance(results[0], self.Person)) | ||||
|         self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode))) | ||||
|  | ||||
|         self.assertEqual(results[0], user_a) | ||||
|         self.assertEqual(results[0].name, 'User A') | ||||
|         self.assertEqual(results[0].name, "User A") | ||||
|         self.assertEqual(results[0].age, 20) | ||||
|  | ||||
|         self.assertEqual(results[1], user_b) | ||||
|         self.assertEqual(results[1].name, 'User B') | ||||
|         self.assertEqual(results[1].name, "User B") | ||||
|         self.assertEqual(results[1].age, 30) | ||||
|  | ||||
|         # Filter people by age | ||||
|         # Use a query to filter the people found to just person1 | ||||
|         people = self.Person.objects(age=20) | ||||
|         self.assertEqual(people.count(), 1) | ||||
|         person = people.next() | ||||
|         self.assertEqual(person, user_a) | ||||
|         self.assertEqual(person.name, "User A") | ||||
|         self.assertEqual(person.age, 20) | ||||
|  | ||||
|     def test_limit(self): | ||||
|         """Ensure that QuerySet.limit works as expected.""" | ||||
|         user_a = self.Person.objects.create(name='User A', age=20) | ||||
|         user_b = self.Person.objects.create(name='User B', age=30) | ||||
|  | ||||
|         # Test limit on a new queryset | ||||
|         # Test limit | ||||
|         people = list(self.Person.objects.limit(1)) | ||||
|         self.assertEqual(len(people), 1) | ||||
|         self.assertEqual(people[0], user_a) | ||||
|         self.assertEqual(people[0].name, 'User A') | ||||
|  | ||||
|         # Test limit on an existing queryset | ||||
|         people = self.Person.objects | ||||
|         self.assertEqual(len(people), 2) | ||||
|         people2 = people.limit(1) | ||||
|         self.assertEqual(len(people), 2) | ||||
|         self.assertEqual(len(people2), 1) | ||||
|         self.assertEqual(people2[0], user_a) | ||||
|  | ||||
|         # Test chaining of only after limit | ||||
|         person = self.Person.objects().limit(1).only('name').first() | ||||
|         self.assertEqual(person, user_a) | ||||
|         self.assertEqual(person.name, 'User A') | ||||
|         self.assertEqual(person.age, None) | ||||
|  | ||||
|     def test_skip(self): | ||||
|         """Ensure that QuerySet.skip works as expected.""" | ||||
|         user_a = self.Person.objects.create(name='User A', age=20) | ||||
|         user_b = self.Person.objects.create(name='User B', age=30) | ||||
|  | ||||
|         # Test skip on a new queryset | ||||
|         # Test skip | ||||
|         people = list(self.Person.objects.skip(1)) | ||||
|         self.assertEqual(len(people), 1) | ||||
|         self.assertEqual(people[0], user_b) | ||||
|         self.assertEqual(people[0].name, 'User B') | ||||
|  | ||||
|         # Test skip on an existing queryset | ||||
|         people = self.Person.objects | ||||
|         self.assertEqual(len(people), 2) | ||||
|         people2 = people.skip(1) | ||||
|         self.assertEqual(len(people), 2) | ||||
|         self.assertEqual(len(people2), 1) | ||||
|         self.assertEqual(people2[0], user_b) | ||||
|  | ||||
|         # Test chaining of only after skip | ||||
|         person = self.Person.objects().skip(1).only('name').first() | ||||
|         self.assertEqual(person, user_b) | ||||
|         self.assertEqual(person.name, 'User B') | ||||
|         self.assertEqual(person.age, None) | ||||
|  | ||||
|     def test_slice(self): | ||||
|         """Ensure slicing a queryset works as expected.""" | ||||
|         user_a = self.Person.objects.create(name='User A', age=20) | ||||
|         user_b = self.Person.objects.create(name='User B', age=30) | ||||
|         user_c = self.Person.objects.create(name="User C", age=40) | ||||
|         person3 = self.Person(name="User C", age=40) | ||||
|         person3.save() | ||||
|  | ||||
|         # Test slice limit | ||||
|         people = list(self.Person.objects[:2]) | ||||
|         self.assertEqual(len(people), 2) | ||||
|         self.assertEqual(people[0], user_a) | ||||
|         self.assertEqual(people[1], user_b) | ||||
|         self.assertEqual(people[0].name, 'User A') | ||||
|         self.assertEqual(people[1].name, 'User B') | ||||
|  | ||||
|         # Test slice skip | ||||
|         people = list(self.Person.objects[1:]) | ||||
|         self.assertEqual(len(people), 2) | ||||
|         self.assertEqual(people[0], user_b) | ||||
|         self.assertEqual(people[1], user_c) | ||||
|         self.assertEqual(people[0].name, 'User B') | ||||
|         self.assertEqual(people[1].name, 'User C') | ||||
|  | ||||
|         # Test slice limit and skip | ||||
|         people = list(self.Person.objects[1:2]) | ||||
|         self.assertEqual(len(people), 1) | ||||
|         self.assertEqual(people[0], user_b) | ||||
|  | ||||
|         # Test slice limit and skip on an existing queryset | ||||
|         people = self.Person.objects | ||||
|         self.assertEqual(len(people), 3) | ||||
|         people2 = people[1:2] | ||||
|         self.assertEqual(len(people2), 1) | ||||
|         self.assertEqual(people2[0], user_b) | ||||
|         self.assertEqual(people[0].name, 'User B') | ||||
|  | ||||
|         # Test slice limit and skip cursor reset | ||||
|         qs = self.Person.objects[1:2] | ||||
| @@ -221,7 +168,6 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertEqual(len(people), 1) | ||||
|         self.assertEqual(people[0].name, 'User B') | ||||
|  | ||||
|         # Test empty slice | ||||
|         people = list(self.Person.objects[1:1]) | ||||
|         self.assertEqual(len(people), 0) | ||||
|  | ||||
| @@ -241,6 +187,12 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertEqual("[<Person: Person object>, <Person: Person object>]", | ||||
|                          "%s" % self.Person.objects[51:53]) | ||||
|  | ||||
|         # Test only after limit | ||||
|         self.assertEqual(self.Person.objects().limit(2).only('name')[0].age, None) | ||||
|  | ||||
|         # Test only after skip | ||||
|         self.assertEqual(self.Person.objects().skip(2).only('name')[0].age, None) | ||||
|  | ||||
|     def test_find_one(self): | ||||
|         """Ensure that a query using find_one returns a valid result. | ||||
|         """ | ||||
| @@ -814,8 +766,7 @@ class QuerySetTest(unittest.TestCase): | ||||
|         self.assertEqual(record.embed.field, 2) | ||||
|  | ||||
|     def test_bulk_insert(self): | ||||
|         """Ensure that bulk insert works | ||||
|         """ | ||||
|         """Ensure that bulk insert works.""" | ||||
|  | ||||
|         class Comment(EmbeddedDocument): | ||||
|             name = StringField() | ||||
| @@ -933,9 +884,37 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         self.assertEqual(Blog.objects.count(), 2) | ||||
|  | ||||
|         Blog.objects.insert([blog2, blog3], | ||||
|                             write_concern={"w": 0, 'continue_on_error': True}) | ||||
|         self.assertEqual(Blog.objects.count(), 3) | ||||
|     def test_bulk_insert_continue_on_error(self): | ||||
|         """Ensure that bulk insert works with the continue_on_error option.""" | ||||
|  | ||||
|         class Person(Document): | ||||
|             email = EmailField(unique=True) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         Person.objects.insert([ | ||||
|             Person(email='alice@example.com'), | ||||
|             Person(email='bob@example.com') | ||||
|         ]) | ||||
|         self.assertEqual(Person.objects.count(), 2) | ||||
|  | ||||
|         new_docs = [ | ||||
|             Person(email='alice@example.com'),  # dupe | ||||
|             Person(email='bob@example.com'),  # dupe | ||||
|             Person(email='steve@example.com')  # new one | ||||
|         ] | ||||
|  | ||||
|         # By default inserting dupe docs should fail and no new docs should | ||||
|         # be inserted. | ||||
|         with self.assertRaises(NotUniqueError): | ||||
|             Person.objects.insert(new_docs) | ||||
|         self.assertEqual(Person.objects.count(), 2) | ||||
|  | ||||
|         # With continue_on_error, new doc should be inserted, even though we | ||||
|         # still get a NotUniqueError caused by the other 2 dupes. | ||||
|         with self.assertRaises(NotUniqueError): | ||||
|             Person.objects.insert(new_docs, continue_on_error=True) | ||||
|         self.assertEqual(Person.objects.count(), 3) | ||||
|  | ||||
|     def test_get_changed_fields_query_count(self): | ||||
|  | ||||
| @@ -1274,7 +1253,6 @@ class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         # default ordering should be used by default | ||||
|         with db_ops_tracker() as q: | ||||
|             BlogPost.objects.filter(title='whatever').first() | ||||
|             self.assertEqual(len(q.get_ops()), 1) | ||||
| @@ -1283,28 +1261,11 @@ class QuerySetTest(unittest.TestCase): | ||||
|                 {'published_date': -1} | ||||
|             ) | ||||
|  | ||||
|         # calling order_by() should clear the default ordering | ||||
|         with db_ops_tracker() as q: | ||||
|             BlogPost.objects.filter(title='whatever').order_by().first() | ||||
|             self.assertEqual(len(q.get_ops()), 1) | ||||
|             self.assertFalse('$orderby' in q.get_ops()[0]['query']) | ||||
|  | ||||
|         # calling an explicit order_by should use a specified sort | ||||
|         with db_ops_tracker() as q: | ||||
|             BlogPost.objects.filter(title='whatever').order_by('published_date').first() | ||||
|             self.assertEqual(len(q.get_ops()), 1) | ||||
|             self.assertEqual( | ||||
|                 q.get_ops()[0]['query']['$orderby'], | ||||
|                 {'published_date': 1} | ||||
|             ) | ||||
|  | ||||
|         # calling order_by() after an explicit sort should clear it | ||||
|         with db_ops_tracker() as q: | ||||
|             qs = BlogPost.objects.filter(title='whatever').order_by('published_date') | ||||
|             qs.order_by().first() | ||||
|             self.assertEqual(len(q.get_ops()), 1) | ||||
|             self.assertFalse('$orderby' in q.get_ops()[0]['query']) | ||||
|  | ||||
|     def test_no_ordering_for_get(self): | ||||
|         """ Ensure that Doc.objects.get doesn't use any ordering. | ||||
|         """ | ||||
|   | ||||
		Reference in New Issue
	
	Block a user