Merge conflicts resolved.
.travis.yml (10 changes)
@@ -9,13 +9,11 @@ python:
    - "3.3"
env:
  - PYMONGO=dev
  - PYMONGO=2.4.1
  - PYMONGO=2.3
  - PYMONGO=2.5
  - PYMONGO=2.4.2
install:
    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi
    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi
    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi
    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi
    - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
    - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
    - python setup.py install

AUTHORS (4 changes)
@@ -128,3 +128,7 @@ that much better:
 * Peter Teichman
 * Jakub Kot
 * Jorge Bastida
 * Aleksandr Sorokoumov
 * Yohan Graterol
 * bool-dev
 * Russ Weeks
@@ -2,6 +2,16 @@
Changelog
=========

Changes in 0.7.10
=================
- Fixed order_by chaining issue (#265)
- Added dereference support for tuples (#250)
- Resolve field name to db field name when using distinct(#260, #264, #269)
- Added kwargs to doc.save to help interop with django (#223, #270)
- Fixed cloning querysets in PY3
- Int fields no longer unset in save when changed to 0 (#272)
- Fixed ReferenceField query chaining bug fixed (#254)

Changes in 0.7.9
================
- Better fix handling for old style _types

@@ -1207,7 +1207,7 @@ class BaseDocument(object):

        # Determine if any changed items were actually unset.
        for path, value in set_data.items():
            if value or isinstance(value, bool):
            if value or isinstance(value, (bool, int)):
                continue

            # If we've set a value that ain't the default value dont unset it.

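A minimal sketch (not part of the commit) of the behaviour this isinstance change affects, per the changelog entry "Int fields no longer unset in save when changed to 0 (#272)". It assumes a running local MongoDB; the Counter document and database name are hypothetical:

    from mongoengine import Document, IntField, connect

    connect('example_db')  # hypothetical database name

    class Counter(Document):
        count = IntField()

    Counter.drop_collection()
    c = Counter(count=5).save()
    c.count = 0
    c.save()  # 0 is falsy; without the (bool, int) check the field was $unset here
    assert Counter.objects.get(id=c.id).count == 0
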
@@ -171,6 +171,7 @@ class DeReference(object):

        if not hasattr(items, 'items'):
            is_list = True
            as_tuple = isinstance(items, tuple)
            iterator = enumerate(items)
            data = []
        else:
@@ -205,7 +206,7 @@ class DeReference(object):

        if instance and name:
            if is_list:
                return BaseList(data, instance, name)
                return tuple(data) if as_tuple else BaseList(data, instance, name)
            return BaseDict(data, instance, name)
        depth += 1
        return data

@@ -164,7 +164,7 @@ class Document(BaseDocument):

    def save(self, safe=True, force_insert=False, validate=True,
             write_options=None,  cascade=None, cascade_kwargs=None,
             _refs=None):
             _refs=None, **kwargs):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.

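A small sketch (not part of the commit) of what the added **kwargs enables, per the changelog entry on Django interop (#223, #270): extra keyword arguments are now accepted by the save() signature instead of raising TypeError. The 'using' keyword below is a hypothetical Django-style argument; this hunk only shows that it is accepted, not how it is consumed further down:

    from mongoengine import Document, StringField, connect

    connect('example_db')  # hypothetical database name

    class Person(Document):
        name = StringField()

    p = Person(name="Ross")
    # Previously: TypeError: save() got an unexpected keyword argument 'using'
    p.save(using='default')
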
@@ -367,6 +367,10 @@ class QuerySet(object):
        self._skip = None
        self._hint = -1  # Using -1 as None is a valid value for hint

    def __deepcopy__(self, memo):
        """Essential for chained queries with ReferenceFields involved"""
        return self.clone()

    def clone(self):
        """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet`

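Sketch (not part of the commit) of the effect of the new __deepcopy__ hook, which the docstring says is needed for chained queries involving ReferenceFields: deep-copying a QuerySet now delegates to clone(). The Author/Book documents are hypothetical and a configured connection is assumed:

    import copy

    from mongoengine import Document, ReferenceField, StringField, connect

    connect('example_db')  # hypothetical database name

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = ReferenceField(Author)

    # chained ReferenceField queries exercise this path (see test_chaining below)
    qs = Book.objects(author__in=Author.objects(name='Ross'))
    qs_copy = copy.deepcopy(qs)  # now routed through QuerySet.clone()
    assert qs_copy is not qs
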
@@ -375,8 +379,8 @@ class QuerySet(object):
        c = self.__class__(self._document, self._collection_obj)

        copy_props = ('_initial_query', '_query_obj', '_where_clause',
                    '_loaded_fields', '_ordering', '_snapshot',
                    '_timeout', '_limit', '_skip', '_slave_okay', '_hint')
                      '_loaded_fields', '_ordering', '_snapshot', '_timeout',
                      '_limit', '_skip', '_slave_okay', '_hint')

        for prop in copy_props:
            val = getattr(self, prop)
@@ -389,11 +393,6 @@ class QuerySet(object):
        if self._mongo_query is None:
            self._mongo_query = self._query_obj.to_query(self._document)
            if self._class_check:
                if PY3:
                    query = SON(self._initial_query.items())
                    query.update(self._mongo_query)
                    self._mongo_query = query
                else:
                self._mongo_query.update(self._initial_query)
        return self._mongo_query

@@ -814,7 +813,6 @@ class QuerySet(object):
                    mongo_query['$and'].append(value)
                else:
                    mongo_query['$and'] = value

        return mongo_query

    def get(self, *q_objs, **query):
@@ -1214,10 +1212,18 @@ class QuerySet(object):
        .. versionchanged:: 0.5 - Fixed handling references
        .. versionchanged:: 0.6 - Improved db_field refrence handling
        """
<<<<<<< HEAD
        field = [field]
        field = self._fields_to_dbfields(field).pop()
        return self._dereference(self._cursor.distinct(field), 1,
                                 name=field, instance=self._document)
=======
        try:
            field = self._fields_to_dbfields([field]).pop()
        finally:
            return self._dereference(self._cursor.distinct(field), 1,
                                     name=field, instance=self._document)
>>>>>>> upstream/master

    def only(self, *fields):
        """Load only a subset of this document's fields. ::
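Both sides of this conflict aim at the same behaviour described in the changelog ("Resolve field name to db field name when using distinct"). A minimal sketch (not part of the commit), assuming a configured connection; it mirrors the Product test further down:

    from mongoengine import Document, IntField, connect

    connect('example_db')  # hypothetical database name

    class Product(Document):
        product_id = IntField(db_field='pid')

    Product.drop_collection()
    Product(product_id=1).save()
    Product(product_id=2).save()
    Product(product_id=1).save()

    # 'product_id' is translated to the stored 'pid' key before distinct runs
    assert set(Product.objects.distinct('product_id')) == set([1, 2])
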
@@ -1322,7 +1328,8 @@ class QuerySet(object):
            key_list.append((key, direction))

        self._ordering = key_list

        if self._cursor_obj:
            self._cursor_obj.sort(key_list)
        return self

    def explain(self, format=False):

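Sketch (not part of the commit) of the order_by chaining fix (#265) this hunk addresses: when a cursor already exists (e.g. after limit() or skip()), the new sort is re-applied to it instead of being lost. Person here is a hypothetical document and a configured connection is assumed; the queryset tests below cover the same cases:

    from mongoengine import Document, IntField, StringField, connect

    connect('example_db')  # hypothetical database name

    class Person(Document):
        name = StringField()
        age = IntField()

    Person.drop_collection()
    Person(name="User B", age=40).save()
    Person(name="User A", age=20).save()
    Person(name="User C", age=30).save()

    qs = Person.objects.limit(10)  # touching limit() can create the cursor early
    qs = qs.order_by('-age')       # the sort is now pushed onto the existing cursor
    assert [p.age for p in qs] == [40, 30, 20]
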
setup.py (2 changes)
@@ -58,7 +58,7 @@ if sys.version_info[0] == 3:
        extra_opts['packages'].append("tests")
        extra_opts['package_data'] = {"tests": ["mongoengine.png"]}
else:
    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django==1.4.2', 'PIL']
    extra_opts['packages'] = find_packages(exclude=('tests',))

setup(name='mongoengine',

@@ -76,7 +76,7 @@ class TestWarnings(unittest.TestCase):
        p2.parent.name = "Poppa Wilson"
        p2.save()

        self.assertEqual(len(self.warning_list), 1)
        self.assertTrue(len(self.warning_list) > 0)
        if len(self.warning_list) > 1:
            print self.warning_list
        warning = self.warning_list[0]

@@ -997,3 +997,34 @@ class FieldTest(unittest.TestCase):
        msg = Message.objects.get(id=1)
        self.assertEqual(0, msg.comments[0].id)
        self.assertEqual(1, msg.comments[1].id)

    def test_tuples_as_tuples(self):
        """
        Ensure that tuples remain tuples when they are
        inside a ComplexBaseField
        """
        from mongoengine.base import BaseField
        class EnumField(BaseField):
            def __init__(self, **kwargs):
                super(EnumField,self).__init__(**kwargs)

            def to_mongo(self, value):
                return value

            def to_python(self, value):
                return tuple(value)

        class TestDoc(Document):
            items = ListField(EnumField())

        TestDoc.drop_collection()
        tuples = [(100,'Testing')]
        doc = TestDoc()
        doc.items = tuples
        doc.save()
        x = TestDoc.objects().get()
        self.assertTrue(x is not None)
        self.assertTrue(len(x.items) == 1)
        self.assertTrue(tuple(x.items[0]) in tuples)
        self.assertTrue(x.items[0] in tuples)


@@ -232,28 +232,33 @@ class QuerySetTest(unittest.TestCase):

    def test_chaining(self):
        class A(Document):
            pass
            s = StringField()

        class B(Document):
            a = ReferenceField(A)
            ref = ReferenceField(A)
            boolfield = BooleanField(default=False)

        A.drop_collection()
        B.drop_collection()

        a1 = A().save()
        a2 = A().save()
        a1 = A(s="test1").save()
        a2 = A(s="test2").save()

        B(a=a1).save()
        B(ref=a1, boolfield=True).save()

        # Works
        q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query
        q1 = B.objects.filter(ref__in=[a1, a2], ref=a1)._query

        # Doesn't work
        q2 = B.objects.filter(a__in=[a1, a2])
        q2 = q2.filter(a=a1)._query

        q2 = B.objects.filter(ref__in=[a1, a2])
        q2 = q2.filter(ref=a1)._query
        self.assertEqual(q1, q2)

        a_objects = A.objects(s='test1')
        query = B.objects(ref__in=a_objects)
        query = query.filter(boolfield=True)
        self.assertEquals(query.count(), 1)

    def test_update_write_options(self):
        """Test that passing write_options works"""

@@ -952,6 +957,11 @@ class QuerySetTest(unittest.TestCase):
                             {'attachments.views.extracted': 'no'}]}
        self.assertEqual(expected, raw_query)

    def assertSequence(self, qs, expected):
        self.assertEqual(len(qs), len(expected))
        for i in range(len(qs)):
            self.assertEqual(qs[i], expected[i])

    def test_ordering(self):
        """Ensure default ordering is applied and can be overridden.
        """
@@ -965,10 +975,10 @@ class QuerySetTest(unittest.TestCase):

        BlogPost.drop_collection()

        blog_post_1 = BlogPost(title="Blog Post #1",
                               published_date=datetime(2010, 1, 5, 0, 0 ,0))
        blog_post_2 = BlogPost(title="Blog Post #2",
                               published_date=datetime(2010, 1, 6, 0, 0 ,0))
        blog_post_1 = BlogPost(title="Blog Post #1",
                               published_date=datetime(2010, 1, 5, 0, 0 ,0))
        blog_post_3 = BlogPost(title="Blog Post #3",
                               published_date=datetime(2010, 1, 7, 0, 0 ,0))

@@ -978,14 +988,13 @@ class QuerySetTest(unittest.TestCase):

        # get the "first" BlogPost using default ordering
        # from BlogPost.meta.ordering
        latest_post = BlogPost.objects.first()
        self.assertEqual(latest_post.title, "Blog Post #3")
        expected = [blog_post_3, blog_post_2, blog_post_1]
        self.assertSequence(BlogPost.objects.all(), expected)

        # override default ordering, order BlogPosts by "published_date"
        first_post = BlogPost.objects.order_by("+published_date").first()
        self.assertEqual(first_post.title, "Blog Post #1")

        BlogPost.drop_collection()
        qs = BlogPost.objects.order_by("+published_date")
        expected = [blog_post_1, blog_post_2, blog_post_3]
        self.assertSequence(qs, expected)

    def test_only(self):
        """Ensure that QuerySet.only only returns the requested fields.
@@ -1921,8 +1930,8 @@ class QuerySetTest(unittest.TestCase):
    def test_order_by(self):
        """Ensure that QuerySets may be ordered.
        """
        self.Person(name="User A", age=20).save()
        self.Person(name="User B", age=40).save()
        self.Person(name="User A", age=20).save()
        self.Person(name="User C", age=30).save()

        names = [p.name for p in self.Person.objects.order_by('-age')]
@@ -1937,11 +1946,67 @@ class QuerySetTest(unittest.TestCase):
        ages = [p.age for p in self.Person.objects.order_by('-name')]
        self.assertEqual(ages, [30, 40, 20])

    def test_order_by_optional(self):
        class BlogPost(Document):
            title = StringField()
            published_date = DateTimeField(required=False)

        BlogPost.drop_collection()

        blog_post_3 = BlogPost(title="Blog Post #3",
                               published_date=datetime(2010, 1, 6, 0, 0 ,0))
        blog_post_2 = BlogPost(title="Blog Post #2",
                               published_date=datetime(2010, 1, 5, 0, 0 ,0))
        blog_post_4 = BlogPost(title="Blog Post #4",
                               published_date=datetime(2010, 1, 7, 0, 0 ,0))
        blog_post_1 = BlogPost(title="Blog Post #1", published_date=None)

        blog_post_3.save()
        blog_post_1.save()
        blog_post_4.save()
        blog_post_2.save()

        expected = [blog_post_1, blog_post_2, blog_post_3, blog_post_4]
        self.assertSequence(BlogPost.objects.order_by('published_date'),
                            expected)
        self.assertSequence(BlogPost.objects.order_by('+published_date'),
                            expected)

        expected.reverse()
        self.assertSequence(BlogPost.objects.order_by('-published_date'),
                            expected)

    def test_order_by_list(self):
        class BlogPost(Document):
            title = StringField()
            published_date = DateTimeField(required=False)

        BlogPost.drop_collection()

        blog_post_1 = BlogPost(title="A",
                               published_date=datetime(2010, 1, 6, 0, 0 ,0))
        blog_post_2 = BlogPost(title="B",
                               published_date=datetime(2010, 1, 6, 0, 0 ,0))
        blog_post_3 = BlogPost(title="C",
                               published_date=datetime(2010, 1, 7, 0, 0 ,0))

        blog_post_2.save()
        blog_post_3.save()
        blog_post_1.save()

        qs = BlogPost.objects.order_by('published_date', 'title')
        expected = [blog_post_1, blog_post_2, blog_post_3]
        self.assertSequence(qs, expected)

        qs = BlogPost.objects.order_by('-published_date', '-title')
        expected.reverse()
        self.assertSequence(qs, expected)

    def test_order_by_chaining(self):
        """Ensure that an order_by query chains properly and allows .only()
        """
        self.Person(name="User A", age=20).save()
        self.Person(name="User B", age=40).save()
        self.Person(name="User A", age=20).save()
        self.Person(name="User C", age=30).save()

        only_age = self.Person.objects.order_by('-age').only('age')
@@ -1953,6 +2018,21 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(names, [None, None, None])
        self.assertEqual(ages, [40, 30, 20])

        qs = self.Person.objects.all().order_by('-age')
        qs = qs.limit(10)
        ages = [p.age for p in qs]
        self.assertEqual(ages, [40, 30, 20])

        qs = self.Person.objects.all().limit(10)
        qs = qs.order_by('-age')
        ages = [p.age for p in qs]
        self.assertEqual(ages, [40, 30, 20])

        qs = self.Person.objects.all().skip(0)
        qs = qs.order_by('-age')
        ages = [p.age for p in qs]
        self.assertEqual(ages, [40, 30, 20])

    def test_confirm_order_by_reference_wont_work(self):
        """Ordering by reference is not possible.  Use map / reduce.. or
        denormalise"""
@@ -2485,6 +2565,7 @@ class QuerySetTest(unittest.TestCase):
        """Ensure that distinct resolves field name to db_field as expected.
        """
        class Product(Document):
<<<<<<< HEAD
            product_id=IntField(db_field='pid')

        Product.drop_collection()
@@ -2495,6 +2576,20 @@ class QuerySetTest(unittest.TestCase):

        self.assertEqual(set(Product.objects.distinct('product_id')),
                         set([1, 2]))
=======
            product_id = IntField(db_field='pid')

        Product.drop_collection()

        Product(product_id=1).save()
        Product(product_id=2).save()
        Product(product_id=1).save()

        self.assertEqual(set(Product.objects.distinct('product_id')),
                         set([1, 2]))
        self.assertEqual(set(Product.objects.distinct('pid')),
                         set([1, 2]))
>>>>>>> upstream/master

        Product.drop_collection()
