Compare commits
	
		
			48 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 233b13d670 | ||
|  | 5bcbb4fdaa | ||
|  | dbe2f5f2b8 | ||
|  | ca8b58d66d | ||
|  | f80f0b416f | ||
|  | d7765511ee | ||
|  | 0240a09056 | ||
|  | ab15c4eec9 | ||
|  | 4ce1ba81a6 | ||
|  | 530440b333 | ||
|  | b80fda36af | ||
|  | 42d24263ef | ||
|  | 1e2797e7ce | ||
|  | f7075766fc | ||
|  | 5647ca70bb | ||
|  | 2b8aa6bafc | ||
|  | 410443471c | ||
|  | 0bb9781b91 | ||
|  | 2769d6d7ca | ||
|  | 120b9433c2 | ||
|  | 605092bd88 | ||
|  | a4a8c94374 | ||
|  | 0e93f6c0db | ||
|  | aa2add39ad | ||
|  | a928047147 | ||
|  | c474ca0f13 | ||
|  | 88dc64653e | ||
|  | 5f4b70f3a9 | ||
|  | 51b429e5b0 | ||
|  | 360624eb6e | ||
|  | d9d2291837 | ||
|  | cbdf816232 | ||
|  | 2d71eb8a18 | ||
|  | 64d2532ce9 | ||
|  | 0376910f33 | ||
|  | 6d503119a1 | ||
|  | bfae93e57e | ||
|  | 49a66ba81a | ||
|  | a1d43fecd9 | ||
|  | d0e42a4798 | ||
|  | 46e088d379 | ||
|  | 1bf9f28f4b | ||
|  | 8cfe13ad90 | ||
|  | 7e376b40bb | ||
|  | 3ec9dfc108 | ||
|  | 540a0cc59c | ||
|  | 83eb4f6b16 | ||
|  | 95c58bd793 | 
							
								
								
									
										3
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										3
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -100,3 +100,6 @@ that much better: | |||||||
|  * Jacob Peddicord |  * Jacob Peddicord | ||||||
|  * Nils Hasenbanck |  * Nils Hasenbanck | ||||||
|  * mostlystatic |  * mostlystatic | ||||||
|  |  * Greg Banks | ||||||
|  |  * swashbuckler | ||||||
|  |  * Adam Reeve | ||||||
| @@ -31,6 +31,9 @@ Documents | |||||||
| .. autoclass:: mongoengine.document.MapReduceDocument | .. autoclass:: mongoengine.document.MapReduceDocument | ||||||
|   :members: |   :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.ValidationError | ||||||
|  |   :members: | ||||||
|  |  | ||||||
| Querying | Querying | ||||||
| ======== | ======== | ||||||
|  |  | ||||||
|   | |||||||
| @@ -2,11 +2,34 @@ | |||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
| Changes in 0.6.X | Changes in 0.6.7 | ||||||
|  | ================ | ||||||
|  | - Fixed indexing on '_id' or 'pk' or 'id' | ||||||
|  | - Invalid data from the DB now raises an InvalidDocumentError | ||||||
|  | - Cleaned up the Validation Error - docs and code | ||||||
|  | - Added meta `auto_create_index` so you can disable index creation | ||||||
|  | - Added write concern options to inserts | ||||||
|  | - Fixed typo in meta for index options | ||||||
|  | - Bug fix Read preference now passed correctly | ||||||
|  | - Added support for File like objects for GridFS | ||||||
|  | - Fix for #473 - Dereferencing abstracts | ||||||
|  |  | ||||||
|  | Changes in 0.6.6 | ||||||
|  | ================ | ||||||
|  | - Django 1.4 fixed (finally) | ||||||
|  | - Added tests for Django | ||||||
|  |  | ||||||
|  | Changes in 0.6.5 | ||||||
|  | ================ | ||||||
|  | - More Django updates | ||||||
|  |  | ||||||
|  | Changes in 0.6.4 | ||||||
| ================ | ================ | ||||||
|  |  | ||||||
| - updated replicasetconnection - pop port if exists | - Refactored connection / fixed replicasetconnection | ||||||
| - bug fix for unknown connection alias error message | - Bug fix for unknown connection alias error message | ||||||
|  | - Sessions support Django 1.3 and Django 1.4 | ||||||
|  | - Minor fix for ReferenceField | ||||||
|  |  | ||||||
| Changes in 0.6.3 | Changes in 0.6.3 | ||||||
| ================ | ================ | ||||||
|   | |||||||
| @@ -98,7 +98,7 @@ arguments can be set on all fields: | |||||||
|  |  | ||||||
| :attr:`required` (Default: False) | :attr:`required` (Default: False) | ||||||
|     If set to True and the field is not set on the document instance, a |     If set to True and the field is not set on the document instance, a | ||||||
|     :class:`~mongoengine.base.ValidationError` will be raised when the document is |     :class:`~mongoengine.ValidationError` will be raised when the document is | ||||||
|     validated. |     validated. | ||||||
|  |  | ||||||
| :attr:`default` (Default: None) | :attr:`default` (Default: None) | ||||||
|   | |||||||
| @@ -91,5 +91,5 @@ is an alias to :attr:`id`:: | |||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    If you define your own primary key field, the field implicitly becomes |    If you define your own primary key field, the field implicitly becomes | ||||||
|    required, so a :class:`ValidationError` will be thrown if you don't provide |    required, so a :class:`~mongoengine.ValidationError` will be thrown if | ||||||
|    it. |    you don't provide it. | ||||||
|   | |||||||
| @@ -12,7 +12,7 @@ from signals import * | |||||||
| __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | ||||||
|            queryset.__all__ + signals.__all__) |            queryset.__all__ + signals.__all__) | ||||||
|  |  | ||||||
| VERSION = (0, 6, 3) | VERSION = (0, 6, 7) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
|   | |||||||
| @@ -25,7 +25,15 @@ class InvalidDocumentError(Exception): | |||||||
|  |  | ||||||
| class ValidationError(AssertionError): | class ValidationError(AssertionError): | ||||||
|     """Validation exception. |     """Validation exception. | ||||||
|  |  | ||||||
|  |     May represent an error validating a field or a | ||||||
|  |     document containing fields with validation errors. | ||||||
|  |  | ||||||
|  |     :ivar errors: A dictionary of errors for fields within this | ||||||
|  |         document or list, or None if the error is for an | ||||||
|  |         individual field. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     errors = {} |     errors = {} | ||||||
|     field_name = None |     field_name = None | ||||||
|     _message = None |     _message = None | ||||||
| @@ -43,9 +51,11 @@ class ValidationError(AssertionError): | |||||||
|  |  | ||||||
|     def __getattribute__(self, name): |     def __getattribute__(self, name): | ||||||
|         message = super(ValidationError, self).__getattribute__(name) |         message = super(ValidationError, self).__getattribute__(name) | ||||||
|         if name == 'message' and self.field_name: |         if name == 'message': | ||||||
|             return message + ' ("%s")' % self.field_name |             if self.field_name: | ||||||
|         else: |                 message = '%s ("%s")' % (message, self.field_name) | ||||||
|  |             if self.errors: | ||||||
|  |                 message = '%s:\n%s' % (message, self._format_errors()) | ||||||
|         return message |         return message | ||||||
|  |  | ||||||
|     def _get_message(self): |     def _get_message(self): | ||||||
| @@ -57,6 +67,13 @@ class ValidationError(AssertionError): | |||||||
|     message = property(_get_message, _set_message) |     message = property(_get_message, _set_message) | ||||||
|  |  | ||||||
|     def to_dict(self): |     def to_dict(self): | ||||||
|  |         """Returns a dictionary of all errors within a document | ||||||
|  |  | ||||||
|  |         Keys are field names or list indices and values are the | ||||||
|  |         validation error messages, or a nested dictionary of | ||||||
|  |         errors for an embedded document or list. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|         def build_dict(source): |         def build_dict(source): | ||||||
|             errors_dict = {} |             errors_dict = {} | ||||||
|             if not source: |             if not source: | ||||||
| @@ -73,6 +90,21 @@ class ValidationError(AssertionError): | |||||||
|             return {} |             return {} | ||||||
|         return build_dict(self.errors) |         return build_dict(self.errors) | ||||||
|  |  | ||||||
|  |     def _format_errors(self): | ||||||
|  |         """Returns a string listing all errors within a document""" | ||||||
|  |  | ||||||
|  |         def format_error(field, value, prefix=''): | ||||||
|  |             prefix = "%s.%s" % (prefix, field) if prefix else "%s" % field | ||||||
|  |             if isinstance(value, dict): | ||||||
|  |  | ||||||
|  |                 return '\n'.join( | ||||||
|  |                         [format_error(k, value[k], prefix) for k in value]) | ||||||
|  |             else: | ||||||
|  |                 return "%s: %s" % (prefix, value) | ||||||
|  |  | ||||||
|  |         return '\n'.join( | ||||||
|  |                 [format_error(k, v) for k, v in self.to_dict().items()]) | ||||||
|  |  | ||||||
|  |  | ||||||
| _document_registry = {} | _document_registry = {} | ||||||
|  |  | ||||||
| @@ -947,8 +979,8 @@ class BaseDocument(object): | |||||||
|         """ |         """ | ||||||
|         # get the class name from the document, falling back to the given |         # get the class name from the document, falling back to the given | ||||||
|         # class if unavailable |         # class if unavailable | ||||||
|         class_name = son.get(u'_cls', cls._class_name) |         class_name = son.get('_cls', cls._class_name) | ||||||
|         data = dict((str(key), value) for key, value in son.items()) |         data = dict(("%s" % key, value) for key, value in son.items()) | ||||||
|  |  | ||||||
|         if '_types' in data: |         if '_types' in data: | ||||||
|             del data['_types'] |             del data['_types'] | ||||||
| @@ -961,11 +993,16 @@ class BaseDocument(object): | |||||||
|             cls = get_document(class_name) |             cls = get_document(class_name) | ||||||
|  |  | ||||||
|         changed_fields = [] |         changed_fields = [] | ||||||
|  |         errors_dict = {} | ||||||
|  |  | ||||||
|         for field_name, field in cls._fields.items(): |         for field_name, field in cls._fields.items(): | ||||||
|             if field.db_field in data: |             if field.db_field in data: | ||||||
|                 value = data[field.db_field] |                 value = data[field.db_field] | ||||||
|  |                 try: | ||||||
|                     data[field_name] = (value if value is None |                     data[field_name] = (value if value is None | ||||||
|                                     else field.to_python(value)) |                                     else field.to_python(value)) | ||||||
|  |                 except (AttributeError, ValueError), e: | ||||||
|  |                     errors_dict[field_name] = e | ||||||
|             elif field.default: |             elif field.default: | ||||||
|                 default = field.default |                 default = field.default | ||||||
|                 if callable(default): |                 if callable(default): | ||||||
| @@ -973,7 +1010,13 @@ class BaseDocument(object): | |||||||
|                 if isinstance(default, BaseDocument): |                 if isinstance(default, BaseDocument): | ||||||
|                     changed_fields.append(field_name) |                     changed_fields.append(field_name) | ||||||
|  |  | ||||||
|  |         if errors_dict: | ||||||
|  |             errors = "\n".join(["%s - %s" % (k, v) for k, v in errors_dict.items()]) | ||||||
|  |             raise InvalidDocumentError(""" | ||||||
|  | Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, errors)) | ||||||
|  |  | ||||||
|         obj = cls(**data) |         obj = cls(**data) | ||||||
|  |  | ||||||
|         obj._changed_fields = changed_fields |         obj._changed_fields = changed_fields | ||||||
|         obj._created = False |         obj._created = False | ||||||
|         return obj |         return obj | ||||||
|   | |||||||
| @@ -39,22 +39,7 @@ def register_connection(alias, name, host='localhost', port=27017, | |||||||
|     """ |     """ | ||||||
|     global _connection_settings |     global _connection_settings | ||||||
|  |  | ||||||
|     # Handle uri style connections |     conn_settings = { | ||||||
|     if "://" in host: |  | ||||||
|         uri_dict = uri_parser.parse_uri(host) |  | ||||||
|         if uri_dict.get('database') is None: |  | ||||||
|             raise ConnectionError("If using URI style connection include "\ |  | ||||||
|                                   "database name in string") |  | ||||||
|         _connection_settings[alias] = { |  | ||||||
|             'host': host, |  | ||||||
|             'name': uri_dict.get('database'), |  | ||||||
|             'username': uri_dict.get('username'), |  | ||||||
|             'password': uri_dict.get('password') |  | ||||||
|         } |  | ||||||
|         _connection_settings[alias].update(kwargs) |  | ||||||
|         return |  | ||||||
|  |  | ||||||
|     _connection_settings[alias] = { |  | ||||||
|         'name': name, |         'name': name, | ||||||
|         'host': host, |         'host': host, | ||||||
|         'port': port, |         'port': port, | ||||||
| @@ -64,7 +49,23 @@ def register_connection(alias, name, host='localhost', port=27017, | |||||||
|         'password': password, |         'password': password, | ||||||
|         'read_preference': read_preference |         'read_preference': read_preference | ||||||
|     } |     } | ||||||
|     _connection_settings[alias].update(kwargs) |  | ||||||
|  |     # Handle uri style connections | ||||||
|  |     if "://" in host: | ||||||
|  |         uri_dict = uri_parser.parse_uri(host) | ||||||
|  |         if uri_dict.get('database') is None: | ||||||
|  |             raise ConnectionError("If using URI style connection include "\ | ||||||
|  |                                   "database name in string") | ||||||
|  |         conn_settings.update({ | ||||||
|  |             'host': host, | ||||||
|  |             'name': uri_dict.get('database'), | ||||||
|  |             'username': uri_dict.get('username'), | ||||||
|  |             'password': uri_dict.get('password'), | ||||||
|  |             'read_preference': read_preference, | ||||||
|  |         }) | ||||||
|  |         if "replicaSet" in host: | ||||||
|  |             conn_settings['replicaSet'] = True | ||||||
|  |     _connection_settings[alias] = conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
| def disconnect(alias=DEFAULT_CONNECTION_NAME): | def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||||
| @@ -112,7 +113,11 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | |||||||
|             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) |             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) | ||||||
|             # Discard port since it can't be used on ReplicaSetConnection |             # Discard port since it can't be used on ReplicaSetConnection | ||||||
|             conn_settings.pop('port', None) |             conn_settings.pop('port', None) | ||||||
|  |             # Discard replicaSet if not base string | ||||||
|  |             if not isinstance(conn_settings['replicaSet'], basestring): | ||||||
|  |                 conn_settings.pop('replicaSet', None) | ||||||
|             connection_class = ReplicaSetConnection |             connection_class = ReplicaSetConnection | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             _connections[alias] = connection_class(**conn_settings) |             _connections[alias] = connection_class(**conn_settings) | ||||||
|         except Exception, e: |         except Exception, e: | ||||||
|   | |||||||
| @@ -112,6 +112,10 @@ class DeReference(object): | |||||||
|                     for ref in references: |                     for ref in references: | ||||||
|                         if '_cls' in ref: |                         if '_cls' in ref: | ||||||
|                             doc = get_document(ref["_cls"])._from_son(ref) |                             doc = get_document(ref["_cls"])._from_son(ref) | ||||||
|  |                         elif doc_type is None: | ||||||
|  |                             doc = get_document( | ||||||
|  |                                 ''.join(x.capitalize()  | ||||||
|  |                                         for x in col.split('_')))._from_son(ref) | ||||||
|                         else: |                         else: | ||||||
|                             doc = doc_type._from_son(ref) |                             doc = doc_type._from_son(ref) | ||||||
|                         object_map[doc.id] = doc |                         object_map[doc.id] = doc | ||||||
|   | |||||||
| @@ -1,11 +1,36 @@ | |||||||
|  | import datetime | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from django.utils.encoding import smart_str | from django.utils.encoding import smart_str | ||||||
| from django.contrib.auth.models import AnonymousUser | from django.contrib.auth.models import AnonymousUser | ||||||
| from django.contrib.auth.hashers import check_password, make_password |  | ||||||
| from django.utils.translation import ugettext_lazy as _ | from django.utils.translation import ugettext_lazy as _ | ||||||
|  |  | ||||||
| import datetime | try: | ||||||
|  |     from django.contrib.auth.hashers import check_password, make_password | ||||||
|  | except ImportError: | ||||||
|  |     """Handle older versions of Django""" | ||||||
|  |     from django.utils.hashcompat import md5_constructor, sha_constructor | ||||||
|  |  | ||||||
|  |     def get_hexdigest(algorithm, salt, raw_password): | ||||||
|  |         raw_password, salt = smart_str(raw_password), smart_str(salt) | ||||||
|  |         if algorithm == 'md5': | ||||||
|  |             return md5_constructor(salt + raw_password).hexdigest() | ||||||
|  |         elif algorithm == 'sha1': | ||||||
|  |             return sha_constructor(salt + raw_password).hexdigest() | ||||||
|  |         raise ValueError('Got unknown password algorithm type in password') | ||||||
|  |  | ||||||
|  |     def check_password(raw_password, password): | ||||||
|  |         algo, salt, hash = password.split('$') | ||||||
|  |         return hash == get_hexdigest(algo, salt, raw_password) | ||||||
|  |  | ||||||
|  |     def make_password(raw_password): | ||||||
|  |         from random import random | ||||||
|  |         algo = 'sha1' | ||||||
|  |         salt = get_hexdigest(algo, str(random()), str(random()))[:5] | ||||||
|  |         hash = get_hexdigest(algo, salt, raw_password) | ||||||
|  |         return '%s$%s$%s' % (algo, salt, hash) | ||||||
|  |  | ||||||
|  |  | ||||||
| REDIRECT_FIELD_NAME = 'next' | REDIRECT_FIELD_NAME = 'next' | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,3 +1,6 @@ | |||||||
|  | from datetime import datetime | ||||||
|  |  | ||||||
|  | from django.conf import settings | ||||||
| from django.contrib.sessions.backends.base import SessionBase, CreateError | from django.contrib.sessions.backends.base import SessionBase, CreateError | ||||||
| from django.core.exceptions import SuspiciousOperation | from django.core.exceptions import SuspiciousOperation | ||||||
| from django.utils.encoding import force_unicode | from django.utils.encoding import force_unicode | ||||||
| @@ -6,13 +9,13 @@ from mongoengine.document import Document | |||||||
| from mongoengine import fields | from mongoengine import fields | ||||||
| from mongoengine.queryset import OperationError | from mongoengine.queryset import OperationError | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME | from mongoengine.connection import DEFAULT_CONNECTION_NAME | ||||||
| from django.conf import settings |  | ||||||
| from datetime import datetime |  | ||||||
|  |  | ||||||
| MONGOENGINE_SESSION_DB_ALIAS = getattr( | MONGOENGINE_SESSION_DB_ALIAS = getattr( | ||||||
|     settings, 'MONGOENGINE_SESSION_DB_ALIAS', |     settings, 'MONGOENGINE_SESSION_DB_ALIAS', | ||||||
|     DEFAULT_CONNECTION_NAME) |     DEFAULT_CONNECTION_NAME) | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoSession(Document): | class MongoSession(Document): | ||||||
|     session_key = fields.StringField(primary_key=True, max_length=40) |     session_key = fields.StringField(primary_key=True, max_length=40) | ||||||
|     session_data = fields.StringField() |     session_data = fields.StringField() | ||||||
| @@ -51,9 +54,9 @@ class SessionStore(SessionBase): | |||||||
|             return |             return | ||||||
|  |  | ||||||
|     def save(self, must_create=False): |     def save(self, must_create=False): | ||||||
|         if self._session_key is None: |         if self.session_key is None: | ||||||
|             self.create() |             self.create() | ||||||
|         s = MongoSession(session_key=self._session_key) |         s = MongoSession(session_key=self.session_key) | ||||||
|         s.session_data = self.encode(self._get_session(no_load=must_create)) |         s.session_data = self.encode(self._get_session(no_load=must_create)) | ||||||
|         s.expire_date = self.get_expiry_date() |         s.expire_date = self.get_expiry_date() | ||||||
|         try: |         try: | ||||||
|   | |||||||
| @@ -74,6 +74,12 @@ class Document(BaseDocument): | |||||||
|     names. Index direction may be specified by prefixing the field names with |     names. Index direction may be specified by prefixing the field names with | ||||||
|     a **+** or **-** sign. |     a **+** or **-** sign. | ||||||
|  |  | ||||||
|  |     Automatic index creation can be disabled by specifying | ||||||
|  |     :attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to | ||||||
|  |     False then indexes will not be created by MongoEngine.  This is useful in | ||||||
|  |     production systems where index creation is performed as part of a deployment | ||||||
|  |     system. | ||||||
|  |  | ||||||
|     By default, _types will be added to the start of every index (that |     By default, _types will be added to the start of every index (that | ||||||
|     doesn't contain a list) if allow_inheritence is True. This can be |     doesn't contain a list) if allow_inheritence is True. This can be | ||||||
|     disabled by either setting types to False on the specific index or |     disabled by either setting types to False on the specific index or | ||||||
| @@ -147,8 +153,9 @@ class Document(BaseDocument): | |||||||
|                 :meth:`~pymongo.collection.Collection.save` OR |                 :meth:`~pymongo.collection.Collection.save` OR | ||||||
|                 :meth:`~pymongo.collection.Collection.insert` |                 :meth:`~pymongo.collection.Collection.insert` | ||||||
|                 which will be used as options for the resultant ``getLastError`` command. |                 which will be used as options for the resultant ``getLastError`` command. | ||||||
|                 For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers |                 For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will | ||||||
|                 have recorded the write and will force an fsync on each server being written to. |                 wait until at least two servers have recorded the write and will force an | ||||||
|  |                 fsync on each server being written to. | ||||||
|         :param cascade: Sets the flag for cascading saves.  You can set a default by setting |         :param cascade: Sets the flag for cascading saves.  You can set a default by setting | ||||||
|             "cascade" in the document __meta__ |             "cascade" in the document __meta__ | ||||||
|         :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves |         :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves | ||||||
|   | |||||||
| @@ -657,6 +657,9 @@ class ReferenceField(BaseField): | |||||||
|         return super(ReferenceField, self).__get__(instance, owner) |         return super(ReferenceField, self).__get__(instance, owner) | ||||||
|  |  | ||||||
|     def to_mongo(self, document): |     def to_mongo(self, document): | ||||||
|  |         if isinstance(document, DBRef): | ||||||
|  |             return document | ||||||
|  |          | ||||||
|         id_field_name = self.document_type._meta['id_field'] |         id_field_name = self.document_type._meta['id_field'] | ||||||
|         id_field = self.document_type._fields[id_field_name] |         id_field = self.document_type._fields[id_field_name] | ||||||
|  |  | ||||||
| @@ -872,10 +875,14 @@ class GridFSProxy(object): | |||||||
|         self.newfile.writelines(lines) |         self.newfile.writelines(lines) | ||||||
|  |  | ||||||
|     def read(self, size=-1): |     def read(self, size=-1): | ||||||
|         try: |         gridout = self.get() | ||||||
|             return self.get().read(size) |         if gridout is None: | ||||||
|         except: |  | ||||||
|             return None |             return None | ||||||
|  |         else: | ||||||
|  |             try: | ||||||
|  |                 return gridout.read(size) | ||||||
|  |             except: | ||||||
|  |                 return "" | ||||||
|  |  | ||||||
|     def delete(self): |     def delete(self): | ||||||
|         # Delete file from GridFS, FileField still remains |         # Delete file from GridFS, FileField still remains | ||||||
| @@ -932,7 +939,7 @@ class FileField(BaseField): | |||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|         key = self.name |         key = self.name | ||||||
|         if isinstance(value, file) or isinstance(value, str): |         if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str): | ||||||
|             # using "FileField() = file/string" notation |             # using "FileField() = file/string" notation | ||||||
|             grid_file = instance._data.get(self.name) |             grid_file = instance._data.get(self.name) | ||||||
|             # If a file already exists, delete it |             # If a file already exists, delete it | ||||||
|   | |||||||
| @@ -394,61 +394,6 @@ class QuerySet(object): | |||||||
|             unique=index_spec.get('unique', False)) |             unique=index_spec.get('unique', False)) | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def _build_index_spec(cls, doc_cls, spec): |  | ||||||
|         """Build a PyMongo index spec from a MongoEngine index spec. |  | ||||||
|         """ |  | ||||||
|         if isinstance(spec, basestring): |  | ||||||
|             spec = {'fields': [spec]} |  | ||||||
|         if isinstance(spec, (list, tuple)): |  | ||||||
|             spec = {'fields': spec} |  | ||||||
|  |  | ||||||
|         index_list = [] |  | ||||||
|         use_types = doc_cls._meta.get('allow_inheritance', True) |  | ||||||
|         for key in spec['fields']: |  | ||||||
|             # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * |  | ||||||
|             direction = pymongo.ASCENDING |  | ||||||
|             if key.startswith("-"): |  | ||||||
|                 direction = pymongo.DESCENDING |  | ||||||
|             elif key.startswith("*"): |  | ||||||
|                 direction = pymongo.GEO2D |  | ||||||
|             if key.startswith(("+", "-", "*")): |  | ||||||
|                 key = key[1:] |  | ||||||
|  |  | ||||||
|             # Use real field name, do it manually because we need field |  | ||||||
|             # objects for the next part (list field checking) |  | ||||||
|             parts = key.split('.') |  | ||||||
|             fields = QuerySet._lookup_field(doc_cls, parts) |  | ||||||
|             parts = [field.db_field for field in fields] |  | ||||||
|             key = '.'.join(parts) |  | ||||||
|             index_list.append((key, direction)) |  | ||||||
|  |  | ||||||
|             # Check if a list field is being used, don't use _types if it is |  | ||||||
|             if use_types and not all(f._index_with_types for f in fields): |  | ||||||
|                 use_types = False |  | ||||||
|  |  | ||||||
|         # If _types is being used, prepend it to every specified index |  | ||||||
|         index_types = doc_cls._meta.get('index_types', True) |  | ||||||
|         allow_inheritance = doc_cls._meta.get('allow_inheritance') |  | ||||||
|         if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D: |  | ||||||
|             index_list.insert(0, ('_types', 1)) |  | ||||||
|  |  | ||||||
|         spec['fields'] = index_list |  | ||||||
|  |  | ||||||
|         if spec.get('sparse', False) and len(spec['fields']) > 1: |  | ||||||
|             raise ValueError( |  | ||||||
|                 'Sparse indexes can only have one field in them. ' |  | ||||||
|                 'See https://jira.mongodb.org/browse/SERVER-2193') |  | ||||||
|  |  | ||||||
|         return spec |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def _reset_already_indexed(cls, document=None): |  | ||||||
|         """Helper to reset already indexed, can be useful for testing purposes""" |  | ||||||
|         if document: |  | ||||||
|             cls.__already_indexed.discard(document) |  | ||||||
|         cls.__already_indexed.clear() |  | ||||||
|  |  | ||||||
|     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): |     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): | ||||||
|         """Filter the selected documents by calling the |         """Filter the selected documents by calling the | ||||||
|         :class:`~mongoengine.queryset.QuerySet` with a query. |         :class:`~mongoengine.queryset.QuerySet` with a query. | ||||||
| @@ -481,24 +426,15 @@ class QuerySet(object): | |||||||
|         """Returns all documents.""" |         """Returns all documents.""" | ||||||
|         return self.__call__() |         return self.__call__() | ||||||
|  |  | ||||||
|     @property |     def _ensure_indexes(self): | ||||||
|     def _collection(self): |         """Checks the document meta data and ensures all the indexes exist. | ||||||
|         """Property that returns the collection object. This allows us to |  | ||||||
|         perform operations only if the collection is accessed. |         .. note:: You can disable automatic index creation by setting | ||||||
|  |                   `auto_create_index` to False in the documents meta data | ||||||
|         """ |         """ | ||||||
|         if self._document not in QuerySet.__already_indexed: |  | ||||||
|  |  | ||||||
|             # Ensure collection exists |  | ||||||
|             db = self._document._get_db() |  | ||||||
|             if self._collection_obj.name not in db.collection_names(): |  | ||||||
|                 self._document._collection = None |  | ||||||
|                 self._collection_obj = self._document._get_collection() |  | ||||||
|  |  | ||||||
|             QuerySet.__already_indexed.add(self._document) |  | ||||||
|  |  | ||||||
|         background = self._document._meta.get('index_background', False) |         background = self._document._meta.get('index_background', False) | ||||||
|         drop_dups = self._document._meta.get('index_drop_dups', False) |         drop_dups = self._document._meta.get('index_drop_dups', False) | ||||||
|             index_opts = self._document._meta.get('index_options', {}) |         index_opts = self._document._meta.get('index_opts', {}) | ||||||
|         index_types = self._document._meta.get('index_types', True) |         index_types = self._document._meta.get('index_types', True) | ||||||
|  |  | ||||||
|         # determine if an index which we are creating includes |         # determine if an index which we are creating includes | ||||||
| @@ -543,6 +479,82 @@ class QuerySet(object): | |||||||
|             self._collection.ensure_index(index_spec, |             self._collection.ensure_index(index_spec, | ||||||
|                 background=background, **index_opts) |                 background=background, **index_opts) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _build_index_spec(cls, doc_cls, spec): | ||||||
|  |         """Build a PyMongo index spec from a MongoEngine index spec. | ||||||
|  |         """ | ||||||
|  |         if isinstance(spec, basestring): | ||||||
|  |             spec = {'fields': [spec]} | ||||||
|  |         if isinstance(spec, (list, tuple)): | ||||||
|  |             spec = {'fields': spec} | ||||||
|  |  | ||||||
|  |         index_list = [] | ||||||
|  |         use_types = doc_cls._meta.get('allow_inheritance', True) | ||||||
|  |         for key in spec['fields']: | ||||||
|  |             # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * | ||||||
|  |             direction = pymongo.ASCENDING | ||||||
|  |             if key.startswith("-"): | ||||||
|  |                 direction = pymongo.DESCENDING | ||||||
|  |             elif key.startswith("*"): | ||||||
|  |                 direction = pymongo.GEO2D | ||||||
|  |             if key.startswith(("+", "-", "*")): | ||||||
|  |                 key = key[1:] | ||||||
|  |  | ||||||
|  |             # Use real field name, do it manually because we need field | ||||||
|  |             # objects for the next part (list field checking) | ||||||
|  |             parts = key.split('.') | ||||||
|  |             if parts in (['pk'], ['id'], ['_id']): | ||||||
|  |                 key = '_id' | ||||||
|  |             else: | ||||||
|  |                 fields = QuerySet._lookup_field(doc_cls, parts) | ||||||
|  |                 parts = [field if field == '_id' else field.db_field for field in fields] | ||||||
|  |                 key = '.'.join(parts) | ||||||
|  |             index_list.append((key, direction)) | ||||||
|  |  | ||||||
|  |             # Check if a list field is being used, don't use _types if it is | ||||||
|  |             if use_types and not all(f._index_with_types for f in fields): | ||||||
|  |                 use_types = False | ||||||
|  |  | ||||||
|  |         # If _types is being used, prepend it to every specified index | ||||||
|  |         index_types = doc_cls._meta.get('index_types', True) | ||||||
|  |         allow_inheritance = doc_cls._meta.get('allow_inheritance') | ||||||
|  |         if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D: | ||||||
|  |             index_list.insert(0, ('_types', 1)) | ||||||
|  |  | ||||||
|  |         spec['fields'] = index_list | ||||||
|  |         if spec.get('sparse', False) and len(spec['fields']) > 1: | ||||||
|  |             raise ValueError( | ||||||
|  |                 'Sparse indexes can only have one field in them. ' | ||||||
|  |                 'See https://jira.mongodb.org/browse/SERVER-2193') | ||||||
|  |  | ||||||
|  |         return spec | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _reset_already_indexed(cls, document=None): | ||||||
|  |         """Helper to reset already indexed, can be useful for testing purposes""" | ||||||
|  |         if document: | ||||||
|  |             cls.__already_indexed.discard(document) | ||||||
|  |         cls.__already_indexed.clear() | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def _collection(self): | ||||||
|  |         """Property that returns the collection object. This allows us to | ||||||
|  |         perform operations only if the collection is accessed. | ||||||
|  |         """ | ||||||
|  |         if self._document not in QuerySet.__already_indexed: | ||||||
|  |             # Ensure collection exists | ||||||
|  |             db = self._document._get_db() | ||||||
|  |             if self._collection_obj.name not in db.collection_names(): | ||||||
|  |                 self._document._collection = None | ||||||
|  |                 self._collection_obj = self._document._get_collection() | ||||||
|  |  | ||||||
|  |             QuerySet.__already_indexed.add(self._document) | ||||||
|  |  | ||||||
|  |             if self._document._meta.get('auto_create_index', True): | ||||||
|  |                 self._ensure_indexes() | ||||||
|  |  | ||||||
|         return self._collection_obj |         return self._collection_obj | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @@ -824,11 +836,21 @@ class QuerySet(object): | |||||||
|             result = None |             result = None | ||||||
|         return result |         return result | ||||||
|  |  | ||||||
|     def insert(self, doc_or_docs, load_bulk=True): |     def insert(self, doc_or_docs, load_bulk=True, safe=False, write_options=None): | ||||||
|         """bulk insert documents |         """bulk insert documents | ||||||
|  |  | ||||||
|  |         If ``safe=True`` and the operation is unsuccessful, an | ||||||
|  |         :class:`~mongoengine.OperationError` will be raised. | ||||||
|  |  | ||||||
|         :param docs_or_doc: a document or list of documents to be inserted |         :param docs_or_doc: a document or list of documents to be inserted | ||||||
|         :param load_bulk (optional): If True returns the list of document instances |         :param load_bulk (optional): If True returns the list of document instances | ||||||
|  |         :param safe: check if the operation succeeded before returning | ||||||
|  |         :param write_options: Extra keyword arguments are passed down to | ||||||
|  |                 :meth:`~pymongo.collection.Collection.insert` | ||||||
|  |                 which will be used as options for the resultant ``getLastError`` command. | ||||||
|  |                 For example, ``insert(..., {w: 2, fsync: True})`` will wait until at least two | ||||||
|  |                 servers have recorded the write and will force an fsync on each server being | ||||||
|  |                 written to. | ||||||
|  |  | ||||||
|         By default returns document instances, set ``load_bulk`` to False to |         By default returns document instances, set ``load_bulk`` to False to | ||||||
|         return just ``ObjectIds`` |         return just ``ObjectIds`` | ||||||
| @@ -837,6 +859,10 @@ class QuerySet(object): | |||||||
|         """ |         """ | ||||||
|         from document import Document |         from document import Document | ||||||
|  |  | ||||||
|  |         if not write_options: | ||||||
|  |             write_options = {} | ||||||
|  |         write_options.update({'safe': safe}) | ||||||
|  |  | ||||||
|         docs = doc_or_docs |         docs = doc_or_docs | ||||||
|         return_one = False |         return_one = False | ||||||
|         if isinstance(docs, Document) or issubclass(docs.__class__, Document): |         if isinstance(docs, Document) or issubclass(docs.__class__, Document): | ||||||
| @@ -854,7 +880,13 @@ class QuerySet(object): | |||||||
|             raw.append(doc.to_mongo()) |             raw.append(doc.to_mongo()) | ||||||
|  |  | ||||||
|         signals.pre_bulk_insert.send(self._document, documents=docs) |         signals.pre_bulk_insert.send(self._document, documents=docs) | ||||||
|         ids = self._collection.insert(raw) |         try: | ||||||
|  |             ids = self._collection.insert(raw, **write_options) | ||||||
|  |         except pymongo.errors.OperationFailure, err: | ||||||
|  |             message = 'Could not save document (%s)' | ||||||
|  |             if u'duplicate key' in unicode(err): | ||||||
|  |                 message = u'Tried to save duplicate unique keys (%s)' | ||||||
|  |             raise OperationError(message % unicode(err)) | ||||||
|  |  | ||||||
|         if not load_bulk: |         if not load_bulk: | ||||||
|             signals.post_bulk_insert.send( |             signals.post_bulk_insert.send( | ||||||
|   | |||||||
| @@ -5,7 +5,7 @@ | |||||||
| %define srcname mongoengine | %define srcname mongoengine | ||||||
|  |  | ||||||
| Name:           python-%{srcname} | Name:           python-%{srcname} | ||||||
| Version:        0.6.3 | Version:        0.6.7 | ||||||
| Release:        1%{?dist} | Release:        1%{?dist} | ||||||
| Summary:        A Python Document-Object Mapper for working with MongoDB | Summary:        A Python Document-Object Mapper for working with MongoDB | ||||||
|  |  | ||||||
| @@ -51,12 +51,4 @@ rm -rf $RPM_BUILD_ROOT | |||||||
| # %{python_sitearch}/* | # %{python_sitearch}/* | ||||||
|  |  | ||||||
| %changelog | %changelog | ||||||
| * Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6 | * See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html | ||||||
| - 0.6 released |  | ||||||
| * Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1 |  | ||||||
| - Update to latest dev version |  | ||||||
| - Add PIL dependency for ImageField |  | ||||||
| * Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1 |  | ||||||
| - Update version |  | ||||||
| * Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1 |  | ||||||
| - Initial version |  | ||||||
| @@ -810,3 +810,34 @@ class FieldTest(unittest.TestCase): | |||||||
|         room = Room.objects.first().select_related() |         room = Room.objects.first().select_related() | ||||||
|         self.assertEquals(room.staffs_with_position[0]['staff'], sarah) |         self.assertEquals(room.staffs_with_position[0]['staff'], sarah) | ||||||
|         self.assertEquals(room.staffs_with_position[1]['staff'], bob) |         self.assertEquals(room.staffs_with_position[1]['staff'], bob) | ||||||
|  |      | ||||||
|  |     def test_document_reload_no_inheritance(self): | ||||||
|  |         class Foo(Document): | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |             bar = ReferenceField('Bar') | ||||||
|  |             baz = ReferenceField('Baz') | ||||||
|  |  | ||||||
|  |         class Bar(Document): | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |             msg = StringField(required=True, default='Blammo!') | ||||||
|  |  | ||||||
|  |         class Baz(Document): | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |             msg = StringField(required=True, default='Kaboom!') | ||||||
|  |  | ||||||
|  |         Foo.drop_collection() | ||||||
|  |         Bar.drop_collection() | ||||||
|  |         Baz.drop_collection() | ||||||
|  |  | ||||||
|  |         bar = Bar() | ||||||
|  |         bar.save() | ||||||
|  |         baz = Baz() | ||||||
|  |         baz.save() | ||||||
|  |         foo = Foo() | ||||||
|  |         foo.bar = bar | ||||||
|  |         foo.baz = baz | ||||||
|  |         foo.save() | ||||||
|  |         foo.reload() | ||||||
|  |  | ||||||
|  |         self.assertEquals(type(foo.bar), Bar) | ||||||
|  |         self.assertEquals(type(foo.baz), Baz) | ||||||
|   | |||||||
| @@ -12,6 +12,10 @@ from django.core.paginator import Paginator | |||||||
|  |  | ||||||
| settings.configure() | settings.configure() | ||||||
|  |  | ||||||
|  | from django.contrib.sessions.tests import SessionTestsMixin | ||||||
|  | from mongoengine.django.sessions import SessionStore, MongoSession | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetTest(unittest.TestCase): | class QuerySetTest(unittest.TestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
| @@ -88,3 +92,14 @@ class QuerySetTest(unittest.TestCase): | |||||||
|             end = p * 2 |             end = p * 2 | ||||||
|             start = end - 1 |             start = end - 1 | ||||||
|             self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end)) |             self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): | ||||||
|  |     backend = SessionStore | ||||||
|  |  | ||||||
|  |     def setUp(self): | ||||||
|  |         connect(db='mongoenginetest') | ||||||
|  |         MongoSession.drop_collection() | ||||||
|  |         super(MongoDBSessionTest, self).setUp() | ||||||
|  |  | ||||||
|   | |||||||
| @@ -741,6 +741,28 @@ class DocumentTest(unittest.TestCase): | |||||||
|         self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1']) |         self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1']) | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_disable_index_creation(self): | ||||||
|  |         """Tests setting auto_create_index to False on the connection will | ||||||
|  |         disable any index generation. | ||||||
|  |         """ | ||||||
|  |         class User(Document): | ||||||
|  |             meta = { | ||||||
|  |                 'indexes': ['user_guid'], | ||||||
|  |                 'auto_create_index': False | ||||||
|  |             } | ||||||
|  |             user_guid = StringField(required=True) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |  | ||||||
|  |         u = User(user_guid='123') | ||||||
|  |         u.save() | ||||||
|  |  | ||||||
|  |         self.assertEquals(1, User.objects.count()) | ||||||
|  |         info = User.objects._collection.index_information() | ||||||
|  |         self.assertEqual(info.keys(), ['_id_']) | ||||||
|  |         User.drop_collection() | ||||||
|  |  | ||||||
|     def test_embedded_document_index(self): |     def test_embedded_document_index(self): | ||||||
|         """Tests settings an index on an embedded document |         """Tests settings an index on an embedded document | ||||||
|         """ |         """ | ||||||
| @@ -842,6 +864,26 @@ class DocumentTest(unittest.TestCase): | |||||||
|         query_plan = Test.objects(a=1).only('a').exclude('id').explain() |         query_plan = Test.objects(a=1).only('a').exclude('id').explain() | ||||||
|         self.assertTrue(query_plan['indexOnly']) |         self.assertTrue(query_plan['indexOnly']) | ||||||
|  |  | ||||||
|  |     def test_index_on_id(self): | ||||||
|  |  | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             meta = { | ||||||
|  |                 'indexes': [ | ||||||
|  |                     ['categories', 'id'] | ||||||
|  |                 ], | ||||||
|  |                 'allow_inheritance': False | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             title = StringField(required=True) | ||||||
|  |             description = StringField(required=True) | ||||||
|  |             categories = ListField() | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         indexes = BlogPost.objects._collection.index_information() | ||||||
|  |         self.assertEquals(indexes['categories_1__id_1']['key'], | ||||||
|  |                                  [('categories', 1), ('_id', 1)]) | ||||||
|  |  | ||||||
|     def test_hint(self): |     def test_hint(self): | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
| @@ -2376,6 +2418,22 @@ class DocumentTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.assertRaises(InvalidDocumentError, throw_invalid_document_error) |         self.assertRaises(InvalidDocumentError, throw_invalid_document_error) | ||||||
|  |  | ||||||
|  |     def test_invalid_son(self): | ||||||
|  |         """Raise an error if loading invalid data""" | ||||||
|  |         class Occurrence(EmbeddedDocument): | ||||||
|  |             number = IntField() | ||||||
|  |  | ||||||
|  |         class Word(Document): | ||||||
|  |             stem = StringField() | ||||||
|  |             count = IntField(default=1) | ||||||
|  |             forms = ListField(StringField(), default=list) | ||||||
|  |             occurs = ListField(EmbeddedDocumentField(Occurrence), default=list) | ||||||
|  |  | ||||||
|  |         def raise_invalid_document(): | ||||||
|  |             Word._from_son({'stem': [1,2,3], 'forms': 1, 'count': 'one', 'occurs': {"hello": None}}) | ||||||
|  |  | ||||||
|  |         self.assertRaises(InvalidDocumentError, raise_invalid_document) | ||||||
|  |  | ||||||
|     def test_reverse_delete_rule_cascade_and_nullify(self): |     def test_reverse_delete_rule_cascade_and_nullify(self): | ||||||
|         """Ensure that a referenced document is also deleted upon deletion. |         """Ensure that a referenced document is also deleted upon deletion. | ||||||
|         """ |         """ | ||||||
| @@ -2839,5 +2897,62 @@ class DocumentTest(unittest.TestCase): | |||||||
|                                         } |                                         } | ||||||
|                                     ) ]), "1,2") |                                     ) ]), "1,2") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ValidatorErrorTest(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     def test_to_dict(self): | ||||||
|  |         """Ensure a ValidationError handles error to_dict correctly. | ||||||
|  |         """ | ||||||
|  |         error = ValidationError('root') | ||||||
|  |         self.assertEquals(error.to_dict(), {}) | ||||||
|  |  | ||||||
|  |         # 1st level error schema | ||||||
|  |         error.errors = {'1st': ValidationError('bad 1st'), } | ||||||
|  |         self.assertTrue('1st' in error.to_dict()) | ||||||
|  |         self.assertEquals(error.to_dict()['1st'], 'bad 1st') | ||||||
|  |  | ||||||
|  |         # 2nd level error schema | ||||||
|  |         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||||
|  |             '2nd': ValidationError('bad 2nd'), | ||||||
|  |         })} | ||||||
|  |         self.assertTrue('1st' in error.to_dict()) | ||||||
|  |         self.assertTrue(isinstance(error.to_dict()['1st'], dict)) | ||||||
|  |         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||||
|  |         self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd') | ||||||
|  |  | ||||||
|  |         # moar levels | ||||||
|  |         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||||
|  |             '2nd': ValidationError('bad 2nd', errors={ | ||||||
|  |                 '3rd': ValidationError('bad 3rd', errors={ | ||||||
|  |                     '4th': ValidationError('Inception'), | ||||||
|  |                 }), | ||||||
|  |             }), | ||||||
|  |         })} | ||||||
|  |         self.assertTrue('1st' in error.to_dict()) | ||||||
|  |         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||||
|  |         self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) | ||||||
|  |         self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) | ||||||
|  |         self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'], | ||||||
|  |                          'Inception') | ||||||
|  |  | ||||||
|  |         self.assertEquals(error.message, "root:\n1st.2nd.3rd.4th: Inception") | ||||||
|  |  | ||||||
|  |     def test_model_validation(self): | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             username = StringField(primary_key=True) | ||||||
|  |             name = StringField(required=True) | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             User().validate() | ||||||
|  |         except ValidationError, e: | ||||||
|  |             expected_error_message = """Errors encountered validating document: | ||||||
|  | username: Field is required ("username") | ||||||
|  | name: Field is required ("name")""" | ||||||
|  |             self.assertEquals(e.message, expected_error_message) | ||||||
|  |             self.assertEquals(e.to_dict(), { | ||||||
|  |                 'username': 'Field is required ("username")', | ||||||
|  |                 'name': u'Field is required ("name")'}) | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -2,6 +2,7 @@ import datetime | |||||||
| import os | import os | ||||||
| import unittest | import unittest | ||||||
| import uuid | import uuid | ||||||
|  | import StringIO | ||||||
|  |  | ||||||
| from decimal import Decimal | from decimal import Decimal | ||||||
|  |  | ||||||
| @@ -1481,6 +1482,21 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertEquals(result.file.read(), text) |         self.assertEquals(result.file.read(), text) | ||||||
|         self.assertEquals(result.file.content_type, content_type) |         self.assertEquals(result.file.content_type, content_type) | ||||||
|         result.file.delete() # Remove file from GridFS |         result.file.delete() # Remove file from GridFS | ||||||
|  |         PutFile.objects.delete() | ||||||
|  |  | ||||||
|  |         # Ensure file-like objects are stored | ||||||
|  |         putfile = PutFile() | ||||||
|  |         putstring = StringIO.StringIO() | ||||||
|  |         putstring.write(text) | ||||||
|  |         putstring.seek(0) | ||||||
|  |         putfile.file.put(putstring, content_type=content_type) | ||||||
|  |         putfile.save() | ||||||
|  |         putfile.validate() | ||||||
|  |         result = PutFile.objects.first() | ||||||
|  |         self.assertTrue(putfile == result) | ||||||
|  |         self.assertEquals(result.file.read(), text) | ||||||
|  |         self.assertEquals(result.file.content_type, content_type) | ||||||
|  |         result.file.delete() | ||||||
|  |  | ||||||
|         streamfile = StreamFile() |         streamfile = StreamFile() | ||||||
|         streamfile.file.new_file(content_type=content_type) |         streamfile.file.new_file(content_type=content_type) | ||||||
| @@ -1886,43 +1902,5 @@ class FieldTest(unittest.TestCase): | |||||||
|         post.validate() |         post.validate() | ||||||
|  |  | ||||||
|  |  | ||||||
| class ValidatorErrorTest(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def test_to_dict(self): |  | ||||||
|         """Ensure a ValidationError handles error to_dict correctly. |  | ||||||
|         """ |  | ||||||
|         error = ValidationError('root') |  | ||||||
|         self.assertEquals(error.to_dict(), {}) |  | ||||||
|  |  | ||||||
|         # 1st level error schema |  | ||||||
|         error.errors = {'1st': ValidationError('bad 1st'), } |  | ||||||
|         self.assertTrue('1st' in error.to_dict()) |  | ||||||
|         self.assertEquals(error.to_dict()['1st'], 'bad 1st') |  | ||||||
|  |  | ||||||
|         # 2nd level error schema |  | ||||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ |  | ||||||
|             '2nd': ValidationError('bad 2nd'), |  | ||||||
|         })} |  | ||||||
|         self.assertTrue('1st' in error.to_dict()) |  | ||||||
|         self.assertTrue(isinstance(error.to_dict()['1st'], dict)) |  | ||||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) |  | ||||||
|         self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd') |  | ||||||
|  |  | ||||||
|         # moar levels |  | ||||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ |  | ||||||
|             '2nd': ValidationError('bad 2nd', errors={ |  | ||||||
|                 '3rd': ValidationError('bad 3rd', errors={ |  | ||||||
|                     '4th': ValidationError('Inception'), |  | ||||||
|                 }), |  | ||||||
|             }), |  | ||||||
|         })} |  | ||||||
|         self.assertTrue('1st' in error.to_dict()) |  | ||||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) |  | ||||||
|         self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) |  | ||||||
|         self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) |  | ||||||
|         self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'], |  | ||||||
|                           'Inception') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -480,7 +480,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.assertEqual(person.name, "User C") |         self.assertEqual(person.name, "User C") | ||||||
|  |  | ||||||
|     def test_bulk_insert(self): |     def test_bulk_insert(self): | ||||||
|         """Ensure that query by array position works. |         """Ensure that bulk insert works | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Comment(EmbeddedDocument): |         class Comment(EmbeddedDocument): | ||||||
| @@ -490,7 +490,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|             comments = ListField(EmbeddedDocumentField(Comment)) |             comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
|         class Blog(Document): |         class Blog(Document): | ||||||
|             title = StringField() |             title = StringField(unique=True) | ||||||
|             tags = ListField(StringField()) |             tags = ListField(StringField()) | ||||||
|             posts = ListField(EmbeddedDocumentField(Post)) |             posts = ListField(EmbeddedDocumentField(Post)) | ||||||
|  |  | ||||||
| @@ -563,6 +563,23 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         obj_id = Blog.objects.insert(blog1, load_bulk=False) |         obj_id = Blog.objects.insert(blog1, load_bulk=False) | ||||||
|         self.assertEquals(obj_id.__class__.__name__, 'ObjectId') |         self.assertEquals(obj_id.__class__.__name__, 'ObjectId') | ||||||
|  |  | ||||||
|  |         Blog.drop_collection() | ||||||
|  |         post3 = Post(comments=[comment1, comment1]) | ||||||
|  |         blog1 = Blog(title="foo", posts=[post1, post2]) | ||||||
|  |         blog2 = Blog(title="bar", posts=[post2, post3]) | ||||||
|  |         blog3 = Blog(title="baz", posts=[post1, post2]) | ||||||
|  |         Blog.objects.insert([blog1, blog2]) | ||||||
|  |  | ||||||
|  |         def throw_operation_error_not_unique(): | ||||||
|  |             Blog.objects.insert([blog2, blog3], safe=True) | ||||||
|  |          | ||||||
|  |         self.assertRaises(OperationError, throw_operation_error_not_unique) | ||||||
|  |         self.assertEqual(Blog.objects.count(), 2) | ||||||
|  |  | ||||||
|  |         Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True}) | ||||||
|  |         self.assertEqual(Blog.objects.count(), 3) | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_slave_okay(self): |     def test_slave_okay(self): | ||||||
|         """Ensures that a query can take slave_okay syntax |         """Ensures that a query can take slave_okay syntax | ||||||
|         """ |         """ | ||||||
|   | |||||||
							
								
								
									
										32
									
								
								tests/replicaset_connection.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								tests/replicaset_connection.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | |||||||
|  | import unittest | ||||||
|  | import pymongo | ||||||
|  | from pymongo import ReadPreference, ReplicaSetConnection | ||||||
|  |  | ||||||
|  | import mongoengine | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db, get_connection, ConnectionError | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ConnectionTest(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     def tearDown(self): | ||||||
|  |         mongoengine.connection._connection_settings = {} | ||||||
|  |         mongoengine.connection._connections = {} | ||||||
|  |         mongoengine.connection._dbs = {} | ||||||
|  |  | ||||||
|  |     def test_replicaset_uri_passes_read_preference(self): | ||||||
|  |         """Requires a replica set called "rs" on port 27017 | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY) | ||||||
|  |         except ConnectionError, e: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         if not isinstance(conn, ReplicaSetConnection): | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY) | ||||||
|  |  | ||||||
|  | if __name__ == '__main__': | ||||||
|  |     unittest.main() | ||||||
		Reference in New Issue
	
	Block a user